gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright (C) 2014-2017 Xavier Witdouck
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zavtech.morpheus.array.dense;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.function.Predicate;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import com.zavtech.morpheus.array.Array;
import com.zavtech.morpheus.array.ArrayBase;
import com.zavtech.morpheus.array.ArrayBuilder;
import com.zavtech.morpheus.array.ArrayCursor;
import com.zavtech.morpheus.array.ArrayException;
import com.zavtech.morpheus.array.ArrayStyle;
import com.zavtech.morpheus.array.ArrayValue;
import com.zavtech.morpheus.array.coding.IntCoding;
import com.zavtech.morpheus.array.coding.WithIntCoding;
/**
* A dense array implementation that maintains a primitive int array of codes that apply to Object values exposed through the IntCoding interface.
*
* @author Xavier Witdouck
*
* <p><strong>This is open source software released under the <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a></strong></p>
*/
class DenseArrayWithIntCoding<T> extends ArrayBase<T> implements WithIntCoding<T> {

    private static final long serialVersionUID = 1L;

    /** The primitive int codes that back this array */
    private int[] codes;
    /** The default value exposed to callers */
    private T defaultValue;
    /** The code of the default value, cached to avoid repeated coding lookups */
    private int defaultCode;
    /** The coding that maps between int codes and values of type T */
    private IntCoding<T> coding;

    /**
     * Constructor
     * @param length the length for this array
     * @param defaultValue the default value for array
     * @param coding the coding for this array
     */
    DenseArrayWithIntCoding(int length, T defaultValue, IntCoding<T> coding) {
        super(coding.getType(), ArrayStyle.DENSE, false);
        this.coding = coding;
        this.codes = new int[length];
        this.defaultValue = defaultValue;
        this.defaultCode = coding.getCode(defaultValue);
        Arrays.fill(codes, defaultCode);
    }

    /**
     * Constructor
     * @param source the source array to copy
     * @param parallel true for the parallel version
     */
    private DenseArrayWithIntCoding(DenseArrayWithIntCoding<T> source, boolean parallel) {
        super(source.type(), ArrayStyle.DENSE, parallel);
        // shares the underlying code array with the source; only the parallel flag differs
        this.coding = source.coding;
        this.codes = source.codes;
        this.defaultValue = source.defaultValue;
        this.defaultCode = source.defaultCode;
    }

    @Override
    public final IntCoding<T> getCoding() {
        return coding;
    }

    @Override
    public final int length() {
        return codes.length;
    }

    @Override
    public float loadFactor() {
        // dense arrays are always fully populated
        return 1F;
    }

    @Override
    public final T defaultValue() {
        return defaultValue;
    }

    @Override
    public final Array<T> parallel() {
        return isParallel() ? this : new DenseArrayWithIntCoding<>(this, true);
    }

    @Override
    public final Array<T> sequential() {
        return isParallel() ? new DenseArrayWithIntCoding<>(this, false) : this;
    }

    @Override()
    @SuppressWarnings("unchecked")
    public final Array<T> copy() {
        try {
            final DenseArrayWithIntCoding<T> copy = (DenseArrayWithIntCoding<T>)super.clone();
            copy.defaultValue = this.defaultValue;
            copy.defaultCode = this.defaultCode;
            copy.coding = this.coding;
            // deep copy of the code array so the clone is independent of this array
            copy.codes = this.codes.clone();
            return copy;
        } catch (Exception ex) {
            throw new ArrayException("Failed to copy Array: " + this, ex);
        }
    }

    @Override()
    public final Array<T> copy(int[] indexes) {
        final DenseArrayWithIntCoding<T> clone = new DenseArrayWithIntCoding<>(indexes.length, defaultValue, coding);
        for (int i = 0; i < indexes.length; ++i) {
            clone.codes[i] = this.codes[indexes[i]];
        }
        return clone;
    }

    @Override()
    public final Array<T> copy(int start, int end) {
        final int length = end - start;
        final DenseArrayWithIntCoding<T> clone = new DenseArrayWithIntCoding<>(length, defaultValue, coding);
        System.arraycopy(codes, start, clone.codes, 0, length);
        return clone;
    }

    @Override
    protected final Array<T> sort(int start, int end, int multiplier) {
        // multiplier of -1 reverses sort order
        return doSort(start, end, (i, j) -> multiplier * Integer.compare(codes[i], codes[j]));
    }

    @Override
    public final int compare(int i, int j) {
        return Integer.compare(codes[i], codes[j]);
    }

    @Override
    public final Array<T> swap(int i, int j) {
        final int v1 = codes[i];
        final int v2 = codes[j];
        this.codes[i] = v2;
        this.codes[j] = v1;
        return this;
    }

    @Override
    public final Array<T> filter(Predicate<ArrayValue<T>> predicate) {
        final ArrayCursor<T> cursor = cursor();
        final ArrayBuilder<T> builder = ArrayBuilder.of(length(), type());
        for (int i = 0; i < length(); ++i) {
            cursor.moveTo(i);
            if (predicate.test(cursor)) {
                builder.add(cursor.getValue());
            }
        }
        return builder.toArray();
    }

    @Override
    public final Array<T> update(Array<T> from, int[] fromIndexes, int[] toIndexes) {
        if (fromIndexes.length != toIndexes.length) {
            throw new ArrayException("The from index array must have the same length as the to index array");
        } else {
            for (int i = 0; i < fromIndexes.length; ++i) {
                final int toIndex = toIndexes[i];
                final int fromIndex = fromIndexes[i];
                final T update = from.getValue(fromIndex);
                this.setValue(toIndex, update);
            }
        }
        return this;
    }

    @Override
    public final Array<T> update(int toIndex, Array<T> from, int fromIndex, int length) {
        if (from instanceof DenseArrayWithIntCoding) {
            // NOTE(review): the raw code transfer assumes the source array shares this
            // array's coding; it would be wrong for a different coding -- confirm with callers.
            final DenseArrayWithIntCoding<?> other = (DenseArrayWithIntCoding<?>) from;
            System.arraycopy(other.codes, fromIndex, this.codes, toIndex, length);
        } else if (from instanceof DenseArrayOfInts) {
            for (int i = 0; i < length; ++i) {
                this.codes[toIndex + i] = from.getInt(fromIndex + i);
            }
        } else {
            // generic path: re-encode each value through this array's coding
            for (int i = 0; i < length; ++i) {
                final T update = from.getValue(fromIndex + i);
                this.setValue(toIndex + i, update);
            }
        }
        return this;
    }

    @Override
    public final Array<T> expand(int newLength) {
        // no-op when the array is already at least newLength long
        if (newLength > codes.length) {
            final int[] newCodes = new int[newLength];
            System.arraycopy(codes, 0, newCodes, 0, codes.length);
            Arrays.fill(newCodes, codes.length, newCodes.length, defaultCode);
            this.codes = newCodes;
        }
        return this;
    }

    @Override
    public Array<T> fill(T value, int start, int end) {
        final int code = coding.getCode(value);
        Arrays.fill(codes, start, end, code);
        return this;
    }

    @Override
    public final boolean isNull(int index) {
        return codes[index] == coding.getCode(null);
    }

    @Override
    public final boolean isEqualTo(int index, T value) {
        if (value == null) {
            return isNull(index);
        } else {
            final int code = coding.getCode(value);
            return code == codes[index];
        }
    }

    @Override
    public int getInt(int index) {
        // exposes the raw code rather than the decoded value
        return codes[index];
    }

    @Override
    public final T getValue(int index) {
        return coding.getValue(codes[index]);
    }

    @Override
    public final T setValue(int index, T value) {
        final T oldValue = getValue(index);
        this.codes[index] = coding.getCode(value);
        return oldValue;
    }

    @Override
    public Array<T> distinct(int limit) {
        final int capacity = limit < Integer.MAX_VALUE ? limit : 100;
        final TIntSet set = new TIntHashSet(capacity);
        final ArrayBuilder<T> builder = ArrayBuilder.of(capacity, type());
        for (int i = 0; i < length(); ++i) {
            final int code = getInt(i);
            if (set.add(code)) {
                final T value = getValue(i);
                builder.add(value);
                if (set.size() >= limit) {
                    break;
                }
            }
        }
        return builder.toArray();
    }

    @Override
    public final void read(ObjectInputStream is, int count) throws IOException {
        for (int i = 0; i < count; ++i) {
            this.codes[i] = is.readInt();
        }
    }

    @Override
    public final void write(ObjectOutputStream os, int[] indexes) throws IOException {
        for (int index : indexes) {
            os.writeInt(codes[index]);
        }
    }

    /** Custom serialization */
    private void writeObject(ObjectOutputStream os) throws IOException {
        // NOTE(review): defaultValue/defaultCode are not written here and are not
        // restored in readObject, so they are lost across serialization -- confirm
        // whether the stream format can be extended without breaking compatibility.
        os.writeObject(coding);
        os.writeInt(codes.length);
        for (int value : codes) {
            os.writeInt(value);
        }
    }

    @SuppressWarnings("unchecked")
    /** Custom serialization */
    private void readObject(ObjectInputStream is) throws IOException, ClassNotFoundException {
        this.coding = (IntCoding<T>)is.readObject();
        final int length = is.readInt();
        this.codes = new int[length];
        for (int i = 0; i < length; ++i) {
            codes[i] = is.readInt();
        }
    }
}
| |
/*
* Copyright 2012 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfo.Visibility;
import com.google.javascript.rhino.JSDocInfoBuilder;
import com.google.javascript.rhino.Node;
import java.util.ArrayList;
import java.util.List;
/**
* Compiler pass for AngularJS-specific needs. Generates {@code $inject}
* properties for functions (class constructors, wrappers, etc) annotated with
* @ngInject. Without this pass, AngularJS will not work properly if variable
* renaming is enabled, because the function arguments will be renamed.
* @see http://docs.angularjs.org/tutorial/step_05#a-note-on-minification
*
* <p>For example, the following code:</p>
* <pre><code>
*
* /** @ngInject * /
* function Controller(dependency1, dependency2) {
* // do something
* }
*
* </code></pre>
*
* <p>will be transformed into:
* <pre><code>
*
* function Controller(dependency1, dependency2) {
* // do something
* }
* Controller.$inject = ['dependency1', 'dependency2'];
*
* </code></pre>
*
* <p> This pass also supports assignments of function expressions to variables
* like:
* <pre><code>
*
* /** @ngInject * /
* var filter = function(a, b) {};
*
* var ns = {};
* /** @ngInject * /
* ns.method = function(a,b,c) {};
*
* /** @ngInject * /
* var shorthand = ns.method2 = function(a,b,c,) {}
*
* </code></pre>
*/
class AngularPass extends AbstractPostOrderCallback
implements HotSwapCompilerPass {
final AbstractCompiler compiler;
/** Nodes annotated with @ngInject */
private final List<NodeContext> injectables = new ArrayList<>();
public AngularPass(AbstractCompiler compiler) {
this.compiler = compiler;
}
public static final String INJECT_PROPERTY_NAME = "$inject";
static final DiagnosticType INJECT_IN_NON_GLOBAL_OR_BLOCK_ERROR =
DiagnosticType.error("JSC_INJECT_IN_NON_GLOBAL_OR_BLOCK_ERROR",
"@ngInject only applies to functions defined in blocks or " +
"global scope.");
static final DiagnosticType INJECT_NON_FUNCTION_ERROR =
DiagnosticType.error("JSC_INJECT_NON_FUNCTION_ERROR",
"@ngInject can only be used when defining a function or " +
"assigning a function expression.");
static final DiagnosticType INJECTED_FUNCTION_HAS_DESTRUCTURED_PARAM =
DiagnosticType.error("JSC_INJECTED_FUNCTION_HAS_DESTRUCTURED_PARAM",
"@ngInject cannot be used on functions containing "
+ "destructured parameter.");
static final DiagnosticType INJECTED_FUNCTION_HAS_DEFAULT_VALUE =
DiagnosticType.error("JSC_INJECTED_FUNCTION_HAS_DEFAULT_VALUE",
"@ngInject cannot be used on functions containing default value.");
static final DiagnosticType INJECTED_FUNCTION_ON_NON_QNAME =
DiagnosticType.error("JSC_INJECTED_FUNCTION_ON_NON_QNAME",
"@ngInject can only be used on qualified names.");
@Override
public void process(Node externs, Node root) {
hotSwapScript(root, null);
}
@Override
public void hotSwapScript(Node scriptRoot, Node originalRoot) {
// Traverses AST looking for nodes annotated with @ngInject.
NodeTraversal.traverse(compiler, scriptRoot, this);
// iterates through annotated nodes adding $inject property to elements.
for (NodeContext entry : injectables) {
String name = entry.getName();
Node fn = entry.getFunctionNode();
List<Node> dependencies = createDependenciesList(fn);
// skips entry if it does not have any dependencies.
if (dependencies.isEmpty()) {
continue;
}
Node dependenciesArray = IR.arraylit(dependencies.toArray(new Node[0]));
// creates `something.$inject = ['param1', 'param2']` node.
Node statement = IR.exprResult(
IR.assign(
IR.getelem(
NodeUtil.newQName(compiler, name),
IR.string(INJECT_PROPERTY_NAME)),
dependenciesArray
)
);
statement.useSourceInfoFromForTree(entry.getNode());
statement.setOriginalName(name);
// Set the visibility of the newly created property.
JSDocInfoBuilder newPropertyDoc = new JSDocInfoBuilder(false);
newPropertyDoc.recordVisibility(Visibility.PUBLIC);
statement.getFirstChild().setJSDocInfo(newPropertyDoc.build());
// adds `something.$inject = [...]` node after the annotated node or the following
// goog.inherits call.
Node insertionPoint = entry.getTarget();
Node next = insertionPoint.getNext();
while (next != null
&& NodeUtil.isExprCall(next)
&& compiler.getCodingConvention().getClassesDefinedByCall(
next.getFirstChild()) != null) {
insertionPoint = next;
next = insertionPoint.getNext();
}
insertionPoint.getParent().addChildAfter(statement, insertionPoint);
compiler.reportChangeToEnclosingScope(statement);
}
}
/**
* Given a FUNCTION node returns array of STRING nodes representing function
* parameters.
* @param n the FUNCTION node.
* @return STRING nodes.
*/
private List<Node> createDependenciesList(Node n) {
checkArgument(n.isFunction());
Node params = NodeUtil.getFunctionParameters(n);
if (params != null) {
return createStringsFromParamList(params);
}
return new ArrayList<>();
}
/**
* Given a PARAM_LIST node creates an array of corresponding STRING nodes.
* @param params PARAM_LIST node.
* @return array of STRING nodes.
*/
private List<Node> createStringsFromParamList(Node params) {
Node param = params.getFirstChild();
ArrayList<Node> names = new ArrayList<>();
while (param != null) {
if (param.isName()) {
names.add(IR.string(param.getString()).srcref(param));
} else if (param.isDestructuringPattern()) {
compiler.report(JSError.make(param,
INJECTED_FUNCTION_HAS_DESTRUCTURED_PARAM));
return new ArrayList<>();
} else if (param.isDefaultValue()) {
compiler.report(JSError.make(param,
INJECTED_FUNCTION_HAS_DEFAULT_VALUE));
return new ArrayList<>();
}
param = param.getNext();
}
return names;
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
JSDocInfo docInfo = n.getJSDocInfo();
if (docInfo != null && docInfo.isNgInject()) {
addNode(n, t);
}
}
/**
* Add node to the list of injectables.
* @param n node to add.
* @param t node traversal instance.
*/
private void addNode(Node n, NodeTraversal t) {
Node target = null;
Node fn = null;
String name = null;
switch (n.getToken()) {
// handles assignment cases like:
// a = function() {}
// a = b = c = function() {}
case ASSIGN:
if (!n.getFirstChild().isQualifiedName()) {
compiler.report(t.makeError(n, INJECTED_FUNCTION_ON_NON_QNAME));
return;
}
name = n.getFirstChild().getQualifiedName();
// last node of chained assignment.
fn = n;
while (fn.isAssign()) {
fn = fn.getLastChild();
}
target = n.getParent();
break;
// handles function case:
// function fnName() {}
case FUNCTION:
name = NodeUtil.getName(n);
fn = n;
target = n;
if (n.getParent().isAssign()
&& n.getParent().getJSDocInfo().isNgInject()) {
// This is a function assigned into a symbol, e.g. a regular function
// declaration in a goog.module or goog.scope.
// Skip in this traversal, it is handled when visiting the assign.
return;
}
break;
// handles var declaration cases like:
// var a = function() {}
// var a = b = function() {}
case VAR:
case LET:
case CONST:
name = n.getFirstChild().getString();
// looks for a function node.
fn = getDeclarationRValue(n);
target = n;
break;
// handles class method case:
// class clName(){
// constructor(){}
// someMethod(){} <===
// }
case MEMBER_FUNCTION_DEF:
Node parent = n.getParent();
if (parent.isClassMembers()){
Node classNode = parent.getParent();
String midPart = n.isStaticMember() ? "." : ".prototype.";
name = NodeUtil.getName(classNode) + midPart + n.getString();
if (n.getString().equals("constructor")) {
name = NodeUtil.getName(classNode);
}
fn = n.getFirstChild();
if (classNode.getParent().isAssign() || classNode.getParent().isName()) {
target = classNode.getGrandparent();
} else {
target = classNode;
}
}
break;
default:
break;
}
if (fn == null || !fn.isFunction()) {
compiler.report(t.makeError(n, INJECT_NON_FUNCTION_ERROR));
return;
}
// report an error if the function declaration did not take place in a block or global scope
if (!target.getParent().isScript()
&& !target.getParent().isBlock()
&& !target.getParent().isModuleBody()) {
compiler.report(t.makeError(n, INJECT_IN_NON_GLOBAL_OR_BLOCK_ERROR));
return;
}
// checks that name is present, which must always be the case unless the
// compiler allowed a syntax error or a dangling anonymous function
// expression.
checkNotNull(name);
// registers the node.
injectables.add(new NodeContext(name, n, fn, target));
}
/**
* Given a VAR node (variable declaration) returns the node of initial value.
*
* <pre><code>
* var x; // null
* var y = "value"; // STRING "value" node
* var z = x = y = function() {}; // FUNCTION node
* <code></pre>
* @param n VAR node.
* @return the assigned initial value, or the rightmost rvalue of an assignment
* chain, or null.
*/
private static Node getDeclarationRValue(Node n) {
checkNotNull(n);
checkArgument(NodeUtil.isNameDeclaration(n));
n = n.getFirstFirstChild();
if (n == null) {
return null;
}
while (n.isAssign()) {
n = n.getLastChild();
}
return n;
}
static class NodeContext {
/** Name of the function/object. */
private final String name;
/** Node jsDoc is attached to. */
private final Node node;
/** Function node */
private final Node functionNode;
/** Node after which to inject the new code */
private final Node target;
public NodeContext(String name, Node node, Node functionNode, Node target) {
this.name = name;
this.node = node;
this.functionNode = functionNode;
this.target = target;
}
/**
* @return the name.
*/
public String getName() {
return name;
}
/**
* @return the node.
*/
public Node getNode() {
return node;
}
/**
* @return the context.
*/
public Node getFunctionNode() {
return functionNode;
}
/**
* @return the context.
*/
public Node getTarget() {
return target;
}
}
}
| |
// This file is part of JavaSMT,
// an API wrapper for a collection of SMT solvers:
// https://github.com/sosy-lab/java-smt
//
// SPDX-FileCopyrightText: 2021 Dirk Beyer <https://www.sosy-lab.org>
//
// SPDX-License-Identifier: Apache-2.0
package org.sosy_lab.java_smt.test;
import static com.google.common.truth.TruthJUnit.assume;
import com.google.common.collect.ImmutableList;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.util.List;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.sosy_lab.java_smt.SolverContextFactory.Solvers;
import org.sosy_lab.java_smt.api.BooleanFormula;
import org.sosy_lab.java_smt.api.NumeralFormula.IntegerFormula;
import org.sosy_lab.java_smt.api.RegexFormula;
import org.sosy_lab.java_smt.api.SolverException;
import org.sosy_lab.java_smt.api.StringFormula;
@SuppressWarnings("ConstantConditions")
@SuppressFBWarnings(value = "DLS_DEAD_LOCAL_STORE", justification = "test code")
@RunWith(Parameterized.class)
public class StringFormulaManagerTest extends SolverBasedTest0 {
private static final ImmutableList<String> WORDS =
ImmutableList.of(
"",
"0",
"1",
"10",
"a",
"b",
"A",
"B",
"aa",
"Aa",
"aA",
"AA",
"ab",
"aB",
"Ab",
"AB",
"ac",
"bb",
"aaa",
"Aaa",
"aAa",
"aAA",
"aab",
"aaabbb",
"bbbccc",
"abcde",
"abdde",
"abcdf",
"abchurrdurr",
"abcdefaaaaa");
private StringFormula hello;
private RegexFormula a2z;
/** Supplies every known solver backend as a parameterized-test instance. */
@Parameters(name = "{0}")
public static Object[] getAllSolvers() {
    final Solvers[] allSolvers = Solvers.values();
    return allSolvers;
}
@Parameter public Solvers solverUnderTest;
@Override
protected Solvers solverToUse() {
// selects the solver backend injected by the Parameterized runner
return solverUnderTest;
}
/** Skips the test when the solver has no string support, then builds shared fixtures. */
@Before
public void setup() {
    requireStrings();
    a2z = smgr.range('a', 'z');
    hello = smgr.makeString("hello");
}
// Utility methods
/** Asserts that the two integer terms are equal in every model. */
private void assertEqual(IntegerFormula num1, IntegerFormula num2)
    throws SolverException, InterruptedException {
    final BooleanFormula equality = imgr.equal(num1, num2);
    assertThatFormula(equality).isTautological();
}
/** Asserts that the two integer terms differ in every model. */
private void assertDistinct(IntegerFormula num1, IntegerFormula num2)
    throws SolverException, InterruptedException {
    final BooleanFormula distinctness = imgr.distinct(List.of(num1, num2));
    assertThatFormula(distinctness).isTautological();
}
/** Asserts that the two string terms are equal in every model. */
private void assertEqual(StringFormula str1, StringFormula str2)
    throws SolverException, InterruptedException {
    final BooleanFormula equality = smgr.equal(str1, str2);
    assertThatFormula(equality).isTautological();
}
/** Asserts that the two string terms can never be equal. */
private void assertDistinct(StringFormula str1, StringFormula str2)
    throws SolverException, InterruptedException {
    final BooleanFormula equality = smgr.equal(str1, str2);
    assertThatFormula(equality).isUnsatisfiable();
}
// Tests
/** The universal regex must accept an arbitrary concrete string. */
@Test
public void testRegexAll() throws SolverException, InterruptedException {
    assertThatFormula(smgr.in(hello, smgr.all())).isSatisfiable();
}
@Test
public void testRegexAll3() throws SolverException, InterruptedException {
    // This is not ALL_CHAR! This matches ".*" literally!
    final RegexFormula literalDotStar = smgr.makeRegex(".*");
    assertThatFormula(smgr.in(hello, literalDotStar)).isUnsatisfiable();
    assertThatFormula(smgr.in(smgr.makeString(".*"), literalDotStar)).isSatisfiable();
}
/** "hello" must match the pattern [a-z]* "ll" [a-z]*. */
@Test
public void testStringRegex2() throws SolverException, InterruptedException {
    final RegexFormula lettersAroundLl =
        smgr.concat(smgr.closure(a2z), smgr.makeRegex("ll"), smgr.closure(a2z));
    assertThatFormula(smgr.in(hello, lettersAroundLl)).isSatisfiable();
}
/** makeRegex takes its argument literally, so ".*ll.*" does not match "hello". */
@Test
public void testStringRegex3() throws SolverException, InterruptedException {
    assertThatFormula(smgr.in(hello, smgr.makeRegex(".*ll.*"))).isUnsatisfiable();
}
/** The empty regex accepts no string at all. */
@Test
public void testEmptyRegex() throws SolverException, InterruptedException {
    assertThatFormula(smgr.in(hello, smgr.none())).isUnsatisfiable();
}
/** Concatenating two constants yields the combined constant. */
@Test
public void testStringConcat() throws SolverException, InterruptedException {
    final StringFormula concatenated =
        smgr.concat(smgr.makeString("hello"), smgr.makeString("world"));
    assertEqual(concatenated, smgr.makeString("helloworld"));
}
@Test
public void testStringConcatEmpty() throws SolverException, InterruptedException {
// concatenating zero or more empty strings must always yield the empty string
StringFormula empty = smgr.makeString("");
assertEqual(empty, smgr.concat(ImmutableList.of()));
assertEqual(empty, smgr.concat(empty));
assertEqual(empty, smgr.concat(empty, empty));
assertEqual(empty, smgr.concat(ImmutableList.of(empty, empty, empty, empty)));
}
/** If a prefix and suffix together cover the full length, they reconstruct the string. */
@Test
public void testStringPrefixSuffixConcat() throws SolverException, InterruptedException {
    // check whether "prefix + suffix == concat"
    final StringFormula prefix = smgr.makeVariable("prefix");
    final StringFormula suffix = smgr.makeVariable("suffix");
    final StringFormula concat = smgr.makeVariable("concat");
    final IntegerFormula combinedLength = imgr.add(smgr.length(prefix), smgr.length(suffix));
    final BooleanFormula premise =
        bmgr.and(
            smgr.prefix(prefix, concat),
            smgr.suffix(suffix, concat),
            imgr.equal(smgr.length(concat), combinedLength));
    assertThatFormula(premise).implies(smgr.equal(concat, smgr.concat(prefix, suffix)));
}
/** Prefix/suffix containment of equal length forces string equality. */
@Test
public void testStringPrefixSuffix() throws SolverException, InterruptedException {
    // check whether "prefix == suffix iff equal length"
    final StringFormula prefix = smgr.makeVariable("prefix");
    final StringFormula suffix = smgr.makeVariable("suffix");
    final BooleanFormula sameLength = imgr.equal(smgr.length(prefix), smgr.length(suffix));
    final BooleanFormula equalStrings = smgr.equal(prefix, suffix);
    // mutual containment forces equality
    assertThatFormula(bmgr.and(smgr.prefix(prefix, suffix), smgr.suffix(suffix, prefix)))
        .implies(equalStrings);
    // a prefix of equal length is the whole string
    assertThatFormula(bmgr.and(smgr.prefix(prefix, suffix), sameLength)).implies(equalStrings);
    // a suffix of equal length is the whole string
    assertThatFormula(bmgr.and(smgr.suffix(suffix, prefix), sameLength)).implies(equalStrings);
}
/** Round-trips small non-negative numbers through str.from_int / str.to_int. */
@Test
public void testStringToIntConversion() throws SolverException, InterruptedException {
    final IntegerFormula ten = imgr.makeNumber(10);
    final StringFormula zeroStr = smgr.makeString("0");
    for (int value = 0; value < 100; value += 7) {
        final StringFormula str = smgr.makeString(Integer.toString(value));
        final IntegerFormula num = imgr.makeNumber(value);
        final IntegerFormula numInc = imgr.makeNumber(value + 1);
        // int -> string
        assertEqual(str, smgr.toStringFormula(num));
        assertDistinct(str, smgr.toStringFormula(numInc));
        assertDistinct(str, smgr.toStringFormula(imgr.add(num, numInc)));
        // string -> int
        assertEqual(num, smgr.toIntegerFormula(str));
        assertDistinct(numInc, smgr.toIntegerFormula(str));
        // appending a "0" digit multiplies the numeric value by ten
        assertEqual(imgr.multiply(num, ten), smgr.toIntegerFormula(smgr.concat(str, zeroStr)));
        assertDistinct(imgr.multiply(numInc, ten), smgr.toIntegerFormula(smgr.concat(str, zeroStr)));
        // round trips
        assertEqual(num, smgr.toIntegerFormula(smgr.toStringFormula(num)));
        assertEqual(numInc, smgr.toIntegerFormula(smgr.toStringFormula(numInc)));
    }
}
/** str.to_int yields -1 for any string that is not a plain decimal numeral. */
@Test
public void testStringToIntConversionCornerCases() throws SolverException, InterruptedException {
    final IntegerFormula minusOne = imgr.makeNumber(-1);
    final IntegerFormula minusTwelve = imgr.makeNumber(-12);
    final String[] negativeNumerals = {"-1", "-12", "-123", "-1234"};
    // negative numerals are not valid str.to_int input -> -1
    for (String s : negativeNumerals) {
        assertEqual(minusOne, smgr.toIntegerFormula(smgr.makeString(s)));
    }
    for (String s : negativeNumerals) {
        assertDistinct(minusTwelve, smgr.toIntegerFormula(smgr.makeString(s)));
    }
    final String[] nonNumerals = {"", "a", "1a", "a1"};
    // non-numeric strings also map to -1
    for (String s : nonNumerals) {
        assertEqual(minusOne, smgr.toIntegerFormula(smgr.makeString(s)));
    }
    for (String s : nonNumerals) {
        assertDistinct(minusTwelve, smgr.toIntegerFormula(smgr.makeString(s)));
    }
}
/** str.from_int prints non-negative numbers as decimal; negatives map to "". */
@Test
public void testIntToStringConversionCornerCases() throws SolverException, InterruptedException {
    assertEqual(smgr.makeString("123"), smgr.toStringFormula(imgr.makeNumber(123)));
    assertEqual(smgr.makeString("1"), smgr.toStringFormula(imgr.makeNumber(1)));
    assertEqual(smgr.makeString("0"), smgr.toStringFormula(imgr.makeNumber(0)));
    final StringFormula empty = smgr.makeString("");
    assertEqual(empty, smgr.toStringFormula(imgr.makeNumber(-1)));
    assertEqual(empty, smgr.toStringFormula(imgr.makeNumber(-123)));
    assertDistinct(smgr.makeString("1"), smgr.toStringFormula(imgr.makeNumber(-1)));
}
/** Lengths of constant strings are known exactly. */
@Test
public void testStringLength() throws SolverException, InterruptedException {
    final String[] samples = {"", "a", "aa", "aaabbbccc"};
    final int[] expectedLengths = {0, 1, 2, 9};
    for (int i = 0; i < samples.length; i++) {
        assertEqual(imgr.makeNumber(expectedLengths[i]), smgr.length(smgr.makeString(samples[i])));
    }
    // none of these strings has length 5
    final IntegerFormula five = imgr.makeNumber(5);
    for (String s : new String[] {"", "a", "aa", "aaabbbcc"}) {
        assertDistinct(five, smgr.length(smgr.makeString(s)));
    }
}
@Test
public void testStringLengthWithVariable() throws SolverException, InterruptedException {
StringFormula var = smgr.makeVariable("var");
// length 0 forces the empty string
assertThatFormula(imgr.equal(imgr.makeNumber(0), smgr.length(var)))
.implies(smgr.equal(var, smgr.makeString("")));
// length 5 with prefix "aba" and suffix "aba": the middle overlaps -> "ababa"
assertThatFormula(
bmgr.and(
imgr.equal(imgr.makeNumber(5), smgr.length(var)),
smgr.prefix(smgr.makeString("aba"), var),
smgr.suffix(smgr.makeString("aba"), var)))
.implies(smgr.equal(smgr.makeVariable("var"), smgr.makeString("ababa")));
// length 4 is too short for both "aba" prefix and "aba" suffix
assertThatFormula(
bmgr.and(
imgr.equal(imgr.makeNumber(4), smgr.length(var)),
smgr.prefix(smgr.makeString("aba"), var),
smgr.suffix(smgr.makeString("aba"), var)))
.isUnsatisfiable();
// length 4 with suffix "ba": index 3 is the final 'a', so charAt(3) == "c" is impossible
assertThatFormula(
bmgr.and(
imgr.equal(imgr.makeNumber(4), smgr.length(var)),
smgr.prefix(smgr.makeString("ab"), var),
smgr.suffix(smgr.makeString("ba"), var),
smgr.equal(smgr.makeString("c"), smgr.charAt(var, imgr.makeNumber(3)))))
.isUnsatisfiable();
// length 5: prefix "ab", suffix "ba", and 'c' at index 3 pin the string to "abcba"
assertThatFormula(
bmgr.and(
imgr.equal(imgr.makeNumber(5), smgr.length(var)),
smgr.prefix(smgr.makeString("ab"), var),
smgr.suffix(smgr.makeString("ba"), var),
smgr.equal(smgr.makeString("c"), smgr.charAt(var, imgr.makeNumber(3)))))
.implies(smgr.equal(smgr.makeVariable("var"), smgr.makeString("abcba")));
}
/** The length of any string term is always non-negative. */
@Test
public void testStringLengthPositiv() throws SolverException, InterruptedException {
    final IntegerFormula zero = imgr.makeNumber(0);
    final IntegerFormula lengthOfX = smgr.length(smgr.makeVariable("x"));
    assertThatFormula(imgr.lessOrEquals(zero, lengthOfX)).isTautological();
    assertThatFormula(imgr.greaterThan(zero, lengthOfX)).isUnsatisfiable();
}
/** Lexicographic {@code <=} and {@code >=} together are equivalent to equality. */
@Test
public void testStringCompare() throws SolverException, InterruptedException {
    final StringFormula var1 = smgr.makeVariable("0");
    final StringFormula var2 = smgr.makeVariable("1");
    final BooleanFormula bothDirections =
        bmgr.and(smgr.lessOrEquals(var1, var2), smgr.greaterOrEquals(var1, var2));
    final BooleanFormula equality = smgr.equal(var1, var2);
    assertThatFormula(bothDirections).implies(equality);
    assertThatFormula(equality).implies(bothDirections);
}
/** Test const Strings = String variables + prefix and suffix constraints. */
@Test
public void testConstStringEqStringVar() throws SolverException, InterruptedException {
    String string1 = "";
    String string2 = "a";
    String string3 = "ab";
    String string4 = "abcdefghijklmnopqrstuvwxyz";
    StringFormula string1c = smgr.makeString(string1);
    StringFormula string2c = smgr.makeString(string2);
    StringFormula string3c = smgr.makeString(string3);
    StringFormula string4c = smgr.makeString(string4);
    // FIX: each variable now gets a distinct name. Previously all four used
    // "string1v", so `formula` equated one variable with four different
    // constants, was unsatisfiable, and the implication below held vacuously.
    StringFormula string1v = smgr.makeVariable("string1v");
    StringFormula string2v = smgr.makeVariable("string2v");
    StringFormula string3v = smgr.makeVariable("string3v");
    StringFormula string4v = smgr.makeVariable("string4v");
    // bind each variable to its constant
    BooleanFormula formula =
        bmgr.and(
            smgr.equal(string1c, string1v),
            smgr.equal(string2c, string2v),
            smgr.equal(string3c, string3v),
            smgr.equal(string4c, string4v));
    // expected prefix relations for each bound variable
    BooleanFormula string1PrefixFormula =
        bmgr.and(
            smgr.prefix(string1c, string1v),
            bmgr.not(smgr.prefix(string2c, string1v)),
            bmgr.not(smgr.prefix(string3c, string1v)),
            bmgr.not(smgr.prefix(string4c, string1v)));
    BooleanFormula string2PrefixFormula =
        bmgr.and(
            smgr.prefix(string1c, string2v),
            smgr.prefix(string2c, string2v),
            bmgr.not(smgr.prefix(string3c, string2v)),
            bmgr.not(smgr.prefix(string4c, string2v)));
    BooleanFormula string3PrefixFormula =
        bmgr.and(
            smgr.prefix(string1c, string3v),
            smgr.prefix(string2c, string3v),
            smgr.prefix(string3c, string3v),
            bmgr.not(smgr.prefix(string4c, string3v)));
    BooleanFormula string4PrefixFormula =
        bmgr.and(
            smgr.prefix(string1c, string4v),
            smgr.prefix(string2c, string4v),
            smgr.prefix(string3c, string4v),
            smgr.prefix(string4c, string4v));
    // expected suffix relations for each bound variable
    BooleanFormula string1SuffixFormula =
        bmgr.and(
            smgr.suffix(string1c, string1v),
            bmgr.not(smgr.suffix(string2c, string1v)),
            bmgr.not(smgr.suffix(string3c, string1v)),
            bmgr.not(smgr.suffix(string4c, string1v)));
    BooleanFormula string2SuffixFormula =
        bmgr.and(
            smgr.suffix(string1c, string2v),
            bmgr.not(smgr.suffix(string3c, string2v)),
            bmgr.not(smgr.suffix(string4c, string2v)));
    BooleanFormula string3SuffixFormula =
        bmgr.and(
            smgr.suffix(string1c, string3v),
            smgr.suffix(string3c, string3v),
            bmgr.not(smgr.suffix(string4c, string3v)));
    BooleanFormula string4SuffixFormula =
        bmgr.and(smgr.suffix(string1c, string4v), smgr.suffix(string4c, string4v));
    assertThatFormula(bmgr.and(formula))
        .implies(
            bmgr.and(
                string1PrefixFormula,
                string2PrefixFormula,
                string3PrefixFormula,
                string4PrefixFormula,
                string1SuffixFormula,
                string2SuffixFormula,
                string3SuffixFormula,
                string4SuffixFormula));
}
/** Test String variables with negative length (UNSAT). */
@Test
public void testStringVariableLengthNegative() throws SolverException, InterruptedException {
    final StringFormula zeroLengthVar = smgr.makeVariable("zeroLength");
    final StringFormula negLengthVar = smgr.makeVariable("negLength");
    // SAT + UNSAT Formula -> UNSAT
    assertThatFormula(
            bmgr.and(
                imgr.equal(smgr.length(zeroLengthVar), imgr.makeNumber(0)),
                imgr.equal(smgr.length(negLengthVar), imgr.makeNumber(-100))))
        .isUnsatisfiable();
    // UNSAT below
    assertThatFormula(imgr.equal(smgr.length(negLengthVar), imgr.makeNumber(-1)))
        .isUnsatisfiable();
    assertThatFormula(imgr.equal(smgr.length(negLengthVar), imgr.makeNumber(-100)))
        .isUnsatisfiable();
}
/**
 * Test String formulas with inequalities in the negative range.
 *
 * <p>-10000 < stringVariable length < 0 -> UNSAT
 *
 * <p>-10000 < stringVariable length < -1 -> UNSAT
 *
 * <p>-10000 <= stringVariable length <= -1 -> UNSAT
 *
 * <p>-10000 <= stringVariable length <= 0 AND stringVariable != "" -> UNSAT
 *
 * <p>-10000 <= stringVariable length <= 0 -> SAT implies stringVariable = ""
 */
@Test
public void testStringLengthInequalityNegativeRange()
    throws SolverException, InterruptedException {
  StringFormula str = smgr.makeVariable("stringVariable");
  IntegerFormula strLen = smgr.length(str);
  IntegerFormula lowerBound = imgr.makeNumber(-10000);
  IntegerFormula minusOne = imgr.makeNumber(-1);
  IntegerFormula zero = imgr.makeNumber(0);

  // A strictly negative length is impossible: -10000 < |str| < 0 is UNSAT.
  assertThatFormula(bmgr.and(imgr.lessThan(lowerBound, strLen), imgr.lessThan(strLen, zero)))
      .isUnsatisfiable();
  // -10000 < |str| < -1 is UNSAT for the same reason.
  assertThatFormula(bmgr.and(imgr.lessThan(lowerBound, strLen), imgr.lessThan(strLen, minusOne)))
      .isUnsatisfiable();
  // -10000 <= |str| <= -1 is UNSAT.
  assertThatFormula(
          bmgr.and(imgr.lessOrEquals(lowerBound, strLen), imgr.lessOrEquals(strLen, minusOne)))
      .isUnsatisfiable();
  // -10000 <= |str| <= 0 forces |str| = 0, which contradicts str != "".
  assertThatFormula(
          bmgr.and(
              imgr.lessOrEquals(lowerBound, strLen),
              imgr.lessOrEquals(strLen, zero),
              bmgr.not(smgr.equal(str, smgr.makeString("")))))
      .isUnsatisfiable();
  // -10000 <= |str| <= 0 is SAT and implies str = "".
  assertThatFormula(
          bmgr.and(imgr.lessOrEquals(lowerBound, strLen), imgr.lessOrEquals(strLen, zero)))
      .implies(smgr.equal(str, smgr.makeString("")));
}
/**
 * Test String formulas with inequalities in the positive range.
 *
 * <p>0 < stringVariable length < 1 -> UNSAT
 *
 * <p>0 < stringVariable length < 2 -> SAT
 *
 * <p>0 <= stringVariable length < 1 -> SAT implies stringVariable = ""
 *
 * <p>1 < stringVariable length < 3 -> SAT implies stringVariable length = 2
 */
@Test
public void testStringLengthInequalityPositiveRange()
    throws SolverException, InterruptedException {
  StringFormula str = smgr.makeVariable("stringVariable");
  IntegerFormula strLen = smgr.length(str);
  IntegerFormula three = imgr.makeNumber(3);
  IntegerFormula two = imgr.makeNumber(2);
  IntegerFormula one = imgr.makeNumber(1);
  IntegerFormula zero = imgr.makeNumber(0);

  // There is no integer strictly between 0 and 1, hence UNSAT.
  assertThatFormula(bmgr.and(imgr.lessThan(zero, strLen), imgr.lessThan(strLen, one)))
      .isUnsatisfiable();
  // 0 < |str| < 2 admits exactly |str| = 1, hence SAT.
  assertThatFormula(bmgr.and(imgr.lessThan(zero, strLen), imgr.lessThan(strLen, two)))
      .isSatisfiable();
  // 0 <= |str| < 1 forces |str| = 0, i.e. str = "".
  assertThatFormula(bmgr.and(imgr.lessOrEquals(zero, strLen), imgr.lessThan(strLen, one)))
      .implies(smgr.equal(str, smgr.makeString("")));
  // 1 < |str| < 3 forces |str| = 2.
  assertThatFormula(bmgr.and(imgr.lessThan(one, strLen), imgr.lessThan(strLen, three)))
      .implies(imgr.equal(smgr.length(str), two));
}
/** Test simple String lexicographic ordering (< <= > >=) for constant Strings. */
@Test
public void testSimpleConstStringLexicographicOrdering()
    throws SolverException, InterruptedException {
  List<String> sortedWords = ImmutableList.sortedCopyOf(WORDS);
  for (int i = 1; i < sortedWords.size(); i++) {
    StringFormula smaller = smgr.makeString(sortedWords.get(i - 1));
    StringFormula larger = smgr.makeString(sortedWords.get(i));
    // Reflexive checks: a string is never strictly below itself, but <= and >= hold.
    assertThatFormula(smgr.lessThan(smaller, smaller)).isUnsatisfiable();
    assertThatFormula(smgr.lessOrEquals(smaller, smaller)).isSatisfiable();
    assertThatFormula(smgr.greaterOrEquals(smaller, smaller)).isSatisfiable();
    // Checks on an adjacent sorted pair: ordering must agree with the Java sort order.
    assertThatFormula(smgr.lessThan(smaller, larger)).isSatisfiable();
    assertThatFormula(smgr.lessOrEquals(smaller, larger)).isSatisfiable();
    assertThatFormula(smgr.greaterThan(smaller, larger)).isUnsatisfiable();
    assertThatFormula(smgr.greaterOrEquals(smaller, larger)).isUnsatisfiable();
  }
}
/** Test simple String lexicographic ordering (< <= > >=) for String variables. */
@Test
public void testSimpleStringVariableLexicographicOrdering()
    throws SolverException, InterruptedException {
  StringFormula a = smgr.makeString("a");
  StringFormula b = smgr.makeString("b");
  StringFormula ab = smgr.makeString("ab");
  StringFormula abc = smgr.makeString("abc");
  StringFormula abd = smgr.makeString("abd");
  StringFormula abe = smgr.makeString("abe");
  StringFormula abaab = smgr.makeString("abaab");
  StringFormula abbab = smgr.makeString("abbab");
  StringFormula abcab = smgr.makeString("abcab");
  StringFormula stringVariable = smgr.makeVariable("stringVariable");
  // "a" < var < "b" with length 0: pins var to "".
  // NOTE(review): "" sorts before "a", so the premise may be UNSAT and the implication
  // vacuous — confirm intent.
  assertThatFormula(
          bmgr.and(
              smgr.lessThan(a, stringVariable),
              smgr.lessThan(stringVariable, b),
              imgr.equal(imgr.makeNumber(0), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, smgr.makeString("")));
  // "a" <= var < "b" with length 1: var must be "a" itself.
  assertThatFormula(
          bmgr.and(
              smgr.lessOrEquals(a, stringVariable),
              smgr.lessThan(stringVariable, b),
              imgr.equal(imgr.makeNumber(1), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, smgr.makeString("a")));
  // "a" < var <= "b" with length 1: var must be "b".
  assertThatFormula(
          bmgr.and(
              smgr.lessThan(a, stringVariable),
              smgr.lessOrEquals(stringVariable, b),
              imgr.equal(imgr.makeNumber(1), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, b));
  // "abc" <= var < "abd" with length 3: var must be "abc".
  assertThatFormula(
          bmgr.and(
              smgr.lessOrEquals(abc, stringVariable),
              smgr.lessThan(stringVariable, abd),
              imgr.equal(imgr.makeNumber(3), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, abc));
  // "abc" < var < "abe" with length 3: var must be "abd".
  assertThatFormula(
          bmgr.and(
              smgr.lessThan(abc, stringVariable),
              smgr.lessThan(stringVariable, abe),
              imgr.equal(imgr.makeNumber(3), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, abd));
  // "abc" < var <= "abd" with length 3: var must be "abd".
  assertThatFormula(
          bmgr.and(
              smgr.lessThan(abc, stringVariable),
              smgr.lessOrEquals(stringVariable, abd),
              imgr.equal(imgr.makeNumber(3), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, abd));
  // "abaab" < var < "abcab", var starts and ends with "ab", length 5: var must be "abbab".
  assertThatFormula(
          bmgr.and(
              smgr.lessThan(abaab, stringVariable),
              smgr.lessThan(stringVariable, abcab),
              smgr.prefix(ab, stringVariable),
              smgr.suffix(ab, stringVariable),
              imgr.equal(imgr.makeNumber(5), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, abbab));
}
/** Takeaway: invalid positions always refer to the empty string! */
@Test
public void testCharAtWithConstString() throws SolverException, InterruptedException {
  StringFormula empty = smgr.makeString("");
  StringFormula a = smgr.makeString("a");
  StringFormula b = smgr.makeString("b");
  StringFormula ab = smgr.makeString("ab");

  // Every position in the empty string is out of bounds and yields "".
  for (int position : new int[] {1, 0, -1}) {
    assertEqual(smgr.charAt(empty, imgr.makeNumber(position)), empty);
  }

  // Out-of-bounds positions on "a" (-1, 1, 2) yield "" and therefore differ from "a";
  // position 0 yields "a" itself.
  assertDistinct(smgr.charAt(a, imgr.makeNumber(-1)), a);
  assertEqual(smgr.charAt(a, imgr.makeNumber(-1)), empty);
  assertEqual(smgr.charAt(a, imgr.makeNumber(0)), a);
  assertDistinct(smgr.charAt(a, imgr.makeNumber(1)), a);
  assertEqual(smgr.charAt(a, imgr.makeNumber(1)), empty);
  assertDistinct(smgr.charAt(a, imgr.makeNumber(2)), a);
  assertEqual(smgr.charAt(a, imgr.makeNumber(2)), empty);

  // In-bounds positions on "ab" return exactly the character at that index.
  assertEqual(smgr.charAt(ab, imgr.makeNumber(0)), a);
  assertEqual(smgr.charAt(ab, imgr.makeNumber(1)), b);
  assertDistinct(smgr.charAt(ab, imgr.makeNumber(0)), b);
  assertDistinct(smgr.charAt(ab, imgr.makeNumber(1)), a);
}
/**
 * Test escape character treatment. Escape characters are treated as a single char! Example:
 * "a\u1234T" has "a" at position 0, "\u1234" at position 1 and "T" at position 2
 *
 * <p>SMTLIB2 uses an escape sequence for the numerals of the sort: {1234}.
 */
@Test
public void testCharAtWithSpecialCharacters() throws SolverException, InterruptedException {
  assume()
      .withMessage("Solver %s does only support 2 byte unicode", solverToUse())
      .that(solverToUse())
      .isNotEqualTo(Solvers.Z3);
  StringFormula num1 = smgr.makeString("1");
  StringFormula u = smgr.makeString("u");
  StringFormula curlyOpen = smgr.makeString("{");
  StringFormula curlyClose = smgr.makeString("}");
  // Variants WITHOUT the backslash escape: every character counts separately.
  StringFormula u1234WOEscape = smgr.makeString("u1234");
  StringFormula au1234WOEscape = smgr.makeString("au1234");
  // Java needs a double {{ as the first one is needed as an escape char for the second, this is a
  // workaround
  String workaround = "au{1234}";
  StringFormula au1234WOEscapeCurly = smgr.makeString(workaround);
  StringFormula backSlash = smgr.makeString("\\");
  StringFormula a = smgr.makeString("a");
  StringFormula b = smgr.makeString("b");
  StringFormula u1234 = smgr.makeString("\\u{1234}");
  StringFormula au1234b = smgr.makeString("a\\u{1234}b");
  StringFormula stringVariable = smgr.makeVariable("stringVariable");
  // Javas backslash (double written) is just 1 char
  assertThatFormula(imgr.equal(smgr.length(backSlash), imgr.makeNumber(1))).isSatisfiable();
  // In "a\u{1234}b" the whole escape sequence occupies a single character position:
  // 'a' at 0, the escaped char at 1, 'b' at 2.
  assertThatFormula(smgr.equal(smgr.charAt(au1234b, imgr.makeNumber(0)), stringVariable))
      .implies(smgr.equal(stringVariable, a));
  // It seems like CVC4 sees the backslash as its own char!
  assertThatFormula(smgr.equal(smgr.charAt(au1234b, imgr.makeNumber(1)), stringVariable))
      .implies(smgr.equal(stringVariable, u1234));
  assertThatFormula(smgr.equal(smgr.charAt(au1234b, imgr.makeNumber(2)), stringVariable))
      .implies(smgr.equal(stringVariable, b));
  // Without the backslash, "u1234" is plain text: 'u' at 0, '1' at 1.
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(u1234WOEscape, imgr.makeNumber(0)), u),
              smgr.equal(smgr.charAt(u1234WOEscape, imgr.makeNumber(1)), num1)))
      .isSatisfiable();
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(au1234WOEscape, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(au1234WOEscape, imgr.makeNumber(1)), u),
              smgr.equal(smgr.charAt(au1234WOEscape, imgr.makeNumber(2)), num1)))
      .isSatisfiable();
  // "au{1234}" (no backslash): the braces are ordinary characters at positions 2 and 7.
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(au1234WOEscapeCurly, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(au1234WOEscapeCurly, imgr.makeNumber(1)), u),
              smgr.equal(smgr.charAt(au1234WOEscapeCurly, imgr.makeNumber(2)), curlyOpen),
              smgr.equal(smgr.charAt(au1234WOEscapeCurly, imgr.makeNumber(7)), curlyClose)))
      .isSatisfiable();
  // Check that the unicode is not treated as separate chars
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(u1234, imgr.makeNumber(0)), smgr.makeString("\\")),
              smgr.equal(smgr.charAt(u1234, imgr.makeNumber(1)), u),
              smgr.equal(smgr.charAt(u1234, imgr.makeNumber(2)), num1)))
      .isUnsatisfiable();
}
/**
 * Same as {@link #testCharAtWithSpecialCharacters} but only with 2 Byte special chars as Z3 only
 * supports those.
 */
@Test
public void testCharAtWithSpecialCharacters2Byte() throws SolverException, InterruptedException {
  StringFormula num7 = smgr.makeString("7");
  StringFormula u = smgr.makeString("u");
  // \u{7B} is '{' and \u{7D} is '}' — each escape sequence denotes a single character.
  StringFormula curlyOpen2BUnicode = smgr.makeString("\\u{7B}");
  StringFormula curlyClose2BUnicode = smgr.makeString("\\u{7D}");
  StringFormula acurlyClose2BUnicodeb = smgr.makeString("a\\u{7D}b");
  // Java needs a double {{ as the first one is needed as an escape char for the second, this is a
  // workaround
  String workaround = "au{7B}";
  StringFormula acurlyOpen2BUnicodeWOEscapeCurly = smgr.makeString(workaround);
  StringFormula a = smgr.makeString("a");
  StringFormula b = smgr.makeString("b");
  StringFormula stringVariable = smgr.makeVariable("stringVariable");
  // Curly braces unicode is treated as 1 char
  assertThatFormula(imgr.equal(smgr.length(curlyOpen2BUnicode), imgr.makeNumber(1)))
      .isSatisfiable();
  assertThatFormula(imgr.equal(smgr.length(curlyClose2BUnicode), imgr.makeNumber(1)))
      .isSatisfiable();
  // check a}b — 'a' at 0, the escaped '}' at 1, 'b' at 2.
  assertThatFormula(
          smgr.equal(smgr.charAt(acurlyClose2BUnicodeb, imgr.makeNumber(0)), stringVariable))
      .implies(smgr.equal(stringVariable, a));
  assertThatFormula(
          smgr.equal(smgr.charAt(acurlyClose2BUnicodeb, imgr.makeNumber(1)), stringVariable))
      .implies(smgr.equal(stringVariable, curlyClose2BUnicode));
  assertThatFormula(
          smgr.equal(smgr.charAt(acurlyClose2BUnicodeb, imgr.makeNumber(2)), stringVariable))
      .implies(smgr.equal(stringVariable, b));
  // Check the unescaped version (missing backslash): "au{7B}" is six separate characters.
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(acurlyOpen2BUnicodeWOEscapeCurly, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(acurlyOpen2BUnicodeWOEscapeCurly, imgr.makeNumber(1)), u),
              smgr.equal(
                  smgr.charAt(acurlyOpen2BUnicodeWOEscapeCurly, imgr.makeNumber(2)),
                  curlyOpen2BUnicode),
              smgr.equal(smgr.charAt(acurlyOpen2BUnicodeWOEscapeCurly, imgr.makeNumber(3)), num7),
              smgr.equal(
                  smgr.charAt(acurlyOpen2BUnicodeWOEscapeCurly, imgr.makeNumber(4)),
                  smgr.makeString("B")),
              smgr.equal(
                  smgr.charAt(acurlyOpen2BUnicodeWOEscapeCurly, imgr.makeNumber(5)),
                  curlyClose2BUnicode)))
      .isSatisfiable();
}
@Test
public void testCharAtWithStringVariable() throws SolverException, InterruptedException {
  StringFormula a = smgr.makeString("a");
  StringFormula b = smgr.makeString("b");
  StringFormula ab = smgr.makeString("ab");
  StringFormula aa = smgr.makeString("aa");
  StringFormula abc = smgr.makeString("abc");
  StringFormula aabc = smgr.makeString("aabc");
  StringFormula abcb = smgr.makeString("abcb");
  StringFormula stringVariable = smgr.makeVariable("stringVariable");
  // charAt on a constant string pins the variable to the character at that position.
  assertThatFormula(smgr.equal(smgr.charAt(ab, imgr.makeNumber(0)), stringVariable))
      .implies(smgr.equal(stringVariable, a));
  assertThatFormula(smgr.equal(smgr.charAt(ab, imgr.makeNumber(1)), stringVariable))
      .implies(smgr.equal(stringVariable, b));
  // "ab" has distinct chars at positions 0 and 1, so one variable cannot equal both.
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(ab, imgr.makeNumber(0)), stringVariable),
              smgr.equal(smgr.charAt(ab, imgr.makeNumber(1)), stringVariable)))
      .isUnsatisfiable();
  // "aa" has the same char at both positions, so the variable must be "a".
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(aa, imgr.makeNumber(0)), stringVariable),
              smgr.equal(smgr.charAt(aa, imgr.makeNumber(1)), stringVariable)))
      .implies(smgr.equal(stringVariable, a));
  // Conversely, pinning all chars and the length of the variable determines the string.
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(1)), b),
              imgr.equal(imgr.makeNumber(2), smgr.length(stringVariable))))
      .implies(smgr.equal(stringVariable, ab));
  // Length 4, 'a' at 0, 'b' at 2, suffix "abc" -> "aabc".
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(2)), b),
              imgr.equal(imgr.makeNumber(4), smgr.length(stringVariable)),
              smgr.suffix(abc, stringVariable)))
      .implies(smgr.equal(stringVariable, aabc));
  // NOTE(review): with length 4 the suffix "abc" forces char 3 = 'c', conflicting with the
  // char 3 = 'b' constraint — this premise looks unsatisfiable, so the implication may hold
  // vacuously. Possibly smgr.prefix was intended (cf. the next assertion); verify.
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(3)), b),
              imgr.equal(imgr.makeNumber(4), smgr.length(stringVariable)),
              smgr.suffix(abc, stringVariable)))
      .implies(smgr.equal(stringVariable, abcb));
  // Length 4, 'a' at 0, 'b' at 3, prefix "abc" -> "abcb".
  assertThatFormula(
          bmgr.and(
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(0)), a),
              smgr.equal(smgr.charAt(stringVariable, imgr.makeNumber(3)), b),
              imgr.equal(imgr.makeNumber(4), smgr.length(stringVariable)),
              smgr.prefix(abc, stringVariable)))
      .implies(smgr.equal(stringVariable, abcb));
}
@Test
public void testConstStringContains() throws SolverException, InterruptedException {
  StringFormula empty = smgr.makeString("");
  StringFormula a = smgr.makeString("a");
  StringFormula aUppercase = smgr.makeString("A");
  StringFormula bUppercase = smgr.makeString("B");
  StringFormula b = smgr.makeString("b");
  StringFormula bbbbbb = smgr.makeString("bbbbbb");
  StringFormula bbbbbbb = smgr.makeString("bbbbbbb");
  StringFormula abbbbbb = smgr.makeString("abbbbbb");
  StringFormula aaaaaaaB = smgr.makeString("aaaaaaaB");
  StringFormula abcAndSoOn = smgr.makeString("abcdefghijklmnopqrstuVwxyz");
  StringFormula curlyOpen2BUnicode = smgr.makeString("\\u{7B}");
  StringFormula curlyClose2BUnicode = smgr.makeString("\\u{7D}");
  StringFormula multipleCurlys2BUnicode = smgr.makeString("\\u{7B}\\u{7D}\\u{7B}\\u{7B}");
  StringFormula curlyClose2BUnicodeEncased = smgr.makeString("blabla\\u{7D}bla");
  // The empty string is contained in everything; nothing non-empty is contained in "".
  assertThatFormula(smgr.contains(empty, empty)).isSatisfiable();
  assertThatFormula(smgr.contains(empty, a)).isUnsatisfiable();
  assertThatFormula(smgr.contains(a, empty)).isSatisfiable();
  // Containment is reflexive and case-sensitive.
  assertThatFormula(smgr.contains(a, a)).isSatisfiable();
  assertThatFormula(smgr.contains(a, aUppercase)).isUnsatisfiable();
  assertThatFormula(smgr.contains(aUppercase, a)).isUnsatisfiable();
  assertThatFormula(smgr.contains(a, b)).isUnsatisfiable();
  assertThatFormula(smgr.contains(b, b)).isSatisfiable();
  // Substring containment in "abbbbbb": needles longer than any occurrence fail.
  assertThatFormula(smgr.contains(abbbbbb, a)).isSatisfiable();
  assertThatFormula(smgr.contains(abbbbbb, b)).isSatisfiable();
  assertThatFormula(smgr.contains(abbbbbb, bbbbbb)).isSatisfiable();
  assertThatFormula(smgr.contains(abbbbbb, bbbbbbb)).isUnsatisfiable();
  // Fixed: this assertion was accidentally duplicated on two consecutive lines.
  assertThatFormula(smgr.contains(abbbbbb, aUppercase)).isUnsatisfiable();
  assertThatFormula(smgr.contains(aaaaaaaB, a)).isSatisfiable();
  assertThatFormula(smgr.contains(aaaaaaaB, b)).isUnsatisfiable();
  assertThatFormula(smgr.contains(aaaaaaaB, bUppercase)).isSatisfiable();
  assertThatFormula(smgr.contains(aaaaaaaB, curlyOpen2BUnicode)).isUnsatisfiable();
  assertThatFormula(smgr.contains(abcAndSoOn, smgr.makeString("xyz"))).isSatisfiable();
  assertThatFormula(smgr.contains(abcAndSoOn, smgr.makeString("Vwxyz"))).isSatisfiable();
  assertThatFormula(smgr.contains(abcAndSoOn, smgr.makeString("Vwxyza"))).isUnsatisfiable();
  assertThatFormula(smgr.contains(abcAndSoOn, smgr.makeString("t Vwxyz"))).isUnsatisfiable();
  // Escaped 2-byte unicode characters are found like any other single character.
  assertThatFormula(smgr.contains(multipleCurlys2BUnicode, curlyOpen2BUnicode)).isSatisfiable();
  assertThatFormula(smgr.contains(multipleCurlys2BUnicode, curlyClose2BUnicode)).isSatisfiable();
  assertThatFormula(smgr.contains(curlyClose2BUnicodeEncased, curlyClose2BUnicode))
      .isSatisfiable();
}
@Test
public void testStringVariableContains() throws SolverException, InterruptedException {
  StringFormula var1 = smgr.makeVariable("var1");
  StringFormula var2 = smgr.makeVariable("var2");
  StringFormula empty = smgr.makeString("");
  StringFormula bUppercase = smgr.makeString("B");
  StringFormula ab = smgr.makeString("ab");
  StringFormula bbbbbb = smgr.makeString("bbbbbb");
  StringFormula abbbbbb = smgr.makeString("abbbbbb");
  StringFormula curlyOpen2BUnicode = smgr.makeString("\\u{7B}");
  StringFormula curlyClose2BUnicode = smgr.makeString("\\u{7D}");

  IntegerFormula var1Length = smgr.length(var1);

  // A string of length 0 that contains "" must itself be "".
  assertThatFormula(
          bmgr.and(smgr.contains(var1, empty), imgr.equal(imgr.makeNumber(0), var1Length)))
      .implies(smgr.equal(var1, empty));

  // Mutual containment implies equality.
  BooleanFormula mutualContainment =
      bmgr.and(smgr.contains(var1, var2), smgr.contains(var2, var1));
  assertThatFormula(mutualContainment).implies(smgr.equal(var1, var2));

  // Unicode is treated as 1 char. So \\u{7B} is treated as { and the B inside is not contained!
  assertThatFormula(
          bmgr.and(
              smgr.contains(var1, curlyOpen2BUnicode),
              smgr.contains(var1, bUppercase),
              imgr.equal(imgr.makeNumber(1), var1Length)))
      .isUnsatisfiable();
  // Same goes for the curly brackets used as escape sequence.
  assertThatFormula(
          bmgr.and(
              smgr.contains(var1, curlyOpen2BUnicode),
              smgr.contains(var1, curlyClose2BUnicode),
              imgr.equal(imgr.makeNumber(1), var1Length)))
      .isUnsatisfiable();

  // A 7-char string containing both "bbbbbb" and "ab" can only be "abbbbbb".
  assertThatFormula(
          bmgr.and(
              smgr.contains(var1, bbbbbb),
              smgr.contains(var1, ab),
              imgr.equal(imgr.makeNumber(7), var1Length)))
      .implies(smgr.equal(var1, abbbbbb));
}
@Test
public void testStringContainsOtherVariable() throws SolverException, InterruptedException {
  assume()
      .withMessage("Solver %s runs endlessly on this task", solverToUse())
      .that(solverToUse())
      .isNotEqualTo(Solvers.Z3);

  StringFormula var1 = smgr.makeVariable("var1");
  StringFormula var2 = smgr.makeVariable("var2");
  StringFormula abUppercase = smgr.makeString("AB");
  StringFormula abLowercase = smgr.makeString("ab");

  // Containment is transitive: if var1 contains var2 and var2 contains "AB",
  // then var1 also contains "AB".
  BooleanFormula premises =
      bmgr.and(
          smgr.contains(var1, abLowercase),
          smgr.contains(var2, abUppercase),
          smgr.contains(var1, var2));
  assertThatFormula(premises).implies(smgr.contains(var1, abUppercase));
}
@Test
public void testConstStringIndexOf() throws SolverException, InterruptedException {
  StringFormula empty = smgr.makeString("");
  StringFormula a = smgr.makeString("a");
  StringFormula aUppercase = smgr.makeString("A");
  StringFormula b = smgr.makeString("b");
  StringFormula ab = smgr.makeString("ab");
  StringFormula bbbbbb = smgr.makeString("bbbbbb");
  StringFormula bbbbbbb = smgr.makeString("bbbbbbb");
  StringFormula abbbbbb = smgr.makeString("abbbbbb");
  StringFormula abcAndSoOn = smgr.makeString("abcdefghijklmnopqrstuVwxyz");
  StringFormula curlyOpen2BUnicode = smgr.makeString("\\u{7B}");
  StringFormula curlyClose2BUnicode = smgr.makeString("\\u{7D}");
  StringFormula multipleCurlys2BUnicode = smgr.makeString("\\u{7B}\\u{7D}\\u{7B}\\u{7B}");
  // Z3 transforms this into {}, but CVC4 does not! CVC4 is on the side of the SMTLIB2 standard as
  // far as I can see.
  StringFormula curlys2BUnicodeWOEscape = smgr.makeString("\\u7B\\u7D");
  IntegerFormula zero = imgr.makeNumber(0);
  // The empty needle is found at the start position; a needle equal to the haystack at 0.
  assertEqual(smgr.indexOf(empty, empty, zero), zero);
  assertEqual(smgr.indexOf(a, empty, zero), zero);
  assertEqual(smgr.indexOf(a, a, zero), zero);
  // indexOf is case-sensitive; a missing needle yields -1.
  assertEqual(smgr.indexOf(a, aUppercase, zero), imgr.makeNumber(-1));
  assertEqual(smgr.indexOf(abbbbbb, a, zero), zero);
  assertEqual(smgr.indexOf(abbbbbb, b, zero), imgr.makeNumber(1));
  assertEqual(smgr.indexOf(abbbbbb, ab, zero), zero);
  assertEqual(smgr.indexOf(abbbbbb, bbbbbb, zero), imgr.makeNumber(1));
  // A needle longer than any occurrence, or absent, yields -1.
  assertEqual(smgr.indexOf(abbbbbb, bbbbbbb, zero), imgr.makeNumber(-1));
  assertEqual(smgr.indexOf(abbbbbb, smgr.makeString("c"), zero), imgr.makeNumber(-1));
  assertEqual(smgr.indexOf(abcAndSoOn, smgr.makeString("z"), zero), imgr.makeNumber(25));
  assertEqual(smgr.indexOf(abcAndSoOn, smgr.makeString("V"), zero), imgr.makeNumber(21));
  assertEqual(smgr.indexOf(abcAndSoOn, smgr.makeString("v"), zero), imgr.makeNumber(-1));
  // Each escaped 2-byte unicode char counts as a single character position.
  assertEqual(smgr.indexOf(multipleCurlys2BUnicode, curlyOpen2BUnicode, zero), zero);
  assertEqual(
      smgr.indexOf(multipleCurlys2BUnicode, curlyClose2BUnicode, zero), imgr.makeNumber(1));
  // TODO: Z3 and CVC4 handle this differently!
  // assertEqual(smgr.indexOf(multipleCurlys2BUnicode, curlys2BUnicodeWOEscape, zero), zero);
  assertEqual(
      smgr.indexOf(multipleCurlys2BUnicode, curlys2BUnicodeWOEscape, imgr.makeNumber(1)),
      imgr.makeNumber(-1));
  // The 'B' inside the escape sequence \u{7B} is not a separate character.
  assertEqual(
      smgr.indexOf(multipleCurlys2BUnicode, smgr.makeString("B"), zero), imgr.makeNumber(-1));
}
@Test
public void testStringVariableIndexOf() throws SolverException, InterruptedException {
  StringFormula var1 = smgr.makeVariable("var1");
  StringFormula var2 = smgr.makeVariable("var2");
  IntegerFormula intVar = imgr.makeVariable("intVar");
  StringFormula empty = smgr.makeString("");
  StringFormula curlyOpen2BUnicode = smgr.makeString("\\u{7B}");
  IntegerFormula zero = imgr.makeNumber(0);
  // If the index of var2 is not -1, it is contained in var1.
  assertThatFormula(
          bmgr.and(
              bmgr.not(imgr.equal(intVar, imgr.makeNumber(-1))),
              imgr.equal(intVar, smgr.indexOf(var1, var2, zero))))
      .implies(smgr.contains(var1, var2));
  // If the index is less than 0 (only -1 possible) it is not contained.
  assertThatFormula(
          bmgr.and(
              imgr.equal(intVar, smgr.indexOf(var1, var2, zero)), imgr.lessThan(intVar, zero)))
      .implies(bmgr.not(smgr.contains(var1, var2)));
  // If the index of var2 in var1 is >= 0 and vice versa, both contain each other.
  assertThatFormula(
          bmgr.and(
              imgr.greaterOrEquals(smgr.indexOf(var1, var2, zero), zero),
              imgr.greaterOrEquals(smgr.indexOf(var2, var1, zero), zero)))
      .implies(bmgr.and(smgr.contains(var1, var2), smgr.contains(var2, var1)));
  // If the indices are equal and one is >= 0 and the strings are not "", both are contained in
  // each other and the chars at the position must be the same.
  assertThatFormula(
          bmgr.and(
              imgr.equal(smgr.indexOf(var1, var2, zero), smgr.indexOf(var2, var1, zero)),
              imgr.greaterOrEquals(smgr.indexOf(var1, var2, zero), zero),
              bmgr.not(smgr.equal(empty, smgr.charAt(var1, smgr.indexOf(var1, var2, zero))))))
      .implies(
          bmgr.and(
              smgr.contains(var1, var2),
              smgr.contains(var2, var1),
              smgr.equal(
                  smgr.charAt(var1, smgr.indexOf(var2, var1, zero)),
                  smgr.charAt(var1, smgr.indexOf(var1, var2, zero)))));
  // If a String contains {, but not B, the index of B must be -1. (The unicode escape of '{'
  // textually contains a 'B', but it is one single character.)
  assertThatFormula(
          bmgr.and(
              smgr.contains(var1, curlyOpen2BUnicode),
              bmgr.not(smgr.contains(var1, smgr.makeString("B")))))
      .implies(
          bmgr.and(
              imgr.greaterOrEquals(smgr.indexOf(var1, curlyOpen2BUnicode, zero), zero),
              imgr.equal(imgr.makeNumber(-1), smgr.indexOf(var1, smgr.makeString("B"), zero))));
}
@Test
public void testStringIndexOfWithSubStrings() throws SolverException, InterruptedException {
  assume()
      .withMessage("Solver %s runs endlessly on this task", solverToUse())
      .that(solverToUse())
      .isNotEqualTo(Solvers.Z3);

  StringFormula var1 = smgr.makeVariable("var1");
  IntegerFormula zero = imgr.makeNumber(0);
  IntegerFormula one = imgr.makeNumber(1);
  IntegerFormula two = imgr.makeNumber(2);

  // If "abba" starts at index 0, then "bba" and "b" first occur at index 1, and "ba" at index 2.
  assertThatFormula(imgr.equal(zero, smgr.indexOf(var1, smgr.makeString("abba"), zero)))
      .implies(
          bmgr.and(
              smgr.contains(var1, smgr.makeString("abba")),
              imgr.equal(one, smgr.indexOf(var1, smgr.makeString("bba"), zero)),
              imgr.equal(one, smgr.indexOf(var1, smgr.makeString("b"), zero)),
              imgr.equal(two, smgr.indexOf(var1, smgr.makeString("ba"), zero))));
}
@Test
public void testStringPrefixImpliesPrefixIndexOf() throws SolverException, InterruptedException {
  assume()
      .withMessage("Solver %s runs endlessly on this task", solverToUse())
      .that(solverToUse())
      .isNoneOf(Solvers.Z3, Solvers.CVC4);

  StringFormula var1 = smgr.makeVariable("var1");
  StringFormula var2 = smgr.makeVariable("var2");
  IntegerFormula zero = imgr.makeNumber(0);

  // A non-empty prefix var2 of var1 implies: var1 contains var2, var1 is at least as long
  // as var2, and the first occurrence of var2 within var1 is at index 0.
  BooleanFormula nonEmptyPrefix =
      bmgr.and(imgr.greaterThan(smgr.length(var2), zero), smgr.prefix(var2, var1));
  BooleanFormula expectedConsequences =
      bmgr.and(
          smgr.contains(var1, var2),
          imgr.greaterOrEquals(smgr.length(var1), smgr.length(var2)),
          imgr.equal(zero, smgr.indexOf(var1, var2, zero)));
  assertThatFormula(nonEmptyPrefix).implies(expectedConsequences);
}
@Test
public void testConstStringSubStrings() throws SolverException, InterruptedException {
  StringFormula empty = smgr.makeString("");
  StringFormula a = smgr.makeString("a");
  StringFormula aUppercase = smgr.makeString("A");
  StringFormula bUppercase = smgr.makeString("B");
  StringFormula bbbbbb = smgr.makeString("bbbbbb");
  StringFormula curlyOpen2BUnicode = smgr.makeString("\\u{7B}");
  StringFormula curlyClose2BUnicode = smgr.makeString("\\u{7D}");
  StringFormula multipleCurlys2BUnicode = smgr.makeString("\\u{7B}\\u{7D}\\u{7B}\\u{7B}");
  IntegerFormula zero = imgr.makeNumber(0);
  IntegerFormula one = imgr.makeNumber(1);
  // Check empty string
  assertEqual(smgr.substring(empty, zero, zero), empty);
  // Check length 0 = empty string (the third argument is a LENGTH, not an end index)
  assertEqual(smgr.substring(a, one, zero), empty);
  // Check that it correctly recognized uppercase
  assertDistinct(smgr.substring(a, zero, one), aUppercase);
  assertDistinct(smgr.substring(aUppercase, zero, one), a);
  assertDistinct(smgr.substring(bbbbbb, zero, one), bUppercase);
  // Check smgr length interaction: substring(s, 0, |s|) is s itself
  assertEqual(smgr.substring(bbbbbb, zero, smgr.length(bbbbbb)), bbbbbb);
  // Check unicode substrings: each escaped 2-byte unicode char occupies one position
  assertEqual(smgr.substring(multipleCurlys2BUnicode, zero, one), curlyOpen2BUnicode);
  assertEqual(smgr.substring(multipleCurlys2BUnicode, one, one), curlyClose2BUnicode);
}
@Test
public void testConstStringAllPossibleSubStrings() throws SolverException, InterruptedException {
  for (String wordString : WORDS) {
    StringFormula word = smgr.makeString(wordString);
    // Loop over every (begin, end) combination of substrings of the word.
    // Note: String.substring takes a begin index and an exclusive end index, while the
    // SMT-based substring takes a begin index and a LENGTH, so length = end - begin.
    for (int begin = 0; begin < wordString.length(); begin++) {
      for (int end = begin; end < wordString.length(); end++) {
        String expected = wordString.substring(begin, end);
        assertEqual(
            smgr.substring(word, imgr.makeNumber(begin), imgr.makeNumber(end - begin)),
            smgr.makeString(expected));
      }
    }
  }
}
@Test
public void testStringSubstringOutOfBounds() throws SolverException, InterruptedException {
  StringFormula b = smgr.makeString("b");
  StringFormula bbbbbb = smgr.makeString("bbbbbb");
  StringFormula abbbbbb = smgr.makeString("abbbbbb");
  StringFormula multipleCurlys2BUnicode = smgr.makeString("\\u{7B}\\u{7D}\\u{7B}\\u{7B}");
  StringFormula multipleCurlys2BUnicodeFromIndex1 = smgr.makeString("\\u{7D}\\u{7B}\\u{7B}");

  // A length argument far past the end of the string yields the remaining suffix.
  IntegerFormula hugeLength = imgr.makeNumber(10000);
  assertEqual(smgr.substring(abbbbbb, imgr.makeNumber(0), hugeLength), abbbbbb);
  assertEqual(smgr.substring(abbbbbb, imgr.makeNumber(6), hugeLength), b);
  assertEqual(smgr.substring(abbbbbb, imgr.makeNumber(1), hugeLength), bbbbbb);
  // The same truncation applies to strings containing escaped unicode characters.
  assertEqual(
      smgr.substring(multipleCurlys2BUnicode, imgr.makeNumber(1), hugeLength),
      multipleCurlys2BUnicodeFromIndex1);
}
@Test
public void testStringVariablesSubstring() throws SolverException, InterruptedException {
  StringFormula var1 = smgr.makeVariable("var1");
  StringFormula var2 = smgr.makeVariable("var2");
  IntegerFormula intVar1 = imgr.makeVariable("intVar1");
  IntegerFormula intVar2 = imgr.makeVariable("intVar2");
  // If a Prefix of a certain length exists, the substring over that length equals the prefix
  assertThatFormula(smgr.prefix(var2, var1))
      .implies(smgr.equal(var2, smgr.substring(var1, imgr.makeNumber(0), smgr.length(var2))));
  // Same with suffix: the substring starting at |var1| - |var2| equals the suffix
  assertThatFormula(smgr.suffix(var2, var1))
      .implies(
          smgr.equal(
              var2,
              smgr.substring(
                  var1, imgr.subtract(smgr.length(var1), smgr.length(var2)), smgr.length(var2))));
  // If a string has a char at a specified position, a substring beginning with the same index
  // must have the same char, independent of the length of the substring.
  // But it's not really relevant to check out-of-bounds cases, hence the exclusion.
  // So we test substring length 1 (== charAt) and larger
  assertThatFormula(
          bmgr.and(
              imgr.greaterThan(intVar2, imgr.makeNumber(1)),
              smgr.equal(var2, smgr.charAt(var1, intVar1)),
              imgr.greaterThan(smgr.length(var1), intVar1)))
      .implies(
          smgr.equal(
              var2, smgr.charAt(smgr.substring(var1, intVar1, intVar2), imgr.makeNumber(0))));
  // A substring of length 1 behaves exactly like charAt
  assertThatFormula(smgr.equal(var2, smgr.charAt(var1, intVar1)))
      .implies(smgr.equal(var2, smgr.substring(var1, intVar1, imgr.makeNumber(1))));
}
@Test
public void testConstStringReplace() throws SolverException, InterruptedException {
  for (int i = 0; i < WORDS.size(); i++) {
    // NOTE(review): j starts at 2, so the pair (WORDS[0], WORDS[1]) is never used as
    // (replacement, needle) — confirm this offset is intentional.
    for (int j = 2; j < WORDS.size(); j++) {
      String word1 = WORDS.get(j - 1);
      String word2 = WORDS.get(j);
      String word3 = WORDS.get(i);
      StringFormula word1F = smgr.makeString(word1);
      StringFormula word2F = smgr.makeString(word2);
      StringFormula word3F = smgr.makeString(word3);

      // SMT-LIB str.replace replaces the first occurrence of word2 LITERALLY, whereas
      // String.replaceFirst interprets its arguments as a regex and a replacement pattern.
      // Quote both so that words containing regex metacharacters ('.', '$', '\', ...) are
      // matched literally, in line with the solver's semantics.
      String expected =
          word3.replaceFirst(
              java.util.regex.Pattern.quote(word2),
              java.util.regex.Matcher.quoteReplacement(word1));
      StringFormula result = smgr.makeString(expected);
      assertEqual(smgr.replace(word3F, word2F, word1F), result);
    }
  }
}
// Neither CVC4 nor Z3 can solve this!
@Ignore
@Test
public void testStringVariableReplacePrefix() throws SolverException, InterruptedException {
  StringFormula var1 = smgr.makeVariable("var1");
  StringFormula var2 = smgr.makeVariable("var2");
  StringFormula var3 = smgr.makeVariable("var3");
  StringFormula prefix = smgr.makeVariable("prefix");
  // If var1 has a prefix, and you replace said prefix with var3 (saved in var2), the prefix of
  // var2 is var3. The length constraints exclude the degenerate empty prefix/replacement cases.
  assertThatFormula(
          bmgr.and(
              smgr.equal(var2, smgr.replace(var1, prefix, var3)),
              smgr.prefix(prefix, var1),
              bmgr.not(smgr.equal(prefix, var3)),
              imgr.greaterThan(smgr.length(prefix), imgr.makeNumber(0)),
              imgr.greaterThan(smgr.length(var3), imgr.makeNumber(0))))
      .implies(bmgr.and(bmgr.not(smgr.equal(var1, var2)), smgr.prefix(var3, var2)));
  // Weaker variant of the same property, without the non-empty length constraints.
  assertThatFormula(
          bmgr.and(
              smgr.equal(var2, smgr.replace(var1, prefix, var3)),
              smgr.prefix(prefix, var1),
              bmgr.not(smgr.equal(prefix, var3))))
      .implies(bmgr.and(smgr.prefix(var3, var2), bmgr.not(smgr.equal(var1, var2))));
}
@Test
public void testStringVariableReplaceSubstring() throws SolverException, InterruptedException {
// I couldn't find stronger constraints in the implication that don't run endlessly.....
StringFormula original = smgr.makeVariable("original");
StringFormula prefix = smgr.makeVariable("prefix");
StringFormula replacement = smgr.makeVariable("replacement");
StringFormula replaced = smgr.makeVariable("replaced");
// Set a prefix that does not contain the suffix substring, make sure that the substring that
// comes after the prefix is replaced
assertThatFormula(
bmgr.and(
smgr.prefix(prefix, original),
imgr.equal(
smgr.length(prefix),
smgr.indexOf(
original,
smgr.substring(original, smgr.length(prefix), smgr.length(original)),
imgr.makeNumber(0))),
imgr.greaterThan(smgr.length(original), smgr.length(prefix)),
imgr.greaterThan(smgr.length(prefix), imgr.makeNumber(0)),
imgr.greaterThan(
smgr.length(
smgr.substring(original, smgr.length(prefix), smgr.length(original))),
imgr.makeNumber(0)),
smgr.equal(
replaced,
smgr.replace(
original,
smgr.substring(original, smgr.length(prefix), smgr.length(original)),
replacement))))
.implies(
smgr.equal(
replacement, smgr.substring(replaced, smgr.length(prefix), smgr.length(replaced))));
// In this version it is still possible that parts of the prefix and suffix together build the
// suffix, replacing parts of the prefix additionally to the implication above (or the
// replacement is empty)
assertThatFormula(
bmgr.and(
smgr.prefix(prefix, original),
bmgr.not(smgr.contains(original, replacement)),
bmgr.not(
smgr.contains(
smgr.substring(original, smgr.length(prefix), smgr.length(original)),
prefix)),
bmgr.not(
smgr.contains(
prefix,
smgr.substring(original, smgr.length(prefix), smgr.length(original)))),
imgr.greaterThan(smgr.length(original), smgr.length(prefix)),
imgr.greaterThan(smgr.length(prefix), imgr.makeNumber(0)),
smgr.equal(
replaced,
smgr.replace(
original,
smgr.substring(original, smgr.length(prefix), smgr.length(original)),
replacement))))
.implies(smgr.contains(replacement, replacement));
// This version may have the original as a larger version of the prefix; prefix: a, original:
// aaa
assertThatFormula(
bmgr.and(
smgr.prefix(prefix, original),
bmgr.not(smgr.contains(original, replacement)),
bmgr.not(
smgr.contains(
prefix,
smgr.substring(original, smgr.length(prefix), smgr.length(original)))),
imgr.greaterThan(smgr.length(original), smgr.length(prefix)),
imgr.greaterThan(smgr.length(prefix), imgr.makeNumber(0)),
smgr.equal(
replaced,
smgr.replace(
original,
smgr.substring(original, smgr.length(prefix), smgr.length(original)),
replacement))))
.implies(smgr.contains(replacement, replacement));
// This version can contain the substring in the prefix!
assertThatFormula(
bmgr.and(
smgr.prefix(prefix, original),
bmgr.not(smgr.contains(original, replacement)),
imgr.greaterThan(smgr.length(original), smgr.length(prefix)),
imgr.greaterThan(smgr.length(prefix), imgr.makeNumber(0)),
smgr.equal(
replaced,
smgr.replace(
original,
smgr.substring(original, smgr.length(prefix), smgr.length(original)),
replacement))))
.implies(smgr.contains(replacement, replacement));
}
  /**
   * Replaces the middle part of a three-way concatenation and checks that only that part changed.
   */
  @Test
  public void testStringVariableReplaceMiddle() throws SolverException, InterruptedException {
    // TODO: either rework that this terminates, or remove
    assume()
        .withMessage("Solver %s runs endlessly on this task.", solverToUse())
        .that(solverToUse())
        .isNoneOf(Solvers.CVC4, Solvers.Z3);
    StringFormula original = smgr.makeVariable("original");
    StringFormula replacement = smgr.makeVariable("replacement");
    StringFormula replaced = smgr.makeVariable("replaced");
    StringFormula beginning = smgr.makeVariable("beginning");
    StringFormula middle = smgr.makeVariable("middle");
    StringFormula end = smgr.makeVariable("end");
    // If beginning + middle + end (length of each > 0) get concatenated (in original), replacing
    // middle with replacement (result = replaced; replacement length > 0 and != the replaced part)
    // yields a string equal to the concatenation of the two unchanged outer strings around the
    // replacement.
    // This is tested with 2 different implications: one that only checks whether the
    // replacement is contained in the result (and the result differs from the original), and one
    // that pins the exact concatenation.
    BooleanFormula formula =
        bmgr.and(
            smgr.equal(original, smgr.concat(beginning, middle, end)),
            smgr.equal(replaced, smgr.replace(original, middle, replacement)),
            bmgr.not(smgr.equal(middle, replacement)),
            bmgr.not(smgr.equal(beginning, replacement)),
            bmgr.not(smgr.equal(end, replacement)),
            bmgr.not(smgr.equal(beginning, middle)),
            imgr.greaterThan(smgr.length(middle), imgr.makeNumber(0)),
            imgr.greaterThan(smgr.length(replacement), imgr.makeNumber(0)),
            imgr.greaterThan(smgr.length(beginning), imgr.makeNumber(0)));
    // Weaker implication: result changed and contains the replacement.
    assertThatFormula(formula)
        .implies(
            bmgr.and(
                bmgr.not(smgr.equal(original, replaced)), smgr.contains(replaced, replacement)));
    // Stronger implication: the result is exactly beginning + replacement + end.
    assertThatFormula(formula)
        .implies(
            bmgr.and(
                bmgr.not(smgr.equal(original, replaced)),
                smgr.equal(replaced, smgr.concat(beginning, replacement, end))));
  }
@Test
public void testStringVariableReplaceFront() throws SolverException, InterruptedException {
assume()
.withMessage("Solver %s runs endlessly on this task.", solverToUse())
.that(solverToUse())
.isNotEqualTo(Solvers.Z3);
StringFormula var1 = smgr.makeVariable("var1");
StringFormula var2 = smgr.makeVariable("var2");
StringFormula var3 = smgr.makeVariable("var3");
StringFormula var4 = smgr.makeVariable("var4");
StringFormula var5 = smgr.makeVariable("var5");
// If var1 and 2 get concated (in var4) such that var1 is in front, replacing var1 with var3
// (var5) results in a
// string that is equal to var3 + var2
// First with length constraints, second without
assertThatFormula(
bmgr.and(
smgr.equal(var4, smgr.concat(var1, var2)),
smgr.equal(var5, smgr.replace(var4, var1, var3)),
bmgr.not(smgr.equal(var1, var3)),
imgr.greaterThan(smgr.length(var1), imgr.makeNumber(0)),
imgr.greaterThan(smgr.length(var3), imgr.makeNumber(0))))
.implies(bmgr.and(bmgr.not(smgr.equal(var4, var5)), smgr.prefix(var3, var5)));
assertThatFormula(
bmgr.and(
smgr.equal(var4, smgr.concat(var1, var2)),
smgr.equal(var5, smgr.replace(var4, var1, var3)),
bmgr.not(smgr.equal(var1, var3))))
.implies(bmgr.and(bmgr.not(smgr.equal(var4, var5)), smgr.prefix(var3, var5)));
}
@Test
public void testConstStringReplaceAll() throws SolverException, InterruptedException {
assume()
.withMessage("Solver %s does not support replaceAll()", solverToUse())
.that(solverToUse())
.isNotEqualTo(Solvers.Z3);
for (int i = 0; i < WORDS.size(); i++) {
for (int j = 1; j < WORDS.size(); j++) {
String word1 = WORDS.get(i);
String word2 = WORDS.get(j);
String word3 = "replacement";
StringFormula word1F = smgr.makeString(word1);
StringFormula word2F = smgr.makeString(word2);
StringFormula word3F = smgr.makeString(word3);
StringFormula result = smgr.makeString(word3.replaceAll(word2, word1));
assertEqual(smgr.replaceAll(word3F, word2F, word1F), result);
}
}
}
  /**
   * Concatenates copies of one segment into a string and then replaces every occurrence of the
   * segment with replaceAll. The resulting string should consist solely of concatenated copies of
   * the replacement.
   */
  @Test
  public void testStringVariableReplaceAllConcatedString()
      throws SolverException, InterruptedException {
    assume()
        .withMessage("Solver %s does not support replaceAll()", solverToUse())
        .that(solverToUse())
        .isNotEqualTo(Solvers.Z3);
    // 2 concats is the max number CVC4 supports without running endlessly
    // NOTE(review): the first iteration uses numOfConcats == 0, i.e. concat of an empty array —
    // presumably the empty string; verify smgr.concat accepts zero arguments.
    for (int numOfConcats = 0; numOfConcats < 3; numOfConcats++) {
      StringFormula original = smgr.makeVariable("original");
      StringFormula replacement = smgr.makeVariable("replacement");
      StringFormula replaced = smgr.makeVariable("replaced");
      StringFormula segment = smgr.makeVariable("segment");
      // Build "segment ++ segment ++ ..." and its element-wise replacement counterpart.
      StringFormula[] concatSegments = new StringFormula[numOfConcats];
      StringFormula[] concatReplacements = new StringFormula[numOfConcats];
      for (int i = 0; i < numOfConcats; i++) {
        concatSegments[i] = segment;
        concatReplacements[i] = replacement;
      }
      BooleanFormula formula =
          bmgr.and(
              smgr.equal(original, smgr.concat(concatSegments)),
              smgr.equal(replaced, smgr.replaceAll(original, segment, replacement)),
              bmgr.not(smgr.equal(segment, replacement)),
              imgr.greaterThan(smgr.length(segment), imgr.makeNumber(0)),
              imgr.greaterThan(smgr.length(replacement), imgr.makeNumber(0)));
      assertThatFormula(formula).implies(smgr.equal(replaced, smgr.concat(concatReplacements)));
    }
  }
  /**
   * Like {@code testStringVariableReplaceSubstring}, but using {@code replaceAll}: replacing the
   * suffix that follows a prefix must place the replacement directly after the prefix.
   */
  @Test
  public void testStringVariableReplaceAllSubstring() throws SolverException, InterruptedException {
    assume()
        .withMessage("Solver %s does not support replaceAll()", solverToUse())
        .that(solverToUse())
        .isNotEqualTo(Solvers.Z3);
    // Stronger implications tend to make the solvers run endlessly, so this one stays weak.
    StringFormula original = smgr.makeVariable("original");
    StringFormula prefix = smgr.makeVariable("prefix");
    StringFormula replacement = smgr.makeVariable("replacement");
    StringFormula replaced = smgr.makeVariable("replaced");
    // Set a prefix whose following substring (the suffix) first occurs exactly after the prefix,
    // then make sure that replacing all occurrences of that suffix leaves the replacement
    // directly after the prefix in the result.
    assertThatFormula(
            bmgr.and(
                smgr.prefix(prefix, original),
                // The suffix's first occurrence starts exactly at the end of the prefix.
                imgr.equal(
                    smgr.length(prefix),
                    smgr.indexOf(
                        original,
                        smgr.substring(original, smgr.length(prefix), smgr.length(original)),
                        imgr.makeNumber(0))),
                imgr.greaterThan(smgr.length(original), smgr.length(prefix)),
                imgr.greaterThan(smgr.length(prefix), imgr.makeNumber(0)),
                // Non-empty suffix, so the replacement actually happens.
                imgr.greaterThan(
                    smgr.length(
                        smgr.substring(original, smgr.length(prefix), smgr.length(original))),
                    imgr.makeNumber(0)),
                smgr.equal(
                    replaced,
                    smgr.replaceAll(
                        original,
                        smgr.substring(original, smgr.length(prefix), smgr.length(original)),
                        replacement))))
        .implies(
            smgr.equal(
                replacement, smgr.substring(replaced, smgr.length(prefix), smgr.length(replaced))));
  }
  /**
   * Concatenating the textual pieces of a unicode escape sequence must NOT produce the escaped
   * character itself.
   *
   * <p>NOTE(review): this relies on {@code makeString} interpreting the SMT-LIB escape
   * {@code \u{7B}} as the single character "{" — the concatenation below builds the 6-character
   * literal text "\u{7B}", which must be distinct from it. Confirm the escape handling of
   * {@code makeString}.
   */
  @Test
  public void testStringConcatWUnicode() throws SolverException, InterruptedException {
    StringFormula backslash = smgr.makeString("\\");
    StringFormula u = smgr.makeString("u");
    // "\\u{7B}" is itself an escape for "{" (the opening curly brace).
    StringFormula curlyOpen = smgr.makeString("\\u{7B}");
    StringFormula sevenB = smgr.makeString("7B");
    // "\\u{7D}" escapes "}" (the closing curly brace).
    StringFormula curlyClose = smgr.makeString("\\u{7D}");
    StringFormula concat = smgr.concat(backslash, u, curlyOpen, sevenB, curlyClose);
    StringFormula complete = smgr.makeString("\\u{7B}");
    // Concatting parts of unicode does not result in the unicode char!
    assertDistinct(concat, complete);
  }
  /** Placeholder for simple regex-membership tests — not yet implemented. */
  @Test
  public void testStringSimpleRegex() {
    // TODO
  }
}
| |
package com.huetoyou.chatexchange.ui.misc;
import android.app.Activity;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.DividerItemDecoration;
import android.support.v7.widget.LinearLayoutCompat;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SimpleItemAnimator;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.OvershootInterpolator;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.github.clans.fab.FloatingActionButton;
import com.github.clans.fab.FloatingActionMenu;
import com.github.clans.fab.Util;
import com.h6ah4i.android.widget.advrecyclerview.swipeable.RecyclerViewSwipeManager;
import com.huetoyou.chatexchange.R;
import com.huetoyou.chatexchange.ui.activity.ChatroomsExplorationActivity;
import com.huetoyou.chatexchange.ui.activity.main.MainActivity;
import com.huetoyou.chatexchange.ui.frags.UserTileFragment;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu;
import com.wooplr.spotlight.SpotlightConfig;
import com.wooplr.spotlight.SpotlightView;
import com.wooplr.spotlight.prefs.PreferencesManager;
import com.wooplr.spotlight.utils.SpotlightListener;
import com.wooplr.spotlight.utils.SpotlightSequence;
import java.util.ArrayList;
import java.util.List;
public class TutorialStuff
{
    // Lazily initialised shared state, reused across all tutorial entry points.
    private static SharedPreferences mSharedPreferences;
    // Spotlight configuration for "category"-level highlights (lists, tabs, menus).
    private static SpotlightConfig mCategoryConfig;

    // Spotlight usage-ids: each id is shown at most once; PreferencesManager.isDisplayed(id)
    // reports whether the user has already seen it.
    private static final String CHAT_ITEM = "ChatItem";
    private static final String CHAT_ITEM_SLIDE = "ChatItemSlide";
    private static final String CHAT_ITEM_FAM = "ChatItemFam";
    private static final String CHAT_ITEM_ADD = "ChatItemAdd";
    private static final String CHAT_ITEM_HOME = "ChatItemHome";
    private static final String CHAT_ITEM_REMOVE_ALL = "ChatItemRemAll";
    private static final String CHAT_FRAG_MENU_BTN = "ChatFragMenuBtn";
    private static final String MAIN_DRAWER = "MainDrawer";
    private static final String MAIN_MENU = "MainMenu";
    private static final String CHAT_FRAG_FAM = "ChatFragFam";
    private static final String CHAT_FRAG_USERS_FAB = "ChatFragUsersFab";
    private static final String CHAT_FRAG_INFO_FAB = "ChatFragInfoFab";
    private static final String CHAT_FRAG_STARS_FAB = "ChatFragStarsFab";
    private static final String CHAT_FRAG_OPENINBROWSER_FAB = "ChatFragOpeninbrowserFab";
    private static final String CHAT_FRAG_MESSG_ENTRY_BOX = "ChatFragMessgEntryBox";
    private static final String CHAT_FRAG_SEND_MESSG_BTN = "ChatFragSendMessgBtn";
    private static final String USERS_SLIDE_INTRO = "UsersSlideIntro";
    private static final String USERS_SLIDE_INTRO_MORE = "UsersSlideIntroMore";
    private static final String USER_ONE = "User1";
    private static final String USER_MOD = "UserMod";
    private static final String USER_OWNER = "UserOwner";

    // Bundle keys for the example UserTileFragment arguments (see showUsersTutorial).
    private static final String USER_NAME_KEY = "userName";
    private static final String USER_AVATAR_URL_KEY = "userAvatarUrl";
    private static final String USER_URL_KEY = "chatUrl";
    private static final String USER_ID_KEY = "id";
    private static final String USER_LAST_POST_KEY = "lastPost";
    private static final String USER_REP_KEY = "rep";
    private static final String USER_IS_MOD_KEY = "isMod";
    private static final String USER_IS_OWNER_KEY = "isOwner";

    // Usage-ids for the chatrooms-exploration tabs.
    private static final String SE_ROOMS_TAB = "SErooms";
    private static final String SO_ROOMS_TAB = "SOrooms";

    // Spotlight configuration for individual items (single FABs, buttons).
    private static SpotlightConfig mItemConfig;
    /*
     * Main Activity
     */

    /**
     * Runs the spotlight tutorial for the chat sliding menu of the main activity: a dummy chat
     * list is shown and highlighted, a fake swipe demonstrates chat removal, then the floating
     * action menu and each of its buttons are highlighted in sequence. Each step is keyed by a
     * usage-id, so a step the user has already seen is skipped by the Spotlight library.
     */
    public static void showChatSliderTutorial_MainActivity(final Activity activity)
    {
        // Lazily initialise shared preferences and the two spotlight configurations.
        if (mSharedPreferences == null)
        {
            mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
        }
        if (mCategoryConfig == null)
        {
            setCategoryConfig(activity);
        }
        if (mItemConfig == null)
        {
            setItemConfig(activity);
        }
        final FloatingActionMenu chatFam = activity.findViewById(R.id.chat_slide_menu);
        final FloatingActionButton home = activity.findViewById(R.id.home_fab);
        final FloatingActionButton add = activity.findViewById(R.id.add_chat_fab);
        final FloatingActionButton removeAll = activity.findViewById(R.id.remove_all_chats_fab);
        final CustomRecyclerView dummyChats = activity.findViewById(R.id.dummy_chat_list);
        final Drawable ico = activity.getResources().getDrawable(R.mipmap.ic_launcher);
        // Populate a throwaway adapter with three example chatrooms for the demo list.
        final RecyclerViewSwipeManager swipeManager = new RecyclerViewSwipeManager();
        final RecyclerAdapter recyclerAdapter = new RecyclerAdapter(activity, null, swipeManager);
        recyclerAdapter.addItem(new ChatroomRecyclerObject(
                0, "Example 1", "U", ico, 0, 0, 0
        ));
        recyclerAdapter.addItem(new ChatroomRecyclerObject(
                1, "Example 2", "U", ico, 0, 0, 1
        ));
        recyclerAdapter.addItem(new ChatroomRecyclerObject(
                2, "Example 3", "U", ico, 0, 0, 2
        ));
        RecyclerView.Adapter adapter = swipeManager.createWrappedAdapter(recyclerAdapter);
        dummyChats.setAdapter(adapter);
        // disable change animations
        ((SimpleItemAnimator) dummyChats.getItemAnimator()).setSupportsChangeAnimations(false);
        swipeManager.attachRecyclerView(dummyChats);
        DividerItemDecoration dividerItemDecoration = new DividerItemDecoration(dummyChats.getContext(),
                DividerItemDecoration.VERTICAL);
        dummyChats.addItemDecoration(dividerItemDecoration);
        // Fake swipes used to demonstrate the swipe-to-remove gesture on the dummy list.
        final OnSwipeListener onSwipeListener = new OnSwipeListener()
        {
            @Override
            public void onSwipeRight(RecyclerView.ViewHolder viewHolder)
            {
                swipeManager.performFakeSwipe(viewHolder, RecyclerViewSwipeManager.RESULT_SWIPED_RIGHT);
            }

            @Override
            public void onSwipeLeft(RecyclerView.ViewHolder viewHolder)
            {
                swipeManager.performFakeSwipe(viewHolder, RecyclerViewSwipeManager.RESULT_SWIPED_LEFT);
            }
        };
        PreferencesManager manager = new PreferencesManager(activity);
        // If the first step has not been shown yet, swap the real chat list for the dummy one and
        // block touches for the duration of the tutorial.
        if (!manager.isDisplayed(CHAT_ITEM))
        {
            activity.findViewById(R.id.chatroomsListView).setVisibility(View.GONE);
            dummyChats.setVisibility(View.VISIBLE);
            MainActivity.touchesBlocked = true;
        }
        // First step is shown immediately; the remaining builders are shown one after another
        // from the listener below as the user taps through.
        // NOTE(review): the listener is attached to "chats" only after show() — confirm the
        // library tolerates a listener registered after the spotlight is displayed.
        SpotlightView chats = new SpotlightView.Builder(activity)
                .target(dummyChats)
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_chats_tutorial_text_title))
                .subHeadingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_chats_tutorial_text))
                .usageId(CHAT_ITEM)
                .targetPadding(Util.dpToPx(activity, 50))
                .show();
        final SpotlightView.Builder chatsSwipe = new SpotlightView.Builder(activity)
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_chats_tutorial_text_swipe_delete))
                .subHeadingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_chats_tutorial_swipe_left_text))
                .usageId(CHAT_ITEM_SLIDE);
        final SpotlightView.Builder chatFAM = new SpotlightView.Builder(activity)
                .setConfiguration(mCategoryConfig)
                .target(chatFam.getMenuButton())
                .headingTvText(activity.getResources().getString(R.string.tutorial_menu))
                .subHeadingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_FAM_tutorial_text))
                .usageId(CHAT_ITEM_FAM);
        final SpotlightView.Builder chatHome = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .target(home)
                .headingTvText(activity.getResources().getString(R.string.tutorial_home))
                .subHeadingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_homeFAB_tutorial_text))
                .usageId(CHAT_ITEM_HOME);
        final SpotlightView.Builder chatAdd = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .target(add)
                .headingTvText(activity.getResources().getString(R.string.tutorial_add))
                .subHeadingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_addChatFAB_tutorial_text))
                .usageId(CHAT_ITEM_ADD);
        final SpotlightView.Builder chatRemAll = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .target(removeAll)
                .headingTvText(activity.getResources().getString(R.string.tutorial_remove_all))
                .subHeadingTvText(activity.getResources().getString(R.string.chatrooms_slidingMenu_removeALlChatsFAB_tutorial_text))
                .usageId(CHAT_ITEM_REMOVE_ALL);
        // Chains the steps: each completed step shows the next builder and keeps touches blocked
        // until the final step restores the real chat list.
        SpotlightListener listener = new SpotlightListener()
        {
            @Override
            public void onUserClicked(String s)
            {
                switch (s)
                {
                    case CHAT_ITEM:
                        chatsSwipe.target(recyclerAdapter.getViewHolderAt(0).getCloseChatButton()).show();
                        onSwipeListener.onSwipeRight(recyclerAdapter.getViewHolderAt(0));
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_ITEM_SLIDE:
                        onSwipeListener.onSwipeLeft(recyclerAdapter.getViewHolderAt(0));
                        MainActivity.touchesBlocked = true;
                        chatFAM.show();
                        break;
                    case CHAT_ITEM_FAM:
                        chatFam.open(true);
                        chatHome.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_ITEM_HOME:
                        chatAdd.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_ITEM_ADD:
                        chatRemAll.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_ITEM_REMOVE_ALL:
                        // Last step: restore the real list and re-enable touch input.
                        chatFam.close(true);
                        activity.findViewById(R.id.chatroomsListView).setVisibility(View.VISIBLE);
                        dummyChats.setVisibility(View.GONE);
                        MainActivity.touchesBlocked = false;
                        break;
                }
            }

            @Override
            public void onFinishedDrawingSpotlight()
            {
                // NOTE(review): unblocking here as well as in onStartedDrawingSpotlight looks
                // inconsistent with the per-step blocking above — confirm intended behaviour.
                MainActivity.touchesBlocked = false;
            }

            @Override
            public void onStartedDrawingSpotlight()
            {
                MainActivity.touchesBlocked = false;
            }
        };
        chats.setListener(listener);
        chatsSwipe.setListener(listener);
        chatFAM.setListener(listener);
        chatHome.setListener(listener);
        chatAdd.setListener(listener);
        chatRemAll.setListener(listener);
    }
public static void chatsExplorationTutorial(final Activity activity, final LinearLayoutCompat hueLayout)
{
PreferencesManager manager = new PreferencesManager(activity);
if (!manager.isDisplayed(SE_ROOMS_TAB))
{
ChatroomsExplorationActivity.touchesBlocked = true;
}
if (mCategoryConfig == null)
{
setCategoryConfig(activity);
}
ArrayList<View> seTxtView = new ArrayList<>();
final ArrayList<View> soTxtView = new ArrayList<>();
hueLayout.getChildAt(0).findViewsWithText(seTxtView, "SE", View.FIND_VIEWS_WITH_TEXT);
hueLayout.getChildAt(1).findViewsWithText(soTxtView, "SO", View.FIND_VIEWS_WITH_TEXT);
SpotlightView SErooms = new SpotlightView.Builder(activity)
.target(seTxtView.get(0))
.usageId(SE_ROOMS_TAB)
.setConfiguration(mCategoryConfig)
.headingTvText(activity.getResources().getString(R.string.CEA_SErooms_tab_tutorial_heading))
.subHeadingTvText(activity.getResources().getString(R.string.CEA_SErooms_tab_tutorial_text))
.show();
final SpotlightView.Builder SOrooms = new SpotlightView.Builder(activity)
.setConfiguration(mCategoryConfig)
.headingTvText(activity.getResources().getString(R.string.CEA_SOrooms_tab_tutorial_heading))
.subHeadingTvText(activity.getResources().getString(R.string.CEA_SOrooms_tab_tutorial_text))
.usageId(SO_ROOMS_TAB);
SpotlightListener listener = new SpotlightListener()
{
@Override
public void onUserClicked(String s)
{
switch (s)
{
case SE_ROOMS_TAB:
ChatroomsExplorationActivity.touchesBlocked = true;
SOrooms.target(soTxtView.get(0)).show();
break;
case SO_ROOMS_TAB:
ChatroomsExplorationActivity.touchesBlocked = false;
break;
}
}
@Override
public void onFinishedDrawingSpotlight()
{
ChatroomsExplorationActivity.touchesBlocked = false;
}
@Override
public void onStartedDrawingSpotlight()
{
ChatroomsExplorationActivity.touchesBlocked = false;
}
};
SErooms.setListener(listener);
SOrooms.setListener(listener);
}
    /*
     * Home fragment
     */

    /**
     * Runs the two-step spotlight tutorial on the home fragment's action bar: first the drawer
     * ("hamburger") button, then the options-menu button.
     */
    public static void homeFragTutorial(final MainActivity activity)
    {
        if (mSharedPreferences == null)
        {
            mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
        }
        if (mCategoryConfig == null)
        {
            setCategoryConfig(activity);
        }

        // NOTE(review): dead alternative implementation via SpotlightSequence — superseded by the
        // manual listener chain below; consider deleting.
//        SpotlightSequence.getInstance(activity, mCategoryConfig)
//                .addSpotlight(Utils.getActionBar(activity.getWindow().getDecorView()).getChildAt(1),
//                        activity.getResources().getString(R.string.tutorial_drawer),
//                        activity.getResources().getString(R.string.homeFrag_hamburger_tutorial_text),
//                        MAIN_DRAWER)
//                .addSpotlight(Utils.getActionBar(activity.getWindow().getDecorView()).getChildAt(2),
//                        activity.getResources().getString(R.string.tutorial_menu),
//                        activity.getResources().getString(R.string.homeFrag_options_menu_tutorial_text),
//                        MAIN_MENU)
//                .startSequence();

        PreferencesManager manager = new PreferencesManager(activity);
        if (!manager.isDisplayed(MAIN_DRAWER))
        {
            MainActivity.touchesBlocked = true;
        }
        // First step shown immediately; the menu step is shown from the listener.
        SpotlightView drawer = new SpotlightView.Builder(activity)
                .target(Utils.getActionBar(activity.getWindow().getDecorView()).getChildAt(1))
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_drawer))
                .subHeadingTvText(activity.getResources().getString(R.string.homeFrag_hamburger_tutorial_text))
                .usageId(MAIN_DRAWER)
                .show();
        final SpotlightView.Builder menu = new SpotlightView.Builder(activity)
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_menu))
                .subHeadingTvText(activity.getResources().getString(R.string.homeFrag_options_menu_tutorial_text))
                .usageId(MAIN_MENU);
        SpotlightListener listener = new SpotlightListener()
        {
            @Override
            public void onUserClicked(String s)
            {
                switch (s)
                {
                    case MAIN_DRAWER:
                        // Advance to the options-menu step (action bar child 2).
                        menu.target(Utils.getActionBar(activity.getWindow().getDecorView()).getChildAt(2)).show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case MAIN_MENU:
                        // Tutorial finished: release touch input.
                        MainActivity.touchesBlocked = false;
                        break;
                }
            }

            @Override
            public void onFinishedDrawingSpotlight()
            {
                MainActivity.touchesBlocked = false;
            }

            @Override
            public void onStartedDrawingSpotlight()
            {
                MainActivity.touchesBlocked = false;
            }
        };
        drawer.setListener(listener);
        menu.setListener(listener);
    }
    /*
     * Chat fragment
     */

    /**
     * Runs the spotlight tutorial for the chat fragment: hamburger button, floating action menu,
     * its four FABs (users / info / stars / open-in-browser), message entry box, and send button,
     * chained one after another via the listener below.
     *
     * @param activity      hosting activity (must be the MainActivity)
     * @param view          the chat fragment's root view, used to look up the highlighted widgets
     * @param mAppBarColor  app bar color — currently unused in this method
     */
    public static void chatFragTutorial(Activity activity, View view, int mAppBarColor)
    {
        // Hide the chatrooms sliding menu so it does not cover the highlighted widgets.
        SlidingMenu chatroomsMenu = ((MainActivity) activity).getmChatroomSlidingMenu();
        if (chatroomsMenu.isMenuShowing())
        {
            chatroomsMenu.hideMenu(false);
        }
        if (mSharedPreferences == null)
        {
            mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
        }
        if (mCategoryConfig == null)
        {
            setCategoryConfig(activity);
        }
        if (mItemConfig == null)
        {
            setItemConfig(activity);
        }
        PreferencesManager manager = new PreferencesManager(activity);
        if (!manager.isDisplayed(CHAT_FRAG_MENU_BTN))
        {
            MainActivity.touchesBlocked = true;
        }
        final FloatingActionMenu fam = view.findViewById(R.id.chat_menu);
        final FloatingActionButton users = view.findViewById(R.id.show_users_fab);
        final FloatingActionButton info = view.findViewById(R.id.room_info_fab);
        final FloatingActionButton stars = view.findViewById(R.id.star_fab);
        final FloatingActionButton openInBrowser = view.findViewById(R.id.open_in_browser_fab);
        final EditText messageEntryBox = view.findViewById(R.id.messageToSend);
        final ImageButton sendMsg = view.findViewById(R.id.sendMessageBtn);
        // First step is shown immediately; all remaining builders are shown from the listener.
        final SpotlightView menuBtn = new SpotlightView.Builder(activity)
                .target(Utils.getActionBar(activity.getWindow().getDecorView()).getChildAt(1))
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_menu))
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_hamburger_tutorial_text))
                .usageId(CHAT_FRAG_MENU_BTN)
                .show();
        final SpotlightView.Builder chatFragFam = new SpotlightView.Builder(activity)
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_menu))
                .target(fam.getMenuButton())
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_FAM_tutorial_text))
                .usageId(CHAT_FRAG_FAM);
        final SpotlightView.Builder chatFragUsersFAB = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .headingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_title_main))
                .target(users)
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_showUsersFAB_tutorial_text))
                .usageId(CHAT_FRAG_USERS_FAB);
        final SpotlightView.Builder chatFragInfoFAB = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_info))
                .target(info)
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_roomInfoFAB_tutorial_text))
                .usageId(CHAT_FRAG_INFO_FAB);
        final SpotlightView.Builder chatFragStarsFAB = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_stars))
                .target(stars)
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_starredMessagesFAB_tutorial_text))
                .usageId(CHAT_FRAG_STARS_FAB);
        final SpotlightView.Builder chatFragOpenInBrowserFAB = new SpotlightView.Builder(activity)
                .setConfiguration(mItemConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_open_browser))
                .target(openInBrowser)
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_openInBrowserFAB_tutorial_text))
                .usageId(CHAT_FRAG_OPENINBROWSER_FAB);
        final SpotlightView.Builder chatFragMessageEntryBox = new SpotlightView.Builder(activity)
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_msg_box))
                .target(messageEntryBox)
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_messageEntryBox_tutorial_text))
                .usageId(CHAT_FRAG_MESSG_ENTRY_BOX);
        final SpotlightView.Builder chatFragSendMessageButton = new SpotlightView.Builder(activity)
                .setConfiguration(mCategoryConfig)
                .headingTvText(activity.getResources().getString(R.string.tutorial_send_btn))
                .target(sendMsg)
                .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_sendMsgBtn_tutorial_text))
                .usageId(CHAT_FRAG_SEND_MESSG_BTN);
        // Chains the steps; the FAM is opened before highlighting its FABs and closed again
        // before moving on to the message entry box.
        SpotlightListener huehuelistener = new SpotlightListener()
        {
            @Override
            public void onUserClicked(String s)
            {
                switch (s)
                {
                    case CHAT_FRAG_MENU_BTN:
                        chatFragFam.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_FRAG_FAM:
                        fam.open(true);
                        chatFragUsersFAB.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_FRAG_USERS_FAB:
                        chatFragInfoFAB.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_FRAG_INFO_FAB:
                        chatFragStarsFAB.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_FRAG_STARS_FAB:
                        chatFragOpenInBrowserFAB.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_FRAG_OPENINBROWSER_FAB:
                        fam.close(true);
                        chatFragMessageEntryBox.show();
                        MainActivity.touchesBlocked = true;
                        break;
                    case CHAT_FRAG_MESSG_ENTRY_BOX:
                        // NOTE(review): unblocks while the send-button step is still showing,
                        // unlike every earlier step — confirm this is intentional.
                        chatFragSendMessageButton.show();
                        MainActivity.touchesBlocked = false;
                        break;
                }
            }

            @Override
            public void onFinishedDrawingSpotlight()
            {
                MainActivity.touchesBlocked = false;
            }

            @Override
            public void onStartedDrawingSpotlight()
            {
                MainActivity.touchesBlocked = false;
            }
        };
        menuBtn.setListener(huehuelistener);
        chatFragFam.setListener(huehuelistener);
        chatFragUsersFAB.setListener(huehuelistener);
        chatFragInfoFAB.setListener(huehuelistener);
        chatFragStarsFAB.setListener(huehuelistener);
        chatFragOpenInBrowserFAB.setListener(huehuelistener);
        chatFragMessageEntryBox.setListener(huehuelistener);
        chatFragSendMessageButton.setListener(huehuelistener);
    }
/**
 * Runs the spotlight walkthrough for the users sliding panel.
 * <p>
 * If the intro has not been shown yet ({@code USERS_SLIDE_INTRO} flag), every currently
 * attached fragment is hidden and three placeholder {@link UserTileFragment}s (a normal
 * user, a moderator and a room owner) are attached to the panel so the tutorial has
 * something concrete to point at. A chain of spotlights is then played: panel overview,
 * "more" overview, then one spotlight per example tile. When the last spotlight is
 * dismissed the example tiles are removed and the original fragments are shown again.
 *
 * @param activity the hosting activity; must be an AppCompatActivity containing
 *                 {@code R.id.users_scroll_slide}
 */
public static void showUsersTutorial(final Activity activity)
{
    if (mSharedPreferences == null)
    {
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
    }
    if (mCategoryConfig == null)
    {
        setCategoryConfig(activity);
    }
    if (mItemConfig == null)
    {
        setItemConfig(activity);
    }
    // Placeholder tiles: a normal user, a moderator and a room owner.
    final UserTileFragment userTileFragment = createExampleUserTile("Edwinksl", 0, 12345, false, false);
    final UserTileFragment userTileFragment1 = createExampleUserTile("Thomas Ward", 1, 12346, true, false);
    final UserTileFragment userTileFragment2 = createExampleUserTile("Rinzwind", 2, 12347, false, true);
    PreferencesManager manager = new PreferencesManager(activity);
    LinearLayout users = activity.findViewById(R.id.users_scroll_slide);
    if (!manager.isDisplayed(USERS_SLIDE_INTRO))
    {
        // Hide the real tiles and attach the three example tiles in their place.
        List<android.support.v4.app.Fragment> fragments = ((AppCompatActivity) activity).getSupportFragmentManager().getFragments();
        for (int i = 0; i < fragments.size(); i++)
        {
            ((AppCompatActivity) activity).getSupportFragmentManager().beginTransaction().hide(fragments.get(i)).commit();
        }
        ((AppCompatActivity) activity).getSupportFragmentManager().beginTransaction().add(R.id.users_scroll_slide, userTileFragment, "user_" + 12345).commit();
        ((AppCompatActivity) activity).getSupportFragmentManager().beginTransaction().add(R.id.users_scroll_slide, userTileFragment1, "user_" + 12346).commit();
        ((AppCompatActivity) activity).getSupportFragmentManager().beginTransaction().add(R.id.users_scroll_slide, userTileFragment2, "user_" + 12347).commit();
        MainActivity.touchesBlocked = true;
    }
    // First spotlight is shown immediately; the rest are chained from the listener below.
    SpotlightView usersOverview = new SpotlightView.Builder(activity)
            .target(users)
            .setConfiguration(mCategoryConfig)
            .headingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_title_main))
            .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text))
            .usageId(USERS_SLIDE_INTRO)
            .targetPadding(Util.dpToPx(activity, 50))
            .show();
    final SpotlightView.Builder overviewMore = new SpotlightView.Builder(activity)
            .target(users)
            .setConfiguration(mCategoryConfig)
            .headingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_title_main))
            .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_more))
            .usageId(USERS_SLIDE_INTRO_MORE)
            .targetPadding(Util.dpToPx(activity, 50));
    // NOTE(review): users.getChildAt(0..2) is read before the fragment transactions above
    // have necessarily been laid out — TODO confirm the example tiles are attached by the
    // time these builders resolve their targets.
    final SpotlightView.Builder user1 = new SpotlightView.Builder(activity)
            .target(users.getChildAt(0))
            .setConfiguration(mCategoryConfig)
            .headingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_title_user_normal))
            .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_normalUser_tutorial_text))
            .usageId(USER_ONE);
    final SpotlightView.Builder userMod = new SpotlightView.Builder(activity)
            .target(users.getChildAt(1))
            .setConfiguration(mCategoryConfig)
            .headingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_title_user_mod))
            .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_modUser_tutorial_text))
            .usageId(USER_MOD);
    final SpotlightView.Builder userOwner = new SpotlightView.Builder(activity)
            .setConfiguration(mCategoryConfig)
            .target(users.getChildAt(2))
            .headingTvText(activity.getResources().getString(R.string.chatFrag_usersSlidingPanel_tutorial_text_title_user_owner))
            .subHeadingTvText(activity.getResources().getString(R.string.chatFrag_ROuser_tutorial_text))
            .usageId(USER_OWNER);
    // Chains the spotlights: each dismissal shows the next one; dismissing the last
    // (USER_OWNER) restores the hidden fragments and removes the example tiles.
    SpotlightListener listener = new SpotlightListener()
    {
        @Override
        public void onUserClicked(String s)
        {
            Log.e("Which", s);
            switch (s)
            {
                case USERS_SLIDE_INTRO:
                    overviewMore.show();
                    MainActivity.touchesBlocked = true;
                    break;
                case USERS_SLIDE_INTRO_MORE:
                    user1.show();
                    MainActivity.touchesBlocked = true;
                    break;
                case USER_ONE:
                    userMod.show();
                    MainActivity.touchesBlocked = true;
                    break;
                case USER_MOD:
                    userOwner.show();
                    MainActivity.touchesBlocked = true;
                    break;
                case USER_OWNER:
                    List<android.support.v4.app.Fragment> fragments = ((AppCompatActivity) activity).getSupportFragmentManager().getFragments();
                    for (int i = 0; i < fragments.size(); i++)
                    {
                        ((AppCompatActivity) activity).getSupportFragmentManager().beginTransaction().show(fragments.get(i)).commit();
                    }
                    ((AppCompatActivity) activity).getSupportFragmentManager().beginTransaction().remove(userTileFragment).remove(userTileFragment1).remove(userTileFragment2).commit();
                    MainActivity.touchesBlocked = false;
                    break;
            }
        }
        @Override
        public void onFinishedDrawingSpotlight()
        {
            MainActivity.touchesBlocked = false;
        }
        @Override
        public void onStartedDrawingSpotlight()
        {
            MainActivity.touchesBlocked = false;
        }
    };
    usersOverview.setListener(listener);
    overviewMore.setListener(listener);
    user1.setListener(listener);
    userMod.setListener(listener);
    userOwner.setListener(listener);
}
/**
 * Builds a {@link UserTileFragment} populated with the placeholder "example user"
 * arguments used by the users tutorial. Only the values that differ between the three
 * demo tiles are parameters; the avatar URL, profile URL, reputation (123) and
 * last-post (0) values are fixed.
 *
 * @param userName   display name for the example tile
 * @param exampleNum ordinal of the example tile (0-based)
 * @param userId     fake user id, also used as the fragment tag suffix by the caller
 * @param isMod      whether the example user is shown as a moderator
 * @param isOwner    whether the example user is shown as the room owner
 * @return a new, unattached fragment with its arguments set
 */
private static UserTileFragment createExampleUserTile(String userName, int exampleNum, int userId,
                                                      boolean isMod, boolean isOwner)
{
    Bundle args = new Bundle();
    args.putString(USER_NAME_KEY, userName);
    args.putString(USER_AVATAR_URL_KEY, "https://images.duckduckgo.com/iu/?u=http%3A%2F%2Fimages.clipshrine.com%2Fdownload%2Fdownloadpnglarge%2FBlack-Question-Mark-2269-large.png&f=1");
    args.putString(USER_URL_KEY, "https://example.stackexchange.com");
    args.putBoolean("IsExampleTile", true);
    args.putInt("ExampleNum", exampleNum);
    args.putInt(USER_ID_KEY, userId);
    args.putInt(USER_LAST_POST_KEY, 0);
    args.putInt(USER_REP_KEY, 123);
    args.putBoolean(USER_IS_MOD_KEY, isMod);
    args.putBoolean(USER_IS_OWNER_KEY, isOwner);
    UserTileFragment fragment = new UserTileFragment();
    fragment.setArguments(args);
    return fragment;
}
/**
 * Callback contract for horizontal swipe gestures performed on RecyclerView rows.
 * Implementations receive the ViewHolder of the row that was swiped.
 */
public interface OnSwipeListener
{
/** Called when the given row's view holder has been swiped to the right. */
void onSwipeRight(RecyclerView.ViewHolder viewHolder);
/** Called when the given row's view holder has been swiped to the left. */
void onSwipeLeft(RecyclerView.ViewHolder viewHolder);
}
/**
 * Builds the shared {@link SpotlightConfig} used by the category-level tutorial
 * spotlights: colors, animation durations, dismissal behavior and default texts.
 * Heading/sub-heading text sizes are enlarged on wide (>600dp) screens.
 *
 * @param activity used to resolve strings, animation durations and display metrics
 */
private static void setCategoryConfig(Activity activity)
{
    mCategoryConfig = new SpotlightConfig();
    mCategoryConfig.setIntroAnimationDuration((long) Utils.getAnimDuration(300, activity));
    mCategoryConfig.setRevealAnimationEnabled(true);
    mCategoryConfig.setPerformClick(false);
    mCategoryConfig.setFadingTextDuration((long) Utils.getAnimDuration(200, activity));
    mCategoryConfig.setHeadingTvColor(Color.WHITE);
    mCategoryConfig.setSubHeadingTvColor(Color.WHITE);
    // Dead store removed: the heading was first set to "Drawer" and then immediately
    // overwritten here with no intervening read. NOTE(review): setting a *heading*
    // default to the hamburger tutorial text looks like it may have been meant for the
    // sub-heading — every spotlight builder overrides both, so this is only a default.
    mCategoryConfig.setHeadingTvText(activity.getResources().getString(R.string.homeFrag_hamburger_tutorial_text));
    mCategoryConfig.setMaskColor(Color.parseColor("#aa000000"));
    mCategoryConfig.setLineAnimationDuration((long) Utils.getAnimDuration(300, activity));
    mCategoryConfig.setLineAndArcColor(Color.LTGRAY);
    mCategoryConfig.setDismissOnTouch(true);
    mCategoryConfig.setDismissOnBackpress(true);
    mCategoryConfig.setShowTargetArc(true);
    // Use larger text on tablet-width screens.
    DisplayMetrics metrics = activity.getResources().getDisplayMetrics();
    float density = metrics.density;
    float dpWidth = metrics.widthPixels / density;
    if (dpWidth > 600)
    {
        mCategoryConfig.setHeadingTvSize(48);
        mCategoryConfig.setSubHeadingTvSize(32);
    }
    else
    {
        mCategoryConfig.setHeadingTvSize(24);
        mCategoryConfig.setSubHeadingTvSize(18);
    }
}
/**
 * Lazily initializes the item-level spotlight configuration.
 * NOTE(review): mItemConfig is assigned the SAME object as mCategoryConfig (an alias,
 * not a copy), so mutating one also changes the other — presumably why the duration
 * overrides below were commented out. TODO confirm whether a copy was intended.
 */
private static void setItemConfig(Activity activity)
{
if (mCategoryConfig == null)
{
setCategoryConfig(activity);
}
mItemConfig = mCategoryConfig;
/*mItemConfig.setIntroAnimationDuration(100L);
mItemConfig.setFadingTextDuration(100L);
mItemConfig.setLineAnimationDuration(100L);*/
}
/**
 * Forgets every persisted "tutorial already shown" flag so that all spotlight
 * walkthroughs are displayed again the next time they are triggered.
 *
 * @param activity context used to open the spotlight preferences store
 */
public static void resetSpotlights(Activity activity)
{
    new PreferencesManager(activity).resetAll();
}
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4j.aeron.ipc;
import io.aeron.Aeron;
import io.aeron.FragmentAssembler;
import io.aeron.Subscription;
import lombok.Builder;
import lombok.Data;
import org.agrona.CloseHelper;
import org.agrona.concurrent.SigInt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
/**
*
* Subscriber for ndarrays.
* This is a pass through class for aeron
* that will pass ndarrays received from channels
* to an {@link NDArrayCallback} for operation after
* assembling the ndaray from a raw {@link org.agrona.concurrent.UnsafeBuffer}
*
* @author Adam Gibson
*/
@Data
@Builder
public class AeronNDArraySubscriber implements AutoCloseable {
    // The channel (an endpoint identifier) to receive messages from
    private String channel;
    // A unique identifier for a stream within a channel. Stream ID 0 is reserved
    // for internal use and should not be used by applications.
    // NOTE(review): Lombok's @Builder ignores this field initializer (no @Builder.Default),
    // so an unset builder value arrives here as 0 and init() defaults it to 10.
    private int streamId = -1;
    // Maximum number of message fragments to receive during a single 'poll' operation
    private int fragmentLimitCount;
    // Context needed for the client connection to the media driver.
    // A separate media driver process must run prior to running this application.
    private Aeron.Context ctx;
    // Polled by the subscriber loop; clearing it stops the loop.
    private AtomicBoolean running = new AtomicBoolean(true);
    // Latched once init() has completed, making launch() effectively one-shot.
    private final AtomicBoolean init = new AtomicBoolean(false);
    private static final Logger log = LoggerFactory.getLogger(AeronNDArraySubscriber.class);
    // Callback invoked with each fully reassembled ndarray.
    private NDArrayCallback ndArrayCallback;
    private Aeron aeron;
    private Subscription subscription;
    // Set true by the subscriber loop once it is actively polling.
    private AtomicBoolean launched = new AtomicBoolean(false);
    private Executor executors;

    /**
     * Applies defaults for any unset fields (channel, context, fragment limit, stream id,
     * running flag) and validates that an {@link NDArrayCallback} was supplied.
     *
     * @throws IllegalStateException if no callback was configured via the builder
     */
    private void init() {
        ctx = ctx == null ? new Aeron.Context() : ctx;
        channel = channel == null ? "aeron:udp?endpoint=localhost:40123" : channel;
        fragmentLimitCount = fragmentLimitCount == 0 ? 1000 : fragmentLimitCount;
        streamId = streamId == 0 ? 10 : streamId;
        running = running == null ? new AtomicBoolean(true) : running;
        if (ndArrayCallback == null)
            throw new IllegalStateException("NDArray callback must be specified in the builder.");
        init.set(true);
        log.info("Channel subscriber " + channel + " and stream id " + streamId);
        launched = new AtomicBoolean(false);
    }

    /**
     * Returns true if the subscriber
     * is launched or not
     * @return true if the subscriber is launched, false otherwise
     */
    public boolean launched() {
        // Defensive: @Builder bypasses the field initializer, so launched may be null.
        if (launched == null)
            launched = new AtomicBoolean(false);
        return launched.get();
    }

    /**
     * Launches the subscriber on the calling thread: initializes defaults, registers a
     * SIGINT handler for graceful shutdown, then repeatedly tries to add an Aeron
     * subscription on the configured channel/stream and runs the fragment-polling loop
     * until {@code running} is cleared. This blocks the calling thread; use
     * {@code startSubscriber(...)} to run it on a background thread instead.
     *
     * @throws IllegalStateException if channel, stream id or the aeron instance is missing
     * @throws Exception on unrecoverable errors
     */
    public void launch() throws Exception {
        // One-shot: if init() already ran (from a previous launch), do nothing.
        // (The original code re-tested !init.get() immediately after this return,
        // which was always true at that point; the redundant check was removed.)
        if (init.get())
            return;
        init();
        log.info("Subscribing to " + channel + " on stream Id " + streamId);
        log.info("Using aeron directory " + ctx.aeronDirectoryName());
        // Register a SIGINT handler for graceful shutdown.
        SigInt.register(() -> running.set(false));
        // The Subscription is opened in a try-with-resources block so it is cleaned up
        // automatically when the subscriber loop returns.
        if (channel == null)
            throw new IllegalStateException("No channel for subscriber defined");
        if (streamId <= 0)
            throw new IllegalStateException("No stream for subscriber defined");
        if (aeron == null)
            throw new IllegalStateException("No aeron instance defined");
        boolean started = false;
        // Keep retrying until a subscription is established and the loop exits normally.
        while (!started) {
            try (final Subscription subscription = aeron.addSubscription(channel, streamId)) {
                this.subscription = subscription;
                log.info("Beginning subscribe on channel " + channel + " and stream " + streamId);
                AeronUtil.subscriberLoop(new FragmentAssembler(new NDArrayFragmentHandler(ndArrayCallback)),
                                fragmentLimitCount, running, launched).accept(subscription);
                started = true;
            } catch (Exception e) {
                log.warn("Unable to connect...trying again on channel " + channel, e);
            }
        }
    }

    /**
     * Returns the connection uri in the form of:
     * host:port:streamId
     * @return the formatted connection information for this subscriber's channel
     */
    public String connectionUrl() {
        String[] split = channel.replace("aeron:udp?endpoint=", "").split(":");
        String host = split[0];
        int port = Integer.parseInt(split[1]);
        return AeronConnectionInformation.of(host, port, streamId).toString();
    }

    /**
     * Start a subscriber in another thread
     * based on the given parameters
     * @param aeron the aeron instance to use
     * @param host the host opName to bind to
     * @param port the port to bind to
     * @param callback the call back to use for the subscriber
     * @param streamId the stream id to subscribe to
     * @param running flag shared with the caller; clearing it stops the subscriber
     * @return the subscriber reference
     */
    public static AeronNDArraySubscriber startSubscriber(Aeron aeron, String host, int port, NDArrayCallback callback,
                    int streamId, AtomicBoolean running) {
        AeronNDArraySubscriber subscriber = AeronNDArraySubscriber.builder().streamId(streamId).aeron(aeron)
                        .channel(AeronUtil.aeronChannel(host, port)).running(running).ndArrayCallback(callback).build();
        Thread t = new Thread(() -> {
            try {
                subscriber.launch();
            } catch (Exception e) {
                // Include context instead of an empty message so failures are diagnosable.
                log.error("Error launching ndarray subscriber on " + host + ":" + port, e);
            }
        });
        t.start();
        return subscriber;
    }

    /**
     * Start a subscriber in another thread
     * based on the given parameters.
     * NOTE(review): this overload only sets {@code ctx}, but launch() requires a non-null
     * {@code aeron} instance and will throw IllegalStateException — confirm intended usage.
     * @param context the context to use
     * @param host the host opName to bind to
     * @param port the port to bind to
     * @param callback the call back to use for the subscriber
     * @param streamId the stream id to subscribe to
     * @param running flag shared with the caller; clearing it stops the subscriber
     * @return the subscriber reference
     */
    public static AeronNDArraySubscriber startSubscriber(Aeron.Context context, String host, int port,
                    NDArrayCallback callback, int streamId, AtomicBoolean running) {
        AeronNDArraySubscriber subscriber = AeronNDArraySubscriber.builder().streamId(streamId).ctx(context)
                        .channel(AeronUtil.aeronChannel(host, port)).running(running).ndArrayCallback(callback).build();
        Thread t = new Thread(() -> {
            try {
                subscriber.launch();
            } catch (Exception e) {
                // Include context instead of an empty message so failures are diagnosable.
                log.error("Error launching ndarray subscriber on " + host + ":" + port, e);
            }
        });
        t.start();
        return subscriber;
    }

    /**
     * Closes this subscriber's underlying Aeron subscription (if any) by delegating to
     * {@link CloseHelper#quietClose}, which suppresses close-time errors.
     *
     * @throws Exception never in practice; declared by {@link AutoCloseable}
     */
    @Override
    public void close() throws Exception {
        CloseHelper.quietClose(subscription);
    }
}
| |
/*
* This file is part of FastClasspathScanner.
*
* Author: Luke Hutchison <luke .dot. hutch .at. gmail .dot. com>
*
* Hosted at: https://github.com/lukehutch/fast-classpath-scanner
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.lukehutch.fastclasspathscanner.classgraph;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
public class ClassGraphBuilder {
/** A map from fully-qualified class name to the corresponding ClassNode object. */
private final HashMap<String, ClassNode> classNameToClassNode = new HashMap<>();
/** A map from fully-qualified class name to the corresponding InterfaceNode object. */
private final HashMap<String, InterfaceNode> interfaceNameToInterfaceNode = new HashMap<>();
/** Reverse mapping from annotation to classes that have the annotation. */
private final HashMap<String, ArrayList<String>> annotationNameToClassName = new HashMap<>();
/** Reverse mapping from interface to classes that implement the interface */
private final HashMap<String, ArrayList<String>> interfaceNameToClassNames = new HashMap<>();
// -----------------------------------------------------------------------------------------------------------------
public ArrayList<String> getClassesWithAnnotation(String annotationName) {
return annotationNameToClassName.get(annotationName);
}
public ArrayList<String> getClassesImplementing(String interfaceName) {
return interfaceNameToClassNames.get(interfaceName);
}
public ArrayList<String> getSubclassesOf(String className) {
ArrayList<String> subclasses = new ArrayList<>();
ClassNode classNode = classNameToClassNode.get(className);
if (classNode != null) {
for (DAGNode subNode : classNode.allSubNodes) {
subclasses.add(subNode.name);
}
}
return subclasses;
}
public ArrayList<String> getSuperclassesOf(String className) {
ArrayList<String> superclasses = new ArrayList<>();
ClassNode classNode = classNameToClassNode.get(className);
if (classNode != null) {
for (DAGNode subNode : classNode.allSuperNodes) {
superclasses.add(subNode.name);
}
}
return superclasses;
}
public ArrayList<String> getSubinterfacesOf(String interfaceName) {
ArrayList<String> subinterfaces = new ArrayList<>();
InterfaceNode interfaceNode = interfaceNameToInterfaceNode.get(interfaceName);
if (interfaceNode != null) {
for (DAGNode subNode : interfaceNode.allSubNodes) {
subinterfaces.add(subNode.name);
}
}
return subinterfaces;
}
public ArrayList<String> getSuperinterfacesOf(String interfaceName) {
ArrayList<String> superinterfaces = new ArrayList<>();
InterfaceNode interfaceNode = interfaceNameToInterfaceNode.get(interfaceName);
if (interfaceNode != null) {
for (DAGNode superNode : interfaceNode.allSuperNodes) {
superinterfaces.add(superNode.name);
}
}
return superinterfaces;
}
// -----------------------------------------------------------------------------------------------------------------
/** Link a class to its superclass and to the interfaces it implements, and save the class annotations. */
public void linkToSuperclassAndInterfaces(String className, String superclassName, ArrayList<String> interfaces,
HashSet<String> annotations) {
// Save the info recovered from the classfile for a class
// Look up ClassNode object for this class
ClassNode thisClassNode = classNameToClassNode.get(className);
if (thisClassNode == null) {
// This class has not been encountered before on the classpath
classNameToClassNode.put(className, thisClassNode = new ClassNode(className, interfaces, annotations));
} else {
// This is the first time this class has been encountered on the classpath, but
// it was previously cited as a superclass of another class
thisClassNode.encounter(interfaces, annotations);
}
// Look up ClassNode object for superclass, and connect it to this class
ClassNode superclassNode = classNameToClassNode.get(superclassName);
if (superclassNode == null) {
// The superclass of this class has not yet been encountered on the classpath
classNameToClassNode.put(superclassName, superclassNode = new ClassNode(superclassName, thisClassNode));
} else {
superclassNode.addSubNode(thisClassNode);
}
}
/** Save the mapping from an interface to its superinterfaces. */
public void linkToSuperinterfaces(String interfaceName, ArrayList<String> superInterfaces) {
// Look up InterfaceNode for this interface
InterfaceNode thisInterfaceInfo = interfaceNameToInterfaceNode.get(interfaceName);
if (thisInterfaceInfo == null) {
// This interface has not been encountered before on the classpath
interfaceNameToInterfaceNode.put(interfaceName, thisInterfaceInfo = new InterfaceNode(interfaceName));
} else {
// This is the first time this interface has been encountered on the classpath, but
// it was previously cited as a superinterface of another interface
thisInterfaceInfo.encounter();
}
if (superInterfaces != null) {
for (String superInterfaceName : superInterfaces) {
// Look up InterfaceNode objects for superinterfaces, and connect them to this interface
InterfaceNode superInterfaceNode = interfaceNameToInterfaceNode.get(superInterfaceName);
if (superInterfaceNode == null) {
// The superinterface of this interface has not yet been encountered on the classpath
interfaceNameToInterfaceNode.put(superInterfaceName, superInterfaceNode =
new InterfaceNode(superInterfaceName, thisInterfaceInfo));
} else {
superInterfaceNode.addSubNode(thisInterfaceInfo);
}
}
}
}
// -----------------------------------------------------------------------------------------------------------------
/**
* Find all superclasses and subclasses for each class and superinterfaces and subinterfaces of each interface.
* Called once all classes have been read.
*/
public void finalizeNodes() {
if (classNameToClassNode.isEmpty() && interfaceNameToInterfaceNode.isEmpty()) {
// If no classes or interfaces were matched, there is no hierarchy to build
return;
}
// Perform topological sort on class tree
ArrayList<DAGNode> classNodeTopoOrder = DAGNode.topoSort(classNameToClassNode.values());
// Accumulate all superclasses of each class by traversing from highest to lowest class
for (int i = 0, n = classNodeTopoOrder.size(); i < n; i++) {
DAGNode classNode = classNodeTopoOrder.get(i);
HashSet<DAGNode> allSuperNodes = new HashSet<>(classNode.allSuperNodes);
for (DAGNode superclassNode : classNode.allSuperNodes) {
allSuperNodes.addAll(superclassNode.allSuperNodes);
}
classNode.allSuperNodes = allSuperNodes;
}
// Accumulate all subclasses of each class by traversing from lowest to highest class
for (int i = classNodeTopoOrder.size() - 1; i >= 0; --i) {
DAGNode classNode = classNodeTopoOrder.get(i);
HashSet<DAGNode> allSubNodes = new HashSet<>(classNode.allSubNodes);
for (DAGNode subclassNode : classNode.allSubNodes) {
allSubNodes.addAll(subclassNode.allSubNodes);
}
classNode.allSubNodes = allSubNodes;
}
// Perform topological sort on interface DAG
ArrayList<DAGNode> interfaceNodeTopoOrder = DAGNode.topoSort(interfaceNameToInterfaceNode.values());
// Accumulate all superinterfaces of each interface by traversing from highest to lowest interface
for (int i = 0, n = interfaceNodeTopoOrder.size(); i < n; i++) {
DAGNode interfaceNode = interfaceNodeTopoOrder.get(i);
HashSet<DAGNode> allSuperNodes = new HashSet<>(interfaceNode.allSuperNodes);
for (DAGNode superinterfaceNode : interfaceNode.allSuperNodes) {
allSuperNodes.addAll(superinterfaceNode.allSuperNodes);
}
interfaceNode.allSuperNodes = allSuperNodes;
}
// Accumulate all subinterfaces of each interface by traversing from lowest to highest interface
for (int i = interfaceNodeTopoOrder.size() - 1; i >= 0; --i) {
DAGNode interfaceNode = interfaceNodeTopoOrder.get(i);
HashSet<DAGNode> allSubNodes = new HashSet<>(interfaceNode.allSubNodes);
for (DAGNode subinterfaceNode : interfaceNode.allSubNodes) {
allSubNodes.addAll(subinterfaceNode.allSubNodes);
}
interfaceNode.allSubNodes = allSubNodes;
}
// Reverse mapping from annotation to classes that have the annotation.
HashMap<String, HashSet<DAGNode>> annotationToClassNodes = new HashMap<>();
// Reverse mapping from interface to classes that implement the interface.
HashMap<String, HashSet<DAGNode>> interfaceToClassNodes = new HashMap<>();
// Create reverse mapping from annotation to the names of classes that have the annotation,
// and from interface names to the names of classes that implement the interface.
for (DAGNode classDAGNode : classNodeTopoOrder) {
ClassNode classNode = (ClassNode) classDAGNode;
if (classNode.annotationNames != null) {
// Map from annotation back to classes that have the annotation
for (String annotation : classNode.annotationNames) {
HashSet<DAGNode> classList = annotationToClassNodes.get(annotation);
if (classList == null) {
annotationToClassNodes.put(annotation, classList = new HashSet<>());
}
classList.add(classDAGNode);
}
}
if (classNode.interfaceNames != null) {
// Map from interface back to classes that implement the interface
HashSet<String> interfacesAndSuperinterfaces = new HashSet<>();
for (String interfaceName : classNode.interfaceNames) {
// Any class that implements an interface also implements all its superinterfaces
interfacesAndSuperinterfaces.add(interfaceName);
InterfaceNode interfaceNode = interfaceNameToInterfaceNode.get(interfaceName);
if (interfaceNode != null) {
for (DAGNode superinterfaceNode : interfaceNode.allSuperNodes) {
interfacesAndSuperinterfaces.add(superinterfaceNode.name);
}
}
}
for (String interfaceName : interfacesAndSuperinterfaces) {
// Add mapping from interface back to implementing class
HashSet<DAGNode> classList = interfaceToClassNodes.get(interfaceName);
if (classList == null) {
interfaceToClassNodes.put(interfaceName, classList = new HashSet<>());
}
classList.add(classDAGNode);
}
}
}
// Classes that subclass another class that implements an interface also implement the same interface.
// Add these to the mapping from interface back to the classes that implement the interface.
for (DAGNode interfaceNode : interfaceNodeTopoOrder) {
// Get all classes that implement this interface
HashSet<DAGNode> implementingClasses = interfaceToClassNodes.get(interfaceNode.name);
if (implementingClasses != null) {
// Get the union of all subclasses of all classes that implement this interface
HashSet<DAGNode> allSubClasses = new HashSet<DAGNode>(implementingClasses);
for (DAGNode implementingClass : implementingClasses) {
allSubClasses.addAll(implementingClass.allSubNodes);
}
// Add to the mapping from the interface to each subclass of a class that implements the interface
HashSet<DAGNode> classList = interfaceToClassNodes.get(interfaceNode.name);
if (classList == null) {
interfaceToClassNodes.put(interfaceNode.name, classList = new HashSet<>());
}
classList.addAll(allSubClasses);
}
}
// Convert annotation and interface mappings to String->String
for (Entry<String, HashSet<DAGNode>> ent : annotationToClassNodes.entrySet()) {
ArrayList<String> classNameList = new ArrayList<>();
annotationNameToClassName.put(ent.getKey(), classNameList);
HashSet<DAGNode> classNodes = ent.getValue();
if (classNodes != null) {
for (DAGNode classNode : classNodes) {
classNameList.add(classNode.name);
}
}
}
for (Entry<String, HashSet<DAGNode>> ent : interfaceToClassNodes.entrySet()) {
ArrayList<String> classNameList = new ArrayList<>();
interfaceNameToClassNames.put(ent.getKey(), classNameList);
HashSet<DAGNode> classNodes = ent.getValue();
if (classNodes != null) {
for (DAGNode classNode : classNodes) {
classNameList.add(classNode.name);
}
}
}
}
// -----------------------------------------------------------------------------------------------------------------
public void reset() {
classNameToClassNode.clear();
interfaceNameToInterfaceNode.clear();
annotationNameToClassName.clear();
interfaceNameToClassNames.clear();
}
}
| |
/**
* Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.datastore.redis.util;
import java.io.Serializable;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;
import org.locationtech.geowave.core.store.entities.GeoWaveMetadata;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.operations.MetadataType;
import org.locationtech.geowave.core.store.operations.RangeReaderParams;
import org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;
import org.redisson.api.RScoredSortedSet;
import org.redisson.api.RedissonClient;
import org.redisson.client.protocol.ScoredEntry;
import org.redisson.codec.FstCodec;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.Streams;
import com.google.common.primitives.Bytes;
import com.google.common.primitives.UnsignedBytes;
public class RedisUtils {
  /** Upper bound on rows fetched in a single pagination pass over a scored set. */
  protected static final int MAX_ROWS_FOR_PAGINATION = 1000000;
  /** Default maximum range decomposition for general queries. */
  public static int REDIS_DEFAULT_MAX_RANGE_DECOMPOSITION = 250;
  /** Default maximum range decomposition for aggregation queries. */
  public static int REDIS_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION = 250;
  /** Fallback serialization codec used where no specialized GeoWave codec applies. */
  private static final FstCodec DEFAULT_CODEC = new FstCodec();

  /**
   * Returns the scored sorted set holding metadata of the given type, choosing a codec based on
   * the metadata type and whether visibility is enabled.
   *
   * @param client the Redisson client
   * @param compression wraps the chosen codec with the configured compression
   * @param namespace the GeoWave namespace used to prefix the set name
   * @param metadataType the type of metadata stored in the set
   * @param visibilityEnabled whether visibility expressions are serialized with each entry
   * @return the scored sorted set for this metadata type
   */
  public static RScoredSortedSet<GeoWaveMetadata> getMetadataSet(
      final RedissonClient client,
      final Compression compression,
      final String namespace,
      final MetadataType metadataType,
      final boolean visibilityEnabled) {
    // stats also store a timestamp because stats can be the exact same but
    // need to still be unique (consider multiple count statistics that are
    // exactly the same count, but need to be merged)
    return client.getScoredSortedSet(
        namespace + "_" + metadataType.toString(),
        compression.getCodec(
            MetadataType.STATS.equals(metadataType)
                ? visibilityEnabled ? GeoWaveMetadataWithTimestampCodec.SINGLETON_WITH_VISIBILITY
                    : GeoWaveMetadataWithTimestampCodec.SINGLETON_WITHOUT_VISIBILITY
                : visibilityEnabled ? GeoWaveMetadataCodec.SINGLETON_WITH_VISIBILITY
                    : GeoWaveMetadataCodec.SINGLETON_WITHOUT_VISIBILITY));
  }

  /** Builds the common name prefix for all row sets of a type/index pair. */
  public static String getRowSetPrefix(
      final String namespace,
      final String typeName,
      final String indexName) {
    return namespace + "_" + typeName + "_" + indexName;
  }

  /**
   * Convenience overload that resolves the set name from a prefix and partition key before
   * delegating to {@link #getRowSet(RedissonClient, Compression, String, boolean, boolean)}.
   */
  public static RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getRowSet(
      final RedissonClient client,
      final Compression compression,
      final String setNamePrefix,
      final byte[] partitionKey,
      final boolean requiresTimestamp,
      final boolean visibilityEnabled) {
    return getRowSet(
        client,
        compression,
        getRowSetName(setNamePrefix, partitionKey),
        requiresTimestamp,
        visibilityEnabled);
  }

  /** Resolves the full row set name for a namespace/type/index/partition combination. */
  public static String getRowSetName(
      final String namespace,
      final String typeName,
      final String indexName,
      final byte[] partitionKey) {
    return getRowSetName(getRowSetPrefix(namespace, typeName, indexName), partitionKey);
  }

  /**
   * Appends the string-encoded partition key to the set name prefix; an empty or null partition
   * key yields the bare prefix.
   */
  public static String getRowSetName(final String setNamePrefix, final byte[] partitionKey) {
    String partitionStr;
    if ((partitionKey != null) && (partitionKey.length > 0)) {
      partitionStr = "_" + ByteArrayUtils.byteArrayToString(partitionKey);
    } else {
      partitionStr = "";
    }
    return setNamePrefix + partitionStr;
  }

  /** Returns the map wrapper backing the data index for the given type. */
  public static RedisMapWrapper getDataIndexMap(
      final RedissonClient client,
      final Compression compression,
      final String namespace,
      final String typeName,
      final boolean visibilityEnabled) {
    return new RedisMapWrapper(
        client,
        getRowSetPrefix(namespace, typeName, DataIndexUtils.DATA_ID_INDEX.getName()),
        compression.getCodec(DEFAULT_CODEC),
        visibilityEnabled);
  }

  /**
   * Returns a wrapper around the named scored set of persisted rows, selecting the row codec that
   * matches both the timestamp requirement and the visibility setting.
   *
   * @param client the Redisson client
   * @param compression wraps the chosen codec with the configured compression
   * @param setName the fully resolved set name
   * @param requiresTimestamp whether rows carry a uniqueness timestamp (e.g. mergeable rows)
   * @param visibilityEnabled whether visibility expressions are serialized with each row
   * @return the wrapped scored set
   */
  public static RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getRowSet(
      final RedissonClient client,
      final Compression compression,
      final String setName,
      final boolean requiresTimestamp,
      final boolean visibilityEnabled) {
    return new RedisScoredSetWrapper<>(
        client,
        setName,
        compression.getCodec(
            requiresTimestamp
                ? visibilityEnabled ? GeoWaveRedisRowWithTimestampCodec.SINGLETON_WITH_VISIBILITY
                    // FIX: previously SINGLETON_WITH_VISIBILITY in both branches (copy-paste
                    // defect); visibility-disabled sets must use the codec that does not
                    // serialize visibility, mirroring the metadata codec selection above
                    : GeoWaveRedisRowWithTimestampCodec.SINGLETON_WITHOUT_VISIBILITY
                : visibilityEnabled ? GeoWaveRedisRowCodec.SINGLETON_WITH_VISIBILITY
                    : GeoWaveRedisRowCodec.SINGLETON_WITHOUT_VISIBILITY));
  }

  /**
   * Convenience overload that builds the set name prefix from namespace/type/index before
   * delegating to the partition-key overload.
   */
  public static RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getRowSet(
      final RedissonClient client,
      final Compression compression,
      final String namespace,
      final String typeName,
      final String indexName,
      final byte[] partitionKey,
      final boolean requiresTimestamp,
      final boolean visibilityEnabled) {
    return getRowSet(
        client,
        compression,
        getRowSetPrefix(namespace, typeName, indexName),
        partitionKey,
        requiresTimestamp,
        visibilityEnabled);
  }

  /** Converts a (truncated big-endian) sort key into the score used for the scored set. */
  public static double getScore(final byte[] byteArray) {
    return bytesToLong(byteArray);
  }

  /** Converts a score back into its minimal big-endian sort key representation. */
  public static byte[] getSortKey(final double score) {
    return longToBytes((long) score);
  }

  /**
   * Encodes a long as a big-endian byte array with low-order zero bytes truncated; returns an
   * empty array for zero. Inverse of {@link #bytesToLong(byte[])} up to truncation.
   */
  private static byte[] longToBytes(long val) {
    final int radix = 1 << 8;
    final int mask = radix - 1;
    // we want to eliminate trailing 0's (ie. truncate the byte array by
    // trailing 0's)
    int trailingZeros = 0;
    while ((((int) val) & mask) == 0) {
      val >>>= 8;
      trailingZeros++;
      if (trailingZeros == 8) {
        // all 8 bytes were zero
        return new byte[0];
      }
    }
    final byte[] array = new byte[8 - trailingZeros];
    int pos = array.length;
    do {
      array[--pos] = (byte) (((int) val) & mask);
      val >>>= 8;
    } while ((val != 0) && (pos > 0));
    return array;
  }

  /**
   * Decodes a (possibly truncated) big-endian byte array into a long; bytes beyond index 7 are
   * ignored and missing low-order bytes are treated as zero.
   */
  private static long bytesToLong(final byte[] bytes) {
    long value = 0;
    for (int i = 0; i < 8; i++) {
      value = (value << 8);
      if (i < bytes.length) {
        value += (bytes[i] & 0xff);
      }
    }
    return value;
  }

  /**
   * Discovers all partition keys by scanning Redis key names that share the given set name
   * prefix; a key equal to the bare prefix maps to the empty partition.
   */
  public static Set<ByteArray> getPartitions(
      final RedissonClient client,
      final String setNamePrefix) {
    return Streams.stream(client.getKeys().getKeysByPattern(setNamePrefix + "*")).map(
        str -> str.length() > (setNamePrefix.length() + 1)
            ? new ByteArray(
                ByteArrayUtils.byteArrayFromString(str.substring(setNamePrefix.length() + 1)))
            : new ByteArray()).collect(Collectors.toSet());
  }

  /** Groups metadata entries sharing the same primary+secondary id so they iterate adjacently. */
  public static Iterator<GeoWaveMetadata> groupByIds(final Iterable<GeoWaveMetadata> result) {
    final ListMultimap<ByteArray, GeoWaveMetadata> multimap =
        MultimapBuilder.hashKeys().arrayListValues().build();
    result.forEach(
        r -> multimap.put(new ByteArray(Bytes.concat(r.getPrimaryId(), r.getSecondaryId())), r));
    return multimap.values().iterator();
  }

  /**
   * Groups scored entries by (score, data id) so duplicates of a logical row iterate adjacently;
   * optionally sorts each group most-recent-first by timestamp.
   */
  public static Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> groupByRow(
      final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> result,
      final boolean sortByTime) {
    final ListMultimap<Pair<Double, ByteArray>, ScoredEntry<GeoWaveRedisPersistedRow>> multimap =
        MultimapBuilder.hashKeys().arrayListValues().build();
    result.forEachRemaining(
        r -> multimap.put(Pair.of(r.getScore(), new ByteArray(r.getValue().getDataId())), r));
    if (sortByTime) {
      multimap.asMap().forEach(
          (k, v) -> Collections.sort(
              (List<ScoredEntry<GeoWaveRedisPersistedRow>>) v,
              TIMESTAMP_COMPARATOR));
    }
    return multimap.values().iterator();
  }

  /** Row-merging adapters need time-ordered rows so merges see the latest data first. */
  public static boolean isSortByTime(final InternalDataAdapter<?> adapter) {
    return adapter.getAdapter() instanceof RowMergingDataAdapter;
  }

  public static boolean isSortByKeyRequired(final RangeReaderParams<?> params) {
    // subsampling needs to be sorted by sort key to work properly
    return (params.getMaxResolutionSubsamplingPerDimension() != null)
        && (params.getMaxResolutionSubsamplingPerDimension().length > 0);
  }

  /** Returns an iterator over the rows ordered by {@link SortKeyOrder}. */
  public static Iterator<GeoWaveRow> sortBySortKey(final Iterator<GeoWaveRow> it) {
    return Streams.stream(it).sorted(SortKeyOrder.SINGLETON).iterator();
  }

  /**
   * Returns (groupByRow, sortByTime): rows must be grouped when visibilities are mixed or when
   * time ordering is required for merging.
   */
  public static Pair<Boolean, Boolean> isGroupByRowAndIsSortByTime(
      final RangeReaderParams<?> readerParams,
      final short adapterId) {
    final boolean sortByTime = isSortByTime(readerParams.getAdapterStore().getAdapter(adapterId));
    return Pair.of(readerParams.isMixedVisibility() || sortByTime, sortByTime);
  }

  private static final ReverseTimestampComparator TIMESTAMP_COMPARATOR =
      new ReverseTimestampComparator();

  /** Orders timestamped rows from most recent to least recent. */
  private static class ReverseTimestampComparator implements
      Comparator<ScoredEntry<GeoWaveRedisPersistedRow>>,
      Serializable {
    private static final long serialVersionUID = 2894647323275155231L;

    @Override
    public int compare(
        final ScoredEntry<GeoWaveRedisPersistedRow> o1,
        final ScoredEntry<GeoWaveRedisPersistedRow> o2) {
      final GeoWaveRedisPersistedTimestampRow row1 =
          (GeoWaveRedisPersistedTimestampRow) o1.getValue();
      final GeoWaveRedisPersistedTimestampRow row2 =
          (GeoWaveRedisPersistedTimestampRow) o2.getValue();
      // we are purposely reversing the order because we want it to be
      // sorted from most recent to least recent
      final int compare = Long.compare(row2.getSecondsSinceEpic(), row1.getSecondsSinceEpic());
      if (compare != 0) {
        return compare;
      }
      return Integer.compare(row2.getNanoOfSecond(), row1.getNanoOfSecond());
    }
  }

  /**
   * Total order over rows: sort key, then partition key, then adapter id, then data id, then
   * duplicate count; nulls sort last and missing byte arrays compare as empty.
   */
  private static class SortKeyOrder implements Comparator<GeoWaveRow>, Serializable {
    private static final SortKeyOrder SINGLETON = new SortKeyOrder();
    private static final long serialVersionUID = 23275155231L;

    @Override
    public int compare(final GeoWaveRow o1, final GeoWaveRow o2) {
      if (o1 == o2) {
        return 0;
      }
      if (o1 == null) {
        return 1;
      }
      if (o2 == null) {
        return -1;
      }
      byte[] otherComp = o2.getSortKey() == null ? new byte[0] : o2.getSortKey();
      byte[] thisComp = o1.getSortKey() == null ? new byte[0] : o1.getSortKey();
      int comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);
      if (comp != 0) {
        return comp;
      }
      otherComp = o2.getPartitionKey() == null ? new byte[0] : o2.getPartitionKey();
      thisComp = o1.getPartitionKey() == null ? new byte[0] : o1.getPartitionKey();
      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);
      if (comp != 0) {
        return comp;
      }
      comp = Short.compare(o1.getAdapterId(), o2.getAdapterId());
      if (comp != 0) {
        return comp;
      }
      otherComp = o2.getDataId() == null ? new byte[0] : o2.getDataId();
      thisComp = o1.getDataId() == null ? new byte[0] : o1.getDataId();
      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);
      if (comp != 0) {
        return comp;
      }
      return Integer.compare(o1.getNumberOfDuplicates(), o2.getNumberOfDuplicates());
    }
  }
}
| |
package org.marketcetera.photon.strategy.engine;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;
import static org.marketcetera.photon.strategy.engine.model.core.test.StrategyEngineCoreTestUtil.assertDeployedStrategy;
import static org.marketcetera.photon.strategy.engine.model.core.test.StrategyEngineCoreTestUtil.createStrategy;
import java.io.File;
import java.text.MessageFormat;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.marketcetera.module.ModuleURN;
import org.marketcetera.photon.commons.ValidateTest.ExpectedNullArgumentFailure;
import org.marketcetera.photon.strategy.engine.model.core.DeployedStrategy;
import org.marketcetera.photon.strategy.engine.model.core.Strategy;
import org.marketcetera.photon.strategy.engine.model.core.StrategyState;
import org.marketcetera.photon.test.ExpectedFailure;
import org.marketcetera.photon.test.PhotonTestBase;
import org.marketcetera.util.except.I18NException;
import org.marketcetera.util.file.CopyCharsUtils;
import com.google.common.collect.ImmutableMap;
/* $License$ */
/**
* Test {@link AbstractStrategyEngineConnection}.
*
* @author <a href="mailto:will@marketcetera.com">Will Horn</a>
* @version $Id: AbstractStrategyEngineConnectionTestBase.java 16154 2012-07-14 16:34:05Z colin $
* @since 2.0.0
*/
public abstract class AbstractStrategyEngineConnectionTestBase extends
        PhotonTestBase {
    /**
     * Creates the engine connection fixture under test; each test obtains a
     * fresh connection from the concrete subclass.
     */
    abstract protected AbstractStrategyEngineConnection createFixture();
    // temporary Ruby strategy script, created fresh before each test
    private File mTempScript;
    @Before
    public void before() throws Exception {
        // write a minimal Ruby strategy class to a temp file so deploy tests
        // have a real script on disk to resolve
        mTempScript = File.createTempFile("my_strategy", "rb");
        CopyCharsUtils
                .copy(
                        "include_class \"org.marketcetera.strategy.ruby.Strategy\"\nclass MyStrategy < Strategy\nend"
                                .toCharArray(), mTempScript.getAbsolutePath());
    }
    @After
    public void after() throws Exception {
        // best-effort cleanup of the temp script
        mTempScript.delete();
    }
    /** Returns the temp script written in {@link #before()}. */
    protected File getTempScript() {
        return mTempScript;
    }
    /**
     * Builds a fully-populated strategy ("strat1"/MyStrategy/RUBY) pointing at
     * the temp script, suitable for a successful deploy.
     */
    protected Strategy createStrategyToDeploy() {
        Strategy strategy = createStrategy("strat1");
        strategy.setScriptPath(getTempScript().getPath());
        strategy.setClassName("MyStrategy");
        strategy.setLanguage("RUBY");
        return strategy;
    }
    // ------------------------------------------------------------------
    // deploy() validation: null argument and each missing/blank property
    // must fail with the corresponding message
    // ------------------------------------------------------------------
    @Test
    public void testDeployNull() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        new ExpectedNullArgumentFailure("strategy") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(null);
            }
        };
    }
    @Test
    public void testDeployNoScript() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setScriptPath(null);
        new ExpectedFailure<I18NException>(
                "The strategy script path was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployBlankScript() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setScriptPath(" ");
        new ExpectedFailure<I18NException>(
                "The strategy script path was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployNoClassName() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setClassName(null);
        new ExpectedFailure<I18NException>(
                "The strategy class name was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployBlankClassName() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setClassName("");
        new ExpectedFailure<I18NException>(
                "The strategy class name was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployNoLanguage() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setLanguage(null);
        new ExpectedFailure<I18NException>(
                "The strategy language was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployBlankLanguage() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setLanguage(" ");
        new ExpectedFailure<I18NException>(
                "The strategy language was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployNoInstanceName() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setInstanceName(null);
        new ExpectedFailure<I18NException>(
                "The strategy instance name was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    @Test
    public void testDeployBlankInstanceName() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy strategy = createStrategyToDeploy();
        strategy.setInstanceName("");
        new ExpectedFailure<I18NException>(
                "The strategy instance name was not specified.") {
            @Override
            protected void run() throws Exception {
                fixture.deploy(strategy);
            }
        };
    }
    // ------------------------------------------------------------------
    // successful deploy scenarios
    // ------------------------------------------------------------------
    @Test
    public void testDeploy() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        assertThat(fixture.getEngine().getDeployedStrategies(),
                hasItem(deployed));
        assertDeployedStrategy(deployed, fixture.getEngine(),
                StrategyState.STOPPED, "strat1", "MyStrategy", "RUBY",
                mTempScript.getPath(), false, null);
    }
    @Test
    public void testDeployWithParameters() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategy("strat2");
        strategy.setScriptPath(getTempScript().getPath());
        strategy.setClassName("MyStrategy2");
        strategy.setLanguage("RUBY");
        strategy.setRouteOrdersToServer(true);
        strategy.getParameters().put("abc", "xyz");
        strategy.getParameters().put("123", "abc");
        DeployedStrategy deployed = fixture.deploy(strategy);
        assertDeployedStrategy(deployed, fixture.getEngine(),
                StrategyState.STOPPED, "strat2", "MyStrategy2", "RUBY",
                mTempScript.getPath(), true, ImmutableMap.of("abc", "xyz",
                        "123", "abc"));
    }
    @Test
    public void testDeployWithScriptURL() throws Exception {
        // a file: URL script path should be resolved to the local file and a
        // debug message logged about the resolution
        AbstractStrategyEngineConnection fixture = createFixture();
        setLevel(fixture.getClass().getName(), Level.DEBUG);
        String url = mTempScript.toURI().toString();
        String actualPath = mTempScript.getAbsolutePath();
        Strategy strategy = createStrategy("strat3");
        strategy.setScriptPath(url);
        strategy.setClassName("Clazz123");
        strategy.setLanguage("RUBY");
        DeployedStrategy deployed = fixture.deploy(strategy);
        assertDeployedStrategy(deployed, fixture.getEngine(),
                StrategyState.STOPPED, "strat3", "Clazz123", "RUBY", url,
                false, null);
        // there should also be a debug message
        assertLastEvent(
                Level.DEBUG,
                fixture.getClass().getName(),
                MessageFormat
                        .format(
                                "Resolved strategy scriptPath ''{0}'' as a URL to file ''{1}''.",
                                url, actualPath),
                AbstractStrategyEngineConnection.class.getName());
    }
    // ------------------------------------------------------------------
    // lifecycle: undeploy / start / stop / restart
    // ------------------------------------------------------------------
    @Test
    public void testUndeploy() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        fixture.undeploy(deployed);
        assertThat(fixture.getEngine().getDeployedStrategies(),
                not(hasItem(deployed)));
    }
    @Test
    public void testUndeployNull() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        new ExpectedNullArgumentFailure("deployedStrategy") {
            @Override
            protected void run() throws Exception {
                fixture.undeploy(null);
            }
        };
    }
    @Test
    public void testStart() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        fixture.start(deployed);
        assertThat(deployed.getState(), is(StrategyState.RUNNING));
    }
    @Test
    public void testStartNull() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        new ExpectedNullArgumentFailure("deployedStrategy") {
            @Override
            protected void run() throws Exception {
                fixture.start(null);
            }
        };
    }
    @Test
    public void testStop() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        fixture.start(deployed);
        fixture.stop(deployed);
        assertThat(deployed.getState(), is(StrategyState.STOPPED));
    }
    @Test
    public void testStopNull() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        new ExpectedNullArgumentFailure("deployedStrategy") {
            @Override
            protected void run() throws Exception {
                fixture.stop(null);
            }
        };
    }
    @Test
    public void testRestart() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        fixture.start(deployed);
        fixture.stop(deployed);
        fixture.start(deployed);
        assertThat(deployed.getState(), is(StrategyState.RUNNING));
    }
    @Test
    public void testUndeployRunningStrategy() throws Exception {
        // undeploying a running strategy should implicitly stop and remove it
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        fixture.start(deployed);
        fixture.undeploy(deployed);
        assertThat(fixture.getEngine().getDeployedStrategies(),
                not(hasItem(deployed)));
    }
    // ------------------------------------------------------------------
    // update and refresh
    // ------------------------------------------------------------------
    @Test
    public void testUpdate() throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        Strategy newConfiguration = createStrategy(null);
        newConfiguration.setRouteOrdersToServer(true);
        newConfiguration.getParameters().put("xyz", "123");
        fixture.update(deployed, newConfiguration);
        assertDeployedStrategy(deployed, fixture.getEngine(),
                StrategyState.STOPPED, "strat1", "MyStrategy", "RUBY",
                mTempScript.getPath(), true, ImmutableMap.of("xyz", "123"));
    }
    @Test
    public void testUpdateNullStrategy() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        final Strategy newConfiguration = createStrategy(null);
        new ExpectedNullArgumentFailure("deployedStrategy") {
            @Override
            protected void run() throws Exception {
                fixture.update(null, newConfiguration);
            }
        };
    }
    @Test
    public void testUpdateNullConfiguration() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        final DeployedStrategy deployed = fixture.deploy(strategy);
        new ExpectedNullArgumentFailure("newConfiguration") {
            @Override
            protected void run() throws Exception {
                fixture.update(deployed, null);
            }
        };
    }
    @Test
    public void testRefreshDeployedStrategy() throws Exception {
        // refresh(deployed) must pick up changes made outside this connection
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        Strategy newConfiguration = createStrategy(null);
        newConfiguration.setRouteOrdersToServer(true);
        newConfiguration.getParameters().put("xyz", "123");
        externalUpdateAndStartStrategy(deployed.getUrn(), newConfiguration);
        fixture.refresh(deployed);
        assertDeployedStrategy(deployed, fixture.getEngine(),
                StrategyState.RUNNING, "strat1", "MyStrategy", "RUBY",
                mTempScript.getPath(), true, ImmutableMap.of("xyz", "123"));
    }
    /**
     * Subclass hook: updates and starts the strategy with the given URN
     * through a channel external to the fixture connection.
     */
    abstract protected void externalUpdateAndStartStrategy(ModuleURN urn,
            Strategy newConfiguration) throws Exception;
    @Test
    public void testRefreshDeployedStrategyThatNoLongerExists()
            throws Exception {
        AbstractStrategyEngineConnection fixture = createFixture();
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed = fixture.deploy(strategy);
        externalUndeployStrategy(deployed.getUrn());
        fixture.refresh(deployed);
        assertThat(fixture.getEngine().getDeployedStrategies(),
                not(hasItem(deployed)));
    }
    /**
     * Subclass hook: undeploys the strategy with the given URN through a
     * channel external to the fixture connection.
     */
    abstract protected void externalUndeployStrategy(ModuleURN urn)
            throws Exception;
    @Test
    public void testRefreshNullStrategy() throws Exception {
        final AbstractStrategyEngineConnection fixture = createFixture();
        new ExpectedNullArgumentFailure("deployedStrategy") {
            @Override
            protected void run() throws Exception {
                fixture.refresh(null);
            }
        };
    }
    @Test
    public void testRefresh() throws Exception {
        // full refresh(): reconcile local model with external updates,
        // external undeploys, and externally-deployed strategies
        AbstractStrategyEngineConnection fixture = createFixture();
        /*
         * Deploy a strategy and change it externally.
         */
        Strategy strategy = createStrategyToDeploy();
        DeployedStrategy deployed1 = fixture.deploy(strategy);
        Strategy newConfiguration = createStrategy(null);
        newConfiguration.setRouteOrdersToServer(true);
        newConfiguration.getParameters().put("xyz", "123");
        externalUpdateAndStartStrategy(deployed1.getUrn(), newConfiguration);
        /*
         * Deploy a strategy and undeploy it externally.
         */
        strategy = createStrategyToDeploy();
        strategy.setInstanceName("strat2");
        DeployedStrategy deployed2 = fixture.deploy(strategy);
        externalUndeployStrategy(deployed2.getUrn());
        /*
         * Deploy a strategy externally.
         */
        strategy = createStrategyToDeploy();
        strategy.setInstanceName("strat3");
        externalDeployStrategy(strategy);
        /*
         * Refresh and validate.
         */
        fixture.refresh();
        assertThat(fixture.getEngine().getDeployedStrategies().size(), is(2));
        for (DeployedStrategy deployed : fixture.getEngine()
                .getDeployedStrategies()) {
            if (deployed == deployed1) {
                assertDeployedStrategy(deployed, fixture.getEngine(),
                        StrategyState.RUNNING, "strat1", "MyStrategy", "RUBY",
                        getTempScript().getPath(), true, ImmutableMap.of("xyz",
                                "123"));
            } else {
                assertDeployedStrategy(deployed, fixture.getEngine(),
                        StrategyState.STOPPED, "strat3", "MyStrategy", "RUBY",
                        null, false, null);
            }
        }
    }
    /**
     * Subclass hook: deploys the given strategy through a channel external to
     * the fixture connection.
     */
    abstract protected void externalDeployStrategy(Strategy strategy)
            throws Exception;
}
| |
package android.app;
/*
* #%L
* Matos
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Orange SA
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public class Notification
implements android.os.Parcelable
{
// Classes
public static class Builder
{
// Constructors
public Builder(android.content.Context arg1){
}
// Methods
public Notification.Builder setProgress(int arg1, int arg2, boolean arg3){
return (Notification.Builder) null;
}
public Notification.Builder setContent(android.widget.RemoteViews arg1){
return (Notification.Builder) null;
}
public Notification.Builder setWhen(long arg1){
return (Notification.Builder) null;
}
public Notification.Builder setSmallIcon(int arg1){
return (Notification.Builder) null;
}
public Notification.Builder setSmallIcon(int arg1, int arg2){
return (Notification.Builder) null;
}
public Notification.Builder setContentTitle(java.lang.CharSequence arg1){
return (Notification.Builder) null;
}
public Notification.Builder setContentText(java.lang.CharSequence arg1){
return (Notification.Builder) null;
}
public Notification.Builder setNumber(int arg1){
return (Notification.Builder) null;
}
public Notification.Builder setContentInfo(java.lang.CharSequence arg1){
return (Notification.Builder) null;
}
public Notification.Builder setContentIntent(PendingIntent arg1){
return (Notification.Builder) null;
}
public Notification.Builder setDeleteIntent(PendingIntent arg1){
return (Notification.Builder) null;
}
public Notification.Builder setFullScreenIntent(PendingIntent arg1, boolean arg2){
return (Notification.Builder) null;
}
public Notification.Builder setTicker(java.lang.CharSequence arg1){
return (Notification.Builder) null;
}
public Notification.Builder setTicker(java.lang.CharSequence arg1, android.widget.RemoteViews arg2){
return (Notification.Builder) null;
}
public Notification.Builder setLargeIcon(android.graphics.Bitmap arg1){
return (Notification.Builder) null;
}
public Notification.Builder setSound(android.net.Uri arg1){
return (Notification.Builder) null;
}
public Notification.Builder setSound(android.net.Uri arg1, int arg2){
return (Notification.Builder) null;
}
public Notification.Builder setVibrate(long [] arg1){
return (Notification.Builder) null;
}
public Notification.Builder setLights(int arg1, int arg2, int arg3){
return (Notification.Builder) null;
}
public Notification.Builder setOngoing(boolean arg1){
return (Notification.Builder) null;
}
public Notification.Builder setOnlyAlertOnce(boolean arg1){
return (Notification.Builder) null;
}
public Notification.Builder setAutoCancel(boolean arg1){
return (Notification.Builder) null;
}
public Notification.Builder setDefaults(int arg1){
return (Notification.Builder) null;
}
public Notification getNotification(){
return (Notification) null;
}
}
// Fields
public static final int DEFAULT_ALL = -1;
public static final int DEFAULT_SOUND = 1;
public static final int DEFAULT_VIBRATE = 2;
public static final int DEFAULT_LIGHTS = 4;
public long when;
public int icon;
public int iconLevel;
public int number;
public PendingIntent contentIntent;
public PendingIntent deleteIntent;
public PendingIntent fullScreenIntent;
public java.lang.CharSequence tickerText;
public android.widget.RemoteViews tickerView;
public android.widget.RemoteViews contentView;
public android.graphics.Bitmap largeIcon;
public android.net.Uri sound;
public static final int STREAM_DEFAULT = -1;
public int audioStreamType;
public long [] vibrate;
public int ledARGB;
public int ledOnMS;
public int ledOffMS;
public int defaults;
public static final int FLAG_SHOW_LIGHTS = 1;
public static final int FLAG_ONGOING_EVENT = 2;
public static final int FLAG_INSISTENT = 4;
public static final int FLAG_ONLY_ALERT_ONCE = 8;
public static final int FLAG_AUTO_CANCEL = 16;
public static final int FLAG_NO_CLEAR = 32;
public static final int FLAG_FOREGROUND_SERVICE = 64;
public static final int FLAG_HIGH_PRIORITY = 128;
public int flags;
public static final android.os.Parcelable.Creator<Notification> CREATOR = null;
// Constructors
public Notification(){
}
public Notification(android.content.Context arg1, int arg2, java.lang.CharSequence arg3, long arg4, java.lang.CharSequence arg5, java.lang.CharSequence arg6, android.content.Intent arg7){
}
public Notification(int arg1, java.lang.CharSequence arg2, long arg3){
}
public Notification(android.os.Parcel arg1){
}
// Methods
public java.lang.String toString(){
return (java.lang.String) null;
}
public Notification clone(){
return (Notification) null;
}
public void writeToParcel(android.os.Parcel arg1, int arg2){
}
public int describeContents(){
return 0;
}
public void setLatestEventInfo(android.content.Context arg1, java.lang.CharSequence arg2, java.lang.CharSequence arg3, PendingIntent arg4){
}
}
| |
package org.spincast.plugins.request;
import java.io.InputStream;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spincast.core.config.SpincastConfig;
import org.spincast.core.config.SpincastConstants;
import org.spincast.core.dictionary.Dictionary;
import org.spincast.core.exchange.RequestContext;
import org.spincast.core.exchange.RequestRequestContextAddon;
import org.spincast.core.flash.FlashMessage;
import org.spincast.core.flash.FlashMessagesHolder;
import org.spincast.core.json.JsonArray;
import org.spincast.core.json.JsonManager;
import org.spincast.core.json.JsonObject;
import org.spincast.core.request.Form;
import org.spincast.core.request.FormFactory;
import org.spincast.core.routing.ETag;
import org.spincast.core.routing.ETagFactory;
import org.spincast.core.routing.HttpMethod;
import org.spincast.core.server.Server;
import org.spincast.core.server.UploadedFile;
import org.spincast.core.utils.ContentTypeDefaults;
import org.spincast.core.utils.SpincastStatics;
import org.spincast.core.utils.SpincastUtils;
import org.spincast.core.xml.XmlManager;
import org.spincast.shaded.org.apache.commons.io.IOUtils;
import org.spincast.shaded.org.apache.commons.lang3.StringUtils;
import org.spincast.shaded.org.apache.http.HttpHeaders;
import org.spincast.shaded.org.apache.http.client.utils.DateUtils;
import com.google.inject.Inject;
public class SpincastRequestRequestContextAddon<R extends RequestContext<?>>
implements RequestRequestContextAddon<R> {
    protected static final Logger logger = LoggerFactory.getLogger(SpincastRequestRequestContextAddon.class);
    // Lazily computed URL variants, cached per request (null until first access).
    private String fullUrlOriginalWithCacheBustersNonDecoded = null;
    private String fullUrlOriginalNoCacheBustersNonDecoded = null;
    private String fullUrlProxiedWithCacheBustersNonDecoded = null;
    private String fullUrlProxiedNoCacheBustersNonDecoded = null;
    private String fullUrlWithCacheBustersNonDecoded = null;
    private String fullUrlWithCacheBustersDecoded = null;
    private String fullUrlNoCacheBustersDecoded = null;
    // Request path, with and without cache-buster segments.
    private String requestPathNoCacheBusters = null;
    private String requestPathWithCacheBusters = null;
    // Query string in raw and decoded form (empty string when absent).
    private String queryStringNonDecoded = "";
    private String queryStringDecoded = "";
    // Parsed conditional-request headers, each guarded by its own lock for
    // lazy thread-safe initialization.
    private List<ETag> ifMatchETags = null;
    private Object ifMatchETagsLock = new Object();
    private List<ETag> ifNoneMatchETags = null;
    private Object ifNoneMatchETagsLock = new Object();
    private Date ifModifiedSinceDate = null;
    private Object ifModifiedSinceDateLock = new Object();
    private Date ifUnmodifiedSinceDate = null;
    private Object ifUnmodifiedSinceDateLock = new Object();
    // Lazily parsed request data caches.
    private Map<String, List<String>> queryStringParams;
    private Map<String, List<String>> formDatasAsImmutableMap;
    private JsonObject formDatasAsImmutableJsonObject;
    private Map<String, List<UploadedFile>> uploadedFiles;
    private Map<String, List<String>> headers;
    private Map<String, Form> scopedForms;
    // Injected collaborators (all final; set once in the constructor).
    private final R requestContext;
    private final Server server;
    private final JsonManager jsonManager;
    private final XmlManager xmlManager;
    private final SpincastUtils spincastUtils;
    private final SpincastConfig spincastConfig;
    private final ETagFactory etagFactory;
    private final FlashMessagesHolder flashMessagesHolder;
    private final Dictionary dictionary;
    private final FormFactory formFactory;
    // Flash message is fetched at most once per request.
    private boolean flashMessageRetrieved = false;
    private FlashMessage flashMessage;
    // Pattern used to detect array-style form data keys (lazily compiled).
    private Pattern formDataArrayPattern;
    private Map<String, String> cookies;
    /**
     * Constructor. All collaborators are provided by Guice injection and are
     * only stored in fields; no other work is done here.
     */
    @Inject
    public SpincastRequestRequestContextAddon(R requestContext,
                                              Server server,
                                              JsonManager jsonManager,
                                              XmlManager xmlManager,
                                              SpincastUtils spincastUtils,
                                              SpincastConfig spincastConfig,
                                              ETagFactory etagFactory,
                                              FlashMessagesHolder flashMessagesHolder,
                                              FormFactory formFactory,
                                              Dictionary dictionary) {
        this.requestContext = requestContext;
        this.server = server;
        this.jsonManager = jsonManager;
        this.xmlManager = xmlManager;
        this.spincastUtils = spincastUtils;
        this.spincastConfig = spincastConfig;
        this.etagFactory = etagFactory;
        this.flashMessagesHolder = flashMessagesHolder;
        this.formFactory = formFactory;
        this.dictionary = dictionary;
    }
protected R getRequestContext() {
return this.requestContext;
}
protected Server getServer() {
return this.server;
}
protected JsonManager getJsonManager() {
return this.jsonManager;
}
protected XmlManager getXmlManager() {
return this.xmlManager;
}
protected SpincastUtils getSpincastUtils() {
return this.spincastUtils;
}
protected SpincastConfig getSpincastConfig() {
return this.spincastConfig;
}
protected ETagFactory getEtagFactory() {
return this.etagFactory;
}
protected FlashMessagesHolder getFlashMessagesHolder() {
return this.flashMessagesHolder;
}
protected FormFactory getFormFactory() {
return this.formFactory;
}
protected Dictionary getDictionary() {
return this.dictionary;
}
protected Object getExchange() {
return getRequestContext().exchange();
}
@Override
public HttpMethod getHttpMethod() {
return getServer().getHttpMethod(getExchange());
}
@Override
public ContentTypeDefaults getContentTypeBestMatch() {
return getServer().getContentTypeBestMatch(getExchange());
}
@Override
public boolean isJsonShouldBeReturn() {
return ContentTypeDefaults.JSON == getContentTypeBestMatch();
}
@Override
public boolean isHTMLShouldBeReturn() {
return ContentTypeDefaults.HTML == getContentTypeBestMatch();
}
@Override
public boolean isXMLShouldBeReturn() {
return ContentTypeDefaults.XML == getContentTypeBestMatch();
}
@Override
public boolean isPlainTextShouldBeReturn() {
return ContentTypeDefaults.TEXT == getContentTypeBestMatch();
}
    /**
     * Lazily compiled pattern used to detect form field names that end with an
     * explicit array index, ex: "key[3]". Group 1 captures the whole "[N]"
     * suffix, group 2 the index itself.
     * <p>
     * NOTE(review): the index sub-pattern "(0|[1-9]+[0-9]?)" does not accept
     * arbitrary integers — confirm whether large indexes are expected.
     * Lazy initialization is not synchronized; worst case the pattern is
     * compiled more than once, which is harmless.
     */
    protected Pattern getFormDataArrayPattern() {
        if (this.formDataArrayPattern == null) {
            this.formDataArrayPattern = Pattern.compile(".*(\\[(0|[1-9]+[0-9]?)\\])$");
        }
        return this.formDataArrayPattern;
    }
@Override
public String getCookieValue(String name) {
return getCookiesValues().get(name);
}
@Override
public Map<String, String> getCookiesValues() {
if (this.cookies == null) {
this.cookies = getServer().getCookies(getRequestContext().exchange());
}
return this.cookies;
}
@Override
public boolean isCookiesEnabledValidated() {
return getCookiesValues().size() > 0;
}
@Override
public Map<String, List<String>> getHeaders() {
if (this.headers == null) {
Map<String, List<String>> headersServer = getServer().getRequestHeaders(getExchange());
//==========================================
// We use a TreeMap with String.CASE_INSENSITIVE_ORDER so the
// keys are case insensitive!
//==========================================
Map<String, List<String>> headersFinal = new TreeMap<String, List<String>>(String.CASE_INSENSITIVE_ORDER);
if (headersServer == null) {
headersServer = Collections.emptyMap();
} else {
for (Entry<String, List<String>> entry : headersServer.entrySet()) {
if (entry.getValue() == null) {
headersFinal.put(entry.getKey(), Collections.<String>emptyList());
} else {
//==========================================
// We make sure everything is immutable
//==========================================
headersFinal.put(entry.getKey(), Collections.unmodifiableList(entry.getValue()));
}
}
//==========================================
// We make sure everything is immutable
//==========================================
headersFinal = Collections.unmodifiableMap(headersFinal);
}
this.headers = headersFinal;
}
return this.headers;
}
@Override
public List<String> getHeader(String name) {
if (StringUtils.isBlank(name)) {
return new LinkedList<String>();
}
// This get is case insensitive.
List<String> values = getHeaders().get(name);
if (values == null) {
values = new LinkedList<String>();
}
return values;
}
@Override
public String getHeaderFirst(String name) {
List<String> values = getHeader(name);
if (values != null && values.size() > 0) {
return values.get(0);
}
return null;
}
    /**
     * Lazily caches the full URL as originally received by the server:
     * cache busters removed, not URL-decoded.
     */
    protected String getFullUrlOriginalNoCacheBustersNonDecoded() {
        if (this.fullUrlOriginalNoCacheBustersNonDecoded == null) {
            this.fullUrlOriginalNoCacheBustersNonDecoded = getServer().getFullUrlOriginal(getExchange(), false);
        }
        return this.fullUrlOriginalNoCacheBustersNonDecoded;
    }

    /**
     * Lazily caches the full URL as originally received by the server:
     * cache busters kept, not URL-decoded.
     */
    protected String getFullUrlOriginalWithCacheBustersNonDecoded() {
        if (this.fullUrlOriginalWithCacheBustersNonDecoded == null) {
            this.fullUrlOriginalWithCacheBustersNonDecoded = getServer().getFullUrlOriginal(getExchange(), true);
        }
        return this.fullUrlOriginalWithCacheBustersNonDecoded;
    }

    /**
     * Lazily caches the full URL as seen through a potential reverse proxy:
     * cache busters removed, not URL-decoded.
     */
    protected String getFullUrlProxiedNoCacheBustersNonDecoded() {
        if (this.fullUrlProxiedNoCacheBustersNonDecoded == null) {
            this.fullUrlProxiedNoCacheBustersNonDecoded = getServer().getFullUrlProxied(getExchange(), false);
        }
        return this.fullUrlProxiedNoCacheBustersNonDecoded;
    }

    /**
     * Lazily caches the full URL as seen through a potential reverse proxy:
     * cache busters kept, not URL-decoded.
     */
    protected String getFullUrlProxiedWithCacheBustersNonDecoded() {
        if (this.fullUrlProxiedWithCacheBustersNonDecoded == null) {
            this.fullUrlProxiedWithCacheBustersNonDecoded = getServer().getFullUrlProxied(getExchange(), true);
        }
        return this.fullUrlProxiedWithCacheBustersNonDecoded;
    }
@Override
public String getFullUrlOriginal() {
return getFullUrlOriginal(false);
}
@Override
public String getFullUrlOriginal(boolean keepCacheBusters) {
if (keepCacheBusters) {
return getFullUrlOriginalWithCacheBustersNonDecoded();
} else {
return getFullUrlOriginalNoCacheBustersNonDecoded();
}
}
@Override
public String getFullUrlProxied() {
return getFullUrlProxied(false);
}
@Override
public String getFullUrlProxied(boolean keepCacheBusters) {
if (keepCacheBusters) {
return getFullUrlProxiedWithCacheBustersNonDecoded();
} else {
return getFullUrlProxiedNoCacheBustersNonDecoded();
}
}
@Override
public String getFullUrl() {
return getFullUrl(false);
}
@Override
public String getFullUrl(boolean keepCacheBusters) {
validateFullUrlInfoCache();
if (keepCacheBusters) {
return this.fullUrlWithCacheBustersDecoded;
} else {
return this.fullUrlNoCacheBustersDecoded;
}
}
    /**
     * Makes sure the cached "full URL" fields are in sync with the current
     * request, recomputing them (and reparsing path/querystring) only when
     * the effective URL changed — for example after a forward.
     */
    protected void validateFullUrlInfoCache() {
        try {
            String urlToUse;

            //==========================================
            // If the URL has been forwarded, we use it
            // as the regular "full Url".
            //==========================================
            String forwardUrl =
                    getRequestContext().variables().getAsString(SpincastConstants.RequestScopedVariables.FORWARD_ROUTE_URL);
            if (forwardUrl != null) {
                urlToUse = forwardUrl;
            } else {
                urlToUse = getFullUrlOriginalWithCacheBustersNonDecoded();
            }

            //==========================================
            // The regular "full URL" is already up-to-date:
            // nothing to recompute.
            //==========================================
            if (this.fullUrlWithCacheBustersNonDecoded != null && this.fullUrlWithCacheBustersNonDecoded.equals(urlToUse)) {
                return;
            }

            // Refresh the three cached variants, then reparse the URL
            // (path, querystring, querystring parameters).
            this.fullUrlWithCacheBustersNonDecoded = urlToUse;
            this.fullUrlWithCacheBustersDecoded = URLDecoder.decode(urlToUse, "UTF-8");
            this.fullUrlNoCacheBustersDecoded = getSpincastUtils().removeCacheBusterCodes(this.fullUrlWithCacheBustersDecoded);
            parseUrl();
        } catch (Exception ex) {
            throw SpincastStatics.runtimize(ex);
        }
    }
    /**
     * Parses the cached full URL into its components. Must run after the
     * "fullUrl*" fields have been refreshed (see validateFullUrlInfoCache()).
     * The call order matters: the querystring parameters are derived from
     * the querystring parsed just before.
     */
    protected void parseUrl() {
        parseRequestPath();
        parseQueryString();
        parseQueryStringParams();
    }

    /**
     * Extracts the request path, with and without cache busters, from the
     * decoded full URLs.
     */
    protected void parseRequestPath() {
        try {
            URL url = new URL(this.fullUrlWithCacheBustersDecoded);
            this.requestPathWithCacheBusters = url.getPath();

            url = new URL(this.fullUrlNoCacheBustersDecoded);
            this.requestPathNoCacheBusters = url.getPath();
        } catch (Exception ex) {
            throw SpincastStatics.runtimize(ex);
        }
    }

    /**
     * Extracts the querystring in both its raw (non-decoded) and UTF-8
     * decoded forms.
     */
    protected void parseQueryString() {
        try {
            String qs = getSpincastUtils().getQuerystringFromUrl(this.fullUrlWithCacheBustersNonDecoded);
            this.queryStringNonDecoded = qs;
            this.queryStringDecoded = URLDecoder.decode(qs, "UTF-8");
        } catch (Exception ex) {
            throw SpincastStatics.runtimize(ex);
        }
    }
public void parseQueryStringParams() {
Map<String, List<String>> paramsFinal =
getSpincastUtils().getParametersFromQuerystring(this.queryStringNonDecoded, false);
//==========================================
// Make sure everything is immutable.
//==========================================
Map<String, List<String>> mapWithInmutableLists = new HashMap<String, List<String>>();
for (Entry<String, List<String>> entry : paramsFinal.entrySet()) {
mapWithInmutableLists.put(entry.getKey(), Collections.unmodifiableList(entry.getValue()));
}
this.queryStringParams = paramsFinal;
}
@Override
public String getRequestPath() {
return getRequestPath(false);
}
@Override
public String getRequestPath(boolean keepCacheBusters) {
validateFullUrlInfoCache();
if (keepCacheBusters) {
return this.requestPathWithCacheBusters;
} else {
return this.requestPathNoCacheBusters;
}
}
@Override
public String getQueryString(boolean withQuestionMark) {
validateFullUrlInfoCache();
if (StringUtils.isBlank(this.queryStringDecoded)) {
return "";
}
return (withQuestionMark ? "?" : "") + this.queryStringDecoded;
}
@Override
public Map<String, List<String>> getQueryStringParams() {
validateFullUrlInfoCache();
return this.queryStringParams;
}
@Override
public List<String> getQueryStringParam(String name) {
List<String> values = getQueryStringParams().get(name);
if (values == null) {
values = Collections.emptyList();
}
return values;
}
@Override
public String getQueryStringParamFirst(String name) {
List<String> values = getQueryStringParam(name);
if (values != null && values.size() > 0) {
return values.get(0);
}
return null;
}
@Override
public Map<String, String> getPathParams() {
//==========================================
// We make sure everything is immutable
//==========================================
Map<String, String> pathParams = getRequestContext().routing().getCurrentRouteHandlerMatch().getPathParams();
if (pathParams == null) {
pathParams = Collections.emptyMap();
} else {
pathParams = Collections.unmodifiableMap(pathParams);
}
return pathParams;
}
@Override
public String getPathParam(String name) {
return getPathParams().get(name);
}
@Override
public InputStream getBodyAsInputStream() {
return getServer().getRawInputStream(getExchange());
}
@Override
public String getBodyAsString() {
return getStringBody("UTF-8");
}
@Override
public String getStringBody(String encoding) {
try {
InputStream inputStream = getBodyAsInputStream();
if (inputStream == null) {
return "";
}
return IOUtils.toString(inputStream, encoding);
} catch (Exception ex) {
throw SpincastStatics.runtimize(ex);
}
}
@Override
public byte[] getBodyAsByteArray() {
try {
InputStream inputStream = getBodyAsInputStream();
if (inputStream == null) {
return new byte[0];
}
return IOUtils.toByteArray(inputStream);
} catch (Exception ex) {
throw SpincastStatics.runtimize(ex);
}
}
@Override
public JsonObject getJsonBody() {
JsonObject obj = getJsonBody(JsonObject.class);
return obj.clone(false);
}
@Override
public Map<String, Object> getJsonBodyAsMap() {
@SuppressWarnings("unchecked")
Map<String, Object> obj = getJsonBody(Map.class);
return obj;
}
@Override
public <T> T getJsonBody(Class<T> clazz) {
try {
InputStream inputStream = getBodyAsInputStream();
if (inputStream == null) {
return null;
}
return getJsonManager().fromInputStream(inputStream, clazz);
} catch (Exception ex) {
throw SpincastStatics.runtimize(ex);
}
}
@Override
public JsonObject getXmlBodyAsJsonObject() {
JsonObject obj = getXmlBody(JsonObject.class);
return obj.clone(false);
}
@Override
public Map<String, Object> getXmlBodyAsMap() {
@SuppressWarnings("unchecked")
Map<String, Object> obj = getXmlBody(Map.class);
return obj;
}
@Override
public <T> T getXmlBody(Class<T> clazz) {
try {
InputStream inputStream = getBodyAsInputStream();
if (inputStream == null) {
return null;
}
return getXmlManager().fromXmlInputStream(inputStream, clazz);
} catch (Exception ex) {
throw SpincastStatics.runtimize(ex);
}
}
@Override
public Map<String, List<String>> getFormBodyRaw() {
if (this.formDatasAsImmutableMap == null) {
Map<String, List<String>> formDatasServer = getServer().getFormData(getExchange());
Map<String, List<String>> formDatasFinal = new HashMap<String, List<String>>();
if (formDatasServer == null) {
formDatasServer = Collections.emptyMap();
} else {
for (Entry<String, List<String>> entry : formDatasServer.entrySet()) {
if (entry.getValue() == null) {
formDatasFinal.put(entry.getKey(), Collections.<String>emptyList());
} else {
formDatasFinal.put(entry.getKey(), Collections.unmodifiableList(entry.getValue()));
}
}
formDatasFinal = Collections.unmodifiableMap(formDatasFinal);
}
this.formDatasAsImmutableMap = formDatasFinal;
}
return this.formDatasAsImmutableMap;
}
    /**
     * Returns the form body as an immutable JsonObject, lazily built from
     * {@link #getFormBodyRaw()}.
     * <p>
     * Keys ending with "[N]" are collected into arrays at the specified
     * positions; keys ending with "[]" (or keys received more than once)
     * become arrays in reception order.
     */
    @Override
    public JsonObject getFormBodyAsJsonObject() {

        if (this.formDatasAsImmutableJsonObject == null) {
            Map<String, List<String>> formDatasRaw = getFormBodyRaw();

            // base key (without the "[N]" suffix) => position => value
            Map<String, Map<Integer, String>> formDataArrays = new LinkedHashMap<String, Map<Integer, String>>();
            JsonObject obj = getJsonManager().create();
            for (Entry<String, List<String>> entry : formDatasRaw.entrySet()) {
                String key = entry.getKey();
                if (key == null) {
                    continue;
                }
                List<String> values = entry.getValue();

                //==========================================
                // If the key ends with "[X]" we consider
                // that it is part of an *ordered* array, which
                // means the position of the elements are defined.
                //==========================================
                Matcher matcher = getFormDataArrayPattern().matcher(key);
                if (matcher.matches()) {

                    // An indexed position can only hold one value.
                    if (values.size() > 1) {
                        logger.error("More than one Form Data received with the array " +
                                     "index name \"" + key + "\", " +
                                     "we'll only keep the last element.");
                        values = values.subList(values.size() - 1, values.size());
                    }
                    String value = values.get(0);

                    // Group 1 is the whole "[N]" suffix, group 2 the index.
                    String arrayPart = matcher.group(1);
                    String indexStr = matcher.group(2);
                    Integer index = Integer.parseInt(indexStr);
                    key = key.substring(0, key.length() - arrayPart.length());

                    Map<Integer, String> valuesMap = formDataArrays.get(key);
                    if (valuesMap == null) {
                        //==========================================
                        // TreeMap : we sort the values using their
                        // required position in the final array.
                        //==========================================
                        valuesMap = new TreeMap<>();
                        formDataArrays.put(key, valuesMap);
                    }
                    valuesMap.put(index, value);

                } else {

                    //==========================================
                    // If there are multiple elements with the same key,
                    // we create an array.
                    // Also, if the key ends with "[]", we consider
                    // the element as being part of an array.
                    //
                    // The elements in this array are ordered as they
                    // are received.
                    //==========================================
                    if (values.size() > 1 || key.endsWith("[]")) {
                        if (key.endsWith("[]")) {
                            key = key.substring(0, key.length() - "[]".length());
                        }
                        JsonArray array = getJsonManager().createArray();
                        array.addAll(values);
                        obj.set(key, array);
                    } else {
                        if (values.size() > 0) {
                            obj.set(key, values.get(0));
                        } else {
                            obj.set(key, null);
                        }
                    }
                }
            }

            //==========================================
            // We add the ordered arrays
            //==========================================
            for (Entry<String, Map<Integer, String>> entry : formDataArrays.entrySet()) {
                String key = entry.getKey();
                Map<Integer, String> valuesMap = entry.getValue();

                //==========================================
                // The base array may already exist...
                //==========================================
                JsonArray array;
                Object arrayObj = obj.getObject(key);
                if (arrayObj != null && (arrayObj instanceof JsonArray)) {
                    array = (JsonArray)arrayObj;
                } else {
                    array = getJsonManager().createArray();
                }

                // Fill the array up to the highest index; holes are padded
                // with null unless an element already exists there.
                LinkedList<Integer> indexes = new LinkedList<Integer>(valuesMap.keySet());
                Integer lastIndex = indexes.getLast();
                for (int i = 0; i <= lastIndex; i++) {
                    if (valuesMap.containsKey(i)) {
                        array.add(valuesMap.get(i));
                    } else if (!array.isElementExists(i)) {
                        array.add(null);
                    }
                }
                obj.set(key, array);
            }

            //==========================================
            // We make it immutable! This allows the caching
            // of already evaluated JsonPaths...
            //==========================================
            this.formDatasAsImmutableJsonObject = obj.clone(false);
        }
        return this.formDatasAsImmutableJsonObject;
    }
    /**
     * Returns the Form with the given root key, creating it from the
     * submitted form body when it doesn't exist yet.
     */
    @Override
    public Form getFormOrCreate(String rootKey) {
        return getForm(rootKey, true);
    }

    /**
     * Returns the Form with the given root key, or null when none exists.
     */
    @Override
    public Form getForm(String rootKey) {
        return getForm(rootKey, false);
    }

    /**
     * Returns the cached Form associated with the given root key, optionally
     * creating it from the form body when missing. Created forms are cached
     * per request in {@code scopedForms}.
     *
     * @throws RuntimeException when rootKey is blank.
     */
    protected Form getForm(String rootKey, boolean createIfNotFound) {
        if (StringUtils.isBlank(rootKey)) {
            //==========================================
            // This message doesn't have to be localized, but
            // we do it anyway simply as an example on how
            // plugins can add messages to the
            // Dictionary...
            //==========================================
            String msg = getDictionary().get(SpincastRequestPluginDictionaryEntries.MESSAGE_KEY_FORM_GET_EMPTYNAME);
            throw new RuntimeException(msg);
        }

        if (this.scopedForms == null) {
            this.scopedForms = new HashMap<String, Form>();
        }

        if (!this.scopedForms.containsKey(rootKey)) {
            if (createIfNotFound) {
                JsonObject formData = getFormBodyAsJsonObject().getJsonObjectOrEmpty(rootKey);
                Form form = getFormFactory().createForm(rootKey, formData);
                this.scopedForms.put(rootKey, form);
            } else {
                return null;
            }
        }

        Form form = this.scopedForms.get(rootKey);
        return form;
    }
@Override
public Map<String, List<UploadedFile>> getUploadedFiles() {
if (this.uploadedFiles == null) {
//==========================================
// We make sure everything is immutable
//==========================================
Map<String, List<UploadedFile>> uploadedFilesServer = getServer().getUploadedFiles(getExchange());
Map<String, List<UploadedFile>> uploadedFilesFinal = new HashMap<String, List<UploadedFile>>();
if (uploadedFilesServer == null) {
uploadedFilesServer = Collections.emptyMap();
} else {
for (Entry<String, List<UploadedFile>> entry : uploadedFilesServer.entrySet()) {
if (entry.getValue() == null) {
uploadedFilesFinal.put(entry.getKey(), Collections.<UploadedFile>emptyList());
} else {
uploadedFilesFinal.put(entry.getKey(), Collections.unmodifiableList(entry.getValue()));
}
}
uploadedFilesFinal = Collections.unmodifiableMap(uploadedFilesFinal);
}
this.uploadedFiles = uploadedFilesFinal;
}
return this.uploadedFiles;
}
@Override
public List<UploadedFile> getUploadedFiles(String name) {
List<UploadedFile> files = getUploadedFiles().get(name);
if (files == null) {
files = Collections.emptyList();
}
return files;
}
@Override
public UploadedFile getUploadedFileFirst(String name) {
List<UploadedFile> files = getUploadedFiles(name);
if (files.size() > 0) {
return files.get(0);
}
return null;
}
@Override
public Locale getLocaleBestMatch() {
String header = getHeaderFirst(HttpHeaders.ACCEPT_LANGUAGE);
Locale locale = getSpincastUtils().getLocaleBestMatchFromAcceptLanguageHeader(header);
if (locale == null) {
locale = getSpincastConfig().getDefaultLocale();
}
return locale;
}
@Override
public String getContentType() {
String contentType = getHeaderFirst(HttpHeaders.CONTENT_TYPE);
return contentType;
}
@Override
public boolean isHttps() {
return getFullUrl().trim().toLowerCase().startsWith("https://");
}
@Override
public List<ETag> getEtagsFromIfMatchHeader() {
if (this.ifMatchETags == null) {
synchronized (this.ifMatchETagsLock) {
if (this.ifMatchETags == null) {
this.ifMatchETags = parseETagHeader(HttpHeaders.IF_MATCH);
}
}
}
return this.ifMatchETags;
}
@Override
public List<ETag> getEtagsFromIfNoneMatchHeader() {
if (this.ifNoneMatchETags == null) {
synchronized (this.ifNoneMatchETagsLock) {
if (this.ifNoneMatchETags == null) {
this.ifNoneMatchETags = parseETagHeader(HttpHeaders.IF_NONE_MATCH);
}
}
}
return this.ifNoneMatchETags;
}
    /**
     * Parses an ETag header (If-Match / If-None-Match) into a list of ETags.
     * Invalid ETag values are logged and skipped, not propagated.
     */
    protected List<ETag> parseETagHeader(String headerName) {
        List<ETag> etags = new ArrayList<ETag>();

        List<String> eTagHeaders = getHeader(headerName);
        if (eTagHeaders != null) {
            for (String eTagHeader : eTagHeaders) {

                // Splits on commas that are *outside* double quotes.
                // Thanks to http://stackoverflow.com/a/1757107/843699
                String[] tokens = eTagHeader.split(",(?=([^\"]*\"[^\"]*\")*[^\"]*$)", -1);
                for (String eTagStr : tokens) {
                    try {
                        ETag eTag = getEtagFactory().deserializeHeaderValue(eTagStr);
                        etags.add(eTag);
                    } catch (Exception ex) {
                        logger.info("Invalid " + headerName + " ETag header value received: " + eTagStr);
                    }
                }
            }
        }
        return etags;
    }
@Override
public Date getDateFromIfModifiedSinceHeader() {
if (this.ifModifiedSinceDate == null) {
synchronized (this.ifModifiedSinceDateLock) {
if (this.ifModifiedSinceDate == null) {
this.ifModifiedSinceDate = parseDateHeader(HttpHeaders.IF_MODIFIED_SINCE);
}
}
}
return this.ifModifiedSinceDate;
}
@Override
public Date getDateFromIfUnmodifiedSinceHeader() {
if (this.ifUnmodifiedSinceDate == null) {
synchronized (this.ifUnmodifiedSinceDateLock) {
if (this.ifUnmodifiedSinceDate == null) {
this.ifUnmodifiedSinceDate = parseDateHeader(HttpHeaders.IF_UNMODIFIED_SINCE);
}
}
}
return this.ifUnmodifiedSinceDate;
}
/**
* Returns NULL if the date is not there or not parsable.
*/
protected Date parseDateHeader(String headerName) {
String value = getHeaderFirst(headerName);
if (value == null) {
return null;
}
try {
Date date = DateUtils.parseDate(value);
return date;
} catch (Exception ex) {
logger.info("Invalid '" + headerName + "' date received: " + value);
}
return null;
}
    /**
     * Returns true if a Flash Message exists, without consuming it.
     */
    @Override
    public boolean isFlashMessageExists() {
        return getFlashMessage(false) != null;
    }

    /**
     * Returns (and consumes) the Flash Message, if any. The result is cached
     * via the {@code flashMessageRetrieved} flag so the message is only
     * retrieved — and deleted — once per request.
     */
    @Override
    public FlashMessage getFlashMessage() {
        if (!this.flashMessageRetrieved) {
            this.flashMessageRetrieved = true;
            this.flashMessage = getFlashMessage(true);
        }
        return this.flashMessage;
    }

    /**
     * Looks for a Flash Message id in the dedicated cookie first, then in the
     * querystring, and resolves it against the FlashMessagesHolder.
     *
     * @param removeIt when true, the cookie and the stored message are deleted.
     */
    protected FlashMessage getFlashMessage(boolean removeIt) {
        String flashMessageId = null;

        //==========================================
        // TODO Maybe we should use a user session.
        //==========================================
        flashMessageId = getCookieValue(getSpincastConfig().getCookieNameFlashMessage());
        if (flashMessageId != null) {
            if (removeIt) {
                getRequestContext().response().deleteCookie(getSpincastConfig().getCookieNameFlashMessage());
            }
        //==========================================@formatter:off
        // No cookie: is the id in the queryString?
        //==========================================@formatter:on
        } else {
            flashMessageId = getQueryStringParamFirst(getSpincastConfig().getQueryParamFlashMessageId());
        }

        FlashMessage flashMessage = null;
        if (flashMessageId != null) {
            flashMessage = getFlashMessagesHolder().getFlashMessage(flashMessageId, removeIt);
        }
        return flashMessage;
    }
@Override
public String getIp() {
return getServer().getIp(getExchange());
}
}
| |
/**
* Copyright 2009-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.mapping;
import java.sql.ResultSet;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.TypeHandler;
import org.apache.ibatis.type.TypeHandlerRegistry;
/**
 * Describes how a single statement parameter maps between a Java property and
 * its JDBC counterpart. Instances are created through the nested
 * {@link Builder} and are not meant to be mutated afterwards.
 *
 * @author Clinton Begin
 */
public class ParameterMapping {

    private Configuration configuration;

    private String property;
    private ParameterMode mode;
    private Class<?> javaType = Object.class;
    private JdbcType jdbcType;
    private Integer numericScale;
    private TypeHandler<?> typeHandler;
    private String resultMapId;
    private String jdbcTypeName;
    private String expression;

    private ParameterMapping() {
    }

    /** Fluent builder for {@link ParameterMapping} instances. */
    public static class Builder {

        private final ParameterMapping mapping = new ParameterMapping();

        public Builder(Configuration configuration, String property, TypeHandler<?> typeHandler) {
            this.mapping.configuration = configuration;
            this.mapping.property = property;
            this.mapping.typeHandler = typeHandler;
            this.mapping.mode = ParameterMode.IN;
        }

        public Builder(Configuration configuration, String property, Class<?> javaType) {
            this.mapping.configuration = configuration;
            this.mapping.property = property;
            this.mapping.javaType = javaType;
            this.mapping.mode = ParameterMode.IN;
        }

        public Builder mode(ParameterMode mode) {
            this.mapping.mode = mode;
            return this;
        }

        public Builder javaType(Class<?> javaType) {
            this.mapping.javaType = javaType;
            return this;
        }

        public Builder jdbcType(JdbcType jdbcType) {
            this.mapping.jdbcType = jdbcType;
            return this;
        }

        public Builder numericScale(Integer numericScale) {
            this.mapping.numericScale = numericScale;
            return this;
        }

        public Builder resultMapId(String resultMapId) {
            this.mapping.resultMapId = resultMapId;
            return this;
        }

        public Builder typeHandler(TypeHandler<?> typeHandler) {
            this.mapping.typeHandler = typeHandler;
            return this;
        }

        public Builder jdbcTypeName(String jdbcTypeName) {
            this.mapping.jdbcTypeName = jdbcTypeName;
            return this;
        }

        public Builder expression(String expression) {
            this.mapping.expression = expression;
            return this;
        }

        /**
         * Resolves a missing type handler from the registry, validates the
         * mapping and returns it.
         */
        public ParameterMapping build() {
            resolveTypeHandler();
            validate();
            return this.mapping;
        }

        // ResultSet parameters need a resultmap; everything else needs a
        // (possibly resolved) type handler.
        private void validate() {
            if (ResultSet.class.equals(this.mapping.javaType)) {
                if (this.mapping.resultMapId == null) {
                    throw new IllegalStateException("Missing resultmap in property '"
                            + this.mapping.property + "'. "
                            + "Parameters of type java.sql.ResultSet require a resultmap.");
                }
            } else if (this.mapping.typeHandler == null) {
                throw new IllegalStateException("Type handler was null on parameter mapping for property '"
                        + this.mapping.property + "'. It was either not specified and/or could not be found for the javaType ("
                        + this.mapping.javaType.getName() + ") : jdbcType (" + this.mapping.jdbcType + ") combination.");
            }
        }

        // Looks up a type handler from the registry for the javaType/jdbcType
        // pair when none was supplied explicitly.
        private void resolveTypeHandler() {
            if (this.mapping.typeHandler == null && this.mapping.javaType != null) {
                Configuration configuration = this.mapping.configuration;
                TypeHandlerRegistry typeHandlerRegistry = configuration.getTypeHandlerRegistry();
                this.mapping.typeHandler = typeHandlerRegistry.getTypeHandler(this.mapping.javaType, this.mapping.jdbcType);
            }
        }
    }

    public String getProperty() {
        return this.property;
    }

    /**
     * Used for handling output of callable statements.
     */
    public ParameterMode getMode() {
        return this.mode;
    }

    /**
     * Used for handling output of callable statements.
     */
    public Class<?> getJavaType() {
        return this.javaType;
    }

    /**
     * Used in the UnknownTypeHandler in case there is no handler for the
     * property type.
     */
    public JdbcType getJdbcType() {
        return this.jdbcType;
    }

    /**
     * Used for handling output of callable statements.
     */
    public Integer getNumericScale() {
        return this.numericScale;
    }

    /**
     * Used when setting parameters to the PreparedStatement.
     */
    public TypeHandler<?> getTypeHandler() {
        return this.typeHandler;
    }

    /**
     * Used for handling output of callable statements.
     */
    public String getResultMapId() {
        return this.resultMapId;
    }

    /**
     * Used for handling output of callable statements.
     */
    public String getJdbcTypeName() {
        return this.jdbcTypeName;
    }

    /**
     * Not used.
     */
    public String getExpression() {
        return this.expression;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("ParameterMapping{");
        //sb.append("configuration=").append(configuration); // configuration doesn't have a useful .toString()
        sb.append("property='").append(this.property).append('\'');
        sb.append(", mode=").append(this.mode);
        sb.append(", javaType=").append(this.javaType);
        sb.append(", jdbcType=").append(this.jdbcType);
        sb.append(", numericScale=").append(this.numericScale);
        //sb.append(", typeHandler=").append(typeHandler); // typeHandler also doesn't have a useful .toString()
        sb.append(", resultMapId='").append(this.resultMapId).append('\'');
        sb.append(", jdbcTypeName='").append(this.jdbcTypeName).append('\'');
        sb.append(", expression='").append(this.expression).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest.swagger;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import io.swagger.models.Operation;
import io.swagger.models.Scheme;
import io.swagger.models.Swagger;
import io.swagger.models.parameters.QueryParameter;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultClassResolver;
import org.apache.camel.spi.RestConfiguration;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.entry;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class RestSwaggerEndpointTest {
URI componentJsonUri = URI.create("component.json");
URI endpointUri = URI.create("endpoint.json");
@Test(expected = IllegalArgumentException.class)
public void shouldComplainForUnknownOperations() throws Exception {
final CamelContext camelContext = mock(CamelContext.class);
when(camelContext.getClassResolver()).thenReturn(new DefaultClassResolver());
final RestSwaggerComponent component = new RestSwaggerComponent(camelContext);
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:unknown", "unknown", component);
endpoint.createProducer();
}
@Test
public void shouldCreateProducers() throws Exception {
final CamelContext camelContext = mock(CamelContext.class);
when(camelContext.getClassResolver()).thenReturn(new DefaultClassResolver());
final Endpoint endpointDelegate = mock(Endpoint.class);
when(camelContext.getEndpoint("rest:GET:/v2:/pet/{petId}")).thenReturn(endpointDelegate);
final Producer delegateProducer = mock(Producer.class);
when(endpointDelegate.createProducer()).thenReturn(delegateProducer);
final RestSwaggerComponent component = new RestSwaggerComponent(camelContext);
component.setHost("http://petstore.swagger.io");
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:getPetById", "getPetById",
component);
final Producer producer = endpoint.createProducer();
assertThat(producer).isSameAs(delegateProducer);
}
@Test
public void shouldCreateQueryParameterExpressions() {
assertThat(RestSwaggerEndpoint.queryParameterExpression(new QueryParameter())).isEmpty();
assertThat(RestSwaggerEndpoint.queryParameterExpression(new QueryParameter().name("q").required(true)))
.isEqualTo("q={q}");
assertThat(RestSwaggerEndpoint.queryParameterExpression(new QueryParameter().name("q").required(false)))
.isEqualTo("q={q?}");
}
    /**
     * Verifies the base-path precedence chain. The assertions are
     * order-dependent: each step mutates one more source of configuration and
     * checks that it wins over the previous ones.
     */
    @Test
    public void shouldDetermineBasePath() {
        final RestConfiguration restConfiguration = new RestConfiguration();

        final CamelContext camelContext = mock(CamelContext.class);
        when(camelContext.getRestConfiguration("rest-swagger", true)).thenReturn(restConfiguration);

        final Swagger swagger = new Swagger();

        final RestSwaggerComponent component = new RestSwaggerComponent();
        component.setCamelContext(camelContext);

        final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:getPetById", "getPetById",
            component);

        // 1. Nothing configured anywhere.
        assertThat(endpoint.determineBasePath(swagger))
            .as("When no base path is specified on component, endpoint or rest configuration it should default to `/`")
            .isEqualTo("/");

        // 2. REST configuration only.
        restConfiguration.setContextPath("/rest");
        assertThat(endpoint.determineBasePath(swagger))
            .as("When base path is specified in REST configuration and not specified in component the base path should be from the REST configuration")
            .isEqualTo("/rest");

        // 3. Swagger specification beats REST configuration.
        swagger.basePath("/specification");
        assertThat(endpoint.determineBasePath(swagger))
            .as("When base path is specified in the specification it should take precedence the one specified in the REST configuration")
            .isEqualTo("/specification");

        // 4. Component beats specification and REST configuration.
        component.setBasePath("/component");
        assertThat(endpoint.determineBasePath(swagger))
            .as("When base path is specified on the component it should take precedence over Swagger specification and REST configuration")
            .isEqualTo("/component");

        // 5. Endpoint beats everything.
        endpoint.setBasePath("/endpoint");
        assertThat(endpoint.determineBasePath(swagger))
            .as("When base path is specified on the endpoint it should take precedence over any other")
            .isEqualTo("/endpoint");
    }
@Test
public void shouldDetermineEndpointParameters() {
final CamelContext camelContext = mock(CamelContext.class);
final RestSwaggerComponent component = new RestSwaggerComponent();
component.setCamelContext(camelContext);
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("uri", "remaining", component);
endpoint.setHost("http://petstore.swagger.io");
final Swagger swagger = new Swagger();
final Operation operation = new Operation();
assertThat(endpoint.determineEndpointParameters(swagger, operation))
.containsOnly(entry("host", "http://petstore.swagger.io"));
component.setComponentName("xyz");
assertThat(endpoint.determineEndpointParameters(swagger, operation))
.containsOnly(entry("host", "http://petstore.swagger.io"), entry("componentName", "xyz"));
swagger.consumes("application/json").produces("application/xml");
assertThat(endpoint.determineEndpointParameters(swagger, operation)).containsOnly(
entry("host", "http://petstore.swagger.io"), entry("componentName", "xyz"),
entry("consumes", "application/xml"), entry("produces", "application/json"));
component.setProduces("application/json");
component.setConsumes("application/atom+xml");
assertThat(endpoint.determineEndpointParameters(swagger, operation)).containsOnly(
entry("host", "http://petstore.swagger.io"), entry("componentName", "xyz"),
entry("consumes", "application/atom+xml"), entry("produces", "application/json"));
endpoint.setProduces("application/atom+xml");
endpoint.setConsumes("application/json");
assertThat(endpoint.determineEndpointParameters(swagger, operation)).containsOnly(
entry("host", "http://petstore.swagger.io"), entry("componentName", "xyz"),
entry("consumes", "application/json"), entry("produces", "application/atom+xml"));
endpoint.setComponentName("zyx");
assertThat(endpoint.determineEndpointParameters(swagger, operation)).containsOnly(
entry("host", "http://petstore.swagger.io"), entry("componentName", "zyx"),
entry("consumes", "application/json"), entry("produces", "application/atom+xml"));
operation.addParameter(new QueryParameter().name("q").required(true));
assertThat(endpoint.determineEndpointParameters(swagger, operation)).containsOnly(
entry("host", "http://petstore.swagger.io"), entry("componentName", "zyx"),
entry("consumes", "application/json"), entry("produces", "application/atom+xml"),
entry("queryParameters", "q={q}"));
operation.addParameter(new QueryParameter().name("o"));
assertThat(endpoint.determineEndpointParameters(swagger, operation)).containsOnly(
entry("host", "http://petstore.swagger.io"), entry("componentName", "zyx"),
entry("consumes", "application/json"), entry("produces", "application/atom+xml"),
entry("queryParameters", "q={q}&o={o?}"));
}
@Test
public void shouldDetermineHostFromRestConfiguration() {
assertThat(RestSwaggerEndpoint.hostFrom(null)).isNull();
final RestConfiguration configuration = new RestConfiguration();
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isNull();
configuration.setScheme("ftp");
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isNull();
configuration.setScheme("http");
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isNull();
configuration.setHost("petstore.swagger.io");
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isEqualTo("http://petstore.swagger.io");
configuration.setPort(80);
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isEqualTo("http://petstore.swagger.io");
configuration.setPort(8080);
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isEqualTo("http://petstore.swagger.io:8080");
configuration.setScheme("https");
configuration.setPort(80);
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isEqualTo("https://petstore.swagger.io:80");
configuration.setPort(443);
assertThat(RestSwaggerEndpoint.hostFrom(configuration)).isEqualTo("https://petstore.swagger.io");
}
@Test
public void shouldDetermineHostFromSpecification() {
final RestSwaggerComponent component = new RestSwaggerComponent();
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:http://some-uri#getPetById",
"http://some-uri#getPetById", component);
final Swagger swagger = new Swagger();
swagger.host("petstore.swagger.io");
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://petstore.swagger.io");
swagger.schemes(Arrays.asList(Scheme.HTTPS));
assertThat(endpoint.determineHost(swagger)).isEqualTo("https://petstore.swagger.io");
}
@Test
public void shouldDetermineOptions() {
assertThat(RestSwaggerEndpoint.determineOption(null, null, null, null)).isNull();
assertThat(RestSwaggerEndpoint.determineOption(Collections.emptyList(), Collections.emptyList(), "", ""))
.isNull();
assertThat(RestSwaggerEndpoint.determineOption(Arrays.asList("specification"), null, null, null))
.isEqualTo("specification");
assertThat(
RestSwaggerEndpoint.determineOption(Arrays.asList("specification"), Arrays.asList("operation"), null, null))
.isEqualTo("operation");
assertThat(RestSwaggerEndpoint.determineOption(Arrays.asList("specification"), Arrays.asList("operation"),
"component", null)).isEqualTo("component");
assertThat(RestSwaggerEndpoint.determineOption(Arrays.asList("specification"), Arrays.asList("operation"),
"component", "operation")).isEqualTo("operation");
}
@Test
public void shouldHonourComponentSpecificationPathProperty() throws Exception {
final RestSwaggerComponent component = new RestSwaggerComponent();
component.setSpecificationUri(componentJsonUri);
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:getPetById", "getPetById",
component);
assertThat(endpoint.getSpecificationUri()).isEqualTo(componentJsonUri);
}
@Test
public void shouldHonourEndpointUriPathSpecificationPathProperty() throws Exception {
final RestSwaggerComponent component = new RestSwaggerComponent();
component.setSpecificationUri(componentJsonUri);
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:endpoint.json#getPetById",
"endpoint.json#getPetById", component);
assertThat(endpoint.getSpecificationUri()).isEqualTo(endpointUri);
}
@Test
public void shouldHonourHostPrecedence() {
final RestConfiguration globalRestConfiguration = new RestConfiguration();
final RestConfiguration componentRestConfiguration = new RestConfiguration();
final RestConfiguration specificRestConfiguration = new RestConfiguration();
final CamelContext camelContext = mock(CamelContext.class);
when(camelContext.getRestConfiguration()).thenReturn(globalRestConfiguration);
when(camelContext.getRestConfiguration("rest-swagger", false)).thenReturn(componentRestConfiguration);
when(camelContext.getRestConfiguration("petstore", false)).thenReturn(specificRestConfiguration);
final RestSwaggerComponent component = new RestSwaggerComponent();
component.setCamelContext(camelContext);
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("petstore:http://specification-uri#getPetById",
"http://specification-uri#getPetById", component);
final Swagger swagger = new Swagger();
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://specification-uri");
globalRestConfiguration.setHost("global-rest");
globalRestConfiguration.setScheme("http");
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://global-rest");
globalRestConfiguration.setHost("component-rest");
globalRestConfiguration.setScheme("http");
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://component-rest");
specificRestConfiguration.setHost("specific-rest");
specificRestConfiguration.setScheme("http");
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://specific-rest");
swagger.host("specification").scheme(Scheme.HTTP);
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://specification");
component.setHost("http://component");
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://component");
endpoint.setHost("http://endpoint");
assertThat(endpoint.determineHost(swagger)).isEqualTo("http://endpoint");
}
@Test
public void shouldLoadSwaggerSpecifications() throws IOException {
final CamelContext camelContext = mock(CamelContext.class);
when(camelContext.getClassResolver()).thenReturn(new DefaultClassResolver());
assertThat(
RestSwaggerEndpoint.loadSpecificationFrom(camelContext, RestSwaggerComponent.DEFAULT_SPECIFICATION_URI))
.isNotNull();
}
@Test
public void shouldPickBestScheme() {
assertThat(RestSwaggerEndpoint.pickBestScheme("http", Arrays.asList(Scheme.HTTP, Scheme.HTTPS)))
.isEqualTo("https");
assertThat(RestSwaggerEndpoint.pickBestScheme("https", Arrays.asList(Scheme.HTTP))).isEqualTo("http");
assertThat(RestSwaggerEndpoint.pickBestScheme("http", Collections.emptyList())).isEqualTo("http");
assertThat(RestSwaggerEndpoint.pickBestScheme("http", null)).isEqualTo("http");
assertThat(RestSwaggerEndpoint.pickBestScheme(null, Collections.emptyList())).isNull();
assertThat(RestSwaggerEndpoint.pickBestScheme(null, null)).isNull();
}
@Test(expected = IllegalArgumentException.class)
public void shouldRaiseExceptionsForMissingSpecifications() throws IOException {
final CamelContext camelContext = mock(CamelContext.class);
when(camelContext.getClassResolver()).thenReturn(new DefaultClassResolver());
RestSwaggerEndpoint.loadSpecificationFrom(camelContext, URI.create("non-existant.json"));
}
@Test
public void shouldUseDefaultSpecificationUri() throws Exception {
final RestSwaggerComponent component = new RestSwaggerComponent();
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:getPetById", "getPetById",
component);
assertThat(endpoint.getSpecificationUri()).isEqualTo(RestSwaggerComponent.DEFAULT_SPECIFICATION_URI);
}
@Test
public void shouldUseDefaultSpecificationUriEvenIfHashIsPresent() throws Exception {
final RestSwaggerComponent component = new RestSwaggerComponent();
final RestSwaggerEndpoint endpoint = new RestSwaggerEndpoint("rest-swagger:#getPetById", "#getPetById",
component);
assertThat(endpoint.getSpecificationUri()).isEqualTo(RestSwaggerComponent.DEFAULT_SPECIFICATION_URI);
}
}
| |
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.epoll;
import io.netty.channel.ChannelException;
import io.netty.channel.DefaultFileRegion;
import io.netty.channel.unix.NativeInetAddress;
import io.netty.channel.unix.PeerCredentials;
import io.netty.channel.unix.Socket;
import io.netty.channel.socket.InternetProtocolFamily;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.SocketUtils;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Inet6Address;
import java.net.NetworkInterface;
import java.net.UnknownHostException;
import java.util.Enumeration;
import static io.netty.channel.unix.Errors.ioResult;
/**
* A socket which provides access Linux native methods.
*/
final class LinuxSocket extends Socket {
    /** IPv6 wildcard address ("::"), used to detect interfaces with no usable v6 address. */
    static final InetAddress INET6_ANY = unsafeInetAddrByName("::");
    /** IPv4 wildcard address ("0.0.0.0"), used to detect interfaces with no usable v4 address. */
    private static final InetAddress INET_ANY = unsafeInetAddrByName("0.0.0.0");
    /** Largest value of a C uint32_t; used to range-check/mask values crossing the JNI boundary. */
    private static final long MAX_UINT32_T = 0xFFFFFFFFL;
    // NOTE(review): the `ipv6` flag used throughout this class is not declared in
    // this chunk — presumably a field of the Socket superclass; confirm it exists there.
    LinuxSocket(int fd) {
        super(fd);
    }
    // Address family of this socket, derived from the ipv6 flag.
    InternetProtocolFamily family() {
        return ipv6 ? InternetProtocolFamily.IPv6 : InternetProtocolFamily.IPv4;
    }
    // Batch send of datagrams via the native sendmmsg(2) wrapper; returns number of messages sent.
    int sendmmsg(NativeDatagramPacketArray.NativeDatagramPacket[] msgs,
                 int offset, int len) throws IOException {
        return Native.sendmmsg(intValue(), ipv6, msgs, offset, len);
    }
    // Batch receive of datagrams via the native recvmmsg(2) wrapper; returns number of messages received.
    int recvmmsg(NativeDatagramPacketArray.NativeDatagramPacket[] msgs,
                 int offset, int len) throws IOException {
        return Native.recvmmsg(intValue(), ipv6, msgs, offset, len);
    }
    void setTimeToLive(int ttl) throws IOException {
        setTimeToLive(intValue(), ttl);
    }
    // Sets the multicast interface from an address (resolving its interface index where possible).
    void setInterface(InetAddress address) throws IOException {
        final NativeInetAddress a = NativeInetAddress.newInstance(address);
        setInterface(intValue(), ipv6, a.address(), a.scopeId(), interfaceIndex(address));
    }
    // Sets the multicast interface from a NetworkInterface; rejects interfaces that
    // have no address of this socket's family (derived address equals the wildcard).
    void setNetworkInterface(NetworkInterface netInterface) throws IOException {
        InetAddress address = deriveInetAddress(netInterface, family() == InternetProtocolFamily.IPv6);
        if (address.equals(family() == InternetProtocolFamily.IPv4 ? INET_ANY : INET6_ANY)) {
            throw new IOException("NetworkInterface does not support " + family());
        }
        final NativeInetAddress nativeAddress = NativeInetAddress.newInstance(address);
        setInterface(intValue(), ipv6, nativeAddress.address(), nativeAddress.scopeId(), interfaceIndex(netInterface));
    }
    // Returns the first address of the configured multicast interface, or null if none.
    InetAddress getInterface() throws IOException {
        NetworkInterface inf = getNetworkInterface();
        if (inf != null) {
            Enumeration<InetAddress> addresses = SocketUtils.addressesFromNetworkInterface(inf);
            if (addresses.hasMoreElements()) {
                return addresses.nextElement();
            }
        }
        return null;
    }
    // Resolves the configured multicast interface. For IPv6 the native call yields an
    // interface index (requires Java 7+ NetworkInterface.getByIndex); for IPv4 it
    // yields a packed address that is looked up by address.
    NetworkInterface getNetworkInterface() throws IOException {
        int ret = getInterface(intValue(), ipv6);
        if (ipv6) {
            return PlatformDependent.javaVersion() >= 7 ? NetworkInterface.getByIndex(ret) : null;
        }
        InetAddress address = inetAddress(ret);
        return address != null ? NetworkInterface.getByInetAddress(address) : null;
    }
    // Converts a packed big-endian IPv4 address (as returned by the native layer)
    // into an InetAddress; returns null if the 4-byte form is rejected.
    private static InetAddress inetAddress(int value) {
        byte[] var1 = {
                (byte) (value >>> 24 & 255),
                (byte) (value >>> 16 & 255),
                (byte) (value >>> 8 & 255),
                (byte) (value & 255)
        };
        try {
            return InetAddress.getByAddress(var1);
        } catch (UnknownHostException ignore) {
            // Cannot happen for a well-formed 4-byte address; treat as "no interface".
            return null;
        }
    }
    // Joins a multicast group; when `source` is given performs a source-specific
    // multicast (SSM) join, which requires source and group of the same family.
    void joinGroup(InetAddress group, NetworkInterface netInterface, InetAddress source) throws IOException {
        final NativeInetAddress g = NativeInetAddress.newInstance(group);
        final boolean isIpv6 = group instanceof Inet6Address;
        final NativeInetAddress i = NativeInetAddress.newInstance(deriveInetAddress(netInterface, isIpv6));
        if (source != null) {
            if (source.getClass() != group.getClass()) {
                throw new IllegalArgumentException("Source address is different type to group");
            }
            final NativeInetAddress s = NativeInetAddress.newInstance(source);
            joinSsmGroup(intValue(), ipv6 && isIpv6, g.address(), i.address(),
                    g.scopeId(), interfaceIndex(netInterface), s.address());
        } else {
            joinGroup(intValue(), ipv6 && isIpv6, g.address(), i.address(), g.scopeId(), interfaceIndex(netInterface));
        }
    }
    // Leaves a multicast group; mirror of joinGroup, including the SSM variant.
    void leaveGroup(InetAddress group, NetworkInterface netInterface, InetAddress source) throws IOException {
        final NativeInetAddress g = NativeInetAddress.newInstance(group);
        final boolean isIpv6 = group instanceof Inet6Address;
        final NativeInetAddress i = NativeInetAddress.newInstance(deriveInetAddress(netInterface, isIpv6));
        if (source != null) {
            if (source.getClass() != group.getClass()) {
                throw new IllegalArgumentException("Source address is different type to group");
            }
            final NativeInetAddress s = NativeInetAddress.newInstance(source);
            leaveSsmGroup(intValue(), ipv6 && isIpv6, g.address(), i.address(),
                    g.scopeId(), interfaceIndex(netInterface), s.address());
        } else {
            leaveGroup(intValue(), ipv6 && isIpv6, g.address(), i.address(), g.scopeId(), interfaceIndex(netInterface));
        }
    }
    // Interface index lookup needs Java 7+ (NetworkInterface.getIndex); -1 means "unknown".
    private static int interfaceIndex(NetworkInterface networkInterface) {
        return PlatformDependent.javaVersion() >= 7 ? networkInterface.getIndex() : -1;
    }
    private static int interfaceIndex(InetAddress address) throws IOException {
        if (PlatformDependent.javaVersion() >= 7) {
            NetworkInterface iface = NetworkInterface.getByInetAddress(address);
            if (iface != null) {
                return iface.getIndex();
            }
        }
        return -1;
    }
    void setTcpDeferAccept(int deferAccept) throws IOException {
        setTcpDeferAccept(intValue(), deferAccept);
    }
    void setTcpQuickAck(boolean quickAck) throws IOException {
        setTcpQuickAck(intValue(), quickAck ? 1 : 0);
    }
    void setTcpCork(boolean tcpCork) throws IOException {
        setTcpCork(intValue(), tcpCork ? 1 : 0);
    }
    void setSoBusyPoll(int loopMicros) throws IOException {
        setSoBusyPoll(intValue(), loopMicros);
    }
    // TCP_NOTSENT_LOWAT is a uint32_t on the kernel side, so validate the Java long
    // before truncating to int for the JNI call.
    void setTcpNotSentLowAt(long tcpNotSentLowAt) throws IOException {
        if (tcpNotSentLowAt < 0 || tcpNotSentLowAt > MAX_UINT32_T) {
            throw new IllegalArgumentException("tcpNotSentLowAt must be a uint32_t");
        }
        setTcpNotSentLowAt(intValue(), (int) tcpNotSentLowAt);
    }
    void setTcpFastOpen(int tcpFastopenBacklog) throws IOException {
        setTcpFastOpen(intValue(), tcpFastopenBacklog);
    }
    void setTcpKeepIdle(int seconds) throws IOException {
        setTcpKeepIdle(intValue(), seconds);
    }
    void setTcpKeepIntvl(int seconds) throws IOException {
        setTcpKeepIntvl(intValue(), seconds);
    }
    void setTcpKeepCnt(int probes) throws IOException {
        setTcpKeepCnt(intValue(), probes);
    }
    void setTcpUserTimeout(int milliseconds) throws IOException {
        setTcpUserTimeout(intValue(), milliseconds);
    }
    void setIpFreeBind(boolean enabled) throws IOException {
        setIpFreeBind(intValue(), enabled ? 1 : 0);
    }
    void setIpTransparent(boolean enabled) throws IOException {
        setIpTransparent(intValue(), enabled ? 1 : 0);
    }
    void setIpRecvOrigDestAddr(boolean enabled) throws IOException {
        setIpRecvOrigDestAddr(intValue(), enabled ? 1 : 0);
    }
    int getTimeToLive() throws IOException {
        return getTimeToLive(intValue());
    }
    void getTcpInfo(EpollTcpInfo info) throws IOException {
        getTcpInfo(intValue(), info.info);
    }
    void setTcpMd5Sig(InetAddress address, byte[] key) throws IOException {
        final NativeInetAddress a = NativeInetAddress.newInstance(address);
        setTcpMd5Sig(intValue(), ipv6, a.address(), a.scopeId(), key);
    }
    boolean isTcpCork() throws IOException {
        return isTcpCork(intValue()) != 0;
    }
    int getSoBusyPoll() throws IOException {
        return getSoBusyPoll(intValue());
    }
    int getTcpDeferAccept() throws IOException {
        return getTcpDeferAccept(intValue());
    }
    boolean isTcpQuickAck() throws IOException {
        return isTcpQuickAck(intValue()) != 0;
    }
    // Mask with MAX_UINT32_T to re-widen the native uint32_t (returned as a signed int)
    // into a non-negative long.
    long getTcpNotSentLowAt() throws IOException {
        return getTcpNotSentLowAt(intValue()) & MAX_UINT32_T;
    }
    int getTcpKeepIdle() throws IOException {
        return getTcpKeepIdle(intValue());
    }
    int getTcpKeepIntvl() throws IOException {
        return getTcpKeepIntvl(intValue());
    }
    int getTcpKeepCnt() throws IOException {
        return getTcpKeepCnt(intValue());
    }
    int getTcpUserTimeout() throws IOException {
        return getTcpUserTimeout(intValue());
    }
    boolean isIpFreeBind() throws IOException {
        return isIpFreeBind(intValue()) != 0;
    }
    boolean isIpTransparent() throws IOException {
        return isIpTransparent(intValue()) != 0;
    }
    boolean isIpRecvOrigDestAddr() throws IOException {
        return isIpRecvOrigDestAddr(intValue()) != 0;
    }
    PeerCredentials getPeerCredentials() throws IOException {
        return getPeerCredentials(intValue());
    }
    // Note the inversion: IP_MULTICAST_LOOP==0 means loopback mode is disabled.
    boolean isLoopbackModeDisabled() throws IOException {
        return getIpMulticastLoop(intValue(), ipv6) == 0;
    }
    void setLoopbackModeDisabled(boolean loopbackModeDisabled) throws IOException {
        setIpMulticastLoop(intValue(), ipv6, loopbackModeDisabled ? 0 : 1);
    }
    // Zero-copy file transfer via sendfile(2); negative native results are translated
    // into IOExceptions by ioResult.
    long sendFile(DefaultFileRegion src, long baseOffset, long offset, long length) throws IOException {
        // Open the file-region as it may be created via the lazy constructor. This is needed as we directly access
        // the FileChannel field via JNI.
        src.open();
        long res = sendFile(intValue(), src, baseOffset, offset, length);
        if (res >= 0) {
            return res;
        }
        return ioResult("sendfile", (int) res);
    }
    // Picks the first address of the requested family from the interface, falling
    // back to the family's wildcard address when none is found (or interface is null).
    private static InetAddress deriveInetAddress(NetworkInterface netInterface, boolean ipv6) {
        final InetAddress ipAny = ipv6 ? INET6_ANY : INET_ANY;
        if (netInterface != null) {
            final Enumeration<InetAddress> ias = netInterface.getInetAddresses();
            while (ias.hasMoreElements()) {
                final InetAddress ia = ias.nextElement();
                final boolean isV6 = ia instanceof Inet6Address;
                if (isV6 == ipv6) {
                    return ia;
                }
            }
        }
        return ipAny;
    }
    public static LinuxSocket newSocketStream(boolean ipv6) {
        return new LinuxSocket(newSocketStream0(ipv6));
    }
    public static LinuxSocket newSocketStream() {
        return newSocketStream(isIPv6Preferred());
    }
    public static LinuxSocket newSocketDgram(boolean ipv6) {
        return new LinuxSocket(newSocketDgram0(ipv6));
    }
    public static LinuxSocket newSocketDgram() {
        return newSocketDgram(isIPv6Preferred());
    }
    public static LinuxSocket newSocketDomain() {
        return new LinuxSocket(newSocketDomain0());
    }
    // Resolves well-known literal addresses at class-init time; UnknownHostException
    // is impossible for literals, so it is rethrown as an unchecked ChannelException.
    private static InetAddress unsafeInetAddrByName(String inetName) {
        try {
            return InetAddress.getByName(inetName);
        } catch (UnknownHostException uhe) {
            throw new ChannelException(uhe);
        }
    }
    // --- JNI bindings (implemented in the netty-transport-native-epoll library) ---
    private static native void joinGroup(int fd, boolean ipv6, byte[] group, byte[] interfaceAddress,
                                         int scopeId, int interfaceIndex) throws IOException;
    private static native void joinSsmGroup(int fd, boolean ipv6, byte[] group, byte[] interfaceAddress,
                                            int scopeId, int interfaceIndex, byte[] source) throws IOException;
    private static native void leaveGroup(int fd, boolean ipv6, byte[] group, byte[] interfaceAddress,
                                          int scopeId, int interfaceIndex) throws IOException;
    private static native void leaveSsmGroup(int fd, boolean ipv6, byte[] group, byte[] interfaceAddress,
                                             int scopeId, int interfaceIndex, byte[] source) throws IOException;
    private static native long sendFile(int socketFd, DefaultFileRegion src, long baseOffset,
                                        long offset, long length) throws IOException;
    private static native int getTcpDeferAccept(int fd) throws IOException;
    private static native int isTcpQuickAck(int fd) throws IOException;
    private static native int isTcpCork(int fd) throws IOException;
    private static native int getSoBusyPoll(int fd) throws IOException;
    private static native int getTcpNotSentLowAt(int fd) throws IOException;
    private static native int getTcpKeepIdle(int fd) throws IOException;
    private static native int getTcpKeepIntvl(int fd) throws IOException;
    private static native int getTcpKeepCnt(int fd) throws IOException;
    private static native int getTcpUserTimeout(int fd) throws IOException;
    private static native int getTimeToLive(int fd) throws IOException;
    private static native int isIpFreeBind(int fd) throws IOException;
    private static native int isIpTransparent(int fd) throws IOException;
    private static native int isIpRecvOrigDestAddr(int fd) throws IOException;
    private static native void getTcpInfo(int fd, long[] array) throws IOException;
    private static native PeerCredentials getPeerCredentials(int fd) throws IOException;
    private static native void setTcpDeferAccept(int fd, int deferAccept) throws IOException;
    private static native void setTcpQuickAck(int fd, int quickAck) throws IOException;
    private static native void setTcpCork(int fd, int tcpCork) throws IOException;
    private static native void setSoBusyPoll(int fd, int loopMicros) throws IOException;
    private static native void setTcpNotSentLowAt(int fd, int tcpNotSentLowAt) throws IOException;
    private static native void setTcpFastOpen(int fd, int tcpFastopenBacklog) throws IOException;
    private static native void setTcpKeepIdle(int fd, int seconds) throws IOException;
    private static native void setTcpKeepIntvl(int fd, int seconds) throws IOException;
    private static native void setTcpKeepCnt(int fd, int probes) throws IOException;
    private static native void setTcpUserTimeout(int fd, int milliseconds)throws IOException;
    private static native void setIpFreeBind(int fd, int freeBind) throws IOException;
    private static native void setIpTransparent(int fd, int transparent) throws IOException;
    private static native void setIpRecvOrigDestAddr(int fd, int transparent) throws IOException;
    private static native void setTcpMd5Sig(
            int fd, boolean ipv6, byte[] address, int scopeId, byte[] key) throws IOException;
    private static native void setInterface(
            int fd, boolean ipv6, byte[] interfaceAddress, int scopeId, int networkInterfaceIndex) throws IOException;
    private static native int getInterface(int fd, boolean ipv6);
    private static native int getIpMulticastLoop(int fd, boolean ipv6) throws IOException;
    private static native void setIpMulticastLoop(int fd, boolean ipv6, int enabled) throws IOException;
    private static native void setTimeToLive(int fd, int ttl) throws IOException;
}
| |
package org.reactfx.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.IntFunction;
/**
 * A memory-optimized list-like container used via static helpers on a possibly
 * null reference: null means empty, a SingleElemHelper holds one element, and a
 * MultiElemHelper holds two or more. Mutating operations return the (possibly
 * new) helper to store back; callers must always use the returned value.
 */
public abstract class ListHelper<T> {
    /** Returns the element at {@code index}; throws for out-of-range (and for null/empty). */
    public static <T> T get(ListHelper<T> listHelper, int index) {
        Lists.checkIndex(index, size(listHelper)); // always throws for listHelper == null
        return listHelper.get(index);
    }
    /** Appends {@code elem}, promoting null -> SingleElemHelper -> MultiElemHelper as needed. */
    public static <T> ListHelper<T> add(ListHelper<T> listHelper, T elem) {
        if(listHelper == null) {
            return new SingleElemHelper<>(elem);
        } else {
            return listHelper.add(elem);
        }
    }
    /** Removes the first occurrence of {@code elem}; removing from empty is a no-op. */
    public static <T> ListHelper<T> remove(ListHelper<T> listHelper, T elem) {
        if(listHelper == null) {
            return listHelper;
        } else {
            return listHelper.remove(elem);
        }
    }
    /** Applies {@code f} to each element; no-op when empty. */
    public static <T> void forEach(ListHelper<T> listHelper, Consumer<? super T> f) {
        if(listHelper != null) {
            listHelper.forEach(f);
        }
    }
    /**
     * Applies {@code f} to elements in [from, to). checkRange rejects invalid
     * ranges up front, so the null helper is only dereferenced when from < to,
     * which checkRange has already proven impossible for an empty helper.
     */
    public static <T> void forEachBetween(
            ListHelper<T> listHelper, int from, int to, Consumer<? super T> f) {
        Lists.checkRange(from, to, size(listHelper));
        if(from < to) {
            listHelper.forEachBetween(from, to, f);
        }
    }
    /** Iterator over all elements; empty iterator for the null (empty) helper. */
    public static <T> Iterator<T> iterator(ListHelper<T> listHelper) {
        if(listHelper != null) {
            return listHelper.iterator();
        } else {
            return Collections.emptyIterator();
        }
    }
    /** Iterator over [from, to); range is validated against the current size. */
    public static <T> Iterator<T> iterator(ListHelper<T> listHelper, int from, int to) {
        Lists.checkRange(from, to, size(listHelper));
        if(from < to) {
            return listHelper.iterator(from, to);
        } else {
            return Collections.emptyIterator();
        }
    }
    /** Reduces the elements with {@code f}; Optional.empty() for the empty helper. */
    public static <T> Optional<T> reduce(ListHelper<T> listHelper, BinaryOperator<T> f) {
        if(listHelper == null) {
            return Optional.empty();
        } else {
            return listHelper.reduce(f);
        }
    }
    /** Folds the elements left-to-right starting from {@code unit}. */
    public static <T, U> U reduce(ListHelper<T> listHelper, U unit, BiFunction<U, T, U> f) {
        if(listHelper == null) {
            return unit;
        } else {
            return listHelper.reduce(unit, f);
        }
    }
    /** Copies the elements into a fresh array obtained from {@code allocator}. */
    public static <T> T[] toArray(ListHelper<T> listHelper, IntFunction<T[]> allocator) {
        if(listHelper == null) {
            return allocator.apply(0);
        } else {
            return listHelper.toArray(allocator);
        }
    }
    public static <T> boolean isEmpty(ListHelper<T> listHelper) {
        return listHelper == null;
    }
    public static <T> int size(ListHelper<T> listHelper) {
        if(listHelper == null) {
            return 0;
        } else {
            return listHelper.size();
        }
    }
    private ListHelper() {
        // private constructor to prevent subclassing
    };
    abstract T get(int index);
    abstract ListHelper<T> add(T elem);
    abstract ListHelper<T> remove(T elem);
    abstract void forEach(Consumer<? super T> f);
    abstract void forEachBetween(int from, int to, Consumer<? super T> f);
    abstract Iterator<T> iterator();
    abstract Iterator<T> iterator(int from, int to);
    abstract Optional<T> reduce(BinaryOperator<T> f);
    abstract <U> U reduce(U unit, BiFunction<U, T, U> f);
    abstract T[] toArray(IntFunction<T[]> allocator);
    abstract int size();
    /** Helper holding exactly one element. */
    private static class SingleElemHelper<T> extends ListHelper<T> {
        private final T elem;
        SingleElemHelper(T elem) {
            this.elem = elem;
        }
        @Override
        T get(int index) {
            assert index == 0;
            return elem;
        }
        @Override
        ListHelper<T> add(T elem) {
            // Growing past one element promotes to the multi-element representation.
            return new MultiElemHelper<>(this.elem, elem);
        }
        @Override
        ListHelper<T> remove(T elem) {
            // Removing the sole element shrinks back to the empty (null) representation.
            if(Objects.equals(this.elem, elem)) {
                return null;
            } else {
                return this;
            }
        }
        @Override
        void forEach(Consumer<? super T> f) {
            f.accept(elem);
        }
        @Override
        void forEachBetween(int from, int to, Consumer<? super T> f) {
            assert from == 0 && to == 1;
            f.accept(elem);
        }
        @Override
        Iterator<T> iterator() {
            // Single-shot iterator over the one element.
            return new Iterator<T>() {
                boolean hasNext = true;
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public T next() {
                    if(hasNext) {
                        hasNext = false;
                        return elem;
                    } else {
                        throw new NoSuchElementException();
                    }
                }
            };
        }
        @Override
        Iterator<T> iterator(int from, int to) {
            assert from == 0 && to == 1;
            return iterator();
        }
        @Override
        Optional<T> reduce(BinaryOperator<T> f) {
            return Optional.of(elem);
        }
        @Override
        <U> U reduce(U unit, BiFunction<U, T, U> f) {
            return f.apply(unit, elem);
        }
        @Override
        T[] toArray(IntFunction<T[]> allocator) {
            T[] res = allocator.apply(1);
            res[0] = elem;
            return res;
        }
        @Override
        int size() {
            return 1;
        }
    }
    /**
     * Helper holding two or more elements, with copy-on-write semantics while
     * iteration is in progress so that mutation during forEach/iteration does
     * not disturb in-flight traversals.
     */
    private static class MultiElemHelper<T> extends ListHelper<T> {
        private final List<T> elems;
        // when > 0, this ListHelper must be immutable,
        // i.e. use copy-on-write for mutating operations
        private int iterating = 0;
        @SafeVarargs
        MultiElemHelper(T... elems) {
            this(Arrays.asList(elems));
        }
        private MultiElemHelper(List<T> elems) {
            this.elems = new ArrayList<>(elems);
        }
        /** Fresh copy with iterating == 0, safe to mutate. */
        private MultiElemHelper<T> copy() {
            return new MultiElemHelper<>(elems);
        }
        @Override
        T get(int index) {
            return elems.get(index);
        }
        @Override
        ListHelper<T> add(T elem) {
            // Copy-on-write while any traversal is active; mutate in place otherwise.
            if(iterating > 0) {
                return copy().add(elem);
            } else {
                elems.add(elem);
                return this;
            }
        }
        @Override
        ListHelper<T> remove(T elem) {
            int idx = elems.indexOf(elem);
            if(idx == -1) {
                return this;
            } else {
                switch(elems.size()) {
                    // Sizes 0 and 1 are impossible for a MultiElemHelper.
                    case 0: // fall through
                    case 1: throw new AssertionError();
                    // Size 2 demotes to the single-element representation,
                    // keeping the element at the other index (1 - idx).
                    case 2: return new SingleElemHelper<>(elems.get(1-idx));
                    default:
                        if(iterating > 0) {
                            return copy().remove(elem);
                        } else {
                            elems.remove(elem);
                            return this;
                        }
                }
            }
        }
        @Override
        void forEach(Consumer<? super T> f) {
            // Bracket the traversal so concurrent mutation goes copy-on-write.
            ++iterating;
            try {
                elems.forEach(f);
            } finally {
                --iterating;
            }
        }
        @Override
        void forEachBetween(int from, int to, Consumer<? super T> f) {
            ++iterating;
            try {
                elems.subList(from, to).forEach(f);
            } finally {
                --iterating;
            }
        }
        @Override
        Iterator<T> iterator() {
            return iterator(0, elems.size());
        }
        @Override
        Iterator<T> iterator(int from, int to) {
            assert from < to;
            // The counter is decremented only when the iterator reaches `to`.
            // NOTE(review): an iterator abandoned before exhaustion never
            // decrements `iterating`, leaving this helper permanently in
            // copy-on-write mode — confirm whether this is intentional.
            ++iterating;
            return new Iterator<T>() {
                int next = from;
                @Override
                public boolean hasNext() {
                    return next < to;
                }
                @Override
                public T next() {
                    if(next < to) {
                        T res = elems.get(next);
                        ++next;
                        if(next == to) {
                            --iterating;
                        }
                        return res;
                    } else {
                        throw new NoSuchElementException();
                    }
                }
            };
        }
        @Override
        Optional<T> reduce(BinaryOperator<T> f) {
            return elems.stream().reduce(f);
        }
        @Override
        <U> U reduce(U unit, BiFunction<U, T, U> f) {
            U u = unit;
            for(T elem: elems) {
                u = f.apply(u, elem);
            }
            return u;
        }
        @Override
        T[] toArray(IntFunction<T[]> allocator) {
            return elems.toArray(allocator.apply(size()));
        }
        @Override
        int size() {
            return elems.size();
        }
    }
}
| |
package com.nenc.interpreter;
import java.util.HashMap;
/**
 * Provides the interpreter's built-in ("system") context: literal values
 * (true/false/null), arithmetic and comparison operators, short-circuiting
 * boolean operators, and the {@code std::} helper functions.
 *
 * <p>The map is built once at class-load time and shared; callers obtain it
 * via {@link #getSystemContextMap()}.
 */
public class SystemContextMap {
    private static HashMap<String, IValue> systemContextMap = initSystemContext();

    /** Utility holder; not instantiable. */
    private SystemContextMap() {
    }

    /**
     * Coerces an arbitrary value to a boolean using the interpreter's
     * truthiness rules: {@code null}, {@code false}, {@code 0}, {@code 0.0},
     * {@code 0.0f} and {@code ""} are falsy; everything else is truthy.
     *
     * @param obj value to coerce; may be {@code null}
     * @return the truthiness of {@code obj}
     */
    public static boolean toBool(Object obj) {
        // Fix: the original fell through to obj.equals(null), which throws
        // NullPointerException for a null argument; null is simply falsy.
        if (obj == null) return false;
        if (obj instanceof Boolean) return (boolean) obj;
        if (obj instanceof Integer) return !((Integer) obj).equals(0);
        if (obj instanceof String) return !((String) obj).isEmpty();
        if (obj instanceof Double) return !((Double) obj).equals(0.0);
        // Fix: Float.equals(Integer) is always false (different boxed types),
        // so 0.0f was wrongly truthy; compare the primitive value instead.
        if (obj instanceof Float) return ((Float) obj).floatValue() != 0.0f;
        // Any other non-null object is truthy.
        return true;
    }

    /**
     * Builds the system context map. Numeric operators unwrap their operands
     * as {@code double}.
     * NOTE(review): a non-Double operand will throw ClassCastException here —
     * presumably upstream evaluation guarantees doubles; confirm.
     */
    private static HashMap<String, IValue> initSystemContext() {
        HashMap<String, IValue> variableMap = new HashMap<>();
        // true
        variableMap.put("true", new Sys_Abstraction(new Sys_Variable[]{}, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                return true;
            }
        }));
        // false
        variableMap.put("false", new Sys_Abstraction(new Sys_Variable[]{}, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                return false;
            }
        }));
        // null
        variableMap.put("null", new Sys_Abstraction(new Sys_Variable[]{}, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                return null;
            }
        }));
        // + (numeric addition)
        variableMap.put("+", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                double v1 = (double) ctx.getVariable("v1").value.getValue(ctx);
                double v2 = (double) ctx.getVariable("v2").value.getValue(ctx);
                return v1 + v2;
            }
        }));
        // - (numeric subtraction)
        variableMap.put("-", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                double v1 = (double) ctx.getVariable("v1").value.getValue(ctx);
                double v2 = (double) ctx.getVariable("v2").value.getValue(ctx);
                return v1 - v2;
            }
        }));
        // * (numeric multiplication)
        variableMap.put("*", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                double v1 = (double) ctx.getVariable("v1").value.getValue(ctx);
                double v2 = (double) ctx.getVariable("v2").value.getValue(ctx);
                return v1 * v2;
            }
        }));
        // / (numeric division; v2 == 0 yields IEEE-754 Infinity/NaN)
        variableMap.put("/", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                double v1 = (double) ctx.getVariable("v1").value.getValue(ctx);
                double v2 = (double) ctx.getVariable("v2").value.getValue(ctx);
                return v1 / v2;
            }
        }));
        // >
        variableMap.put(">", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                double v1 = (double) ctx.getVariable("v1").value.getValue(ctx);
                double v2 = (double) ctx.getVariable("v2").value.getValue(ctx);
                return v1 > v2;
            }
        }));
        // < (fixed: comment previously said ">")
        variableMap.put("<", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                double v1 = (double) ctx.getVariable("v1").value.getValue(ctx);
                double v2 = (double) ctx.getVariable("v2").value.getValue(ctx);
                return v1 < v2;
            }
        }));
        // && (short-circuits: v2 is not evaluated when v1 is falsy)
        variableMap.put("&&", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                boolean v1 = toBool(ctx.getVariable("v1").value.getValue(ctx));
                if (!v1) return false;
                return toBool(ctx.getVariable("v2").value.getValue(ctx));
            }
        }));
        // || (short-circuits: v2 is not evaluated when v1 is truthy)
        variableMap.put("||", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("v1"),
                new Sys_Variable("v2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                boolean v1 = toBool(ctx.getVariable("v1").value.getValue(ctx));
                if (v1) return true;
                return toBool(ctx.getVariable("v2").value.getValue(ctx));
            }
        }));
        // std::if — returns p1 when c is truthy, otherwise p2
        variableMap.put("std::if", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("c"),
                new Sys_Variable("p1"),
                new Sys_Variable("p2")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                boolean c = toBool(ctx.getVariable("c").value.getValue(ctx));
                if (c) {
                    return ctx.getVariable("p1").value.getValue(ctx);
                } else {
                    return ctx.getVariable("p2").value.getValue(ctx);
                }
            }
        }));
        // std::error — aborts evaluation by throwing with the given message
        variableMap.put("std::error", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("error")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                throw new Error((String) ctx.getVariable("error").value.getValue(ctx));
            }
        }));
        // std::number — parses a string into a double
        variableMap.put("std::number", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("num")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                String numStr = (String) ctx.getVariable("num").value.getValue(ctx);
                return Double.parseDouble(numStr);
            }
        }));
        // std::string — asserts/returns its argument as a string
        variableMap.put("std::string", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("str")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                return (String) ctx.getVariable("str").value.getValue(ctx);
            }
        }));
        // std::object — builds a map from a flat [key1, value1, key2, value2, ...] list
        variableMap.put("std::object", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("list")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                Object[] list = (Object[]) ctx.getVariable("list").value.getValue(ctx);
                HashMap<String, Object> result = new HashMap<>();
                // Fix: the original advanced the index only once per iteration,
                // so every element after the first served both as a value and
                // as the next key. Step by two to consume (key, value) pairs;
                // a trailing unpaired element is ignored.
                for (int i = 0; i + 1 < list.length; i += 2) {
                    result.put((String) list[i], list[i + 1]);
                }
                return result;
            }
        }));
        // std::statements — evaluates to the last statement's value (null if empty)
        variableMap.put("std::statements", new Sys_Abstraction(new Sys_Variable[]{
                new Sys_Variable("statements")
        }, new ProgramTypes() {
            @Override
            public Object getValue(Context ctx) {
                Object[] statements = (Object[]) ctx.getVariable("statements").value.getValue(ctx);
                return statements.length == 0 ? null : statements[statements.length - 1];
            }
        }));
        return variableMap;
    }

    /** @return the shared, lazily-unchanging system context map */
    public static HashMap<String, IValue> getSystemContextMap() {
        return systemContextMap;
    }
}
| |
package com.dayuan.bean;
import java.util.Date;
/**
 * JavaBean holding the "base material" record for a bus insurance
 * application: company profile, the actual controller and spouse details,
 * family assets/liabilities, mortgage collateral information, plus the
 * submitting user and audit timestamps.
 *
 * <p>Plain data carrier: all fields are nullable, no validation is performed
 * here, and every field is exposed via a standard getter/setter pair.
 * NOTE(review): field semantics are inferred from their names — confirm
 * against the corresponding database table / mapper definitions.
 */
public class BusInsuranceBaseMaterial {
    private Integer id;
    // --- company profile ---
    private String companyName;
    private String setupTime;
    private String companyAddress;
    private String employeeNumber;
    private String companyLegal;
    private String associationCompany;
    private String otherHappening;
    // --- actual controller ---
    private String controllerName;
    private String controllerGender;
    private String controllerIdCard;
    private String controllerPhone;
    private String controllerAddress;
    private String ifLegal;
    // --- spouse ---
    private String spouseName;
    private String spousePhone;
    private String spouseIdCard;
    // --- assets and liabilities ---
    private String familyAssets;
    private String mainAssets;
    private String liabilities;
    // --- mortgage collateral ---
    private String mortgageOwner;
    private String mortgageAddress;
    private String mortgageArea;
    private String propertyNumber;
    private String evaluationPrice;
    private String evaluationTotalPrice;
    // --- audit fields (submitting user and timestamps) ---
    private String uName;
    private String uId;
    private Date createTime;
    private Date updateTime;

    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getCompanyName() {
        return companyName;
    }
    public void setCompanyName(String companyName) {
        this.companyName = companyName;
    }
    public String getSetupTime() {
        return setupTime;
    }
    public void setSetupTime(String setupTime) {
        this.setupTime = setupTime;
    }
    public String getCompanyAddress() {
        return companyAddress;
    }
    public void setCompanyAddress(String companyAddress) {
        this.companyAddress = companyAddress;
    }
    public String getEmployeeNumber() {
        return employeeNumber;
    }
    public void setEmployeeNumber(String employeeNumber) {
        this.employeeNumber = employeeNumber;
    }
    public String getCompanyLegal() {
        return companyLegal;
    }
    public void setCompanyLegal(String companyLegal) {
        this.companyLegal = companyLegal;
    }
    public String getAssociationCompany() {
        return associationCompany;
    }
    public void setAssociationCompany(String associationCompany) {
        this.associationCompany = associationCompany;
    }
    public String getOtherHappening() {
        return otherHappening;
    }
    public void setOtherHappening(String otherHappening) {
        this.otherHappening = otherHappening;
    }
    public String getControllerName() {
        return controllerName;
    }
    public void setControllerName(String controllerName) {
        this.controllerName = controllerName;
    }
    public String getControllerGender() {
        return controllerGender;
    }
    public void setControllerGender(String controllerGender) {
        this.controllerGender = controllerGender;
    }
    public String getControllerIdCard() {
        return controllerIdCard;
    }
    public void setControllerIdCard(String controllerIdCard) {
        this.controllerIdCard = controllerIdCard;
    }
    public String getControllerPhone() {
        return controllerPhone;
    }
    public void setControllerPhone(String controllerPhone) {
        this.controllerPhone = controllerPhone;
    }
    public String getControllerAddress() {
        return controllerAddress;
    }
    public void setControllerAddress(String controllerAddress) {
        this.controllerAddress = controllerAddress;
    }
    public String getIfLegal() {
        return ifLegal;
    }
    public void setIfLegal(String ifLegal) {
        this.ifLegal = ifLegal;
    }
    public String getSpouseName() {
        return spouseName;
    }
    public void setSpouseName(String spouseName) {
        this.spouseName = spouseName;
    }
    public String getSpousePhone() {
        return spousePhone;
    }
    public void setSpousePhone(String spousePhone) {
        this.spousePhone = spousePhone;
    }
    public String getSpouseIdCard() {
        return spouseIdCard;
    }
    public void setSpouseIdCard(String spouseIdCard) {
        this.spouseIdCard = spouseIdCard;
    }
    public String getFamilyAssets() {
        return familyAssets;
    }
    public void setFamilyAssets(String familyAssets) {
        this.familyAssets = familyAssets;
    }
    public String getMainAssets() {
        return mainAssets;
    }
    public void setMainAssets(String mainAssets) {
        this.mainAssets = mainAssets;
    }
    public String getLiabilities() {
        return liabilities;
    }
    public void setLiabilities(String liabilities) {
        this.liabilities = liabilities;
    }
    public String getMortgageOwner() {
        return mortgageOwner;
    }
    public void setMortgageOwner(String mortgageOwner) {
        this.mortgageOwner = mortgageOwner;
    }
    public String getMortgageAddress() {
        return mortgageAddress;
    }
    public void setMortgageAddress(String mortgageAddress) {
        this.mortgageAddress = mortgageAddress;
    }
    public String getMortgageArea() {
        return mortgageArea;
    }
    public void setMortgageArea(String mortgageArea) {
        this.mortgageArea = mortgageArea;
    }
    public String getPropertyNumber() {
        return propertyNumber;
    }
    public void setPropertyNumber(String propertyNumber) {
        this.propertyNumber = propertyNumber;
    }
    public String getEvaluationPrice() {
        return evaluationPrice;
    }
    public void setEvaluationPrice(String evaluationPrice) {
        this.evaluationPrice = evaluationPrice;
    }
    public String getEvaluationTotalPrice() {
        return evaluationTotalPrice;
    }
    public void setEvaluationTotalPrice(String evaluationTotalPrice) {
        this.evaluationTotalPrice = evaluationTotalPrice;
    }
    // getuName/getuId follow JavaBeans decapitalization for the "uName"/"uId"
    // properties; the unusual casing is intentional, do not "fix" it.
    public String getuName() {
        return uName;
    }
    public void setuName(String uName) {
        this.uName = uName;
    }
    public String getuId() {
        return uId;
    }
    public void setuId(String uId) {
        this.uId = uId;
    }
    public Date getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }
    public Date getUpdateTime() {
        return updateTime;
    }
    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.collect.Sets;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.lifecycle.SSTableSet;
import org.apache.cassandra.db.lifecycle.View;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.partitions.*;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.metrics.TableMetrics;
import org.apache.cassandra.thrift.ThriftResultsMerger;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.SearchIterator;
import org.apache.cassandra.utils.btree.BTreeSet;
import org.apache.cassandra.utils.memory.HeapAllocator;
/**
* General interface for storage-engine read queries.
*/
/**
 * Read command for a single partition restricted to an explicit set of
 * clustering names (a {@link ClusteringIndexNamesFilter}), merging the
 * requested rows from memtables and sstables.
 */
public class SinglePartitionNamesCommand extends SinglePartitionReadCommand<ClusteringIndexNamesFilter>
{
    // Smallest local deletion time seen among unrepaired sources merged by
    // this command; reported through oldestUnrepairedTombstone().
    private int oldestUnrepairedDeletionTime = Integer.MAX_VALUE;

    protected SinglePartitionNamesCommand(boolean isDigest,
                                          int digestVersion,
                                          boolean isForThrift,
                                          CFMetaData metadata,
                                          int nowInSec,
                                          ColumnFilter columnFilter,
                                          RowFilter rowFilter,
                                          DataLimits limits,
                                          DecoratedKey partitionKey,
                                          ClusteringIndexNamesFilter clusteringIndexFilter)
    {
        super(isDigest, digestVersion, isForThrift, metadata, nowInSec, columnFilter, rowFilter, limits, partitionKey, clusteringIndexFilter);
    }

    /** Convenience constructor for a non-digest, non-Thrift command. */
    public SinglePartitionNamesCommand(CFMetaData metadata,
                                       int nowInSec,
                                       ColumnFilter columnFilter,
                                       RowFilter rowFilter,
                                       DataLimits limits,
                                       DecoratedKey partitionKey,
                                       ClusteringIndexNamesFilter clusteringIndexFilter)
    {
        this(false, 0, false, metadata, nowInSec, columnFilter, rowFilter, limits, partitionKey, clusteringIndexFilter);
    }

    /** Convenience constructor taking a raw key, decorated via the metadata's partitioner. */
    public SinglePartitionNamesCommand(CFMetaData metadata,
                                       int nowInSec,
                                       ColumnFilter columnFilter,
                                       RowFilter rowFilter,
                                       DataLimits limits,
                                       ByteBuffer key,
                                       ClusteringIndexNamesFilter clusteringIndexFilter)
    {
        this(metadata, nowInSec, columnFilter, rowFilter, limits, metadata.decorateKey(key), clusteringIndexFilter);
    }

    /** @return a fresh command with the same parameters (resets per-execution state). */
    public SinglePartitionNamesCommand copy()
    {
        return new SinglePartitionNamesCommand(isDigestQuery(), digestVersion(), isForThrift(), metadata(), nowInSec(), columnFilter(), rowFilter(), limits(), partitionKey(), clusteringIndexFilter());
    }

    @Override
    protected int oldestUnrepairedTombstone()
    {
        return oldestUnrepairedDeletionTime;
    }

    /**
     * Reads the requested rows by merging memtable contents first, then
     * sstables in descending max-timestamp order, shrinking the names filter
     * as rows are fully satisfied so later (older) sstables can be skipped.
     *
     * @param copyOnHeap whether memtable data must be copied onto the heap
     *        before being retained past the read
     */
    protected UnfilteredRowIterator queryMemtableAndDiskInternal(ColumnFamilyStore cfs, boolean copyOnHeap)
    {
        Tracing.trace("Acquiring sstable references");
        ColumnFamilyStore.ViewFragment view = cfs.select(View.select(SSTableSet.LIVE, partitionKey()));
        ImmutableBTreePartition result = null;
        ClusteringIndexNamesFilter filter = clusteringIndexFilter();
        Tracing.trace("Merging memtable contents");
        for (Memtable memtable : view.memtables)
        {
            Partition partition = memtable.getPartition(partitionKey());
            if (partition == null)
                continue;
            try (UnfilteredRowIterator iter = filter.getUnfilteredRowIterator(columnFilter(), partition))
            {
                if (iter.isEmpty())
                    continue;
                UnfilteredRowIterator clonedFilter = copyOnHeap
                                                   ? UnfilteredRowIterators.cloningIterator(iter, HeapAllocator.instance)
                                                   : iter;
                // Memtable data is always treated as unrepaired (last arg false).
                result = add(isForThrift() ? ThriftResultsMerger.maybeWrap(clonedFilter, nowInSec()) : clonedFilter, result, false);
            }
        }
        /* add the SSTables on disk */
        // Sorted so the most recently written sstables are merged first,
        // enabling the timestamp-based early exits below.
        Collections.sort(view.sstables, SSTableReader.maxTimestampComparator);
        int sstablesIterated = 0;
        // read sorted sstables
        for (SSTableReader sstable : view.sstables)
        {
            // if we've already seen a partition tombstone with a timestamp greater
            // than the most recent update to this sstable, we're done, since the rest of the sstables
            // will also be older
            if (result != null && sstable.getMaxTimestamp() < result.partitionLevelDeletion().markedForDeleteAt())
                break;
            // Drop rows already fully answered by newer data; a null filter
            // means everything requested has been satisfied.
            long currentMaxTs = sstable.getMaxTimestamp();
            filter = reduceFilter(filter, result, currentMaxTs);
            if (filter == null)
                break;
            Tracing.trace("Merging data from sstable {}", sstable.descriptor.generation);
            sstable.incrementReadCount();
            try (UnfilteredRowIterator iter = filter.filter(sstable.iterator(partitionKey(), columnFilter(), filter.isReversed(), isForThrift())))
            {
                if (iter.isEmpty())
                    continue;
                sstablesIterated++;
                result = add(isForThrift() ? ThriftResultsMerger.maybeWrap(iter, nowInSec()) : iter, result, sstable.isRepaired());
            }
        }
        cfs.metric.updateSSTableIterated(sstablesIterated);
        if (result == null || result.isEmpty())
            return EmptyIterators.unfilteredRow(metadata(), partitionKey(), false);
        DecoratedKey key = result.partitionKey();
        cfs.metric.samplers.get(TableMetrics.Sampler.READS).addSample(key.getKey(), key.hashCode(), 1);
        // "hoist up" the requested data into a more recent sstable
        if (sstablesIterated > cfs.getMinimumCompactionThreshold()
            && !cfs.isAutoCompactionDisabled()
            && cfs.getCompactionStrategyManager().shouldDefragment())
        {
            // !!WARNING!! if we stop copying our data to a heap-managed object,
            // we will need to track the lifetime of this mutation as well
            Tracing.trace("Defragmenting requested data");
            try (UnfilteredRowIterator iter = result.unfilteredIterator(columnFilter(), Slices.ALL, false))
            {
                final Mutation mutation = new Mutation(PartitionUpdate.fromIterator(iter));
                StageManager.getStage(Stage.MUTATION).execute(new Runnable()
                {
                    public void run()
                    {
                        // skipping commitlog and index updates is fine since we're just de-fragmenting existing data
                        Keyspace.open(mutation.getKeyspaceName()).apply(mutation, false, false);
                    }
                });
            }
        }
        return withStateTracking(result.unfilteredIterator(columnFilter(), Slices.ALL, clusteringIndexFilter().isReversed()));
    }

    /**
     * Merges {@code iter} into the accumulated {@code result}, updating the
     * oldest-unrepaired-tombstone bookkeeping when the source is unrepaired.
     */
    private ImmutableBTreePartition add(UnfilteredRowIterator iter, ImmutableBTreePartition result, boolean isRepaired)
    {
        if (!isRepaired)
            oldestUnrepairedDeletionTime = Math.min(oldestUnrepairedDeletionTime, iter.stats().minLocalDeletionTime);
        // Sizing hint for the partition's backing btree (at least 1).
        int maxRows = Math.max(clusteringIndexFilter().requestedRows().size(), 1);
        if (result == null)
            return ImmutableBTreePartition.create(iter, maxRows);
        try (UnfilteredRowIterator merged = UnfilteredRowIterators.merge(Arrays.asList(iter, result.unfilteredIterator(columnFilter(), Slices.ALL, clusteringIndexFilter().isReversed())), nowInSec()))
        {
            return ImmutableBTreePartition.create(merged, maxRows);
        }
    }

    /**
     * Shrinks the names filter by removing rows whose requested columns are
     * already fully answered by data newer than {@code sstableTimestamp}.
     *
     * @return the (possibly reduced) filter, or {@code null} when nothing
     *         remains to be fetched
     */
    private ClusteringIndexNamesFilter reduceFilter(ClusteringIndexNamesFilter filter, Partition result, long sstableTimestamp)
    {
        if (result == null)
            return filter;
        SearchIterator<Clustering, Row> searchIter = result.searchIterator(columnFilter(), false);
        PartitionColumns columns = columnFilter().fetchedColumns();
        NavigableSet<Clustering> clusterings = filter.requestedRows();
        // We want to remove rows for which we have values for all requested columns. We have to deal with both static and regular rows.
        // TODO: we could also remove a selected column if we've found values for every requested row but we'll leave
        // that for later.
        boolean removeStatic = false;
        if (!columns.statics.isEmpty())
        {
            Row staticRow = searchIter.next(Clustering.STATIC_CLUSTERING);
            removeStatic = staticRow != null && canRemoveRow(staticRow, columns.statics, sstableTimestamp);
        }
        NavigableSet<Clustering> toRemove = null;
        for (Clustering clustering : clusterings)
        {
            if (!searchIter.hasNext())
                break;
            Row row = searchIter.next(clustering);
            if (row == null || !canRemoveRow(row, columns.regulars, sstableTimestamp))
                continue;
            if (toRemove == null)
                toRemove = new TreeSet<>(result.metadata().comparator);
            toRemove.add(clustering);
        }
        if (!removeStatic && toRemove == null)
            return filter;
        // Check if we have everything we need
        boolean hasNoMoreStatic = columns.statics.isEmpty() || removeStatic;
        boolean hasNoMoreClusterings = clusterings.isEmpty() || (toRemove != null && toRemove.size() == clusterings.size());
        if (hasNoMoreStatic && hasNoMoreClusterings)
            return null;
        if (toRemove != null)
        {
            BTreeSet.Builder<Clustering> newClusterings = BTreeSet.builder(result.metadata().comparator);
            newClusterings.addAll(Sets.difference(clusterings, toRemove));
            clusterings = newClusterings.build();
        }
        return new ClusteringIndexNamesFilter(clusterings, filter.isReversed());
    }

    /**
     * Whether {@code row} can be dropped from the filter: true only if both
     * its liveness info and every requested (non-collection) column are
     * strictly newer than {@code sstableTimestamp}.
     */
    private boolean canRemoveRow(Row row, Columns requestedColumns, long sstableTimestamp)
    {
        // We can remove a row if it has data that is more recent that the next sstable to consider for the data that the query
        // cares about. And the data we care about is 1) the row timestamp (since every query cares if the row exists or not)
        // and 2) the requested columns.
        if (row.primaryKeyLivenessInfo().isEmpty() || row.primaryKeyLivenessInfo().timestamp() <= sstableTimestamp)
            return false;
        for (ColumnDefinition column : requestedColumns)
        {
            // We can never be sure we have all of a collection, so never remove rows in that case.
            if (column.type.isCollection())
                return false;
            Cell cell = row.getCell(column);
            if (cell == null || cell.timestamp() <= sstableTimestamp)
                return false;
        }
        return true;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spark.execution;
import com.facebook.airlift.concurrent.SetThreadName;
import com.facebook.presto.event.SplitMonitor;
import com.facebook.presto.execution.Lifespan;
import com.facebook.presto.execution.ScheduledSplit;
import com.facebook.presto.execution.SplitRunner;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskSource;
import com.facebook.presto.execution.TaskStateMachine;
import com.facebook.presto.execution.executor.TaskExecutor;
import com.facebook.presto.execution.executor.TaskHandle;
import com.facebook.presto.operator.Driver;
import com.facebook.presto.operator.DriverContext;
import com.facebook.presto.operator.DriverFactory;
import com.facebook.presto.operator.DriverStats;
import com.facebook.presto.operator.PipelineContext;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.spi.plan.PlanNodeId;
import com.facebook.presto.sql.planner.LocalExecutionPlanner.LocalExecutionPlan;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.Duration;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static com.facebook.presto.SystemSessionProperties.getInitialSplitsPerNode;
import static com.facebook.presto.SystemSessionProperties.getMaxDriversPerTask;
import static com.facebook.presto.SystemSessionProperties.getSplitConcurrencyAdjustmentInterval;
import static com.facebook.presto.operator.PipelineExecutionStrategy.UNGROUPED_EXECUTION;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* The PrestoSparkTaskExecution is a simplified version of SqlTaskExecution.
* It doesn't support grouped execution that is not needed on Presto on Spark.
* Unlike the SqlTaskExecution the PrestoSparkTaskExecution does not require
* the output buffer to be drained to mark the task as finished. As long as
* all driver as finished the task execution is marked as finished. That allows to
* have more control over the output Iterator lifecycle in the PrestoSparkTaskExecutor
*/
public class PrestoSparkTaskExecution
{
private final TaskId taskId;
private final TaskStateMachine taskStateMachine;
private final TaskContext taskContext;
private final TaskHandle taskHandle;
private final TaskExecutor taskExecutor;
private final Executor notificationExecutor;
private final SplitMonitor splitMonitor;
private final List<PlanNodeId> schedulingOrder;
private final Map<PlanNodeId, DriverSplitRunnerFactory> driverRunnerFactoriesWithSplitLifeCycle;
private final List<DriverSplitRunnerFactory> driverRunnerFactoriesWithTaskLifeCycle;
/**
* Number of drivers that have been sent to the TaskExecutor that have not finished.
*/
private final AtomicInteger remainingDrivers = new AtomicInteger();
private final AtomicBoolean started = new AtomicBoolean();
    /**
     * Creates the task execution and indexes the plan's driver factories:
     * factories whose source pipeline is a table scan get one driver per
     * split ("split lifecycle"), all other factories get a fixed number of
     * task-wide drivers ("task lifecycle").
     *
     * @throws IllegalArgumentException if a non-table-scan pipeline is not
     *         UNGROUPED_EXECUTION, or a table-scan source has no matching factory
     * @throws NullPointerException if any argument is null
     */
    public PrestoSparkTaskExecution(
            TaskStateMachine taskStateMachine,
            TaskContext taskContext,
            LocalExecutionPlan localExecutionPlan,
            TaskExecutor taskExecutor,
            SplitMonitor splitMonitor,
            Executor notificationExecutor,
            ScheduledExecutorService memoryUpdateExecutor)
    {
        this.taskStateMachine = requireNonNull(taskStateMachine, "taskStateMachine is null");
        this.taskId = taskStateMachine.getTaskId();
        this.taskContext = requireNonNull(taskContext, "taskContext is null");
        this.taskExecutor = requireNonNull(taskExecutor, "driverExecutor is null");
        this.notificationExecutor = requireNonNull(notificationExecutor, "notificationExecutor is null");
        this.splitMonitor = requireNonNull(splitMonitor, "splitMonitor is null");
        // index driver factories
        schedulingOrder = localExecutionPlan.getTableScanSourceOrder();
        Set<PlanNodeId> tableScanSources = ImmutableSet.copyOf(schedulingOrder);
        ImmutableMap.Builder<PlanNodeId, DriverSplitRunnerFactory> driverRunnerFactoriesWithSplitLifeCycle = ImmutableMap.builder();
        ImmutableList.Builder<DriverSplitRunnerFactory> driverRunnerFactoriesWithTaskLifeCycle = ImmutableList.builder();
        for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
            Optional<PlanNodeId> sourceId = driverFactory.getSourceId();
            if (sourceId.isPresent() && tableScanSources.contains(sourceId.get())) {
                // table scan pipeline: one driver per split
                driverRunnerFactoriesWithSplitLifeCycle.put(sourceId.get(), new DriverSplitRunnerFactory(driverFactory, true));
            }
            else {
                // grouped execution is not supported on Presto-on-Spark
                checkArgument(
                        driverFactory.getPipelineExecutionStrategy() == UNGROUPED_EXECUTION,
                        "unexpected pipeline execution strategy: %s",
                        driverFactory.getPipelineExecutionStrategy());
                driverRunnerFactoriesWithTaskLifeCycle.add(new DriverSplitRunnerFactory(driverFactory, false));
            }
        }
        this.driverRunnerFactoriesWithSplitLifeCycle = driverRunnerFactoriesWithSplitLifeCycle.build();
        this.driverRunnerFactoriesWithTaskLifeCycle = driverRunnerFactoriesWithTaskLifeCycle.build();
        // every table scan source declared in the scheduling order must have a factory
        checkArgument(this.driverRunnerFactoriesWithSplitLifeCycle.keySet().equals(tableScanSources),
                "Fragment is partitioned, but not all partitioned drivers were found");
        taskHandle = createTaskHandle(taskStateMachine, taskContext, localExecutionPlan, taskExecutor);
        requireNonNull(memoryUpdateExecutor, "memoryUpdateExecutor is null");
        // NOTE(review): this schedules a single peak-memory update 1s after
        // construction, not a periodic refresh — confirm that is intended.
        memoryUpdateExecutor.schedule(taskContext::updatePeakMemory, 1, SECONDS);
    }
    // this is a separate method to ensure that the `this` reference is not leaked during construction
    private static TaskHandle createTaskHandle(
            TaskStateMachine taskStateMachine,
            TaskContext taskContext,
            LocalExecutionPlan localExecutionPlan,
            TaskExecutor taskExecutor)
    {
        // Register the task with the executor, pulling concurrency settings
        // from the session. NOTE(review): the () -> 0 supplier presumably
        // reports output-buffer utilization (always zero here, as this
        // execution has no output buffer) — confirm against TaskExecutor.addTask.
        TaskHandle taskHandle = taskExecutor.addTask(
                taskStateMachine.getTaskId(),
                () -> 0,
                getInitialSplitsPerNode(taskContext.getSession()),
                getSplitConcurrencyAdjustmentInterval(taskContext.getSession()),
                getMaxDriversPerTask(taskContext.getSession()));
        // On any terminal state: deregister the task and tell every driver
        // factory that no further drivers will be created.
        taskStateMachine.addStateChangeListener(state -> {
            if (state.isDone()) {
                taskExecutor.removeTask(taskHandle);
                for (DriverFactory factory : localExecutionPlan.getDriverFactories()) {
                    factory.noMoreDrivers();
                }
            }
        });
        return taskHandle;
    }
public void start(List<TaskSource> sources)
{
requireNonNull(sources, "sources is null");
checkState(started.compareAndSet(false, true), "already started");
scheduleDriversForTaskLifeCycle();
scheduleDriversForSplitLifeCycle(sources);
checkTaskCompletion();
}
private void scheduleDriversForTaskLifeCycle()
{
List<DriverSplitRunner> runners = new ArrayList<>();
for (DriverSplitRunnerFactory driverRunnerFactory : driverRunnerFactoriesWithTaskLifeCycle) {
for (int i = 0; i < driverRunnerFactory.getDriverInstances().orElse(1); i++) {
runners.add(driverRunnerFactory.createDriverRunner(null));
}
}
enqueueDriverSplitRunner(true, runners);
for (DriverSplitRunnerFactory driverRunnerFactory : driverRunnerFactoriesWithTaskLifeCycle) {
driverRunnerFactory.noMoreDriverRunner();
verify(driverRunnerFactory.isNoMoreDriverRunner());
}
}
private synchronized void scheduleDriversForSplitLifeCycle(List<TaskSource> sources)
{
checkArgument(sources.stream().allMatch(TaskSource::isNoMoreSplits), "All task sources are expected to be final");
ListMultimap<PlanNodeId, ScheduledSplit> splits = ArrayListMultimap.create();
for (TaskSource taskSource : sources) {
splits.putAll(taskSource.getPlanNodeId(), taskSource.getSplits());
}
for (PlanNodeId planNodeId : schedulingOrder) {
DriverSplitRunnerFactory driverSplitRunnerFactory = driverRunnerFactoriesWithSplitLifeCycle.get(planNodeId);
List<ScheduledSplit> planNodeSplits = splits.get(planNodeId);
scheduleTableScanSource(driverSplitRunnerFactory, planNodeSplits);
}
}
private synchronized void scheduleTableScanSource(DriverSplitRunnerFactory factory, List<ScheduledSplit> splits)
{
factory.splitsAdded(splits.size());
// Enqueue driver runners with split lifecycle for this plan node and driver life cycle combination.
ImmutableList.Builder<DriverSplitRunner> runners = ImmutableList.builder();
for (ScheduledSplit scheduledSplit : splits) {
// create a new driver for the split
runners.add(factory.createDriverRunner(scheduledSplit));
}
enqueueDriverSplitRunner(false, runners.build());
factory.noMoreDriverRunner();
}
    /**
     * Hands the runners to the TaskExecutor and attaches completion callbacks
     * that maintain {@code remainingDrivers}, re-check task completion, and
     * publish split-completed / split-failed events.
     *
     * @param forceRunSplit whether the executor should run these splits
     *        immediately rather than queueing them
     */
    private synchronized void enqueueDriverSplitRunner(boolean forceRunSplit, List<DriverSplitRunner> runners)
    {
        // schedule driver to be executed
        List<ListenableFuture<?>> finishedFutures = taskExecutor.enqueueSplits(taskHandle, forceRunSplit, runners);
        checkState(finishedFutures.size() == runners.size(), "Expected %s futures but got %s", runners.size(), finishedFutures.size());
        // when driver completes, update state and fire events
        for (int i = 0; i < finishedFutures.size(); i++) {
            ListenableFuture<?> finishedFuture = finishedFutures.get(i);
            final DriverSplitRunner splitRunner = runners.get(i);
            // record new driver
            // Incremented before the callback is attached so a fast-finishing
            // driver cannot observe/decrement a count that was never raised.
            remainingDrivers.incrementAndGet();
            Futures.addCallback(finishedFuture, new FutureCallback<Object>()
            {
                @Override
                public void onSuccess(Object result)
                {
                    try (SetThreadName ignored = new SetThreadName("Task-%s", taskId)) {
                        // record driver is finished
                        remainingDrivers.decrementAndGet();
                        checkTaskCompletion();
                        splitMonitor.splitCompletedEvent(taskId, getDriverStats());
                    }
                }
                @Override
                public void onFailure(Throwable cause)
                {
                    try (SetThreadName ignored = new SetThreadName("Task-%s", taskId)) {
                        // fail the whole task before releasing the driver count
                        taskStateMachine.failed(cause);
                        // record driver is finished
                        remainingDrivers.decrementAndGet();
                        // fire failed event with cause
                        splitMonitor.splitFailedEvent(taskId, getDriverStats(), cause);
                    }
                }
                /** Stats from the driver context, or empty stats if the runner never started. */
                private DriverStats getDriverStats()
                {
                    DriverContext driverContext = splitRunner.getDriverContext();
                    DriverStats driverStats;
                    if (driverContext != null) {
                        driverStats = driverContext.getDriverStats();
                    }
                    else {
                        // split runner did not start successfully
                        driverStats = new DriverStats();
                    }
                    return driverStats;
                }
            }, notificationExecutor);
        }
    }
private synchronized void checkTaskCompletion()
{
if (taskStateMachine.getState().isDone()) {
return;
}
// are there more partition splits expected?
for (DriverSplitRunnerFactory driverSplitRunnerFactory : driverRunnerFactoriesWithSplitLifeCycle.values()) {
if (!driverSplitRunnerFactory.isNoMoreDriverRunner()) {
return;
}
}
// do we still have running tasks?
if (remainingDrivers.get() != 0) {
return;
}
// Cool! All done!
taskStateMachine.finished();
}
@Override
public String toString()
{
return toStringHelper(this)
.add("taskId", taskId)
.add("remainingDrivers", remainingDrivers.get())
.toString();
}
    /**
     * Creates {@link DriverSplitRunner}s for a single pipeline and tracks how
     * many drivers are still pending creation, so the underlying DriverFactory
     * can be closed exactly once, after the final driver has been built.
     */
    private class DriverSplitRunnerFactory
    {
        private final DriverFactory driverFactory;
        private final PipelineContext pipelineContext;
        // runners handed out by createDriverRunner() whose Driver has not yet been built
        private final AtomicInteger pendingCreation = new AtomicInteger();
        // set once all runners for this pipeline have been created
        private final AtomicBoolean noMoreDriverRunner = new AtomicBoolean();
        // guards the one-time close of the driver factory (see closeDriverFactoryIfFullyCreated)
        private final AtomicBoolean closed = new AtomicBoolean();
        private DriverSplitRunnerFactory(DriverFactory driverFactory, boolean partitioned)
        {
            this.driverFactory = requireNonNull(driverFactory, "driverFactory is null");
            this.pipelineContext = taskContext.addPipelineContext(driverFactory.getPipelineId(), driverFactory.isInputDriver(), driverFactory.isOutputDriver(), partitioned);
        }
        // Creates a runner for the given split; the actual Driver is built lazily in
        // createDriver() the first time the runner is processed.
        public DriverSplitRunner createDriverRunner(@Nullable ScheduledSplit partitionedSplit)
        {
            checkState(!noMoreDriverRunner.get(), "Cannot create driver for pipeline: %s", pipelineContext.getPipelineId());
            pendingCreation.incrementAndGet();
            // create driver context immediately so the driver existence is recorded in the stats
            // the number of drivers is used to balance work across nodes
            DriverContext driverContext = pipelineContext.addDriverContext(Lifespan.taskWide(), driverFactory.getFragmentResultCacheContext());
            return new DriverSplitRunner(this, driverContext, partitionedSplit);
        }
        // Builds the Driver for a previously created runner and, if this was the last
        // pending driver and no more runners are expected, closes the factory.
        public Driver createDriver(DriverContext driverContext, @Nullable ScheduledSplit partitionedSplit)
        {
            Driver driver = driverFactory.createDriver(driverContext);
            if (partitionedSplit != null) {
                // TableScanOperator requires partitioned split to be added before the first call to process
                driver.updateSource(new TaskSource(partitionedSplit.getPlanNodeId(), ImmutableSet.of(partitionedSplit), true));
            }
            verify(pendingCreation.get() > 0, "pendingCreation is expected to be greater than zero");
            pendingCreation.decrementAndGet();
            closeDriverFactoryIfFullyCreated();
            return driver;
        }
        // Marks that no further runners will be created; idempotent.
        public void noMoreDriverRunner()
        {
            if (noMoreDriverRunner.get()) {
                return;
            }
            noMoreDriverRunner.set(true);
            closeDriverFactoryIfFullyCreated();
        }
        public boolean isNoMoreDriverRunner()
        {
            return noMoreDriverRunner.get();
        }
        // Closes the driver factory when no more runners are expected and every
        // pending driver has been built; compareAndSet makes the close single-shot
        // even when called concurrently from createDriver and noMoreDriverRunner.
        public void closeDriverFactoryIfFullyCreated()
        {
            if (closed.get()) {
                return;
            }
            if (isNoMoreDriverRunner() && pendingCreation.get() == 0) {
                // ensure noMoreDrivers is called only once
                if (!closed.compareAndSet(false, true)) {
                    return;
                }
                driverFactory.noMoreDrivers(Lifespan.taskWide());
                driverFactory.noMoreDrivers();
            }
        }
        public OptionalInt getDriverInstances()
        {
            return driverFactory.getDriverInstances();
        }
        // Records newly scheduled splits against the pipeline's statistics.
        public void splitsAdded(int count)
        {
            pipelineContext.splitsAdded(count);
        }
    }
    /**
     * SplitRunner that materializes its Driver lazily on the first call to
     * {@link #processFor}, so drivers for queued splits are not created until
     * they actually begin executing.
     */
    private static class DriverSplitRunner
            implements SplitRunner
    {
        private final DriverSplitRunnerFactory driverSplitRunnerFactory;
        private final DriverContext driverContext;
        @GuardedBy("this")
        private boolean closed;
        // split to feed to the driver's table scan; null for non-partitioned pipelines
        @Nullable
        private final ScheduledSplit partitionedSplit;
        // created lazily in processFor(); remains null if close() wins the race
        @GuardedBy("this")
        private Driver driver;
        private DriverSplitRunner(DriverSplitRunnerFactory driverSplitRunnerFactory, DriverContext driverContext, @Nullable ScheduledSplit partitionedSplit)
        {
            this.driverSplitRunnerFactory = requireNonNull(driverSplitRunnerFactory, "driverFactory is null");
            this.driverContext = requireNonNull(driverContext, "driverContext is null");
            this.partitionedSplit = partitionedSplit;
        }
        // Returns the driver's context, or null when the driver was never created.
        public synchronized DriverContext getDriverContext()
        {
            if (driver == null) {
                return null;
            }
            return driver.getDriverContext();
        }
        @Override
        public synchronized boolean isFinished()
        {
            if (closed) {
                return true;
            }
            return driver != null && driver.isFinished();
        }
        @Override
        public ListenableFuture<?> processFor(Duration duration)
        {
            Driver driver;
            synchronized (this) {
                // if close() was called before we get here, there's no point in even creating the driver
                if (closed) {
                    return Futures.immediateFuture(null);
                }
                if (this.driver == null) {
                    this.driver = driverSplitRunnerFactory.createDriver(driverContext, partitionedSplit);
                }
                driver = this.driver;
            }
            // process outside the lock so close() is never blocked by a running driver
            return driver.processFor(duration);
        }
        @Override
        public String getInfo()
        {
            return (partitionedSplit == null) ? "" : partitionedSplit.getSplit().getInfo().toString();
        }
        @Override
        public void close()
        {
            Driver driver;
            synchronized (this) {
                // setting closed under the lock prevents processFor() from creating a new driver afterwards
                closed = true;
                driver = this.driver;
            }
            if (driver != null) {
                driver.close();
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.tez.dag.app;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.service.AbstractService;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.tez.common.ContainerContext;
import org.apache.tez.common.ContainerTask;
import org.apache.tez.common.TezConverterUtils;
import org.apache.tez.common.TezLocalResource;
import org.apache.tez.common.TezTaskUmbilicalProtocol;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.app.dag.DAG;
import org.apache.tez.dag.app.dag.Task;
import org.apache.tez.dag.app.dag.event.TaskAttemptEventStartedRemotely;
import org.apache.tez.dag.app.dag.event.VertexEventRouteEvent;
import org.apache.tez.dag.app.rm.container.AMContainerImpl;
import org.apache.tez.dag.app.rm.container.AMContainerTask;
import org.apache.tez.dag.app.security.authorize.TezAMPolicyProvider;
import org.apache.tez.dag.records.TezTaskAttemptID;
import org.apache.tez.dag.records.TezVertexID;
import org.apache.tez.runtime.api.impl.TezEvent;
import org.apache.tez.runtime.api.impl.TezHeartbeatRequest;
import org.apache.tez.runtime.api.impl.TezHeartbeatResponse;
import org.apache.tez.common.security.JobTokenSecretManager;
import com.google.common.collect.Maps;
/**
 * The task "umbilical" RPC endpoint inside the Tez ApplicationMaster.
 * Running task containers connect back here to fetch work ({@link #getTask}),
 * heartbeat and exchange events ({@link #heartbeat}), and request commit
 * permission ({@link #canCommit}). It also tracks which task attempt is
 * currently assigned to which container.
 */
@SuppressWarnings("unchecked")
public class TaskAttemptListenerImpTezDag extends AbstractService implements
    TezTaskUmbilicalProtocol, TaskAttemptListener {

  // Sentinel handed to containers that are unknown, unregistered, or no longer
  // needed; receiving it instructs the child JVM to shut itself down.
  private static final ContainerTask TASK_FOR_INVALID_JVM = new ContainerTask(
      null, true, null, null, false);

  private static final Log LOG = LogFactory
      .getLog(TaskAttemptListenerImpTezDag.class);

  private final AppContext context;

  protected final TaskHeartbeatHandler taskHeartbeatHandler;
  protected final ContainerHeartbeatHandler containerHeartbeatHandler;
  private final JobTokenSecretManager jobTokenSecretManager;

  // Bind address of the umbilical RPC server; set once the server starts.
  private InetSocketAddress address;
  private Server server;

  /**
   * Per-container heartbeat bookkeeping. Callers synchronize on the instance
   * while processing a heartbeat so the request id, cached response, and
   * current attempt stay mutually consistent.
   */
  class ContainerInfo {
    ContainerInfo(ContainerId containerId) {
      this.containerId = containerId;
      this.lastReponse = null;
      this.lastRequestId = 0;
      this.currentAttemptId = null;
    }
    ContainerId containerId;
    // Sequence number of the last heartbeat processed for this container.
    long lastRequestId;
    // Cached response, re-sent when the container retries the same request id.
    // (Field name keeps its historical spelling to avoid ripples.)
    TezHeartbeatResponse lastReponse;
    // Attempt currently assigned to this container; null when idle.
    TezTaskAttemptID currentAttemptId;
  }

  // task attempt -> container currently running it
  private ConcurrentMap<TezTaskAttemptID, ContainerId> attemptToInfoMap =
      new ConcurrentHashMap<TezTaskAttemptID, ContainerId>();
  // containers registered as running and eligible for task assignment
  private ConcurrentHashMap<ContainerId, ContainerInfo> registeredContainers =
      new ConcurrentHashMap<ContainerId, ContainerInfo>();

  public TaskAttemptListenerImpTezDag(AppContext context,
      TaskHeartbeatHandler thh, ContainerHeartbeatHandler chh,
      JobTokenSecretManager jobTokenSecretManager) {
    super(TaskAttemptListenerImpTezDag.class.getName());
    this.context = context;
    this.jobTokenSecretManager = jobTokenSecretManager;
    this.taskHeartbeatHandler = thh;
    this.containerHeartbeatHandler = chh;
  }

  @Override
  public void serviceStart() {
    startRpcServer();
  }

  /**
   * Starts the umbilical RPC server on an ephemeral port on all interfaces,
   * optionally enabling service-level authorization.
   *
   * @throws TezUncheckedException if the server cannot be created or started
   */
  protected void startRpcServer() {
    Configuration conf = getConfig();
    try {
      server = new RPC.Builder(conf)
          .setProtocol(TezTaskUmbilicalProtocol.class)
          .setBindAddress("0.0.0.0")
          .setPort(0)
          .setInstance(this)
          .setNumHandlers(
              conf.getInt(TezConfiguration.TEZ_AM_TASK_LISTENER_THREAD_COUNT,
                  TezConfiguration.TEZ_AM_TASK_LISTENER_THREAD_COUNT_DEFAULT))
          .setSecretManager(jobTokenSecretManager).build();
      // Enable service authorization?
      if (conf.getBoolean(
          CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
          false)) {
        refreshServiceAcls(conf, new TezAMPolicyProvider());
      }
      server.start();
      this.address = NetUtils.getConnectAddress(server);
    } catch (IOException e) {
      throw new TezUncheckedException(e);
    }
  }

  void refreshServiceAcls(Configuration configuration,
      PolicyProvider policyProvider) {
    this.server.refreshServiceAcl(configuration, policyProvider);
  }

  @Override
  public void serviceStop() {
    stopRpcServer();
  }

  protected void stopRpcServer() {
    if (server != null) {
      server.stop();
    }
  }

  /** @return the address the umbilical server is listening on; null before start */
  public InetSocketAddress getAddress() {
    return address;
  }

  @Override
  public long getProtocolVersion(String protocol, long clientVersion)
      throws IOException {
    return versionID;
  }

  @Override
  public ProtocolSignature getProtocolSignature(String protocol,
      long clientVersion, int clientMethodsHash) throws IOException {
    return ProtocolSignature.getProtocolSignature(this, protocol,
        clientVersion, clientMethodsHash);
  }

  /**
   * Hands the next task (if any) to a requesting container. Unknown or
   * unregistered containers, and containers whose pipeline has no more work,
   * receive {@link #TASK_FOR_INVALID_JVM} telling the child JVM to exit.
   */
  @Override
  public ContainerTask getTask(ContainerContext containerContext)
      throws IOException {
    ContainerTask task = null;
    if (containerContext == null || containerContext.getContainerIdentifier() == null) {
      LOG.info("Invalid task request with an empty containerContext or containerId");
      task = TASK_FOR_INVALID_JVM;
    } else {
      ContainerId containerId = ConverterUtils.toContainerId(containerContext
          .getContainerIdentifier());
      if (LOG.isDebugEnabled()) {
        LOG.debug("Container with id: " + containerId + " asked for a task");
      }
      if (!registeredContainers.containsKey(containerId)) {
        if (context.getAllContainers().get(containerId) == null) {
          LOG.info("Container with id: " + containerId
              + " is invalid and will be killed");
        } else {
          LOG.info("Container with id: " + containerId
              + " is valid, but no longer registered, and will be killed");
        }
        task = TASK_FOR_INVALID_JVM;
      } else {
        pingContainerHeartbeatHandler(containerId);
        AMContainerTask taskContext = pullTaskAttemptContext(containerId);
        if (taskContext.shouldDie()) {
          LOG.info("No more tasks for container with id : " + containerId
              + ". Asking it to die");
          task = TASK_FOR_INVALID_JVM; // i.e. ask the child to die.
        } else {
          if (taskContext.getTask() == null) {
            // No task assigned yet; the container will poll again.
            if (LOG.isDebugEnabled()) {
              LOG.debug("No task currently assigned to Container with id: "
                  + containerId);
            }
          } else {
            // Bind the attempt to this container before shipping the task,
            // then notify the rest of the AM that the attempt is running.
            registerTaskAttempt(taskContext.getTask().getTaskAttemptID(),
                containerId);
            task = new ContainerTask(taskContext.getTask(), false,
                convertLocalResourceMap(taskContext.getAdditionalResources()),
                taskContext.getCredentials(), taskContext.haveCredentialsChanged());
            context.getEventHandler().handle(
                new TaskAttemptEventStartedRemotely(taskContext.getTask()
                    .getTaskAttemptID(), containerId, context
                    .getApplicationACLs()));
            LOG.info("Container with id: " + containerId + " given task: "
                + taskContext.getTask().getTaskAttemptID());
          }
        }
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("getTask returning task: " + task);
    }
    return task;
  }

  /**
   * Child checking whether it can commit.
   *
   * <br/>
   * The child repeatedly polls the ApplicationMaster via
   * {@link Task#canCommit(TezTaskAttemptID)}. This is a legacy of the
   * centralized commit protocol handling by the JobTracker.
   */
  @Override
  public boolean canCommit(TezTaskAttemptID taskAttemptId) throws IOException {
    LOG.info("Commit go/no-go request from " + taskAttemptId.toString());
    // An attempt is asking if it can commit its output. This can be decided
    // only by the task which is managing the multiple attempts. So redirect the
    // request there.
    taskHeartbeatHandler.progressing(taskAttemptId);
    pingContainerHeartbeatHandler(taskAttemptId);
    DAG job = context.getCurrentDAG();
    Task task =
        job.getVertex(taskAttemptId.getTaskID().getVertexID()).
            getTask(taskAttemptId.getTaskID());
    return task.canCommit(taskAttemptId);
  }

  /**
   * Removes the attempt -> container binding, e.g. when an attempt completes.
   * Logs and returns quietly when the attempt or container is not known.
   */
  @Override
  public void unregisterTaskAttempt(TezTaskAttemptID attemptId) {
    ContainerId containerId = attemptToInfoMap.get(attemptId);
    if (containerId == null) {
      LOG.warn("Unregister task attempt: " + attemptId + " from unknown container");
      return;
    }
    ContainerInfo containerInfo = registeredContainers.get(containerId);
    if (containerInfo == null) {
      LOG.warn("Unregister task attempt: " + attemptId +
          " from non-registered container: " + containerId);
      return;
    }
    synchronized (containerInfo) {
      containerInfo.currentAttemptId = null;
      attemptToInfoMap.remove(attemptId);
    }
  }

  // Pulls the next task context for the container from its AMContainer.
  // NOTE(review): assumes the container still exists in the AM's container map;
  // getTask() only calls this for registered containers.
  public AMContainerTask pullTaskAttemptContext(ContainerId containerId) {
    AMContainerImpl container = (AMContainerImpl) context.getAllContainers()
        .get(containerId);
    return container.pullTaskContext();
  }

  /**
   * Registers a newly launched container so it may receive tasks.
   *
   * @throws TezUncheckedException if the container is already registered
   */
  @Override
  public void registerRunningContainer(ContainerId containerId) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("ContainerId: " + containerId
          + " registered with TaskAttemptListener");
    }
    ContainerInfo oldInfo = registeredContainers.put(containerId,
        new ContainerInfo(containerId));
    if (oldInfo != null) {
      throw new TezUncheckedException(
          "Multiple registrations for containerId: " + containerId);
    }
  }

  /**
   * Binds a task attempt to a registered container (1:1 in each direction).
   *
   * @throws TezUncheckedException if the container is unknown, already has an
   *         attempt, or the attempt is already assigned elsewhere
   */
  @Override
  public void registerTaskAttempt(TezTaskAttemptID attemptId,
      ContainerId containerId) {
    ContainerInfo containerInfo = registeredContainers.get(containerId);
    if (containerInfo == null) {
      throw new TezUncheckedException("Registering task attempt: "
          + attemptId + " to unknown container: " + containerId);
    }
    synchronized (containerInfo) {
      if (containerInfo.currentAttemptId != null) {
        throw new TezUncheckedException("Registering task attempt: "
            + attemptId + " to container: " + containerId
            + " with existing assignment to: " + containerInfo.currentAttemptId);
      }
      containerInfo.currentAttemptId = attemptId;
      ContainerId containerIdFromMap = attemptToInfoMap.put(attemptId, containerId);
      if (containerIdFromMap != null) {
        throw new TezUncheckedException("Registering task attempt: "
            + attemptId + " to container: " + containerId
            + " when already assigned to: " + containerIdFromMap);
      }
    }
  }

  @Override
  public void unregisterRunningContainer(ContainerId containerId) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Unregistering Container from TaskAttemptListener: "
          + containerId);
    }
    registeredContainers.remove(containerId);
  }

  private void pingContainerHeartbeatHandler(ContainerId containerId) {
    containerHeartbeatHandler.pinged(containerId);
  }

  // Pings the heartbeat handler for the container currently hosting the
  // given attempt; warns when the attempt has no known container.
  private void pingContainerHeartbeatHandler(TezTaskAttemptID taskAttemptId) {
    ContainerId containerId = attemptToInfoMap.get(taskAttemptId);
    if (containerId != null) {
      containerHeartbeatHandler.pinged(containerId);
    } else {
      LOG.warn("Handling communication from attempt: " + taskAttemptId
          + ", ContainerId not known for this attempt");
    }
  }

  /**
   * Processes a container heartbeat: validates the request sequence, routes
   * any task-generated events to the owning vertex, and returns pending events
   * for the attempt. Duplicate requests (same id) receive the cached response;
   * unregistered containers are told to die.
   */
  @Override
  public TezHeartbeatResponse heartbeat(TezHeartbeatRequest request)
      throws IOException, TezException {
    ContainerId containerId = ConverterUtils.toContainerId(request
        .getContainerIdentifier());
    long requestId = request.getRequestId();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Received heartbeat from container"
          + ", request=" + request);
    }
    ContainerInfo containerInfo = registeredContainers.get(containerId);
    if (containerInfo == null) {
      TezHeartbeatResponse response = new TezHeartbeatResponse();
      response.setLastRequestId(requestId);
      response.setShouldDie();
      return response;
    }
    synchronized (containerInfo) {
      pingContainerHeartbeatHandler(containerId);
      if (containerInfo.lastRequestId == requestId) {
        // The container re-sent a request whose response it never received.
        LOG.warn("Old sequenceId received: " + requestId
            + ", Re-sending last response to client");
        return containerInfo.lastReponse;
      }
      TezHeartbeatResponse response = new TezHeartbeatResponse();
      response.setLastRequestId(requestId);
      TezTaskAttemptID taskAttemptID = request.getCurrentTaskAttemptID();
      if (taskAttemptID != null) {
        ContainerId containerIdFromMap = attemptToInfoMap.get(taskAttemptID);
        if (containerIdFromMap == null || !containerIdFromMap.equals(containerId)) {
          throw new TezException("Attempt " + taskAttemptID
              + " is not recognized for heartbeat");
        }
        // Heartbeats must arrive strictly in sequence.
        // BUG FIX: the expected value must be parenthesized, otherwise string
        // concatenation appended the literal "1" instead of adding it.
        if (containerInfo.lastRequestId + 1 != requestId) {
          throw new TezException("Container " + containerId
              + " has invalid request id. Expected: "
              + (containerInfo.lastRequestId + 1)
              + " and actual: " + requestId);
        }
        List<TezEvent> inEvents = request.getEvents();
        if (LOG.isDebugEnabled()) {
          LOG.debug("Ping from " + taskAttemptID.toString() +
              " events: " + (inEvents != null ? inEvents.size() : -1));
        }
        if (inEvents != null && !inEvents.isEmpty()) {
          // Route task-generated events to the vertex that owns the attempt.
          TezVertexID vertexId = taskAttemptID.getTaskID().getVertexID();
          context.getEventHandler().handle(
              new VertexEventRouteEvent(vertexId, inEvents));
        }
        taskHeartbeatHandler.pinged(taskAttemptID);
        List<TezEvent> outEvents = context
            .getCurrentDAG()
            .getVertex(taskAttemptID.getTaskID().getVertexID())
            .getTask(taskAttemptID.getTaskID())
            .getTaskAttemptTezEvents(taskAttemptID, request.getStartIndex(),
                request.getMaxEvents());
        response.setEvents(outEvents);
      }
      containerInfo.lastRequestId = requestId;
      containerInfo.lastReponse = response;
      return response;
    }
  }

  /**
   * Converts a YARN LocalResource map to the Tez equivalent.
   *
   * @param ylrs YARN resources keyed by name; may be null
   * @return a (possibly empty) map of converted resources, never null
   * @throws IOException if a resource URI cannot be converted
   */
  private Map<String, TezLocalResource> convertLocalResourceMap(Map<String, LocalResource> ylrs)
      throws IOException {
    Map<String, TezLocalResource> tlrs = Maps.newHashMap();
    if (ylrs != null) {
      for (Entry<String, LocalResource> ylrEntry : ylrs.entrySet()) {
        TezLocalResource tlr;
        try {
          tlr = TezConverterUtils.convertYarnLocalResourceToTez(ylrEntry.getValue());
        } catch (URISyntaxException e) {
          throw new IOException(e);
        }
        tlrs.put(ylrEntry.getKey(), tlr);
      }
    }
    return tlrs;
  }
}
| |
package org.spongycastle.math.ec;
import java.math.BigInteger;
/**
 * Static utilities for computing (windowed) non-adjacent-form (NAF)
 * representations of scalars, the joint sparse form (JSF) of scalar pairs,
 * and for building/caching the per-point precomputation tables used by
 * WNAF-based elliptic-curve point multipliers.
 */
public abstract class WNafUtil
{
    /** Key under which WNAF precomputation data is cached on a point. */
    public static final String PRECOMP_NAME = "bc_wnaf";

    // Bit-length cutoffs at which getWindowSize(int) increments the window width.
    private static final int[] DEFAULT_WINDOW_SIZE_CUTOFFS = new int[]{ 13, 41, 121, 337, 897, 2305 };

    // Canonical empty results, returned for zero scalars to avoid allocation.
    private static final byte[] EMPTY_BYTES = new byte[0];
    private static final int[] EMPTY_INTS = new int[0];
    private static final ECPoint[] EMPTY_POINTS = new ECPoint[0];

    /**
     * Computes the NAF of {@code k} in a compact encoding: each int packs a
     * signed non-zero digit in its upper 16 bits and the count of preceding
     * zero digits in its lower 16 bits.
     *
     * @param k the scalar; must have bit length below 2^16
     * @return the compact NAF, or an empty array when {@code k} is zero
     */
    public static int[] generateCompactNaf(BigInteger k)
    {
        if ((k.bitLength() >>> 16) != 0)
        {
            throw new IllegalArgumentException("'k' must have bitlength < 2^16");
        }
        if (k.signum() == 0)
        {
            return EMPTY_INTS;
        }
        // Non-zero NAF digits occur exactly where the bits of 3k and k differ.
        BigInteger _3k = k.shiftLeft(1).add(k);
        int bits = _3k.bitLength();
        // A NAF has no two adjacent non-zero digits, so at most bits/2 entries.
        int[] naf = new int[bits >> 1];
        BigInteger diff = _3k.xor(k);
        int highBit = bits - 1, length = 0, zeroes = 0;
        for (int i = 1; i < highBit; ++i)
        {
            if (!diff.testBit(i))
            {
                ++zeroes;
                continue;
            }
            // The digit's sign is opposite to the corresponding bit of k.
            int digit = k.testBit(i) ? -1 : 1;
            naf[length++] = (digit << 16) | zeroes;
            // The next position is guaranteed zero by the NAF property; skip it.
            zeroes = 1;
            ++i;
        }
        // The most significant NAF digit is always +1.
        naf[length++] = (1 << 16) | zeroes;
        if (naf.length > length)
        {
            naf = trim(naf, length);
        }
        return naf;
    }

    /**
     * Computes the width-{@code width} window NAF of {@code k} in the same
     * compact (digit &lt;&lt; 16 | zeroes) encoding as
     * {@link #generateCompactNaf(BigInteger)}.
     *
     * @param width window width, in [2, 16]
     * @param k the scalar; must have bit length below 2^16
     * @return the compact window NAF, or an empty array when {@code k} is zero
     */
    public static int[] generateCompactWindowNaf(int width, BigInteger k)
    {
        if (width == 2)
        {
            return generateCompactNaf(k);
        }
        if (width < 2 || width > 16)
        {
            throw new IllegalArgumentException("'width' must be in the range [2, 16]");
        }
        if ((k.bitLength() >>> 16) != 0)
        {
            throw new IllegalArgumentException("'k' must have bitlength < 2^16");
        }
        if (k.signum() == 0)
        {
            return EMPTY_INTS;
        }
        int[] wnaf = new int[k.bitLength() / width + 1];
        // 2^width and a mask and sign bit set accordingly
        int pow2 = 1 << width;
        int mask = pow2 - 1;
        int sign = pow2 >>> 1;
        boolean carry = false;
        int length = 0, pos = 0;
        while (pos <= k.bitLength())
        {
            // Positions whose bit equals the pending carry encode as zero digits.
            if (k.testBit(pos) == carry)
            {
                ++pos;
                continue;
            }
            k = k.shiftRight(pos);
            int digit = k.intValue() & mask;
            if (carry)
            {
                ++digit;
            }
            // Digits >= 2^(width-1) are mapped to negatives, carrying 1 upward.
            carry = (digit & sign) != 0;
            if (carry)
            {
                digit -= pow2;
            }
            int zeroes = length > 0 ? pos - 1 : pos;
            wnaf[length++] = (digit << 16) | zeroes;
            pos = width;
        }
        // Reduce the WNAF array to its actual length
        if (wnaf.length > length)
        {
            wnaf = trim(wnaf, length);
        }
        return wnaf;
    }

    /**
     * Computes the joint sparse form (JSF) of the pair (g, h), used for
     * simultaneous point multiplication. Each output byte packs the digit for
     * g in its high nibble and the digit for h in its low nibble; digits are
     * in {-1, 0, 1}, least significant position first.
     */
    public static byte[] generateJSF(BigInteger g, BigInteger h)
    {
        int digits = Math.max(g.bitLength(), h.bitLength()) + 1;
        byte[] jsf = new byte[digits];
        BigInteger k0 = g, k1 = h;
        int j = 0, d0 = 0, d1 = 0;
        int offset = 0;
        while ((d0 | d1) != 0 || k0.bitLength() > offset || k1.bitLength() > offset)
        {
            // Examine the low 3 bits (plus pending carry) of each scalar.
            int n0 = ((k0.intValue() >>> offset) + d0) & 7, n1 = ((k1.intValue() >>> offset) + d1) & 7;
            int u0 = n0 & 1;
            if (u0 != 0)
            {
                u0 -= (n0 & 2);
                if ((n0 + u0) == 4 && (n1 & 3) == 2)
                {
                    u0 = -u0;
                }
            }
            int u1 = n1 & 1;
            if (u1 != 0)
            {
                u1 -= (n1 & 2);
                if ((n1 + u1) == 4 && (n0 & 3) == 2)
                {
                    u1 = -u1;
                }
            }
            // Update the carries for the next position.
            if ((d0 << 1) == 1 + u0)
            {
                d0 ^= 1;
            }
            if ((d1 << 1) == 1 + u1)
            {
                d1 ^= 1;
            }
            // Periodically shift the scalars down so intValue() keeps exposing
            // the bits being processed (ints hold only 32 bits).
            if (++offset == 30)
            {
                offset = 0;
                k0 = k0.shiftRight(30);
                k1 = k1.shiftRight(30);
            }
            jsf[j++] = (byte)((u0 << 4) | (u1 & 0xF));
        }
        // Reduce the JSF array to its actual length
        if (jsf.length > j)
        {
            jsf = trim(jsf, j);
        }
        return jsf;
    }

    /**
     * Computes the NAF of {@code k}, one signed digit per byte, least
     * significant digit first.
     *
     * @return the NAF digits, or an empty array when {@code k} is zero
     */
    public static byte[] generateNaf(BigInteger k)
    {
        if (k.signum() == 0)
        {
            return EMPTY_BYTES;
        }
        // Non-zero NAF digits occur exactly where the bits of 3k and k differ.
        BigInteger _3k = k.shiftLeft(1).add(k);
        int digits = _3k.bitLength() - 1;
        byte[] naf = new byte[digits];
        BigInteger diff = _3k.xor(k);
        for (int i = 1; i < digits; ++i)
        {
            if (diff.testBit(i))
            {
                naf[i - 1] = (byte)(k.testBit(i) ? -1 : 1);
                // The adjacent digit is guaranteed zero; skip it.
                ++i;
            }
        }
        // The leading digit is always +1.
        naf[digits - 1] = 1;
        return naf;
    }

    /**
     * Computes the Window NAF (non-adjacent Form) of an integer.
     * @param width The width <code>w</code> of the Window NAF. The width is
     * defined as the minimal number <code>w</code>, such that for any
     * <code>w</code> consecutive digits in the resulting representation, at
     * most one is non-zero.
     * @param k The integer of which the Window NAF is computed.
     * @return The Window NAF of the given width, such that the following holds:
     * <code>k = &#8721;<sub>i=0</sub><sup>l-1</sup> k<sub>i</sub>2<sup>i</sup>
     * </code>, where the <code>k<sub>i</sub></code> denote the elements of the
     * returned <code>byte[]</code>.
     */
    public static byte[] generateWindowNaf(int width, BigInteger k)
    {
        if (width == 2)
        {
            return generateNaf(k);
        }
        if (width < 2 || width > 8)
        {
            throw new IllegalArgumentException("'width' must be in the range [2, 8]");
        }
        if (k.signum() == 0)
        {
            return EMPTY_BYTES;
        }
        byte[] wnaf = new byte[k.bitLength() + 1];
        // 2^width and a mask and sign bit set accordingly
        int pow2 = 1 << width;
        int mask = pow2 - 1;
        int sign = pow2 >>> 1;
        boolean carry = false;
        int length = 0, pos = 0;
        while (pos <= k.bitLength())
        {
            // Positions whose bit equals the pending carry encode as zero digits.
            if (k.testBit(pos) == carry)
            {
                ++pos;
                continue;
            }
            k = k.shiftRight(pos);
            int digit = k.intValue() & mask;
            if (carry)
            {
                ++digit;
            }
            // Digits >= 2^(width-1) are mapped to negatives, carrying 1 upward.
            carry = (digit & sign) != 0;
            if (carry)
            {
                digit -= pow2;
            }
            // Advance past the zero digits, then store the non-zero digit.
            length += (length > 0) ? pos - 1 : pos;
            wnaf[length++] = (byte)digit;
            pos = width;
        }
        // Reduce the WNAF array to its actual length
        if (wnaf.length > length)
        {
            wnaf = trim(wnaf, length);
        }
        return wnaf;
    }

    /**
     * Returns the number of non-zero digits in the NAF of {@code k},
     * computed without materializing the NAF itself.
     */
    public static int getNafWeight(BigInteger k)
    {
        if (k.signum() == 0)
        {
            return 0;
        }
        // Non-zero NAF digits occur exactly where the bits of 3k and k differ.
        BigInteger _3k = k.shiftLeft(1).add(k);
        BigInteger diff = _3k.xor(k);
        return diff.bitCount();
    }

    /** Fetches (or freshly creates) the WNAF precomputation info cached on {@code p}. */
    public static WNafPreCompInfo getWNafPreCompInfo(ECPoint p)
    {
        return getWNafPreCompInfo(p.getCurve().getPreCompInfo(p, PRECOMP_NAME));
    }

    // Returns the argument when it already is WNAF info; otherwise a fresh,
    // empty instance (any other PreCompInfo type is ignored).
    public static WNafPreCompInfo getWNafPreCompInfo(PreCompInfo preCompInfo)
    {
        if ((preCompInfo != null) && (preCompInfo instanceof WNafPreCompInfo))
        {
            return (WNafPreCompInfo)preCompInfo;
        }
        return new WNafPreCompInfo();
    }

    /**
     * Determine window width to use for a scalar multiplication of the given size.
     *
     * @param bits the bit-length of the scalar to multiply by
     * @return the window size to use
     */
    public static int getWindowSize(int bits)
    {
        return getWindowSize(bits, DEFAULT_WINDOW_SIZE_CUTOFFS);
    }

    /**
     * Determine window width to use for a scalar multiplication of the given size.
     *
     * @param bits the bit-length of the scalar to multiply by
     * @param windowSizeCutoffs a monotonically increasing list of bit sizes at which to increment the window width
     * @return the window size to use
     */
    public static int getWindowSize(int bits, int[] windowSizeCutoffs)
    {
        int w = 0;
        for (; w < windowSizeCutoffs.length; ++w)
        {
            if (bits < windowSizeCutoffs[w])
            {
                break;
            }
        }
        // Minimum returned width is 2 (plain NAF).
        return w + 2;
    }

    /**
     * Maps {@code p} through {@code pointMap} and transfers p's WNAF
     * precomputation table to the mapped point by mapping every table entry,
     * so the table need not be recomputed for the mapped point.
     */
    public static ECPoint mapPointWithPrecomp(ECPoint p, int width, boolean includeNegated,
        ECPointMap pointMap)
    {
        ECCurve c = p.getCurve();
        // Ensure p itself has an up-to-date table of the requested width first.
        WNafPreCompInfo wnafPreCompP = precompute(p, width, includeNegated);
        ECPoint q = pointMap.map(p);
        WNafPreCompInfo wnafPreCompQ = getWNafPreCompInfo(c.getPreCompInfo(q, PRECOMP_NAME));
        ECPoint twiceP = wnafPreCompP.getTwice();
        if (twiceP != null)
        {
            ECPoint twiceQ = pointMap.map(twiceP);
            wnafPreCompQ.setTwice(twiceQ);
        }
        ECPoint[] preCompP = wnafPreCompP.getPreComp();
        ECPoint[] preCompQ = new ECPoint[preCompP.length];
        for (int i = 0; i < preCompP.length; ++i)
        {
            preCompQ[i] = pointMap.map(preCompP[i]);
        }
        wnafPreCompQ.setPreComp(preCompQ);
        if (includeNegated)
        {
            ECPoint[] preCompNegQ = new ECPoint[preCompQ.length];
            for (int i = 0; i < preCompNegQ.length; ++i)
            {
                preCompNegQ[i] = preCompQ[i].negate();
            }
            wnafPreCompQ.setPreCompNeg(preCompNegQ);
        }
        c.setPreCompInfo(q, PRECOMP_NAME, wnafPreCompQ);
        return q;
    }

    /**
     * Ensures the WNAF precomputation table for {@code p} contains the odd
     * multiples 1P, 3P, ..., (2^(width-1)-1)P (and, optionally, their
     * negations), extending any previously cached table, and stores the result
     * back on the curve.
     *
     * @param p the point to precompute multiples of
     * @param width the window width the table must support
     * @param includeNegated whether to also populate the negated table
     * @return the (possibly updated) precomputation info for {@code p}
     */
    public static WNafPreCompInfo precompute(ECPoint p, int width, boolean includeNegated)
    {
        ECCurve c = p.getCurve();
        WNafPreCompInfo wnafPreCompInfo = getWNafPreCompInfo(c.getPreCompInfo(p, PRECOMP_NAME));
        // Table must hold 2^(width-2) odd multiples of p.
        int iniPreCompLen = 0, reqPreCompLen = 1 << Math.max(0, width - 2);
        ECPoint[] preComp = wnafPreCompInfo.getPreComp();
        if (preComp == null)
        {
            preComp = EMPTY_POINTS;
        }
        else
        {
            iniPreCompLen = preComp.length;
        }
        if (iniPreCompLen < reqPreCompLen)
        {
            preComp = resizeTable(preComp, reqPreCompLen);
            if (reqPreCompLen == 1)
            {
                preComp[0] = p.normalize();
            }
            else
            {
                int curPreCompLen = iniPreCompLen;
                if (curPreCompLen == 0)
                {
                    preComp[0] = p;
                    curPreCompLen = 1;
                }
                ECFieldElement iso = null;
                if (reqPreCompLen == 2)
                {
                    preComp[1] = p.threeTimes();
                }
                else
                {
                    ECPoint twiceP = wnafPreCompInfo.getTwice(), last = preComp[curPreCompLen - 1];
                    if (twiceP == null)
                    {
                        twiceP = preComp[0].twice();
                        wnafPreCompInfo.setTwice(twiceP);
                        /*
                         * For Fp curves with Jacobian projective coordinates, use a (quasi-)isomorphism
                         * where 'twiceP' is "affine", so that the subsequent additions are cheaper. This
                         * also requires scaling the initial point's X, Y coordinates, and reversing the
                         * isomorphism as part of the subsequent normalization.
                         *
                         * NOTE: The correctness of this optimization depends on:
                         * 1) additions do not use the curve's A, B coefficients.
                         * 2) no special cases (i.e. Q +/- Q) when calculating 1P, 3P, 5P, ...
                         */
                        if (ECAlgorithms.isFpCurve(c) && c.getFieldSize() >= 64)
                        {
                            switch (c.getCoordinateSystem())
                            {
                            case ECCurve.COORD_JACOBIAN:
                            case ECCurve.COORD_JACOBIAN_CHUDNOVSKY:
                            case ECCurve.COORD_JACOBIAN_MODIFIED:
                            {
                                iso = twiceP.getZCoord(0);
                                twiceP = c.createPoint(twiceP.getXCoord().toBigInteger(), twiceP.getYCoord()
                                    .toBigInteger());
                                ECFieldElement iso2 = iso.square(), iso3 = iso2.multiply(iso);
                                last = last.scaleX(iso2).scaleY(iso3);
                                if (iniPreCompLen == 0)
                                {
                                    preComp[0] = last;
                                }
                                break;
                            }
                            }
                        }
                    }
                    while (curPreCompLen < reqPreCompLen)
                    {
                        /*
                         * Compute the new ECPoints for the precomputation array. The values 1, 3,
                         * 5, ..., 2^(width-1)-1 times p are computed
                         */
                        preComp[curPreCompLen++] = last = last.add(twiceP);
                    }
                }
                /*
                 * Having oft-used operands in affine form makes operations faster.
                 */
                c.normalizeAll(preComp, iniPreCompLen, reqPreCompLen - iniPreCompLen, iso);
            }
        }
        wnafPreCompInfo.setPreComp(preComp);
        if (includeNegated)
        {
            ECPoint[] preCompNeg = wnafPreCompInfo.getPreCompNeg();
            int pos;
            if (preCompNeg == null)
            {
                pos = 0;
                preCompNeg = new ECPoint[reqPreCompLen];
            }
            else
            {
                pos = preCompNeg.length;
                if (pos < reqPreCompLen)
                {
                    preCompNeg = resizeTable(preCompNeg, reqPreCompLen);
                }
            }
            // Negate only the entries not already present in the cached table.
            while (pos < reqPreCompLen)
            {
                preCompNeg[pos] = preComp[pos].negate();
                ++pos;
            }
            wnafPreCompInfo.setPreCompNeg(preCompNeg);
        }
        c.setPreCompInfo(p, PRECOMP_NAME, wnafPreCompInfo);
        return wnafPreCompInfo;
    }

    // Returns a copy of the first 'length' elements of 'a'.
    private static byte[] trim(byte[] a, int length)
    {
        byte[] result = new byte[length];
        System.arraycopy(a, 0, result, 0, result.length);
        return result;
    }

    // Returns a copy of the first 'length' elements of 'a'.
    private static int[] trim(int[] a, int length)
    {
        int[] result = new int[length];
        System.arraycopy(a, 0, result, 0, result.length);
        return result;
    }

    // Returns 'a' grown to 'length' entries, preserving existing contents.
    private static ECPoint[] resizeTable(ECPoint[] a, int length)
    {
        ECPoint[] result = new ECPoint[length];
        System.arraycopy(a, 0, result, 0, a.length);
        return result;
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.OptionalLong;
import java.util.concurrent.PriorityBlockingQueue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.LeaseNotRecoveredException;
import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WAL.Reader;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Streaming access to WAL entries. This class is given a queue of WAL {@link Path}, and continually
* iterates through all the WAL {@link Entry} in the queue. When it's done reading from a Path, it
* dequeues it and starts reading from the next.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
class WALEntryStream implements Closeable {
  private static final Logger LOG = LoggerFactory.getLogger(WALEntryStream.class);
  // reader over the WAL file currently being consumed; null when no file is open
  private Reader reader;
  // the WAL file currently being read from; null when between files
  private Path currentPath;
  // cache of next entry for hasNext()
  private Entry currentEntry;
  // position for the current entry. As now we support peek, which means that the upper layer may
  // choose to return before reading the current entry, so it is not safe to return the value below
  // in getPosition.
  private long currentPositionOfEntry = 0;
  // position after reading current entry
  private long currentPositionOfReader = 0;
  private final ReplicationSourceLogQueue logQueue;
  // identifies which queue inside logQueue this stream consumes
  private final String walGroupId;
  private final FileSystem fs;
  private final Configuration conf;
  private final WALFileLengthProvider walFileLengthProvider;
  // which region server the WALs belong to
  private final ServerName serverName;
  private final MetricsSource metrics;
  /**
   * Create an entry stream over the given queue at the given start position
   * @param logQueue the queue of WAL paths
   * @param conf the {@link Configuration} to use to create {@link Reader} for this stream
   * @param startPosition the position in the first WAL to start reading at
   * @param walFileLengthProvider provides the length of the WAL file
   * @param serverName the server name which all WALs belong to
   * @param metrics the replication metrics
   * @param walGroupId the id used to select this stream's WAL group queue from {@code logQueue}
   * @throws IOException throw IO exception from stream
   */
  public WALEntryStream(ReplicationSourceLogQueue logQueue, Configuration conf,
    long startPosition, WALFileLengthProvider walFileLengthProvider, ServerName serverName,
    MetricsSource metrics, String walGroupId) throws IOException {
    this.logQueue = logQueue;
    this.fs = CommonFSUtils.getWALFileSystem(conf);
    this.conf = conf;
    this.currentPositionOfEntry = startPosition;
    this.walFileLengthProvider = walFileLengthProvider;
    this.serverName = serverName;
    this.metrics = metrics;
    this.walGroupId = walGroupId;
  }
  /**
   * @return true if there is another WAL {@link Entry}
   */
  public boolean hasNext() throws IOException {
    if (currentEntry == null) {
      tryAdvanceEntry();
    }
    return currentEntry != null;
  }
  /**
   * Returns the next WAL entry in this stream but does not advance.
   */
  public Entry peek() throws IOException {
    return hasNext() ? currentEntry: null;
  }
  /**
   * Returns the next WAL entry in this stream and advance the stream.
   */
  public Entry next() throws IOException {
    Entry save = peek();
    // only now that the entry is actually handed out do we publish its end position
    currentPositionOfEntry = currentPositionOfReader;
    currentEntry = null;
    return save;
  }
  /**
   * {@inheritDoc}
   */
  @Override
  public void close() throws IOException {
    closeReader();
  }
  /**
   * @return the position of the last Entry returned by next()
   */
  public long getPosition() {
    return currentPositionOfEntry;
  }
  /**
   * @return the {@link Path} of the current WAL
   */
  public Path getCurrentPath() {
    return currentPath;
  }
  /**
   * Builds a human-readable description of the current replication position, for log messages.
   */
  private String getCurrentPathStat() {
    StringBuilder sb = new StringBuilder();
    if (currentPath != null) {
      sb.append("currently replicating from: ").append(currentPath).append(" at position: ")
        .append(currentPositionOfEntry).append("\n");
    } else {
      sb.append("no replication ongoing, waiting for new log");
    }
    return sb.toString();
  }
  /**
   * Should be called if the stream is to be reused (i.e. used again after hasNext() has returned
   * false)
   */
  public void reset() throws IOException {
    if (reader != null && currentPath != null) {
      resetReader();
    }
  }
  // updates the last-returned-entry position exposed via getPosition()
  private void setPosition(long position) {
    currentPositionOfEntry = position;
  }
  // records which WAL file is currently being read (null when between files)
  private void setCurrentPath(Path path) {
    this.currentPath = path;
  }
  /**
   * Attempts to populate {@link #currentEntry} from the current WAL, retrying once after a reader
   * reset and, when the file is exhausted and no longer being written, dequeueing it and moving on
   * to the next WAL in the queue.
   */
  private void tryAdvanceEntry() throws IOException {
    if (checkReader()) {
      boolean beingWritten = readNextEntryAndRecordReaderPosition();
      LOG.trace("Reading WAL {}; currently open for write={}", this.currentPath, beingWritten);
      if (currentEntry == null && !beingWritten) {
        // no more entries in this log file, and the file is already closed, i.e, rolled
        // Before dequeueing, we should always get one more attempt at reading.
        // This is in case more entries came in after we opened the reader, and the log is rolled
        // while we were reading. See HBASE-6758
        resetReader();
        readNextEntryAndRecordReaderPosition();
        if (currentEntry == null) {
          if (checkAllBytesParsed()) { // now we're certain we're done with this log file
            dequeueCurrentLog();
            if (openNextLog()) {
              readNextEntryAndRecordReaderPosition();
            }
          }
        }
      }
      // if currentEntry != null then just return
      // if currentEntry == null but the file is still being written, then we should not switch to
      // the next log either, just return here and try next time to see if there are more entries in
      // the current file
    }
    // do nothing if we don't have a WAL Reader (e.g. if there's no logs in queue)
  }
  // HBASE-15984 check to see we have in fact parsed all data in a cleanly closed file
  private boolean checkAllBytesParsed() throws IOException {
    // -1 means the wal wasn't closed cleanly.
    final long trailerSize = currentTrailerSize();
    FileStatus stat = null;
    try {
      stat = fs.getFileStatus(this.currentPath);
    } catch (IOException exception) {
      LOG.warn("Couldn't get file length information about log {}, it {} closed cleanly {}",
        currentPath, trailerSize < 0 ? "was not" : "was", getCurrentPathStat());
      metrics.incrUnknownFileLengthForClosedWAL();
    }
    // Here we use currentPositionOfReader instead of currentPositionOfEntry.
    // We only call this method when currentEntry is null so usually they are the same, but there
    // are two exceptions. One is we have nothing in the file but only a header, in this way
    // the currentPositionOfEntry will always be 0 since we have no change to update it. The other
    // is that we reach the end of file, then currentPositionOfEntry will point to the tail of the
    // last valid entry, and the currentPositionOfReader will usually point to the end of the file.
    if (stat != null) {
      if (trailerSize < 0) {
        if (currentPositionOfReader < stat.getLen()) {
          final long skippedBytes = stat.getLen() - currentPositionOfReader;
          // See the commits in HBASE-25924/HBASE-25932 for context.
          LOG.warn("Reached the end of WAL {}. It was not closed cleanly," +
            " so we did not parse {} bytes of data.", currentPath, skippedBytes);
          metrics.incrUncleanlyClosedWALs();
          metrics.incrBytesSkippedInUncleanlyClosedWALs(skippedBytes);
        }
      } else if (currentPositionOfReader + trailerSize < stat.getLen()) {
        LOG.warn(
          "Processing end of WAL {} at position {}, which is too far away from" +
            " reported file length {}. Restarting WAL reading (see HBASE-15983 for details). {}",
          currentPath, currentPositionOfReader, stat.getLen(), getCurrentPathStat());
        setPosition(0);
        resetReader();
        metrics.incrRestartedWALReading();
        metrics.incrRepeatedFileBytes(currentPositionOfReader);
        return false;
      }
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace("Reached the end of " + this.currentPath + " and length of the file is " +
        (stat == null ? "N/A" : stat.getLen()));
    }
    metrics.incrCompletedWAL();
    return true;
  }
  /**
   * Closes the current WAL, removes it from the queue, and resets path/position so the next file
   * starts from offset 0.
   */
  private void dequeueCurrentLog() throws IOException {
    LOG.debug("EOF, closing {}", currentPath);
    closeReader();
    logQueue.remove(walGroupId);
    setCurrentPath(null);
    setPosition(0);
  }
  /**
   * Returns whether the file is opened for writing.
   */
  private boolean readNextEntryAndRecordReaderPosition() throws IOException {
    Entry readEntry = reader.next();
    long readerPos = reader.getPosition();
    OptionalLong fileLength = walFileLengthProvider.getLogFileSizeIfBeingWritten(currentPath);
    if (fileLength.isPresent() && readerPos > fileLength.getAsLong()) {
      // See HBASE-14004, for AsyncFSWAL which uses fan-out, it is possible that we read uncommitted
      // data, so we need to make sure that we do not read beyond the committed file length.
      if (LOG.isDebugEnabled()) {
        LOG.debug("The provider tells us the valid length for " + currentPath + " is " +
          fileLength.getAsLong() + ", but we have advanced to " + readerPos);
      }
      resetReader();
      return true;
    }
    if (readEntry != null) {
      LOG.trace("reading entry: {} ", readEntry);
      metrics.incrLogEditsRead();
      metrics.incrLogReadInBytes(readerPos - currentPositionOfEntry);
    }
    currentEntry = readEntry; // could be null
    this.currentPositionOfReader = readerPos;
    return fileLength.isPresent();
  }
  // closes and clears the reader, if one is open; safe to call repeatedly
  private void closeReader() throws IOException {
    if (reader != null) {
      reader.close();
      reader = null;
    }
  }
  // if we don't have a reader, open a reader on the next log
  private boolean checkReader() throws IOException {
    if (reader == null) {
      return openNextLog();
    }
    return true;
  }
  // open a reader on the next log in queue
  private boolean openNextLog() throws IOException {
    PriorityBlockingQueue<Path> queue = logQueue.getQueue(walGroupId);
    Path nextPath = queue.peek();
    if (nextPath != null) {
      openReader(nextPath);
      if (reader != null) {
        return true;
      }
    } else {
      // no more files in queue, this could only happen for recovered queue.
      setCurrentPath(null);
    }
    return false;
  }
  /**
   * Retries the open against the archive location when the WAL is no longer at {@code path};
   * rethrows the original {@link FileNotFoundException} if no archived copy can be located.
   */
  private void handleFileNotFound(Path path, FileNotFoundException fnfe) throws IOException {
    // If the log was archived, continue reading from there
    Path archivedLog = AbstractFSWALProvider.findArchivedLog(path, conf);
    // archivedLog can be null if unable to locate in archiveDir.
    if (archivedLog != null) {
      openReader(archivedLog);
    } else {
      throw fnfe;
    }
  }
  /**
   * Opens a reader on {@code path} (or resets the existing one if the path is unchanged),
   * handling archived files, un-recovered leases, and a known HDFS NPE race. On the
   * lease/NPE paths the reader is left null so the caller retries later.
   */
  private void openReader(Path path) throws IOException {
    try {
      // Detect if this is a new file, if so get a new reader else
      // reset the current reader so that we see the new data
      if (reader == null || !getCurrentPath().equals(path)) {
        closeReader();
        reader = WALFactory.createReader(fs, path, conf);
        seek();
        setCurrentPath(path);
      } else {
        resetReader();
      }
    } catch (FileNotFoundException fnfe) {
      handleFileNotFound(path, fnfe);
    } catch (RemoteException re) {
      IOException ioe = re.unwrapRemoteException(FileNotFoundException.class);
      if (!(ioe instanceof FileNotFoundException)) {
        throw ioe;
      }
      handleFileNotFound(path, (FileNotFoundException)ioe);
    } catch (LeaseNotRecoveredException lnre) {
      // HBASE-15019 the WAL was not closed due to some hiccup.
      LOG.warn("Try to recover the WAL lease " + path, lnre);
      recoverLease(conf, path);
      reader = null;
    } catch (NullPointerException npe) {
      // Workaround for race condition in HDFS-4380
      // which throws a NPE if we open a file before any data node has the most recent block
      // Just sleep and retry. Will require re-reading compressed WALs for compressionContext.
      LOG.warn("Got NPE opening reader, will retry.");
      reader = null;
    }
  }
  // For HBASE-15019
  private void recoverLease(final Configuration conf, final Path path) {
    try {
      final FileSystem dfs = CommonFSUtils.getWALFileSystem(conf);
      RecoverLeaseFSUtils.recoverFileLease(dfs, path, conf, new CancelableProgressable() {
        @Override
        public boolean progress() {
          LOG.debug("recover WAL lease: " + path);
          return true;
        }
      });
    } catch (IOException e) {
      LOG.warn("unable to recover lease for WAL: " + path, e);
    }
  }
  /**
   * Resets the existing reader and re-seeks to the last returned entry position, falling back to
   * the archived copy of the current WAL if the file has moved.
   */
  private void resetReader() throws IOException {
    try {
      currentEntry = null;
      reader.reset();
      seek();
    } catch (FileNotFoundException fnfe) {
      // If the log was archived, continue reading from there
      Path archivedLog = AbstractFSWALProvider.findArchivedLog(currentPath, conf);
      // archivedLog can be null if unable to locate in archiveDir.
      if (archivedLog != null) {
        openReader(archivedLog);
      } else {
        throw fnfe;
      }
    } catch (NullPointerException npe) {
      throw new IOException("NPE resetting reader, likely HDFS-4380", npe);
    }
  }
  // positions the reader at the last returned entry's end; position 0 means start of file
  private void seek() throws IOException {
    if (currentPositionOfEntry != 0) {
      reader.seek(currentPositionOfEntry);
    }
  }
  /**
   * Returns the trailer size of the current WAL if the reader is a {@link ProtobufLogReader};
   * otherwise -1, which callers treat as "not closed cleanly".
   */
  private long currentTrailerSize() {
    long size = -1L;
    if (reader instanceof ProtobufLogReader) {
      final ProtobufLogReader pblr = (ProtobufLogReader) reader;
      size = pblr.trailerSize();
    }
    return size;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4.http;
import java.util.ArrayList;
import java.util.List;
import io.netty.channel.ChannelHandler;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.component.netty4.NettyConfiguration;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
/**
* Extended configuration for using HTTP with Netty.
*/
@UriParams
public class NettyHttpConfiguration extends NettyConfiguration {
    @UriPath(enums = "http,https", defaultValue = "http") @Metadata(required = "true")
    private String protocol;
    @UriPath @Metadata(required = "true")
    private String path;
    @UriParam(label = "consumer,advanced")
    private boolean urlDecodeHeaders;
    @UriParam(label = "consumer,advanced", defaultValue = "true")
    private boolean mapHeaders = true;
    @UriParam(label = "consumer,advanced")
    private boolean compression;
    @UriParam(label = "producer", defaultValue = "true")
    private boolean throwExceptionOnFailure = true;
    @UriParam(label = "advanced")
    private boolean transferException;
    @UriParam(label = "consumer")
    private boolean matchOnUriPrefix;
    @UriParam
    private boolean bridgeEndpoint;
    @UriParam(label = "consumer,advanced")
    private boolean disableStreamCache;
    @UriParam(label = "consumer", defaultValue = "true")
    private boolean send503whenSuspended = true;
    @UriParam(label = "consumer,advanced", defaultValue = "" + 1024 * 1024)
    private int chunkedMaxContentLength = 1024 * 1024;
    @UriParam(label = "consumer,advanced", defaultValue = "8192")
    private int maxHeaderSize = 8192;
    @UriParam(label = "producer,advanced", defaultValue = "200-299")
    private String okStatusCodeRange = "200-299";
    @UriParam(label = "producer,advanced")
    private boolean useRelativePath;
    public NettyHttpConfiguration() {
        // we need sync=true as http is request/reply by nature
        setSync(true);
        setReuseAddress(true);
        setServerInitializerFactory(new HttpServerInitializerFactory());
        setClientInitializerFactory(new HttpClientInitializerFactory());
    }
    @Override
    public NettyHttpConfiguration copy() {
        try {
            // clone as NettyHttpConfiguration
            NettyHttpConfiguration answer = (NettyHttpConfiguration) clone();
            // make sure the lists are copied in their own instance so the copy is independent
            List<ChannelHandler> encodersCopy = new ArrayList<>(getEncoders());
            answer.setEncoders(encodersCopy);
            List<ChannelHandler> decodersCopy = new ArrayList<>(getDecoders());
            answer.setDecoders(decodersCopy);
            return answer;
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
    public String getProtocol() {
        return protocol;
    }
    /**
     * The protocol to use which is either http or https
     */
    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }
    public boolean isCompression() {
        return compression;
    }
    /**
     * Allow using gzip/deflate for compression on the Netty HTTP server if the client supports it from the HTTP headers.
     */
    public void setCompression(boolean compression) {
        this.compression = compression;
    }
    public boolean isThrowExceptionOnFailure() {
        return throwExceptionOnFailure;
    }
    /**
     * Option to disable throwing the HttpOperationFailedException in case of failed responses from the remote server.
     * This allows you to get all responses regardless of the HTTP status code.
     */
    public void setThrowExceptionOnFailure(boolean throwExceptionOnFailure) {
        this.throwExceptionOnFailure = throwExceptionOnFailure;
    }
    public boolean isTransferException() {
        return transferException;
    }
    /**
     * If enabled and an Exchange failed processing on the consumer side, then the caused Exception will be sent back
     * serialized in the response as an application/x-java-serialized-object content type.
     * On the producer side the exception will be deserialized and thrown as is, instead of the HttpOperationFailedException.
     * The caused exception is required to be serialized.
     * <p/>
     * This is by default turned off. If you enable this then be aware that Java will deserialize the incoming
     * data from the request to Java and that can be a potential security risk.
     */
    public void setTransferException(boolean transferException) {
        this.transferException = transferException;
    }
    public boolean isUrlDecodeHeaders() {
        return urlDecodeHeaders;
    }
    /**
     * If this option is enabled, then during binding from Netty to Camel Message then the header values will be URL decoded
     * (eg %20 will be a space character). Notice this option is used by the default org.apache.camel.component.netty.http.NettyHttpBinding
     * and therefore if you implement a custom org.apache.camel.component.netty4.http.NettyHttpBinding then you would
     * need to decode the headers accordingly to this option.
     */
    public void setUrlDecodeHeaders(boolean urlDecodeHeaders) {
        this.urlDecodeHeaders = urlDecodeHeaders;
    }
    public boolean isMapHeaders() {
        return mapHeaders;
    }
    /**
     * If this option is enabled, then during binding from Netty to Camel Message then the headers will be mapped as well
     * (eg added as header to the Camel Message as well). You can turn off this option to disable this.
     * The headers can still be accessed from the org.apache.camel.component.netty.http.NettyHttpMessage message with
     * the method getHttpRequest() that returns the Netty HTTP request io.netty.handler.codec.http.HttpRequest instance.
     */
    public void setMapHeaders(boolean mapHeaders) {
        this.mapHeaders = mapHeaders;
    }
    public boolean isMatchOnUriPrefix() {
        return matchOnUriPrefix;
    }
    /**
     * Whether or not Camel should try to find a target consumer by matching the URI prefix if no exact match is found.
     */
    public void setMatchOnUriPrefix(boolean matchOnUriPrefix) {
        this.matchOnUriPrefix = matchOnUriPrefix;
    }
    public boolean isBridgeEndpoint() {
        return bridgeEndpoint;
    }
    /**
     * If the option is true, the producer will ignore the Exchange.HTTP_URI header, and use the endpoint's URI for request.
     * You may also set the throwExceptionOnFailure to be false to let the producer send all the fault response back.
     * The consumer working in the bridge mode will skip the gzip compression and WWW URL form encoding (by adding the Exchange.SKIP_GZIP_ENCODING
     * and Exchange.SKIP_WWW_FORM_URLENCODED headers to the consumed exchange).
     */
    public void setBridgeEndpoint(boolean bridgeEndpoint) {
        this.bridgeEndpoint = bridgeEndpoint;
    }
    public String getPath() {
        return path;
    }
    /**
     * Resource path
     */
    public void setPath(String path) {
        this.path = path;
    }
    public boolean isDisableStreamCache() {
        return disableStreamCache;
    }
    /**
     * Determines whether or not the raw input stream from Netty HttpRequest#getContent() is cached or not
     * (Camel will read the stream into a in light-weight memory based Stream caching) cache.
     * By default Camel will cache the Netty input stream to support reading it multiple times to ensure it Camel
     * can retrieve all data from the stream. However you can set this option to true when you for example need to
     * access the raw stream, such as streaming it directly to a file or other persistent store. Mind that
     * if you enable this option, then you cannot read the Netty stream multiple times out of the box, and you would
     * need manually to reset the reader index on the Netty raw stream.
     */
    public void setDisableStreamCache(boolean disableStreamCache) {
        this.disableStreamCache = disableStreamCache;
    }
    public boolean isSend503whenSuspended() {
        return send503whenSuspended;
    }
    /**
     * Whether to send back HTTP status code 503 when the consumer has been suspended.
     * If the option is false then the Netty Acceptor is unbound when the consumer is suspended, so clients cannot connect anymore.
     */
    public void setSend503whenSuspended(boolean send503whenSuspended) {
        this.send503whenSuspended = send503whenSuspended;
    }
    public int getChunkedMaxContentLength() {
        return chunkedMaxContentLength;
    }
    /**
     * Value in bytes the max content length per chunked frame received on the Netty HTTP server.
     */
    public void setChunkedMaxContentLength(int chunkedMaxContentLength) {
        this.chunkedMaxContentLength = chunkedMaxContentLength;
    }
    public int getMaxHeaderSize() {
        return maxHeaderSize;
    }
    /**
     * The maximum length of all headers.
     * If the sum of the length of each header exceeds this value, a {@link io.netty.handler.codec.TooLongFrameException} will be raised.
     */
    public void setMaxHeaderSize(int maxHeaderSize) {
        this.maxHeaderSize = maxHeaderSize;
    }
    // Don't support allowDefaultCodec
    public boolean isAllowDefaultCodec() {
        return false;
    }
    public void setAllowDefaultCodec(boolean allowDefaultCodec) {
        throw new UnsupportedOperationException("You cannot setAllowDefaultCodec here.");
    }
    public String getOkStatusCodeRange() {
        return okStatusCodeRange;
    }
    /**
     * The status codes which is considered a success response. The values are inclusive. The range must be defined as from-to with the dash included.
     * <p/>
     * The default range is <tt>200-299</tt>
     */
    public void setOkStatusCodeRange(String okStatusCodeRange) {
        this.okStatusCodeRange = okStatusCodeRange;
    }
    /**
     * Sets whether to use a relative path in HTTP requests.
     */
    public void setUseRelativePath(boolean useRelativePath) {
        this.useRelativePath = useRelativePath;
    }
    public boolean isUseRelativePath() {
        return this.useRelativePath;
    }
}
| |
package com.herak.bouldershare.data;
import android.annotation.TargetApi;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
/**
* Created by darko on 18.8.2017..
*/
public class BoulderProvider extends ContentProvider{
static final int BOULDER_PROBLEM_INFO = 100;
static final int BOULDER_PROBLEM_INFOS = 101;
static final int HOLDS = 200;
static final int HOLDS_FROM_BOULDER_PROBLEM = 201;
private static final SQLiteQueryBuilder sBoulderProblemQueryBuilder;
private static final SQLiteQueryBuilder sHoldQueryBuilder;
static{
sBoulderProblemQueryBuilder = new SQLiteQueryBuilder();
sHoldQueryBuilder = new SQLiteQueryBuilder();
//This is an inner join which looks like
//weather INNER JOIN location ON weather.location_id = location._id
sBoulderProblemQueryBuilder.setTables(
BoulderContract.BoulderProblemInfoEntry.TABLE_NAME
// + " INNER JOIN " +
// WeatherContract.LocationEntry.TABLE_NAME +
// " ON " + WeatherContract.WeatherEntry.TABLE_NAME +
// "." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY +
// " = " + WeatherContract.LocationEntry.TABLE_NAME +
// "." + WeatherContract.LocationEntry._ID
);
sHoldQueryBuilder.setTables(
BoulderContract.HoldsEntry.TABLE_NAME);
}
private BoulderDbHelper mOpenHelper;
private static final UriMatcher sUriMatcher = buildUriMatcher();
static UriMatcher buildUriMatcher() {
// I know what you're thinking. Why create a UriMatcher when you can use regular
// expressions instead? Because you're not crazy, that's why.
// All paths added to the UriMatcher have a corresponding code to return when a match is
// found. The code passed into the constructor represents the code to return for the root
// URI. It's common to use NO_MATCH as the code for this case.
final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
final String authority = BoulderContract.CONTENT_AUTHORITY;
// For each type of URI you want to add, create a corresponding code.
matcher.addURI(authority, BoulderContract.PATH_BOULDER_PROBLEM_INFO + "/#", BOULDER_PROBLEM_INFO);
matcher.addURI(authority, BoulderContract.PATH_HOLDS + "/#", HOLDS);
matcher.addURI(authority, BoulderContract.PATH_HOLDS, HOLDS_FROM_BOULDER_PROBLEM);
matcher.addURI(authority, BoulderContract.PATH_BOULDER_PROBLEM_INFO, BOULDER_PROBLEM_INFOS);
return matcher;
}
@Override
public boolean onCreate(){
mOpenHelper = new BoulderDbHelper(getContext());
return true;
}
@Override
public String getType(Uri uri) {
// Use the Uri Matcher to determine what kind of URI this is.
final int match = sUriMatcher.match(uri);
switch (match) {
// Student: Uncomment and fill out these two cases
case BOULDER_PROBLEM_INFO:
return BoulderContract.BoulderProblemInfoEntry.CONTENT_ITEM_TYPE;
case BOULDER_PROBLEM_INFOS:
return BoulderContract.BoulderProblemInfoEntry.CONTENT_TYPE;
case HOLDS:
return BoulderContract.HoldsEntry.CONTENT_ITEM_TYPE;
case HOLDS_FROM_BOULDER_PROBLEM:
return BoulderContract.HoldsEntry.CONTENT_TYPE;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
}
private static final String sHoldsWithBoulderProblemIdSelection =
BoulderContract.HoldsEntry.TABLE_NAME +
"." + BoulderContract.HoldsEntry.COLUMN_BOULDER_PROBLEM_ID + " = ?";
private static final String sBoulderProblemWithIdSelection =
BoulderContract.BoulderProblemInfoEntry.TABLE_NAME +
"." + BoulderContract.BoulderProblemInfoEntry._ID + " = ?";
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
String sortOrder) {
// Here's the switch statement that, given a URI, will determine what kind of request it is,
// and query the database accordingly.
Cursor retCursor;
switch (sUriMatcher.match(uri)) {
// "weather/*/*"
case BOULDER_PROBLEM_INFO:
{
retCursor = getBoulderProblemInfo(uri, projection, sortOrder);
break;
}
case BOULDER_PROBLEM_INFOS:
{
retCursor = getBoulderProblemInfos(uri, projection, sortOrder);
break;
}
// "weather/*"
// case HOLDS: {
// retCursor = getHoldWithId(uri, projection, sortOrder);
// break;
// }
// "weather"
case HOLDS_FROM_BOULDER_PROBLEM: {
retCursor = getHoldsWithBoulderProblemId(uri, projection, sortOrder);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
retCursor.setNotificationUri(getContext().getContentResolver(), uri);
return retCursor;
}
private Cursor getHoldsWithBoulderProblemId(Uri uri, String[] projection, String sortOrder) {
String id = BoulderContract.HoldsEntry.getBoulderProblemIdFromUri(uri);
return sHoldQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
sHoldsWithBoulderProblemIdSelection,
new String[]{id},
null,
null,
sortOrder
);
}
private Cursor getBoulderProblemInfo(Uri uri, String[] projection, String sortOrder) {
String id = BoulderContract.BoulderProblemInfoEntry.getBoulderProblemIdFromUri(uri);
return sBoulderProblemQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
sBoulderProblemWithIdSelection,
new String[]{id},
null,
null,
sortOrder
);
}
private Cursor getBoulderProblemInfos(Uri uri, String[] projection, String sortOrder) {
String id = BoulderContract.BoulderProblemInfoEntry.getBoulderProblemIdFromUri(uri);
return sBoulderProblemQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
null,
null,
null,
null,
sortOrder
);
}
@Override
public Uri insert(Uri uri, ContentValues values) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
switch (match) {
case HOLDS: {
long _id = db.insert(BoulderContract.HoldsEntry.TABLE_NAME, null, values);
if ( _id > 0 )
returnUri = BoulderContract.HoldsEntry.buildHoldsUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
break;
}
case BOULDER_PROBLEM_INFO:
case BOULDER_PROBLEM_INFOS:
{
long _id = db.insert(BoulderContract.BoulderProblemInfoEntry.TABLE_NAME, null, values);
if ( _id > 0 )
returnUri = BoulderContract.BoulderProblemInfoEntry.buildBoulderProblemInfoUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
getContext().getContentResolver().notifyChange(uri, null);
return returnUri;
}
@Override
public int bulkInsert(Uri uri, ContentValues[] values) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
switch (match) {
case HOLDS:
case HOLDS_FROM_BOULDER_PROBLEM:
db.beginTransaction();
int returnCount = 0;
try {
for (ContentValues value : values) {
long _id = db.insert(BoulderContract.HoldsEntry.TABLE_NAME, null, value);
if (_id != -1) {
returnCount++;
}
}
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
getContext().getContentResolver().notifyChange(uri, null);
return returnCount;
default:
return super.bulkInsert(uri, values);
}
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
    // Deletes rows from the table matched by the URI and returns the count;
    // unknown URIs are rejected with UnsupportedOperationException.
    final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
    final int match = sUriMatcher.match(uri);
    int rowsDeleted;
    // With a null selection SQLiteDatabase.delete() does not report the number
    // of rows removed; "1" matches every row while keeping the count accurate.
    if(null == selection) selection = "1";
    switch (match) {
        case HOLDS:
        case HOLDS_FROM_BOULDER_PROBLEM:
        {
            rowsDeleted = db.delete(BoulderContract.HoldsEntry.TABLE_NAME, selection, selectionArgs);
            break;
        }
        case BOULDER_PROBLEM_INFO:
        case BOULDER_PROBLEM_INFOS:
        {
            rowsDeleted = db.delete(BoulderContract.BoulderProblemInfoEntry.TABLE_NAME, selection, selectionArgs);
            break;
        }
        default:
            throw new UnsupportedOperationException("Unknown uri: " + uri);
    }
    // Only notify observers when something was actually removed.
    if(rowsDeleted > 0) {
        getContext().getContentResolver().notifyChange(uri, null);
    }
    return rowsDeleted;
}
@Override
public int update(
        Uri uri, ContentValues values, String selection, String[] selectionArgs) {
    /**
     * Updates rows in the table matched by the URI and returns the count of
     * rows changed. Unknown URIs raise UnsupportedOperationException.
     */
    final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
    // Resolve the target table first so the update call appears exactly once.
    final String table;
    switch (sUriMatcher.match(uri)) {
        case HOLDS:
        case HOLDS_FROM_BOULDER_PROBLEM:
            table = BoulderContract.HoldsEntry.TABLE_NAME;
            break;
        case BOULDER_PROBLEM_INFO:
        case BOULDER_PROBLEM_INFOS:
            table = BoulderContract.BoulderProblemInfoEntry.TABLE_NAME;
            break;
        default:
            throw new UnsupportedOperationException("Unknown uri: " + uri);
    }
    final int updated = db.update(table, values, selection, selectionArgs);
    // Only ping content observers when at least one row actually changed.
    if (updated > 0) {
        getContext().getContentResolver().notifyChange(uri, null);
    }
    return updated;
}
@Override
@TargetApi(11)
public void shutdown() {
    // Close the SQLite open-helper before the framework tears the provider down.
    mOpenHelper.close();
    super.shutdown();
}
}
| |
/**
* Copyright (c) 2011-2015, jcabi.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the jcabi.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jcabi.http.mock;
import com.jcabi.log.Logger;
import com.sun.grizzly.tcp.http11.GrizzlyAdapter;
import com.sun.grizzly.tcp.http11.GrizzlyRequest;
import com.sun.grizzly.tcp.http11.GrizzlyResponse;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import lombok.EqualsAndHashCode;
import org.apache.http.HttpHeaders;
import org.hamcrest.Matcher;
/**
 * Mocker of Java Servlet container.
 *
 * <p>Incoming requests are matched against queued {@link Conditional}
 * answers; every request that was served is recorded together with its
 * answer so tests can retrieve it later through {@code take()} and
 * {@code takeAll()}. Both internal queues are concurrent, so requests may
 * arrive from multiple Grizzly worker threads.
 *
 * @author Yegor Bugayenko (yegor@tpc2.com)
 * @version $Id$
 * @since 0.10
 */
@SuppressWarnings("PMD.TooManyMethods")
final class MkGrizzlyAdapter extends GrizzlyAdapter {
    /**
     * The encoding to use.
     */
    private static final String ENCODING = "UTF-8";
    /**
     * The Charset to use.
     */
    private static final Charset CHARSET = Charset.forName(ENCODING);
    /**
     * Queries received, each paired with the answer that was served.
     */
    private final transient Queue<QueryWithAnswer> queue =
        new ConcurrentLinkedQueue<QueryWithAnswer>();
    /**
     * Answers to give conditionally.
     */
    private final transient Queue<Conditional> conditionals =
        new ConcurrentLinkedQueue<Conditional>();
    // @checkstyle ExecutableStatementCount (55 lines)
    @Override
    @SuppressWarnings(
        {
            "PMD.AvoidCatchingThrowable",
            "PMD.AvoidInstantiatingObjectsInLoops",
            "rawtypes"
        }
    )
    public void service(final GrizzlyRequest request,
        final GrizzlyResponse response) {
        try {
            final MkQuery query = new GrizzlyQuery(request);
            final Iterator<Conditional> iter = this.conditionals.iterator();
            boolean matched = false;
            // Serve the first conditional whose matcher accepts this query.
            while (iter.hasNext()) {
                final Conditional cond = iter.next();
                if (cond.matches(query)) {
                    matched = true;
                    final MkAnswer answer = cond.answer();
                    // Record the request/answer pair for later inspection.
                    this.queue.add(new QueryWithAnswer(query, answer));
                    for (final String name : answer.headers().keySet()) {
                        // @checkstyle NestedForDepth (3 lines)
                        for (final String value : answer.headers().get(name)) {
                            response.addHeader(name, value);
                        }
                    }
                    // Diagnostic Server header: class name plus queue sizes.
                    response.addHeader(
                        HttpHeaders.SERVER,
                        String.format(
                            "%s query #%d, %d answer(s) left",
                            this.getClass().getName(),
                            this.queue.size(), this.conditionals.size()
                        )
                    );
                    response.setStatus(answer.status());
                    final byte[] body =
                        answer.body().getBytes(MkGrizzlyAdapter.CHARSET);
                    response.getStream().write(body);
                    response.setContentLength(body.length);
                    // Drop the conditional once its usage count is exhausted;
                    // removal via the iterator is safe on ConcurrentLinkedQueue.
                    if (cond.decrement() == 0) {
                        iter.remove();
                    }
                    break;
                }
            }
            if (!matched) {
                throw new NoSuchElementException("No matching answers found.");
            }
        // @checkstyle IllegalCatch (1 line)
        } catch (final Throwable ex) {
            // Every failure (including the no-match case above) is reported
            // back to the HTTP client as a 500 with the stack trace as body.
            MkGrizzlyAdapter.fail(response, ex);
        }
    }
    /**
     * Give this answer on the next request(s) if they match the given condition
     * a certain number of consecutive times.
     * @param answer Next answer to give
     * @param query The query that should be satisfied to return this answer
     * @param count The number of times this answer can be returned for matching
     *  requests
     */
    public void next(final MkAnswer answer, final Matcher<MkQuery> query,
        final int count) {
        this.conditionals.add(new Conditional(answer, query, count));
    }
    /**
     * Get the oldest request received.
     * @return Request received
     * @throws java.util.NoSuchElementException If nothing was received yet
     */
    public MkQuery take() {
        return this.queue.remove().que;
    }
    /**
     * Get the oldest request received subject to the matching condition.
     * ({@link java.util.NoSuchElementException} if no elements satisfy the
     * condition).
     * @param matcher The matcher specifying the condition
     * @return Request received satisfying the matcher
     */
    public MkQuery take(final Matcher<MkAnswer> matcher) {
        MkQuery result = null;
        final Iterator<QueryWithAnswer> iter = this.queue.iterator();
        // Matching is done against the stored answer, not the query itself;
        // the matched entry is consumed (removed from the queue).
        while (iter.hasNext()) {
            final QueryWithAnswer candidate = iter.next();
            if (matcher.matches(candidate.answer())) {
                result = candidate.query();
                iter.remove();
                break;
            }
        }
        if (result == null) {
            // @checkstyle MultipleStringLiterals (1 line)
            throw new NoSuchElementException("No matching results found");
        }
        return result;
    }
    /**
     * Get the all requests received satisfying the given matcher.
     * ({@link java.util.NoSuchElementException} if no elements satisfy the
     * condition).
     * @param matcher The matcher specifying the condition
     * @return Collection of all requests satisfying the matcher, ordered from
     *  oldest to newest.
     */
    public Collection<MkQuery> takeAll(final Matcher<MkAnswer> matcher) {
        final Collection<MkQuery> results = new LinkedList<MkQuery>();
        final Iterator<QueryWithAnswer> iter = this.queue.iterator();
        // Consume every entry whose answer satisfies the matcher.
        while (iter.hasNext()) {
            final QueryWithAnswer candidate = iter.next();
            if (matcher.matches(candidate.answer())) {
                results.add(candidate.query());
                iter.remove();
            }
        }
        if (results.isEmpty()) {
            throw new NoSuchElementException("No matching results found");
        }
        return results;
    }
    /**
     * Total number of available queue.
     * @return Number of them
     */
    public int queries() {
        return this.queue.size();
    }
    /**
     * Notify this response about failure.
     * @param response The response to notify
     * @param failure The failure just happened
     */
    private static void fail(final GrizzlyResponse<?> response,
        final Throwable failure) {
        response.setStatus(HttpURLConnection.HTTP_INTERNAL_ERROR);
        final PrintWriter writer;
        try {
            writer = new PrintWriter(
                new OutputStreamWriter(
                    response.getStream(),
                    MkGrizzlyAdapter.ENCODING
                )
            );
        } catch (final UnsupportedEncodingException ex) {
            // Cannot happen in practice: ENCODING is the constant "UTF-8".
            throw new IllegalStateException(ex);
        }
        try {
            // The response body is the formatted stack trace of the failure.
            writer.print(Logger.format("%[exception]s", failure));
        } finally {
            writer.close();
        }
    }
    /**
     * Answer with condition.
     */
    @EqualsAndHashCode(of = { "answr", "condition" })
    private static final class Conditional {
        /**
         * The MkAnswer.
         */
        private final transient MkAnswer answr;
        /**
         * Condition for this answer.
         */
        private final transient Matcher<MkQuery> condition;
        /**
         * The number of times the answer is expected to appear.
         */
        private final transient AtomicInteger count;
        /**
         * Ctor.
         * @param ans The answer.
         * @param matcher The matcher.
         * @param times Number of times the answer should appear.
         */
        Conditional(final MkAnswer ans, final Matcher<MkQuery> matcher,
            final int times) {
            this.answr = ans;
            this.condition = matcher;
            if (times < 1) {
                throw new IllegalArgumentException(
                    "Answer must be returned at least once."
                );
            } else {
                this.count = new AtomicInteger(times);
            }
        }
        /**
         * Get the answer.
         * @return The answer
         */
        public MkAnswer answer() {
            return this.answr;
        }
        /**
         * Does the query match the answer?
         * @param query The query to match
         * @return True, if the query matches the condition
         */
        public boolean matches(final MkQuery query) {
            return this.condition.matches(query);
        }
        /**
         * Decrement the count for this conditional.
         * @return The updated count
         */
        public int decrement() {
            return this.count.decrementAndGet();
        }
    }
    /**
     * Query with answer.
     */
    @EqualsAndHashCode(of = { "answr", "que" })
    private static final class QueryWithAnswer {
        /**
         * The answer.
         */
        private final transient MkAnswer answr;
        /**
         * The query.
         */
        private final transient MkQuery que;
        /**
         * Ctor.
         * @param qry The query
         * @param ans The answer
         */
        QueryWithAnswer(final MkQuery qry, final MkAnswer ans) {
            this.answr = ans;
            this.que = qry;
        }
        /**
         * Get the query.
         * @return The query.
         */
        public MkQuery query() {
            return this.que;
        }
        /**
         * Get the answer.
         * @return Answer
         */
        public MkAnswer answer() {
            return this.answr;
        }
    }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.auth.provider.ldap;
import static org.wildfly.security._private.ElytronMessages.log;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;

import org.wildfly.common.Assert;
import org.wildfly.security.auth.server.NameRewriter;
/**
* Builder for the security realm implementation backed by LDAP.
*
* @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
*/
public class LdapSecurityRealmBuilder {

    // Guards against reconfiguration after build() has been invoked.
    private boolean built = false;
    private DirContextFactory dirContextFactory;
    private NameRewriter nameRewriter = NameRewriter.IDENTITY_REWRITER;
    private LdapSecurityRealm.PrincipalMapping principalMapping;

    private LdapSecurityRealmBuilder() {
    }

    /**
     * Construct a new instance.
     *
     * @return the new builder instance
     */
    public static LdapSecurityRealmBuilder builder() {
        return new LdapSecurityRealmBuilder();
    }

    /**
     * Set the directory context factory.
     *
     * @param dirContextFactory the directory context factory
     * @return this builder
     */
    public LdapSecurityRealmBuilder setDirContextFactory(final DirContextFactory dirContextFactory) {
        assertNotBuilt();
        this.dirContextFactory = dirContextFactory;
        return this;
    }

    /**
     * Add a name rewriter to this builder.
     *
     * @param nameRewriter the name rewriter
     * @return this builder
     */
    public LdapSecurityRealmBuilder setNameRewriter(final NameRewriter nameRewriter) {
        Assert.checkNotNullParam("nameRewriter", nameRewriter);
        assertNotBuilt();
        this.nameRewriter = nameRewriter;
        return this;
    }

    /**
     * Add a principal mapping to this builder.
     *
     * @param principalMapping the principal mapping to use
     * @return this builder
     */
    public LdapSecurityRealmBuilder setPrincipalMapping(LdapSecurityRealm.PrincipalMapping principalMapping) {
        assertNotBuilt();
        this.principalMapping = principalMapping;
        return this;
    }

    /**
     * Build this realm.
     *
     * @return the built realm
     */
    public LdapSecurityRealm build() {
        assertNotBuilt();
        if (dirContextFactory == null) {
            throw log.noDirContextFactorySet();
        }
        if (principalMapping == null) {
            throw log.noPrincipalMappingDefinition();
        }
        built = true;
        return new LdapSecurityRealm(dirContextFactory, nameRewriter, principalMapping);
    }

    private void assertNotBuilt() {
        if (built) {
            throw log.builderAlreadyBuilt();
        }
    }

    /**
     * A builder for a principal mapping.
     */
    public static class PrincipalMappingBuilder {

        private String searchDn = null;
        private boolean searchRecursive = false;
        private String nameAttribute;
        private String passwordAttribute = UserPasswordCredentialLoader.DEFAULT_USER_PASSWORD_ATTRIBUTE_NAME;
        private int searchTimeLimit = 10000;
        private List<Attribute> attributes = new ArrayList<>();

        public static PrincipalMappingBuilder builder() {
            return new PrincipalMappingBuilder();
        }

        /**
         * <p>Set the name of the context to be used when executing queries.
         *
         * <p>This option is specially useful when authenticating users based on names that don't use a X.500 format such as <em>plainUser</em>.
         * In this case, you must also provide {@link #setRdnIdentifier(String)} with the attribute name that contains the user name.</p>
         *
         * <p>If the names used to authenticate users are based on the X.500 format, this configuration can be suppressed.
         *
         * <p>Please note that by using this option the realm is able to authenticate users based on their simple or X.500 names.
         *
         * @param searchDn the name of the context to search
         * @return this builder
         */
        public PrincipalMappingBuilder setSearchDn(final String searchDn) {
            this.searchDn = searchDn;
            return this;
        }

        /**
         * Indicate if queries are searchRecursive, searching the entire subtree rooted at the name specified in {@link #setSearchDn(String)}.
         * Otherwise search one level of the named context.
         *
         * @return this builder
         */
        public PrincipalMappingBuilder searchRecursive() {
            this.searchRecursive = true;
            return this;
        }

        /**
         * Sets the time limit of the SearchControls in milliseconds.
         *
         * @param limit the limit in milliseconds. Defaults to 10000 milliseconds.
         * @return this builder
         */
        public PrincipalMappingBuilder setSearchTimeLimit(int limit) {
            this.searchTimeLimit = limit;
            return this;
        }

        /**
         * Set the name of the attribute in LDAP that holds the user name.
         *
         * @param nameAttribute the name attribute
         * @return this builder
         */
        public PrincipalMappingBuilder setRdnIdentifier(final String nameAttribute) {
            this.nameAttribute = nameAttribute;
            return this;
        }

        /**
         * <p>Set the name of the attribute in LDAP that holds the user's password. Use this option if you want to
         * obtain credentials from Ldap based on the built-in supported types.
         *
         * @param passwordAttribute the password attribute name. Defaults to {@link UserPasswordCredentialLoader#DEFAULT_USER_PASSWORD_ATTRIBUTE_NAME}.
         * @return this builder
         */
        public PrincipalMappingBuilder setPasswordAttribute(final String passwordAttribute) {
            this.passwordAttribute = passwordAttribute;
            return this;
        }

        /**
         * Define an attribute mapping configuration.
         *
         * @param attributes one or more {@link org.wildfly.security.auth.provider.ldap.LdapSecurityRealmBuilder.PrincipalMappingBuilder.Attribute} configuration
         * @return this builder
         */
        public PrincipalMappingBuilder map(Attribute... attributes) {
            this.attributes.addAll(Arrays.asList(attributes));
            return this;
        }

        /**
         * Build this principal mapping.
         *
         * @return a {@link org.wildfly.security.auth.provider.ldap.LdapSecurityRealm.PrincipalMapping} instance with all the configuration.
         */
        public LdapSecurityRealm.PrincipalMapping build() {
            return new LdapSecurityRealm.PrincipalMapping(
                    searchDn, searchRecursive, searchTimeLimit, nameAttribute, this.passwordAttribute, this.attributes);
        }

        public static class Attribute {

            private final String ldapName;
            private final String searchDn;
            private final String filter;
            private String name;
            private String rdn;

            /**
             * Create an attribute mapping based on the given attribute in LDAP.
             *
             * @param ldapName the name of the attribute in LDAP from where values are obtained
             * @return this builder
             */
            public static Attribute from(String ldapName) {
                Assert.checkNotNullParam("ldapName", ldapName);
                return new Attribute(ldapName);
            }

            /**
             * <p>Create an attribute mapping based on the results of the given {@code filter}.
             *
             * <p>The {@code filter} <em>may</em> have one and exactly one <em>{0}</em> string that will be used to replace with the distinguished
             * name of the identity. In this case, the filter is specially useful when the values for this attribute should be obtained from a
             * separated entry. For instance, retrieving roles from entries with a object class of <em>groupOfNames</em> where the identity's DN is
             * a value of a <em>member</em> attribute.
             *
             * @param searchDn the name of the context to be used when executing the filter
             * @param filter the filter that is going to be used to search for entries and obtain values for this attribute
             * @param ldapName the name of the attribute in LDAP from where the values are obtained
             * @return this builder
             */
            public static Attribute fromFilter(String searchDn, String filter, String ldapName) {
                Assert.checkNotNullParam("searchDn", searchDn);
                Assert.checkNotNullParam("filter", filter);
                Assert.checkNotNullParam("ldapName", ldapName);
                return new Attribute(searchDn, filter, ldapName);
            }

            /**
             * <p>The behavior is exactly the same as {@link #fromFilter(String, String, String)}, except that it uses the
             * same name of the context defined in {@link org.wildfly.security.auth.provider.ldap.LdapSecurityRealmBuilder.PrincipalMappingBuilder#setSearchDn(String)}.
             *
             * @param filter the filter that is going to be used to search for entries and obtain values for this attribute
             * @param ldapName the name of the attribute in LDAP from where the values are obtained
             * @return this builder
             */
            public static Attribute fromFilter(String filter, String ldapName) {
                Assert.checkNotNullParam("filter", filter);
                Assert.checkNotNullParam("ldapName", ldapName);
                return new Attribute(null, filter, ldapName);
            }

            Attribute(String ldapName) {
                this(null, null, ldapName);
            }

            Attribute(String searchDn, String filter, String ldapName) {
                Assert.checkNotNullParam("ldapName", ldapName);
                this.searchDn = searchDn;
                this.filter = filter;
                // Locale.ROOT keeps the normalization locale-independent
                // (e.g. avoids the Turkish dotless-i problem with the default locale).
                this.ldapName = ldapName.toUpperCase(Locale.ROOT);
            }

            public Attribute asRdn(String rdn) {
                Assert.checkNotNullParam("rdn", rdn);
                this.rdn = rdn;
                return this;
            }

            public Attribute to(String name) {
                Assert.checkNotNullParam("name", name);
                this.name = name;
                return this;
            }

            String getLdapName() {
                return this.ldapName;
            }

            String getName() {
                // Fall back to the LDAP attribute name when no explicit
                // target name was set via to(String).
                if (this.name == null) {
                    return this.ldapName;
                }
                return this.name;
            }

            String getSearchDn() {
                return this.searchDn;
            }

            String getFilter() {
                return this.filter;
            }

            String getRdn() {
                return this.rdn;
            }
        }
    }
}
| |
/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.netconf.device.impl;
import static com.google.common.base.Strings.isNullOrEmpty;
import static org.onlab.util.Tools.delay;
import static org.onlab.util.Tools.get;
import static org.onlab.util.Tools.groupedThreads;
import static org.slf4j.LoggerFactory.getLogger;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Dictionary;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Modified;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.onlab.packet.ChassisId;
import org.onosproject.cfg.ComponentConfigService;
import org.onosproject.cluster.ClusterService;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.MastershipRole;
import org.onosproject.net.device.DefaultDeviceDescription;
import org.onosproject.net.device.DeviceDescription;
import org.onosproject.net.device.DeviceProvider;
import org.onosproject.net.device.DeviceProviderRegistry;
import org.onosproject.net.device.DeviceProviderService;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.provider.netconf.device.impl.NetconfDevice.DeviceState;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
/**
 * Provider which will try to fetch the details of NETCONF devices from the core
 * and run a capability discovery on each of the device.
 *
 * <p>Device entries are supplied through the {@code devConfigs} component
 * property as a comma-separated list; each entry is parsed by
 * {@code processDeviceEntry} (see that method for the expected layout).
 */
@Component(immediate = true)
public class NetconfDeviceProvider extends AbstractProvider
        implements DeviceProvider {
    private final Logger log = getLogger(NetconfDeviceProvider.class);
    // Devices successfully advertised to the core, keyed by their device id.
    protected Map<DeviceId, NetconfDevice> netconfDeviceMap = new ConcurrentHashMap<DeviceId, NetconfDevice>();
    private DeviceProviderService providerService;
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceProviderRegistry providerRegistry;
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterService clusterService;
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ComponentConfigService cfgService;
    // Single-threaded executor: device create/remove tasks run sequentially.
    private ExecutorService deviceBuilder = Executors
            .newFixedThreadPool(1, groupedThreads("onos/netconf", "device-creator"));
    // Delay between events in ms.
    private static final int EVENTINTERVAL = 5;
    private static final String SCHEME = "netconf";
    @Property(name = "devConfigs", value = "", label = "Instance-specific configurations")
    private String devConfigs = null;
    @Property(name = "devPasswords", value = "", label = "Instance-specific password")
    private String devPasswords = null;

    /**
     * Creates a provider with the supplied identifier.
     */
    public NetconfDeviceProvider() {
        super(new ProviderId("netconf", "org.onosproject.provider.netconf"));
    }

    /**
     * Registers with the core and processes the initial configuration.
     */
    @Activate
    public void activate(ComponentContext context) {
        cfgService.registerProperties(getClass());
        providerService = providerRegistry.register(this);
        modified(context);
        log.info("Started");
    }

    /**
     * Disconnects every known device, stops the worker and unregisters.
     */
    @Deactivate
    public void deactivate(ComponentContext context) {
        cfgService.unregisterProperties(getClass(), false);
        try {
            // Queue a removal task for each known device.
            for (Entry<DeviceId, NetconfDevice> deviceEntry : netconfDeviceMap
                    .entrySet()) {
                deviceBuilder.submit(new DeviceCreator(deviceEntry.getValue(),
                                                       false));
            }
            // NOTE(review): shutdown() is not called before awaitTermination,
            // so this always waits the full 1s grace period before the
            // shutdownNow() below forces the executor down — confirm intended.
            deviceBuilder.awaitTermination(1000, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            log.error("Device builder did not terminate");
        }
        deviceBuilder.shutdownNow();
        netconfDeviceMap.clear();
        providerRegistry.unregister(this);
        providerService = null;
        log.info("Stopped");
    }

    /**
     * Re-reads the {@code devConfigs} property and applies any device changes.
     */
    @Modified
    public void modified(ComponentContext context) {
        if (context == null) {
            log.info("No configuration file");
            return;
        }
        Dictionary<?, ?> properties = context.getProperties();
        String deviceCfgValue = get(properties, "devConfigs");
        log.info("Settings: devConfigs={}", deviceCfgValue);
        if (!isNullOrEmpty(deviceCfgValue)) {
            addOrRemoveDevicesConfig(deviceCfgValue);
        }
    }

    // Parses a comma-separated list of device entries and schedules a
    // create task for active devices or a remove task for inactive ones.
    private void addOrRemoveDevicesConfig(String deviceConfig) {
        for (String deviceEntry : deviceConfig.split(",")) {
            NetconfDevice device = processDeviceEntry(deviceEntry);
            if (device != null) {
                log.info("Device Detail: username: {}, host={}, port={}, state={}",
                         device.getUsername(), device.getSshHost(),
                         device.getSshPort(), device.getDeviceState().name());
                if (device.isActive()) {
                    deviceBuilder.submit(new DeviceCreator(device, true));
                } else {
                    deviceBuilder.submit(new DeviceCreator(device, false));
                }
            }
        }
    }

    // Parses one entry of the form "username:password@hostIp:hostPort:state"
    // (state is ACTIVE or INACTIVE; anything else is marked INVALID).
    // Returns null when the entry cannot be parsed.
    private NetconfDevice processDeviceEntry(String deviceEntry) {
        if (deviceEntry == null) {
            log.info("No content for Device Entry, so cannot proceed further.");
            return null;
        }
        log.info("Trying to convert Device Entry String: " + deviceEntry
                + " to a Netconf Device Object");
        NetconfDevice device = null;
        try {
            // Split on the LAST '@' so passwords containing '@' still parse.
            String userInfo = deviceEntry.substring(0, deviceEntry
                    .lastIndexOf('@'));
            String hostInfo = deviceEntry.substring(deviceEntry
                    .lastIndexOf('@') + 1);
            String[] infoSplit = userInfo.split(":");
            String username = infoSplit[0];
            String password = infoSplit[1];
            infoSplit = hostInfo.split(":");
            String hostIp = infoSplit[0];
            Integer hostPort;
            try {
                hostPort = Integer.parseInt(infoSplit[1]);
            } catch (NumberFormatException nfe) {
                log.error("Bad Configuration Data: Failed to parse host port number string: "
                        + infoSplit[1]);
                throw nfe;
            }
            String deviceState = infoSplit[2];
            if (isNullOrEmpty(username) || isNullOrEmpty(password)
                    || isNullOrEmpty(hostIp) || hostPort == 0) {
                log.warn("Bad Configuration Data: both user and device information parts of Configuration "
                        + deviceEntry + " should be non-nullable");
            } else {
                device = new NetconfDevice(hostIp, hostPort, username, password);
                if (!isNullOrEmpty(deviceState)) {
                    if (deviceState.toUpperCase().equals(DeviceState.ACTIVE
                            .name())) {
                        device.setDeviceState(DeviceState.ACTIVE);
                    } else if (deviceState.toUpperCase()
                            .equals(DeviceState.INACTIVE.name())) {
                        device.setDeviceState(DeviceState.INACTIVE);
                    } else {
                        log.warn("Device State Information can not be empty, so marking the state as INVALID");
                        device.setDeviceState(DeviceState.INVALID);
                    }
                } else {
                    log.warn("The device entry do not specify state information, so marking the state as INVALID");
                    device.setDeviceState(DeviceState.INVALID);
                }
            }
        } catch (ArrayIndexOutOfBoundsException aie) {
            log.error("Error while reading config infromation from the config file: "
                    + "The user, host and device state infomation should be "
                    + "in the order 'userInfo@hostInfo:deviceState'"
                    + deviceEntry, aie);
        } catch (Exception e) {
            log.error("Error while parsing config information for the device entry: "
                    + deviceEntry, e);
        }
        return device;
    }

    @Override
    public void triggerProbe(DeviceId deviceId) {
        // TODO Auto-generated method stub
    }

    @Override
    public void roleChanged(DeviceId deviceId, MastershipRole newRole) {
    }

    /**
     * Returns whether the NETCONF device tracked under the given id is
     * currently reachable; false if the id is unknown to this provider.
     */
    @Override
    public boolean isReachable(DeviceId deviceId) {
        NetconfDevice netconfDevice = netconfDeviceMap.get(deviceId);
        if (netconfDevice == null) {
            log.warn("BAD REQUEST: the requested device id: "
                    + deviceId.toString()
                    + " is not associated to any NETCONF Device");
            return false;
        }
        return netconfDevice.isReachable();
    }

    /**
     * This class is intended to add or remove Configured Netconf Devices.
     * Functionality relies on 'createFlag' and 'NetconfDevice' content. The
     * functionality runs as a thread and depending on the 'createFlag' value
     * it will create or remove Device entry from the core.
     */
    private class DeviceCreator implements Runnable {

        private NetconfDevice device;
        // true: advertise the device to the core; false: remove it.
        private boolean createFlag;

        public DeviceCreator(NetconfDevice device, boolean createFlag) {
            this.device = device;
            this.createFlag = createFlag;
        }

        @Override
        public void run() {
            if (createFlag) {
                log.info("Trying to create Device Info on ONOS core");
                advertiseDevices();
            } else {
                log.info("Trying to remove Device Info on ONOS core");
                removeDevices();
            }
        }

        /**
         * For each Netconf Device, remove the entry from the device store.
         */
        private void removeDevices() {
            if (device == null) {
                log.warn("The Request Netconf Device is null, cannot proceed further");
                return;
            }
            try {
                DeviceId did = getDeviceId();
                if (!netconfDeviceMap.containsKey(did)) {
                    log.error("BAD Request: 'Currently device is not discovered, "
                            + "so cannot remove/disconnect the device: "
                            + device.deviceInfo() + "'");
                    return;
                }
                // Notify the core first, then tear down the session locally.
                providerService.deviceDisconnected(did);
                device.disconnect();
                netconfDeviceMap.remove(did);
                delay(EVENTINTERVAL);
            } catch (URISyntaxException uriSyntaxExcpetion) {
                log.error("Syntax Error while creating URI for the device: "
                        + device.deviceInfo()
                        + " couldn't remove the device from the store",
                        uriSyntaxExcpetion);
            }
        }

        /**
         * Initialize Netconf Device object, and notify core saying device
         * connected.
         */
        private void advertiseDevices() {
            try {
                if (device == null) {
                    log.warn("The Request Netconf Device is null, cannot proceed further");
                    return;
                }
                // Open the NETCONF session before advertising the device.
                device.init();
                DeviceId did = getDeviceId();
                ChassisId cid = new ChassisId();
                DeviceDescription desc = new DefaultDeviceDescription(
                        did.uri(),
                        Device.Type.OTHER,
                        "", "",
                        "", "",
                        cid);
                log.info("Persisting Device" + did.uri().toString());
                netconfDeviceMap.put(did, device);
                providerService.deviceConnected(did, desc);
                log.info("Done with Device Info Creation on ONOS core. Device Info: "
                        + device.deviceInfo() + " " + did.uri().toString());
                delay(EVENTINTERVAL);
            } catch (URISyntaxException e) {
                log.error("Syntax Error while creating URI for the device: "
                        + device.deviceInfo()
                        + " couldn't persist the device onto the store", e);
            } catch (SocketTimeoutException e) {
                log.error("Error while setting connection for the device: "
                        + device.deviceInfo(), e);
            } catch (IOException e) {
                log.error("Error while setting connection for the device: "
                        + device.deviceInfo(), e);
            } catch (Exception e) {
                log.error("Error while initializing session for the device: "
                        + (device != null ? device.deviceInfo() : null), e);
            }
        }

        /**
         * This will build a device id for the device.
         *
         * @return device id shaped as "netconf:username@sshHost:sshPort"
         * @throws URISyntaxException if the pieces do not form a valid URI
         */
        private DeviceId getDeviceId() throws URISyntaxException {
            String additionalSSP = new StringBuilder(device.getUsername())
                    .append("@").append(device.getSshHost()).append(":")
                    .append(device.getSshPort()).toString();
            DeviceId did = DeviceId.deviceId(new URI(SCHEME, additionalSSP,
                    null));
            return did;
        }
    }
}
| |
// HTMLParser Library $Name: $ - A java-based parser for HTML
// http://sourceforge.org/projects/htmlparser
// Copyright (C) 2004 Rogers George
//
//
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
package org.htmlparser.filters;
//import checkers.inference.ownership.quals.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.htmlparser.Node;
import org.htmlparser.NodeFilter;
import org.htmlparser.Tag;
import org.htmlparser.util.NodeList;
/**
* A NodeFilter that accepts nodes based on whether they match a CSS2 selector.
* Refer to <a href="http://www.w3.org/TR/REC-CSS2/selector.html">
* http://www.w3.org/TR/REC-CSS2/selector.html</a> for syntax.
* <p>
* Todo: more thorough testing, any relevant pseudo-classes, css3 features
*/
public class CssSelectorNodeFilter implements NodeFilter
{
    /**
     * Regular expression to split the selector into tokens.
     * Each alternative is its own capture group, and the index of the group
     * that matched doubles as the token type (see the constants below).
     */
    private static Pattern tokens =
        Pattern.compile("("
        + "/\\*.*?\\*/" // comments
        + ") | ("
        + " \".*?[^\"]\"" // double quoted string
        + " | \'.*?[^\']\'" // single quoted string
        + " | \"\" | \'\' " // empty quoted string
        + ") | ("
        + " [\\~\\*\\$\\^]? = " // attrib-val relations
        + ") | ("
        + " [a-zA-Z_\\*](?:[a-zA-Z0-9_-]|\\\\.)* " // bare name
        + ") | \\s*("
        + " [+>~\\s] " // combinators
        + ")\\s* | ("
        + " [\\.\\[\\]\\#\\:)(] " // class/id/attr/param delims
        + ") | ("
        + " [\\,] " // comma
        + ") | ( . )" // everything else (bogus)
        ,
        Pattern.CASE_INSENSITIVE
        | Pattern.DOTALL
        | Pattern.COMMENTS);
    /**
     * Comment token type.
     */
    private static final int COMMENT = 1;
    /**
     * quoted string token type.
     */
    private static final int QUOTEDSTRING = 2;
    /**
     * Relation token type.
     */
    private static final int RELATION = 3;
    /**
     * Name token type.
     */
    private static final int NAME = 4;
    /**
     * Combinator token type.
     */
    private static final int COMBINATOR = 5;
    /**
     * Delimiter token type.
     */
    private static final int DELIM = 6;
    /**
     * Comma token type.
     */
    private static final int COMMA = 7;
    // The filter tree compiled from the selector expression by parse().
    private NodeFilter therule;
    // Matcher over the selector string; drives the tokenizer state below.
    private Matcher m = null;
    // Type (capture-group index) of the current token; 0 means exhausted.
    private int tokentype = 0;
    // Text of the current token; null once the input is exhausted.
    private String token = null;
    /**
     * Create a Cascading Style Sheet node filter.
     * Tokenizes and parses the selector eagerly; syntax errors surface as
     * IllegalArgumentException from the parse helpers.
     * @param selector The selector expression.
     */
    public CssSelectorNodeFilter(String selector)
    {
        m = tokens.matcher (selector);
        if (nextToken ())
            therule = parse ();
    }
    /**
     * Accept nodes that match the selector expression.
     * @param node The node to check.
     * @return <code>true</code> if the node matches,
     * <code>false</code> otherwise.
     */
    public boolean accept (Node node)
    {
        return (therule.accept (node));
    }
    /**
     * Advance to the next token of the selector.
     * On success, {@link #tokentype} holds the index of the capture group
     * that matched and {@link #token} holds its text; on exhaustion both are
     * reset (0 / null).
     * @return <code>true</code> if another token was found.
     */
    private boolean nextToken ()
    {
        if (m != null && m.find ())
            // NOTE(review): the bound is i < m.groupCount(), so the final
            // catch-all group ("everything else") can never be reported; a
            // bogus character silently ends tokenization here instead. This
            // may be deliberate (parse() has no case for an unknown token
            // type), but confirm against the upstream HTMLParser source.
            for (int i = 1; i < m.groupCount (); i++)
                if (null != m.group (i))
                {
                    tokentype = i;
                    token = m.group (i);
                    return true;
                }
        tokentype = 0;
        token = null;
        return (false);
    }
    /**
     * Parse a full selector: a sequence of simple selectors joined by
     * combinators (+, >, whitespace) and commas, consuming tokens until
     * the input is exhausted.
     * @return The compiled filter tree (may be null for an empty selector).
     */
    private NodeFilter parse ()
    {
        NodeFilter ret;
        ret = null;
        do
        {
            switch (tokentype)
            {
                case COMMENT:
                case NAME:
                case DELIM:
                    // Start of a simple selector; AND it with what we have.
                    if (ret == null)
                        ret = parseSimple ();
                    else
                        ret = new AndFilter (ret, parseSimple ());
                    break;
                case COMBINATOR:
                    // Wrap the left-hand side as the relative test for the
                    // simple selector that follows.
                    switch (token.charAt (0))
                    {
                        case '+':
                            ret = new AdjacentFilter (ret);
                            break;
                        case '>':
                            ret = new HasParentFilter (ret);
                            break;
                        default: // whitespace
                            ret = new HasAncestorFilter (ret);
                    }
                    nextToken ();
                    break;
                case COMMA:
                    // Alternation: the rest of the selector is a new branch.
                    ret = new OrFilter (ret, parse ());
                    nextToken ();
                    break;
            }
        }
        while (token != null);
        return (ret);
    }
    /**
     * Parse one simple selector: an optional tag name followed by any number
     * of class (.name), id (#name), pseudo-class (:name) and attribute
     * ([expr]) qualifiers, all ANDed together.
     * @return The filter for the simple selector, or null if none was found.
     * @exception IllegalArgumentException on malformed qualifiers.
     */
    private NodeFilter parseSimple()
    {
        boolean done = false;
        NodeFilter ret = null;
        if (token != null)
            do
            {
                switch (tokentype)
                {
                    case COMMENT:
                        // Comments are skipped entirely.
                        nextToken();
                        break;
                    case NAME:
                        // "*" matches any tag; otherwise match the tag name.
                        if ("*".equals(token))
                            ret = new YesFilter();
                        else if (ret == null)
                            ret = new TagNameFilter(unescape(token));
                        else
                            ret = new AndFilter(ret, new TagNameFilter(unescape(token)));
                        nextToken();
                        break;
                    case DELIM:
                        switch (token.charAt(0))
                        {
                            case '.':
                                // Class selector: .name => class attribute.
                                nextToken();
                                if (tokentype != NAME)
                                    throw new IllegalArgumentException("Syntax error at " + token);
                                if (ret == null)
                                    ret = new HasAttributeFilter("class", unescape(token));
                                else
                                    ret
                                        = new AndFilter(ret, new HasAttributeFilter("class", unescape(token)));
                                break;
                            case '#':
                                // Id selector: #name => id attribute.
                                nextToken();
                                if (tokentype != NAME)
                                    throw new IllegalArgumentException("Syntax error at " + token);
                                if (ret == null)
                                    ret = new HasAttributeFilter("id", unescape(token));
                                else
                                    ret = new AndFilter(ret, new HasAttributeFilter("id", unescape(token)));
                                break;
                            case ':':
                                // Pseudo-class (currently unimplemented).
                                nextToken();
                                if (ret == null)
                                    ret = parsePseudoClass();
                                else
                                    ret = new AndFilter(ret, parsePseudoClass());
                                break;
                            case '[':
                                // Attribute expression, e.g. [href~=foo].
                                nextToken();
                                if (ret == null)
                                    ret = parseAttributeExp();
                                else
                                    ret = new AndFilter(ret, parseAttributeExp());
                                break;
                        }
                        nextToken();
                        break;
                    default:
                        // A combinator or comma ends this simple selector.
                        done = true;
                }
            }
            while (!done && token != null);
        return ret;
    }
    /**
     * Parse a pseudo-class qualifier.
     * @exception IllegalArgumentException always — not implemented.
     */
    private NodeFilter parsePseudoClass()
    {
        throw new IllegalArgumentException("pseudoclasses not implemented yet");
    }
    /**
     * Parse an attribute expression after the opening '[':
     * either bare existence ([attr]), exact match ([attr=val]) or
     * word match ([attr~=val]).
     * @return The filter for the attribute expression.
     * @exception IllegalArgumentException on malformed expressions.
     */
    private NodeFilter parseAttributeExp()
    {
        NodeFilter ret = null;
        if (tokentype == NAME)
        {
            String attrib = token;
            nextToken();
            if ("]".equals(token))
                ret = new HasAttributeFilter(unescape(attrib));
            else if (tokentype == RELATION)
            {
                String val = null, rel = token;
                nextToken();
                if (tokentype == QUOTEDSTRING)
                    // Strip the surrounding quotes before unescaping.
                    val = unescape(token.substring(1, token.length() - 1));
                else if (tokentype == NAME)
                    val = unescape(token);
                if ("~=".equals(rel) && val != null)
                    // Word match: value as a whole word, regex-escaped.
                    ret = new AttribMatchFilter(unescape(attrib),
                        "\\b"
                        + val.replaceAll("([^a-zA-Z0-9])", "\\\\$1")
                        + "\\b");
                else if ("=".equals(rel) && val != null)
                    ret = new HasAttributeFilter(attrib, val);
            }
        }
        if (ret == null)
            throw new IllegalArgumentException("Syntax error at " + token + tokentype);
        nextToken();
        return ret;
    }
    /**
     * Replace escape sequences in a string.
     * Handles both hex escapes (\41) and simple character escapes (\.).
     * @param escaped The string to examine.
     * @return The argument with escape sequences replaced by their
     * equivalent character.
     */
    public static String unescape(String escaped)
    {
        StringBuffer result = new StringBuffer(escaped.length());
        Matcher m = Pattern.compile("\\\\(?:([a-fA-F0-9]{2,6})|(.))").matcher(
            escaped);
        while (m.find())
        {
            if (m.group(1) != null)
                // Hex escape: decode the code point.
                m.appendReplacement(result,
                    String.valueOf((char)Integer.parseInt(m.group(1), 16)));
            else if (m.group(2) != null)
                // Simple escape: keep the escaped character itself.
                m.appendReplacement(result, m.group(2));
        }
        m.appendTail(result);
        return result.toString();
    }
    /**
     * Accepts a node when any ancestor satisfies the wrapped filter
     * (descendant combinator: "A B").
     */
    private static class HasAncestorFilter implements NodeFilter
    {
        // The test applied to each ancestor in turn.
        private NodeFilter atest;
        public HasAncestorFilter(NodeFilter n)
        {
            atest = n;
        }
        public boolean accept(Node n)
        {
            while (n != null)
            {
                n = n.getParent();
                // NOTE(review): when the chain tops out, atest.accept(null)
                // is invoked — the delegate filter must tolerate a null
                // argument. Confirm against the stock filter implementations.
                if (atest.accept(n))
                    return true;
            }
            return false;
        }
    }
    /**
     * Accepts a node when its immediately preceding sibling satisfies the
     * wrapped filter (adjacent-sibling combinator: "A + B").
     */
    private static class AdjacentFilter implements NodeFilter
    {
        // The test applied to the previous sibling.
        private NodeFilter sibtest;
        public AdjacentFilter(NodeFilter n)
        {
            sibtest = n;
        }
        public boolean accept(Node n)
        {
            if (n.getParent() != null)
            {
                // Locate n among its siblings and test the one before it.
                NodeList l = n.getParent().getChildren();
                for (int i = 0; i < l.size(); i++)
                    if (l.elementAt(i) == n && i > 0)
                        return (sibtest.accept(l.elementAt(i - 1)));
            }
            return false;
        }
    }
    /**
     * Accepts every node; used for the universal selector "*".
     */
    private static class YesFilter implements NodeFilter
    {
        public boolean accept(Node n)
        {return true;}
    }
    /**
     * Accepts a tag whose named attribute value matches a regular
     * expression; used for the "~=" attribute relation.
     */
    private static class AttribMatchFilter implements NodeFilter
    {
        // Pre-compiled pattern the attribute value must contain.
        private Pattern rel;
        // Name of the attribute to examine.
        private String attrib;
        public AttribMatchFilter(String attrib, String regex)
        {
            rel = Pattern.compile(regex);
            this.attrib = attrib;
        }
        public boolean accept(Node node)
        {
            // Only tags that actually carry the attribute can match;
            // the dangling else binds to the inner if, which is correct here.
            if (node instanceof Tag && ((Tag)node).getAttribute(attrib) != null)
                if (rel != null
                    && !rel.matcher(((Tag)node).getAttribute(attrib)).find())
                    return false;
                else
                    return true;
            else
                return false;
        }
    }
}
| |
/**
* Copyright 2011, Big Switch Networks, Inc.
* Originally created by David Erickson, Stanford University
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package net.floodlightcontroller.forwarding;
import static org.easymock.EasyMock.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.test.MockThreadPoolService;
import net.floodlightcontroller.counter.CounterStore;
import net.floodlightcontroller.counter.ICounterStoreService;
import net.floodlightcontroller.devicemanager.IDevice;
import net.floodlightcontroller.devicemanager.IDeviceService;
import net.floodlightcontroller.devicemanager.IEntityClassifierService;
import net.floodlightcontroller.devicemanager.internal.DefaultEntityClassifier;
import net.floodlightcontroller.novo.devicemanager.test.MockDeviceManager;
import net.floodlightcontroller.packet.Data;
import net.floodlightcontroller.packet.Ethernet;
import net.floodlightcontroller.packet.IPacket;
import net.floodlightcontroller.packet.IPv4;
import net.floodlightcontroller.packet.UDP;
import net.floodlightcontroller.routing.IRoutingService;
import net.floodlightcontroller.routing.Route;
import net.floodlightcontroller.test.FloodlightTestCase;
import net.floodlightcontroller.threadpool.IThreadPoolService;
import net.floodlightcontroller.topology.ITopologyListener;
import net.floodlightcontroller.topology.ITopologyService;
import net.floodlightcontroller.topology.NodePortTuple;
import net.floodlightcontroller.flowcache.FlowReconcileManager;
import net.floodlightcontroller.flowcache.IFlowReconcileService;
import net.floodlightcontroller.forwarding.Forwarding;
import org.easymock.Capture;
import org.easymock.CaptureType;
import org.easymock.EasyMock;
import org.junit.Test;
import org.openflow.protocol.OFFeaturesReply;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFPort;
import org.openflow.protocol.OFType;
import org.openflow.protocol.OFPacketIn.OFPacketInReason;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.action.OFActionOutput;
import org.openflow.util.HexString;
/**
 * Unit tests for the Forwarding module: verifies the flow-mods and
 * packet-outs emitted for multi-switch paths, single-switch paths,
 * flow-mod dampening and the no-route (flood) case.
 * Uses EasyMock record/replay; the ordering of expect/replay/verify
 * calls is significant throughout.
 */
public class ForwardingTest extends FloodlightTestCase {
    protected FloodlightContext cntx;
    protected MockDeviceManager deviceManager;
    protected IRoutingService routingEngine;
    protected Forwarding forwarding;
    protected FlowReconcileManager flowReconcileMgr;
    protected ITopologyService topology;
    protected MockThreadPoolService threadPool;
    protected IOFSwitch sw1, sw2;
    protected OFFeaturesReply swFeatures;
    protected IDevice srcDevice, dstDevice1, dstDevice2;
    protected OFPacketIn packetIn;
    protected OFPacketOut packetOut;
    protected OFPacketOut packetOutFlooded;
    protected IPacket testPacket;
    protected byte[] testPacketSerialized;
    protected int expected_wildcards;
    protected Date currentDate;
    @Override
    public void setUp() throws Exception {
        super.setUp();
        cntx = new FloodlightContext();
        // Module loader setup
        /*
        Collection<Class<? extends IFloodlightModule>> mods = new ArrayList<Class<? extends IFloodlightModule>>();
        Collection<IFloodlightService> mockedServices = new ArrayList<IFloodlightService>();
        mods.add(Forwarding.class);
        routingEngine = createMock(IRoutingService.class);
        topology = createMock(ITopologyService.class);
        mockedServices.add(routingEngine);
        mockedServices.add(topology);
        FloodlightTestModuleLoader fml = new FloodlightTestModuleLoader();
        fml.setupModules(mods, mockedServices);
        mockFloodlightProvider =
        		(MockFloodlightProvider) fml.getModuleByName(MockFloodlightProvider.class);
        deviceManager =
        		(MockDeviceManager) fml.getModuleByName(MockDeviceManager.class);
        threadPool =
        		(MockThreadPoolService) fml.getModuleByName(MockThreadPoolService.class);
        forwarding =
        		(Forwarding) fml.getModuleByName(Forwarding.class);
        */
        // Wire the module context by hand: real Forwarding/DeviceManager/
        // FlowReconcileManager instances, mocked routing and topology.
        mockFloodlightProvider = getMockFloodlightProvider();
        forwarding = new Forwarding();
        threadPool = new MockThreadPoolService();
        deviceManager = new MockDeviceManager();
        flowReconcileMgr = new FlowReconcileManager();
        routingEngine = createMock(IRoutingService.class);
        topology = createMock(ITopologyService.class);
        DefaultEntityClassifier entityClassifier = new DefaultEntityClassifier();
        FloodlightModuleContext fmc = new FloodlightModuleContext();
        fmc.addService(IFloodlightProviderService.class,
                       mockFloodlightProvider);
        fmc.addService(IThreadPoolService.class, threadPool);
        fmc.addService(ITopologyService.class, topology);
        fmc.addService(IRoutingService.class, routingEngine);
        fmc.addService(ICounterStoreService.class, new CounterStore());
        fmc.addService(IDeviceService.class, deviceManager);
        fmc.addService(IFlowReconcileService.class, flowReconcileMgr);
        fmc.addService(IEntityClassifierService.class, entityClassifier);
        // The topology mock must expect listener registration during init.
        topology.addListener(anyObject(ITopologyListener.class));
        expectLastCall().anyTimes();
        replay(topology);
        threadPool.init(fmc);
        forwarding.init(fmc);
        deviceManager.init(fmc);
        flowReconcileMgr.init(fmc);
        entityClassifier.init(fmc);
        threadPool.startUp(fmc);
        deviceManager.startUp(fmc);
        forwarding.startUp(fmc);
        flowReconcileMgr.startUp(fmc);
        entityClassifier.startUp(fmc);
        verify(topology);
        swFeatures = new OFFeaturesReply();
        swFeatures.setBuffers(1000);
        // Mock switches
        sw1 = EasyMock.createMock(IOFSwitch.class);
        expect(sw1.getId()).andReturn(1L).anyTimes();
        expect(sw1.getBuffers()).andReturn(swFeatures.getBuffers()).anyTimes();
        expect(sw1.getStringId())
                .andReturn(HexString.toHexString(1L)).anyTimes();
        sw2 = EasyMock.createMock(IOFSwitch.class);
        expect(sw2.getId()).andReturn(2L).anyTimes();
        expect(sw2.getBuffers()).andReturn(swFeatures.getBuffers()).anyTimes();
        expect(sw2.getStringId())
                .andReturn(HexString.toHexString(2L)).anyTimes();
        //fastWilcards mocked as this constant
        int fastWildcards =
                OFMatch.OFPFW_IN_PORT |
                OFMatch.OFPFW_NW_PROTO |
                OFMatch.OFPFW_TP_SRC |
                OFMatch.OFPFW_TP_DST |
                OFMatch.OFPFW_NW_SRC_ALL |
                OFMatch.OFPFW_NW_DST_ALL |
                OFMatch.OFPFW_NW_TOS;
        expect(sw1.getAttribute(IOFSwitch.PROP_FASTWILDCARDS)).andReturn((Integer)fastWildcards).anyTimes();
        expect(sw1.hasAttribute(IOFSwitch.PROP_SUPPORTS_OFPP_TABLE)).andReturn(true).anyTimes();
        expect(sw2.getAttribute(IOFSwitch.PROP_FASTWILDCARDS)).andReturn((Integer)fastWildcards).anyTimes();
        expect(sw2.hasAttribute(IOFSwitch.PROP_SUPPORTS_OFPP_TABLE)).andReturn(true).anyTimes();
        // Load the switch map
        Map<Long, IOFSwitch> switches = new HashMap<Long, IOFSwitch>();
        switches.put(1L, sw1);
        switches.put(2L, sw2);
        mockFloodlightProvider.setSwitches(switches);
        // Build test packet: a one-byte UDP datagram inside IPv4/Ethernet.
        testPacket = new Ethernet()
            .setDestinationMACAddress("00:11:22:33:44:55")
            .setSourceMACAddress("00:44:33:22:11:00")
            .setEtherType(Ethernet.TYPE_IPv4)
            .setPayload(
                new IPv4()
                .setTtl((byte) 128)
                .setSourceAddress("192.168.1.1")
                .setDestinationAddress("192.168.1.2")
                .setPayload(new UDP()
                            .setSourcePort((short) 5000)
                            .setDestinationPort((short) 5001)
                            .setPayload(new Data(new byte[] {0x01}))));
        currentDate = new Date();
        // Mock Packet-in
        testPacketSerialized = testPacket.serialize();
        packetIn =
                ((OFPacketIn) mockFloodlightProvider.getOFMessageFactory().
                        getMessage(OFType.PACKET_IN))
                        .setBufferId(-1)
                        .setInPort((short) 1)
                        .setPacketData(testPacketSerialized)
                        .setReason(OFPacketInReason.NO_MATCH)
                        .setTotalLength((short) testPacketSerialized.length);
        // Mock Packet-out
        packetOut =
                (OFPacketOut) mockFloodlightProvider.getOFMessageFactory().
                        getMessage(OFType.PACKET_OUT);
        packetOut.setBufferId(this.packetIn.getBufferId())
            .setInPort(this.packetIn.getInPort());
        List<OFAction> poactions = new ArrayList<OFAction>();
        poactions.add(new OFActionOutput((short) 3, (short) 0xffff));
        packetOut.setActions(poactions)
            .setActionsLength((short) OFActionOutput.MINIMUM_LENGTH)
            .setPacketData(testPacketSerialized)
            .setLengthU(OFPacketOut.MINIMUM_LENGTH+
                        packetOut.getActionsLength()+
                        testPacketSerialized.length);
        // Mock Packet-out with OFPP_FLOOD action
        packetOutFlooded =
                (OFPacketOut) mockFloodlightProvider.getOFMessageFactory().
                        getMessage(OFType.PACKET_OUT);
        packetOutFlooded.setBufferId(this.packetIn.getBufferId())
            .setInPort(this.packetIn.getInPort());
        poactions = new ArrayList<OFAction>();
        poactions.add(new OFActionOutput(OFPort.OFPP_FLOOD.getValue(),
                                         (short) 0xffff));
        packetOutFlooded.setActions(poactions)
            .setActionsLength((short) OFActionOutput.MINIMUM_LENGTH)
            .setPacketData(testPacketSerialized)
            .setLengthU(OFPacketOut.MINIMUM_LENGTH+
                        packetOutFlooded.getActionsLength()+
                        testPacketSerialized.length);
        // Expected match wildcards: exact-match on ports/VLAN/MACs/IPs.
        expected_wildcards = fastWildcards;
        expected_wildcards &= ~OFMatch.OFPFW_IN_PORT &
                              ~OFMatch.OFPFW_DL_VLAN &
                              ~OFMatch.OFPFW_DL_SRC &
                              ~OFMatch.OFPFW_DL_DST;
        expected_wildcards &= ~OFMatch.OFPFW_NW_SRC_MASK &
                              ~OFMatch.OFPFW_NW_DST_MASK;
        IFloodlightProviderService.bcStore.
            put(cntx,
                IFloodlightProviderService.CONTEXT_PI_PAYLOAD,
                (Ethernet)testPacket);
    }
    // Which destination device, if any, a test wants pre-learned.
    enum DestDeviceToLearn { NONE, DEVICE1 ,DEVICE2 };
    /**
     * Learns the source device (switch 1 port 1) and optionally one of two
     * destination devices (DEVICE1: switch 2 port 3; DEVICE2: switch 1
     * port 3), populating the per-packet FloodlightContext stores.
     */
    public void learnDevices(DestDeviceToLearn destDeviceToLearn) {
        // Build src and dest devices
        byte[] dataLayerSource = ((Ethernet)testPacket).getSourceMACAddress();
        byte[] dataLayerDest =
                ((Ethernet)testPacket).getDestinationMACAddress();
        int networkSource =
                ((IPv4)((Ethernet)testPacket).getPayload()).
                    getSourceAddress();
        int networkDest =
                ((IPv4)((Ethernet)testPacket).getPayload()).
                    getDestinationAddress();
        reset(topology);
        expect(topology.isAttachmentPointPort(1L, (short)1))
                                              .andReturn(true)
                                              .anyTimes();
        expect(topology.isAttachmentPointPort(2L, (short)3))
                                              .andReturn(true)
                                              .anyTimes();
        expect(topology.isAttachmentPointPort(1L, (short)3))
                                              .andReturn(true)
                                              .anyTimes();
        replay(topology);
        srcDevice =
                deviceManager.learnEntity(Ethernet.toLong(dataLayerSource),
                                          null, networkSource,
                                          1L, 1);
        IDeviceService.fcStore. put(cntx,
                                    IDeviceService.CONTEXT_SRC_DEVICE,
                                    srcDevice);
        if (destDeviceToLearn == DestDeviceToLearn.DEVICE1) {
            dstDevice1 =
                    deviceManager.learnEntity(Ethernet.toLong(dataLayerDest),
                                              null, networkDest,
                                              2L, 3);
            IDeviceService.fcStore.put(cntx,
                                       IDeviceService.CONTEXT_DST_DEVICE,
                                       dstDevice1);
        }
        if (destDeviceToLearn == DestDeviceToLearn.DEVICE2) {
            dstDevice2 =
                    deviceManager.learnEntity(Ethernet.toLong(dataLayerDest),
                                              null, networkDest,
                                              1L, 3);
            IDeviceService.fcStore.put(cntx,
                                       IDeviceService.CONTEXT_DST_DEVICE,
                                       dstDevice2);
        }
        verify(topology);
    }
    /**
     * A route across two switches: expects a flow-mod and packet-out on
     * sw1 and a flow-mod on sw2.
     */
    @Test
    public void testForwardMultiSwitchPath() throws Exception {
        learnDevices(DestDeviceToLearn.DEVICE1);
        Capture<OFMessage> wc1 = new Capture<OFMessage>(CaptureType.ALL);
        Capture<OFMessage> wc2 = new Capture<OFMessage>(CaptureType.ALL);
        Capture<FloodlightContext> bc1 =
                new Capture<FloodlightContext>(CaptureType.ALL);
        Capture<FloodlightContext> bc2 =
                new Capture<FloodlightContext>(CaptureType.ALL);
        Route route = new Route(1L, 2L);
        List<NodePortTuple> nptList = new ArrayList<NodePortTuple>();
        nptList.add(new NodePortTuple(1L, (short)1));
        nptList.add(new NodePortTuple(1L, (short)3));
        nptList.add(new NodePortTuple(2L, (short)1));
        nptList.add(new NodePortTuple(2L, (short)3));
        route.setPath(nptList);
        expect(routingEngine.getRoute(1L, (short)1, 2L, (short)3, 0)).andReturn(route).atLeastOnce();
        // Expected Flow-mods
        OFMatch match = new OFMatch();
        match.loadFromPacket(testPacketSerialized, (short) 1);
        OFActionOutput action = new OFActionOutput((short)3, (short)0xffff);
        List<OFAction> actions = new ArrayList<OFAction>();
        actions.add(action);
        OFFlowMod fm1 =
                (OFFlowMod) mockFloodlightProvider.getOFMessageFactory().
                        getMessage(OFType.FLOW_MOD);
        // Cookie 2L << 52 encodes the Forwarding application id.
        fm1.setIdleTimeout((short)5)
            .setMatch(match.clone()
                    .setWildcards(expected_wildcards))
                    .setActions(actions)
                    .setBufferId(OFPacketOut.BUFFER_ID_NONE)
                    .setCookie(2L << 52)
                    .setLengthU(OFFlowMod.MINIMUM_LENGTH+OFActionOutput.MINIMUM_LENGTH);
        OFFlowMod fm2 = fm1.clone();
        ((OFActionOutput)fm2.getActions().get(0)).setPort((short) 3);
        sw1.write(capture(wc1), capture(bc1));
        expectLastCall().anyTimes();
        sw2.write(capture(wc2), capture(bc2));
        expectLastCall().anyTimes();
        reset(topology);
        expect(topology.getL2DomainId(1L)).andReturn(1L).anyTimes();
        expect(topology.getL2DomainId(2L)).andReturn(1L).anyTimes();
        expect(topology.isAttachmentPointPort(1L,  (short)1)).andReturn(true).anyTimes();
        expect(topology.isAttachmentPointPort(2L,  (short)3)).andReturn(true).anyTimes();
        expect(topology.isIncomingBroadcastAllowed(anyLong(), anyShort())).andReturn(true).anyTimes();
        // Reset mocks, trigger the packet in, and validate results
        replay(sw1, sw2, routingEngine, topology);
        forwarding.receive(sw1, this.packetIn, cntx);
        verify(sw1, sw2, routingEngine);
        assertTrue(wc1.hasCaptured());  // wc1 should get packetout + flowmod.
        assertTrue(wc2.hasCaptured());  // wc2 should be a flowmod.
        List<OFMessage> msglist = wc1.getValues();
        for (OFMessage m: msglist) {
            if (m instanceof OFFlowMod)
                assertEquals(fm1, m);
            else if (m instanceof OFPacketOut)
                assertEquals(packetOut, m);
        }
        OFMessage m = wc2.getValue();
        assert (m instanceof OFFlowMod);
        assertTrue(m.equals(fm2));
    }
    /**
     * Source and destination on the same switch: expects exactly one
     * flow-mod and one packet-out on sw1.
     */
    @Test
    public void testForwardSingleSwitchPath() throws Exception {
        learnDevices(DestDeviceToLearn.DEVICE2);
        Route route = new  Route(1L, 1L);
        route.getPath().add(new NodePortTuple(1L, (short)1));
        route.getPath().add(new NodePortTuple(1L, (short)3));
        expect(routingEngine.getRoute(1L, (short)1, 1L, (short)3, 0)).andReturn(route).atLeastOnce();
        // Expected Flow-mods
        OFMatch match = new OFMatch();
        match.loadFromPacket(testPacketSerialized, (short) 1);
        OFActionOutput action = new OFActionOutput((short)3, (short)0xffff);
        List<OFAction> actions = new ArrayList<OFAction>();
        actions.add(action);
        OFFlowMod fm1 =
                (OFFlowMod) mockFloodlightProvider.getOFMessageFactory().
                        getMessage(OFType.FLOW_MOD);
        fm1.setIdleTimeout((short)5)
            .setMatch(match.clone()
                    .setWildcards(expected_wildcards))
                    .setActions(actions)
                    .setBufferId(OFPacketOut.BUFFER_ID_NONE)
                    .setCookie(2L << 52)
                    .setLengthU(OFFlowMod.MINIMUM_LENGTH +
                                OFActionOutput.MINIMUM_LENGTH);
        // Record expected packet-outs/flow-mods
        sw1.write(fm1, cntx);
        sw1.write(packetOut, cntx);
        reset(topology);
        expect(topology.isIncomingBroadcastAllowed(anyLong(), anyShort())).andReturn(true).anyTimes();
        expect(topology.getL2DomainId(1L)).andReturn(1L).anyTimes();
        expect(topology.isAttachmentPointPort(1L,  (short)1)).andReturn(true).anyTimes();
        expect(topology.isAttachmentPointPort(1L,  (short)3)).andReturn(true).anyTimes();
        // Reset mocks, trigger the packet in, and validate results
        replay(sw1, sw2, routingEngine, topology);
        forwarding.receive(sw1, this.packetIn, cntx);
        verify(sw1, sw2, routingEngine);
    }
    /**
     * Injecting the same packet-in three times should produce only one
     * flow-mod (dampened) but three packet-outs.
     */
    @Test
    public void testFlowModDampening() throws Exception {
        learnDevices(DestDeviceToLearn.DEVICE2);
        reset(topology);
        expect(topology.isAttachmentPointPort(EasyMock.anyLong(), EasyMock.anyShort()))
        .andReturn(true).anyTimes();
        expect(topology.getL2DomainId(1L)).andReturn(1L).anyTimes();
        replay(topology);
        Route route = new  Route(1L, 1L);
        route.getPath().add(new NodePortTuple(1L, (short)1));
        route.getPath().add(new NodePortTuple(1L, (short)3));
        expect(routingEngine.getRoute(1L, (short)1, 1L, (short)3, 0)).andReturn(route).atLeastOnce();
        // Expected Flow-mods
        OFMatch match = new OFMatch();
        match.loadFromPacket(testPacketSerialized, (short) 1);
        OFActionOutput action = new OFActionOutput((short)3, (short)0xffff);
        List<OFAction> actions = new ArrayList<OFAction>();
        actions.add(action);
        OFFlowMod fm1 =
                (OFFlowMod) mockFloodlightProvider.getOFMessageFactory().
                        getMessage(OFType.FLOW_MOD);
        fm1.setIdleTimeout((short)5)
            .setMatch(match.clone()
                    .setWildcards(expected_wildcards))
                    .setActions(actions)
                    .setBufferId(OFPacketOut.BUFFER_ID_NONE)
                    .setCookie(2L << 52)
                    .setLengthU(OFFlowMod.MINIMUM_LENGTH +
                                OFActionOutput.MINIMUM_LENGTH);
        // Record expected packet-outs/flow-mods
        // We will inject the packet_in 3 times and expect 1 flow mod and
        // 3 packet outs due to flow mod dampening
        sw1.write(fm1, cntx);
        expectLastCall().once();
        sw1.write(packetOut, cntx);
        expectLastCall().times(3);
        reset(topology);
        expect(topology.isIncomingBroadcastAllowed(anyLong(), anyShort())).andReturn(true).anyTimes();
        expect(topology.getL2DomainId(1L)).andReturn(1L).anyTimes();
        expect(topology.isAttachmentPointPort(1L,  (short)1)).andReturn(true).anyTimes();
        expect(topology.isAttachmentPointPort(1L,  (short)3)).andReturn(true).anyTimes();
        // Reset mocks, trigger the packet in, and validate results
        replay(sw1, routingEngine, topology);
        forwarding.receive(sw1, this.packetIn, cntx);
        forwarding.receive(sw1, this.packetIn, cntx);
        forwarding.receive(sw1, this.packetIn, cntx);
        verify(sw1, routingEngine);
    }
    /**
     * No destination device known: the packet should be flooded
     * (OFPP_FLOOD packet-out) and no flow-mod installed.
     */
    @Test
    public void testForwardNoPath() throws Exception {
        learnDevices(DestDeviceToLearn.NONE);
        // Set no destination attachment point or route
        // expect no Flow-mod but expect the packet to be flooded
        // Reset mocks, trigger the packet in, and validate results
        reset(topology);
        expect(topology.isIncomingBroadcastAllowed(1L, (short)1)).andReturn(true).anyTimes();
        expect(topology.isAttachmentPointPort(EasyMock.anyLong(),
                                              EasyMock.anyShort()))
                                              .andReturn(true)
                                              .anyTimes();
        expect(sw1.hasAttribute(IOFSwitch.PROP_SUPPORTS_OFPP_FLOOD))
                .andReturn(true).anyTimes();
        sw1.write(packetOutFlooded, cntx);
        expectLastCall().once();
        replay(sw1, sw2, routingEngine, topology);
        forwarding.receive(sw1, this.packetIn, cntx);
        verify(sw1, sw2, routingEngine);
    }
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.ldap.authentication.ad;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.naming.AuthenticationException;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.OperationNotSupportedException;
import javax.naming.directory.DirContext;
import javax.naming.directory.SearchControls;
import javax.naming.ldap.InitialLdapContext;
import org.springframework.core.log.LogMessage;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.ldap.CommunicationException;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.DistinguishedName;
import org.springframework.ldap.core.support.DefaultDirObjectFactory;
import org.springframework.ldap.support.LdapUtils;
import org.springframework.security.authentication.AccountExpiredException;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.CredentialsExpiredException;
import org.springframework.security.authentication.DisabledException;
import org.springframework.security.authentication.InternalAuthenticationServiceException;
import org.springframework.security.authentication.LockedException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.ldap.SpringSecurityLdapTemplate;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* Specialized LDAP authentication provider which uses Active Directory configuration
* conventions.
* <p>
* It will authenticate using the Active Directory
* <a href="https://msdn.microsoft.com/en-us/library/ms680857%28VS.85%29.aspx">
* {@code userPrincipalName}</a> or a custom {@link #setSearchFilter(String) searchFilter}
* in the form {@code username@domain}. If the username does not already end with the
* domain name, the {@code userPrincipalName} will be built by appending the configured
* domain name to the username supplied in the authentication request. If no domain name
* is configured, it is assumed that the username will always contain the domain name.
* <p>
* The user authorities are obtained from the data contained in the {@code memberOf}
* attribute.
*
* <h3>Active Directory Sub-Error Codes</h3>
*
* When an authentication fails, resulting in a standard LDAP 49 error code, Active
* Directory also supplies its own sub-error codes within the error message. These will be
* used to provide additional log information on why an authentication has failed. Typical
* examples are
*
* <ul>
* <li>525 - user not found</li>
* <li>52e - invalid credentials</li>
* <li>530 - not permitted to logon at this time</li>
* <li>532 - password expired</li>
* <li>533 - account disabled</li>
* <li>701 - account expired</li>
* <li>773 - user must reset password</li>
* <li>775 - account locked</li>
* </ul>
*
* If you set the {@link #setConvertSubErrorCodesToExceptions(boolean)
* convertSubErrorCodesToExceptions} property to {@code true}, the codes will also be used
* to control the exception raised.
*
* @author Luke Taylor
* @author Rob Winch
* @since 3.1
*/
public final class ActiveDirectoryLdapAuthenticationProvider extends AbstractLdapAuthenticationProvider {
private static final Pattern SUB_ERROR_CODE = Pattern.compile(".*data\\s([0-9a-f]{3,4}).*");
// Error codes
private static final int USERNAME_NOT_FOUND = 0x525;
private static final int INVALID_PASSWORD = 0x52e;
private static final int NOT_PERMITTED = 0x530;
private static final int PASSWORD_EXPIRED = 0x532;
private static final int ACCOUNT_DISABLED = 0x533;
private static final int ACCOUNT_EXPIRED = 0x701;
private static final int PASSWORD_NEEDS_RESET = 0x773;
private static final int ACCOUNT_LOCKED = 0x775;
private final String domain;
private final String rootDn;
private final String url;
private boolean convertSubErrorCodesToExceptions;
private String searchFilter = "(&(objectClass=user)(userPrincipalName={0}))";
private Map<String, Object> contextEnvironmentProperties = new HashMap<>();
// Only used to allow tests to substitute a mock LdapContext
ContextFactory contextFactory = new ContextFactory();
/**
 * Creates a provider bound to an explicit root DN.
 * @param domain the domain name (may be null or empty)
 * @param url an LDAP url (or multiple URLs)
 * @param rootDn the root DN (may be null or empty)
 */
public ActiveDirectoryLdapAuthenticationProvider(String domain, String url, String rootDn) {
	Assert.isTrue(StringUtils.hasText(url), "Url cannot be empty");
	// Normalize to lower case; blank values collapse to null.
	if (StringUtils.hasText(domain)) {
		this.domain = domain.toLowerCase();
	}
	else {
		this.domain = null;
	}
	this.url = url;
	if (StringUtils.hasText(rootDn)) {
		this.rootDn = rootDn.toLowerCase();
	}
	else {
		this.rootDn = null;
	}
}
/**
 * Creates a provider whose root DN is derived from the domain name.
 * @param domain the domain name (may be null or empty)
 * @param url an LDAP url (or multiple URLs)
 */
public ActiveDirectoryLdapAuthenticationProvider(String domain, String url) {
	Assert.isTrue(StringUtils.hasText(url), "Url cannot be empty");
	// Normalize to lower case; a blank domain collapses to null.
	if (StringUtils.hasText(domain)) {
		this.domain = domain.toLowerCase();
	}
	else {
		this.domain = null;
	}
	this.url = url;
	// Without an explicit root DN, derive it from the domain (if any).
	if (this.domain != null) {
		this.rootDn = rootDnFromDomain(this.domain);
	}
	else {
		this.rootDn = null;
	}
}
@Override
protected DirContextOperations doAuthentication(UsernamePasswordAuthenticationToken auth) {
	// Bind to the directory with the user's own credentials, then locate
	// the user's entry; the bound context is always closed afterwards.
	String user = auth.getName();
	String credentials = (String) auth.getCredentials();
	DirContext context = null;
	try {
		context = bindAsUser(user, credentials);
		return searchForUser(context, user);
	}
	catch (CommunicationException ex) {
		// Directory unreachable: surface as an internal service failure.
		throw badLdapConnection(ex);
	}
	catch (NamingException ex) {
		// The bind succeeded but the entry lookup failed.
		this.logger.error("Failed to locate directory entry for authenticated user: " + user, ex);
		throw badCredentials(ex);
	}
	finally {
		LdapUtils.closeContext(context);
	}
}
/**
 * Builds the granted-authority list from the {@code memberOf} values of the
 * user's Active Directory entry: the most specific RDN value of each group
 * DN becomes one authority.
 */
@Override
protected Collection<? extends GrantedAuthority> loadUserAuthorities(DirContextOperations userData, String username,
		String password) {
	String[] groups = userData.getStringAttributes("memberOf");
	if (groups == null) {
		this.logger.debug("No values for 'memberOf' attribute.");
		return AuthorityUtils.NO_AUTHORITIES;
	}
	if (this.logger.isDebugEnabled()) {
		this.logger.debug("'memberOf' attribute values: " + Arrays.asList(groups));
	}
	List<GrantedAuthority> result = new ArrayList<>(groups.length);
	for (String groupDn : groups) {
		// removeLast() yields the leaf RDN, e.g. CN=Admins -> "Admins".
		String role = new DistinguishedName(groupDn).removeLast().getValue();
		result.add(new SimpleGrantedAuthority(role));
	}
	return result;
}
// Opens a DirContext bound with the user's own principal and password.
// Authentication failures are surfaced as BadCredentialsException; everything
// else is converted via LdapUtils.
private DirContext bindAsUser(String username, String password) {
// TODO. add DNS lookup based on domain
final String bindUrl = this.url;
Hashtable<String, Object> env = new Hashtable<>();
env.put(Context.SECURITY_AUTHENTICATION, "simple");
String bindPrincipal = createBindPrincipal(username);
env.put(Context.SECURITY_PRINCIPAL, bindPrincipal);
env.put(Context.PROVIDER_URL, bindUrl);
env.put(Context.SECURITY_CREDENTIALS, password);
env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
env.put(Context.OBJECT_FACTORIES, DefaultDirObjectFactory.class.getName());
// Caller-supplied properties are applied last so they can override the defaults.
env.putAll(this.contextEnvironmentProperties);
try {
return this.contextFactory.createContext(env);
}
catch (NamingException ex) {
if ((ex instanceof AuthenticationException) || (ex instanceof OperationNotSupportedException)) {
// Wrong password / unsupported bind: log AD sub-error detail, then fail auth.
handleBindException(bindPrincipal, ex);
throw badCredentials(ex);
}
throw LdapUtils.convertLdapException(ex);
}
}
// Logs a bind failure and, when convertSubErrorCodesToExceptions is enabled,
// translates the AD-specific sub-error code embedded in the exception message
// into a more precise account-status exception.
private void handleBindException(String bindPrincipal, NamingException exception) {
this.logger.debug(LogMessage.format("Authentication for %s failed:%s", bindPrincipal, exception));
// Strip a non-serializable resolved object so the exception stays serializable.
handleResolveObj(exception);
int subErrorCode = parseSubErrorCode(exception.getMessage());
if (subErrorCode <= 0) {
this.logger.debug("Failed to locate AD-specific sub-error code in message");
return;
}
this.logger.info(
LogMessage.of(() -> "Active Directory authentication failed: " + subCodeToLogMessage(subErrorCode)));
if (this.convertSubErrorCodesToExceptions) {
raiseExceptionForErrorCode(subErrorCode, exception);
}
}
/**
 * Clears the exception's resolved object unless it is serializable, so that the
 * exception itself can be serialized safely (e.g. stored in an HTTP session).
 */
private void handleResolveObj(NamingException exception) {
    Object resolved = exception.getResolvedObj();
    if (resolved != null && !(resolved instanceof Serializable)) {
        exception.setResolvedObj(null);
    }
}
/**
 * Extracts the hexadecimal AD-specific sub-error code from a bind failure message.
 * @return the parsed code, or {@code -1} when the message does not match the
 * expected pattern
 */
private int parseSubErrorCode(String message) {
    Matcher matcher = SUB_ERROR_CODE.matcher(message);
    return matcher.matches() ? Integer.parseInt(matcher.group(1), 16) : -1;
}
// Maps known AD sub-error codes onto the matching Spring Security account-status
// exception; anything unrecognized falls back to BadCredentialsException. Every
// branch throws, so this method never returns normally.
private void raiseExceptionForErrorCode(int code, NamingException exception) {
String hexString = Integer.toHexString(code);
// Preserve the original failure as the cause so callers can inspect the AD detail.
Throwable cause = new ActiveDirectoryAuthenticationException(hexString, exception.getMessage(), exception);
switch (code) {
case PASSWORD_EXPIRED:
throw new CredentialsExpiredException(this.messages.getMessage(
"LdapAuthenticationProvider.credentialsExpired", "User credentials have expired"), cause);
case ACCOUNT_DISABLED:
throw new DisabledException(
this.messages.getMessage("LdapAuthenticationProvider.disabled", "User is disabled"), cause);
case ACCOUNT_EXPIRED:
throw new AccountExpiredException(
this.messages.getMessage("LdapAuthenticationProvider.expired", "User account has expired"), cause);
case ACCOUNT_LOCKED:
throw new LockedException(
this.messages.getMessage("LdapAuthenticationProvider.locked", "User account is locked"), cause);
default:
throw badCredentials(cause);
}
}
/**
 * Translates a known AD sub-error code into a human-readable log message;
 * unrecognized codes are reported with their hex value.
 */
private String subCodeToLogMessage(int code) {
    switch (code) {
        case USERNAME_NOT_FOUND:
            return "User was not found in directory";
        case INVALID_PASSWORD:
            return "Supplied password was invalid";
        case NOT_PERMITTED:
            return "User not permitted to logon at this time";
        case PASSWORD_EXPIRED:
            return "Password has expired";
        case ACCOUNT_DISABLED:
            return "Account is disabled";
        case ACCOUNT_EXPIRED:
            return "Account expired";
        case PASSWORD_NEEDS_RESET:
            return "User must reset password";
        case ACCOUNT_LOCKED:
            return "Account locked";
        default:
            return "Unknown (error code " + Integer.toHexString(code) + ")";
    }
}
// Builds the generic localized "bad credentials" failure.
private BadCredentialsException badCredentials() {
    String message = this.messages.getMessage("LdapAuthenticationProvider.badCredentials", "Bad credentials");
    return new BadCredentialsException(message);
}
// Variant of badCredentials() that attaches the underlying cause.
private BadCredentialsException badCredentials(Throwable cause) {
    BadCredentialsException result = badCredentials();
    result.initCause(cause);
    return result;
}
// Wraps a connection-level failure as an internal service error (not a
// credentials problem) so it is reported distinctly to callers.
private InternalAuthenticationServiceException badLdapConnection(Throwable cause) {
    String message = this.messages.getMessage("LdapAuthenticationProvider.badLdapConnection",
            "Connection to LDAP server failed.");
    return new InternalAuthenticationServiceException(message, cause);
}
// Locates the authenticated user's single directory entry under the configured
// (or principal-derived) search root using the configured searchFilter.
private DirContextOperations searchForUser(DirContext context, String username) throws NamingException {
SearchControls searchControls = new SearchControls();
searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
String bindPrincipal = createBindPrincipal(username);
String searchRoot = (this.rootDn != null) ? this.rootDn : searchRootFromPrincipal(bindPrincipal);
try {
// Filter placeholders: {0} = bindPrincipal, {1} = raw username.
return SpringSecurityLdapTemplate.searchForSingleEntryInternal(context, searchControls, searchRoot,
this.searchFilter, new Object[] { bindPrincipal, username });
}
catch (CommunicationException ex) {
throw badLdapConnection(ex);
}
catch (IncorrectResultSizeDataAccessException ex) {
// Search should never return multiple results if properly configured -
if (ex.getActualSize() != 0) {
throw ex;
}
// If we found no results, then the username/password did not match
UsernameNotFoundException userNameNotFoundException = new UsernameNotFoundException(
"User " + username + " not found in directory.", ex);
throw badCredentials(userNameNotFoundException);
}
}
/**
 * Derives the search root DN from the domain portion of the bind principal
 * (everything after the last {@code @}).
 * @throws BadCredentialsException if the principal carries no domain and no
 * domain has been configured
 */
private String searchRootFromPrincipal(String bindPrincipal) {
    int delimiterIndex = bindPrincipal.lastIndexOf('@');
    if (delimiterIndex < 0) {
        this.logger.debug("User principal '" + bindPrincipal
                + "' does not contain the domain, and no domain has been configured");
        throw badCredentials();
    }
    String principalDomain = bindPrincipal.substring(delimiterIndex + 1);
    return rootDnFromDomain(principalDomain);
}
/**
 * Converts a dotted domain name (e.g. {@code example.com}) into a root DN
 * (e.g. {@code dc=example,dc=com}).
 */
private String rootDnFromDomain(String domain) {
    String[] tokens = StringUtils.tokenizeToStringArray(domain, ".");
    StringBuilder root = new StringBuilder();
    for (int i = 0; i < tokens.length; i++) {
        if (i > 0) {
            root.append(',');
        }
        root.append("dc=").append(tokens[i]);
    }
    return root.toString();
}
/**
 * Builds the principal used for the bind: appends {@code @domain} unless no
 * domain is configured or the username already ends with it.
 */
String createBindPrincipal(String username) {
    // Locale-independent lower-casing keeps the suffix comparison stable
    // regardless of the JVM default locale (e.g. Turkish dotted/dotless 'i');
    // this.domain is stored lower-cased by the constructor.
    if (this.domain == null || username.toLowerCase(java.util.Locale.ROOT).endsWith(this.domain)) {
        return username;
    }
    return username + "@" + this.domain;
}
/**
 * By default, a failed authentication (LDAP error 49) will result in a
 * {@code BadCredentialsException}.
 * <p>
 * If this property is set to {@code true}, the exception message from a failed bind
 * attempt will be parsed for the AD-specific error code and a
 * {@link CredentialsExpiredException}, {@link DisabledException},
 * {@link AccountExpiredException} or {@link LockedException} will be thrown for the
 * corresponding codes. All other codes will result in the default
 * {@code BadCredentialsException}.
 * @param convertSubErrorCodesToExceptions {@code true} to raise an exception based on
 * the AD error code.
 */
public void setConvertSubErrorCodesToExceptions(boolean convertSubErrorCodesToExceptions) {
this.convertSubErrorCodesToExceptions = convertSubErrorCodesToExceptions;
}
/**
 * The LDAP filter string to search for the user being authenticated. Occurrences of
 * {0} are replaced with the {@code username@domain}. Occurrences of {1} are replaced
 * with the {@code username} only.
 * <p>
 * Defaults to: {@code (&(objectClass=user)(userPrincipalName={0}))}
 * </p>
 * @param searchFilter the filter string
 * @throws IllegalArgumentException if {@code searchFilter} is null, empty or blank
 *
 * @since 3.2.6
 */
public void setSearchFilter(String searchFilter) {
Assert.hasText(searchFilter, "searchFilter must have text");
this.searchFilter = searchFilter;
}
/**
 * Allows a custom environment properties to be used to create initial LDAP context.
 * @param environment the additional environment parameters to use when creating the
 * LDAP Context; must be non-empty. Note the entries are copied into a
 * {@link Hashtable}, which rejects null keys and values.
 */
public void setContextEnvironmentProperties(Map<String, Object> environment) {
Assert.notEmpty(environment, "environment must not be empty");
this.contextEnvironmentProperties = new Hashtable<>(environment);
}
// Creation strategy for the LDAP DirContext; package-visible so tests can
// substitute a factory returning a mock LdapContext.
static class ContextFactory {
DirContext createContext(Hashtable<?, ?> env) throws NamingException {
return new InitialLdapContext(env, null);
}
}
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.internal.PlatformDependent;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
/**
 * A NIO {@link ByteBuffer} based buffer. It is recommended to use {@link Unpooled#directBuffer(int)}
 * and {@link Unpooled#wrappedBuffer(ByteBuffer)} instead of calling the
 * constructor explicitly.
 */
public class UnpooledUnsafeDirectByteBuf extends AbstractReferenceCountedByteBuf {
// True when the platform's native order is already big-endian; single-value
// reads/writes below only byte-swap when this is false.
private static final boolean NATIVE_ORDER = ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN;
private final ByteBufAllocator alloc;
// Base native address of 'buffer'; all indexed access goes through addr(index).
private long memoryAddress;
private ByteBuffer buffer;
// Lazily-created duplicate of 'buffer', reused for bulk operations that need a
// positioned view; invalidated whenever the backing buffer changes.
private ByteBuffer tmpNioBuf;
private int capacity;
// Set when wrapping a caller-supplied buffer that this class must not free.
private boolean doNotFree;
/**
 * Creates a new direct buffer.
 *
 * @param initialCapacity the initial capacity of the underlying direct buffer
 * @param maxCapacity the maximum capacity of the underlying direct buffer
 */
protected UnpooledUnsafeDirectByteBuf(ByteBufAllocator alloc, int initialCapacity, int maxCapacity) {
super(maxCapacity);
if (alloc == null) {
throw new NullPointerException("alloc");
}
if (initialCapacity < 0) {
throw new IllegalArgumentException("initialCapacity: " + initialCapacity);
}
if (maxCapacity < 0) {
throw new IllegalArgumentException("maxCapacity: " + maxCapacity);
}
if (initialCapacity > maxCapacity) {
throw new IllegalArgumentException(String.format(
"initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
}
this.alloc = alloc;
setByteBuffer(allocateDirect(initialCapacity));
}
/**
 * Creates a new direct buffer by wrapping the specified initial buffer.
 * The wrapped buffer is not freed by this class when released.
 *
 * @param maxCapacity the maximum capacity of the underlying direct buffer
 */
protected UnpooledUnsafeDirectByteBuf(ByteBufAllocator alloc, ByteBuffer initialBuffer, int maxCapacity) {
super(maxCapacity);
if (alloc == null) {
throw new NullPointerException("alloc");
}
if (initialBuffer == null) {
throw new NullPointerException("initialBuffer");
}
if (!initialBuffer.isDirect()) {
throw new IllegalArgumentException("initialBuffer is not a direct buffer.");
}
if (initialBuffer.isReadOnly()) {
throw new IllegalArgumentException("initialBuffer is a read-only buffer.");
}
int initialCapacity = initialBuffer.remaining();
if (initialCapacity > maxCapacity) {
throw new IllegalArgumentException(String.format(
"initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
}
this.alloc = alloc;
// The caller owns the wrapped buffer, so suppress freeing it on replacement.
doNotFree = true;
setByteBuffer(initialBuffer.slice().order(ByteOrder.BIG_ENDIAN));
// Wrapped content counts as already-written data.
writerIndex(initialCapacity);
}
/**
 * Allocate a new direct {@link ByteBuffer} with the given initialCapacity.
 */
protected ByteBuffer allocateDirect(int initialCapacity) {
return ByteBuffer.allocateDirect(initialCapacity);
}
/**
 * Free a direct {@link ByteBuffer}
 */
protected void freeDirect(ByteBuffer buffer) {
PlatformDependent.freeDirectBuffer(buffer);
}
// Installs a new backing buffer, freeing the previous one unless it was a
// wrapped caller-supplied buffer, and refreshes the cached native address.
private void setByteBuffer(ByteBuffer buffer) {
ByteBuffer oldBuffer = this.buffer;
if (oldBuffer != null) {
if (doNotFree) {
// doNotFree only protects the originally wrapped buffer once.
doNotFree = false;
} else {
freeDirect(oldBuffer);
}
}
this.buffer = buffer;
memoryAddress = PlatformDependent.directBufferAddress(buffer);
// The cached duplicate refers to the old buffer; drop it.
tmpNioBuf = null;
capacity = buffer.remaining();
}
@Override
public boolean isDirect() {
return true;
}
@Override
public int capacity() {
return capacity;
}
// Resizes by allocating a new direct buffer and copying; when shrinking, the
// reader/writer indices are clamped to the new capacity.
@Override
public ByteBuf capacity(int newCapacity) {
ensureAccessible();
if (newCapacity < 0 || newCapacity > maxCapacity()) {
throw new IllegalArgumentException("newCapacity: " + newCapacity);
}
int readerIndex = readerIndex();
int writerIndex = writerIndex();
int oldCapacity = capacity;
if (newCapacity > oldCapacity) {
// Grow: copy the whole old content into the larger buffer.
ByteBuffer oldBuffer = buffer;
ByteBuffer newBuffer = allocateDirect(newCapacity);
oldBuffer.position(0).limit(oldBuffer.capacity());
newBuffer.position(0).limit(oldBuffer.capacity());
newBuffer.put(oldBuffer);
newBuffer.clear();
setByteBuffer(newBuffer);
} else if (newCapacity < oldCapacity) {
// Shrink: keep only readable content that still fits.
ByteBuffer oldBuffer = buffer;
ByteBuffer newBuffer = allocateDirect(newCapacity);
if (readerIndex < newCapacity) {
if (writerIndex > newCapacity) {
writerIndex(writerIndex = newCapacity);
}
oldBuffer.position(readerIndex).limit(writerIndex);
newBuffer.position(readerIndex).limit(writerIndex);
newBuffer.put(oldBuffer);
newBuffer.clear();
} else {
// Everything readable is beyond the new capacity; nothing to copy.
setIndex(newCapacity, newCapacity);
}
setByteBuffer(newBuffer);
}
return this;
}
@Override
public ByteBufAllocator alloc() {
return alloc;
}
@Override
public ByteOrder order() {
return ByteOrder.BIG_ENDIAN;
}
@Override
public boolean hasArray() {
return false;
}
@Override
public byte[] array() {
throw new UnsupportedOperationException("direct buffer");
}
@Override
public int arrayOffset() {
throw new UnsupportedOperationException("direct buffer");
}
@Override
public boolean hasMemoryAddress() {
return true;
}
@Override
public long memoryAddress() {
ensureAccessible();
return memoryAddress;
}
@Override
protected byte _getByte(int index) {
return PlatformDependent.getByte(addr(index));
}
@Override
protected short _getShort(int index) {
// Unsafe reads in native order; swap to present big-endian semantics.
short v = PlatformDependent.getShort(addr(index));
return NATIVE_ORDER? v : Short.reverseBytes(v);
}
@Override
protected int _getUnsignedMedium(int index) {
// Assembled byte-by-byte, so no order-dependent swap is needed.
long addr = addr(index);
return (PlatformDependent.getByte(addr) & 0xff) << 16 |
(PlatformDependent.getByte(addr + 1) & 0xff) << 8 |
PlatformDependent.getByte(addr + 2) & 0xff;
}
@Override
protected int _getInt(int index) {
int v = PlatformDependent.getInt(addr(index));
return NATIVE_ORDER? v : Integer.reverseBytes(v);
}
@Override
protected long _getLong(int index) {
long v = PlatformDependent.getLong(addr(index));
return NATIVE_ORDER? v : Long.reverseBytes(v);
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
checkIndex(index, length);
if (dst == null) {
throw new NullPointerException("dst");
}
if (dstIndex < 0 || dstIndex > dst.capacity() - length) {
throw new IndexOutOfBoundsException("dstIndex: " + dstIndex);
}
// Prefer direct memory copy, then array copy, then delegate to the destination.
if (dst.hasMemoryAddress()) {
PlatformDependent.copyMemory(addr(index), dst.memoryAddress() + dstIndex, length);
} else if (dst.hasArray()) {
PlatformDependent.copyMemory(addr(index), dst.array(), dst.arrayOffset() + dstIndex, length);
} else {
dst.setBytes(dstIndex, this, index, length);
}
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
checkIndex(index, length);
if (dst == null) {
throw new NullPointerException("dst");
}
if (dstIndex < 0 || dstIndex > dst.length - length) {
throw new IndexOutOfBoundsException(String.format(
"dstIndex: %d, length: %d (expected: range(0, %d))", dstIndex, length, dst.length));
}
if (length != 0) {
PlatformDependent.copyMemory(addr(index), dst, dstIndex, length);
}
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
getBytes(index, dst, false);
return this;
}
// Copies up to dst.remaining() bytes starting at index into dst. When
// 'internal' is true the shared cached duplicate is used (single-threaded
// fast path); otherwise a fresh duplicate keeps this call re-entrant.
private void getBytes(int index, ByteBuffer dst, boolean internal) {
checkIndex(index);
if (dst == null) {
throw new NullPointerException("dst");
}
int bytesToCopy = Math.min(capacity() - index, dst.remaining());
ByteBuffer tmpBuf;
if (internal) {
tmpBuf = internalNioBuffer();
} else {
tmpBuf = buffer.duplicate();
}
tmpBuf.clear().position(index).limit(index + bytesToCopy);
dst.put(tmpBuf);
}
@Override
public ByteBuf readBytes(ByteBuffer dst) {
int length = dst.remaining();
checkReadableBytes(length);
getBytes(readerIndex, dst, true);
readerIndex += length;
return this;
}
@Override
protected void _setByte(int index, int value) {
PlatformDependent.putByte(addr(index), (byte) value);
}
@Override
protected void _setShort(int index, int value) {
// Store big-endian: swap first on little-endian platforms.
PlatformDependent.putShort(addr(index), NATIVE_ORDER ? (short) value : Short.reverseBytes((short) value));
}
@Override
protected void _setMedium(int index, int value) {
long addr = addr(index);
PlatformDependent.putByte(addr, (byte) (value >>> 16));
PlatformDependent.putByte(addr + 1, (byte) (value >>> 8));
PlatformDependent.putByte(addr + 2, (byte) value);
}
@Override
protected void _setInt(int index, int value) {
PlatformDependent.putInt(addr(index), NATIVE_ORDER ? value : Integer.reverseBytes(value));
}
@Override
protected void _setLong(int index, long value) {
PlatformDependent.putLong(addr(index), NATIVE_ORDER ? value : Long.reverseBytes(value));
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
checkIndex(index, length);
if (src == null) {
throw new NullPointerException("src");
}
if (srcIndex < 0 || srcIndex > src.capacity() - length) {
throw new IndexOutOfBoundsException("srcIndex: " + srcIndex);
}
if (length != 0) {
// Same preference order as getBytes: direct copy, array copy, delegate.
if (src.hasMemoryAddress()) {
PlatformDependent.copyMemory(src.memoryAddress() + srcIndex, addr(index), length);
} else if (src.hasArray()) {
PlatformDependent.copyMemory(src.array(), src.arrayOffset() + srcIndex, addr(index), length);
} else {
src.getBytes(srcIndex, this, index, length);
}
}
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
checkIndex(index, length);
if (length != 0) {
PlatformDependent.copyMemory(src, srcIndex, addr(index), length);
}
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
ensureAccessible();
ByteBuffer tmpBuf = internalNioBuffer();
if (src == tmpBuf) {
// Copying from our own cached view: duplicate to avoid position clashes.
src = src.duplicate();
}
tmpBuf.clear().position(index).limit(index + src.remaining());
tmpBuf.put(src);
return this;
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
ensureAccessible();
if (length != 0) {
// OutputStream has no direct-memory API; stage through a heap array.
byte[] tmp = new byte[length];
PlatformDependent.copyMemory(addr(index), tmp, 0, length);
out.write(tmp);
}
return this;
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
return getBytes(index, out, length, false);
}
// Writes up to 'length' bytes from this buffer to the channel; returns the
// number of bytes the channel actually accepted.
private int getBytes(int index, GatheringByteChannel out, int length, boolean internal) throws IOException {
ensureAccessible();
if (length == 0) {
return 0;
}
ByteBuffer tmpBuf;
if (internal) {
tmpBuf = internalNioBuffer();
} else {
tmpBuf = buffer.duplicate();
}
tmpBuf.clear().position(index).limit(index + length);
return out.write(tmpBuf);
}
@Override
public int readBytes(GatheringByteChannel out, int length) throws IOException {
checkReadableBytes(length);
// Advance readerIndex only by what the channel actually wrote.
int readBytes = getBytes(readerIndex, out, length, true);
readerIndex += readBytes;
return readBytes;
}
@Override
public int setBytes(int index, InputStream in, int length) throws IOException {
checkIndex(index, length);
byte[] tmp = new byte[length];
// May return fewer than 'length' bytes, or -1 at end of stream.
int readBytes = in.read(tmp);
if (readBytes > 0) {
PlatformDependent.copyMemory(tmp, 0, addr(index), readBytes);
}
return readBytes;
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
ensureAccessible();
ByteBuffer tmpBuf = internalNioBuffer();
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf);
} catch (ClosedChannelException ignored) {
// Closed channel is reported as end-of-stream rather than an error.
return -1;
}
}
@Override
public int nioBufferCount() {
return 1;
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
return new ByteBuffer[] { nioBuffer(index, length) };
}
@Override
public ByteBuf copy(int index, int length) {
checkIndex(index, length);
ByteBuf copy = alloc().directBuffer(length, maxCapacity());
if (length != 0) {
if (copy.hasMemoryAddress()) {
PlatformDependent.copyMemory(addr(index), copy.memoryAddress(), length);
copy.setIndex(0, length);
} else {
copy.writeBytes(this, index, length);
}
}
return copy;
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
checkIndex(index, length);
return (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
}
// Returns the cached duplicate of the backing buffer, creating it on first use.
private ByteBuffer internalNioBuffer() {
ByteBuffer tmpNioBuf = this.tmpNioBuf;
if (tmpNioBuf == null) {
this.tmpNioBuf = tmpNioBuf = buffer.duplicate();
}
return tmpNioBuf;
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
checkIndex(index, length);
return ((ByteBuffer) buffer.duplicate().position(index).limit(index + length)).slice();
}
// Called when the reference count drops to zero: release the native memory
// unless the buffer was supplied (and is owned) by the caller.
@Override
protected void deallocate() {
ByteBuffer buffer = this.buffer;
if (buffer == null) {
return;
}
this.buffer = null;
if (!doNotFree) {
freeDirect(buffer);
}
}
@Override
public ByteBuf unwrap() {
return null;
}
// Translates a buffer index into an absolute native address.
long addr(int index) {
return memoryAddress + index;
}
@Override
protected SwappedByteBuf newSwappedByteBuf() {
return new UnsafeDirectSwappedByteBuf(this);
}
}
| |
/*
* Copyright 2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jsefa.test.xml;
import static org.jsefa.test.common.JSefaTestUtil.FormatType.XML;
import static org.jsefa.test.xml.ValueValidationOnSerializationTest.Mode.INVALID;
import static org.jsefa.test.xml.ValueValidationOnSerializationTest.Mode.VALID;
import static org.jsefa.test.xml.ValueValidationOnSerializationTest.Mode.VALIDATION_OFF;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
import org.jsefa.SerializationException;
import org.jsefa.common.annotation.SimpleListItem;
import org.jsefa.common.config.Configuration;
import org.jsefa.common.config.ValidationMode;
import org.jsefa.common.validator.ValidationError;
import org.jsefa.common.validator.ValidationResult;
import org.jsefa.common.validator.Validator;
import org.jsefa.test.common.AbstractTestDTO;
import org.jsefa.test.common.JSefaTestUtil;
import org.jsefa.xml.annotation.ListItem;
import org.jsefa.xml.annotation.MapKey;
import org.jsefa.xml.annotation.MapValue;
import org.jsefa.xml.annotation.XmlAttribute;
import org.jsefa.xml.annotation.XmlDataType;
import org.jsefa.xml.annotation.XmlElement;
import org.jsefa.xml.annotation.XmlElementList;
import org.jsefa.xml.annotation.XmlElementMap;
import org.jsefa.xml.annotation.XmlTextContent;
/**
* Tests for testing serialization with value validation.
*
* @author Norman Lahme-Huetig
*
*/
public class ValueValidationOnSerializationTest extends TestCase {
enum Mode {
VALIDATION_OFF, VALID, INVALID,
}
/**
* Tests validation for an attribute value.
*/
public void testAttribute() {
AttributeTestDTO dto = new AttributeTestDTO();
dto.stringField = "valid";
check(dto, VALID);
dto.stringField = "not valid";
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for a simple element value.
*/
public void testSimpleElement() {
SimpleElementTestDTO dto = new SimpleElementTestDTO();
dto.stringField = "valid";
check(dto, VALID);
dto.stringField = "not valid";
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for a simple element value with list type.
*/
public void testSimpleElementWithListType() {
SimpleElementWithListTypeTestDTO dto = new SimpleElementWithListTypeTestDTO();
dto.stringList = new ArrayList<String>();
dto.stringList.add("valid");
check(dto, VALID);
dto.stringList.add("not valid");
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for a text content of an element.
*/
public void testTextContent() {
TextContentTestDTO dto = new TextContentTestDTO();
dto.stringField = "valid";
check(dto, VALID);
dto.stringField = "not valid";
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an explicit simple list item value.
*/
public void testExplicitSimpleListItem() {
ExplicitSimpleListItemTestDTO dto = new ExplicitSimpleListItemTestDTO();
dto.stringList = new ArrayList<String>();
dto.stringList.add("valid");
check(dto, VALID);
dto.stringList.add("not valid");
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an explicit simple list item value.
*/
public void testImplicitSimpleListItem() {
ImplicitSimpleListItemTestDTO dto = new ImplicitSimpleListItemTestDTO();
dto.stringList = new ArrayList<String>();
dto.stringList.add("valid");
check(dto, VALID);
dto.stringList.add("not valid");
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an explicit simple map value.
*/
public void testExplicitSimpleMapValue() {
ExplicitSimpleMapTestDTO dto = new ExplicitSimpleMapTestDTO();
dto.map = new HashMap<Integer, String>();
dto.map.put(3, "valid");
check(dto, VALID);
dto.map.put(3, "not valid");
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an explicit simple map value.
*/
public void testImplicitSimpleMapValue() {
ImplicitSimpleMapTestDTO dto = new ImplicitSimpleMapTestDTO();
dto.map = new HashMap<Integer, String>();
dto.map.put(3, "valid");
check(dto, VALID);
dto.map.put(3, "not valid");
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for a complex element value.
*/
public void testComplexElement() {
ComplexElementTestDTO dto = new ComplexElementTestDTO();
dto.complexField = new ComplexElementDTO();
dto.complexField.stringField1 = "valid";
check(dto, VALID);
dto.complexField.stringField1 = null;
dto.complexField.stringField2 = "valid";
check(dto, VALID);
dto.complexField.stringField2 = null;
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for a top level complex element value.
*/
public void testTopLevelComplexElement() {
ComplexElementDTO dto = new ComplexElementDTO();
dto.stringField1 = "valid";
check(dto, VALID);
dto.stringField1 = null;
dto.stringField2 = "valid";
check(dto, VALID);
dto.stringField2 = null;
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an implicit list of complex elements.
*/
public void testImplicitComplexListItem() {
ImplicitComplexListItemTestDTO dto = new ImplicitComplexListItemTestDTO();
dto.listField = new ArrayList<ComplexElementDTO>();
ComplexElementDTO itemDTO = new ComplexElementDTO();
itemDTO.stringField1 = "valid";
dto.listField.add(itemDTO);
check(dto, VALID);
itemDTO.stringField1 = null;
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an explicit list of complex elements.
*/
public void testExplicitComplexListItem() {
ExplicitComplexListItemTestDTO dto = new ExplicitComplexListItemTestDTO();
dto.listField = new ArrayList<ComplexElementDTO>();
ComplexElementDTO itemDTO = new ComplexElementDTO();
itemDTO.stringField1 = "valid";
dto.listField.add(itemDTO);
check(dto, VALID);
itemDTO.stringField1 = null;
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an implicit map of complex elements..
*/
public void testImplicitComplexMap() {
ImplicitComplexMapTestDTO dto = new ImplicitComplexMapTestDTO();
dto.mapField = new HashMap<String, ComplexElementDTO>();
ComplexElementDTO itemDTO = new ComplexElementDTO();
itemDTO.stringField1 = "valid";
dto.mapField.put("valid", itemDTO);
check(dto, VALID);
itemDTO.stringField1 = null;
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
/**
* Tests validation for an explicit map of complex elements.
*/
public void testExplicitComplexMap() {
ExplicitComplexMapTestDTO dto = new ExplicitComplexMapTestDTO();
dto.mapField = new HashMap<String, ComplexElementDTO>();
ComplexElementDTO itemDTO = new ComplexElementDTO();
itemDTO.stringField1 = "valid";
dto.mapField.put("valid", itemDTO);
check(dto, VALID);
itemDTO.stringField1 = null;
check(dto, INVALID);
check(dto, VALIDATION_OFF);
}
@SuppressWarnings("unchecked")
private void check(Object value, Mode mode) {
Configuration config = JSefaTestUtil.createConfiguration(XML);
if (VALIDATION_OFF == mode) {
config.setValidationMode(ValidationMode.NONE);
} else {
config.getValidatorProvider().registerValidatorType(ComplexElementDTO.class,
ComplexElementDTOValidator.class);
}
try {
JSefaTestUtil.serialize(XML, config, value);
assertFalse(mode == INVALID);
} catch (SerializationException e) {
assertTrue(mode == INVALID);
}
}
@XmlDataType(defaultElementName = "a")
static final class AttributeTestDTO extends AbstractTestDTO {
@XmlAttribute(name = "b", constraints = "pattern=\\w*")
String stringField;
}
@XmlDataType(defaultElementName = "a")
static final class SimpleElementTestDTO extends AbstractTestDTO {
@XmlElement(name = "b", constraints = "pattern=\\w*")
String stringField;
}
@XmlDataType(defaultElementName = "a")
static final class SimpleElementWithListTypeTestDTO extends AbstractTestDTO {
@XmlElement(name = "b", listItem = @SimpleListItem(constraints = "pattern=\\w*"))
List<String> stringList;
}
@XmlDataType(defaultElementName = "a")
static final class TextContentTestDTO extends AbstractTestDTO {
@XmlTextContent(constraints = "pattern=\\w*")
String stringField;
}
@XmlDataType(defaultElementName = "a")
static final class ExplicitSimpleListItemTestDTO extends AbstractTestDTO {
@XmlElementList(name = "b", implicit = false, items = @ListItem(name = "i", constraints = "pattern=\\w*"))
List<String> stringList;
}
@XmlDataType(defaultElementName = "a")
static final class ImplicitSimpleListItemTestDTO extends AbstractTestDTO {
@XmlElementList(implicit = true, items = @ListItem(name = "i", constraints = "pattern=\\w*"))
List<String> stringList;
}
@XmlDataType(defaultElementName = "a")
static final class ExplicitSimpleMapTestDTO extends AbstractTestDTO {
@XmlElementMap(name = "b", implicit = false, key = @MapKey(name = "k", constraints = "min=3"),
values = @MapValue(name = "v", constraints = "pattern=\\w*"))
Map<Integer, String> map;
}
@XmlDataType(defaultElementName = "a")
static final class ImplicitSimpleMapTestDTO extends AbstractTestDTO {
@XmlElementMap(implicit = true, key = @MapKey(name = "k", constraints = "min=3"),
values = @MapValue(name = "v", constraints = "pattern=\\w*"))
Map<Integer, String> map;
}
@XmlDataType(defaultElementName = "a")
static final class ComplexElementTestDTO extends AbstractTestDTO {
@XmlElement(name = "b")
ComplexElementDTO complexField;
}
@XmlDataType(defaultElementName = "a")
static final class ComplexElementDTO extends AbstractTestDTO {
@XmlElement(name = "f1")
String stringField1;
@XmlElement(name = "f2")
String stringField2;
}
@XmlDataType(defaultElementName = "a")
static final class ExplicitComplexListItemTestDTO extends AbstractTestDTO {
@XmlElementList(name = "b", implicit = false, items = @ListItem(name = "i"))
List<ComplexElementDTO> listField;
}
@XmlDataType(defaultElementName = "a")
static final class ImplicitComplexListItemTestDTO extends AbstractTestDTO {
@XmlElementList(implicit = true, items = @ListItem(name = "i"))
List<ComplexElementDTO> listField;
}
// Fixture: String->complex-element map under explicit wrapper "b"; keys map to
// element "k", values to element "v" (no constraints on either).
@XmlDataType(defaultElementName = "a")
static final class ExplicitComplexMapTestDTO extends AbstractTestDTO {
@XmlElementMap(name = "b", implicit = false, key = @MapKey(name = "k"), values = @MapValue(name = "v"))
Map<String, ComplexElementDTO> mapField;
}
// Fixture: identical mapping but implicit = true (no wrapper element name
// supplied).
@XmlDataType(defaultElementName = "a")
static final class ImplicitComplexMapTestDTO extends AbstractTestDTO {
@XmlElementMap(implicit = true, key = @MapKey(name = "k"), values = @MapValue(name = "v"))
Map<String, ComplexElementDTO> mapField;
}
/**
 * Validator fixture for {@link ComplexElementDTO}: reports a validation error
 * when both of the DTO's string fields are absent, and a valid result otherwise.
 */
private static final class ComplexElementDTOValidator implements Validator {
    /** Factory method through which the framework obtains an instance. */
    static ComplexElementDTOValidator create() {
        return new ComplexElementDTOValidator();
    }
    /**
     * Validates the given object, which is expected to be a {@link ComplexElementDTO}.
     *
     * @param value the object under validation
     * @return {@code ValidationResult.VALID} when at least one field is set,
     *         otherwise a result carrying a single error
     */
    public ValidationResult validate(Object value) {
        final ComplexElementDTO candidate = (ComplexElementDTO) value;
        final boolean bothMissing =
                candidate.stringField1 == null && candidate.stringField2 == null;
        if (!bothMissing) {
            return ValidationResult.VALID;
        }
        return ValidationResult.create(ValidationError.create("foo", "Both fields are null"));
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.transcribe.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/transcribe-2017-10-26/ListMedicalTranscriptionJobs"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListMedicalTranscriptionJobsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Optional filter: when set, only medical transcription jobs with this status are
     * returned. Jobs are ordered by creation date, newest first; when unset, Amazon
     * Transcribe Medical returns jobs of every status.
     */
    private String status;

    /**
     * Optional filter: when set, only jobs whose name contains this string are returned.
     */
    private String jobNameContains;

    /**
     * Pagination token: if the previous <code>ListMedicalTranscriptionJobs</code>
     * response was truncated, pass its <code>NextToken</code> here to fetch the next
     * set of jobs.
     */
    private String nextToken;

    /**
     * Maximum number of jobs to return per page of results. Fewer results may be
     * returned; the service default of 5 is used when unset.
     */
    private Integer maxResults;

    /**
     * Sets the status filter.
     *
     * @param status
     *        the job status to filter on; jobs are ordered by creation date, newest
     *        first. Pass null to receive jobs of every status.
     * @see TranscriptionJobStatus
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Returns the status filter.
     *
     * @return the job status filter, or null when no status filter is set
     * @see TranscriptionJobStatus
     */
    public String getStatus() {
        return status;
    }

    /**
     * Fluent variant of {@link #setStatus(String)}.
     *
     * @param status
     *        the job status to filter on
     * @return this request, so that method calls can be chained together
     * @see TranscriptionJobStatus
     */
    public ListMedicalTranscriptionJobsRequest withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setStatus(String)}.
     *
     * @param status
     *        the job status to filter on; must not be null
     * @return this request, so that method calls can be chained together
     * @see TranscriptionJobStatus
     */
    public ListMedicalTranscriptionJobsRequest withStatus(TranscriptionJobStatus status) {
        this.status = status.toString();
        return this;
    }

    /**
     * Sets the job-name substring filter.
     *
     * @param jobNameContains
     *        when set, only jobs whose name contains this string are returned
     */
    public void setJobNameContains(String jobNameContains) {
        this.jobNameContains = jobNameContains;
    }

    /**
     * Returns the job-name substring filter.
     *
     * @return the substring jobs' names must contain, or null when unset
     */
    public String getJobNameContains() {
        return jobNameContains;
    }

    /**
     * Fluent variant of {@link #setJobNameContains(String)}.
     *
     * @param jobNameContains
     *        when set, only jobs whose name contains this string are returned
     * @return this request, so that method calls can be chained together
     */
    public ListMedicalTranscriptionJobsRequest withJobNameContains(String jobNameContains) {
        setJobNameContains(jobNameContains);
        return this;
    }

    /**
     * Sets the pagination token.
     *
     * @param nextToken
     *        the <code>NextToken</code> value from a previous truncated
     *        <code>ListMedicalTranscriptionJobs</code> response
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return the <code>NextToken</code> value from a previous truncated response,
     *         or null when starting a fresh listing
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        the <code>NextToken</code> value from a previous truncated response
     * @return this request, so that method calls can be chained together
     */
    public ListMedicalTranscriptionJobsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Sets the page size.
     *
     * @param maxResults
     *        the maximum number of jobs per page of results; the service default of 5
     *        is used when unset
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * Returns the page size.
     *
     * @return the maximum number of jobs per page of results, or null when the
     *         service default of 5 applies
     */
    public Integer getMaxResults() {
        return maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(Integer)}.
     *
     * @param maxResults
     *        the maximum number of jobs per page of results
     * @return this request, so that method calls can be chained together
     */
    public ListMedicalTranscriptionJobsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and
     * debugging. Sensitive data will be redacted from this string using a placeholder
     * value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder text = new StringBuilder("{");
        if (getStatus() != null) {
            text.append("Status: ").append(getStatus()).append(",");
        }
        if (getJobNameContains() != null) {
            text.append("JobNameContains: ").append(getJobNameContains()).append(",");
        }
        if (getNextToken() != null) {
            text.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getMaxResults() != null) {
            text.append("MaxResults: ").append(getMaxResults());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListMedicalTranscriptionJobsRequest)) {
            return false;
        }
        final ListMedicalTranscriptionJobsRequest that = (ListMedicalTranscriptionJobsRequest) obj;
        return sameField(getStatus(), that.getStatus())
                && sameField(getJobNameContains(), that.getJobNameContains())
                && sameField(getNextToken(), that.getNextToken())
                && sameField(getMaxResults(), that.getMaxResults());
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean sameField(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Same accumulation as the classic 31-based hash over the four fields,
        // with null fields contributing 0.
        int result = 1;
        for (Object field : new Object[] {getStatus(), getJobNameContains(), getNextToken(), getMaxResults()}) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public ListMedicalTranscriptionJobsRequest clone() {
        return (ListMedicalTranscriptionJobsRequest) super.clone();
    }
}
| |
package org.reclipse.behavior.inference.automaton.symbols;
import java.util.Iterator;
import junit.framework.TestCase;
import org.reclipse.behavior.inference.StructuralAnnotation;
import org.reclipse.behavior.inference.automaton.Token;
import org.reclipse.behavior.inference.automaton.symbols.MethodCallObject;
import org.reclipse.behavior.inference.automaton.symbols.ProhibitedCallerSymbol;
import org.reclipse.tracer.model.tracegraph.TGMethodCall;
import org.reclipse.tracer.model.tracegraph.TGObject;
import org.reclipse.tracer.model.tracegraph.TGType;
/**
* @author lowende
* @author Last editor: $Author: mcp $
* @version $Revision: 4281 $ $Date: 2010-03-10 11:47:02 +0100 (Mi, 10 Mrz 2010) $
*/
public class ProhibitedCallerSymbolTest extends TestCase
{
   // Fixture objects shared by all tests; created in setUp(), released in tearDown().
   private StructuralAnnotation annotation;
   private Token token;
   private TGMethodCall methodCall;
   private TGObject caller1_1;
   private TGObject caller1_2;
   private TGObject caller1_3;
   private TGObject caller2;
   private TGObject caller3;
   private TGObject callee;
   private ProhibitedCallerSymbol symbol;

   /**
    * Builds the shared fixture: an annotation with nodes m/A1/A2/B, three caller
    * objects of type CallerType1 (one typed with its subtype), one each of
    * CallerType2 and CallerType3, a callee of CalleeType, and a
    * ProhibitedCallerSymbol permitting callers a1_1/a1_2 (A1) and a2 (A2).
    *
    * @see junit.framework.TestCase#setUp()
    */
   @Override
   protected void setUp() throws Exception
   {
      super.setUp();
      // create the annotation and the token
      this.annotation = new StructuralAnnotation();
      this.annotation.addToNodes("m", "method()");
      this.annotation.addToNodes("A1", "org.reclipse.test.CallerType1");
      this.annotation.addToNodes("A2", "org.reclipse.test.CallerType2");
      this.annotation.addToNodes("B", "org.reclipse.test.CalleeType");
      this.token = new Token(this.annotation);
      // create method call
      this.methodCall = new TGMethodCall();
      this.methodCall.setName("method");
      // create three caller objects
      TGType callerType1 = new TGType();
      callerType1.setName("org.reclipse.test.CallerType1");
      this.caller1_1 = new TGObject();
      this.caller1_1.setId("caller1_1");
      this.caller1_1.setType(callerType1);
      TGType callerSubType1 = new TGType();
      callerSubType1.setName("org.reclipse.test.CallerSubType1");
      callerType1.addToSubTypes(callerSubType1);
      this.caller1_2 = new TGObject();
      this.caller1_2.setId("caller1_2");
      this.caller1_2.setType(callerSubType1);
      this.caller1_3 = new TGObject();
      this.caller1_3.setId("caller1_3");
      this.caller1_3.setType(callerType1);
      TGType callerType2 = new TGType();
      callerType2.setName("org.reclipse.test.CallerType2");
      this.caller2 = new TGObject();
      this.caller2.setId("caller2");
      this.caller2.setType(callerType2);
      TGType callerType3 = new TGType();
      callerType3.setName("org.reclipse.test.CallerType3");
      this.caller3 = new TGObject();
      this.caller3.setId("caller3");
      this.caller3.setType(callerType3);
      // create callee object
      TGType calleeType = new TGType();
      calleeType.setName("org.reclipse.test.CalleeType");
      this.callee = new TGObject();
      this.callee.setId("callee");
      this.callee.setType(calleeType);
      // set callee, it will be the same for all tests
      this.methodCall.setCallee(callee);
      // create the symbol
      this.symbol = new ProhibitedCallerSymbol();
      this.symbol.setMethodName("m");
      MethodCallObject firstCallObject = new MethodCallObject("a1_1", "A1");
      this.symbol.addToPermittedCallers(firstCallObject);
      MethodCallObject secondMethodCallObject = new MethodCallObject("a1_2", "A1");
      this.symbol.addToPermittedCallers(secondMethodCallObject);
      MethodCallObject thirdMethodCallObject = new MethodCallObject("a2", "A2");
      this.symbol.addToPermittedCallers(thirdMethodCallObject);
      MethodCallObject calleeObject = new MethodCallObject("b", "B");
      this.symbol.setCallee(calleeObject);
   }

   /**
    * Releases every fixture reference created in {@link #setUp()}.
    *
    * @see junit.framework.TestCase#tearDown()
    */
   @Override
   protected void tearDown() throws Exception
   {
      super.tearDown();
      this.annotation = null;
      this.token = null;
      this.methodCall = null;
      this.caller1_1 = null;
      // Fix: caller1_2 and caller1_3 are created in setUp() but were never
      // cleared here, unlike every other fixture field.
      this.caller1_2 = null;
      this.caller1_3 = null;
      this.caller2 = null;
      this.caller3 = null;
      this.callee = null;
      this.symbol = null;
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type does not fit the types of 'a1_1', 'a1_2', and 'a2', 'b' unbound,
    * caller3 calls the method
    *
    * Result: symbol rejects, possible bindings: b bound to callee
    */
   public void testAccept1()
   {
      this.methodCall.setCaller(this.caller3);
      assertFalse(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertTrue(iter.hasNext());
      assertEquals(this.callee, iter.next());
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type does not fit the types of 'a1_1', 'a1_2', and 'a2', 'b' bound to
    * callee, caller3 calls the method
    *
    * Result: symbol accepts, possible bindings: none
    */
   public void testAccept2()
   {
      this.token.addToBindings("b", this.callee);
      this.methodCall.setCaller(this.caller3);
      assertTrue(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type fits the type of 'a2', 'a2' unbound, caller2 calls the method
    *
    * Result: symbol rejects, possible bindings: none
    */
   public void testAccept3()
   {
      this.token.addToBindings("b", this.callee);
      this.methodCall.setCaller(this.caller2);
      assertFalse(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type fits the type of 'a2', 'a2' bound to caller2, caller2 calls the
    * method
    *
    * Result: symbol rejects, possible bindings: none
    */
   public void testAccept4()
   {
      this.token.addToBindings("a2", this.caller2);
      this.token.addToBindings("b", this.callee);
      this.methodCall.setCaller(this.caller2);
      assertFalse(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type fits the type of 'a1_1' and 'a1_2', 'a1_1' bound to caller1_1, 'a1_2'
    * bound to caller1_2, b unbound, caller1_3 calls the method
    *
    * Result: symbol rejects, possible bindings: b bound to callee
    */
   public void testAccept5()
   {
      this.token.addToBindings("a1_1", this.caller1_1);
      this.token.addToBindings("a1_2", this.caller1_2);
      this.methodCall.setCaller(this.caller1_3);
      assertFalse(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertTrue(iter.hasNext());
      assertEquals(this.callee, iter.next());
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type fits the type of 'a1_1' and 'a1_2', 'a1_1' bound to caller1_1, 'a1_2'
    * bound to caller1_2, b bound to callee, caller1_3 calls the method
    *
    * Result: symbol accepts, possible bindings: none
    */
   public void testAccept6()
   {
      this.token.addToBindings("a1_1", this.caller1_1);
      this.token.addToBindings("a1_2", this.caller1_2);
      this.token.addToBindings("b", this.callee);
      this.methodCall.setCaller(this.caller1_3);
      assertTrue(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type fits the type of 'a1_1' and 'a1_2', 'a1_1' bound to caller1_1, 'a1_2'
    * unbound, b bound to callee, caller1_2 calls the method
    *
    * Result: symbol rejects, possible bindings: none
    */
   public void testAccept7()
   {
      this.token.addToBindings("a1_1", this.caller1_1);
      this.token.addToBindings("b", this.callee);
      this.methodCall.setCaller(this.caller1_2);
      assertFalse(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertFalse(iter.hasNext());
   }

   /**
    * Test method for
    * 'org.reclipse.patterns.behavior.automaton.symbols.ProhibitedCallersSymbol.accept(Token,
    * TGMethodCall)'
    *
    * Setup: The caller's type fits the type of 'a1_1' and 'a1_2', 'a1_1' bound to caller1_1, 'a1_2'
    * bound to caller1_2, b bound to callee, caller1_2 calls the method
    *
    * Result: symbol rejects, possible bindings: none
    */
   public void testAccept8()
   {
      this.token.addToBindings("a1_1", this.caller1_1);
      this.token.addToBindings("a1_2", this.caller1_2);
      this.token.addToBindings("b", this.callee);
      this.methodCall.setCaller(this.caller1_2);
      assertFalse(this.symbol.accept(this.methodCall, this.token));
      Iterator<?> iter = this.token.iteratorOfPossibleBindings("b");
      assertFalse(iter.hasNext());
   }
}
| |
// ========================================================================
// Copyright (c) 2006-2010 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import javax.servlet.MultipartConfigElement;
import javax.servlet.ServletException;
import javax.servlet.http.Part;
/**
* MultiPartInputStream
*
* Handle a MultiPart Mime input stream, breaking it up on the boundary into files and strings.
*/
public class MultiPartInputStream
{
// Fallback multipart configuration writing to java.io.tmpdir.
public static final MultipartConfigElement __DEFAULT_MULTIPART_CONFIG = new MultipartConfigElement(System.getProperty("java.io.tmpdir"));
// The (buffered) request body stream being parsed.
protected InputStream _in;
// Size limits and file-spool location configuration.
protected MultipartConfigElement _config;
// The request's Content-Type header; must start with "multipart/form-data" for parsing to occur.
protected String _contentType;
// Parsed parts keyed by field name; null until parse() has run.
protected MultiMap<String> _parts;
// Directory where part content is spooled to disk (resolved in parse()).
protected File _tmpDir;
// The servlet context temp dir (javax.servlet.context.tempdir), used as the default location.
protected File _contextTmpDir;
/**
 * A single part of the multipart request, exposed through the
 * {@link javax.servlet.http.Part} interface.
 *
 * Content is buffered in memory (a ByteArrayOutputStream) until either the
 * content-disposition carries a filename or the configured fileSizeThreshold
 * is exceeded, at which point the buffered bytes are spooled to a temp file
 * and all further writes go to that file.
 */
public class MultiPart implements Part
{
// Form field name from the content-disposition.
protected String _name;
// Filename from the content-disposition, or null for a non-file field.
protected String _filename;
// Backing temp file, or null while content is still only in memory.
protected File _file;
// Current sink: ByteArrayOutputStream while in memory, BufferedOutputStream once spooled.
protected OutputStream _out;
protected String _contentType;
// Raw part headers (lower-cased keys), set by the parser.
protected MultiMap<String> _headers;
// Total bytes written to this part; checked against maxFileSize/fileSizeThreshold.
protected long _size = 0;
public MultiPart (String name, String filename)
throws IOException
{
_name = name;
_filename = filename;
}
protected void setContentType (String contentType)
{
_contentType = contentType;
}
// Chooses the initial sink: a file when a filename is present, otherwise an in-memory buffer.
protected void open()
throws FileNotFoundException, IOException
{
//We will either be writing to a file, if it has a filename on the content-disposition
//and otherwise a byte-array-input-stream, OR if we exceed the getFileSizeThreshold, we
//will need to change to write to a file.
if (_filename != null && _filename.trim().length() > 0)
{
createFile();
}
else
{
//Write to a buffer in memory until we discover we've exceed the
//MultipartConfig fileSizeThreshold
_out = new ByteArrayOutputStream();
}
}
protected void close()
throws IOException
{
_out.close();
}
// Writes one byte, enforcing maxFileSize and spooling to disk once fileSizeThreshold is crossed.
protected void write (int b)
throws IOException, ServletException
{
if (MultiPartInputStream.this._config.getMaxFileSize() > 0 && _size + 1 > MultiPartInputStream.this._config.getMaxFileSize())
throw new ServletException ("Multipart Mime part "+_name+" exceeds max filesize");
if (MultiPartInputStream.this._config.getFileSizeThreshold() > 0 && _size + 1 > MultiPartInputStream.this._config.getFileSizeThreshold() && _file==null)
createFile();
_out.write(b);
_size ++;
}
// Bulk variant of write(int) with the same limit/threshold checks.
protected void write (byte[] bytes, int offset, int length)
throws IOException, ServletException
{
if (MultiPartInputStream.this._config.getMaxFileSize() > 0 && _size + length > MultiPartInputStream.this._config.getMaxFileSize())
throw new ServletException ("Multipart Mime part "+_name+" exceeds max filesize");
if (MultiPartInputStream.this._config.getFileSizeThreshold() > 0 && _size + length > MultiPartInputStream.this._config.getFileSizeThreshold() && _file==null)
createFile();
_out.write(bytes, offset, length);
_size += length;
}
// Creates the backing temp file, copies any bytes already buffered in memory
// into it, and switches _out to the file stream.
protected void createFile ()
throws IOException
{
_file = File.createTempFile("MultiPart", "", MultiPartInputStream.this._tmpDir);
FileOutputStream fos = new FileOutputStream(_file);
BufferedOutputStream bos = new BufferedOutputStream(fos);
if (_size > 0 && _out != null)
{
//already written some bytes, so need to copy them into the file
_out.flush();
((ByteArrayOutputStream)_out).writeTo(bos);
_out.close();
}
_out = bos;
}
protected void setHeaders(MultiMap<String> headers)
{
_headers = headers;
}
/**
 * The part's Content-Type header value, or null if none was present.
 *
 * @see javax.servlet.http.Part#getContentType()
 */
public String getContentType()
{
return _contentType;
}
/**
 * Returns the first value of the named header.
 *
 * @see javax.servlet.http.Part#getHeader(java.lang.String)
 */
public String getHeader(String name)
{
return (String)_headers.getValue(name, 0);
}
/**
 * @see javax.servlet.http.Part#getHeaderNames()
 */
public Collection<String> getHeaderNames()
{
return _headers.keySet();
}
/**
 * Returns all values of the named header.
 *
 * @see javax.servlet.http.Part#getHeaders(java.lang.String)
 */
public Collection<String> getHeaders(String name)
{
return _headers.getValues(name);
}
/**
 * Streams the part content from wherever it currently lives: the spooled
 * file if one exists, otherwise the in-memory buffer.
 *
 * @see javax.servlet.http.Part#getInputStream()
 */
public InputStream getInputStream() throws IOException
{
if (_file != null)
{
return new BufferedInputStream (new FileInputStream(_file));
}
else
{
//part content is in a ByteArrayOutputStream
return new ByteArrayInputStream(((ByteArrayOutputStream)_out).toByteArray());
}
}
/**
 * @see javax.servlet.http.Part#getName()
 */
public String getName()
{
return _name;
}
/**
 * Number of content bytes written to this part.
 *
 * @see javax.servlet.http.Part#getSize()
 */
public long getSize()
{
return _size;
}
/**
 * Persists the part content to the given file name inside the temp dir:
 * in-memory content is written out, already-spooled content is renamed.
 *
 * @see javax.servlet.http.Part#write(java.lang.String)
 */
public void write(String fileName) throws IOException
{
if (_file == null)
{
//part data is only in the ByteArrayOutputStream and never been written to disk
_file = new File (_tmpDir, fileName);
BufferedOutputStream bos = null;
try
{
bos = new BufferedOutputStream(new FileOutputStream(_file));
((ByteArrayOutputStream)_out).writeTo(bos);
bos.flush();
}
finally
{
if (bos != null)
bos.close();
}
}
else
{
//the part data is already written to a temporary file, just rename it
// NOTE(review): renameTo's boolean result is ignored, so a failed rename
// (e.g. across filesystems) is silent — consider checking it and throwing.
_file.renameTo(new File(_tmpDir, fileName));
}
}
/**
 * Deletes the spooled temp file, if any; in-memory content needs no cleanup.
 *
 * @see javax.servlet.http.Part#delete()
 */
public void delete() throws IOException
{
if (_file != null)
_file.delete();
}
/**
 * Get the file, if any, the data has been written to.
 * @return the backing temp file, or null while content is only in memory
 */
public File getFile ()
{
return _file;
}
/**
 * Get the filename from the content-disposition.
 * @return null or the filename
 */
public String getContentDispositionFilename ()
{
return _filename;
}
}
/**
* @param in Request input stream
* @param contentType Content-Type header
* @param config MultipartConfigElement
* @param contextTmpDir javax.servlet.context.tempdir
*/
/**
 * Creates a parser for a multipart/form-data request body.
 *
 * @param in Request input stream
 * @param contentType Content-Type header
 * @param config MultipartConfigElement; a default writing to the context temp
 *               dir is substituted when null
 * @param contextTmpDir javax.servlet.context.tempdir; java.io.tmpdir is
 *                      substituted when null
 */
public MultiPartInputStream (InputStream in, String contentType, MultipartConfigElement config, File contextTmpDir)
{
    _in = new BufferedInputStream(in);
    _contentType = contentType;
    _contextTmpDir = (contextTmpDir != null)
            ? contextTmpDir
            : new File(System.getProperty("java.io.tmpdir"));
    _config = (config != null)
            ? config
            : new MultipartConfigElement(_contextTmpDir.getAbsolutePath());
}
/**
 * Parses the request (if not already parsed) and returns every part, in a
 * freshly allocated list.
 *
 * @return all parts of the multipart request
 */
public Collection<Part> getParts()
    throws IOException, ServletException
{
    parse();
    final List<Part> collected = new ArrayList<Part>();
    // _parts maps each field name to either a single part or a lazy list of parts.
    for (Object entry : _parts.values())
    {
        List<Part> partsForName = LazyList.getList(entry, false);
        collected.addAll(partsForName);
    }
    return collected;
}
/**
 * Parses the request (if not already parsed) and returns the first part with
 * the given field name.
 *
 * @param name the form field name
 * @return the first matching part, as stored in the part multimap
 */
public Part getPart(String name)
    throws IOException, ServletException
{
    parse();
    final Object first = _parts.getValue(name, 0);
    return (Part) first;
}
protected void parse ()
throws IOException, ServletException
{
//have we already parsed the input?
if (_parts != null)
return;
//initialize
long total = 0; //keep running total of size of bytes read from input and throw an exception if exceeds MultipartConfigElement._maxRequestSize
_parts = new MultiMap<String>();
//if its not a multipart request, don't parse it
if (_contentType == null || !_contentType.startsWith("multipart/form-data"))
return;
//sort out the location to which to write the files
if (_config.getLocation() == null)
_tmpDir = _contextTmpDir;
else if ("".equals(_config.getLocation()))
_tmpDir = _contextTmpDir;
else
{
File f = new File (_config.getLocation());
if (f.isAbsolute())
_tmpDir = f;
else
_tmpDir = new File (_contextTmpDir, _config.getLocation());
}
if (!_tmpDir.exists())
_tmpDir.mkdirs();
String boundary="--"+QuotedStringTokenizer.unquote(value(_contentType.substring(_contentType.indexOf("boundary="))).trim());
byte[] byteBoundary=(boundary+"--").getBytes(StringUtil.__ISO_8859_1);
// Get first boundary
byte[] bytes=TypeUtil.readLine(_in);
String line=bytes==null?null:new String(bytes,"UTF-8");
if(line==null || !line.equals(boundary))
{
throw new IOException("Missing initial multi part boundary");
}
// Read each part
boolean lastPart=false;
String contentDisposition=null;
String contentType=null;
String contentTransferEncoding=null;
outer:while(!lastPart)
{
MultiMap<String> headers = new MultiMap<String>();
while(true)
{
bytes=TypeUtil.readLine(_in);
if(bytes==null)
break outer;
// If blank line, end of part headers
if(bytes.length==0)
break;
total += bytes.length;
if (_config.getMaxRequestSize() > 0 && total > _config.getMaxRequestSize())
throw new ServletException ("Request exceeds maxRequestSize ("+_config.getMaxRequestSize()+")");
line=new String(bytes,"UTF-8");
//get content-disposition and content-type
int c=line.indexOf(':',0);
if(c>0)
{
String key=line.substring(0,c).trim().toLowerCase();
String value=line.substring(c+1,line.length()).trim();
headers.put(key, value);
if (key.equalsIgnoreCase("content-disposition"))
contentDisposition=value;
if (key.equalsIgnoreCase("content-type"))
contentType = value;
if(key.equals("content-transfer-encoding"))
contentTransferEncoding=value;
}
}
// Extract content-disposition
boolean form_data=false;
if(contentDisposition==null)
{
throw new IOException("Missing content-disposition");
}
QuotedStringTokenizer tok=new QuotedStringTokenizer(contentDisposition,";");
String name=null;
String filename=null;
while(tok.hasMoreTokens())
{
String t=tok.nextToken().trim();
String tl=t.toLowerCase();
if(t.startsWith("form-data"))
form_data=true;
else if(tl.startsWith("name="))
name=value(t);
else if(tl.startsWith("filename="))
filename=value(t);
}
// Check disposition
if(!form_data)
{
continue;
}
//It is valid for reset and submit buttons to have an empty name.
//If no name is supplied, the browser skips sending the info for that field.
//However, if you supply the empty string as the name, the browser sends the
//field, with name as the empty string. So, only continue this loop if we
//have not yet seen a name field.
if(name==null)
{
continue;
}
if ("base64".equalsIgnoreCase(contentTransferEncoding))
{
_in = new Base64InputStream(_in);
}
else if ("quoted-printable".equalsIgnoreCase(contentTransferEncoding))
{
_in = new FilterInputStream(_in)
{
@Override
public int read() throws IOException
{
int c = in.read();
if (c >= 0 && c == '=')
{
int hi = in.read();
int lo = in.read();
if (hi < 0 || lo < 0)
{
throw new IOException("Unexpected end to quoted-printable byte");
}
char[] chars = new char[] { (char)hi, (char)lo };
c = Integer.parseInt(new String(chars),16);
}
return c;
}
};
}
//Have a new Part
MultiPart part = new MultiPart(name, filename);
part.setHeaders(headers);
part.setContentType(contentType);
_parts.add(name, part);
part.open();
try
{
int state=-2;
int c;
boolean cr=false;
boolean lf=false;
// loop for all lines
while(true)
{
int b=0;
while((c=(state!=-2)?state:_in.read())!=-1)
{
total ++;
if (_config.getMaxRequestSize() > 0 && total > _config.getMaxRequestSize())
throw new ServletException("Request exceeds maxRequestSize ("+_config.getMaxRequestSize()+")");
state=-2;
// look for CR and/or LF
if(c==13||c==10)
{
if(c==13)
state=_in.read();
break;
}
// look for boundary
if(b>=0&&b<byteBoundary.length&&c==byteBoundary[b])
b++;
else
{
// this is not a boundary
if(cr)
part.write(13);
if(lf)
part.write(10);
cr=lf=false;
if(b>0)
part.write(byteBoundary,0,b);
b=-1;
part.write(c);
}
}
// check partial boundary
if((b>0&&b<byteBoundary.length-2)||(b==byteBoundary.length-1))
{
if(cr)
part.write(13);
if(lf)
part.write(10);
cr=lf=false;
part.write(byteBoundary,0,b);
b=-1;
}
// boundary match
if(b>0||c==-1)
{
if(b==byteBoundary.length)
lastPart=true;
if(state==10)
state=-2;
break;
}
// handle CR LF
if(cr)
part.write(13);
if(lf)
part.write(10);
cr=(c==13);
lf=(c==10||state==10);
if(state==10)
state=-2;
}
}
finally
{
part.close();
}
}
}
/* ------------------------------------------------------------ */
/**
 * Extracts the value portion from a {@code name=value} token of a
 * Content-Disposition (or similar) header.
 * <p>Strips anything after a ';', unwraps a double-quoted value, or — for an
 * unquoted value — cuts it at the first space.
 * @param nameEqualsValue a token of the form {@code name=value}
 * @return the value portion, with surrounding quotes removed if present
 */
private String value(String nameEqualsValue)
{
    String value=nameEqualsValue.substring(nameEqualsValue.indexOf('=')+1).trim();
    int i=value.indexOf(';');
    if(i>0)
        value=value.substring(0,i);
    if(value.startsWith("\""))
    {
        // Unwrap a quoted value. Tolerate a missing closing quote on malformed
        // input instead of throwing StringIndexOutOfBoundsException
        // (indexOf returns -1, and substring(1,-1) would blow up).
        int close=value.indexOf('"',1);
        value=(close>0)?value.substring(1,close):value.substring(1);
    }
    else
    {
        i=value.indexOf(' ');
        if(i>0)
            value=value.substring(0,i);
    }
    return value;
}
/**
 * InputStream that base64-decodes its underlying stream line by line.
 * Lines starting with "--" (multipart boundaries) and blank lines are passed
 * through verbatim (with CRLF re-appended) rather than decoded, so boundary
 * scanning in the caller keeps working.
 */
private static class Base64InputStream extends InputStream
{
    BufferedReader _in;   // line-oriented view of the wrapped stream
    String _line;         // most recently read source line
    byte[] _buffer;       // decoded bytes of the current line, plus CRLF where applicable
    int _pos;             // next index to serve out of _buffer

    public Base64InputStream (InputStream in)
    {
        _in = new BufferedReader(new InputStreamReader(in));
    }

    @Override
    public int read() throws IOException
    {
        // Refill the buffer when exhausted (or on first call).
        if (_buffer==null || _pos>= _buffer.length)
        {
            _line = _in.readLine();
            if (_line==null)
                return -1;  // true end of stream
            if (_line.startsWith("--"))
                _buffer=(_line+"\r\n").getBytes();  // boundary line: keep as-is
            else if (_line.length()==0)
                _buffer="\r\n".getBytes();          // blank line: preserve CRLF
            else
                _buffer=B64Code.decode(_line);      // payload line: decode
            _pos=0;
        }
        // Mask to 0..255: returning the raw (signed) byte would yield negative
        // values for bytes >= 0x80, which callers would mistake for EOF (-1)
        // and which violates the InputStream.read() contract.
        return (_buffer[_pos++]) & 0xff;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets.command;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.apache.geode.CancelCriterion;
import org.apache.geode.cache.operations.GetOperationContext;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.ObjectPartList;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.security.NotAuthorizedException;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
import org.apache.geode.test.junit.categories.UnitTest;
@Category(UnitTest.class)
public class GetAllTest {

  private static final String REGION_NAME = "region1";
  private static final Object[] KEYS = new Object[] {"key1", "key2", "key3"};

  @Mock
  private SecurityService securityService;
  @Mock
  private Message message;
  @Mock
  private ServerConnection serverConnection;
  @Mock
  private AuthorizeRequest authzRequest;
  @Mock
  private InternalCache cache;
  @Mock
  private Part regionNamePart;
  @Mock
  private Part keyPart;
  @Mock
  private ChunkedMessage chunkedResponseMessage;
  @InjectMocks
  private GetAll getAll;

  @Before
  public void setUp() throws Exception {
    getAll = new GetAll();
    MockitoAnnotations.initMocks(this);

    // Minimal happy-path wiring: a cache with one region, a message carrying
    // the region name and key array, and a connection that hands back the
    // mocked chunked-response message.
    when(authzRequest.getAuthorize(any(), any(), any()))
        .thenReturn(mock(GetOperationContext.class));

    when(cache.getRegion(isA(String.class))).thenReturn(mock(LocalRegion.class));
    when(cache.getCancelCriterion()).thenReturn(mock(CancelCriterion.class));

    when(keyPart.getObject()).thenReturn(KEYS);
    when(regionNamePart.getString()).thenReturn(REGION_NAME);
    when(message.getPart(eq(0))).thenReturn(regionNamePart);
    when(message.getPart(eq(1))).thenReturn(keyPart);

    when(serverConnection.getCache()).thenReturn(cache);
    when(serverConnection.getCacheServerStats()).thenReturn(mock(CacheServerStats.class));
    when(serverConnection.getAuthzRequest()).thenReturn(authzRequest);
    when(serverConnection.getCachedRegionHelper()).thenReturn(mock(CachedRegionHelper.class));
    when(serverConnection.getChunkedResponseMessage()).thenReturn(chunkedResponseMessage);
  }

  /**
   * Runs the GetAll command and returns the ObjectPartList that was added to
   * the chunked response (also verifying exactly one addObjPart call).
   */
  private ObjectPartList executeAndCaptureResponse() throws Exception {
    getAll.cmdExecute(message, serverConnection, securityService, 0);
    ArgumentCaptor<ObjectPartList> partListCaptor = ArgumentCaptor.forClass(ObjectPartList.class);
    verify(chunkedResponseMessage).addObjPart(partListCaptor.capture(), eq(false));
    return partListCaptor.getValue();
  }

  @Test
  public void noSecurityShouldSucceed() throws Exception {
    when(securityService.isClientSecurityRequired()).thenReturn(false);

    getAll.cmdExecute(message, serverConnection, securityService, 0);

    verify(chunkedResponseMessage).sendChunk(eq(serverConnection));
  }

  @Test
  public void integratedSecurityShouldSucceedIfAuthorized() throws Exception {
    when(securityService.isClientSecurityRequired()).thenReturn(true);
    when(securityService.isIntegratedSecurity()).thenReturn(true);

    ObjectPartList response = executeAndCaptureResponse();

    assertThat(response.getObjects()).hasSize(KEYS.length);
    for (Object returnedKey : response.getKeys()) {
      assertThat(returnedKey).isIn(KEYS);
    }
    // Every requested key must have been checked against integrated security.
    for (Object requestedKey : KEYS) {
      verify(securityService).authorize(Resource.DATA, Operation.READ, REGION_NAME,
          requestedKey.toString());
    }
    verify(chunkedResponseMessage).sendChunk(eq(serverConnection));
  }

  @Test
  public void integratedSecurityShouldFailIfNotAuthorized() throws Exception {
    when(securityService.isClientSecurityRequired()).thenReturn(true);
    when(securityService.isIntegratedSecurity()).thenReturn(true);
    // Deny every key: each entry in the response should then carry the failure.
    for (Object requestedKey : KEYS) {
      doThrow(new NotAuthorizedException("")).when(securityService).authorize(Resource.DATA,
          Operation.READ, REGION_NAME, requestedKey.toString());
    }

    ObjectPartList response = executeAndCaptureResponse();

    for (Object requestedKey : KEYS) {
      verify(securityService).authorize(Resource.DATA, Operation.READ, REGION_NAME,
          requestedKey.toString());
    }
    assertThat(response.getObjects()).hasSize(KEYS.length);
    for (Object entry : response.getObjects()) {
      assertThat(entry).isExactlyInstanceOf(NotAuthorizedException.class);
    }
    verify(chunkedResponseMessage).sendChunk(eq(serverConnection));
  }

  @Test
  public void oldSecurityShouldSucceedIfAuthorized() throws Exception {
    when(securityService.isClientSecurityRequired()).thenReturn(true);
    when(securityService.isIntegratedSecurity()).thenReturn(false);

    ObjectPartList response = executeAndCaptureResponse();

    assertThat(response.getObjects()).hasSize(KEYS.length);
    for (Object returnedKey : response.getKeys()) {
      assertThat(returnedKey).isIn(KEYS);
    }
    // Legacy path: authorization goes through the per-connection AuthorizeRequest.
    for (Object requestedKey : KEYS) {
      verify(authzRequest).getAuthorize(eq(REGION_NAME), eq(requestedKey.toString()), eq(null));
    }
    verify(chunkedResponseMessage).sendChunk(eq(serverConnection));
  }

  @Test
  public void oldSecurityShouldFailIfNotAuthorized() throws Exception {
    when(securityService.isClientSecurityRequired()).thenReturn(true);
    when(securityService.isIntegratedSecurity()).thenReturn(false);
    for (Object requestedKey : KEYS) {
      doThrow(new NotAuthorizedException("")).when(authzRequest).getAuthorize(eq(REGION_NAME),
          eq(requestedKey.toString()), eq(null));
    }

    ObjectPartList response = executeAndCaptureResponse();

    assertThat(response.getObjects()).hasSize(KEYS.length);
    for (Object entry : response.getObjects()) {
      assertThat(entry).isExactlyInstanceOf(NotAuthorizedException.class);
    }
    for (Object requestedKey : KEYS) {
      verify(authzRequest).getAuthorize(eq(REGION_NAME), eq(requestedKey.toString()), eq(null));
    }
    verify(chunkedResponseMessage).sendChunk(eq(serverConnection));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sql.zetasql;
import static com.google.zetasql.CivilTimeEncoder.decodePacked64TimeNanos;
import static com.google.zetasql.CivilTimeEncoder.encodePacked64TimeNanos;
import com.google.zetasql.Value;
import io.grpc.Status;
import java.util.List;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Splitter;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.math.LongMath;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
/** DateTimeUtils. */
/**
 * Date/time parsing and conversion helpers for the Beam ZetaSQL dialect.
 *
 * <p>Bridges between Joda-Time ({@link DateTime}, {@link LocalTime}), Calcite
 * ({@link DateString}, {@link TimeString}) and ZetaSQL {@link Value} representations,
 * and validates that timestamps/intervals stay inside ZetaSQL's supported range.
 */
public class DateTimeUtils {
  /** Milliseconds per day; used to convert epoch millis to ZetaSQL date values. */
  public static final Long MILLIS_PER_DAY = 86400000L;
  private static final Long MICROS_PER_MILLI = 1000L;

  /** Keys for the formatter maps below; each names one accepted timestamp layout. */
  private enum TimestampPatterns {
    TIMESTAMP_PATTERN,
    TIMESTAMP_PATTERN_SUBSECOND,
    TIMESTAMP_PATTERN_T,
    TIMESTAMP_PATTERN_SUBSECOND_T,
  }

  // Formatters for timestamps WITHOUT a zone offset, with/without 'T' separator
  // and with/without a subsecond part.
  @SuppressWarnings("unchecked")
  private static final ImmutableMap<Enum, DateTimeFormatter> TIMESTAMP_PATTERN_WITHOUT_TZ =
      ImmutableMap.of(
          TimestampPatterns.TIMESTAMP_PATTERN, DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"),
          TimestampPatterns.TIMESTAMP_PATTERN_SUBSECOND,
          DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"),
          TimestampPatterns.TIMESTAMP_PATTERN_T, DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss"),
          TimestampPatterns.TIMESTAMP_PATTERN_SUBSECOND_T,
          DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS"));

  // Same four layouts, but WITH a trailing zone offset ("ZZ").
  @SuppressWarnings("unchecked")
  private static final ImmutableMap<Enum, DateTimeFormatter> TIMESTAMP_PATTERN_WITH_TZ =
      ImmutableMap.of(
          TimestampPatterns.TIMESTAMP_PATTERN, DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ssZZ"),
          TimestampPatterns.TIMESTAMP_PATTERN_SUBSECOND,
          DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSSZZ"),
          TimestampPatterns.TIMESTAMP_PATTERN_T,
          DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZZ"),
          TimestampPatterns.TIMESTAMP_PATTERN_SUBSECOND_T,
          DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZZ"));

  /**
   * Picks the matching formatter for {@code str}.
   *
   * <p>NOTE(review): only '+' is treated as evidence of a zone offset; a negative
   * offset ("...-08:00") is indistinguishable here from the '-' in the date part,
   * so such strings fall into the no-TZ branch — confirm callers never pass one.
   */
  public static DateTimeFormatter findDateTimePattern(String str) {
    if (str.indexOf('+') == -1) {
      return findDateTimePattern(str, TIMESTAMP_PATTERN_WITHOUT_TZ);
    } else {
      return findDateTimePattern(str, TIMESTAMP_PATTERN_WITH_TZ);
    }
  }

  /**
   * Selects a formatter from {@code patternMap} based on whether {@code str}
   * contains a subsecond part ('.') and/or a 'T' date-time separator.
   */
  @SuppressWarnings("unchecked")
  public static DateTimeFormatter findDateTimePattern(
      String str, ImmutableMap<Enum, DateTimeFormatter> patternMap) {
    if (str.indexOf('.') == -1) {
      if (str.indexOf('T') == -1) {
        return patternMap.get(TimestampPatterns.TIMESTAMP_PATTERN);
      } else {
        return patternMap.get(TimestampPatterns.TIMESTAMP_PATTERN_T);
      }
    } else {
      if (str.indexOf('T') == -1) {
        return patternMap.get(TimestampPatterns.TIMESTAMP_PATTERN_SUBSECOND);
      } else {
        return patternMap.get(TimestampPatterns.TIMESTAMP_PATTERN_SUBSECOND_T);
      }
    }
  }

  // https://cloud.google.com/bigquery/docs/reference/standard-sql/migrating-from-legacy-sql#timestamp_differences
  // ZetaSQL timestamp range: 0001-01-01 00:00:00 to 9999-12-31 23:59:59.999999 UTC
  // (-62135596800000000 to 253402300799999999 micros), expressed here in millis.
  @SuppressWarnings("GoodTime")
  public static final Long MIN_UNIX_MILLIS = -62135596800000L;

  @SuppressWarnings("GoodTime")
  public static final Long MAX_UNIX_MILLIS = 253402300799999L;

  /** Parses {@code str} as a timestamp interpreted in UTC. */
  public static DateTime parseTimestampWithUTCTimeZone(String str) {
    return findDateTimePattern(str).withZoneUTC().parseDateTime(str);
  }

  /** Parses {@code str} as a timestamp interpreted in the JVM default zone. */
  @SuppressWarnings("unused")
  public static DateTime parseTimestampWithLocalTimeZone(String str) {
    return findDateTimePattern(str).withZone(DateTimeZone.getDefault()).parseDateTime(str);
  }

  /** Parses a timestamp that carries an explicit zone offset, e.g. "1990-10-20 13:24:01+0730". */
  public static DateTime parseTimestampWithTimeZone(String str) {
    if (str.indexOf('.') == -1) {
      return DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ssZ").parseDateTime(str);
    } else {
      return DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSSZ").parseDateTime(str);
    }
  }

  /**
   * Formats {@code dt} the way ZetaSQL prints timestamps: subseconds only when
   * non-zero, and a compacted zone suffix (see comment below).
   */
  public static String formatTimestampWithTimeZone(DateTime dt) {
    String resultWithoutZone;
    if (dt.getMillisOfSecond() == 0) {
      resultWithoutZone = dt.toString(DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"));
    } else {
      resultWithoutZone = dt.toString(DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"));
    }
    // ZetaSQL expects a 2-digit timezone offset (-05) if the minute part is zero, and it expects
    // a 4-digit timezone with a colon (-07:52) if the minute part is non-zero. None of the
    // variations on z,Z,ZZ,.. do this for us so we have to do it manually here.
    String zone = dt.toString(DateTimeFormat.forPattern("ZZ"));
    List<String> zoneParts = Lists.newArrayList(Splitter.on(':').limit(2).split(zone));
    if (zoneParts.size() == 2 && zoneParts.get(1).equals("00")) {
      zone = zoneParts.get(0);
    }
    return resultWithoutZone + zone;
  }

  /** Parses a zone-less, subsecond-less timestamp in the JVM default zone. */
  @SuppressWarnings("unused")
  public static DateTime parseTimestampWithoutTimeZone(String str) {
    return DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").parseDateTime(str);
  }

  /** Parses a "yyyy-MM-dd" date as midnight UTC. */
  public static DateTime parseDate(String str) {
    return DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC().parseDateTime(str);
  }

  /** Parses a time-of-day string, with or without a ".SSS" subsecond part. */
  public static DateTime parseTime(String str) {
    // DateTimeFormat does not parse "08:10:10" for pattern "HH:mm:ss.SSS". In this case, '.' must
    // appear.
    if (str.indexOf('.') == -1) {
      return DateTimeFormat.forPattern("HH:mm:ss").withZoneUTC().parseDateTime(str);
    } else {
      return DateTimeFormat.forPattern("HH:mm:ss.SSS").withZoneUTC().parseDateTime(str);
    }
  }

  /**
   * Converts a ZetaSQL TIME value to a Calcite {@link TimeString}.
   *
   * <p>Any sub-millisecond (nanosecond) precision carried by the value is
   * truncated on the way through {@code getMillisOfDay}. (This caveat was
   * previously expressed as a free-text {@code @SuppressWarnings} string, which
   * is not a valid warning key and is silently ignored by javac.)
   */
  public static TimeString convertTimeValueToTimeString(Value value) {
    LocalTime localTime = decodePacked64TimeNanos(value.getTimeValue());
    return TimeString.fromMillisOfDay(localTime.getMillisOfDay());
  }

  // dates are represented as an int32 value, indicating the offset
  // in days from the epoch 1970-01-01. ZetaSQL dates are not timezone aware,
  // and do not correspond to any particular 24 hour period.
  public static DateString convertDateValueToDateString(Value value) {
    return DateString.fromDaysSinceEpoch(value.getDateValue());
  }

  /** Parses "yyyy-MM-dd" into a ZetaSQL DATE value (days since epoch). */
  public static Value parseDateToValue(String dateString) {
    DateTime dateTime = parseDate(dateString);
    return Value.createDateValue((int) (dateTime.getMillis() / MILLIS_PER_DAY));
  }

  /** Parses a time-of-day string into a ZetaSQL TIME value (packed nanos encoding). */
  public static Value parseTimeToValue(String timeString) {
    DateTime dateTime = parseTime(timeString);
    return Value.createTimeValue(
        encodePacked64TimeNanos(LocalTime.fromMillisOfDay(dateTime.getMillisOfDay())));
  }

  /** Parses a timestamp-with-zone string into a ZetaSQL TIMESTAMP value (unix micros). */
  public static Value parseTimestampWithTZToValue(String timestampString) {
    DateTime dateTime = parseTimestampWithTimeZone(timestampString);
    // convert from micros.
    // TODO: how to handle overflow.
    return Value.createTimestampValueFromUnixMicros(
        LongMath.checkedMultiply(dateTime.getMillis(), MICROS_PER_MILLI));
  }

  /** Rejects micros values that carry sub-millisecond precision (unsupported here). */
  private static void safeCheckSubMillisPrecision(long micros) {
    long subMilliPrecision = micros % 1000L;
    if (subMilliPrecision != 0) {
      throw new IllegalArgumentException(
          String.format(
              "%s has sub-millisecond precision, which Beam ZetaSQL does"
                  + " not currently support.",
              micros));
    }
  }

  /**
   * Converts micros to millis, throwing {@link IllegalArgumentException} if any
   * sub-millisecond precision would be lost.
   */
  @SuppressWarnings("GoodTime")
  public static long safeMicrosToMillis(long micros) {
    safeCheckSubMillisPrecision(micros);
    return micros / 1000L;
  }

  /**
   * This function validates that Long representation of timestamp is compatible with ZetaSQL
   * timestamp values range.
   *
   * <p>Invoked via reflection. @see SqlOperators
   *
   * @param ts Timestamp to validate.
   * @return Unchanged timestamp sent for validation.
   */
  @SuppressWarnings("GoodTime")
  public static Long validateTimestamp(Long ts) {
    if (ts == null) {
      return null;
    }
    if ((ts < MIN_UNIX_MILLIS) || (ts > MAX_UNIX_MILLIS)) {
      throw Status.OUT_OF_RANGE
          .withDescription("Timestamp is out of valid range.")
          .asRuntimeException();
    }
    return ts;
  }

  /**
   * This function validates that interval is compatible with ZetaSQL timestamp values range.
   *
   * <p>ZetaSQL validates that if we represent interval in milliseconds, it will fit into Long.
   *
   * <p>In case of SECOND or smaller time unit, it converts timestamp to microseconds, so we need to
   * convert those to microsecond and verify that we do not cause overflow.
   *
   * <p>Invoked via reflection. @see SqlOperators
   *
   * @param arg Argument for the interval.
   * @param unit Time unit used in this interval.
   * @return Argument for the interval.
   */
  @SuppressWarnings("GoodTime")
  public static Long validateTimeInterval(Long arg, TimeUnit unit) {
    if (arg == null) {
      return null;
    }
    // multiplier to convert to milli or microseconds.
    long multiplier = unit.multiplier.longValue();
    switch (unit) {
      case SECOND:
      case MILLISECOND:
        multiplier *= 1000L; // Change multiplier from milliseconds to microseconds.
        break;
      default:
        break;
    }
    // Overflow check via division keeps the multiply itself safe.
    if ((arg > Long.MAX_VALUE / multiplier) || (arg < Long.MIN_VALUE / multiplier)) {
      throw Status.OUT_OF_RANGE
          .withDescription("Interval is out of valid range")
          .asRuntimeException();
    }
    return arg;
  }
}
| |
package com.peth.towerdefense;
import java.util.ArrayList;
import org.andengine.entity.sprite.Sprite;
import org.andengine.input.touch.TouchEvent;
import org.andengine.opengl.texture.region.ITextureRegion;
import org.andengine.opengl.vbo.VertexBufferObjectManager;
/**
 * Base class for all towers. A tower sits on a BasePoint, runs a background
 * scan thread that looks for enemies inside its range, and fires rounds at the
 * current target. Subclasses set the stats (range, delay, damage, scan method)
 * and implement {@link #fireRound()}.
 */
public abstract class Tower extends Sprite {
    // tower constants
    public static final int TOWER_TEST = 0;
    public static final int TOWER_SLOW = 1;
    public static final int TOWER_FIRE = 2;
    public static final int TOWER_FLAMETHROWER = 3;
    public static final int TOWER_BOMB = 4;
    public static final int TOWER_PEBBLE = 5;
    // scan method constants
    public static final int SCAN_FIRST = 0;
    public static final int SCAN_LAST = 1;
    // how long the scan thread sleeps between scans when idle or paused
    // (the previous implementation busy-spun here, pinning a CPU core)
    private static final int IDLE_SCAN_INTERVAL_MS = 50;
    // globals
    public float mCenterX;
    public float mCenterY;
    public float mOffsetX;
    public float mOffsetY;
    public String mName;
    public float mRange;
    public Sprite mRangeCircle;
    public int mDelay;
    public float mDamage;
    public int mScanMethod;
    public Enemy mTarget;
    public float mPrice;
    public ArrayList<Integer> mOptions;
    public BasePoint mBasePoint;
    public Thread mScanThread;
    public boolean mActive = true;
    /**
     * Builds a tower centered on the given base point and starts its scan thread.
     * @param parent base point this tower occupies
     * @param offsetX x offset of the muzzle relative to the tower center
     * @param offsetY y offset of the muzzle relative to the tower center
     */
    public Tower(BasePoint parent, float offsetX, float offsetY, ITextureRegion texture, VertexBufferObjectManager pVertexBufferObjectManager) {
        // superconstructor: position the sprite so the texture is centered on the base point
        super(parent.mCenterX - (texture.getWidth() / 2), parent.mCenterY - (texture.getHeight() / 2), texture, pVertexBufferObjectManager);
        // set variables
        setZIndex(TowerDefense.ZINDEX_TOWERS);
        mBasePoint = parent;
        mCenterX = parent.mCenterX;
        mCenterY = parent.mCenterY;
        mOffsetX = offsetX;
        mOffsetY = offsetY;
        mOptions = new ArrayList<Integer>();
        mOptions.add(Option.SELL_TOWER);
        // register touch handler
        TowerDefense.mSceneManager.getCurrentLevel().registerTouchArea(this);
        // start scanning for enemies
        mScanThread = new Thread(new ScanTask());
        mScanThread.start();
    }
    // scan task to be run on a separate thread
    class ScanTask implements Runnable {
        @Override
        public void run() {
            while (mActive) {
                if (TowerDefense.mSceneManager.getCurrentLevel().mPaused) {
                    // level paused: back off instead of spinning
                    idle(IDLE_SCAN_INTERVAL_MS);
                    continue;
                }
                mTarget = scan(mScanMethod);
                if (mTarget == null) {
                    // nothing in range: back off instead of spinning
                    idle(IDLE_SCAN_INTERVAL_MS);
                    continue;
                }
                if (mTarget.mState == Enemy.STATE_DEAD || !inRange(mTarget)) {
                    mTarget = null;
                } else {
                    fireRound();
                    idle(mDelay); // reload time between shots
                }
            }
        }
        // sleeps briefly; restores the interrupt flag so an interrupt from
        // onDetached() still lets the while(mActive) loop exit promptly
        private void idle(long millis) {
            try {
                Thread.sleep(millis);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }
    // checks all enemies to see if one is in range, per the given scan method
    public Enemy scan(int scanMethod) {
        synchronized (TowerDefense.mSceneManager.getCurrentLevel().mCurrentEnemies) {
            switch (scanMethod) {
            case SCAN_FIRST:
                // first (front-most in list order) live enemy in range
                for (int i = 0; i < TowerDefense.mSceneManager.getCurrentLevel().mCurrentEnemies.size(); i++) {
                    Enemy potentialTarget = TowerDefense.mSceneManager.getCurrentLevel().mCurrentEnemies.get(i);
                    if (!potentialTarget.isDead() && inRange(potentialTarget)) {
                        return potentialTarget;
                    }
                }
                break;
            case SCAN_LAST:
                // last (rear-most in list order) live enemy in range
                Enemy lastTarget = null;
                for (int i = 0; i < TowerDefense.mSceneManager.getCurrentLevel().mCurrentEnemies.size(); i++) {
                    Enemy potentialTarget = TowerDefense.mSceneManager.getCurrentLevel().mCurrentEnemies.get(i);
                    if (!potentialTarget.isDead() && inRange(potentialTarget)) {
                        lastTarget = potentialTarget;
                    }
                }
                return lastTarget;
            }
        }
        return null;
    }
    // takes an enemy and checks whether it is in range (y squashed for perspective)
    public boolean inRange(Enemy enemy) {
        float diffX = mCenterX - enemy.mCenterX;
        float diffY = (mCenterY - enemy.mCenterY) / TowerDefense.PERSPECTIVE;
        double dist = Math.sqrt(Math.pow(diffX, 2) + Math.pow(diffY, 2));
        return (dist <= mRange);
    }
    /* NO LONGER USED (WAS USED FOR ROTATING SPRITE), BUT MIGHT COME IN HANDY LATER
    // takes an enemy and calculates its direction in degrees
    public float getDirection(Enemy enemy) {
        return (float) (180 - Math.toDegrees(Math.atan2(enemy.getX() - getX(), enemy.getY() - getY())));
    }
    */
    // creates a new round aimed at the current target
    public void fireRound() {
        // placeholder for subclasses
    }
    @Override
    public void onDetached() {
        mActive = false;
        // wake the scan thread so it notices mActive and exits promptly
        if (mScanThread != null) {
            mScanThread.interrupt();
        }
        TowerDefense.mSceneManager.getCurrentLevel().unregisterTouchArea(this);
    }
    @Override
    public boolean onAreaTouched(TouchEvent pSceneTouchEvent, float pTouchAreaLocalX, float pTouchAreaLocalY) {
        if (!TowerDefense.mSceneManager.getCurrentLevel().mPaused) {
            if (pSceneTouchEvent.getAction() == TouchEvent.ACTION_UP) {
                // tapping an already-selected tower's base point toggles the selection off
                if (TowerDefense.mSceneManager.getCurrentLevel().mSelectionWheel != null && TowerDefense.mSceneManager.getCurrentLevel().mSelectionWheel.mBasePoint == mBasePoint) {
                    TowerDefense.mSceneManager.getCurrentLevel().unselect();
                } else {
                    select();
                }
            }
        }
        return true;
    }
    // selects this tower: shows its range circle, selection wheel and HUD info
    public void select() {
        TowerDefense.mSceneManager.getCurrentLevel().unselect();
        TowerDefense.mSceneManager.getCurrentLevel().mSelection = this;
        mRangeCircle.setVisible(true);
        showSelectionWheel();
        TowerDefense.mSceneManager.getCurrentLevel().mHUD.showInfo(this);
    }
    // replaces any existing selection wheel with this tower's options
    public void showSelectionWheel() {
        if (TowerDefense.mSceneManager.getCurrentLevel().mSelectionWheel != null) TowerDefense.mSceneManager.getCurrentLevel().mSelectionWheel.hide();
        TowerDefense.mSceneManager.getCurrentLevel().mSelectionWheel = new SelectionWheel(mCenterX, mCenterY + mOffsetY / 2, mBasePoint, mOptions, getVertexBufferObjectManager());
    }
}
/** Basic single-target tower that fires TestRounds at the first enemy in range. */
class TestTower extends Tower {
    // constants
    public static final int OFFSET_X = 0;
    public static final int OFFSET_Y = -30;
    public static final String NAME = "Earth Tower";
    public static final int PRICE = 70;
    public static final int RANGE = 120;
    public static final int DELAY = 800;
    public static final int SCAN_METHOD = SCAN_FIRST;
    public static final ITextureRegion TEXTURE = TowerDefense.TEXTURE_TOWER_TEST;
    // constructor
    public TestTower(BasePoint parent, VertexBufferObjectManager pVertexBufferObjectManager) {
        super(parent, OFFSET_X, OFFSET_Y, TEXTURE, pVertexBufferObjectManager);
        // basic stats
        mName = NAME;
        mRange = RANGE;
        mDelay = DELAY;
        mScanMethod = SCAN_METHOD;
        mPrice = PRICE;
        // damage-per-second figure derived from round damage and fire delay
        mDamage = (float) TestRound.DAMAGE * (1000f / mDelay);
        // selection-wheel entries (order defines wheel layout)
        mOptions.add(Option.LOCKED);
        mOptions.add(Option.BUILD_TOWER_PEBBLE);
        mOptions.add(Option.LOCKED);
        // range indicator circle, squashed vertically for perspective //TODO create a RangeCircle class
        mRangeCircle = new Sprite(mCenterX - mRange, mCenterY - (mRange * TowerDefense.PERSPECTIVE), TowerDefense.TEXTURE_RANGECIRCLE, getVertexBufferObjectManager());
        float diameterScale = (mRange * 2) / TowerDefense.TEXTURE_RANGECIRCLE.getWidth();
        mRangeCircle.setWidth(mRangeCircle.getWidth() * diameterScale);
        mRangeCircle.setHeight(mRangeCircle.getHeight() * diameterScale);
        mRangeCircle.setZIndex(TowerDefense.ZINDEX_TOWERS - 1);
        mRangeCircle.setVisible(false);
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(mRangeCircle);
    }
    // launches a TestRound from the muzzle position toward the current target
    @Override
    public void fireRound() {
        Round round = new TestRound(mTarget, mCenterX + mOffsetX, mCenterY + mOffsetY, getVertexBufferObjectManager());
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(round);
    }
}
/** Fast-firing upgrade of the earth tower; fires PebbleRounds at the first enemy in range. */
class PebbleTower extends Tower {
    // constants
    public static final int OFFSET_X = 0;
    public static final int OFFSET_Y = -30;
    public static final String NAME = "Pebble Tower";
    public static final int PRICE = 140;
    public static final int RANGE = 120;
    public static final int DELAY = 200;
    public static final int SCAN_METHOD = SCAN_FIRST;
    public static final ITextureRegion TEXTURE = TowerDefense.TEXTURE_TOWER_PEBBLE;
    // constructor
    public PebbleTower(BasePoint parent, VertexBufferObjectManager pVertexBufferObjectManager) {
        super(parent, OFFSET_X, OFFSET_Y, TEXTURE, pVertexBufferObjectManager);
        // set variables
        mName = NAME;
        mRange = RANGE;
        mDelay = DELAY;
        // DPS figure must use the round this tower actually fires (PebbleRound);
        // previously computed from TestRound.DAMAGE, giving a wrong displayed value.
        mDamage = (float) PebbleRound.DAMAGE * (1000f / mDelay);
        mScanMethod = SCAN_METHOD;
        mPrice = PRICE;
        // selection-wheel entries (order defines wheel layout)
        mOptions.add(Option.LOCKED);
        mOptions.add(Option.LOCKED);
        mOptions.add(Option.LOCKED);
        // build range circle //TODO create a RangeCircle class
        mRangeCircle = new Sprite(mCenterX - mRange, mCenterY - (mRange * TowerDefense.PERSPECTIVE), TowerDefense.TEXTURE_RANGECIRCLE, getVertexBufferObjectManager());
        float scale = (mRange * 2) / TowerDefense.TEXTURE_RANGECIRCLE.getWidth();
        mRangeCircle.setWidth(mRangeCircle.getWidth() * scale);
        mRangeCircle.setHeight(mRangeCircle.getHeight() * scale);
        mRangeCircle.setZIndex(TowerDefense.ZINDEX_TOWERS - 1);
        mRangeCircle.setVisible(false);
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(mRangeCircle);
    }
    // launches a PebbleRound from the muzzle position toward the current target
    @Override
    public void fireRound() {
        Round round = new PebbleRound(mTarget, mCenterX + mOffsetX, mCenterY + mOffsetY, getVertexBufferObjectManager());
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(round);
    }
}
class SlowTower extends Tower {
// constants
public static final int OFFSET_X = 0;
public static final int OFFSET_Y = -30;
public static final String NAME = "Water Tower";
public static final int PRICE = 70;
public static final int RANGE = 100;
public static final int DELAY = 2000;
public static final int SCAN_METHOD = SCAN_LAST;
public static final ITextureRegion TEXTURE = TowerDefense.TEXTURE_TOWER_SLOW;
// constructor
public SlowTower(BasePoint parent, VertexBufferObjectManager pVertexBufferObjectManager) {
super(parent, OFFSET_X, OFFSET_Y, TEXTURE, pVertexBufferObjectManager);
// set variables
mName = NAME;
mRange = RANGE;
mDelay = DELAY;
mDamage = (float) SlowRound.DAMAGE * (1000f / mDelay);
mScanMethod = SCAN_METHOD;
mPrice = PRICE;
mOptions.add(Option.LOCKED);
mOptions.add(Option.LOCKED);
mOptions.add(Option.LOCKED);
// build range circle //TODO create a RangeCircle class
mRangeCircle = new Sprite(mCenterX - mRange, mCenterY - (mRange * TowerDefense.PERSPECTIVE), TowerDefense.TEXTURE_RANGECIRCLE, getVertexBufferObjectManager());
float scale = (mRange * 2) / TowerDefense.TEXTURE_RANGECIRCLE.getWidth();
mRangeCircle.setWidth(mRangeCircle.getWidth() * scale);
mRangeCircle.setHeight(mRangeCircle.getHeight() * scale);
mRangeCircle.setZIndex(TowerDefense.ZINDEX_TOWERS - 1);
mRangeCircle.setVisible(false);
TowerDefense.mSceneManager.getCurrentLevel().attachChild(mRangeCircle);
}
// super methods
public void fireRound() {
Round round = new SlowRound(mTarget, mCenterX + mOffsetX, mCenterY + mOffsetY, getVertexBufferObjectManager());
TowerDefense.mSceneManager.getCurrentLevel().attachChild(round);
}
}
/** Fire tower: long range, slow firing; launches fireballs at the first enemy in range. */
class FireTower extends Tower {
    // constants
    public static final int OFFSET_X = 0;
    public static final int OFFSET_Y = -30;
    public static final String NAME = "Fire Tower";
    public static final int PRICE = 120;
    public static final int RANGE = 150;
    public static final int DELAY = 1500;
    public static final int SCAN_METHOD = SCAN_FIRST;
    public static final ITextureRegion TEXTURE = TowerDefense.TEXTURE_TOWER_FIRE;
    // constructor
    public FireTower(BasePoint parent, VertexBufferObjectManager pVertexBufferObjectManager) {
        super(parent, OFFSET_X, OFFSET_Y, TEXTURE, pVertexBufferObjectManager);
        // basic stats
        mName = NAME;
        mRange = RANGE;
        mDelay = DELAY;
        mScanMethod = SCAN_METHOD;
        mPrice = PRICE;
        // damage-per-second figure derived from round damage and fire delay
        mDamage = (float) FireBallRound.DAMAGE * (1000f / mDelay);
        // selection-wheel entries (order defines wheel layout)
        mOptions.add(Option.LOCKED);
        mOptions.add(Option.BUILD_TOWER_FLAMETHROWER);
        mOptions.add(Option.LOCKED);
        // range indicator circle, squashed vertically for perspective //TODO create a RangeCircle class
        mRangeCircle = new Sprite(mCenterX - mRange, mCenterY - (mRange * TowerDefense.PERSPECTIVE), TowerDefense.TEXTURE_RANGECIRCLE, getVertexBufferObjectManager());
        float diameterScale = (mRange * 2) / TowerDefense.TEXTURE_RANGECIRCLE.getWidth();
        mRangeCircle.setWidth(mRangeCircle.getWidth() * diameterScale);
        mRangeCircle.setHeight(mRangeCircle.getHeight() * diameterScale);
        mRangeCircle.setZIndex(TowerDefense.ZINDEX_TOWERS - 1);
        mRangeCircle.setVisible(false);
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(mRangeCircle);
    }
    // launches a FireBallRound from the muzzle position toward the current target
    @Override
    public void fireRound() {
        Round round = new FireBallRound(mTarget, mCenterX + mOffsetX, mCenterY + mOffsetY, getVertexBufferObjectManager());
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(round);
    }
}
class FlamethrowerTower extends Tower {
    // constants
    public static final int OFFSET_X = 0;
    public static final int OFFSET_Y = -30;
    public static final String NAME = "Flamethrower Tower";
    public static final int PRICE = 180;
    public static final int RANGE = 100;
    public static final int DELAY = 5;
    public static final int SCAN_METHOD = SCAN_FIRST;
    public static final ITextureRegion TEXTURE = TowerDefense.TEXTURE_TOWER_FLAMETHROWER;

    /**
     * Builds a flamethrower tower on the given base point: configures its
     * stats from the class constants, registers its upgrade-menu slot, and
     * attaches the hidden range indicator.
     *
     * @param parent the base point this tower is built on
     * @param pVertexBufferObjectManager VBO manager used for the tower and range sprites
     */
    public FlamethrowerTower(BasePoint parent, VertexBufferObjectManager pVertexBufferObjectManager) {
        super(parent, OFFSET_X, OFFSET_Y, TEXTURE, pVertexBufferObjectManager);
        mName = NAME;
        mRange = RANGE;
        mDelay = DELAY;
        // Displayed damage-per-second figure: one round's damage times shots per
        // second (mDelay appears to be the milliseconds between shots — TODO confirm).
        mDamage = (float) FlameRound.DAMAGE * (1000f / mDelay);
        mScanMethod = SCAN_METHOD;
        mPrice = PRICE;
        // NOTE(review): sibling towers register three menu options; this one adds a
        // single LOCKED slot — confirm that is intended.
        mOptions.add(Option.LOCKED);
        attachRangeIndicator();
    }

    /** Builds the initially-hidden range circle sprite. */ //TODO create a RangeCircle class
    private void attachRangeIndicator() {
        // The circle is squashed vertically by PERSPECTIVE to match the scene's tilt.
        mRangeCircle = new Sprite(mCenterX - mRange, mCenterY - (mRange * TowerDefense.PERSPECTIVE),
                TowerDefense.TEXTURE_RANGECIRCLE, getVertexBufferObjectManager());
        // Scale the shared circle texture so its width equals the tower's range diameter.
        float circleScale = (mRange * 2) / TowerDefense.TEXTURE_RANGECIRCLE.getWidth();
        mRangeCircle.setWidth(mRangeCircle.getWidth() * circleScale);
        mRangeCircle.setHeight(mRangeCircle.getHeight() * circleScale);
        mRangeCircle.setZIndex(TowerDefense.ZINDEX_TOWERS - 1);
        mRangeCircle.setVisible(false);
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(mRangeCircle);
    }

    // super methods
    /** Spawns one FlameRound at the tower's firing offset and attaches it to the current level. */
    public void fireRound() {
        TowerDefense.mSceneManager.getCurrentLevel().attachChild(
                new FlameRound(mTarget, mCenterX + mOffsetX, mCenterY + mOffsetY, getVertexBufferObjectManager()));
    }
}
| |
package org.ftccommunity.gui;
import javafx.application.Application;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.VBox;
import javafx.stage.Modality;
import javafx.stage.Stage;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.prefs.Preferences;
import org.ftccommunity.gui.view.*;
import org.ftccommunity.simulator.RobotSimulator;
import org.ftccommunity.simulator.modules.BrickSimulator;
import org.ftccommunity.simulator.modules.LegacyBrickSimulator;
import org.ftccommunity.utils.ClientLogger;
import org.ftccommunity.utils.Utils;
public class MainApp extends Application {
    private static final Logger logger = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
    private static Stage primaryStage;
    private BorderPane rootLayout;
    /**
     * The data as an observable list of Controllers.
     */
    private ObservableList<BrickSimulator> brickList;

    /**
     * Creates the application and initializes file logging.
     * GUI construction happens later in {@link #start(Stage)}.
     */
    public MainApp() {
        brickList = FXCollections.observableArrayList();
        try {
            ClientLogger.setup();
        } catch (IOException ex) {
            // The logger itself failed to initialize, so stdout is the only outlet left.
            System.out.println("Cannot setup the logger!");
        }
    }

    public static void main(String[] args) {
        launch(args);
    }

    /**
     * Returns the data as an observable list of Controller Simulators.
     *
     * @return the live observable list backing the GUI views
     */
    public ObservableList<BrickSimulator> getBrickData() {
        return brickList;
    }

    @Override
    public void start(Stage primaryStage) {
        Runtime.getRuntime().addShutdownHook(new Thread("shutdown thread") {
            public void run() {
                System.out.println("***** Threads Exiting *****");
                // NOTE(review): presumably signals simulator threads to stop on JVM
                // exit — confirm the semantics of isgThreadsAreRunning().
                RobotSimulator.isgThreadsAreRunning();
            }
        });
        MainApp.primaryStage = primaryStage;
        MainApp.primaryStage.setTitle("Simulator App");
        // Set the application icon.
        // (Was `this.primaryStage`: an instance reference to a static field.)
        primaryStage.getIcons().add(new Image("file:resources/images/robot.png"));
        initRootLayout();
        showBrickOverview();
    }

    /**
     * Initializes the root layout and tries to load the last opened
     * controller file.
     */
    public void initRootLayout() {
        try {
            // Load root layout from fxml file.
            FXMLLoader loader = new FXMLLoader();
            loader.setLocation(this.getClass().getResource("view/RootLayout.fxml"));
            rootLayout = loader.load();
            // Show the scene containing the root layout.
            Scene scene = new Scene(rootLayout);
            primaryStage.setScene(scene);
            // Give the controller access to the main app.
            RootLayoutController controller = loader.getController();
            controller.setMainApp(this);
            primaryStage.show();
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Failed to load view/RootLayout.fxml", e);
        }
        // Try to load last opened controller file.
        File file = Utils.getBrickFilePath(Preferences.userNodeForPackage(MainApp.class));
        if (file != null) {
            try {
                Utils.loadBrickDataFromFile(file, brickList);
            } catch (Exception e) {
                // Best effort: a corrupt or stale settings file must not prevent startup.
                logger.log(Level.SEVERE, "Failed to load brick data from " + file, e);
            }
        }
    }

    /**
     * Shows the controller overview inside the root layout.
     */
    private void showBrickOverview() {
        try {
            // Load controller overview.
            FXMLLoader loader = new FXMLLoader();
            loader.setLocation(this.getClass().getResource("view/Overview.fxml"));
            AnchorPane controllerOverview = loader.load();
            // Set controller overview into the center of root layout.
            rootLayout.setCenter(controllerOverview);
            // Give the controller access to the main app.
            OverviewController controller = loader.getController();
            controller.setMainApp(this);
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Failed to load view/Overview.fxml", e);
        }
    }

    /**
     * Opens a modal dialog to edit details for the specified brick. If the user
     * clicks OK, the changes are saved into the provided brick object and true
     * is returned.
     *
     * @param brick the controller object to be edited
     * @return true if the user clicked OK, false otherwise (including load failure).
     */
    public boolean showBrickEditDialog(BrickSimulator brick) {
        try {
            // Load the fxml file and create a new stage for the popup dialog.
            FXMLLoader loader = new FXMLLoader();
            loader.setLocation(this.getClass().getResource(brick.getFXMLFileName()));
            AnchorPane page = loader.load();
            // Create the dialog Stage.
            Stage dialogStage = new Stage();
            dialogStage.setTitle("Edit Controller");
            dialogStage.initModality(Modality.WINDOW_MODAL);
            dialogStage.initOwner(primaryStage);
            Scene scene = new Scene(page);
            dialogStage.setScene(scene);
            // Give the Brick we are editing to the controller.
            EditDialogController c = loader.getController();
            c.setDialogStage(dialogStage);
            c.setBrick(brick);
            c.fillFieldsWithCurrentValues();
            // Set the dialog icon.
            dialogStage.getIcons().add(new Image("file:resources/images/edit.png"));
            // Show the dialog and wait until the user closes it.
            dialogStage.showAndWait();
            return c.isOkClicked();
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Failed to load edit dialog " + brick.getFXMLFileName(), e);
            return false;
        }
    }

    /**
     * Opens a modeless window showing live debug output for every configured
     * brick simulator, and blocks until the user closes it.
     *
     * @return true if the window was shown, false if its FXML could not be loaded.
     */
    public boolean showBrickDebugWindow() {
        try {
            // Load the fxml file and create a new stage for the popup dialog.
            FXMLLoader loader = new FXMLLoader();
            loader.setLocation(this.getClass().getResource("view/DebugWindow.fxml"));
            AnchorPane page = loader.load();
            VBox vbox = new VBox();
            vbox.setPadding(new Insets(10));
            vbox.setSpacing(8);
            // Read the current list of modules from the GUI MainApp class.
            // (Local renamed from `brickList`, which shadowed the field of the same name.)
            List<BrickSimulator> bricks = this.getBrickData();
            for (BrickSimulator currentBrick : bricks) {
                currentBrick.setupDebugGuiVbox(vbox);
            }
            page.getChildren().add(vbox);
            // Create the dialog Stage.
            Stage dialogStage = new Stage();
            dialogStage.setTitle("Debug");
            dialogStage.initModality(Modality.NONE);
            dialogStage.initOwner(primaryStage);
            Scene scene = new Scene(page);
            dialogStage.setScene(scene);
            // Call some routines in the controller.
            DebugWindowController c = loader.getController();
            c.setDialogStage(dialogStage);
            c.setMainApp(this);
            // Set the dialog icon.
            dialogStage.getIcons().add(new Image("file:resources/images/edit.png"));
            // Show the dialog and wait until the user closes it.
            dialogStage.showAndWait();
            return true;
        } catch (IOException e) {
            // Bug fix: previously reported success (true) even when loading failed.
            logger.log(Level.SEVERE, "Failed to load view/DebugWindow.fxml", e);
            return false;
        }
    }

    /**
     * Opens a modal dialog to choose the type of brick to create. On OK the new
     * brick is stored into element 0 of the supplied array.
     *
     * @param brick single-element array that receives the newly created brick
     * @return true if the user clicked OK, false otherwise (including load failure).
     */
    public boolean showBrickNewDialog(BrickSimulator[] brick) {
        try {
            // Load the fxml file and create a new stage for the popup dialog.
            FXMLLoader loader = new FXMLLoader();
            loader.setLocation(this.getClass().getResource("view/NewDialog.fxml"));
            AnchorPane page = loader.load();
            // Create the dialog Stage.
            Stage dialogStage = new Stage();
            dialogStage.setTitle("New Controller");
            dialogStage.initModality(Modality.WINDOW_MODAL);
            dialogStage.initOwner(primaryStage);
            Scene scene = new Scene(page);
            dialogStage.setScene(scene);
            // Set the Brick into the controller.
            NewDialogController c = loader.getController();
            c.setDialogStage(dialogStage);
            c.setBrick(brick);
            c.initChoiceBox();
            // Set the dialog icon.
            dialogStage.getIcons().add(new Image("file:resources/images/edit.png"));
            // Show the dialog and wait until the user closes it.
            dialogStage.showAndWait();
            return c.isOkClicked();
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Failed to load view/NewDialog.fxml", e);
            return false;
        }
    }

    /**
     * Returns the main stage.
     *
     * @return the primary stage
     */
    public static Stage getPrimaryStage() {
        return primaryStage;
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1/text_annotation.proto
package com.google.cloud.vision.v1;
/**
* <pre>
* A single symbol representation.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1.Symbol}
*/
public final class Symbol extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1.Symbol)
SymbolOrBuilder {
// Use Symbol.newBuilder() to construct.
private Symbol(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance: message fields null (unset), text empty.
private Symbol() {
text_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
// This generator version drops unknown fields instead of retaining them.
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
// Wire-format parsing constructor: reads tagged fields until end of input (tag 0).
// Generated code — do not hand-edit; regenerate from text_annotation.proto instead.
private Symbol(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
default: {
// Unknown field: skip it; stop if it cannot be skipped.
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: { // field 1 (property), wire type 2 (length-delimited)
com.google.cloud.vision.v1.TextAnnotation.TextProperty.Builder subBuilder = null;
if (property_ != null) {
subBuilder = property_.toBuilder();
}
property_ = input.readMessage(com.google.cloud.vision.v1.TextAnnotation.TextProperty.parser(), extensionRegistry);
if (subBuilder != null) {
// A repeated occurrence of a singular message field merges into the prior value.
subBuilder.mergeFrom(property_);
property_ = subBuilder.buildPartial();
}
break;
}
case 18: { // field 2 (bounding_box), wire type 2 (length-delimited)
com.google.cloud.vision.v1.BoundingPoly.Builder subBuilder = null;
if (boundingBox_ != null) {
subBuilder = boundingBox_.toBuilder();
}
boundingBox_ = input.readMessage(com.google.cloud.vision.v1.BoundingPoly.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(boundingBox_);
boundingBox_ = subBuilder.buildPartial();
}
break;
}
case 26: { // field 3 (text), wire type 2 (length-delimited string)
java.lang.String s = input.readStringRequireUtf8();
text_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
// Reflection support: descriptor and field-accessor table live in the generated
// file-level holder class TextAnnotationProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.vision.v1.TextAnnotationProto.internal_static_google_cloud_vision_v1_Symbol_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1.TextAnnotationProto.internal_static_google_cloud_vision_v1_Symbol_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1.Symbol.class, com.google.cloud.vision.v1.Symbol.Builder.class);
}
public static final int PROPERTY_FIELD_NUMBER = 1;
// null means "not set" (hasProperty() returns false).
private com.google.cloud.vision.v1.TextAnnotation.TextProperty property_;
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public boolean hasProperty() {
return property_ != null;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public com.google.cloud.vision.v1.TextAnnotation.TextProperty getProperty() {
// Never returns null: unset fields read as the default instance.
return property_ == null ? com.google.cloud.vision.v1.TextAnnotation.TextProperty.getDefaultInstance() : property_;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public com.google.cloud.vision.v1.TextAnnotation.TextPropertyOrBuilder getPropertyOrBuilder() {
return getProperty();
}
public static final int BOUNDING_BOX_FIELD_NUMBER = 2;
// null means "not set" (hasBoundingBox() returns false).
private com.google.cloud.vision.v1.BoundingPoly boundingBox_;
/**
 * <pre>
 * The bounding box for the symbol.
 * The vertices are in the order of top-left, top-right, bottom-right,
 * bottom-left. When a rotation of the bounding box is detected the rotation
 * is represented as around the top-left corner as defined when the text is
 * read in the 'natural' orientation.
 * For example:
 *   * when the text is horizontal it might look like:
 *      0----1
 *      |    |
 *      3----2
 *   * when it's rotated 180 degrees around the top-left corner it becomes:
 *      2----3
 *      |    |
 *      1----0
 *   and the vertice order will still be (0, 1, 2, 3).
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
 */
public boolean hasBoundingBox() {
return boundingBox_ != null;
}
/**
 * <pre>
 * The bounding box for the symbol.
 * The vertices are in the order of top-left, top-right, bottom-right,
 * bottom-left. When a rotation of the bounding box is detected the rotation
 * is represented as around the top-left corner as defined when the text is
 * read in the 'natural' orientation.
 * For example:
 *   * when the text is horizontal it might look like:
 *      0----1
 *      |    |
 *      3----2
 *   * when it's rotated 180 degrees around the top-left corner it becomes:
 *      2----3
 *      |    |
 *      1----0
 *   and the vertice order will still be (0, 1, 2, 3).
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
 */
public com.google.cloud.vision.v1.BoundingPoly getBoundingBox() {
// Never returns null: unset fields read as the default instance.
return boundingBox_ == null ? com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : boundingBox_;
}
/**
 * <pre>
 * The bounding box for the symbol.
 * The vertices are in the order of top-left, top-right, bottom-right,
 * bottom-left. When a rotation of the bounding box is detected the rotation
 * is represented as around the top-left corner as defined when the text is
 * read in the 'natural' orientation.
 * For example:
 *   * when the text is horizontal it might look like:
 *      0----1
 *      |    |
 *      3----2
 *   * when it's rotated 180 degrees around the top-left corner it becomes:
 *      2----3
 *      |    |
 *      1----0
 *   and the vertice order will still be (0, 1, 2, 3).
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
 */
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getBoundingBoxOrBuilder() {
return getBoundingBox();
}
public static final int TEXT_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; converted lazily and cached (see getText).
private volatile java.lang.Object text_;
/**
 * <pre>
 * The actual UTF-8 representation of the symbol.
 * </pre>
 *
 * <code>optional string text = 3;</code>
 */
public java.lang.String getText() {
java.lang.Object ref = text_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First access via a ByteString: decode once and cache the String form.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
text_ = s;
return s;
}
}
/**
 * <pre>
 * The actual UTF-8 representation of the symbol.
 * </pre>
 *
 * <code>optional string text = 3;</code>
 */
public com.google.protobuf.ByteString
getTextBytes() {
java.lang.Object ref = text_;
if (ref instanceof java.lang.String) {
// First access via a String: encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
text_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order; unset fields are omitted.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (property_ != null) {
output.writeMessage(1, getProperty());
}
if (boundingBox_ != null) {
output.writeMessage(2, getBoundingBox());
}
if (!getTextBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, text_);
}
}
// Computes (and memoizes in memoizedSize) the serialized byte length.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (property_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getProperty());
}
if (boundingBox_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getBoundingBox());
}
if (!getTextBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, text_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Field-by-field equality: presence bits must match, then values must match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vision.v1.Symbol)) {
return super.equals(obj);
}
com.google.cloud.vision.v1.Symbol other = (com.google.cloud.vision.v1.Symbol) obj;
boolean result = true;
result = result && (hasProperty() == other.hasProperty());
if (hasProperty()) {
result = result && getProperty()
.equals(other.getProperty());
}
result = result && (hasBoundingBox() == other.hasBoundingBox());
if (hasBoundingBox()) {
result = result && getBoundingBox()
.equals(other.getBoundingBox());
}
result = result && getText()
.equals(other.getText());
return result;
}
// Hash over descriptor plus each set field, memoized; consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasProperty()) {
hash = (37 * hash) + PROPERTY_FIELD_NUMBER;
hash = (53 * hash) + getProperty().hashCode();
}
if (hasBoundingBox()) {
hash = (37 * hash) + BOUNDING_BOX_FIELD_NUMBER;
hash = (53 * hash) + getBoundingBox().hashCode();
}
hash = (37 * hash) + TEXT_FIELD_NUMBER;
hash = (53 * hash) + getText().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points; all delegate to PARSER (or the
// GeneratedMessageV3 stream helpers) for the various input sources.
public static com.google.cloud.vision.v1.Symbol parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.vision.v1.Symbol parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.Symbol parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.Symbol parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.vision.v1.Symbol prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
// The default instance yields a fresh builder; others seed it with their fields.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A single symbol representation.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1.Symbol}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1.Symbol)
com.google.cloud.vision.v1.SymbolOrBuilder {
// Reflection support: same descriptor/accessor table as the message class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.vision.v1.TextAnnotationProto.internal_static_google_cloud_vision_v1_Symbol_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1.TextAnnotationProto.internal_static_google_cloud_vision_v1_Symbol_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1.Symbol.class, com.google.cloud.vision.v1.Symbol.Builder.class);
}
// Construct using com.google.cloud.vision.v1.Symbol.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No eager field-builder setup needed for this message.
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its unset/default state.
public Builder clear() {
super.clear();
if (propertyBuilder_ == null) {
property_ = null;
} else {
property_ = null;
propertyBuilder_ = null;
}
if (boundingBoxBuilder_ == null) {
boundingBox_ = null;
} else {
boundingBox_ = null;
boundingBoxBuilder_ = null;
}
text_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.cloud.vision.v1.TextAnnotationProto.internal_static_google_cloud_vision_v1_Symbol_descriptor;
}
public com.google.cloud.vision.v1.Symbol getDefaultInstanceForType() {
return com.google.cloud.vision.v1.Symbol.getDefaultInstance();
}
public com.google.cloud.vision.v1.Symbol build() {
com.google.cloud.vision.v1.Symbol result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without the initialization check.
public com.google.cloud.vision.v1.Symbol buildPartial() {
com.google.cloud.vision.v1.Symbol result = new com.google.cloud.vision.v1.Symbol(this);
if (propertyBuilder_ == null) {
result.property_ = property_;
} else {
result.property_ = propertyBuilder_.build();
}
if (boundingBoxBuilder_ == null) {
result.boundingBox_ = boundingBox_;
} else {
result.boundingBox_ = boundingBoxBuilder_.build();
}
result.text_ = text_;
onBuilt();
return result;
}
// Boilerplate overrides narrowing the return type to this Builder.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vision.v1.Symbol) {
return mergeFrom((com.google.cloud.vision.v1.Symbol)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges only the fields that are set on `other`, per protobuf merge semantics.
public Builder mergeFrom(com.google.cloud.vision.v1.Symbol other) {
if (other == com.google.cloud.vision.v1.Symbol.getDefaultInstance()) return this;
if (other.hasProperty()) {
mergeProperty(other.getProperty());
}
if (other.hasBoundingBox()) {
mergeBoundingBox(other.getBoundingBox());
}
if (!other.getText().isEmpty()) {
text_ = other.text_;
onChanged();
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
// Streaming merge: on parse failure the partially parsed message is still merged in.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.vision.v1.Symbol parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.vision.v1.Symbol) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Field 1 (property): either the plain message is held in property_, or a lazily
// created single-field builder takes over once getPropertyBuilder() is called.
private com.google.cloud.vision.v1.TextAnnotation.TextProperty property_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.TextAnnotation.TextProperty, com.google.cloud.vision.v1.TextAnnotation.TextProperty.Builder, com.google.cloud.vision.v1.TextAnnotation.TextPropertyOrBuilder> propertyBuilder_;
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public boolean hasProperty() {
return propertyBuilder_ != null || property_ != null;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public com.google.cloud.vision.v1.TextAnnotation.TextProperty getProperty() {
if (propertyBuilder_ == null) {
return property_ == null ? com.google.cloud.vision.v1.TextAnnotation.TextProperty.getDefaultInstance() : property_;
} else {
return propertyBuilder_.getMessage();
}
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public Builder setProperty(com.google.cloud.vision.v1.TextAnnotation.TextProperty value) {
if (propertyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
property_ = value;
onChanged();
} else {
propertyBuilder_.setMessage(value);
}
return this;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public Builder setProperty(
com.google.cloud.vision.v1.TextAnnotation.TextProperty.Builder builderForValue) {
if (propertyBuilder_ == null) {
property_ = builderForValue.build();
onChanged();
} else {
propertyBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public Builder mergeProperty(com.google.cloud.vision.v1.TextAnnotation.TextProperty value) {
if (propertyBuilder_ == null) {
if (property_ != null) {
// Field already set: merge the new value into the existing one.
property_ =
com.google.cloud.vision.v1.TextAnnotation.TextProperty.newBuilder(property_).mergeFrom(value).buildPartial();
} else {
property_ = value;
}
onChanged();
} else {
propertyBuilder_.mergeFrom(value);
}
return this;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public Builder clearProperty() {
if (propertyBuilder_ == null) {
property_ = null;
onChanged();
} else {
property_ = null;
propertyBuilder_ = null;
}
return this;
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public com.google.cloud.vision.v1.TextAnnotation.TextProperty.Builder getPropertyBuilder() {
onChanged();
return getPropertyFieldBuilder().getBuilder();
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
public com.google.cloud.vision.v1.TextAnnotation.TextPropertyOrBuilder getPropertyOrBuilder() {
if (propertyBuilder_ != null) {
return propertyBuilder_.getMessageOrBuilder();
} else {
return property_ == null ?
com.google.cloud.vision.v1.TextAnnotation.TextProperty.getDefaultInstance() : property_;
}
}
/**
 * <pre>
 * Additional information detected for the symbol.
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.TextAnnotation.TextProperty property = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.TextAnnotation.TextProperty, com.google.cloud.vision.v1.TextAnnotation.TextProperty.Builder, com.google.cloud.vision.v1.TextAnnotation.TextPropertyOrBuilder>
getPropertyFieldBuilder() {
if (propertyBuilder_ == null) {
// Lazily switch to builder-backed storage; property_ is cleared because the
// field builder now owns the value.
propertyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.TextAnnotation.TextProperty, com.google.cloud.vision.v1.TextAnnotation.TextProperty.Builder, com.google.cloud.vision.v1.TextAnnotation.TextPropertyOrBuilder>(
getProperty(),
getParentForChildren(),
isClean());
property_ = null;
}
return propertyBuilder_;
}
// Field 2 (bounding_box): plain message in boundingBox_, or a lazily created
// single-field builder once builder-backed access is requested.
private com.google.cloud.vision.v1.BoundingPoly boundingBox_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder> boundingBoxBuilder_;
/**
 * <pre>
 * The bounding box for the symbol.
 * The vertices are in the order of top-left, top-right, bottom-right,
 * bottom-left. When a rotation of the bounding box is detected the rotation
 * is represented as around the top-left corner as defined when the text is
 * read in the 'natural' orientation.
 * For example:
 *   * when the text is horizontal it might look like:
 *      0----1
 *      |    |
 *      3----2
 *   * when it's rotated 180 degrees around the top-left corner it becomes:
 *      2----3
 *      |    |
 *      1----0
 *   and the vertice order will still be (0, 1, 2, 3).
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
 */
public boolean hasBoundingBox() {
return boundingBoxBuilder_ != null || boundingBox_ != null;
}
/**
 * <pre>
 * The bounding box for the symbol.
 * The vertices are in the order of top-left, top-right, bottom-right,
 * bottom-left. When a rotation of the bounding box is detected the rotation
 * is represented as around the top-left corner as defined when the text is
 * read in the 'natural' orientation.
 * For example:
 *   * when the text is horizontal it might look like:
 *      0----1
 *      |    |
 *      3----2
 *   * when it's rotated 180 degrees around the top-left corner it becomes:
 *      2----3
 *      |    |
 *      1----0
 *   and the vertice order will still be (0, 1, 2, 3).
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
 */
public com.google.cloud.vision.v1.BoundingPoly getBoundingBox() {
if (boundingBoxBuilder_ == null) {
return boundingBox_ == null ? com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : boundingBox_;
} else {
return boundingBoxBuilder_.getMessage();
}
}
/**
 * <pre>
 * The bounding box for the symbol.
 * The vertices are in the order of top-left, top-right, bottom-right,
 * bottom-left. When a rotation of the bounding box is detected the rotation
 * is represented as around the top-left corner as defined when the text is
 * read in the 'natural' orientation.
 * For example:
 *   * when the text is horizontal it might look like:
 *      0----1
 *      |    |
 *      3----2
 *   * when it's rotated 180 degrees around the top-left corner it becomes:
 *      2----3
 *      |    |
 *      1----0
 *   and the vertice order will still be (0, 1, 2, 3).
 * </pre>
 *
 * <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
 */
public Builder setBoundingBox(com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingBoxBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
boundingBox_ = value;
onChanged();
} else {
boundingBoxBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* The bounding box for the symbol.
* The vertices are in the order of top-left, top-right, bottom-right,
* bottom-left. When a rotation of the bounding box is detected the rotation
* is represented as around the top-left corner as defined when the text is
* read in the 'natural' orientation.
* For example:
* * when the text is horizontal it might look like:
* 0----1
* | |
* 3----2
* * when it's rotated 180 degrees around the top-left corner it becomes:
* 2----3
* | |
* 1----0
* and the vertice order will still be (0, 1, 2, 3).
* </pre>
*
* <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
*/
public Builder setBoundingBox(
com.google.cloud.vision.v1.BoundingPoly.Builder builderForValue) {
if (boundingBoxBuilder_ == null) {
boundingBox_ = builderForValue.build();
onChanged();
} else {
boundingBoxBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The bounding box for the symbol.
* The vertices are in the order of top-left, top-right, bottom-right,
* bottom-left. When a rotation of the bounding box is detected the rotation
* is represented as around the top-left corner as defined when the text is
* read in the 'natural' orientation.
* For example:
* * when the text is horizontal it might look like:
* 0----1
* | |
* 3----2
* * when it's rotated 180 degrees around the top-left corner it becomes:
* 2----3
* | |
* 1----0
* and the vertice order will still be (0, 1, 2, 3).
* </pre>
*
* <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
*/
public Builder mergeBoundingBox(com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingBoxBuilder_ == null) {
if (boundingBox_ != null) {
boundingBox_ =
com.google.cloud.vision.v1.BoundingPoly.newBuilder(boundingBox_).mergeFrom(value).buildPartial();
} else {
boundingBox_ = value;
}
onChanged();
} else {
boundingBoxBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* The bounding box for the symbol.
* The vertices are in the order of top-left, top-right, bottom-right,
* bottom-left. When a rotation of the bounding box is detected the rotation
* is represented as around the top-left corner as defined when the text is
* read in the 'natural' orientation.
* For example:
* * when the text is horizontal it might look like:
* 0----1
* | |
* 3----2
* * when it's rotated 180 degrees around the top-left corner it becomes:
* 2----3
* | |
* 1----0
* and the vertice order will still be (0, 1, 2, 3).
* </pre>
*
* <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
*/
public Builder clearBoundingBox() {
if (boundingBoxBuilder_ == null) {
boundingBox_ = null;
onChanged();
} else {
boundingBox_ = null;
boundingBoxBuilder_ = null;
}
return this;
}
/**
* <pre>
* The bounding box for the symbol.
* The vertices are in the order of top-left, top-right, bottom-right,
* bottom-left. When a rotation of the bounding box is detected the rotation
* is represented as around the top-left corner as defined when the text is
* read in the 'natural' orientation.
* For example:
* * when the text is horizontal it might look like:
* 0----1
* | |
* 3----2
* * when it's rotated 180 degrees around the top-left corner it becomes:
* 2----3
* | |
* 1----0
* and the vertice order will still be (0, 1, 2, 3).
* </pre>
*
* <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
*/
    public com.google.cloud.vision.v1.BoundingPoly.Builder getBoundingBoxBuilder() {
      // Mark dirty up front: handing out a mutable builder means the field can
      // change without another setter call being observed.
      onChanged();
      return getBoundingBoxFieldBuilder().getBuilder();
    }
/**
* <pre>
* The bounding box for the symbol.
* The vertices are in the order of top-left, top-right, bottom-right,
* bottom-left. When a rotation of the bounding box is detected the rotation
* is represented as around the top-left corner as defined when the text is
* read in the 'natural' orientation.
* For example:
* * when the text is horizontal it might look like:
* 0----1
* | |
* 3----2
* * when it's rotated 180 degrees around the top-left corner it becomes:
* 2----3
* | |
* 1----0
* and the vertice order will still be (0, 1, 2, 3).
* </pre>
*
* <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
*/
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getBoundingBoxOrBuilder() {
if (boundingBoxBuilder_ != null) {
return boundingBoxBuilder_.getMessageOrBuilder();
} else {
return boundingBox_ == null ?
com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : boundingBox_;
}
}
/**
* <pre>
* The bounding box for the symbol.
* The vertices are in the order of top-left, top-right, bottom-right,
* bottom-left. When a rotation of the bounding box is detected the rotation
* is represented as around the top-left corner as defined when the text is
* read in the 'natural' orientation.
* For example:
* * when the text is horizontal it might look like:
* 0----1
* | |
* 3----2
* * when it's rotated 180 degrees around the top-left corner it becomes:
* 2----3
* | |
* 1----0
* and the vertice order will still be (0, 1, 2, 3).
* </pre>
*
* <code>optional .google.cloud.vision.v1.BoundingPoly bounding_box = 2;</code>
*/
    // Lazily creates the single-field builder for 'bounding_box'. After creation
    // the builder is the single source of truth, so the plain message field is
    // cleared to avoid two divergent copies.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder> 
        getBoundingBoxFieldBuilder() {
      if (boundingBoxBuilder_ == null) {
        boundingBoxBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder>(
                getBoundingBox(),
                getParentForChildren(),
                isClean());
        boundingBox_ = null; // builder now owns the value
      }
      return boundingBoxBuilder_;
    }
private java.lang.Object text_ = "";
/**
* <pre>
* The actual UTF-8 representation of the symbol.
* </pre>
*
* <code>optional string text = 3;</code>
*/
public java.lang.String getText() {
java.lang.Object ref = text_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
text_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The actual UTF-8 representation of the symbol.
* </pre>
*
* <code>optional string text = 3;</code>
*/
public com.google.protobuf.ByteString
getTextBytes() {
java.lang.Object ref = text_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
text_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The actual UTF-8 representation of the symbol.
* </pre>
*
* <code>optional string text = 3;</code>
*/
public Builder setText(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
text_ = value;
onChanged();
return this;
}
/**
* <pre>
* The actual UTF-8 representation of the symbol.
* </pre>
*
* <code>optional string text = 3;</code>
*/
    public Builder clearText() {
      // Reset to the default instance's value (the empty string for proto3).
      text_ = getDefaultInstance().getText();
      onChanged();
      return this;
    }
/**
* <pre>
* The actual UTF-8 representation of the symbol.
* </pre>
*
* <code>optional string text = 3;</code>
*/
public Builder setTextBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
text_ = value;
onChanged();
return this;
}
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Intentional no-op: this generator version drops unknown fields for
      // proto3 messages rather than retaining them.
      return this;
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Intentional no-op: unknown fields are discarded (see setUnknownFields).
      return this;
    }
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1.Symbol)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1.Symbol)
  // Shared immutable default instance; the runtime compares against it to
  // detect unset message-typed fields.
  private static final com.google.cloud.vision.v1.Symbol DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1.Symbol();
  }
  public static com.google.cloud.vision.v1.Symbol getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton used by the protobuf runtime to decode Symbol
  // messages from the wire.
  private static final com.google.protobuf.Parser<Symbol>
      PARSER = new com.google.protobuf.AbstractParser<Symbol>() {
    public Symbol parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        // In this generator version, parsing happens in the message constructor.
        return new Symbol(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<Symbol> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Symbol> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor required by the MessageLite contract.
  public com.google.cloud.vision.v1.Symbol getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Phone.com API
* This is a Phone.com api Swagger definition
*
* OpenAPI spec version: 1.0.0
* Contact: apisupport@phone.com
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package io.swagger.client.model;
import java.util.Objects;
import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import io.swagger.client.model.FilterIdNameArray;
import io.swagger.client.model.QueueFull;
import io.swagger.client.model.SortIdName;
import java.util.ArrayList;
import java.util.List;
/**
* ListQueues
*/
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2017-03-21T10:43:19.319+01:00")
public class ListQueues {
  @SerializedName("filters")
  private FilterIdNameArray filters = null;

  @SerializedName("sort")
  private SortIdName sort = null;

  @SerializedName("total")
  private Integer total = null;

  @SerializedName("offset")
  private Integer offset = null;

  @SerializedName("limit")
  private Integer limit = null;

  @SerializedName("items")
  private List<QueueFull> items = new ArrayList<QueueFull>();

  /** Fluent variant of {@link #setFilters(FilterIdNameArray)}. */
  public ListQueues filters(FilterIdNameArray filters) {
    this.filters = filters;
    return this;
  }

   /**
   * Get filters
   * @return filters
  **/
  @ApiModelProperty(example = "null", value = "")
  public FilterIdNameArray getFilters() {
    return filters;
  }

  public void setFilters(FilterIdNameArray filters) {
    this.filters = filters;
  }

  /** Fluent variant of {@link #setSort(SortIdName)}. */
  public ListQueues sort(SortIdName sort) {
    this.sort = sort;
    return this;
  }

   /**
   * Get sort
   * @return sort
  **/
  @ApiModelProperty(example = "null", value = "")
  public SortIdName getSort() {
    return sort;
  }

  public void setSort(SortIdName sort) {
    this.sort = sort;
  }

  /** Fluent variant of {@link #setTotal(Integer)}. */
  public ListQueues total(Integer total) {
    this.total = total;
    return this;
  }

   /**
   * Get total
   * @return total
  **/
  @ApiModelProperty(example = "null", value = "")
  public Integer getTotal() {
    return total;
  }

  public void setTotal(Integer total) {
    this.total = total;
  }

  /** Fluent variant of {@link #setOffset(Integer)}. */
  public ListQueues offset(Integer offset) {
    this.offset = offset;
    return this;
  }

   /**
   * Get offset
   * @return offset
  **/
  @ApiModelProperty(example = "null", value = "")
  public Integer getOffset() {
    return offset;
  }

  public void setOffset(Integer offset) {
    this.offset = offset;
  }

  /** Fluent variant of {@link #setLimit(Integer)}. */
  public ListQueues limit(Integer limit) {
    this.limit = limit;
    return this;
  }

   /**
   * Get limit
   * @return limit
  **/
  @ApiModelProperty(example = "null", value = "")
  public Integer getLimit() {
    return limit;
  }

  public void setLimit(Integer limit) {
    this.limit = limit;
  }

  /** Fluent variant of {@link #setItems(List)}. */
  public ListQueues items(List<QueueFull> items) {
    this.items = items;
    return this;
  }

  /**
   * Appends a single item to the list. Re-creates the backing list if it was
   * cleared via {@code setItems(null)} / {@code items(null)}; previously that
   * sequence caused a NullPointerException here.
   */
  public ListQueues addItemsItem(QueueFull itemsItem) {
    if (this.items == null) {
      this.items = new ArrayList<QueueFull>();
    }
    this.items.add(itemsItem);
    return this;
  }

   /**
   * Get items
   * @return items
  **/
  @ApiModelProperty(example = "null", value = "")
  public List<QueueFull> getItems() {
    return items;
  }

  public void setItems(List<QueueFull> items) {
    this.items = items;
  }


  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ListQueues listQueues = (ListQueues) o;
    return Objects.equals(this.filters, listQueues.filters) &&
        Objects.equals(this.sort, listQueues.sort) &&
        Objects.equals(this.total, listQueues.total) &&
        Objects.equals(this.offset, listQueues.offset) &&
        Objects.equals(this.limit, listQueues.limit) &&
        Objects.equals(this.items, listQueues.items);
  }

  @Override
  public int hashCode() {
    return Objects.hash(filters, sort, total, offset, limit, items);
  }


  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ListQueues {\n");

    sb.append("    filters: ").append(toIndentedString(filters)).append("\n");
    sb.append("    sort: ").append(toIndentedString(sort)).append("\n");
    sb.append("    total: ").append(toIndentedString(total)).append("\n");
    sb.append("    offset: ").append(toIndentedString(offset)).append("\n");
    sb.append("    limit: ").append(toIndentedString(limit)).append("\n");
    sb.append("    items: ").append(toIndentedString(items)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zenkun.datetimepicker.time;
import com.nineoldandroids.animation.Keyframe;
import com.nineoldandroids.animation.ObjectAnimator;
import com.nineoldandroids.animation.PropertyValuesHolder;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.animation.ValueAnimator.AnimatorUpdateListener;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.util.Log;
import android.view.View;
import com.android.datetimepicker.R;
/**
* View to show what number is selected. This will draw a blue circle over the number, with a blue
* line coming from the center of the main circle to the edge of the blue selection.
*/
public class RadialSelectorView extends View {
private static final String TAG = "RadialSelectorView";
private final Paint mPaint = new Paint();
private boolean mIsInitialized;
private boolean mDrawValuesReady;
private float mCircleRadiusMultiplier;
private float mAmPmCircleRadiusMultiplier;
private float mInnerNumbersRadiusMultiplier;
private float mOuterNumbersRadiusMultiplier;
private float mNumbersRadiusMultiplier;
private float mSelectionRadiusMultiplier;
private float mAnimationRadiusMultiplier;
private boolean mIs24HourMode;
private boolean mHasInnerCircle;
private int mXCenter;
private int mYCenter;
private int mCircleRadius;
private float mTransitionMidRadiusMultiplier;
private float mTransitionEndRadiusMultiplier;
private int mLineLength;
private int mSelectionRadius;
private InvalidateUpdateListener mInvalidateUpdateListener;
private int mSelectionDegrees;
private double mSelectionRadians;
private boolean mForceDrawDot;
public RadialSelectorView(Context context) {
super(context);
mIsInitialized = false;
}
/**
* Initialize this selector with the state of the picker.
* @param context Current context.
* @param is24HourMode Whether the selector is in 24-hour mode, which will tell us
* whether the circle's center is moved up slightly to make room for the AM/PM circles.
* @param hasInnerCircle Whether we have both an inner and an outer circle of numbers
* that may be selected. Should be true for 24-hour mode in the hours circle.
* @param disappearsOut Whether the numbers' animation will have them disappearing out
* or disappearing in.
* @param selectionDegrees The initial degrees to be selected.
* @param isInnerCircle Whether the initial selection is in the inner or outer circle.
* Will be ignored when hasInnerCircle is false.
*/
public void initialize(Context context, boolean is24HourMode, boolean hasInnerCircle,
boolean disappearsOut, int selectionDegrees, boolean isInnerCircle) {
if (mIsInitialized) {
Log.e(TAG, "This RadialSelectorView may only be initialized once.");
return;
}
Resources res = context.getResources();
int blue = res.getColor(R.color.blue);
mPaint.setColor(blue);
mPaint.setAntiAlias(true);
// Calculate values for the circle radius size.
mIs24HourMode = is24HourMode;
if (is24HourMode) {
mCircleRadiusMultiplier = Float.parseFloat(
res.getString(R.string.circle_radius_multiplier_24HourMode));
} else {
mCircleRadiusMultiplier = Float.parseFloat(
res.getString(R.string.circle_radius_multiplier));
mAmPmCircleRadiusMultiplier =
Float.parseFloat(res.getString(R.string.ampm_circle_radius_multiplier));
}
// Calculate values for the radius size(s) of the numbers circle(s).
mHasInnerCircle = hasInnerCircle;
if (hasInnerCircle) {
mInnerNumbersRadiusMultiplier =
Float.parseFloat(res.getString(R.string.numbers_radius_multiplier_inner));
mOuterNumbersRadiusMultiplier =
Float.parseFloat(res.getString(R.string.numbers_radius_multiplier_outer));
} else {
mNumbersRadiusMultiplier =
Float.parseFloat(res.getString(R.string.numbers_radius_multiplier_normal));
}
mSelectionRadiusMultiplier =
Float.parseFloat(res.getString(R.string.selection_radius_multiplier));
// Calculate values for the transition mid-way states.
mAnimationRadiusMultiplier = 1;
mTransitionMidRadiusMultiplier = 1f + (0.05f * (disappearsOut? -1 : 1));
mTransitionEndRadiusMultiplier = 1f + (0.3f * (disappearsOut? 1 : -1));
mInvalidateUpdateListener = new InvalidateUpdateListener();
setSelection(selectionDegrees, isInnerCircle, false);
mIsInitialized = true;
}
/**
* Set the selection.
* @param selectionDegrees The degrees to be selected.
* @param isInnerCircle Whether the selection should be in the inner circle or outer. Will be
* ignored if hasInnerCircle was initialized to false.
* @param forceDrawDot Whether to force the dot in the center of the selection circle to be
* drawn. If false, the dot will be drawn only when the degrees is not a multiple of 30, i.e.
* the selection is not on a visible number.
*/
public void setSelection(int selectionDegrees, boolean isInnerCircle, boolean forceDrawDot) {
mSelectionDegrees = selectionDegrees;
mSelectionRadians = selectionDegrees * Math.PI / 180;
mForceDrawDot = forceDrawDot;
if (mHasInnerCircle) {
if (isInnerCircle) {
mNumbersRadiusMultiplier = mInnerNumbersRadiusMultiplier;
} else {
mNumbersRadiusMultiplier = mOuterNumbersRadiusMultiplier;
}
}
}
/**
* Allows for smoother animations.
*/
@Override
public boolean hasOverlappingRendering() {
return false;
}
/**
* Set the multiplier for the radius. Will be used during animations to move in/out.
*/
public void setAnimationRadiusMultiplier(float animationRadiusMultiplier) {
mAnimationRadiusMultiplier = animationRadiusMultiplier;
}
public int getDegreesFromCoords(float pointX, float pointY, boolean forceLegal,
final Boolean[] isInnerCircle) {
if (!mDrawValuesReady) {
return -1;
}
double hypotenuse = Math.sqrt(
(pointY - mYCenter)*(pointY - mYCenter) +
(pointX - mXCenter)*(pointX - mXCenter));
// Check if we're outside the range
if (mHasInnerCircle) {
if (forceLegal) {
// If we're told to force the coordinates to be legal, we'll set the isInnerCircle
// boolean based based off whichever number the coordinates are closer to.
int innerNumberRadius = (int) (mCircleRadius * mInnerNumbersRadiusMultiplier);
int distanceToInnerNumber = (int) Math.abs(hypotenuse - innerNumberRadius);
int outerNumberRadius = (int) (mCircleRadius * mOuterNumbersRadiusMultiplier);
int distanceToOuterNumber = (int) Math.abs(hypotenuse - outerNumberRadius);
isInnerCircle[0] = (distanceToInnerNumber <= distanceToOuterNumber);
} else {
// Otherwise, if we're close enough to either number (with the space between the
// two allotted equally), set the isInnerCircle boolean as the closer one.
// appropriately, but otherwise return -1.
int minAllowedHypotenuseForInnerNumber =
(int) (mCircleRadius * mInnerNumbersRadiusMultiplier) - mSelectionRadius;
int maxAllowedHypotenuseForOuterNumber =
(int) (mCircleRadius * mOuterNumbersRadiusMultiplier) + mSelectionRadius;
int halfwayHypotenusePoint = (int) (mCircleRadius *
((mOuterNumbersRadiusMultiplier + mInnerNumbersRadiusMultiplier) / 2));
if (hypotenuse >= minAllowedHypotenuseForInnerNumber &&
hypotenuse <= halfwayHypotenusePoint) {
isInnerCircle[0] = true;
} else if (hypotenuse <= maxAllowedHypotenuseForOuterNumber &&
hypotenuse >= halfwayHypotenusePoint) {
isInnerCircle[0] = false;
} else {
return -1;
}
}
} else {
// If there's just one circle, we'll need to return -1 if:
// we're not told to force the coordinates to be legal, and
// the coordinates' distance to the number is within the allowed distance.
if (!forceLegal) {
int distanceToNumber = (int) Math.abs(hypotenuse - mLineLength);
// The max allowed distance will be defined as the distance from the center of the
// number to the edge of the circle.
int maxAllowedDistance = (int) (mCircleRadius * (1 - mNumbersRadiusMultiplier));
if (distanceToNumber > maxAllowedDistance) {
return -1;
}
}
}
float opposite = Math.abs(pointY - mYCenter);
double radians = Math.asin(opposite / hypotenuse);
int degrees = (int) (radians * 180 / Math.PI);
// Now we have to translate to the correct quadrant.
boolean rightSide = (pointX > mXCenter);
boolean topSide = (pointY < mYCenter);
if (rightSide && topSide) {
degrees = 90 - degrees;
} else if (rightSide && !topSide) {
degrees = 90 + degrees;
} else if (!rightSide && !topSide) {
degrees = 270 - degrees;
} else if (!rightSide && topSide) {
degrees = 270 + degrees;
}
return degrees;
}
@Override
public void onDraw(Canvas canvas) {
int viewWidth = getWidth();
if (viewWidth == 0 || !mIsInitialized) {
return;
}
if (!mDrawValuesReady) {
mXCenter = getWidth() / 2;
mYCenter = getHeight() / 2;
mCircleRadius = (int) (Math.min(mXCenter, mYCenter) * mCircleRadiusMultiplier);
if (!mIs24HourMode) {
// We'll need to draw the AM/PM circles, so the main circle will need to have
// a slightly higher center. To keep the entire view centered vertically, we'll
// have to push it up by half the radius of the AM/PM circles.
int amPmCircleRadius = (int) (mCircleRadius * mAmPmCircleRadiusMultiplier);
mYCenter -= amPmCircleRadius / 2;
}
mSelectionRadius = (int) (mCircleRadius * mSelectionRadiusMultiplier);
mDrawValuesReady = true;
}
// Calculate the current radius at which to place the selection circle.
mLineLength = (int) (mCircleRadius * mNumbersRadiusMultiplier * mAnimationRadiusMultiplier);
int pointX = mXCenter + (int) (mLineLength * Math.sin(mSelectionRadians));
int pointY = mYCenter - (int) (mLineLength * Math.cos(mSelectionRadians));
// Draw the selection circle.
mPaint.setAlpha(51);
canvas.drawCircle(pointX, pointY, mSelectionRadius, mPaint);
if (mForceDrawDot | mSelectionDegrees % 30 != 0) {
// We're not on a direct tick (or we've been told to draw the dot anyway).
mPaint.setAlpha(255);
canvas.drawCircle(pointX, pointY, (mSelectionRadius * 2 / 7), mPaint);
} else {
// We're not drawing the dot, so shorten the line to only go as far as the edge of the
// selection circle.
int lineLength = mLineLength;
lineLength -= mSelectionRadius;
pointX = mXCenter + (int) (lineLength * Math.sin(mSelectionRadians));
pointY = mYCenter - (int) (lineLength * Math.cos(mSelectionRadians));
}
// Draw the line from the center of the circle.
mPaint.setAlpha(255);
mPaint.setStrokeWidth(1);
canvas.drawLine(mXCenter, mYCenter, pointX, pointY, mPaint);
}
public ObjectAnimator getDisappearAnimator() {
if (!mIsInitialized || !mDrawValuesReady) {
Log.e(TAG, "RadialSelectorView was not ready for animation.");
return null;
}
Keyframe kf0, kf1, kf2;
float midwayPoint = 0.2f;
int duration = 500;
kf0 = Keyframe.ofFloat(0f, 1);
kf1 = Keyframe.ofFloat(midwayPoint, mTransitionMidRadiusMultiplier);
kf2 = Keyframe.ofFloat(1f, mTransitionEndRadiusMultiplier);
PropertyValuesHolder radiusDisappear = PropertyValuesHolder.ofKeyframe(
"animationRadiusMultiplier", kf0, kf1, kf2);
kf0 = Keyframe.ofFloat(0f, 1f);
kf1 = Keyframe.ofFloat(1f, 0f);
PropertyValuesHolder fadeOut = PropertyValuesHolder.ofKeyframe("alpha", kf0, kf1);
ObjectAnimator disappearAnimator = ObjectAnimator.ofPropertyValuesHolder(
this, radiusDisappear, fadeOut).setDuration(duration);
disappearAnimator.addUpdateListener(mInvalidateUpdateListener);
return disappearAnimator;
}
public ObjectAnimator getReappearAnimator() {
if (!mIsInitialized || !mDrawValuesReady) {
Log.e(TAG, "RadialSelectorView was not ready for animation.");
return null;
}
Keyframe kf0, kf1, kf2, kf3;
float midwayPoint = 0.2f;
int duration = 500;
// The time points are half of what they would normally be, because this animation is
// staggered against the disappear so they happen seamlessly. The reappear starts
// halfway into the disappear.
float delayMultiplier = 0.25f;
float transitionDurationMultiplier = 1f;
float totalDurationMultiplier = transitionDurationMultiplier + delayMultiplier;
int totalDuration = (int) (duration * totalDurationMultiplier);
float delayPoint = (delayMultiplier * duration) / totalDuration;
midwayPoint = 1 - (midwayPoint * (1 - delayPoint));
kf0 = Keyframe.ofFloat(0f, mTransitionEndRadiusMultiplier);
kf1 = Keyframe.ofFloat(delayPoint, mTransitionEndRadiusMultiplier);
kf2 = Keyframe.ofFloat(midwayPoint, mTransitionMidRadiusMultiplier);
kf3 = Keyframe.ofFloat(1f, 1);
PropertyValuesHolder radiusReappear = PropertyValuesHolder.ofKeyframe(
"animationRadiusMultiplier", kf0, kf1, kf2, kf3);
kf0 = Keyframe.ofFloat(0f, 0f);
kf1 = Keyframe.ofFloat(delayPoint, 0f);
kf2 = Keyframe.ofFloat(1f, 1f);
PropertyValuesHolder fadeIn = PropertyValuesHolder.ofKeyframe("alpha", kf0, kf1, kf2);
ObjectAnimator reappearAnimator = ObjectAnimator.ofPropertyValuesHolder(
this, radiusReappear, fadeIn).setDuration(totalDuration);
reappearAnimator.addUpdateListener(mInvalidateUpdateListener);
return reappearAnimator;
}
/**
* We'll need to invalidate during the animation.
*/
private class InvalidateUpdateListener implements AnimatorUpdateListener {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
RadialSelectorView.this.invalidate();
}
}
}
| |
/*
* Copyright (c) 2010-2017 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.provisioning.ucf.impl.connid;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.crypto.Protector;
import com.evolveum.midpoint.prism.delta.PropertyDelta;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.schema.PrismSchema;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.prism.xnode.MapXNode;
import com.evolveum.midpoint.provisioning.ucf.api.*;
import com.evolveum.midpoint.schema.CapabilityUtil;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.MidPointPrismContextFactory;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.processor.*;
import com.evolveum.midpoint.schema.result.AsynchronousOperationReturnValue;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.schema.util.SchemaDebugUtil;
import com.evolveum.midpoint.schema.util.ResourceTypeUtil;
import com.evolveum.midpoint.test.IntegrationTestTools;
import com.evolveum.midpoint.test.ldap.OpenDJController;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.PrettyPrinter;
import com.evolveum.midpoint.util.exception.*;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import com.evolveum.midpoint.xml.ns._public.resource.capabilities_3.CredentialsCapabilityType;
import com.evolveum.midpoint.xml.ns._public.resource.capabilities_3.PagedSearchCapabilityType;
import com.evolveum.prism.xml.ns._public.types_3.ItemDeltaType;
import com.evolveum.prism.xml.ns._public.types_3.ItemPathType;
import com.evolveum.prism.xml.ns._public.types_3.ModificationTypeType;
import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType;
import com.evolveum.prism.xml.ns._public.types_3.RawType;
import org.identityconnectors.framework.common.objects.Name;
import org.identityconnectors.framework.common.objects.Uid;
import org.opends.server.types.Entry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.Assert;
import org.testng.AssertJUnit;
import org.testng.annotations.*;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static com.evolveum.midpoint.test.IntegrationTestTools.*;
import static org.testng.AssertJUnit.*;
/**
* Test UCF implementation with OpenDJ and ICF LDAP connector.
* <p/>
* This test is using embedded OpenDJ as a resource and ICF LDAP connector. The
* test is executed by direct calls to the UCF interface.
*
* @author Radovan Semancik
* @author Katka Valalikova
* <p/>
* This is a UCF test. It should not need the repository or other things
* from the midPoint spring context except from the provisioning beans.
* But due to a general issue with spring context initialization this is
* a lesser evil for now (MID-392)
*/
@ContextConfiguration(locations = { "classpath:ctx-ucf-connid-test.xml" })
public class TestUcfOpenDj extends AbstractTestNGSpringContextTests {
private static final File RESOURCE_OPENDJ_FILE = new File(UcfTestUtil.TEST_DIR, "resource-opendj.xml");
private static final File RESOURCE_OPENDJ_BAD_FILE = new File(UcfTestUtil.TEST_DIR, "resource-opendj-bad.xml");
private static final File CONNECTOR_LDAP_FILE = new File(UcfTestUtil.TEST_DIR, "connector-ldap.xml");
private ResourceType resourceType;
private ResourceType badResourceType;
private ConnectorType connectorType;
private ConnectorFactory factory;
private ConnectorInstance cc;
private PrismSchema connectorSchema;
private ResourceSchema resourceSchema;
private static Trace LOGGER = TraceManager.getTrace(TestUcfOpenDj.class);
@Autowired(required = true)
ConnectorFactory connectorFactoryIcfImpl;
@Autowired(required = true)
Protector protector;
@Autowired(required = true)
PrismContext prismContext;
protected static OpenDJController openDJController = new OpenDJController();
@BeforeSuite
public void setup() throws SchemaException, SAXException, IOException {
PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX);
PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);
}
@BeforeClass
public static void startLdap() throws Exception {
LOGGER.info("------------------------------------------------------------------------------");
LOGGER.info("START: OpenDjUcfTest");
LOGGER.info("------------------------------------------------------------------------------");
openDJController.startCleanServer();
}
@AfterClass
public static void stopLdap() throws Exception {
openDJController.stop();
LOGGER.info("------------------------------------------------------------------------------");
LOGGER.info("STOP: OpenDjUcfTest");
LOGGER.info("------------------------------------------------------------------------------");
}
@BeforeMethod
public void initUcf() throws Exception {
TestUtil.displayTestTile("initUcf");
// Resource
PrismObject<ResourceType> resource = PrismTestUtil.parseObject(RESOURCE_OPENDJ_FILE);
resourceType = resource.asObjectable();
// Resource: Second copy for negative test cases
PrismObject<ResourceType> badResource = PrismTestUtil.parseObject(RESOURCE_OPENDJ_BAD_FILE);
badResourceType = badResource.asObjectable();
// Connector
PrismObject<ConnectorType> connector = PrismTestUtil.parseObject(CONNECTOR_LDAP_FILE);
connectorType = connector.asObjectable();
factory = connectorFactoryIcfImpl;
connectorSchema = factory.generateConnectorConfigurationSchema(connectorType);
AssertJUnit.assertNotNull("Cannot generate connector schema", connectorSchema);
display("Connector schema", connectorSchema);
cc = factory.createConnectorInstance(connectorType, ResourceTypeUtil.getResourceNamespace(resourceType), "ldap connector");
OperationResult result = new OperationResult("initUcf");
cc.configure(resourceType.getConnectorConfiguration().asPrismContainerValue(), result);
cc.initialize(null, null, false, result);
// TODO: assert something
resourceSchema = cc.fetchResourceSchema(null, result);
display("Resource schema", resourceSchema);
AssertJUnit.assertNotNull(resourceSchema);
}
@AfterMethod
public void shutdownUcf() throws Exception {
}
@Test
public void test010ConnectorSchemaSanity() throws Exception {
final String TEST_NAME = "test010ConnectorSchemaSanity";
TestUtil.displayTestTile(TEST_NAME);
IntegrationTestTools.assertConnectorSchemaSanity(connectorSchema, "LDAP connector", true);
PrismContainerDefinition configurationDefinition =
connectorSchema.findItemDefinition(ResourceType.F_CONNECTOR_CONFIGURATION.getLocalPart(), PrismContainerDefinition.class);
PrismContainerDefinition configurationPropertiesDefinition =
configurationDefinition.findContainerDefinition(SchemaConstants.CONNECTOR_SCHEMA_CONFIGURATION_PROPERTIES_ELEMENT_QNAME);
PrismPropertyDefinition<String> propHost = configurationPropertiesDefinition.findPropertyDefinition(new QName(UcfTestUtil.CONNECTOR_LDAP_NS,"host"));
assertNotNull("No definition for configuration property 'host' in connector schema", propHost);
PrismAsserts.assertDefinition(propHost, new QName(UcfTestUtil.CONNECTOR_LDAP_NS,"host"), DOMUtil.XSD_STRING, 1, 1);
assertEquals("Wrong property 'host' display name", "Host", propHost.getDisplayName());
assertEquals("Wrong property 'host' help", "The name or IP address of the LDAP server host.", propHost.getHelp());
assertEquals("Wrong property 'host' display order", (Integer)1, propHost.getDisplayOrder()); // MID-2642
PrismPropertyDefinition<String> propPort = configurationPropertiesDefinition.findPropertyDefinition(new QName(UcfTestUtil.CONNECTOR_LDAP_NS,"port"));
assertNotNull("No definition for configuration property 'port' in connector schema", propPort);
PrismAsserts.assertDefinition(propPort, new QName(UcfTestUtil.CONNECTOR_LDAP_NS,"port"), DOMUtil.XSD_INT, 0, 1);
assertEquals("Wrong property 'port' display name", "Port number", propPort.getDisplayName());
assertEquals("Wrong property 'port' help", "LDAP server port number.", propPort.getHelp());
assertEquals("Wrong property 'port' display order", (Integer)2, propPort.getDisplayOrder()); // MID-2642
}
@Test
public void test020ResourceSchemaSanity() throws Exception {
final String TEST_NAME = "test020ResourceSchemaSanity";
TestUtil.displayTestTile(TEST_NAME);
QName objectClassQname = new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME);
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(objectClassQname);
assertNotNull("No object class definition " + objectClassQname, accountDefinition);
// assertEquals("Object class " + objectClassQname + " is not account", ShadowKindType.ACCOUNT, accountDefinition.getKind());
// assertTrue("Object class " + objectClassQname + " is not default account", accountDefinition.isDefaultInAKind());
assertFalse("Object class " + objectClassQname + " is empty", accountDefinition.isEmpty());
assertFalse("Object class " + objectClassQname + " is empty", accountDefinition.isIgnored());
Collection<? extends ResourceAttributeDefinition> identifiers = accountDefinition.getPrimaryIdentifiers();
assertNotNull("Null identifiers for " + objectClassQname, identifiers);
assertFalse("Empty identifiers for " + objectClassQname, identifiers.isEmpty());
ResourceAttributeDefinition<String> idPrimaryDef = accountDefinition.findAttributeDefinition(
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME));
assertNotNull("No definition for attribute "+OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME, idPrimaryDef);
assertTrue("Attribute "+OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME+" in not an identifier",idPrimaryDef.isIdentifier(accountDefinition));
assertTrue("Attribute "+OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME+" in not in identifiers list",identifiers.contains(idPrimaryDef));
assertEquals("Attribute "+OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME+" has wrong native name", OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME, idPrimaryDef.getNativeAttributeName());
assertEquals("Attribute "+OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME+" has wrong framework name", Uid.NAME, idPrimaryDef.getFrameworkAttributeName());
ResourceAttributeDefinition<String> idSecondaryDef = accountDefinition.findAttributeDefinition(
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME));
assertNotNull("No definition for attribute "+SchemaConstants.ICFS_NAME, idSecondaryDef);
assertTrue("Attribute "+OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME+" in not secondary identifier",idSecondaryDef.isSecondaryIdentifier(accountDefinition));
assertFalse("Attribute "+OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME+" in in identifiers list and it should NOT be",identifiers.contains(idSecondaryDef));
assertTrue("Attribute "+OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME+" in not in secomdary identifiers list",accountDefinition.getSecondaryIdentifiers().contains(idSecondaryDef));
assertEquals("Attribute "+OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME+" has wrong native name", OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME, idSecondaryDef.getNativeAttributeName());
assertEquals("Attribute "+OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME+" has wrong framework name", Name.NAME, idSecondaryDef.getFrameworkAttributeName());
assertEquals("Unexpected identifiers: "+identifiers, 1, identifiers.size());
assertEquals("Unexpected secondary identifiers: "+accountDefinition.getSecondaryIdentifiers(), 1, accountDefinition.getSecondaryIdentifiers().size());
}
private Collection<ResourceAttribute<?>> addSampleResourceObject(String name, String givenName, String familyName)
throws CommunicationException, GenericFrameworkException, SchemaException,
ObjectAlreadyExistsException, ConfigurationException {
OperationResult result = new OperationResult(this.getClass().getName() + ".testAdd");
QName objectClassQname = new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME);
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(objectClassQname);
assertNotNull("No object class definition "+objectClassQname, accountDefinition);
ResourceAttributeContainer resourceObject = accountDefinition.instantiate(ShadowType.F_ATTRIBUTES);
ResourceAttributeDefinition<String> attributeDefinition = accountDefinition
.findAttributeDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME));
ResourceAttribute<String> attribute = attributeDefinition.instantiate();
attribute.setValue(new PrismPropertyValue<String>("uid=" + name + ",ou=people,dc=example,dc=com"));
resourceObject.add(attribute);
attributeDefinition = accountDefinition
.findAttributeDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "sn"));
attribute = attributeDefinition.instantiate();
attribute.setValue(new PrismPropertyValue(familyName));
resourceObject.add(attribute);
attributeDefinition = accountDefinition
.findAttributeDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "cn"));
attribute = attributeDefinition.instantiate();
attribute.setValue(new PrismPropertyValue(givenName + " " + familyName));
resourceObject.add(attribute);
attributeDefinition = accountDefinition.findAttributeDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType),
"givenName"));
attribute = attributeDefinition.instantiate();
attribute.setValue(new PrismPropertyValue(givenName));
resourceObject.add(attribute);
PrismObject<ShadowType> shadow = wrapInShadow(ShadowType.class, resourceObject);
Set<Operation> operation = new HashSet<Operation>();
AsynchronousOperationReturnValue<Collection<ResourceAttribute<?>>> ret = cc.addObject(shadow, operation, null, result);
Collection<ResourceAttribute<?>> resourceAttributes = ret.getReturnValue();
return resourceAttributes;
}
private String getEntryUuid(Collection<ResourceAttribute<?>> identifiers) {
for (ResourceAttribute<?> identifier : identifiers) {
if (identifier.getElementName().equals(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME))) {
return identifier.getValue(String.class).getValue();
}
}
return null;
}
@Test
public void test100AddDeleteObject() throws Exception {
final String TEST_NAME = "test100AddDeleteObject";
TestUtil.displayTestTile(this, TEST_NAME);
OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
Collection<ResourceAttribute<?>> identifiers = addSampleResourceObject("john", "John", "Smith");
String uid = null;
for (ResourceAttribute<?> resourceAttribute : identifiers) {
if (SchemaConstants.ICFS_UID.equals(resourceAttribute.getElementName())) {
uid = resourceAttribute.getValue(String.class).getValue();
System.out.println("uuuuid:" + uid);
assertNotNull(uid);
}
}
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME);
cc.deleteObject(accountDefinition, null, identifiers, null, result);
ResourceObjectIdentification identification = ResourceObjectIdentification.createFromAttributes(
accountDefinition, identifiers);
PrismObject<ShadowType> resObj = null;
try {
resObj = cc.fetchObject(identification, null, null,
result);
Assert.fail();
} catch (ObjectNotFoundException ex) {
AssertJUnit.assertNull(resObj);
}
}
@Test
public void test110ChangeModifyObject() throws Exception {
final String TEST_NAME = "test110ChangeModifyObject";
TestUtil.displayTestTile(this, TEST_NAME);
OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
Collection<ResourceAttribute<?>> identifiers = addSampleResourceObject("john", "John", "Smith");
Set<Operation> changes = new HashSet<Operation>();
changes.add(createAddAttributeChange("employeeNumber", "123123123"));
changes.add(createReplaceAttributeChange("sn", "Smith007"));
changes.add(createAddAttributeChange("street", "Wall Street"));
changes.add(createDeleteAttributeChange("givenName", "John"));
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME);
cc.modifyObject(accountDefinition, identifiers, changes, null, result);
ResourceObjectIdentification identification = ResourceObjectIdentification.createFromAttributes(
accountDefinition, identifiers);
PrismObject<ShadowType> shadow = cc.fetchObject(identification, null, null, result);
ResourceAttributeContainer resObj = ShadowUtil.getAttributesContainer(shadow);
AssertJUnit.assertNull(resObj.findAttribute(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "givenName")));
String addedEmployeeNumber = resObj
.findAttribute(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "employeeNumber")).getValue(String.class)
.getValue();
String changedSn = resObj.findAttribute(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "sn"))
.getValues(String.class).iterator().next().getValue();
String addedStreet = resObj.findAttribute(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "street"))
.getValues(String.class).iterator().next().getValue();
System.out.println("changed employee number: " + addedEmployeeNumber);
System.out.println("changed sn: " + changedSn);
System.out.println("added street: " + addedStreet);
AssertJUnit.assertEquals("123123123", addedEmployeeNumber);
AssertJUnit.assertEquals("Smith007", changedSn);
AssertJUnit.assertEquals("Wall Street", addedStreet);
}
@Test
public void test200FetchChanges() throws Exception {
final String TEST_NAME = "test200FetchChanges";
TestUtil.displayTestTile(this, TEST_NAME);
OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME);
PrismProperty<Integer> lastToken = cc.fetchCurrentToken(accountDefinition, null, result);
System.out.println("Property:");
System.out.println(SchemaDebugUtil.prettyPrint(lastToken));
System.out.println("token " + lastToken.toString());
assertNotNull("No last token", lastToken);
assertNotNull("No last token value", lastToken.getRealValue());
List<Change> changes = cc.fetchChanges(accountDefinition, lastToken, null, null, result);
display("Changes", changes);
// Just one pseudo-change that updates the token
AssertJUnit.assertEquals(1, changes.size());
Change change = changes.get(0);
assertNull(change.getCurrentShadow());
assertNull(change.getIdentifiers());
assertNull(change.getObjectDelta());
assertNotNull(change.getToken());
}
private PrismProperty createProperty(String propertyName, String propertyValue) {
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME));
ResourceAttributeDefinition propertyDef = accountDefinition.findAttributeDefinition(new QName(
ResourceTypeUtil.getResourceNamespace(resourceType), propertyName));
ResourceAttribute property = propertyDef.instantiate();
property.setValue(new PrismPropertyValue(propertyValue));
return property;
}
private PropertyModificationOperation createReplaceAttributeChange(String propertyName, String propertyValue) {
PrismProperty property = createProperty(propertyName, propertyValue);
ItemPath propertyPath = new ItemPath(ShadowType.F_ATTRIBUTES,
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), propertyName));
PropertyDelta delta = new PropertyDelta(propertyPath, property.getDefinition(), prismContext);
delta.setValueToReplace(new PrismPropertyValue(propertyValue));
PropertyModificationOperation attributeModification = new PropertyModificationOperation(delta);
return attributeModification;
}
private PropertyModificationOperation createAddAttributeChange(String propertyName, String propertyValue) {
PrismProperty property = createProperty(propertyName, propertyValue);
ItemPath propertyPath = new ItemPath(ShadowType.F_ATTRIBUTES,
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), propertyName));
PropertyDelta delta = new PropertyDelta(propertyPath, property.getDefinition(), prismContext);
delta.addValueToAdd(new PrismPropertyValue(propertyValue));
PropertyModificationOperation attributeModification = new PropertyModificationOperation(delta);
return attributeModification;
}
private PropertyModificationOperation createDeleteAttributeChange(String propertyName, String propertyValue) {
PrismProperty property = createProperty(propertyName, propertyValue);
ItemPath propertyPath = new ItemPath(ShadowType.F_ATTRIBUTES,
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), propertyName));
PropertyDelta delta = new PropertyDelta(propertyPath, property.getDefinition(), prismContext);
delta.addValueToDelete(new PrismPropertyValue(propertyValue));
PropertyModificationOperation attributeModification = new PropertyModificationOperation(delta);
return attributeModification;
}
private PropertyModificationOperation createActivationChange(ActivationStatusType status) {
PrismObjectDefinition<ShadowType> shadowDefinition = getShadowDefinition(ShadowType.class);
PropertyDelta<ActivationStatusType> delta = PropertyDelta.createDelta(
new ItemPath(ShadowType.F_ACTIVATION, ActivationType.F_ADMINISTRATIVE_STATUS),
shadowDefinition);
delta.setValueToReplace(new PrismPropertyValue<ActivationStatusType>(status));
return new PropertyModificationOperation(delta);
}
/**
* Simple call to connector test() method.
*
* @throws Exception
*/
@Test
public void test300TestConnection() throws Exception {
final String TEST_NAME = "test300TestConnection";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
OperationResult result = new OperationResult(TEST_NAME);
// WHEN
cc.test(result);
// THEN
result.computeStatus("test failed");
AssertJUnit.assertNotNull(result);
OperationResult connectorConnectionResult = result.getSubresults().get(0);
AssertJUnit.assertNotNull(connectorConnectionResult);
System.out.println("Test \"connector connection\" result: " + connectorConnectionResult);
AssertJUnit.assertTrue(connectorConnectionResult.isSuccess());
AssertJUnit.assertTrue(result.isSuccess());
}
/**
* Simple call to connector test() method.
*
* @throws Exception
*/
@Test
public void test310TestConnectionNegative() throws Exception {
final String TEST_NAME = "test310TestConnectionNegative";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
OperationResult result = new OperationResult(TEST_NAME);
ConnectorInstance badConnector = factory.createConnectorInstance(connectorType,
ResourceTypeUtil.getResourceNamespace(badResourceType), "test connector");
badConnector.configure(badResourceType.getConnectorConfiguration().asPrismContainerValue(), result);
// WHEN
badConnector.test(result);
// THEN
result.computeStatus("test failed");
display("Test result (FAILURE EXPECTED)", result);
AssertJUnit.assertNotNull(result);
OperationResult connectorConnectionResult = result.getSubresults().get(1);
AssertJUnit.assertNotNull(connectorConnectionResult);
System.out.println("Test \"connector connection\" result: " + connectorConnectionResult
+ " (FAILURE EXPECTED)");
AssertJUnit.assertTrue("Unexpected success of bad connector test",
!connectorConnectionResult.isSuccess());
AssertJUnit.assertTrue(!result.isSuccess());
}
/**
* Test fetching and translating resource schema.
*
* @throws Exception
*/
@Test
public void test400FetchResourceSchema() throws Exception {
final String TEST_NAME = "test400FetchResourceSchema";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
// WHEN
// The schema was fetched during test init. Now just check if it was OK.
// THEN
AssertJUnit.assertNotNull(resourceSchema);
System.out.println(resourceSchema.debugDump());
Document xsdSchema = resourceSchema.serializeToXsd();
System.out
.println("-------------------------------------------------------------------------------------");
System.out.println(DOMUtil.printDom(xsdSchema));
System.out
.println("-------------------------------------------------------------------------------------");
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema
.findObjectClassDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME));
AssertJUnit.assertNotNull(accountDefinition);
AssertJUnit.assertFalse("No identifiers for account object class ", accountDefinition
.getPrimaryIdentifiers().isEmpty());
PrismPropertyDefinition uidDefinition = accountDefinition.findAttributeDefinition(
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME));
AssertJUnit.assertNotNull(uidDefinition);
for (Definition def : resourceSchema.getDefinitions()) {
if (def instanceof ResourceAttributeContainerDefinition) {
ResourceAttributeContainerDefinition rdef = (ResourceAttributeContainerDefinition) def;
assertNotEmpty("No type name in object class", rdef.getTypeName());
assertNotEmpty("No native object class for " + rdef.getTypeName(),
rdef.getNativeObjectClass());
// This is maybe not that important, but just for a sake of
// completeness
assertNotEmpty("No name for " + rdef.getTypeName(), rdef.getName());
}
}
}
@Test
public void test410Capabilities() throws Exception {
final String TEST_NAME = "test410Capabilities";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
OperationResult result = new OperationResult(TEST_NAME);
// WHEN
Collection<Object> capabilities = cc.fetchCapabilities(result);
// THEN
result.computeStatus("getCapabilities failed");
TestUtil.assertSuccess("getCapabilities failed (result)", result);
assertFalse("Empty capabilities returned", capabilities.isEmpty());
CredentialsCapabilityType capCred = CapabilityUtil.getCapability(capabilities,
CredentialsCapabilityType.class);
assertNotNull("password capability not present", capCred.getPassword());
PagedSearchCapabilityType capPage = CapabilityUtil.getCapability(capabilities, PagedSearchCapabilityType.class);
assertNotNull("paged search capability not present", capPage);
}
@Test
public void test500FetchObject() throws Exception {
final String TEST_NAME = "test500FetchObject";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
ResourceAttributeContainer resourceObject = createResourceObject(
"uid=Teell,ou=People,dc=example,dc=com", "Teell William", "Teell");
OperationResult addResult = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
PrismObject<ShadowType> shadow = wrapInShadow(ShadowType.class, resourceObject);
// Add a testing object
cc.addObject(shadow, null, null, addResult);
ObjectClassComplexTypeDefinition accountDefinition = resourceObject.getDefinition().getComplexTypeDefinition();
Collection<ResourceAttribute<?>> identifiers = resourceObject.getPrimaryIdentifiers();
// Determine object class from the schema
ResourceObjectIdentification identification = new ResourceObjectIdentification(accountDefinition, identifiers, null);
OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
// WHEN
PrismObject<ShadowType> ro = cc.fetchObject(identification, null, null, result);
// THEN
AssertJUnit.assertNotNull(ro);
System.out.println("Fetched object " + ro);
System.out.println("Result:");
System.out.println(result.debugDump());
}
@Test
public void test510Search() throws Exception {
final String TEST_NAME = "test510Search";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME);
// Determine object class from the schema
ShadowResultHandler handler = new ShadowResultHandler() {
@Override
public boolean handle(PrismObject<ShadowType> object) {
System.out.println("Search: found: " + object);
return true;
}
};
OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
// WHEN
cc.search(accountDefinition, new ObjectQuery(), handler, null, null, null, null, result);
// THEN
}
@Test
public void test600CreateAccountWithPassword() throws Exception {
final String TEST_NAME = "test600CreateAccountWithPassword";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
ResourceAttributeContainer resourceObject = createResourceObject(
"uid=lechuck,ou=people,dc=example,dc=com", "Ghost Pirate LeChuck", "LeChuck");
Set<Operation> additionalOperations = new HashSet<Operation>();
ProtectedStringType ps = protector.encryptString("t4k30v3rTh3W0rld");
// PasswordChangeOperation passOp = new PasswordChangeOperation(ps);
// additionalOperations.add(passOp);
OperationResult addResult = new OperationResult(this.getClass().getName()
+ "." + TEST_NAME);
PrismObject<ShadowType> shadow = wrapInShadow(ShadowType.class, resourceObject);
CredentialsType credentials = new CredentialsType();
PasswordType pass = new PasswordType();
pass.setValue(ps);
credentials.setPassword(pass);
shadow.asObjectable().setCredentials(credentials);
// WHEN
cc.addObject(shadow, additionalOperations, null, addResult);
// THEN
String entryUuid = (String) resourceObject.getPrimaryIdentifier().getValue().getValue();
Entry entry = openDJController.searchAndAssertByEntryUuid(entryUuid);
display("Entry before change", entry);
String passwordAfter = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull(passwordAfter);
System.out.println("Changed password: " + passwordAfter);
// TODO
}
@Test
public void test610ChangePassword() throws Exception {
final String TEST_NAME = "test610ChangePassword";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
ResourceAttributeContainer resourceObject = createResourceObject(
"uid=drake,ou=People,dc=example,dc=com", "Sir Francis Drake", "Drake");
PrismObject<ShadowType> shadow = wrapInShadow(ShadowType.class, resourceObject);
OperationResult addResult = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
// Add a testing object
cc.addObject(shadow, null, null, addResult);
String entryUuid = (String) resourceObject.getPrimaryIdentifier().getValue().getValue();
Entry entry = openDJController.searchAndAssertByEntryUuid(entryUuid);
display("Entry before change", entry);
String passwordBefore = OpenDJController.getAttributeValue(entry, "userPassword");
// We have set no password during create, therefore the password should
// be empty
assertNull(passwordBefore);
ObjectClassComplexTypeDefinition accountDefinition = resourceObject.getDefinition().getComplexTypeDefinition();
Collection<ResourceAttribute<?>> identifiers = resourceObject.getPrimaryIdentifiers();
// Determine object class from the schema
OperationResult result = new OperationResult(this.getClass().getName() + ".testFetchObject");
// WHEN
Set<Operation> changes = new HashSet<Operation>();
ProtectedStringType passPs = protector.encryptString("salalala");
ItemDeltaType propMod = new ItemDeltaType();
//create modification path
Document doc = DOMUtil.getDocument();
ItemPathType path = new ItemPathType("credentials/password/value");
// PropertyPath propPath = new PropertyPath(new PropertyPath(ResourceObjectShadowType.F_CREDENTIALS), CredentialsType.F_PASSWORD);
propMod.setPath(path);
//set the replace value
MapXNode passPsXnode = ((PrismContextImpl) prismContext).getBeanMarshaller().marshalProtectedDataType(passPs, null);
RawType value = new RawType(passPsXnode, prismContext);
propMod.getValue().add(value);
//set the modificaion type
propMod.setModificationType(ModificationTypeType.REPLACE);
PropertyDelta passDelta = (PropertyDelta)DeltaConvertor.createItemDelta(propMod, shadow.getDefinition());
PropertyModificationOperation passwordModification = new PropertyModificationOperation(passDelta);
changes.add(passwordModification);
// PasswordChangeOperation passwordChange = new PasswordChangeOperation(passPs);
// changes.add(passwordChange);
cc.modifyObject(accountDefinition, identifiers, changes, null, result);
// THEN
entry = openDJController.searchAndAssertByEntryUuid(entryUuid);
display("Entry after change", entry);
String passwordAfter = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull(passwordAfter);
System.out.println("Account password: " + passwordAfter);
}
private ResourceAttributeContainer createResourceObject(String dn, String sn, String cn) throws SchemaException {
// Account type is hardcoded now
ObjectClassComplexTypeDefinition accountDefinition = resourceSchema
.findObjectClassDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.OBJECT_CLASS_INETORGPERSON_NAME));
// Determine identifier from the schema
ResourceAttributeContainer resourceObject = accountDefinition.instantiate(ShadowType.F_ATTRIBUTES);
ResourceAttributeDefinition road = accountDefinition.findAttributeDefinition(
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "sn"));
ResourceAttribute roa = road.instantiate();
roa.setValue(new PrismPropertyValue(sn));
resourceObject.add(roa);
road = accountDefinition.findAttributeDefinition(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "cn"));
roa = road.instantiate();
roa.setValue(new PrismPropertyValue(cn));
resourceObject.add(roa);
road = accountDefinition.findAttributeDefinition(
new QName(ResourceTypeUtil.getResourceNamespace(resourceType), OpenDJController.RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME));
roa = road.instantiate();
roa.setValue(new PrismPropertyValue(dn));
resourceObject.add(roa);
return resourceObject;
}
private <T extends ShadowType> PrismObject<T> wrapInShadow(Class<T> type, ResourceAttributeContainer resourceObject) throws SchemaException {
PrismObjectDefinition<T> shadowDefinition = getShadowDefinition(type);
PrismObject<T> shadow = shadowDefinition.instantiate();
resourceObject.setElementName(ShadowType.F_ATTRIBUTES);
shadow.getValue().add(resourceObject);
return shadow;
}
/**
 * Returns the prism object definition registered for the given shadow
 * compile-time class.
 */
private <T extends ShadowType> PrismObjectDefinition<T> getShadowDefinition(Class<T> type) {
return prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(type);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.instructions;
import java.util.HashMap;
import org.apache.sysml.lops.Checkpoint;
import org.apache.sysml.lops.Compression;
import org.apache.sysml.lops.DataGen;
import org.apache.sysml.lops.LeftIndex;
import org.apache.sysml.lops.RightIndex;
import org.apache.sysml.lops.WeightedCrossEntropy;
import org.apache.sysml.lops.WeightedCrossEntropyR;
import org.apache.sysml.lops.WeightedDivMM;
import org.apache.sysml.lops.WeightedDivMMR;
import org.apache.sysml.lops.WeightedSigmoid;
import org.apache.sysml.lops.WeightedSigmoidR;
import org.apache.sysml.lops.WeightedSquaredLoss;
import org.apache.sysml.lops.WeightedSquaredLossR;
import org.apache.sysml.lops.WeightedUnaryMM;
import org.apache.sysml.lops.WeightedUnaryMMR;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.instructions.spark.AggregateTernarySPInstruction;
import org.apache.sysml.runtime.instructions.spark.AggregateUnarySPInstruction;
import org.apache.sysml.runtime.instructions.spark.AppendGAlignedSPInstruction;
import org.apache.sysml.runtime.instructions.spark.AppendGSPInstruction;
import org.apache.sysml.runtime.instructions.spark.AppendMSPInstruction;
import org.apache.sysml.runtime.instructions.spark.AppendRSPInstruction;
import org.apache.sysml.runtime.instructions.spark.BinUaggChainSPInstruction;
import org.apache.sysml.runtime.instructions.spark.BinarySPInstruction;
import org.apache.sysml.runtime.instructions.spark.BuiltinNarySPInstruction;
import org.apache.sysml.runtime.instructions.spark.CSVReblockSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CastSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CentralMomentSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CheckpointSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CompressionSPInstruction;
import org.apache.sysml.runtime.instructions.spark.ConvolutionSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CovarianceSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CpmmSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CumulativeAggregateSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CumulativeOffsetSPInstruction;
import org.apache.sysml.runtime.instructions.spark.IndexingSPInstruction;
import org.apache.sysml.runtime.instructions.spark.MapmmChainSPInstruction;
import org.apache.sysml.runtime.instructions.spark.MapmmSPInstruction;
import org.apache.sysml.runtime.instructions.spark.MatrixReshapeSPInstruction;
import org.apache.sysml.runtime.instructions.spark.MultiReturnParameterizedBuiltinSPInstruction;
import org.apache.sysml.runtime.instructions.spark.PMapmmSPInstruction;
import org.apache.sysml.runtime.instructions.spark.ParameterizedBuiltinSPInstruction;
import org.apache.sysml.runtime.instructions.spark.TernarySPInstruction;
import org.apache.sysml.runtime.instructions.spark.PmmSPInstruction;
import org.apache.sysml.runtime.instructions.spark.QuantilePickSPInstruction;
import org.apache.sysml.runtime.instructions.spark.QuaternarySPInstruction;
import org.apache.sysml.runtime.instructions.spark.RandSPInstruction;
import org.apache.sysml.runtime.instructions.spark.ReblockSPInstruction;
import org.apache.sysml.runtime.instructions.spark.ReorgSPInstruction;
import org.apache.sysml.runtime.instructions.spark.RmmSPInstruction;
import org.apache.sysml.runtime.instructions.spark.SPInstruction;
import org.apache.sysml.runtime.instructions.spark.SPInstruction.SPType;
import org.apache.sysml.runtime.instructions.spark.SpoofSPInstruction;
import org.apache.sysml.runtime.instructions.spark.CtableSPInstruction;
import org.apache.sysml.runtime.instructions.spark.Tsmm2SPInstruction;
import org.apache.sysml.runtime.instructions.spark.TsmmSPInstruction;
import org.apache.sysml.runtime.instructions.spark.QuantileSortSPInstruction;
import org.apache.sysml.runtime.instructions.spark.UaggOuterChainSPInstruction;
import org.apache.sysml.runtime.instructions.spark.UnaryMatrixSPInstruction;
import org.apache.sysml.runtime.instructions.spark.WriteSPInstruction;
import org.apache.sysml.runtime.instructions.spark.ZipmmSPInstruction;
/**
 * Parses serialized Spark (SP) instruction strings into executable
 * {@link SPInstruction} objects. An opcode is first mapped to an
 * {@link SPType} via a static lookup table, then dispatched to the
 * parseInstruction method of the matching instruction class.
 */
public class SPInstructionParser extends InstructionParser
{
    /** Lookup table from opcode string to SP instruction type; populated once at class load. */
    public static final HashMap<String, SPType> String2SPInstructionType;

    static {
        String2SPInstructionType = new HashMap<>();

        //unary aggregate operators
        String2SPInstructionType.put( "uak+"    , SPType.AggregateUnary);
        String2SPInstructionType.put( "uark+"   , SPType.AggregateUnary);
        String2SPInstructionType.put( "uack+"   , SPType.AggregateUnary);
        String2SPInstructionType.put( "uasqk+"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarsqk+" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uacsqk+" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uamean"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarmean" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uacmean" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uavar"   , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarvar"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uacvar"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uamax"   , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarmax"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarimax" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uacmax"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uamin"   , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarmin"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "uarimin" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uacmin"  , SPType.AggregateUnary);
        String2SPInstructionType.put( "ua+"     , SPType.AggregateUnary);
        String2SPInstructionType.put( "uar+"    , SPType.AggregateUnary);
        String2SPInstructionType.put( "uac+"    , SPType.AggregateUnary);
        String2SPInstructionType.put( "ua*"     , SPType.AggregateUnary);
        String2SPInstructionType.put( "uar*"    , SPType.AggregateUnary);
        String2SPInstructionType.put( "uac*"    , SPType.AggregateUnary);
        String2SPInstructionType.put( "uatrace" , SPType.AggregateUnary);
        String2SPInstructionType.put( "uaktrace", SPType.AggregateUnary);

        //binary aggregate operators (matrix multiplication operators)
        String2SPInstructionType.put( "mapmm"      , SPType.MAPMM);
        String2SPInstructionType.put( "mapmmchain" , SPType.MAPMMCHAIN);
        String2SPInstructionType.put( "tsmm"       , SPType.TSMM); //single-pass tsmm
        String2SPInstructionType.put( "tsmm2"      , SPType.TSMM2); //multi-pass tsmm
        String2SPInstructionType.put( "cpmm"       , SPType.CPMM);
        String2SPInstructionType.put( "rmm"        , SPType.RMM);
        String2SPInstructionType.put( "pmm"        , SPType.PMM);
        String2SPInstructionType.put( "zipmm"      , SPType.ZIPMM);
        String2SPInstructionType.put( "pmapmm"     , SPType.PMAPMM);
        String2SPInstructionType.put( "uaggouterchain", SPType.UaggOuterChain);

        //ternary aggregate operators
        String2SPInstructionType.put( "tak+*" , SPType.AggregateTernary);
        String2SPInstructionType.put( "tack+*", SPType.AggregateTernary);

        // Neural network operators
        String2SPInstructionType.put( "conv2d",          SPType.Convolution);
        String2SPInstructionType.put( "conv2d_bias_add", SPType.Convolution);
        String2SPInstructionType.put( "maxpooling",      SPType.Convolution);
        String2SPInstructionType.put( "relu_maxpooling", SPType.Convolution);

        // Matrix indexing
        String2SPInstructionType.put( RightIndex.OPCODE, SPType.MatrixIndexing);
        String2SPInstructionType.put( LeftIndex.OPCODE,  SPType.MatrixIndexing);
        String2SPInstructionType.put( "mapLeftIndex",    SPType.MatrixIndexing);

        // Reorg Instruction Opcodes (repositioning of existing values)
        String2SPInstructionType.put( "r'"     , SPType.Reorg);
        String2SPInstructionType.put( "rev"    , SPType.Reorg);
        String2SPInstructionType.put( "rdiag"  , SPType.Reorg);
        String2SPInstructionType.put( "rshape" , SPType.MatrixReshape);
        String2SPInstructionType.put( "rsort"  , SPType.Reorg);

        // Arithmetic Instruction Opcodes
        String2SPInstructionType.put( "+"      , SPType.Binary);
        String2SPInstructionType.put( "-"      , SPType.Binary);
        String2SPInstructionType.put( "*"      , SPType.Binary);
        String2SPInstructionType.put( "/"      , SPType.Binary);
        String2SPInstructionType.put( "%%"     , SPType.Binary);
        String2SPInstructionType.put( "%/%"    , SPType.Binary);
        String2SPInstructionType.put( "1-*"    , SPType.Binary);
        String2SPInstructionType.put( "^"      , SPType.Binary);
        String2SPInstructionType.put( "^2"     , SPType.Binary);
        String2SPInstructionType.put( "*2"     , SPType.Binary);
        String2SPInstructionType.put( "map+"   , SPType.Binary);
        String2SPInstructionType.put( "map-"   , SPType.Binary);
        String2SPInstructionType.put( "map*"   , SPType.Binary);
        String2SPInstructionType.put( "map/"   , SPType.Binary);
        String2SPInstructionType.put( "map%%"  , SPType.Binary);
        String2SPInstructionType.put( "map%/%" , SPType.Binary);
        String2SPInstructionType.put( "map1-*" , SPType.Binary);
        String2SPInstructionType.put( "map^"   , SPType.Binary);
        String2SPInstructionType.put( "map+*"  , SPType.Binary);
        String2SPInstructionType.put( "map-*"  , SPType.Binary);

        // Relational Instruction Opcodes
        String2SPInstructionType.put( "=="     , SPType.Binary);
        String2SPInstructionType.put( "!="     , SPType.Binary);
        String2SPInstructionType.put( "<"      , SPType.Binary);
        String2SPInstructionType.put( ">"      , SPType.Binary);
        String2SPInstructionType.put( "<="     , SPType.Binary);
        String2SPInstructionType.put( ">="     , SPType.Binary);
        String2SPInstructionType.put( "map>"   , SPType.Binary);
        String2SPInstructionType.put( "map>="  , SPType.Binary);
        String2SPInstructionType.put( "map<"   , SPType.Binary);
        String2SPInstructionType.put( "map<="  , SPType.Binary);
        String2SPInstructionType.put( "map=="  , SPType.Binary);
        String2SPInstructionType.put( "map!="  , SPType.Binary);

        // Boolean Instruction Opcodes
        String2SPInstructionType.put( "&&"           , SPType.Binary);
        String2SPInstructionType.put( "||"           , SPType.Binary);
        String2SPInstructionType.put( "xor"          , SPType.Binary);
        String2SPInstructionType.put( "bitwAnd"      , SPType.Binary);
        String2SPInstructionType.put( "bitwOr"       , SPType.Binary);
        String2SPInstructionType.put( "bitwXor"      , SPType.Binary);
        String2SPInstructionType.put( "bitwShiftL"   , SPType.Binary);
        String2SPInstructionType.put( "bitwShiftR"   , SPType.Binary);
        String2SPInstructionType.put( "!"            , SPType.Unary);
        String2SPInstructionType.put( "map&&"        , SPType.Binary);
        String2SPInstructionType.put( "map||"        , SPType.Binary);
        String2SPInstructionType.put( "mapxor"       , SPType.Binary);
        String2SPInstructionType.put( "mapbitwAnd"   , SPType.Binary);
        String2SPInstructionType.put( "mapbitwOr"    , SPType.Binary);
        String2SPInstructionType.put( "mapbitwXor"   , SPType.Binary);
        String2SPInstructionType.put( "mapbitwShiftL", SPType.Binary);
        String2SPInstructionType.put( "mapbitwShiftR", SPType.Binary);

        // Builtin Instruction Opcodes (binary)
        String2SPInstructionType.put( "max"    , SPType.Binary);
        String2SPInstructionType.put( "min"    , SPType.Binary);
        String2SPInstructionType.put( "mapmax" , SPType.Binary);
        String2SPInstructionType.put( "mapmin" , SPType.Binary);

        // REBLOCK Instruction Opcodes
        String2SPInstructionType.put( "rblk"   , SPType.Reblock);
        String2SPInstructionType.put( "csvrblk", SPType.CSVReblock);

        // Spark-specific instructions
        String2SPInstructionType.put( Checkpoint.OPCODE,  SPType.Checkpoint);
        String2SPInstructionType.put( Compression.OPCODE, SPType.Compression);

        // Builtin Instruction Opcodes (unary)
        String2SPInstructionType.put( "log"    , SPType.Builtin);
        String2SPInstructionType.put( "log_nz" , SPType.Builtin);
        String2SPInstructionType.put( "exp"    , SPType.Unary);
        String2SPInstructionType.put( "abs"    , SPType.Unary);
        String2SPInstructionType.put( "sin"    , SPType.Unary);
        String2SPInstructionType.put( "cos"    , SPType.Unary);
        String2SPInstructionType.put( "tan"    , SPType.Unary);
        String2SPInstructionType.put( "asin"   , SPType.Unary);
        String2SPInstructionType.put( "acos"   , SPType.Unary);
        String2SPInstructionType.put( "atan"   , SPType.Unary);
        String2SPInstructionType.put( "sinh"   , SPType.Unary);
        String2SPInstructionType.put( "cosh"   , SPType.Unary);
        String2SPInstructionType.put( "tanh"   , SPType.Unary);
        String2SPInstructionType.put( "sign"   , SPType.Unary);
        String2SPInstructionType.put( "sqrt"   , SPType.Unary);
        String2SPInstructionType.put( "plogp"  , SPType.Unary);
        String2SPInstructionType.put( "round"  , SPType.Unary);
        String2SPInstructionType.put( "ceil"   , SPType.Unary);
        String2SPInstructionType.put( "floor"  , SPType.Unary);
        String2SPInstructionType.put( "sprop"  , SPType.Unary);
        String2SPInstructionType.put( "sigmoid", SPType.Unary);

        // Parameterized Builtin Functions
        String2SPInstructionType.put( "groupedagg"     , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "mapgroupedagg"  , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "rmempty"        , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "replace"        , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "rexpand"        , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "lowertri"       , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "uppertri"       , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "transformapply" , SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "transformdecode", SPType.ParameterizedBuiltin);
        String2SPInstructionType.put( "transformencode", SPType.MultiReturnBuiltin);

        // Append variants (matrix/frame)
        String2SPInstructionType.put( "mappend",        SPType.MAppend);
        String2SPInstructionType.put( "rappend",        SPType.RAppend);
        String2SPInstructionType.put( "gappend",        SPType.GAppend);
        String2SPInstructionType.put( "galignedappend", SPType.GAlignedAppend);
        String2SPInstructionType.put( "cbind",          SPType.BuiltinNary);
        String2SPInstructionType.put( "rbind",          SPType.BuiltinNary);

        // Data generation
        String2SPInstructionType.put( DataGen.RAND_OPCODE  , SPType.Rand);
        String2SPInstructionType.put( DataGen.SEQ_OPCODE   , SPType.Rand);
        String2SPInstructionType.put( DataGen.SAMPLE_OPCODE, SPType.Rand);

        //ctable instruction opcodes
        String2SPInstructionType.put( "ctable",       SPType.Ctable);
        String2SPInstructionType.put( "ctableexpand", SPType.Ctable);

        //ternary instruction opcodes
        String2SPInstructionType.put( "+*",     SPType.Ternary);
        String2SPInstructionType.put( "-*",     SPType.Ternary);
        String2SPInstructionType.put( "ifelse", SPType.Ternary);

        //quaternary instruction opcodes
        String2SPInstructionType.put( WeightedSquaredLoss.OPCODE,  SPType.Quaternary);
        String2SPInstructionType.put( WeightedSquaredLossR.OPCODE, SPType.Quaternary);
        String2SPInstructionType.put( WeightedSigmoid.OPCODE,      SPType.Quaternary);
        String2SPInstructionType.put( WeightedSigmoidR.OPCODE,     SPType.Quaternary);
        String2SPInstructionType.put( WeightedDivMM.OPCODE,        SPType.Quaternary);
        String2SPInstructionType.put( WeightedDivMMR.OPCODE,       SPType.Quaternary);
        String2SPInstructionType.put( WeightedCrossEntropy.OPCODE, SPType.Quaternary);
        String2SPInstructionType.put( WeightedCrossEntropyR.OPCODE,SPType.Quaternary);
        String2SPInstructionType.put( WeightedUnaryMM.OPCODE,      SPType.Quaternary);
        String2SPInstructionType.put( WeightedUnaryMMR.OPCODE,     SPType.Quaternary);

        //cumsum/cumprod/cummin/cummax
        String2SPInstructionType.put( "ucumack+"  , SPType.CumsumAggregate);
        String2SPInstructionType.put( "ucumac*"   , SPType.CumsumAggregate);
        String2SPInstructionType.put( "ucumacmin" , SPType.CumsumAggregate);
        String2SPInstructionType.put( "ucumacmax" , SPType.CumsumAggregate);
        String2SPInstructionType.put( "bcumoffk+" , SPType.CumsumOffset);
        String2SPInstructionType.put( "bcumoff*"  , SPType.CumsumOffset);
        String2SPInstructionType.put( "bcumoffmin", SPType.CumsumOffset);
        String2SPInstructionType.put( "bcumoffmax", SPType.CumsumOffset);

        //central moment, covariance, quantiles (sort/pick)
        String2SPInstructionType.put( "cm"    , SPType.CentralMoment);
        String2SPInstructionType.put( "cov"   , SPType.Covariance);
        String2SPInstructionType.put( "qsort" , SPType.QSort);
        String2SPInstructionType.put( "qpick" , SPType.QPick);

        String2SPInstructionType.put( "binuaggchain", SPType.BinUaggChain);
        String2SPInstructionType.put( "write"   , SPType.Write);
        String2SPInstructionType.put( "castdtm" , SPType.Cast);
        String2SPInstructionType.put( "castdtf" , SPType.Cast);
        String2SPInstructionType.put( "spoof"   , SPType.SpoofFused);
    }

    /**
     * Parses a single serialized SP instruction, deriving its type from the opcode.
     *
     * @param str serialized instruction string; may be null or empty
     * @return the parsed SP instruction, or {@code null} for a null/empty input
     * @throws DMLRuntimeException if the opcode is unknown or the string cannot be parsed
     */
    public static SPInstruction parseSingleInstruction (String str ) {
        if ( str == null || str.isEmpty() )
            return null;
        SPType cptype = InstructionUtils.getSPType(str);
        if ( cptype == null )
            throw new DMLRuntimeException("Invalid SP Instruction Type: " + str);
        SPInstruction spinst = parseSingleInstruction(cptype, str);
        if ( spinst == null )
            throw new DMLRuntimeException("Unable to parse instruction: " + str);
        return spinst;
    }

    /**
     * Dispatches the serialized instruction string to the parser of the given
     * instruction type.
     *
     * @param sptype resolved SP instruction type
     * @param str serialized instruction string; may be null or empty
     * @return the parsed SP instruction, or {@code null} for a null/empty input
     * @throws DMLRuntimeException for unsupported types or malformed instructions
     */
    public static SPInstruction parseSingleInstruction ( SPType sptype, String str ) {
        if ( str == null || str.isEmpty() )
            return null;
        String [] parts = null;
        switch(sptype)
        {
            // matrix multiplication instructions
            case CPMM:
                return CpmmSPInstruction.parseInstruction(str);
            case RMM:
                return RmmSPInstruction.parseInstruction(str);
            case MAPMM:
                return MapmmSPInstruction.parseInstruction(str);
            case MAPMMCHAIN:
                return MapmmChainSPInstruction.parseInstruction(str);
            case TSMM:
                return TsmmSPInstruction.parseInstruction(str);
            case TSMM2:
                return Tsmm2SPInstruction.parseInstruction(str);
            case PMM:
                return PmmSPInstruction.parseInstruction(str);
            case ZIPMM:
                return ZipmmSPInstruction.parseInstruction(str);
            case PMAPMM:
                return PMapmmSPInstruction.parseInstruction(str);
            case UaggOuterChain:
                return UaggOuterChainSPInstruction.parseInstruction(str);
            case AggregateUnary:
                return AggregateUnarySPInstruction.parseInstruction(str);
            case AggregateTernary:
                return AggregateTernarySPInstruction.parseInstruction(str);
            case Convolution:
                return ConvolutionSPInstruction.parseInstruction(str);
            case MatrixIndexing:
                return IndexingSPInstruction.parseInstruction(str);
            case Reorg:
                return ReorgSPInstruction.parseInstruction(str);
            case Binary:
                return BinarySPInstruction.parseInstruction(str);
            case Ternary:
                return TernarySPInstruction.parseInstruction(str);
            //ctable instructions
            case Ctable:
                return CtableSPInstruction.parseInstruction(str);
            //quaternary instructions
            case Quaternary:
                return QuaternarySPInstruction.parseInstruction(str);
            // Reblock instructions
            case Reblock:
                return ReblockSPInstruction.parseInstruction(str);
            case CSVReblock:
                return CSVReblockSPInstruction.parseInstruction(str);
            case Builtin:
                parts = InstructionUtils.getInstructionPartsWithValueType(str);
                if ( parts[0].equals("log") || parts[0].equals("log_nz") ) {
                    if ( parts.length == 3 ) {
                        // B=log(A), y=log(x)
                        return UnaryMatrixSPInstruction.parseInstruction(str);
                    } else if ( parts.length == 4 ) {
                        // B=log(A,10), y=log(x,10)
                        return BinarySPInstruction.parseInstruction(str);
                    }
                }
                // fail fast for non-log builtins and malformed log instructions;
                // previously a log instruction with an unexpected operand count
                // silently fell through to the Unary case
                throw new DMLRuntimeException("Invalid Builtin Instruction: " + str );
            case Unary:
                return UnaryMatrixSPInstruction.parseInstruction(str);
            case BuiltinNary:
                return BuiltinNarySPInstruction.parseInstruction(str);
            case ParameterizedBuiltin:
                return ParameterizedBuiltinSPInstruction.parseInstruction(str);
            case MultiReturnBuiltin:
                return MultiReturnParameterizedBuiltinSPInstruction.parseInstruction(str);
            case MatrixReshape:
                return MatrixReshapeSPInstruction.parseInstruction(str);
            case MAppend: //matrix/frame
                return AppendMSPInstruction.parseInstruction(str);
            case RAppend: //matrix/frame
                return AppendRSPInstruction.parseInstruction(str);
            case GAppend:
                return AppendGSPInstruction.parseInstruction(str);
            case GAlignedAppend:
                return AppendGAlignedSPInstruction.parseInstruction(str);
            case Rand:
                return RandSPInstruction.parseInstruction(str);
            case QSort:
                return QuantileSortSPInstruction.parseInstruction(str);
            case QPick:
                return QuantilePickSPInstruction.parseInstruction(str);
            case Write:
                return WriteSPInstruction.parseInstruction(str);
            case CumsumAggregate:
                return CumulativeAggregateSPInstruction.parseInstruction(str);
            case CumsumOffset:
                return CumulativeOffsetSPInstruction.parseInstruction(str);
            case CentralMoment:
                return CentralMomentSPInstruction.parseInstruction(str);
            case Covariance:
                return CovarianceSPInstruction.parseInstruction(str);
            case BinUaggChain:
                return BinUaggChainSPInstruction.parseInstruction(str);
            case Checkpoint:
                return CheckpointSPInstruction.parseInstruction(str);
            case Compression:
                return CompressionSPInstruction.parseInstruction(str);
            case SpoofFused:
                return SpoofSPInstruction.parseInstruction(str);
            case Cast:
                return CastSPInstruction.parseInstruction(str);
            default:
                throw new DMLRuntimeException("Invalid SP Instruction Type: " + sptype );
        }
    }
}
| |
package com.fsck.k9.activity;
import java.util.Collection;
import java.util.List;
import android.app.SearchManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences.Editor;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentManager.OnBackStackChangedListener;
import android.support.v4.app.FragmentTransaction;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuItem;
import com.fsck.k9.Account;
import com.fsck.k9.Account.SortType;
import com.fsck.k9.K9;
import com.fsck.k9.K9.SplitViewMode;
import com.fsck.k9.Preferences;
import com.fsck.k9.R;
import com.fsck.k9.activity.misc.SwipeGestureDetector.OnSwipeGestureListener;
import com.fsck.k9.activity.setup.AccountSettings;
import com.fsck.k9.activity.setup.FolderSettings;
import com.fsck.k9.activity.setup.Prefs;
import com.fsck.k9.crypto.PgpData;
import com.fsck.k9.fragment.MessageListFragment;
import com.fsck.k9.fragment.MessageViewFragment;
import com.fsck.k9.fragment.MessageListFragment.MessageListFragmentListener;
import com.fsck.k9.fragment.MessageViewFragment.MessageViewFragmentListener;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.store.StorageManager;
import com.fsck.k9.search.LocalSearch;
import com.fsck.k9.search.SearchAccount;
import com.fsck.k9.search.SearchSpecification;
import com.fsck.k9.search.SearchSpecification.Attribute;
import com.fsck.k9.search.SearchSpecification.Searchfield;
import com.fsck.k9.search.SearchSpecification.SearchCondition;
import com.fsck.k9.view.MessageHeader;
import com.fsck.k9.view.MessageTitleView;
import com.fsck.k9.view.ViewSwitcher;
import com.fsck.k9.view.ViewSwitcher.OnSwitchCompleteListener;
import de.cketti.library.changelog.ChangeLog;
/**
* MessageList is the primary user interface for the program. This Activity
* shows a list of messages.
* From this Activity the user can perform all standard message operations.
*/
public class MessageList extends K9FragmentActivity implements MessageListFragmentListener,
MessageViewFragmentListener, OnBackStackChangedListener, OnSwipeGestureListener,
OnSwitchCompleteListener {
// Intent extras / actions used when launching this activity
private static final String EXTRA_SEARCH = "search";
private static final String EXTRA_NO_THREADING = "no_threading";
private static final String ACTION_SHORTCUT = "shortcut";
private static final String EXTRA_SPECIAL_FOLDER = "special_folder";
private static final String EXTRA_MESSAGE_REFERENCE = "message_reference";
// used for remote search
public static final String EXTRA_SEARCH_ACCOUNT = "com.fsck.k9.search_account";
private static final String EXTRA_SEARCH_FOLDER = "com.fsck.k9.search_folder";
// keys for saved instance state
private static final String STATE_DISPLAY_MODE = "displayMode";
private static final String STATE_MESSAGE_LIST_WAS_DISPLAYED = "messageListWasDisplayed";
// Used for navigating to next/previous message
private static final int PREVIOUS = 1;
private static final int NEXT = 2;
/**
 * Shows a message list for the given search, clearing any existing instance
 * of this activity on top of the stack (clearTop defaults to {@code true}).
 */
public static void actionDisplaySearch(Context context, SearchSpecification search,
boolean noThreading, boolean newTask) {
actionDisplaySearch(context, search, noThreading, newTask, true);
}
/**
 * Starts this activity displaying the result of the given search
 * specification; delegates intent construction to
 * {@link #intentDisplaySearch(Context, SearchSpecification, boolean, boolean, boolean)}.
 */
public static void actionDisplaySearch(Context context, SearchSpecification search,
boolean noThreading, boolean newTask, boolean clearTop) {
context.startActivity(
intentDisplaySearch(context, search, noThreading, newTask, clearTop));
}
/**
 * Builds an intent that launches {@link MessageList} showing the messages
 * matching the given search specification.
 *
 * @param noThreading {@code true} to force a flat (non-threaded) list
 * @param newTask whether to start the activity in a new task
 * @param clearTop whether to clear activities above an existing instance
 */
public static Intent intentDisplaySearch(Context context, SearchSpecification search,
        boolean noThreading, boolean newTask, boolean clearTop) {
    Intent intent = new Intent(context, MessageList.class);
    intent.putExtra(EXTRA_SEARCH, search);
    intent.putExtra(EXTRA_NO_THREADING, noThreading);

    // Collect launch flags first, then apply them in one call.
    int launchFlags = 0;
    if (clearTop) {
        launchFlags |= Intent.FLAG_ACTIVITY_CLEAR_TOP;
    }
    if (newTask) {
        launchFlags |= Intent.FLAG_ACTIVITY_NEW_TASK;
    }
    intent.addFlags(launchFlags);
    return intent;
}
/**
 * Builds the intent used by home-screen shortcuts to open one of the special
 * folders (e.g. Unified Inbox or All Messages).
 */
public static Intent shortcutIntent(Context context, String specialFolder) {
    Intent shortcut = new Intent(context, MessageList.class);
    shortcut.setAction(ACTION_SHORTCUT);
    shortcut.putExtra(EXTRA_SPECIAL_FOLDER, specialFolder);
    shortcut.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
    return shortcut;
}
/**
 * Builds an intent that opens the single message identified by the given
 * message reference.
 */
public static Intent actionDisplayMessageIntent(Context context,
        MessageReference messageReference) {
    Intent openMessage = new Intent(context, MessageList.class);
    openMessage.putExtra(EXTRA_MESSAGE_REFERENCE, messageReference);
    openMessage.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
    return openMessage;
}
/**
 * Top-level UI states of this activity.
 */
private enum DisplayMode {
MESSAGE_LIST, // only the list of messages is shown
MESSAGE_VIEW, // only a single message is shown
SPLIT_VIEW    // list and message are shown side by side
}
// Listens for storage (e.g. SD card) mount/unmount events.
private StorageManager.StorageListener mStorageListener = new StorageListenerImplementation();
// Action bar and its custom views
private ActionBar mActionBar;
private View mActionBarMessageList;
private View mActionBarMessageView;
private MessageTitleView mActionBarSubject;
private TextView mActionBarTitle;
private TextView mActionBarSubTitle;
private TextView mActionBarUnread;
private Menu mMenu;
// Containers and fragments for the list/view UI
private ViewGroup mMessageViewContainer;
private View mMessageViewPlaceHolder;
private MessageListFragment mMessageListFragment;
private MessageViewFragment mMessageViewFragment;
// Id of the first back-stack entry created by this activity; -1 if none
private int mFirstBackStackId = -1;
// Current account/folder/search context
private Account mAccount;
private String mFolderName;
private LocalSearch mSearch;
private boolean mSingleFolderMode;
private boolean mSingleAccountMode;
private ProgressBar mActionBarProgress;
private MenuItem mMenuButtonCheckMail;
private View mActionButtonIndeterminateProgress;
// Direction (NEXT/PREVIOUS) used when advancing after a message action
private int mLastDirection = (K9.messageViewShowNext()) ? NEXT : PREVIOUS;
/**
* {@code true} if the message list should be displayed as flat list (i.e. no threading)
* regardless whether or not message threading was enabled in the settings. This is used for
* filtered views, e.g. when only displaying the unread messages in a folder.
*/
private boolean mNoThreading;
private DisplayMode mDisplayMode;
private MessageReference mMessageReference;
/**
* {@code true} when the message list was displayed once. This is used in
* {@link #onBackPressed()} to decide whether to go from the message view to the message list or
* finish the activity.
*/
private boolean mMessageListWasDisplayed = false;
// Animated switcher between list and view panes (non-split layouts only)
private ViewSwitcher mViewSwitcher;
/**
 * Sets up the layout (split or single-pane), action bar, gesture detection
 * and fragments, then displays the initial view state. Ordering matters:
 * extras must be decoded before fragments/display mode are initialized.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// If a database upgrade is required, hand off to the upgrade activity instead.
if (UpgradeDatabases.actionUpgradeDatabases(this, getIntent())) {
finish();
return;
}
if (useSplitView()) {
setContentView(R.layout.split_message_list);
} else {
// Single-pane layout: the ViewSwitcher animates between list and message view.
setContentView(R.layout.message_list);
mViewSwitcher = (ViewSwitcher) findViewById(R.id.container);
mViewSwitcher.setFirstInAnimation(AnimationUtils.loadAnimation(this, R.anim.slide_in_left));
mViewSwitcher.setFirstOutAnimation(AnimationUtils.loadAnimation(this, R.anim.slide_out_right));
mViewSwitcher.setSecondInAnimation(AnimationUtils.loadAnimation(this, R.anim.slide_in_right));
mViewSwitcher.setSecondOutAnimation(AnimationUtils.loadAnimation(this, R.anim.slide_out_left));
mViewSwitcher.setOnSwitchCompleteListener(this);
}
initializeActionBar();
// Enable gesture detection for MessageLists
setupGestureDetector(this);
// Bail out if the intent's extras could not be interpreted.
if (!decodeExtras(getIntent())) {
return;
}
findFragments();
initializeDisplayMode(savedInstanceState);
initializeLayout();
initializeFragments();
displayViews();
// Show the changelog dialog on the first run after an update.
ChangeLog cl = new ChangeLog(this);
if (cl.isFirstRun()) {
cl.getLogDialog().show();
}
}
/**
 * Handles a new launch intent delivered to an existing instance: clears the
 * fragment back stack, removes current fragments, resets the search/folder
 * state, and re-initializes the UI from the new intent's extras.
 */
@Override
public void onNewIntent(Intent intent) {
super.onNewIntent(intent);
setIntent(intent);
// Pop everything this instance pushed onto the back stack.
if (mFirstBackStackId >= 0) {
getSupportFragmentManager().popBackStackImmediate(mFirstBackStackId,
FragmentManager.POP_BACK_STACK_INCLUSIVE);
mFirstBackStackId = -1;
}
removeMessageListFragment();
removeMessageViewFragment();
// Reset state derived from the previous intent before decoding the new one.
mMessageReference = null;
mSearch = null;
mFolderName = null;
if (!decodeExtras(intent)) {
return;
}
initializeDisplayMode(null);
initializeFragments();
displayViews();
}
/**
 * Get references to existing fragments if the activity was restarted.
 */
private void findFragments() {
    FragmentManager fm = getSupportFragmentManager();
    mMessageListFragment =
            (MessageListFragment) fm.findFragmentById(R.id.message_list_container);
    mMessageViewFragment =
            (MessageViewFragment) fm.findFragmentById(R.id.message_view_container);
}
/**
 * Create fragment instances if necessary.
 *
 * <p>Creates the message list fragment when none survived an activity
 * restart, and opens a referenced message when launched with one.</p>
 *
 * @see #findFragments()
 */
private void initializeFragments() {
FragmentManager fragmentManager = getSupportFragmentManager();
fragmentManager.addOnBackStackChangedListener(this);
boolean hasMessageListFragment = (mMessageListFragment != null);
if (!hasMessageListFragment) {
FragmentTransaction ft = fragmentManager.beginTransaction();
// Threading is disabled either globally or per-view via mNoThreading.
mMessageListFragment = MessageListFragment.newInstance(mSearch, false,
(K9.isThreadedViewEnabled() && !mNoThreading));
ft.add(R.id.message_list_container, mMessageListFragment);
ft.commit();
}
// Check if the fragment wasn't restarted and has a MessageReference in the arguments. If
// so, open the referenced message.
if (!hasMessageListFragment && mMessageViewFragment == null &&
mMessageReference != null) {
openMessage(mMessageReference);
}
}
/**
 * Set the initial display mode (message list, message view, or split view).
 *
 * <p><strong>Note:</strong>
 * This method has to be called after {@link #findFragments()} because the result depends on
 * the availability of a {@link MessageViewFragment} instance.
 * </p>
 *
 * @param savedInstanceState
 *         The saved instance state that was passed to the activity as argument to
 *         {@link #onCreate(Bundle)}. May be {@code null}.
 */
private void initializeDisplayMode(Bundle savedInstanceState) {
    if (useSplitView()) {
        mDisplayMode = DisplayMode.SPLIT_VIEW;
        return;
    }

    if (savedInstanceState != null) {
        DisplayMode savedDisplayMode =
                (DisplayMode) savedInstanceState.getSerializable(STATE_DISPLAY_MODE);
        // Guard against a missing/absent key: getSerializable() returns null in
        // that case, and the previous code would assign null to mDisplayMode,
        // breaking the switch in displayViews(). Fall through to the default
        // heuristic instead.
        if (savedDisplayMode != null && savedDisplayMode != DisplayMode.SPLIT_VIEW) {
            mDisplayMode = savedDisplayMode;
            return;
        }
    }

    // No usable saved state: show the referenced/open message if there is one,
    // otherwise the message list.
    if (mMessageViewFragment != null || mMessageReference != null) {
        mDisplayMode = DisplayMode.MESSAGE_VIEW;
    } else {
        mDisplayMode = DisplayMode.MESSAGE_LIST;
    }
}
/**
 * Returns {@code true} if the combined list/message layout should be used,
 * based on the user's split view preference and the current orientation.
 */
private boolean useSplitView() {
    SplitViewMode mode = K9.getSplitViewMode();
    if (mode == SplitViewMode.ALWAYS) {
        return true;
    }
    boolean isLandscape =
            getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
    return mode == SplitViewMode.WHEN_IN_LANDSCAPE && isLandscape;
}
/**
 * Look up the message view container and inflate the placeholder view that is shown in split
 * view when no message is open.
 */
private void initializeLayout() {
    mMessageViewContainer = (ViewGroup) findViewById(R.id.message_view_container);
    mMessageViewPlaceHolder = getLayoutInflater().inflate(R.layout.empty_message_view, null);
}
/**
 * Show the view(s) matching the current {@link DisplayMode}.
 */
private void displayViews() {
    switch (mDisplayMode) {
        case MESSAGE_LIST: {
            showMessageList();
            break;
        }
        case MESSAGE_VIEW: {
            showMessageView();
            break;
        }
        case SPLIT_VIEW: {
            mMessageListWasDisplayed = true;
            if (mMessageViewFragment == null) {
                // No open message: show the empty placeholder on the right-hand side
                showMessageViewPlaceHolder();
            } else {
                // Highlight the currently open message in the message list
                MessageReference activeMessage = mMessageViewFragment.getMessageReference();
                if (activeMessage != null) {
                    mMessageListFragment.setActiveMessage(activeMessage);
                }
            }
            break;
        }
    }
}
/**
 * Decode the intent that started this activity and initialize the search to display (and,
 * where applicable, the message reference to open).
 *
 * @param intent
 *         The {@link Intent} this activity was started with. May come from external callers
 *         (VIEW/SEARCH/shortcut intents), so its contents are untrusted.
 *
 * @return {@code false} if the referenced account is unavailable and the user was redirected
 *         to the account list; {@code true} otherwise.
 */
private boolean decodeExtras(Intent intent) {
    String action = intent.getAction();
    if (Intent.ACTION_VIEW.equals(action) && intent.getData() != null) {
        // URI of the form <accountNumber>/<folderName>/<messageUid>
        Uri uri = intent.getData();
        List<String> segmentList = uri.getPathSegments();
        // Guard against malformed URIs from external callers: accessing the segments
        // unconditionally would throw IndexOutOfBoundsException for paths with fewer
        // than three segments.
        if (segmentList.size() >= 3) {
            String accountId = segmentList.get(0);
            Collection<Account> accounts = Preferences.getPreferences(this).getAvailableAccounts();
            for (Account account : accounts) {
                if (String.valueOf(account.getAccountNumber()).equals(accountId)) {
                    mMessageReference = new MessageReference();
                    mMessageReference.accountUuid = account.getUuid();
                    mMessageReference.folderName = segmentList.get(1);
                    mMessageReference.uid = segmentList.get(2);
                    break;
                }
            }
        }
    } else if (ACTION_SHORTCUT.equals(action)) {
        // Handle shortcut intents
        String specialFolder = intent.getStringExtra(EXTRA_SPECIAL_FOLDER);
        if (SearchAccount.UNIFIED_INBOX.equals(specialFolder)) {
            mSearch = SearchAccount.createUnifiedInboxAccount(this).getRelatedSearch();
        } else if (SearchAccount.ALL_MESSAGES.equals(specialFolder)) {
            mSearch = SearchAccount.createAllMessagesAccount(this).getRelatedSearch();
        }
    } else if (intent.getStringExtra(SearchManager.QUERY) != null) {
        // check if this intent comes from the system search ( remote )
        if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
            // Query was received from Search Dialog
            String query = intent.getStringExtra(SearchManager.QUERY);
            mSearch = new LocalSearch(getString(R.string.search_results));
            mSearch.setManualSearch(true);
            mNoThreading = true;
            mSearch.or(new SearchCondition(Searchfield.SENDER, Attribute.CONTAINS, query));
            mSearch.or(new SearchCondition(Searchfield.SUBJECT, Attribute.CONTAINS, query));
            mSearch.or(new SearchCondition(Searchfield.MESSAGE_CONTENTS, Attribute.CONTAINS, query));
            Bundle appData = intent.getBundleExtra(SearchManager.APP_DATA);
            if (appData != null) {
                mSearch.addAccountUuid(appData.getString(EXTRA_SEARCH_ACCOUNT));
                // searches started from a folder list activity will provide an account, but no folder
                if (appData.getString(EXTRA_SEARCH_FOLDER) != null) {
                    mSearch.addAllowedFolder(appData.getString(EXTRA_SEARCH_FOLDER));
                }
            } else {
                mSearch.addAccountUuid(LocalSearch.ALL_ACCOUNTS);
            }
        }
    } else {
        // regular LocalSearch object was passed
        mSearch = intent.getParcelableExtra(EXTRA_SEARCH);
        mNoThreading = intent.getBooleanExtra(EXTRA_NO_THREADING, false);
    }
    if (mMessageReference == null) {
        mMessageReference = intent.getParcelableExtra(EXTRA_MESSAGE_REFERENCE);
    }
    if (mMessageReference != null) {
        // A specific message was requested; narrow the search to its account and folder
        mSearch = new LocalSearch();
        mSearch.addAccountUuid(mMessageReference.accountUuid);
        mSearch.addAllowedFolder(mMessageReference.folderName);
    }
    if (mSearch == null) {
        // We've most likely been started by an old unread widget
        String accountUuid = intent.getStringExtra("account");
        String folderName = intent.getStringExtra("folder");
        mSearch = new LocalSearch(folderName);
        mSearch.addAccountUuid((accountUuid == null) ? "invalid" : accountUuid);
        if (folderName != null) {
            mSearch.addAllowedFolder(folderName);
        }
    }
    Preferences prefs = Preferences.getPreferences(getApplicationContext());
    String[] accountUuids = mSearch.getAccountUuids();
    if (mSearch.searchAllAccounts()) {
        Account[] accounts = prefs.getAccounts();
        mSingleAccountMode = (accounts.length == 1);
        if (mSingleAccountMode) {
            mAccount = accounts[0];
        }
    } else {
        mSingleAccountMode = (accountUuids.length == 1);
        if (mSingleAccountMode) {
            mAccount = prefs.getAccount(accountUuids[0]);
        }
    }
    mSingleFolderMode = mSingleAccountMode && (mSearch.getFolderNames().size() == 1);
    if (mSingleAccountMode && (mAccount == null || !mAccount.isAvailable(this))) {
        Log.i(K9.LOG_TAG, "not opening MessageList of unavailable account");
        onAccountUnavailable();
        return false;
    }
    if (mSingleFolderMode) {
        mFolderName = mSearch.getFolderNames().get(0);
    }
    // now we know if we are in single account mode and need a subtitle
    mActionBarSubTitle.setVisibility((!mSingleFolderMode) ? View.GONE : View.VISIBLE);
    return true;
}
@Override
public void onPause() {
    super.onPause();
    // Stop listening for storage (un)mount events while in the background
    StorageManager.getInstance(getApplication()).removeListener(mStorageListener);
}
@Override
public void onResume() {
    super.onResume();
    if (!(this instanceof Search)) {
        //necessary b/c no guarantee Search.onStop will be called before MessageList.onResume
        //when returning from search results
        Search.setActive(false);
    }
    // Bail out to the account list if this activity's account went away
    if (mAccount != null && !mAccount.isAvailable(this)) {
        onAccountUnavailable();
        return;
    }
    StorageManager.getInstance(getApplication()).addListener(mStorageListener);
}
@Override
public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // Persist display mode and list-was-displayed flag across configuration changes
    outState.putSerializable(STATE_DISPLAY_MODE, mDisplayMode);
    outState.putBoolean(STATE_MESSAGE_LIST_WAS_DISPLAYED, mMessageListWasDisplayed);
}
@Override
public void onRestoreInstanceState(Bundle savedInstanceState) {
    // Call through to the framework so the saved view hierarchy state is restored;
    // the original code skipped this, dropping per-view saved state.
    super.onRestoreInstanceState(savedInstanceState);
    mMessageListWasDisplayed = savedInstanceState.getBoolean(STATE_MESSAGE_LIST_WAS_DISPLAYED);
}
/**
 * Set up the custom action bar view and cache references to its child views.
 */
private void initializeActionBar() {
    mActionBar = getSupportActionBar();
    mActionBar.setDisplayShowCustomEnabled(true);
    mActionBar.setCustomView(R.layout.actionbar_custom);
    View customView = mActionBar.getCustomView();
    mActionBarMessageList = customView.findViewById(R.id.actionbar_message_list);
    mActionBarMessageView = customView.findViewById(R.id.actionbar_message_view);
    mActionBarSubject = (MessageTitleView) customView.findViewById(R.id.message_title_view);
    mActionBarTitle = (TextView) customView.findViewById(R.id.actionbar_title_first);
    mActionBarSubTitle = (TextView) customView.findViewById(R.id.actionbar_title_sub);
    mActionBarUnread = (TextView) customView.findViewById(R.id.actionbar_unread_count);
    mActionBarProgress = (ProgressBar) customView.findViewById(R.id.actionbar_progress);
    // Inflated lazily; used as action view of the "check mail" item while mail is checked
    mActionButtonIndeterminateProgress =
            getLayoutInflater().inflate(R.layout.actionbar_indeterminate_progress_actionview, null);
    mActionBar.setDisplayHomeAsUpEnabled(true);
}
/**
 * Give our hotkey handler first crack at key-down events before the view hierarchy
 * sees them; everything it doesn't consume goes through the default dispatch.
 */
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
    boolean consumed = (event.getAction() == KeyEvent.ACTION_DOWN)
            && onCustomKeyDown(event.getKeyCode(), event);
    return consumed || super.dispatchKeyEvent(event);
}
@Override
public void onBackPressed() {
    // From the message view, go back to the message list if it was shown before;
    // otherwise let the system handle "back" (usually finishing the activity).
    if (mDisplayMode == DisplayMode.MESSAGE_VIEW && mMessageListWasDisplayed) {
        showMessageList();
    } else {
        super.onBackPressed();
    }
}
/**
* Handle hotkeys
*
* <p>
* This method is called by {@link #dispatchKeyEvent(KeyEvent)} before any view had the chance
* to consume this key event.
* </p>
*
* @param keyCode
* The value in {@code event.getKeyCode()}.
* @param event
* Description of the key event.
*
* @return {@code true} if this event was consumed.
*/
public boolean onCustomKeyDown(final int keyCode, final KeyEvent event) {
    // NOTE(review): several cases below (C, O, I, S, ...) call into mMessageListFragment
    // without a null check, unlike the volume-key cases — confirm the fragment can never
    // be null while these hotkeys can fire.
    switch (keyCode) {
        case KeyEvent.KEYCODE_VOLUME_UP: {
            // Volume keys either navigate between messages (message view) or move the
            // list selection (message list), depending on the user's settings.
            if (mMessageViewFragment != null && mDisplayMode != DisplayMode.MESSAGE_LIST &&
                    K9.useVolumeKeysForNavigationEnabled()) {
                showPreviousMessage();
                return true;
            } else if (mDisplayMode != DisplayMode.MESSAGE_VIEW &&
                    K9.useVolumeKeysForListNavigationEnabled()) {
                mMessageListFragment.onMoveUp();
                return true;
            }
            break;
        }
        case KeyEvent.KEYCODE_VOLUME_DOWN: {
            if (mMessageViewFragment != null && mDisplayMode != DisplayMode.MESSAGE_LIST &&
                    K9.useVolumeKeysForNavigationEnabled()) {
                showNextMessage();
                return true;
            } else if (mDisplayMode != DisplayMode.MESSAGE_VIEW &&
                    K9.useVolumeKeysForListNavigationEnabled()) {
                mMessageListFragment.onMoveDown();
                return true;
            }
            break;
        }
        case KeyEvent.KEYCODE_C: {
            // Compose a new message
            mMessageListFragment.onCompose();
            return true;
        }
        case KeyEvent.KEYCODE_Q: {
            // Show the folder list (single-account mode only)
            if (mMessageListFragment != null && mMessageListFragment.isSingleAccountMode()) {
                onShowFolderList();
            }
            return true;
        }
        case KeyEvent.KEYCODE_O: {
            mMessageListFragment.onCycleSort();
            return true;
        }
        case KeyEvent.KEYCODE_I: {
            mMessageListFragment.onReverseSort();
            return true;
        }
        case KeyEvent.KEYCODE_DEL:
        case KeyEvent.KEYCODE_D: {
            // Delete: acts on the list selection or the open message, depending on mode
            if (mDisplayMode == DisplayMode.MESSAGE_LIST) {
                mMessageListFragment.onDelete();
            } else if (mMessageViewFragment != null) {
                mMessageViewFragment.onDelete();
            }
            return true;
        }
        case KeyEvent.KEYCODE_S: {
            mMessageListFragment.toggleMessageSelect();
            return true;
        }
        case KeyEvent.KEYCODE_G: {
            if (mDisplayMode == DisplayMode.MESSAGE_LIST) {
                mMessageListFragment.onToggleFlagged();
            } else if (mMessageViewFragment != null) {
                mMessageViewFragment.onToggleFlagged();
            }
            return true;
        }
        case KeyEvent.KEYCODE_M: {
            if (mDisplayMode == DisplayMode.MESSAGE_LIST) {
                mMessageListFragment.onMove();
            } else if (mMessageViewFragment != null) {
                mMessageViewFragment.onMove();
            }
            return true;
        }
        case KeyEvent.KEYCODE_V: {
            if (mDisplayMode == DisplayMode.MESSAGE_LIST) {
                mMessageListFragment.onArchive();
            } else if (mMessageViewFragment != null) {
                mMessageViewFragment.onArchive();
            }
            return true;
        }
        case KeyEvent.KEYCODE_Y: {
            if (mDisplayMode == DisplayMode.MESSAGE_LIST) {
                mMessageListFragment.onCopy();
            } else if (mMessageViewFragment != null) {
                mMessageViewFragment.onCopy();
            }
            return true;
        }
        case KeyEvent.KEYCODE_Z: {
            if (mDisplayMode == DisplayMode.MESSAGE_LIST) {
                mMessageListFragment.onToggleRead();
            } else if (mMessageViewFragment != null) {
                mMessageViewFragment.onToggleRead();
            }
            return true;
        }
        case KeyEvent.KEYCODE_F: {
            if (mMessageViewFragment != null) {
                mMessageViewFragment.onForward();
            }
            return true;
        }
        case KeyEvent.KEYCODE_A: {
            if (mMessageViewFragment != null) {
                mMessageViewFragment.onReplyAll();
            }
            return true;
        }
        case KeyEvent.KEYCODE_R: {
            if (mMessageViewFragment != null) {
                mMessageViewFragment.onReply();
            }
            return true;
        }
        case KeyEvent.KEYCODE_J:
        case KeyEvent.KEYCODE_P: {
            if (mMessageViewFragment != null) {
                showPreviousMessage();
            }
            return true;
        }
        case KeyEvent.KEYCODE_N:
        case KeyEvent.KEYCODE_K: {
            if (mMessageViewFragment != null) {
                showNextMessage();
            }
            return true;
        }
        /* FIXME
        case KeyEvent.KEYCODE_Z: {
            mMessageViewFragment.zoom(event);
            return true;
        }*/
        case KeyEvent.KEYCODE_H: {
            // Show a hint about the available hotkeys
            Toast toast = Toast.makeText(this, R.string.message_list_help_key, Toast.LENGTH_LONG);
            toast.show();
            return true;
        }
    }
    return false;
}
/**
 * Also consume the key-up half of volume key presses used for list navigation, so the
 * system doesn't additionally change the media volume.
 */
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
    if (K9.useVolumeKeysForListNavigationEnabled()
            && (keyCode == KeyEvent.KEYCODE_VOLUME_UP
                    || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN)) {
        if (K9.DEBUG) {
            Log.v(K9.LOG_TAG, "Swallowed key up.");
        }
        return true;
    }
    return super.onKeyUp(keyCode, event);
}
/** Open the account list and finish this activity. */
private void onAccounts() {
    Accounts.listAccounts(this);
    finish();
}
/** Open the folder list of the current account and finish this activity. */
private void onShowFolderList() {
    FolderList.actionHandleAccount(this, mAccount);
    finish();
}
/** Open the global application settings screen. */
private void onEditPrefs() {
    Prefs.actionPrefs(this);
}
/** Open the settings screen for the current account. */
private void onEditAccount() {
    AccountSettings.actionSettings(this, mAccount);
}
@Override
public boolean onSearchRequested() {
    // Delegate the system search request to the message list fragment
    return mMessageListFragment.onSearchRequested();
}
/**
 * Dispatch an options-menu selection to the matching fragment or activity action.
 *
 * <p>Items after the first switch are only handled in single-folder mode because they are
 * not safe for search results.</p>
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // NOTE(review): the MessageView cases (delete, reply, ...) dereference
    // mMessageViewFragment without a null check — presumably those items are hidden by
    // configureMenu() when no message view is shown; confirm.
    int itemId = item.getItemId();
    switch (itemId) {
        case android.R.id.home: {
            goBack();
            return true;
        }
        case R.id.compose: {
            mMessageListFragment.onCompose();
            return true;
        }
        case R.id.toggle_message_view_theme: {
            onToggleTheme();
            return true;
        }
        // MessageList
        case R.id.check_mail: {
            mMessageListFragment.checkMail();
            return true;
        }
        case R.id.set_sort_date: {
            mMessageListFragment.changeSort(SortType.SORT_DATE);
            return true;
        }
        case R.id.set_sort_arrival: {
            mMessageListFragment.changeSort(SortType.SORT_ARRIVAL);
            return true;
        }
        case R.id.set_sort_subject: {
            mMessageListFragment.changeSort(SortType.SORT_SUBJECT);
            return true;
        }
        case R.id.set_sort_sender: {
            mMessageListFragment.changeSort(SortType.SORT_SENDER);
            return true;
        }
        case R.id.set_sort_flag: {
            mMessageListFragment.changeSort(SortType.SORT_FLAGGED);
            return true;
        }
        case R.id.set_sort_unread: {
            mMessageListFragment.changeSort(SortType.SORT_UNREAD);
            return true;
        }
        case R.id.set_sort_attach: {
            mMessageListFragment.changeSort(SortType.SORT_ATTACHMENT);
            return true;
        }
        case R.id.select_all: {
            mMessageListFragment.selectAll();
            return true;
        }
        case R.id.app_settings: {
            onEditPrefs();
            return true;
        }
        case R.id.account_settings: {
            onEditAccount();
            return true;
        }
        case R.id.search: {
            mMessageListFragment.onSearchRequested();
            return true;
        }
        case R.id.search_remote: {
            mMessageListFragment.onRemoteSearch();
            return true;
        }
        case R.id.mark_all_as_read: {
            mMessageListFragment.markAllAsRead();
            return true;
        }
        case R.id.show_folder_list: {
            onShowFolderList();
            return true;
        }
        // MessageView
        case R.id.next_message: {
            showNextMessage();
            return true;
        }
        case R.id.previous_message: {
            showPreviousMessage();
            return true;
        }
        case R.id.delete: {
            mMessageViewFragment.onDelete();
            return true;
        }
        case R.id.reply: {
            mMessageViewFragment.onReply();
            return true;
        }
        case R.id.reply_all: {
            mMessageViewFragment.onReplyAll();
            return true;
        }
        case R.id.forward: {
            mMessageViewFragment.onForward();
            return true;
        }
        case R.id.share: {
            mMessageViewFragment.onSendAlternate();
            return true;
        }
        case R.id.toggle_unread: {
            mMessageViewFragment.onToggleRead();
            return true;
        }
        case R.id.archive: {
            mMessageViewFragment.onArchive();
            return true;
        }
        case R.id.spam: {
            mMessageViewFragment.onSpam();
            return true;
        }
        case R.id.move: {
            mMessageViewFragment.onMove();
            return true;
        }
        case R.id.copy: {
            mMessageViewFragment.onCopy();
            return true;
        }
        case R.id.select_text: {
            mMessageViewFragment.onSelectText();
            return true;
        }
        case R.id.show_headers:
        case R.id.hide_headers: {
            mMessageViewFragment.onToggleAllHeadersView();
            updateMenu();
            return true;
        }
    }
    if (!mSingleFolderMode) {
        // None of the options after this point are "safe" for search results
        //TODO: This is not true for "unread" and "starred" searches in regular folders
        return false;
    }
    switch (itemId) {
        case R.id.send_messages: {
            mMessageListFragment.onSendPendingMessages();
            return true;
        }
        case R.id.folder_settings: {
            if (mFolderName != null) {
                FolderSettings.actionSettings(this, mAccount, mFolderName);
            }
            return true;
        }
        case R.id.expunge: {
            mMessageListFragment.onExpunge();
            return true;
        }
        default: {
            return super.onOptionsItemSelected(item);
        }
    }
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getSupportMenuInflater().inflate(R.menu.message_list_option, menu);
    // Cache the menu and the "check mail" item; both are reconfigured at runtime
    mMenu = menu;
    mMenuButtonCheckMail = menu.findItem(R.id.check_mail);
    return true;
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
    // Re-evaluate item visibility every time the menu is shown
    configureMenu(menu);
    return true;
}
/**
* Hide menu items not appropriate for the current context.
*
* <p><strong>Note:</strong>
* Please adjust the comments in {@code res/menu/message_list_option.xml} if you change the
* visibility of a menu item in this method.
* </p>
*
* @param menu
* The {@link Menu} instance that should be modified. May be {@code null}; in that case
* the method does nothing and immediately returns.
*/
private void configureMenu(Menu menu) {
    if (menu == null) {
        return;
    }
    // Set visibility of account/folder settings menu items
    if (mMessageListFragment == null) {
        menu.findItem(R.id.account_settings).setVisible(false);
        menu.findItem(R.id.folder_settings).setVisible(false);
    } else {
        menu.findItem(R.id.account_settings).setVisible(
                mMessageListFragment.isSingleAccountMode());
        menu.findItem(R.id.folder_settings).setVisible(
                mMessageListFragment.isSingleFolderMode());
    }
    /*
     * Set visibility of menu items related to the message view
     */
    if (mDisplayMode == DisplayMode.MESSAGE_LIST
            || mMessageViewFragment == null
            || !mMessageViewFragment.isInitialized()) {
        // No (initialized) message view: hide all message-view related items
        menu.findItem(R.id.next_message).setVisible(false);
        menu.findItem(R.id.previous_message).setVisible(false);
        menu.findItem(R.id.single_message_options).setVisible(false);
        menu.findItem(R.id.delete).setVisible(false);
        menu.findItem(R.id.archive).setVisible(false);
        menu.findItem(R.id.move).setVisible(false);
        menu.findItem(R.id.copy).setVisible(false);
        menu.findItem(R.id.spam).setVisible(false);
        menu.findItem(R.id.refile).setVisible(false);
        menu.findItem(R.id.toggle_unread).setVisible(false);
        menu.findItem(R.id.select_text).setVisible(false);
        menu.findItem(R.id.toggle_message_view_theme).setVisible(false);
        menu.findItem(R.id.show_headers).setVisible(false);
        menu.findItem(R.id.hide_headers).setVisible(false);
    } else {
        // hide prev/next buttons in split mode
        if (mDisplayMode != DisplayMode.MESSAGE_VIEW) {
            menu.findItem(R.id.next_message).setVisible(false);
            menu.findItem(R.id.previous_message).setVisible(false);
        } else {
            // Enable prev/next only when the list knows the message's neighbors
            MessageReference ref = mMessageViewFragment.getMessageReference();
            boolean initialized = (mMessageListFragment != null &&
                    mMessageListFragment.isLoadFinished());
            boolean canDoPrev = (initialized && !mMessageListFragment.isFirst(ref));
            boolean canDoNext = (initialized && !mMessageListFragment.isLast(ref));
            MenuItem prev = menu.findItem(R.id.previous_message);
            prev.setEnabled(canDoPrev);
            prev.getIcon().setAlpha(canDoPrev ? 255 : 127);
            MenuItem next = menu.findItem(R.id.next_message);
            next.setEnabled(canDoNext);
            next.getIcon().setAlpha(canDoNext ? 255 : 127);
        }
        MenuItem toggleTheme = menu.findItem(R.id.toggle_message_view_theme);
        if (K9.useFixedMessageViewTheme()) {
            toggleTheme.setVisible(false);
        } else {
            // Set title of menu item to switch to dark/light theme
            if (K9.getK9MessageViewTheme() == K9.Theme.DARK) {
                toggleTheme.setTitle(R.string.message_view_theme_action_light);
            } else {
                toggleTheme.setTitle(R.string.message_view_theme_action_dark);
            }
            toggleTheme.setVisible(true);
        }
        // Set title of menu item to toggle the read state of the currently displayed message
        if (mMessageViewFragment.isMessageRead()) {
            menu.findItem(R.id.toggle_unread).setTitle(R.string.mark_as_unread_action);
        } else {
            menu.findItem(R.id.toggle_unread).setTitle(R.string.mark_as_read_action);
        }
        // Jellybean has built-in long press selection support
        menu.findItem(R.id.select_text).setVisible(Build.VERSION.SDK_INT < 16);
        menu.findItem(R.id.delete).setVisible(K9.isMessageViewDeleteActionVisible());
        /*
         * Set visibility of copy, move, archive, spam in action bar and refile submenu
         */
        Menu refileSubmenu = menu.findItem(R.id.refile).getSubMenu();
        if (mMessageViewFragment.isCopyCapable()) {
            menu.findItem(R.id.copy).setVisible(K9.isMessageViewCopyActionVisible());
            refileSubmenu.findItem(R.id.copy).setVisible(true);
        } else {
            menu.findItem(R.id.copy).setVisible(false);
            refileSubmenu.findItem(R.id.copy).setVisible(false);
        }
        if (mMessageViewFragment.isMoveCapable()) {
            boolean canMessageBeArchived = mMessageViewFragment.canMessageBeArchived();
            boolean canMessageBeMovedToSpam = mMessageViewFragment.canMessageBeMovedToSpam();
            menu.findItem(R.id.move).setVisible(K9.isMessageViewMoveActionVisible());
            menu.findItem(R.id.archive).setVisible(canMessageBeArchived &&
                    K9.isMessageViewArchiveActionVisible());
            menu.findItem(R.id.spam).setVisible(canMessageBeMovedToSpam &&
                    K9.isMessageViewSpamActionVisible());
            refileSubmenu.findItem(R.id.move).setVisible(true);
            refileSubmenu.findItem(R.id.archive).setVisible(canMessageBeArchived);
            refileSubmenu.findItem(R.id.spam).setVisible(canMessageBeMovedToSpam);
        } else {
            menu.findItem(R.id.move).setVisible(false);
            menu.findItem(R.id.archive).setVisible(false);
            menu.findItem(R.id.spam).setVisible(false);
            menu.findItem(R.id.refile).setVisible(false);
        }
        // Exactly one of show/hide headers is visible at a time
        if (mMessageViewFragment.allHeadersVisible()) {
            menu.findItem(R.id.show_headers).setVisible(false);
        } else {
            menu.findItem(R.id.hide_headers).setVisible(false);
        }
    }
    /*
     * Set visibility of menu items related to the message list
     */
    // Hide both search menu items by default and enable one when appropriate
    menu.findItem(R.id.search).setVisible(false);
    menu.findItem(R.id.search_remote).setVisible(false);
    if (mDisplayMode == DisplayMode.MESSAGE_VIEW || mMessageListFragment == null ||
            !mMessageListFragment.isInitialized()) {
        menu.findItem(R.id.check_mail).setVisible(false);
        menu.findItem(R.id.set_sort).setVisible(false);
        menu.findItem(R.id.select_all).setVisible(false);
        menu.findItem(R.id.send_messages).setVisible(false);
        menu.findItem(R.id.expunge).setVisible(false);
        menu.findItem(R.id.mark_all_as_read).setVisible(false);
        menu.findItem(R.id.show_folder_list).setVisible(false);
    } else {
        menu.findItem(R.id.set_sort).setVisible(true);
        menu.findItem(R.id.select_all).setVisible(true);
        menu.findItem(R.id.mark_all_as_read).setVisible(
                mMessageListFragment.isMarkAllAsReadSupported());
        if (!mMessageListFragment.isSingleAccountMode()) {
            menu.findItem(R.id.expunge).setVisible(false);
            menu.findItem(R.id.send_messages).setVisible(false);
            menu.findItem(R.id.show_folder_list).setVisible(false);
        } else {
            menu.findItem(R.id.send_messages).setVisible(mMessageListFragment.isOutbox());
            menu.findItem(R.id.expunge).setVisible(mMessageListFragment.isRemoteFolder() &&
                    mMessageListFragment.isAccountExpungeCapable());
            menu.findItem(R.id.show_folder_list).setVisible(true);
        }
        menu.findItem(R.id.check_mail).setVisible(mMessageListFragment.isCheckMailSupported());
        // If this is an explicit local search, show the option to search on the server
        if (!mMessageListFragment.isRemoteSearch() &&
                mMessageListFragment.isRemoteSearchAllowed()) {
            menu.findItem(R.id.search_remote).setVisible(true);
        } else if (!mMessageListFragment.isManualSearch()) {
            menu.findItem(R.id.search).setVisible(true);
        }
    }
}
/** Leave this activity and fall back to the account list when the account is unavailable. */
protected void onAccountUnavailable() {
    finish();
    // TODO inform user about account unavailability using Toast
    Accounts.listAccounts(this);
}
/** Set the main title shown in the custom action bar. */
public void setActionBarTitle(String title) {
    mActionBarTitle.setText(title);
}
/** Set the subtitle shown in the custom action bar. */
public void setActionBarSubTitle(String subTitle) {
    mActionBarSubTitle.setText(subTitle);
}
/** Show the unread count in the action bar, hiding the view entirely when it is zero. */
public void setActionBarUnread(int unread) {
    if (unread == 0) {
        mActionBarUnread.setVisibility(View.GONE);
    } else {
        mActionBarUnread.setVisibility(View.VISIBLE);
        mActionBarUnread.setText(Integer.toString(unread));
    }
}
@Override
public void setMessageListTitle(String title) {
    // Fragment callback: forwards to the action bar title
    setActionBarTitle(title);
}
@Override
public void setMessageListSubTitle(String subTitle) {
    // Fragment callback: forwards to the action bar subtitle
    setActionBarSubTitle(subTitle);
}
@Override
public void setUnreadCount(int unread) {
    // Fragment callback: forwards to the action bar unread badge
    setActionBarUnread(unread);
}
@Override
public void setMessageListProgress(int progress) {
    // Fragment callback: forwards to the window progress bar
    setSupportProgress(progress);
}
/**
 * Open the referenced message: drafts go to the compose screen, everything else is shown in
 * a (new) {@link MessageViewFragment}.
 */
@Override
public void openMessage(MessageReference messageReference) {
    Preferences prefs = Preferences.getPreferences(getApplicationContext());
    Account account = prefs.getAccount(messageReference.accountUuid);
    String folderName = messageReference.folderName;
    if (folderName.equals(account.getDraftsFolderName())) {
        // Draft messages are edited, not displayed
        MessageCompose.actionEditDraft(this, messageReference);
    } else {
        mMessageViewContainer.removeView(mMessageViewPlaceHolder);
        if (mMessageListFragment != null) {
            mMessageListFragment.setActiveMessage(messageReference);
        }
        MessageViewFragment fragment = MessageViewFragment.newInstance(messageReference);
        FragmentTransaction ft = getSupportFragmentManager().beginTransaction();
        ft.replace(R.id.message_view_container, fragment);
        mMessageViewFragment = fragment;
        ft.commit();
        // In split view the message appears in place; otherwise switch to the message view
        if (mDisplayMode != DisplayMode.SPLIT_VIEW) {
            showMessageView();
        }
    }
}
@Override
public void onResendMessage(Message message) {
    // Resend is implemented as editing the message as a draft
    MessageCompose.actionEditDraft(this, message.makeMessageReference());
}
@Override
public void onForward(Message message) {
    MessageCompose.actionForward(this, message.getFolder().getAccount(), message, null);
}
@Override
public void onReply(Message message) {
    // replyAll = false
    MessageCompose.actionReply(this, message.getFolder().getAccount(), message, false, null);
}
@Override
public void onReplyAll(Message message) {
    // replyAll = true
    MessageCompose.actionReply(this, message.getFolder().getAccount(), message, true, null);
}
@Override
public void onCompose(Account account) {
    MessageCompose.actionCompose(this, account);
}
/**
 * Replace the message list with a search for more messages from the given sender, across the
 * same accounts as the current search.
 */
@Override
public void showMoreFromSameSender(String senderAddress) {
    LocalSearch tmpSearch = new LocalSearch("From " + senderAddress);
    tmpSearch.addAccountUuids(mSearch.getAccountUuids());
    tmpSearch.and(Searchfield.SENDER, senderAddress, Attribute.CONTAINS);
    MessageListFragment fragment = MessageListFragment.newInstance(tmpSearch, false, false);
    addMessageListFragment(fragment, true);
}
@Override
public void onBackStackChanged() {
    // Re-resolve fragment references after a back-stack pop and refresh dependent UI
    findFragments();
    if (mDisplayMode == DisplayMode.SPLIT_VIEW) {
        showMessageViewPlaceHolder();
    }
    configureMenu(mMenu);
}
@Override
public void onSwipeRightToLeft(MotionEvent e1, MotionEvent e2) {
    // Forward swipe gestures to the list unless only the message view is shown
    if (mMessageListFragment != null && mDisplayMode != DisplayMode.MESSAGE_VIEW) {
        mMessageListFragment.onSwipeRightToLeft(e1, e2);
    }
}
@Override
public void onSwipeLeftToRight(MotionEvent e1, MotionEvent e2) {
    // Forward swipe gestures to the list unless only the message view is shown
    if (mMessageListFragment != null && mDisplayMode != DisplayMode.MESSAGE_VIEW) {
        mMessageListFragment.onSwipeLeftToRight(e1, e2);
    }
}
/**
 * Listens for storage (un)mount events and leaves this activity when the storage backing the
 * current account is unmounted.
 */
private final class StorageListenerImplementation implements StorageManager.StorageListener {
    @Override
    public void onUnmount(String providerId) {
        if (mAccount != null && providerId.equals(mAccount.getLocalStorageProviderId())) {
            // Callback may arrive off the main thread; UI work must be posted
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    onAccountUnavailable();
                }
            });
        }
    }
    @Override
    public void onMount(String providerId) {
        // no-op
    }
}
/**
 * Replace the current message list with the given fragment, optionally pushing the old one
 * onto the back stack, and remember the first back-stack entry we created.
 */
private void addMessageListFragment(MessageListFragment fragment, boolean addToBackStack) {
    FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
    transaction.replace(R.id.message_list_container, fragment);
    if (addToBackStack) {
        transaction.addToBackStack(null);
    }
    mMessageListFragment = fragment;
    int commitId = transaction.commit();
    if (commitId >= 0 && mFirstBackStackId < 0) {
        mFirstBackStackId = commitId;
    }
}
@Override
public boolean startSearch(Account account, String folderName) {
    // If this search was started from a MessageList of a single folder, pass along that folder info
    // so that we can enable remote search.
    if (account != null && folderName != null) {
        final Bundle appData = new Bundle();
        appData.putString(EXTRA_SEARCH_ACCOUNT, account.getUuid());
        appData.putString(EXTRA_SEARCH_FOLDER, folderName);
        startSearch(null, false, appData, false);
    } else {
        // TODO Handle the case where we're searching from within a search result.
        startSearch(null, false, null, false);
    }
    return true;
}
/**
 * Replace the message list with the messages of the given thread.
 */
@Override
public void showThread(Account account, String folderName, long threadRootId) {
    showMessageViewPlaceHolder();
    LocalSearch tmpSearch = new LocalSearch();
    tmpSearch.addAccountUuid(account.getUuid());
    tmpSearch.and(Searchfield.THREAD_ID, String.valueOf(threadRootId), Attribute.EQUALS);
    // second argument: show the list in threaded mode
    MessageListFragment fragment = MessageListFragment.newInstance(tmpSearch, true, false);
    addMessageListFragment(fragment, true);
}
/**
 * Remove any open message view and show the empty placeholder in its place, clearing the
 * active-message highlight in the list.
 */
private void showMessageViewPlaceHolder() {
    removeMessageViewFragment();
    // Add placeholder view if necessary
    if (mMessageViewPlaceHolder.getParent() == null) {
        mMessageViewContainer.addView(mMessageViewPlaceHolder);
    }
    mMessageListFragment.setActiveMessage(null);
}
/**
* Remove MessageViewFragment if necessary.
*/
private void removeMessageViewFragment() {
    if (mMessageViewFragment != null) {
        FragmentTransaction ft = getSupportFragmentManager().beginTransaction();
        ft.remove(mMessageViewFragment);
        mMessageViewFragment = null;
        ft.commit();
        // The action bar reverts to the message-list title once no message is shown
        showDefaultTitleView();
    }
}
/** Detach and drop the current message list fragment. */
private void removeMessageListFragment() {
    FragmentTransaction ft = getSupportFragmentManager().beginTransaction();
    ft.remove(mMessageListFragment);
    mMessageListFragment = null;
    ft.commit();
}
@Override
public void remoteSearchStarted() {
    // Remove action button for remote search
    configureMenu(mMenu);
}
/**
 * Navigate one step "up": message view → list, popped back stack, or out of this activity
 * to the folder/account list depending on the current mode.
 */
@Override
public void goBack() {
    FragmentManager fragmentManager = getSupportFragmentManager();
    if (mDisplayMode == DisplayMode.MESSAGE_VIEW) {
        showMessageList();
    } else if (fragmentManager.getBackStackEntryCount() > 0) {
        fragmentManager.popBackStack();
    } else if (mMessageListFragment.isManualSearch()) {
        finish();
    } else if (!mSingleFolderMode) {
        onAccounts();
    } else {
        onShowFolderList();
    }
}
/**
 * Toggle the action bar's progress indication. When the "check mail" action is visible its
 * action view is swapped for an indeterminate spinner; otherwise the plain action-bar
 * progress bar is shown/hidden.
 */
@Override
public void enableActionBarProgress(boolean enable) {
    MenuItem checkMail = mMenuButtonCheckMail;
    boolean checkMailVisible = (checkMail != null && checkMail.isVisible());
    if (checkMailVisible) {
        // Spinner replaces the "check mail" button; the bar's own progress view stays hidden
        mActionBarProgress.setVisibility(ProgressBar.GONE);
        checkMail.setActionView(enable ? mActionButtonIndeterminateProgress : null);
    } else {
        if (checkMail != null) {
            checkMail.setActionView(null);
        }
        mActionBarProgress.setVisibility(enable ? ProgressBar.VISIBLE : ProgressBar.GONE);
    }
}
private void restartActivity() {
    // restart the current activity, so that the theme change can be applied
    if (Build.VERSION.SDK_INT < 11) {
        // Pre-Honeycomb has no recreate(); emulate it with finish + relaunch
        Intent intent = getIntent();
        intent.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION);
        finish();
        overridePendingTransition(0, 0); // disable animations to speed up the switch
        startActivity(intent);
        overridePendingTransition(0, 0);
    } else {
        recreate();
    }
}
@Override
public void displayMessageSubject(String subject) {
    // Only the full-screen message view shows the subject in the action bar
    if (mDisplayMode == DisplayMode.MESSAGE_VIEW) {
        mActionBarSubject.setText(subject);
    }
}
@Override
public void onReply(Message message, PgpData pgpData) {
    // Reply (not reply-all) including the decrypted body, if any
    MessageCompose.actionReply(this, mAccount, message, false, pgpData.getDecryptedData());
}
@Override
public void onReplyAll(Message message, PgpData pgpData) {
    // Reply-all including the decrypted body, if any
    MessageCompose.actionReply(this, mAccount, message, true, pgpData.getDecryptedData());
}
@Override
public void onForward(Message mMessage, PgpData mPgpData) {
    // Forward including the decrypted body, if any
    MessageCompose.actionForward(this, mAccount, mMessage, mPgpData.getDecryptedData());
}
/**
 * After an action removed the open message: either show the next message in the reading
 * direction or return to the list/placeholder, per the user's preference.
 */
@Override
public void showNextMessageOrReturn() {
    if (K9.messageViewReturnToList() || !showLogicalNextMessage()) {
        if (mDisplayMode == DisplayMode.SPLIT_VIEW) {
            showMessageViewPlaceHolder();
        } else {
            showMessageList();
        }
    }
}
/**
 * Shows the next message in the direction the user was displaying messages, falling back to
 * the opposite direction if there is none.
 *
 * @return {@code true} if another message could be displayed, {@code false} otherwise.
 */
private boolean showLogicalNextMessage() {
    boolean result = false;
    if (mLastDirection == NEXT) {
        result = showNextMessage();
    } else if (mLastDirection == PREVIOUS) {
        result = showPreviousMessage();
    }
    if (!result) {
        // Preferred direction exhausted (or unknown): try both directions
        result = showNextMessage() || showPreviousMessage();
    }
    return result;
}
@Override
public void setProgress(boolean enable) {
    // Fragment callback: toggles the indeterminate progress indicator
    setSupportProgressBarIndeterminateVisibility(enable);
}
@Override
public void messageHeaderViewAvailable(MessageHeader header) {
    // Feed the loaded header into the action bar's subject view
    mActionBarSubject.setMessageHeader(header);
}
/**
 * Open the message after the one currently displayed, remembering the reading direction.
 *
 * @return {@code true} if a next message was opened, {@code false} otherwise.
 */
private boolean showNextMessage() {
    MessageReference current = mMessageViewFragment.getMessageReference();
    boolean opened = (current != null) && mMessageListFragment.openNext(current);
    if (opened) {
        mLastDirection = NEXT;
    }
    return opened;
}
/**
 * Open the message before the one currently displayed, remembering the reading direction.
 *
 * @return {@code true} if a previous message was opened, {@code false} otherwise.
 */
private boolean showPreviousMessage() {
    MessageReference current = mMessageViewFragment.getMessageReference();
    boolean opened = (current != null) && mMessageListFragment.openPrevious(current);
    if (opened) {
        mLastDirection = PREVIOUS;
    }
    return opened;
}
/** Switch the UI to the message list and reset message-view related state. */
private void showMessageList() {
    mMessageListWasDisplayed = true;
    mDisplayMode = DisplayMode.MESSAGE_LIST;
    mViewSwitcher.showFirstView();
    mMessageListFragment.setActiveMessage(null);
    showDefaultTitleView();
    configureMenu(mMenu);
}
/** Switch the UI to the message view. */
private void showMessageView() {
    mDisplayMode = DisplayMode.MESSAGE_VIEW;
    if (!mMessageListWasDisplayed) {
        // Don't animate when the message view is the first thing shown
        mViewSwitcher.setAnimateFirstView(false);
    }
    mViewSwitcher.showSecondView();
    showMessageTitleView();
    configureMenu(mMenu);
}
@Override
public void updateMenu() {
    // Trigger onPrepareOptionsMenu -> configureMenu
    invalidateOptionsMenu();
}
@Override
public void disableDeleteAction() {
    mMenu.findItem(R.id.delete).setEnabled(false);
}
/**
 * Flip the message-view theme between dark and light, persist the setting off the UI thread,
 * and restart the activity so the new theme takes effect.
 */
private void onToggleTheme() {
    if (K9.getK9MessageViewTheme() == K9.Theme.DARK) {
        K9.setK9MessageViewThemeSetting(K9.Theme.LIGHT);
    } else {
        K9.setK9MessageViewThemeSetting(K9.Theme.DARK);
    }
    // Persist on a background thread; commit() would otherwise block the UI thread
    new Thread(new Runnable() {
        @Override
        public void run() {
            Context appContext = getApplicationContext();
            Preferences prefs = Preferences.getPreferences(appContext);
            Editor editor = prefs.getPreferences().edit();
            K9.save(editor);
            editor.commit();
        }
    }).start();
    restartActivity();
}
// Swap the action bar to the message-list layout.
private void showDefaultTitleView() {
mActionBarMessageView.setVisibility(View.GONE);
mActionBarMessageList.setVisibility(View.VISIBLE);
if (mMessageListFragment != null) {
mMessageListFragment.updateTitle();
}
// Clear any leftover subject from a previously shown message.
mActionBarSubject.setMessageHeader(null);
}
// Swap the action bar to the message-view layout.
private void showMessageTitleView() {
mActionBarMessageList.setVisibility(View.GONE);
mActionBarMessageView.setVisibility(View.VISIBLE);
if (mMessageViewFragment != null) {
displayMessageSubject(null);
mMessageViewFragment.updateTitle();
}
}
@Override
public void onSwitchComplete(int displayedChild) {
// Once the animation back to the list pane (child 0) finishes, drop the
// message-view fragment to release its resources.
if (displayedChild == 0) {
removeMessageViewFragment();
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartloli.kafka.eagle.core.factory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.I0Itec.zkclient.ZkClient;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.security.JaasUtils;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.smartloli.kafka.eagle.common.domain.BrokersDomain;
import org.smartloli.kafka.eagle.common.domain.HostsDomain;
import org.smartloli.kafka.eagle.common.domain.KafkaSqlDomain;
import org.smartloli.kafka.eagle.common.domain.MetadataDomain;
import org.smartloli.kafka.eagle.common.domain.OffsetZkDomain;
import org.smartloli.kafka.eagle.common.domain.PageParamDomain;
import org.smartloli.kafka.eagle.common.domain.PartitionsDomain;
import org.smartloli.kafka.eagle.common.util.CalendarUtils;
import org.smartloli.kafka.eagle.common.util.SystemConfigUtils;
import org.smartloli.kafka.eagle.common.util.ZKPoolUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import kafka.admin.AdminClient;
import kafka.admin.AdminClient.ConsumerGroupSummary;
import kafka.admin.AdminClient.ConsumerSummary;
import kafka.admin.AdminUtils;
import kafka.admin.RackAwareMode;
import kafka.admin.TopicCommand;
import kafka.api.OffsetRequest;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.cluster.BrokerEndPoint;
import kafka.common.TopicAndPartition;
import kafka.consumer.ConsumerThreadId;
import kafka.coordinator.GroupOverview;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.TopicMetadataResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.utils.ZkUtils;
import scala.Option;
import scala.Tuple2;
import scala.collection.Iterator;
import scala.collection.JavaConversions;
import scala.collection.Seq;
/**
* Implements KafkaService all method.
*
* @author smartloli.
*
* Created by Jan 18, 2017.
*
* Update by hexiang 20170216
*
* @see org.smartloli.kafka.eagle.core.factory.KafkaService
*/
public class KafkaServiceImpl implements KafkaService {
private final Logger LOG = LoggerFactory.getLogger(KafkaServiceImpl.class);
private final String BROKER_IDS_PATH = "/brokers/ids";
private final String BROKER_TOPICS_PATH = "/brokers/topics";
private final String CONSUMERS_PATH = "/consumers";
/** Instance Zookeeper client pool. */
private ZKPoolUtils zkPool = ZKPoolUtils.getInstance();
/** Zookeeper service interface. */
private ZkService zkService = new ZkFactory().create();
/**
 * Use Kafka low level consumer API to find leader.
 *
 * @param a_seedBrokers broker "host:port" strings to query, tried in order.
 * @param a_topic topic whose partition leader is wanted.
 * @param a_partition partition id to look up.
 * @return PartitionMetadata for the partition, or null if no seed answered.
 * @see kafka.javaapi.PartitionMetadata
 */
private PartitionMetadata findLeader(List<String> a_seedBrokers, String a_topic, int a_partition) {
PartitionMetadata returnMetaData = null;
// Ask each seed broker in turn; the first metadata hit wins.
loop : for (String seed : a_seedBrokers) {
SimpleConsumer consumer = null;
try {
String ip = seed.split(":")[0];
String port = seed.split(":")[1];
consumer = new SimpleConsumer(ip, Integer.parseInt(port), 10000, 64 * 1024, "leaderLookup");
List<String> topics = Collections.singletonList(a_topic);
TopicMetadataRequest topicMetaReqst = new TopicMetadataRequest(topics);
kafka.javaapi.TopicMetadataResponse topicMetaResp = consumer.send(topicMetaReqst);
List<TopicMetadata> topicMetadatas = topicMetaResp.topicsMetadata();
for (TopicMetadata item : topicMetadatas) {
for (PartitionMetadata part : item.partitionsMetadata()) {
if (part.partitionId() == a_partition) {
returnMetaData = part;
// Found the partition; stop scanning the remaining seeds.
break loop;
}
}
}
} catch (Exception e) {
LOG.error("Error communicating with Broker [" + seed + "] to find Leader for [" + a_topic + ", " + a_partition + "] Reason: " + e);
} finally {
// Always close the short-lived lookup consumer's socket.
if (consumer != null)
consumer.close();
}
}
return returnMetaData;
}
/**
 * Find topic and group exist in zookeeper.
 *
 * @param clusterAlias
 *            Cluster whose zookeeper is queried.
 * @param topic
 *            Filter topic.
 * @param group
 *            Filter group
 * @return Boolean.
 */
public boolean findTopicAndGroupExist(String clusterAlias, String topic, String group) {
    ZkClient zkc = zkPool.getZkClient(clusterAlias);
    String ownersPath = CONSUMERS_PATH + "/" + group + "/owners/" + topic;
    try {
        return ZkUtils.apply(zkc, false).pathExists(ownersPath);
    } finally {
        // Return the client to the pool on every path; the original
        // released it only when pathExists() did not throw.
        if (zkc != null) {
            zkPool.release(clusterAlias, zkc);
            zkc = null;
        }
    }
}
/**
 * Obtaining metadata in zookeeper by topic.
 *
 * @param clusterAlias
 *            Cluster whose zookeeper is queried.
 * @param topic
 *            Selected condition.
 * @return List of the topic's partition node names.
 */
public List<String> findTopicPartition(String clusterAlias, String topic) {
    ZkClient zkc = zkPool.getZkClient(clusterAlias);
    try {
        Seq<String> brokerTopicsPaths = ZkUtils.apply(zkc, false).getChildren(BROKER_TOPICS_PATH + "/" + topic + "/partitions");
        return JavaConversions.seqAsJavaList(brokerTopicsPaths);
    } finally {
        // Release the pooled client even when the ZK call throws
        // (the original leaked it on the error path).
        if (zkc != null) {
            zkPool.release(clusterAlias, zkc);
            zkc = null;
        }
    }
}
/** Get kafka active consumer topic. */
public Map<String, List<String>> getActiveTopic(String clusterAlias) {
ZkClient zkc = zkPool.getZkClientSerializer(clusterAlias);
Map<String, List<String>> actvTopics = new HashMap<String, List<String>>();
try {
// Every child of /consumers is a consumer group name.
Seq<String> subConsumerPaths = ZkUtils.apply(zkc, false).getChildren(CONSUMERS_PATH);
List<String> groups = JavaConversions.seqAsJavaList(subConsumerPaths);
JSONArray groupsAndTopics = new JSONArray();
for (String group : groups) {
// Topics that currently have registered consumer threads for this group.
scala.collection.mutable.Map<String, scala.collection.immutable.List<ConsumerThreadId>> topics = ZkUtils.apply(zkc, false).getConsumersPerTopic(group, false);
for (Entry<String, ?> entry : JavaConversions.mapAsJavaMap(topics).entrySet()) {
JSONObject groupAndTopic = new JSONObject();
groupAndTopic.put("topic", entry.getKey());
groupAndTopic.put("group", group);
groupsAndTopics.add(groupAndTopic);
}
}
// Index the pairs by "<group>_<topic>".
// NOTE(review): because the key already contains the topic, the
// containsKey branch can only append a duplicate of the same topic
// name - confirm whether the key was meant to be just the group.
for (Object object : groupsAndTopics) {
JSONObject groupAndTopic = (JSONObject) object;
String group = groupAndTopic.getString("group");
String topic = groupAndTopic.getString("topic");
if (actvTopics.containsKey(group + "_" + topic)) {
actvTopics.get(group + "_" + topic).add(topic);
} else {
List<String> topics = new ArrayList<String>();
topics.add(topic);
actvTopics.put(group + "_" + topic, topics);
}
}
} catch (Exception ex) {
LOG.error(ex.getMessage());
} finally {
// Return the serializer client to the pool in all cases.
if (zkc != null) {
zkPool.releaseZKSerializer(clusterAlias, zkc);
zkc = null;
}
}
return actvTopics;
}
/** Get all broker list from zookeeper, serialized via BrokersDomain.toString(). */
public String getAllBrokersInfo(String clusterAlias) {
    ZkClient zkc = zkPool.getZkClientSerializer(clusterAlias);
    List<BrokersDomain> targets = new ArrayList<BrokersDomain>();
    try {
        if (ZkUtils.apply(zkc, false).pathExists(BROKER_IDS_PATH)) {
            Seq<String> subBrokerIdsPaths = ZkUtils.apply(zkc, false).getChildren(BROKER_IDS_PATH);
            List<String> brokerIdss = JavaConversions.seqAsJavaList(subBrokerIdsPaths);
            int id = 0;
            for (String ids : brokerIdss) {
                try {
                    Tuple2<Option<String>, Stat> tuple = ZkUtils.apply(zkc, false).readDataMaybeNull(BROKER_IDS_PATH + "/" + ids);
                    BrokersDomain broker = new BrokersDomain();
                    broker.setCreated(CalendarUtils.convertUnixTime2Date(tuple._2.getCtime()));
                    broker.setModify(CalendarUtils.convertUnixTime2Date(tuple._2.getMtime()));
                    // Parse the broker registration JSON once instead of twice.
                    JSONObject registration = JSON.parseObject(tuple._1.get());
                    broker.setHost(registration.getString("host"));
                    broker.setPort(registration.getInteger("port"));
                    broker.setId(++id);
                    targets.add(broker);
                } catch (Exception ex) {
                    // One malformed broker node must not abort the whole listing.
                    LOG.error(ex.getMessage());
                }
            }
        }
    } finally {
        // Always return the serializer client to the pool; the original
        // leaked it when the outer ZK calls threw.
        if (zkc != null) {
            zkPool.releaseZKSerializer(clusterAlias, zkc);
            zkc = null;
        }
    }
    return targets.toString();
}
/** Get all topic info from zookeeper, serialized via PartitionsDomain.toString(). */
public String getAllPartitions(String clusterAlias) {
    ZkClient zkc = zkPool.getZkClientSerializer(clusterAlias);
    List<PartitionsDomain> targets = new ArrayList<PartitionsDomain>();
    try {
        if (ZkUtils.apply(zkc, false).pathExists(BROKER_TOPICS_PATH)) {
            Seq<String> subBrokerTopicsPaths = ZkUtils.apply(zkc, false).getChildren(BROKER_TOPICS_PATH);
            List<String> topics = JavaConversions.seqAsJavaList(subBrokerTopicsPaths);
            int id = 0;
            for (String topic : topics) {
                try {
                    Tuple2<Option<String>, Stat> tuple = ZkUtils.apply(zkc, false).readDataMaybeNull(BROKER_TOPICS_PATH + "/" + topic);
                    PartitionsDomain partition = new PartitionsDomain();
                    partition.setId(++id);
                    partition.setCreated(CalendarUtils.convertUnixTime2Date(tuple._2.getCtime()));
                    partition.setModify(CalendarUtils.convertUnixTime2Date(tuple._2.getMtime()));
                    partition.setTopic(topic);
                    // The topic node's "partitions" object maps partition id -> replica list.
                    JSONObject partitionObject = JSON.parseObject(tuple._1.get()).getJSONObject("partitions");
                    partition.setPartitionNumbers(partitionObject.size());
                    partition.setPartitions(partitionObject.keySet());
                    targets.add(partition);
                } catch (Exception ex) {
                    // Skip a malformed topic node but keep listing the rest.
                    LOG.error(ex.getMessage());
                }
            }
        }
    } finally {
        // Always return the serializer client to the pool; the original
        // leaked it when the outer ZK calls threw.
        if (zkc != null) {
            zkPool.releaseZKSerializer(clusterAlias, zkc);
            zkc = null;
        }
    }
    return targets.toString();
}
/** Obtaining kafka consumer information from zookeeper. */
public Map<String, List<String>> getConsumers(String clusterAlias) {
ZkClient zkc = zkPool.getZkClient(clusterAlias);
Map<String, List<String>> consumers = new HashMap<String, List<String>>();
try {
// Every child of /consumers is a consumer group name.
Seq<String> subConsumerPaths = ZkUtils.apply(zkc, false).getChildren(CONSUMERS_PATH);
List<String> groups = JavaConversions.seqAsJavaList(subConsumerPaths);
for (String group : groups) {
String path = CONSUMERS_PATH + "/" + group + "/owners";
if (ZkUtils.apply(zkc, false).pathExists(path)) {
// Children of /owners are the topics this group consumes.
Seq<String> owners = ZkUtils.apply(zkc, false).getChildren(path);
List<String> ownersSerialize = JavaConversions.seqAsJavaList(owners);
consumers.put(group, ownersSerialize);
} else {
LOG.error("Consumer Path[" + path + "] is not exist.");
}
}
} catch (Exception ex) {
LOG.error(ex.getMessage());
} finally {
// Return the pooled client in all cases.
if (zkc != null) {
zkPool.release(clusterAlias, zkc);
zkc = null;
}
}
return consumers;
}
/** Obtaining kafka consumer page information from zookeeper. */
public Map<String, List<String>> getConsumers(String clusterAlias, PageParamDomain page) {
ZkClient zkc = zkPool.getZkClient(clusterAlias);
Map<String, List<String>> consumers = new HashMap<String, List<String>>();
try {
if (page.getSearch().length() > 0) {
// A search term names one group directly; paging is skipped.
String path = CONSUMERS_PATH + "/" + page.getSearch() + "/owners";
if (ZkUtils.apply(zkc, false).pathExists(path)) {
Seq<String> owners = ZkUtils.apply(zkc, false).getChildren(path);
List<String> ownersSerialize = JavaConversions.seqAsJavaList(owners);
consumers.put(page.getSearch(), ownersSerialize);
} else {
LOG.error("Consumer Path[" + path + "] is not exist.");
}
} else {
Seq<String> subConsumersPaths = ZkUtils.apply(zkc, false).getChildren(CONSUMERS_PATH);
List<String> groups = JavaConversions.seqAsJavaList(subConsumersPaths);
int offset = 0;
for (String group : groups) {
// Keep only groups inside the requested page window
// [iDisplayStart, iDisplayStart + iDisplayLength).
if (offset < (page.getiDisplayLength() + page.getiDisplayStart()) && offset >= page.getiDisplayStart()) {
String path = CONSUMERS_PATH + "/" + group + "/owners";
if (ZkUtils.apply(zkc, false).pathExists(path)) {
Seq<String> owners = ZkUtils.apply(zkc, false).getChildren(path);
List<String> ownersSerialize = JavaConversions.seqAsJavaList(owners);
consumers.put(group, ownersSerialize);
} else {
LOG.error("Consumer Path[" + path + "] is not exist.");
}
}
offset++;
}
}
} catch (Exception ex) {
LOG.error(ex.getMessage());
} finally {
// Return the pooled client in all cases.
if (zkc != null) {
zkPool.release(clusterAlias, zkc);
zkc = null;
}
}
return consumers;
}
/**
 * Use Kafka low consumer API to read the latest log offset of a partition.
 *
 * @param hosts
 *            Broker "host:port" seed list.
 * @param topic
 *            Appoint topic.
 * @param partition
 *            Appoint partition.
 * @return latest offset, or 0 when the leader cannot be found or the
 *         offset request fails.
 */
public long getLogSize(List<String> hosts, String topic, int partition) {
    LOG.info("Find leader hosts [" + hosts + "]");
    PartitionMetadata metadata = findLeader(hosts, topic, partition);
    if (metadata == null) {
        LOG.error("[KafkaClusterUtils.getLogSize()] - Can't find metadata for Topic and Partition. Exiting");
        return 0L;
    }
    if (metadata.leader() == null) {
        LOG.error("[KafkaClusterUtils.getLogSize()] - Can't find Leader for Topic and Partition. Exiting");
        return 0L;
    }
    String clientName = "Client_" + topic + "_" + partition;
    String reaHost = metadata.leader().host();
    int port = metadata.leader().port();
    long ret = 0L;
    SimpleConsumer simpleConsumer = null;
    try {
        simpleConsumer = new SimpleConsumer(reaHost, port, 100000, 64 * 1024, clientName);
        TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
        Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
        // Ask for the single most recent offset of the partition.
        requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(OffsetRequest.LatestTime(), 1));
        kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(requestInfo, OffsetRequest.CurrentVersion(), clientName);
        OffsetResponse response = simpleConsumer.getOffsetsBefore(request);
        if (response.hasError()) {
            LOG.error("Error fetching data Offset , Reason: " + response.errorCode(topic, partition));
            return 0;
        }
        long[] offsets = response.offsets(topic, partition);
        ret = offsets[0];
    } catch (Exception ex) {
        LOG.error(ex.getMessage());
    } finally {
        // Close the consumer on every exit path; the original leaked the
        // socket on the hasError() early return and on exceptions.
        if (simpleConsumer != null) {
            simpleConsumer.close();
        }
    }
    return ret;
}
/**
 * According to group, topic and partition to get offset from zookeeper.
 *
 * @param clusterAlias
 *            Cluster whose zookeeper is queried.
 * @param topic
 *            Filter topic.
 * @param group
 *            Filter group.
 * @param partition
 *            Filter partition.
 * @return OffsetZkDomain (empty instance when the offset node is missing
 *         or unreadable).
 *
 * @see org.smartloli.kafka.eagle.domain.OffsetZkDomain
 */
public OffsetZkDomain getOffset(String clusterAlias, String topic, String group, int partition) {
    ZkClient zkc = zkPool.getZkClientSerializer(clusterAlias);
    OffsetZkDomain offsetZk = new OffsetZkDomain();
    String offsetPath = CONSUMERS_PATH + "/" + group + "/offsets/" + topic + "/" + partition;
    String ownersPath = CONSUMERS_PATH + "/" + group + "/owners/" + topic + "/" + partition;
    // A single finally releases the pooled client on every exit path; the
    // original duplicated the release three times and still leaked the
    // client if parsing or the owners lookup threw.
    try {
        Tuple2<Option<String>, Stat> tuple = null;
        try {
            if (ZkUtils.apply(zkc, false).pathExists(offsetPath)) {
                tuple = ZkUtils.apply(zkc, false).readDataMaybeNull(offsetPath);
            } else {
                LOG.info("Partition[" + partition + "],OffsetPath[" + offsetPath + "] is not exist!");
                return offsetZk;
            }
        } catch (Exception ex) {
            LOG.error("Partition[" + partition + "],get offset has error,msg is " + ex.getMessage());
            return offsetZk;
        }
        long offsetSize = Long.parseLong(tuple._1.get());
        if (ZkUtils.apply(zkc, false).pathExists(ownersPath)) {
            Tuple2<String, Stat> tuple2 = ZkUtils.apply(zkc, false).readData(ownersPath);
            offsetZk.setOwners(tuple2._1 == null ? "" : tuple2._1);
        } else {
            offsetZk.setOwners("");
        }
        offsetZk.setOffset(offsetSize);
        offsetZk.setCreate(CalendarUtils.convertUnixTime2Date(tuple._2.getCtime()));
        offsetZk.setModify(CalendarUtils.convertUnixTime2Date(tuple._2.getMtime()));
        return offsetZk;
    } finally {
        if (zkc != null) {
            zkPool.releaseZKSerializer(clusterAlias, zkc);
            zkc = null;
        }
    }
}
/**
 * According to topic and partition to obtain Replicas &amp; Isr.
 * NOTE: the misspelled name ("gey") is part of the public interface and
 * is referenced by callers, so it is deliberately kept.
 *
 * @param clusterAlias
 *            Cluster whose zookeeper is queried.
 * @param topic
 *            Target topic.
 * @param partitionid
 *            Target partition id.
 * @return String rendering of the in-sync replica list.
 */
public String geyReplicasIsr(String clusterAlias, String topic, int partitionid) {
    ZkClient zkc = zkPool.getZkClientSerializer(clusterAlias);
    try {
        Seq<Object> repclicasAndPartition = ZkUtils.apply(zkc, false).getInSyncReplicasForPartition(topic, partitionid);
        return JavaConversions.seqAsJavaList(repclicasAndPartition).toString();
    } finally {
        // Release the pooled client even when the ZK call throws
        // (the original leaked it on the error path).
        if (zkc != null) {
            zkPool.releaseZKSerializer(clusterAlias, zkc);
            zkc = null;
        }
    }
}
/** Get zookeeper cluster information as a JSON array string. */
public String zkCluster(String clusterAlias) {
    String[] zks = SystemConfigUtils.getPropertyArray(clusterAlias + ".zk.list", ",");
    JSONArray targets = new JSONArray();
    int id = 1;
    for (String zk : zks) {
        // Split "host:port" once instead of per field.
        String[] hostAndPort = zk.split(":");
        JSONObject node = new JSONObject();
        node.put("id", id++);
        node.put("ip", hostAndPort[0]);
        node.put("port", hostAndPort[1]);
        node.put("mode", zkService.status(hostAndPort[0], hostAndPort[1]));
        targets.add(node);
    }
    return targets.toJSONString();
}
/** Judge whether the zkcli is active; reports liveness plus the configured list. */
public JSONObject zkCliStatus(String clusterAlias) {
    JSONObject target = new JSONObject();
    ZkClient zkc = zkPool.getZkClient(clusterAlias);
    // "list" is identical in both branches of the original, so only the
    // liveness flag actually depends on the client.
    target.put("live", zkc != null);
    target.put("list", SystemConfigUtils.getProperty(clusterAlias + ".zk.list"));
    if (zkc != null) {
        zkPool.release(clusterAlias, zkc);
        zkc = null;
    }
    return target;
}
/**
 * Create topic to kafka cluster, it is worth noting that the backup number
 * must be less than or equal to brokers data.
 *
 * @param topicName
 *            Create topic name.
 * @param partitions
 *            Create topic partitions.
 * @param replic
 *            Replic numbers.
 * @return Map with "status" ("error"/"success") and a human readable "info".
 */
public Map<String, Object> create(String clusterAlias, String topicName, String partitions, String replic) {
Map<String, Object> targets = new HashMap<String, Object>();
// Replication factor can never exceed the number of live brokers.
int brokers = JSON.parseArray(getAllBrokersInfo(clusterAlias)).size();
if (Integer.parseInt(replic) > brokers) {
targets.put("status", "error");
targets.put("info", "replication factor: " + replic + " larger than available brokers: " + brokers);
return targets;
}
String formatter = SystemConfigUtils.getProperty("kafka.eagle.offset.storage");
String zks = SystemConfigUtils.getProperty(clusterAlias + ".zk.list");
if ("kafka".equals(formatter)) {
// Offsets stored in kafka: create via AdminUtils on a fresh ZkUtils handle.
ZkUtils zkUtils = ZkUtils.apply(zks, 30000, 30000, JaasUtils.isZkSecurityEnabled());
AdminUtils.createTopic(zkUtils, topicName, Integer.parseInt(partitions), Integer.parseInt(replic), new Properties(), RackAwareMode.Enforced$.MODULE$);
if (zkUtils != null) {
zkUtils.close();
}
} else {
// Otherwise shell through the kafka-topics command line entry point.
// NOTE(review): TopicCommand.main may terminate the JVM on bad
// arguments - confirm that is acceptable in this process.
String[] options = new String[]{"--create", "--zookeeper", zks, "--partitions", partitions, "--topic", topicName, "--replication-factor", replic};
TopicCommand.main(options);
}
targets.put("status", "success");
targets.put("info", "Create topic[" + topicName + "] has successed,partitions numbers is [" + partitions + "],replication-factor numbers is [" + replic + "]");
return targets;
}
/** Delete topic to kafka cluster; reports "success"/"failed" under "status". */
public Map<String, Object> delete(String clusterAlias, String topicName) {
    Map<String, Object> result = new HashMap<String, Object>();
    ZkClient zkc = zkPool.getZkClient(clusterAlias);
    String formatter = SystemConfigUtils.getProperty("kafka.eagle.offset.storage");
    String zks = SystemConfigUtils.getProperty(clusterAlias + ".zk.list");
    if ("kafka".equals(formatter)) {
        // Offsets stored in kafka: delete via AdminUtils on a fresh ZkUtils handle.
        ZkUtils zkUtils = ZkUtils.apply(zks, 30000, 30000, JaasUtils.isZkSecurityEnabled());
        AdminUtils.deleteTopic(zkUtils, topicName);
        if (zkUtils != null) {
            zkUtils.close();
        }
    } else {
        // Otherwise shell through the kafka-topics command line entry point.
        String[] options = new String[]{"--delete", "--zookeeper", zks, "--topic", topicName};
        TopicCommand.main(options);
    }
    boolean removed = zkc.deleteRecursive(ZkUtils.getTopicPath(topicName));
    result.put("status", removed ? "success" : "failed");
    if (zkc != null) {
        zkPool.release(clusterAlias, zkc);
        zkc = null;
    }
    return result;
}
/**
 * Find leader through topic.
 *
 * @param clusterAlias
 *            Target cluster.
 * @param topic
 *            Topic whose per-partition metadata is wanted.
 * @return List of per-partition metadata (leader, replicas, isr); empty on failure.
 * @see org.smartloli.kafka.eagle.domain.MetadataDomain
 */
public List<MetadataDomain> findLeader(String clusterAlias, String topic) {
    List<MetadataDomain> targets = new ArrayList<>();
    SimpleConsumer consumer = null;
    // Connect to the first broker that accepts a consumer connection.
    for (HostsDomain broker : getBrokers(clusterAlias)) {
        try {
            consumer = new SimpleConsumer(broker.getHost(), broker.getPort(), 100000, 64 * 1024, "leaderLookup");
            if (consumer != null) {
                break;
            }
        } catch (Exception ex) {
            LOG.error(ex.getMessage());
        }
    }
    if (consumer == null) {
        LOG.error("Connection [SimpleConsumer] has failed,please check brokers.");
        return targets;
    }
    // Close the consumer on every exit path; the original leaked the socket
    // when the metadata response was null.
    try {
        List<String> topics = Collections.singletonList(topic);
        TopicMetadataRequest topicMetaReqst = new TopicMetadataRequest(topics);
        TopicMetadataResponse topicMetaRespn = consumer.send(topicMetaReqst);
        if (topicMetaRespn == null) {
            LOG.error("Get [TopicMetadataResponse] has null.");
            return targets;
        }
        List<TopicMetadata> topicsMeta = topicMetaRespn.topicsMetadata();
        for (TopicMetadata item : topicsMeta) {
            for (PartitionMetadata part : item.partitionsMetadata()) {
                MetadataDomain metadata = new MetadataDomain();
                metadata.setIsr(geyReplicasIsr(clusterAlias, topic, part.partitionId()));
                // -1 marks a partition that currently has no leader.
                metadata.setLeader(part.leader() == null ? -1 : part.leader().id());
                metadata.setPartitionId(part.partitionId());
                List<Integer> replicases = new ArrayList<>();
                for (BrokerEndPoint repli : part.replicas()) {
                    replicases.add(repli.id());
                }
                metadata.setReplicas(replicases.toString());
                targets.add(metadata);
            }
        }
        return targets;
    } finally {
        consumer.close();
    }
}
/** Get kafka brokers from zookeeper as host/port entries. */
private List<HostsDomain> getBrokers(String clusterAlias) {
    List<HostsDomain> endpoints = new ArrayList<HostsDomain>();
    for (Object object : JSON.parseArray(getAllBrokersInfo(clusterAlias))) {
        JSONObject json = (JSONObject) object;
        HostsDomain endpoint = new HostsDomain();
        endpoint.setHost(json.getString("host"));
        endpoint.setPort(json.getInteger("port"));
        endpoints.add(endpoint);
    }
    return endpoints;
}
/**
 * Render the cluster's brokers as a "host:port,host:port" bootstrap string.
 * Returns an empty string when no brokers are registered; the original
 * called substring(0, -1) on an empty buffer and threw
 * StringIndexOutOfBoundsException in that case.
 */
private String parseBrokerServer(String clusterAlias) {
    JSONArray brokers = JSON.parseArray(getAllBrokersInfo(clusterAlias));
    StringBuilder brokerServer = new StringBuilder();
    for (Object object : brokers) {
        JSONObject broker = (JSONObject) object;
        if (brokerServer.length() > 0) {
            brokerServer.append(",");
        }
        brokerServer.append(broker.getString("host")).append(":").append(broker.getInteger("port"));
    }
    return brokerServer.toString();
}
/** Convert query sql to object. */
public KafkaSqlDomain parseSql(String clusterAlias, String sql) {
    return segments(clusterAlias, prepare(sql));
}
/** Normalize the raw sql: trim the ends and collapse whitespace runs to one space. */
private String prepare(String sql) {
    return sql.trim().replaceAll("\\s+", " ");
}
// Parses a normalized "select ... from ... where ..." query into a
// KafkaSqlDomain: table name, requested partitions, and a fixed schema.
private KafkaSqlDomain segments(String clusterAlias, String sql) {
KafkaSqlDomain kafkaSql = new KafkaSqlDomain();
kafkaSql.setMetaSql(sql);
sql = sql.toLowerCase();
kafkaSql.setSql(sql);
// Strip trailing clauses so only the leading select/where part is parsed.
// NOTE(review): only the FIRST matching keyword is stripped, and "and" is
// matched as a raw substring (it would also match inside identifiers such
// as "brand") - confirm this is the intended behavior.
if (sql.contains("and")) {
sql = sql.split("and")[0];
} else if (sql.contains("group by")) {
sql = sql.split("group")[0];
} else if (sql.contains("limit")) {
sql = sql.split("limit")[0];
}
// Fixed schema exposed to the sql layer.
kafkaSql.getSchema().put("partition", "integer");
kafkaSql.getSchema().put("offset", "bigint");
kafkaSql.getSchema().put("msg", "varchar");
if (!sql.startsWith("select")) {
kafkaSql.setStatus(false);
return kafkaSql;
} else {
kafkaSql.setStatus(true);
// Extract the table name (group 1) and the where-clause (group 2).
Matcher matcher = Pattern.compile("select\\s.+from\\s(.+)where\\s(.+)").matcher(sql);
if (matcher.find()) {
kafkaSql.setTableName(matcher.group(1).trim().replaceAll("\"", ""));
if (matcher.group(2).trim().startsWith("\"partition\"")) {
// Parse e.g.: where "partition" in (0,1,2)  ->  partitions [0, 1, 2]
String[] columns = matcher.group(2).trim().split("in")[1].replace("(", "").replace(")", "").trim().split(",");
for (String column : columns) {
try {
kafkaSql.getPartition().add(Integer.parseInt(column));
} catch (Exception e) {
LOG.error("Parse parition[" + column + "] has error,msg is " + e.getMessage());
}
}
}
}
kafkaSql.setSeeds(getBrokers(clusterAlias));
}
return kafkaSql;
}
/** Get kafka 0.10.x active topics: topics of group entries that have an owner. */
public Set<String> getKafkaActiverTopics(String clusterAlias, String group) {
    JSONArray consumerGroups = getKafkaMetadata(parseBrokerServer(clusterAlias), group);
    Set<String> topics = new HashSet<>();
    for (Object object : consumerGroups) {
        JSONObject consumerGroup = (JSONObject) object;
        // An entry counts as active only when it has a real owner. The
        // original test (owner != "" || owner != null) compared by
        // reference and was always true, so ownerless entries leaked in.
        String owner = consumerGroup.getString("owner");
        if (owner == null || owner.isEmpty()) {
            continue;
        }
        for (Object topicObject : consumerGroup.getJSONArray("topicSub")) {
            JSONObject topic = (JSONObject) topicObject;
            topics.add(topic.getString("topic"));
        }
    }
    return topics;
}
/** Get kafka 0.10.x consumer metadata for all non-internal groups, as JSON. */
public String getKafkaConsumer(String clusterAlias) {
    Properties prop = new Properties();
    JSONArray consumerGroups = new JSONArray();
    prop.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, parseBrokerServer(clusterAlias));
    AdminClient adminClient = null;
    try {
        adminClient = AdminClient.create(prop);
        scala.collection.immutable.Map<Node, scala.collection.immutable.List<GroupOverview>> opts = adminClient.listAllConsumerGroups();
        Iterator<Tuple2<Node, scala.collection.immutable.List<GroupOverview>>> groupOverview = opts.iterator();
        while (groupOverview.hasNext()) {
            Tuple2<Node, scala.collection.immutable.List<GroupOverview>> tuple = groupOverview.next();
            String node = tuple._1.host() + ":" + tuple._1.port();
            Iterator<GroupOverview> groups = tuple._2.iterator();
            while (groups.hasNext()) {
                GroupOverview group = groups.next();
                JSONObject consumerGroup = new JSONObject();
                String groupId = group.groupId();
                // Skip kafka-eagle's own internal consumer groups.
                if (!groupId.contains("kafka.eagle")) {
                    consumerGroup.put("group", groupId);
                    consumerGroup.put("node", node);
                    consumerGroup.put("meta", getKafkaMetadata(parseBrokerServer(clusterAlias), groupId));
                    consumerGroups.add(consumerGroup);
                }
            }
        }
    } catch (Exception e) {
        LOG.error("Get kafka consumer has error,msg is " + e.getMessage());
    } finally {
        // Close the admin client on error paths too; the original only
        // closed it when no exception occurred.
        if (adminClient != null) {
            adminClient.close();
        }
    }
    return consumerGroups.toJSONString();
}
/** Get kafka 0.10.x per-consumer metadata (owner, node, subscribed topic/partitions) for one group. */
private JSONArray getKafkaMetadata(String bootstrapServers, String group) {
    Properties prop = new Properties();
    prop.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    JSONArray consumerGroups = new JSONArray();
    AdminClient adminClient = null;
    try {
        adminClient = AdminClient.create(prop);
        ConsumerGroupSummary cgs = adminClient.describeConsumerGroup(group);
        Option<scala.collection.immutable.List<ConsumerSummary>> opts = cgs.consumers();
        Iterator<ConsumerSummary> consumerSummarys = opts.get().iterator();
        while (consumerSummarys.hasNext()) {
            ConsumerSummary consumerSummary = consumerSummarys.next();
            Iterator<TopicPartition> topics = consumerSummary.assignment().iterator();
            JSONObject topicSub = new JSONObject();
            JSONArray topicSubs = new JSONArray();
            while (topics.hasNext()) {
                JSONObject object = new JSONObject();
                TopicPartition topic = topics.next();
                object.put("topic", topic.topic());
                object.put("partition", topic.partition());
                topicSubs.add(object);
            }
            topicSub.put("owner", consumerSummary.consumerId());
            topicSub.put("node", consumerSummary.host().replaceAll("/", ""));
            topicSub.put("topicSub", topicSubs);
            consumerGroups.add(topicSub);
        }
    } catch (Exception e) {
        LOG.error("Get kafka consumer metadata has error, msg is " + e.getMessage());
    } finally {
        // Close the admin client on error paths too; the original only
        // closed it when no exception occurred.
        if (adminClient != null) {
            adminClient.close();
        }
    }
    return consumerGroups;
}
/** Get kafka 0.10.x consumer pages: counts of active owners and subscribed topics. */
public String getKafkaActiverSize(String clusterAlias, String group) {
    JSONArray consumerGroups = getKafkaMetadata(parseBrokerServer(clusterAlias), group);
    int activerCounter = 0;
    Set<String> topics = new HashSet<>();
    for (Object object : consumerGroups) {
        JSONObject consumerGroup = (JSONObject) object;
        // Count an entry as active only when it has a real owner. The
        // original check (owner != "" || owner != null) compared strings
        // by reference and was always true, inflating the count.
        String owner = consumerGroup.getString("owner");
        if (owner != null && !owner.isEmpty()) {
            activerCounter++;
        }
        for (Object topicObject : consumerGroup.getJSONArray("topicSub")) {
            JSONObject topic = (JSONObject) topicObject;
            topics.add(topic.getString("topic"));
        }
    }
    JSONObject activerAndTopics = new JSONObject();
    activerAndTopics.put("activers", activerCounter);
    activerAndTopics.put("topics", topics.size());
    return activerAndTopics.toJSONString();
}
/** Get kafka 0.10.x consumer groups: count of non-internal groups across all nodes. */
public int getKafkaConsumerGroups(String clusterAlias) {
    Properties prop = new Properties();
    int counter = 0;
    prop.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, parseBrokerServer(clusterAlias));
    AdminClient adminClient = null;
    try {
        adminClient = AdminClient.create(prop);
        scala.collection.immutable.Map<Node, scala.collection.immutable.List<GroupOverview>> opts = adminClient.listAllConsumerGroups();
        Iterator<Tuple2<Node, scala.collection.immutable.List<GroupOverview>>> groupOverview = opts.iterator();
        while (groupOverview.hasNext()) {
            Tuple2<Node, scala.collection.immutable.List<GroupOverview>> tuple = groupOverview.next();
            Iterator<GroupOverview> groups = tuple._2.iterator();
            while (groups.hasNext()) {
                GroupOverview group = groups.next();
                String groupId = group.groupId();
                // Exclude kafka-eagle's own internal consumer groups.
                if (!groupId.contains("kafka.eagle")) {
                    counter++;
                }
            }
        }
    } catch (Exception e) {
        // Log through the class logger instead of printStackTrace(),
        // matching the error handling of the sibling methods.
        LOG.error("Get kafka consumer groups has error,msg is " + e.getMessage());
    } finally {
        // Close the admin client even when listing fails (previously leaked).
        if (adminClient != null) {
            adminClient.close();
        }
    }
    return counter;
}
/** Get kafka 0.10.x consumer topic information: all topics the group subscribes to. */
public Set<String> getKafkaConsumerTopic(String clusterAlias, String group) {
    Set<String> subscribed = new HashSet<>();
    for (Object groupEntry : getKafkaMetadata(parseBrokerServer(clusterAlias), group)) {
        JSONArray topicSubs = ((JSONObject) groupEntry).getJSONArray("topicSub");
        for (Object sub : topicSubs) {
            subscribed.add(((JSONObject) sub).getString("topic"));
        }
    }
    return subscribed;
}
/** Get kafka 0.10.x consumer group and topic. */
public String getKafkaConsumerGroupTopic(String clusterAlias, String group) {
// Raw JSON of the group's per-consumer topic assignments.
return getKafkaMetadata(parseBrokerServer(clusterAlias), group).toJSONString();
}
}
| |
package org.jenkinsci.plugins.p4.client;
import com.perforce.p4java.exception.AccessException;
import com.perforce.p4java.exception.ConnectionException;
import com.perforce.p4java.exception.P4JavaException;
import com.perforce.p4java.exception.RequestException;
import com.perforce.p4java.impl.mapbased.server.Server;
import com.perforce.p4java.impl.mapbased.server.cmd.ResultMapParser;
import com.perforce.p4java.server.CmdSpec;
import com.perforce.p4java.server.IOptionsServer;
import com.perforce.p4java.server.callback.ICommandCallback;
import com.perforce.p4java.server.callback.IProgressCallback;
import hudson.AbortException;
import hudson.model.Descriptor;
import hudson.model.TaskListener;
import jenkins.model.Jenkins;
import org.jenkinsci.plugins.p4.PerforceScm;
import org.jenkinsci.plugins.p4.console.P4Logging;
import org.jenkinsci.plugins.p4.console.P4Progress;
import org.jenkinsci.plugins.p4.credentials.P4BaseCredentials;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
public class SessionHelper extends CredentialsHelper {

    private static final Logger logger = Logger.getLogger(SessionHelper.class.getName());

    /**
     * Matches the expiry part of a 'p4 login -s' status message,
     * e.g. "... ticket expires in 11 hours 21 minutes."
     * Compiled once; {@link Pattern} is immutable and thread-safe.
     */
    private static final Pattern TICKET_EXPIRY = Pattern.compile(".* expires in (\\d+) hours (\\d+) minutes.");

    private final ConnectionConfig connectionConfig;
    private final Validate validate;
    private final String sessionId;
    private final long sessionLife;
    private final boolean sessionEnabled;

    private IOptionsServer connection;
    private boolean abort = false;

    // Shared across all helper instances so login tickets can be reused
    // between builds; keyed by the credential ID.
    private static ConcurrentMap<String, SessionEntry> loginCache = new ConcurrentHashMap<>();

    /**
     * Connects to Perforce with the given credential, retrying with back-off.
     *
     * @param credential the Perforce credential
     * @param listener   listener for console output
     * @throws IOException if no connection could be established
     */
    public SessionHelper(P4BaseCredentials credential, TaskListener listener) throws IOException {
        super(credential, listener);
        this.connectionConfig = new ConnectionConfig(getCredential());
        this.sessionId = credential.getId();
        this.sessionLife = credential.getSessionLife();
        this.sessionEnabled = credential.isSessionEnabled();
        connectionRetry();
        validate = new Validate(listener);
    }

    /**
     * Connects to Perforce with the credential looked up by ID, retrying with back-off.
     *
     * @param credentialID the Jenkins credential ID
     * @param listener     listener for console output
     * @throws IOException if no connection could be established
     */
    public SessionHelper(String credentialID, TaskListener listener) throws IOException {
        super(credentialID, listener);
        this.connectionConfig = new ConnectionConfig(getCredential());
        this.sessionId = credentialID;
        this.sessionLife = getCredential().getSessionLife();
        this.sessionEnabled = getCredential().isSessionEnabled();
        connectionRetry();
        validate = new Validate(listener);
    }

    /** Drops the cached session entry for this helper's credential. */
    public void invalidateSession() {
        loginCache.remove(sessionId);
    }

    /**
     * Remove all entries for a specific user.
     *
     * @param user p4user
     */
    public static void invalidateSession(String user) {
        // ConcurrentHashMap's entrySet iterator tolerates concurrent removal.
        for (Map.Entry<String, SessionEntry> entry : loginCache.entrySet()) {
            if (entry.getValue().getUser().equals(user)) {
                loginCache.remove(entry.getKey());
            }
        }
    }

    public IOptionsServer getConnection() {
        return connection;
    }

    public Validate getValidate() {
        return validate;
    }

    public String getTrust() throws Exception {
        return connection.getTrust();
    }

    /**
     * Logs in (if necessary) and returns the auth ticket, preferring the
     * cached session ticket when session support is enabled.
     *
     * @return the ticket, or {@code null} if login failed
     */
    public String getTicket() {
        try {
            if (login()) {
                if (sessionEnabled && loginCache.containsKey(sessionId)) {
                    SessionEntry entry = loginCache.get(sessionId);
                    return entry.getTicket();
                }
                return connection.getAuthTicket();
            }
        } catch (Exception e) {
            log(e.getLocalizedMessage());
        }
        return null;
    }

    public boolean isConnected() {
        if (connection == null) {
            return false;
        }
        return connection.isConnected();
    }

    public boolean isUnicode() throws ConnectionException, AccessException, RequestException {
        return connection.supportsUnicode();
    }

    /**
     * Checks the Perforce server version number and returns true if greater
     * than or equal to the min version. The value of min must be of the form
     * 20092 or 20073 (corresponding to 2009.2 and 2007.3 respectively).
     *
     * @param min Minimum server version
     * @return true if version supported.
     */
    public boolean checkVersion(int min) {
        int ver = connection.getServerVersionNumber();
        return (ver >= min);
    }

    /**
     * Authenticates the connection according to the credential type
     * (password, inline ticket or ticket file).
     *
     * @return true if the server reports a valid login
     * @throws Exception on an unknown authorisation type or a server error
     */
    public boolean login() throws Exception {
        connection.setUserName(getAuthorisationConfig().getUsername());

        // CHARSET is not defined (only for client access)
        if (isUnicode()) {
            connection.setCharsetName("utf8");
        }

        switch (getAuthorisationConfig().getType()) {
            case PASSWORD:
                // Exit early if logged in; restore the cached ticket if the
                // connection does not carry one yet.
                if (sessionEnabled && isLogin()) {
                    if (connection.getAuthTicket() == null) {
                        SessionEntry entry = loginCache.get(sessionId);
                        logger.info("Setting connection's ticket from cache: " + entry.getTicket());
                        connection.setAuthTicket(entry.getTicket());
                    }
                    return true;
                }
                String pass = getAuthorisationConfig().getPassword();
                boolean allHosts = getAuthorisationConfig().isAllhosts();
                connection.login(pass, allHosts);
                break;

            case TICKET:
                String ticket = getAuthorisationConfig().getTicketValue();
                connection.setAuthTicket(ticket);
                break;

            case TICKETPATH:
                String path = getAuthorisationConfig().getTicketPath();
                if (path == null || path.isEmpty()) {
                    path = connection.getTicketsFilePath();
                }
                connection.setTicketsFilePath(path);
                break;

            default:
                throw new Exception("Unknown Authorisation type: " + getAuthorisationConfig().getType());
        }

        // return login status...
        if (isLogin()) {
            return true;
        } else {
            String status = connection.getLoginStatus();
            logger.info("P4: login failed '" + status + "'");
            return false;
        }
    }

    public void logout() throws Exception {
        if (isLogin()) {
            connection.logout();
        }
    }

    /**
     * Disconnect from the Perforce Server.
     */
    protected void disconnect() {
        try {
            getConnection().disconnect();
            logger.fine("P4: closed connection OK");
        } catch (Exception e) {
            String err = "P4: Unable to close Perforce connection.";
            logger.severe(err);
            log(err);
        }
    }

    public boolean hasAborted() {
        return abort;
    }

    public void abort() {
        this.abort = true;
    }

    /** @return the PerforceScm descriptor, or {@code null} when unavailable */
    public PerforceScm.DescriptorImpl getP4SCM() {
        Jenkins j = Jenkins.getInstance();
        if (j != null) {
            Descriptor dsc = j.getDescriptor(PerforceScm.class);
            if (dsc instanceof PerforceScm.DescriptorImpl) {
                PerforceScm.DescriptorImpl p4scm = (PerforceScm.DescriptorImpl) dsc;
                return p4scm;
            }
        }
        return null;
    }

    /**
     * Retry Connection with back off for each failed attempt.
     *
     * @throws AbortException when all attempts are exhausted
     */
    private void connectionRetry() throws AbortException {
        int trys = 0;
        int attempt = getRetry();
        String err = "P4: Invalid credentials. Giving up...";

        while (trys <= attempt) {
            try {
                if (connect()) {
                    return;
                }
            } catch (Exception e) {
                err = e.getMessage();
            }
            trys++;
            String msg = "P4: Connection retry: " + trys;
            logger.severe(msg);
            log(msg);

            // back off n^2 seconds, before retry.
            // (fixed: 'trys ^ 2' is bitwise XOR in Java, not exponentiation —
            // it slept 3,0,1,6,... seconds instead of 1,4,9,16,...)
            try {
                TimeUnit.SECONDS.sleep((long) trys * trys);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }

        logger.severe(err);
        log(err);
        throw new AbortException(err);
    }

    /**
     * Convenience wrapper to connect and report errors
     */
    private boolean connect() throws Exception {
        // Connect to the Perforce server
        this.connection = ConnectionFactory.getConnection(connectionConfig);
        logger.fine("P4: opened connection OK");

        // Login to Perforce
        try {
            login();
        } catch (Exception e) {
            String err = "P4: Unable to login: " + e;
            logger.severe(err);
            log(err);
            return false;
        }

        // Register progress callback
        IProgressCallback progress = new P4Progress(getListener(), this);
        this.connection.registerProgressCallback(progress);

        // Register logging callback
        ICommandCallback logging = new P4Logging(getListener(), false);
        this.connection.registerCallback(logging);

        // Check P4IGNORE Environment; default a platform-appropriate name.
        Server server = (Server) this.connection;
        if (server.getIgnoreFileName() == null) {
            String os = System.getProperty("os.name").toLowerCase();
            String ignore = os.contains("win") ? "p4ignore.txt" : ".p4ignore";
            server.setIgnoreFileName(ignore);
        }
        return true;
    }

    /**
     * Determines login state: first consults the session cache, then probes
     * the server with 'p4 login -s'. A successful probe is cached.
     *
     * @return true if the server considers this connection logged in
     */
    private boolean isLogin() throws Exception {
        String user = connection.getUserName();
        if (sessionEnabled && loginCache.containsKey(sessionId)) {
            SessionEntry entry = loginCache.get(sessionId);
            long expire = entry.getExpire();
            // Only trust the cached ticket while more than 'sessionLife'
            // remains before expiry.
            // NOTE(review): assumes sessionLife is in milliseconds — confirm
            // against P4BaseCredentials.getSessionLife().
            long remain = expire - System.currentTimeMillis() - sessionLife;
            if (remain > 0) {
                logger.info("Found session entry for: " + sessionId + "(" + entry + ")");
                return true;
            } else {
                logger.info("Removing session entry for: " + sessionId + "(" + entry + ")");
                loginCache.remove(sessionId);
            }
        }

        if (sessionEnabled) {
            logger.info("No entry in session for: " + sessionId );
        }

        List<Map<String, Object>> resultMaps = connection.execMapCmdList(CmdSpec.LOGIN, new String[]{"-s"}, null);
        String ticket = connection.getAuthTicket();
        if (resultMaps != null && !resultMaps.isEmpty()) {
            for (Map<String, Object> map : resultMaps) {
                String status = ResultMapParser.getInfoStr(map);
                if (status == null) {
                    continue;
                }
                // "... login not necessary ..." => ticket never expires here.
                if (status.contains("not necessary")) {
                    SessionEntry entry = new SessionEntry(user, ticket, Long.MAX_VALUE);
                    loginCache.put(sessionId, entry);
                    return true;
                }
                if (status.contains("ticket expires in")) {
                    SessionEntry entry = new SessionEntry(user, ticket, getExpiry(status));
                    loginCache.put(sessionId, entry);
                    return true;
                }
                // If there is a broker or something else that swallows the message
                if (status.isEmpty()) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Parses "... expires in H hours M minutes." into an absolute epoch
     * millisecond deadline relative to now.
     *
     * @param loginStatus the 'p4 login -s' status line
     * @throws P4JavaException if the status cannot be parsed
     */
    private long getExpiry(String loginStatus) throws P4JavaException {
        try {
            Matcher matcher = TICKET_EXPIRY.matcher(loginStatus);
            if (matcher.matches()) {
                int hours = Integer.parseInt(matcher.group(1));
                int minutes = Integer.parseInt(matcher.group(2));
                long milli = ((hours * 60L * 60L) + (minutes * 60L)) * 1000L;
                return System.currentTimeMillis() + milli;
            }
            throw new P4JavaException("Unable to parse expires time: " + loginStatus);
        } catch (PatternSyntaxException | NumberFormatException e) {
            throw new P4JavaException(e);
        }
    }
}
| |
package org.sunflow.core;
import org.sunflow.PluginRegistry;
import org.sunflow.image.Color;
import org.sunflow.math.Point3;
import org.sunflow.math.QMC;
import org.sunflow.math.Vector3;
import org.sunflow.system.Timer;
import org.sunflow.system.UI;
import org.sunflow.system.UI.Module;
/**
 * Coordinates all lighting work for a {@link Scene}: direct light sampling,
 * caustic photon tracing, GI queries and the recursive bounce tracing used
 * by shaders. Depth limits and engines are configured via {@link Options}
 * in {@link #build(Options)}.
 */
class LightServer {
    // parent
    private final Scene scene;

    // lighting
    LightSource[] lights;

    // shading override
    private Shader shaderOverride;
    private boolean shaderOverridePhotons;

    // direct illumination: per-bounce-type maximum ray-tree depths
    private int maxDiffuseDepth;
    private int maxReflectionDepth;
    private int maxRefractionDepth;

    // indirect illumination
    private CausticPhotonMapInterface causticPhotonMap;
    private GIEngine giEngine;
    // progress counter shared by the photon threads; guarded by synchronized (this)
    private int photonCounter;

    LightServer(Scene scene) {
        this.scene = scene;
        lights = new LightSource[0];
        causticPhotonMap = null;

        shaderOverride = null;
        shaderOverridePhotons = false;

        maxDiffuseDepth = 1;
        maxReflectionDepth = 4;
        maxRefractionDepth = 4;

        causticPhotonMap = null;
        giEngine = null;
    }

    void setLights(LightSource[] lights) {
        this.lights = lights;
    }

    Scene getScene() {
        return scene;
    }

    /**
     * Installs a shader that replaces all object shaders; when
     * photonOverride is true it also applies during photon scattering.
     */
    void setShaderOverride(Shader shader, boolean photonOverride) {
        shaderOverride = shader;
        shaderOverridePhotons = photonOverride;
    }

    /**
     * Reads the depth/GI/caustic options, initializes the GI engine and
     * traces caustic photons.
     *
     * @return true on success, false if GI init or photon tracing failed
     */
    boolean build(Options options) {
        // read options
        maxDiffuseDepth = options.getInt("depths.diffuse", maxDiffuseDepth);
        maxReflectionDepth = options.getInt("depths.reflection", maxReflectionDepth);
        maxRefractionDepth = options.getInt("depths.refraction", maxRefractionDepth);
        String giEngineType = options.getString("gi.engine", null);
        giEngine = PluginRegistry.GI_ENGINE_PLUGINS.createObject(giEngineType);
        String caustics = options.getString("caustics", null);
        causticPhotonMap = PluginRegistry.CAUSTIC_PHOTON_MAP_PLUGINS.createObject(caustics);

        // validate options: negative depths are clamped to 0
        maxDiffuseDepth = Math.max(0, maxDiffuseDepth);
        maxReflectionDepth = Math.max(0, maxReflectionDepth);
        maxRefractionDepth = Math.max(0, maxRefractionDepth);

        Timer t = new Timer();
        t.start();
        // count total number of light samples
        int numLightSamples = 0;
        for (LightSource light : lights) {
            numLightSamples += light.getNumSamples();
        }
        // initialize gi engine
        if (giEngine != null) {
            if (!giEngine.init(options, scene)) {
                return false;
            }
        }
        if (!calculatePhotons(causticPhotonMap, "caustic", 0, options)) {
            return false;
        }
        t.end();
        UI.printInfo(Module.LIGHT, "Light Server stats:");
        UI.printInfo(Module.LIGHT, "  * Light sources found: %d", lights.length);
        UI.printInfo(Module.LIGHT, "  * Light samples:       %d", numLightSamples);
        UI.printInfo(Module.LIGHT, "  * Max raytrace depth:");
        UI.printInfo(Module.LIGHT, "      - Diffuse          %d", maxDiffuseDepth);
        UI.printInfo(Module.LIGHT, "      - Reflection       %d", maxReflectionDepth);
        UI.printInfo(Module.LIGHT, "      - Refraction       %d", maxRefractionDepth);
        UI.printInfo(Module.LIGHT, "  * GI engine            %s", giEngineType == null ? "none" : giEngineType);
        UI.printInfo(Module.LIGHT, "  * Caustics:            %s", caustics == null ? "none" : caustics);
        UI.printInfo(Module.LIGHT, "  * Shader override:     %b", shaderOverride);
        UI.printInfo(Module.LIGHT, "  * Photon override:     %b", shaderOverridePhotons);
        UI.printInfo(Module.LIGHT, "  * Build time:          %s", t.toString());
        return true;
    }

    void showStats() {
        // intentionally empty: build() already reports the collected stats
    }

    /**
     * Emits and traces photons into the given map using all scene lights,
     * distributing the work over the scene's configured thread count.
     * Lights are chosen proportionally to their power via a CDF (histogram).
     *
     * @param map  target photon store (null means nothing to do)
     * @param type label used in log/progress messages (e.g. "caustic")
     * @param seed offset added to the QMC sequence index
     * @return true on success or when map is null; false on error/cancel
     */
    boolean calculatePhotons(final PhotonStore map, String type, final int seed, Options options) {
        if (map == null) {
            return true;
        }
        if (lights.length == 0) {
            UI.printError(Module.LIGHT, "Unable to trace %s photons, no lights in scene", type);
            return false;
        }
        // cumulative power histogram (CDF) over the lights
        final float[] histogram = new float[lights.length];
        histogram[0] = lights[0].getPower();
        for (int i = 1; i < lights.length; i++) {
            histogram[i] = histogram[i - 1] + lights[i].getPower();
        }
        UI.printInfo(Module.LIGHT, "Tracing %s photons ...", type);
        map.prepare(options, scene.getBounds());
        int numEmittedPhotons = map.numEmit();
        if (numEmittedPhotons <= 0 || histogram[histogram.length - 1] <= 0) {
            UI.printError(Module.LIGHT, "Photon mapping enabled, but no %s photons to emit", type);
            return false;
        }
        UI.taskStart("Tracing " + type + " photons", 0, numEmittedPhotons);
        Thread[] photonThreads = new Thread[scene.getThreads()];
        final float scale = 1.0f / numEmittedPhotons;
        int delta = numEmittedPhotons / photonThreads.length;
        photonCounter = 0;
        Timer photonTimer = new Timer();
        photonTimer.start();
        for (int i = 0; i < photonThreads.length; i++) {
            final int threadID = i;
            final int start = threadID * delta;
            // the last thread also absorbs the division remainder
            final int end = (threadID == (photonThreads.length - 1)) ? numEmittedPhotons : (threadID + 1) * delta;
            photonThreads[i] = new Thread(() -> {
                IntersectionState istate = new IntersectionState();
                for (int i1 = start; i1 < end; i1++) {
                    synchronized (LightServer.this) {
                        UI.taskUpdate(photonCounter);
                        photonCounter++;
                        if (UI.taskCanceled()) {
                            return;
                        }
                    }

                    int qmcI = i1 + seed;
                    double rand = QMC.halton(0, qmcI) * histogram[histogram.length - 1];
                    int j = 0;
                    // Walk the CDF to the bucket containing 'rand'.
                    // FIXED: the bounds check must come before the array read;
                    // the original tested 'rand >= histogram[j]' first, which
                    // could read histogram[histogram.length] and throw
                    // ArrayIndexOutOfBoundsException (the j == length guard
                    // below shows that case is expected to be reachable).
                    while (j < histogram.length && rand >= histogram[j]) {
                        j++;
                    }
                    // make sure we didn't pick a zero-probability light
                    if (j == histogram.length) {
                        continue;
                    }

                    // Normalize rand within the chosen bucket.
                    // NOTE(review): for j > 0 the numerator uses histogram[j],
                    // which yields a value in (-1, 0]; (rand - histogram[j-1])
                    // would map into [0, 1). Preserved as-is — confirm against
                    // LightSource.getPhoton() expectations before changing.
                    double randX1 = (j == 0) ? rand / histogram[0] : (rand - histogram[j]) / (histogram[j] - histogram[j - 1]);
                    double randY1 = QMC.halton(1, qmcI);
                    double randX2 = QMC.halton(2, qmcI);
                    double randY2 = QMC.halton(3, qmcI);
                    Point3 pt = new Point3();
                    Vector3 dir = new Vector3();
                    Color power = new Color();
                    lights[j].getPhoton(randX1, randY1, randX2, randY2, pt, dir, power);
                    power.mul(scale);
                    Ray r = new Ray(pt, dir);
                    scene.trace(r, istate);
                    if (istate.hit()) {
                        shadePhoton(ShadingState.createPhotonState(r, istate, qmcI, map, LightServer.this), power);
                    }
                }
            });
            photonThreads[i].setPriority(scene.getThreadPriority());
            photonThreads[i].start();
        }
        for (int i = 0; i < photonThreads.length; i++) {
            try {
                photonThreads[i].join();
            } catch (InterruptedException e) {
                UI.printError(Module.LIGHT, "Photon thread %d of %d was interrupted", i + 1, photonThreads.length);
                return false;
            }
        }
        if (UI.taskCanceled()) {
            UI.taskStop(); // shut down task cleanly
            return false;
        }
        photonTimer.end();
        UI.taskStop();
        UI.printInfo(Module.LIGHT, "Tracing time for %s photons: %s", type, photonTimer.toString());
        map.init();
        return true;
    }

    /** Prepares the shading state and scatters a photon through its shader. */
    void shadePhoton(ShadingState state, Color power) {
        state.getInstance().prepareShadingState(state);
        Shader shader = getPhotonShader(state);
        // scatter photon
        if (shader != null) {
            shader.scatterPhoton(state, power);
        }
    }

    /** Continues a photon path with a diffuse bounce, subject to depth limits. */
    void traceDiffusePhoton(ShadingState previous, Ray r, Color power) {
        if (previous.getDiffuseDepth() >= maxDiffuseDepth) {
            return;
        }
        IntersectionState istate = previous.getIntersectionState();
        scene.trace(r, istate);
        if (previous.getIntersectionState().hit()) {
            // create a new shading context
            ShadingState state = ShadingState.createDiffuseBounceState(previous, r, 0);
            shadePhoton(state, power);
        }
    }

    /** Continues a photon path with a reflection bounce, subject to depth limits. */
    void traceReflectionPhoton(ShadingState previous, Ray r, Color power) {
        if (previous.getReflectionDepth() >= maxReflectionDepth) {
            return;
        }
        IntersectionState istate = previous.getIntersectionState();
        scene.trace(r, istate);
        if (previous.getIntersectionState().hit()) {
            // create a new shading context
            ShadingState state = ShadingState.createReflectionBounceState(previous, r, 0);
            shadePhoton(state, power);
        }
    }

    /** Continues a photon path with a refraction bounce, subject to depth limits. */
    void traceRefractionPhoton(ShadingState previous, Ray r, Color power) {
        if (previous.getRefractionDepth() >= maxRefractionDepth) {
            return;
        }
        IntersectionState istate = previous.getIntersectionState();
        scene.trace(r, istate);
        if (previous.getIntersectionState().hit()) {
            // create a new shading context
            ShadingState state = ShadingState.createRefractionBounceState(previous, r, 0);
            shadePhoton(state, power);
        }
    }

    /** @return the override shader if set, otherwise the state's own shader */
    private Shader getShader(ShadingState state) {
        return shaderOverride != null ? shaderOverride : state.getShader();
    }

    /** @return the override shader only when photon override is enabled */
    private Shader getPhotonShader(ShadingState state) {
        return (shaderOverride != null && shaderOverridePhotons) ? shaderOverride : state.getShader();
    }

    /**
     * Traces a camera ray and shades the hit (consulting the shading cache
     * when provided).
     *
     * @return the shaded state, or null if the ray hit nothing
     */
    ShadingState getRadiance(float rx, float ry, float time, int i, int d, Ray r, IntersectionState istate, ShadingCache cache) {
        // set this value once - will stay constant for the entire ray-tree
        istate.time = time;
        scene.trace(r, istate);
        if (istate.hit()) {
            ShadingState state = ShadingState.createState(istate, rx, ry, time, r, i, d, this);
            state.getInstance().prepareShadingState(state);
            Shader shader = getShader(state);
            if (shader == null) {
                state.setResult(Color.BLACK);
                return state;
            }
            if (cache != null) {
                Color c = cache.lookup(state, shader);
                if (c != null) {
                    state.setResult(c);
                    return state;
                }
            }
            state.setResult(shader.getRadiance(state));
            if (cache != null) {
                cache.add(state, shader, state.getResult());
            }
            checkNanInf(state.getResult());
            return state;
        } else {
            return null;
        }
    }

    /** Warns (does not fix) when a shading sample came back NaN or infinite. */
    private static void checkNanInf(Color c) {
        if (c.isNan()) {
            UI.printWarning(Module.LIGHT, "NaN shading sample!");
        } else if (c.isInf()) {
            UI.printWarning(Module.LIGHT, "Inf shading sample!");
        }
    }

    /** Shades a baking sample; black when no shader applies. */
    void shadeBakeResult(ShadingState state) {
        Shader shader = getShader(state);
        if (shader != null) {
            state.setResult(shader.getRadiance(state));
        } else {
            state.setResult(Color.BLACK);
        }
    }

    /** Prepares and shades an already-intersected state; black when shaderless. */
    Color shadeHit(ShadingState state) {
        state.getInstance().prepareShadingState(state);
        Shader shader = getShader(state);
        return (shader != null) ? shader.getRadiance(state) : Color.BLACK;
    }

    /** Traces a glossy bounce ray; black when depth-limited or on a caustic path. */
    Color traceGlossy(ShadingState previous, Ray r, int i) {
        // limit path depth and disable caustic paths
        if (previous.getReflectionDepth() >= maxReflectionDepth || previous.getDiffuseDepth() > 0) {
            return Color.BLACK;
        }
        IntersectionState istate = previous.getIntersectionState();
        istate.numGlossyRays++;
        scene.trace(r, istate);
        return istate.hit() ? shadeHit(ShadingState.createGlossyBounceState(previous, r, i)) : Color.BLACK;
    }

    /** Traces a mirror reflection ray; black when depth-limited or on a caustic path. */
    Color traceReflection(ShadingState previous, Ray r, int i) {
        // limit path depth and disable caustic paths
        if (previous.getReflectionDepth() >= maxReflectionDepth || previous.getDiffuseDepth() > 0) {
            return Color.BLACK;
        }
        IntersectionState istate = previous.getIntersectionState();
        istate.numReflectionRays++;
        scene.trace(r, istate);
        return istate.hit() ? shadeHit(ShadingState.createReflectionBounceState(previous, r, i)) : Color.BLACK;
    }

    /** Traces a refraction ray; black when depth-limited or on a caustic path. */
    Color traceRefraction(ShadingState previous, Ray r, int i) {
        // limit path depth and disable caustic paths
        if (previous.getRefractionDepth() >= maxRefractionDepth || previous.getDiffuseDepth() > 0) {
            return Color.BLACK;
        }
        IntersectionState istate = previous.getIntersectionState();
        istate.numRefractionRays++;
        scene.trace(r, istate);
        return istate.hit() ? shadeHit(ShadingState.createRefractionBounceState(previous, r, i)) : Color.BLACK;
    }

    /** Traces a final-gather ray; null when diffuse depth is exhausted or no hit. */
    ShadingState traceFinalGather(ShadingState previous, Ray r, int i) {
        if (previous.getDiffuseDepth() >= maxDiffuseDepth) {
            return null;
        }
        IntersectionState istate = previous.getIntersectionState();
        scene.trace(r, istate);
        return istate.hit() ? ShadingState.createFinalGatherState(previous, r, i) : null;
    }

    /** Global radiance from the GI engine; black when no engine is configured. */
    Color getGlobalRadiance(ShadingState state) {
        if (giEngine == null) {
            return Color.BLACK;
        }
        return giEngine.getGlobalRadiance(state);
    }

    /** Irradiance from the GI engine; black when absent or depth-limited. */
    Color getIrradiance(ShadingState state, Color diffuseReflectance) {
        // no gi engine, or we have already exceeded number of available bounces
        if (giEngine == null || state.getDiffuseDepth() >= maxDiffuseDepth) {
            return Color.BLACK;
        }
        return giEngine.getIrradiance(state, diffuseReflectance);
    }

    /** Collects direct-light samples from every light source into the state. */
    void initLightSamples(ShadingState state) {
        for (LightSource l : lights) {
            l.getSamples(state);
        }
    }

    /** Collects caustic samples when a caustic photon map is configured. */
    void initCausticSamples(ShadingState state) {
        if (causticPhotonMap != null) {
            causticPhotonMap.getSamples(state);
        }
    }
}
| |
/*
* Copyright 2012 Evernote Corporation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.evernote.client.android;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Window;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Toast;
import com.evernote.androidsdk.R;
import com.evernote.client.oauth.EvernoteAuthToken;
import com.evernote.client.oauth.YinxiangApi;
import com.evernote.edam.userstore.BootstrapInfo;
import com.evernote.edam.userstore.BootstrapProfile;
import org.scribe.builder.ServiceBuilder;
import org.scribe.builder.api.EvernoteApi;
import org.scribe.model.Token;
import org.scribe.model.Verifier;
import org.scribe.oauth.OAuthService;
import java.util.ArrayList;
/**
* An Android Activity for authenticating to Evernote using OAuth. Third parties
* should not need to use this class directly.
*
*
* class created by @tylersmithnet
*/
public class EvernoteOAuthActivity extends ActionBarActivity {
private static final String LOGTAG = "EvernoteOAuthActivity";

// Intent-extra / saved-instance-state keys.
static final String EXTRA_EVERNOTE_SERVICE = "EVERNOTE_HOST";
static final String EXTRA_CONSUMER_KEY = "CONSUMER_KEY";
static final String EXTRA_CONSUMER_SECRET = "CONSUMER_SECRET";
static final String EXTRA_REQUEST_TOKEN = "REQUEST_TOKEN";
static final String EXTRA_REQUEST_TOKEN_SECRET = "REQUEST_TOKEN_SECRET";
static final String EXTRA_BOOTSTRAP_SELECTED_PROFILE_POS = "BOOTSTRAP_SELECTED_PROFILE_POS";
static final String EXTRA_BOOTSTRAP_SELECTED_PROFILE = "BOOTSTRAP_SELECTED_PROFILE";
static final String EXTRA_BOOTSTRAP_SELECTED_PROFILES = "BOOTSTRAP_SELECTED_PROFILES";

// Which Evernote service variant is being authenticated against.
private EvernoteSession.EvernoteService mEvernoteService = null;

// Bootstrap (service discovery) state: chosen profile, its index, and the
// full list returned by the server.
private BootstrapProfile mSelectedBootstrapProfile;
private int mSelectedBootstrapProfilePos = 0;
private ArrayList<BootstrapProfile> mBootstrapProfiles = new ArrayList<BootstrapProfile>();

// OAuth consumer credentials plus the in-flight request token pair.
private String mConsumerKey = null;
private String mConsumerSecret = null;
private String mRequestToken = null;
private String mRequestTokenSecret = null;

// Id of the managed progress dialog (see onCreateDialog/onPrepareDialog).
private final int DIALOG_PROGRESS = 101;

private Activity mActivity;
private WebView mWebView;

// In-flight async tasks; tracked so they are not started twice.
private AsyncTask mBeginAuthSyncTask = null;
private AsyncTask mCompleteAuthSyncTask = null;

// Options menu handle, kept so it can be refreshed after bootstrap completes.
private Menu menu;
/**
 * Intercepts page loads in the OAuth WebView: when the server redirects to
 * our callback scheme ("en-oauth", see {@link #getCallbackScheme()}), the
 * authorization step is complete and the token-exchange task is started
 * instead of loading the URL.
 */
private WebViewClient mWebViewClient = new WebViewClient() {
    @Override
    public boolean shouldOverrideUrlLoading(WebView view, String url) {
        Uri uri = Uri.parse(url);
        // Compare with the known constant on the left: uri.getScheme() can
        // be null (relative or scheme-less URLs), which previously threw a
        // NullPointerException here.
        if (getCallbackScheme().equals(uri.getScheme())) {
            // Only start the exchange once, even if the page fires
            // multiple callback loads.
            if (mCompleteAuthSyncTask == null) {
                mCompleteAuthSyncTask = new CompleteAuthAsyncTask()
                        .execute(uri);
            }
            return true;
        }
        return super.shouldOverrideUrlLoading(view, url);
    }
};
/**
 * Feeds page-load progress into the window progress bar enabled via
 * Window.FEATURE_PROGRESS in onCreate.
 */
private WebChromeClient mWebChromeClient = new WebChromeClient() {
    @Override
    public void onProgressChanged(WebView view, int newProgress) {
        super.onProgressChanged(view, newProgress);
        // newProgress is 0..100; Activity.setProgress expects 0..10000.
        // NOTE(review): * 1000 saturates the bar once the page is 10%
        // loaded — * 100 would map linearly. Confirm whether intentional.
        mActivity.setProgress(newProgress * 1000);
    }
};
/**
 * Sets up the progress feature, action bar and OAuth WebView, then either
 * restores prior state (e.g. after rotation) or reads the launch Intent
 * extras for a fresh authentication flow.
 */
@SuppressLint("SetJavaScriptEnabled")
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Show web loading progress
    getWindow().requestFeature(Window.FEATURE_PROGRESS);

    setContentView(R.layout.esdk__webview);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    getSupportActionBar().setDisplayUseLogoEnabled(false);
    getSupportActionBar().setDisplayShowTitleEnabled(true);

    mActivity = this;

    mWebView = (WebView) findViewById(R.id.esdk__webview);
    mWebView.setWebViewClient(mWebViewClient);
    mWebView.setWebChromeClient(mWebChromeClient);
    // The Evernote OAuth pages require JavaScript (hence the SuppressLint).
    mWebView.getSettings().setJavaScriptEnabled(true);

    if (savedInstanceState != null) {
        // Restoring after a configuration change or process death.
        mEvernoteService = savedInstanceState
                .getParcelable(EXTRA_EVERNOTE_SERVICE);
        mConsumerKey = savedInstanceState.getString(EXTRA_CONSUMER_KEY);
        mConsumerSecret = savedInstanceState
                .getString(EXTRA_CONSUMER_SECRET);
        mRequestToken = savedInstanceState.getString(EXTRA_REQUEST_TOKEN);
        mRequestTokenSecret = savedInstanceState
                .getString(EXTRA_REQUEST_TOKEN_SECRET);
        mSelectedBootstrapProfile = (BootstrapProfile) savedInstanceState
                .getSerializable(EXTRA_BOOTSTRAP_SELECTED_PROFILE);
        mSelectedBootstrapProfilePos = savedInstanceState
                .getInt(EXTRA_BOOTSTRAP_SELECTED_PROFILE_POS);
        // Unchecked cast: the bundle stores the list as a Serializable.
        mBootstrapProfiles = (ArrayList<BootstrapProfile>) savedInstanceState
                .getSerializable(EXTRA_BOOTSTRAP_SELECTED_PROFILES);
        // Restore the WebView's own history/scroll state.
        mWebView.restoreState(savedInstanceState);
    } else {
        // Fresh launch: configuration arrives via Intent extras.
        Intent intent = getIntent();
        mEvernoteService = intent
                .getParcelableExtra(EXTRA_EVERNOTE_SERVICE);
        mConsumerKey = intent.getStringExtra(EXTRA_CONSUMER_KEY);
        mConsumerSecret = intent.getStringExtra(EXTRA_CONSUMER_SECRET);
    }
}
/**
 * Validates the OAuth configuration and, when no bootstrap profile has
 * been selected yet, starts service discovery.
 */
@Override
protected void onResume() {
    super.onResume();
    // Without both consumer values there is nothing to authenticate with.
    boolean missingConfig =
            TextUtils.isEmpty(mConsumerKey) || TextUtils.isEmpty(mConsumerSecret);
    if (missingConfig) {
        exit(false);
        return;
    }
    // A restored instance already carries a profile; otherwise discover one.
    if (mSelectedBootstrapProfile == null) {
        mBeginAuthSyncTask = new BootstrapAsyncTask().execute();
    }
}
/**
 * Not needed because of configChanges, but leaving in case developer does
 * not add it to the manifest.
 *
 * @param outState bundle receiving the activity and WebView state
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
    // OAuth configuration plus the in-flight request token pair.
    outState.putParcelable(EXTRA_EVERNOTE_SERVICE, mEvernoteService);
    outState.putString(EXTRA_CONSUMER_KEY, mConsumerKey);
    outState.putString(EXTRA_CONSUMER_SECRET, mConsumerSecret);
    outState.putString(EXTRA_REQUEST_TOKEN, mRequestToken);
    outState.putString(EXTRA_REQUEST_TOKEN_SECRET, mRequestTokenSecret);
    // Bootstrap (service selection) state.
    outState.putSerializable(EXTRA_BOOTSTRAP_SELECTED_PROFILE,
            mSelectedBootstrapProfile);
    outState.putInt(EXTRA_BOOTSTRAP_SELECTED_PROFILE_POS,
            mSelectedBootstrapProfilePos);
    outState.putSerializable(EXTRA_BOOTSTRAP_SELECTED_PROFILES,
            mBootstrapProfiles);
    // Preserve the WebView's own back-stack and scroll state.
    mWebView.saveState(outState);
    super.onSaveInstanceState(outState);
}
/**
 * Supplies the managed progress dialog; all other ids are delegated to
 * the framework.
 */
@Override
protected Dialog onCreateDialog(int id) {
    // TODO onCreateDialog(int) is deprecated
    if (id == DIALOG_PROGRESS) {
        return new ProgressDialog(EvernoteOAuthActivity.this);
    }
    return super.onCreateDialog(id);
}
/**
 * Configures the progress dialog each time it is shown: indeterminate,
 * non-cancelable, with the standard loading message.
 */
@Override
protected void onPrepareDialog(int id, Dialog dialog) {
    if (id == DIALOG_PROGRESS) {
        ProgressDialog progress = (ProgressDialog) dialog;
        progress.setIndeterminate(true);
        progress.setCancelable(false);
        progress.setMessage(getString(R.string.esdk__loading));
    }
}
/**
 * Specifies a URL scheme that uniquely identifies callbacks to this
 * application after a user authorizes access to their Evernote account in
 * our WebView.
 *
 * @return the custom callback scheme ("en-oauth")
 */
private String getCallbackScheme() {
    return "en-oauth";
}
/**
 * Create a Scribe OAuthService object that can be used to perform OAuth
 * authentication with the appropriate Evernote service (sandbox,
 * production, or Yinxiang/China) for the selected bootstrap profile.
 *
 * @return a configured {@link OAuthService}
 * @throws IllegalArgumentException if the profile host is missing or not a
 *                                  known Evernote service
 */
@SuppressWarnings("unchecked")
private OAuthService createService() {
    @SuppressWarnings("rawtypes")
    Class apiClass;
    String host = mSelectedBootstrapProfile.getSettings().getServiceHost();
    // Fixed: a null host previously slipped past the startsWith() guard and
    // threw a NullPointerException on host.equals(...) below.
    if (host == null) {
        throw new IllegalArgumentException("Unsupported Evernote host: null");
    }
    if (!host.startsWith("http")) {
        host = "https://" + host;
    }
    if (host.equals(EvernoteSession.HOST_SANDBOX)) {
        apiClass = EvernoteApi.Sandbox.class;
    } else if (host.equals(EvernoteSession.HOST_PRODUCTION)) {
        apiClass = EvernoteApi.class;
    } else if (host.equals(EvernoteSession.HOST_CHINA)) {
        apiClass = YinxiangApi.class;
    } else {
        throw new IllegalArgumentException("Unsupported Evernote host: "
                + host);
    }
    return new ServiceBuilder().provider(apiClass).apiKey(mConsumerKey)
            .apiSecret(mConsumerSecret)
            .callback(getCallbackScheme() + "://callback").build();
}
/**
 * Exit the activity and display a toast message.
 *
 * @param success Whether the OAuth process completed successfully.
 */
private void exit(final boolean success) {
    // Toast/finish must run on the UI thread; callers may be background tasks.
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            int message = success ? R.string.esdk__evernote_login_successful
                    : R.string.esdk__evernote_login_failed;
            Toast.makeText(mActivity, message, Toast.LENGTH_LONG).show();
            setResult(success ? RESULT_OK : RESULT_CANCELED);
            finish();
        }
    });
}
/**
 * On Honeycomb and above this creates an action bar with the
 * switch-service item; below Honeycomb it creates the options menu bound
 * to the hardware menu key.
 *
 * @param menu the menu to inflate into
 * @return always true (show the menu)
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Keep a handle to the menu so it can be refreshed once the bootstrap
    // profiles arrive.
    getMenuInflater().inflate(R.menu.esdk__oauth, menu);
    this.menu = menu;
    return true;
}
/**
 * On Honeycomb and above this is called when we invalidate, which happens
 * when the {@link ArrayList} of {@link BootstrapProfile} is updated.
 *
 * Below Honeycomb this is called when the user presses the menu button.
 *
 * Shows the switch-service item only when more than one bootstrap profile
 * is available, labelled with the alternative service's name.
 *
 * @param menu the options menu
 * @return always true (display the menu)
 */
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
    MenuItem itemSwitchService = menu.findItem(R.id.esdk__switch_service);
    if (mBootstrapProfiles != null && mBootstrapProfiles.size() > 1) {
        itemSwitchService.setVisible(true);
        // Title shows the service the user would switch TO.
        if (BootstrapManager.CHINA_PROFILE.equals(mSelectedBootstrapProfile
                .getName())) {
            itemSwitchService
                    .setTitle(BootstrapManager.DISPLAY_EVERNOTE_INTL);
        } else {
            itemSwitchService.setTitle(BootstrapManager.DISPLAY_YXBIJI);
        }
    } else {
        // Fixed: this branch previously called setVisible(true) as well,
        // making the size check a no-op and offering "switch service" even
        // when there is no alternative profile to switch to.
        itemSwitchService.setVisible(false);
    }
    return true;
}
/**
 * This will select the next {@link BootstrapProfile} in
 * {@link #mBootstrapProfiles} and start a new webview load request.
 *
 * @param item the selected menu item
 * @return always true (the event is consumed)
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == R.id.esdk__switch_service) {
        // Only switch when no bootstrap request is running and there is a
        // profile list to cycle through.
        if ((mBeginAuthSyncTask == null || mBeginAuthSyncTask.getStatus() != AsyncTask.Status.RUNNING)
                && (mSelectedBootstrapProfile != null && mBootstrapProfiles != null)) {
            mSelectedBootstrapProfilePos = (mSelectedBootstrapProfilePos + 1)
                    % mBootstrapProfiles.size();
            // The title is chosen from the profile being switched away from,
            // so the item then offers switching back to it.
            if (BootstrapManager.CHINA_PROFILE
                    .equals(mSelectedBootstrapProfile.getName())) {
                item.setTitle(BootstrapManager.DISPLAY_YXBIJI);
            } else {
                item.setTitle(BootstrapManager.DISPLAY_EVERNOTE_INTL);
            }
            // Clear cached state so the bootstrap task re-resolves the
            // newly selected service from scratch.
            mBootstrapProfiles = null;
            mSelectedBootstrapProfile = null;
            mBeginAuthSyncTask = new BootstrapAsyncTask().execute();
        }
    } else if (item.getItemId() == android.R.id.home) {
        // Up navigation cancels the OAuth flow.
        exit(false);
    }
    return true;
}
/**
 * Get a request token from the Evernote service and send the user to our
 * WebView to authorize access.
 */
private class BootstrapAsyncTask extends AsyncTask<Void, Void, String> {

    @Override
    protected void onPreExecute() {
        // TODO deprecated
        showDialog(DIALOG_PROGRESS);
    }

    /**
     * Resolves the bootstrap profile (a network call) and fetches an OAuth
     * request token for it.
     *
     * @return the authorization URL to load, or null on any failure
     */
    @Override
    protected String doInBackground(Void... params) {
        String url = null;
        try {
            EvernoteSession session = EvernoteSession.getOpenSession();
            if (session != null) {
                // Network request
                BootstrapManager.BootstrapInfoWrapper infoWrapper = session
                        .getBootstrapSession().getBootstrapInfo();
                if (infoWrapper != null) {
                    BootstrapInfo info = infoWrapper.getBootstrapInfo();
                    if (info != null) {
                        // Unchecked cast: the API declares List, the field
                        // stores ArrayList. NOTE(review): these activity
                        // fields are written from a background thread —
                        // confirm that is safe with the UI usages.
                        mBootstrapProfiles = (ArrayList<BootstrapProfile>) info
                                .getProfiles();
                        if (mBootstrapProfiles != null
                                && mBootstrapProfiles.size() > 0
                                && mSelectedBootstrapProfilePos < mBootstrapProfiles
                                .size()) {
                            mSelectedBootstrapProfile = mBootstrapProfiles
                                    .get(mSelectedBootstrapProfilePos);
                        }
                    }
                }
            }

            // Bail out when bootstrap produced no usable service host.
            if (mSelectedBootstrapProfile == null
                    || TextUtils.isEmpty(mSelectedBootstrapProfile
                    .getSettings().getServiceHost())) {
                Log.d(LOGTAG, "Bootstrap did not return a valid host");
                return null;
            }

            OAuthService service = createService();

            Log.i(LOGTAG, "Retrieving OAuth request token...");
            Token reqToken = service.getRequestToken();
            mRequestToken = reqToken.getToken();
            mRequestTokenSecret = reqToken.getSecret();

            Log.i(LOGTAG, "Redirecting user for authorization...");
            url = service.getAuthorizationUrl(reqToken);
        } catch (BootstrapManager.ClientUnsupportedException cue) {
            // Server rejected this client version; fail quietly.
            return null;
        } catch (Exception ex) {
            Log.e(LOGTAG, "Failed to obtain OAuth request token", ex);
        }
        return url;
    }

    /**
     * Open a WebView to allow the user to authorize access to their
     * account.
     *
     * @param url The URL of the OAuth authorization web page.
     */
    @SuppressLint("NewApi")
    @Override
    protected void onPostExecute(String url) {
        // TODO deprecated
        removeDialog(DIALOG_PROGRESS);

        if (!TextUtils.isEmpty(url)) {
            mWebView.loadUrl(url);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                invalidateOptionsMenu();
            }
            // NOTE(review): 'menu' is only assigned in onCreateOptionsMenu;
            // if this task finishes before the menu exists this passes null
            // into onPrepareOptionsMenu — confirm the lifecycle ordering.
            onPrepareOptionsMenu(menu);
        } else {
            exit(false);
        }
    }
}
    /**
     * An AsyncTask to complete the OAuth process after successful user
     * authorization: exchanges the stored request token for an access token
     * and persists the resulting credentials.
     */
    private class CompleteAuthAsyncTask extends
            AsyncTask<Uri, Void, EvernoteAuthToken> {

        @Override
        protected void onPreExecute() {
            // TODO deprecated
            showDialog(DIALOG_PROGRESS);
        }

        /**
         * Exchanges the stored OAuth request token for an access token using
         * the "oauth_verifier" query parameter from the callback {@code Uri}.
         *
         * @param uris the OAuth callback URI(s); only the first is used
         * @return the access token, or null if the user declined or the
         *         exchange failed
         */
        @Override
        protected EvernoteAuthToken doInBackground(Uri... uris) {
            EvernoteAuthToken authToken = null;
            if (uris == null || uris.length == 0) {
                return null;
            }
            Uri uri = uris[0];

            if (!TextUtils.isEmpty(mRequestToken)) {
                OAuthService service = createService();
                String verifierString = uri.getQueryParameter("oauth_verifier");
                if (TextUtils.isEmpty(verifierString)) {
                    // A missing verifier means the user declined authorization.
                    Log.i(LOGTAG, "User did not authorize access");
                } else {
                    Verifier verifier = new Verifier(verifierString);
                    Log.i(LOGTAG, "Retrieving OAuth access token...");
                    try {
                        Token reqToken = new Token(mRequestToken,
                                mRequestTokenSecret);
                        authToken = new EvernoteAuthToken(
                                service.getAccessToken(reqToken, verifier));
                    } catch (Exception ex) {
                        Log.e(LOGTAG, "Failed to obtain OAuth access token", ex);
                    }
                }
            } else {
                Log.d(LOGTAG,
                        "Unable to retrieve OAuth access token, no request token");
            }

            return authToken;
        }

        /**
         * Save the authentication information resulting from a successful OAuth
         * authorization and complete the activity.
         */
        @Override
        protected void onPostExecute(EvernoteAuthToken authToken) {
            // TODO deprecated
            removeDialog(DIALOG_PROGRESS);

            if (EvernoteSession.getOpenSession() == null) {
                exit(false);
                return;
            }
            // NOTE(review): assumes mSelectedBootstrapProfile is non-null here
            // (set by BootstrapAsyncTask before the WebView flow) — confirm;
            // otherwise this line can throw NullPointerException.
            exit(EvernoteSession.getOpenSession().persistAuthenticationToken(
                    getApplicationContext(), authToken,
                    mSelectedBootstrapProfile.getSettings().getServiceHost()));
        }
    }
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.compositor.layouts;
import android.content.Context;
import android.graphics.Bitmap;
import android.text.TextUtils;
import android.view.ViewGroup;
import org.chromium.base.ObserverList;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.compositor.TitleCache;
import org.chromium.chrome.browser.compositor.layouts.components.LayoutTab;
import org.chromium.chrome.browser.compositor.layouts.components.VirtualView;
import org.chromium.chrome.browser.compositor.layouts.content.TabContentManager;
import org.chromium.chrome.browser.compositor.layouts.content.TitleBitmapFactory;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.BlackHoleEventFilter;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EdgeSwipeEventFilter.ScrollDirection;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EdgeSwipeHandler;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EventFilter;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.GestureEventFilter;
import org.chromium.chrome.browser.compositor.overlays.SceneOverlay;
import org.chromium.chrome.browser.compositor.overlays.strip.StripLayoutHelperManager;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchManagementDelegate;
import org.chromium.chrome.browser.device.DeviceClassManager;
import org.chromium.chrome.browser.fullscreen.FullscreenManager;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tabmodel.EmptyTabModelObserver;
import org.chromium.chrome.browser.tabmodel.EmptyTabModelSelectorObserver;
import org.chromium.chrome.browser.tabmodel.TabCreatorManager;
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModel.TabLaunchType;
import org.chromium.chrome.browser.tabmodel.TabModel.TabSelectionType;
import org.chromium.chrome.browser.tabmodel.TabModelObserver;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.chrome.browser.tabmodel.TabModelSelector.CloseAllTabsDelegate;
import org.chromium.chrome.browser.tabmodel.TabModelSelectorObserver;
import org.chromium.chrome.browser.tabmodel.TabModelSelectorTabObserver;
import org.chromium.chrome.browser.tabmodel.TabModelUtils;
import org.chromium.chrome.browser.util.FeatureUtilities;
import org.chromium.chrome.browser.widget.OverviewListLayout;
import org.chromium.ui.base.LocalizationUtils;
import org.chromium.ui.resources.dynamics.DynamicResourceLoader;
import java.util.List;
/**
* A {@link Layout} controller for the more complicated Chrome browser. This is currently a
* superset of {@link LayoutManagerDocument}.
*/
public class LayoutManagerChrome
extends LayoutManagerDocument implements OverviewModeBehavior, CloseAllTabsDelegate {
// Layouts
/** An {@link Layout} that should be used as the accessibility tab switcher. */
protected OverviewListLayout mOverviewListLayout;
/** A {@link Layout} that should be used when the user is swiping sideways on the toolbar. */
protected ToolbarSwipeLayout mToolbarSwipeLayout;
/** A {@link Layout} that should be used when the user is in the tab switcher. */
protected Layout mOverviewLayout;
// Event Filters
/** A {@link EventFilter} that consumes all touch events. */
protected EventFilter mBlackHoleEventFilter;
private final GestureEventFilter mGestureEventFilter;
// Event Filter Handlers
private final EdgeSwipeHandler mToolbarSwipeHandler;
// Internal State
/** A {@link TitleCache} instance that stores all title/favicon bitmaps as CC resources. */
protected TitleCache mTitleCache;
/** Responsible for building non-incognito titles. */
protected TitleBitmapFactory mStandardTitleBitmapFactory;
/** Responsible for building all incognito titles. */
protected TitleBitmapFactory mIncognitoTitleBitmapFactory;
/** Whether or not animations are enabled. This can disable certain layouts or effects. */
protected boolean mEnableAnimations = true;
private boolean mCreatingNtp;
private final ObserverList<OverviewModeObserver> mOverviewModeObservers;
private TabModelSelectorObserver mTabModelSelectorObserver;
private TabModelObserver mTabModelObserver;
private TabModelSelectorTabObserver mTabSelectorTabObserver;
    /**
     * Handles {@link TabModelObserver} events and translates them into layout
     * notifications (tab created/closed/moved/selected). Extending classes
     * will need to override the relevant callbacks to add new functionality.
     */
    protected class LayoutManagerTabModelObserver extends EmptyTabModelObserver {
        @Override
        public void didSelectTab(Tab tab, TabSelectionType type, int lastId) {
            // Only forward real selection changes (id actually changed).
            if (tab.getId() != lastId) tabSelected(tab.getId(), lastId, tab.isIncognito());
        }

        @Override
        public void willAddTab(Tab tab, TabLaunchType type) {
            // Open the new tab
            if (type == TabLaunchType.FROM_INSTANT || type == TabLaunchType.FROM_RESTORE) return;

            tabCreating(getTabModelSelector().getCurrentTabId(), tab.getUrl(), tab.isIncognito());
        }

        @Override
        public void didAddTab(Tab tab, TabLaunchType launchType) {
            int tabId = tab.getId();
            if (launchType != TabLaunchType.FROM_INSTANT
                    && launchType != TabLaunchType.FROM_RESTORE) {
                boolean incognito = tab.isIncognito();
                boolean willBeSelected = launchType != TabLaunchType.FROM_LONGPRESS_BACKGROUND
                        || (!getTabModelSelector().isIncognitoSelected() && incognito);
                // Seed the creation animation origin: default to the RTL-aware
                // screen edge, and use the last tap position for tap-driven
                // launches (everything except menu/overview launches).
                float lastTapX = LocalizationUtils.isLayoutRtl() ? mLastContentWidthDp : 0.f;
                float lastTapY = 0.f;
                if (launchType != TabLaunchType.FROM_MENU_OR_OVERVIEW) {
                    // Correct the tap's y for the browser-controls offset.
                    float heightDelta =
                            mLastFullscreenViewportDp.height() - mLastVisibleViewportDp.height();
                    lastTapX = mPxToDp * mLastTapX;
                    lastTapY = mPxToDp * mLastTapY - heightDelta;
                }

                tabCreated(tabId, getTabModelSelector().getCurrentTabId(), launchType, incognito,
                        willBeSelected, lastTapX, lastTapY);
            }
        }

        @Override
        public void didCloseTab(Tab tab) {
            tabClosed(tab);
        }

        @Override
        public void tabPendingClosure(Tab tab) {
            tabClosed(tab);
        }

        @Override
        public void tabClosureUndone(Tab tab) {
            tabClosureCancelled(tab.getId(), tab.isIncognito());
        }

        @Override
        public void tabClosureCommitted(Tab tab) {
            // Qualified call: this observer method shadows the outer-class name.
            LayoutManagerChrome.this.tabClosureCommitted(tab.getId(), tab.isIncognito());
        }

        @Override
        public void didMoveTab(Tab tab, int newIndex, int curIndex) {
            tabMoved(tab.getId(), curIndex, newIndex, tab.isIncognito());
        }
    }
    /**
     * Delegate of a factory to create an overview layout.
     */
    public interface OverviewLayoutFactoryDelegate {
        /**
         * Creates the {@link Layout} used as the tab switcher (overview).
         *
         * @param context The current Android's context.
         * @param updateHost The {@link LayoutUpdateHost} view for this layout.
         * @param renderHost The {@link LayoutRenderHost} view for this layout.
         * @param eventFilter The {@link EventFilter} that is needed for this view.
         * @return A newly created overview {@link Layout}.
         */
        Layout createOverviewLayout(Context context, LayoutUpdateHost updateHost,
                LayoutRenderHost renderHost, EventFilter eventFilter);
    }
    /**
     * Creates the {@link LayoutManagerChrome} instance.
     * @param host A {@link LayoutManagerHost} instance.
     * @param overviewLayoutFactoryDelegate A {@link OverviewLayoutFactoryDelegate} instance; may
     *            be null, in which case no overview layout is created (only the
     *            accessibility list layout is available).
     */
    public LayoutManagerChrome(
            LayoutManagerHost host, OverviewLayoutFactoryDelegate overviewLayoutFactoryDelegate) {
        super(host);
        Context context = host.getContext();
        LayoutRenderHost renderHost = host.getLayoutRenderHost();

        // Set up state
        mStandardTitleBitmapFactory =
                new TitleBitmapFactory(context, false, R.drawable.default_favicon);
        mIncognitoTitleBitmapFactory =
                new TitleBitmapFactory(context, true, R.drawable.default_favicon_white);
        mOverviewModeObservers = new ObserverList<OverviewModeObserver>();

        // Build Event Filter Handlers
        mToolbarSwipeHandler = new ToolbarSwipeHandler(this);

        // Build Event Filters (filters must exist before the layouts that use them)
        mBlackHoleEventFilter = new BlackHoleEventFilter(context, this);
        mGestureEventFilter = new GestureEventFilter(context, this, mGestureHandler);

        // Build Layouts
        mOverviewListLayout =
                new OverviewListLayout(context, this, renderHost, mBlackHoleEventFilter);
        mToolbarSwipeLayout =
                new ToolbarSwipeLayout(context, this, renderHost, mBlackHoleEventFilter);
        if (overviewLayoutFactoryDelegate != null) {
            mOverviewLayout = overviewLayoutFactoryDelegate.createOverviewLayout(
                    context, this, renderHost, mGestureEventFilter);
        }
    }
    /**
     * Factory hook for the tab-model observer registered in {@link #init};
     * subclasses may override to supply an extended observer.
     *
     * @return The {@link TabModelObserver} instance this class should be using.
     */
    protected LayoutManagerTabModelObserver createTabModelObserver() {
        return new LayoutManagerTabModelObserver();
    }
    /**
     * Collects the virtual views (compositor-rendered accessibility views) of
     * the active layout into the supplied list.
     *
     * @param views Output list that receives the active layout's virtual views.
     */
    @Override
    public void getVirtualViews(List<VirtualView> views) {
        if (getActiveLayout() != null) {
            getActiveLayout().getVirtualViews(views);
        }
    }
    /**
     * @return The {@link EdgeSwipeHandler} responsible for processing swipe events for the
     *         toolbar (created once in the constructor).
     */
    @Override
    public EdgeSwipeHandler getTopSwipeHandler() {
        return mToolbarSwipeHandler;
    }
    /**
     * Wires this manager to the tab model: initializes the owned layouts with
     * the selector, then registers the selector/model/tab observers that
     * translate model events into layout notifications.
     */
    @Override
    public void init(TabModelSelector selector, TabCreatorManager creator,
            TabContentManager content, ViewGroup androidContentContainer,
            ContextualSearchManagementDelegate contextualSearchDelegate,
            DynamicResourceLoader dynamicResourceLoader) {
        // TODO: TitleCache should be a part of the ResourceManager.
        mTitleCache = mHost.getTitleCache();

        // Initialize Layouts
        mToolbarSwipeLayout.setTabModelSelector(selector, content);
        mOverviewListLayout.setTabModelSelector(selector, content);
        if (mOverviewLayout != null) mOverviewLayout.setTabModelSelector(selector, content);

        super.init(selector, creator, content, androidContentContainer, contextualSearchDelegate,
                dynamicResourceLoader);

        // Keep the layouts in sync with tab-model switches (normal <-> incognito).
        mTabModelSelectorObserver = new EmptyTabModelSelectorObserver() {
            @Override
            public void onTabModelSelected(TabModel newModel, TabModel oldModel) {
                tabModelSwitched(newModel.isIncognito());
            }
        };
        selector.addObserver(mTabModelSelectorObserver);
        selector.setCloseAllTabsDelegate(this);

        mTabModelObserver = createTabModelObserver();
        for (TabModel model : selector.getModels()) model.addObserver(mTabModelObserver);

        // Forward per-tab load/crash events to the active layout.
        mTabSelectorTabObserver = new TabModelSelectorTabObserver(selector) {
            @Override
            public void onLoadStarted(Tab tab) {
                tabLoadStarted(tab.getId(), tab.isIncognito());
            }

            @Override
            public void onLoadStopped(Tab tab) {
                tabLoadFinished(tab.getId(), tab.isIncognito());
            }

            @Override
            public void onPageLoadStarted(Tab tab, String url) {
                tabPageLoadStarted(tab.getId(), tab.isIncognito());
            }

            @Override
            public void onPageLoadFinished(Tab tab) {
                tabPageLoadFinished(tab.getId(), tab.isIncognito());
            }

            @Override
            public void onPageLoadFailed(Tab tab, int errorCode) {
                // Treat a failed load like a finished one for layout purposes.
                tabPageLoadFinished(tab.getId(), tab.isIncognito());
            }

            @Override
            public void onCrash(Tab tab, boolean sadTabShown) {
                tabPageLoadFinished(tab.getId(), tab.isIncognito());
            }
        };
    }
    /**
     * Tears down this manager: unregisters all observers registered in
     * {@link #init} and destroys the layouts this class owns. The instance is
     * unusable afterwards.
     */
    @Override
    public void destroy() {
        super.destroy();

        if (mTabModelSelectorObserver != null) {
            getTabModelSelector().removeObserver(mTabModelSelectorObserver);
        }
        if (mTabModelObserver != null) {
            // The model observer was added to every model; remove it from each.
            for (TabModel model : getTabModelSelector().getModels()) {
                model.removeObserver(mTabModelObserver);
            }
        }
        if (mTabSelectorTabObserver != null) mTabSelectorTabObserver.destroy();
        mOverviewModeObservers.clear();

        if (mOverviewLayout != null) {
            mOverviewLayout.destroy();
            mOverviewLayout = null;
        }
        mOverviewListLayout.destroy();
        mToolbarSwipeLayout.destroy();
    }
    /**
     * Registers {@code helper} on every layout owned by this manager so the
     * overlay is present regardless of which layout is active.
     */
    @Override
    protected void addGlobalSceneOverlay(SceneOverlay helper) {
        super.addGlobalSceneOverlay(helper);
        mOverviewListLayout.addSceneOverlay(helper);
        mToolbarSwipeLayout.addSceneOverlay(helper);
        if (mOverviewLayout != null) mOverviewLayout.addSceneOverlay(helper);
    }
    /**
     * Meant to be overridden by child classes for when they need to extend the toolbar side swipe
     * functionality.
     * @param provider A {@link LayoutProvider} instance.
     * @return A {@link ToolbarSwipeHandler} instance that will be used by internal layouts.
     */
    protected ToolbarSwipeHandler createToolbarSwipeHandler(LayoutProvider provider) {
        return new ToolbarSwipeHandler(provider);
    }
/**
* Simulates a click on the view at the specified pixel offset
* from the top left of the view.
* This is used by UI tests.
* @param x Coordinate of the click in dp.
* @param y Coordinate of the click in dp.
*/
@VisibleForTesting
public void simulateClick(float x, float y) {
if (getActiveLayout() != null) getActiveLayout().click(time(), x, y);
}
/**
* Simulates a drag and issues Up-event to commit the drag.
* @param x Coordinate to start the Drag from in dp.
* @param y Coordinate to start the Drag from in dp.
* @param dX Amount of drag in X direction in dp.
* @param dY Amount of drag in Y direction in dp.
*/
@VisibleForTesting
public void simulateDrag(float x, float y, float dX, float dY) {
if (getActiveLayout() != null) {
getActiveLayout().onDown(0, x, y);
getActiveLayout().drag(0, x, y, dX, dY);
getActiveLayout().onUpOrCancel(time());
}
}
private boolean isOverviewLayout(Layout layout) {
return layout != null && (layout == mOverviewLayout || layout == mOverviewListLayout);
}
@Override
protected void startShowing(Layout layout, boolean animate) {
mCreatingNtp = false;
super.startShowing(layout, animate);
Layout layoutBeingShown = getActiveLayout();
// Check if a layout is showing that should hide the contextual search bar.
if (mContextualSearchDelegate != null
&& (isOverviewLayout(layoutBeingShown)
|| layoutBeingShown == mToolbarSwipeLayout)) {
mContextualSearchDelegate.dismissContextualSearchBar();
}
// Check if we should notify OverviewModeObservers.
if (isOverviewLayout(layoutBeingShown)) {
boolean showToolbar =
!mEnableAnimations || getTabModelSelector().getCurrentModel().getCount() <= 0;
for (OverviewModeObserver observer : mOverviewModeObservers) {
observer.onOverviewModeStartedShowing(showToolbar);
}
}
}
@Override
public void startHiding(int nextTabId, boolean hintAtTabSelection) {
super.startHiding(nextTabId, hintAtTabSelection);
Layout layoutBeingHidden = getActiveLayout();
if (isOverviewLayout(layoutBeingHidden)) {
boolean showToolbar = true;
if (mEnableAnimations && layoutBeingHidden == mOverviewLayout) {
final LayoutTab tab = layoutBeingHidden.getLayoutTab(nextTabId);
showToolbar = tab != null ? !tab.showToolbar() : true;
}
boolean creatingNtp = layoutBeingHidden == mOverviewLayout && mCreatingNtp;
for (OverviewModeObserver observer : mOverviewModeObservers) {
observer.onOverviewModeStartedHiding(showToolbar, creatingNtp);
}
}
}
@Override
public void doneShowing() {
super.doneShowing();
if (isOverviewLayout(getActiveLayout())) {
for (OverviewModeObserver observer : mOverviewModeObservers) {
observer.onOverviewModeFinishedShowing();
}
}
}
@Override
public void doneHiding() {
Layout layoutBeingHidden = getActiveLayout();
if (getNextLayout() == getDefaultLayout()) {
Tab tab = getTabModelSelector() != null ? getTabModelSelector().getCurrentTab() : null;
emptyCachesExcept(tab != null ? tab.getId() : Tab.INVALID_TAB_ID);
}
super.doneHiding();
if (isOverviewLayout(layoutBeingHidden)) {
for (OverviewModeObserver observer : mOverviewModeObservers) {
observer.onOverviewModeFinishedHiding();
}
}
}
@VisibleForTesting
public void tabSelected(int tabId, int prevId, boolean incognito) {
// Update the model here so we properly set the right selected TabModel.
if (getActiveLayout() != null) {
getActiveLayout().onTabSelected(time(), tabId, prevId, incognito);
}
}
/**
* Should be called when a tab created event is triggered.
* @param id The id of the tab that was created.
* @param sourceId The id of the creating tab if any.
* @param launchType How the tab was launched.
* @param incognito Whether or not the created tab is incognito.
* @param willBeSelected Whether or not the created tab will be selected.
* @param originX The x coordinate of the action that created this tab in dp.
* @param originY The y coordinate of the action that created this tab in dp.
*/
protected void tabCreated(int id, int sourceId, TabLaunchType launchType, boolean incognito,
boolean willBeSelected, float originX, float originY) {
Tab newTab = TabModelUtils.getTabById(getTabModelSelector().getModel(incognito), id);
mCreatingNtp = newTab != null && newTab.isNativePage();
int newIndex = TabModelUtils.getTabIndexById(getTabModelSelector().getModel(incognito), id);
getActiveLayout().onTabCreated(
time(), id, newIndex, sourceId, incognito, !willBeSelected, originX, originY);
}
/**
* Should be called when a tab creating event is triggered (called before the tab is done being
* created).
* @param sourceId The id of the creating tab if any.
* @param url The url of the created tab.
* @param isIncognito Whether or not created tab will be incognito.
*/
protected void tabCreating(int sourceId, String url, boolean isIncognito) {
if (getActiveLayout() != null) getActiveLayout().onTabCreating(sourceId);
}
/**
* Should be called when a tab closed event is triggered.
* @param id The id of the closed tab.
* @param nextId The id of the next tab that will be visible, if any.
* @param incognito Whether or not the closed tab is incognito.
*/
protected void tabClosed(int id, int nextId, boolean incognito) {
if (getActiveLayout() != null) getActiveLayout().onTabClosed(time(), id, nextId, incognito);
}
private void tabClosed(Tab tab) {
Tab currentTab =
getTabModelSelector() != null ? getTabModelSelector().getCurrentTab() : null;
int nextTabId = currentTab != null ? currentTab.getId() : Tab.INVALID_TAB_ID;
tabClosed(tab.getId(), nextTabId, tab.isIncognito());
}
/**
* Called when a tab closure has been committed and all tab cleanup should happen.
* @param id The id of the closed tab.
* @param incognito Whether or not the closed tab is incognito.
*/
protected void tabClosureCommitted(int id, boolean incognito) {
if (getActiveLayout() != null) {
getActiveLayout().onTabClosureCommitted(time(), id, incognito);
}
}
@Override
public boolean closeAllTabsRequest(boolean incognito) {
if (!isOverviewLayout(getActiveLayout()) || !getActiveLayout().handlesCloseAll()) {
return false;
}
getActiveLayout().onTabsAllClosing(time(), incognito);
return true;
}
/**
* Called when the selected tab model has switched.
* @param incognito Whether or not the new current tab model is incognito.
*/
protected void tabModelSwitched(boolean incognito) {
if (getActiveLayout() != null) getActiveLayout().onTabModelSwitched(incognito);
}
private void tabMoved(int id, int oldIndex, int newIndex, boolean incognito) {
if (getActiveLayout() != null) {
getActiveLayout().onTabMoved(time(), id, oldIndex, newIndex, incognito);
}
}
private void tabPageLoadStarted(int id, boolean incognito) {
if (getActiveLayout() != null) getActiveLayout().onTabPageLoadStarted(id, incognito);
}
private void tabPageLoadFinished(int id, boolean incognito) {
if (getActiveLayout() != null) getActiveLayout().onTabPageLoadFinished(id, incognito);
}
private void tabLoadStarted(int id, boolean incognito) {
if (getActiveLayout() != null) getActiveLayout().onTabLoadStarted(id, incognito);
}
private void tabLoadFinished(int id, boolean incognito) {
if (getActiveLayout() != null) getActiveLayout().onTabLoadFinished(id, incognito);
}
private void tabClosureCancelled(int id, boolean incognito) {
if (getActiveLayout() != null) {
getActiveLayout().onTabClosureCancelled(time(), id, incognito);
}
}
@Override
public void initLayoutTabFromHost(int tabId) {
super.initLayoutTabFromHost(tabId);
if (getTabModelSelector() == null || getActiveLayout() == null) return;
TabModelSelector selector = getTabModelSelector();
Tab tab = selector.getTabById(tabId);
if (tab == null) return;
LayoutTab layoutTab = getExistingLayoutTab(tabId);
if (layoutTab == null) return;
if (mTitleCache != null && layoutTab.isTitleNeeded()) {
mTitleCache.put(tabId, getTitleBitmap(tab), getFaviconBitmap(tab), tab.isIncognito(),
tab.isTitleDirectionRtl());
}
}
/**
* Builds a title bitmap for a {@link Tab}. This function does not do anything in the
* general case because only the phone need to bake special resource.
*
* @param tab The tab to build the title bitmap for.
* @return The Title bitmap
*/
protected Bitmap getTitleBitmap(Tab tab) {
TitleBitmapFactory titleBitmapFactory =
tab.isIncognito() ? mIncognitoTitleBitmapFactory : mStandardTitleBitmapFactory;
return titleBitmapFactory.getTitleBitmap(mHost.getContext(), getTitleForTab(tab));
}
/**
* Comes up with a valid title to return for a tab.
* @param tab The {@link Tab} to build a title for.
* @return The title to use.
*/
protected String getTitleForTab(Tab tab) {
String title = tab.getTitle();
if (TextUtils.isEmpty(title)) title = tab.getUrl();
return title;
}
/**
* Builds a favicon bitmap for a {@link Tab}. This function does not do anything in the
* general case because only the phone need to bake special texture.
*
* @param tab The tab to build the title bitmap for.
* @return The Favicon bitmap
*/
protected Bitmap getFaviconBitmap(Tab tab) {
TitleBitmapFactory titleBitmapFactory =
tab.isIncognito() ? mIncognitoTitleBitmapFactory : mStandardTitleBitmapFactory;
return titleBitmapFactory.getFaviconBitmap(mHost.getContext(), tab.getFavicon());
}
    /**
     * @return The {@link OverviewListLayout} (accessibility tab switcher) managed by this class.
     */
    @VisibleForTesting
    public Layout getOverviewListLayout() {
        return mOverviewListLayout;
    }
    /**
     * @return The overview layout {@link Layout} managed by this class; null when no
     *         {@link OverviewLayoutFactoryDelegate} was supplied at construction.
     */
    @VisibleForTesting
    public Layout getOverviewLayout() {
        return mOverviewLayout;
    }
    /**
     * @return The {@link StripLayoutHelperManager} managed by this class; always null in this
     *         base implementation.
     */
    @VisibleForTesting
    public StripLayoutHelperManager getStripLayoutHelperManager() {
        return null;
    }
/**
* @return Whether or not to use the accessibility layout.
*/
protected boolean useAccessibilityLayout() {
return DeviceClassManager.isAccessibilityModeEnabled(mHost.getContext())
|| DeviceClassManager.enableAccessibilityLayout();
}
/**
* Show the overview {@link Layout}. This is generally a {@link Layout} that visibly represents
* all of the {@link Tab}s opened by the user.
* @param animate Whether or not to animate the transition to overview mode.
*/
public void showOverview(boolean animate) {
boolean useAccessibility = useAccessibilityLayout();
boolean accessibilityIsVisible =
useAccessibility && getActiveLayout() == mOverviewListLayout;
boolean normalIsVisible = getActiveLayout() == mOverviewLayout && mOverviewLayout != null;
// We only want to use the AccessibilityOverviewLayout if the following are all valid:
// 1. We're already showing the AccessibilityOverviewLayout OR we're using accessibility.
// 2. We're not already showing the normal OverviewLayout (or we are on a tablet, in which
// case the normal layout is always visible).
if ((accessibilityIsVisible || useAccessibility) && !normalIsVisible) {
startShowing(mOverviewListLayout, animate);
} else if (mOverviewLayout != null) {
startShowing(mOverviewLayout, animate);
}
}
/**
* Hides the current {@link Layout}, returning to the default {@link Layout}.
* @param animate Whether or not to animate the transition to the default {@link Layout}.
*/
public void hideOverview(boolean animate) {
Layout activeLayout = getActiveLayout();
if (activeLayout != null && !activeLayout.isHiding()) {
if (animate) {
activeLayout.onTabSelecting(time(), Tab.INVALID_TAB_ID);
} else {
startHiding(Tab.INVALID_TAB_ID, false);
doneHiding();
}
}
}
    /**
     * @param enabled Whether or not to allow model-reactive animations (tab creation, closing,
     *                etc.).
     */
    public void setEnableAnimations(boolean enabled) {
        mEnableAnimations = enabled;
    }
@Override
public boolean overviewVisible() {
Layout activeLayout = getActiveLayout();
return isOverviewLayout(activeLayout) && !activeLayout.isHiding();
}
    /** Registers {@code listener} for overview-mode show/hide notifications. */
    @Override
    public void addOverviewModeObserver(OverviewModeObserver listener) {
        mOverviewModeObservers.addObserver(listener);
    }
    /** Unregisters a previously added {@link OverviewModeObserver}. */
    @Override
    public void removeOverviewModeObserver(OverviewModeObserver listener) {
        mOverviewModeObservers.removeObserver(listener);
    }
/**
* A {@link EdgeSwipeHandler} meant to respond to edge events for the toolbar.
*/
protected class ToolbarSwipeHandler extends EdgeSwipeHandlerLayoutDelegate {
/**
* Creates an instance of the {@link ToolbarSwipeHandler}.
* @param provider A {@link LayoutProvider} instance.
*/
public ToolbarSwipeHandler(LayoutProvider provider) {
super(provider);
}
@Override
public void swipeStarted(ScrollDirection direction, float x, float y) {
if (direction == ScrollDirection.DOWN) {
startShowing(mOverviewLayout, true);
super.swipeStarted(direction, x, y);
} else if (direction == ScrollDirection.LEFT || direction == ScrollDirection.RIGHT) {
startShowing(mToolbarSwipeLayout, true);
super.swipeStarted(direction, x, y);
}
}
@Override
public boolean isSwipeEnabled(ScrollDirection direction) {
FullscreenManager manager = mHost.getFullscreenManager();
if (getActiveLayout() != mStaticLayout
|| !DeviceClassManager.enableToolbarSwipe(
FeatureUtilities.isDocumentMode(mHost.getContext()))
|| (manager != null && manager.getPersistentFullscreenMode())) {
return false;
}
boolean isAccessibility =
DeviceClassManager.isAccessibilityModeEnabled(mHost.getContext());
return direction == ScrollDirection.LEFT || direction == ScrollDirection.RIGHT
|| (direction == ScrollDirection.DOWN && mOverviewLayout != null
&& !isAccessibility);
}
}
    /**
     * @param id The id of the {@link Tab} to search for.
     * @return A {@link Tab} instance or {@code null} if it could not be found.
     */
    protected Tab getTabById(int id) {
        TabModelSelector selector = getTabModelSelector();
        return selector == null ? null : selector.getTabById(id);
    }
}
| |
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.dao.service;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import static org.kaaproject.kaa.server.common.dao.impl.DaoUtil.convertDtoList;
import static org.kaaproject.kaa.server.common.dao.impl.DaoUtil.getDto;
import static org.kaaproject.kaa.server.common.dao.service.Validator.isValidId;
import static org.kaaproject.kaa.server.common.dao.service.Validator.validateHash;
import static org.kaaproject.kaa.server.common.dao.service.Validator.validateId;
import static org.kaaproject.kaa.server.common.dao.service.Validator.validateObject;
import static org.kaaproject.kaa.server.common.dao.service.Validator.validateSqlId;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.lang.StringUtils;
import org.kaaproject.kaa.common.avro.GenericAvroConverter;
import org.kaaproject.kaa.common.dto.EndpointNotificationDto;
import org.kaaproject.kaa.common.dto.NotificationDto;
import org.kaaproject.kaa.common.dto.NotificationSchemaDto;
import org.kaaproject.kaa.common.dto.NotificationTypeDto;
import org.kaaproject.kaa.common.dto.UpdateNotificationDto;
import org.kaaproject.kaa.common.dto.VersionDto;
import org.kaaproject.kaa.server.common.dao.NotificationService;
import org.kaaproject.kaa.server.common.dao.exception.DatabaseProcessingException;
import org.kaaproject.kaa.server.common.dao.exception.IncorrectParameterException;
import org.kaaproject.kaa.server.common.dao.impl.EndpointNotificationDao;
import org.kaaproject.kaa.server.common.dao.impl.EndpointProfileDao;
import org.kaaproject.kaa.server.common.dao.impl.NotificationDao;
import org.kaaproject.kaa.server.common.dao.impl.NotificationSchemaDao;
import org.kaaproject.kaa.server.common.dao.impl.TopicDao;
import org.kaaproject.kaa.server.common.dao.model.EndpointNotification;
import org.kaaproject.kaa.server.common.dao.model.EndpointProfile;
import org.kaaproject.kaa.server.common.dao.model.Notification;
import org.kaaproject.kaa.server.common.dao.model.sql.NotificationSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.Topic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;
@Service
@Transactional
public class NotificationServiceImpl implements NotificationService {

  private static final Logger LOG = LoggerFactory.getLogger(NotificationServiceImpl.class);

  /** Maximum DAO wait time in seconds, read from the {@code sql_dao} property map. */
  @Value("#{sql_dao[dao_max_wait_time]}")
  private int waitSeconds;

  /**
   * Default notification time-to-live: seven days, in milliseconds.
   *
   * <p>NOTE(review): {@code @Autowired(required = false)} on a primitive field looks like a
   * leftover; Spring cannot inject an {@code int} by type, so in practice the initializer value
   * is used — confirm whether this was meant to be {@code @Value}.
   */
  @Autowired(required = false)
  private int ttl = 7 * 24 * 3600 * 1000;

  @Autowired
  private TopicDao<Topic> topicDao;

  @Autowired
  private NotificationSchemaDao<NotificationSchema> notificationSchemaDao;

  // The following DAOs are wired through the explicit setters at the bottom of this class.
  private EndpointProfileDao<EndpointProfile> endpointProfileDao;
  private NotificationDao<Notification> notificationDao;
  private EndpointNotificationDao<EndpointNotification> unicastNotificationDao;

  /**
   * Persists a notification schema. A blank id creates a new schema versioned one past the
   * latest schema of the same type for the application; a non-blank id updates the editable
   * fields of the stored schema.
   *
   * @param notificationSchemaDto the schema to save
   * @return the saved schema DTO
   * @throws IncorrectParameterException if the schema object, its type, or its id is invalid
   */
  @Override
  public NotificationSchemaDto saveNotificationSchema(NotificationSchemaDto notificationSchemaDto) {
    validateNotificationSchemaObject(notificationSchemaDto);
    String id = notificationSchemaDto.getId();
    if (StringUtils.isBlank(id)) {
      notificationSchemaDto.setId(null);
      notificationSchemaDto.setCreatedTime(System.currentTimeMillis());
      NotificationTypeDto type = notificationSchemaDto.getType();
      if (type == null) {
        throw new IncorrectParameterException(
            "Invalid Notification type in Notification Schema object.");
      }
      NotificationSchema foundSchema = notificationSchemaDao.findLatestNotificationSchemaByAppId(
          notificationSchemaDto.getApplicationId(), type);
      if (foundSchema != null) {
        // Continue the version sequence from the latest stored schema of this type.
        notificationSchemaDto.setVersion(foundSchema.getVersion() + 1);
      } else {
        // First schema of this type for the application.
        notificationSchemaDto.incrementVersion();
      }
    } else {
      // Update path: merge the caller's editable fields into the stored schema.
      NotificationSchemaDto oldNotificationSchemaDto = getDto(notificationSchemaDao.findById(id));
      if (oldNotificationSchemaDto == null) {
        LOG.error("Can't find notification schema with given id [{}].", id);
        throw new IncorrectParameterException("Invalid notification schema id: " + id);
      }
      oldNotificationSchemaDto.editFields(notificationSchemaDto);
      notificationSchemaDto = oldNotificationSchemaDto;
    }
    return getDto(notificationSchemaDao.save(new NotificationSchema(notificationSchemaDto)));
  }

  /**
   * Saves a topic notification: resolves the schema, re-encodes the JSON body as Avro binary,
   * applies the default TTL when no expiration is set, and stamps the next topic sequence number.
   *
   * @param dto the notification to save
   * @return an update descriptor carrying the saved payload, or {@code null} if the topic is
   *     unknown
   * @throws IncorrectParameterException if the schema or topic id is blank
   * @throws DatabaseProcessingException if the schema cannot be found or the body cannot be
   *     serialized
   */
  @Override
  public UpdateNotificationDto<NotificationDto> saveNotification(NotificationDto dto) {
    validateObject(dto, "Can't save notification. Invalid notification object");
    dto.setId(null);
    UpdateNotificationDto<NotificationDto> updateNotificationDto = null;
    String schemaId = dto.getSchemaId();
    String topicId = dto.getTopicId();
    if (isNotBlank(schemaId) && isNotBlank(topicId)) {
      NotificationSchema schema = notificationSchemaDao.findById(schemaId);
      if (schema != null) {
        dto.setNfVersion(schema.getVersion());
        dto.setApplicationId(schema.getApplicationId());
        dto.setType(schema.getType());
      } else {
        throw new DatabaseProcessingException("Can't find notification schema by id " + schemaId);
      }
      try {
        // Re-encode the JSON body into compact Avro binary form before persisting.
        dto.setBody(serializeNotificationBody(dto, schema));
      } catch (IOException ex) {
        LOG.error("Can't serialize notification body using schema. ", ex);
        throw new DatabaseProcessingException("Can't serialize notification body using schema: "
            + schemaId);
      }
      long currentTime = new GregorianCalendar(TimeZone.getTimeZone("UTC")).getTimeInMillis();
      Date expiredAt = dto.getExpiredAt();
      // Default the expiration to now + ttl when the caller did not provide one.
      dto.setExpiredAt(expiredAt != null ? expiredAt : new Date(currentTime + ttl));
      dto.setLastTimeModify(new Date(currentTime));
      NotificationDto notificationDto = saveNotificationAndIncTopicSecNum(dto);
      if (notificationDto != null) {
        updateNotificationDto = new UpdateNotificationDto<>();
        updateNotificationDto.setAppId(notificationDto.getApplicationId());
        updateNotificationDto.setTopicId(topicId);
        updateNotificationDto.setPayload(notificationDto);
      }
      return updateNotificationDto;
    } else {
      throw new IncorrectParameterException(
          "Incorrect notification object notification schema id is empty");
    }
  }

  /**
   * Sends a notification and increments a topic sequence number.
   *
   * @param dto notification
   * @return saved notification, or {@code null} if the topic cannot be found
   */
  public NotificationDto saveNotificationAndIncTopicSecNum(NotificationDto dto) {
    NotificationDto notificationDto = null;
    Topic topic = topicDao.getNextSeqNumber(dto.getTopicId());
    if (topic != null) {
      // Stamp the notification with the topic's freshly incremented sequence number.
      dto.setSecNum(topic.getSequenceNumber());
      Notification savedDto = notificationDao.save(dto);
      notificationDto = savedDto != null ? savedDto.toDto() : null;
    } else {
      // Fixed: include the offending id (the original message carried no context).
      LOG.warn("Can't find topic by id [{}].", dto.getTopicId());
    }
    return notificationDto;
  }

  /**
   * Converts the notification's UTF-8 JSON body into Avro binary encoded with the schema's
   * CTL schema body.
   *
   * @throws IOException if the JSON body cannot be decoded or re-encoded
   */
  private byte[] serializeNotificationBody(NotificationDto nf, NotificationSchema nfSchema)
      throws IOException {
    GenericAvroConverter<GenericRecord> converter =
        new GenericAvroConverter<>(nfSchema.getCtlSchema().getBody());
    String notificationJson = new String(nf.getBody(), StandardCharsets.UTF_8);
    GenericRecord notificationAvro = converter.decodeJson(notificationJson);
    return converter.encode(notificationAvro);
  }

  /**
   * Looks up a notification by id; blank ids are tolerated and yield {@code null}.
   */
  @Override
  public NotificationDto findNotificationById(String id) {
    NotificationDto dto = null;
    LOG.debug("Find notification by id [{}] ", id);
    if (StringUtils.isNotBlank(id)) {
      dto = getDto(notificationDao.findById(id));
    }
    LOG.trace("Found notification object {} by id [{}] ", dto, id);
    return dto;
  }

  /** Returns all notifications published to the given topic. */
  @Override
  public List<NotificationDto> findNotificationsByTopicId(String topicId) {
    validateId(topicId, "Can't find notifications. Invalid topic id: " + topicId);
    return convertDtoList(notificationDao.findNotificationsByTopicId(topicId));
  }

  /** Returns the notification schema with the given id, or {@code null} if absent. */
  @Override
  public NotificationSchemaDto findNotificationSchemaById(String id) {
    validateId(id, "Can't find notification schema. Invalid notification schema id: " + id);
    return getDto(notificationSchemaDao.findById(id));
  }

  /** Returns all notification schemas registered for the application. */
  @Override
  public List<NotificationSchemaDto> findNotificationSchemasByAppId(String appId) {
    validateId(appId, "Can't find notification schemas. Invalid application id: " + appId);
    return convertDtoList(notificationSchemaDao.findNotificationSchemasByAppId(appId));
  }

  /** Returns version descriptors of the application's USER-type notification schemas. */
  @Override
  public List<VersionDto> findUserNotificationSchemasByAppId(String applicationId) {
    validateId(applicationId, "Can't find schemas. Invalid application id: " + applicationId);
    List<NotificationSchema> notificationSchemas = notificationSchemaDao
        .findNotificationSchemasByAppIdAndType(applicationId, NotificationTypeDto.USER);
    List<VersionDto> schemas = new ArrayList<>();
    for (NotificationSchema notificationSchema : notificationSchemas) {
      schemas.add(notificationSchema.toVersionDto());
    }
    return schemas;
  }

  /** Returns version descriptors of every notification schema of the application. */
  @Override
  public List<VersionDto> findNotificationSchemaVersionsByAppId(
      String applicationId) {
    validateId(applicationId, "Can't find notification schema versions. Invalid application id: "
        + applicationId);
    List<NotificationSchema> notificationSchemas =
        notificationSchemaDao.findNotificationSchemasByAppId(applicationId);
    List<VersionDto> schemas = new ArrayList<>();
    for (NotificationSchema notificationSchema : notificationSchemas) {
      schemas.add(notificationSchema.toVersionDto());
    }
    return schemas;
  }

  /**
   * Removes all notification schemas of the application, cascading removal of the
   * application's unicast notifications first.
   */
  @Override
  public void removeNotificationSchemasByAppId(String appId) {
    validateId(appId, "Can't remove notification schemas. Invalid application id: " + appId);
    LOG.debug("Cascade remove corresponding notification to application id [{}]", appId);
    unicastNotificationDao.removeNotificationsByAppId(appId);
    notificationSchemaDao.removeNotificationSchemasByAppId(appId);
  }

  /**
   * Returns the topic's notifications starting from the given sequence number that match the
   * supplied system/user notification schema versions.
   */
  @Override
  public List<NotificationDto> findNotificationsByTopicIdAndVersionAndStartSecNum(
      String topicId, int seqNum, int sysNfVersion, int userNfVersion) {
    validateSqlId(topicId, "Can't find notifications. Invalid topic id: " + topicId);
    return convertDtoList(notificationDao.findNotificationsByTopicIdAndVersionAndStartSecNum(
        topicId, seqNum, sysNfVersion, userNfVersion));
  }

  /** Returns the application's notification schemas of the given type. */
  @Override
  public List<NotificationSchemaDto> findNotificationSchemasByAppIdAndType(
      String appId, NotificationTypeDto type) {
    validateId(appId, "Can't find notification schemas. Invalid application id: " + appId);
    return convertDtoList(notificationSchemaDao.findNotificationSchemasByAppIdAndType(appId, type));
  }

  /** Returns the application's notification schema of the given type and version. */
  @Override
  public NotificationSchemaDto findNotificationSchemaByAppIdAndTypeAndVersion(
      String appId, NotificationTypeDto type, int majorVersion) {
    validateId(appId, "Can't find notification schema. Invalid application id: " + appId);
    return getDto(notificationSchemaDao.findNotificationSchemasByAppIdAndTypeAndVersion(
        appId, type, majorVersion));
  }

  /** Returns the unicast notification with the given id, or {@code null} if absent. */
  @Override
  public EndpointNotificationDto findUnicastNotificationById(String id) {
    validateId(id, "Can't find unicast notification. Invalid id " + id);
    return getDto(unicastNotificationDao.findById(id));
  }

  /**
   * Saves a unicast (single-endpoint) notification. Verifies the target endpoint exists and is
   * subscribed to the topic, resolves the schema, serializes the body to Avro, and applies the
   * default TTL when no expiration is provided.
   *
   * @param dto the endpoint notification to save
   * @return an update descriptor with the saved payload, or {@code null} if nothing was saved
   * @throws IncorrectParameterException if the schema id, topic id, or endpoint key hash is
   *     invalid
   * @throws DatabaseProcessingException if the endpoint or schema cannot be resolved, the
   *     endpoint is not subscribed to the topic, or the body cannot be serialized
   */
  @Override
  public UpdateNotificationDto<EndpointNotificationDto> saveUnicastNotification(
      EndpointNotificationDto dto) {
    validateObject(dto, "Can't save unicast notification. Invalid endpoint notification object.");
    UpdateNotificationDto<EndpointNotificationDto> updateNotificationDto = null;
    NotificationDto notificationDto = dto.getNotificationDto();
    String schemaId = notificationDto.getSchemaId();
    String topicId = notificationDto.getTopicId();
    if (isBlank(schemaId)) {
      throw new IncorrectParameterException("Invalid notification schema id: " + schemaId);
    } else if (isBlank(topicId)) {
      // Fixed: report the offending topic id (previously concatenated schemaId here).
      throw new IncorrectParameterException("Invalid notification topic id: " + topicId);
    } else {
      byte[] endpointKeyHash = dto.getEndpointKeyHash();
      if (endpointKeyHash != null) {
        EndpointProfile ep = endpointProfileDao.findByKeyHash(endpointKeyHash);
        if (ep == null) {
          // Arrays.toString renders the hash content; plain + on byte[] printed "[B@...".
          throw new DatabaseProcessingException("Can't find endpoint profile by hash "
              + Arrays.toString(endpointKeyHash));
        }
        if (ep.getSubscriptions() == null || !ep.getSubscriptions().contains(topicId)) {
          //TODO Error code?
          throw new DatabaseProcessingException("Endpoint profile is not subscribed to this topic");
        }
      } else {
        throw new IncorrectParameterException("Invalid endpointKeyHash: "
            + Arrays.toString(endpointKeyHash));
      }
      notificationDto.setId(null);
      notificationDto.setTopicId(topicId);
      // Unicast notifications are not part of a topic sequence.
      notificationDto.setSecNum(-1);
      NotificationSchema schema = notificationSchemaDao.findById(schemaId);
      if (schema != null) {
        notificationDto.setNfVersion(schema.getVersion());
        notificationDto.setApplicationId(schema.getApplicationId());
        notificationDto.setType(schema.getType());
        try {
          notificationDto.setBody(serializeNotificationBody(notificationDto, schema));
        } catch (IOException ex) {
          LOG.error("Can't serialize notification body using schema. ", ex);
          throw new DatabaseProcessingException("Can't serialize notification body using schema: "
              + schemaId);
        }
      } else {
        throw new DatabaseProcessingException("Can't find notification schema by id " + schemaId);
      }
      long currentTime = new GregorianCalendar(TimeZone.getTimeZone("UTC")).getTimeInMillis();
      Date expiredAt = notificationDto.getExpiredAt();
      notificationDto.setExpiredAt(expiredAt != null ? expiredAt : new Date(currentTime + ttl));
      notificationDto.setLastTimeModify(new Date(currentTime));
      EndpointNotificationDto unicast = getDto(unicastNotificationDao.save(dto));
      if (unicast != null && unicast.getNotificationDto() != null) {
        LOG.trace("Saved unicast notifications {}", unicast);
        updateNotificationDto = new UpdateNotificationDto<>();
        NotificationDto savedDto = unicast.getNotificationDto();
        updateNotificationDto.setAppId(savedDto.getApplicationId());
        updateNotificationDto.setTopicId(savedDto.getTopicId());
        updateNotificationDto.setPayload(unicast);
      }
      return updateNotificationDto;
    }
  }

  /** Returns all unicast notifications addressed to the endpoint with the given key hash. */
  @Override
  public List<EndpointNotificationDto> findUnicastNotificationsByKeyHash(final byte[] keyHash) {
    validateHash(keyHash, "Can't find unicast notification. Invalid key hash "
        + Arrays.toString(keyHash));
    return convertDtoList(unicastNotificationDao.findNotificationsByKeyHash(keyHash));
  }

  /** Removes all unicast notifications addressed to the endpoint with the given key hash. */
  @Override
  public void removeUnicastNotificationsByKeyHash(final byte[] keyHash) {
    validateHash(keyHash, "Can't remove unicast notification. Invalid key hash "
        + Arrays.toString(keyHash));
    unicastNotificationDao.removeNotificationsByKeyHash(keyHash);
  }

  /** Removes the unicast notification with the given id. */
  @Override
  public void removeUnicastNotificationById(String id) {
    validateId(id, "Can't remove unicast notification. Invalid id " + id);
    unicastNotificationDao.removeById(id);
  }

  /**
   * Validates a notification schema object before save.
   *
   * <p>NOTE(review): the condition {@code isBlank(appId) && !isValidId(appId)} lets a non-blank
   * but malformed application id pass validation; an {@code ||} between the first two checks may
   * have been intended — confirm before changing, as callers may rely on the lenient behavior.
   */
  private void validateNotificationSchemaObject(NotificationSchemaDto dto) {
    validateObject(dto, "Invalid notification schema object");
    if (isBlank(dto.getApplicationId()) && !isValidId(dto.getApplicationId())
        || dto.getType() == null) {
      throw new IncorrectParameterException(
          "Invalid notification schema object. Check type or applicationId.");
    }
  }

  /** Setter injection point for the endpoint profile DAO. */
  public void setEndpointProfileDao(EndpointProfileDao<EndpointProfile> endpointProfileDao) {
    this.endpointProfileDao = endpointProfileDao;
  }

  /** Setter injection point for the notification DAO. */
  public void setNotificationDao(NotificationDao<Notification> notificationDao) {
    this.notificationDao = notificationDao;
  }

  /** Setter injection point for the unicast (endpoint) notification DAO. */
  public void setUnicastNotificationDao(
      EndpointNotificationDao<EndpointNotification> unicastNotificationDao) {
    this.unicastNotificationDao = unicastNotificationDao;
  }
}
| |
/*
* Copyright 2012-2014, Continuuity, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.continuuity.loom.scheduler;
import com.continuuity.loom.Entities;
import com.continuuity.loom.TestHelper;
import com.continuuity.loom.cluster.Cluster;
import com.continuuity.loom.common.conf.Constants;
import com.continuuity.loom.common.queue.Element;
import com.continuuity.loom.common.queue.TrackingQueue;
import com.continuuity.loom.http.ServiceTestBase;
import com.continuuity.loom.http.request.FinishTaskRequest;
import com.continuuity.loom.http.request.TakeTaskRequest;
import com.continuuity.loom.scheduler.callback.CallbackData;
import com.continuuity.loom.scheduler.task.ClusterJob;
import com.continuuity.loom.scheduler.task.ClusterTask;
import com.continuuity.loom.scheduler.task.JobId;
import com.continuuity.loom.scheduler.task.SchedulableTask;
import com.continuuity.loom.scheduler.task.TaskId;
import com.google.common.base.Objects;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.Lists;
import com.google.common.collect.Multiset;
import com.google.gson.JsonObject;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Integration tests for the cluster scheduling pipeline: {@code ClusterScheduler},
 * {@code CallbackScheduler} and {@code JobScheduler}, driven against the queues, stores and
 * mock cluster callback supplied by {@code ServiceTestBase}, with the test playing the
 * provisioner role over HTTP (take/finish task requests).
 */
public class SchedulerTest extends ServiceTestBase {

  // Example cluster and its CLUSTER_CREATE job, recreated before every test.
  private static Cluster cluster;
  private static ClusterJob job;

  /** Writes a fresh example cluster, its create job, and its two nodes into the stores. */
  @Before
  public void beforeTest() throws Exception {
    cluster = Entities.ClusterExample.createCluster();
    job = new ClusterJob(new JobId(cluster.getId(), 0), ClusterAction.CLUSTER_CREATE);
    cluster.setLatestJobId(job.getJobId());
    clusterStoreService.getView(cluster.getAccount()).writeCluster(cluster);
    clusterStore.writeClusterJob(job);
    clusterStore.writeNode(Entities.ClusterExample.NODE1);
    clusterStore.writeNode(Entities.ClusterExample.NODE2);
  }

  /** Drains every shared queue and the mock callback so tests remain independent. */
  @After
  public void cleanupTest() throws Exception {
    jobQueues.removeAll();
    clusterQueues.removeAll();
    solverQueues.removeAll();
    provisionerQueues.removeAll();
    callbackQueues.removeAll();
    mockClusterCallback.clear();
  }

  /**
   * End-to-end scheduling pass: enqueues a CLUSTER_CREATE, verifies the planned stages of
   * (action, service) pairs, then simulates the provisioner finishing every task while the
   * job scheduler advances the job.
   */
  @Test(timeout = 20000)
  public void testScheduler() throws Exception {
    String tenantId = cluster.getAccount().getTenantId();
    ClusterScheduler clusterScheduler = injector.getInstance(ClusterScheduler.class);
    CallbackScheduler callbackScheduler = injector.getInstance(CallbackScheduler.class);
    clusterQueues.add(tenantId, new Element(cluster.getId(), ClusterAction.CLUSTER_CREATE.name()));
    clusterScheduler.run();
    String hosts = Entities.ServiceExample.HOSTS.getName();
    String namenode = Entities.ServiceExample.NAMENODE.getName();
    String datanode = Entities.ServiceExample.DATANODE.getName();
    // Verify stages and actions. Multisets make the comparison order-insensitive within a stage.
    List<Multiset<ActionService>> expectedStages =
        ImmutableList.<Multiset<ActionService>>of(
            ImmutableMultiset.of(new ActionService("CREATE", ""), new ActionService("CREATE", "")),
            ImmutableMultiset.of(new ActionService("CONFIRM", ""), new ActionService("CONFIRM", "")),
            ImmutableMultiset.of(new ActionService("BOOTSTRAP", ""), new ActionService("BOOTSTRAP", "")),
            ImmutableMultiset.of(new ActionService("CONFIGURE", hosts), new ActionService("CONFIGURE", hosts),
                new ActionService("INSTALL", datanode),
                new ActionService("INSTALL", namenode)),
            ImmutableMultiset.of(new ActionService("CONFIGURE", namenode),
                new ActionService("CONFIGURE", datanode)),
            ImmutableMultiset.of(new ActionService("INITIALIZE", namenode)),
            ImmutableMultiset.of(new ActionService("START", namenode)),
            ImmutableMultiset.of(new ActionService("INITIALIZE", datanode)),
            ImmutableMultiset.of(new ActionService("START", datanode))
        );
    List<Multiset<ActionService>> actualStages = Lists.newArrayList();
    // The job only reaches the job queue after the start callback has run.
    waitForCallback(callbackScheduler);
    Assert.assertEquals(1, jobQueues.size(tenantId));
    String consumerId = "testJobScheduler";
    Element jobQueueElement = jobQueues.take(tenantId, consumerId);
    String jobId = jobQueueElement.getValue();
    job = clusterStore.getClusterJob(JobId.fromString(jobId));
    // Walk the job's stages, collecting each stage's (action, service) pairs.
    while (true) {
      Multiset<ActionService> actionServices = HashMultiset.create();
      for (String taskId : job.getCurrentStage()) {
        ClusterTask task = clusterStore.getClusterTask(TaskId.fromString(taskId));
        actionServices.add(new ActionService(task.getTaskName().name(), task.getService()));
      }
      actualStages.add(actionServices);
      if (!job.hasNextStage()) {
        break;
      }
      job.advanceStage();
    }
    // 4th and 5th stage get deduped, hence merging them back for comparison
    Multiset<ActionService> actionServices = actualStages.remove(3);
    actualStages.get(3).addAll(actionServices);
    Assert.assertEquals(expectedStages, actualStages);
    jobQueues.recordProgress(consumerId, tenantId, jobQueueElement.getId(),
        TrackingQueue.ConsumingStatus.FINISHED_SUCCESSFULLY, "");
    // Add the job back into the jobQueues, and run job scheduler
    jobQueues.add(tenantId, new Element(jobId));
    JobScheduler jobScheduler = injector.getInstance(JobScheduler.class);
    jobScheduler.run();
    Assert.assertEquals(0, jobQueues.size(tenantId));
    // Two tasks should have been submitted for provisioning.
    TakeTaskRequest takeRequest = new TakeTaskRequest("consumer1", PROVISIONER_ID, tenantId);
    SchedulableTask task = TestHelper.takeTask(getLoomUrl(), takeRequest);
    JsonObject result = new JsonObject();
    Map<String, String> ipAddresses = ImmutableMap.of("access", "123.456.789.123");
    FinishTaskRequest finishRequest =
        new FinishTaskRequest("consumer1", PROVISIONER_ID, tenantId, task.getTaskId(),
            null, null, 0, null, ipAddresses, result);
    TestHelper.finishTask(getLoomUrl(), finishRequest);
    task = TestHelper.takeTask(getLoomUrl(), takeRequest);
    result = new JsonObject();
    ipAddresses = ImmutableMap.of("access", "456.789.123.123");
    finishRequest = new FinishTaskRequest("consumer1", PROVISIONER_ID, tenantId,
        task.getTaskId(), null, null, 0, null, ipAddresses, result);
    TestHelper.finishTask(getLoomUrl(), finishRequest);
    TestHelper.takeTask(getLoomUrl(), takeRequest);
    Assert.assertEquals(2, jobQueues.size(tenantId));
    jobScheduler.run();
    jobScheduler.run();
    jobScheduler.run();
    jobScheduler.run();
    // Finish the remaining tasks; two scheduler runs per finish pick up the completion
    // and any newly scheduled stage.
    for (int i = 0; i < 5; i++) {
      task = TestHelper.takeTask(getLoomUrl(), takeRequest);
      finishRequest = new FinishTaskRequest("consumer1", PROVISIONER_ID, tenantId,
          task.getTaskId(), null, null, 0, null, null, null);
      TestHelper.finishTask(getLoomUrl(), finishRequest);
      jobScheduler.run();
      jobScheduler.run();
    }
  }

  /** A cleanly finishing job must end with the SUCCESS callback. */
  @Test(timeout = 20000)
  public void testSuccessCallbacks() throws Exception {
    testCallbacks(false);
  }

  /** A job with a failing task must end with the FAILURE callback. */
  @Test(timeout = 20000)
  public void testFailureCallbacks() throws Exception {
    testCallbacks(true);
  }

  /**
   * When the start callback returns false, the job must never be queued and the FAILURE
   * callback must fire.
   */
  @Test(timeout = 20000)
  public void testFalseOnStartStopsJob() throws Exception {
    String tenantId = "q";
    ClusterScheduler clusterScheduler = injector.getInstance(ClusterScheduler.class);
    clusterQueues.add(tenantId, new Element(cluster.getId(), ClusterAction.CLUSTER_CREATE.name()));
    clusterScheduler.run();
    CallbackScheduler callbackScheduler = injector.getInstance(CallbackScheduler.class);
    // should be no job in the queue until the start callback runs
    Assert.assertEquals(0, jobQueues.size(tenantId));
    // tell mock callback to return false for onStart callback
    mockClusterCallback.setReturnOnStart(false);
    // wait for start callback to finish
    waitForCallback(callbackScheduler);
    Assert.assertEquals(CallbackData.Type.START, mockClusterCallback.getReceivedCallbacks().get(0).getType());
    // wait for fail callback to finish
    if (mockClusterCallback.getReceivedCallbacks().size() < 2) {
      waitForCallback(callbackScheduler);
    }
    Assert.assertEquals(CallbackData.Type.FAILURE, mockClusterCallback.getReceivedCallbacks().get(1).getType());
    // there also should not be any jobs in the queue
    Assert.assertEquals(0, jobQueues.size(tenantId));
  }

  /**
   * Runs the callback scheduler once, then polls (20 ms sleeps) until the mock callback
   * records at least one new invocation. Relies on the test's @Test timeout as a safety net.
   */
  private void waitForCallback(CallbackScheduler callbackScheduler) throws InterruptedException {
    int initialSize = mockClusterCallback.getReceivedCallbacks().size();
    int size = initialSize;
    callbackScheduler.run();
    while (size == initialSize) {
      size = mockClusterCallback.getReceivedCallbacks().size();
      TimeUnit.MILLISECONDS.sleep(20);
    }
  }

  /**
   * Drives a full CLUSTER_CREATE job, finishing every task with status 0 (success) or
   * 1 (failure), and asserts the corresponding terminal callback type fires.
   */
  private void testCallbacks(boolean failJob) throws Exception {
    ClusterScheduler clusterScheduler = injector.getInstance(ClusterScheduler.class);
    String tenantId = cluster.getAccount().getTenantId();
    clusterQueues.add(tenantId, new Element(cluster.getId(), ClusterAction.CLUSTER_CREATE.name()));
    clusterScheduler.run();
    CallbackScheduler callbackScheduler = injector.getInstance(CallbackScheduler.class);
    // should be no job in the queue until the start callback runs
    Assert.assertEquals(0, jobQueues.size(tenantId));
    waitForCallback(callbackScheduler);
    Assert.assertEquals(CallbackData.Type.START, mockClusterCallback.getReceivedCallbacks().get(0).getType());
    JobScheduler jobScheduler = injector.getInstance(JobScheduler.class);
    jobScheduler.run();
    // take tasks until there are no more
    TakeTaskRequest takeRequest = new TakeTaskRequest("consumer1", PROVISIONER_ID, tenantId);
    SchedulableTask task = TestHelper.takeTask(getLoomUrl(), takeRequest);
    while (task != null) {
      FinishTaskRequest finishRequest =
          new FinishTaskRequest("consumer1", PROVISIONER_ID, tenantId,
              task.getTaskId(), null, null, failJob ? 1 : 0, null, null, null);
      TestHelper.finishTask(getLoomUrl(), finishRequest);
      jobScheduler.run();
      jobScheduler.run();
      task = TestHelper.takeTask(getLoomUrl(), takeRequest);
    }
    jobScheduler.run();
    waitForCallback(callbackScheduler);
    // at this point, the failure callback should have run
    Assert.assertEquals(failJob ? CallbackData.Type.FAILURE : CallbackData.Type.SUCCESS,
        mockClusterCallback.getReceivedCallbacks().get(1).getType());
  }

  /** Base URL of the in-process HTTP handler server used for provisioner requests. */
  private String getLoomUrl() {
    InetSocketAddress address = handlerServer.getBindAddress();
    return String.format("http://%s:%s%s", address.getHostName(), address.getPort(), Constants.API_BASE);
  }

  /**
   * Value object pairing a task's action name with the service it applies to; used so planned
   * stages can be compared as multisets, ignoring ordering within a stage.
   */
  private static class ActionService {
    private final String action;
    private final String service;

    private ActionService(String action, String service) {
      this.action = action;
      this.service = service;
    }

    // Null-tolerant field-wise equality; kept consistent with hashCode below.
    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      ActionService that = (ActionService) o;
      return !(action != null ? !action.equals(that.action) : that.action != null) &&
          !(service != null ? !service.equals(that.service) : that.service != null);
    }

    @Override
    public int hashCode() {
      int result = action != null ? action.hashCode() : 0;
      result = 31 * result + (service != null ? service.hashCode() : 0);
      return result;
    }

    @Override
    public String toString() {
      return Objects.toStringHelper(this)
          .add("action", action)
          .add("service", service)
          .toString();
    }
  }
}
| |
/*
* Copyright (c) 2014. Real Time Genomics Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rtg.util.diagnostic;
import java.io.Serializable;
import com.rtg.util.EnumHelper;
import com.rtg.util.PseudoEnum;
/**
 * Enumeration of SLIM warnings.
 * See <code>src.com.reeltwo.cartesian.util.diagnostic.Diagnostics.properties</code>
 * for the localised messages.
 *
 * <p>A hand-rolled pseudo-enum (predates/avoids {@code java.lang.Enum}): ordinals are handed
 * out by a running counter in declaration order, so the declaration order of the constants
 * below is part of the class's serialized contract and must not change.
 */
public final class WarningType implements DiagnosticType, PseudoEnum, Serializable {

  // Running counter used to assign ordinals in declaration order; starts at -1 so the
  // first ++sOrdinal yields 0.
  private static int sOrdinal = -1;

  /**
   * Warning for an unexpected nucleotide or amino acid in a sequence. Parameters
   * are sequence name and the unexpected residue.
   */
  public static final WarningType BAD_TIDE = new WarningType(++sOrdinal, "BAD_TIDE", 3);

  /**
   * Number of bad tides
   */
  public static final WarningType NUMBER_OF_BAD_TIDE = new WarningType(++sOrdinal, "NUMBER_OF_BAD_TIDE", 1);

  /**
   * Warning for a sequence with no name. Parameter is the name assigned to the
   * sequence.
   */
  public static final WarningType NO_NAME = new WarningType(++sOrdinal, "NO_NAME", 1);

  /**
   * Warning used for a sequence with too many residues. Parameter is the name of the
   * sequence.
   */
  public static final WarningType SEQUENCE_TOO_LONG = new WarningType(++sOrdinal, "SEQUENCE_TOO_LONG", 1);

  /**
   * A file or directory could not be converted to a canonical path (this may be
   * because the file does not exist). The parameter is the supplied path.
   */
  public static final WarningType BAD_PATH = new WarningType(++sOrdinal, "BAD_PATH", 1);

  /**
   * A sequence label was present but not sequence data followed.
   */
  public static final WarningType NO_SEQUENCE = new WarningType(++sOrdinal, "NO_SEQUENCE", 1);

  /**
   * A sequence label was too large to write fully because of file size limits,
   * or label size limits.
   * The parameter is the truncated label.
   * (Unlikely to ever happen in real world).
   */
  public static final WarningType SEQUENCE_LABEL_TOO_LONG = new WarningType(++sOrdinal, "SEQUENCE_LABEL_TOO_LONG", 1);

  /**
   * A sequence name and quality name in FASTQ file differs.
   */
  public static final WarningType SEQUENCE_LABEL_MISMATCH = new WarningType(++sOrdinal, "SEQUENCE_LABEL_MISMATCH", 2);

  /**
   * A read length does not agree with the length expected and will be ignored (in Ngs).
   */
  public static final WarningType INCORRECT_LENGTH = new WarningType(++sOrdinal, "INCORRECT_LENGTH", 3);

  /**
   * %1 out of %2 Symbols corresponded to A, C, G, T, or N.
   */
  public static final WarningType POSSIBLY_NOT_PROTEIN = new WarningType(++sOrdinal, "POSSIBLY_NOT_PROTEIN", 2);

  /**
   * Number of reads of wrong length.
   */
  public static final WarningType NUMBER_OF_INCORRECT_LENGTH = new WarningType(++sOrdinal, "NUMBER_OF_INCORRECT_LENGTH", 1);

  /**
   * The supplied file \"%1\" is not a FASTA file or has no sequences.
   */
  public static final WarningType NOT_FASTA_FILE = new WarningType(++sOrdinal, "NOT_FASTA_FILE", 1);

  /**
   * A quality outside the likely range was found, if the input is Solexa/Illumina it should be specified
   */
  public static final WarningType POSSIBLY_SOLEXA = new WarningType(++sOrdinal, "POSSIBLY_SOLEXA", 0);

  /** Total bad character warnings. */
  public static final WarningType BAD_CHAR_WARNINGS = new WarningType(++sOrdinal, "BAD_CHAR_WARNINGS", 1);

  /** Warning that sam records were ignored because of corrupted record content. */
  public static final WarningType SAM_IGNORED_RECORDS = new WarningType(++sOrdinal, "SAM_IGNORED_RECORDS", 2);

  /** Warning that two sam files cannot be merged because their headers are different. */
  public static final WarningType SAM_INCOMPATIBLE_HEADERS = new WarningType(++sOrdinal, "SAM_INCOMPATIBLE_HEADERS", 2);

  /** SAM record has invalid format. */
  public static final WarningType SAM_BAD_FORMAT_WARNING1 = new WarningType(++sOrdinal, "SAM_BAD_FORMAT_WARNING1", 1);

  /** SAM record has invalid format. */
  public static final WarningType SAM_BAD_FORMAT_WARNING = new WarningType(++sOrdinal, "SAM_BAD_FORMAT_WARNING", 2);

  /** %1 */
  public static final WarningType INFO_WARNING = new WarningType(++sOrdinal, "INFO_WARNING", 1);

  /**
   * The supplied file \"%1\" is not a FASTQ file or has no sequences.
   */
  public static final WarningType NOT_FASTQ_FILE = new WarningType(++sOrdinal, "NOT_FASTQ_FILE", 1);

  // Provides Enum-like values()/valueOf() support. The array order must mirror the ordinal
  // assignment order above, because readResolve() indexes values() by ordinal.
  private static final EnumHelper<WarningType> HELPER = new EnumHelper<>(WarningType.class, new WarningType[] {
      BAD_TIDE,
      NUMBER_OF_BAD_TIDE,
      NO_NAME,
      SEQUENCE_TOO_LONG,
      BAD_PATH,
      NO_SEQUENCE,
      SEQUENCE_LABEL_TOO_LONG,
      SEQUENCE_LABEL_MISMATCH,
      INCORRECT_LENGTH,
      POSSIBLY_NOT_PROTEIN,
      NUMBER_OF_INCORRECT_LENGTH,
      NOT_FASTA_FILE,
      POSSIBLY_SOLEXA,
      BAD_CHAR_WARNINGS,
      SAM_IGNORED_RECORDS,
      SAM_INCOMPATIBLE_HEADERS,
      SAM_BAD_FORMAT_WARNING1,
      SAM_BAD_FORMAT_WARNING,
      INFO_WARNING,
      NOT_FASTQ_FILE
  });

  /**
   * see {@link java.lang.Enum#valueOf(Class, String)}
   * @param str name of value
   * @return the enum value
   */
  public static WarningType valueOf(final String str) {
    return HELPER.valueOf(str);
  }

  /**
   * @return list of enum values
   */
  public static WarningType[] values() {
    return HELPER.values();
  }

  /** Number of parameters that must occur in conjunction with this warning. */
  private final int mParams;

  // Position in declaration order (mirrors Enum.ordinal()).
  private final int mOrdinal;

  // Symbolic name of this constant (mirrors Enum.name()).
  private final String mName;

  /**
   * @param ordinal position of this constant in declaration order
   * @param name symbolic name of the warning
   * @param params number of message parameters the warning requires
   */
  private WarningType(final int ordinal, final String name, final int params) {
    mParams = params;
    mOrdinal = ordinal;
    mName = name;
  }

  @Override
  public int ordinal() {
    return mOrdinal;
  }

  @Override
  public String name() {
    return mName;
  }

  @Override
  public String toString() {
    return mName;
  }

  @Override
  public int getNumberOfParameters() {
    return mParams;
  }

  // Canonicalize on deserialization so reference equality (==) keeps working for constants.
  Object readResolve() {
    return values()[this.ordinal()];
  }

  /** Warnings carry no message prefix. */
  @Override
  public String getMessagePrefix() {
    return "";
  }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.servicefabric.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.servicefabric.fluent.models.ServiceResourceInner;
import com.azure.resourcemanager.servicefabric.fluent.models.ServiceResourceListInner;
import com.azure.resourcemanager.servicefabric.models.ServiceResourceUpdate;
/** An instance of this class provides access to all the operations defined in ServicesClient. */
public interface ServicesClient {
/**
* Get a Service Fabric service resource created or in the process of being created in the Service Fabric
* application resource.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a Service Fabric service resource created or in the process of being created in the Service Fabric
* application resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ServiceResourceInner get(String resourceGroupName, String clusterName, String applicationName, String serviceName);
/**
* Get a Service Fabric service resource created or in the process of being created in the Service Fabric
* application resource.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a Service Fabric service resource created or in the process of being created in the Service Fabric
* application resource, along with the full HTTP {@link Response}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<ServiceResourceInner> getWithResponse(
String resourceGroupName, String clusterName, String applicationName, String serviceName, Context context);
/**
* Create or update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of the long-running create-or-update operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<ServiceResourceInner>, ServiceResourceInner> beginCreateOrUpdate(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceInner parameters);
/**
* Create or update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of the long-running create-or-update operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<ServiceResourceInner>, ServiceResourceInner> beginCreateOrUpdate(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceInner parameters,
Context context);
/**
* Create or update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the service resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ServiceResourceInner createOrUpdate(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceInner parameters);
/**
* Create or update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the service resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ServiceResourceInner createOrUpdate(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceInner parameters,
Context context);
/**
* Update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource for patch operations.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of the long-running update operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<ServiceResourceInner>, ServiceResourceInner> beginUpdate(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceUpdate parameters);
/**
* Update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource for patch operations.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of the long-running update operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<ServiceResourceInner>, ServiceResourceInner> beginUpdate(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceUpdate parameters,
Context context);
/**
* Update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource for patch operations.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the service resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ServiceResourceInner update(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceUpdate parameters);
/**
* Update a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param parameters The service resource for patch operations.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the service resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ServiceResourceInner update(
String resourceGroupName,
String clusterName,
String applicationName,
String serviceName,
ServiceResourceUpdate parameters,
Context context);
/**
* Delete a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of the long-running delete operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String clusterName, String applicationName, String serviceName);
/**
* Delete a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of the long-running delete operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String clusterName, String applicationName, String serviceName, Context context);
/**
* Delete a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String clusterName, String applicationName, String serviceName);
/**
* Delete a Service Fabric service resource with the specified name.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param serviceName The name of the service resource in the format of {applicationName}~{serviceName}.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(
String resourceGroupName, String clusterName, String applicationName, String serviceName, Context context);
/**
* Gets all service resources created or in the process of being created in the Service Fabric application resource.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all service resources created or in the process of being created in the Service Fabric application
* resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ServiceResourceListInner list(String resourceGroupName, String clusterName, String applicationName);
/**
* Gets all service resources created or in the process of being created in the Service Fabric application resource.
*
* @param resourceGroupName The name of the resource group.
* @param clusterName The name of the cluster resource.
* @param applicationName The name of the application resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all service resources created or in the process of being created in the Service Fabric application
* resource, along with the full HTTP {@link Response}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<ServiceResourceListInner> listWithResponse(
String resourceGroupName, String clusterName, String applicationName, Context context);
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
/**
 * Test Minimum Versions feature (HBASE-4071).
 *
 * <p>Each test creates a local region whose column family keeps a minimum number of
 * versions with a 1 second TTL, writes back-dated cells, and verifies which versions
 * survive gets, flushes and major compactions.
 */
@Category({RegionServerTests.class, SmallTests.class})
public class TestMinVersions {
  HBaseTestingUtility hbu = HBaseTestingUtility.createLocalHTU();
  // Byte fixtures used both as row keys and as cell values.
  private final byte[] T0 = Bytes.toBytes("0");
  private final byte[] T1 = Bytes.toBytes("1");
  private final byte[] T2 = Bytes.toBytes("2");
  private final byte[] T3 = Bytes.toBytes("3");
  private final byte[] T4 = Bytes.toBytes("4");
  private final byte[] T5 = Bytes.toBytes("5");
  // First test column family/qualifier.
  private final byte[] c0 = COLUMNS[0];
  @Rule public TestName name = new TestName();
  /**
   * Verify behavior of getClosestBefore(...)
   */
  @Test
  public void testGetClosestBefore() throws Exception {
    HTableDescriptor htd =
        hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, KeepDeletedCells.FALSE);
    HRegion region = hbu.createLocalHRegion(htd, null, null);
    try {
      // 2s in the past
      long ts = EnvironmentEdgeManager.currentTime() - 2000;
      Put p = new Put(T1, ts);
      p.add(c0, c0, T1);
      region.put(p);
      p = new Put(T1, ts+1);
      p.add(c0, c0, T4);
      region.put(p);
      p = new Put(T3, ts);
      p.add(c0, c0, T3);
      region.put(p);
      // now make sure that getClosestBefore(...) get can
      // rows that would be expired without minVersion.
      // also make sure it gets the latest version
      Result r = region.getClosestRowBefore(T1, c0);
      checkResult(r, c0, T4);
      r = region.getClosestRowBefore(T2, c0);
      checkResult(r, c0, T4);
      // now flush/compact
      region.flushcache();
      region.compactStores(true);
      r = region.getClosestRowBefore(T1, c0);
      checkResult(r, c0, T4);
      r = region.getClosestRowBefore(T2, c0);
      checkResult(r, c0, T4);
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }
  /**
   * Test mixed memstore and storefile scanning
   * with minimum versions.
   */
  @Test
  public void testStoreMemStore() throws Exception {
    // keep 3 versions minimum
    HTableDescriptor htd =
        hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.FALSE);
    HRegion region = hbu.createLocalHRegion(htd, null, null);
    // 2s in the past
    long ts = EnvironmentEdgeManager.currentTime() - 2000;
    try {
      Put p = new Put(T1, ts-1);
      p.add(c0, c0, T2);
      region.put(p);
      p = new Put(T1, ts-3);
      p.add(c0, c0, T0);
      region.put(p);
      // now flush/compact
      region.flushcache();
      region.compactStores(true);
      p = new Put(T1, ts);
      p.add(c0, c0, T3);
      region.put(p);
      p = new Put(T1, ts-2);
      p.add(c0, c0, T1);
      region.put(p);
      p = new Put(T1, ts-3);
      p.add(c0, c0, T0);
      region.put(p);
      // newest version in the memstore
      // the 2nd oldest in the store file
      // and the 3rd, 4th oldest also in the memstore
      Get g = new Get(T1);
      g.setMaxVersions();
      Result r = region.get(g); // this'll use ScanWildcardColumnTracker
      checkResult(r, c0, T3,T2,T1);
      g = new Get(T1);
      g.setMaxVersions();
      g.addColumn(c0, c0);
      r = region.get(g); // this'll use ExplicitColumnTracker
      checkResult(r, c0, T3,T2,T1);
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }
  /**
   * Make sure the Deletes behave as expected with minimum versions
   */
  @Test
  public void testDelete() throws Exception {
    HTableDescriptor htd =
        hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.FALSE);
    HRegion region = hbu.createLocalHRegion(htd, null, null);
    // 2s in the past
    long ts = EnvironmentEdgeManager.currentTime() - 2000;
    try {
      Put p = new Put(T1, ts-2);
      p.add(c0, c0, T1);
      region.put(p);
      p = new Put(T1, ts-1);
      p.add(c0, c0, T2);
      region.put(p);
      p = new Put(T1, ts);
      p.add(c0, c0, T3);
      region.put(p);
      Delete d = new Delete(T1, ts-1);
      region.delete(d);
      Get g = new Get(T1);
      g.setMaxVersions();
      Result r = region.get(g); // this'll use ScanWildcardColumnTracker
      checkResult(r, c0, T3);
      g = new Get(T1);
      g.setMaxVersions();
      g.addColumn(c0, c0);
      r = region.get(g); // this'll use ExplicitColumnTracker
      checkResult(r, c0, T3);
      // now flush/compact
      region.flushcache();
      region.compactStores(true);
      // try again
      g = new Get(T1);
      g.setMaxVersions();
      r = region.get(g); // this'll use ScanWildcardColumnTracker
      checkResult(r, c0, T3);
      g = new Get(T1);
      g.setMaxVersions();
      g.addColumn(c0, c0);
      r = region.get(g); // this'll use ExplicitColumnTracker
      checkResult(r, c0, T3);
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }
  /**
   * Make sure the memstore behaves correctly with minimum versions
   */
  @Test
  public void testMemStore() throws Exception {
    HTableDescriptor htd =
        hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
    HRegion region = hbu.createLocalHRegion(htd, null, null);
    // 2s in the past
    long ts = EnvironmentEdgeManager.currentTime() - 2000;
    try {
      // 2nd version
      Put p = new Put(T1, ts-2);
      p.add(c0, c0, T2);
      region.put(p);
      // 3rd version
      p = new Put(T1, ts-1);
      p.add(c0, c0, T3);
      region.put(p);
      // 4th version
      p = new Put(T1, ts);
      p.add(c0, c0, T4);
      region.put(p);
      // now flush/compact
      region.flushcache();
      region.compactStores(true);
      // now put the first version (backdated)
      p = new Put(T1, ts-3);
      p.add(c0, c0, T1);
      region.put(p);
      // now the latest change is in the memstore,
      // but it is not the latest version
      Result r = region.get(new Get(T1));
      checkResult(r, c0, T4);
      Get g = new Get(T1);
      g.setMaxVersions();
      r = region.get(g); // this'll use ScanWildcardColumnTracker
      checkResult(r, c0, T4,T3);
      g = new Get(T1);
      g.setMaxVersions();
      g.addColumn(c0, c0);
      r = region.get(g); // this'll use ExplicitColumnTracker
      checkResult(r, c0, T4,T3);
      p = new Put(T1, ts+1);
      p.add(c0, c0, T5);
      region.put(p);
      // now the latest version is in the memstore
      g = new Get(T1);
      g.setMaxVersions();
      r = region.get(g); // this'll use ScanWildcardColumnTracker
      checkResult(r, c0, T5,T4);
      g = new Get(T1);
      g.setMaxVersions();
      g.addColumn(c0, c0);
      r = region.get(g); // this'll use ExplicitColumnTracker
      checkResult(r, c0, T5,T4);
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }
  /**
   * Verify basic minimum versions functionality
   */
  @Test
  public void testBaseCase() throws Exception {
    // 2 versions minimum, 1000 versions maximum, ttl = 1s
    HTableDescriptor htd =
        hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
    HRegion region = hbu.createLocalHRegion(htd, null, null);
    try {
      // 2s in the past
      long ts = EnvironmentEdgeManager.currentTime() - 2000;
      // 1st version
      Put p = new Put(T1, ts-3);
      p.add(c0, c0, T1);
      region.put(p);
      // 2nd version
      p = new Put(T1, ts-2);
      p.add(c0, c0, T2);
      region.put(p);
      // 3rd version
      p = new Put(T1, ts-1);
      p.add(c0, c0, T3);
      region.put(p);
      // 4th version
      p = new Put(T1, ts);
      p.add(c0, c0, T4);
      region.put(p);
      Result r = region.get(new Get(T1));
      checkResult(r, c0, T4);
      Get g = new Get(T1);
      g.setTimeRange(0L, ts+1);
      r = region.get(g);
      checkResult(r, c0, T4);
      // oldest version still exists
      g.setTimeRange(0L, ts-2);
      r = region.get(g);
      checkResult(r, c0, T1);
      // gets see only available versions
      // even before compactions
      g = new Get(T1);
      g.setMaxVersions();
      r = region.get(g); // this'll use ScanWildcardColumnTracker
      checkResult(r, c0, T4,T3);
      g = new Get(T1);
      g.setMaxVersions();
      g.addColumn(c0, c0);
      r = region.get(g); // this'll use ExplicitColumnTracker
      checkResult(r, c0, T4,T3);
      // now flush
      region.flushcache();
      // with HBASE-4241 a flush will eliminate the expired rows
      g = new Get(T1);
      g.setTimeRange(0L, ts-2);
      r = region.get(g);
      assertTrue(r.isEmpty());
      // major compaction
      region.compactStores(true);
      // after compaction the 4th version is still available
      g = new Get(T1);
      g.setTimeRange(0L, ts+1);
      r = region.get(g);
      checkResult(r, c0, T4);
      // so is the 3rd
      g.setTimeRange(0L, ts);
      r = region.get(g);
      checkResult(r, c0, T3);
      // but the 2nd and earlier versions are gone
      g.setTimeRange(0L, ts-1);
      r = region.get(g);
      assertTrue(r.isEmpty());
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }
  /**
   * Verify that basic filters still behave correctly with
   * minimum versions enabled.
   */
  @Test
  public void testFilters() throws Exception {
    HTableDescriptor htd =
        hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
    HRegion region = hbu.createLocalHRegion(htd, null, null);
    final byte [] c1 = COLUMNS[1];
    // 2s in the past
    long ts = EnvironmentEdgeManager.currentTime() - 2000;
    try {
      Put p = new Put(T1, ts-3);
      p.add(c0, c0, T0);
      p.add(c1, c1, T0);
      region.put(p);
      p = new Put(T1, ts-2);
      p.add(c0, c0, T1);
      p.add(c1, c1, T1);
      region.put(p);
      p = new Put(T1, ts-1);
      p.add(c0, c0, T2);
      p.add(c1, c1, T2);
      region.put(p);
      p = new Put(T1, ts);
      p.add(c0, c0, T3);
      p.add(c1, c1, T3);
      region.put(p);
      List<Long> tss = new ArrayList<>();
      tss.add(ts-1);
      tss.add(ts-2);
      Get g = new Get(T1);
      g.addColumn(c1,c1);
      g.setFilter(new TimestampsFilter(tss));
      g.setMaxVersions();
      Result r = region.get(g);
      checkResult(r, c1, T2,T1);
      g = new Get(T1);
      g.addColumn(c0,c0);
      g.setFilter(new TimestampsFilter(tss));
      g.setMaxVersions();
      r = region.get(g);
      checkResult(r, c0, T2,T1);
      // now flush/compact
      region.flushcache();
      region.compactStores(true);
      g = new Get(T1);
      g.addColumn(c1,c1);
      g.setFilter(new TimestampsFilter(tss));
      g.setMaxVersions();
      r = region.get(g);
      checkResult(r, c1, T2);
      g = new Get(T1);
      g.addColumn(c0,c0);
      g.setFilter(new TimestampsFilter(tss));
      g.setMaxVersions();
      r = region.get(g);
      checkResult(r, c0, T2);
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }
  /**
   * Asserts that the result contains exactly the given values for {@code col},
   * newest version first.
   */
  private void checkResult(Result r, byte[] col, byte[] ... vals) {
    // JUnit convention: expected value first, actual second — otherwise
    // failure messages report the values swapped.
    assertEquals(vals.length, r.size());
    List<Cell> kvs = r.getColumnCells(col, col);
    assertEquals(vals.length, kvs.size());
    for (int i = 0; i < vals.length; i++) {
      assertTrue(CellUtil.matchingValue(kvs.get(i), vals[i]));
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Contributors: Dan MacDonald <dan@redknee.com>
package org.apache.log4j.net;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.helpers.Constants;
import org.apache.log4j.spi.LoggingEvent;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.net.UnknownHostException;
/**
* Sends {@link LoggingEvent} objects to a remote log server, usually a
* {@link SocketNode}.
* <p>
* The SocketAppender has the following properties:
*
* <ul>
* <li>If sent to a {@link SocketNode}, remote logging is non-intrusive as
* far as the log event is concerned. In other words, the event will be logged
* with the same time stamp, {@link org.apache.log4j.NDC}, location info as if
* it were logged locally by the client.
* </li>
*
* <li>SocketAppenders do not use a layout. They ship a serialized
* {@link LoggingEvent} object to the server side.</li>
*
* <li>Remote logging uses the TCP protocol. Consequently, if the server is
* reachable, then log events will eventually arrive at the server.
*
* <p><li>If the remote server is down, the logging requests are simply dropped.
* However, if and when the server comes back up then event transmission is
* resumed transparently. This transparent reconnection is performed by a
* <em>connector</em> thread which periodically attempts to connect to
* the server.
*
* <p><li>Logging events are automatically <em>buffered</em> by the native TCP
* implementation. This means that if the link to server is slow but still
* faster than the rate of (log) event production by the client, the client will
* not be affected by the slow network connection. However, if the network
* connection is slower than the rate of event production, then the client can
* only progress at the network rate. In particular, if the network link to
* the server is down, the client will be blocked.
*
* <p>On the other hand, if the network link is up, but the server is down, the
* client will not be blocked when making log requests but the log events will
* be lost due to server unavailability.
*
* <p><li>Even if a <code>SocketAppender</code> is no longer attached to any
* category, it will not be garbage collected in the presence of a connector
* thread. A connector thread exists only if the connection to the server is
* down. To avoid this garbage collection problem, you should {@link #close}
* the <code>SocketAppender</code> explicitly. See also next item.
*
* <p>Long lived applications which create/destroy many
* <code>SocketAppender</code> instances should be aware of this garbage
* collection problem. Most other applications can safely ignore it.
*
* <p><li>If the JVM hosting the <code>SocketAppender</code> exits before the
* <code>SocketAppender</code> is closed either explicitly or subsequent to
* garbage collection, then there might be untransmitted data in the pipe
* which might be lost. This is a common problem on Windows based systems.
*
* <p>To avoid lost data, it is usually sufficient to {@link #close} the
* <code>SocketAppender</code> either explicitly or by calling the
* {@link org.apache.log4j.LogManager#shutdown} method before exiting the
* application.
*
* </ul>
*
* @author Ceki Gülcü
* @since 0.8.4
* */
public class SocketAppender extends AppenderSkeleton {
/**
* The default port number of remote logging server (4560).
*/
public static final int DEFAULT_PORT = 4560;
/**
* The default reconnection delay (30000 milliseconds or 30 seconds).
*/
static final int DEFAULT_RECONNECTION_DELAY = 30000;
// Reset the ObjectOutputStream every RESET_FREQUENCY events so its
// back-reference table does not grow without bound (a memory leak).
// Historically this was 70; currently the stream is reset after every event.
//private static final int RESET_FREQUENCY = 70;
private static final int RESET_FREQUENCY = 1;
/**
* We remember host name as String in addition to the resolved
* InetAddress so that it can be returned via getOption().
*/
String remoteHost;
// Resolved address of the remote host; set from remoteHost in activateOptions().
InetAddress address;
// TCP port of the remote log server.
int port = DEFAULT_PORT;
// Stream over the open socket; null whenever the connection is down.
ObjectOutputStream oos;
// Milliseconds between reconnection attempts; a value <= 0 disables the connector thread.
int reconnectionDelay = DEFAULT_RECONNECTION_DELAY;
// If true, event location information is resolved before serialization.
boolean locationInfo = false;
// Background thread that retries the connection while the server is down; null when connected.
private Connector connector;
// Number of events written since the last ObjectOutputStream reset.
int counter = 0;
// Local host name, attached to outgoing events as the HOSTNAME property.
String hostname;
// Optional application identifier, attached to outgoing events when non-null.
String application;
/**
* Default constructor. No connection is made until {@link #activateOptions}
* is invoked (typically by the configurator once RemoteHost/Port are set).
* NOTE(review): {@code super(false)} presumably marks the appender as not yet
* active — confirm against AppenderSkeleton's constructor contract.
*/
public SocketAppender() {
super(false);
}
/**
 * Creates an appender that ships serialized events to the server at the
 * given <code>address</code> and <code>port</code>, connecting immediately
 * via {@link #activateOptions}.
 */
public SocketAppender(InetAddress address, int port) {
  super(false);
  this.port = port;
  this.remoteHost = address.getHostName();
  this.address = address;
  activateOptions();
}
/**
 * Creates an appender that ships serialized events to the server at
 * <code>host:port</code>; the host name is resolved eagerly and the
 * connection is opened via {@link #activateOptions}.
 */
public SocketAppender(String host, int port) {
  super(false);
  this.remoteHost = host;
  this.address = getAddressByName(host);
  this.port = port;
  activateOptions();
}
/**
 * Connects to the configured <b>RemoteHost</b> and <b>Port</b>.
 *
 * <p>First determines the local host identity (used to stamp outgoing
 * events), then resolves the remote host and opens the connection.
 *
 * @throws IllegalStateException if the RemoteHost property has not been set
 */
public void activateOptions() {
  // Best effort: prefer the local host name, fall back to its address,
  // and finally to a fixed placeholder.
  String localIdentity;
  try {
    localIdentity = InetAddress.getLocalHost().getHostName();
  } catch (UnknownHostException nameLookupFailed) {
    try {
      localIdentity = InetAddress.getLocalHost().getHostAddress();
    } catch (UnknownHostException addressLookupFailed) {
      localIdentity = "unknown";
    }
  }
  hostname = localIdentity;
  if (remoteHost == null) {
    String err = "The RemoteHost property is required for SocketAppender named "+ name;
    getLogger().error(err);
    throw new IllegalStateException(err);
  }
  address = getAddressByName(remoteHost);
  connect(address, port);
  // all is dandy on the eastern front.
  super.activateOptions();
}
/**
* Close this appender.
*
* <p>This will mark the appender as closed and then call the {@link
* #cleanUp} method. Safe to call more than once: subsequent calls are no-ops.
* */
public synchronized void close() {
if (closed) {
return;
}
this.closed = true;
cleanUp();
}
/**
* Drop the connection to the remote host and release the underlying
* connector thread if it has been created
* */
public void cleanUp() {
if (oos != null) {
try {
oos.close();
} catch (IOException e) {
// Closing a broken stream may itself fail; log and carry on.
getLogger().error("Could not close oos.", e);
}
oos = null;
}
if (connector != null) {
//LogLog.debug("Interrupting the connector.");
// Signal the retry loop to stop; the thread exits on its next check.
connector.interrupted = true;
connector = null; // allow gc
}
}
/**
 * Opens a fresh connection to the remote server, replacing any previous
 * one. On failure the error is logged and, when a positive reconnection
 * delay is configured, a background {@link Connector} is started.
 *
 * @param address the server address to connect to (note: the guard below
 *                checks the field {@code this.address}, silently skipping
 *                when earlier name resolution failed)
 * @param port    the server port to connect to
 */
void connect(InetAddress address, int port) {
  if (this.address == null) {
    return;
  }
  try {
    // First, close the previous connection if any.
    cleanUp();
    Socket socket = new Socket(address, port);
    try {
      oos = new ObjectOutputStream(socket.getOutputStream());
    } catch (IOException streamFailure) {
      // Fix: close the socket so it is not leaked when the stream cannot
      // be created; the original code abandoned the half-open connection.
      try {
        socket.close();
      } catch (IOException ignored) {
        // Best effort; the original failure is the informative one.
      }
      throw streamFailure;
    }
  } catch (IOException e) {
    String msg =
      "Could not connect to remote log4j server at ["
      + address.getHostName() + "].";
    if (reconnectionDelay > 0) {
      msg += " We will try again later.";
      fireConnector(); // fire the connector thread
    }
    // Rather than log an ugly stack trace, output the msg
    getLogger().error(msg + "(" + e.getMessage() + ")");
  }
}
/**
* Serializes the event to the remote server. Host and application
* properties are stamped onto the event first; on I/O failure the
* connection is dropped and, if configured, a reconnect is scheduled.
* Silently does nothing when the event is null or no connection exists.
*/
public void append(LoggingEvent event) {
if (event == null) {
return;
}
if (oos != null) {
try {
if (locationInfo) {
// Resolve location info before serialization — presumably so it
// is captured in the serialized form; confirm against LoggingEvent.
event.getLocationInformation();
}
if (hostname != null) {
event.setProperty(Constants.HOSTNAME_KEY, hostname);
}
if (application != null) {
event.setProperty(Constants.APPLICATION_KEY, application);
}
oos.writeObject(event);
oos.flush();
if (++counter >= RESET_FREQUENCY) {
counter = 0;
// Failing to reset the object output stream every now and
// then creates a serious memory leak.
//System.err.println("Doing oos.reset()");
oos.reset();
}
} catch (IOException e) {
// Drop the broken stream; the Connector (if fired) will replace it.
oos = null;
getLogger().warn("Detected problem with connection: " + e);
if (reconnectionDelay > 0) {
fireConnector();
}
}
}
}
/**
* Starts the background reconnection thread, unless one is already
* running. The thread is a low-priority daemon so it never keeps the
* JVM alive.
*/
void fireConnector() {
if (connector == null) {
getLogger().debug("Starting a new connector thread.");
connector = new Connector();
connector.setDaemon(true);
connector.setPriority(Thread.MIN_PRIORITY);
connector.start();
}
}
/**
* Resolves <code>host</code> to an InetAddress.
*
* @return the resolved address, or null when resolution fails. The broad
*         catch also covers non-IOException failures (e.g. a
*         SecurityException from the resolver).
*/
InetAddress getAddressByName(String host) {
try {
return InetAddress.getByName(host);
} catch (Exception e) {
getLogger().error("Could not find address of [" + host + "].", e);
return null;
}
}
/**
* The <b>RemoteHost</b> option takes a string value which should be
* the host name of the server where a {@link SocketNode} or a
* {@link SocketReceiver} is running.
* */
public void setRemoteHost(String host) {
remoteHost = host;
}
/**
* Returns value of the <b>RemoteHost</b> option.
*/
public String getRemoteHost() {
return remoteHost;
}
/**
* The <b>Port</b> option takes a positive integer representing the port
* where the server is waiting for connections.
*/
public void setPort(int port) {
this.port = port;
}
/**
* Returns value of the <b>Port</b> option.
*/
public int getPort() {
return port;
}
/**
* The <b>LocationInfo</b> option takes a boolean value. If true, the
* information sent to the remote host will include location information.
* By default no location information is sent to the server.
*/
public void setLocationInfo(boolean locationInfo) {
this.locationInfo = locationInfo;
}
/**
* Returns value of the <b>LocationInfo</b> option.
*/
public boolean getLocationInfo() {
return locationInfo;
}
/**
* The <b>Application</b> option takes a string value which should be the
* name of the application getting logged.
* NOTE(review): earlier docs said "if property was already set (via system
* property), don't set here" — this setter assigns unconditionally.
*/
public void setApplication(String lapp) {
this.application = lapp;
}
/**
* Returns value of the <b>Application</b> option.
*/
public String getApplication() {
return application;
}
/**
* The <b>ReconnectionDelay</b> option takes a positive integer representing
* the number of milliseconds to wait between each failed connection attempt
* to the server. The default value of this option is 30000 which corresponds
* to 30 seconds.
* <p>
* Setting this option to zero turns off reconnection capability.
*/
public void setReconnectionDelay(int delay) {
this.reconnectionDelay = delay;
}
/**
* Returns value of the <b>ReconnectionDelay</b> option.
*/
public int getReconnectionDelay() {
return reconnectionDelay;
}
/**
* The Connector will reconnect when the server becomes available again.
* It does this by attempting to open a new connection every
* <code>reconnectionDelay</code> milliseconds.
* <p>
* It stops trying whenever a connection is established. It will restart
* trying to reconnect to the server when a previously open connection is dropped.
*
* @author Ceki Gülcü
* @since 0.8.4
*/
class Connector extends Thread {
  // Fix: written by the appender thread (cleanUp()) and read by this
  // thread's loop; must be volatile so the store is guaranteed visible,
  // otherwise the retry loop may never observe the stop request.
  volatile boolean interrupted = false;

  /**
   * Retry loop: sleeps for {@code reconnectionDelay} ms, then attempts a
   * connection. Exits when a connection succeeds, when interrupted, or
   * when {@link #interrupted} is set.
   */
  public void run() {
    Socket socket;
    while (!interrupted) {
      try {
        sleep(reconnectionDelay);
        getLogger().debug("Attempting connection to {}", address.getHostName());
        socket = new Socket(address, port);
        synchronized (this) {
          oos = new ObjectOutputStream(socket.getOutputStream());
          // Clear the appender's reference so a future failure may fire
          // a fresh connector.
          connector = null;
          getLogger().debug("Connection established. Exiting connector thread.");
          break;
        }
      } catch (InterruptedException e) {
        getLogger().debug("Connector interrupted. Leaving loop.");
        return;
      } catch (java.net.ConnectException e) {
        getLogger().debug(
          "Remote host " + address.getHostName() + " refused connection.");
      } catch (IOException e) {
        getLogger().debug(
          "Could not connect to {}. Exception is {}", address.getHostName(), e);
      }
    }
  }
}
/**
* Gets whether appender requires a layout.
* Events are serialized whole, so no layout is ever needed.
* @return false
*/
public boolean requiresLayout() {
return false;
}
}
| |
// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201409.advancedoperations;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201409.cm.AdGroupAd;
import com.google.api.ads.adwords.axis.v201409.cm.AdGroupAdOperation;
import com.google.api.ads.adwords.axis.v201409.cm.AdGroupAdReturnValue;
import com.google.api.ads.adwords.axis.v201409.cm.AdGroupAdServiceInterface;
import com.google.api.ads.adwords.axis.v201409.cm.AttributeFieldMapping;
import com.google.api.ads.adwords.axis.v201409.cm.ConstantOperand;
import com.google.api.ads.adwords.axis.v201409.cm.ConstantOperandConstantType;
import com.google.api.ads.adwords.axis.v201409.cm.CustomerFeed;
import com.google.api.ads.adwords.axis.v201409.cm.CustomerFeedOperation;
import com.google.api.ads.adwords.axis.v201409.cm.CustomerFeedServiceInterface;
import com.google.api.ads.adwords.axis.v201409.cm.Feed;
import com.google.api.ads.adwords.axis.v201409.cm.FeedAttribute;
import com.google.api.ads.adwords.axis.v201409.cm.FeedAttributeType;
import com.google.api.ads.adwords.axis.v201409.cm.FeedItem;
import com.google.api.ads.adwords.axis.v201409.cm.FeedItemAdGroupTargeting;
import com.google.api.ads.adwords.axis.v201409.cm.FeedItemAttributeValue;
import com.google.api.ads.adwords.axis.v201409.cm.FeedItemOperation;
import com.google.api.ads.adwords.axis.v201409.cm.FeedItemReturnValue;
import com.google.api.ads.adwords.axis.v201409.cm.FeedItemServiceInterface;
import com.google.api.ads.adwords.axis.v201409.cm.FeedMapping;
import com.google.api.ads.adwords.axis.v201409.cm.FeedMappingOperation;
import com.google.api.ads.adwords.axis.v201409.cm.FeedMappingServiceInterface;
import com.google.api.ads.adwords.axis.v201409.cm.FeedOperation;
import com.google.api.ads.adwords.axis.v201409.cm.FeedServiceInterface;
import com.google.api.ads.adwords.axis.v201409.cm.Function;
import com.google.api.ads.adwords.axis.v201409.cm.FunctionArgumentOperand;
import com.google.api.ads.adwords.axis.v201409.cm.FunctionOperator;
import com.google.api.ads.adwords.axis.v201409.cm.Operator;
import com.google.api.ads.adwords.axis.v201409.cm.TextAd;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.client.auth.oauth2.Credential;
import com.google.common.collect.Lists;
import java.util.List;
/**
* This example adds an ad customizer feed and associates it with the customer. Then it adds an ad
* that uses the feed to populate dynamic data.
*
* Credentials and properties in {@code fromFile()} are pulled from the "ads.properties" file. See
* README for more info.
*
* Tags: CustomerFeedService.mutate, FeedItemService.mutate, FeedMappingService.mutate
* Tags: FeedService.mutate, AdGroupAdService.mutate
*
* @author Josh Radcliff
*/
public class AddAdCustomizer {
// See the Placeholder reference page for a list of all the placeholder types and fields.
// https://developers.google.com/adwords/api/docs/appendix/placeholders
private static final int PLACEHOLDER_AD_CUSTOMIZER = 10;
private static final int PLACEHOLDER_FIELD_PRICE = 3;
private static final int PLACEHOLDER_FIELD_DATE = 4;
private static final int PLACEHOLDER_FIELD_STRING = 5;
/**
* Entry point: builds OAuth2 credentials and a session from the local
* "ads.properties" file, then runs the example against two ad groups.
* The placeholder ad group IDs must be replaced before running.
*/
public static void main(String[] args) throws Exception {
// Generate a refreshable OAuth2 credential similar to a ClientLogin token
// and can be used in place of a service account.
Credential oAuth2Credential = new OfflineCredentials.Builder()
.forApi(Api.ADWORDS)
.fromFile()
.build()
.generateCredential();
// Construct an AdWordsSession.
AdWordsSession session = new AdWordsSession.Builder()
.fromFile()
.withOAuth2Credential(oAuth2Credential)
.build();
// IDs of the two ad groups the example targets; one feed item is
// created per ad group below.
List<Long> adGroupIds = Lists.newArrayList(
Long.valueOf("INSERT_ADGROUP_ID_HERE"),
Long.valueOf("INSERT_ADGROUP_ID_HERE"));
AdWordsServices adWordsServices = new AdWordsServices();
runExample(adWordsServices, session, adGroupIds);
}
/**
* Runs the example end to end: creates the feed, its mapping, its items,
* associates it with the customer, then creates customized ads.
* Note: expects exactly two ad group IDs (see createCustomizerFeedItems).
*/
public static void runExample(AdWordsServices adWordsServices, AdWordsSession session,
List<Long> adGroupIds) throws Exception {
// Create a customizer feed. One feed per account can be used for all ads.
CustomizersDataHolder dataHolder = createCustomizerFeed(adWordsServices, session);
// Create a feed mapping to map the fields with customizer IDs.
createFeedMapping(adWordsServices, session, dataHolder);
// Add feed items containing the values we'd like to place in ads.
createCustomizerFeedItems(adWordsServices, session, adGroupIds, dataHolder);
// Create a customer (account-level) feed with a matching function that determines
// when to use this feed. For this case we use the "IDENTITY" matching function that is always
// true just to associate this feed with the customer. The targeting is done within the feed
// items using the campaignTargeting, adGroupTargeting, or keywordTargeting attributes.
createCustomerFeed(adWordsServices, session, dataHolder);
// All set! We can now create ads with customizations.
createAdsWithCustomizations(adWordsServices, session, adGroupIds);
}
/**
* Creates a new Feed for ad customizers.
*
* @return A new CustomizersDataHolder, populated with the feed ID and attribute IDs of the new
* Feed.
*/
private static CustomizersDataHolder createCustomizerFeed(AdWordsServices adWordsServices,
AdWordsSession session) throws Exception {
// Get the FeedService.
FeedServiceInterface feedService = adWordsServices.get(session, FeedServiceInterface.class);
Feed customizerFeed = new Feed();
customizerFeed.setName("CustomizerFeed");
// Declare the three attributes (Name, Price, Date) that ads reference
// via {=CustomizerFeed.<attribute>} placeholders.
FeedAttribute nameAttribute = new FeedAttribute();
nameAttribute.setName("Name");
nameAttribute.setType(FeedAttributeType.STRING);
FeedAttribute priceAttribute = new FeedAttribute();
priceAttribute.setName("Price");
priceAttribute.setType(FeedAttributeType.STRING);
FeedAttribute dateAttribute = new FeedAttribute();
dateAttribute.setName("Date");
dateAttribute.setType(FeedAttributeType.DATE_TIME);
customizerFeed.setAttributes(
new FeedAttribute[] {nameAttribute, priceAttribute, dateAttribute});
FeedOperation feedOperation = new FeedOperation();
feedOperation.setOperand(customizerFeed);
feedOperation.setOperator(Operator.ADD);
Feed addedFeed = feedService.mutate(new FeedOperation[] {feedOperation}).getValue()[0];
// Record the server-assigned IDs; attribute order matches the array
// passed to setAttributes above (0=Name, 1=Price, 2=Date).
CustomizersDataHolder dataHolder = new CustomizersDataHolder();
dataHolder.customizersFeedId = addedFeed.getId();
dataHolder.nameFeedAttributeId = addedFeed.getAttributes(0).getId();
dataHolder.priceFeedAttributeId = addedFeed.getAttributes(1).getId();
dataHolder.dateFeedAttributeId = addedFeed.getAttributes(2).getId();
System.out.printf("Feed with name '%s' and ID %d was added with:%n", addedFeed.getName(),
dataHolder.customizersFeedId);
System.out.printf("  Name attribute ID %d%n", dataHolder.nameFeedAttributeId);
System.out.printf("  Price attribute ID %d%n", dataHolder.priceFeedAttributeId);
System.out.printf("  Date attribute ID %d%n", dataHolder.dateFeedAttributeId);
return dataHolder;
}
/**
* Creates a new FeedMapping that indicates how the data holder's feed should be interpreted
* in the context of ad customizers.
*/
private static void createFeedMapping(AdWordsServices adWordsServices, AdWordsSession session,
CustomizersDataHolder dataHolder) throws Exception {
// Get the FeedMappingService.
FeedMappingServiceInterface feedMappingService = adWordsServices.get(session,
FeedMappingServiceInterface.class);
FeedMapping feedMapping = new FeedMapping();
feedMapping.setFeedId(dataHolder.customizersFeedId);
feedMapping.setPlaceholderType(PLACEHOLDER_AD_CUSTOMIZER);
// Map each feed attribute to the corresponding ad-customizer
// placeholder field (Name->STRING, Price->PRICE, Date->DATE).
List<AttributeFieldMapping> attributeFieldMappings = Lists.newArrayList();
attributeFieldMappings.add(
new AttributeFieldMapping(dataHolder.nameFeedAttributeId, PLACEHOLDER_FIELD_STRING));
attributeFieldMappings.add(
new AttributeFieldMapping(dataHolder.priceFeedAttributeId, PLACEHOLDER_FIELD_PRICE));
attributeFieldMappings.add(
new AttributeFieldMapping(dataHolder.dateFeedAttributeId, PLACEHOLDER_FIELD_DATE));
feedMapping.setAttributeFieldMappings(
attributeFieldMappings.toArray(new AttributeFieldMapping[attributeFieldMappings.size()]));
FeedMappingOperation feedMappingOperation = new FeedMappingOperation();
feedMappingOperation.setOperand(feedMapping);
feedMappingOperation.setOperator(Operator.ADD);
FeedMapping addedFeedMapping =
feedMappingService.mutate(new FeedMappingOperation[] {feedMappingOperation}).getValue(0);
System.out.printf(
"Feed mapping with ID %d and placeholder type %d was added " + "for feed with ID %d.%n",
addedFeedMapping.getFeedMappingId(), addedFeedMapping.getPlaceholderType(),
addedFeedMapping.getFeedId());
}
/**
* Creates FeedItems with the values to use in ad customizations for each ad group in
* <code>adGroupIds</code>.
* NOTE(review): indexes adGroupIds.get(0) and get(1) directly, so exactly
* two ad group IDs are required.
*/
private static void createCustomizerFeedItems(AdWordsServices adWordsServices,
AdWordsSession session, List<Long> adGroupIds, CustomizersDataHolder dataHolder)
throws Exception {
// Get the FeedItemService.
FeedItemServiceInterface feedItemService =
adWordsServices.get(session, FeedItemServiceInterface.class);
List<FeedItemOperation> feedItemOperations = Lists.newArrayList();
feedItemOperations.add(createFeedItemAddOperation("Mars", "$1234.56", "20140601 000000",
adGroupIds.get(0), dataHolder));
feedItemOperations.add(createFeedItemAddOperation("Venus", "$1450.00", "20140615 120000",
adGroupIds.get(1), dataHolder));
FeedItemReturnValue feedItemReturnValue = feedItemService.mutate(
feedItemOperations.toArray(new FeedItemOperation[feedItemOperations.size()]));
for (FeedItem addedFeedItem : feedItemReturnValue.getValue()) {
System.out.printf("Added feed item with ID %d.%n", addedFeedItem.getFeedItemId());
dataHolder.customizerFeedItemIds.add(addedFeedItem.getFeedItemId());
}
}
/**
* Creates a FeedItemOperation that will create a FeedItem with the specified values and
* ad group target when sent to FeedItemService.mutate.
*
* @param name the value for the name attribute of the FeedItem
* @param price the value for the price attribute of the FeedItem
* @param date the value for the date attribute of the FeedItem
* @param adGroupId the ID of the ad group to target with the FeedItem
* @param dataHolder the data holder that contains metadata about the customizer Feed
* @return a new FeedItemOperation for adding a FeedItem
*/
private static FeedItemOperation createFeedItemAddOperation(String name, String price,
String date, Long adGroupId, CustomizersDataHolder dataHolder) {
FeedItem feedItem = new FeedItem();
feedItem.setFeedId(dataHolder.customizersFeedId);
List<FeedItemAttributeValue> attributeValues = Lists.newArrayList();
FeedItemAttributeValue nameAttributeValue = new FeedItemAttributeValue();
nameAttributeValue.setFeedAttributeId(dataHolder.nameFeedAttributeId);
nameAttributeValue.setStringValue(name);
attributeValues.add(nameAttributeValue);
FeedItemAttributeValue priceAttributeValue = new FeedItemAttributeValue();
priceAttributeValue.setFeedAttributeId(dataHolder.priceFeedAttributeId);
priceAttributeValue.setStringValue(price);
attributeValues.add(priceAttributeValue);
FeedItemAttributeValue dateAttributeValue = new FeedItemAttributeValue();
dateAttributeValue.setFeedAttributeId(dataHolder.dateFeedAttributeId);
dateAttributeValue.setStringValue(date);
attributeValues.add(dateAttributeValue);
feedItem.setAttributeValues(
attributeValues.toArray(new FeedItemAttributeValue[attributeValues.size()]));
// Restrict this item to a single ad group; targeting lives on the item,
// not on the customer feed (which matches everything via IDENTITY).
feedItem.setAdGroupTargeting(new FeedItemAdGroupTargeting(adGroupId));
FeedItemOperation feedItemOperation = new FeedItemOperation();
feedItemOperation.setOperand(feedItem);
feedItemOperation.setOperator(Operator.ADD);
return feedItemOperation;
}
/**
* Creates a CustomerFeed that will associate the data holder's Feed with the ad customizers
* placeholder type.
*/
private static void createCustomerFeed(AdWordsServices adWordsServices, AdWordsSession session,
CustomizersDataHolder dataHolder) throws Exception {
// Get the CustomerFeedService.
CustomerFeedServiceInterface customerFeedService =
adWordsServices.get(session, CustomerFeedServiceInterface.class);
CustomerFeed customerFeed = new CustomerFeed();
customerFeed.setFeedId(dataHolder.customizersFeedId);
customerFeed.setPlaceholderTypes(new int[] {PLACEHOLDER_AD_CUSTOMIZER});
// Create a matching function that will always evaluate to true.
Function customerMatchingFunction = new Function();
ConstantOperand constOperand = new ConstantOperand();
constOperand.setType(ConstantOperandConstantType.BOOLEAN);
constOperand.setBooleanValue(true);
customerMatchingFunction.setLhsOperand(new FunctionArgumentOperand[] {constOperand});
customerMatchingFunction.setOperator(FunctionOperator.IDENTITY);
customerFeed.setMatchingFunction(customerMatchingFunction);
// Create an operation to add the customer feed.
CustomerFeedOperation customerFeedOperation = new CustomerFeedOperation();
customerFeedOperation.setOperand(customerFeed);
customerFeedOperation.setOperator(Operator.ADD);
CustomerFeed addedCustomerFeed =
customerFeedService.mutate(new CustomerFeedOperation[] {customerFeedOperation}).getValue(0);
System.out.printf("Customer feed for feed ID %d was added.%n", addedCustomerFeed.getFeedId());
}
/**
* Creates text ads that use ad customizations for the specified ad group IDs.
*/
private static void createAdsWithCustomizations(AdWordsServices adWordsServices,
AdWordsSession session, List<Long> adGroupIds) throws Exception {
// Get the AdGroupAdService.
AdGroupAdServiceInterface adGroupAdService =
adWordsServices.get(session, AdGroupAdServiceInterface.class);
// The {=CustomizerFeed.X} placeholders are resolved at serving time
// from the feed items created above.
TextAd textAd = new TextAd();
textAd.setHeadline("Luxury Cruise to {=CustomizerFeed.Name}");
textAd.setDescription1("Only {=CustomizerFeed.Price}");
textAd.setDescription2("Offer ends in {=countdown(CustomizerFeed.Date)}!");
textAd.setUrl("http://www.example.com");
textAd.setDisplayUrl("www.example.com");
// We add the same ad to both ad groups. When they serve, they will show different values, since
// they match different feed items.
List<AdGroupAdOperation> adGroupAdOperations = Lists.newArrayList();
for (Long adGroupId : adGroupIds) {
AdGroupAd adGroupAd = new AdGroupAd();
adGroupAd.setAdGroupId(adGroupId);
adGroupAd.setAd(textAd);
AdGroupAdOperation adGroupAdOperation = new AdGroupAdOperation();
adGroupAdOperation.setOperand(adGroupAd);
adGroupAdOperation.setOperator(Operator.ADD);
adGroupAdOperations.add(adGroupAdOperation);
}
AdGroupAdReturnValue adGroupAdReturnValue = adGroupAdService.mutate(
adGroupAdOperations.toArray(new AdGroupAdOperation[adGroupAdOperations.size()]));
for (AdGroupAd addedAd : adGroupAdReturnValue.getValue()) {
System.out.printf("Created an ad with ID %d, type '%s' and status '%s'.%n",
addedAd.getAd().getId(), addedAd.getAd().getAdType(), addedAd.getStatus());
}
}
/**
* A container for metadata related to an ad customizers feed.
*/
private static class CustomizersDataHolder {
// Server-assigned ID of the customizer Feed.
private Long customizersFeedId;
// Server-assigned IDs of the three feed attributes.
private Long nameFeedAttributeId;
private Long priceFeedAttributeId;
private Long dateFeedAttributeId;
// IDs of the feed items created by createCustomizerFeedItems.
private List<Long> customizerFeedItemIds = Lists.newArrayList();
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.admin.cluster.configuration;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateObserver.Listener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.coordination.CoordinationMetadata;
import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion;
import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodes.Builder;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.indices.TestIndexNameExpressionResolver;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction.MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING;
import static org.elasticsearch.cluster.ClusterState.builder;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.sameInstance;
import static org.hamcrest.Matchers.startsWith;
public class TransportAddVotingConfigExclusionsActionTests extends ESTestCase {
// Shared across all tests; created once in the @BeforeClass method.
private static ThreadPool threadPool;
private static ClusterService clusterService;
private static DiscoveryNode localNode, otherNode1, otherNode2, otherDataNode;
private static VotingConfigExclusion localNodeExclusion, otherNode1Exclusion, otherNode2Exclusion;
// Re-created for every test in setupForTest().
private TransportService transportService;
private ClusterStateObserver clusterStateObserver;
private ClusterSettings clusterSettings;
// Effective maximum number of voting config exclusions for this run
// (either a random override or the setting's default).
private int staticMaximum;
@BeforeClass
public static void createThreadPoolAndClusterService() {
// Build three master-eligible nodes (and matching exclusions) plus one
// data-only node; the cluster service runs with localNode as master.
threadPool = new TestThreadPool("test", Settings.EMPTY);
localNode = makeDiscoveryNode("local");
localNodeExclusion = new VotingConfigExclusion(localNode);
otherNode1 = makeDiscoveryNode("other1");
otherNode1Exclusion = new VotingConfigExclusion(otherNode1);
otherNode2 = makeDiscoveryNode("other2");
otherNode2Exclusion = new VotingConfigExclusion(otherNode2);
otherDataNode = new DiscoveryNode("data", "data", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
clusterService = createClusterService(threadPool, localNode);
}
/** Builds a master-eligible test node whose node ID equals its name. */
private static DiscoveryNode makeDiscoveryNode(String name) {
    final Set<DiscoveryNodeRole> masterOnly = Set.of(DiscoveryNodeRole.MASTER_ROLE);
    return new DiscoveryNode(name, name, buildNewFakeTransportAddress(), emptyMap(), masterOnly, Version.CURRENT);
}
@AfterClass
public static void shutdownThreadPoolAndClusterService() {
// Stop the cluster service before the thread pool it runs on.
clusterService.stop();
threadPool.shutdown();
}
@Before
public void setupForTest() {
// Fresh mock transport per test; the action under test registers itself
// with the transport service in its constructor.
final MockTransport transport = new MockTransport();
transportService = transport.createTransportService(Settings.EMPTY, threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundTransportAddress -> localNode, null, emptySet());
// Randomly exercise both the static-setting override path and the
// default value for the maximum-exclusions setting.
final Settings.Builder nodeSettingsBuilder = Settings.builder();
if (randomBoolean()) {
staticMaximum = between(5, 15);
nodeSettingsBuilder.put(MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING.getKey(), staticMaximum);
} else {
staticMaximum = MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING.get(Settings.EMPTY);
}
final Settings nodeSettings = nodeSettingsBuilder.build();
clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
new TransportAddVotingConfigExclusionsAction(nodeSettings, clusterSettings, transportService, clusterService, threadPool,
new ActionFilters(emptySet()), TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext())); // registers action
transportService.start();
transportService.acceptIncomingRequests();
// Seed a cluster state in which all three master nodes are in both the
// last-accepted and last-committed voting configurations.
final VotingConfiguration allNodesConfig = VotingConfiguration.of(localNode, otherNode1, otherNode2);
setState(clusterService, builder(new ClusterName("cluster"))
.nodes(new Builder().add(localNode).add(otherNode1).add(otherNode2).add(otherDataNode)
.localNodeId(localNode.getId()).masterNodeId(localNode.getId()))
.metadata(Metadata.builder()
.coordinationMetadata(CoordinationMetadata.builder().lastAcceptedConfiguration(allNodesConfig)
.lastCommittedConfiguration(allNodesConfig).build())));
clusterStateObserver = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext());
}
// Latch counts 2: one for the observer that reconfigures the voting
// config, one for the request's success callback.
public void testWithdrawsVoteFromANode() throws InterruptedException {
final CountDownLatch countDownLatch = new CountDownLatch(2);
clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions(countDownLatch));
transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, new AddVotingConfigExclusionsRequest("other1"),
expectSuccess(r -> {
assertNotNull(r);
countDownLatch.countDown();
})
);
assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(), contains(otherNode1Exclusion));
}
// Same as above but excluding two nodes in a single request.
public void testWithdrawsVotesFromMultipleNodes() throws InterruptedException {
final CountDownLatch countDownLatch = new CountDownLatch(2);
clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions(countDownLatch));
transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME,
new AddVotingConfigExclusionsRequest("other1", "other2"),
expectSuccess(r -> {
assertNotNull(r);
countDownLatch.countDown();
})
);
assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion));
}
// When the target node is already absent from both voting
// configurations, the action completes without waiting for a
// reconfiguration — hence no observer and a latch of 1.
public void testReturnsImmediatelyIfVoteAlreadyWithdrawn() throws InterruptedException {
final ClusterState state = clusterService.state();
setState(clusterService, builder(state)
.metadata(Metadata.builder(state.metadata())
.coordinationMetadata(CoordinationMetadata.builder(state.coordinationMetadata())
.lastCommittedConfiguration(VotingConfiguration.of(localNode, otherNode2))
.lastAcceptedConfiguration(VotingConfiguration.of(localNode, otherNode2))
.build())));
final CountDownLatch countDownLatch = new CountDownLatch(1);
// no observer to reconfigure
transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, new AddVotingConfigExclusionsRequest("other1"),
expectSuccess(r -> {
assertNotNull(r);
countDownLatch.countDown();
})
);
assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
contains(otherNode1Exclusion));
}
public void testExcludeAbsentNodesByNodeIds() throws InterruptedException {
final CountDownLatch countDownLatch = new CountDownLatch(2);
clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions(countDownLatch));
transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME,
new AddVotingConfigExclusionsRequest(new String[]{"absent_id"},
Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30)),
expectSuccess(e -> countDownLatch.countDown())
);
assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
assertEquals(Set.of(new VotingConfigExclusion("absent_id", VotingConfigExclusion.MISSING_VALUE_MARKER)),
clusterService.getClusterApplierService().state().getVotingConfigExclusions());
}
public void testExcludeExistingNodesByNodeIds() throws InterruptedException {
final CountDownLatch countDownLatch = new CountDownLatch(2);
clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions(countDownLatch));
transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME,
new AddVotingConfigExclusionsRequest(new String[]{"other1", "other2"},
Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30)),
expectSuccess(r -> {
assertNotNull(r);
countDownLatch.countDown();
})
);
assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion));
}
public void testExcludeAbsentNodesByNodeNames() throws InterruptedException {
final CountDownLatch countDownLatch = new CountDownLatch(2);
clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions(countDownLatch));
transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, new AddVotingConfigExclusionsRequest("absent_node"),
expectSuccess(e -> countDownLatch.countDown())
);
assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
assertEquals(Set.of(new VotingConfigExclusion(VotingConfigExclusion.MISSING_VALUE_MARKER, "absent_node")),
clusterService.getClusterApplierService().state().getVotingConfigExclusions());
}
/**
 * Excludes two nodes that exist in the cluster by node name and verifies that both
 * corresponding exclusions appear in the applied cluster state.
 */
public void testExcludeExistingNodesByNodeNames() throws InterruptedException {
    // One count for the transport response, one for the reconfiguration task applied by the listener.
    final CountDownLatch latch = new CountDownLatch(2);
    clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions(latch));

    final AddVotingConfigExclusionsRequest request = new AddVotingConfigExclusionsRequest("other1", "other2");
    transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, request,
        expectSuccess(response -> {
            assertNotNull(response);
            latch.countDown();
        }));

    assertTrue(latch.await(30, TimeUnit.SECONDS));
    assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
        containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion));
}
/**
 * Verifies that a request naming only nodes that are already excluded succeeds and leaves the
 * exclusion set unchanged.
 */
public void testSucceedsEvenIfAllExclusionsAlreadyAdded() throws InterruptedException {
    // Seed the cluster state with the exclusion that the request will (re-)add.
    final ClusterState currentState = clusterService.state();
    final CoordinationMetadata seededCoordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata())
        .addVotingConfigExclusion(otherNode1Exclusion)
        .build();
    setState(clusterService, builder(currentState)
        .metadata(Metadata.builder(currentState.metadata()).coordinationMetadata(seededCoordinationMetadata)));

    final CountDownLatch responseReceived = new CountDownLatch(1);
    transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, new AddVotingConfigExclusionsRequest("other1"),
        expectSuccess(response -> {
            assertNotNull(response);
            responseReceived.countDown();
        }));

    assertTrue(responseReceived.await(30, TimeUnit.SECONDS));
    assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
        contains(otherNode1Exclusion));
}
/**
 * Verifies that a node-id-based request naming only nodes that are already excluded succeeds
 * and leaves the exclusion set unchanged.
 */
public void testExcludeByNodeIdSucceedsEvenIfAllExclusionsAlreadyAdded() throws InterruptedException {
    // Seed the cluster state with the exclusion that the request will (re-)add.
    final ClusterState currentState = clusterService.state();
    final CoordinationMetadata seededCoordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata())
        .addVotingConfigExclusion(otherNode1Exclusion)
        .build();
    setState(clusterService, builder(currentState)
        .metadata(Metadata.builder(currentState.metadata()).coordinationMetadata(seededCoordinationMetadata)));

    final CountDownLatch responseReceived = new CountDownLatch(1);
    final AddVotingConfigExclusionsRequest request = new AddVotingConfigExclusionsRequest(
        new String[]{"other1"}, Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30));
    transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, request,
        expectSuccess(response -> {
            assertNotNull(response);
            responseReceived.countDown();
        }));

    assertTrue(responseReceived.await(30, TimeUnit.SECONDS));
    assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
        contains(otherNode1Exclusion));
}
/**
 * Verifies that a node-name-based request naming only nodes that are already excluded succeeds
 * and leaves the exclusion set unchanged.
 */
public void testExcludeByNodeNameSucceedsEvenIfAllExclusionsAlreadyAdded() throws InterruptedException {
    // Seed the cluster state with the exclusion that the request will (re-)add.
    final ClusterState currentState = clusterService.state();
    final CoordinationMetadata seededCoordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata())
        .addVotingConfigExclusion(otherNode1Exclusion)
        .build();
    setState(clusterService, builder(currentState)
        .metadata(Metadata.builder(currentState.metadata()).coordinationMetadata(seededCoordinationMetadata)));

    final CountDownLatch responseReceived = new CountDownLatch(1);
    transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, new AddVotingConfigExclusionsRequest("other1"),
        expectSuccess(response -> {
            assertNotNull(response);
            responseReceived.countDown();
        }));

    assertTrue(responseReceived.await(30, TimeUnit.SECONDS));
    assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
        contains(otherNode1Exclusion));
}
/**
 * Verifies that a request which would push the exclusion count past
 * {@code cluster.max_voting_config_exclusions} is rejected with an
 * {@link IllegalArgumentException} whose message reports the existing count, the number of new
 * exclusions, and the configured maximum. The maximum itself is randomized between the static
 * default and a dynamically-applied setting, and the existing/new split is also randomized.
 */
public void testReturnsErrorIfMaximumExclusionCountExceeded() throws InterruptedException {
    final Metadata.Builder metadataBuilder = Metadata.builder(clusterService.state().metadata());
    CoordinationMetadata.Builder coordinationMetadataBuilder =
        CoordinationMetadata.builder(clusterService.state().coordinationMetadata())
            .addVotingConfigExclusion(localNodeExclusion);
    // Either use the statically-configured maximum or apply a fresh one dynamically.
    final int actualMaximum;
    if (randomBoolean()) {
        actualMaximum = staticMaximum;
    } else {
        actualMaximum = between(2, 15);
        clusterSettings.applySettings(Settings.builder().put(clusterService.state().metadata().persistentSettings())
            .put(MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING.getKey(), actualMaximum).build());
    }
    // Pad with random exclusions so that, together with localNodeExclusion, there are
    // actualMaximum - 1 entries so far (loop starts at 2 because one entry already exists).
    for (int i = 2; i < actualMaximum; i++) {
        coordinationMetadataBuilder.addVotingConfigExclusion(
            new VotingConfigExclusion(randomAlphaOfLength(10), randomAlphaOfLength(10)));
    }
    // Randomly choose between "already at the maximum, one genuinely new exclusion" and
    // "one below the maximum, two genuinely new exclusions" — both must overflow.
    final int existingCount, newCount;
    if (randomBoolean()) {
        coordinationMetadataBuilder.addVotingConfigExclusion(otherNode1Exclusion);
        existingCount = actualMaximum;
        newCount = 1;
    } else {
        existingCount = actualMaximum - 1;
        newCount = 2;
    }
    metadataBuilder.coordinationMetadata(coordinationMetadataBuilder.build());
    final ClusterState.Builder builder = builder(clusterService.state()).metadata(metadataBuilder);
    setState(clusterService, builder);
    final CountDownLatch countDownLatch = new CountDownLatch(1);
    final SetOnce<TransportException> exceptionHolder = new SetOnce<>();
    transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME,
        new AddVotingConfigExclusionsRequest("other1", "other2"),
        expectError(e -> {
            exceptionHolder.set(e);
            countDownLatch.countDown();
        })
    );
    assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
    // The exact message is asserted, including existing/new/maximum counts.
    final Throwable rootCause = exceptionHolder.get().getRootCause();
    assertThat(rootCause, instanceOf(IllegalArgumentException.class));
    assertThat(rootCause.getMessage(), equalTo("add voting config exclusions request for nodes named [other1, other2] would add ["
        + newCount + "] exclusions to the existing [" + existingCount +
        "] which would exceed the maximum of [" + actualMaximum + "] set by [cluster.max_voting_config_exclusions]"));
}
/**
 * Verifies that an exclusion request with a very short timeout fails with an
 * {@code ElasticsearchTimeoutException} naming the node it was waiting for.
 */
public void testTimesOut() throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final SetOnce<TransportException> caughtException = new SetOnce<>();

    final AddVotingConfigExclusionsRequest request =
        new AddVotingConfigExclusionsRequest(Strings.EMPTY_ARRAY, new String[]{"other1"}, TimeValue.timeValueMillis(100));
    transportService.sendRequest(localNode, AddVotingConfigExclusionsAction.NAME, request,
        expectError(exception -> {
            caughtException.set(exception);
            latch.countDown();
        }));

    assertTrue(latch.await(30, TimeUnit.SECONDS));
    final Throwable cause = caughtException.get().getRootCause();
    assertThat(cause, instanceOf(ElasticsearchTimeoutException.class));
    assertThat(cause.getMessage(), startsWith("timed out waiting for voting config exclusions [{other1}"));
}
/**
 * Builds a handler that passes a successful (empty) response to {@code onResponse} and fails
 * the test with an {@link AssertionError} if the request results in a transport exception.
 *
 * @param onResponse callback invoked with the empty response on success
 */
private TransportResponseHandler<ActionResponse.Empty> expectSuccess(Consumer<ActionResponse.Empty> onResponse) {
    return responseHandler(onResponse, exception -> {
        throw new AssertionError("unexpected", exception);
    });
}
/**
 * Builds a handler that passes a transport exception to {@code onException} and fails the test
 * if the request unexpectedly succeeds.
 *
 * @param onException callback invoked with the transport exception on failure
 */
private TransportResponseHandler<ActionResponse.Empty> expectError(Consumer<TransportException> onException) {
    return responseHandler(unexpectedResponse -> {
        assert false : unexpectedResponse;
    }, onException);
}
/**
 * Creates a {@link TransportResponseHandler} that simply dispatches to the given callbacks.
 *
 * @param onResponse  invoked with the (empty) response when the request succeeds
 * @param onException invoked with the transport exception when the request fails
 */
private TransportResponseHandler<ActionResponse.Empty> responseHandler(
    Consumer<ActionResponse.Empty> onResponse, Consumer<TransportException> onException) {
    return new TransportResponseHandler<>() {
        @Override
        public void handleResponse(ActionResponse.Empty response) {
            onResponse.accept(response);
        }
        @Override
        public void handleException(TransportException exp) {
            onException.accept(exp);
        }
        @Override
        public ActionResponse.Empty read(StreamInput in) {
            // The response carries no state, so the shared singleton is returned
            // without reading anything from the stream.
            return ActionResponse.Empty.INSTANCE;
        }
    };
}
/**
 * Cluster-state listener that, once the voting config exclusions have been added, submits a
 * "reconfiguration" task which removes the excluded nodes from the last-accepted and
 * last-committed voting configurations. The supplied latch is counted down when that
 * reconfiguration has been applied, letting the tests wait for the full two-step sequence.
 */
private static class AdjustConfigurationForExclusions implements Listener {
    final CountDownLatch doneLatch;
    AdjustConfigurationForExclusions(CountDownLatch latch) {
        this.doneLatch = latch;
    }
    @Override
    public void onNewClusterState(ClusterState state) {
        clusterService.getMasterService().submitStateUpdateTask("reconfiguration", new ClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) {
                // No other task may have run in between: the task must see exactly the
                // state that triggered this listener.
                assertThat(currentState, sameInstance(state));
                // Start from the ids of all nodes, then drop those that are now excluded.
                final Set<String> votingNodeIds = new HashSet<>();
                currentState.nodes().forEach(n -> votingNodeIds.add(n.getId()));
                currentState.getVotingConfigExclusions().forEach(t -> votingNodeIds.remove(t.getNodeId()));
                final VotingConfiguration votingConfiguration = new VotingConfiguration(votingNodeIds);
                // Install the shrunken configuration as both last-accepted and last-committed.
                return builder(currentState)
                    .metadata(Metadata.builder(currentState.metadata())
                        .coordinationMetadata(CoordinationMetadata.builder(currentState.coordinationMetadata())
                            .lastAcceptedConfiguration(votingConfiguration)
                            .lastCommittedConfiguration(votingConfiguration)
                            .build()))
                    .build();
            }
            @Override
            public void onFailure(String source, Exception e) {
                throw new AssertionError("unexpected failure", e);
            }
            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                // Signal the waiting test only after the reconfigured state has been applied.
                doneLatch.countDown();
            }
        });
    }
    @Override
    public void onClusterServiceClose() {
        throw new AssertionError("unexpected close");
    }
    @Override
    public void onTimeout(TimeValue timeout) {
        throw new AssertionError("unexpected timeout");
    }
}
}
| |
/*
* Copyright 2002-2014 Drew Noakes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.metadata.exif;
import com.drew.lang.annotations.NotNull;
import com.drew.metadata.Directory;
import java.util.HashMap;
/**
* Describes Exif tags from the SubIFD directory.
*
* @author Drew Noakes https://drewnoakes.com
*/
public class ExifSubIFDDirectory extends Directory
{
/**
* The actual aperture value of lens when the image was taken. Unit is APEX.
* To convert this value to ordinary F-number (F-stop), calculate this value's
* power of root 2 (=1.4142). For example, if the ApertureValue is '5',
* F-number is 1.4142^5 = F5.6.
*/
public static final int TAG_APERTURE = 0x9202;
/**
* When image format is no compression, this value shows the number of bits
* per component for each pixel. Usually this value is '8,8,8'.
*/
public static final int TAG_BITS_PER_SAMPLE = 0x0102;
/**
* Shows the color space of the image data components.
* 0 = WhiteIsZero
* 1 = BlackIsZero
* 2 = RGB
* 3 = RGB Palette
* 4 = Transparency Mask
* 5 = CMYK
* 6 = YCbCr
* 8 = CIELab
* 9 = ICCLab
* 10 = ITULab
* 32803 = Color Filter Array
* 32844 = Pixar LogL
* 32845 = Pixar LogLuv
* 34892 = Linear Raw
*/
public static final int TAG_PHOTOMETRIC_INTERPRETATION = 0x0106;
/**
* 1 = No dithering or halftoning
* 2 = Ordered dither or halftone
* 3 = Randomized dither
*/
public static final int TAG_THRESHOLDING = 0x0107;
/**
* 1 = Normal
* 2 = Reversed
*/
public static final int TAG_FILL_ORDER = 0x010A;
public static final int TAG_DOCUMENT_NAME = 0x010D;
/** The position in the file of raster data. */
public static final int TAG_STRIP_OFFSETS = 0x0111;
/** Each pixel is composed of this many samples. */
public static final int TAG_SAMPLES_PER_PIXEL = 0x0115;
/** The raster is codified by a single block of data holding this many rows. */
public static final int TAG_ROWS_PER_STRIP = 0x116;
/** The size of the raster data in bytes. */
public static final int TAG_STRIP_BYTE_COUNTS = 0x0117;
public static final int TAG_MIN_SAMPLE_VALUE = 0x0118;
public static final int TAG_MAX_SAMPLE_VALUE = 0x0119;
/**
* When image format is no compression YCbCr, this value shows byte aligns of
* YCbCr data. If value is '1', Y/Cb/Cr value is chunky format, contiguous for
* each subsampling pixel. If value is '2', Y/Cb/Cr value is separated and
* stored to Y plane/Cb plane/Cr plane format.
*/
public static final int TAG_PLANAR_CONFIGURATION = 0x011C;
public static final int TAG_YCBCR_SUBSAMPLING = 0x0212;
/**
* The new subfile type tag.
* 0 = Full-resolution Image
* 1 = Reduced-resolution image
* 2 = Single page of multi-page image
* 3 = Single page of multi-page reduced-resolution image
* 4 = Transparency mask
* 5 = Transparency mask of reduced-resolution image
* 6 = Transparency mask of multi-page image
* 7 = Transparency mask of reduced-resolution multi-page image
*/
public static final int TAG_NEW_SUBFILE_TYPE = 0x00FE;
/**
* The old subfile type tag.
* 1 = Full-resolution image (Main image)
* 2 = Reduced-resolution image (Thumbnail)
* 3 = Single page of multi-page image
*/
public static final int TAG_SUBFILE_TYPE = 0x00FF;
public static final int TAG_TRANSFER_FUNCTION = 0x012D;
public static final int TAG_PREDICTOR = 0x013D;
public static final int TAG_TILE_WIDTH = 0x0142;
public static final int TAG_TILE_LENGTH = 0x0143;
public static final int TAG_TILE_OFFSETS = 0x0144;
public static final int TAG_TILE_BYTE_COUNTS = 0x0145;
public static final int TAG_JPEG_TABLES = 0x015B;
public static final int TAG_CFA_REPEAT_PATTERN_DIM = 0x828D;
/** There are two definitions for CFA pattern, I don't know the difference... */
public static final int TAG_CFA_PATTERN_2 = 0x828E;
public static final int TAG_BATTERY_LEVEL = 0x828F;
public static final int TAG_IPTC_NAA = 0x83BB;
public static final int TAG_INTER_COLOR_PROFILE = 0x8773;
public static final int TAG_SPECTRAL_SENSITIVITY = 0x8824;
/**
* Indicates the Opto-Electric Conversion Function (OECF) specified in ISO 14524.
* <p/>
* OECF is the relationship between the camera optical input and the image values.
* <p/>
* The values are:
* <ul>
* <li>Two shorts, indicating respectively number of columns, and number of rows.</li>
* <li>For each column, the column name in a null-terminated ASCII string.</li>
* <li>For each cell, an SRATIONAL value.</li>
* </ul>
*/
public static final int TAG_OPTO_ELECTRIC_CONVERSION_FUNCTION = 0x8828;
public static final int TAG_INTERLACE = 0x8829;
public static final int TAG_TIME_ZONE_OFFSET = 0x882A;
public static final int TAG_SELF_TIMER_MODE = 0x882B;
public static final int TAG_FLASH_ENERGY = 0x920B;
public static final int TAG_SPATIAL_FREQ_RESPONSE = 0x920C;
public static final int TAG_NOISE = 0x920D;
public static final int TAG_IMAGE_NUMBER = 0x9211;
public static final int TAG_SECURITY_CLASSIFICATION = 0x9212;
public static final int TAG_IMAGE_HISTORY = 0x9213;
public static final int TAG_SUBJECT_LOCATION = 0x9214;
/** There are two definitions for exposure index, I don't know the difference... */
public static final int TAG_EXPOSURE_INDEX_2 = 0x9215;
public static final int TAG_TIFF_EP_STANDARD_ID = 0x9216;
public static final int TAG_FLASH_ENERGY_2 = 0xA20B;
public static final int TAG_SPATIAL_FREQ_RESPONSE_2 = 0xA20C;
public static final int TAG_SUBJECT_LOCATION_2 = 0xA214;
public static final int TAG_PAGE_NAME = 0x011D;
/**
* Exposure time (reciprocal of shutter speed). Unit is second.
*/
public static final int TAG_EXPOSURE_TIME = 0x829A;
/**
* The actual F-number(F-stop) of lens when the image was taken.
*/
public static final int TAG_FNUMBER = 0x829D;
/**
* Exposure program that the camera used when image was taken. '1' means
* manual control, '2' program normal, '3' aperture priority, '4' shutter
* priority, '5' program creative (slow program), '6' program action
* (high-speed program), '7' portrait mode, '8' landscape mode.
*/
public static final int TAG_EXPOSURE_PROGRAM = 0x8822;
public static final int TAG_ISO_EQUIVALENT = 0x8827;
public static final int TAG_EXIF_VERSION = 0x9000;
public static final int TAG_DATETIME_ORIGINAL = 0x9003;
public static final int TAG_DATETIME_DIGITIZED = 0x9004;
public static final int TAG_COMPONENTS_CONFIGURATION = 0x9101;
/**
* Average (rough estimate) compression level in JPEG bits per pixel.
* */
public static final int TAG_COMPRESSED_AVERAGE_BITS_PER_PIXEL = 0x9102;
/**
* Shutter speed by APEX value. To convert this value to ordinary 'Shutter Speed';
* calculate this value's power of 2, then reciprocal. For example, if the
* ShutterSpeedValue is '4', shutter speed is 1/(24)=1/16 second.
*/
public static final int TAG_SHUTTER_SPEED = 0x9201;
public static final int TAG_BRIGHTNESS_VALUE = 0x9203;
public static final int TAG_EXPOSURE_BIAS = 0x9204;
/**
* Maximum aperture value of lens. You can convert to F-number by calculating
* power of root 2 (same process of ApertureValue:0x9202).
* The actual aperture value of lens when the image was taken. To convert this
* value to ordinary f-number(f-stop), calculate the value's power of root 2
* (=1.4142). For example, if the ApertureValue is '5', f-number is 1.41425^5 = F5.6.
*/
public static final int TAG_MAX_APERTURE = 0x9205;
/**
* Indicates the distance the autofocus camera is focused to. Tends to be less accurate as distance increases.
*/
public static final int TAG_SUBJECT_DISTANCE = 0x9206;
/**
* Exposure metering method. '0' means unknown, '1' average, '2' center
* weighted average, '3' spot, '4' multi-spot, '5' multi-segment, '6' partial,
* '255' other.
*/
public static final int TAG_METERING_MODE = 0x9207;
public static final int TAG_LIGHT_SOURCE = 0x9208;
/**
* White balance (aka light source). '0' means unknown, '1' daylight,
* '2' fluorescent, '3' tungsten, '10' flash, '17' standard light A,
* '18' standard light B, '19' standard light C, '20' D55, '21' D65,
* '22' D75, '255' other.
*/
public static final int TAG_WHITE_BALANCE = 0x9208;
/**
* 0x0 = 0000000 = No Flash
* 0x1 = 0000001 = Fired
* 0x5 = 0000101 = Fired, Return not detected
* 0x7 = 0000111 = Fired, Return detected
* 0x9 = 0001001 = On
* 0xd = 0001101 = On, Return not detected
* 0xf = 0001111 = On, Return detected
* 0x10 = 0010000 = Off
* 0x18 = 0011000 = Auto, Did not fire
* 0x19 = 0011001 = Auto, Fired
* 0x1d = 0011101 = Auto, Fired, Return not detected
* 0x1f = 0011111 = Auto, Fired, Return detected
* 0x20 = 0100000 = No flash function
* 0x41 = 1000001 = Fired, Red-eye reduction
* 0x45 = 1000101 = Fired, Red-eye reduction, Return not detected
* 0x47 = 1000111 = Fired, Red-eye reduction, Return detected
* 0x49 = 1001001 = On, Red-eye reduction
* 0x4d = 1001101 = On, Red-eye reduction, Return not detected
* 0x4f = 1001111 = On, Red-eye reduction, Return detected
* 0x59 = 1011001 = Auto, Fired, Red-eye reduction
* 0x5d = 1011101 = Auto, Fired, Red-eye reduction, Return not detected
* 0x5f = 1011111 = Auto, Fired, Red-eye reduction, Return detected
* 6543210 (positions)
*
* This is a bitmask.
* 0 = flash fired
* 1 = return detected
* 2 = return able to be detected
* 3 = unknown
* 4 = auto used
* 5 = unknown
* 6 = red eye reduction used
*/
public static final int TAG_FLASH = 0x9209;
/**
* Focal length of lens used to take image. Unit is millimeter.
* Nice digital cameras actually save the focal length as a function of how far they are zoomed in.
*/
public static final int TAG_FOCAL_LENGTH = 0x920A;
/**
* This tag holds the Exif Makernote. Makernotes are free to be in any format, though they are often IFDs.
* To determine the format, we consider the starting bytes of the makernote itself and sometimes the
* camera model and make.
* <p/>
* The component count for this tag includes all of the bytes needed for the makernote.
*/
public static final int TAG_MAKERNOTE = 0x927C;
public static final int TAG_USER_COMMENT = 0x9286;
public static final int TAG_SUBSECOND_TIME = 0x9290;
public static final int TAG_SUBSECOND_TIME_ORIGINAL = 0x9291;
public static final int TAG_SUBSECOND_TIME_DIGITIZED = 0x9292;
public static final int TAG_FLASHPIX_VERSION = 0xA000;
/**
* Defines Color Space. DCF image must use sRGB color space so value is
* always '1'. If the picture uses the other color space, value is
* '65535':Uncalibrated.
*/
public static final int TAG_COLOR_SPACE = 0xA001;
public static final int TAG_EXIF_IMAGE_WIDTH = 0xA002;
public static final int TAG_EXIF_IMAGE_HEIGHT = 0xA003;
public static final int TAG_RELATED_SOUND_FILE = 0xA004;
/** This tag is a pointer to the Exif Interop IFD. */
public static final int TAG_INTEROP_OFFSET = 0xA005;
public static final int TAG_FOCAL_PLANE_X_RESOLUTION = 0xA20E;
public static final int TAG_FOCAL_PLANE_Y_RESOLUTION = 0xA20F;
/**
* Unit of FocalPlaneXResolution/FocalPlaneYResolution. '1' means no-unit,
* '2' inch, '3' centimeter.
*
* Note: Some of Fujifilm's digicam(e.g.FX2700,FX2900,Finepix4700Z/40i etc)
* uses value '3' so it must be 'centimeter', but it seems that they use a
* '8.3mm?'(1/3in.?) to their ResolutionUnit. Fuji's BUG? Finepix4900Z has
* been changed to use value '2' but it doesn't match to actual value also.
*/
public static final int TAG_FOCAL_PLANE_RESOLUTION_UNIT = 0xA210;
public static final int TAG_EXPOSURE_INDEX = 0xA215;
public static final int TAG_SENSING_METHOD = 0xA217;
public static final int TAG_FILE_SOURCE = 0xA300;
public static final int TAG_SCENE_TYPE = 0xA301;
public static final int TAG_CFA_PATTERN = 0xA302;
// these tags new with Exif 2.2 (?) [A401 - A4
/**
* This tag indicates the use of special processing on image data, such as rendering
* geared to output. When special processing is performed, the reader is expected to
* disable or minimize any further processing.
* Tag = 41985 (A401.H)
* Type = SHORT
* Count = 1
* Default = 0
* 0 = Normal process
* 1 = Custom process
* Other = reserved
*/
public static final int TAG_CUSTOM_RENDERED = 0xA401;
/**
* This tag indicates the exposure mode set when the image was shot. In auto-bracketing
* mode, the camera shoots a series of frames of the same scene at different exposure settings.
* Tag = 41986 (A402.H)
* Type = SHORT
* Count = 1
* Default = none
* 0 = Auto exposure
* 1 = Manual exposure
* 2 = Auto bracket
* Other = reserved
*/
public static final int TAG_EXPOSURE_MODE = 0xA402;
/**
* This tag indicates the white balance mode set when the image was shot.
* Tag = 41987 (A403.H)
* Type = SHORT
* Count = 1
* Default = none
* 0 = Auto white balance
* 1 = Manual white balance
* Other = reserved
*/
public static final int TAG_WHITE_BALANCE_MODE = 0xA403;
/**
* This tag indicates the digital zoom ratio when the image was shot. If the
* numerator of the recorded value is 0, this indicates that digital zoom was
* not used.
* Tag = 41988 (A404.H)
* Type = RATIONAL
* Count = 1
* Default = none
*/
public static final int TAG_DIGITAL_ZOOM_RATIO = 0xA404;
/**
* This tag indicates the equivalent focal length assuming a 35mm film camera,
* in mm. A value of 0 means the focal length is unknown. Note that this tag
* differs from the FocalLength tag.
* Tag = 41989 (A405.H)
* Type = SHORT
* Count = 1
* Default = none
*/
public static final int TAG_35MM_FILM_EQUIV_FOCAL_LENGTH = 0xA405;
/**
* This tag indicates the type of scene that was shot. It can also be used to
* record the mode in which the image was shot. Note that this differs from
* the scene type (SceneType) tag.
* Tag = 41990 (A406.H)
* Type = SHORT
* Count = 1
* Default = 0
* 0 = Standard
* 1 = Landscape
* 2 = Portrait
* 3 = Night scene
* Other = reserved
*/
public static final int TAG_SCENE_CAPTURE_TYPE = 0xA406;
/**
* This tag indicates the degree of overall image gain adjustment.
* Tag = 41991 (A407.H)
* Type = SHORT
* Count = 1
* Default = none
* 0 = None
* 1 = Low gain up
* 2 = High gain up
* 3 = Low gain down
* 4 = High gain down
* Other = reserved
*/
public static final int TAG_GAIN_CONTROL = 0xA407;
/**
* This tag indicates the direction of contrast processing applied by the camera
* when the image was shot.
* Tag = 41992 (A408.H)
* Type = SHORT
* Count = 1
* Default = 0
* 0 = Normal
* 1 = Soft
* 2 = Hard
* Other = reserved
*/
public static final int TAG_CONTRAST = 0xA408;
/**
* This tag indicates the direction of saturation processing applied by the camera
* when the image was shot.
* Tag = 41993 (A409.H)
* Type = SHORT
* Count = 1
* Default = 0
* 0 = Normal
* 1 = Low saturation
* 2 = High saturation
* Other = reserved
*/
public static final int TAG_SATURATION = 0xA409;
/**
* This tag indicates the direction of sharpness processing applied by the camera
* when the image was shot.
* Tag = 41994 (A40A.H)
* Type = SHORT
* Count = 1
* Default = 0
* 0 = Normal
* 1 = Soft
* 2 = Hard
* Other = reserved
*/
public static final int TAG_SHARPNESS = 0xA40A;
// TODO support this tag (I haven't seen a camera's actual implementation of this yet)
/**
* This tag indicates information on the picture-taking conditions of a particular
* camera model. The tag is used only to indicate the picture-taking conditions in
* the reader.
* Tag = 41995 (A40B.H)
* Type = UNDEFINED
* Count = Any
* Default = none
*
* The information is recorded in the format shown below. The data is recorded
* in Unicode using SHORT type for the number of display rows and columns and
* UNDEFINED type for the camera settings. The Unicode (UCS-2) string including
* Signature is NULL terminated. The specifics of the Unicode string are as given
* in ISO/IEC 10464-1.
*
* Length Type Meaning
* ------+-----------+------------------
* 2 SHORT Display columns
* 2 SHORT Display rows
* Any UNDEFINED Camera setting-1
* Any UNDEFINED Camera setting-2
* : : :
* Any UNDEFINED Camera setting-n
*/
public static final int TAG_DEVICE_SETTING_DESCRIPTION = 0xA40B;
/**
* This tag indicates the distance to the subject.
* Tag = 41996 (A40C.H)
* Type = SHORT
* Count = 1
* Default = none
* 0 = unknown
* 1 = Macro
* 2 = Close view
* 3 = Distant view
* Other = reserved
*/
public static final int TAG_SUBJECT_DISTANCE_RANGE = 0xA40C;
/**
* This tag indicates an identifier assigned uniquely to each image. It is
* recorded as an ASCII string equivalent to hexadecimal notation and 128-bit
* fixed length.
* Tag = 42016 (A420.H)
* Type = ASCII
* Count = 33
* Default = none
*/
public static final int TAG_IMAGE_UNIQUE_ID = 0xA420;
/** String. */
public static final int TAG_CAMERA_OWNER_NAME = 0xA430;
/** String. */
public static final int TAG_BODY_SERIAL_NUMBER = 0xA431;
/** An array of four Rational64u numbers giving focal and aperture ranges. */
public static final int TAG_LENS_SPECIFICATION = 0xA432;
/** String. */
public static final int TAG_LENS_MAKE = 0xA433;
/** String. */
public static final int TAG_LENS_MODEL = 0xA434;
/** String. */
public static final int TAG_LENS_SERIAL_NUMBER = 0xA435;
/** Rational64u. */
public static final int TAG_GAMMA = 0xA500;
public static final int TAG_LENS = 0xFDEA;
@NotNull
protected static final HashMap<Integer, String> _tagNameMap = new HashMap<Integer, String>();
static
{
_tagNameMap.put(TAG_FILL_ORDER, "Fill Order");
_tagNameMap.put(TAG_DOCUMENT_NAME, "Document Name");
// TODO why don't these tags have fields associated with them?
_tagNameMap.put(0x1000, "Related Image File Format");
_tagNameMap.put(0x1001, "Related Image Width");
_tagNameMap.put(0x1002, "Related Image Length");
_tagNameMap.put(0x0156, "Transfer Range");
_tagNameMap.put(0x0200, "JPEG Proc");
_tagNameMap.put(TAG_COMPRESSED_AVERAGE_BITS_PER_PIXEL, "Compressed Bits Per Pixel");
_tagNameMap.put(TAG_MAKERNOTE, "Makernote");
_tagNameMap.put(TAG_INTEROP_OFFSET, "Interoperability Offset");
_tagNameMap.put(TAG_NEW_SUBFILE_TYPE, "New Subfile Type");
_tagNameMap.put(TAG_SUBFILE_TYPE, "Subfile Type");
_tagNameMap.put(TAG_BITS_PER_SAMPLE, "Bits Per Sample");
_tagNameMap.put(TAG_PHOTOMETRIC_INTERPRETATION, "Photometric Interpretation");
_tagNameMap.put(TAG_THRESHOLDING, "Thresholding");
_tagNameMap.put(TAG_STRIP_OFFSETS, "Strip Offsets");
_tagNameMap.put(TAG_SAMPLES_PER_PIXEL, "Samples Per Pixel");
_tagNameMap.put(TAG_ROWS_PER_STRIP, "Rows Per Strip");
_tagNameMap.put(TAG_STRIP_BYTE_COUNTS, "Strip Byte Counts");
_tagNameMap.put(TAG_PAGE_NAME, "Page Name");
_tagNameMap.put(TAG_PLANAR_CONFIGURATION, "Planar Configuration");
_tagNameMap.put(TAG_TRANSFER_FUNCTION, "Transfer Function");
_tagNameMap.put(TAG_PREDICTOR, "Predictor");
_tagNameMap.put(TAG_TILE_WIDTH, "Tile Width");
_tagNameMap.put(TAG_TILE_LENGTH, "Tile Length");
_tagNameMap.put(TAG_TILE_OFFSETS, "Tile Offsets");
_tagNameMap.put(TAG_TILE_BYTE_COUNTS, "Tile Byte Counts");
_tagNameMap.put(TAG_JPEG_TABLES, "JPEG Tables");
_tagNameMap.put(TAG_YCBCR_SUBSAMPLING, "YCbCr Sub-Sampling");
_tagNameMap.put(TAG_CFA_REPEAT_PATTERN_DIM, "CFA Repeat Pattern Dim");
_tagNameMap.put(TAG_CFA_PATTERN_2, "CFA Pattern");
_tagNameMap.put(TAG_BATTERY_LEVEL, "Battery Level");
_tagNameMap.put(TAG_EXPOSURE_TIME, "Exposure Time");
_tagNameMap.put(TAG_FNUMBER, "F-Number");
_tagNameMap.put(TAG_IPTC_NAA, "IPTC/NAA");
_tagNameMap.put(TAG_INTER_COLOR_PROFILE, "Inter Color Profile");
_tagNameMap.put(TAG_EXPOSURE_PROGRAM, "Exposure Program");
_tagNameMap.put(TAG_SPECTRAL_SENSITIVITY, "Spectral Sensitivity");
_tagNameMap.put(TAG_ISO_EQUIVALENT, "ISO Speed Ratings");
_tagNameMap.put(TAG_OPTO_ELECTRIC_CONVERSION_FUNCTION, "Opto-electric Conversion Function (OECF)");
_tagNameMap.put(TAG_INTERLACE, "Interlace");
_tagNameMap.put(TAG_TIME_ZONE_OFFSET, "Time Zone Offset");
_tagNameMap.put(TAG_SELF_TIMER_MODE, "Self Timer Mode");
_tagNameMap.put(TAG_EXIF_VERSION, "Exif Version");
_tagNameMap.put(TAG_DATETIME_ORIGINAL, "Date/Time Original");
_tagNameMap.put(TAG_DATETIME_DIGITIZED, "Date/Time Digitized");
_tagNameMap.put(TAG_COMPONENTS_CONFIGURATION, "Components Configuration");
_tagNameMap.put(TAG_SHUTTER_SPEED, "Shutter Speed Value");
_tagNameMap.put(TAG_APERTURE, "Aperture Value");
_tagNameMap.put(TAG_BRIGHTNESS_VALUE, "Brightness Value");
_tagNameMap.put(TAG_EXPOSURE_BIAS, "Exposure Bias Value");
_tagNameMap.put(TAG_MAX_APERTURE, "Max Aperture Value");
_tagNameMap.put(TAG_SUBJECT_DISTANCE, "Subject Distance");
_tagNameMap.put(TAG_METERING_MODE, "Metering Mode");
_tagNameMap.put(TAG_LIGHT_SOURCE, "Light Source");
_tagNameMap.put(TAG_WHITE_BALANCE, "White Balance");
_tagNameMap.put(TAG_FLASH, "Flash");
_tagNameMap.put(TAG_FOCAL_LENGTH, "Focal Length");
_tagNameMap.put(TAG_FLASH_ENERGY, "Flash Energy");
_tagNameMap.put(TAG_SPATIAL_FREQ_RESPONSE, "Spatial Frequency Response");
_tagNameMap.put(TAG_NOISE, "Noise");
_tagNameMap.put(TAG_IMAGE_NUMBER, "Image Number");
_tagNameMap.put(TAG_SECURITY_CLASSIFICATION, "Security Classification");
_tagNameMap.put(TAG_IMAGE_HISTORY, "Image History");
_tagNameMap.put(TAG_SUBJECT_LOCATION, "Subject Location");
_tagNameMap.put(TAG_EXPOSURE_INDEX, "Exposure Index");
_tagNameMap.put(TAG_TIFF_EP_STANDARD_ID, "TIFF/EP Standard ID");
_tagNameMap.put(TAG_USER_COMMENT, "User Comment");
_tagNameMap.put(TAG_SUBSECOND_TIME, "Sub-Sec Time");
_tagNameMap.put(TAG_SUBSECOND_TIME_ORIGINAL, "Sub-Sec Time Original");
_tagNameMap.put(TAG_SUBSECOND_TIME_DIGITIZED, "Sub-Sec Time Digitized");
_tagNameMap.put(TAG_FLASHPIX_VERSION, "FlashPix Version");
_tagNameMap.put(TAG_COLOR_SPACE, "Color Space");
_tagNameMap.put(TAG_EXIF_IMAGE_WIDTH, "Exif Image Width");
_tagNameMap.put(TAG_EXIF_IMAGE_HEIGHT, "Exif Image Height");
_tagNameMap.put(TAG_RELATED_SOUND_FILE, "Related Sound File");
// 0x920B in TIFF/EP
_tagNameMap.put(TAG_FLASH_ENERGY_2, "Flash Energy");
// 0x920C in TIFF/EP
_tagNameMap.put(TAG_SPATIAL_FREQ_RESPONSE_2, "Spatial Frequency Response");
// 0x920E in TIFF/EP
_tagNameMap.put(TAG_FOCAL_PLANE_X_RESOLUTION, "Focal Plane X Resolution");
// 0x920F in TIFF/EP
_tagNameMap.put(TAG_FOCAL_PLANE_Y_RESOLUTION, "Focal Plane Y Resolution");
// 0x9210 in TIFF/EP
_tagNameMap.put(TAG_FOCAL_PLANE_RESOLUTION_UNIT, "Focal Plane Resolution Unit");
// 0x9214 in TIFF/EP
_tagNameMap.put(TAG_SUBJECT_LOCATION_2, "Subject Location");
// 0x9215 in TIFF/EP
_tagNameMap.put(TAG_EXPOSURE_INDEX_2, "Exposure Index");
// 0x9217 in TIFF/EP
_tagNameMap.put(TAG_SENSING_METHOD, "Sensing Method");
_tagNameMap.put(TAG_FILE_SOURCE, "File Source");
_tagNameMap.put(TAG_SCENE_TYPE, "Scene Type");
_tagNameMap.put(TAG_CFA_PATTERN, "CFA Pattern");
_tagNameMap.put(TAG_CUSTOM_RENDERED, "Custom Rendered");
_tagNameMap.put(TAG_EXPOSURE_MODE, "Exposure Mode");
_tagNameMap.put(TAG_WHITE_BALANCE_MODE, "White Balance Mode");
_tagNameMap.put(TAG_DIGITAL_ZOOM_RATIO, "Digital Zoom Ratio");
_tagNameMap.put(TAG_35MM_FILM_EQUIV_FOCAL_LENGTH, "Focal Length 35");
_tagNameMap.put(TAG_SCENE_CAPTURE_TYPE, "Scene Capture Type");
_tagNameMap.put(TAG_GAIN_CONTROL, "Gain Control");
_tagNameMap.put(TAG_CONTRAST, "Contrast");
_tagNameMap.put(TAG_SATURATION, "Saturation");
_tagNameMap.put(TAG_SHARPNESS, "Sharpness");
_tagNameMap.put(TAG_DEVICE_SETTING_DESCRIPTION, "Device Setting Description");
_tagNameMap.put(TAG_SUBJECT_DISTANCE_RANGE, "Subject Distance Range");
_tagNameMap.put(TAG_IMAGE_UNIQUE_ID, "Unique Image ID");
_tagNameMap.put(TAG_CAMERA_OWNER_NAME, "Camera Owner Name");
_tagNameMap.put(TAG_BODY_SERIAL_NUMBER, "Body Serial Number");
_tagNameMap.put(TAG_LENS_SPECIFICATION, "Lens Specification");
_tagNameMap.put(TAG_LENS_MAKE, "Lens Make");
_tagNameMap.put(TAG_LENS_MODEL, "Lens Model");
_tagNameMap.put(TAG_LENS_SERIAL_NUMBER, "Lens Serial Number");
_tagNameMap.put(TAG_GAMMA, "Gamma");
_tagNameMap.put(TAG_MIN_SAMPLE_VALUE, "Minimum sample value");
_tagNameMap.put(TAG_MAX_SAMPLE_VALUE, "Maximum sample value");
_tagNameMap.put(TAG_LENS, "Lens");
}
    /** Creates the directory and attaches the descriptor that renders its tag values as human-readable text. */
    public ExifSubIFDDirectory()
    {
        this.setDescriptor(new ExifSubIFDDescriptor(this));
    }
    /** Returns the display name of this directory. */
    @Override
    @NotNull
    public String getName()
    {
        return "Exif SubIFD";
    }
    /** Returns the shared tag-id to tag-name lookup table populated in the static initializer. */
    @Override
    @NotNull
    protected HashMap<Integer, String> getTagNameMap()
    {
        return _tagNameMap;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults;
import org.elasticsearch.xpack.core.ml.inference.results.SingleValueInferenceResults;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.xpack.core.ml.job.config.Operator;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class TreeTests extends AbstractSerializingTestCase<Tree> {
private final double eps = 1.0E-8;
private boolean lenient;
@Before
public void chooseStrictOrLenient() {
lenient = randomBoolean();
}
@Override
protected Tree doParseInstance(XContentParser parser) throws IOException {
return lenient ? Tree.fromXContentLenient(parser) : Tree.fromXContentStrict(parser);
}
@Override
protected boolean supportsUnknownFields() {
return lenient;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return field -> field.startsWith("feature_names");
}
@Override
protected Tree createTestInstance() {
return createRandom();
}
public static Tree createRandom(TargetType targetType) {
int numberOfFeatures = randomIntBetween(1, 10);
List<String> featureNames = new ArrayList<>();
for (int i = 0; i < numberOfFeatures; i++) {
featureNames.add(randomAlphaOfLength(10));
}
return buildRandomTree(targetType, featureNames, 6);
}
public static Tree createRandom() {
return createRandom(randomFrom(TargetType.values()));
}
public static Tree buildRandomTree(TargetType targetType, List<String> featureNames, int depth) {
Tree.Builder builder = Tree.builder();
int maxFeatureIndex = featureNames.size() - 1;
builder.setFeatureNames(featureNames);
TreeNode.Builder node = builder.addJunction(0, randomInt(maxFeatureIndex), true, randomDouble());
List<Integer> childNodes = List.of(node.getLeftChild(), node.getRightChild());
for (int i = 0; i < depth -1; i++) {
List<Integer> nextNodes = new ArrayList<>();
for (int nodeId : childNodes) {
if (i == depth -2) {
builder.addLeaf(nodeId, randomDouble());
} else {
TreeNode.Builder childNode =
builder.addJunction(nodeId, randomInt(maxFeatureIndex), true, randomDouble());
nextNodes.add(childNode.getLeftChild());
nextNodes.add(childNode.getRightChild());
}
}
childNodes = nextNodes;
}
List<String> categoryLabels = null;
if (randomBoolean() && targetType == TargetType.CLASSIFICATION) {
categoryLabels = Arrays.asList(generateRandomStringArray(randomIntBetween(1, 10), randomIntBetween(1, 10), false, false));
}
return builder.setTargetType(targetType).setClassificationLabels(categoryLabels).build();
}
public static Tree buildRandomTree(List<String> featureNames, int depth) {
return buildRandomTree(randomFrom(TargetType.values()), featureNames, depth);
}
@Override
protected Writeable.Reader<Tree> instanceReader() {
return Tree::new;
}
public void testInferWithStump() {
Tree.Builder builder = Tree.builder().setTargetType(TargetType.REGRESSION);
builder.setRoot(TreeNode.builder(0).setLeafValue(Collections.singletonList(42.0)));
builder.setFeatureNames(Collections.emptyList());
Tree tree = builder.build();
List<String> featureNames = Arrays.asList("foo", "bar");
List<Double> featureVector = Arrays.asList(0.6, 0.0);
Map<String, Object> featureMap = zipObjMap(featureNames, featureVector); // does not really matter as this is a stump
assertThat(42.0,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
}
public void testInfer() {
// Build a tree with 2 nodes and 3 leaves using 2 features
// The leaves have unique values 0.1, 0.2, 0.3
Tree.Builder builder = Tree.builder().setTargetType(TargetType.REGRESSION);
TreeNode.Builder rootNode = builder.addJunction(0, 0, true, 0.5);
builder.addLeaf(rootNode.getRightChild(), 0.3);
TreeNode.Builder leftChildNode = builder.addJunction(rootNode.getLeftChild(), 1, true, 0.8);
builder.addLeaf(leftChildNode.getLeftChild(), 0.1);
builder.addLeaf(leftChildNode.getRightChild(), 0.2);
List<String> featureNames = Arrays.asList("foo", "bar");
Tree tree = builder.setFeatureNames(featureNames).build();
// This feature vector should hit the right child of the root node
List<Double> featureVector = Arrays.asList(0.6, 0.0);
Map<String, Object> featureMap = zipObjMap(featureNames, featureVector);
assertThat(0.3,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
// This should hit the left child of the left child of the root node
// i.e. it takes the path left, left
featureVector = Arrays.asList(0.3, 0.7);
featureMap = zipObjMap(featureNames, featureVector);
assertThat(0.1,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
// This should hit the right child of the left child of the root node
// i.e. it takes the path left, right
featureVector = Arrays.asList(0.3, 0.9);
featureMap = zipObjMap(featureNames, featureVector);
assertThat(0.2,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
// This should still work if the internal values are strings
List<String> featureVectorStrings = Arrays.asList("0.3", "0.9");
featureMap = zipObjMap(featureNames, featureVectorStrings);
assertThat(0.2,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
// This should handle missing values and take the default_left path
featureMap = new HashMap<>(2) {{
put("foo", 0.3);
put("bar", null);
}};
assertThat(0.1,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
}
public void testInferNestedFields() {
// Build a tree with 2 nodes and 3 leaves using 2 features
// The leaves have unique values 0.1, 0.2, 0.3
Tree.Builder builder = Tree.builder().setTargetType(TargetType.REGRESSION);
TreeNode.Builder rootNode = builder.addJunction(0, 0, true, 0.5);
builder.addLeaf(rootNode.getRightChild(), 0.3);
TreeNode.Builder leftChildNode = builder.addJunction(rootNode.getLeftChild(), 1, true, 0.8);
builder.addLeaf(leftChildNode.getLeftChild(), 0.1);
builder.addLeaf(leftChildNode.getRightChild(), 0.2);
List<String> featureNames = Arrays.asList("foo.baz", "bar.biz");
Tree tree = builder.setFeatureNames(featureNames).build();
// This feature vector should hit the right child of the root node
Map<String, Object> featureMap = new HashMap<>() {{
put("foo", new HashMap<>(){{
put("baz", 0.6);
}});
put("bar", new HashMap<>(){{
put("biz", 0.0);
}});
}};
assertThat(0.3,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
// This should hit the left child of the left child of the root node
// i.e. it takes the path left, left
featureMap = new HashMap<>() {{
put("foo", new HashMap<>(){{
put("baz", 0.3);
}});
put("bar", new HashMap<>(){{
put("biz", 0.7);
}});
}};
assertThat(0.1,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
// This should hit the right child of the left child of the root node
// i.e. it takes the path left, right
featureMap = new HashMap<>() {{
put("foo", new HashMap<>(){{
put("baz", 0.3);
}});
put("bar", new HashMap<>(){{
put("biz", 0.9);
}});
}};
assertThat(0.2,
closeTo(((SingleValueInferenceResults)tree.infer(featureMap, RegressionConfig.EMPTY_PARAMS, Collections.emptyMap())).value(),
0.00001));
}
public void testTreeClassificationProbability() {
// Build a tree with 2 nodes and 3 leaves using 2 features
// The leaves have unique values 0.1, 0.2, 0.3
Tree.Builder builder = Tree.builder().setTargetType(TargetType.CLASSIFICATION);
TreeNode.Builder rootNode = builder.addJunction(0, 0, true, 0.5);
builder.addLeaf(rootNode.getRightChild(), 1.0);
TreeNode.Builder leftChildNode = builder.addJunction(rootNode.getLeftChild(), 1, true, 0.8);
builder.addLeaf(leftChildNode.getLeftChild(), 1.0);
builder.addLeaf(leftChildNode.getRightChild(), 0.0);
List<String> featureNames = Arrays.asList("foo", "bar");
Tree tree = builder.setFeatureNames(featureNames).setClassificationLabels(Arrays.asList("cat", "dog")).build();
double eps = 0.000001;
// This feature vector should hit the right child of the root node
List<Double> featureVector = Arrays.asList(0.6, 0.0);
List<Double> expectedProbs = Arrays.asList(1.0, 0.0);
List<String> expectedFields = Arrays.asList("dog", "cat");
Map<String, Object> featureMap = zipObjMap(featureNames, featureVector);
List<ClassificationInferenceResults.TopClassEntry> probabilities =
((ClassificationInferenceResults)tree.infer(featureMap, new ClassificationConfig(2), Collections.emptyMap()))
.getTopClasses();
for(int i = 0; i < expectedProbs.size(); i++) {
assertThat(probabilities.get(i).getProbability(), closeTo(expectedProbs.get(i), eps));
assertThat(probabilities.get(i).getClassification(), equalTo(expectedFields.get(i)));
}
// This should hit the left child of the left child of the root node
// i.e. it takes the path left, left
featureVector = Arrays.asList(0.3, 0.7);
featureMap = zipObjMap(featureNames, featureVector);
probabilities =
((ClassificationInferenceResults)tree.infer(featureMap, new ClassificationConfig(2), Collections.emptyMap()))
.getTopClasses();
for(int i = 0; i < expectedProbs.size(); i++) {
assertThat(probabilities.get(i).getProbability(), closeTo(expectedProbs.get(i), eps));
assertThat(probabilities.get(i).getClassification(), equalTo(expectedFields.get(i)));
}
// This should handle missing values and take the default_left path
featureMap = new HashMap<>(2) {{
put("foo", 0.3);
put("bar", null);
}};
probabilities =
((ClassificationInferenceResults)tree.infer(featureMap, new ClassificationConfig(2), Collections.emptyMap()))
.getTopClasses();
for(int i = 0; i < expectedProbs.size(); i++) {
assertThat(probabilities.get(i).getProbability(), closeTo(expectedProbs.get(i), eps));
assertThat(probabilities.get(i).getClassification(), equalTo(expectedFields.get(i)));
}
}
public void testTreeWithNullRoot() {
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> Tree.builder()
.setNodes(Collections.singletonList(null))
.setFeatureNames(Arrays.asList("foo", "bar"))
.build());
assertThat(ex.getMessage(), equalTo("[tree] cannot contain null nodes"));
}
public void testTreeWithInvalidNode() {
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> Tree.builder()
.setNodes(TreeNode.builder(0)
.setLeftChild(1)
.setSplitFeature(1)
.setThreshold(randomDouble()))
.setFeatureNames(Arrays.asList("foo", "bar"))
.build().validate());
assertThat(ex.getMessage(), equalTo("[tree] contains missing nodes [1]"));
}
public void testTreeWithNullNode() {
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> Tree.builder()
.setNodes(TreeNode.builder(0)
.setLeftChild(1)
.setSplitFeature(1)
.setThreshold(randomDouble()),
null)
.setFeatureNames(Arrays.asList("foo", "bar"))
.build()
.validate());
assertThat(ex.getMessage(), equalTo("[tree] cannot contain null nodes"));
}
public void testTreeWithCycle() {
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> Tree.builder()
.setNodes(TreeNode.builder(0)
.setLeftChild(1)
.setSplitFeature(1)
.setThreshold(randomDouble()),
TreeNode.builder(0)
.setLeftChild(0)
.setSplitFeature(1)
.setThreshold(randomDouble()))
.setFeatureNames(Arrays.asList("foo", "bar"))
.build()
.validate());
assertThat(ex.getMessage(), equalTo("[tree] contains cycle at node 0"));
}
public void testTreeWithTargetTypeAndLabelsMismatch() {
List<String> featureNames = Arrays.asList("foo", "bar");
String msg = "[target_type] should be [classification] if [classification_labels] are provided";
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> {
Tree.builder()
.setRoot(TreeNode.builder(0)
.setLeftChild(1)
.setSplitFeature(1)
.setThreshold(randomDouble()))
.setFeatureNames(featureNames)
.setClassificationLabels(Arrays.asList("label1", "label2"))
.build()
.validate();
});
assertThat(ex.getMessage(), equalTo(msg));
}
public void testOperationsEstimations() {
Tree tree = buildRandomTree(Arrays.asList("foo", "bar", "baz"), 5);
assertThat(tree.estimatedNumOperations(), equalTo(7L));
}
public void testFeatureImportance() {
List<String> featureNames = Arrays.asList("foo", "bar");
Tree tree = Tree.builder()
.setFeatureNames(featureNames)
.setNodes(
TreeNode.builder(0)
.setSplitFeature(0)
.setOperator(Operator.LT)
.setLeftChild(1)
.setRightChild(2)
.setThreshold(0.5)
.setNumberSamples(4L),
TreeNode.builder(1)
.setSplitFeature(1)
.setLeftChild(3)
.setRightChild(4)
.setOperator(Operator.LT)
.setThreshold(0.5)
.setNumberSamples(2L),
TreeNode.builder(2)
.setSplitFeature(1)
.setLeftChild(5)
.setRightChild(6)
.setOperator(Operator.LT)
.setThreshold(0.5)
.setNumberSamples(2L),
TreeNode.builder(3).setLeafValue(3.0).setNumberSamples(1L),
TreeNode.builder(4).setLeafValue(8.0).setNumberSamples(1L),
TreeNode.builder(5).setLeafValue(13.0).setNumberSamples(1L),
TreeNode.builder(6).setLeafValue(18.0).setNumberSamples(1L)).build();
Map<String, double[]> featureImportance = tree.featureImportance(zipObjMap(featureNames, Arrays.asList(0.25, 0.25)),
Collections.emptyMap());
assertThat(featureImportance.get("foo")[0], closeTo(-5.0, eps));
assertThat(featureImportance.get("bar")[0], closeTo(-2.5, eps));
featureImportance = tree.featureImportance(zipObjMap(featureNames, Arrays.asList(0.25, 0.75)), Collections.emptyMap());
assertThat(featureImportance.get("foo")[0], closeTo(-5.0, eps));
assertThat(featureImportance.get("bar")[0], closeTo(2.5, eps));
featureImportance = tree.featureImportance(zipObjMap(featureNames, Arrays.asList(0.75, 0.25)), Collections.emptyMap());
assertThat(featureImportance.get("foo")[0], closeTo(5.0, eps));
assertThat(featureImportance.get("bar")[0], closeTo(-2.5, eps));
featureImportance = tree.featureImportance(zipObjMap(featureNames, Arrays.asList(0.75, 0.75)), Collections.emptyMap());
assertThat(featureImportance.get("foo")[0], closeTo(5.0, eps));
assertThat(featureImportance.get("bar")[0], closeTo(2.5, eps));
}
public void testMaxFeatureIndex() {
int numFeatures = randomIntBetween(1, 15);
// We need a tree where every feature is used, choose a depth big enough to
// accommodate those non-leave nodes (leaf nodes don't have a feature index)
int depth = (int) Math.ceil(Math.log(numFeatures +1) / Math.log(2)) + 1;
List<String> featureNames = new ArrayList<>(numFeatures);
for (int i=0; i<numFeatures; i++) {
featureNames.add("feature" + i);
}
Tree.Builder builder = Tree.builder().setFeatureNames(featureNames);
// build a tree using feature indices 0..numFeatures -1
int featureIndex = 0;
TreeNode.Builder node = builder.addJunction(0, featureIndex++, true, randomDouble());
List<Integer> childNodes = List.of(node.getLeftChild(), node.getRightChild());
for (int i = 0; i < depth -1; i++) {
List<Integer> nextNodes = new ArrayList<>();
for (int nodeId : childNodes) {
if (i == depth -2) {
builder.addLeaf(nodeId, randomDouble());
} else {
TreeNode.Builder childNode =
builder.addJunction(nodeId, featureIndex++ % numFeatures, true, randomDouble());
nextNodes.add(childNode.getLeftChild());
nextNodes.add(childNode.getRightChild());
}
}
childNodes = nextNodes;
}
Tree tree = builder.build();
assertEquals(numFeatures, tree.maxFeatureIndex() +1);
}
public void testMaxFeatureIndexSingleNodeTree() {
Tree tree = Tree.builder()
.setRoot(TreeNode.builder(0).setLeafValue(10.0))
.setFeatureNames(Collections.emptyList())
.build();
assertEquals(-1, tree.maxFeatureIndex());
}
public void testValidateGivenMissingFeatures() {
List<String> featureNames = Arrays.asList("foo", "bar", "baz");
// build a tree referencing a feature at index 3 which is not in the featureNames list
Tree.Builder builder = Tree.builder().setFeatureNames(featureNames);
builder.addJunction(0, 0, true, randomDouble());
builder.addJunction(1, 1, true, randomDouble());
builder.addJunction(2, 3, true, randomDouble());
builder.addLeaf(3, randomDouble());
builder.addLeaf(4, randomDouble());
builder.addLeaf(5, randomDouble());
builder.addLeaf(6, randomDouble());
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.build().validate());
assertThat(e.getDetailedMessage(), containsString("feature index [3] is out of bounds for the [feature_names] array"));
}
public void testValidateGivenTreeWithNoFeatures() {
Tree.builder()
.setRoot(TreeNode.builder(0).setLeafValue(10.0))
.setFeatureNames(Collections.emptyList())
.build()
.validate();
}
private static Map<String, Object> zipObjMap(List<String> keys, List<? extends Object> values) {
return IntStream.range(0, keys.size()).boxed().collect(Collectors.toMap(keys::get, values::get));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.mapreduce;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.apache.accumulo.core.client.BatchScanner;
import org.apache.accumulo.core.client.ClientSideIteratorScanner;
import org.apache.accumulo.core.client.IsolatedScanner;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.ScannerBase;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.clientImpl.mapreduce.lib.InputConfigurator;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
* This abstract {@link InputFormat} class allows MapReduce jobs to use Accumulo as the source of
* K,V pairs.
* <p>
* Subclasses must implement a {@link #createRecordReader(InputSplit, TaskAttemptContext)} to
* provide a {@link RecordReader} for K,V.
* <p>
* A static base class, RecordReaderBase, is provided to retrieve Accumulo {@link Key}/{@link Value}
* pairs, but one must implement its {@link RecordReaderBase#nextKeyValue()} to transform them to
* the desired generic types K,V.
* <p>
* See {@link AccumuloInputFormat} for an example implementation.
*
* @deprecated since 2.0.0; Use org.apache.accumulo.hadoop.mapreduce instead from the
* accumulo-hadoop-mapreduce.jar
*/
@Deprecated(since = "2.0.0")
public abstract class InputFormatBase<K,V> extends AbstractInputFormat<K,V> {
  /**
   * Gets the table name from the configuration.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return the table name
   * @since 1.5.0
   * @see #setInputTableName(Job, String)
   */
  protected static String getInputTableName(JobContext context) {
    return InputConfigurator.getInputTableName(CLASS, context.getConfiguration());
  }
  /**
   * Sets the name of the input table, over which this job will scan.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param tableName
   *          the name of the input table to read from
   * @since 1.5.0
   */
  public static void setInputTableName(Job job, String tableName) {
    InputConfigurator.setInputTableName(CLASS, job.getConfiguration(), tableName);
  }
  /**
   * Sets the input ranges to scan for the single input table associated with this job.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param ranges
   *          the ranges that will be mapped over
   * @see TableOperations#splitRangeByTablets(String, Range, int)
   * @since 1.5.0
   */
  public static void setRanges(Job job, Collection<Range> ranges) {
    InputConfigurator.setRanges(CLASS, job.getConfiguration(), ranges);
  }
  /**
   * Gets the ranges to scan over from a job.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return the ranges
   * @since 1.5.0
   * @see #setRanges(Job, Collection)
   */
  protected static List<Range> getRanges(JobContext context) throws IOException {
    return InputConfigurator.getRanges(CLASS, context.getConfiguration());
  }
  /**
   * Restricts the columns that will be mapped over for this job for the default input table.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param columnFamilyColumnQualifierPairs
   *          a pair of {@link Text} objects corresponding to column family and column qualifier. If
   *          the column qualifier is null, the entire column family is selected. An empty set is
   *          the default and is equivalent to scanning all the columns.
   * @since 1.5.0
   */
  public static void fetchColumns(Job job,
      Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
    InputConfigurator.fetchColumns(CLASS, job.getConfiguration(), columnFamilyColumnQualifierPairs);
  }
  /**
   * Gets the columns to be mapped over from this job.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return a set of columns
   * @since 1.5.0
   * @see #fetchColumns(Job, Collection)
   */
  protected static Set<Pair<Text,Text>> getFetchedColumns(JobContext context) {
    return InputConfigurator.getFetchedColumns(CLASS, context.getConfiguration());
  }
  /**
   * Encode an iterator on the single input table for this job.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param cfg
   *          the configuration of the iterator
   * @since 1.5.0
   */
  public static void addIterator(Job job, IteratorSetting cfg) {
    InputConfigurator.addIterator(CLASS, job.getConfiguration(), cfg);
  }
  /**
   * Gets a list of the iterator settings (for iterators to apply to a scanner) from this
   * configuration.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return a list of iterators
   * @since 1.5.0
   * @see #addIterator(Job, IteratorSetting)
   */
  protected static List<IteratorSetting> getIterators(JobContext context) {
    return InputConfigurator.getIterators(CLASS, context.getConfiguration());
  }
  /**
   * Controls the automatic adjustment of ranges for this job. This feature merges overlapping
   * ranges, then splits them to align with tablet boundaries. Disabling this feature will cause
   * exactly one Map task to be created for each specified range.
   *
   * <p>
   * By default, this feature is <b>enabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @see #setRanges(Job, Collection)
   * @since 1.5.0
   */
  public static void setAutoAdjustRanges(Job job, boolean enableFeature) {
    InputConfigurator.setAutoAdjustRanges(CLASS, job.getConfiguration(), enableFeature);
  }
  /**
   * Determines whether a configuration has auto-adjust ranges enabled. Must be enabled when
   * {@link #setBatchScan(Job, boolean)} is true.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return false if the feature is disabled, true otherwise
   * @since 1.5.0
   * @see #setAutoAdjustRanges(Job, boolean)
   */
  protected static boolean getAutoAdjustRanges(JobContext context) {
    return InputConfigurator.getAutoAdjustRanges(CLASS, context.getConfiguration());
  }
  /**
   * Controls the use of the {@link IsolatedScanner} in this job.
   *
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.5.0
   */
  public static void setScanIsolation(Job job, boolean enableFeature) {
    InputConfigurator.setScanIsolation(CLASS, job.getConfiguration(), enableFeature);
  }
  /**
   * Determines whether a configuration has isolation enabled.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.5.0
   * @see #setScanIsolation(Job, boolean)
   */
  protected static boolean isIsolated(JobContext context) {
    return InputConfigurator.isIsolated(CLASS, context.getConfiguration());
  }
  /**
   * Controls the use of the {@link ClientSideIteratorScanner} in this job. Enabling this feature
   * will cause the iterator stack to be constructed within the Map task, rather than within the
   * Accumulo TServer. To use this feature, all classes needed for those iterators must be available
   * on the classpath for the task.
   *
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.5.0
   */
  public static void setLocalIterators(Job job, boolean enableFeature) {
    InputConfigurator.setLocalIterators(CLASS, job.getConfiguration(), enableFeature);
  }
  /**
   * Determines whether a configuration uses local iterators.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.5.0
   * @see #setLocalIterators(Job, boolean)
   */
  protected static boolean usesLocalIterators(JobContext context) {
    return InputConfigurator.usesLocalIterators(CLASS, context.getConfiguration());
  }
  /**
   * Enable reading offline tables. By default, this feature is disabled and only online tables are
   * scanned. This will make the map reduce job directly read the table's files. If the table is not
   * offline, then the job will fail. If the table comes online during the map reduce job, it is
   * likely that the job will fail.
   *
   * <p>
   * To use this option, the map reduce user will need access to read the Accumulo directory in
   * HDFS.
   *
   * <p>
   * Reading the offline table will create the scan time iterator stack in the map process. So any
   * iterators that are configured for the table will need to be on the mapper's classpath.
   *
   * <p>
   * One way to use this feature is to clone a table, take the clone offline, and use the clone as
   * the input table for a map reduce job. If you plan to map reduce over the data many times, it
   * may be better to compact the table, clone it, take it offline, and use the clone for all
   * map reduce jobs. The reason to do this is that compaction will reduce each tablet in the table
   * to one file, and it is faster to read from one file.
   *
   * <p>
   * There are two possible advantages to reading a table's files directly out of HDFS. First, you
   * may see better read performance. Second, it will support speculative execution better. When
   * reading an online table speculative execution can put more load on an already slow tablet
   * server.
   *
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.5.0
   */
  public static void setOfflineTableScan(Job job, boolean enableFeature) {
    InputConfigurator.setOfflineTableScan(CLASS, job.getConfiguration(), enableFeature);
  }
  /**
   * Determines whether a configuration has the offline table scan feature enabled.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.5.0
   * @see #setOfflineTableScan(Job, boolean)
   */
  protected static boolean isOfflineScan(JobContext context) {
    return InputConfigurator.isOfflineScan(CLASS, context.getConfiguration());
  }
  /**
   * Controls the use of the {@link BatchScanner} in this job. Using this feature will group Ranges
   * by their source tablet, producing an InputSplit per tablet rather than per Range. This batching
   * helps to reduce overhead when querying a large number of small ranges. (ex: when doing
   * quad-tree decomposition for spatial queries)
   * <p>
   * In order to achieve good locality of InputSplits this option always clips the input Ranges to
   * tablet boundaries. This may result in one input Range contributing to several InputSplits.
   * <p>
   * Note: that the value of {@link #setAutoAdjustRanges(Job, boolean)} is ignored and is assumed to
   * be true when BatchScan option is enabled.
   * <p>
   * This configuration is incompatible with:
   * <ul>
   * <li>{@link #setOfflineTableScan(org.apache.hadoop.mapreduce.Job, boolean)}</li>
   * <li>{@link #setLocalIterators(org.apache.hadoop.mapreduce.Job, boolean)}</li>
   * <li>{@link #setScanIsolation(org.apache.hadoop.mapreduce.Job, boolean)}</li>
   * </ul>
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.7.0
   */
  public static void setBatchScan(Job job, boolean enableFeature) {
    InputConfigurator.setBatchScan(CLASS, job.getConfiguration(), enableFeature);
  }
  /**
   * Determines whether a configuration has the {@link BatchScanner} feature enabled.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.7.0
   * @see #setBatchScan(Job, boolean)
   */
  public static boolean isBatchScan(JobContext context) {
    return InputConfigurator.isBatchScan(CLASS, context.getConfiguration());
  }
  /**
   * Causes input format to read sample data. If sample data was created using a different
   * configuration or a table's sampler configuration changes while reading data, then the input
   * format will throw an error.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param samplerConfig
   *          The sampler configuration that sample must have been created with in order for
   *          reading sample data to succeed.
   *
   * @since 1.8.0
   * @see ScannerBase#setSamplerConfiguration(SamplerConfiguration)
   */
  public static void setSamplerConfiguration(Job job, SamplerConfiguration samplerConfig) {
    InputConfigurator.setSamplerConfiguration(CLASS, job.getConfiguration(), samplerConfig);
  }
  protected abstract static class RecordReaderBase<K,V> extends AbstractRecordReader<K,V> {
    /** Delegates to the job-level iterator list; the table name is unused for this input format. */
    @Override
    @Deprecated(since = "2.0.0")
    protected List<IteratorSetting> contextIterators(TaskAttemptContext context, String tableName) {
      return getIterators(context);
    }
    /**
     * Apply the configured iterators from the configuration to the scanner.
     *
     * @param context
     *          the Hadoop context for the configured job
     * @param scanner
     *          the scanner to configure
     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
     */
    @Deprecated(since = "1.7.0")
    protected void setupIterators(TaskAttemptContext context, Scanner scanner) {
      // tableName is given as null as it will be ignored in eventual call to #contextIterators
      setupIterators(context, scanner, null, null);
    }
    /**
     * Initialize a scanner over the given input split using this task attempt configuration.
     *
     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
     */
    @Deprecated(since = "1.7.0")
    protected void setupIterators(TaskAttemptContext context, Scanner scanner,
        org.apache.accumulo.core.client.mapreduce.RangeInputSplit split) {
      setupIterators(context, scanner, null, split);
    }
  }
}
| |
/*
*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.genie.common.model;
import com.netflix.genie.common.exceptions.GeniePreconditionException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.validation.ConstraintViolationException;
import java.util.HashSet;
import java.util.Set;
/**
* Test the command class.
*
* @author tgianos
*/
public class TestCommand extends TestEntityBase {
    private static final String NAME = "pig13";
    private static final String USER = "tgianos";
    private static final String EXECUTABLE = "/bin/pig13";
    private static final String VERSION = "1.0";

    // The command instance under test; recreated before every test case.
    private Command c;

    /**
     * Setup the tests.
     */
    @Before
    public void setup() {
        this.c = new Command();
    }

    /**
     * Test that the default constructor leaves every field unset.
     */
    @Test
    public void testDefaultConstructor() {
        Assert.assertNull(this.c.getApplication());
        Assert.assertNull(this.c.getClusters());
        Assert.assertNull(this.c.getConfigs());
        Assert.assertNull(this.c.getEnvPropFile());
        Assert.assertNull(this.c.getExecutable());
        Assert.assertNull(this.c.getJobType());
        Assert.assertNull(this.c.getName());
        Assert.assertNull(this.c.getStatus());
        Assert.assertNull(this.c.getUser());
        Assert.assertNull(this.c.getVersion());
    }

    /**
     * Test that the argument constructor sets exactly the supplied fields.
     *
     * @throws GeniePreconditionException If any precondition isn't met.
     */
    @Test
    public void testConstructor() throws GeniePreconditionException {
        // Use this.c consistently with the other tests in this class.
        this.c = new Command(NAME, USER, VERSION, CommandStatus.ACTIVE, EXECUTABLE);
        Assert.assertNull(this.c.getApplication());
        Assert.assertNull(this.c.getClusters());
        Assert.assertNull(this.c.getConfigs());
        Assert.assertNull(this.c.getEnvPropFile());
        Assert.assertEquals(EXECUTABLE, this.c.getExecutable());
        Assert.assertNull(this.c.getJobType());
        Assert.assertEquals(NAME, this.c.getName());
        Assert.assertEquals(CommandStatus.ACTIVE, this.c.getStatus());
        Assert.assertEquals(USER, this.c.getUser());
        Assert.assertEquals(VERSION, this.c.getVersion());
    }

    /**
     * Test that the create/update lifecycle hook populates the default tags.
     *
     * @throws GeniePreconditionException If any precondition isn't met.
     */
    @Test
    public void testOnCreateOrUpdateCommand() throws GeniePreconditionException {
        this.c = new Command(NAME, USER, VERSION, CommandStatus.ACTIVE, EXECUTABLE);
        Assert.assertNull(this.c.getTags());
        this.c.onCreateOrUpdateCommand();
        // The hook is expected to seed two tags.
        Assert.assertEquals(2, this.c.getTags().size());
    }

    /**
     * Make sure validation passes for a fully-specified command.
     */
    @Test
    public void testValidate() {
        this.c = new Command(NAME, USER, VERSION, CommandStatus.ACTIVE, EXECUTABLE);
        this.validate(this.c);
    }

    /**
     * Make sure validation fails when the name (from the superclass) is missing.
     */
    @Test(expected = ConstraintViolationException.class)
    public void testValidateNoName() {
        this.c = new Command(null, USER, VERSION, CommandStatus.ACTIVE, EXECUTABLE);
        this.validate(this.c);
    }

    /**
     * Make sure validation fails when the user (from the superclass) is blank.
     */
    @Test(expected = ConstraintViolationException.class)
    public void testValidateNoUser() {
        this.c = new Command(NAME, " ", VERSION, CommandStatus.ACTIVE, EXECUTABLE);
        this.validate(this.c);
    }

    /**
     * Make sure validation fails when the version (from the superclass) is empty.
     */
    @Test(expected = ConstraintViolationException.class)
    public void testValidateNoVersion() {
        this.c = new Command(NAME, USER, "", CommandStatus.ACTIVE, EXECUTABLE);
        this.validate(this.c);
    }

    /**
     * Make sure validation fails when the command status is missing.
     */
    @Test(expected = ConstraintViolationException.class)
    public void testValidateNoStatus() {
        this.c = new Command(NAME, USER, VERSION, null, EXECUTABLE);
        this.validate(this.c);
    }

    /**
     * Make sure validation fails when the executable is blank.
     */
    @Test(expected = ConstraintViolationException.class)
    public void testValidateNoExecutable() {
        this.c = new Command(NAME, USER, VERSION, CommandStatus.ACTIVE, " ");
        this.validate(this.c);
    }

    /**
     * Test setting the status.
     */
    @Test
    public void testSetStatus() {
        Assert.assertNull(this.c.getStatus());
        this.c.setStatus(CommandStatus.ACTIVE);
        Assert.assertEquals(CommandStatus.ACTIVE, this.c.getStatus());
    }

    /**
     * Test setting the environment property file.
     */
    @Test
    public void testSetEnvPropFile() {
        Assert.assertNull(this.c.getEnvPropFile());
        final String propFile = "s3://netflix.propFile";
        this.c.setEnvPropFile(propFile);
        Assert.assertEquals(propFile, this.c.getEnvPropFile());
    }

    /**
     * Test setting the job type.
     */
    @Test
    public void testSetJobType() {
        Assert.assertNull(this.c.getJobType());
        final String jobType = "pig";
        this.c.setJobType(jobType);
        Assert.assertEquals(jobType, this.c.getJobType());
    }

    /**
     * Test setting the executable.
     */
    @Test
    public void testSetExecutable() {
        Assert.assertNull(this.c.getExecutable());
        this.c.setExecutable(EXECUTABLE);
        Assert.assertEquals(EXECUTABLE, this.c.getExecutable());
    }

    /**
     * Test setting the configs.
     */
    @Test
    public void testSetConfigs() {
        Assert.assertNull(this.c.getConfigs());
        final Set<String> configs = new HashSet<>();
        configs.add("s3://netflix.configFile");
        this.c.setConfigs(configs);
        Assert.assertEquals(configs, this.c.getConfigs());
    }

    /**
     * Test setting the tags.
     */
    @Test
    public void testSetTags() {
        Assert.assertNull(this.c.getTags());
        final Set<String> tags = new HashSet<>();
        tags.add("tag1");
        tags.add("tag2");
        this.c.setTags(tags);
        Assert.assertEquals(tags, this.c.getTags());
    }

    /**
     * Test setting an application, verifying the bidirectional association is kept in sync
     * when switching applications and when clearing the association.
     *
     * @throws GeniePreconditionException If any precondition isn't met.
     */
    @Test
    public void testSetApplication() throws GeniePreconditionException {
        Assert.assertNull(this.c.getApplication());
        final Application one = new Application();
        one.setId("one");
        final Application two = new Application();
        two.setId("two");
        // Attaching the command must also register it on the application side.
        this.c.setApplication(one);
        Assert.assertEquals(one, this.c.getApplication());
        Assert.assertTrue(one.getCommands().contains(this.c));
        // Re-attaching must remove the command from the previous application.
        this.c.setApplication(two);
        Assert.assertEquals(two, this.c.getApplication());
        Assert.assertFalse(one.getCommands().contains(this.c));
        Assert.assertTrue(two.getCommands().contains(this.c));
        // Clearing must detach the command from both applications.
        this.c.setApplication(null);
        Assert.assertNull(this.c.getApplication());
        Assert.assertTrue(one.getCommands().isEmpty());
        Assert.assertTrue(two.getCommands().isEmpty());
    }

    /**
     * Test setting the clusters.
     */
    @Test
    public void testSetClusters() {
        Assert.assertNull(this.c.getClusters());
        final Set<Cluster> clusters = new HashSet<>();
        clusters.add(new Cluster());
        this.c.setClusters(clusters);
        Assert.assertEquals(clusters, this.c.getClusters());
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.refactoring;
import com.intellij.codeInsight.template.impl.TemplateManagerImpl;
import com.intellij.codeInsight.template.impl.TemplateState;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorActionManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiLiteralExpression;
import com.intellij.psi.PsiLocalVariable;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.JavaCodeStyleSettings;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.introduce.inplace.AbstractInplaceIntroducer;
import com.intellij.refactoring.introduceVariable.IntroduceVariableBase;
import com.intellij.refactoring.introduceVariable.IntroduceVariableHandler;
import com.intellij.testFramework.MapDataContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class InplaceIntroduceVariableTest extends AbstractJavaInplaceIntroduceTest {
  @Nullable
  @Override
  protected PsiExpression getExpressionFromEditor() {
    // Prefer the superclass (selection-based) lookup.
    final PsiExpression expression = super.getExpressionFromEditor();
    if (expression != null) {
      return expression;
    }
    // Fallback: find the expression enclosing the caret. For injected fragments, map the
    // caret through the top-level (host) file and editor before walking up the PSI tree.
    final PsiExpression expr = PsiTreeUtil.getParentOfType(getFile().findElementAt(getEditor().getCaretModel().getOffset()), PsiExpression.class);
    if (expr == null && InjectedLanguageManager.getInstance(getProject()).isInjectedFragment(getFile())) {
      PsiElement element = getFile();
      return PsiTreeUtil.getParentOfType(InjectedLanguageManager.getInstance(element.getProject()).getTopLevelFile(element)
        .findElementAt(InjectedLanguageUtil.getTopLevelEditor(getEditor()).getCaretModel().getOffset()), PsiExpression.class);
    }
    // The caret-based fallback only accepts literal expressions.
    return expr instanceof PsiLiteralExpression ? expr : null;
  }
  public void testFromExpression() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testConflictingInnerClassName() {
    // Temporarily force inner-class imports so the generated variable name can conflict.
    final JavaCodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()).getCustomSettings(JavaCodeStyleSettings.class);
    final boolean oldOption = settings.INSERT_INNER_CLASS_IMPORTS;
    try {
      settings.INSERT_INNER_CLASS_IMPORTS = true;
      doTest(new Pass<AbstractInplaceIntroducer>() {
        @Override
        public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
          type("constants");
        }
      });
    }
    finally {
      // Always restore the user setting so other tests are unaffected.
      settings.INSERT_INNER_CLASS_IMPORTS = oldOption;
    }
  }
  public void testInsideInjectedString() {
    doTestInsideInjection(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testInjectedString() {
    doTestInsideInjection(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        // Switch back to the host editor before typing the new name.
        bringRealEditorBack();
        type("expr");
      }
    });
  }
  public void testPlaceInsideLoopAndRename() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testPlaceInsideLambdaBody() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testPlaceInsideLambdaBodyMultipleOccurrences1() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL, new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testReplaceAllOnDummyCodeWithSameNameAsGenerated() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL, new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testReplaceAllIntroduceFieldInLocalClass() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL, new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("smth");
      }
    });
  }
  public void testReplaceAllWithScopeInvalidation() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL, new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("newType");
      }
    });
  }
  public void testRanges() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testFromParenthesis() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type("expr");
      }
    });
  }
  public void testConflictWithField() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer introducer) {
        type("height");
      }
    });
  }
  public void testConflictWithFieldNoCast() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer introducer) {
        type("weights");
      }
    });
  }
  public void testCast() {
    doTestTypeChange("Integer");
  }
  public void testCastToObject() {
    doTestTypeChange("Object");
  }
  public void testEscapePosition() {
    doTestStopEditing(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer introducer) {
        invokeEditorAction(IdeActions.ACTION_EDITOR_ESCAPE);
        invokeEditorAction(IdeActions.ACTION_EDITOR_ESCAPE);
      }
    });
  }
  public void testEscapePositionIfTyped() {
    doTestStopEditing(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer introducer) {
        type("fooBar");
        invokeEditorAction(IdeActions.ACTION_EDITOR_ESCAPE);
      }
    });
  }
  public void testWritable() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL);
  }
  public void testNoWritable() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.NO_WRITE);
  }
  public void testAllInsertFinal() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL);
  }
  public void testAllIncomplete() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.ALL);
  }
  public void testStreamSimple() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.CHAIN);
  }
  public void testStreamMultiple() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.CHAIN_ALL);
  }
  public void testStreamMultiline() {
    doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice.CHAIN);
  }
  public void testBrokenFormattingWithInValidation() {
    doTest(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer introducer) {
        type("bool");
      }
    });
  }
  public void testStopEditing() {
    doTestStopEditing(new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer introducer) {
        invokeEditorAction(IdeActions.ACTION_EDITOR_MOVE_CARET_LEFT);
        invokeEditorAction(IdeActions.ACTION_EDITOR_ENTER);
        invokeEditorAction(IdeActions.ACTION_EDITOR_ENTER);
      }
    });
  }
  // Runs the refactoring, lets `pass` interact with the live template, then compares
  // the editor contents with the "_after" fixture. The template is force-finished in
  // the finally block so a failing test cannot leak template state into the next one.
  private void doTestStopEditing(Pass<AbstractInplaceIntroducer> pass) {
    String name = getTestName(true);
    configureByFile(getBasePath() + name + getExtension());
    final boolean enabled = getEditor().getSettings().isVariableInplaceRenameEnabled();
    try {
      TemplateManagerImpl.setTemplateTesting(getProject(), getTestRootDisposable());
      getEditor().getSettings().setVariableInplaceRenameEnabled(true);
      final AbstractInplaceIntroducer introducer = invokeRefactoring();
      pass.pass(introducer);
      checkResultByFile(getBasePath() + name + "_after" + getExtension());
    }
    finally {
      TemplateState state = TemplateManagerImpl.getTemplateState(getEditor());
      if (state != null) {
        state.gotoEnd(true);
      }
      getEditor().getSettings().setVariableInplaceRenameEnabled(enabled);
    }
  }
  // Moves the template focus to the type segment (previousTab), types the new type,
  // commits the template, then compares with the "_after" fixture.
  private void doTestTypeChange(final String newType) {
    final Pass<AbstractInplaceIntroducer> typeChanger = new Pass<AbstractInplaceIntroducer>() {
      @Override
      public void pass(AbstractInplaceIntroducer inplaceIntroduceFieldPopup) {
        type(newType);
      }
    };
    String name = getTestName(true);
    configureByFile(getBasePath() + name + getExtension());
    final boolean enabled = getEditor().getSettings().isVariableInplaceRenameEnabled();
    try {
      TemplateManagerImpl.setTemplateTesting(getProject(), getTestRootDisposable());
      getEditor().getSettings().setVariableInplaceRenameEnabled(true);
      final AbstractInplaceIntroducer introducer = invokeRefactoring();
      TemplateState state = TemplateManagerImpl.getTemplateState(getEditor());
      assert state != null;
      state.previousTab();
      typeChanger.pass(introducer);
      state.gotoEnd(false);
      checkResultByFile(getBasePath() + name + "_after" + getExtension());
    }
    finally {
      getEditor().getSettings().setVariableInplaceRenameEnabled(enabled);
    }
  }
  private void doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice choice) {
    doTestReplaceChoice(choice, null);
  }
  // Pre-selects the occurrence-replacement choice on the handler, runs the refactoring,
  // optionally lets `pass` interact, commits the template, and checks the fixture.
  private void doTestReplaceChoice(IntroduceVariableBase.JavaReplaceChoice choice, Pass<AbstractInplaceIntroducer> pass) {
    String name = getTestName(true);
    configureByFile(getBasePath() + name + getExtension());
    final boolean enabled = getEditor().getSettings().isVariableInplaceRenameEnabled();
    try {
      TemplateManagerImpl.setTemplateTesting(getProject(), getTestRootDisposable());
      getEditor().getSettings().setVariableInplaceRenameEnabled(true);
      MyIntroduceHandler handler = createIntroduceHandler();
      ((MyIntroduceVariableHandler)handler).setChoice(choice);
      final AbstractInplaceIntroducer introducer = invokeRefactoring(handler);
      if (pass != null) {
        pass.pass(introducer);
      }
      TemplateState state = TemplateManagerImpl.getTemplateState(getEditor());
      assert state != null;
      state.gotoEnd(false);
      checkResultByFile(getBasePath() + name + "_after" + getExtension());
    }
    finally {
      getEditor().getSettings().setVariableInplaceRenameEnabled(enabled);
    }
  }
  private static void invokeEditorAction(String actionId) {
    EditorActionManager.getInstance().getActionHandler(actionId)
      .execute(getEditor(), getEditor().getCaretModel().getCurrentCaret(), new MapDataContext());
  }
  @Override
  protected String getBasePath() {
    return "/refactoring/inplaceIntroduceVariable/";
  }
  @Override
  protected MyIntroduceHandler createIntroduceHandler() {
    return new MyIntroduceVariableHandler();
  }
  // Test handler that forces a specific occurrence-replacement choice and enables the
  // inplace UI in test mode (which is otherwise disabled under tests).
  public static class MyIntroduceVariableHandler extends IntroduceVariableHandler implements MyIntroduceHandler {
    private JavaReplaceChoice myChoice = null;
    public void setChoice(JavaReplaceChoice choice) {
      myChoice = choice;
    }
    @Override
    public boolean invokeImpl(Project project, @NotNull PsiExpression selectedExpr, Editor editor) {
      return super.invokeImpl(project, selectedExpr, editor);
    }
    @Override
    public boolean invokeImpl(Project project, PsiLocalVariable localVariable, Editor editor) {
      return super.invokeImpl(project, localVariable, editor);
    }
    @Override
    protected JavaReplaceChoice getOccurrencesChoice() {
      return myChoice;
    }
    @Override
    protected boolean isInplaceAvailableInTestMode() {
      return true;
    }
  }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cp.internal;
import com.hazelcast.core.DistributedObject;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.cp.CPGroup;
import com.hazelcast.cp.CPGroupId;
import com.hazelcast.cp.CPMember;
import com.hazelcast.cp.event.CPGroupAvailabilityListener;
import com.hazelcast.cp.event.CPMembershipListener;
import com.hazelcast.cp.CPSubsystem;
import com.hazelcast.cp.CPSubsystemManagementService;
import com.hazelcast.cp.IAtomicLong;
import com.hazelcast.cp.IAtomicReference;
import com.hazelcast.cp.ICountDownLatch;
import com.hazelcast.cp.ISemaphore;
import com.hazelcast.cp.internal.datastructures.atomiclong.AtomicLongService;
import com.hazelcast.cp.internal.datastructures.atomicref.AtomicRefService;
import com.hazelcast.cp.internal.datastructures.countdownlatch.CountDownLatchService;
import com.hazelcast.cp.internal.datastructures.lock.LockService;
import com.hazelcast.cp.internal.datastructures.semaphore.SemaphoreService;
import com.hazelcast.cp.internal.datastructures.spi.RaftRemoteService;
import com.hazelcast.cp.internal.session.RaftSessionService;
import com.hazelcast.cp.lock.FencedLock;
import com.hazelcast.cp.session.CPSessionManagementService;
import com.hazelcast.instance.impl.HazelcastInstanceImpl;
import com.hazelcast.logging.ILogger;
import com.hazelcast.spi.impl.InternalCompletableFuture;
import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;
/**
* Provides access to CP Subsystem utilities
*/
public class CPSubsystemImpl implements CPSubsystem {

    private final HazelcastInstanceImpl instance;
    private final boolean cpSubsystemEnabled;
    // Lazily created on first access; volatile so the published instance is visible to all
    // threads. Two threads may race and each build a wrapper, which is harmless because the
    // wrapper is a stateless view over the singleton RaftService.
    private volatile CPSubsystemManagementService cpSubsystemManagementService;

    public CPSubsystemImpl(HazelcastInstanceImpl instance) {
        this.instance = instance;
        // CP Subsystem is considered enabled when a positive CP member count is configured.
        int cpMemberCount = instance.getConfig().getCPSubsystemConfig().getCPMemberCount();
        this.cpSubsystemEnabled = cpMemberCount > 0;
        ILogger logger = instance.node.getLogger(CPSubsystem.class);
        if (cpSubsystemEnabled) {
            logger.info("CP Subsystem is enabled with " + cpMemberCount + " members.");
        } else {
            logger.warning("CP Subsystem is not enabled. CP data structures will operate in UNSAFE mode! "
                    + "Please note that UNSAFE mode will not provide strong consistency guarantees.");
        }
    }

    @Nonnull
    @Override
    public IAtomicLong getAtomicLong(@Nonnull String name) {
        checkNotNull(name, "Retrieving an atomic long instance with a null name is not allowed!");
        return createProxy(AtomicLongService.SERVICE_NAME, name);
    }

    @Nonnull
    @Override
    public <E> IAtomicReference<E> getAtomicReference(@Nonnull String name) {
        checkNotNull(name, "Retrieving an atomic reference instance with a null name is not allowed!");
        return createProxy(AtomicRefService.SERVICE_NAME, name);
    }

    @Nonnull
    @Override
    public ICountDownLatch getCountDownLatch(@Nonnull String name) {
        checkNotNull(name, "Retrieving a count down latch instance with a null name is not allowed!");
        return createProxy(CountDownLatchService.SERVICE_NAME, name);
    }

    @Nonnull
    @Override
    public FencedLock getLock(@Nonnull String name) {
        // Fixed message grammar: "an fenced" -> "a fenced".
        checkNotNull(name, "Retrieving a fenced lock instance with a null name is not allowed!");
        return createProxy(LockService.SERVICE_NAME, name);
    }

    @Nonnull
    @Override
    public ISemaphore getSemaphore(@Nonnull String name) {
        checkNotNull(name, "Retrieving a semaphore instance with a null name is not allowed!");
        return createProxy(SemaphoreService.SERVICE_NAME, name);
    }

    @Override
    public CPMember getLocalCPMember() {
        return getCPSubsystemManagementService().getLocalCPMember();
    }

    @Override
    public CPSubsystemManagementService getCPSubsystemManagementService() {
        if (!cpSubsystemEnabled) {
            throw new HazelcastException("CP Subsystem is not enabled!");
        }
        if (cpSubsystemManagementService != null) {
            return cpSubsystemManagementService;
        }
        RaftService raftService = getService(RaftService.SERVICE_NAME);
        cpSubsystemManagementService = new CPSubsystemManagementServiceImpl(raftService);
        return cpSubsystemManagementService;
    }

    @Override
    public CPSessionManagementService getCPSessionManagementService() {
        if (!cpSubsystemEnabled) {
            throw new HazelcastException("CP Subsystem is not enabled!");
        }
        return getService(RaftSessionService.SERVICE_NAME);
    }

    // Looks up a node-engine service by name.
    private <T> T getService(@Nonnull String serviceName) {
        return instance.node.getNodeEngine().getService(serviceName);
    }

    // Creates a Raft-backed proxy for the named distributed object via the given service.
    private <T extends DistributedObject> T createProxy(String serviceName, String name) {
        RaftRemoteService service = getService(serviceName);
        return service.createProxy(name);
    }

    @Override
    public UUID addMembershipListener(CPMembershipListener listener) {
        RaftService raftService = getService(RaftService.SERVICE_NAME);
        return raftService.registerMembershipListener(listener);
    }

    @Override
    public boolean removeMembershipListener(UUID id) {
        RaftService raftService = getService(RaftService.SERVICE_NAME);
        return raftService.removeMembershipListener(id);
    }

    @Override
    public UUID addGroupAvailabilityListener(CPGroupAvailabilityListener listener) {
        RaftService raftService = getService(RaftService.SERVICE_NAME);
        return raftService.registerAvailabilityListener(listener);
    }

    @Override
    public boolean removeGroupAvailabilityListener(UUID id) {
        RaftService raftService = getService(RaftService.SERVICE_NAME);
        return raftService.removeAvailabilityListener(id);
    }

    /**
     * Management facade that forwards every operation to the underlying {@link RaftService}.
     */
    private static class CPSubsystemManagementServiceImpl implements CPSubsystemManagementService {
        private final RaftService raftService;

        CPSubsystemManagementServiceImpl(RaftService raftService) {
            this.raftService = raftService;
        }

        @Override
        public CPMember getLocalCPMember() {
            return raftService.getLocalCPMember();
        }

        @Override
        public InternalCompletableFuture<Collection<CPGroupId>> getCPGroupIds() {
            return raftService.getCPGroupIds();
        }

        @Override
        public InternalCompletableFuture<CPGroup> getCPGroup(String name) {
            return raftService.getCPGroup(name);
        }

        @Override
        public InternalCompletableFuture<Void> forceDestroyCPGroup(String groupName) {
            return raftService.forceDestroyCPGroup(groupName);
        }

        @Override
        public InternalCompletableFuture<Collection<CPMember>> getCPMembers() {
            return raftService.getCPMembers();
        }

        @Override
        public InternalCompletableFuture<Void> promoteToCPMember() {
            return raftService.promoteToCPMember();
        }

        @Override
        public InternalCompletableFuture<Void> removeCPMember(UUID cpMemberUuid) {
            return raftService.removeCPMember(cpMemberUuid);
        }

        @Override
        public InternalCompletableFuture<Void> reset() {
            return raftService.resetCPSubsystem();
        }

        @Override
        public boolean isDiscoveryCompleted() {
            return raftService.isDiscoveryCompleted();
        }

        @Override
        public boolean awaitUntilDiscoveryCompleted(long timeout, TimeUnit timeUnit) throws InterruptedException {
            return raftService.awaitUntilDiscoveryCompleted(timeout, timeUnit);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.common.configuration.enrichment;
import com.google.common.base.Joiner;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.curator.framework.CuratorFramework;
import org.apache.metron.common.Constants;
import org.apache.metron.common.configuration.ConfigurationsUtils;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SensorEnrichmentUpdateConfig {
protected static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  public static class FieldList {
    // Whether these fields feed ENRICHMENT or THREAT_INTEL processing (see Type usage below).
    Type type;
    // Maps a message field name to the list of enrichment types applied to it.
    Map<String, List<String>> fieldToEnrichmentTypes;
    public Type getType() {
      return type;
    }
    public void setType(Type type) {
      this.type = type;
    }
    public Map<String, List<String>> getFieldToEnrichmentTypes() {
      return fieldToEnrichmentTypes;
    }
    public void setFieldToEnrichmentTypes(Map<String, List<String>> fieldToEnrichmentTypes) {
      this.fieldToEnrichmentTypes = fieldToEnrichmentTypes;
    }
  }
  // ZooKeeper quorum connect string used to reach the configuration store.
  public String zkQuorum;
  // Per-sensor field lists describing which fields get which enrichment types.
  public Map<String, FieldList> sensorToFieldList;
  public String getZkQuorum() {
    return zkQuorum;
  }
  public void setZkQuorum(String zkQuorum) {
    this.zkQuorum = zkQuorum;
  }
  public Map<String, FieldList> getSensorToFieldList() {
    return sensorToFieldList;
  }
  public void setSensorToFieldList(Map<String, FieldList> sensorToFieldList) {
    this.sensorToFieldList = sensorToFieldList;
  }
/**
* Updates the sensor configs using a {@link ZKSourceConfigHandler} to read configs from
* ZooKeeper and the internal {@code sensorToFieldList}.
*
* @throws Exception If there's an issue reading from ZK or updating configs
*/
  public void updateSensorConfigs( ) throws Exception {
    // Build a Curator client from the configured quorum and ensure it is closed afterwards.
    CuratorFramework client = ConfigurationsUtils.getClient(getZkQuorum());
    try {
      client.start();
      // Delegate to the static overload with a ZK-backed config handler.
      updateSensorConfigs(new ZKSourceConfigHandler(client), sensorToFieldList);
    }
    finally {
      client.close();
    }
  }
  public static interface SourceConfigHandler {
    // Reads the current enrichment config for the given sensor.
    SensorEnrichmentConfig readConfig(String sensor) throws Exception;
    // Persists the given enrichment config for the sensor.
    void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception;
  }
public static class ZKSourceConfigHandler implements SourceConfigHandler {
CuratorFramework client;
public ZKSourceConfigHandler(CuratorFramework client) {
this.client = client;
}
@Override
public SensorEnrichmentConfig readConfig(String sensor) throws Exception {
SensorEnrichmentConfig sensorEnrichmentConfig = new SensorEnrichmentConfig();
try {
sensorEnrichmentConfig = SensorEnrichmentConfig.fromBytes(ConfigurationsUtils.readSensorEnrichmentConfigBytesFromZookeeper(sensor, client));
}catch (KeeperException.NoNodeException e) {
}
return sensorEnrichmentConfig;
}
@Override
public void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception {
ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensor, config.toJSON().getBytes(), client);
}
}
/**
   * Updates the sensor configs with the provided {@link SourceConfigHandler} and the provided
* {@code sensorToFieldList}.
*
* @param scHandler Handles retrieval of configs
   * @param sensorToFieldList Map from sensor to {@link FieldList}
* @throws Exception If there's an issue updating config
*/
public static void updateSensorConfigs( SourceConfigHandler scHandler
, Map<String, FieldList> sensorToFieldList
) throws Exception
{
Map<String, SensorEnrichmentConfig> sourceConfigsChanged = new HashMap<>();
for (Map.Entry<String, FieldList> kv : sensorToFieldList.entrySet()) {
SensorEnrichmentConfig config = findConfigBySensorType(scHandler, sourceConfigsChanged, kv.getKey());
Map<String, Object > fieldMap = null;
Map<String, List<String>> fieldToTypeMap = null;
List<String> fieldList = null;
if(kv.getValue().type == Type.THREAT_INTEL) {
fieldMap = config.getThreatIntel().getFieldMap();
if(fieldMap!= null) {
fieldList = (List<String>)fieldMap.get(Constants.SIMPLE_HBASE_THREAT_INTEL);
} else {
fieldMap = new HashMap<>();
}
if(fieldList == null) {
fieldList = new ArrayList<>();
fieldMap.put(Constants.SIMPLE_HBASE_THREAT_INTEL, fieldList);
}
fieldToTypeMap = config.getThreatIntel().getFieldToTypeMap();
if(fieldToTypeMap == null) {
fieldToTypeMap = new HashMap<>();
config.getThreatIntel().setFieldToTypeMap(fieldToTypeMap);
}
}
else if(kv.getValue().type == Type.ENRICHMENT) {
fieldMap = config.getEnrichment().getFieldMap();
if(fieldMap!= null) {
fieldList = (List<String>)fieldMap.get(Constants.SIMPLE_HBASE_ENRICHMENT);
} else {
fieldMap = new HashMap<>();
}
if(fieldList == null) {
fieldList = new ArrayList<>();
fieldMap.put(Constants.SIMPLE_HBASE_ENRICHMENT, fieldList);
}
fieldToTypeMap = config.getEnrichment().getFieldToTypeMap();
if(fieldToTypeMap == null) {
fieldToTypeMap = new HashMap<>();
config.getEnrichment().setFieldToTypeMap(fieldToTypeMap);
}
}
if(fieldToTypeMap == null || fieldMap == null) {
LOG.debug("fieldToTypeMap is null or fieldMap is null, so skipping");
continue;
}
//Add the additional fields to the field list associated with the hbase adapter
{
HashSet<String> fieldSet = new HashSet<>(fieldList);
List<String> additionalFields = new ArrayList<>();
for (String field : kv.getValue().getFieldToEnrichmentTypes().keySet()) {
if (!fieldSet.contains(field)) {
additionalFields.add(field);
}
}
//adding only the ones that we don't already have to the field list
if (additionalFields.size() > 0) {
LOG.debug("Adding additional fields: {}", Joiner.on(',').join(additionalFields));
fieldList.addAll(additionalFields);
sourceConfigsChanged.put(kv.getKey(), config);
}
}
//Add the additional enrichment types to the mapping between the fields
{
for(Map.Entry<String, List<String>> fieldToType : kv.getValue().getFieldToEnrichmentTypes().entrySet()) {
String field = fieldToType.getKey();
final HashSet<String> types = new HashSet<>(fieldToType.getValue());
int sizeBefore = 0;
if(fieldToTypeMap.containsKey(field)) {
List<String> typeList = (List<String>)fieldToTypeMap.get(field);
sizeBefore = new HashSet<>(typeList).size();
types.addAll(typeList);
}
int sizeAfter = types.size();
boolean changed = sizeBefore != sizeAfter;
if(changed) {
fieldToTypeMap.put(field, new ArrayList<String>() {{
addAll(types);
}});
sourceConfigsChanged.put(kv.getKey(), config);
}
}
}
}
for(Map.Entry<String, SensorEnrichmentConfig> kv : sourceConfigsChanged.entrySet()) {
scHandler.persistConfig(kv.getKey(), kv.getValue());
}
}
/**
 * Looks the sensor's config up in the "changed" cache first; on a miss, reads it
 * fresh through the handler (logging it at debug level).
 *
 * @param scHandler handler used to read configs on a cache miss
 * @param sourceConfigsChanged cache of configs already modified in this run
 * @param key sensor (source type) name
 * @return the cached or freshly-read config
 * @throws Exception if the handler fails to read the config
 */
private static SensorEnrichmentConfig findConfigBySensorType(SourceConfigHandler scHandler, Map<String, SensorEnrichmentConfig> sourceConfigsChanged, String key) throws Exception {
    SensorEnrichmentConfig cached = sourceConfigsChanged.get(key);
    if (cached != null) {
        return cached;
    }
    SensorEnrichmentConfig config = scHandler.readConfig(key);
    if (LOG.isDebugEnabled()) {
        LOG.debug(config.toJSON());
    }
    return config;
}
}
| |
package model.tool.maxflow;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Vector;

import util.FileOrganizer;
import util.Log;
/**
* The Class GraphUtil.
*/
public class GraphUtil {
/**
* Total flow.
*
* @param arcs the arcs
* @param flow the flow
* @return the double
*/
public static double totalFlow(Collection<Arc> arcs, HashMap<Arc, Double> flow) {
double sum = 0.0;
for (Arc a : arcs) {
Double f = flow.get(a);
if (null == f) {
Log.w("Null preflow.. " + a.toString());
continue;
}
sum += f;
}
return sum;
}
/**
* Write output file.
*
* @param outputFileName the output file name
* @param graph the graph
* @param flow the flow
*/
public static void writeOutputFile(String outputFileName, Graph graph, HashMap<Arc, Double> flow) {
String content = "# Node u, Node v, Flow on arc uv, Capacity of arc uv\n";
for (Arc arc : graph.getAllArcs()) {
Double f = flow.get(arc);
if (null == f) {
Log.e("Cannot find flow value for arc " + arc.getName());
f = 0.0;
}
Vertex u = arc.getStartVertex();
Vertex v = arc.getEndVertex();
content += String.format("%s, %s, %.3f, %.3f\n", u.getName(), v.getName(), f, arc.getCapacity());
}
FileOrganizer.writeFile(outputFileName, content);
}
/**
* Write the input file for push relabel algo.
*
* @param fileName the file name
* @param graph the graph
* @param source the source
* @param sink the sink
*/
public static void writeInputFileForPushRelabelAlgo(String fileName, Graph graph, Vertex source, Vertex sink) {
String content = "# Node u, Node v, Capacity of uv, Name of uv\n";
String format = "%3s, %3s, %12.5f, %s\n";
for (Arc a : source.getOutgoingArcs()) {
Vertex v = a.getEndVertex();
if (v.equals(sink)) {
// avoid double print (should print this later as ingoing arc to sink)
continue;
}
content += String.format(format, source.getName(), v.getName(), a.getCapacity(), a.getName());
}
for (Arc a : graph.getAllArcs()) {
Vertex u = a.getStartVertex();
Vertex v = a.getEndVertex();
if (u.equals(source) || v.equals(sink)) {
continue;
}
content += String.format(format, u.getName(), v.getName(), a.getCapacity(), a.getName());
}
for (Arc a : sink.getIngoingArcs()) {
Vertex u = a.getStartVertex();
content += String.format(format, u.getName(), sink.getName(), a.getCapacity(), a.getName());
}
FileOrganizer.writeFile(fileName, content);
}
/**
* Gets the outgoing arcs.
*
* @param graph the graph
* @param set the set
* @return the outgoing arcs
*/
public static Collection<Arc> getOutgoingArcs(Graph graph, HashSet<String> set) {
Vector<Arc> cut = new Vector<Arc>();
for (String un : set) {
Vertex u = graph.getVertex(un);
for (Arc a : u.getOutgoingArcs()) {
Vertex v = a.getEndVertex();
if (!set.contains(v.getName())) {
cut.add(a);
}
}
}
return cut;
}
/**
* Gets the ingoing arcs.
*
* @param graph the graph
* @param set the set
* @return the ingoing arcs
*/
public static Collection<Arc> getIngoingArcs(Graph graph, HashSet<String> set) {
Vector<Arc> cut = new Vector<Arc>();
for (String un : set) {
Vertex v = graph.getVertex(un);
for (Arc a : v.getIngoingArcs()) {
Vertex u = a.getStartVertex();
if (!set.contains(u.getName())) {
cut.add(a);
}
}
}
return cut;
}
/**
* Gets the connecting arcs.
*
* @param graph the graph
* @param sourceSet the source set
* @param sinkSet the sink set
* @return the connecting arcs
*/
public static Collection<Arc> getConnectingArcs(Graph graph, HashSet<String> sourceSet, HashSet<String> sinkSet) {
Vector<Arc> cut = new Vector<Arc>();
for (String un : sourceSet) {
Vertex u = graph.getVertex(un);
for (Arc a : u.getOutgoingArcs()) {
Vertex v = a.getEndVertex();
if (sinkSet.contains(v.getName())) {
cut.add(a);
}
}
}
return cut;
}
/**
* Find cut set.
*
* @param graph the graph
* @param flow the flow
* @param source the source
* @param sink the sink
* @return the collection
*/
public static Collection<Arc> findCutSet(Graph graph, HashMap<Arc, Double> flow, Vertex source, Vertex sink) {
HashSet<String> setFromSource = findCutVerticesSetClosestToSource(graph, flow, source);
HashSet<String> setFromSink = findCutVerticesSetClosestToSink(graph, flow, sink);
return getConnectingArcs(graph, setFromSource, setFromSink);
}
/**
* Find cut vertices set closest to source.
*
* @param graph the graph
* @param flow the flow
* @param source the source
* @return the collection
*/
public static HashSet<String> findCutVerticesSetClosestToSource(Graph graph, HashMap<Arc, Double> flow, Vertex source) {
Graph rg = graph.createResidualGraph("RES", flow);
LinkedList<Vertex> queue = new LinkedList<Vertex>();
queue.add(rg.getVertex(source.getName()));
HashSet<String> observedVertices = new HashSet<String>();
while (!queue.isEmpty()) {
Vertex u = queue.pop();
observedVertices.add(u.getName());
for (Arc uv : u.getOutgoingArcs()) {
Vertex v = uv.getEndVertex();
if (!observedVertices.contains(v.getName())) {
queue.push(v);
}
}
}
return observedVertices;
}
/**
* Find cut vertices set closest to sink.
*
* @param graph the graph
* @param flow the flow
* @param sink the sink
* @return the hash set
*/
public static HashSet<String> findCutVerticesSetClosestToSink(Graph graph, HashMap<Arc, Double> flow, Vertex sink) {
Graph rg = graph.createResidualGraph("RES", flow);
LinkedList<Vertex> queue = new LinkedList<Vertex>();
queue.add(rg.getVertex(sink.getName()));
HashSet<String> observedVertices = new HashSet<String>();
while (!queue.isEmpty()) {
Vertex v = queue.pop();
observedVertices.add(v.getName());
for (Arc uv : v.getIngoingArcs()) {
Vertex u = uv.getStartVertex();
if (!observedVertices.contains(u.getName())) {
queue.push(u);
}
}
}
return observedVertices;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESAllocationTestCase;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
import static org.hamcrest.Matchers.equalTo;
/**
 * Verifies that shard recovery throttling settings
 * ({@code cluster.routing.allocation.node_concurrent_recoveries},
 * {@code node_initial_primaries_recoveries} and {@code cluster_concurrent_rebalance})
 * limit how many shards initialize, relocate and recover concurrently per node.
 */
public class ThrottlingAllocationTests extends ESAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(ThrottlingAllocationTests.class);
// 10 primaries (+10 replicas) on a single node with node_initial_primaries_recoveries=3:
// primaries must start in waves of 3, 3, 3, 1.  Replicas stay unassigned (one node only).
public void testPrimaryRecoveryThrottling() {
AllocationService strategy = createAllocationService(Settings.builder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 3)
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 3)
.build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(10).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
logger.info("start one node, do reroute, only 3 should initialize");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(17));
logger.info("start initializing, another 3 should initialize");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(3));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(14));
logger.info("start initializing, another 3 should initialize");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(6));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(11));
logger.info("start initializing, another 1 should initialize");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(9));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(10));
logger.info("start initializing, all primaries should be started");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(10));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(0));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(10));
}
// 5 primaries + 5 replicas: primaries throttle in waves of 3 then 2 on node1;
// once node2 joins, replica recoveries are throttled the same way (3 then 2).
public void testReplicaAndPrimaryRecoveryThrottling() {
AllocationService strategy = createAllocationService(Settings.builder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 3)
.put("cluster.routing.allocation.concurrent_source_recoveries", 3)
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 3)
.build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
logger.info("start one node, do reroute, only 3 should initialize");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(7));
logger.info("start initializing, another 2 should initialize");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(3));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(2));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(5));
logger.info("start initializing, all primaries should be started");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(5));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(0));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(5));
logger.info("start another node, replicas should start being allocated");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node2"))).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(5));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(2));
logger.info("start initializing replicas");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(8));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(2));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
logger.info("start initializing replicas, all should be started");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(10));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(0));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
}
// 9 primaries, no replicas, cap of 5: both incoming and outgoing recovery counts
// per node are tracked and bounded during initial allocation and rebalancing.
public void testThrottleIncomingAndOutgoing() {
Settings settings = Settings.builder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 5)
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 5)
.put("cluster.routing.allocation.cluster_concurrent_rebalance", 5)
.build();
AllocationService strategy = createAllocationService(settings);
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(9).numberOfReplicas(0))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
logger.info("start one node, do reroute, only 5 should initialize");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(5));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(4));
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 5);
logger.info("start initializing, all primaries should be started");
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(5));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(4));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("start another 2 nodes, 5 shards should be relocating - at most 5 are allowed per node");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node2")).put(newNode("node3"))).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(4));
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(5));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(5));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 3);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 2);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 5);
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("start the relocating shards, one more shard should relocate away from node1");
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(8));
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 1);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1);
}
// With all caps at 1, an explicit MoveAllocationCommand is allowed (YES) while no
// recovery is in flight, but further moves are throttled by the source node's
// outgoing recovery and the target node's incoming recovery respectively.
// NOTE(review): method name has a typo ("Allocaiton"); kept as-is so the test's
// identity (and any references to it) is unchanged.
public void testOutgoingThrottlesAllocaiton() {
Settings settings = Settings.builder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 1)
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 1)
.put("cluster.routing.allocation.cluster_concurrent_rebalance", 1)
.build();
AllocationService strategy = createAllocationService(settings);
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(3).numberOfReplicas(0))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3"))).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 1);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 1);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").iterator().next().shardId().id(), "node1", "node2")));
assertEquals(reroute.explanations().explanations().size(), 1);
assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES);
routingTable = reroute.routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
// outgoing throttles
reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node1")), true);
assertEquals(reroute.explanations().explanations().size(), 1);
assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(2));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
// incoming throttles
reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node2")), true);
assertEquals(reroute.explanations().explanations().size(), 1);
assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(2));
assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1));
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.quickFix;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateClassKind;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateFromUsageUtils;
import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement;
import com.intellij.ide.util.DirectoryChooserUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.JavaProjectRootsUtil;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.ClassKind;
import com.intellij.psi.util.CreateClassUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author peter
*/
public class CreateClassOrPackageFix extends LocalQuickFixAndIntentionActionOnPsiElement {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.quickFix.CreateClassOrPackageFix");
// Candidate writable directories where the class/package may be created
// (pre-filtered in createFix via checkCreateClassOrPackage).
private final List<? extends PsiDirectory> myWritableDirectoryList;
// Name shown in the intention text (qualified or short, depending on what is being created).
private final String myPresentation;
// Kind of element to create; null means "create a package" (see getText()).
@Nullable private final ClassKind myClassKind;
// Superclass for the created class, if any -- usage is outside this view; presumably
// consumed by doCreate(). TODO confirm.
@Nullable private final String mySuperClass;
// The unresolved ("red") part of the reference, i.e. the name(s) still to be created.
private final String myRedPart;
// File template to create the class from; null presumably falls back to a default
// template in doCreate(). TODO confirm.
@Nullable private final String myTemplateName;
/**
 * Builds a fix that creates the class or package named {@code qualifiedName}.
 *
 * @param qualifiedName fully qualified name of the element to create
 * @param scope search scope used to locate writable target directories
 * @param context element the unresolved reference appears in
 * @param basePackage package the name is relative to, or null for an absolute name
 * @param kind kind of class to create, or null to create a package
 * @param superClass superclass for the created class, if any
 * @param templateName file template to use, if any
 * @return the fix, or null when no writable directory can host the new element
 */
@Nullable
public static CreateClassOrPackageFix createFix(@NotNull final String qualifiedName,
                                                @NotNull final GlobalSearchScope scope,
                                                @NotNull final PsiElement context,
                                                @Nullable final PsiPackage basePackage,
                                                @Nullable ClassKind kind,
                                                @Nullable String superClass,
                                                @Nullable String templateName) {
    final List<PsiDirectory> directories = getWritableDirectoryListDefault(basePackage, scope, context.getManager());
    if (directories.isEmpty()) {
        return null;
    }

    // The "red" (unresolved) part of the name: strip the base package prefix when present.
    final String redPart;
    if (basePackage == null) {
        redPart = qualifiedName;
    }
    else {
        redPart = qualifiedName.substring(basePackage.getQualifiedName().length() + 1);
    }

    final int firstDot = redPart.indexOf('.');
    // A dot in the red part means intermediate packages must be created first.
    final boolean needsIntermediatePackages = firstDot >= 0;
    final String outermostName = needsIntermediatePackages ? redPart.substring(0, firstDot) : redPart;

    // Keep only directories where the outermost element can actually be created.
    directories.removeIf(dir -> !checkCreateClassOrPackage(kind != null && !needsIntermediatePackages, dir, outermostName));

    return new CreateClassOrPackageFix(directories,
                                       context,
                                       needsIntermediatePackages ? qualifiedName : redPart,
                                       redPart,
                                       kind,
                                       superClass,
                                       templateName);
}
/**
 * Convenience overload: resolves in the context's resolve scope, with no base
 * package and the default file template.
 *
 * @return the fix, or null when no writable directory can host the new element
 */
@Nullable
public static CreateClassOrPackageFix createFix(@NotNull final String qualifiedName,
@NotNull final PsiElement context,
@Nullable ClassKind kind,
String superClass) {
return createFix(qualifiedName, context.getResolveScope(), context, null, kind, superClass, null);
}
/**
 * Private constructor: instances are obtained through the {@code createFix} factories,
 * which pre-filter the writable directory list.
 */
private CreateClassOrPackageFix(@NotNull List<? extends PsiDirectory> writableDirectoryList,
                                @NotNull PsiElement context,
                                @NotNull String presentation,
                                @NotNull String redPart,
                                @Nullable ClassKind kind,
                                @Nullable String superClass,
                                @Nullable final String templateName) {
    super(context);
    // Plain field initialization, ordered to match the field declarations.
    myWritableDirectoryList = writableDirectoryList;
    myPresentation = presentation;
    myClassKind = kind;
    mySuperClass = superClass;
    myRedPart = redPart;
    myTemplateName = templateName;
}
@Override
@NotNull
public String getText() {
return QuickFixBundle.message(
myClassKind == ClassKind.INTERFACE ? "create.interface.text" : myClassKind != null ? "create.class.text" : "create.package.text",
myPresentation);
}
@Override
@NotNull
public String getFamilyName() {
return getText();
}
@Override
public void invoke(@NotNull final Project project,
@NotNull final PsiFile file,
@Nullable Editor editor,
@NotNull final PsiElement startElement,
@NotNull PsiElement endElement) {
if (isAvailable(project, null, file)) {
PsiDirectory directory = chooseDirectory(project, file);
if (directory == null) return;
WriteAction.run(() -> doCreate(directory, startElement));
}
}
private static boolean checkCreateClassOrPackage(final boolean createJavaClass, final PsiDirectory directory, final String name) {
try {
if (createJavaClass) {
JavaDirectoryService.getInstance().checkCreateClass(directory, name);
}
else {
directory.checkCreateSubdirectory(name);
}
return true;
}
catch (IncorrectOperationException ex) {
return false;
}
}
@Nullable
private PsiDirectory chooseDirectory(final Project project, final PsiFile file) {
PsiDirectory preferredDirectory = myWritableDirectoryList.isEmpty() ? null : myWritableDirectoryList.get(0);
final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
final VirtualFile virtualFile = file.getVirtualFile();
assert virtualFile != null;
final Module moduleForFile = fileIndex.getModuleForFile(virtualFile);
if (myWritableDirectoryList.size() > 1 && !ApplicationManager.getApplication().isUnitTestMode()) {
if (moduleForFile != null) {
for (PsiDirectory directory : myWritableDirectoryList) {
if (fileIndex.getModuleForFile(directory.getVirtualFile()) == moduleForFile) {
preferredDirectory = directory;
break;
}
}
}
return DirectoryChooserUtil
.chooseDirectory(myWritableDirectoryList.toArray(PsiDirectory.EMPTY_ARRAY),
preferredDirectory, project,
new HashMap<>());
}
return preferredDirectory;
}
private void doCreate(final PsiDirectory baseDirectory, PsiElement myContext) {
final PsiManager manager = baseDirectory.getManager();
PsiDirectory directory = baseDirectory;
String lastName;
for (StringTokenizer st = new StringTokenizer(myRedPart, "."); ;) {
lastName = st.nextToken();
if (st.hasMoreTokens()) {
try {
final PsiDirectory subdirectory = directory.findSubdirectory(lastName);
directory = subdirectory != null ? subdirectory : directory.createSubdirectory(lastName);
}
catch (IncorrectOperationException e) {
CreateFromUsageUtils.scheduleFileOrPackageCreationFailedMessageBox(e, lastName, directory, true);
return;
}
}
else {
break;
}
}
if (myClassKind != null) {
PsiClass createdClass;
if (myTemplateName != null) {
createdClass = CreateClassUtil.createClassFromCustomTemplate(directory, null, lastName, myTemplateName);
}
else {
createdClass = CreateFromUsageUtils
.createClass(myClassKind == ClassKind.INTERFACE ? CreateClassKind.INTERFACE : CreateClassKind.CLASS, directory, lastName,
manager, myContext, null, mySuperClass);
}
if (createdClass != null) {
createdClass.navigate(true);
}
}
else {
try {
directory.createSubdirectory(lastName);
}
catch (IncorrectOperationException e) {
CreateFromUsageUtils.scheduleFileOrPackageCreationFailedMessageBox(e, lastName, directory, true);
}
}
}
@Override
public boolean startInWriteAction() {
return false;
}
private static List<PsiDirectory> getWritableDirectoryListDefault(@Nullable final PsiPackage context,
final GlobalSearchScope scope,
final PsiManager psiManager) {
if (LOG.isDebugEnabled()) {
LOG.debug("Getting writable directory list for package '" + (context == null ? null : context.getQualifiedName()) + "', scope=" + scope);
}
final List<PsiDirectory> writableDirectoryList = new ArrayList<>();
if (context != null) {
for (PsiDirectory directory : context.getDirectories()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Package directory: " + directory);
}
VirtualFile virtualFile = directory.getVirtualFile();
if (directory.isWritable() && scope.contains(virtualFile)
&& !JavaProjectRootsUtil.isInGeneratedCode(virtualFile, psiManager.getProject())) {
writableDirectoryList.add(directory);
}
}
}
else {
for (VirtualFile root : JavaProjectRootsUtil.getSuitableDestinationSourceRoots(psiManager.getProject())) {
PsiDirectory directory = psiManager.findDirectory(root);
if (LOG.isDebugEnabled()) {
LOG.debug("Root: " + root + ", directory: " + directory);
}
if (directory != null && directory.isWritable() && scope.contains(directory.getVirtualFile())) {
writableDirectoryList.add(directory);
}
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Result " + writableDirectoryList);
}
return writableDirectoryList;
}
}
| |
package ua.yandex.shad.stream;
import org.junit.Test;
import ua.yandex.shad.containers.IntList;
import static org.junit.Assert.*;
/**
* Created by lionell on 11/29/15.
*
* @author Ruslan Sakevych
*/
public class AsIntStreamTest {

  // Tolerance for double comparisons in average() tests.
  private static final double EPS = 1e-10;

  //<editor-fold desc="static of">
  @Test
  public void staticOf_withNoParameter_shouldReturnEmptyStream() {
    IntList expectedInts = new IntList();
    IntStream stream = AsIntStream.of();
    AsIntStream asStream = (AsIntStream) stream;
    assertEquals(expectedInts, asStream.getInts());
  }

  @Test
  public void staticOf_withOneParameter_shouldReturnCorrectStream() {
    IntList expectedInts = new IntList(1);
    IntStream stream = AsIntStream.of(1);
    AsIntStream asStream = (AsIntStream) stream;
    assertEquals(expectedInts, asStream.getInts());
  }

  @Test
  public void staticOf_withManyParameters_shouldReturnCorrectStream() {
    IntList expectedInts = new IntList(1, 2, 3);
    IntStream stream = AsIntStream.of(1, 2, 3);
    AsIntStream asStream = (AsIntStream) stream;
    assertEquals(expectedInts, asStream.getInts());
  }
  //</editor-fold>

  //<editor-fold desc="average">
  @Test(expected = IllegalArgumentException.class)
  public void average_emptyStream_shouldThrowException() {
    AsIntStream.of().average();
  }

  @Test
  public void average_streamWithOneElement_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(5);
    assertEquals(5.0, stream.average(), EPS);
  }

  @Test
  public void average_streamWithManyElements_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(3, 1, 1, 2, 3, 2);
    assertEquals(2.0, stream.average(), EPS);
  }
  //</editor-fold>

  //<editor-fold desc="max">
  @Test(expected = IllegalArgumentException.class)
  public void max_emptyStream_shouldThrowException() {
    AsIntStream.of().max();
  }

  @Test
  public void max_streamWithOneElement_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(-5);
    assertEquals(-5, stream.max());
  }

  @Test
  public void max_streamWithManyElements_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(13, 10, -1, 2, 33, 2);
    assertEquals(33, stream.max());
  }
  //</editor-fold>

  //<editor-fold desc="min">
  @Test(expected = IllegalArgumentException.class)
  public void min_emptyStream_shouldThrowException() {
    AsIntStream.of().min();
  }

  @Test
  public void min_streamWithOneElement_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(-1);
    assertEquals(-1, stream.min());
  }

  @Test
  public void min_streamWithManyElements_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(-314, -1, 1, -2, 3, -2);
    assertEquals(-314, stream.min());
  }
  //</editor-fold>

  //<editor-fold desc="count">
  @Test
  public void count_emptyStream_shouldReturnZero() {
    IntStream stream = AsIntStream.of();
    assertEquals(0, stream.count());
  }

  @Test
  public void count_streamWithOneElement_shouldReturnOne() {
    IntStream stream = AsIntStream.of(-100);
    assertEquals(1, stream.count());
  }

  @Test
  public void count_streamWithManyElements_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(-314, -1, 1, -2, 3, -2, 7, 9);
    assertEquals(8, stream.count());
  }
  //</editor-fold>

  //<editor-fold desc="filter">
  // NOTE: the filter/map/flatMap tests below assert on the receiver itself, i.e. they
  // rely on AsIntStream applying these operations to the stream in place.
  @Test
  public void filter_emptyStream_streamShouldNotChange() {
    IntStream stream = AsIntStream.of();
    stream.filter(x -> x > 0);
    assertEquals(AsIntStream.of(), stream);
  }

  @Test
  public void filter_streamWithManyElementsAndTotallyNegativePredicate_streamShouldBeEmpty() {
    IntStream stream = AsIntStream.of(-1, -4, 2, 0, -1);
    stream.filter(x -> false);
    assertEquals(AsIntStream.of(), stream);
  }

  @Test
  public void filter_streamWithManyElementsAndTotallyPositivePredicate_streamShouldNotChange() {
    IntStream stream = AsIntStream.of(-1, -4, 2, 0, -1);
    stream.filter(x -> true);
    assertEquals(AsIntStream.of(-1, -4, 2, 0, -1), stream);
  }

  @Test
  public void filter_streamWithManyElementsAndNotNegativePredicate_streamShouldBeCorrect() {
    IntStream stream = AsIntStream.of(-1, -4, 2, 0, -1);
    stream.filter(x -> x >= 0);
    assertEquals(AsIntStream.of(2, 0), stream);
  }
  //</editor-fold>

  //<editor-fold desc="forEach">
  // All tests connected with forEach are using IntList::add method reference
  @Test
  public void forEach_emptyStream_listShouldBeEmpty() {
    IntStream stream = AsIntStream.of();
    IntList objective = new IntList();
    stream.forEach(objective::add);
    assertEquals(new IntList(), objective);
  }

  @Test
  public void forEach_streamWithManyElements_listShouldContainsCorrectElements() {
    IntStream stream = AsIntStream.of(45, 1, 25);
    IntList objective = new IntList();
    stream.forEach(objective::add);
    assertEquals(new IntList(45, 1, 25), objective);
  }
  //</editor-fold>

  //<editor-fold desc="map">
  @Test
  public void map_emptyStream_streamShouldNotChange() {
    IntStream stream = AsIntStream.of();
    stream.map(x -> 2 * x);
    assertEquals(AsIntStream.of(), stream);
  }

  @Test
  public void map_streamWithManyElementsAndSquareOperator_streamShouldBeCorrect() {
    IntStream stream = AsIntStream.of(1, 2, 3);
    stream.map(x -> x * x);
    assertEquals(AsIntStream.of(1, 4, 9), stream);
  }
  //</editor-fold>

  //<editor-fold desc="reduce">
  @Test
  public void reduce_emptyStream_shouldReturnIdentity() {
    IntStream stream = AsIntStream.of();
    // Accumulator is side-effect free; the original assigned to its own parameter
    // ("sum += x"), which produces the same value but is a flagged anti-pattern.
    int actualIdentity = stream.reduce(123, (sum, x) -> sum + x);
    assertEquals(123, actualIdentity);
  }

  @Test
  public void reduce_streamWithManyElementsAndSumOperator_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(1, 2, 3, -1);
    int actualIdentity = stream.reduce(4, (sum, x) -> sum + x);
    assertEquals(9, actualIdentity);
  }
  //</editor-fold>

  //<editor-fold desc="sum">
  @Test
  public void sum_emptyStream_shouldReturnZero() {
    IntStream stream = AsIntStream.of();
    int actualSum = stream.sum();
    assertEquals(0, actualSum);
  }

  @Test
  public void sum_streamWithOneElement_shouldReturnThisElement() {
    IntStream stream = AsIntStream.of(54);
    int actualSum = stream.sum();
    assertEquals(54, actualSum);
  }

  @Test
  public void sum_streamWithManyElements_shouldReturnCorrectValue() {
    IntStream stream = AsIntStream.of(-14, -1, 1, -2, 3, -2, 7, 9);
    int actualSum = stream.sum();
    assertEquals(1, actualSum);
  }
  //</editor-fold>

  //<editor-fold desc="toArray">
  @Test
  public void toArray_emptyStream_shouldReturnEmptyArray() {
    IntStream stream = AsIntStream.of();
    int[] ints = stream.toArray();
    assertEquals(0, ints.length);
  }

  @Test
  public void toArray_streamWithOneElement_shouldReturnCorrectArray() {
    int[] expectedInts = {34};
    IntStream stream = AsIntStream.of(34);
    int[] ints = stream.toArray();
    assertArrayEquals(expectedInts, ints);
  }

  @Test
  public void toArray_streamWithManyElements_shouldReturnCorrectArray() {
    int[] expectedInts = {34, 43};
    IntStream stream = AsIntStream.of(34, 43);
    int[] ints = stream.toArray();
    assertArrayEquals(expectedInts, ints);
  }

  @Test
  public void toArray_streamWithDelayedOperations_shouldReturnCorrectArray() {
    int[] expectedInts = {0, 1, 2, 3, 4, 5, 8, 9, 10};
    IntStream stream = AsIntStream.of(-1, 1, 2, 0, -5, 3)
        .filter(x -> x > 0)
        .map(x -> x * x)
        .flatMap(x -> AsIntStream.of(x - 1, x, x + 1));
    int[] ints = stream.toArray();
    assertArrayEquals(expectedInts, ints);
  }
  //</editor-fold>

  //<editor-fold desc="flatMap">
  @Test
  public void flatMap_emptyStream_streamShouldNotChange() {
    IntStream stream = AsIntStream.of();
    stream.flatMap(x -> AsIntStream.of(x + 1));
    assertEquals(AsIntStream.of(), stream);
  }

  @Test
  public void flatMap_streamWithOneElement_streamShouldBeCorrect() {
    IntStream stream = AsIntStream.of(7);
    stream.flatMap(x -> AsIntStream.of(x - 1, x, x + 1));
    assertEquals(AsIntStream.of(6, 7, 8), stream);
  }

  @Test
  public void flatMap_streamWithManyElements_streamShouldBeCorrect() {
    IntStream stream = AsIntStream.of(1, 4, 6);
    stream.flatMap(x -> AsIntStream.of(x - 1, x, x + 1));
    assertEquals(AsIntStream.of(0, 1, 2, 3, 4, 5, 5, 6, 7), stream);
  }
  //</editor-fold>

  //<editor-fold desc="equals/hashCode">
  @Test
  public void equals_twoEqualReferences() {
    IntStream stream = AsIntStream.of(1, 2, 3);
    assertEquals(stream, stream);
  }

  @Test
  public void equals_nullReference() {
    IntStream stream = AsIntStream.of(1, 2, 3);
    assertNotEquals(stream, null);
  }

  @Test
  public void equals_differentClass() {
    IntStream stream = AsIntStream.of(1, 2, 3);
    assertNotEquals(stream, new Object());
  }

  @Test
  public void equals_equalStreams() {
    IntStream stream1 = AsIntStream.of(1, 2, 3);
    IntStream stream2 = AsIntStream.of(1, 2, 3);
    assertEquals(stream1, stream2);
  }

  @Test
  public void equals_equalStreamsWithDelayedOperations() {
    IntStream stream1 = AsIntStream.of(-1, 1, 2, 0, -5, 3)
        .filter(x -> x > 0)
        .map(x -> x * x)
        .flatMap(x -> AsIntStream.of(x - 1, x, x + 1));
    IntStream stream2 = AsIntStream.of(0, 1, 2, 3, 4, 5, 8, 9, 10);
    assertEquals(stream1, stream2);
  }

  @Test
  public void hashCode_differentStreams() {
    IntStream stream1 = AsIntStream.of(1, 2, 3);
    IntStream stream2 = AsIntStream.of(3, 2, 1);
    assertNotEquals(stream1.hashCode(), stream2.hashCode());
  }

  @Test
  public void hashCode_equalStreams() {
    IntStream stream1 = AsIntStream.of(1, 2, 3);
    IntStream stream2 = AsIntStream.of(1, 2, 3);
    assertEquals(stream1.hashCode(), stream2.hashCode());
  }

  @Test
  public void hashCode_equalStreamsWithDelayedOperations() {
    IntStream stream1 = AsIntStream.of(-1, 1, 2, 0, -5, 3)
        .filter(x -> x > 0)
        .map(x -> x * x)
        .flatMap(x -> AsIntStream.of(x - 1, x, x + 1));
    IntStream stream2 = AsIntStream.of(0, 1, 2, 3, 4, 5, 8, 9, 10);
    assertEquals(stream1.hashCode(), stream2.hashCode());
  }
  //</editor-fold>
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.masterdb.bean;
import static com.opengamma.util.db.DbDateUtils.MAX_SQL_TIMESTAMP;
import static com.opengamma.util.db.DbDateUtils.toSqlTimestamp;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import java.sql.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.SqlParameterValue;
import org.springframework.jdbc.core.support.SqlLobValue;
import org.testng.annotations.Test;
import org.threeten.bp.Clock;
import org.threeten.bp.Instant;
import org.threeten.bp.ZoneOffset;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.UniqueId;
import com.opengamma.master.convention.ConventionDocument;
import com.opengamma.masterdb.convention.DbConventionBeanMaster;
import com.opengamma.util.JodaBeanSerialization;
import com.opengamma.util.ZipUtils;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.AbstractDbTest;
import com.opengamma.util.test.TestGroup;
/**
* Base tests for DbConventionBeanMaster.
*/
@Test(groups = TestGroup.UNIT_DB)
public abstract class AbstractDbConventionBeanMasterTest extends AbstractDbTest {

  private static final Logger s_logger = LoggerFactory.getLogger(AbstractDbConventionBeanMasterTest.class);

  // External-id bundles used by the fixture rows inserted in init().
  private static final ExternalIdBundle BUNDLE_201 = ExternalIdBundle.of(ExternalId.of("C", "D"), ExternalId.of("E", "F"));
  private static final ExternalIdBundle BUNDLE_102 = ExternalIdBundle.of(ExternalId.of("A", "B"), ExternalId.of("C", "D"), ExternalId.of("GH", "HI"));
  private static final ExternalIdBundle BUNDLE_101 = ExternalIdBundle.of(ExternalId.of("A", "B"), ExternalId.of("C", "D"), ExternalId.of("E", "F"));

  // Master under test, rebuilt for every test in doSetUp().
  protected DbConventionBeanMaster _cnvMaster;
  // Fixture instants: version 1 predates version 2 (now-100s and now-50s).
  protected Instant _version1Instant;
  protected Instant _version2Instant;
  // Number of distinct convention objects (oids); rows 201 and 202 are two versions of one oid.
  protected int _totalSecurities;

  /**
   * Constructor taking the database configuration.
   * Note: the readOnly flag is accepted for subclass compatibility but is not used here.
   */
  public AbstractDbConventionBeanMasterTest(String databaseType, String databaseVersion, boolean readOnly) {
    super(databaseType, databaseVersion);
    s_logger.info("running testcases for {}", databaseType);
  }

  //-------------------------------------------------------------------------
  @Override
  protected void doSetUp() {
    init();
  }

  @Override
  protected void doTearDown() {
    _cnvMaster = null;
  }

  @Override
  protected void doTearDownClass() {
    _cnvMaster = null;
  }

  //-------------------------------------------------------------------------
  /**
   * Populates the convention tables with a fixed data set:
   * docs 101 and 102 (single version each), and doc oid 201 with two versions
   * (row 201 superseded by row 202 at _version2Instant).
   */
  private void init() {
    _cnvMaster = new DbConventionBeanMaster(getDbConnector());

    // Column layout of cnv_document (kept for reference):
    //   id bigint NOT NULL,
    //   oid bigint NOT NULL,
    //   ver_from_instant timestamp without time zone NOT NULL,
    //   ver_to_instant timestamp without time zone NOT NULL,
    //   corr_from_instant timestamp without time zone NOT NULL,
    //   corr_to_instant timestamp without time zone NOT NULL,
    //   name varchar(255) NOT NULL,
    //   main_type char NOT NULL,
    //   sub_type varchar(255) NOT NULL,
    //   java_type varchar(255) NOT NULL,
    //   packed_data blob NOT NULL,
    Instant now = Instant.now();
    // Fix the master's clock so version/correction instants are deterministic.
    _cnvMaster.setClock(Clock.fixed(now, ZoneOffset.UTC));
    _version1Instant = now.minusSeconds(100);
    _version2Instant = now.minusSeconds(50);
    s_logger.debug("test data now: {}", _version1Instant);
    s_logger.debug("test data later: {}", _version2Instant);
    final JdbcOperations template = _cnvMaster.getDbConnector().getJdbcOperations();
    template.update("INSERT INTO cnv_document VALUES (?,?,?,?,?, ?,?,?,?,?, ?)",
        101, 101, toSqlTimestamp(_version1Instant), MAX_SQL_TIMESTAMP, toSqlTimestamp(_version1Instant), MAX_SQL_TIMESTAMP,
        "TestConvention101", "S", "MOCK", "MockConvention", blob("TestConvention101", BUNDLE_101));
    template.update("INSERT INTO cnv_document VALUES (?,?,?,?,?, ?,?,?,?,?, ?)",
        102, 102, toSqlTimestamp(_version1Instant), MAX_SQL_TIMESTAMP, toSqlTimestamp(_version1Instant), MAX_SQL_TIMESTAMP,
        "TestConvention102", "S", "MOCK", "MockConvention", blob("TestConvention102", BUNDLE_102));
    // Row 201 is version-terminated at _version2Instant; row 202 replaces it (same oid 201).
    template.update("INSERT INTO cnv_document VALUES (?,?,?,?,?, ?,?,?,?,?, ?)",
        201, 201, toSqlTimestamp(_version1Instant), toSqlTimestamp(_version2Instant), toSqlTimestamp(_version1Instant), MAX_SQL_TIMESTAMP,
        "TestConvention201", "S", "MOCK", "MockConvention", blob("TestConvention201", BUNDLE_201));
    template.update("INSERT INTO cnv_document VALUES (?,?,?,?,?, ?,?,?,?,?, ?)",
        202, 201, toSqlTimestamp(_version2Instant), MAX_SQL_TIMESTAMP, toSqlTimestamp(_version2Instant), MAX_SQL_TIMESTAMP,
        "TestConvention202", "S", "MOCK", "MockConvention", blob("TestConvention202", BUNDLE_201));
    // Three distinct oids: 101, 102 and 201 (202 is a later version of 201).
    _totalSecurities = 3;

    // Column layout of cnv_idkey:
    //   id bigint not null,
    //   key_scheme varchar(255) not null,
    //   key_value varchar(255) not null,
    template.update("INSERT INTO cnv_idkey VALUES (?,?,?)",
        1, "A", "B");
    template.update("INSERT INTO cnv_idkey VALUES (?,?,?)",
        2, "C", "D");
    template.update("INSERT INTO cnv_idkey VALUES (?,?,?)",
        3, "E", "F");
    template.update("INSERT INTO cnv_idkey VALUES (?,?,?)",
        4, "GH", "HI");

    // Column layout of cnv_doc2idkey (links documents to their external-id keys,
    // mirroring BUNDLE_101/BUNDLE_102/BUNDLE_201 above):
    //   doc_id bigint not null,
    //   idkey_id bigint not null,
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        101, 1);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        101, 2);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        101, 3);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        102, 1);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        102, 2);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        102, 4);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        201, 2);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        201, 3);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        202, 2);
    template.update("INSERT INTO cnv_doc2idkey VALUES (?,?)",
        202, 3);
  }

  /**
   * Serializes a MockConvention to deflated Joda-Bean XML and wraps it as a BLOB
   * parameter suitable for the packed_data column.
   */
  private Object blob(String name, ExternalIdBundle bundle) {
    MockConvention value = new MockConvention(name, bundle, Currency.GBP);
    String xml = JodaBeanSerialization.serializer(false).xmlWriter().write(value);
    byte[] bytes = ZipUtils.deflateString(xml);
    SqlLobValue lob = new SqlLobValue(bytes, getDbConnector().getDialect().getLobHandler());
    return new SqlParameterValue(Types.BLOB, lob);
  }

  //-------------------------------------------------------------------------
  /** Asserts the loaded document matches fixture row 101 (latest, version 0). */
  protected void assert101(final ConventionDocument test) {
    UniqueId uniqueId = UniqueId.of("DbCnv", "101", "0");
    assertNotNull(test);
    assertEquals(uniqueId, test.getUniqueId());
    assertEquals(_version1Instant, test.getVersionFromInstant());
    assertEquals(null, test.getVersionToInstant());
    assertEquals(_version1Instant, test.getCorrectionFromInstant());
    assertEquals(null, test.getCorrectionToInstant());
    MockConvention convention = (MockConvention) test.getConvention();
    assertNotNull(convention);
    assertEquals(uniqueId, convention.getUniqueId());
    assertEquals("TestConvention101", convention.getName());
    assertEquals("MOCK", convention.getConventionType().getName());
    assertEquals(BUNDLE_101, convention.getExternalIdBundle());
  }

  /** Asserts the loaded document matches fixture row 102 (latest, version 0). */
  protected void assert102(final ConventionDocument test) {
    UniqueId uniqueId = UniqueId.of("DbCnv", "102", "0");
    assertNotNull(test);
    assertEquals(uniqueId, test.getUniqueId());
    assertEquals(_version1Instant, test.getVersionFromInstant());
    assertEquals(null, test.getVersionToInstant());
    assertEquals(_version1Instant, test.getCorrectionFromInstant());
    assertEquals(null, test.getCorrectionToInstant());
    MockConvention convention = (MockConvention) test.getConvention();
    assertNotNull(convention);
    assertEquals(uniqueId, convention.getUniqueId());
    assertEquals("TestConvention102", convention.getName());
    assertEquals("MOCK", convention.getConventionType().getName());
    assertEquals(BUNDLE_102, convention.getExternalIdBundle());
  }

  /** Asserts the loaded document matches fixture row 201 (superseded version 0 of oid 201). */
  protected void assert201(final ConventionDocument test) {
    UniqueId uniqueId = UniqueId.of("DbCnv", "201", "0");
    assertNotNull(test);
    assertEquals(uniqueId, test.getUniqueId());
    assertEquals(_version1Instant, test.getVersionFromInstant());
    assertEquals(_version2Instant, test.getVersionToInstant());
    assertEquals(_version1Instant, test.getCorrectionFromInstant());
    assertEquals(null, test.getCorrectionToInstant());
    MockConvention convention = (MockConvention) test.getConvention();
    assertNotNull(convention);
    assertEquals(uniqueId, convention.getUniqueId());
    assertEquals("TestConvention201", convention.getName());
    assertEquals("MOCK", convention.getConventionType().getName());
    assertEquals(BUNDLE_201, convention.getExternalIdBundle());
  }

  /** Asserts the loaded document matches fixture row 202 (latest version 1 of oid 201). */
  protected void assert202(final ConventionDocument test) {
    UniqueId uniqueId = UniqueId.of("DbCnv", "201", "1");
    assertNotNull(test);
    assertEquals(uniqueId, test.getUniqueId());
    assertEquals(_version2Instant, test.getVersionFromInstant());
    assertEquals(null, test.getVersionToInstant());
    assertEquals(_version2Instant, test.getCorrectionFromInstant());
    assertEquals(null, test.getCorrectionToInstant());
    MockConvention convention = (MockConvention) test.getConvention();
    assertNotNull(convention);
    assertEquals(uniqueId, convention.getUniqueId());
    assertEquals("TestConvention202", convention.getName());
    assertEquals("MOCK", convention.getConventionType().getName());
    assertEquals(BUNDLE_201, convention.getExternalIdBundle());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bookkeeper.zookeeper;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import com.google.common.collect.Maps;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.client.api.BKException.Code;
import org.apache.bookkeeper.common.testing.executors.MockExecutorController;
import org.apache.bookkeeper.util.ZkUtils;
import org.apache.zookeeper.AsyncCallback.Children2Callback;
import org.apache.zookeeper.AsyncCallback.DataCallback;
import org.apache.zookeeper.AsyncCallback.StatCallback;
import org.apache.zookeeper.AsyncCallback.StringCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.Watcher.Event.EventType;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
import org.powermock.api.mockito.PowerMockito;
/**
* A test base that provides mocked zookeeper.
*/
public abstract class MockZooKeeperTestCase {
protected final ConcurrentMap<String, Set<Watcher>> watchers = Maps.newConcurrentMap();
protected ZooKeeper mockZk;
protected ScheduledExecutorService zkCallbackExecutor;
protected MockExecutorController zkCallbackController;
protected void setup() throws Exception {
this.mockZk = mock(ZooKeeper.class);
PowerMockito.mockStatic(ZkUtils.class);
this.zkCallbackExecutor = mock(ScheduledExecutorService.class);
this.zkCallbackController = new MockExecutorController()
.controlExecute(zkCallbackExecutor)
.controlSubmit(zkCallbackExecutor)
.controlSchedule(zkCallbackExecutor)
.controlScheduleAtFixedRate(zkCallbackExecutor, 10);
}
private void addWatcher(String path, Watcher watcher) {
if (null == watcher) {
return;
}
Set<Watcher> watcherSet = watchers.get(path);
if (null == watcherSet) {
watcherSet = new HashSet<>();
watchers.put(path, watcherSet);
}
watcherSet.add(watcher);
}
protected void mockZkUtilsAsyncCreateFullPathOptimistic(
String expectedLedgerPath,
CreateMode expectedCreateMode,
int retCode,
String retCreatedZnodeName
) throws Exception {
PowerMockito.doAnswer(invocationOnMock -> {
String path = invocationOnMock.getArgument(1);
StringCallback callback = invocationOnMock.getArgument(5);
Object ctx = invocationOnMock.getArgument(6);
callback.processResult(
retCode, path, ctx, retCreatedZnodeName);
return null;
}).when(
ZkUtils.class,
"asyncCreateFullPathOptimistic",
eq(mockZk),
eq(expectedLedgerPath),
any(byte[].class),
anyList(),
eq(expectedCreateMode),
any(StringCallback.class),
any());
}
protected void mockZkDelete(
String expectedLedgerPath,
int expectedVersion,
int retCode
) throws Exception {
doAnswer(invocationOnMock -> {
String path = invocationOnMock.getArgument(0);
VoidCallback callback = invocationOnMock.getArgument(2);
Object ctx = invocationOnMock.getArgument(3);
callback.processResult(
retCode, path, ctx
);
return null;
}).when(mockZk).delete(
eq(expectedLedgerPath),
eq(expectedVersion),
any(VoidCallback.class),
any());
}
protected void mockZkUtilsAsyncDeleteFullPathOptimistic(
String expectedLedgerPath,
int expectedZnodeVersion,
int retCode
) throws Exception {
PowerMockito.doAnswer(invocationOnMock -> {
String path = invocationOnMock.getArgument(1);
VoidCallback callback = invocationOnMock.getArgument(3);
callback.processResult(
retCode, path, null);
return null;
}).when(
ZkUtils.class,
"asyncDeleteFullPathOptimistic",
eq(mockZk),
eq(expectedLedgerPath),
eq(expectedZnodeVersion),
any(VoidCallback.class),
eq(expectedLedgerPath));
}
protected void mockZkGetData(
String expectedLedgerPath,
boolean expectedWatcher,
int retCode,
byte[] retData,
Stat retStat
) throws Exception {
doAnswer(invocationOnMock -> {
String path = invocationOnMock.getArgument(0);
Watcher watcher = invocationOnMock.getArgument(1);
DataCallback callback = invocationOnMock.getArgument(2);
Object ctx = invocationOnMock.getArgument(3);
if (Code.OK == retCode) {
addWatcher(path, watcher);
}
callback.processResult(
retCode, path, ctx, retData, retStat
);
return null;
}).when(mockZk).getData(
eq(expectedLedgerPath),
expectedWatcher ? any(Watcher.class) : eq(null),
any(DataCallback.class),
any());
}
protected void mockZkSetData(
String expectedLedgerPath,
byte[] expectedBytes,
int expectedVersion,
int retCode,
Stat retStat
) throws Exception {
doAnswer(invocationOnMock -> {
String path = invocationOnMock.getArgument(0);
StatCallback callback = invocationOnMock.getArgument(3);
Object ctx = invocationOnMock.getArgument(4);
callback.processResult(
retCode, path, ctx, retStat
);
return null;
}).when(mockZk).setData(
eq(expectedLedgerPath),
eq(expectedBytes),
eq(expectedVersion),
any(StatCallback.class),
any());
}
protected boolean notifyWatchedEvent(EventType eventType,
KeeperState keeperState,
String path) {
Set<Watcher> watcherSet = watchers.remove(path);
if (null == watcherSet) {
return false;
}
WatchedEvent event = new WatchedEvent(
eventType, keeperState, path);
for (Watcher watcher : watcherSet) {
watcher.process(event);
}
return true;
}
protected void mockGetChildren(String expectedPath,
boolean expectedWatcher,
int retCode,
List<String> retChildren,
Stat retStat) {
mockGetChildren(
expectedPath, expectedWatcher, retCode, retChildren, retStat, 0);
}
protected void mockGetChildren(String expectedPath,
boolean expectedWatcher,
int retCode,
List<String> retChildren,
Stat retStat,
long delayMs) {
doAnswer(invocationOnMock -> {
String p = invocationOnMock.getArgument(0);
Watcher w = invocationOnMock.getArgument(1);
Children2Callback callback = invocationOnMock.getArgument(2);
Object ctx = invocationOnMock.getArgument(3);
if (Code.OK == retCode) {
addWatcher(p, w);
}
this.zkCallbackExecutor.schedule(() -> callback.processResult(
retCode,
p,
ctx,
retChildren,
retStat
), delayMs, TimeUnit.MILLISECONDS);
return null;
}).when(mockZk).getChildren(
eq(expectedPath),
expectedWatcher ? any(Watcher.class) : eq(null),
any(Children2Callback.class),
any());
}
}
| |
/*
* The aspiredb project
*
* Copyright (c) 2013 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubc.pavlab.aspiredb.server.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import gemma.gsec.SecurityService;
import gemma.gsec.authentication.UserDetailsImpl;
import gemma.gsec.authentication.UserManager;
import gemma.gsec.util.SecurityUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import org.apache.commons.lang.RandomStringUtils;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import ubc.pavlab.aspiredb.server.BaseSpringContextTest;
import ubc.pavlab.aspiredb.server.dao.LabelDao;
import ubc.pavlab.aspiredb.server.dao.PhenotypeDao;
import ubc.pavlab.aspiredb.server.dao.ProjectDao;
import ubc.pavlab.aspiredb.server.dao.SubjectDao;
import ubc.pavlab.aspiredb.server.dao.VariantDao;
import ubc.pavlab.aspiredb.server.model.CNV;
import ubc.pavlab.aspiredb.server.model.Label;
import ubc.pavlab.aspiredb.server.model.Subject;
import ubc.pavlab.aspiredb.server.project.ProjectManager;
import ubc.pavlab.aspiredb.server.security.authorization.acl.AclTestUtils;
import ubc.pavlab.aspiredb.server.util.PersistentTestObjectHelper;
import ubc.pavlab.aspiredb.server.util.PhenotypeUtil;
import ubc.pavlab.aspiredb.shared.LabelValueObject;
/**
 * Integration tests for {@link LabelService}, exercising label creation, sharing
 * and deletion under different security principals (admin vs. a regular user).
 * Runs inside the Spring test context provided by {@link BaseSpringContextTest},
 * so all collaborators are autowired and data is persisted through real DAOs.
 */
public class LabelServiceTest extends BaseSpringContextTest {
    @Autowired
    private LabelService labelService;
    @Autowired
    private ProjectManager projectManager;
    @Autowired
    private LabelDao labelDao;
    @Autowired
    private PersistentTestObjectHelper persistentTestObjectHelper;
    @Autowired
    private ProjectDao projectDao;
    @Autowired
    private SubjectDao subjectDao;
    @Autowired
    private VariantDao variantDao;
    @Autowired
    private SubjectService subjectService;
    @Autowired
    private VariantService variantService;
    @Autowired
    private PhenotypeDao phenotypeDao;
    @Autowired
    private PhenotypeUtil phenotypeUtil;
    @Autowired
    private SecurityService securityService;
    @Autowired
    UserManager userManager;
    @Autowired
    AclTestUtils aclUtil;
    // Id of the subject created in setUp(); created by admin but made writable by the test user's group.
    private Long subjectId;
    // Random principal name, used both as the test subject's identifier and as the login name.
    String username = RandomStringUtils.randomAlphabetic( 6 );
    String testname = RandomStringUtils.randomAlphabetic( 6 );
    /**
     * Creates a persistent test subject, ensures a user account named {@link #username}
     * exists, and grants that user write access to the subject through a freshly
     * created security group.
     */
    @Before
    public void setUp() {
        Subject subject = persistentTestObjectHelper.createPersistentTestIndividualObject( username );
        subjectId = subject.getId();
        try {
            userManager.loadUserByUsername( username );
        } catch ( UsernameNotFoundException e ) {
            // Account does not exist yet: create it.
            // NOTE(review): the first UserDetailsImpl argument ("jimmy") appears to be the
            // password and the second the login name -- confirm against the gsec API.
            userManager.createUser( new UserDetailsImpl( "jimmy", username, true, null, RandomStringUtils
                    .randomAlphabetic( 10 ) + "@gmail.com", "key", new Date() ) );
        }
        String groupName = randomName();
        this.securityService.createGroup( groupName );
        this.securityService.makeWriteableByGroup( subject, groupName );
        this.securityService.addUserToGroup( username, groupName );
    }
    /**
     * Verifies label ownership semantics: a label created by admin is owned and
     * editable only by admin; a regular user can neither reuse it on their own
     * subject nor query its editability, while admin can still delete it.
     */
    @Test
    public void testMultipleUsersCreateSameLabelName() {
        Collection<LabelValueObject> lvos = null;
        super.runAsAdmin();
        LabelValueObject lvo = new LabelValueObject();
        lvo.setColour( "red" );
        lvo.setName( "blah" );
        lvo.setIsShown( true );
        // Admin created subject, try adding label
        Collection<Long> subjectIds = new ArrayList<Long>();
        subjectIds.add( subjectId );
        lvo = subjectService.addLabel( subjectIds, lvo );
        assertNotNull( lvo );
        Label l = labelDao.load( lvo.getId() );
        aclUtil.checkHasAcl( l );
        assertTrue( SecurityUtil.getCurrentUsername() + " owns the label", securityService.isOwnedByCurrentUser( l ) );
        assertTrue( SecurityUtil.getCurrentUsername() + " can edit the label", securityService.isEditable( l ) );
        // Now let's try a normal user, try adding label
        super.runAsUser( this.username );
        assertFalse( SecurityUtil.getCurrentUsername() + " does not own the label",
                securityService.isOwnedByCurrentUser( l ) );
        Subject subject = persistentTestObjectHelper.createDetachedIndividualObject( "userSubject" );
        Long userSubjectId = subject.getId();
        subjectIds.clear();
        subjectIds.add( userSubjectId );
        try {
            // try adding admin label as a normal user
            lvo = subjectService.addLabel( subjectIds, l.toValueObject() );
            fail( "User can not use admin label" );
        } catch ( AccessDeniedException e ) {
            // expected: the user has no permission on the admin-owned label
        }
        try {
            securityService.isEditable( l );
            fail( SecurityUtil.getCurrentUsername() + " can not edit the label" );
        } catch ( AccessDeniedException e ) {
            // expected: the ACL check denies even the editability query itself
        }
        // try admin delete
        super.runAsAdmin();
        lvos = persistentTestObjectHelper.getLabelsForSubject( subjectId );
        assertEquals( 1, lvos.size() );
        try {
            labelService.deleteSubjectLabel( lvos.iterator().next() );
        } catch ( AccessDeniedException e ) {
            fail( "Admin created label" );
        }
    }
    /**
     * Verifies that removing a shared label from one variant does not remove the
     * same label from another variant that still carries it.
     */
    @Test
    public void testDeleteVariant() {
        super.runAsAdmin();
        LabelValueObject lvo = new LabelValueObject();
        lvo.setColour( "red" );
        lvo.setName( "blah" );
        lvo.setIsShown( true );
        CNV v1 = persistentTestObjectHelper.createPersistentTestCNVObject();
        CNV v2 = persistentTestObjectHelper.createPersistentTestCNVObject();
        Collection<Long> variantIdsToLabel = new ArrayList<>();
        variantIdsToLabel.add( v1.getId() );
        variantIdsToLabel.add( v2.getId() );
        variantService.addLabel( variantIdsToLabel, lvo );
        Collection<LabelValueObject> lvos1 = persistentTestObjectHelper.getLabelsForVariant( v1.getId() );
        Collection<LabelValueObject> lvos2 = persistentTestObjectHelper.getLabelsForVariant( v2.getId() );
        assertEquals( 1, lvos1.size() );
        assertEquals( 1, lvos2.size() );
        // only delete label for v1 while keeping label for v2
        Collection<Long> variantIdsToRemove = new ArrayList<>();
        variantIdsToRemove.add( v1.getId() );
        labelService.removeLabelsFromVariants( lvos1, variantIdsToRemove );
        lvos1 = persistentTestObjectHelper.getLabelsForVariant( v1.getId() );
        lvos2 = persistentTestObjectHelper.getLabelsForVariant( v2.getId() );
        assertEquals( 0, lvos1.size() );
        assertEquals( 1, lvos2.size() );
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// --------------------------------------------------------------
// THIS IS A GENERATED SOURCE FILE. DO NOT EDIT!
// GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator.
// --------------------------------------------------------------
package org.apache.flink.api.java.tuple;
import org.apache.flink.annotation.Public;
import org.apache.flink.util.StringUtils;
/**
* A tuple with 25 fields. Tuples are strongly typed; each field may be of a separate type.
* The fields of the tuple can be accessed directly as public fields (f0, f1, ...) or via their position
* through the {@link #getField(int)} method. The tuple field positions start at zero.
*
* <p>Tuples are mutable types, meaning that their fields can be re-assigned. This allows functions that work
* with Tuples to reuse objects in order to reduce pressure on the garbage collector.</p>
*
* <p>Warning: If you subclass Tuple25, then be sure to either <ul>
* <li> not add any new fields, or </li>
* <li> make it a POJO, and always declare the element type of your DataStreams/DataSets to your descendant
* type. (That is, if you have a "class Foo extends Tuple25", then don't use instances of
* Foo in a DataStream<Tuple25> / DataSet<Tuple25>, but declare it as
* DataStream<Foo> / DataSet<Foo>.) </li>
* </ul></p>
* @see Tuple
*
* @param <T0> The type of field 0
* @param <T1> The type of field 1
* @param <T2> The type of field 2
* @param <T3> The type of field 3
* @param <T4> The type of field 4
* @param <T5> The type of field 5
* @param <T6> The type of field 6
* @param <T7> The type of field 7
* @param <T8> The type of field 8
* @param <T9> The type of field 9
* @param <T10> The type of field 10
* @param <T11> The type of field 11
* @param <T12> The type of field 12
* @param <T13> The type of field 13
* @param <T14> The type of field 14
* @param <T15> The type of field 15
* @param <T16> The type of field 16
* @param <T17> The type of field 17
* @param <T18> The type of field 18
* @param <T19> The type of field 19
* @param <T20> The type of field 20
* @param <T21> The type of field 21
* @param <T22> The type of field 22
* @param <T23> The type of field 23
* @param <T24> The type of field 24
*/
@Public
public class Tuple25<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> extends Tuple {
	private static final long serialVersionUID = 1L;
	/** Field 0 of the tuple. */
	public T0 f0;
	/** Field 1 of the tuple. */
	public T1 f1;
	/** Field 2 of the tuple. */
	public T2 f2;
	/** Field 3 of the tuple. */
	public T3 f3;
	/** Field 4 of the tuple. */
	public T4 f4;
	/** Field 5 of the tuple. */
	public T5 f5;
	/** Field 6 of the tuple. */
	public T6 f6;
	/** Field 7 of the tuple. */
	public T7 f7;
	/** Field 8 of the tuple. */
	public T8 f8;
	/** Field 9 of the tuple. */
	public T9 f9;
	/** Field 10 of the tuple. */
	public T10 f10;
	/** Field 11 of the tuple. */
	public T11 f11;
	/** Field 12 of the tuple. */
	public T12 f12;
	/** Field 13 of the tuple. */
	public T13 f13;
	/** Field 14 of the tuple. */
	public T14 f14;
	/** Field 15 of the tuple. */
	public T15 f15;
	/** Field 16 of the tuple. */
	public T16 f16;
	/** Field 17 of the tuple. */
	public T17 f17;
	/** Field 18 of the tuple. */
	public T18 f18;
	/** Field 19 of the tuple. */
	public T19 f19;
	/** Field 20 of the tuple. */
	public T20 f20;
	/** Field 21 of the tuple. */
	public T21 f21;
	/** Field 22 of the tuple. */
	public T22 f22;
	/** Field 23 of the tuple. */
	public T23 f23;
	/** Field 24 of the tuple. */
	public T24 f24;
	/**
	 * Creates a new tuple where all fields are null.
	 */
	public Tuple25() {}
	/**
	 * Creates a new tuple and assigns the given values to the tuple's fields.
	 *
	 * @param value0 The value for field 0
	 * @param value1 The value for field 1
	 * @param value2 The value for field 2
	 * @param value3 The value for field 3
	 * @param value4 The value for field 4
	 * @param value5 The value for field 5
	 * @param value6 The value for field 6
	 * @param value7 The value for field 7
	 * @param value8 The value for field 8
	 * @param value9 The value for field 9
	 * @param value10 The value for field 10
	 * @param value11 The value for field 11
	 * @param value12 The value for field 12
	 * @param value13 The value for field 13
	 * @param value14 The value for field 14
	 * @param value15 The value for field 15
	 * @param value16 The value for field 16
	 * @param value17 The value for field 17
	 * @param value18 The value for field 18
	 * @param value19 The value for field 19
	 * @param value20 The value for field 20
	 * @param value21 The value for field 21
	 * @param value22 The value for field 22
	 * @param value23 The value for field 23
	 * @param value24 The value for field 24
	 */
	public Tuple25(T0 value0, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8, T9 value9, T10 value10, T11 value11, T12 value12, T13 value13, T14 value14, T15 value15, T16 value16, T17 value17, T18 value18, T19 value19, T20 value20, T21 value21, T22 value22, T23 value23, T24 value24) {
		this.f0 = value0;
		this.f1 = value1;
		this.f2 = value2;
		this.f3 = value3;
		this.f4 = value4;
		this.f5 = value5;
		this.f6 = value6;
		this.f7 = value7;
		this.f8 = value8;
		this.f9 = value9;
		this.f10 = value10;
		this.f11 = value11;
		this.f12 = value12;
		this.f13 = value13;
		this.f14 = value14;
		this.f15 = value15;
		this.f16 = value16;
		this.f17 = value17;
		this.f18 = value18;
		this.f19 = value19;
		this.f20 = value20;
		this.f21 = value21;
		this.f22 = value22;
		this.f23 = value23;
		this.f24 = value24;
	}
	/**
	 * Returns the number of fields in this tuple; always 25.
	 */
	@Override
	public int getArity() { return 25; }
	/**
	 * Returns the field at the given zero-based position.
	 *
	 * @param pos the field position, in the range [0, 24]
	 * @throws IndexOutOfBoundsException if {@code pos} is not in [0, 24]
	 */
	@Override
	@SuppressWarnings("unchecked")
	public <T> T getField(int pos) {
		switch(pos) {
			case 0: return (T) this.f0;
			case 1: return (T) this.f1;
			case 2: return (T) this.f2;
			case 3: return (T) this.f3;
			case 4: return (T) this.f4;
			case 5: return (T) this.f5;
			case 6: return (T) this.f6;
			case 7: return (T) this.f7;
			case 8: return (T) this.f8;
			case 9: return (T) this.f9;
			case 10: return (T) this.f10;
			case 11: return (T) this.f11;
			case 12: return (T) this.f12;
			case 13: return (T) this.f13;
			case 14: return (T) this.f14;
			case 15: return (T) this.f15;
			case 16: return (T) this.f16;
			case 17: return (T) this.f17;
			case 18: return (T) this.f18;
			case 19: return (T) this.f19;
			case 20: return (T) this.f20;
			case 21: return (T) this.f21;
			case 22: return (T) this.f22;
			case 23: return (T) this.f23;
			case 24: return (T) this.f24;
			default: throw new IndexOutOfBoundsException(String.valueOf(pos));
		}
	}
	/**
	 * Sets the field at the given zero-based position. The value is cast
	 * unchecked to the field's declared type; passing an incompatible value
	 * results in a ClassCastException on a later typed access.
	 *
	 * @param value the value to store
	 * @param pos the field position, in the range [0, 24]
	 * @throws IndexOutOfBoundsException if {@code pos} is not in [0, 24]
	 */
	@Override
	@SuppressWarnings("unchecked")
	public <T> void setField(T value, int pos) {
		switch(pos) {
			case 0:
				this.f0 = (T0) value;
				break;
			case 1:
				this.f1 = (T1) value;
				break;
			case 2:
				this.f2 = (T2) value;
				break;
			case 3:
				this.f3 = (T3) value;
				break;
			case 4:
				this.f4 = (T4) value;
				break;
			case 5:
				this.f5 = (T5) value;
				break;
			case 6:
				this.f6 = (T6) value;
				break;
			case 7:
				this.f7 = (T7) value;
				break;
			case 8:
				this.f8 = (T8) value;
				break;
			case 9:
				this.f9 = (T9) value;
				break;
			case 10:
				this.f10 = (T10) value;
				break;
			case 11:
				this.f11 = (T11) value;
				break;
			case 12:
				this.f12 = (T12) value;
				break;
			case 13:
				this.f13 = (T13) value;
				break;
			case 14:
				this.f14 = (T14) value;
				break;
			case 15:
				this.f15 = (T15) value;
				break;
			case 16:
				this.f16 = (T16) value;
				break;
			case 17:
				this.f17 = (T17) value;
				break;
			case 18:
				this.f18 = (T18) value;
				break;
			case 19:
				this.f19 = (T19) value;
				break;
			case 20:
				this.f20 = (T20) value;
				break;
			case 21:
				this.f21 = (T21) value;
				break;
			case 22:
				this.f22 = (T22) value;
				break;
			case 23:
				this.f23 = (T23) value;
				break;
			case 24:
				this.f24 = (T24) value;
				break;
			default: throw new IndexOutOfBoundsException(String.valueOf(pos));
		}
	}
	/**
	 * Sets new values to all fields of the tuple.
	 *
	 * @param value0 The value for field 0
	 * @param value1 The value for field 1
	 * @param value2 The value for field 2
	 * @param value3 The value for field 3
	 * @param value4 The value for field 4
	 * @param value5 The value for field 5
	 * @param value6 The value for field 6
	 * @param value7 The value for field 7
	 * @param value8 The value for field 8
	 * @param value9 The value for field 9
	 * @param value10 The value for field 10
	 * @param value11 The value for field 11
	 * @param value12 The value for field 12
	 * @param value13 The value for field 13
	 * @param value14 The value for field 14
	 * @param value15 The value for field 15
	 * @param value16 The value for field 16
	 * @param value17 The value for field 17
	 * @param value18 The value for field 18
	 * @param value19 The value for field 19
	 * @param value20 The value for field 20
	 * @param value21 The value for field 21
	 * @param value22 The value for field 22
	 * @param value23 The value for field 23
	 * @param value24 The value for field 24
	 */
	public void setFields(T0 value0, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8, T9 value9, T10 value10, T11 value11, T12 value12, T13 value13, T14 value14, T15 value15, T16 value16, T17 value17, T18 value18, T19 value19, T20 value20, T21 value21, T22 value22, T23 value23, T24 value24) {
		this.f0 = value0;
		this.f1 = value1;
		this.f2 = value2;
		this.f3 = value3;
		this.f4 = value4;
		this.f5 = value5;
		this.f6 = value6;
		this.f7 = value7;
		this.f8 = value8;
		this.f9 = value9;
		this.f10 = value10;
		this.f11 = value11;
		this.f12 = value12;
		this.f13 = value13;
		this.f14 = value14;
		this.f15 = value15;
		this.f16 = value16;
		this.f17 = value17;
		this.f18 = value18;
		this.f19 = value19;
		this.f20 = value20;
		this.f21 = value21;
		this.f22 = value22;
		this.f23 = value23;
		this.f24 = value24;
	}
	// -------------------------------------------------------------------------------------------------
	// standard utilities
	// -------------------------------------------------------------------------------------------------
	/**
	 * Creates a string representation of the tuple in the form
	 * (f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f14, f15, f16, f17, f18, f19, f20, f21, f22, f23, f24),
	 * where the individual fields are the value returned by calling {@link Object#toString} on that field.
	 * @return The string representation of the tuple.
	 */
	@Override
	public String toString() {
		return "(" + StringUtils.arrayAwareToString(this.f0)
			+ "," + StringUtils.arrayAwareToString(this.f1)
			+ "," + StringUtils.arrayAwareToString(this.f2)
			+ "," + StringUtils.arrayAwareToString(this.f3)
			+ "," + StringUtils.arrayAwareToString(this.f4)
			+ "," + StringUtils.arrayAwareToString(this.f5)
			+ "," + StringUtils.arrayAwareToString(this.f6)
			+ "," + StringUtils.arrayAwareToString(this.f7)
			+ "," + StringUtils.arrayAwareToString(this.f8)
			+ "," + StringUtils.arrayAwareToString(this.f9)
			+ "," + StringUtils.arrayAwareToString(this.f10)
			+ "," + StringUtils.arrayAwareToString(this.f11)
			+ "," + StringUtils.arrayAwareToString(this.f12)
			+ "," + StringUtils.arrayAwareToString(this.f13)
			+ "," + StringUtils.arrayAwareToString(this.f14)
			+ "," + StringUtils.arrayAwareToString(this.f15)
			+ "," + StringUtils.arrayAwareToString(this.f16)
			+ "," + StringUtils.arrayAwareToString(this.f17)
			+ "," + StringUtils.arrayAwareToString(this.f18)
			+ "," + StringUtils.arrayAwareToString(this.f19)
			+ "," + StringUtils.arrayAwareToString(this.f20)
			+ "," + StringUtils.arrayAwareToString(this.f21)
			+ "," + StringUtils.arrayAwareToString(this.f22)
			+ "," + StringUtils.arrayAwareToString(this.f23)
			+ "," + StringUtils.arrayAwareToString(this.f24)
			+ ")";
	}
	/**
	 * Deep equality for tuples by calling equals() on the tuple members
	 * @param o the object checked for equality
	 * @return true if this is equal to o.
	 */
	@Override
	public boolean equals(Object o) {
		if(this == o) { return true; }
		if (!(o instanceof Tuple25)) { return false; }
		@SuppressWarnings("rawtypes")
		Tuple25 tuple = (Tuple25) o;
		// null-safe field-by-field comparison over all 25 fields
		if (f0 != null ? !f0.equals(tuple.f0) : tuple.f0 != null) { return false; }
		if (f1 != null ? !f1.equals(tuple.f1) : tuple.f1 != null) { return false; }
		if (f2 != null ? !f2.equals(tuple.f2) : tuple.f2 != null) { return false; }
		if (f3 != null ? !f3.equals(tuple.f3) : tuple.f3 != null) { return false; }
		if (f4 != null ? !f4.equals(tuple.f4) : tuple.f4 != null) { return false; }
		if (f5 != null ? !f5.equals(tuple.f5) : tuple.f5 != null) { return false; }
		if (f6 != null ? !f6.equals(tuple.f6) : tuple.f6 != null) { return false; }
		if (f7 != null ? !f7.equals(tuple.f7) : tuple.f7 != null) { return false; }
		if (f8 != null ? !f8.equals(tuple.f8) : tuple.f8 != null) { return false; }
		if (f9 != null ? !f9.equals(tuple.f9) : tuple.f9 != null) { return false; }
		if (f10 != null ? !f10.equals(tuple.f10) : tuple.f10 != null) { return false; }
		if (f11 != null ? !f11.equals(tuple.f11) : tuple.f11 != null) { return false; }
		if (f12 != null ? !f12.equals(tuple.f12) : tuple.f12 != null) { return false; }
		if (f13 != null ? !f13.equals(tuple.f13) : tuple.f13 != null) { return false; }
		if (f14 != null ? !f14.equals(tuple.f14) : tuple.f14 != null) { return false; }
		if (f15 != null ? !f15.equals(tuple.f15) : tuple.f15 != null) { return false; }
		if (f16 != null ? !f16.equals(tuple.f16) : tuple.f16 != null) { return false; }
		if (f17 != null ? !f17.equals(tuple.f17) : tuple.f17 != null) { return false; }
		if (f18 != null ? !f18.equals(tuple.f18) : tuple.f18 != null) { return false; }
		if (f19 != null ? !f19.equals(tuple.f19) : tuple.f19 != null) { return false; }
		if (f20 != null ? !f20.equals(tuple.f20) : tuple.f20 != null) { return false; }
		if (f21 != null ? !f21.equals(tuple.f21) : tuple.f21 != null) { return false; }
		if (f22 != null ? !f22.equals(tuple.f22) : tuple.f22 != null) { return false; }
		if (f23 != null ? !f23.equals(tuple.f23) : tuple.f23 != null) { return false; }
		if (f24 != null ? !f24.equals(tuple.f24) : tuple.f24 != null) { return false; }
		return true;
	}
	/**
	 * Computes a hash code from all 25 fields in order, using the usual
	 * 31-multiplier accumulation; null fields contribute 0.
	 */
	@Override
	public int hashCode() {
		int result = f0 != null ? f0.hashCode() : 0;
		result = 31 * result + (f1 != null ? f1.hashCode() : 0);
		result = 31 * result + (f2 != null ? f2.hashCode() : 0);
		result = 31 * result + (f3 != null ? f3.hashCode() : 0);
		result = 31 * result + (f4 != null ? f4.hashCode() : 0);
		result = 31 * result + (f5 != null ? f5.hashCode() : 0);
		result = 31 * result + (f6 != null ? f6.hashCode() : 0);
		result = 31 * result + (f7 != null ? f7.hashCode() : 0);
		result = 31 * result + (f8 != null ? f8.hashCode() : 0);
		result = 31 * result + (f9 != null ? f9.hashCode() : 0);
		result = 31 * result + (f10 != null ? f10.hashCode() : 0);
		result = 31 * result + (f11 != null ? f11.hashCode() : 0);
		result = 31 * result + (f12 != null ? f12.hashCode() : 0);
		result = 31 * result + (f13 != null ? f13.hashCode() : 0);
		result = 31 * result + (f14 != null ? f14.hashCode() : 0);
		result = 31 * result + (f15 != null ? f15.hashCode() : 0);
		result = 31 * result + (f16 != null ? f16.hashCode() : 0);
		result = 31 * result + (f17 != null ? f17.hashCode() : 0);
		result = 31 * result + (f18 != null ? f18.hashCode() : 0);
		result = 31 * result + (f19 != null ? f19.hashCode() : 0);
		result = 31 * result + (f20 != null ? f20.hashCode() : 0);
		result = 31 * result + (f21 != null ? f21.hashCode() : 0);
		result = 31 * result + (f22 != null ? f22.hashCode() : 0);
		result = 31 * result + (f23 != null ? f23.hashCode() : 0);
		result = 31 * result + (f24 != null ? f24.hashCode() : 0);
		return result;
	}
	/**
	 * Shallow tuple copy.
	 * @return A new Tuple with the same fields as this.
	 */
	@Override
	@SuppressWarnings("unchecked")
	public Tuple25<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22,T23,T24> copy(){
		return new Tuple25<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22,T23,T24>(this.f0,
			this.f1,
			this.f2,
			this.f3,
			this.f4,
			this.f5,
			this.f6,
			this.f7,
			this.f8,
			this.f9,
			this.f10,
			this.f11,
			this.f12,
			this.f13,
			this.f14,
			this.f15,
			this.f16,
			this.f17,
			this.f18,
			this.f19,
			this.f20,
			this.f21,
			this.f22,
			this.f23,
			this.f24);
	}
	/**
	 * Creates a new tuple and assigns the given values to the tuple's fields.
	 * This is more convenient than using the constructor, because the compiler can
	 * infer the generic type arguments implicitly. For example:
	 * {@code Tuple3.of(n, x, s)}
	 * instead of
	 * {@code new Tuple3<Integer, Double, String>(n, x, s)}
	 */
	public static <T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22,T23,T24> Tuple25<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22,T23,T24> of(T0 value0, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8, T9 value9, T10 value10, T11 value11, T12 value12, T13 value13, T14 value14, T15 value15, T16 value16, T17 value17, T18 value18, T19 value19, T20 value20, T21 value21, T22 value22, T23 value23, T24 value24) {
		return new Tuple25<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22,T23,T24>(value0, value1, value2, value3, value4, value5, value6, value7, value8, value9, value10, value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, value21, value22, value23, value24);
	}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.util;
import static java.nio.charset.StandardCharsets.US_ASCII;
import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.io.Files;
import com.sun.management.OperatingSystemMXBean;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.util.Iterator;
/**
* Provides methods to measure the current resource usage of the current
* process. Also provides some convenience methods to obtain several system
* characteristics, like number of processors , total memory, etc.
*/
public final class ResourceUsage {
/*
* Use com.sun.management.OperatingSystemMXBean instead of
* java.lang.management.OperatingSystemMXBean because the latter does not
* support getTotalPhysicalMemorySize() and getFreePhysicalMemorySize().
*/
private static final OperatingSystemMXBean OS_BEAN =
(OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean();
private static final MemoryMXBean MEM_BEAN = ManagementFactory.getMemoryMXBean();
private static final Splitter WHITESPACE_SPLITTER = Splitter.on(CharMatcher.WHITESPACE);
/**
* Calculates an estimate of the current total CPU usage and the CPU usage of
* the process in percent measured from the two given measurements. The
* returned CPU usages rea average values for the time between the two
* measurements. The returned array contains the total CPU usage at index 0
* and the CPU usage of the measured process at index 1.
*/
public static float[] calculateCurrentCpuUsage(Measurement oldMeasurement,
Measurement newMeasurement) {
if (oldMeasurement == null) {
return new float[2];
}
long idleJiffies =
newMeasurement.getTotalCpuIdleTimeInJiffies()
- oldMeasurement.getTotalCpuIdleTimeInJiffies();
long oldProcessJiffies =
oldMeasurement.getCpuUtilizationInJiffies()[0]
+ oldMeasurement.getCpuUtilizationInJiffies()[1];
long newProcessJiffies =
newMeasurement.getCpuUtilizationInJiffies()[0]
+ newMeasurement.getCpuUtilizationInJiffies()[1];
long processJiffies = newProcessJiffies - oldProcessJiffies;
long elapsedTimeJiffies =
newMeasurement.getTimeInJiffies() - oldMeasurement.getTimeInJiffies();
int processors = getAvailableProcessors();
// TODO(bazel-team): Sometimes smaller then zero. Not sure why.
double totalUsage = Math.max(0, 1.0D - (double) idleJiffies / elapsedTimeJiffies / processors);
double usage = Math.max(0, (double) processJiffies / elapsedTimeJiffies / processors);
return new float[] {(float) totalUsage * 100, (float) usage * 100};
}
private ResourceUsage() {
}
/**
* Returns the number of processors available to the Java virtual machine.
*/
public static int getAvailableProcessors() {
return OS_BEAN.getAvailableProcessors();
}
/**
* Returns the total physical memory in bytes.
*/
public static long getTotalPhysicalMemorySize() {
return OS_BEAN.getTotalPhysicalMemorySize();
}
/**
* Returns the operating system architecture.
*/
public static String getOsArchitecture() {
return OS_BEAN.getArch();
}
/**
* Returns the operating system name.
*/
public static String getOsName() {
return OS_BEAN.getName();
}
/**
* Returns the operating system version.
*/
public static String getOsVersion() {
return OS_BEAN.getVersion();
}
/**
* Returns the initial size of heap memory in bytes.
*
* @see MemoryMXBean#getHeapMemoryUsage()
*/
public static long getHeapMemoryInit() {
return MEM_BEAN.getHeapMemoryUsage().getInit();
}
/**
* Returns the initial size of non heap memory in bytes.
*
* @see MemoryMXBean#getNonHeapMemoryUsage()
*/
public static long getNonHeapMemoryInit() {
return MEM_BEAN.getNonHeapMemoryUsage().getInit();
}
/**
* Returns the maximum size of heap memory in bytes.
*
* @see MemoryMXBean#getHeapMemoryUsage()
*/
public static long getHeapMemoryMax() {
return MEM_BEAN.getHeapMemoryUsage().getMax();
}
/**
* Returns the maximum size of non heap memory in bytes.
*
* @see MemoryMXBean#getNonHeapMemoryUsage()
*/
public static long getNonHeapMemoryMax() {
return MEM_BEAN.getNonHeapMemoryUsage().getMax();
}
/**
* Returns a measurement of the current resource usage of the current process.
*/
public static Measurement measureCurrentResourceUsage() {
return measureCurrentResourceUsage("self");
}
/**
* Returns a measurement of the current resource usage of the process with the
* given process id.
*
* @param processId the process id or <code>self</code> for the current
* process.
*/
public static Measurement measureCurrentResourceUsage(String processId) {
return new Measurement(MEM_BEAN.getHeapMemoryUsage().getUsed(), MEM_BEAN.getHeapMemoryUsage()
.getCommitted(), MEM_BEAN.getNonHeapMemoryUsage().getUsed(), MEM_BEAN
.getNonHeapMemoryUsage().getCommitted(), (float) OS_BEAN.getSystemLoadAverage(), OS_BEAN
.getFreePhysicalMemorySize(), getCurrentTotalIdleTimeInJiffies(),
getCurrentCpuUtilizationInJiffies(processId));
}
/**
* Returns the current total idle time of the processors since system boot.
* Reads /proc/stat to obtain this information.
*/
private static long getCurrentTotalIdleTimeInJiffies() {
try {
File file = new File("/proc/stat");
String content = Files.toString(file, US_ASCII);
String value = Iterables.get(WHITESPACE_SPLITTER.split(content), 5);
return Long.parseLong(value);
} catch (NumberFormatException | IOException e) {
return 0L;
}
}
/**
* Returns the current cpu utilization of the current process with the given
* id in jiffies. The returned array contains the following information: The
* 1st entry is the number of jiffies that the process has executed in user
* mode, and the 2nd entry is the number of jiffies that the process has
* executed in kernel mode. Reads /proc/self/stat to obtain this information.
*
* @param processId the process id or <code>self</code> for the current
* process.
*/
private static long[] getCurrentCpuUtilizationInJiffies(String processId) {
try {
File file = new File("/proc/" + processId + "/stat");
if (file.isDirectory()) {
return new long[2];
}
Iterator<String> stat = WHITESPACE_SPLITTER.split(
Files.toString(file, US_ASCII)).iterator();
for (int i = 0; i < 13; ++i) {
stat.next();
}
long token13 = Long.parseLong(stat.next());
long token14 = Long.parseLong(stat.next());
return new long[] { token13, token14 };
} catch (NumberFormatException | IOException e) {
return new long[2];
}
}
/**
* A snapshot of the resource usage of the current process at a point in time.
*/
public static class Measurement {
private final long timeInNanos;
private final long heapMemoryUsed;
private final long heapMemoryCommitted;
private final long nonHeapMemoryUsed;
private final long nonHeapMemoryCommitted;
private final float loadAverageLastMinute;
private final long freePhysicalMemory;
private final long totalCpuIdleTimeInJiffies;
private final long[] cpuUtilizationInJiffies;
public Measurement(long heapMemoryUsed, long heapMemoryCommitted, long nonHeapMemoryUsed,
long nonHeapMemoryCommitted, float loadAverageLastMinute, long freePhysicalMemory,
long totalCpuIdleTimeInJiffies, long[] cpuUtilizationInJiffies) {
super();
timeInNanos = System.nanoTime();
this.heapMemoryUsed = heapMemoryUsed;
this.heapMemoryCommitted = heapMemoryCommitted;
this.nonHeapMemoryUsed = nonHeapMemoryUsed;
this.nonHeapMemoryCommitted = nonHeapMemoryCommitted;
this.loadAverageLastMinute = loadAverageLastMinute;
this.freePhysicalMemory = freePhysicalMemory;
this.totalCpuIdleTimeInJiffies = totalCpuIdleTimeInJiffies;
this.cpuUtilizationInJiffies = cpuUtilizationInJiffies;
}
/**
* Returns the time of the measurement in jiffies.
*/
public long getTimeInJiffies() {
return timeInNanos / 10000000;
}
/**
* Returns the time of the measurement in ms.
*/
public long getTimeInMs() {
return timeInNanos / 1000000;
}
/**
* Returns the amount of used heap memory in bytes at the time of
* measurement.
*
* @see MemoryMXBean#getHeapMemoryUsage()
*/
public long getHeapMemoryUsed() {
return heapMemoryUsed;
}
/**
* Returns the amount of used non heap memory in bytes at the time of
* measurement.
*
* @see MemoryMXBean#getNonHeapMemoryUsage()
*/
public long getHeapMemoryCommitted() {
return heapMemoryCommitted;
}
/**
* Returns the amount of memory in bytes that is committed for the Java
* virtual machine to use for the heap at the time of measurement.
*
* @see MemoryMXBean#getHeapMemoryUsage()
*/
public long getNonHeapMemoryUsed() {
return nonHeapMemoryUsed;
}
/**
* Returns the amount of memory in bytes that is committed for the Java
* virtual machine to use for non heap memory at the time of measurement.
*
* @see MemoryMXBean#getNonHeapMemoryUsage()
*/
public long getNonHeapMemoryCommitted() {
return nonHeapMemoryCommitted;
}
    /**
     * Returns the system load average for the last minute at the time of
     * measurement.
     *
     * @see OperatingSystemMXBean#getSystemLoadAverage()
     */
    public float getLoadAverageLastMinute() {
        return loadAverageLastMinute;
    }
/**
* Returns the free physical memmory in bytes at the time of measurement.
*/
public long getFreePhysicalMemory() {
return freePhysicalMemory;
}
    /**
     * Returns the total CPU idle time since system boot, in jiffies,
     * as sampled at construction time.
     */
    public long getTotalCpuIdleTimeInJiffies() {
        return totalCpuIdleTimeInJiffies;
    }
    /**
     * Returns the current cpu utilization of the current process in jiffies.
     * The returned array contains the following information: The 1st entry is
     * the number of jiffies that the process has executed in user mode, and the
     * 2nd entry is the number of jiffies that the process has executed in
     * kernel mode. Reads /proc/self/stat to obtain this information.
     * NOTE(review): returns the internal array itself (no copy); callers must
     * treat it as read-only.
     */
    public long[] getCpuUtilizationInJiffies() {
        return cpuUtilizationInJiffies;
    }
/**
* Returns the current cpu utilization of the current process in ms. The
* returned array contains the following information: The 1st entry is the
* number of ms that the process has executed in user mode, and the 2nd
* entry is the number of ms that the process has executed in kernel mode.
* Reads /proc/self/stat to obtain this information.
*/
public long[] getCpuUtilizationInMs() {
return new long[] {cpuUtilizationInJiffies[0] * 10, cpuUtilizationInJiffies[1] * 10};
}
}
}
| |
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2000-2016. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
package com.ericsson.otp.erlang;
import java.io.IOException;
/**
 * Maintains a connection between a Java process and a remote Erlang, Java or C
 * node. The object maintains connection state and allows data to be sent to and
 * received from the peer.
 *
 * <p>
 * Once a connection is established between the local node and a remote node,
 * the connection object can be used to send and receive messages between the
 * nodes and make rpc calls (assuming that the remote node is a real Erlang
 * node).
 *
 * <p>
 * The various receive methods are all blocking and will return only when a
 * valid message has been received or an exception is raised.
 *
 * <p>
 * If an exception occurs in any of the methods in this class, the connection
 * will be closed and must be explicitly reopened in order to resume
 * communication with the peer.
 *
 * <p>
 * It is not possible to create an instance of this class directly.
 * OtpConnection objects are returned by {@link OtpSelf#connect(OtpPeer)
 * OtpSelf.connect()} and {@link OtpSelf#accept() OtpSelf.accept()}.
 */
public class OtpConnection extends AbstractConnection {
    protected OtpSelf self;
    protected GenericQueue queue; // messages get delivered here
    // FIX: this field was never initialized, so link()/unlink() and incoming
    // link/unlink/exit messages would throw NullPointerException. Initialize
    // eagerly, matching upstream jinterface.
    protected Links links = new Links(25);
    private long unlink_id; // generator for unlink correlation ids; 0 is reserved

    /*
     * Accept an incoming connection from a remote node. Used by {@link
     * OtpSelf#accept() OtpSelf.accept()} to create a connection based on data
     * received when handshaking with the peer node, when the remote node is the
     * connection initiator.
     *
     * @exception java.io.IOException if it was not possible to connect to the
     * peer.
     *
     * @exception OtpAuthException if handshake resulted in an authentication
     * error
     */
    // package scope
    OtpConnection(final OtpSelf self, final OtpTransport s)
            throws IOException, OtpAuthException {
        super(self, s);
        this.self = self;
        queue = new GenericQueue();
        start();
    }

    /*
     * Initiate and open a connection to a remote node.
     *
     * @exception java.io.IOException if it was not possible to connect to the
     * peer.
     *
     * @exception OtpAuthException if handshake resulted in an authentication
     * error.
     */
    // package scope
    OtpConnection(final OtpSelf self, final OtpPeer other) throws IOException,
            OtpAuthException {
        super(self, other);
        this.self = self;
        queue = new GenericQueue();
        start();
    }

    @Override
    public void deliver(final Exception e) {
        // Exceptions are queued and re-thrown from the next receive*() call.
        queue.put(e);
    }

    @Override
    public void deliver(final OtpMsg msg) {
        switch (msg.type()) {
        case OtpMsg.exitTag:
        case OtpMsg.linkTag:
        case OtpMsg.unlinkTag:
        case AbstractConnection.unlinkIdTag:
        case AbstractConnection.unlinkIdAckTag:
            // Link-protocol traffic updates the link table before (possibly)
            // being made visible to the application.
            handle_link_operation(msg);
            break;
        default:
            queue.put(msg);
            break;
        }
    }

    /*
     * Apply an incoming link/unlink/exit protocol message to the link table,
     * queueing a message for the application only when the table actually
     * changed (avoids duplicate notifications).
     */
    private synchronized void handle_link_operation(final OtpMsg m) {
        final OtpErlangPid remote = m.getSenderPid();
        switch (m.type()) {
        case OtpMsg.linkTag:
            // only queue up link-message if link was added...
            if (links.addLink(self.pid(), remote, false)) {
                queue.put(m);
            }
            break;
        case OtpMsg.unlinkTag:
        case AbstractConnection.unlinkIdTag: {
            // renamed local (was unlink_id) to avoid shadowing the field
            final long unlinkId = m.getUnlinkId();
            // only queue up unlink-message if link was removed...
            if (links.removeActiveLink(self.pid(), remote)) {
                // Use old unlinkTag without unlink id for
                // backwards compatibility...
                queue.put(new OtpMsg(OtpMsg.unlinkTag, self.pid(),
                                     remote));
            }
            try {
                super.sendUnlinkAck(self.pid(), remote, unlinkId);
            } catch (final Exception ignored) {
                // Best effort: the ack is advisory; a broken connection is
                // reported through the normal delivery path.
            }
            break;
        }
        case AbstractConnection.unlinkIdAckTag:
            links.removeUnlinkingLink(self.pid(), remote, m.getUnlinkId());
            break;
        case OtpMsg.exitTag:
            // only queue up exit-message if link was removed...
            if (links.removeActiveLink(self.pid(), remote)) {
                queue.put(m);
            }
            break;
        }
    }

    /**
     * Get information about the node at the peer end of this connection.
     *
     * @return the {@link OtpPeer Node} representing the peer node.
     */
    public OtpPeer peer() {
        return peer;
    }

    /**
     * Get information about the node at the local end of this connection.
     *
     * @return the {@link OtpSelf Node} representing the local node.
     */
    public OtpSelf self() {
        return self;
    }

    /**
     * Return the number of messages currently waiting in the receive queue for
     * this connection.
     */
    public int msgCount() {
        return queue.getCount();
    }

    /**
     * Receive a message from a remote process. This method blocks until a valid
     * message is received or an exception is raised.
     *
     * <p>
     * If the remote node sends a message that cannot be decoded properly, the
     * connection is closed and the method throws an exception.
     *
     * @return an object containing a single Erlang term.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     */
    public OtpErlangObject receive() throws IOException, OtpErlangExit,
            OtpAuthException {
        try {
            return receiveMsg().getMsg();
        } catch (final OtpErlangDecodeException e) {
            close();
            // Preserve the decode failure as the cause instead of discarding it.
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Receive a message from a remote process. This method blocks at most for
     * the specified time, until a valid message is received or an exception is
     * raised.
     *
     * <p>
     * If the remote node sends a message that cannot be decoded properly, the
     * connection is closed and the method throws an exception.
     *
     * @param timeout
     *            the time in milliseconds that this operation will block.
     *            Specify 0 to poll the queue.
     *
     * @return an object containing a single Erlang term.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     *
     * @exception InterruptedException
     *                if the method times out before a message becomes
     *                available.
     */
    public OtpErlangObject receive(final long timeout)
            throws InterruptedException, IOException, OtpErlangExit,
            OtpAuthException {
        try {
            return receiveMsg(timeout).getMsg();
        } catch (final OtpErlangDecodeException e) {
            close();
            // Preserve the decode failure as the cause instead of discarding it.
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Receive a raw (still encoded) message from a remote process. This message
     * blocks until a valid message is received or an exception is raised.
     *
     * <p>
     * If the remote node sends a message that cannot be decoded properly, the
     * connection is closed and the method throws an exception.
     *
     * @return an object containing a raw (still encoded) Erlang term.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node, or if the connection is lost for any reason.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     */
    public OtpInputStream receiveBuf() throws IOException, OtpErlangExit,
            OtpAuthException {
        return receiveMsg().getMsgBuf();
    }

    /**
     * Receive a raw (still encoded) message from a remote process. This message
     * blocks at most for the specified time until a valid message is received
     * or an exception is raised.
     *
     * <p>
     * If the remote node sends a message that cannot be decoded properly, the
     * connection is closed and the method throws an exception.
     *
     * @param timeout
     *            the time in milliseconds that this operation will block.
     *            Specify 0 to poll the queue.
     *
     * @return an object containing a raw (still encoded) Erlang term.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node, or if the connection is lost for any reason.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     *
     * @exception InterruptedException
     *                if the method times out before a message becomes
     *                available.
     */
    public OtpInputStream receiveBuf(final long timeout)
            throws InterruptedException, IOException, OtpErlangExit,
            OtpAuthException {
        return receiveMsg(timeout).getMsgBuf();
    }

    /**
     * Receive a message complete with sender and recipient information.
     *
     * @return an {@link OtpMsg OtpMsg} containing the header information about
     *         the sender and recipient, as well as the actual message contents.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node, or if the connection is lost for any reason.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     */
    public OtpMsg receiveMsg() throws IOException, OtpErlangExit,
            OtpAuthException {
        final Object o = queue.get();
        if (o instanceof OtpMsg) {
            return (OtpMsg) o;
        } else if (o instanceof IOException) {
            throw (IOException) o;
        } else if (o instanceof OtpErlangExit) {
            throw (OtpErlangExit) o;
        } else if (o instanceof OtpAuthException) {
            throw (OtpAuthException) o;
        }
        // Should not happen: deliver() only ever queues the types above.
        return null;
    }

    /**
     * Receive a message complete with sender and recipient information. This
     * method blocks at most for the specified time.
     *
     * @param timeout
     *            the time in milliseconds that this operation will block.
     *            Specify 0 to poll the queue.
     *
     * @return an {@link OtpMsg OtpMsg} containing the header information about
     *         the sender and recipient, as well as the actual message contents.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node, or if the connection is lost for any reason.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     *
     * @exception InterruptedException
     *                if the method times out before a message becomes
     *                available.
     */
    public OtpMsg receiveMsg(final long timeout) throws InterruptedException,
            IOException, OtpErlangExit, OtpAuthException {
        final Object o = queue.get(timeout);
        if (o instanceof OtpMsg) {
            return (OtpMsg) o;
        } else if (o instanceof IOException) {
            throw (IOException) o;
        } else if (o instanceof OtpErlangExit) {
            throw (OtpErlangExit) o;
        } else if (o instanceof OtpAuthException) {
            throw (OtpAuthException) o;
        }
        // Should not happen: deliver() only ever queues the types above.
        return null;
    }

    /**
     * Send a message to a process on a remote node.
     *
     * @param dest
     *            the Erlang PID of the remote process.
     * @param msg
     *            the message to send.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    @SuppressWarnings("resource")
    public void send(final OtpErlangPid dest, final OtpErlangObject msg)
            throws IOException {
        // encode and send the message
        super.sendBuf(self.pid(), dest, new OtpOutputStream(msg));
    }

    /**
     * Send a message to a named process on a remote node.
     *
     * @param dest
     *            the name of the remote process.
     * @param msg
     *            the message to send.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    @SuppressWarnings("resource")
    public void send(final String dest, final OtpErlangObject msg)
            throws IOException {
        // encode and send the message
        super.sendBuf(self.pid(), dest, new OtpOutputStream(msg));
    }

    /**
     * Send a pre-encoded message to a named process on a remote node.
     *
     * @param dest
     *            the name of the remote process.
     * @param payload
     *            the encoded message to send.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void sendBuf(final String dest, final OtpOutputStream payload)
            throws IOException {
        super.sendBuf(self.pid(), dest, payload);
    }

    /**
     * Send a pre-encoded message to a process on a remote node.
     *
     * @param dest
     *            the Erlang PID of the remote process.
     * @param payload
     *            the encoded message to send.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void sendBuf(final OtpErlangPid dest, final OtpOutputStream payload)
            throws IOException {
        super.sendBuf(self.pid(), dest, payload);
    }

    /**
     * Send an RPC request to the remote Erlang node. This convenience function
     * creates the following message and sends it to 'rex' on the remote node:
     *
     * <pre>
     * { self, { call, Mod, Fun, Args, user } }
     * </pre>
     *
     * <p>
     * Note that this method has unpredictable results if the remote node is not
     * an Erlang node.
     * </p>
     *
     * @param mod
     *            the name of the Erlang module containing the function to be
     *            called.
     * @param fun
     *            the name of the function to call.
     * @param args
     *            an array of Erlang terms, to be used as arguments to the
     *            function.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void sendRPC(final String mod, final String fun,
            final OtpErlangObject[] args) throws IOException {
        sendRPC(mod, fun, new OtpErlangList(args));
    }

    /**
     * Send an RPC request to the remote Erlang node. This convenience function
     * creates the following message and sends it to 'rex' on the remote node:
     *
     * <pre>
     * { self, { call, Mod, Fun, Args, user } }
     * </pre>
     *
     * <p>
     * Note that this method has unpredictable results if the remote node is not
     * an Erlang node.
     * </p>
     *
     * @param mod
     *            the name of the Erlang module containing the function to be
     *            called.
     * @param fun
     *            the name of the function to call.
     * @param args
     *            a list of Erlang terms, to be used as arguments to the
     *            function.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void sendRPC(final String mod, final String fun,
            final OtpErlangList args) throws IOException {
        final OtpErlangObject[] rpc = new OtpErlangObject[2];
        final OtpErlangObject[] call = new OtpErlangObject[5];
        /* {self, { call, Mod, Fun, Args, user}} */
        call[0] = new OtpErlangAtom("call");
        call[1] = new OtpErlangAtom(mod);
        call[2] = new OtpErlangAtom(fun);
        call[3] = args;
        call[4] = new OtpErlangAtom("user");
        rpc[0] = self.pid();
        rpc[1] = new OtpErlangTuple(call);
        send("rex", new OtpErlangTuple(rpc));
    }

    /**
     * Receive an RPC reply from the remote Erlang node. This convenience
     * function receives a message from the remote node, and expects it to have
     * the following format:
     *
     * <pre>
     * { rex, Term }
     * </pre>
     *
     * @return the second element of the tuple if the received message is a
     *         two-tuple, otherwise null. No further error checking is
     *         performed.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     *
     * @exception OtpErlangExit
     *                if an exit signal is received from a process on the peer
     *                node.
     *
     * @exception OtpAuthException
     *                if the remote node sends a message containing an invalid
     *                cookie.
     */
    public OtpErlangObject receiveRPC() throws IOException, OtpErlangExit,
            OtpAuthException {
        final OtpErlangObject msg = receive();
        if (msg instanceof OtpErlangTuple) {
            final OtpErlangTuple t = (OtpErlangTuple) msg;
            if (t.arity() == 2) {
                return t.elementAt(1); // obs: second element
            }
        }
        return null;
    }

    /**
     * Create a link between the local node and the specified process on the
     * remote node. If the link is still active when the remote process
     * terminates, an exit signal will be sent to this connection. Use
     * {@link #unlink unlink()} to remove the link.
     *
     * @param dest
     *            the Erlang PID of the remote process.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void link(final OtpErlangPid dest) throws IOException {
        if (links.addLink(self.pid(), dest, true)) {
            try {
                super.sendLink(self.pid(), dest);
            } catch (final IOException e) {
                links.removeLink(self.pid(), dest); // restore...
                throw e;
            }
        }
    }

    /**
     * Remove a link between the local node and the specified process on the
     * remote node. This method deactivates links created with {@link #link
     * link()}.
     *
     * @param dest
     *            the Erlang PID of the remote process.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void unlink(final OtpErlangPid dest) throws IOException {
        // 0 is reserved as "no id", so skip it when the counter wraps.
        long unlinkId = this.unlink_id++;
        if (unlinkId == 0)
            unlinkId = this.unlink_id++;
        if (links.setUnlinking(self.pid(), dest, unlinkId)) {
            try {
                super.sendUnlink(self.pid(), dest, unlinkId);
            } catch (final IOException e) {
                links.addLink(self.pid(), dest, true); // restore...
                throw e;
            }
        }
    }

    /**
     * Send an exit signal to a remote process.
     *
     * @param dest
     *            the Erlang PID of the remote process.
     * @param reason
     *            an Erlang term describing the exit reason.
     *
     * @exception java.io.IOException
     *                if the connection is not active or a communication error
     *                occurs.
     */
    public void exit(final OtpErlangPid dest, final OtpErlangObject reason)
            throws IOException {
        super.sendExit2(self.pid(), dest, reason);
    }
}
| |
// Copyright 2016 Twitter. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.twitter.heron.scheduler.yarn;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.google.common.base.Optional;
import org.apache.reef.driver.evaluator.AllocatedEvaluator;
import org.apache.reef.driver.evaluator.EvaluatorDescriptor;
import org.apache.reef.driver.evaluator.EvaluatorRequest;
import org.apache.reef.driver.evaluator.EvaluatorRequestor;
import org.apache.reef.driver.evaluator.FailedEvaluator;
import org.apache.reef.evaluator.context.parameters.ContextIdentifier;
import org.apache.reef.runtime.common.files.REEFFileNames;
import org.apache.reef.tang.Configuration;
import org.apache.reef.tang.types.NamedParameterNode;
import org.apache.reef.wake.time.event.StartTime;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.twitter.heron.common.basics.ByteAmount;
import com.twitter.heron.packing.roundrobin.RoundRobinPacking;
import com.twitter.heron.scheduler.SchedulerMain;
import com.twitter.heron.spi.packing.PackingPlan;
import com.twitter.heron.spi.utils.PackingTestUtils;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(PowerMockRunner.class)
public class HeronMasterDriverTest {
private EvaluatorRequestor mockRequestor;
private HeronMasterDriver driver;
private HeronMasterDriver spyDriver;
  @Before
  public void createMocks() throws IOException {
    // Build a real driver around a mocked evaluator requestor, then spy it so
    // individual driver interactions can be stubbed and verified per test.
    mockRequestor = mock(EvaluatorRequestor.class);
    driver = new HeronMasterDriver(mockRequestor,
        new REEFFileNames(),
        "yarn",
        "heron",
        "testTopology",
        "env",
        "jar",
        "package",
        "core",
        0,
        false);
    spyDriver = spy(driver);
    // The component RAM map is irrelevant to these tests; stub it out.
    doReturn("").when(spyDriver).getComponentRamMap();
  }
@Test
public void requestContainerForWorkerSubmitsValidRequest() {
ByteAmount memory = ByteAmount.fromMegabytes(786);
EvaluatorRequest request = spyDriver.createEvaluatorRequest(5, memory);
doReturn(request).when(spyDriver).createEvaluatorRequest(5, memory);
HeronMasterDriver.HeronWorker worker = new HeronMasterDriver.HeronWorker(3, 5, memory);
spyDriver.requestContainerForWorker(3, worker);
verify(mockRequestor, times(1)).submit(request);
}
@Test
public void scheduleHeronWorkersRequestsContainersForPacking() throws Exception {
Set<PackingPlan.ContainerPlan> containers = new HashSet<>();
PackingPlan.ContainerPlan container1 = PackingTestUtils.testContainerPlan(1, 1, 2);
containers.add(container1);
PackingPlan.ContainerPlan container2 = PackingTestUtils.testContainerPlan(2, 1, 2, 3);
containers.add(container2);
PackingPlan packing = new PackingPlan("packingId", containers);
spyDriver.scheduleHeronWorkers(packing);
verify(mockRequestor, times(2)).submit(any(EvaluatorRequest.class));
verify(spyDriver, times(1)).requestContainerForWorker(eq(1), anyHeronWorker());
verify(spyDriver, times(1)).requestContainerForWorker(eq(2), anyHeronWorker());
verify(spyDriver, times(1)).createEvaluatorRequest(getCpu(container1), getRam(container1));
verify(spyDriver, times(1)).createEvaluatorRequest(getCpu(container2), getRam(container2));
}
  @Test
  public void onKillClosesContainersKillsTMaster() throws Exception {
    // Killing the topology should close every allocated evaluator, drop the
    // evaluator-to-worker mappings, and terminate the TMaster.
    HeronMasterDriver.TMaster mockTMaster = mock(HeronMasterDriver.TMaster.class);
    when(spyDriver.buildTMaster(any(ExecutorService.class))).thenReturn(mockTMaster);
    int numContainers = 3;
    // helper defined elsewhere in this class; evaluator ids are "e0", "e1", ...
    AllocatedEvaluator[] mockEvaluators = createApplicationWithContainers(numContainers);
    spyDriver.launchTMaster();
    spyDriver.killTopology();
    for (int id = 0; id < numContainers; id++) {
      Mockito.verify(mockEvaluators[id]).close();
      assertFalse(spyDriver.lookupByEvaluatorId("e" + id).isPresent());
    }
    verify(mockTMaster, times(1)).killTMaster();
  }
  /**
   * Tests that restarting the topology closes every existing evaluator and
   * requests a replacement container for each worker.
   */
  @Test
  public void restartTopologyClosesAndStartsContainers() throws Exception {
    int numContainers = 3;
    AllocatedEvaluator[] mockEvaluators = createApplicationWithContainers(numContainers);
    // sanity: no container requests have been made yet
    verify(spyDriver, never()).requestContainerForWorker(anyInt(), anyHeronWorker());
    spyDriver.restartTopology();
    for (int id = 0; id < numContainers; id++) {
      verify(spyDriver, times(1)).requestContainerForWorker(eq(id), anyHeronWorker());
      Mockito.verify(mockEvaluators[id]).close();
    }
  }
  @Test
  public void restartWorkerRestartsSpecificWorker() throws Exception {
    // Restarting worker 1 must close and re-request only that worker's
    // container; all other workers stay untouched and mapped.
    int numContainers = 3;
    AllocatedEvaluator[] mockEvaluators = createApplicationWithContainers(numContainers);
    verify(spyDriver, never()).requestContainerForWorker(anyInt(), anyHeronWorker());
    spyDriver.restartWorker(1);
    for (int id = 0; id < numContainers; id++) {
      if (id == 1) {
        verify(spyDriver, times(1)).requestContainerForWorker(eq(id), anyHeronWorker());
        Mockito.verify(mockEvaluators[1]).close();
        assertFalse(spyDriver.lookupByEvaluatorId("e" + id).isPresent());
        continue;
      }
      // untouched workers keep their evaluator and mapping
      verify(mockEvaluators[id], never()).close();
      assertEquals(Integer.valueOf(id), spyDriver.lookupByEvaluatorId("e" + id).get());
    }
  }
  @Test
  public void onNextFailedEvaluatorRestartsContainer() throws Exception {
    // A failed-evaluator event for "e1" must trigger a replacement request for
    // worker 1 only; other workers keep their evaluators and mappings.
    int numContainers = 3;
    AllocatedEvaluator[] mockEvaluators = createApplicationWithContainers(numContainers);
    FailedEvaluator mockFailedContainer = mock(FailedEvaluator.class);
    when(mockFailedContainer.getId()).thenReturn("e1");
    verify(spyDriver, never()).requestContainerForWorker(anyInt(), anyHeronWorker());
    spyDriver.new FailedContainerHandler().onNext(mockFailedContainer);
    for (int id = 0; id < numContainers; id++) {
      if (id == 1) {
        verify(spyDriver, times(1)).requestContainerForWorker(eq(id), anyHeronWorker());
        assertFalse(spyDriver.lookupByEvaluatorId("e" + id).isPresent());
        continue;
      }
      verify(mockEvaluators[id], never()).close();
      assertEquals(Integer.valueOf(id), spyDriver.lookupByEvaluatorId("e" + id).get());
    }
  }
@Test
public void createContextConfigCreatesForGivenWorkerId() {
Configuration config = driver.createContextConfig(4);
boolean found = false;
for (NamedParameterNode<?> namedParameterNode : config.getNamedParameters()) {
if (namedParameterNode.getName().equals(ContextIdentifier.class.getSimpleName())) {
Assert.assertEquals("4", config.getNamedParameter(namedParameterNode));
found = true;
}
}
assertTrue("ContextIdentifier didn't exist.", found);
}
  @Test(expected = HeronMasterDriver.ContainerAllocationException.class)
  public void scheduleHeronWorkersFailsOnDuplicateRequest() throws Exception {
    // Scheduling a container plan whose id is already scheduled must raise
    // ContainerAllocationException (the expected exception above).
    PackingPlan packingPlan = PackingTestUtils.testPackingPlan("test", new RoundRobinPacking());
    spyDriver.scheduleHeronWorkers(packingPlan);
    verify(spyDriver, times(1)).requestContainerForWorker(eq(1), anyHeronWorker());
    verify(mockRequestor, times(1)).submit(any(EvaluatorRequest.class));
    // container 1 is already scheduled by the packing plan above
    PackingPlan.ContainerPlan duplicatePlan = PackingTestUtils.testContainerPlan(1);
    Set<PackingPlan.ContainerPlan> toBeAddedContainerPlans = new HashSet<>();
    toBeAddedContainerPlans.add(duplicatePlan);
    spyDriver.scheduleHeronWorkers(toBeAddedContainerPlans);
  }
  @Test
  public void scheduleHeronWorkersAddsContainers() throws Exception {
    // Scheduling additional (non-duplicate) container plans after an initial
    // packing plan must request one new container per added plan.
    PackingPlan packingPlan = PackingTestUtils.testPackingPlan("test", new RoundRobinPacking());
    spyDriver.scheduleHeronWorkers(packingPlan);
    verify(spyDriver, times(1)).requestContainerForWorker(eq(1), anyHeronWorker());
    verify(mockRequestor, times(1)).submit(any(EvaluatorRequest.class));
    Set<PackingPlan.ContainerPlan> toBeAddedContainerPlans = new HashSet<>();
    toBeAddedContainerPlans.add(PackingTestUtils.testContainerPlan(2));
    toBeAddedContainerPlans.add(PackingTestUtils.testContainerPlan(3));
    spyDriver.scheduleHeronWorkers(toBeAddedContainerPlans);
    verify(spyDriver, times(1)).requestContainerForWorker(eq(2), anyHeronWorker());
    verify(spyDriver, times(1)).requestContainerForWorker(eq(3), anyHeronWorker());
    // 1 original + 2 added = 3 submissions in total
    verify(mockRequestor, times(3)).submit(any(EvaluatorRequest.class));
  }
  @Test
  public void killWorkersTerminatesSpecificContainers() throws Exception {
    // Schedule five workers, then kill workers 2 and 3: only those two
    // evaluators are closed and only their mappings are removed.
    int numContainers = 5;
    Set<PackingPlan.ContainerPlan> containers = new HashSet<>();
    for (int id = 0; id < numContainers; id++) {
      containers.add(PackingTestUtils.testContainerPlan(id));
    }
    PackingPlan packingPlan = new PackingPlan("packing", containers);
    spyDriver.scheduleHeronWorkers(packingPlan);
    for (int id = 0; id < numContainers; id++) {
      verify(spyDriver, times(1)).requestContainerForWorker(eq(id), anyHeronWorker());
      assertTrue(spyDriver.lookupByContainerPlan(id).isPresent());
    }
    verify(mockRequestor, times(numContainers)).submit(any(EvaluatorRequest.class));
    AllocatedEvaluator[] mockEvaluators = createApplicationWithContainers(numContainers);
    Set<PackingPlan.ContainerPlan> containersTobeDeleted = new HashSet<>();
    containersTobeDeleted.add(PackingTestUtils.testContainerPlan(2));
    containersTobeDeleted.add(PackingTestUtils.testContainerPlan(3));
    spyDriver.killWorkers(containersTobeDeleted);
    for (int id = 0; id < numContainers; id++) {
      if (id == 2 || id == 3) {
        verify(mockEvaluators[id], times(1)).close();
        assertFalse(spyDriver.lookupByContainerPlan(id).isPresent());
        assertFalse(spyDriver.lookupByEvaluatorId("e" + id).isPresent());
        continue;
      }
      // surviving workers keep both mappings and their evaluator
      verify(mockEvaluators[id], never()).close();
      assertTrue(spyDriver.lookupByContainerPlan(id).isPresent());
      assertTrue(spyDriver.lookupByEvaluatorId("e" + id).isPresent());
    }
  }
  @Test
  public void findLargestFittingWorkerReturnsLargestWorker() {
    // Given workers of varying cpu/ram, the driver should pick the largest
    // worker whose cpu AND ram both fit the offered evaluator.
    Set<HeronMasterDriver.HeronWorker> workers = new HashSet<>();
    workers.add(new HeronMasterDriver.HeronWorker(1, 3, ByteAmount.fromGigabytes(3)));
    workers.add(new HeronMasterDriver.HeronWorker(2, 7, ByteAmount.fromGigabytes(7)));
    workers.add(new HeronMasterDriver.HeronWorker(3, 5, ByteAmount.fromGigabytes(5)));
    workers.add(new HeronMasterDriver.HeronWorker(4, 1, ByteAmount.fromGigabytes(1)));
    // enough memory and cores to fit largest container, 2
    verifyFittingContainer(workers, 7 * 1024 + 100, 7, 2);
    // enough to fit 3 but not container 2
    verifyFittingContainer(workers, 5 * 1024 + 100, 6, 3);
    // enough memory but not enough cores for container 2
    verifyFittingContainer(workers, 7 * 1024 + 100, 5, 3);
    // enough cores but not enough memory for container 2
    verifyFittingContainer(workers, 5 * 1024 + 100, 7, 3);
  }
private void verifyFittingContainer(Set<HeronMasterDriver.HeronWorker> containers,
int ram,
int cores,
int expectedContainer) {
EvaluatorDescriptor evaluatorDescriptor = mock(EvaluatorDescriptor.class);
AllocatedEvaluator mockEvaluator = mock(AllocatedEvaluator.class);
when(mockEvaluator.getEvaluatorDescriptor()).thenReturn(evaluatorDescriptor);
when(evaluatorDescriptor.getMemory()).thenReturn(ram);
when(evaluatorDescriptor.getNumberOfCores()).thenReturn(cores);
Optional<HeronMasterDriver.HeronWorker> worker =
spyDriver.findLargestFittingWorker(mockEvaluator, containers, false);
assertTrue(worker.isPresent());
assertEquals(expectedContainer, worker.get().getWorkerId());
}
  @Test
  public void fitBiggestContainerIgnoresCoresIfMissing() {
    // With the ignore-cores flag false an evaluator with too few cores does not
    // fit; with the flag true the core requirement is skipped and it fits.
    Set<HeronMasterDriver.HeronWorker> workers = new HashSet<>();
    workers.add(new HeronMasterDriver.HeronWorker(1, 3, ByteAmount.fromGigabytes(3)));
    AllocatedEvaluator mockEvaluator = createMockEvaluator("test", 1, ByteAmount.fromGigabytes(3));
    Optional<HeronMasterDriver.HeronWorker> result =
        spyDriver.findLargestFittingWorker(mockEvaluator, workers, false);
    Assert.assertFalse(result.isPresent());
    result = spyDriver.findLargestFittingWorker(mockEvaluator, workers, true);
    assertTrue(result.isPresent());
    assertEquals(1, result.get().getWorkerId());
  }
  @Test
  public void onNextAllocatedEvaluatorStartsWorker() throws Exception {
    // When an evaluator matching a scheduled container arrives, the driver must
    // record the evaluator-to-worker mapping and submit a context to start it.
    PackingPlan packingPlan = PackingTestUtils.testPackingPlan("test", new RoundRobinPacking());
    spyDriver.scheduleHeronWorkers(packingPlan);
    assertTrue(spyDriver.lookupByContainerPlan(1).isPresent());
    PackingPlan.ContainerPlan containerPlan = spyDriver.lookupByContainerPlan(1).get();
    // evaluator sized exactly to the scheduled container plan
    AllocatedEvaluator mockEvaluator =
        createMockEvaluator("test", getCpu(containerPlan), getRam(containerPlan));
    assertFalse(spyDriver.lookupByEvaluatorId("test").isPresent());
    spyDriver.new ContainerAllocationHandler().onNext(mockEvaluator);
    assertTrue(spyDriver.lookupByEvaluatorId("test").isPresent());
    assertEquals(Integer.valueOf(1), spyDriver.lookupByEvaluatorId("test").get());
    verify(mockEvaluator, times(1)).submitContext(any(Configuration.class));
  }
@Test
public void onNextAllocatedEvaluatorDiscardsExtraWorker() throws Exception {
  // No worker is awaiting allocation, so this evaluator is surplus.
  AllocatedEvaluator extraEvaluator =
      createMockEvaluator("test", 1, ByteAmount.fromMegabytes(123));
  assertFalse(spyDriver.lookupByEvaluatorId("test").isPresent());
  spyDriver.new ContainerAllocationHandler().onNext(extraEvaluator);
  // The surplus evaluator must be neither registered nor given a context to run.
  assertFalse(spyDriver.lookupByEvaluatorId("test").isPresent());
  verify(extraEvaluator, never()).submitContext(any(Configuration.class));
}
@Test
public void tMasterLaunchLaunchesExecutorForTMaster() throws Exception {
  // Launching the TMaster should hand it to the executor service exactly once.
  ExecutorService executor = mock(ExecutorService.class);
  HeronMasterDriver.TMaster tMasterUnderTest = spyDriver.buildTMaster(executor);
  doReturn(mock(Future.class)).when(executor).submit(tMasterUnderTest);
  tMasterUnderTest.launch();
  verify(executor, times(1)).submit(tMasterUnderTest);
}
@Test
public void tMasterKillTerminatesTMaster() throws Exception {
  ExecutorService executor = mock(ExecutorService.class);
  HeronMasterDriver.TMaster tMasterUnderTest = spyDriver.buildTMaster(executor);
  Future<?> tMasterFuture = mock(Future.class);
  doReturn(tMasterFuture).when(executor).submit(tMasterUnderTest);
  tMasterUnderTest.launch();
  tMasterUnderTest.killTMaster();
  // Killing the TMaster cancels its running task and shuts its executor down.
  verify(tMasterFuture, times(1)).cancel(true);
  verify(executor, times(1)).shutdownNow();
}
@Test
public void tMasterLaunchRestartsTMasterOnFailure() throws Exception {
  // Real single-thread executor (not a mock) so the restart loop actually runs.
  HeronMasterDriver.TMaster tMaster =
      spy(spyDriver.buildTMaster(Executors.newSingleThreadExecutor()));
  HeronExecutorTask mockTask = mock(HeronExecutorTask.class);
  // Latch blocks the first startExecutor() call so we can observe exactly one
  // invocation before letting the task return (simulating a TMaster exit).
  final CountDownLatch testLatch = new CountDownLatch(1);
  doAnswer(new Answer<Object>() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      testLatch.await();
      return null;
    }
  }).when(mockTask).startExecutor();
  doReturn(mockTask).when(tMaster).getTMasterExecutorTask();
  tMaster.launch();
  // Exactly one (blocked) start while the latch is held.
  verify(mockTask, timeout(1000).times(1)).startExecutor();
  // Release the latch; every subsequent startExecutor() returns immediately,
  // so the driver should keep restarting it.
  testLatch.countDown();
  //retries if tmaster ends for some reason
  verify(mockTask, timeout(1000).times(3)).startExecutor();
}
@Test
@PrepareForTest({HeronReefUtils.class, SchedulerMain.class})
public void onNextStartTimeStartsSchedulerTMaster() throws Exception {
  // Stub the static sandbox-extraction helper so no filesystem work happens.
  PowerMockito.spy(HeronReefUtils.class);
  PowerMockito.doNothing().when(HeronReefUtils.class,
      "extractPackageInSandbox",
      anyString(),
      anyString(),
      anyString());
  // Have the static SchedulerMain factory return a mock so we can verify it runs.
  SchedulerMain mockScheduler = mock(SchedulerMain.class);
  PowerMockito.spy(SchedulerMain.class);
  PowerMockito.doReturn(mockScheduler).when(SchedulerMain.class,
      "createInstance",
      anyString(),
      anyString(),
      anyString(),
      anyString(),
      anyString(),
      eq(0),
      eq(false));
  // Driving the StartTime event should launch the scheduler exactly once.
  spyDriver.new HeronSchedulerLauncher().onNext(new StartTime(System.currentTimeMillis()));
  verify(mockScheduler, times(1)).runScheduler();
}
/**
 * Simulates allocation of {@code numContainers} evaluators (ids "e0".."eN-1", worker id
 * equal to the index) and verifies each was registered exactly once without being closed.
 */
private AllocatedEvaluator[] createApplicationWithContainers(int numContainers) {
  AllocatedEvaluator[] evaluators = new AllocatedEvaluator[numContainers];
  for (int workerId = 0; workerId < numContainers; workerId++) {
    evaluators[workerId] =
        simulateContainerAllocation("e" + workerId, 1, ByteAmount.fromMegabytes(123), workerId);
  }
  for (int workerId = 0; workerId < numContainers; workerId++) {
    assertEquals(Integer.valueOf(workerId),
        spyDriver.lookupByEvaluatorId("e" + workerId).get());
    verify(evaluators[workerId], times(1)).submitContext(anyConfiguration());
    verify(evaluators[workerId], never()).close();
  }
  return evaluators;
}
/**
 * Builds a mock evaluator plus a matching pending worker, stubs the driver so the
 * allocation handler pairs them, then fires the handler and returns the evaluator.
 */
private AllocatedEvaluator simulateContainerAllocation(String evaluatorId,
                                                       int cores,
                                                       ByteAmount ram,
                                                       int workerId) {
  AllocatedEvaluator allocatedEvaluator = createMockEvaluator(evaluatorId, cores, ram);
  HeronMasterDriver.HeronWorker pendingWorker =
      new HeronMasterDriver.HeronWorker(workerId, cores, ram);
  Set<HeronMasterDriver.HeronWorker> pendingWorkers = new HashSet<>();
  pendingWorkers.add(pendingWorker);
  doReturn(pendingWorkers).when(spyDriver).getWorkersAwaitingAllocation();
  doReturn(Optional.of(pendingWorker)).when(spyDriver)
      .findLargestFittingWorker(eq(allocatedEvaluator), eq(pendingWorkers), eq(false));
  spyDriver.new ContainerAllocationHandler().onNext(allocatedEvaluator);
  return allocatedEvaluator;
}
/**
 * Creates a mock REEF evaluator with the given id, core count, and memory.
 */
private AllocatedEvaluator createMockEvaluator(String evaluatorId, int cores, ByteAmount mem) {
  EvaluatorDescriptor descriptor = mock(EvaluatorDescriptor.class);
  // The descriptor reports memory in whole megabytes as an int; a direct narrowing
  // cast avoids boxing the long into a Long just to call intValue().
  when(descriptor.getMemory()).thenReturn((int) mem.asMegabytes());
  when(descriptor.getNumberOfCores()).thenReturn(cores);
  AllocatedEvaluator mockEvaluator = mock(AllocatedEvaluator.class);
  when(mockEvaluator.getEvaluatorDescriptor()).thenReturn(descriptor);
  when(mockEvaluator.getId()).thenReturn(evaluatorId);
  return mockEvaluator;
}
/** RAM requirement declared by the container's packing plan. */
private ByteAmount getRam(PackingPlan.ContainerPlan container) {
  return container.getRequiredResource().getRam();
}
/** CPU requirement from the container's packing plan, rounded up to whole cores. */
private int getCpu(PackingPlan.ContainerPlan container) {
  double requestedCores = container.getRequiredResource().getCpu();
  return (int) Math.ceil(requestedCores);
}
/**
 * Typed convenience wrapper for Mockito's any() matcher; uses the statically imported
 * form for consistency with the rest of this class (was the qualified Mockito.any).
 */
private Configuration anyConfiguration() {
  return any(Configuration.class);
}
/** Typed convenience wrapper for Mockito's any() matcher on HeronWorker arguments. */
private HeronMasterDriver.HeronWorker anyHeronWorker() {
  return any(HeronMasterDriver.HeronWorker.class);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.http;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.io.CountingOutputStream;
import com.google.inject.Inject;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryUnsupportedException;
import org.apache.druid.server.QueryCapacityExceededException;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
@Path("/druid/v2/sql/")
public class SqlResource
{
  private static final Logger log = new Logger(SqlResource.class);

  private final ObjectMapper jsonMapper;
  private final SqlLifecycleFactory sqlLifecycleFactory;

  @Inject
  public SqlResource(
      @Json ObjectMapper jsonMapper,
      SqlLifecycleFactory sqlLifecycleFactory
  )
  {
    this.jsonMapper = Preconditions.checkNotNull(jsonMapper, "jsonMapper");
    this.sqlLifecycleFactory = Preconditions.checkNotNull(sqlLifecycleFactory, "sqlLifecycleFactory");
  }

  /**
   * Executes a SQL query and streams the result rows back to the client in the
   * format requested by {@link SqlQuery#getResultFormat()}.
   *
   * <p>TIMESTAMP and DATE columns are rendered as ISO8601 strings in the planner's
   * time zone rather than raw epoch values. Query metrics and logs are emitted once
   * per request, whether it succeeds or fails.
   *
   * @param sqlQuery the parsed request body (query text, context, parameters, format)
   * @param req      servlet request, used for authorization and the remote address
   * @return 200 with a streaming body on success; an error response otherwise
   * @throws IOException if the error-response entity cannot be serialized
   */
  @POST
  @Produces(MediaType.APPLICATION_JSON)
  @Consumes(MediaType.APPLICATION_JSON)
  public Response doPost(
      final SqlQuery sqlQuery,
      @Context final HttpServletRequest req
  ) throws IOException
  {
    final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
    final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), sqlQuery.getContext());
    final String remoteAddr = req.getRemoteAddr();
    final String currThreadName = Thread.currentThread().getName();

    try {
      // Tag the worker thread with the query id so thread dumps identify the query.
      Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId));

      lifecycle.setParameters(sqlQuery.getParameterList());
      final PlannerContext plannerContext = lifecycle.planAndAuthorize(req);
      final DateTimeZone timeZone = plannerContext.getTimeZone();

      // Remember which columns are time-typed, so we can emit ISO8601 instead of millis values.
      // Also store list of all column names, for X-Druid-Sql-Columns header.
      final List<RelDataTypeField> fieldList = lifecycle.rowType().getFieldList();
      final boolean[] timeColumns = new boolean[fieldList.size()];
      final boolean[] dateColumns = new boolean[fieldList.size()];
      final String[] columnNames = new String[fieldList.size()];

      for (int i = 0; i < fieldList.size(); i++) {
        final SqlTypeName sqlTypeName = fieldList.get(i).getType().getSqlTypeName();
        timeColumns[i] = sqlTypeName == SqlTypeName.TIMESTAMP;
        dateColumns[i] = sqlTypeName == SqlTypeName.DATE;
        columnNames[i] = fieldList.get(i).getName();
      }

      final Yielder<Object[]> yielder0 = Yielders.each(lifecycle.execute());

      try {
        return Response
            .ok(
                (StreamingOutput) outputStream -> {
                  Exception e = null;
                  // Count bytes written so emitLogsAndMetrics can report response size.
                  CountingOutputStream os = new CountingOutputStream(outputStream);
                  Yielder<Object[]> yielder = yielder0;

                  try (final ResultFormat.Writer writer = sqlQuery.getResultFormat()
                                                                  .createFormatter(os, jsonMapper)) {
                    writer.writeResponseStart();

                    if (sqlQuery.includeHeader()) {
                      writer.writeHeader(Arrays.asList(columnNames));
                    }

                    while (!yielder.isDone()) {
                      final Object[] row = yielder.get();
                      writer.writeRowStart();
                      for (int i = 0; i < fieldList.size(); i++) {
                        final Object value;

                        if (timeColumns[i]) {
                          value = ISODateTimeFormat.dateTime().print(
                              Calcites.calciteTimestampToJoda((long) row[i], timeZone)
                          );
                        } else if (dateColumns[i]) {
                          value = ISODateTimeFormat.dateTime().print(
                              Calcites.calciteDateToJoda((int) row[i], timeZone)
                          );
                        } else {
                          value = row[i];
                        }

                        writer.writeRowField(fieldList.get(i).getName(), value);
                      }
                      writer.writeRowEnd();
                      yielder = yielder.next(null);
                    }

                    writer.writeResponseEnd();
                  }
                  catch (Exception ex) {
                    e = ex;
                    log.error(ex, "Unable to send SQL response [%s]", sqlQueryId);
                    throw new RuntimeException(ex);
                  }
                  finally {
                    yielder.close();
                    lifecycle.emitLogsAndMetrics(e, remoteAddr, os.getCount());
                  }
                }
            )
            .header("X-Druid-SQL-Query-Id", sqlQueryId)
            .build();
      }
      catch (Throwable e) {
        // make sure to close yielder if anything happened before starting to serialize the response.
        // FIX: a failure while closing the yielder must not mask the original error,
        // so record it as a suppressed exception instead of letting it propagate.
        try {
          yielder0.close();
        }
        catch (Throwable suppressed) {
          e.addSuppressed(suppressed);
        }
        throw new RuntimeException(e);
      }
    }
    catch (QueryCapacityExceededException cap) {
      // Too many concurrent queries: report with the dedicated 429-style status.
      lifecycle.emitLogsAndMetrics(cap, remoteAddr, -1);
      return Response.status(QueryCapacityExceededException.STATUS_CODE).entity(jsonMapper.writeValueAsBytes(cap)).build();
    }
    catch (QueryUnsupportedException unsupported) {
      log.warn(unsupported, "Failed to handle query: %s", sqlQuery);
      lifecycle.emitLogsAndMetrics(unsupported, remoteAddr, -1);
      return Response.status(QueryUnsupportedException.STATUS_CODE).entity(jsonMapper.writeValueAsBytes(unsupported)).build();
    }
    catch (ForbiddenException e) {
      throw e; // let ForbiddenExceptionMapper handle this
    }
    catch (Exception e) {
      log.warn(e, "Failed to handle query: %s", sqlQuery);
      lifecycle.emitLogsAndMetrics(e, remoteAddr, -1);

      final Exception exceptionToReport;

      // Calcite's CannotPlanException message is unhelpful to users; replace it
      // with one that names the failing query.
      if (e instanceof RelOptPlanner.CannotPlanException) {
        exceptionToReport = new ISE("Cannot build plan for query: %s", sqlQuery.getQuery());
      } else {
        exceptionToReport = e;
      }

      return Response.serverError()
                     .type(MediaType.APPLICATION_JSON_TYPE)
                     .entity(jsonMapper.writeValueAsBytes(QueryInterruptedException.wrapIfNeeded(exceptionToReport)))
                     .build();
    }
    finally {
      // Always restore the original thread name.
      Thread.currentThread().setName(currThreadName);
    }
  }
}
| |
package io.dropwizard.metrics;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.Marker;
/**
 * Tests for Slf4jReporter: verifies that gauge/counter/histogram/meter/timer values
 * are logged with the expected message format, level (INFO vs ERROR), optional name
 * prefix, tag rendering, and rate/duration unit conversion.
 */
public class Slf4jReporterTest {

  // Collaborators are mocked so the tests can verify the exact logger invocations.
  private final Logger logger = mock(Logger.class);
  private final Marker marker = mock(Marker.class);
  private final MetricRegistry registry = mock(MetricRegistry.class);

  // Reporter that logs at INFO with metric names prefixed by "prefix".
  private final Slf4jReporter infoReporter = Slf4jReporter.forRegistry(registry)
      .outputTo(logger)
      .markWith(marker)
      .prefixedWith("prefix")
      .convertRatesTo(TimeUnit.SECONDS)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .withLoggingLevel(Slf4jReporter.LoggingLevel.INFO)
      .filter(MetricFilter.ALL)
      .build();

  // Reporter that logs at ERROR with no name prefix.
  private final Slf4jReporter errorReporter = Slf4jReporter.forRegistry(registry)
      .outputTo(logger)
      .markWith(marker)
      .convertRatesTo(TimeUnit.SECONDS)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .withLoggingLevel(Slf4jReporter.LoggingLevel.ERROR)
      .filter(MetricFilter.ALL)
      .build();

  // Tags attached to every reported metric; rendered as "{t1=v1, k2=v2}".
  private Map<String,String> testTags;

  @Before
  public void setup() {
    testTags = new HashMap<>();
    testTags.put("t1", "v1");
    testTags.put("k2", "v2");
  }

  @Test
  public void reportsGaugeValuesAtError() throws Exception {
    // The reporter only logs when the target level is enabled for the marker.
    when(logger.isErrorEnabled(marker)).thenReturn(true);
    errorReporter.report(map("gauge", testTags, gauge("value")),
        this.<Counter>map(),
        this.<Histogram>map(),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).error(marker, "type={}, name={}, value={}", new Object[]{"GAUGE", "gauge{t1=v1, k2=v2}", "value"});
  }

  @Test
  public void reportsCounterValuesAtError() throws Exception {
    final Counter counter = mock(Counter.class);
    when(counter.getCount()).thenReturn(100L);
    when(logger.isErrorEnabled(marker)).thenReturn(true);
    errorReporter.report(this.<Gauge>map(),
        map("test.counter", testTags, counter),
        this.<Histogram>map(),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).error(marker, "type={}, name={}, count={}", new Object[]{"COUNTER", "test.counter{t1=v1, k2=v2}", 100L});
  }

  @Test
  public void reportsHistogramValuesAtError() throws Exception {
    // Histogram statistics come from its snapshot; stub each percentile distinctly
    // so the argument order in the log call can be verified.
    final Histogram histogram = mock(Histogram.class);
    when(histogram.getCount()).thenReturn(1L);
    final Snapshot snapshot = mock(Snapshot.class);
    when(snapshot.getMax()).thenReturn(2L);
    when(snapshot.getMean()).thenReturn(3.0);
    when(snapshot.getMin()).thenReturn(4L);
    when(snapshot.getStdDev()).thenReturn(5.0);
    when(snapshot.getMedian()).thenReturn(6.0);
    when(snapshot.get75thPercentile()).thenReturn(7.0);
    when(snapshot.get95thPercentile()).thenReturn(8.0);
    when(snapshot.get98thPercentile()).thenReturn(9.0);
    when(snapshot.get99thPercentile()).thenReturn(10.0);
    when(snapshot.get999thPercentile()).thenReturn(11.0);
    when(histogram.getSnapshot()).thenReturn(snapshot);
    when(logger.isErrorEnabled(marker)).thenReturn(true);
    errorReporter.report(this.<Gauge>map(),
        this.<Counter>map(),
        map("test.histogram", testTags, histogram),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).error(marker,
        "type={}, name={}, count={}, min={}, max={}, mean={}, stddev={}, median={}, p75={}, p95={}, p98={}, p99={}, p999={}",
        "HISTOGRAM",
        "test.histogram{t1=v1, k2=v2}",
        1L,
        4L,
        2L,
        3.0,
        5.0,
        6.0,
        7.0,
        8.0,
        9.0,
        10.0,
        11.0);
  }

  @Test
  public void reportsMeterValuesAtError() throws Exception {
    final Meter meter = mock(Meter.class);
    when(meter.getCount()).thenReturn(1L);
    when(meter.getMeanRate()).thenReturn(2.0);
    when(meter.getOneMinuteRate()).thenReturn(3.0);
    when(meter.getFiveMinuteRate()).thenReturn(4.0);
    when(meter.getFifteenMinuteRate()).thenReturn(5.0);
    when(logger.isErrorEnabled(marker)).thenReturn(true);
    errorReporter.report(this.<Gauge>map(),
        this.<Counter>map(),
        this.<Histogram>map(),
        map("test.meter", testTags, meter),
        this.<Timer>map());
    // Rates were requested in SECONDS, hence "events/second".
    verify(logger).error(marker,
        "type={}, name={}, count={}, mean_rate={}, m1={}, m5={}, m15={}, rate_unit={}",
        "METER",
        "test.meter{t1=v1, k2=v2}",
        1L,
        2.0,
        3.0,
        4.0,
        5.0,
        "events/second");
  }

  @Test
  public void reportsTimerValuesAtError() throws Exception {
    final Timer timer = mock(Timer.class);
    when(timer.getCount()).thenReturn(1L);
    when(timer.getMeanRate()).thenReturn(2.0);
    when(timer.getOneMinuteRate()).thenReturn(3.0);
    when(timer.getFiveMinuteRate()).thenReturn(4.0);
    when(timer.getFifteenMinuteRate()).thenReturn(5.0);
    // Snapshot values are stubbed in nanoseconds; the reporter converts them to
    // the requested duration unit (milliseconds), so N ms in -> N.0 out below.
    final Snapshot snapshot = mock(Snapshot.class);
    when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100));
    when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200));
    when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300));
    when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400));
    when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500));
    when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600));
    when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700));
    when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800));
    when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900));
    when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS
        .toNanos(1000));
    when(timer.getSnapshot()).thenReturn(snapshot);
    when(logger.isErrorEnabled(marker)).thenReturn(true);
    errorReporter.report(this.<Gauge>map(),
        this.<Counter>map(),
        this.<Histogram>map(),
        this.<Meter>map(),
        map("test.another.timer", testTags, timer));
    verify(logger).error(marker,
        "type={}, name={}, count={}, min={}, max={}, mean={}, stddev={}, median={}, p75={}, p95={}, p98={}, p99={}, p999={}, mean_rate={}, m1={}, m5={}, m15={}, rate_unit={}, duration_unit={}",
        "TIMER",
        "test.another.timer{t1=v1, k2=v2}",
        1L,
        300.0,
        100.0,
        200.0,
        400.0,
        500.0,
        600.0,
        700.0,
        800.0,
        900.0,
        1000.0,
        2.0,
        3.0,
        4.0,
        5.0,
        "events/second",
        "milliseconds");
  }

  @Test
  public void reportsGaugeValues() throws Exception {
    // INFO-level variant: names carry the "prefix." configured on infoReporter.
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    infoReporter.report(map("gauge", testTags, gauge("value")),
        this.<Counter>map(),
        this.<Histogram>map(),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).info(marker, "type={}, name={}, value={}", new Object[]{"GAUGE", "prefix.gauge{t1=v1, k2=v2}", "value"});
  }

  @Test
  public void reportsCounterValues() throws Exception {
    final Counter counter = mock(Counter.class);
    when(counter.getCount()).thenReturn(100L);
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    infoReporter.report(this.<Gauge>map(),
        map("test.counter", testTags, counter),
        this.<Histogram>map(),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).info(marker, "type={}, name={}, count={}", new Object[]{"COUNTER", "prefix.test.counter{t1=v1, k2=v2}", 100L});
  }

  @Test
  public void reportsHistogramValues() throws Exception {
    final Histogram histogram = mock(Histogram.class);
    when(histogram.getCount()).thenReturn(1L);
    final Snapshot snapshot = mock(Snapshot.class);
    when(snapshot.getMax()).thenReturn(2L);
    when(snapshot.getMean()).thenReturn(3.0);
    when(snapshot.getMin()).thenReturn(4L);
    when(snapshot.getStdDev()).thenReturn(5.0);
    when(snapshot.getMedian()).thenReturn(6.0);
    when(snapshot.get75thPercentile()).thenReturn(7.0);
    when(snapshot.get95thPercentile()).thenReturn(8.0);
    when(snapshot.get98thPercentile()).thenReturn(9.0);
    when(snapshot.get99thPercentile()).thenReturn(10.0);
    when(snapshot.get999thPercentile()).thenReturn(11.0);
    when(histogram.getSnapshot()).thenReturn(snapshot);
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    infoReporter.report(this.<Gauge>map(),
        this.<Counter>map(),
        map("test.histogram", testTags, histogram),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).info(marker,
        "type={}, name={}, count={}, min={}, max={}, mean={}, stddev={}, median={}, p75={}, p95={}, p98={}, p99={}, p999={}",
        "HISTOGRAM",
        "prefix.test.histogram{t1=v1, k2=v2}",
        1L,
        4L,
        2L,
        3.0,
        5.0,
        6.0,
        7.0,
        8.0,
        9.0,
        10.0,
        11.0);
  }

  @Test
  public void reportsMeterValues() throws Exception {
    final Meter meter = mock(Meter.class);
    when(meter.getCount()).thenReturn(1L);
    when(meter.getMeanRate()).thenReturn(2.0);
    when(meter.getOneMinuteRate()).thenReturn(3.0);
    when(meter.getFiveMinuteRate()).thenReturn(4.0);
    when(meter.getFifteenMinuteRate()).thenReturn(5.0);
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    infoReporter.report(this.<Gauge>map(),
        this.<Counter>map(),
        this.<Histogram>map(),
        map("test.meter", testTags, meter),
        this.<Timer>map());
    verify(logger).info(marker,
        "type={}, name={}, count={}, mean_rate={}, m1={}, m5={}, m15={}, rate_unit={}",
        "METER",
        "prefix.test.meter{t1=v1, k2=v2}",
        1L,
        2.0,
        3.0,
        4.0,
        5.0,
        "events/second");
  }

  @Test
  public void reportsTimerValues() throws Exception {
    final Timer timer = mock(Timer.class);
    when(timer.getCount()).thenReturn(1L);
    when(timer.getMeanRate()).thenReturn(2.0);
    when(timer.getOneMinuteRate()).thenReturn(3.0);
    when(timer.getFiveMinuteRate()).thenReturn(4.0);
    when(timer.getFifteenMinuteRate()).thenReturn(5.0);
    // Same nanos-to-milliseconds conversion expectation as the ERROR-level test.
    final Snapshot snapshot = mock(Snapshot.class);
    when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100));
    when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200));
    when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300));
    when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400));
    when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500));
    when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600));
    when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700));
    when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800));
    when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900));
    when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS
        .toNanos(1000));
    when(timer.getSnapshot()).thenReturn(snapshot);
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    infoReporter.report(this.<Gauge>map(),
        this.<Counter>map(),
        this.<Histogram>map(),
        this.<Meter>map(),
        map("test.another.timer", testTags, timer));
    verify(logger).info(marker,
        "type={}, name={}, count={}, min={}, max={}, mean={}, stddev={}, median={}, p75={}, p95={}, p98={}, p99={}, p999={}, mean_rate={}, m1={}, m5={}, m15={}, rate_unit={}, duration_unit={}",
        "TIMER",
        "prefix.test.another.timer{t1=v1, k2=v2}",
        1L,
        300.0,
        100.0,
        200.0,
        400.0,
        500.0,
        600.0,
        700.0,
        800.0,
        900.0,
        1000.0,
        2.0,
        3.0,
        4.0,
        5.0,
        "events/second",
        "milliseconds");
  }

  @Test
  public void testNameFormatterIsUsed() {
    // A custom name formatter replaces the default "{tag=value}" rendering;
    // APPEND_TAG_VALUES appends the tag values ("v1", "v2") to the metric name.
    Slf4jReporter reporter = Slf4jReporter.forRegistry(registry)
        .outputTo(logger)
        .markWith(marker)
        .prefixedWith("prefix")
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .withLoggingLevel(Slf4jReporter.LoggingLevel.INFO)
        .filter(MetricFilter.ALL)
        .withNameFormatter(MetricNameFormatter.APPEND_TAG_VALUES)
        .build();
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    reporter.report(map("gauge", testTags, gauge("value")),
        this.<Counter>map(),
        this.<Histogram>map(),
        this.<Meter>map(),
        this.<Timer>map());
    verify(logger).info(marker, "type={}, name={}, value={}", new Object[]{"GAUGE", "prefix.gauge.v1.v2", "value"});
  }

  // Empty metric map for the categories a test does not exercise.
  private <T> SortedMap<MetricName, T> map() {
    return new TreeMap<MetricName, T>();
  }

  // Single-entry metric map keyed by name + tags.
  private <T> SortedMap<MetricName, T> map(String name, Map<String,String> tags, T metric) {
    final TreeMap<MetricName, T> map = new TreeMap<MetricName, T>();
    map.put(new MetricName(name,tags), metric);
    return map;
  }

  // Mock gauge that reports the given constant value.
  private <T> Gauge gauge(T value) {
    final Gauge gauge = mock(Gauge.class);
    when(gauge.getValue()).thenReturn(value);
    return gauge;
  }
}
| |
package edu.uiowa.icts.bluebutton.resource;
/*
* #%L
* blue-button Spring MVC Web App
* %%
* Copyright (C) 2014 - 2015 University of Iowa Institute for Clinical and Translational Science (ICTS)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.List;
import org.hamcrest.core.IsNull;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Arrays;
import edu.uiowa.icts.datatable.DataTableColumn;
import edu.uiowa.icts.datatable.DataTableRequest;
import edu.uiowa.icts.datatable.DataTableSearch;
import edu.uiowa.icts.bluebutton.dao.*;
import edu.uiowa.icts.bluebutton.domain.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import edu.uiowa.icts.bluebutton.controller.AbstractControllerMVCTests;
/**
* Generated by Protogen
* @since Wed Jul 08 08:12:43 CDT 2015
*/
/**
 * MockMvc CRUD tests for the /api/labresultsynonym REST resource: list, show,
 * create, update, delete, plus 404 handling for unknown ids and bad paths.
 * All requests run as an authenticated BLUEBUTTON_ADMIN user.
 *
 * Generated by Protogen
 * @since Wed Jul 08 08:12:43 CDT 2015
 */
@WithMockUser(username="user", roles={"BLUEBUTTON_ADMIN"})
public class LabResultSynonymResourceMvcTest extends AbstractControllerMVCTests {

  // First of the 20 seeded records; reused for show/update/delete assertions.
  private LabResultSynonym firstLabResultSynonym;
  private ObjectMapper mapper;

  @Before
  public void before() {
    // add 20 records to test database
    for(int x=1; x<21; x++){
      LabResultSynonym labResultSynonym = new LabResultSynonym();
      labResultSynonym.setOfficialName("lab result " + x);
      labResultSynonym.setUnofficialName("lab unoffical name " + x);
      bluebuttonDaoService.getLabResultSynonymService().save(labResultSynonym);
      if (x == 1){
        // use this ID for update, show, and delete assertions
        firstLabResultSynonym = labResultSynonym;
      }
    }
    this.mapper = new ObjectMapper();
    // fix NonUniqueObjectException
    this.bluebuttonDaoService.getLabResultSynonymService().getSession().flush();
    this.bluebuttonDaoService.getLabResultSynonymService().getSession().clear();
  }

  @Test
  public void getByPathVariableIdShouldLoadAndReturnObject() throws Exception {
    mockMvc.perform(get("/api/labresultsynonym/"+firstLabResultSynonym.getLabResultSynonymId().toString()))
    .andExpect(status().isOk())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.labResultSynonymId", is(firstLabResultSynonym.getLabResultSynonymId())))
    ;
  }

  @Test
  public void getByPathVariableIdShouldReturn404ForBogusId() throws Exception {
    // Negative ids never exist; the resource reports the failing path in the message.
    mockMvc.perform(get("/api/labresultsynonym/-123")).andExpect(status().isNotFound()).andExpect(jsonPath("$.message", is("/api/labresultsynonym/-123 could not be found.")));
  }

  @Test
  public void restMappingNotFoundShouldReturn404() throws Exception {
    // Paths with extra segments do not map to any handler.
    mockMvc.perform(get("/api/labresultsynonym/asdfasdf/asdfasdf"))
    .andExpect(status().isNotFound())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.message", is("/api/labresultsynonym/asdfasdf/asdfasdf could not be found.")))
    ;
  }

  @Test
  public void createShouldPersistAndReturnObject() throws Exception {
    long count = bluebuttonDaoService.getLabResultSynonymService().count();
    LabResultSynonym labResultSynonym = new LabResultSynonym();
    labResultSynonym.setOfficialName("test");
    labResultSynonym.setUnofficialName("good test");
    // POST without an id creates a new record and returns it with a generated id.
    mockMvc.perform(post("/api/labresultsynonym/").content(this.mapper.writeValueAsString(labResultSynonym))
    .accept(MediaType.APPLICATION_JSON).contentType(MediaType.APPLICATION_JSON))
    .andExpect(status().isOk())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.labResultSynonymId").value(IsNull.notNullValue()))
    ;
    assertEquals("count should increase by 1", count +1 , bluebuttonDaoService.getLabResultSynonymService().count());
  }

  @Test
  public void updateShouldPersistExistingAndReturnObject() throws Exception {
    long count = bluebuttonDaoService.getLabResultSynonymService().count();
    // POST to an existing id updates in place; no new record is created.
    mockMvc.perform(post("/api/labresultsynonym/"+ firstLabResultSynonym.getLabResultSynonymId().toString())
    .content(this.mapper.writeValueAsString(firstLabResultSynonym))
    .accept(MediaType.APPLICATION_JSON).contentType(MediaType.APPLICATION_JSON))
    .andExpect(status().isOk())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.labResultSynonymId", is(firstLabResultSynonym.getLabResultSynonymId())))
    ;
    assertEquals("count NOT should increase", count , bluebuttonDaoService.getLabResultSynonymService().count());
  }

  @Test
  public void updateByPathVariableIdShouldReturn404ForMismatchBetweenPathIdAndObjectId() throws Exception {
    String correctId = firstLabResultSynonym.getLabResultSynonymId().toString();
    // this ID manipulation should be overwritten with path variable id
    firstLabResultSynonym.setLabResultSynonymId(-123);
    mockMvc.perform(post("/api/labresultsynonym/"+correctId)
    .content(this.mapper.writeValueAsString(firstLabResultSynonym))
    .accept(MediaType.APPLICATION_JSON).contentType(MediaType.APPLICATION_JSON))
    .andExpect(status().isNotFound())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.message", is("/api/labresultsynonym/" +correctId +" could not be found.")))
    ;
  }

  @Test
  public void updateByPathVariableIdShouldReturn404ForBogusPathId() throws Exception {
    mockMvc.perform(post("/api/labresultsynonym/-123")
    .content(this.mapper.writeValueAsString(firstLabResultSynonym))
    .accept(MediaType.APPLICATION_JSON).contentType(MediaType.APPLICATION_JSON))
    .andExpect(status().isNotFound())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.message", is("/api/labresultsynonym/-123 could not be found.")));
  }

  @Test
  public void deleteShouldDeleteAndReturnStatusOk() throws Exception {
    long count = bluebuttonDaoService.getLabResultSynonymService().count();
    mockMvc.perform(delete("/api/labresultsynonym/"+ firstLabResultSynonym.getLabResultSynonymId().toString()))
    .andExpect(status().isOk());
    assertEquals("count should decrease by 1", count - 1 , bluebuttonDaoService.getLabResultSynonymService().count());
  }

  @Test
  public void deleteShouldFailWithBogusId() throws Exception {
    long count = bluebuttonDaoService.getLabResultSynonymService().count();
    mockMvc.perform(delete("/api/labresultsynonym/-123"))
    .andExpect(status().isNotFound())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.message", is("/api/labresultsynonym/-123 could not be found.")));
    assertEquals("count should NOT decrease by 1", count , bluebuttonDaoService.getLabResultSynonymService().count());
  }

  @Test
  public void listShouldReturnAllByDefault() throws Exception {
    // All 20 seeded records come back, in insertion order.
    mockMvc.perform(get("/api/labresultsynonym/"))
    .andExpect(status().isOk())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.", hasSize(is(20))))
    .andExpect(jsonPath("$.[0].labResultSynonymId", is(firstLabResultSynonym.getLabResultSynonymId())))
    ;
  }

  @Test
  public void listShouldReturnAllByDefaultWithoutTrailUrlSlash() throws Exception {
    // Same listing behavior without the trailing slash.
    mockMvc.perform(get("/api/labresultsynonym"))
    .andExpect(status().isOk())
    .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
    .andExpect(jsonPath("$.", hasSize(is(20))))
    .andExpect(jsonPath("$.[0].labResultSynonymId", is(firstLabResultSynonym.getLabResultSynonymId())))
    ;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.junit.BeforeClass;
import org.junit.Test;
public class UnionAllIT extends BaseOwnClusterHBaseManagedTimeIT {
@BeforeClass
public static void doSetup() throws Exception {
Map<String, String> props = Collections.emptyMap();
setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
}
    /**
     * UNION ALL over three SELECT * arms where the two tables use different but
     * coercible PK/value types (varchar(10)/integer vs char(20)/bigint).
     * Rows must come back arm-by-arm, in arm order, with duplicates preserved.
     */
    @Test
    public void testUnionAllSelects() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar(10) not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string char(20) not null, col1 bigint" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 20);
            stmt.execute();
            conn.commit();
            ddl = "select * from test_table union all select * from b_table union all select * from test_table";
            ResultSet rs = conn.createStatement().executeQuery(ddl);
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertTrue(rs.next());
            // char(20) values come back space-padded, hence the trim().
            assertEquals("b",rs.getString(1).trim());
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("c",rs.getString(1).trim());
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            // test_table appears twice in the union, so "a" is returned again.
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * Aggregation is evaluated independently per UNION ALL arm: three
     * count(*) arms yield three rows (3, 2, 3), not one combined count.
     */
    @Test
    public void testAggregate() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string char(5) not null, col1 tinyint" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            stmt.setString(1, "d");
            stmt.setInt(2, 40);
            stmt.execute();
            stmt.setString(1, "e");
            stmt.setInt(2, 50);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 30);
            stmt.execute();
            conn.commit();
            String aggregate = "select count(*) from test_table union all select count(*) from b_table union all select count(*) from test_table";
            ResultSet rs = conn.createStatement().executeQuery(aggregate);
            assertTrue(rs.next());
            assertEquals(3,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(2,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(3,rs.getInt(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * GROUP BY inside each UNION ALL arm: each distinct col1 in each arm
     * contributes one count row (three rows total, each with count 1).
     */
    @Test
    public void testGroupBy() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 30);
            stmt.execute();
            conn.commit();
            String aggregate = "select count(*), col1 from test_table group by col1 union all select count(*), col1 from b_table group by col1";
            ResultSet rs = conn.createStatement().executeQuery(aggregate);
            assertTrue(rs.next());
            assertEquals(1,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(1,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(1,rs.getInt(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * A trailing ORDER BY / LIMIT applies to the whole UNION ALL result, not
     * just the last arm. Covers: ORDER BY on a grouped union, ORDER BY on an
     * aliased column with LIMIT, and a bare LIMIT on an un-ordered union.
     */
    @Test
    public void testOrderByLimit() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table1 " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table1 VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            stmt.setString(1, "f");
            stmt.setInt(2, 10);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table1 " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table1 VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 30);
            stmt.execute();
            stmt.setString(1, "d");
            stmt.setInt(2, 30);
            stmt.execute();
            stmt.setString(1, "e");
            stmt.setInt(2, 30);
            stmt.execute();
            conn.commit();
            String aggregate = "select count(*), col1 from b_table1 group by col1 union all select count(*), col1 from test_table1 group by col1 order by col1";
            ResultSet rs = conn.createStatement().executeQuery(aggregate);
            // Sorted by col1 across both arms: col1=10 (2 rows), 20 (1), 30 (3).
            assertTrue(rs.next());
            assertEquals(2,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(1,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(3,rs.getInt(1));
            assertFalse(rs.next());
            String limit = "select count(*), col1 x from test_table1 group by col1 union all select count(*), col1 x from b_table1 group by col1 order by x limit 2";
            rs = conn.createStatement().executeQuery(limit);
            assertTrue(rs.next());
            assertEquals(2,rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(1,rs.getInt(1));
            assertFalse(rs.next());
            // Bare LIMIT with no ORDER BY: the first two rows of the first arm win.
            String limitOnly = "select * from test_table1 union all select * from b_table1 limit 2";
            rs = conn.createStatement().executeQuery(limitOnly);
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("f",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * A UNION ALL whose arms project different column counts must be rejected
     * with SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS at query time.
     */
    @Test
    public void testSelectDiff() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            // First arm projects 3 columns, last arm only 2 — must fail.
            ddl = "select a_string, col1, col1 from test_table union all select * from b_table union all select a_string, col1 from test_table";
            conn.createStatement().executeQuery(ddl);
            fail();
        } catch (SQLException e) {
            assertEquals(SQLExceptionCode.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS.getErrorCode(), e.getErrorCode());
        } finally {
            conn.close();
        }
    }
    /**
     * Joins (comma, JOIN/INNER JOIN, LEFT/RIGHT JOIN) are allowed inside each
     * UNION ALL arm. Both tables hold key "a", so every arm returns one row.
     */
    @Test
    public void testJoinInUnionAll() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 20);
            stmt.execute();
            conn.commit();
            // NOTE: the second arm deliberately swaps the aliases (s = test_table,
            // t = b_table) so it projects test_table.col1 = 10 instead of 20.
            ddl = "select x.a_string, y.col1 from test_table x, b_table y where x.a_string=y.a_string union all " +
                "select t.a_string, s.col1 from test_table s, b_table t where s.a_string=t.a_string";
            ResultSet rs = conn.createStatement().executeQuery(ddl);
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertFalse(rs.next());
            ddl = "select x.a_string, y.col1 from test_table x join b_table y on x.a_string=y.a_string union all " +
                "select t.a_string, s.col1 from test_table s inner join b_table t on s.a_string=t.a_string";
            rs = conn.createStatement().executeQuery(ddl);
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertFalse(rs.next());
            ddl = "select x.a_string, y.col1 from test_table x left join b_table y on x.a_string=y.a_string union all " +
                "select t.a_string, s.col1 from test_table s inner join b_table t on s.a_string=t.a_string union all " +
                "select y.a_string, x.col1 from b_table x right join test_table y on x.a_string=y.a_string";
            rs = conn.createStatement().executeQuery(ddl);
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(20,rs.getInt(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * Each UNION ALL arm may be a derived table (SELECT * FROM (subquery));
     * the second arm again swaps the join aliases so it projects col1=10.
     */
    @Test
    public void testDerivedTable() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 20);
            stmt.execute();
            conn.commit();
            ddl = "select * from (select x.a_string, y.col1 from test_table x, b_table y where x.a_string=y.a_string) union all " +
                "select * from (select t.a_string, s.col1 from test_table s, b_table t where s.a_string=t.a_string)";
            ResultSet rs = conn.createStatement().executeQuery(ddl);
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * A UNION ALL with ORDER BY may itself be a derived table; the outer query
     * projects one column and observes the inner ordering. Covers ordering by
     * a real column and by a shared alias across differently-named columns.
     */
    @Test
    public void testUnionAllInDerivedTable() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col2 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 30);
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 60);
            stmt.execute();
            conn.commit();
            String query = "select a_string from " +
                "(select a_string, col1 from test_table union all select a_string, col2 from b_table order by a_string)";
            ResultSet rs = conn.createStatement().executeQuery(query);
            // Sorted union of keys: a (x2), b, c.
            assertTrue(rs.next());
            assertEquals("a", rs.getString(1));
            assertTrue(rs.next());
            assertEquals("a", rs.getString(1));
            assertTrue(rs.next());
            assertEquals("b", rs.getString(1));
            assertTrue(rs.next());
            assertEquals("c", rs.getString(1));
            assertFalse(rs.next());
            // col1 and col2 are unified under the alias "c" and sorted ascending.
            query = "select c from " +
                "(select a_string, col1 c from test_table union all select a_string, col2 c from b_table order by c)";
            rs = conn.createStatement().executeQuery(query);
            assertTrue(rs.next());
            assertEquals(10, rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(20, rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(30, rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(60, rs.getInt(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * A UNION ALL is usable inside an IN subquery, both directly and wrapped
     * in a derived table. Both spellings must return the same single row.
     */
    @Test
    public void testUnionAllInSubquery() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 30);
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 60);
            stmt.execute();
            conn.commit();
            String[] queries = new String[2];
            queries[0] = "select a_string, col1 from test_table where a_string in " +
                "(select a_string aa from b_table where a_string != 'a' union all select a_string bb from b_table)";
            queries[1] = "select a_string, col1 from test_table where a_string in (select a_string from " +
                "(select a_string from b_table where a_string != 'a' union all select a_string from b_table))";
            // The union yields {c, a, c}; only "a" exists in test_table.
            for (String query : queries) {
                ResultSet rs = conn.createStatement().executeQuery(query);
                assertTrue(rs.next());
                assertEquals("a", rs.getString(1));
                assertEquals(10, rs.getInt(2));
                assertFalse(rs.next());
            }
        } finally {
            conn.close();
        }
    }
    /**
     * Bind parameters are numbered across the whole UNION ALL statement:
     * ?1 binds in the first arm, ?2 in the second.
     */
    @Test
    public void testUnionAllWithBindParam() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            String dml = "UPSERT INTO test_table VALUES(?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 10);
            stmt.execute();
            conn.commit();
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            dml = "UPSERT INTO b_table VALUES(?, ?)";
            stmt = conn.prepareStatement(dml);
            stmt.setString(1, "b");
            stmt.setInt(2, 20);
            stmt.execute();
            conn.commit();
            ddl = "select a_string, col1 from b_table where col1=? union all select a_string, col1 from test_table where col1=? ";
            stmt = conn.prepareStatement(ddl);
            stmt.setInt(1, 20);
            stmt.setInt(2, 10);
            ResultSet rs = stmt.executeQuery();
            assertTrue(rs.next());
            assertEquals("b",rs.getString(1));
            assertEquals(20,rs.getInt(2));
            assertTrue(rs.next());
            assertEquals("a",rs.getString(1));
            assertEquals(10,rs.getInt(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * EXPLAIN output for UNION ALL: ORDER BY + LIMIT pushes a per-arm TOP-N
     * plus a client merge sort; a plain LIMIT (via SQL or setMaxRows) pushes
     * per-arm row limits; an unadorned union shows two parallel scans.
     */
    @Test
    public void testExplainUnionAll() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);
        try {
            String ddl = "CREATE TABLE test_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            ddl = "CREATE TABLE b_table " +
                " (a_string varchar not null, col1 integer" +
                " CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);
            ddl = "explain select a_string, col1 from test_table union all select a_string, col1 from b_table order by col1 limit 1";
            ResultSet rs = conn.createStatement().executeQuery(ddl);
            assertEquals(
                "UNION ALL OVER 2 QUERIES\n" +
                "    CLIENT PARALLEL 1-WAY FULL SCAN OVER TEST_TABLE\n" +
                "        SERVER TOP 1 ROW SORTED BY [COL1]\n" +
                "    CLIENT MERGE SORT\n" +
                "    CLIENT PARALLEL 1-WAY FULL SCAN OVER B_TABLE\n" +
                "        SERVER TOP 1 ROW SORTED BY [COL1]\n" +
                "    CLIENT MERGE SORT\n" +
                "CLIENT MERGE SORT", QueryUtil.getExplainPlan(rs));
            String limitPlan =
                "UNION ALL OVER 2 QUERIES\n" +
                "    CLIENT SERIAL 1-WAY FULL SCAN OVER TEST_TABLE\n" +
                "        SERVER 2 ROW LIMIT\n" +
                "    CLIENT 2 ROW LIMIT\n" +
                "    CLIENT SERIAL 1-WAY FULL SCAN OVER B_TABLE\n" +
                "        SERVER 2 ROW LIMIT\n" +
                "    CLIENT 2 ROW LIMIT\n" +
                "CLIENT 2 ROW LIMIT";
            ddl = "explain select a_string, col1 from test_table union all select a_string, col1 from b_table";
            rs = conn.createStatement().executeQuery(ddl + " limit 2");
            assertEquals(limitPlan, QueryUtil.getExplainPlan(rs));
            // setMaxRows(2) must produce the same plan as an explicit LIMIT 2.
            Statement stmt = conn.createStatement();
            stmt.setMaxRows(2);
            rs = stmt.executeQuery(ddl);
            assertEquals(limitPlan, QueryUtil.getExplainPlan(rs));
            ddl = "explain select a_string, col1 from test_table union all select a_string, col1 from b_table";
            rs = conn.createStatement().executeQuery(ddl);
            assertEquals(
                "UNION ALL OVER 2 QUERIES\n" +
                "    CLIENT PARALLEL 1-WAY FULL SCAN OVER TEST_TABLE\n" +
                "    CLIENT PARALLEL 1-WAY FULL SCAN OVER B_TABLE", QueryUtil.getExplainPlan(rs));
        } finally {
            conn.close();
        }
    }
@Test
public void testBug2295() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.setAutoCommit(false);
try {
String ddl = "CREATE TABLE table1(" +
"id BIGINT, col1 VARCHAR, col2 integer, CONSTRAINT pk PRIMARY KEY (id)) IMMUTABLE_ROWS=true";
createTestTable(getUrl(), ddl);
ddl = "CREATE TABLE table2(" +
"id BIGINT, col1 VARCHAR, col2 integer, CONSTRAINT pk PRIMARY KEY (id)) IMMUTABLE_ROWS=true";
createTestTable(getUrl(), ddl);
ddl = "CREATE index idx_table1_col1 on table1(col1)";
createTestTable(getUrl(), ddl);
ddl = "CREATE index idx_table2_col1 on table2(col1)";
createTestTable(getUrl(), ddl);
ddl = "Explain SELECT /*+ INDEX(table1 idx_table1_col1) */ col1, col2 from table1 where col1='123' " +
"union all SELECT /*+ INDEX(table2 idx_table2_col1) */ col1, col2 from table2 where col1='123'";
ResultSet rs = conn.createStatement().executeQuery(ddl);
assertTrue(rs.next());
} finally {
String ddl = "drop table table1";
conn.createStatement().execute(ddl);
ddl = "drop table table2";
conn.createStatement().execute(ddl);
conn.close();
}
}
@Test
public void testParameterMetaDataNotNull() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl(), props);
String ddl = "CREATE TABLE test_table " +
" (a_string varchar not null, col1 integer" +
" CONSTRAINT pk PRIMARY KEY (a_string))\n";
createTestTable(getUrl(), ddl);
String dml = "UPSERT INTO test_table VALUES(?, ?)";
PreparedStatement stmt = conn.prepareStatement(dml);
stmt.setString(1, "a");
stmt.setInt(2, 10);
stmt.execute();
conn.commit();
ddl = "CREATE TABLE b_table " +
" (a_string varchar not null, col1 integer" +
" CONSTRAINT pk PRIMARY KEY (a_string))\n";
createTestTable(getUrl(), ddl);
dml = "UPSERT INTO b_table VALUES(?, ?)";
stmt = conn.prepareStatement(dml);
stmt.setString(1, "b");
stmt.setInt(2, 20);
stmt.execute();
conn.commit();
String query = "select * from test_table union all select * from b_table";
try{
PreparedStatement pstmt = conn.prepareStatement(query);
assertTrue(pstmt.getParameterMetaData() != null);
ResultSet rs = pstmt.executeQuery();
assertTrue(rs.next());
assertEquals("a",rs.getString(1));
assertEquals(10,rs.getInt(2));
assertTrue(rs.next());
assertEquals("b",rs.getString(1));
assertEquals(20,rs.getInt(2));
assertFalse(rs.next());
} finally {
conn.close();
}
}
    /**
     * UNION ALL arms whose columns differ in type (bigint vs integer,
     * constant 'foo' vs char(12), varchar vs char) are coerced to a common
     * type; fixed-width char values come back space-padded (hence trim()).
     * NOTE(review): table setup runs before the try/finally, so a failed
     * upsert would leak the connection — consider widening the guard.
     */
    @Test
    public void testDiffDataTypes() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String ddl = "create table person ( id bigint not null primary key, " +
            "firstname varchar(10), lastname varchar(10) )";
        createTestTable(getUrl(), ddl);
        String dml = "upsert into person values (?, ?, ?)";
        PreparedStatement stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "john");
        stmt.setString(3, "doe");
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "jane");
        stmt.setString(3, "doe");
        stmt.execute();
        conn.commit();
        ddl = "create table user ( id integer not null primary key, firstname char(12)," +
            " lastname varchar(12) )";
        createTestTable(getUrl(), ddl);
        dml = "upsert into user values (?, ?, ?)";
        stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "sam");
        stmt.setString(3, "johnson");
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "ann");
        stmt.setString(3, "wiely");
        stmt.execute();
        conn.commit();
        ddl = "create table t1 ( id varchar(20) not null primary key)";
        createTestTable(getUrl(), ddl);
        dml = "upsert into t1 values ('abcd')";
        stmt = conn.prepareStatement(dml);
        stmt.execute();
        conn.commit();
        ddl = "create table t2 ( id char(50) not null primary key)";
        createTestTable(getUrl(), ddl);
        dml = "upsert into t2 values ('xyz')";
        stmt = conn.prepareStatement(dml);
        stmt.execute();
        conn.commit();
        String query = "select id, 'foo' firstname, lastname from person union all" +
            " select * from user";
        try {
            PreparedStatement pstmt = conn.prepareStatement(query);
            ResultSet rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("foo", rs.getString(2));
            assertEquals("doe", rs.getString(3));
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("foo", rs.getString(2));
            assertEquals("doe", rs.getString(3));
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("sam", rs.getString(2).trim());
            assertEquals("johnson", rs.getString(3));
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("ann", rs.getString(2).trim());
            assertEquals("wiely", rs.getString(3));
            assertFalse(rs.next());
            pstmt = conn.prepareStatement("select * from t1 union all select * from t2");
            rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals("abcd", rs.getString(1));
            assertTrue(rs.next());
            assertEquals("xyz", rs.getString(1).trim());
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * UNION ALL across tables whose PKs differ in sort order (desc vs asc)
     * and whose decimal columns differ in scale. person's desc PK makes its
     * rows arrive in reverse id order; decimals of different scale still
     * compare equal by value.
     * NOTE(review): table setup runs before the try/finally, so a failed
     * upsert would leak the connection — consider widening the guard.
     */
    @Test
    public void testDiffScaleSortOrder() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String ddl = "create table person ( id bigint not null primary key desc, " +
            "firstname char(10), lastname varchar(10) )";
        createTestTable(getUrl(), ddl);
        String dml = "upsert into person values (?, ?, ?)";
        PreparedStatement stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "john");
        stmt.setString(3, "doe");
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "jane");
        stmt.setString(3, "doe");
        stmt.execute();
        conn.commit();
        ddl = "create table user ( id integer not null primary key asc, " +
            "firstname varchar(12), lastname varchar(10) )";
        createTestTable(getUrl(), ddl);
        dml = "upsert into user values (?, ?, ?)";
        stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "sam");
        stmt.setString(3, "johnson");
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "ann");
        stmt.setString(3, "wiely");
        stmt.execute();
        conn.commit();
        ddl = "create table t1 ( id varchar(20) not null primary key, col1 decimal)";
        createTestTable(getUrl(), ddl);
        dml = "upsert into t1 values ('abcd', 234.23)";
        stmt = conn.prepareStatement(dml);
        stmt.execute();
        conn.commit();
        ddl = "create table t2 ( id char(50) not null primary key, col1 decimal(12,4))";
        createTestTable(getUrl(), ddl);
        dml = "upsert into t2 values ('xyz', 1342.1234)";
        stmt = conn.prepareStatement(dml);
        stmt.execute();
        conn.commit();
        String query = "select * from user union all select * from person";
        try {
            PreparedStatement pstmt = conn.prepareStatement(query);
            ResultSet rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("sam", rs.getString(2));
            assertEquals("johnson", rs.getString(3));
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("ann", rs.getString(2));
            assertEquals("wiely", rs.getString(3));
            // person has a DESC primary key, so id=2 precedes id=1.
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("jane", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3));
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("john", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3));
            assertFalse(rs.next());
            pstmt = conn.prepareStatement("select * from t1 union all select * from t2");
            rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals("abcd", rs.getString(1));
            assertEquals(BigDecimal.valueOf(234.2300), rs.getBigDecimal(2));
            assertTrue(rs.next());
            assertEquals("xyz", rs.getString(1).trim());
            assertEquals(BigDecimal.valueOf(1342.1234), rs.getBigDecimal(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * A constant string projected against a char column in the other arm:
     * 'baa' (first arm) unions with person.firstname varchar(10). person's
     * DESC primary key reverses its row order; char columns are trimmed.
     * NOTE(review): table setup runs before the try/finally, so a failed
     * upsert would leak the connection — consider widening the guard.
     */
    @Test
    public void testVarcharChar() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String ddl = "create table user ( id integer not null primary key asc, " +
            "firstname char(8), lastname varchar )";
        createTestTable(getUrl(), ddl);
        String dml = "upsert into user values (?, ?, ?)";
        PreparedStatement stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "sam");
        stmt.setString(3, "johnson");
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "ann");
        stmt.setString(3, "wiely");
        stmt.execute();
        conn.commit();
        ddl = "create table person ( id bigint not null primary key desc, " +
            "firstname varchar(10), lastname char(10) )";
        createTestTable(getUrl(), ddl);
        dml = "upsert into person values (?, ?, ?)";
        stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "john");
        stmt.setString(3, "doe");
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "jane");
        stmt.setString(3, "doe");
        stmt.execute();
        conn.commit();
        String query = "select id, 'baa' firstname, lastname from user " +
            "union all select * from person";
        try {
            PreparedStatement pstmt = conn.prepareStatement(query);
            ResultSet rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("baa", rs.getString(2));
            assertEquals("johnson", rs.getString(3));
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("baa", rs.getString(2));
            assertEquals("wiely", rs.getString(3));
            // person's DESC primary key: id=2 ("jane") precedes id=1 ("john").
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("jane", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3).trim());
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("john", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3).trim());
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
    /**
     * CAST coercion in a UNION ALL arm: cast('foo' as char(10)) unions with
     * user.firstname char(8); decimal unions with double and the values are
     * read back as BigDecimal. user's DESC primary key reverses its rows.
     * The final single-table query re-checks the cast outside the union.
     * NOTE(review): table setup runs before the try/finally, so a failed
     * upsert would leak the connection — consider widening the guard.
     */
    @Test
    public void testCoerceExpr() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String ddl = "create table user ( id integer not null primary key desc, " +
            "firstname char(8), lastname varchar, sales double)";
        createTestTable(getUrl(), ddl);
        String dml = "upsert into user values (?, ?, ?, ?)";
        PreparedStatement stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "sam");
        stmt.setString(3, "johnson");
        stmt.setDouble(4, 100.6798);
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "ann");
        stmt.setString(3, "wiely");
        stmt.setDouble(4, 10.67);
        stmt.execute();
        conn.commit();
        ddl = "create table person (id bigint not null primary key, " +
            "firstname char(10), lastname varchar(10), sales decimal)";
        createTestTable(getUrl(), ddl);
        dml = "upsert into person values (?, ?, ?, ?)";
        stmt = conn.prepareStatement(dml);
        stmt.setInt(1, 1);
        stmt.setString(2, "john");
        stmt.setString(3, "doe");
        stmt.setBigDecimal(4, BigDecimal.valueOf(467.894745));
        stmt.execute();
        stmt.setInt(1, 2);
        stmt.setString(2, "jane");
        stmt.setString(3, "doe");
        stmt.setBigDecimal(4, BigDecimal.valueOf(88.89474501));
        stmt.execute();
        conn.commit();
        String query = "select id, cast('foo' as char(10)) firstname, lastname, sales " +
            "from person union all select * from user";
        try {
            PreparedStatement pstmt = conn.prepareStatement(query);
            ResultSet rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("foo", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3).trim());
            assertEquals(BigDecimal.valueOf(467.894745), rs.getBigDecimal(4));
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("foo", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3).trim());
            assertEquals(BigDecimal.valueOf(88.89474501), rs.getBigDecimal(4));
            // user's DESC primary key: id=2 ("ann") precedes id=1 ("sam").
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("ann", rs.getString(2).trim());
            assertEquals("wiely", rs.getString(3).trim());
            assertEquals(BigDecimal.valueOf(10.67), rs.getBigDecimal(4));
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("sam", rs.getString(2).trim());
            assertEquals("johnson", rs.getString(3).trim());
            assertEquals(BigDecimal.valueOf(100.6798), rs.getBigDecimal(4));
            assertFalse(rs.next());
            query = "select id, cast('foo' as char(10)) firstname, lastname, sales from person";
            pstmt = conn.prepareStatement(query);
            rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("foo", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3));
            assertEquals(BigDecimal.valueOf(467.894745), rs.getBigDecimal(4));
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertEquals("foo", rs.getString(2).trim());
            assertEquals("doe", rs.getString(3));
            assertEquals(BigDecimal.valueOf(88.89474501), rs.getBigDecimal(4));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
}
| |
/**
* ********************************************************************************
* Copyright (c) 2011, Monnet Project All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met: *
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer. * Redistributions in binary
* form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided
* with the distribution. * Neither the name of the Monnet Project nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE MONNET PROJECT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* *******************************************************************************
*/
package eu.monnetproject.lemon;
import eu.monnetproject.lemon.impl.SPARQLResolver;
import eu.monnetproject.lemon.impl.LemonModelImpl;
import eu.monnetproject.lemon.impl.SPARULUpdater;
import eu.monnetproject.lemon.impl.SPARULUpdaterFactory;
import eu.monnetproject.lemon.model.PropertyValue;
import eu.monnetproject.lemon.model.Text;
import eu.monnetproject.lemon.model.LexicalForm;
import eu.monnetproject.lemon.model.LexicalEntry;
import eu.monnetproject.lemon.model.Lexicon;
import eu.monnetproject.lemon.model.Property;
import eu.monnetproject.lemon.model.LexicalSense;
import eu.monnetproject.lemon.liam.MorphologyApplicationException;
import eu.monnetproject.lemon.liam.MorphologyEngine;
import eu.monnetproject.lemon.liam.impl.MorphologyEngineImpl;
import eu.monnetproject.lemon.model.MorphPattern;
import java.net.URI;
import java.net.URL;
import java.util.*;
/**
* Set of static task that work on lemon models
*
* @author John McCrae
*/
public final class LemonModels {
    // No instantiation: this class only exposes static utility methods
    private LemonModels() {
    }
/**
* Select a lexical entry by the form's representation
*
* @param model The model containing the appropriate lexica
* @param form The representation of the form
* @param lang The languages of the form
*/
public static List<LexicalEntry> getEntriesByForm(LemonModel model, String form, String lang) {
LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry { "
+ "?form lemon:writtenRep \"" + form + "\"@" + lang.toString() + " ."
+ "{ ?entry lemon:canonicalForm ?form } UNION "
+ "{ ?entry lemon:otherForm ?form } UNION "
+ "{ ?entry lemon:abstractForm ?form } UNION "
+ "{ ?entry lemon:lexicalForm ?form } ."
+ "?lexicon lemon:entry ?entry }";
final Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
while (iter.hasNext()) {
rval.add(iter.next());
}
return rval;
}
/**
* Select a lexical entry by the form's representation, using regex
*
* @param model The model containing the appropriate lexica
* @param form The representation of the form
*/
public static List<LexicalEntry> getEntriesByFormApprox(LemonModel model, String form) {
LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry { "
+ "?form lemon:writtenRep ?rep ."
+ "FILTER(regex(str(?rep),\"" + form + "\",\"i\")) ."
+ "{ ?entry lemon:canonicalForm ?form } UNION "
+ "{ ?entry lemon:otherForm ?form } UNION "
+ "{ ?entry lemon:abstractForm ?form } UNION "
+ "{ ?entry lemon:form ?form } }";
//System.err.println(query);
Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
while (iter.hasNext()) {
LexicalEntry entry = iter.next();
rval.add(entry);
//System.err.println(entry.getURI());
}
return rval;
}
    /**
     * Select lexical entries by a form object.
     *
     * <p>Tries up to three strategies in order: (1) a SPARQL query on the form's
     * URI or blank-node ID; (2) if that fails and the form is a blank node, a
     * query on the form's written representation (Sesame work-around); (3) a full
     * linear scan over all lexica.
     *
     * @param model The model containing the appropriate lexica
     * @param form The form object
     * @return The entries that have the given form
     */
    public static List<LexicalEntry> getEntriesByForm(LemonModel model, LexicalForm form) {
        try {
            LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
            // Address the form by URI if it has one, otherwise by blank-node ID
            String formSparql;
            if (form.getURI() != null) {
                formSparql = "<" + form.getURI() + ">";
            } else {
                formSparql = "_:" + form.getID();
            }
            String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
                + "SELECT DISTINCT ?entry { "
                + "{ ?entry lemon:canonicalForm " + formSparql + " } UNION "
                + "{ ?entry lemon:otherForm " + formSparql + " } UNION "
                + "{ ?entry lemon:abstractForm " + formSparql + " } UNION "
                + "{ ?entry lemon:form " + formSparql + " } ."
                + "?lexicon lemon:entry ?entry }";
            Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
            while (iter.hasNext()) {
                rval.add(iter.next());
            }
            return rval;
        } catch (Exception x) {
            try {
                if (form.getURI() == null) {
                    LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
                    // Work around for sesame bug: blank-node queries may fail, so
                    // match on the written representation instead
                    String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
                        + "SELECT DISTINCT ?entry { "
                        + "{ ?entry lemon:canonicalForm ?x . ?x lemon:writtenRep \"" + form.getWrittenRep().value + "\"@" + form.getWrittenRep().language + " } UNION "
                        + "{ ?entry lemon:otherForm ?x . ?x lemon:writtenRep \"" + form.getWrittenRep().value + "\"@" + form.getWrittenRep().language + " } UNION "
                        + "{ ?entry lemon:abstractForm ?x . ?x lemon:writtenRep \"" + form.getWrittenRep().value + "\"@" + form.getWrittenRep().language + " } UNION "
                        + "{ ?entry lemon:form ?x . ?x lemon:writtenRep \"" + form.getWrittenRep().value + "\"@" + form.getWrittenRep().language + " } ."
                        + "?lexicon lemon:entry ?entry. ?entry ?p ?x }";
                    Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
                    while (iter.hasNext()) {
                        rval.add(iter.next());
                    }
                    return rval;
                } else {
                    // URI-based query already failed; jump to the linear-scan fallback
                    throw new Exception();
                }
            } catch (Exception x2) {
                // Last resort: scan every form of every entry in every lexicon
                LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
                for (Lexicon lexicon : model.getLexica()) {
                    ENTRIES:
                    for (LexicalEntry entry : lexicon.getEntrys()) {
                        for (LexicalForm form2 : entry.getForms()) {
                            if (form.equals(form2)) {
                                rval.add(entry);
                                continue ENTRIES; // entry matched; avoid duplicate adds
                            }
                        }
                    }
                }
                return rval;
            }
        }
    }
/**
* Get the set of lexica containing a given entry
*
* @param model The model containing the appropriate lexica
* @param entry The entry
*/
public static List<Lexicon> getLexicaByEntry(LemonModel model, LexicalEntry entry) {
LinkedList<Lexicon> rval = new LinkedList<Lexicon>();
try {
String entrySparql;
if (entry.getURI() != null) {
entrySparql = "<" + entry.getURI() + ">";
} else {
entrySparql = "_:" + entry.getID();
}
String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?lexicon { "
+ "?lexicon lemon:entry " + entrySparql + " }";
Iterator<Lexicon> iter = model.query(Lexicon.class, query);
while (iter.hasNext()) {
rval.add(iter.next());
}
return rval;
} catch (Exception x) {
x.printStackTrace();
for(Lexicon lexicon : model.getLexica()) {
if(lexicon.hasEntry(entry)) {
rval.add(lexicon);
}
}
return rval;
}
}
/**
* Get the set of entries that refer to a given reference
*
* @param model The model containing the appropriate lexica
* @param reference The uri reference
*/
public static List<LexicalEntry> getEntryByReference(LemonModel model, URI reference) {
LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
try {
String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry { "
+ "?entry lemon:sense ?sense ."
+ "?sense lemon:reference <" + reference + "> }";
Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
while (iter.hasNext()) {
rval.add(iter.next());
}
return rval;
} catch (Exception x) {
for (Lexicon lexicon : model.getLexica()) {
for (LexicalEntry lexicalEntry : lexicon.getEntrys()) {
for (LexicalSense sense : lexicalEntry.getSenses()) {
if (sense.getReference().equals(reference)) {
rval.add(lexicalEntry);
continue;
}
}
}
}
return rval;
}
}
public static List<LexicalEntry> getEntryByReference(Lexicon lexicon, URI reference) {
LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
try {
String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry { "
+ "<" + lexicon.getURI() + "> lemon:entry ?entry ."
+ "?entry lemon:sense ?sense ."
+ "?sense lemon:reference <" + reference + "> }";
Iterator<LexicalEntry> iter = lexicon.getModel().query(LexicalEntry.class, query);
while (iter.hasNext()) {
rval.add(iter.next());
}
return rval;
} catch (Exception x) {
for (LexicalEntry lexicalEntry : lexicon.getEntrys()) {
for (LexicalSense sense : lexicalEntry.getSenses()) {
if (sense.getReference().equals(reference)) {
rval.add(lexicalEntry);
continue;
}
}
}
return rval;
}
}
/**
* Get the set of entries that refer to a given sense
*
* @param model The model containing the appropriate lexica
* @param sense The sense object
*/
@SuppressWarnings("unchecked")
public static LexicalEntry getEntryBySense(LemonModel model, LexicalSense sense) {
if (sense.getIsSenseOf() != null) {
return sense.getIsSenseOf();
} else {
String senseSparql;
if (sense.getURI() != null) {
senseSparql = "<" + sense.getURI() + ">";
} else {
senseSparql = "_:" + sense.getID();
}
String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry { "
+ "?entry lemon:sense " + senseSparql + " }";
System.err.println(query);
Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
if (iter.hasNext()) {
final LexicalEntry entry = iter.next();
sense.setIsSenseOf(entry);
return entry;
} else {
return null;
}
}
}
/**
* Get all the entries in a lexicon as an alphabetic sorted list
*
* @param model The model containing the lexica
* @param lexicon The lexicon to list
* @param offset The first entry to show
* @param limit The maximum number of entries to return, 0 for no limit
*/
@SuppressWarnings("unchecked")
public static Collection<LexicalEntry> getEntriesAlphabetic(LemonModel model, Lexicon lexicon, int offset, int limit) {
final Comparator<LexicalEntry> entryIDComp = new Comparator<LexicalEntry>() {
@Override
public int compare(LexicalEntry o1, LexicalEntry o2) {
if (o1.getURI() != null && o2.getURI() != null) {
return o1.getURI().toString().compareTo(o2.getURI().toString());
} else if (o1.getURI() == null && o2.getURI() == null) {
return o1.getID().compareTo(o2.getID());
} else if (o1.getURI() == null) {
return +1;
} else {
return -1;
}
}
};
try {
TreeSet<LexicalEntry> rval = new TreeSet<LexicalEntry>(entryIDComp);
final String query = "PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry {"
+ "<" + lexicon.getURI() + "> lemon:entry ?entry . } "
+ "ORDER BY ?entry "
+ (limit > 0 ? "LIMIT " + limit : "")
+ (offset > 0 ? "OFFSET " + offset : "");
Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query);
while (iter.hasNext()) {
rval.add(iter.next());
}
return rval;
} catch (Exception x) {
x.printStackTrace();
TreeSet<LexicalEntry> entries = new TreeSet<LexicalEntry>(new Comparator<LexicalEntry>() {
@Override
public int compare(LexicalEntry e1, LexicalEntry e2) {
if (e1.getCanonicalForm() != null && e1.getCanonicalForm().getWrittenRep() != null) {
if (e2.getCanonicalForm() != null && e2.getCanonicalForm().getWrittenRep() != null) {
int rv = e1.getCanonicalForm().getWrittenRep().value.compareTo(
e2.getCanonicalForm().getWrittenRep().value);
if (rv == 0) {
return e1.getURI().compareTo(e2.getURI());
} else {
return rv;
}
} else {
return -1;
}
} else if (e2.getCanonicalForm() != null && e2.getCanonicalForm().getWrittenRep() != null) {
return 1;
} else {
return e1.getURI().compareTo(e2.getURI());
}
}
});
for (LexicalEntry le : lexicon.getEntrys()) {
entries.add(le);
}
if (limit > 0) {
if (offset > 0) {
return new ArrayList<LexicalEntry>(entries).subList(offset, Math.min(entries.size(), offset + limit));
} else {
return new ArrayList<LexicalEntry>(entries).subList(0, Math.min(entries.size(), 0 + limit));
}
} else {
if (offset > 0) {
return new ArrayList<LexicalEntry>(entries).subList(offset, entries.size());
} else {
return new ArrayList<LexicalEntry>(entries);
}
}
}
}
/**
* Get entries in a lexicon mapped by the references they have
*
* @param model The model containing all the lexica
* @param lexicon The lexicon containg all entries
* @param offset The first entry to return
* @param limit The maxiumum number of entries to return, 0 for unlimited
*/
public static Map<URI, List<LexicalEntry>> getEntriesBySense(LemonModel model, Lexicon lexicon, int offset, int limit) {
try {
TreeMap<URI, List<LexicalEntry>> rval = new TreeMap<URI, List<LexicalEntry>>(new Comparator<URI>() {
@Override
public int compare(URI uri1, URI uri2) {
if (uri1.toString().equals("special:none")) {
if (uri2.toString().equals("special:none")) {
return 0;
} else {
return -1;
}
} else if (uri2.toString().equals("special:none")) {
return 1;
} else {
return uri1.toString().toLowerCase().compareTo(uri2.toString().toLowerCase());
}
}
@Override
public int hashCode() {
return super.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
return this == obj;
}
});
Iterator<LexicalEntry> iter = model.query(LexicalEntry.class,
"PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> "
+ "SELECT DISTINCT ?entry {"
+ "<" + lexicon.getURI() + "> lemon:entry ?entry . "
+ "OPTIONAL { ?entry lemon:sense ?sense ."
+ "?sense lemon:reference ?ref . } "
+ "OPTIONAL { ?entry lemon:canonicalForm ?form . "
+ "?form lemon:writtenRep ?rep } } "
+ "ORDER BY ?sense ?rep "
+ (limit > 0 ? "LIMIT " + limit : "")
+ (offset > 0 ? "OFFSET " + offset : ""));
while (iter.hasNext()) {
LexicalEntry entry = iter.next();
for (LexicalSense s : entry.getSenses()) {
URI ref = s.getReference();
if (!rval.containsKey(ref)) {
rval.put(ref, new LinkedList<LexicalEntry>());
}
rval.get(ref).add(entry);
}
if (entry.getSenses().isEmpty()) {
URI ref = URI.create("special:none");
if (!rval.containsKey(ref)) {
rval.put(ref, new LinkedList<LexicalEntry>());
}
rval.get(ref).add(entry);
}
}
return rval;
} catch (Exception x) {
x.printStackTrace();
return null;
//TreeSet<LexicalEntry> entries = new TreeSet<LexicalEntry>(new Comparator<LexicalEntry>() {
// public int compare(LexicalEntry e1, LexicalEntry e2) {
// if(e1.getCanonicalForm() != null && e1.getCanonicalForm().getWrittenRep() != null) {
// if(e2.getCanonicalForm() != null && e2.getCanonicalForm().getWrittenRep() != null) {
// int rv = e1.getCanonicalForm().getWrittenRep().value.compareTo(
// e2.getCanonicalForm().getWrittenRep().value);
// if(rv == 0) {
// return e1.getURI().compareTo(e2.getURI());
// } else {
// return rv;
// }
// } else {
// return -1;
// }
// } else if(e2.getCanonicalForm() != null && e2.getCanonicalForm().getWrittenRep() != null) {
// return 1;
// } else {
// return e1.getURI().compareTo(e2.getURI());
// }
// }
// public boolean equals(Object o) { return this == o; }
//});
//for(LexicalEntry le : lexicon.getEntrys()) {
// entries.add(le);
//}
//if(limit > 0) {
// if(offset > 0) {
// return new ArrayList(entries).subList(offset,offset+limit);
// } else {
// return new ArrayList(entries).subList(0,0+limit);
// }
//} else {
// if(offset > 0) {
// return new ArrayList(entries).subList(offset,entries.size());
// } else {
// return new ArrayList(entries);
// }
//}
}
}
    /**
     * Get entries by their written representation and properties.
     * Falls back to a linear scan over all lexica if the query fails.
     *
     * @param model The model containing all lexica
     * @param form The written representation of the form
     * @param lang The language of the form
     * @param props The set of properties the entry object has (all must match)
     */
    public static List<LexicalEntry> getEntriesByFormAndProps(LemonModel model, String form, String lang,
            Map<Property, PropertyValue> props) {
        try {
            LinkedList<LexicalEntry> rval = new LinkedList<LexicalEntry>();
            // Build one query that matches the representation on any form relation
            // and additionally requires every property/value pair on the entry
            StringBuilder query = new StringBuilder(100);
            query.append("PREFIX lemon: <" + LemonModel.NEW_LEMON_URI + "> " + "SELECT DISTINCT ?entry { " + "?form lemon:writtenRep \"").append(form).append("\"@").append(lang.toString()).append(" ."
                    + "{ ?entry lemon:canonicalForm ?form } UNION "
                    + "{ ?entry lemon:otherForm ?form } UNION "
                    + "{ ?entry lemon:abstractForm ?form } UNION "
                    + "{ ?entry lemon:lexicalForm ?form } .");
            for (Map.Entry<Property, PropertyValue> prop : props.entrySet()) {
                query.append("?entry <").append(prop.getKey().getURI()).append("> <").append(prop.getValue().getURI()).append("> .");
            }
            query.append("?lexicon lemon:entry ?entry }");
            Iterator<LexicalEntry> iter = model.query(LexicalEntry.class, query.toString());
            while (iter.hasNext()) {
                rval.add(iter.next());
            }
            return rval;
        } catch (Exception x) {
            // Fallback: scan every entry, first filtering on properties, then on forms
            List<LexicalEntry> rval = new LinkedList<LexicalEntry>();
            for (Lexicon lexicon : model.getLexica()) {
                LE_LOOP:
                for (LexicalEntry le : lexicon.getEntrys()) {
                    // Entry must carry every requested property value
                    for (Property prop : props.keySet()) {
                        Collection<PropertyValue> vals = le.getProperty(prop);
                        if (!vals.contains(props.get(prop))) {
                            continue LE_LOOP;
                        }
                    }
                    // NOTE(review): the canonical-form check compares only the value,
                    // not the language (unlike the other branches) — confirm intended
                    if (le.getCanonicalForm() != null && le.getCanonicalForm().getWrittenRep().value.equals(form)) {
                        rval.add(le);
                    } else {
                        for (LexicalForm f : le.getOtherForms()) {
                            if (f.getWrittenRep().value.equals(form)
                                    && f.getWrittenRep().language.equals(lang)) {
                                rval.add(le);
                                continue LE_LOOP;
                            }
                        }
                        for (LexicalForm f : le.getAbstractForms()) {
                            if (f.getWrittenRep().value.equals(form)
                                    && f.getWrittenRep().language.equals(lang)) {
                                rval.add(le);
                                continue LE_LOOP;
                            }
                        }
                        for (LexicalForm f : le.getForms()) {
                            if (f.getWrittenRep().value.equals(form)
                                    && f.getWrittenRep().language.equals(lang)) {
                                rval.add(le);
                                continue LE_LOOP;
                            }
                        }
                    }
                }
            }
            return rval;
        }
    }
    /**
     * Quickly add a lexical entry to a lexicon. Will re-use an existing lexical
     * entry, adding a new sense, if the entry's URI already exists.
     *
     * @param lexicon The lexicon
     * @param entryURI The identifier for the entry
     * @param canForm The written representation of the canonical form
     * @param senseRef The reference of the sense URI, or null to add no sense
     * @return The new (or re-used) lexical entry
     * @throws IllegalArgumentException If the entry URI is duplicated by an element with a different canonical form
     */
    public static LexicalEntry addEntryToLexicon(Lexicon lexicon, URI entryURI, String canForm, URI senseRef) {
        LemonFactory factory = lexicon.getModel().getFactory();
        // If the URI is already in use we are re-using an existing entry
        boolean duplicateEntry = factory.isURIUsed(entryURI);
        LexicalEntry entry = factory.makeLexicalEntry(entryURI);
        // A re-used entry must agree on its canonical form
        if(duplicateEntry && entry.getCanonicalForm() != null && entry.getCanonicalForm().getWrittenRep() != null && !canForm.equals(entry.getCanonicalForm().getWrittenRep().value)) {
            throw new IllegalArgumentException("There is already a lexical entry in this lexicon with URI <" +entryURI+"> and canonical form \""
                    + entry.getCanonicalForm().getWrittenRep().value + "\" that differs from \"" + canForm +"\"");
        }
        LexicalForm form = factory.makeForm(URI.create(entryURI + "/canonicalForm"));
        if (senseRef != null) {
            // Find a free URI for the new sense: <entry>/sense, <entry>/sense1, <entry>/sense2, ...
            URI senseURI = URI.create(entryURI + "/sense");
            int i = 1;
            while(factory.isURIUsed(senseURI)) {
                senseURI = URI.create(entryURI + "/sense"+i++);
            }
            LexicalSense sense = factory.makeSense(senseURI);
            sense.setReference(senseRef);
            entry.addSense(sense);
        }
        // The canonical form takes the lexicon's language
        form.setWrittenRep(new Text(canForm, lexicon.getLanguage()));
        entry.setCanonicalForm(form);
        lexicon.addEntry(entry);
        return entry;
    }
public static LexicalForm resolveForm(LexicalEntry entry, Map<Property, Collection<PropertyValue>> properties) {
FORMS:
for (LexicalForm form : entry.getForms()) {
for (Map.Entry<Property, Collection<PropertyValue>> props : properties.entrySet()) {
if (!form.getProperty(props.getKey()).containsAll(props.getValue())) {
continue FORMS;
}
}
return form;
}
final MorphologyEngine morphEngine = new MorphologyEngineImpl();
for (MorphPattern pattern : entry.getPatterns()) {
try {
final LexicalForm form = morphEngine.generate(entry, pattern, properties);
if (form != null) {
return form;
}
} catch (MorphologyApplicationException x) {
x.printStackTrace();
}
}
return null;
}
    /**
     * Connect to a lemon model contained in a SPARQL endpoint
     *
     * @param endpoint The URL of the SPARQL endpoint
     * @param graphs The graphs in the endpoint to use
     * @param lingOnto The linguistic ontology to use (may be null)
     * @return A model which resolves based on the endpoint
     */
    public static LemonModel sparqlEndpoint(URL endpoint, Set<URI> graphs, LinguisticOntology lingOnto) {
        // Read-only connection: no updater factory is supplied
        return new LemonModelImpl(null, new SPARQLResolver(endpoint, graphs, lingOnto),null);
    }
    /**
     * Connect to a lemon model in a repository supporting SPARQL and SPARQL
     * update
     *
     * @param sparqlEndpoint The URL of the endpoint for querying, e.g.,
     * "http://localhost:8080/sparql"
     * @param graph The graph to use in the endpoint
     * @param lingOnto The linguistic ontology to use (may be null)
     * @param updateEndpoint The URL pattern for the endpoint with query, e.g.,
     * "http://localhost:8080/sparql-auth?query="
     * @param dialect Which dialect of SPARQL to use, e.g., SPARUL for Virtuoso,
     * SPARQL11 for 4store
     * @return A model which resolves and updates based on the endpoint
     */
    public static LemonModel sparqlUpdateEndpoint(URL sparqlEndpoint, URI graph, LinguisticOntology lingOnto,
            String updateEndpoint, SPARQL dialect) {
        // Unauthenticated variant; reads and writes are scoped to the single given graph
        return new LemonModelImpl(null, new SPARQLResolver(sparqlEndpoint, Collections.singleton(graph), lingOnto), new SPARULUpdaterFactory(updateEndpoint, graph, dialect));
    }
    /**
     * Connect to a lemon model in a repository supporting SPARQL and SPARQL
     * update, authenticating with the given credentials
     *
     * @param sparqlEndpoint The URL of the endpoint for querying, e.g.,
     * "http://localhost:8080/sparql"
     * @param graph The graph to use in the endpoint
     * @param lingOnto The linguistic ontology to use (may be null)
     * @param updateEndpoint The URL pattern for the endpoint with query, e.g.,
     * "http://localhost:8080/sparql-auth?query="
     * @param username The user name to use to authenticate
     * @param password The password to use to authenticate
     * @param dialect Which dialect of SPARQL to use, e.g., SPARUL for Virtuoso,
     * SPARQL11 for 4store
     * @return A model which resolves and updates based on the endpoint
     */
    public static LemonModel sparqlUpdateEndpoint(URL sparqlEndpoint, URI graph, LinguisticOntology lingOnto,
            String updateEndpoint, String username, String password, SPARQL dialect) {
        // Authenticated variant; credentials are passed through to the updater factory
        return new LemonModelImpl(null, new SPARQLResolver(sparqlEndpoint, Collections.singleton(graph), lingOnto), new SPARULUpdaterFactory(updateEndpoint, graph, username, password, dialect));
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2017_10_01.implementation;
import com.microsoft.azure.management.network.v2017_10_01.PacketCaptureStorageLocation;
import java.util.List;
import com.microsoft.azure.management.network.v2017_10_01.PacketCaptureFilter;
import com.microsoft.azure.management.network.v2017_10_01.ProvisioningState;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
/**
 * Information about a packet capture session.
 * Auto-generated model; property fields are flattened from the wire-format
 * "properties" object via {@code @JsonFlatten}.
 */
@JsonFlatten
public class PacketCaptureResultInner {
    /**
     * Name of the packet capture session (read-only; set by the service).
     */
    @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY)
    private String name;
    /**
     * ID of the packet capture operation (read-only; set by the service).
     */
    @JsonProperty(value = "id", access = JsonProperty.Access.WRITE_ONLY)
    private String id;
    /**
     * The etag property.
     */
    @JsonProperty(value = "etag")
    private String etag;
    /**
     * The ID of the targeted resource, only VM is currently supported.
     * Required by the service.
     */
    @JsonProperty(value = "properties.target", required = true)
    private String target;
    /**
     * Number of bytes captured per packet, the remaining bytes are truncated.
     */
    @JsonProperty(value = "properties.bytesToCapturePerPacket")
    private Integer bytesToCapturePerPacket;
    /**
     * Maximum size of the capture output.
     */
    @JsonProperty(value = "properties.totalBytesPerSession")
    private Integer totalBytesPerSession;
    /**
     * Maximum duration of the capture session in seconds.
     */
    @JsonProperty(value = "properties.timeLimitInSeconds")
    private Integer timeLimitInSeconds;
    /**
     * The storageLocation property. Required by the service.
     */
    @JsonProperty(value = "properties.storageLocation", required = true)
    private PacketCaptureStorageLocation storageLocation;
    /**
     * The filters property.
     */
    @JsonProperty(value = "properties.filters")
    private List<PacketCaptureFilter> filters;
    /**
     * The provisioning state of the packet capture session. Possible values
     * include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     */
    @JsonProperty(value = "properties.provisioningState")
    private ProvisioningState provisioningState;
    /**
     * Get the name of the packet capture session.
     *
     * @return the name value
     */
    public String name() {
        return this.name;
    }
    /**
     * Get the ID of the packet capture operation.
     *
     * @return the id value
     */
    public String id() {
        return this.id;
    }
    /**
     * Get the etag value.
     *
     * @return the etag value
     */
    public String etag() {
        return this.etag;
    }
    /**
     * Set the etag value.
     *
     * @param etag the etag value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withEtag(String etag) {
        this.etag = etag;
        return this;
    }
    /**
     * Get the ID of the targeted resource, only VM is currently supported.
     *
     * @return the target value
     */
    public String target() {
        return this.target;
    }
    /**
     * Set the ID of the targeted resource, only VM is currently supported.
     *
     * @param target the target value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withTarget(String target) {
        this.target = target;
        return this;
    }
    /**
     * Get the number of bytes captured per packet, the remaining bytes are truncated.
     *
     * @return the bytesToCapturePerPacket value
     */
    public Integer bytesToCapturePerPacket() {
        return this.bytesToCapturePerPacket;
    }
    /**
     * Set the number of bytes captured per packet, the remaining bytes are truncated.
     *
     * @param bytesToCapturePerPacket the bytesToCapturePerPacket value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withBytesToCapturePerPacket(Integer bytesToCapturePerPacket) {
        this.bytesToCapturePerPacket = bytesToCapturePerPacket;
        return this;
    }
    /**
     * Get the maximum size of the capture output.
     *
     * @return the totalBytesPerSession value
     */
    public Integer totalBytesPerSession() {
        return this.totalBytesPerSession;
    }
    /**
     * Set the maximum size of the capture output.
     *
     * @param totalBytesPerSession the totalBytesPerSession value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withTotalBytesPerSession(Integer totalBytesPerSession) {
        this.totalBytesPerSession = totalBytesPerSession;
        return this;
    }
    /**
     * Get the maximum duration of the capture session in seconds.
     *
     * @return the timeLimitInSeconds value
     */
    public Integer timeLimitInSeconds() {
        return this.timeLimitInSeconds;
    }
    /**
     * Set the maximum duration of the capture session in seconds.
     *
     * @param timeLimitInSeconds the timeLimitInSeconds value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withTimeLimitInSeconds(Integer timeLimitInSeconds) {
        this.timeLimitInSeconds = timeLimitInSeconds;
        return this;
    }
    /**
     * Get the storageLocation value.
     *
     * @return the storageLocation value
     */
    public PacketCaptureStorageLocation storageLocation() {
        return this.storageLocation;
    }
    /**
     * Set the storageLocation value.
     *
     * @param storageLocation the storageLocation value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withStorageLocation(PacketCaptureStorageLocation storageLocation) {
        this.storageLocation = storageLocation;
        return this;
    }
    /**
     * Get the filters value.
     *
     * @return the filters value
     */
    public List<PacketCaptureFilter> filters() {
        return this.filters;
    }
    /**
     * Set the filters value.
     *
     * @param filters the filters value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withFilters(List<PacketCaptureFilter> filters) {
        this.filters = filters;
        return this;
    }
    /**
     * Get the provisioning state of the packet capture session. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     *
     * @return the provisioningState value
     */
    public ProvisioningState provisioningState() {
        return this.provisioningState;
    }
    /**
     * Set the provisioning state of the packet capture session. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     *
     * @param provisioningState the provisioningState value to set
     * @return the PacketCaptureResultInner object itself.
     */
    public PacketCaptureResultInner withProvisioningState(ProvisioningState provisioningState) {
        this.provisioningState = provisioningState;
        return this;
    }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2014, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Description:
// Partial implementation of a ClientPacketHandler
// ----------------------------------------------------------------------------
// Change History:
// 2006/03/26 Martin D. Flynn
// -Initial release
// 2006/06/30 Martin D. Flynn
// -Repackaged
// 2009/04/02 Martin D. Flynn
// -Added 'getMinimumPacketLength' and 'getMaximumPacketLength'
// 2011/05/13 Martin D. Flynn
// -Added several convenience functions.
// ----------------------------------------------------------------------------
package org.opengts.util;
import java.util.*;
import java.net.*;
import javax.net.*;
//import javax.net.ssl.*;
/**
*** An abstract implementation of the <code>ClientPacketHandler</code> interface
**/
public abstract class AbstractClientPacketHandler
implements ClientPacketHandler
{
    // ------------------------------------------------------------------------
    /* re-exported packet-length markers from ServerSocketThread */
    public static final int PACKET_LEN_LINE_TERMINATOR = ServerSocketThread.PACKET_LEN_LINE_TERMINATOR;
    public static final int PACKET_LEN_END_OF_STREAM = ServerSocketThread.PACKET_LEN_END_OF_STREAM;
    /* GMT/UTC timezone */
    public static final TimeZone GMT_Timezone = DateTime.getGMTTimeZone();
// ------------------------------------------------------------------------
    private static boolean DebugMode = false; // global (process-wide) debug flag
    /**
    *** Sets the global debug mode
    *** @param debug The new global debug-mode state
    **/
    public static void SetDebugMode(boolean debug)
    {
        AbstractClientPacketHandler.DebugMode = debug;
    }
    /**
    *** Gets the global debug mode
    *** @return The current global debug-mode state
    **/
    public static boolean GetDebugMode()
    {
        return AbstractClientPacketHandler.DebugMode;
    }
    /**
    *** Gets the global debug mode (alias of "GetDebugMode")
    *** @return The current global debug-mode state
    **/
    public static boolean IsDebugMode()
    {
        return AbstractClientPacketHandler.DebugMode;
    }
    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    /* process-wide counter (guarded by "SequenceLock") used to assign each
    ** handler instance a unique sequence-ID */
    private static long SequenceCount = 1L;
    private static Object SequenceLock = new Object();
    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    private long sequenceID = 0L; // unique per-instance ID (assigned in constructor)
    private long sessStartTime = 0L; // session start time (Epoch seconds)
    private InetAddress inetAddr = null; // remote client address
    private String hostAddress = null; // cached string form of the client address
    private boolean isDuplex = true; // tcp
    private boolean isTextPackets = false; // true if packets are line-oriented text
    private boolean promptEnabled = true; // whether a prompt is sent to the client
    private boolean terminateSess = true; // always terminate by default
    private ServerSocketThread.SessionInfo sessionInfo = null; // session callback interface
    private int savedEventCount = 0; // DCS use only
    /**
    *** Default constructor.  Assigns this handler a unique sequence-ID drawn
    *** from a process-wide counter (under "SequenceLock").
    **/
    public AbstractClientPacketHandler()
    {
        super();
        synchronized (AbstractClientPacketHandler.SequenceLock) {
            this.sequenceID = AbstractClientPacketHandler.SequenceCount++;
        }
    }
// ------------------------------------------------------------------------
public long getSequenceID()
{
return this.sequenceID;
}
public boolean equals(Object other)
{
if (other instanceof AbstractClientPacketHandler) {
return (this.getSequenceID() == ((AbstractClientPacketHandler)other).getSequenceID());
} else {
return false;
}
}
    // ------------------------------------------------------------------------
    /**
    *** Sets the session info handler (called by the owning ServerSocketThread)
    *** @param sessionInfo An implementation of the ServerSocketThread.SessionInfo interface
    **/
    public void setSessionInfo(ServerSocketThread.SessionInfo sessionInfo)
    {
        this.sessionInfo = sessionInfo;
    }
    /**
    *** Gets a reference to the ClientPacketHandler's session info implementation
    *** @return Reference to the session info object (may be null if not yet set)
    **/
    public ServerSocketThread.SessionInfo getSessionInfo()
    {
        return this.sessionInfo;
    }
/**
*** Returns name of the thread handling this client session
*** @return The name of the thread handling this client session
**/
public String getThreadName()
{
ServerSocketThread.SessionInfo si = this.getSessionInfo();
Thread st = (si != null)? si.getSessionThread() : null;
return (st != null)? st.getName() : null;
}
/**
*** Gets the local port to which this socket is bound
*** @return The local port to which this socket is bound
**/
public int getLocalPort()
{
ServerSocketThread.SessionInfo si = this.getSessionInfo();
return (si != null)? si.getLocalPort() : -1;
}
/**
*** Gets the remote/client port used by the client to send the received packet
*** @return The client remote port
**/
public int getRemotePort()
{
ServerSocketThread.SessionInfo si = this.getSessionInfo();
return (si != null)? si.getRemotePort() : -1;
}
// ------------------------------------------------------------------------
/**
*** Enables/disables the client prompt
*** @param enable True to enable the prompt, false to disable
**/
public void setPromptEnabled(boolean enable)
{
    this.promptEnabled = enable;
}

/**
*** Returns the current prompt-enabled state
*** @return True if the prompt is enabled, false otherwise
**/
public boolean getPromptEnabled()
{
    return this.promptEnabled;
}

// ------------------------------------------------------------------------

/**
*** Writes the specified bytes to the TCP output stream
*** @param data The data bytes to write
*** @return True if the bytes were written, false otherwise
**/
public boolean tcpWrite(byte data[])
{
    ServerSocketThread.SessionInfo si = this.getSessionInfo();
    return (si != null) && si.tcpWrite(data);
}
// ------------------------------------------------------------------------
/**
*** Callback invoked when a new client session has started
*** @param inetAddr The remote client address
*** @param isDuplex True if this is a duplex (ie. TCP) session
*** @param isText   True if the packets are text
**/
public void sessionStarted(InetAddress inetAddr, boolean isDuplex, boolean isText)
{
    this.sessStartTime = DateTime.getCurrentTimeSec();
    this.inetAddr      = inetAddr;
    this.isDuplex      = isDuplex;
    this.isTextPackets = isText;
    this.clearSavedEventCount();
    this.printSessionStart();
}

/**
*** Logs the session-start message.
*** (override to disable)
**/
protected void printSessionStart()
{
    String sType = this.getSessionType();
    String hAddr = StringTools.trim(this.getHostAddress());
    String tName = StringTools.blankDefault(this.getThreadName(),"?");
    Print.logInfo("Begin " + sType + " session (" + tName + "): " + hAddr);
}
// ------------------------------------------------------------------------
/**
*** Returns true if the specified session-id matches the current session-id
*** @param sessionID The session-id to test (specifying null always returns false)
*** @return True if the session-ids match, false otherwise
**/
public boolean equalsSessionID(String sessionID)
{
    if (sessionID == null) {
        // no target session-id specified
        Print.logWarn("No target SessionID");
        return false;
    }
    String currSessID = this.getSessionID();
    if (currSessID == null) {
        // this handler has no session-id (subclass did not override 'getSessionID')
        return false;
    }
    return currSessID.equals(sessionID);
}

/**
*** Returns the session-id for this handler (subclass override only)
*** @return The session-id (this default implementation returns null)
**/
protected String getSessionID()
{
    return null;
}
// ------------------------------------------------------------------------
/**
*** Returns the time at which this session started
*** (seconds, as returned by DateTime.getCurrentTimeSec)
*** @return The session start time
**/
public long getSessionStartTime()
{
    return this.sessStartTime;
}

// ------------------------------------------------------------------------

/**
*** Returns true if the packets handled by this session are text
*** @return True if the packets are text
**/
protected boolean isTextPackets()
{
    return this.isTextPackets;
}
// ------------------------------------------------------------------------
/**
*** Returns true if this session is duplex (ie. TCP)
*** @return True if duplex
**/
public boolean isDuplex()
{
    return this.isDuplex;
}

/**
*** Returns true if this session is TCP
*** @return True if TCP
**/
public boolean isTCP()
{
    ServerSocketThread.SessionInfo si = this.getSessionInfo();
    if (si != null) {
        return si.isTCP();
    }
    // no session-info: fall back on the duplex flag
    return this.isDuplex;
}

/**
*** Returns true if this session is UDP
*** @return True if UDP
**/
public boolean isUDP()
{
    ServerSocketThread.SessionInfo si = this.getSessionInfo();
    if (si != null) {
        return si.isUDP();
    }
    return !this.isTCP();
}

/**
*** Returns true if this session reads from an InputStream
*** @return True if InputStream-based (false when unknown)
**/
public boolean isInputStream()
{
    ServerSocketThread.SessionInfo si = this.getSessionInfo();
    if (si != null) {
        return si.isInputStream();
    }
    return false; // unknown
}

/**
*** Gets the current session type name (ie. "TCP", "UDP", "InputStream")
*** @return The current session type name
**/
public String getSessionType()
{
    if (this.isTCP()) {
        return "TCP";
    }
    if (this.isUDP()) {
        return "UDP";
    }
    if (this.isInputStream()) {
        return "InputStream";
    }
    return "UNKNOWN";
}
// ------------------------------------------------------------------------
/**
*** Gets the IP address of the remote host
*** @return The remote host InetAddress (may be null)
**/
public InetAddress getInetAddress()
{
    return this.inetAddr;
}

/**
*** Gets the IP address string of the remote host
*** (the String form is computed once and cached in 'hostAddress')
*** @return The remote host address, or null if unavailable
**/
public String getHostAddress()
{
    if ((this.hostAddress == null) && (this.inetAddr != null)) {
        this.hostAddress = this.inetAddr.getHostAddress();
    }
    return this.hostAddress;
}

/**
*** Returns true if a remote host address is available
*** @return True if a remote host address is available
**/
public boolean hasHostAddress()
{
    return (this.getHostAddress() != null);
}

/**
*** Gets the IP address string of the remote host (same as 'getHostAddress')
*** @return The remote host address, or null if unavailable
**/
public String getIPAddress()
{
    return this.getHostAddress();
}

/**
*** Returns true if a remote host address is available (same as 'hasHostAddress')
*** @return True if a remote host address is available
**/
public boolean hasIPAddress()
{
    return (this.getHostAddress() != null);
}
// ------------------------------------------------------------------------
/**
*** Sets the saved-event count to the specified value (DCS use only)
*** @param count The new event count
**/
public void setSavedEventCount(int count)
{
    this.savedEventCount = count;
}

/**
*** Resets the saved-event count to zero
**/
public void clearSavedEventCount()
{
    this.setSavedEventCount(0);
}

/**
*** Increments the saved-event count by one
**/
public void incrementSavedEventCount()
{
    this.savedEventCount++;
}

/**
*** Returns true if the saved-event count is greater than zero
*** @return True if any events have been counted
**/
public boolean hasSavedEvents()
{
    return (this.savedEventCount > 0);
}

/**
*** Gets the current saved-event count
*** @return The current saved-event count
**/
public int getSavedEventCount()
{
    return this.savedEventCount;
}

// ------------------------------------------------------------------------

/**
*** Returns the client response port#
*** @return The response port (this default implementation returns 0)
**/
public int getResponsePort()
{
    return 0;
}
// ------------------------------------------------------------------------
/**
*** Returns the minimum packet length
*** @return The minimum packet length (-1 to defer to 'ServerSocketThread')
**/
public int getMinimumPacketLength()
{
    return -1; // '-1' indicates that 'ServerSocketThread' should be used
}

/**
*** Returns the maximum packet length
*** @return The maximum packet length (-1 to defer to 'ServerSocketThread')
**/
public int getMaximumPacketLength()
{
    return -1; // '-1' indicates that 'ServerSocketThread' should be used
}

// ------------------------------------------------------------------------

/**
*** Returns the initial packet that should be sent to the device upon
*** opening the socket connection
*** @return The initial packet (this default implementation returns null)
**/
public byte[] getInitialPacket()
    throws Exception
{
    return null;
}

/**
*** Returns the final packet that should be sent to the device before
*** closing the socket connection
*** @param hasError True if the session is terminating with an error
*** @return The final packet (this default implementation returns null)
**/
public byte[] getFinalPacket(boolean hasError)
    throws Exception
{
    return null;
}

// ------------------------------------------------------------------------

/**
*** Callback to obtain the length of the next packet, based on the provided
*** partial packet data
*** @param packet    The partial packet data received so far
*** @param packetLen The number of bytes received so far
*** @return The actual packet length
**/
public int getActualPacketLength(byte packet[], int packetLen)
{
    if (this.isTextPackets) {
        // text packets are read up to the line terminator
        return PACKET_LEN_LINE_TERMINATOR;
    }
    return packetLen;
}

/**
*** Parses the provided packet and returns any response that should be
*** sent back to the remote device
*** @param cmd The received packet data
*** @return The response bytes (or null for no response)
**/
public abstract byte[] getHandlePacket(byte cmd[])
    throws Exception;

/**
*** Callback: timeout interrupt.
*** Called periodically during an idle read; the period is based on the value
*** specified on the call to "<code>ServerSocketThread.setMinimuTimeoutIntervalMS</code>"
**/
public void idleTimeoutInterrupt()
{
    // override to implement
}
// ------------------------------------------------------------------------
/**
*** Forces the current TCP session to close
**/
public void forceCloseTCPSession()
{
    this.setTerminateSession();
    ServerSocketThread.SessionInfo si = this.getSessionInfo();
    if (si != null) {
        si.forceCloseTCPSession();
    }
}

/**
*** Sets the terminate-session state to the specified value
*** @param term The new terminate-session state
**/
public void setTerminateSession(boolean term)
{
    this.terminateSess = term;
}

/**
*** Sets the terminate-session state to true
**/
public void setTerminateSession()
{
    this.setTerminateSession(true);
}

/**
*** Clears the terminate-session state (sets it to false)
**/
public void clearTerminateSession()
{
    this.setTerminateSession(false);
}

/**
*** Callback to determine if the current session should be terminated
*** @return True if the session should be terminated
**/
public boolean getTerminateSession()
{
    return this.terminateSess;
}

/**
*** Callback to determine if the current session should be terminated
*** @return True if the session should be terminated
*** @deprecated use 'getTerminateSession' instead
**/
public boolean terminateSession() // OBSOLETE
{
    return this.getTerminateSession();
}

/**
*** Callback invoked just before the session is terminated
*** @param err        The error that ended the session (null if none)
*** @param readCount  The session read count
*** @param writeCount The session write count
**/
public void sessionTerminated(Throwable err, long readCount, long writeCount)
{
    this.printSessionTerminated();
}

/**
*** Logs the session-termination message.
*** (override to disable)
**/
protected void printSessionTerminated()
{
    String sType = this.getSessionType();
    String hAddr = StringTools.trim(this.getHostAddress());
    String tName = StringTools.blankDefault(this.getThreadName(),"?");
    Print.logInfo("End " + sType + " session (" + tName + "): " + hAddr);
}
// ------------------------------------------------------------------------
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service.client;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.HashMap;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes.*;
/**
* Test case for CLI to API Service.
*
*/
public class TestApiServiceClient {

  // client wired to the live mock server (expected to succeed)
  private static ApiServiceClient asc;
  // client wired to an address with no listener (expected to fail)
  private static ApiServiceClient badAsc;
  // embedded Jetty server backing the mock API Service
  private static Server server;

  /**
   * A mocked version of API Service for testing purpose.
   * Every HTTP verb simply answers 200 OK.
   */
  @SuppressWarnings("serial")
  public static class TestServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
      System.out.println("Get was called");
      resp.setStatus(HttpServletResponse.SC_OK);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
      resp.setStatus(HttpServletResponse.SC_OK);
    }

    @Override
    protected void doPut(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
      resp.setStatus(HttpServletResponse.SC_OK);
    }

    @Override
    protected void doDelete(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
      resp.setStatus(HttpServletResponse.SC_OK);
    }
  }

  /**
   * Starts the embedded mock server and initializes both clients.
   */
  @BeforeClass
  public static void setup() throws Exception {
    // Bind to an ephemeral port (0) instead of hard-coding 8088, so the test
    // does not fail when another process already occupies that port.
    server = new Server(0);
    ((QueuedThreadPool) server.getThreadPool()).setMaxThreads(10);
    ServletContextHandler context = new ServletContextHandler();
    context.setContextPath("/app");
    server.setHandler(context);
    context.addServlet(new ServletHolder(TestServlet.class), "/*");
    ServerConnector connector = (ServerConnector) server.getConnectors()[0];
    connector.setHost("localhost");
    server.start();

    // the actual port is only known after start()
    Configuration conf = new Configuration();
    conf.set("yarn.resourcemanager.webapp.address",
        "localhost:" + connector.getLocalPort());
    asc = new ApiServiceClient();
    asc.serviceInit(conf);

    // second client points at an address with no listener; ephemeral ports
    // are allocated well above 8089, so this cannot collide with the server
    Configuration conf2 = new Configuration();
    conf2.set("yarn.resourcemanager.webapp.address",
        "localhost:8089");
    badAsc = new ApiServiceClient();
    badAsc.serviceInit(conf2);
  }

  /**
   * Stops the embedded mock server.
   */
  @AfterClass
  public static void tearDown() throws Exception {
    server.stop();
  }

  @Test
  public void testLaunch() {
    String fileName = "target/test-classes/example-app.json";
    String appName = "example-app";
    long lifetime = 3600L;
    String queue = "default";
    try {
      int result = asc.actionLaunch(fileName, appName, lifetime, queue);
      assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
      // include the cause so a failure is diagnosable from the report
      fail(e.toString());
    }
  }

  @Test
  public void testBadLaunch() {
    String fileName = "unknown_file";
    String appName = "unknown_app";
    long lifetime = 3600L;
    String queue = "default";
    try {
      int result = badAsc.actionLaunch(fileName, appName, lifetime, queue);
      assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testStop() {
    String appName = "example-app";
    try {
      int result = asc.actionStop(appName);
      assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testBadStop() {
    String appName = "unknown_app";
    try {
      int result = badAsc.actionStop(appName);
      assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testStart() {
    String appName = "example-app";
    try {
      int result = asc.actionStart(appName);
      assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testBadStart() {
    String appName = "unknown_app";
    try {
      int result = badAsc.actionStart(appName);
      assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testSave() {
    String fileName = "target/test-classes/example-app.json";
    String appName = "example-app";
    long lifetime = 3600L;
    String queue = "default";
    try {
      int result = asc.actionSave(fileName, appName, lifetime, queue);
      assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testBadSave() {
    String fileName = "unknown_file";
    String appName = "unknown_app";
    long lifetime = 3600L;
    String queue = "default";
    try {
      int result = badAsc.actionSave(fileName, appName, lifetime, queue);
      assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testFlex() {
    String appName = "example-app";
    HashMap<String, String> componentCounts = new HashMap<String, String>();
    try {
      int result = asc.actionFlex(appName, componentCounts);
      assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testBadFlex() {
    String appName = "unknown_app";
    HashMap<String, String> componentCounts = new HashMap<String, String>();
    try {
      int result = badAsc.actionFlex(appName, componentCounts);
      assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testDestroy() {
    String appName = "example-app";
    try {
      int result = asc.actionDestroy(appName);
      assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

  @Test
  public void testBadDestroy() {
    String appName = "unknown_app";
    try {
      int result = badAsc.actionDestroy(appName);
      assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
      fail(e.toString());
    }
  }

}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatchevidently.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/evidently-2021-02-01/EvaluateFeature" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EvaluateFeatureResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * If this user was assigned to a launch or experiment, this field lists the launch or experiment name. Holds valid
     * JSON (RFC 7159); the SDK Base64-encodes it before sending, so callers should not.
     * </p>
     */
    private String details;

    /**
     * <p>
     * Reason the user session was assigned this variation: <code>DEFAULT</code>, <code>LAUNCH_RULE_MATCH</code>,
     * <code>EXPERIMENT_RULE_MATCH</code>, or <code>ENTITY_OVERRIDES_MATCH</code>.
     * </p>
     */
    private String reason;

    /**
     * <p>
     * The value assigned to this variation to differentiate it from the other variations of this feature.
     * </p>
     */
    private VariableValue value;

    /**
     * <p>
     * The name of the variation that was served to the user session.
     * </p>
     */
    private String variation;

    /**
     * Sets the launch/experiment details for this evaluation.
     *
     * @param details
     *        If this user was assigned to a launch or experiment, this field lists the launch or experiment name.
     */
    public void setDetails(String details) {
        this.details = details;
    }

    /**
     * Gets the launch/experiment details for this evaluation.
     *
     * @return If this user was assigned to a launch or experiment, this field lists the launch or experiment name.
     */
    public String getDetails() {
        return this.details;
    }

    /**
     * Fluent variant of {@link #setDetails(String)}.
     *
     * @param details
     *        If this user was assigned to a launch or experiment, this field lists the launch or experiment name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EvaluateFeatureResult withDetails(String details) {
        setDetails(details);
        return this;
    }

    /**
     * Sets the reason the user session was assigned this variation.
     *
     * @param reason
     *        One of <code>DEFAULT</code>, <code>LAUNCH_RULE_MATCH</code>, <code>EXPERIMENT_RULE_MATCH</code>, or
     *        <code>ENTITY_OVERRIDES_MATCH</code>.
     */
    public void setReason(String reason) {
        this.reason = reason;
    }

    /**
     * Gets the reason the user session was assigned this variation.
     *
     * @return One of <code>DEFAULT</code>, <code>LAUNCH_RULE_MATCH</code>, <code>EXPERIMENT_RULE_MATCH</code>, or
     *         <code>ENTITY_OVERRIDES_MATCH</code>.
     */
    public String getReason() {
        return this.reason;
    }

    /**
     * Fluent variant of {@link #setReason(String)}.
     *
     * @param reason
     *        One of <code>DEFAULT</code>, <code>LAUNCH_RULE_MATCH</code>, <code>EXPERIMENT_RULE_MATCH</code>, or
     *        <code>ENTITY_OVERRIDES_MATCH</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EvaluateFeatureResult withReason(String reason) {
        setReason(reason);
        return this;
    }

    /**
     * Sets the value assigned to this variation.
     *
     * @param value
     *        The value assigned to this variation to differentiate it from the other variations of this feature.
     */
    public void setValue(VariableValue value) {
        this.value = value;
    }

    /**
     * Gets the value assigned to this variation.
     *
     * @return The value assigned to this variation to differentiate it from the other variations of this feature.
     */
    public VariableValue getValue() {
        return this.value;
    }

    /**
     * Fluent variant of {@link #setValue(VariableValue)}.
     *
     * @param value
     *        The value assigned to this variation to differentiate it from the other variations of this feature.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EvaluateFeatureResult withValue(VariableValue value) {
        setValue(value);
        return this;
    }

    /**
     * Sets the name of the variation that was served to the user session.
     *
     * @param variation
     *        The name of the variation that was served to the user session.
     */
    public void setVariation(String variation) {
        this.variation = variation;
    }

    /**
     * Gets the name of the variation that was served to the user session.
     *
     * @return The name of the variation that was served to the user session.
     */
    public String getVariation() {
        return this.variation;
    }

    /**
     * Fluent variant of {@link #setVariation(String)}.
     *
     * @param variation
     *        The name of the variation that was served to the user session.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EvaluateFeatureResult withVariation(String variation) {
        setVariation(variation);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDetails() != null)
            sb.append("Details: ").append(getDetails()).append(",");
        if (getReason() != null)
            sb.append("Reason: ").append(getReason()).append(",");
        if (getValue() != null)
            sb.append("Value: ").append(getValue()).append(",");
        if (getVariation() != null)
            sb.append("Variation: ").append(getVariation());
        sb.append("}");
        return sb.toString();
    }

    /** Null-safe equality helper (both-null counts as equal). */
    private static boolean eq(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof EvaluateFeatureResult))
            return false;
        EvaluateFeatureResult that = (EvaluateFeatureResult) obj;
        return eq(getDetails(), that.getDetails())
                && eq(getReason(), that.getReason())
                && eq(getValue(), that.getValue())
                && eq(getVariation(), that.getVariation());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // same 31-based accumulation as the generated original, just folded into a loop
        for (Object field : new Object[] { getDetails(), getReason(), getValue(), getVariation() }) {
            hashCode = prime * hashCode + ((field == null) ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public EvaluateFeatureResult clone() {
        try {
            return (EvaluateFeatureResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
package com.shaubert.ui.phone;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatDialog;
import androidx.appcompat.widget.SearchView;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
/**
* Created by GODARD Tuatini on 07/05/15.
*/
public class CountryPickerDialog extends AppCompatDialog {
private Countries countries;
private SearchView searchView;
private RecyclerView recyclerView;
private CountryListAdapter adapter;
private CountriesFilter countriesFilter;
private CountryPickerCallbacks callbacks;
private String scrollToCountryIsoCode;
private Handler handler;
private boolean hideKeyboardOnDismiss;
public CountryPickerDialog(Context context) {
this(context, null);
}
public CountryPickerDialog(Context context, @Nullable CountryPickerCallbacks callbacks) {
this(context, callbacks, null);
}
/**
* You can set the scrollToCountryIsoCode to scroll to your favorite country
*
* @param context
* @param callbacks
* @param scrollToCountryIsoCode
*/
public CountryPickerDialog(Context context, @Nullable CountryPickerCallbacks callbacks, @Nullable String scrollToCountryIsoCode) {
super(context);
supportRequestWindowFeature(Window.FEATURE_NO_TITLE);
handler = new Handler();
this.callbacks = callbacks;
this.scrollToCountryIsoCode = scrollToCountryIsoCode;
loadCountries();
}
private void loadCountries() {
Countries.get(new Countries.Callback() {
@Override
public void onLoaded(Countries loadedCountries) {
if (countries == null) {
setCountries(loadedCountries);
}
}
});
}
public void setCustomCountries(Countries countries) {
setCountries(countries);
if (countries == null) {
loadCountries();
}
}
private void setCountries(Countries countries) {
this.countries = countries;
if (adapter != null) {
adapter.setCountries(countries);
}
}
public void setCountriesFilter(CountriesFilter countriesFilter) {
this.countriesFilter = countriesFilter;
if (adapter != null) {
adapter.setCountriesFilter(countriesFilter);
}
}
public void setCallbacks(CountryPickerCallbacks callbacks) {
this.callbacks = callbacks;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.pi_country_picker);
Window window = getWindow();
if (window != null) {
window.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE);
}
recyclerView = findViewById(R.id.pi_country_picker_list);
recyclerView.setLayoutManager(new LinearLayoutManager(getContext()));
adapter = new CountryListAdapter();
adapter.setCountries(countries);
adapter.setCountriesFilter(countriesFilter);
adapter.setItemClickListener(new CountryListAdapter.ItemClickListener() {
@Override
public void onItemClicked(View view, Country country) {
dismiss();
if (callbacks != null) {
callbacks.onCountrySelected(country);
}
}
});
recyclerView.setAdapter(adapter);
searchView = (SearchView) findViewById(R.id.pi_search_view);
searchView.setIconified(false);
searchView.setOnCloseListener(new SearchView.OnCloseListener() {
@Override
public boolean onClose() {
dismiss();
return true;
}
});
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
filterCountries();
return true;
}
@Override
public boolean onQueryTextChange(String newText) {
filterCountries();
return true;
}
});
searchView.setOnSearchClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
filterCountries();
}
});
if (savedInstanceState == null) {
handler.post(new Runnable() {
@Override
public void run() {
scrollToCountry(scrollToCountryIsoCode);
}
});
}
}
@Override
public void dismiss() {
hideKeyboardOnDismissMaybe();
super.dismiss();
}
@Override
public void hide() {
hideKeyboardOnDismissMaybe();
super.hide();
}
private void hideKeyboardOnDismissMaybe() {
if (!hideKeyboardOnDismiss) return;
if (searchView == null) return;
InputMethodManager imm = (InputMethodManager) searchView.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(searchView.getWindowToken(), 0);
imm.hideSoftInputFromWindow(searchView.getApplicationWindowToken(), 0);
}
/** Pushes the search box's current text into the adapter as the filter query. */
private void filterCountries() {
    final String query = searchView.getQuery().toString();
    adapter.setQuery(query);
}
/**
 * Scrolls the list to the first country whose ISO code equals
 * {@code countryIsoCode}, ignoring case. Does nothing for a null/empty code
 * or when no country matches.
 */
public void scrollToCountry(String countryIsoCode) {
    if (TextUtils.isEmpty(countryIsoCode)) {
        return;
    }
    for (int position = 0; position < adapter.getItemCount(); position++) {
        Country candidate = adapter.getItem(position);
        if (!candidate.getIsoCode().equalsIgnoreCase(countryIsoCode)) {
            continue;
        }
        // Prefer scrollToPositionWithOffset so the matched row lands at the top.
        RecyclerView.LayoutManager manager = recyclerView.getLayoutManager();
        if (manager instanceof LinearLayoutManager) {
            ((LinearLayoutManager) manager).scrollToPositionWithOffset(position, 0);
        } else {
            recyclerView.scrollToPosition(position);
        }
        return;
    }
}
/**
 * Controls whether the soft keyboard is hidden when this dialog is dismissed
 * or hidden.
 *
 * @param hideKeyboardOnDismiss true to hide the keyboard on dismiss/hide
 */
public void setHideKeyboardOnDismiss(boolean hideKeyboardOnDismiss) {
    this.hideKeyboardOnDismiss = hideKeyboardOnDismiss;
}
}
| |
package com.forest.web;
import com.forest.ejb.OrderBean;
import com.forest.ejb.OrderJMSManager;
import com.forest.entity.CustomerOrder;
import com.forest.entity.Person;
import com.forest.qualifiers.LoggedIn;
import com.forest.web.util.AbstractPaginationHelper;
import com.forest.web.util.JsfUtil;
import com.forest.web.util.PageNavigation;
import java.io.Serializable;
import java.util.List;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.enterprise.context.SessionScoped;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.FacesConverter;
import javax.faces.model.DataModel;
import javax.faces.model.ListDataModel;
import javax.faces.model.SelectItem;
import javax.inject.Inject;
import javax.inject.Named;
/**
 * Session-scoped JSF controller providing CRUD, pagination and search over
 * {@link CustomerOrder} entities, plus the authenticated customer's own order
 * list and manual order cancellation via JMS.
 */
@Named(value = "customerOrderController")
@SessionScoped
public class CustomerOrderController implements Serializable {

    /** Name of the resource bundle holding localized UI messages. */
    private static final String BUNDLE = "bundles.Bundle";
    private static final long serialVersionUID = 8606060319870740714L;

    /** Currently authenticated customer; null when nobody is logged in. */
    @Inject
    @LoggedIn
    private Person user;
    private List<CustomerOrder> myOrders;
    /** Order currently selected in the UI; lazily created by getSelected(). */
    private CustomerOrder current;
    /** Cached page data model; reset to null (recreateModel) to force a reload. */
    private DataModel items = null;
    @EJB
    private com.forest.ejb.OrderBean ejbFacade;
    @EJB
    private OrderJMSManager orderJMSManager;
    private AbstractPaginationHelper pagination;
    /** Absolute index of the selected row, or -1 when nothing is selected. */
    private int selectedItemIndex;
    private String searchString;
    private static final Logger logger = Logger.getLogger(CustomerOrderController.class.getCanonicalName());

    public CustomerOrderController() {
    }

    /**
     * Returns the selected order, creating an empty one (and clearing the
     * selection index) when nothing is selected yet.
     */
    public CustomerOrder getSelected() {
        if (current == null) {
            current = new CustomerOrder();
            selectedItemIndex = -1;
        }
        return current;
    }

    private OrderBean getFacade() {
        return ejbFacade;
    }

    /**
     * Lazily builds the pagination helper (page size 10), backed by the order
     * facade's count/findRange operations.
     */
    public AbstractPaginationHelper getPagination() {
        if (pagination == null) {
            pagination = new AbstractPaginationHelper(10) {
                @Override
                public int getItemsCount() {
                    return getFacade().count();
                }
                @Override
                public DataModel createPageDataModel() {
                    return new ListDataModel(getFacade().findRange(new int[]{getPageFirstItem(), getPageFirstItem() + getPageSize()}));
                }
            };
        }
        return pagination;
    }

    /** Refreshes the data model and navigates to the list page. */
    public PageNavigation prepareList() {
        recreateModel();
        return PageNavigation.LIST;
    }

    /** Selects the clicked row and navigates to the detail view. */
    public PageNavigation prepareView() {
        current = (CustomerOrder) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        return PageNavigation.VIEW;
    }

    /** Clears the selection and navigates to the create page. */
    public PageNavigation prepareCreate() {
        current = new CustomerOrder();
        selectedItemIndex = -1;
        return PageNavigation.CREATE;
    }

    /**
     * Persists the order being edited. On success a fresh create page is shown;
     * on failure an error message is queued and navigation stays put.
     */
    public PageNavigation create() {
        try {
            getFacade().create(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle(BUNDLE).getString("CustomerOrderCreated"));
            return prepareCreate();
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle(BUNDLE).getString("PersistenceErrorOccured"));
            return null;
        }
    }

    /** Selects the clicked row and navigates to the edit page. */
    public PageNavigation prepareEdit() {
        current = (CustomerOrder) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        return PageNavigation.EDIT;
    }

    /**
     * Saves changes to the selected order and returns to the detail view, or
     * stays put with an error message when persistence fails.
     */
    public PageNavigation update() {
        try {
            getFacade().edit(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle(BUNDLE).getString("CustomerOrderUpdated"));
            return PageNavigation.VIEW;
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle(BUNDLE).getString("PersistenceErrorOccured"));
            return null;
        }
    }

    /** Deletes the clicked row, refreshes the model and shows the list page. */
    public PageNavigation destroy() {
        current = (CustomerOrder) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        performDestroy();
        recreateModel();
        return PageNavigation.LIST;
    }

    /**
     * Cancels the clicked order: removes its message from the JMS queue and
     * marks the persisted order as manually cancelled.
     *
     * @return the list page on success, the index page when cancellation fails
     */
    public PageNavigation cancelOrder() {
        current = (CustomerOrder) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        try {
            // remove from JMS queue
            orderJMSManager.deleteMessage(current.getId());
            // update DB order status
            ejbFacade.setOrderStatus(current.getId(), String.valueOf(OrderBean.Status.CANCELLED_MANUAL.getStatus()));
            recreateModel();
            return PageNavigation.LIST;
        } catch (Exception ex) {
            // Log with the full stack trace instead of printStackTrace(), and
            // surface the failure to the user like the other persistence methods.
            logger.log(Level.SEVERE, "Failed to cancel order", ex);
            JsfUtil.addErrorMessage(ex, ResourceBundle.getBundle(BUNDLE).getString("PersistenceErrorOccured"));
        }
        return PageNavigation.INDEX;
    }

    /**
     * Returns the authenticated customer's orders, or null when the customer
     * has none or nobody is logged in (an error message is queued in the
     * latter case).
     */
    public List<CustomerOrder> getMyOrders() {
        if (user != null) {
            myOrders = getFacade().getOrderByCustomerId(user.getId());
            if (myOrders.isEmpty()) {
                logger.log(Level.FINEST, "Customer {0} has no orders to display.", user.getEmail());
                return null;
            } else {
                logger.log(Level.FINEST, "Order amount:{0}", myOrders.get(0).getAmount());
                return myOrders;
            }
        } else {
            JsfUtil.addErrorMessage("Current user is not authenticated. Please do login before accessing your orders.");
            return null;
        }
    }

    /**
     * Deletes the selected order, then either re-displays the (adjusted)
     * selection or falls back to the list when no items remain.
     */
    public PageNavigation destroyAndView() {
        performDestroy();
        recreateModel();
        updateCurrentItem();
        if (selectedItemIndex >= 0) {
            return PageNavigation.VIEW;
        } else {
            // all items were removed - go back to list
            recreateModel();
            return PageNavigation.LIST;
        }
    }

    /** Removes {@code current} via the facade, queuing a success or error message. */
    private void performDestroy() {
        try {
            getFacade().remove(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle(BUNDLE).getString("CustomerOrderDeleted"));
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle(BUNDLE).getString("PersistenceErrorOccured"));
        }
    }

    /** Re-fetches {@code current} after a deletion may have shifted the indexes. */
    private void updateCurrentItem() {
        int count = getFacade().count();
        if (selectedItemIndex >= count) {
            // selected index cannot be bigger than number of items:
            selectedItemIndex = count - 1;
            // go to previous page if last page disappeared:
            if (pagination.getPageFirstItem() >= count) {
                pagination.previousPage();
            }
        }
        if (selectedItemIndex >= 0) {
            current = getFacade().findRange(new int[]{selectedItemIndex, selectedItemIndex + 1}).get(0);
        }
    }

    /** Lazily builds the data model for the current page. */
    public DataModel getItems() {
        if (items == null) {
            items = getPagination().createPageDataModel();
        }
        return items;
    }

    /** Drops the cached model so the next getItems() call reloads it. */
    private void recreateModel() {
        items = null;
    }

    /** Advances one page and navigates to the list. */
    public PageNavigation next() {
        getPagination().nextPage();
        recreateModel();
        return PageNavigation.LIST;
    }

    /** Goes back one page and navigates to the list. */
    public PageNavigation previous() {
        getPagination().previousPage();
        recreateModel();
        return PageNavigation.LIST;
    }

    public SelectItem[] getItemsAvailableSelectMany() {
        return JsfUtil.getSelectItems(ejbFacade.findAll(), false);
    }

    public SelectItem[] getItemsAvailableSelectOne() {
        return JsfUtil.getSelectItems(ejbFacade.findAll(), true);
    }

    /**
     * @return the searchString
     */
    public String getSearchString() {
        return searchString;
    }

    /**
     * @param searchString the searchString to set
     */
    public void setSearchString(String searchString) {
        this.searchString = searchString;
    }

    /** JSF converter mapping CustomerOrder entities to/from their integer id. */
    @FacesConverter(forClass = CustomerOrder.class)
    public static class CustomerOrderControllerConverter implements Converter {
        @Override
        public Object getAsObject(FacesContext facesContext, UIComponent component, String value) {
            if (value == null || value.length() == 0) {
                return null;
            }
            CustomerOrderController controller = (CustomerOrderController) facesContext.getApplication().getELResolver().
                    getValue(facesContext.getELContext(), null, "customerOrderController");
            return controller.ejbFacade.find(getKey(value));
        }

        java.lang.Integer getKey(String value) {
            java.lang.Integer key;
            key = Integer.valueOf(value);
            return key;
        }

        String getStringKey(java.lang.Integer value) {
            StringBuilder sb = new StringBuilder();
            sb.append(value);
            return sb.toString();
        }

        @Override
        public String getAsString(FacesContext facesContext, UIComponent component, Object object) {
            if (object == null) {
                return null;
            }
            if (object instanceof CustomerOrder) {
                CustomerOrder o = (CustomerOrder) object;
                return getStringKey(o.getId());
            } else {
                // Report the entity type the converter actually expects (the
                // generated message wrongly named the controller class).
                throw new IllegalArgumentException("object " + object + " is of type " + object.getClass().getName() + "; expected type: " + CustomerOrder.class.getName());
            }
        }
    }
}
| |
package org.apereo.cas.config;
import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.authentication.AuthenticationEventExecutionPlan;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.LdapAuthenticationHandler;
import org.apereo.cas.authentication.principal.DefaultPrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalResolver;
import org.apereo.cas.authentication.principal.resolvers.ChainingPrincipalResolver;
import org.apereo.cas.authentication.principal.resolvers.EchoingPrincipalResolver;
import org.apereo.cas.authentication.support.DefaultAccountStateHandler;
import org.apereo.cas.authentication.support.LdapPasswordPolicyConfiguration;
import org.apereo.cas.authentication.support.OptionalWarningAccountStateHandler;
import org.apereo.cas.config.support.authentication.AuthenticationEventExecutionPlanConfigurer;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.ldap.LdapAuthenticationProperties;
import org.apereo.cas.configuration.support.Beans;
import org.apereo.cas.services.ServicesManager;
import org.apereo.services.persondir.IPersonAttributeDao;
import org.ldaptive.auth.AuthenticationResponseHandler;
import org.ldaptive.auth.Authenticator;
import org.ldaptive.auth.ext.ActiveDirectoryAuthenticationResponseHandler;
import org.ldaptive.auth.ext.EDirectoryAuthenticationResponseHandler;
import org.ldaptive.auth.ext.FreeIPAAuthenticationResponseHandler;
import org.ldaptive.auth.ext.PasswordExpirationAuthenticationResponseHandler;
import org.ldaptive.auth.ext.PasswordPolicyAuthenticationResponseHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.Period;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
/**
 * This is {@link LdapAuthenticationConfiguration} that attempts to create
 * relevant authentication handlers for LDAP.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Configuration("ldapAuthenticationConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class LdapAuthenticationConfiguration {

    private static final Logger LOGGER = LoggerFactory.getLogger(LdapAuthenticationConfiguration.class);

    @Autowired
    private CasConfigurationProperties casProperties;

    @Autowired
    @Qualifier("personDirectoryPrincipalResolver")
    private PrincipalResolver personDirectoryPrincipalResolver;

    @Autowired
    @Qualifier("attributeRepositories")
    private List<IPersonAttributeDao> attributeRepositories;

    @Autowired
    @Qualifier("servicesManager")
    private ServicesManager servicesManager;

    @ConditionalOnMissingBean(name = "ldapPrincipalFactory")
    @Bean
    public PrincipalFactory ldapPrincipalFactory() {
        return new DefaultPrincipalFactory();
    }

    /**
     * Builds one {@link LdapAuthenticationHandler} per usable LDAP entry in
     * the CAS configuration; entries with no type or no URL are skipped.
     *
     * @return the configured, initialized handlers
     */
    @Bean
    public Collection<AuthenticationHandler> ldapAuthenticationHandlers() {
        final Collection<AuthenticationHandler> handlers = new HashSet<>();
        casProperties.getAuthn().getLdap()
                .stream()
                .filter(ldapInstanceConfigurationPredicate())
                .forEach(l -> {
                    final Map<String, String> attributes = Beans.transformPrincipalAttributesListIntoMap(l.getPrincipalAttributeList());
                    attributes.putAll(casProperties.getAuthn().getAttributeRepository().getAttributes());
                    LOGGER.debug("Created and mapped principal attributes [{}] for [{}]...", attributes, l.getLdapUrl());
                    LOGGER.debug("Creating ldap authenticator for [{}] and baseDn [{}]", l.getLdapUrl(), l.getBaseDn());
                    final Authenticator authenticator = Beans.newLdaptiveAuthenticator(l);
                    authenticator.setReturnAttributes(attributes.keySet().toArray(new String[0]));
                    LOGGER.debug("Ldap authenticator configured with return attributes [{}] for [{}] and baseDn [{}]",
                            attributes.keySet(), l.getLdapUrl(), l.getBaseDn());
                    LOGGER.debug("Creating ldap authentication handler for [{}]", l.getLdapUrl());
                    final LdapAuthenticationHandler handler = new LdapAuthenticationHandler(l.getName(), servicesManager, ldapPrincipalFactory(),
                            l.getOrder(), authenticator);
                    // Copy before mutating: appending the principal id attribute directly to
                    // the configuration's own list would grow it on every invocation of this
                    // bean method (and fail outright for immutable lists).
                    final List<String> additionalAttrs = new ArrayList<>(l.getAdditionalAttributes());
                    if (StringUtils.isNotBlank(l.getPrincipalAttributeId())) {
                        additionalAttrs.add(l.getPrincipalAttributeId());
                    }
                    handler.setAdditionalAttributes(additionalAttrs);
                    handler.setAllowMultiplePrincipalAttributeValues(l.isAllowMultiplePrincipalAttributeValues());
                    handler.setAllowMissingPrincipalAttributeValue(l.isAllowMissingPrincipalAttributeValue());
                    handler.setPasswordEncoder(Beans.newPasswordEncoder(l.getPasswordEncoder()));
                    handler.setPrincipalNameTransformer(Beans.newPrincipalNameTransformer(l.getPrincipalTransformation()));
                    if (StringUtils.isNotBlank(l.getCredentialCriteria())) {
                        LOGGER.debug("Ldap authentication for [{}] is filtering credentials by [{}]", l.getLdapUrl(), l.getCredentialCriteria());
                        final Predicate<String> predicate = Pattern.compile(l.getCredentialCriteria()).asPredicate();
                        handler.setCredentialSelectionPredicate(credential -> predicate.test(credential.getId()));
                    }
                    handler.setPrincipalAttributeMap(attributes);
                    if (StringUtils.isBlank(l.getPrincipalAttributeId())) {
                        LOGGER.debug("No principal id attribute is found for ldap authentication via [{}]", l.getLdapUrl());
                    } else {
                        handler.setPrincipalIdAttribute(l.getPrincipalAttributeId());
                        LOGGER.debug("Using principal id attribute [{}] for ldap authentication via [{}]", l.getPrincipalAttributeId(),
                                l.getLdapUrl());
                    }
                    if (l.getPasswordPolicy().isEnabled()) {
                        LOGGER.debug("Password policy is enabled for [{}]. Constructing password policy configuration", l.getLdapUrl());
                        handler.setPasswordPolicyConfiguration(createLdapPasswordPolicyConfiguration(l, authenticator));
                    }
                    LOGGER.debug("Initializing ldap authentication handler for [{}]", l.getLdapUrl());
                    handler.initialize();
                    handlers.add(handler);
                });
        return handlers;
    }

    /** Accepts only LDAP configuration entries with both a type and a URL. */
    private Predicate<LdapAuthenticationProperties> ldapInstanceConfigurationPredicate() {
        return l -> {
            if (l.getType() == null) {
                LOGGER.warn("Skipping ldap authentication entry since no type is defined");
                return false;
            }
            if (StringUtils.isBlank(l.getLdapUrl())) {
                LOGGER.warn("Skipping ldap authentication entry since no ldap url is defined");
                return false;
            }
            return true;
        };
    }

    /**
     * Creates the password-policy configuration for a given LDAP entry,
     * choosing directory-specific authentication response handlers (AD,
     * FreeIPA, eDirectory or the generic pair) and either a warning-attribute
     * or the default account state handler.
     *
     * @param l             the LDAP configuration entry
     * @param authenticator the authenticator to attach response handlers to
     * @return the assembled password policy configuration
     */
    private static LdapPasswordPolicyConfiguration createLdapPasswordPolicyConfiguration(final LdapAuthenticationProperties l,
                                                                                         final Authenticator authenticator) {
        final LdapPasswordPolicyConfiguration cfg = new LdapPasswordPolicyConfiguration(l.getPasswordPolicy());
        final Set<AuthenticationResponseHandler> handlers = new HashSet<>();
        if (cfg.getPasswordWarningNumberOfDays() > 0) {
            LOGGER.debug("Password policy authentication response handler is set to accommodate directory type: [{}]", l.getPasswordPolicy().getType());
            switch (l.getPasswordPolicy().getType()) {
                case AD:
                    handlers.add(new ActiveDirectoryAuthenticationResponseHandler(Period.ofDays(cfg.getPasswordWarningNumberOfDays())));
                    break;
                case FreeIPA:
                    handlers.add(new FreeIPAAuthenticationResponseHandler(Period.ofDays(cfg.getPasswordWarningNumberOfDays()), cfg.getLoginFailures()));
                    break;
                case EDirectory:
                    handlers.add(new EDirectoryAuthenticationResponseHandler(Period.ofDays(cfg.getPasswordWarningNumberOfDays())));
                    break;
                default:
                    handlers.add(new PasswordPolicyAuthenticationResponseHandler());
                    handlers.add(new PasswordExpirationAuthenticationResponseHandler());
                    break;
            }
        } else {
            LOGGER.debug("Password warning number of days is undefined; LDAP authentication may NOT support "
                    + "EDirectory, AD and FreeIPA to handle password policy authentication responses");
        }
        // toArray(T[]) already returns the typed array; the previous explicit cast was redundant.
        authenticator.setAuthenticationResponseHandlers(handlers.toArray(new AuthenticationResponseHandler[0]));
        LOGGER.debug("LDAP authentication response handlers configured are: [{}]", handlers);
        if (StringUtils.isNotBlank(l.getPasswordPolicy().getWarningAttributeName())
                && StringUtils.isNotBlank(l.getPasswordPolicy().getWarningAttributeValue())) {
            LOGGER.debug("Configuring a warning account state handler for LDAP authentication for warning attribute [{}] and value [{}]",
                    l.getPasswordPolicy().getWarningAttributeName(), l.getPasswordPolicy().getWarningAttributeValue());
            final OptionalWarningAccountStateHandler accountHandler = new OptionalWarningAccountStateHandler();
            accountHandler.setDisplayWarningOnMatch(l.getPasswordPolicy().isDisplayWarningOnMatch());
            accountHandler.setWarnAttributeName(l.getPasswordPolicy().getWarningAttributeName());
            accountHandler.setWarningAttributeValue(l.getPasswordPolicy().getWarningAttributeValue());
            accountHandler.setAttributesToErrorMap(l.getPasswordPolicy().getPolicyAttributes());
            cfg.setAccountStateHandler(accountHandler);
        } else {
            final DefaultAccountStateHandler accountHandler = new DefaultAccountStateHandler();
            accountHandler.setAttributesToErrorMap(l.getPasswordPolicy().getPolicyAttributes());
            cfg.setAccountStateHandler(accountHandler);
            LOGGER.debug("Configuring the default account state handler for LDAP authentication");
        }
        return cfg;
    }

    /**
     * The type Ldap authentication event execution plan configuration.
     */
    @Configuration("ldapAuthenticationEventExecutionPlanConfiguration")
    @EnableConfigurationProperties(CasConfigurationProperties.class)
    public class LdapAuthenticationEventExecutionPlanConfiguration implements AuthenticationEventExecutionPlanConfigurer {
        private boolean isAttributeRepositorySourceDefined() {
            return !attributeRepositories.isEmpty();
        }

        /**
         * Registers every LDAP handler with a chaining principal resolver:
         * the person-directory resolver when attribute repositories exist,
         * otherwise a plain echoing resolver.
         */
        @Override
        public void configureAuthenticationExecutionPlan(final AuthenticationEventExecutionPlan plan) {
            ldapAuthenticationHandlers().forEach(handler -> {
                final ChainingPrincipalResolver resolver = new ChainingPrincipalResolver();
                if (isAttributeRepositorySourceDefined()) {
                    LOGGER.debug("Attribute repository sources are defined and available for the principal resolution chain");
                    resolver.setChain(Arrays.asList(personDirectoryPrincipalResolver, new EchoingPrincipalResolver()));
                } else {
                    LOGGER.debug("Attribute repository sources are not available for principal resolution so principal resolver will echo "
                            + "back the principal resolved during LDAP authentication directly.");
                    resolver.setChain(Arrays.asList(new EchoingPrincipalResolver()));
                }
                LOGGER.debug("Ldap authentication for [{}] is to chain principal resolvers via [[{}]] for attribute resolution",
                        handler.getName(), resolver);
                plan.registerAuthenticationHandlerWithPrincipalResolver(handler, resolver);
            });
        }
    }
}
| |
/**
*
*/
package cz.metacentrum.perun.core.bl;
import cz.metacentrum.perun.core.api.Candidate;
import cz.metacentrum.perun.core.api.CandidateGroup;
import cz.metacentrum.perun.core.api.CandidateSync;
import cz.metacentrum.perun.core.api.ExtSource;
import cz.metacentrum.perun.core.api.Group;
import cz.metacentrum.perun.core.api.PerunSession;
import cz.metacentrum.perun.core.api.User;
import cz.metacentrum.perun.core.api.Vo;
import cz.metacentrum.perun.core.api.exceptions.CandidateNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceAlreadyAssignedException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceAlreadyRemovedException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceExistsException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceNotAssignedException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceUnsupportedOperationException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.VoNotExistsException;
import java.util.List;
import java.util.Map;
/**
 * Business-logic layer for managing external sources (ExtSources): creating
 * and deleting them, associating them with VOs and groups, and retrieving
 * candidate users and groups from them.
 *
 * @author Michal Prochazka <michalp@ics.muni.cz>
 *
 */
public interface ExtSourcesManagerBl {
	/**
	 * Initializes the manager (called once at startup).
	 */
	void initialize(PerunSession sess);
	/**
	 * Creates an external source.
	 *
	 * @param perunSession session of the caller
	 * @param extSource external source to create
	 * @param attributes attributes of the external source
	 *
	 * @return ExtSource object with newly associated ID.
	 *
	 * @throws InternalErrorException
	 * @throws ExtSourceExistsException if an external source with the same name already exists
	 */
	ExtSource createExtSource(PerunSession perunSession, ExtSource extSource, Map<String, String> attributes) throws ExtSourceExistsException;
	/**
	 * Deletes an external source.
	 *
	 * @param perunSession session of the caller
	 * @param extSource external source to delete
	 *
	 * @throws InternalErrorException
	 * @throws ExtSourceAlreadyRemovedException if 0 rows are affected by the delete in the DB
	 */
	void deleteExtSource(PerunSession perunSession, ExtSource extSource) throws ExtSourceAlreadyRemovedException;
	/**
	 * Searches for the external source with the specified id.
	 *
	 * @param perunSession session of the caller
	 * @param id id of the external source
	 *
	 * @return External source with specified id
	 *
	 * @throws InternalErrorException
	 */
	ExtSource getExtSourceById(PerunSession perunSession, int id) throws ExtSourceNotExistsException;
	/**
	 * Searches for the external source using the specified name.
	 *
	 * @param perunSession session of the caller
	 * @param name name of the external source
	 *
	 * @return External source with specified name
	 *
	 * @throws InternalErrorException
	 */
	ExtSource getExtSourceByName(PerunSession perunSession, String name) throws ExtSourceNotExistsException;
	/**
	 * Gets the list of external sources associated with the VO.
	 *
	 * @param perunSession session of the caller
	 * @param vo VO whose external sources are returned
	 *
	 * @return list of external sources associated with the VO
	 *
	 * @throws InternalErrorException
	 */
	List<ExtSource> getVoExtSources(PerunSession perunSession, Vo vo);
	/**
	 * Gets the list of external sources associated with the group.
	 *
	 * @param perunSession session of the caller
	 * @param group group whose external sources are returned
	 *
	 * @return list of external sources associated with the group
	 *
	 * @throws InternalErrorException
	 */
	List<ExtSource> getGroupExtSources(PerunSession perunSession, Group group);
	/**
	 * Gets the list of all external sources.
	 *
	 * @param perunSession session of the caller
	 *
	 * @return list of external source
	 *
	 * @throws InternalErrorException
	 */
	List<ExtSource> getExtSources(PerunSession perunSession);
	/**
	 * Associates an external source definition with the VO.
	 *
	 * @param perunSession session of the caller
	 * @param vo VO to associate the source with
	 * @param source external source to associate
	 *
	 * @throws InternalErrorException
	 */
	void addExtSource(PerunSession perunSession, Vo vo, ExtSource source) throws ExtSourceAlreadyAssignedException;
	/**
	 * Associates an external source definition with the group.
	 *
	 * @param perunSession session of the caller
	 * @param group group to associate the source with
	 * @param source external source to associate
	 *
	 * @throws InternalErrorException
	 * @throws ExtSourceAlreadyAssignedException if the source is already assigned to the group
	 */
	void addExtSource(PerunSession perunSession, Group group, ExtSource source) throws ExtSourceAlreadyAssignedException;
	/**
	 * Removes the association of the external source from the VO.
	 *
	 * @param perunSession session of the caller
	 * @param vo VO to remove the source from
	 * @param source external source to remove
	 *
	 * @throws InternalErrorException
	 * @throws ExtSourceNotAssignedException if the source is not assigned to the VO
	 * @throws ExtSourceAlreadyRemovedException if 0 rows are affected by the delete in the DB
	 */
	void removeExtSource(PerunSession perunSession, Vo vo, ExtSource source) throws ExtSourceNotAssignedException, ExtSourceAlreadyRemovedException;
	/**
	 * Removes the association of the external source from the group.
	 *
	 * @param perunSession session of the caller
	 * @param group group to remove the source from
	 * @param source external source to remove
	 *
	 * @throws InternalErrorException
	 * @throws ExtSourceAlreadyRemovedException if 0 rows are affected by the delete in the DB
	 * @throws ExtSourceNotAssignedException if the source is not assigned to the group
	 */
	void removeExtSource(PerunSession perunSession, Group group, ExtSource source) throws ExtSourceNotAssignedException, ExtSourceAlreadyRemovedException;
	/**
	 * Checks whether the ExtSource exists; if not, the ExtSource is created.
	 *
	 * @param perunSession session of the caller
	 * @param extSourceName name of the external source
	 * @param extSourceType type of the external source
	 *
	 * @return existing or newly created extSource is returned
	 *
	 * @throws InternalErrorException
	 */
	ExtSource checkOrCreateExtSource(PerunSession perunSession, String extSourceName, String extSourceType);
	/**
	 * Returns the list of users stored by this ExtSource which are no longer valid
	 * (i.e. no longer present in the external source).
	 *
	 * @param perunSession session of the caller
	 * @param source external source to check against
	 *
	 * @return list of users who are no longer in the extSource
	 *
	 * @throws InternalErrorException
	 */
	List<User> getInvalidUsers(PerunSession perunSession, ExtSource source);
	/**
	 * Gets the candidate from the ExtSource.
	 * The login of the candidate will be used to obtain data from the ExtSource.
	 *
	 * @param perunSession Perun session
	 * @param source External source which will be used to get data about the candidate
	 * @param login Login of the candidate
	 * @return a Candidate object
	 * @throws InternalErrorException
	 * @throws CandidateNotExistsException if the candidate does not exist in the source
	 * @throws ExtSourceUnsupportedOperationException if the source does not support this lookup
	 */
	CandidateSync getCandidate(PerunSession perunSession, ExtSource source, String login) throws CandidateNotExistsException, ExtSourceUnsupportedOperationException;
	/**
	 * Gets the candidate from subject data, in which at least the login must exist.
	 *
	 * IMPORTANT: the subject data are expected to have been obtained from the ExtSource beforehand.
	 *
	 * @param perunSession Perun session
	 * @param subjectData raw subject data previously read from the external source
	 * @param source External source which will be used to get data about the candidate
	 * @param login Login of the candidate
	 *
	 * @return a Candidate object
	 * @throws InternalErrorException
	 */
	CandidateSync getCandidate(PerunSession perunSession, Map<String,String> subjectData , ExtSource source, String login);
	/**
	 * Checks that the given ExtSource exists; throws otherwise.
	 */
	void checkExtSourceExists(PerunSession sess, ExtSource extSource) throws ExtSourceNotExistsException;
	/**
	 * Checks whether the extSource is assigned to the VO; throws an exception if not.
	 *
	 * @param sess session of the caller
	 * @param extSource external source to check
	 * @param voId id of the VO
	 *
	 * @throws InternalErrorException
	 * @throws ExtSourceNotAssignedException if the source is not assigned to the VO
	 * @throws VoNotExistsException if the VO does not exist
	 */
	void checkExtSourceAssignedToVo(PerunSession sess, ExtSource extSource, int voId) throws ExtSourceNotAssignedException, VoNotExistsException;
	/**
	 * Loads ext source definitions from the configuration file and updates the entries stored in the DB.
	 *
	 * @param sess session of the caller
	 */
	void loadExtSourcesDefinitions(PerunSession sess);
	/**
	 * Gets the attributes of an external source.
	 *
	 * @param extSource External Source
	 * @return Map of attributes for external source
	 * @throws InternalErrorException
	 */
	Map<String, String> getAttributes(ExtSource extSource);
	/**
	 * Generates a candidate group from group subject data.
	 *
	 * IMPORTANT: the subject data are expected to have been obtained from the ExtSource beforehand.
	 *
	 * @param perunSession session of the caller
	 * @param groupSubjectData raw group subject data previously read from the external source
	 * @param source external source the data came from
	 * @param loginPrefix login prefix to change group login and parent group login by it
	 *
	 * @return Candidate group object
	 * @throws InternalErrorException
	 */
	CandidateGroup generateCandidateGroup(PerunSession perunSession, Map<String,String> groupSubjectData, ExtSource source, String loginPrefix);
	/**
	 * Generates candidate groups from a list of group subject data.
	 *
	 * IMPORTANT: the subject data are expected to have been obtained from the ExtSource beforehand.
	 *
	 * @param perunSession session of the caller
	 * @param groupSubjectsData raw group subject data previously read from the external source
	 * @param source external source the data came from
	 * @param loginPrefix login prefix to change group login and parent group login by it
	 *
	 * @return Candidate group objects
	 * @throws InternalErrorException
	 */
	List<CandidateGroup> generateCandidateGroups(PerunSession perunSession, List<Map<String,String>> groupSubjectsData, ExtSource source, String loginPrefix);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import static org.apache.hadoop.hive.ql.exec.Utilities.DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
import org.apache.hadoop.hive.ql.exec.tez.TezTask;
import org.apache.hadoop.hive.ql.io.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
public class TestUtilities {
  // Fresh scratch directory for each test, cleaned up automatically by JUnit.
  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();
  public static final Logger LOG = LoggerFactory.getLogger(TestUtilities.class);
  // Bucket count shared by the bucketing-related tests in this class.
  private static final int NUM_BUCKETS = 3;
/**
 * Verifies {@link Utilities#getFileExtension} for compressed/uncompressed output,
 * with and without an explicit "hive.output.file.extension" override.
 */
@Test
public void testGetFileExtension() {
  JobConf jc = new JobConf();
  // No extension configured: unknown formats never get one, compressed or not.
  assertEquals("No extension for uncompressed unknown format", "",
      getFileExtension(jc, false, null));
  assertEquals("No extension for compressed unknown format", "",
      getFileExtension(jc, true, null));
  assertEquals("No extension for uncompressed text format", "",
      getFileExtension(jc, false, new HiveIgnoreKeyTextOutputFormat()));
  // FIX: message previously said "uncompressed" although isCompressed=true here.
  assertEquals("Deflate for compressed text format", ".deflate",
      getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat()));
  // Once a custom extension is configured it wins in every combination.
  String extension = ".myext";
  jc.set("hive.output.file.extension", extension);
  assertEquals("Custom extension for uncompressed unknown format", extension,
      getFileExtension(jc, false, null));
  assertEquals("Custom extension for compressed unknown format", extension,
      getFileExtension(jc, true, null));
  assertEquals("Custom extension for uncompressed text format", extension,
      getFileExtension(jc, false, new HiveIgnoreKeyTextOutputFormat()));
  // FIX: message previously said "uncompressed" although isCompressed=true here.
  assertEquals("Custom extension for compressed text format", extension,
      getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat()));
}
/**
 * A timestamp constant wrapped in a from_utc_timestamp call must survive a
 * serialize/deserialize round trip of the expression tree unchanged.
 */
@Test
public void testSerializeTimestamp() {
Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
children.add(constant);
ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo,
new GenericUDFFromUtcTimestamp(), children);
// Round trip: the expression string must be identical before and after.
assertEquals(desc.getExprString(), SerializationUtilities.deserializeExpression(
SerializationUtilities.serializeExpression(desc)).getExprString());
}
/**
 * Utilities.getDbTableName must resolve an unqualified name against the session's
 * current database, split "db.table" names, and reject names with more than one dot.
 */
@Test
public void testgetDbTableName() throws HiveException{
String tablename;
String [] dbtab;
SessionState.start(new HiveConf(this.getClass()));
String curDefaultdb = SessionState.get().getCurrentDatabase();
//test table without db portion
tablename = "tab1";
dbtab = Utilities.getDbTableName(tablename);
assertEquals("db name", curDefaultdb, dbtab[0]);
assertEquals("table name", tablename, dbtab[1]);
//test table with db portion
tablename = "dab1.tab1";
dbtab = Utilities.getDbTableName(tablename);
assertEquals("db name", "dab1", dbtab[0]);
assertEquals("table name", "tab1", dbtab[1]);
//test invalid table name
tablename = "dab1.tab1.x1";
try {
dbtab = Utilities.getDbTableName(tablename);
fail("exception was expected for invalid table name");
} catch(HiveException ex){
// Exact error message is part of the expected contract.
assertEquals("Invalid table name " + tablename, ex.getMessage());
}
}
/**
 * Utilities.replaceTaskId must rewrite the trailing task number while keeping
 * any partition-spec prefix intact.
 */
@Test
public void testReplaceTaskId() {
  // Plain task id: the whole string is the zero-padded task number.
  Assert.assertEquals("000001", Utilities.replaceTaskId("000000", 1));
  // Prefixed task id: only the numeric suffix is replaced.
  Assert.assertEquals("(ds%3D1)000005", Utilities.replaceTaskId("(ds%3D1)000001", 5));
}
// The four tests below drive the same scenario through runRemoveTempOrDuplicateFilesTestCase:
// the cleanup returns an empty path list on Tez, and NUM_BUCKETS paths on MR,
// both with and without dynamic partitioning (Dp).
@Test
public void testRemoveTempOrDuplicateFilesOnTezNoDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("tez", false);
assertEquals(0, paths.size());
}
@Test
public void testRemoveTempOrDuplicateFilesOnTezWithDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("tez", true);
assertEquals(0, paths.size());
}
@Test
public void testRemoveTempOrDuplicateFilesOnMrNoDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("mr", false);
assertEquals(NUM_BUCKETS, paths.size());
}
@Test
public void testRemoveTempOrDuplicateFilesOnMrWithDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("mr", true);
assertEquals(NUM_BUCKETS, paths.size());
}
/**
 * Shared scenario: creates a local temp dir with one task output file, then runs
 * Utilities.removeTempOrDuplicateFiles under the given execution engine and returns
 * the paths it reports, after verifying they kept the temp dir's scheme/authority.
 */
private List<Path> runRemoveTempOrDuplicateFilesTestCase(String executionEngine, boolean dPEnabled)
throws Exception {
Configuration hconf = new HiveConf(this.getClass());
// do this to verify that Utilities.removeTempOrDuplicateFiles does not revert to default scheme information
hconf.set("fs.defaultFS", "hdfs://should-not-be-used/");
hconf.set(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, executionEngine);
FileSystem localFs = FileSystem.getLocal(hconf);
DynamicPartitionCtx dpCtx = getDynamicPartitionCtx(dPEnabled);
Path tempDirPath = setupTempDirWithSingleOutputFile(hconf);
FileSinkDesc conf = getFileSinkDesc(tempDirPath);
// HIVE-23354 enforces that MR speculative execution is disabled
hconf.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
hconf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);
List<Path> paths = Utilities.removeTempOrDuplicateFiles(localFs, tempDirPath, dpCtx, conf, hconf, false);
String expectedScheme = tempDirPath.toUri().getScheme();
String expectedAuthority = tempDirPath.toUri().getAuthority();
assertPathsMatchSchemeAndAuthority(expectedScheme, expectedAuthority, paths);
return paths;
}
/**
 * Asserts that every path carries the expected URI scheme (compared case-insensitively)
 * and the expected authority.
 * FIX: the expected/actual arguments were swapped (JUnit's contract is
 * assertEquals(expected, actual)), which produced misleading failure messages.
 */
private void assertPathsMatchSchemeAndAuthority(String expectedScheme, String expectedAuthority, List<Path> paths) {
  for (Path path : paths) {
    assertEquals(expectedScheme.toLowerCase(), path.toUri().getScheme().toLowerCase());
    assertEquals(expectedAuthority, path.toUri().getAuthority());
  }
}
/**
 * Builds a mocked DynamicPartitionCtx reporting zero DP columns and NUM_BUCKETS
 * buckets, or returns null when dynamic partitioning is disabled.
 */
private DynamicPartitionCtx getDynamicPartitionCtx(boolean dPEnabled) {
  if (!dPEnabled) {
    return null;
  }
  DynamicPartitionCtx ctx = mock(DynamicPartitionCtx.class);
  when(ctx.getNumDPCols()).thenReturn(0);
  when(ctx.getNumBuckets()).thenReturn(NUM_BUCKETS);
  return ctx;
}
/**
 * Builds a FileSinkDesc writing to the given directory, backed by a mocked table
 * whose only configured behavior is reporting NUM_BUCKETS buckets.
 */
private FileSinkDesc getFileSinkDesc(Path tempDirPath) {
  Table bucketedTable = mock(Table.class);
  when(bucketedTable.getNumBuckets()).thenReturn(NUM_BUCKETS);
  FileSinkDesc sinkDesc =
      new FileSinkDesc(tempDirPath, Utilities.getTableDesc("s", "string"), false);
  sinkDesc.setTable(bucketedTable);
  return sinkDesc;
}
/**
 * Creates a local temp directory (file:// scheme) containing a single empty file
 * named after the current task id, and returns the directory path.
 */
private Path setupTempDirWithSingleOutputFile(Configuration hconf) throws IOException {
Path tempDirPath = new Path("file://" + temporaryFolder.newFolder().getAbsolutePath());
Path taskOutputPath = new Path(tempDirPath, Utilities.getTaskId(hconf));
FileSystem.getLocal(hconf).create(taskOutputPath).close();
return tempDirPath;
}
/**
 * Check that calling {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)}
 * can process two different tables that both have empty partitions.
 */
@Test
public void testGetInputPathsWithEmptyPartitions() throws Exception {
String alias1Name = "alias1";
String alias2Name = "alias2";
MapWork mapWork1 = new MapWork();
MapWork mapWork2 = new MapWork();
JobConf jobConf = new JobConf();
Configuration conf = new Configuration();
// Random, non-existent paths simulate partitions with no data on disk.
Path nonExistentPath1 = new Path(UUID.randomUUID().toString());
Path nonExistentPath2 = new Path(UUID.randomUUID().toString());
PartitionDesc mockPartitionDesc = mock(PartitionDesc.class);
TableDesc mockTableDesc = mock(TableDesc.class);
when(mockTableDesc.isNonNative()).thenReturn(false);
when(mockTableDesc.getProperties()).thenReturn(new Properties());
when(mockPartitionDesc.getProperties()).thenReturn(new Properties());
when(mockPartitionDesc.getTableDesc()).thenReturn(mockTableDesc);
doReturn(HiveSequenceFileOutputFormat.class).when(
mockPartitionDesc).getOutputFileFormatClass();
mapWork1.setPathToAliases(new LinkedHashMap<>(
ImmutableMap.of(nonExistentPath1, Lists.newArrayList(alias1Name))));
mapWork1.setAliasToWork(new LinkedHashMap<>(
ImmutableMap.of(alias1Name, (Operator<?>) mock(Operator.class))));
mapWork1.setPathToPartitionInfo(new LinkedHashMap<>(
ImmutableMap.of(nonExistentPath1, mockPartitionDesc)));
mapWork2.setPathToAliases(new LinkedHashMap<>(
ImmutableMap.of(nonExistentPath2, Lists.newArrayList(alias2Name))));
mapWork2.setAliasToWork(new LinkedHashMap<>(
ImmutableMap.of(alias2Name, (Operator<?>) mock(Operator.class))));
mapWork2.setPathToPartitionInfo(new LinkedHashMap<>(
ImmutableMap.of(nonExistentPath2, mockPartitionDesc)));
List<Path> inputPaths = new ArrayList<>();
try {
Path scratchDir = new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCALSCRATCHDIR));
// For each empty partition, getInputPaths must return a substitute path that
// exists, rather than the missing partition path itself.
List<Path> inputPaths1 = Utilities.getInputPaths(jobConf, mapWork1, scratchDir,
mock(Context.class), false);
inputPaths.addAll(inputPaths1);
assertEquals(inputPaths1.size(), 1);
assertNotEquals(inputPaths1.get(0), nonExistentPath1);
assertTrue(inputPaths1.get(0).getFileSystem(conf).exists(inputPaths1.get(0)));
assertFalse(nonExistentPath1.getFileSystem(conf).exists(nonExistentPath1));
List<Path> inputPaths2 = Utilities.getInputPaths(jobConf, mapWork2, scratchDir,
mock(Context.class), false);
inputPaths.addAll(inputPaths2);
assertEquals(inputPaths2.size(), 1);
assertNotEquals(inputPaths2.get(0), nonExistentPath2);
assertTrue(inputPaths2.get(0).getFileSystem(conf).exists(inputPaths2.get(0)));
assertFalse(nonExistentPath2.getFileSystem(conf).exists(nonExistentPath2));
} finally {
// Best-effort cleanup of the substitute files created by getInputPaths.
File file;
for (Path path : inputPaths) {
file = new File(path.toString());
if (file.exists()) {
file.delete();
}
}
}
}
/**
 * Check that calling {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)}
 * can process two different tables that both have empty partitions when using multiple threads.
 * Some extra logic is placed at the end of the test to validate no race conditions put the
 * {@link MapWork} object in an invalid state.
 */
@Test
public void testGetInputPathsWithMultipleThreadsAndEmptyPartitions() throws Exception {
int numPartitions = 15;
JobConf jobConf = new JobConf();
// Oversized thread pool to maximize the chance of exposing race conditions.
jobConf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname,
Runtime.getRuntime().availableProcessors() * 2);
MapWork mapWork = new MapWork();
Path testTablePath = new Path("testTable");
Path[] testPartitionsPaths = new Path[numPartitions];
PartitionDesc mockPartitionDesc = mock(PartitionDesc.class);
TableDesc mockTableDesc = mock(TableDesc.class);
when(mockTableDesc.isNonNative()).thenReturn(false);
when(mockTableDesc.getProperties()).thenReturn(new Properties());
when(mockPartitionDesc.getProperties()).thenReturn(new Properties());
when(mockPartitionDesc.getTableDesc()).thenReturn(mockTableDesc);
doReturn(HiveSequenceFileOutputFormat.class).when(
mockPartitionDesc).getOutputFileFormatClass();
// Partition directories are registered in MapWork but never created on disk,
// so every partition is "empty".
for (int i = 0; i < numPartitions; i++) {
String testPartitionName = "p=" + i;
testPartitionsPaths[i] = new Path(testTablePath, "p=" + i);
mapWork.getPathToAliases().put(testPartitionsPaths[i], Lists.newArrayList(testPartitionName));
mapWork.getAliasToWork().put(testPartitionName, mock(Operator.class));
mapWork.getPathToPartitionInfo().put(testPartitionsPaths[i], mockPartitionDesc);
}
FileSystem fs = FileSystem.getLocal(jobConf);
try {
fs.mkdirs(testTablePath);
List<Path> inputPaths = Utilities.getInputPaths(jobConf, mapWork,
new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCALSCRATCHDIR)), mock(Context.class), false);
// Every empty partition must have been replaced by a substitute path.
assertEquals(inputPaths.size(), numPartitions);
for (int i = 0; i < numPartitions; i++) {
assertNotEquals(inputPaths.get(i), testPartitionsPaths[i]);
}
// Validate the MapWork maps were not corrupted by the concurrent updates.
assertEquals(mapWork.getPathToAliases().size(), numPartitions);
assertEquals(mapWork.getPathToPartitionInfo().size(), numPartitions);
assertEquals(mapWork.getAliasToWork().size(), numPartitions);
for (Map.Entry<Path, List<String>> entry : mapWork.getPathToAliases().entrySet()) {
assertNotNull(entry.getKey());
assertNotNull(entry.getValue());
assertEquals(entry.getValue().size(), 1);
assertTrue(entry.getKey().getFileSystem(new Configuration()).exists(entry.getKey()));
}
} finally {
if (fs.exists(testTablePath)) {
fs.delete(testTablePath, true);
}
}
}
/**
 * Check that calling {@link Utilities#getMaxExecutorsForInputListing(Configuration, int)}
 * returns the maximum number of executors to use based on the number of input locations.
 */
@Test
public void testGetMaxExecutorsForInputListing() {
Configuration conf = new Configuration();
final int ZERO_EXECUTORS = 0;
final int ONE_EXECUTOR = 1;
final int TWO_EXECUTORS = 2;
final int ZERO_THREADS = 0;
final int ONE_THREAD = 1;
final int TWO_THREADS = 2;
final int ZERO_LOCATIONS = 0;
final int ONE_LOCATION = 1;
final int TWO_LOCATIONS = 2;
final int THREE_LOCATIONS = 3;
// The executor count is capped by both the configured thread count and the
// number of input locations; zero locations always yields zero executors.
conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ONE_THREAD);
assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, TWO_THREADS);
assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
/*
 * The following tests will verify the deprecated variable is still usable.
 */
conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ZERO_THREADS);
conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, ZERO_THREADS);
assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ZERO_THREADS);
conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, ONE_THREAD);
assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ZERO_THREADS);
conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, TWO_THREADS);
assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
// Check that HIVE_EXEC_INPUT_LISTING_MAX_THREADS has priority over DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX
conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, TWO_THREADS);
conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, ONE_THREAD);
assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
}
/**
 * Test for {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)} using a
 * single thread.
 */
@Test
public void testGetInputPathsWithASingleThread() throws Exception {
final int NUM_PARTITIONS = 5;
JobConf jobConf = new JobConf();
// Max listing threads = 1 forces the single-threaded code path.
jobConf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, 1);
runTestGetInputPaths(jobConf, NUM_PARTITIONS);
}
/**
 * Test for {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)} with multiple
 * threads.
 */
@Test
public void testGetInputPathsWithMultipleThreads() throws Exception {
final int NUM_PARTITIONS = 5;
JobConf jobConf = new JobConf();
// Max listing threads = 2 enables the thread-pool code path.
jobConf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, 2);
runTestGetInputPaths(jobConf, NUM_PARTITIONS);
}
/**
 * Shared driver for the getInputPaths tests: builds a local table directory with
 * {@code numOfPartitions} partition directories (one data file each), asks
 * {@link Utilities#getInputPaths} for the inputs, and checks they match the partition
 * directories one-to-one in order.
 * FIX: the assertEquals calls previously passed (actual, expected) in reverse order,
 * which produces misleading failure messages.
 */
private void runTestGetInputPaths(JobConf jobConf, int numOfPartitions) throws Exception {
  MapWork mapWork = new MapWork();
  Path scratchDir = new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCALSCRATCHDIR));
  Map<Path, List<String>> pathToAliasTable = new LinkedHashMap<>();
  String testTableName = "testTable";
  Path testTablePath = new Path(testTableName);
  Path[] testPartitionsPaths = new Path[numOfPartitions];
  for (int i = 0; i < numOfPartitions; i++) {
    String testPartitionName = "p=" + i;
    testPartitionsPaths[i] = new Path(testTablePath, testPartitionName);
    pathToAliasTable.put(testPartitionsPaths[i], Lists.newArrayList(testPartitionName));
    mapWork.getAliasToWork().put(testPartitionName, mock(Operator.class));
  }
  mapWork.setPathToAliases(pathToAliasTable);
  FileSystem fs = FileSystem.getLocal(jobConf);
  try {
    fs.mkdirs(testTablePath);
    // Each partition gets one file so none of them is treated as empty.
    for (int i = 0; i < numOfPartitions; i++) {
      fs.mkdirs(testPartitionsPaths[i]);
      fs.create(new Path(testPartitionsPaths[i], "test1.txt")).close();
    }
    List<Path> inputPaths =
        Utilities.getInputPaths(jobConf, mapWork, scratchDir, mock(Context.class), false);
    assertEquals(numOfPartitions, inputPaths.size());
    for (int i = 0; i < numOfPartitions; i++) {
      assertEquals(testPartitionsPaths[i], inputPaths.get(i));
    }
  } finally {
    if (fs.exists(testTablePath)) {
      fs.delete(testTablePath, true);
    }
  }
}
/**
 * getInputPathsWithPool must submit one listing task per input path and always
 * shut the supplied pool down afterwards.
 */
@Test
public void testGetInputPathsPool() throws IOException, ExecutionException, InterruptedException {
List<Path> pathsToAdd = new ArrayList<>();
Path path = new Path("dummy-path");
pathsToAdd.add(path);
pathsToAdd.add(path);
pathsToAdd.add(path);
ExecutorService pool = mock(ExecutorService.class);
Future mockFuture = mock(Future.class);
when(mockFuture.get()).thenReturn(path);
when(pool.submit(any(Callable.class))).thenReturn(mockFuture);
Utilities.getInputPathsWithPool(mock(JobConf.class), mock(MapWork.class), mock(Path.class), mock(Context.class),
false, pathsToAdd, pool);
// One submitted task per path, followed by pool shutdown.
verify(pool, times(3)).submit(any(Callable.class));
verify(pool).shutdown();
verify(pool).shutdownNow();
}
/**
 * When a listing task fails, getInputPathsWithPool must propagate an exception
 * and still shut the pool down via shutdownNow().
 */
@Test
public void testGetInputPathsPoolAndFailure() throws IOException, ExecutionException, InterruptedException {
List<Path> pathsToAdd = new ArrayList<>();
Path path = new Path("dummy-path");
pathsToAdd.add(path);
pathsToAdd.add(path);
pathsToAdd.add(path);
ExecutorService pool = mock(ExecutorService.class);
Future mockFuture = mock(Future.class);
// Every submitted task fails when its result is fetched.
when(mockFuture.get()).thenThrow(new RuntimeException());
when(pool.submit(any(Callable.class))).thenReturn(mockFuture);
Exception e = null;
try {
Utilities.getInputPathsWithPool(mock(JobConf.class), mock(MapWork.class), mock(Path.class), mock(Context.class),
false, pathsToAdd, pool);
} catch (Exception thrownException) {
e = thrownException;
}
assertNotNull(e);
verify(pool, times(3)).submit(any(Callable.class));
verify(pool).shutdownNow();
}
// Helper: tasks must be created through TaskFactory.get() so each gets a unique id
// and .equals() distinguishes them (see the note in getTestDiamondTaskGraph).
private Task<?> getDependencyCollectionTask(){
return TaskFactory.get(new DependencyCollectionWork());
}
/**
 * Generates a task graph that looks like this:
 *
 * ---->DTa----
 * / \
 * root ----->DTb-----*-->DTd---> ProvidedTask --> DTe
 * \ /
 * ---->DTc----
 */
private List<Task<?>> getTestDiamondTaskGraph(Task<?> providedTask){
// Note: never instantiate a task without TaskFactory.get() if you're not
// okay with .equals() breaking. Doing it via TaskFactory.get makes sure
// that an id is generated, and two tasks of the same type don't show
// up as "equal", which is important for things like iterating over an
// array. Without this, DTa, DTb, and DTc would show up as one item in
// the list of children. Thus, we're instantiating via a helper method
// that instantiates via TaskFactory.get()
Task<?> root = getDependencyCollectionTask();
Task<?> DTa = getDependencyCollectionTask();
Task<?> DTb = getDependencyCollectionTask();
Task<?> DTc = getDependencyCollectionTask();
Task<?> DTd = getDependencyCollectionTask();
Task<?> DTe = getDependencyCollectionTask();
root.addDependentTask(DTa);
root.addDependentTask(DTb);
root.addDependentTask(DTc);
DTa.addDependentTask(DTd);
DTb.addDependentTask(DTd);
DTc.addDependentTask(DTd);
DTd.addDependentTask(providedTask);
providedTask.addDependentTask(DTe);
// Only the root task is returned; every other task is reachable from it.
List<Task<?>> retVals = new ArrayList<Task<?>>();
retVals.add(root);
return retVals;
}
/**
 * DependencyCollectionTask that counts how often getDependentTasks on it
 * (and thus, on its descendants) is called counted via Task.getDependentTasks.
 * It is used to wrap another task to intercept calls on it.
 */
public class CountingWrappingTask extends DependencyCollectionTask {
// number of times getDependentTasks() has been invoked on this wrapper
int count;
// the task being wrapped; new dependents are attached to it, not to the wrapper
Task<?> wrappedDep = null;
public CountingWrappingTask(Task<?> dep) {
count = 0;
wrappedDep = dep;
super.addDependentTask(wrappedDep);
}
@Override
public boolean addDependentTask(Task<?> dependent) {
// delegate so dependents hang off the wrapped task
return wrappedDep.addDependentTask(dependent);
}
@Override
public List<Task<?>> getDependentTasks() {
count++;
System.err.println("YAH:getDepTasks got called!");
(new Exception()).printStackTrace(System.err);
LOG.info("YAH!getDepTasks", new Exception());
return super.getDependentTasks();
}
public int getDepCallCount() {
return count;
}
@Override
public String getName() {
return "COUNTER_TASK";
}
@Override
public String toString() {
return getName() + "_" + wrappedDep.toString();
}
};
/**
 * This test tests that Utilities.get*Tasks do not repeat themselves in the process
 * of extracting tasks from a given set of root tasks when given DAGs that can have
 * multiple paths, such as the case with Diamond-shaped DAGs common to replication.
 */
@Test
public void testGetTasksHaveNoRepeats() {
  CountingWrappingTask mrTask = new CountingWrappingTask(new ExecDriver());
  CountingWrappingTask tezTask = new CountingWrappingTask(new TezTask());
  CountingWrappingTask sparkTask = new CountingWrappingTask(new SparkTask());
  // First check - we should not have repeats in results
  assertEquals("No repeated MRTasks from Utilities.getMRTasks", 1,
      Utilities.getMRTasks(getTestDiamondTaskGraph(mrTask)).size());
  assertEquals("No repeated TezTasks from Utilities.getTezTasks", 1,
      Utilities.getTezTasks(getTestDiamondTaskGraph(tezTask)).size());
  // FIX: this message previously said "TezTasks" for the Spark assertion.
  assertEquals("No repeated SparkTasks from Utilities.getSparkTasks", 1,
      Utilities.getSparkTasks(getTestDiamondTaskGraph(sparkTask)).size());
  // Second check - the tasks we looked for must not have been accessed more than
  // once as a result of the traversal (note that we actually wind up accessing
  // 2 times, because each visit counts twice: once to check for existence, and
  // once to visit).
  assertEquals("MRTasks should have been visited only once", 2, mrTask.getDepCallCount());
  assertEquals("TezTasks should have been visited only once", 2, tezTask.getDepCallCount());
  assertEquals("SparkTasks should have been visited only once", 2, sparkTask.getDepCallCount());
}
// Helper: creates a MapredWork task via TaskFactory so ids and equality behave correctly.
private static Task<MapredWork> getMapredWork() {
return TaskFactory.get(MapredWork.class);
}
/**
 * Utilities.getMRTasks must collect every MR task in the graph exactly once,
 * recursing through children (including grandchildren).
 */
@Test
@SuppressWarnings("unchecked")
public void testGetTasksRecursion() {
Task<MapredWork> rootTask = getMapredWork();
Task<MapredWork> child1 = getMapredWork();
Task<MapredWork> child2 = getMapredWork();
Task<MapredWork> child11 = getMapredWork();
rootTask.addDependentTask(child1);
rootTask.addDependentTask(child2);
child1.addDependentTask(child11);
assertEquals(Lists.newArrayList(rootTask, child1, child2, child11),
Utilities.getMRTasks(getTestDiamondTaskGraph(rootTask)));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.parser.helper;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.camel.catalog.CamelCatalog;
import org.apache.camel.catalog.DefaultCamelCatalog;
import org.apache.camel.catalog.JSonSchemaHelper;
import org.apache.camel.parser.model.CamelNodeDetails;
import org.apache.camel.parser.model.CamelNodeDetailsFactory;
import org.apache.camel.parser.roaster.StatementFieldSource;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.ASTNode;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.AnonymousClassDeclaration;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Block;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.BooleanLiteral;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Expression;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.ExpressionStatement;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.FieldDeclaration;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.InfixExpression;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.MemberValuePair;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.MethodDeclaration;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.MethodInvocation;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.NormalAnnotation;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.NumberLiteral;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.ParenthesizedExpression;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.QualifiedName;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.SimpleName;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.SingleMemberAnnotation;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.StringLiteral;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Type;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.VariableDeclarationFragment;
import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.VariableDeclarationStatement;
import org.jboss.forge.roaster.model.Annotation;
import org.jboss.forge.roaster.model.source.FieldSource;
import org.jboss.forge.roaster.model.source.JavaClassSource;
import org.jboss.forge.roaster.model.source.MethodSource;
/**
* A Camel Java tree parser that only depends on the Roaster API.
* <p/>
* This implement is used for parsing the Camel routes and build a tree structure of the EIP nodes.
*
* @see CamelJavaParserHelper for parser that can discover endpoints and simple expressions
*/
public final class CamelJavaTreeParserHelper {
private final CamelCatalog camelCatalog = new DefaultCamelCatalog(true);
/**
 * Parses the given configure() method and builds a tree of {@link CamelNodeDetails}
 * for each Camel route it defines.
 *
 * @param clazz the Roaster source of the class being parsed
 * @param baseDir base directory of the project (not used by this method)
 * @param fullyQualifiedFileName the source file name, recorded on every node
 * @param configureMethod the configure() method to parse, may be null
 * @return one root node per route found ("from" nodes), possibly empty
 */
public List<CamelNodeDetails> parseCamelRouteTree(JavaClassSource clazz, String baseDir, String fullyQualifiedFileName,
MethodSource<JavaClassSource> configureMethod) {
// find any from which is the start of the route
CamelNodeDetailsFactory nodeFactory = CamelNodeDetailsFactory.newInstance();
CamelNodeDetails route = nodeFactory.newNode(null, "route");
if (configureMethod != null) {
MethodDeclaration md = (MethodDeclaration) configureMethod.getInternal();
Block block = md.getBody();
if (block != null) {
for (Object statement : md.getBody().statements()) {
// must be a method call expression
if (statement instanceof ExpressionStatement) {
ExpressionStatement es = (ExpressionStatement) statement;
Expression exp = es.getExpression();
boolean valid = isFromCamelRoute(exp);
if (valid) {
// first pass: collect a flat, preliminary list of nodes under "route"
parseExpression(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, exp, route);
}
}
}
}
}
List<CamelNodeDetails> answer = new ArrayList<>();
if (route.getOutputs() == null || route.getOutputs().isEmpty()) {
// okay no routes found
return answer;
}
// now parse the route node and build the correct model/tree structure of the EIPs
// re-create factory as we rebuild the tree
nodeFactory = CamelNodeDetailsFactory.newInstance();
CamelNodeDetails parent = route.getOutputs().get(0);
for (int i = 0; i < route.getOutputs().size(); i++) {
CamelNodeDetails node = route.getOutputs().get(i);
String name = node.getName();
if ("from".equals(name)) {
// each "from" starts a new route tree
CamelNodeDetails from = nodeFactory.copyNode(null, "from", node);
from.setFileName(fullyQualifiedFileName);
answer.add(from);
parent = from;
} else if ("routeId".equals(name)) {
// should be set on the parent
parent.setRouteId(node.getRouteId());
} else if ("end".equals(name) || "endParent".equals(name) || "endRest".equals(name)
|| "endDoTry".equals(name) || "endHystrix".equals(name)) {
// parent should be grand parent
if (parent.getParent() != null) {
parent = parent.getParent();
}
} else if ("endChoice".equals(name)) {
// we are in a choice block so parent should be the first choice up the parent tree
while (!"from".equals(parent.getName()) && !"choice".equals(parent.getName())) {
if (parent.getParent() != null) {
parent = parent.getParent();
} else {
break;
}
}
} else if ("choice".equals(name)) {
// special for some EIPs
CamelNodeDetails output = nodeFactory.copyNode(parent, name, node);
parent.addOutput(output);
parent = output;
} else if ("when".equals(name) || "otherwise".equals(name)) {
// we are in a choice block so parent should be the first choice up the parent tree
while (!"from".equals(parent.getName()) && !"choice".equals(parent.getName())) {
if (parent.getParent() != null) {
parent = parent.getParent();
} else {
break;
}
}
} else {
boolean hasOutput = hasOutput(name);
if (hasOutput) {
// has output so add as new child node
CamelNodeDetails output = nodeFactory.copyNode(parent, name, node);
parent.addOutput(output);
parent = output;
} else {
// add straight to itself
CamelNodeDetails output = nodeFactory.copyNode(parent, name, node);
parent.addOutput(output);
}
}
}
return answer;
}
/**
 * Determines whether the expression starts a Camel route by walking down the
 * method-invocation chain to its root call and checking whether that root is
 * named "from" or "route".
 */
private boolean isFromCamelRoute(Expression exp) {
String rootMethodName = null;
// find out if this is from a Camel route (eg from, route etc.)
Expression sub = exp;
while (sub instanceof MethodInvocation) {
sub = ((MethodInvocation) sub).getExpression();
if (sub instanceof MethodInvocation) {
Expression parent = ((MethodInvocation) sub).getExpression();
// the last invocation in the chain has no target expression; stop here so
// "sub" keeps pointing at that root invocation instead of descending to null
if (parent == null) {
break;
}
}
}
if (sub instanceof MethodInvocation) {
rootMethodName = ((MethodInvocation) sub).getName().getIdentifier();
} else if (sub instanceof SimpleName) {
rootMethodName = ((SimpleName) sub).getIdentifier();
}
// a route starts either via from or route
return "from".equals(rootMethodName) || "route".equals(rootMethodName);
}
/**
 * Whether the named EIP model can have child outputs, according to the catalog's
 * JSON model schema.
 */
private boolean hasOutput(String name) {
  List<Map<String, String>> rows =
      JSonSchemaHelper.parseJsonSchema("model", camelCatalog.modelJSonSchema(name), false);
  return isModelOutput(rows);
}
/**
 * Looks for an "output" marker among the parsed schema rows; the first row carrying
 * the key decides the answer, defaulting to false when no row has it.
 */
private static boolean isModelOutput(List<Map<String, String>> rows) {
  return rows.stream()
      .filter(row -> row.containsKey("output"))
      .findFirst()
      .map(row -> "true".equals(row.get("output")))
      .orElse(false);
}
/**
 * Whether the named EIP model accepts input (i.e. is a consumer such as "from"),
 * according to the catalog's JSON model schema.
 */
private boolean hasInput(String name) {
  List<Map<String, String>> rows =
      JSonSchemaHelper.parseJsonSchema("model", camelCatalog.modelJSonSchema(name), false);
  return isModelInput(rows);
}
/**
 * Looks for an "input" marker among the parsed schema rows; the first row carrying
 * the key decides the answer, defaulting to false when no row has it.
 */
private static boolean isModelInput(List<Map<String, String>> rows) {
  return rows.stream()
      .filter(row -> row.containsKey("input"))
      .findFirst()
      .map(row -> "true".equals(row.get("input")))
      .orElse(false);
}
/**
 * Walks up the parent chain from the given node and returns the nearest ancestor
 * (including the node itself) whose name equals {@code parentName}, or null when
 * the chain is exhausted without a match.
 */
private static CamelNodeDetails grandParent(CamelNodeDetails node, String parentName) {
  CamelNodeDetails current = node;
  while (current != null && !parentName.equals(current.getName())) {
    current = current.getParent();
  }
  return current;
}
/**
 * Recursively walks a (possibly chained) method-invocation expression: each
 * invocation is parsed into the preliminary node list, then the walk descends
 * into the expression the method was invoked on.
 */
private void parseExpression(CamelNodeDetailsFactory nodeFactory, String fullyQualifiedFileName,
JavaClassSource clazz, MethodSource<JavaClassSource> configureMethod, Block block,
Expression exp, CamelNodeDetails node) {
if (exp == null) {
return;
}
if (exp instanceof MethodInvocation) {
MethodInvocation mi = (MethodInvocation) exp;
node = doParseCamelModels(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, mi, node);
// if the method was called on another method, then recursive
exp = mi.getExpression();
parseExpression(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, exp, node);
}
}
    /**
     * Creates a {@link CamelNodeDetails} child for the invoked DSL method when it is
     * a route start, an endXXX terminator, or a known Camel EIP model with input/output,
     * and attaches it as a preliminary output of the given node.
     *
     * @param mi   the method invocation from the Java DSL, e.g. <tt>to(...)</tt>
     * @param node the current parent node details
     * @return the (unchanged) parent node, whether or not a child was added
     */
    private CamelNodeDetails doParseCamelModels(CamelNodeDetailsFactory nodeFactory, String fullyQualifiedFileName,
                                                JavaClassSource clazz, MethodSource<JavaClassSource> configureMethod, Block block,
                                                MethodInvocation mi, CamelNodeDetails node) {
        String name = mi.getName().getIdentifier();
        // special for Java DSL having some endXXX
        boolean isEnd = "end".equals(name) || "endChoice".equals(name) || "endDoTry".equals(name) || "endHystrix".equals(name) || "endParent".equals(name) || "endRest".equals(name);
        boolean isRoute = "route".equals(name) || "from".equals(name) || "routeId".equals(name);
        // must be an eip model that has either input or output as we only want to track processors (also accept from)
        boolean isEip = camelCatalog.findModelNames().contains(name) && (hasInput(name) || hasOutput(name));
        // only include if its a known Camel model (dont include languages)
        if (isEnd || isRoute || isEip) {
            CamelNodeDetails newNode = nodeFactory.newNode(node, name);
            // include source code details
            // start line number is where the method name token begins in the source file
            int pos = mi.getName().getStartPosition();
            int line = findLineNumber(fullyQualifiedFileName, pos);
            if (line > -1) {
                newNode.setLineNumber("" + line);
            }
            // end line number is where the method name token ends (not the argument list)
            pos = mi.getName().getStartPosition() + mi.getName().getLength();
            line = findLineNumber(fullyQualifiedFileName, pos);
            if (line > -1) {
                newNode.setLineNumberEnd("" + line);
            }
            newNode.setFileName(fullyQualifiedFileName);
            newNode.setClassName(clazz.getQualifiedName());
            newNode.setMethodName(configureMethod.getName());
            if ("routeId".equals(name)) {
                // grab the route id
                List args = mi.arguments();
                if (args != null && args.size() > 0) {
                    // the first argument has the route id
                    Expression exp = (Expression) args.get(0);
                    String routeId = getLiteralValue(clazz, block, exp);
                    if (routeId != null) {
                        newNode.setRouteId(routeId);
                    }
                }
            }
            node.addPreliminaryOutput(newNode);
            return node;
        }
        return node;
    }
@SuppressWarnings("unchecked")
private static FieldSource<JavaClassSource> getField(JavaClassSource clazz, Block block, SimpleName ref) {
String fieldName = ref.getIdentifier();
if (fieldName != null) {
// find field in class
FieldSource field = clazz != null ? clazz.getField(fieldName) : null;
if (field == null) {
field = findFieldInBlock(clazz, block, fieldName);
}
return field;
}
return null;
}
@SuppressWarnings("unchecked")
private static FieldSource<JavaClassSource> findFieldInBlock(JavaClassSource clazz, Block block, String fieldName) {
for (Object statement : block.statements()) {
// try local statements first in the block
if (statement instanceof VariableDeclarationStatement) {
final Type type = ((VariableDeclarationStatement) statement).getType();
for (Object obj : ((VariableDeclarationStatement) statement).fragments()) {
if (obj instanceof VariableDeclarationFragment) {
VariableDeclarationFragment fragment = (VariableDeclarationFragment) obj;
SimpleName name = fragment.getName();
if (name != null && fieldName.equals(name.getIdentifier())) {
return new StatementFieldSource(clazz, fragment, type);
}
}
}
}
// okay the field may be burried inside an anonymous inner class as a field declaration
// outside the configure method, so lets go back to the parent and see what we can find
ASTNode node = block.getParent();
if (node instanceof MethodDeclaration) {
node = node.getParent();
}
if (node instanceof AnonymousClassDeclaration) {
List declarations = ((AnonymousClassDeclaration) node).bodyDeclarations();
for (Object dec : declarations) {
if (dec instanceof FieldDeclaration) {
FieldDeclaration fd = (FieldDeclaration) dec;
final Type type = fd.getType();
for (Object obj : fd.fragments()) {
if (obj instanceof VariableDeclarationFragment) {
VariableDeclarationFragment fragment = (VariableDeclarationFragment) obj;
SimpleName name = fragment.getName();
if (name != null && fieldName.equals(name.getIdentifier())) {
return new StatementFieldSource(clazz, fragment, type);
}
}
}
}
}
}
}
return null;
}
    /**
     * Best-effort evaluation of an AST expression into a literal String value.
     * Handles string/boolean/number literals, method invocations and qualified names
     * (rendered as <tt>{{name}}</tt> placeholders), fields (including fields annotated
     * with <tt>@EndpointInject</tt>/<tt>@Uri</tt> or typed as <tt>Endpoint</tt>),
     * and <tt>+</tt> infix expressions (numeric addition or string concatenation).
     * Returns <tt>null</tt> when the expression cannot be evaluated.
     *
     * @deprecated currently not in use
     */
    @Deprecated
    public static String getLiteralValue(JavaClassSource clazz, Block block, Expression expression) {
        // unwrap parenthesis
        if (expression instanceof ParenthesizedExpression) {
            expression = ((ParenthesizedExpression) expression).getExpression();
        }
        if (expression instanceof StringLiteral) {
            return ((StringLiteral) expression).getLiteralValue();
        } else if (expression instanceof BooleanLiteral) {
            return "" + ((BooleanLiteral) expression).booleanValue();
        } else if (expression instanceof NumberLiteral) {
            // return the raw token, e.g. "42" or "1.5"
            return ((NumberLiteral) expression).getToken();
        }
        // if it a method invocation then add a dummy value assuming the method invocation will return a valid response
        if (expression instanceof MethodInvocation) {
            String name = ((MethodInvocation) expression).getName().getIdentifier();
            return "{{" + name + "}}";
        }
        // if its a qualified name (usually a constant field in another class)
        // then add a dummy value as we cannot find the field value in other classes and maybe even outside the
        // source code we have access to
        if (expression instanceof QualifiedName) {
            QualifiedName qn = (QualifiedName) expression;
            String name = qn.getFullyQualifiedName();
            return "{{" + name + "}}";
        }
        if (expression instanceof SimpleName) {
            FieldSource<JavaClassSource> field = getField(clazz, block, (SimpleName) expression);
            if (field != null) {
                // is the field annotated with a Camel endpoint
                if (field.getAnnotations() != null) {
                    for (Annotation ann : field.getAnnotations()) {
                        boolean valid = "org.apache.camel.EndpointInject".equals(ann.getQualifiedName()) || "org.apache.camel.cdi.Uri".equals(ann.getQualifiedName());
                        if (valid) {
                            // the endpoint uri is either the single annotation value
                            // or the "uri" member of a normal annotation
                            Expression exp = (Expression) ann.getInternal();
                            if (exp instanceof SingleMemberAnnotation) {
                                exp = ((SingleMemberAnnotation) exp).getValue();
                            } else if (exp instanceof NormalAnnotation) {
                                List values = ((NormalAnnotation) exp).values();
                                for (Object value : values) {
                                    MemberValuePair pair = (MemberValuePair) value;
                                    if ("uri".equals(pair.getName().toString())) {
                                        exp = pair.getValue();
                                        break;
                                    }
                                }
                            }
                            if (exp != null) {
                                return getLiteralValue(clazz, block, exp);
                            }
                        }
                    }
                }
                // is the field an org.apache.camel.Endpoint type?
                if ("Endpoint".equals(field.getType().getSimpleName())) {
                    // then grab the uri from the first argument
                    VariableDeclarationFragment vdf = (VariableDeclarationFragment) field.getInternal();
                    expression = vdf.getInitializer();
                    if (expression instanceof MethodInvocation) {
                        MethodInvocation mi = (MethodInvocation) expression;
                        List args = mi.arguments();
                        if (args != null && args.size() > 0) {
                            // the first argument has the endpoint uri
                            expression = (Expression) args.get(0);
                            return getLiteralValue(clazz, block, expression);
                        }
                    }
                } else {
                    // no annotations so try its initializer
                    VariableDeclarationFragment vdf = (VariableDeclarationFragment) field.getInternal();
                    expression = vdf.getInitializer();
                    if (expression == null) {
                        // its a field which has no initializer, then add a dummy value assuming the field will be initialized at runtime
                        return "{{" + field.getName() + "}}";
                    } else {
                        return getLiteralValue(clazz, block, expression);
                    }
                }
            } else {
                // we could not find the field in this class/method, so its maybe from some other super class, so insert a dummy value
                final String fieldName = ((SimpleName) expression).getIdentifier();
                return "{{" + fieldName + "}}";
            }
        } else if (expression instanceof InfixExpression) {
            String answer = null;
            // is it a string that is concat together?
            InfixExpression ie = (InfixExpression) expression;
            if (InfixExpression.Operator.PLUS.equals(ie.getOperator())) {
                String val1 = getLiteralValue(clazz, block, ie.getLeftOperand());
                String val2 = getLiteralValue(clazz, block, ie.getRightOperand());
                // if numeric then we plus the values, otherwise we string concat
                boolean numeric = isNumericOperator(clazz, block, ie.getLeftOperand()) && isNumericOperator(clazz, block, ie.getRightOperand());
                if (numeric) {
                    Long num1 = val1 != null ? Long.valueOf(val1) : 0;
                    Long num2 = val2 != null ? Long.valueOf(val2) : 0;
                    answer = "" + (num1 + num2);
                } else {
                    answer = (val1 != null ? val1 : "") + (val2 != null ? val2 : "");
                }
                if (!answer.isEmpty()) {
                    // include extended when we concat on 2 or more lines
                    // (JDT folds a + b + c into left/right plus a list of extended operands)
                    List extended = ie.extendedOperands();
                    if (extended != null) {
                        for (Object ext : extended) {
                            String val3 = getLiteralValue(clazz, block, (Expression) ext);
                            if (numeric) {
                                Long num3 = val3 != null ? Long.valueOf(val3) : 0;
                                Long num = Long.valueOf(answer);
                                answer = "" + (num + num3);
                            } else {
                                answer += val3 != null ? val3 : "";
                            }
                        }
                    }
                }
            }
            return answer;
        }
        return null;
    }
private static boolean isNumericOperator(JavaClassSource clazz, Block block, Expression expression) {
if (expression instanceof NumberLiteral) {
return true;
} else if (expression instanceof SimpleName) {
FieldSource field = getField(clazz, block, (SimpleName) expression);
if (field != null) {
return field.getType().isType("int") || field.getType().isType("long")
|| field.getType().isType("Integer") || field.getType().isType("Long");
}
}
return false;
}
private static int findLineNumber(String fullyQualifiedFileName, int position) {
int lines = 0;
try {
int current = 0;
try (BufferedReader br = new BufferedReader(new FileReader(new File(fullyQualifiedFileName)))) {
String line;
while ((line = br.readLine()) != null) {
lines++;
current += line.length() + 1; // add 1 for line feed
if (current >= position) {
return lines;
}
}
}
} catch (Exception e) {
// ignore
return -1;
}
return lines;
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2013, Chris Johnson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.tikal.jenkins.plugins.multijob.test;
import hudson.model.Action;
import hudson.model.ParameterValue;
import hudson.model.TaskListener;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Cause.UserIdCause;
import hudson.model.CauseAction;
import hudson.model.FreeStyleProject;
import hudson.model.Hudson;
import hudson.model.ParameterDefinition;
import hudson.model.ParametersAction;
import hudson.model.ParametersDefinitionProperty;
import hudson.model.StringParameterDefinition;
import hudson.model.StringParameterValue;
import hudson.plugins.parameterizedtrigger.AbstractBuildParameters;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.jvnet.hudson.test.HudsonTestCase;
import com.tikal.jenkins.plugins.multijob.MultiJobBuild;
import com.tikal.jenkins.plugins.multijob.MultiJobProject;
import com.tikal.jenkins.plugins.multijob.PhaseJobsConfig;
import com.tikal.jenkins.plugins.multijob.PhaseJobsConfig.KillPhaseOnJobResultCondition;
/**
*
* @author Chris Johnson
*/
public class PhaseJobsConfigTest extends HudsonTestCase{
    // default parameter definitions configured on the triggered project
    private static final Map<String, String> DEFAULT_KEY_VALUES = new HashMap<String, String>() {{
        put("key1", "value1");
        put("key2", "value2");
        put("key3", "value3");
    }};
    // parameters of the current (triggering) build, disjoint from the defaults
    private static final Map<String, String> CURRENT_KEY_VALUES = new HashMap<String, String>() {{
        put("key4", "value4");
        put("key5", "value5");
        put("key6", "value6");
    }};
    // current-build parameters that overlap (and should override) the defaults
    private static final Map<String, String> OVERRIDES_KEY_VALUES = new HashMap<String, String>() {{
        put("key2", "value4");
        put("key3", "value5");
    }};
    // parameters supplied via build-parameter configs, highest precedence
    private static final Map<String, String> CONFIG_OVERRIDES_KEY_VALUES = new HashMap<String, String>() {{
        put("key3", "value9");
    }};
    @Test
    public void testNoParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(null);
        MultiJobBuild mjb =createTriggeringBuild(null);
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, null, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "" , true);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, true);
        // check single ParametersAction created
        assertEquals(0, actions.size());
    }
    @Test
    public void testDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(null);
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, null, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "",false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, true);
        // check single ParametersAction created
        assertEquals(1, actions.size());
        ParametersAction pa = getParametersAction(actions);
        checkParameterMatch(DEFAULT_KEY_VALUES, pa);
    }
    @Test
    /**
     * Test that both the default and current build parameters are combined
     */
    public void testCurrentDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(createParametersAction(CURRENT_KEY_VALUES));
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, null, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "" , false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, true);
        // check single ParametersAction created
        assertEquals(1, actions.size());
        ParametersAction pa = getParametersAction(actions);
        HashMap<String,String> combinedlist = new HashMap<String,String>(DEFAULT_KEY_VALUES);
        combinedlist.putAll(CURRENT_KEY_VALUES);
        checkParameterMatch(combinedlist, pa);
    }
    @Test
    /**
     * Test that the current build parameters override default ones and are combined
     */
    public void testCurrentOverridesDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(createParametersAction(OVERRIDES_KEY_VALUES));
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, null, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "",false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, true);
        // check single ParametersAction created
        assertEquals(1, actions.size());
        ParametersAction pa = getParametersAction(actions);
        HashMap<String,String> combinedlist = new HashMap<String,String>(DEFAULT_KEY_VALUES);
        combinedlist.putAll(OVERRIDES_KEY_VALUES);
        checkParameterMatch(combinedlist, pa);
    }
    @Test
    /**
     * Test that the current build parameters are ignored and use just the default ones
     */
    public void testCurrentIgnoredDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(createParametersAction(OVERRIDES_KEY_VALUES));
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, null, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "",false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, false);
        // check single ParametersAction created
        assertEquals(1, actions.size());
        ParametersAction pa = getParametersAction(actions);
        checkParameterMatch(DEFAULT_KEY_VALUES, pa);
    }
    @Test
    /**
     * Test that the current build parameters are ignored and use just the default ones
     */
    public void testConfigsDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(null);
        List<AbstractBuildParameters> configs = new ArrayList<AbstractBuildParameters>();
        configs.add(new TestCauseConfig());
        configs.add(new TestParametersConfig());
        configs.add(new TestParametersConfig(OVERRIDES_KEY_VALUES));
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, configs, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "",false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, true);
        // check 2 actions created
        assertEquals(2, actions.size());
        ParametersAction pa = getParametersAction(actions);
        //check that expected parameter is listed
        HashMap<String,String> combinedlist = new HashMap<String,String>(DEFAULT_KEY_VALUES);
        combinedlist.putAll(OVERRIDES_KEY_VALUES);
        checkParameterMatch(combinedlist, pa);
    }
    @Test
    /**
     * Test that the config overrides current overrides default values
     */
    public void testCurrentConfigsDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(createParametersAction(OVERRIDES_KEY_VALUES));
        List<AbstractBuildParameters> configs = new ArrayList<AbstractBuildParameters>();
        configs.add(new TestCauseConfig());
        configs.add(new TestParametersConfig());
        configs.add(new TestParametersConfig(CONFIG_OVERRIDES_KEY_VALUES));
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, configs, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "",false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, true);
        // check 2 actions created
        assertEquals(2, actions.size());
        ParametersAction pa = getParametersAction(actions);
        HashMap<String,String> combinedlist = new HashMap<String,String>(DEFAULT_KEY_VALUES);
        combinedlist.putAll(OVERRIDES_KEY_VALUES);
        combinedlist.putAll(CONFIG_OVERRIDES_KEY_VALUES);
        checkParameterMatch(combinedlist, pa);
    }
    @Test
    /**
     * Test that the config overrides default values ignoring current values
     */
    public void testNotCurrentConfigsDefaultParameters() throws Exception {
        AbstractProject projectB = createTriggeredProject(DEFAULT_KEY_VALUES);
        MultiJobBuild mjb = createTriggeringBuild(createParametersAction(OVERRIDES_KEY_VALUES));
        List<AbstractBuildParameters> configs = new ArrayList<AbstractBuildParameters>();
        configs.add(new TestCauseConfig());
        configs.add(new TestParametersConfig());
        configs.add(new TestParametersConfig(CONFIG_OVERRIDES_KEY_VALUES));
        PhaseJobsConfig pjc = new PhaseJobsConfig("dummy", "", true, configs, KillPhaseOnJobResultCondition.NEVER, false, false, "", 0, false, false, "",false);
        List<Action> actions = pjc.getActions(mjb, TaskListener.NULL, projectB, false);
        // check 2 actions created
        assertEquals(2, actions.size());
        ParametersAction pa = getParametersAction(actions);
        HashMap<String,String> combinedlist = new HashMap<String,String>(DEFAULT_KEY_VALUES);
        combinedlist.putAll(CONFIG_OVERRIDES_KEY_VALUES);
        checkParameterMatch(combinedlist, pa);
    }
    /**
     * Creates the triggering multijob build, optionally carrying the given
     * ParametersAction as its current build parameters.
     */
    private MultiJobBuild createTriggeringBuild(ParametersAction parametersAction) throws Exception {
        // set up the triggering build
        MultiJobProject projectA = new MultiJobProject(Hudson.getInstance(), "ssss");
        MultiJobBuild mjb = new MultiJobBuild(projectA);
        // add build ParametersAction
        if(parametersAction != null) {
            mjb.getActions().add(parametersAction);
        }
        return mjb;
    }
    /**
     * Creates the downstream project, optionally configured with string parameter
     * definitions whose defaults come from the given map.
     */
    private AbstractProject createTriggeredProject(Map<String,String> defaultParameters) throws Exception {
        // set up the project to be triggered
        FreeStyleProject projectB = createFreeStyleProject();
        if(defaultParameters != null) {
            List<ParameterDefinition> pds = new ArrayList<ParameterDefinition>();
            for(String name: defaultParameters.keySet()) {
                pds.add(new StringParameterDefinition(name, defaultParameters.get(name)));
            }
            ParametersDefinitionProperty pdp = new ParametersDefinitionProperty(pds);
            projectB.addProperty(pdp);
        }
        return projectB;
    }
    /**
     * Builds a ParametersAction of string parameters from the given map (empty action for null).
     */
    private ParametersAction createParametersAction(Map<String,String> items) {
        List<ParameterValue> params = new ArrayList<ParameterValue>();
        if(items != null) {
            for(String name: items.keySet()) {
                params.add(new StringParameterValue(name, items.get(name)));
            }
        }
        return new ParametersAction(params);
    }
    /**
     * Asserts that the action contains exactly the expected key/value pairs.
     */
    private void checkParameterMatch(Map<String, String> combinedlist, ParametersAction pa) {
        assertTrue(pa != null);
        assertEquals(combinedlist.size(), pa.getParameters().size());
        for(String key : combinedlist.keySet()) {
            assertEquals(((StringParameterValue)pa.getParameter(key)).value, combinedlist.get(key));
        }
    }
    /**
     * Returns the last ParametersAction found in the list, or null if there is none.
     */
    private ParametersAction getParametersAction(List<Action> actions) {
        ParametersAction pa =null;
        for (Action a :actions) {
            if(a instanceof ParametersAction) {
                pa = (ParametersAction)a;
            }
        }
        return pa;
    }
    /**
     * Config item returning a cause action
     */
    class TestCauseConfig extends AbstractBuildParameters {
        @Override
        public Action getAction(AbstractBuild<?, ?> build, TaskListener listener) throws IOException, InterruptedException {
            return new CauseAction(new UserIdCause());
        }
    }
    /**
     * Config item returning a ParametersAction
     */
    class TestParametersConfig extends AbstractBuildParameters {
        // parameters to expose; null means an empty ParametersAction
        private Map<String,String> items;
        public TestParametersConfig() {
            this.items = null;
        }
        public TestParametersConfig(Map<String,String> items) {
            this.items = items;
        }
        @Override
        public Action getAction(AbstractBuild<?, ?> build, TaskListener listener) throws IOException, InterruptedException {
            return createParametersAction(items);
        }
    }
}
| |
/**
*
*/
package bnw.abm.intg.latchpop;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Bhagya N. Wickramasinghe 19 May 2016
*/
public class Family {
private static long IDCounter = 0;
private static Map<String, Person> allMembersAlreadyInFamilies = new HashMap<>();
private List<Person> members;
private FamilyType type;
private String familyID;
public Family(FamilyType type) {
this.members = new ArrayList<Person>();
this.setType(type);
this.familyID = String.valueOf(IDCounter++);
}
public String getID() {
return this.familyID;
}
/**
* Number of members in the family
*
* @return the number of members
*/
public int size() {
return getMembers().size();
}
/**
* @return the members
*/
List<Person> getMembers() {
return members;
}
/**
* @param member
* the member to add
*/
void addMember(Person member) {
if (this.members.contains(member)) {
throw new Error("This person already a member of this family");
} else {
this.members.add(member);
Family.allMembersAlreadyInFamilies.put(member.getID(), member);
}
}
void addMembers(List<Person> members) {
if (this.members.stream().anyMatch(e -> members.contains(e))) {
throw new Error("At least one of the new members already exists in this family");
} else {
this.members.addAll(members);
}
}
/**
* @return the type
*/
FamilyType getType() {
return type;
}
/**
* @param type
* the type to set
*/
void setType(FamilyType type) {
if (this.type == null || this.type == FamilyType.BASIC || this.type == FamilyType.UNDEFINED | this.type == type) {
this.type = type;
} else {
throw new Error("Trying to overwrite " + this.type + " with " + type);
}
}
int numberOfChildren() {
int nofChildren = 0;
for (Person member : this.members) {
if (member.getType() == PersonType.Child) {
nofChildren++;
}
}
return nofChildren;
}
boolean validate() {
switch (this.type) {
case COUPLEFAMILYWITHCHILDREN:
return hasMarriedCouple() & hasChildren() & noLoneParents() & noGroupOrLonePersons();
case COUPLEONLY:
return (hasMarriedCouple() & !hasChildren() & noLoneParents() & noGroupOrLonePersons());
case LONEPARENT:
return (hasALoneParent() & hasChildren() & noneMarried() & noGroupOrLonePersons());
case OTHERFAMILY:
return onlyRelatives() & noGroupOrLonePersons();
case LONEPERSON:
return onlyALonePerson();
case GROUPHOUSEHOLD:
return onlyGroupHouseholds();
default:
throw new Error("An alien family: " + this.type);
}
}
private boolean hasALoneParent() {
return members.stream().filter(member -> member.getType() == PersonType.LoneParent).count() == 1;
}
private boolean noLoneParents() {
return members.stream().noneMatch(member -> member.getType() == PersonType.LoneParent);
}
private boolean hasChildren() {
return members.stream().anyMatch(member -> member.getType() == PersonType.Child);
}
private boolean noneMarried() {
return members.stream().noneMatch(member -> member.getType() == PersonType.Married);
}
private boolean hasMarriedCouple() {
return members.stream().filter(person -> person.getType() == PersonType.Married).count() == 2;
}
private boolean onlyRelatives() {
return members.stream().allMatch(person -> person.getType() == PersonType.Relative);
}
private boolean onlyGroupHouseholds() {
return members.stream().allMatch(person -> person.getType() == PersonType.GroupHousehold);
}
private boolean noGroupOrLonePersons() {
return members.stream().noneMatch(person -> person.getType() == PersonType.GroupHousehold | person.getType() == PersonType.LonePerson);
}
private boolean onlyALonePerson() {
return members.size() == 1 && members.stream().filter(person -> person.getType() == PersonType.LonePerson).count() == 1;
}
}
/**
 * Household family composition types. Each constant carries the minimum ("basic")
 * number of persons of each role that the type requires.
 */
enum FamilyType {
    COUPLEFAMILYWITHCHILDREN("Couple family with children", 2, 0, 1, 0, 0, 0),
    COUPLEONLY("Couple family with no children", 2, 0, 0, 0, 0, 0),
    LONEPERSON("Lone person", 0, 0, 0, 1, 0, 0),
    LONEPARENT("One parent family", 0, 1, 1, 0, 0, 0),
    OTHERFAMILY("Other family", 0, 0, 0, 0, 2, 0),
    GROUPHOUSEHOLD("Group household", 0, 0, 0, 0, 0, 2),
    BASIC("Married basic or Lone parent basic ", 0, 0, 0, 0, 0, 0),
    UNDEFINED("Undefined", 0, 0, 0, 0, 0, 0);

    // human readable label for this family type
    private final String label;
    // minimum counts per role in the basic family unit
    private final int married;
    private final int loneParents;
    private final int children;
    private final int lonePersons;
    private final int relatives;
    private final int groupHouseholdPersons;

    FamilyType(String label, int married, int loneParents, int children, int lonePersons, int relatives,
               int groupHouseholdPersons) {
        this.label = label;
        this.married = married;
        this.loneParents = loneParents;
        this.children = children;
        this.lonePersons = lonePersons;
        this.relatives = relatives;
        this.groupHouseholdPersons = groupHouseholdPersons;
    }

    /** @return the human readable description of this type */
    String description() {
        return label;
    }

    /** @return total number of persons in the basic family unit */
    int basicSize() {
        return married + loneParents + children + lonePersons + relatives + groupHouseholdPersons;
    }

    int basicChildren() {
        return children;
    }

    int basicGroupHouseholdPersons() {
        return groupHouseholdPersons;
    }

    int basicMarriedPersons() {
        return married;
    }

    int basicLonePersons() {
        return lonePersons;
    }

    int basicLoneParents() {
        return loneParents;
    }

    int basicRelatives() {
        return relatives;
    }
}
| |
//
// FPlayAndroid is distributed under the FreeBSD License
//
// Copyright (c) 2013-2014, Carlos Rafael Gimenes das Neves
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
//
// https://github.com/carlosrafaelgn/FPlayAndroid
//
package br.com.carlosrafaelgn.fplay;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.text.InputType;
import android.util.TypedValue;
import android.view.View;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.LinearLayout.LayoutParams;
import android.widget.RelativeLayout;
import android.widget.TextView;
import java.util.Formatter;
import br.com.carlosrafaelgn.fplay.list.FileList;
import br.com.carlosrafaelgn.fplay.list.FileSt;
import br.com.carlosrafaelgn.fplay.playback.Player;
import br.com.carlosrafaelgn.fplay.ui.BgButton;
import br.com.carlosrafaelgn.fplay.ui.BgListView;
import br.com.carlosrafaelgn.fplay.ui.FastAnimator;
import br.com.carlosrafaelgn.fplay.ui.FileView;
import br.com.carlosrafaelgn.fplay.ui.UI;
import br.com.carlosrafaelgn.fplay.ui.drawable.ColorDrawable;
import br.com.carlosrafaelgn.fplay.ui.drawable.TextIconDrawable;
public final class ActivityFileSelection extends ActivityBrowserView implements View.OnClickListener, DialogInterface.OnClickListener, BgListView.OnBgListViewKeyDownObserver {
	/**
	 * Callbacks used to notify the creator of this activity about the outcome
	 * of the file selection (select/add/play), identified by the activity id.
	 */
	public interface OnFileSelectionListener {
		void onFileSelected(int id, String path, String name);
		void onAddClicked(int id, String path, String name);
		void onPlayClicked(int id, String path, String name);
	}
	// immutable configuration fixed at construction time
	private final boolean save, hasButtons;
	private final String fileType, itemType;
	private final int id;
	private CharSequence title;
	private OnFileSelectionListener listener;
	// reusable formatter and its backing buffer (avoids reallocating per format call)
	private StringBuilder formatterSB;
	private Formatter formatter;
	private EditText txtSaveAsName;
	private BgListView list;
	private FileList fileList;
	// the currently checked file entry, if any — NOTE(review): presumably at most one; confirm
	private FileSt checkedFile;
	private BgButton btnGoBack, btnMenu, btnAdd, btnPlay;
	private RelativeLayout panelSecondary;
	private boolean loading, isCreatingLayout;
	private TextIconDrawable btnMenuIcon;
	private FastAnimator animator;
	private CharSequence msgEmptyList, msgLoading;
public ActivityFileSelection(CharSequence title, int id, boolean save, boolean hasButtons, String itemType, String fileType, OnFileSelectionListener listener) {
if (fileType.charAt(0) != FileSt.PRIVATE_FILETYPE_ID)
throw new IllegalArgumentException("fileType must start with " + FileSt.PRIVATE_FILETYPE_ID);
this.title = title;
this.id = id;
this.save = save;
this.hasButtons = (hasButtons && !save);
this.itemType = itemType;
this.fileType = fileType;
this.listener = listener;
this.formatterSB = new StringBuilder();
this.formatter = new Formatter(formatterSB);
}
	@Override
	public CharSequence getTitle() {
		// title shown in the activity header; fixed at construction time
		return title;
	}
@SuppressWarnings("StringEquality")
private void updateOverallLayout() {
UI.animationReset();
if (!save) {
RelativeLayout.LayoutParams rp;
final int count = ((fileList != null) ? fileList.getCount() : 0);
if (count != 0 && checkedFile != null) {
if (btnGoBack != null) {
btnGoBack.setNextFocusRightId(R.id.btnMenu);
UI.setNextFocusForwardId(btnGoBack, R.id.btnMenu);
}
if (btnMenu != null)
UI.animationAddViewToShow(btnMenu);
} else {
if (checkedFile != null) {
checkedFile.isChecked = false;
checkedFile = null;
}
if (btnGoBack != null) {
btnGoBack.setNextFocusRightId(R.id.list);
UI.setNextFocusForwardId(btnGoBack, R.id.list);
}
if (btnMenu != null)
UI.animationAddViewToHide(btnMenu);
}
if (hasButtons) {
if (checkedFile == null) {
if (panelSecondary != null && panelSecondary.getVisibility() != View.GONE) {
if (list != null) {
rp = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT);
rp.addRule(RelativeLayout.BELOW, R.id.panelControls);
rp.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM, RelativeLayout.TRUE);
list.setLayoutParams(rp);
UI.setNextFocusForwardId(list, R.id.btnGoBack);
}
UI.animationAddViewToHide(panelSecondary);
if (btnMenu != null)
btnMenu.setNextFocusUpId(R.id.list);
if (btnGoBack != null) {
btnGoBack.setNextFocusUpId(R.id.list);
btnGoBack.setNextFocusLeftId(R.id.list);
}
}
} else if (panelSecondary != null && panelSecondary.getVisibility() != View.VISIBLE) {
if (list != null) {
rp = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT);
rp.addRule(RelativeLayout.BELOW, R.id.panelControls);
rp.addRule(RelativeLayout.ABOVE, R.id.panelSecondary);
list.setLayoutParams(rp);
UI.setNextFocusForwardId(list, R.id.btnAdd);
}
UI.animationAddViewToShow(panelSecondary);
if (btnMenu != null)
btnMenu.setNextFocusUpId(R.id.btnPlay);
if (btnGoBack != null) {
btnGoBack.setNextFocusUpId(R.id.btnPlay);
btnGoBack.setNextFocusLeftId(R.id.btnPlay);
}
}
}
} else {
if (btnMenuIcon != null && btnMenu != null && btnMenuIcon.getIcon() != ((checkedFile != null) ? UI.ICON_DELETE : UI.ICON_SAVE)) {
final CharSequence txt;
if (checkedFile == null) {
txt = getText(R.string.msg_create_new);
btnMenuIcon.setIcon(UI.ICON_SAVE);
} else {
txt = getText(R.string.msg_delete_button);
btnMenuIcon.setIcon(UI.ICON_DELETE);
}
btnMenu.setText(txt);
btnMenu.setContentDescription(txt);
}
}
UI.animationCommit(isCreatingLayout, null);
}
private String format(int resId, String p1) {
formatterSB.delete(0, formatterSB.length());
formatter.format(getText(resId).toString(), p1);
return formatterSB.toString();
}
private String format(int resId, String p1, String p2) {
formatterSB.delete(0, formatterSB.length());
formatter.format(getText(resId).toString(), p1, p2);
return formatterSB.toString();
}
@Override
public void loadingProcessChanged(boolean started) {
if (UI.browserActivity != this)
return;
loading = started;
if (list != null) {
if (animator != null) {
if (started) {
list.setVisibility(View.INVISIBLE);
} else {
animator.end();
list.setVisibility(View.VISIBLE);
animator.start();
}
} else {
list.setCustomEmptyText(started ? msgLoading : msgEmptyList);
}
if (fileList != null) {
fileList.setObserver(started ? null : list);
final int count = fileList.getCount();
if (!started) {
if (UI.accessibilityManager != null && UI.accessibilityManager.isEnabled())
UI.announceAccessibilityText(count == 0 ? msgEmptyList : FileView.makeContextDescription(true, getHostActivity(), fileList.getItemT(0)));
if (count > 0 && !list.isInTouchMode()) {
fileList.setSelection(0, true);
list.centerItem(0);
}
}
}
}
//if (!started)
// updateOverallLayout();
}
@Override
public View createView() {
return new FileView(Player.getService(), null, true);
}
@Override
public void processItemCheckboxClick(int position) {
//see the comments at processItemButtonClick(), in ActivityBrowser2
if (list == null || fileList == null)
return;
if (!list.isInTouchMode() && fileList.getSelection() != position)
fileList.setSelection(position, true);
final FileSt file = fileList.getItemT(position);
if (file == null) //same as above
return;
if (checkedFile != file && checkedFile != null)
checkedFile.isChecked = false;
checkedFile = (file.isChecked ? file : null);
updateOverallLayout();
fileList.notifyCheckedChanged();
}
private void confirm(final String path, final String name, final int deleteIndex) {
UI.prepareDialogAndShow((new AlertDialog.Builder(getHostActivity()))
.setTitle(getText(R.string.oops))
.setView(UI.createDialogView(getHostActivity(), format(deleteIndex >= 0 ? R.string.msg_confirm_delete : R.string.msg_confirm_overwrite, itemType, name)))
.setPositiveButton(deleteIndex >= 0 ? R.string.delete : R.string.overwrite, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
if (deleteIndex >= 0) {
try {
getApplication().deleteFile(path);
final int p;
if (checkedFile != null && fileList != null && (p = fileList.indexOf(checkedFile)) >= 0) {
checkedFile.isChecked = false;
checkedFile = null;
if (fileList.getSelection() != p)
fileList.setSelection(p, true);
fileList.removeSelection();
if (list != null && list.isInTouchMode() && fileList.getSelection() >= 0)
fileList.setSelection(-1, true);
updateOverallLayout();
}
} catch (Throwable ex) {
ex.printStackTrace();
}
} else {
final OnFileSelectionListener listener = ActivityFileSelection.this.listener;
finish(0, null, false);
if (listener != null)
listener.onFileSelected(ActivityFileSelection.this.id, path, name);
}
}
})
.setNegativeButton(R.string.cancel, this)
.create());
}
@Override
public void processItemClick(int position) {
//see the comments at processItemClick(), in ActivityBrowser2
if (list == null || fileList == null)
return;
if (!UI.doubleClickMode || fileList.getSelection() == position) {
final FileSt file = fileList.getItemT(position);
if (save) {
confirm(file.path, file.name, -1);
return;
}
final OnFileSelectionListener listener = this.listener;
finish(0, list.getViewForPosition(position), true);
if (listener != null)
listener.onFileSelected(id, file.path, file.name);
} else {
fileList.setSelection(position, true);
}
}
@Override
public void processItemLongClick(int position) {
}
@Override
public boolean onBgListViewKeyDown(BgListView list, int keyCode) {
final int p;
switch (keyCode) {
case UI.KEY_LEFT:
if (btnMenu != null && btnGoBack != null)
((btnMenu.getVisibility() == View.VISIBLE) ? btnMenu : btnGoBack).requestFocus();
return true;
case UI.KEY_RIGHT:
if (btnAdd != null && btnGoBack != null && panelSecondary != null)
((panelSecondary.getVisibility() == View.VISIBLE) ? btnAdd : btnGoBack).requestFocus();
return true;
case UI.KEY_ENTER:
if (fileList != null && (p = fileList.getSelection()) >= 0)
processItemClick(p);
return true;
case UI.KEY_EXTRA:
if (fileList != null && (p = fileList.getSelection()) >= 0) {
final FileSt file = fileList.getItemT(p);
file.isChecked = !file.isChecked;
processItemCheckboxClick(p);
}
return true;
}
return false;
}
@Override
public void onClick(View view) {
if (view == btnGoBack) {
finish(0, view, true);
} if (view == btnMenu) {
if (loading)
return;
if (save && checkedFile == null) {
final Context ctx = getHostActivity();
final LinearLayout l = (LinearLayout)UI.createDialogView(ctx, null);
TextView lbl = new TextView(ctx);
lbl.setText(format(R.string.msg_enter_name, itemType));
lbl.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
l.addView(lbl);
txtSaveAsName = new EditText(ctx);
txtSaveAsName.setContentDescription(lbl.getText());
txtSaveAsName.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
txtSaveAsName.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_CAP_SENTENCES | InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS);
txtSaveAsName.setSingleLine();
final LayoutParams p = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
p.topMargin = UI.dialogMargin;
txtSaveAsName.setLayoutParams(p);
if (fileList != null && fileList.getSelection() >= 0)
txtSaveAsName.setText(fileList.getItemT(fileList.getSelection()).name);
l.addView(txtSaveAsName);
UI.prepareDialogAndShow((new AlertDialog.Builder(ctx))
.setTitle(format(R.string.msg_create_new_title, itemType))
.setView(l)
.setPositiveButton(R.string.create, this)
.setNegativeButton(R.string.cancel, this)
.create());
} else {
if (fileList != null && checkedFile != null) {
final int s = fileList.indexOf(checkedFile);
if (s >= 0)
confirm(checkedFile.path, checkedFile.name, s);
}
}
} else if (view == btnAdd) {
if (hasButtons && checkedFile != null) {
if (listener != null)
listener.onAddClicked(id, checkedFile.path, checkedFile.name);
checkedFile.isChecked = false;
checkedFile = null;
if (fileList != null)
fileList.notifyCheckedChanged();
updateOverallLayout();
}
} else if (view == btnPlay) {
if (hasButtons && checkedFile != null) {
if (listener != null)
listener.onPlayClicked(id, checkedFile.path, checkedFile.name);
if (Player.goBackWhenPlayingFolders) {
finish(0, (list == null || fileList == null) ? null : list.getViewForPosition(fileList.indexOf(checkedFile)), true);
} else {
checkedFile.isChecked = false;
checkedFile = null;
if (fileList != null)
fileList.notifyCheckedChanged();
updateOverallLayout();
}
}
}
}
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == AlertDialog.BUTTON_POSITIVE) {
String n = txtSaveAsName.getText().toString().trim();
if (!FileSt.isValidPrivateFileName(n))
return;
if (n.length() > 64)
n = n.substring(0, 64);
for (int i = fileList.getCount() - 1; i >= 0; i--) {
if (fileList.getItemT(i).name.equals(n)) {
confirm(n + fileType, n, -1);
txtSaveAsName = null;
return;
}
}
final OnFileSelectionListener listener = this.listener;
finish(0, null, false);
if (listener != null)
listener.onFileSelected(ActivityFileSelection.this.id, n + fileType, n);
}
txtSaveAsName = null;
}
@Override
protected void onCreate() {
UI.browserActivity = this;
fileList = new FileList();
}
@SuppressWarnings("deprecation")
@Override
protected void onCreateLayout(boolean firstCreation) {
setContentView(R.layout.activity_file_selection);
btnGoBack = (BgButton)findViewById(R.id.btnGoBack);
btnGoBack.setOnClickListener(this);
btnGoBack.setIcon(UI.ICON_GOBACK);
btnMenu = (BgButton)findViewById(R.id.btnMenu);
btnMenu.setOnClickListener(this);
msgEmptyList = getText(R.string.empty_list);
msgLoading = getText(R.string.loading);
list = (BgListView)findViewById(R.id.list);
list.setScrollBarType((UI.browserScrollBarType == BgListView.SCROLLBAR_INDEXED) ? BgListView.SCROLLBAR_LARGE : UI.browserScrollBarType);
list.setOnKeyDownObserver(this);
if (UI.animationEnabled) {
list.setCustomEmptyText(msgEmptyList);
((View)list.getParent()).setBackgroundDrawable(new ColorDrawable(UI.color_list));
animator = new FastAnimator(list, false, null, 0);
final TextView lblLoading = (TextView)findViewById(R.id.lblLoading);
lblLoading.setTextColor(UI.color_text_disabled);
UI.largeText(lblLoading);
lblLoading.setVisibility(View.VISIBLE);
}
fileList.setObserver(list);
panelSecondary = (RelativeLayout)findViewById(R.id.panelSecondary);
if (save) {
final CharSequence txt = getText(R.string.msg_create_new);
btnMenu.setText(txt);
btnMenu.setContentDescription(txt);
btnMenu.setDefaultHeight();
btnMenu.setCompoundDrawables((btnMenuIcon = new TextIconDrawable(UI.ICON_SAVE, UI.color_text, UI.defaultControlContentsSize)), null, null, null);
} else {
final CharSequence txt = getText(R.string.msg_delete_button);
btnMenu.setText(txt);
btnMenu.setContentDescription(txt);
btnMenu.setDefaultHeight();
btnMenu.setCompoundDrawables((btnMenuIcon = new TextIconDrawable(UI.ICON_DELETE, UI.color_text, UI.defaultControlContentsSize)), null, null, null);
btnAdd = (BgButton)findViewById(R.id.btnAdd);
btnAdd.setTextColor(UI.colorState_text_reactive);
btnAdd.setOnClickListener(this);
btnAdd.setIcon(UI.ICON_ADD);
RelativeLayout.LayoutParams rp;
final TextView sep2 = (TextView)findViewById(R.id.sep2);
rp = new RelativeLayout.LayoutParams(UI.strokeSize, UI.defaultControlContentsSize);
rp.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE);
rp.addRule(RelativeLayout.LEFT_OF, R.id.btnPlay);
rp.leftMargin = UI.controlMargin;
rp.rightMargin = UI.controlMargin;
sep2.setLayoutParams(rp);
sep2.setBackgroundDrawable(new ColorDrawable(UI.color_highlight));
btnPlay = (BgButton)findViewById(R.id.btnPlay);
btnPlay.setTextColor(UI.colorState_text_reactive);
btnPlay.setOnClickListener(this);
btnPlay.setIcon(UI.ICON_PLAY);
if (hasButtons) {
UI.prepareControlContainer(panelSecondary, true, false);
rp = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, UI.thickDividerSize + UI.defaultControlSize + (UI.extraSpacing ? (UI.controlMargin << 1) : 0));
rp.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM, RelativeLayout.TRUE);
panelSecondary.setLayoutParams(rp);
}
}
if (UI.isLargeScreen)
UI.prepareViewPaddingForLargeScreen(list, 0, 0);
UI.prepareControlContainer(findViewById(R.id.panelControls), false, true);
fileList.setPrivateFileType(fileType, list.isInTouchMode());
isCreatingLayout = true;
updateOverallLayout();
isCreatingLayout = false;
}
@Override
protected void onPause() {
fileList.setObserver(null);
}
@Override
protected void onResume() {
UI.browserActivity = this;
fileList.setObserver(loading ? null : list);
}
@Override
protected void onOrientationChanged() {
if (list != null && UI.isLargeScreen)
UI.prepareViewPaddingForLargeScreen(list, 0, 0);
}
@Override
protected void onCleanupLayout() {
UI.animationReset();
if (animator != null) {
animator.release();
animator = null;
}
checkedFile = null;
btnGoBack = null;
btnMenu = null;
btnAdd = null;
btnPlay = null;
list = null;
panelSecondary = null;
btnMenuIcon = null;
msgEmptyList = null;
msgLoading = null;
}
@Override
protected void onDestroy() {
UI.browserActivity = null;
fileList.cancel();
fileList = null;
title = null;
listener = null;
formatterSB = null;
formatter = null;
}
}
| |
/*
* Copyright 2000-2005 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.javascript.search;
import com.intellij.lang.javascript.psi.JSClass;
import com.intellij.lang.javascript.psi.JSReferenceList;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.stubs.StubIndex;
import com.intellij.psi.stubs.StubIndexKey;
import com.intellij.util.*;
import consulo.javascript.lang.psi.stubs.JavaScriptIndexKeys;
import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
/**
 * Query executor that finds JavaScript/ActionScript classes extending a given
 * class (or implementing a given interface). Candidates come from the stub
 * index plus any registered {@link JSClassInheritorsProvider} extensions.
 */
public abstract class JSClassSearch implements QueryExecutor<JSClass, JSClassSearch.SearchParameters>
{
	/** Immutable parameter object: target class, deep-search flag and search scope. */
	public static class SearchParameters
	{
		private final JSClass myClass;
		private final boolean myCheckDeepInheritance;
		private final GlobalSearchScope myScope;

		public SearchParameters(final JSClass anClass, final boolean checkDeepInheritance, GlobalSearchScope scope)
		{
			myClass = anClass;
			myCheckDeepInheritance = checkDeepInheritance;
			myScope = scope;
		}

		public JSClass getTargetClass()
		{
			return myClass;
		}

		public boolean isCheckDeepInheritance()
		{
			return myCheckDeepInheritance;
		}

		public GlobalSearchScope getScope()
		{
			return myScope;
		}
	}

	/**
	 * Creates a query for classes extending {@code superClass}.
	 * @param checkDeepInheritance also walk indirect inheritors
	 */
	public static Query<JSClass> searchClassInheritors(final JSClass superClass, final boolean checkDeepInheritance)
	{
		final SearchParameters parameters = new SearchParameters(superClass, checkDeepInheritance, getUseScope(superClass));
		return CLASS_INHERITORS_QUERY_FACTORY.createUniqueResultsQuery(parameters);
	}

	// NOTE(review): unchecked cast — assumes a class's use scope is always a
	// GlobalSearchScope; confirm this holds for local/anonymous classes.
	private static GlobalSearchScope getUseScope(JSClass superClass)
	{
		SearchScope searchScope = superClass.getUseScope();
		return (GlobalSearchScope) searchScope;
	}

	/**
	 * Creates a query for classes implementing the interface {@code superClass}.
	 * @param checkDeepInheritance also walk indirect implementors
	 */
	public static Query<JSClass> searchInterfaceImplementations(final JSClass superClass, final boolean checkDeepInheritance)
	{
		final SearchParameters parameters = new SearchParameters(superClass, checkDeepInheritance, getUseScope(superClass));
		return INTERFACE_IMPLEMENTATIONS_QUERY_FACTORY.createUniqueResultsQuery(parameters);
	}

	// implementation
	private static final QueryFactory<JSClass, SearchParameters> INTERFACE_IMPLEMENTATIONS_QUERY_FACTORY = new QueryFactory<JSClass,
			SearchParameters>();
	private static final QueryFactory<JSClass, SearchParameters> CLASS_INHERITORS_QUERY_FACTORY = new QueryFactory<JSClass, SearchParameters>();
	// The class-inheritors executor, kept so the interface-implementations
	// executor can delegate to it for subclasses of found implementors.
	private static final JSClassSearch OUR_CLASS_SEARCH_EXECUTOR;

	static
	{
		INTERFACE_IMPLEMENTATIONS_QUERY_FACTORY.registerExecutor(new JSClassSearch()
		{
			@Override
			protected StubIndexKey<String, JSReferenceList> getIndexKey()
			{
				return JavaScriptIndexKeys.IMPLEMENTED_INDEX;
			}

			@Override
			protected JSClass[] getSupers(final JSClass candidate)
			{
				return candidate.getImplementedInterfaces();
			}

			/**
			 * Finds direct implementors first, then (for deep searches, or to
			 * cover subclasses of implementors) chains into the class-inheritors
			 * search, sharing one visited set to avoid revisiting classes.
			 */
			@Override
			public boolean execute(@Nonnull final SearchParameters queryParameters, @Nonnull Processor<? super JSClass> consumer)
			{
				final Set<JSClass> visited = new HashSet<JSClass>(); // no abstract classes in ActionScript !
				if(queryParameters.isCheckDeepInheritance())
				{
					// Wrap the consumer so every accepted implementor also has its
					// subclasses processed (an implementor's subclass implements too).
					final Processor<? super JSClass> consumerCopy = consumer;
					consumer = new Processor<JSClass>()
					{
						@Override
						public boolean process(JSClass jsClass)
						{
							return consumerCopy.process(jsClass) && OUR_CLASS_SEARCH_EXECUTOR.processDirectInheritors(jsClass, this, false, visited,
									queryParameters.getScope());
						}
					};
				}
				final Processor<? super JSClass> consumerToUse = consumer;
				final boolean b = processDirectInheritors(queryParameters.getTargetClass(), consumerToUse, queryParameters.isCheckDeepInheritance(), visited,
						queryParameters.getScope());
				if(b)
				{
					// Subclasses of the target interface's sub-interfaces/classes may
					// also carry implementors; scan them too.
					return searchClassInheritors(queryParameters.getTargetClass(), queryParameters.isCheckDeepInheritance()).forEach(new Processor<JSClass>()
					{
						@Override
						public boolean process(final JSClass jsClass)
						{
							return processDirectInheritors(jsClass, consumerToUse, queryParameters.isCheckDeepInheritance(), visited, queryParameters.getScope());
						}
					});
				}
				return b;
			}

			@Override
			protected Collection<JSClass> getInheritors(JSClassInheritorsProvider provider, String parentName, Project project, GlobalSearchScope scope)
			{
				return provider.getImplementingClasses(parentName, project, scope);
			}
		});
		CLASS_INHERITORS_QUERY_FACTORY.registerExecutor(OUR_CLASS_SEARCH_EXECUTOR = new JSClassSearch()
		{
			@Override
			protected StubIndexKey<String, JSReferenceList> getIndexKey()
			{
				return JavaScriptIndexKeys.EXTENDS_INDEX;
			}

			@Override
			protected JSClass[] getSupers(final JSClass candidate)
			{
				return candidate.getSuperClasses();
			}

			@Override
			protected Collection<JSClass> getInheritors(JSClassInheritorsProvider provider, String parentName, Project project, GlobalSearchScope scope)
			{
				return provider.getExtendingClasses(parentName, project, scope);
			}
		});
	}

	@Override
	public boolean execute(@Nonnull final SearchParameters queryParameters, @Nonnull final Processor<? super JSClass> consumer)
	{
		return processDirectInheritors(queryParameters.getTargetClass(), consumer, queryParameters.isCheckDeepInheritance(), null,
				queryParameters.getScope());
	}

	/**
	 * Feeds every inheritor of {@code superClass} to {@code consumer}.
	 *
	 * @param checkDeep recurse into inheritors of inheritors
	 * @param processed classes already visited (may be null on the first call);
	 *                  guards against cycles and duplicates
	 * @return false as soon as the consumer rejects a class, true otherwise
	 */
	protected boolean processDirectInheritors(final JSClass superClass, final Processor<? super JSClass> consumer, final boolean checkDeep, Set<JSClass> processed, final GlobalSearchScope scope)
	{
		if(processed != null)
		{
			if(processed.contains(superClass))
			{
				return true;
			}
		}
		else
		{
			processed = new HashSet<JSClass>();
		}
		processed.add(superClass);
		Project project = superClass.getProject();
		final String name = superClass.getName();
		if(name == null)
		{
			// Anonymous/unnamed class: nothing can reference it by name in the index.
			return true;
		}
		final Set<JSClass> temp = processed;
		// Verifies that a candidate really lists superClass among its supers
		// (the stub index is keyed by short name only), then recurses if asked.
		final Processor<JSClass> processor = new Processor<JSClass>()
		{
			@Override
			public boolean process(JSClass candidate)
			{
				final JSClass[] classes = getSupers(candidate);
				if(classes != null)
				{
					for(JSClass superClassCandidate : classes)
					{
						if(superClassCandidate.isEquivalentTo(superClass))
						{
							if(!consumer.process(candidate))
							{
								return false;
							}
							if(checkDeep && !processDirectInheritors(candidate, consumer, checkDeep, temp, scope))
							{
								return false;
							}
						}
					}
				}
				return true;
			}
		};
		// Candidates from the stub index (extends/implements reference lists).
		CommonProcessors.CollectProcessor<JSReferenceList> collectProcessor = new CommonProcessors.CollectProcessor<>();
		StubIndex.getInstance().processElements(getIndexKey(), name, project, scope, JSReferenceList.class, collectProcessor);
		for(JSReferenceList referenceList : collectProcessor.getResults())
		{
			JSClass parent = (JSClass) referenceList.getParent();
			if(!processor.process(parent))
			{
				return false;
			}
		}
		// Candidates contributed by extension-point providers.
		for(JSClassInheritorsProvider provider : Extensions.getExtensions(JSClassInheritorsProvider.EP_NAME))
		{
			Collection<JSClass> inheritors = getInheritors(provider, name, project, scope);
			for(JSClass inheritor : inheritors)
			{
				if(!processor.process(inheritor))
				{
					return false;
				}
			}
		}
		return true;
	}

	/** Asks the given provider for inheritors of {@code parentName} (extends vs. implements per subclass). */
	protected abstract Collection<JSClass> getInheritors(JSClassInheritorsProvider provider, String parentName, Project project,
			GlobalSearchScope scope);

	/** The stub-index key to query (EXTENDS_INDEX or IMPLEMENTED_INDEX). */
	protected abstract StubIndexKey<String, JSReferenceList> getIndexKey();

	/** The candidate's direct super classes or implemented interfaces. */
	protected abstract JSClass[] getSupers(final JSClass candidate);
}
| |
/*
* Copyright (c) 2015 ketao1989.github.io. All Rights Reserved.
*/
package io.github.ketao1989.kafka;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.ErrorMapping;
import kafka.common.TopicAndPartition;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.message.MessageAndOffset;
/**
 * Demo of the low-level Kafka 0.8 {@code SimpleConsumer} API: locates the
 * partition leader, reads messages from a fixed topic/partition, and fails
 * over to a new leader when the broker goes away.
 */
public class SimpleExample {
	/** Entry point: reads up to 10000 messages from a hard-coded topic/partition. */
	public static void main(String args[]) {
		SimpleExample example = new SimpleExample();
		long maxReads = 10000;
		String topic = "Blanka-topic-dev";
		int partition = 5;
		List<String> seeds = new ArrayList<String>();
		seeds.add("10.10.10.99");
		int port = 9092;
		try {
			example.run(maxReads, topic, partition, seeds, port);
		} catch (Exception e) {
			System.out.println("Oops:" + e);
			e.printStackTrace();
		}
	}

	// Replica hosts of the last partition whose leader was looked up; used by
	// findNewLeader() as the seed list after a broker failure.
	// FIX: initialized once here — the redundant re-assignment in the
	// constructor was removed.
	private List<String> m_replicaBrokers = new ArrayList<String>();

	public SimpleExample() {
	}

	/**
	 * Reads up to {@code a_maxReads} messages from the given topic/partition,
	 * printing each payload as UTF-8 text, and handles leader failover.
	 *
	 * @throws Exception if no new leader can be found after a broker failure
	 */
	public void run(long a_maxReads, String a_topic, int a_partition, List<String> a_seedBrokers, int a_port) throws Exception {
		// Find the metadata about the topic and partition we are interested in.
		PartitionMetadata metadata = findLeader(a_seedBrokers, a_port, a_topic, a_partition);
		if (metadata == null) {
			System.out.println("Can't find metadata for Topic and Partition. Exiting");
			return;
		}
		if (metadata.leader() == null) {
			System.out.println("Can't find Leader for Topic and Partition. Exiting");
			return;
		}
		String leadBroker = metadata.leader().host();
		String clientName = "Client_" + a_topic + "_" + a_partition;
		SimpleConsumer consumer = new SimpleConsumer(leadBroker, a_port, 100000, 64 * 1024, clientName);
		long readOffset = getLastOffset(consumer, a_topic, a_partition, kafka.api.OffsetRequest.EarliestTime(), clientName);
		int numErrors = 0;
		while (a_maxReads > 0) {
			if (consumer == null) {
				// Re-connect after a leader change.
				consumer = new SimpleConsumer(leadBroker, a_port, 100000, 64 * 1024, clientName);
			}
			FetchRequest req = new FetchRequestBuilder()
					.clientId(clientName)
					.addFetch(a_topic, a_partition, readOffset, 100000) // Note: this fetchSize of 100000 might need to be increased if large batches are written to Kafka
					.build();
			FetchResponse fetchResponse = consumer.fetch(req);
			if (fetchResponse.hasError()) {
				numErrors++;
				// Something went wrong!
				short code = fetchResponse.errorCode(a_topic, a_partition);
				System.out.println("Error fetching data from the Broker:" + leadBroker + " Reason: " + code);
				if (numErrors > 5) break;
				if (code == ErrorMapping.OffsetOutOfRangeCode()) {
					// We asked for an invalid offset. For simple case ask for the last element to reset.
					readOffset = getLastOffset(consumer, a_topic, a_partition, kafka.api.OffsetRequest.LatestTime(), clientName);
					continue;
				}
				// Any other error: assume the leader moved and look for the new one.
				consumer.close();
				consumer = null;
				leadBroker = findNewLeader(leadBroker, a_topic, a_partition, a_port);
				continue;
			}
			numErrors = 0;
			long numRead = 0;
			for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(a_topic, a_partition)) {
				long currentOffset = messageAndOffset.offset();
				if (currentOffset < readOffset) {
					// Compressed message sets may replay offsets we already consumed.
					System.out.println("Found an old offset: " + currentOffset + " Expecting: " + readOffset);
					continue;
				}
				readOffset = messageAndOffset.nextOffset();
				ByteBuffer payload = messageAndOffset.message().payload();
				byte[] bytes = new byte[payload.limit()];
				payload.get(bytes);
				// FIX: decode with the Charset constant instead of a charset-name
				// lookup that declares UnsupportedEncodingException.
				System.out.println(currentOffset + ": " + new String(bytes, StandardCharsets.UTF_8));
				numRead++;
				a_maxReads--;
			}
			if (numRead == 0) {
				// Nothing new yet; back off briefly before polling again.
				try {
					Thread.sleep(1000);
				} catch (InterruptedException ie) {
					// FIX: restore the interrupt flag instead of swallowing it,
					// and stop consuming.
					Thread.currentThread().interrupt();
					break;
				}
			}
		}
		if (consumer != null) consumer.close();
	}

	/**
	 * Asks the broker for the offset before {@code whichTime}
	 * (EarliestTime/LatestTime).
	 *
	 * @return the offset, or 0 when the broker reports an error
	 */
	public static long getLastOffset(SimpleConsumer consumer, String topic, int partition,
			long whichTime, String clientName) {
		TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
		Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
		requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
		kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(
				requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName);
		OffsetResponse response = consumer.getOffsetsBefore(request);
		if (response.hasError()) {
			System.out.println("Error fetching data Offset Data the Broker. Reason: " + response.errorCode(topic, partition) );
			return 0;
		}
		long[] offsets = response.offsets(topic, partition);
		return offsets[0];
	}

	/**
	 * Polls the known replica brokers for the partition's new leader, retrying
	 * up to three times with a one-second pause.
	 *
	 * @throws Exception if no new leader appears within the retries
	 */
	private String findNewLeader(String a_oldLeader, String a_topic, int a_partition, int a_port) throws Exception {
		for (int i = 0; i < 3; i++) {
			boolean goToSleep = false;
			PartitionMetadata metadata = findLeader(m_replicaBrokers, a_port, a_topic, a_partition);
			if (metadata == null) {
				goToSleep = true;
			} else if (metadata.leader() == null) {
				goToSleep = true;
			} else if (a_oldLeader.equalsIgnoreCase(metadata.leader().host()) && i == 0) {
				// first time through if the leader hasn't changed give ZooKeeper a second to recover
				// second time, assume the broker did recover before failover, or it was a non-Broker issue
				goToSleep = true;
			} else {
				return metadata.leader().host();
			}
			if (goToSleep) {
				try {
					Thread.sleep(1000);
				} catch (InterruptedException ie) {
					// FIX: restore the interrupt flag instead of swallowing it.
					Thread.currentThread().interrupt();
				}
			}
		}
		System.out.println("Unable to find new leader after Broker failure. Exiting");
		throw new Exception("Unable to find new leader after Broker failure. Exiting");
	}

	/**
	 * Queries each seed broker for topic metadata until the requested
	 * partition is found; also records the partition's replica hosts into
	 * {@link #m_replicaBrokers} for later failover.
	 *
	 * @return the partition's metadata, or null when no seed broker knows it
	 */
	private PartitionMetadata findLeader(List<String> a_seedBrokers, int a_port, String a_topic, int a_partition) {
		PartitionMetadata returnMetaData = null;
		loop:
		for (String seed : a_seedBrokers) {
			SimpleConsumer consumer = null;
			try {
				consumer = new SimpleConsumer(seed, a_port, 100000, 64 * 1024, "leaderLookup");
				List<String> topics = Collections.singletonList(a_topic);
				TopicMetadataRequest req = new TopicMetadataRequest(topics);
				kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
				List<TopicMetadata> metaData = resp.topicsMetadata();
				for (TopicMetadata item : metaData) {
					for (PartitionMetadata part : item.partitionsMetadata()) {
						if (part.partitionId() == a_partition) {
							returnMetaData = part;
							break loop;
						}
					}
				}
			} catch (Exception e) {
				System.out.println("Error communicating with Broker [" + seed + "] to find Leader for [" + a_topic
						+ ", " + a_partition + "] Reason: " + e);
			} finally {
				if (consumer != null) consumer.close();
			}
		}
		if (returnMetaData != null) {
			m_replicaBrokers.clear();
			for (kafka.cluster.Broker replica : returnMetaData.replicas()) {
				m_replicaBrokers.add(replica.host());
			}
		}
		return returnMetaData;
	}
}
| |
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.project;
import static java.util.Objects.requireNonNull;
import azkaban.flow.Flow;
import azkaban.project.ProjectLogEvent.EventType;
import azkaban.project.validator.ValidationReport;
import azkaban.project.validator.ValidatorConfigs;
import azkaban.project.validator.XmlValidatorManager;
import azkaban.storage.StorageManager;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.User;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.log4j.Logger;
@Singleton
public class ProjectManager {
private static final Logger logger = Logger.getLogger(ProjectManager.class);
private final AzkabanProjectLoader azkabanProjectLoader;
private final ProjectLoader projectLoader;
private final Props props;
private final boolean creatorDefaultPermissions;
private final ConcurrentHashMap<Integer, Project> projectsById =
new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, Project> projectsByName =
new ConcurrentHashMap<>();
  @Inject
  public ProjectManager(final AzkabanProjectLoader azkabanProjectLoader,
      final ProjectLoader loader,
      final StorageManager storageManager,
      final Props props) {
    // NOTE(review): storageManager is injected but never read here — confirm
    // it is only needed for Guice wiring.
    this.projectLoader = requireNonNull(loader);
    this.props = requireNonNull(props);
    this.azkabanProjectLoader = requireNonNull(azkabanProjectLoader);
    this.creatorDefaultPermissions =
        props.getBoolean("creator.default.proxy", true);
    // The prop passed to XmlValidatorManager is used to initialize all the
    // validators.
    // Each validator will take certain key/value pairs from the prop to
    // initialize itself.
    final Props prop = new Props(props);
    prop.put(ValidatorConfigs.PROJECT_ARCHIVE_FILE_PATH, "initialize");
    // By instantiating an object of XmlValidatorManager, this will verify the
    // config files for the validators (constructor side effect; the instance
    // itself is discarded).
    new XmlValidatorManager(prop);
    // Populate the in-memory caches eagerly so lookups never hit the store.
    loadAllProjects();
    loadProjectWhiteList();
  }
  /**
   * Loads every active project from the store into the two in-memory caches,
   * then loads each project's flows. Two separate passes: all projects are
   * registered in the maps before any flow loading starts.
   */
  private void loadAllProjects() {
    final List<Project> projects;
    try {
      projects = this.projectLoader.fetchAllActiveProjects();
    } catch (final ProjectManagerException e) {
      throw new RuntimeException("Could not load projects from store.", e);
    }
    for (final Project proj : projects) {
      this.projectsByName.put(proj.getName(), proj);
      this.projectsById.put(proj.getId(), proj);
    }
    for (final Project proj : projects) {
      loadAllProjectFlows(proj);
    }
  }
private void loadAllProjectFlows(final Project project) {
try {
final List<Flow> flows = this.projectLoader.fetchAllProjectFlows(project);
final Map<String, Flow> flowMap = new HashMap<>();
for (final Flow flow : flows) {
flowMap.put(flow.getId(), flow);
}
project.setFlows(flowMap);
} catch (final ProjectManagerException e) {
throw new RuntimeException("Could not load projects flows from store.", e);
}
}
public List<String> getProjectNames() {
return new ArrayList<>(this.projectsByName.keySet());
}
  /** @return the server configuration this manager was constructed with */
  public Props getProps() {
    return this.props;
  }
public List<Project> getUserProjects(final User user) {
final ArrayList<Project> array = new ArrayList<>();
for (final Project project : this.projectsById.values()) {
final Permission perm = project.getUserPermission(user);
if (perm != null
&& (perm.isPermissionSet(Type.ADMIN) || perm
.isPermissionSet(Type.READ))) {
array.add(project);
}
}
return array;
}
public List<Project> getGroupProjects(final User user) {
final List<Project> array = new ArrayList<>();
for (final Project project : this.projectsById.values()) {
if (project.hasGroupPermission(user, Type.READ)) {
array.add(project);
}
}
return array;
}
public List<Project> getUserProjectsByRegex(final User user, final String regexPattern) {
final List<Project> array = new ArrayList<>();
final Pattern pattern;
try {
pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
} catch (final PatternSyntaxException e) {
logger.error("Bad regex pattern " + regexPattern);
return array;
}
for (final Project project : this.projectsById.values()) {
final Permission perm = project.getUserPermission(user);
if (perm != null
&& (perm.isPermissionSet(Type.ADMIN) || perm
.isPermissionSet(Type.READ))) {
if (pattern.matcher(project.getName()).find()) {
array.add(project);
}
}
}
return array;
}
  /** Returns a snapshot list of all active projects. */
  public List<Project> getProjects() {
    return new ArrayList<>(this.projectsById.values());
  }
public List<Project> getProjectsByRegex(final String regexPattern) {
final List<Project> allProjects = new ArrayList<>();
final Pattern pattern;
try {
pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
} catch (final PatternSyntaxException e) {
logger.error("Bad regex pattern " + regexPattern);
return allProjects;
}
for (final Project project : getProjects()) {
if (pattern.matcher(project.getName()).find()) {
allProjects.add(project);
}
}
return allProjects;
}
  /**
   * Checks whether a project is active, i.e. present in the in-memory cache,
   * using the project name. Inactive projects exist only in the store.
   */
  public Boolean isActiveProject(final String name) {
    return this.projectsByName.containsKey(name);
  }
  /**
   * Checks whether a project is active, i.e. present in the in-memory cache,
   * using the numeric project id. Inactive projects exist only in the store.
   */
  public Boolean isActiveProject(final int id) {
    return this.projectsById.containsKey(id);
  }
/**
* fetch active project from cache and inactive projects from db by project_name
*/
public Project getProject(final String name) {
Project fetchedProject = null;
if (isActiveProject(name)) {
fetchedProject = this.projectsByName.get(name);
} else {
try {
fetchedProject = this.projectLoader.fetchProjectByName(name);
} catch (final ProjectManagerException e) {
logger.error("Could not load project from store.", e);
}
}
return fetchedProject;
}
/**
* fetch active project from cache and inactive projects from db by project_id
*/
public Project getProject(final int id) {
Project fetchedProject = null;
if (isActiveProject(id)) {
fetchedProject = this.projectsById.get(id);
} else {
try {
fetchedProject = this.projectLoader.fetchProjectById(id);
} catch (final ProjectManagerException e) {
logger.error("Could not load project from store.", e);
}
}
return fetchedProject;
}
public Project createProject(final String projectName, final String description,
final User creator) throws ProjectManagerException {
if (projectName == null || projectName.trim().isEmpty()) {
throw new ProjectManagerException("Project name cannot be empty.");
} else if (description == null || description.trim().isEmpty()) {
throw new ProjectManagerException("Description cannot be empty.");
} else if (creator == null) {
throw new ProjectManagerException("Valid creator user must be set.");
} else if (!projectName.matches("[a-zA-Z][a-zA-Z_0-9|-]*")) {
throw new ProjectManagerException(
"Project names must start with a letter, followed by any number of letters, digits, '-' or '_'.");
}
if (this.projectsByName.containsKey(projectName)) {
throw new ProjectManagerException("Project already exists.");
}
logger.info("Trying to create " + projectName + " by user "
+ creator.getUserId());
final Project newProject =
this.projectLoader.createNewProject(projectName, description, creator);
this.projectsByName.put(newProject.getName(), newProject);
this.projectsById.put(newProject.getId(), newProject);
if (this.creatorDefaultPermissions) {
// Add permission to project
this.projectLoader.updatePermission(newProject, creator.getUserId(),
new Permission(Permission.Type.ADMIN), false);
// Add proxy user
newProject.addProxyUser(creator.getUserId());
try {
updateProjectSetting(newProject);
} catch (final ProjectManagerException e) {
e.printStackTrace();
throw e;
}
}
this.projectLoader.postEvent(newProject, EventType.CREATED, creator.getUserId(),
null);
return newProject;
}
/**
* Permanently delete all project files and properties data for all versions of a project and log
* event in project_events table
*/
public synchronized Project purgeProject(final Project project, final User deleter)
throws ProjectManagerException {
this.projectLoader.cleanOlderProjectVersion(project.getId(),
project.getVersion() + 1);
this.projectLoader
.postEvent(project, EventType.PURGE, deleter.getUserId(), String
.format("Purged versions before %d", project.getVersion() + 1));
return project;
}
public synchronized Project removeProject(final Project project, final User deleter)
throws ProjectManagerException {
this.projectLoader.removeProject(project, deleter.getUserId());
this.projectLoader.postEvent(project, EventType.DELETED, deleter.getUserId(),
null);
this.projectsByName.remove(project.getName());
this.projectsById.remove(project.getId());
return project;
}
public void updateProjectDescription(final Project project, final String description,
final User modifier) throws ProjectManagerException {
this.projectLoader.updateDescription(project, description, modifier.getUserId());
this.projectLoader.postEvent(project, EventType.DESCRIPTION,
modifier.getUserId(), "Description changed to " + description);
}
  /**
   * Fetches a page of event-log entries for a project.
   *
   * @param results maximum number of entries to return
   * @param skip number of entries to skip (pagination offset)
   */
  public List<ProjectLogEvent> getProjectEventLogs(final Project project,
      final int results, final int skip) throws ProjectManagerException {
    return this.projectLoader.getProjectEvents(project, results, skip);
  }
  /** Fetches a project property set identified by its source name from the store. */
  public Props getProperties(final Project project, final String source)
      throws ProjectManagerException {
    return this.projectLoader.fetchProjectProperty(project, source);
  }
  /**
   * Fetches the job-override property set for a job; overrides are stored under the
   * source name {@code <jobName>.jor}.
   */
  public Props getJobOverrideProperty(final Project project, final String jobName)
      throws ProjectManagerException {
    return this.projectLoader.fetchProjectProperty(project, jobName + ".jor");
  }
public void setJobOverrideProperty(final Project project, final Props prop, final String jobName,
final User modifier)
throws ProjectManagerException {
prop.setSource(jobName + ".jor");
final Props oldProps =
this.projectLoader.fetchProjectProperty(project, prop.getSource());
if (oldProps == null) {
this.projectLoader.uploadProjectProperty(project, prop);
} else {
this.projectLoader.updateProjectProperty(project, prop);
}
final String diffMessage = PropsUtils.getPropertyDiff(oldProps, prop);
this.projectLoader.postEvent(project, EventType.PROPERTY_OVERRIDE,
modifier.getUserId(), diffMessage);
return;
}
  /** Persists the project's current settings (permissions, proxy users, ...) to the store. */
  public void updateProjectSetting(final Project project)
      throws ProjectManagerException {
    this.projectLoader.updateProjectSettings(project);
  }
public void addProjectProxyUser(final Project project, final String proxyName,
final User modifier) throws ProjectManagerException {
logger.info("User " + modifier.getUserId() + " adding proxy user "
+ proxyName + " to project " + project.getName());
project.addProxyUser(proxyName);
this.projectLoader.postEvent(project, EventType.PROXY_USER,
modifier.getUserId(), "Proxy user " + proxyName
+ " is added to project.");
updateProjectSetting(project);
}
public void removeProjectProxyUser(final Project project, final String proxyName,
final User modifier) throws ProjectManagerException {
logger.info("User " + modifier.getUserId() + " removing proxy user "
+ proxyName + " from project " + project.getName());
project.removeProxyUser(proxyName);
this.projectLoader.postEvent(project, EventType.PROXY_USER,
modifier.getUserId(), "Proxy user " + proxyName
+ " has been removed form the project.");
updateProjectSetting(project);
}
public void updateProjectPermission(final Project project, final String name,
final Permission perm, final boolean group, final User modifier)
throws ProjectManagerException {
logger.info("User " + modifier.getUserId()
+ " updating permissions for project " + project.getName() + " for "
+ name + " " + perm.toString());
this.projectLoader.updatePermission(project, name, perm, group);
if (group) {
this.projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
modifier.getUserId(), "Permission for group " + name + " set to "
+ perm.toString());
} else {
this.projectLoader.postEvent(project, EventType.USER_PERMISSION,
modifier.getUserId(), "Permission for user " + name + " set to "
+ perm.toString());
}
}
public void removeProjectPermission(final Project project, final String name,
final boolean group, final User modifier) throws ProjectManagerException {
logger.info("User " + modifier.getUserId()
+ " removing permissions for project " + project.getName() + " for "
+ name);
this.projectLoader.removePermission(project, name, group);
if (group) {
this.projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
modifier.getUserId(), "Permission for group " + name + " removed.");
} else {
this.projectLoader.postEvent(project, EventType.USER_PERMISSION,
modifier.getUserId(), "Permission for user " + name + " removed.");
}
}
  /**
   * This method retrieves the uploaded project zip file from DB. A temporary file is created to
   * hold the content of the uploaded zip file. This temporary file is provided in the
   * ProjectFileHandler instance and the caller of this method should call method
   * {@link ProjectFileHandler#deleteLocalFile} to delete the temporary file.
   *
   * @param version - latest version is used if value is -1
   * @return ProjectFileHandler - null if can't find project zip file based on project name and
   * version
   */
  public ProjectFileHandler getProjectFileHandler(final Project project, final int version)
      throws ProjectManagerException {
    return this.azkabanProjectLoader.getProjectFile(project, version);
  }
  /**
   * Uploads a project archive, delegating validation and persistence to the
   * AzkabanProjectLoader.
   *
   * @return validation reports keyed by validator name
   */
  public Map<String, ValidationReport> uploadProject(final Project project,
      final File archive, final String fileType, final User uploader, final Props additionalProps)
      throws ProjectManagerException {
    return this.azkabanProjectLoader
        .uploadProject(project, archive, fileType, uploader, additionalProps);
  }
  /** Persists an updated flow definition for the flow's current version. */
  public void updateFlow(final Project project, final Flow flow)
      throws ProjectManagerException {
    this.projectLoader.updateFlow(project, flow.getVersion(), flow);
  }
  /** Records an arbitrary project event in the project_events table. */
  public void postProjectEvent(final Project project, final EventType type, final String user,
      final String message) {
    this.projectLoader.postEvent(project, type, user, message);
  }
public boolean loadProjectWhiteList() {
if (this.props.containsKey(ProjectWhitelist.XML_FILE_PARAM)) {
ProjectWhitelist.load(this.props);
return true;
}
return false;
}
}
| |
// Copyright 2014 Palantir Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.palantir.stash.stashbot.hooks;
import java.sql.SQLException;
import java.util.ArrayList;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import com.atlassian.stash.hook.HookResponse;
import com.atlassian.stash.repository.RefChange;
import com.atlassian.stash.repository.RefChangeType;
import com.atlassian.stash.repository.Repository;
import com.atlassian.stash.scm.git.GitCommandBuilderFactory;
import com.google.common.collect.ImmutableList;
import com.palantir.stash.stashbot.config.ConfigurationPersistenceService;
import com.palantir.stash.stashbot.jobtemplate.JobType;
import com.palantir.stash.stashbot.logger.PluginLoggerFactory;
import com.palantir.stash.stashbot.managers.JenkinsManager;
import com.palantir.stash.stashbot.mocks.MockGitCommandBuilderFactory;
import com.palantir.stash.stashbot.outputhandler.CommandOutputHandlerFactory;
import com.palantir.stash.stashbot.persistence.JenkinsServerConfiguration;
import com.palantir.stash.stashbot.persistence.RepositoryConfiguration;
/**
 * Unit tests for TriggerJenkinsBuildHook: verifies which Jenkins builds (verify /
 * publish) are triggered in response to ref changes, driven entirely by Mockito
 * mocks and a mocked git command factory.
 */
public class TriggerJenkinsBuildHookTest {
    // Fixed commit hashes / branch name used as test fixtures.
    private static final String HEAD = "38356e8abe0e97648dd1007278ecc02c3bf3d2cb";
    private static final String HEAD_BR = "master";
    private static final String FROM_HEAD = "cac9954e06013073c1bf9e17b2c1c919095817dc";
    private static final String HEAD_MINUS_ONE = "15e5e7272bec0e0c1093327b0e8e02deefa6d1e5";
    private static final int REPO_ID = 1;
    // Max verify chain length returned by the mocked server configuration.
    private static final Integer MVC = 10;
    @Mock
    private ConfigurationPersistenceService cpm;
    @Mock
    private JenkinsManager jenkinsManager;
    @Mock
    private RepositoryConfiguration rc;
    @Mock
    private JenkinsServerConfiguration jsc;
    // Object under test; constructed in setUp().
    private TriggerJenkinsBuildHook tjbh;
    @Mock
    private Repository repo;
    @Mock
    private HookResponse hr;
    @Mock
    private RefChange change;
    // Stuff from MockGitCommandFactory class
    MockGitCommandBuilderFactory mgc;
    private GitCommandBuilderFactory gcbf;
    private CommandOutputHandlerFactory cohf;
    private ArrayList<RefChange> changes;
    private final PluginLoggerFactory lf = new PluginLoggerFactory();
    /**
     * Builds the default scenario: CI enabled, verify regex matching master,
     * publish regex matching release branches, and a single UPDATE ref change
     * moving master from FROM_HEAD to HEAD with HEAD as the only new changeset.
     */
    @Before
    public void setUp() throws SQLException {
        MockitoAnnotations.initMocks(this);
        Mockito.when(repo.getId()).thenReturn(REPO_ID);
        Mockito.when(cpm.getRepositoryConfigurationForRepository(repo)).thenReturn(rc);
        Mockito.when(cpm.getJenkinsServerConfiguration(Mockito.anyString())).thenReturn(jsc);
        Mockito.when(cpm.getJobTypeStatusMapping(rc, JobType.VERIFY_COMMIT)).thenReturn(true);
        Mockito.when(cpm.getJobTypeStatusMapping(rc, JobType.PUBLISH)).thenReturn(true);
        Mockito.when(rc.getCiEnabled()).thenReturn(true);
        Mockito.when(rc.getVerifyBranchRegex()).thenReturn(".*master.*");
        Mockito.when(rc.getPublishBranchRegex()).thenReturn(".*release.*");
        Mockito.when(jsc.getMaxVerifyChain()).thenReturn(MVC);
        Mockito.when(change.getFromHash()).thenReturn(FROM_HEAD);
        Mockito.when(change.getToHash()).thenReturn(HEAD);
        Mockito.when(change.getRefId()).thenReturn(HEAD_BR);
        Mockito.when(change.getType()).thenReturn(RefChangeType.UPDATE);
        changes = new ArrayList<RefChange>();
        changes.add(change);
        // MGC stuff
        mgc = new MockGitCommandBuilderFactory();
        mgc.getChangesets().add(HEAD);
        mgc.getBranchMap().put(HEAD, ImmutableList.of(" otherbranch"));
        gcbf = mgc.getGitCommandBuilderFactory();
        cohf = new CommandOutputHandlerFactory();
        tjbh = new TriggerJenkinsBuildHook(cpm, jenkinsManager, gcbf, cohf, lf);
    }
    // A matching push on a verify branch triggers exactly one verify build for HEAD.
    @Test
    public void testTriggersBuildOnPush() {
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD, "");
    }
    // Disabling the verify job type suppresses the verify build.
    @Test
    public void testDoesntTriggerBuildOnPushWhenDisabled() {
        Mockito.when(cpm.getJobTypeStatusMapping(rc, JobType.VERIFY_COMMIT)).thenReturn(false);
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager, Mockito.never()).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD, "");
    }
    // Disabling CI on the repository suppresses all builds.
    @Test
    public void testNoBuildOnDisabled() {
        Mockito.when(rc.getCiEnabled()).thenReturn(false);
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager, Mockito.never()).triggerBuild(Mockito.any(Repository.class),
            Mockito.any(JobType.class), Mockito.anyString(), Mockito.anyString());
    }
    // A ref deletion never triggers a build.
    @Test
    public void testNoBuildOnDelete() {
        Mockito.when(change.getType()).thenReturn(RefChangeType.DELETE);
        mgc.getChangesets().clear(); // empty changesets means no new changes
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager, Mockito.never()).triggerBuild(Mockito.any(Repository.class),
            Mockito.any(JobType.class), Mockito.anyString(), Mockito.anyString());
    }
    // A branch matching neither regex triggers nothing.
    @Test
    public void testNoBuildOnRegexNotMatch() {
        Mockito.when(rc.getVerifyBranchRegex()).thenReturn("blahblahnomatch");
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager, Mockito.never()).triggerBuild(Mockito.eq(repo), Mockito.any(JobType.class),
            Mockito.eq(HEAD), Mockito.eq(HEAD_BR));
    }
    // A branch matching only the publish regex triggers a publish build.
    @Test
    public void testPublishingBuild() {
        Mockito.when(rc.getVerifyBranchRegex()).thenReturn("blahblahnomatch");
        Mockito.when(rc.getPublishBranchRegex()).thenReturn("master");
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.PUBLISH, HEAD, HEAD_BR);
    }
    // Disabling the publish job type suppresses the publish build.
    @Test
    public void testPublishingBuildWhenDisabled() {
        Mockito.when(cpm.getJobTypeStatusMapping(rc, JobType.PUBLISH)).thenReturn(false);
        Mockito.when(rc.getVerifyBranchRegex()).thenReturn("blahblahnomatch");
        Mockito.when(rc.getPublishBranchRegex()).thenReturn("master");
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager, Mockito.never()).triggerBuild(repo, JobType.PUBLISH, HEAD, HEAD_BR);
    }
    // Every new changeset in the push gets its own verify build.
    @Test
    public void testVerifyBuildsMultipleChanges() {
        mgc.getChangesets().clear();
        mgc.getChangesets().add(HEAD_MINUS_ONE);
        mgc.getChangesets().add(HEAD);
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD_MINUS_ONE, "");
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD, "");
    }
    /* XXX TODO: this test needs to be rewritten to ensure git is invoked in the correct way,
     * because this codepath happens in the revwalk git does rather than "in java land" now.
     */
    @Ignore
    @Test
    public void testVerifyIgnoresChangeAlreadyInPreviousBranch() {
        // the revlist call here is -- HEAD ^HEAD_MINUS_ONE
        // so we'll accomplish that by adding both to the changesets list but blacklisting HEAD_MINUS_ONE so the only new change is HEAD.
        mgc.getChangesets().clear();
        mgc.getChangesets().add(HEAD_MINUS_ONE);
        mgc.getChangesets().add(HEAD);
        mgc.getBlacklistedChangesets().add(HEAD_MINUS_ONE);
        // HEAD_MINUS_ONE is already in branch master2, so don't verify it
        mgc.getBranchMap().put(HEAD_MINUS_ONE, ImmutableList.of(" master2"));
        tjbh.onReceive(repo, changes, hr);
        Mockito.verify(jenkinsManager, Mockito.never()).triggerBuild(Mockito.eq(repo), Mockito.any(JobType.class),
            Mockito.eq(HEAD_MINUS_ONE), Mockito.eq(""));
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD, "");
    }
    // A newly created branch (ADD with the zero from-hash) verifies all its changesets.
    @Test
    public void testVerifyNewBranch() {
        mgc.getChangesets().clear();
        mgc.getChangesets().add(HEAD_MINUS_ONE);
        mgc.getChangesets().add(HEAD);
        Mockito.when(change.getType()).thenReturn(RefChangeType.ADD);
        Mockito.when(change.getFromHash()).thenReturn("0000000000000000000000000000000000000000");
        tjbh.onReceive(repo, changes, hr);
        // TODO: verify the git rev-list is invoked with proper args?
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD_MINUS_ONE, "");
        Mockito.verify(jenkinsManager).triggerBuild(repo, JobType.VERIFY_COMMIT, HEAD, "");
    }
}
| |
package tonius.simplyjetpacks.item.meta;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.world.World;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import tonius.simplyjetpacks.SimplyJetpacks;
import tonius.simplyjetpacks.client.model.PackModelType;
import tonius.simplyjetpacks.config.Config;
import tonius.simplyjetpacks.handler.SyncHandler;
import tonius.simplyjetpacks.item.ItemPack;
import tonius.simplyjetpacks.setup.FuelType;
import tonius.simplyjetpacks.setup.ModEnchantments;
import tonius.simplyjetpacks.setup.ModKey;
import tonius.simplyjetpacks.setup.ParticleType;
import tonius.simplyjetpacks.util.math.MathHelper;
import tonius.simplyjetpacks.util.NBTHelper;
import tonius.simplyjetpacks.util.SJStringHelper;
import tonius.simplyjetpacks.util.StringHelper;
import java.util.List;
public class Jetpack extends PackBase
{
protected static final String TAG_HOVERMODE_ON = "JetpackHoverModeOn";
protected static final String TAG_EHOVER_ON = "JetpackEHoverOn";
protected static final String TAG_PARTICLE = "JetpackParticleType";
public double speedVertical = 0.0D;
public double accelVertical = 0.0D;
public double speedVerticalHover = 0.0D;
public double speedVerticalHoverSlow = 0.0D;
public double speedSideways = 0.0D;
public double sprintSpeedModifier = 0.0D;
public double sprintFuelModifier = 0.0D;
public boolean emergencyHoverMode = false;
public ParticleType defaultParticleType = ParticleType.DEFAULT;
public Jetpack(int tier, EnumRarity rarity, String defaultConfigKey)
{
super("jetpack", tier, rarity, defaultConfigKey);
this.setArmorModel(PackModelType.JETPACK);
}
public Jetpack setDefaultParticleType(ParticleType defaultParticleType)
{
this.defaultParticleType = defaultParticleType;
return this;
}
@Override
public void tickArmor(World world, EntityPlayer player, ItemStack stack, ItemPack item)
{
this.flyUser(player, stack, item, false);
}
public void flyUser(EntityPlayer user, ItemStack stack, ItemPack item, boolean force)
{
if(this.isOn(stack))
{
boolean hoverMode = this.isHoverModeOn(stack);
double hoverSpeed = Config.invertHoverSneakingBehavior == SyncHandler.isDescendKeyDown(user) ? this.speedVerticalHoverSlow : this.speedVerticalHover;
boolean flyKeyDown = force || SyncHandler.isFlyKeyDown(user);
boolean descendKeyDown = SyncHandler.isDescendKeyDown(user);
double currentAccel = this.accelVertical * (user.motionY < 0.3D ? 2.5D : 1.0D);
double currentSpeedVertical = this.speedVertical * (user.isInWater() ? 0.4D : 1.0D);
if(flyKeyDown || hoverMode && !user.onGround)
{
if(this.usesFuel)
{
item.useFuel(stack, (int) (user.isSprinting() ? Math.round(this.getFuelUsage(stack) * this.sprintFuelModifier) : this.getFuelUsage(stack)), false);
}
if(item.getFuelStored(stack) > 0)
{
if(flyKeyDown)
{
if(!hoverMode)
{
user.motionY = Math.min(user.motionY + currentAccel, currentSpeedVertical);
}
else
{
if(descendKeyDown)
{
user.motionY = Math.min(user.motionY + currentAccel, -this.speedVerticalHoverSlow);
}
else
{
user.motionY = Math.min(user.motionY + currentAccel, this.speedVerticalHover);
}
}
}
else
{
user.motionY = Math.min(user.motionY + currentAccel, -hoverSpeed);
}
float speedSideways = (float) (user.isSneaking() ? this.speedSideways * 0.5F : this.speedSideways);
float speedForward = (float) (user.isSprinting() ? speedSideways * this.sprintSpeedModifier : speedSideways);
if(SyncHandler.isForwardKeyDown(user))
{
user.moveRelative(0, speedForward, speedForward);
}
if(SyncHandler.isBackwardKeyDown(user))
{
user.moveRelative(0, -speedSideways, speedSideways * 0.8F);
}
if(SyncHandler.isLeftKeyDown(user))
{
user.moveRelative(speedSideways, 0, speedSideways);
}
if(SyncHandler.isRightKeyDown(user))
{
user.moveRelative(-speedSideways, 0, speedSideways);
}
if(!user.worldObj.isRemote)
{
user.fallDistance = 0.0F;
/*
TODO: Check what this is for and how to update this
if (user instanceof EntityPlayerMP) {
((EntityPlayerMP) user).connection.floatingTickCount = 0;
}*/
/*
TODO: Reimplement explosions
if (Config.flammableFluidsExplode) {
if (!(user instanceof EntityPlayer) || !((EntityPlayer) user).capabilities.isCreativeMode) {
int x = Math.round((float) user.posX - 0.5F);
int y = Math.round((float) user.posY);
int z = Math.round((float) user.posZ - 0.5F);
Block fluidBlock = user.worldObj.getBlock(x, y, z);
if (fluidBlock instanceof IFluidBlock && fluidBlock.isFlammable(user.worldObj, x, y, z, ForgeDirection.UNKNOWN)) {
user.worldObj.playSoundAtEntity(user, "mob.ghast.fireball", 2.0F, 1.0F);
user.worldObj.createExplosion(user, user.posX, user.posY, user.posZ, 3.5F, false);
user.attackEntityFrom(new EntityDamageSource("jetpackexplode", user), 100.0F);
}
}
}*/
}
}
}
}
if(!user.worldObj.isRemote && this.emergencyHoverMode && this.isEHoverOn(stack))
{
if(item.getEnergyStored(stack) > 0 && (!this.isHoverModeOn(stack) || !this.isOn(stack)))
{
if(user.posY < -5)
{
this.doEHover(stack, user);
}
else if(user instanceof EntityPlayer)
{
if(!((EntityPlayer) user).capabilities.isCreativeMode && user.fallDistance - 1.2F >= user.getHealth())
{
for(int i = 0; i <= 16; i++)
{
int x = Math.round((float) user.posX - 0.5F);
int y = Math.round((float) user.posY) - i;
int z = Math.round((float) user.posZ - 0.5F);
if(!user.worldObj.isAirBlock(new BlockPos(x, y, z)))
{
this.doEHover(stack, user);
break;
}
}
}
}
}
}
}
protected int getFuelUsage(ItemStack stack)
{
if(ModEnchantments.fuelEffeciency == null)
{
return this.fuelUsage;
}
int fuelEfficiencyLevel = MathHelper.clampI(EnchantmentHelper.getEnchantmentLevel(ModEnchantments.fuelEffeciency, stack), 0, 4);
return (int) Math.round(this.fuelUsage * (20 - fuelEfficiencyLevel) / 20.0D);
}
public void doEHover(ItemStack armor, EntityLivingBase user)
{
NBTHelper.setBoolean(armor, TAG_ON, true);
NBTHelper.setBoolean(armor, TAG_HOVERMODE_ON, true);
if(user instanceof EntityPlayer)
{
((EntityPlayer) user).addChatMessage(new TextComponentString(StringHelper.LIGHT_RED + SJStringHelper.localize("chat.jetpack.emergencyHoverMode.msg")));
}
}
public void setMobMode(ItemStack itemStack)
{
itemStack.getTagCompound().setBoolean(TAG_ON, true);
itemStack.getTagCompound().setBoolean(TAG_HOVERMODE_ON, false);
}
public boolean isHoverModeOn(ItemStack stack)
{
return NBTHelper.getBoolean(stack, TAG_HOVERMODE_ON);
}
public boolean isEHoverOn(ItemStack stack)
{
return NBTHelper.getBoolean(stack, TAG_EHOVER_ON);
}
@Override
public void switchModePrimary(ItemStack stack, EntityPlayer player, boolean showInChat)
{
this.switchHoverMode(stack, player, showInChat);
}
@Override
public void switchModeSecondary(ItemStack stack, EntityPlayer player, boolean showInChat)
{
if(this.emergencyHoverMode)
{
this.switchEHover(stack, player, showInChat);
}
}
protected void switchHoverMode(ItemStack stack, EntityPlayer player, boolean showInChat)
{
this.toggleState(this.isHoverModeOn(stack), stack, "hoverMode", TAG_HOVERMODE_ON, player, showInChat);
}
public void switchEHover(ItemStack stack, EntityPlayer player, boolean showInChat)
{
this.toggleState(this.isEHoverOn(stack), stack, "emergencyHoverMode", TAG_EHOVER_ON, player, showInChat);
}
public void setParticleType(ItemStack stack, ParticleType particle)
{
NBTHelper.setInt(stack, TAG_PARTICLE, particle.ordinal());
}
protected ParticleType getParticleType(ItemStack stack)
{
if(stack.getTagCompound() != null && stack.getTagCompound().hasKey(TAG_PARTICLE))
{
int particle = NBTHelper.getInt(stack, TAG_PARTICLE);
ParticleType particleType = ParticleType.values()[particle];
if(particleType != null)
{
return particleType;
}
}
NBTHelper.setInt(stack, TAG_PARTICLE, this.defaultParticleType.ordinal());
return this.defaultParticleType;
}
public ParticleType getDisplayParticleType(ItemStack stack, ItemPack item, EntityLivingBase user)
{
boolean flyKeyDown = SyncHandler.isFlyKeyDown(user);
if(this.isOn(stack) && item.getFuelStored(stack) > 0 && (flyKeyDown || this.isHoverModeOn(stack) && !user.onGround && user.motionY < 0))
{
return this.getParticleType(stack);
}
return null;
}
@Override
public String getGuiTitlePrefix()
{
return "gui.jetpack";
}
@Override
public ModKey[] getGuiControls()
{
if(this.emergencyHoverMode)
{
return new ModKey[] {ModKey.TOGGLE_PRIMARY, ModKey.MODE_PRIMARY, ModKey.MODE_SECONDARY};
}
else
{
return new ModKey[] {ModKey.TOGGLE_PRIMARY, ModKey.MODE_PRIMARY};
}
}
@Override
@SideOnly(Side.CLIENT)
public void addShiftInformation(ItemStack stack, ItemPack item, EntityPlayer player, List list)
{
list.add(SJStringHelper.getStateText(this.isOn(stack)));
list.add(SJStringHelper.getHoverModeText(this.isHoverModeOn(stack)));
if(this.fuelType == FuelType.FLUID && this.fuelFluid != null)
{
list.add(SJStringHelper.getFuelFluidText(this.fuelFluid));
}
if(this.fuelUsage > 0)
{
list.add(SJStringHelper.getFuelUsageText(this.fuelType, this.getFuelUsage(stack)));
}
list.add(SJStringHelper.getParticlesText(this.getParticleType(stack)));
SJStringHelper.addDescriptionLines(list, "jetpack", StringHelper.BRIGHT_GREEN);
String key = SimplyJetpacks.proxy.getPackGUIKey();
if(key != null)
{
list.add(SJStringHelper.getPackGUIText(key));
}
}
@Override
@SideOnly(Side.CLIENT)
public String getHUDStatesInfo(ItemStack stack, ItemPack item)
{
Boolean engine = this.isOn(stack);
Boolean hover = this.isHoverModeOn(stack);
return SJStringHelper.getHUDStateText(engine, hover, null);
}
@Override
protected void loadConfig(Configuration config)
{
super.loadConfig(config);
if(this.defaults.speedVertical != null)
{
this.speedVertical = config.get(this.defaults.section.name, "Vertical Speed", this.defaults.speedVertical, "The maximum vertical speed of this jetpack when flying.").setMinValue(0.0D).getDouble(this.defaults.speedVertical);
}
if(this.defaults.accelVertical != null)
{
this.accelVertical = config.get(this.defaults.section.name, "Vertical Acceleration", this.defaults.accelVertical, "The vertical acceleration of this jetpack when flying; every tick, this amount of vertical speed will be added until maximum speed is reached.").setMinValue(0.0D).getDouble(this.defaults.accelVertical);
}
if(this.defaults.speedVerticalHover != null)
{
this.speedVerticalHover = config.get(this.defaults.section.name, "Vertical Speed (Hover Mode)", this.defaults.speedVerticalHover, "The maximum vertical speed of this jetpack when flying in hover mode.").setMinValue(0.0D).getDouble(this.defaults.speedVerticalHover);
}
if(this.defaults.speedVerticalHoverSlow != null)
{
this.speedVerticalHoverSlow = config.get(this.defaults.section.name, "Vertical Speed (Hover Mode / Slow Descent)", this.defaults.speedVerticalHoverSlow, "The maximum vertical speed of this jetpack when slowly descending in hover mode.").setMinValue(0.0D).getDouble(this.defaults.speedVerticalHoverSlow);
}
if(this.defaults.speedSideways != null)
{
this.speedSideways = config.get(this.defaults.section.name, "Sideways Speed", this.defaults.speedSideways, "The speed of this jetpack when flying sideways. This is mostly noticeable in hover mode.").setMinValue(0.0D).getDouble(this.defaults.speedSideways);
}
if(this.defaults.sprintSpeedModifier != null)
{
this.sprintSpeedModifier = config.get(this.defaults.section.name, "Sprint Speed Multiplier", this.defaults.sprintSpeedModifier, "How much faster this jetpack will fly forward when sprinting. Setting this to 1.0 will make sprinting have no effect apart from the added speed from vanilla.").setMinValue(0.0D).getDouble(this.defaults.sprintSpeedModifier);
}
if(this.defaults.sprintFuelModifier != null)
{
this.sprintFuelModifier = config.get(this.defaults.section.name, "Sprint Fuel Usage Multiplier", this.defaults.sprintFuelModifier, "How much more energy this jetpack will use when sprinting. Setting this to 1.0 will make sprinting have no effect on energy usage.").setMinValue(0.0D).getDouble(this.defaults.sprintFuelModifier);
}
if(this.defaults.emergencyHoverMode != null)
{
this.emergencyHoverMode = config.get(this.defaults.section.name, "Emergency Hover Mode", this.defaults.emergencyHoverMode, "When enabled, this jetpack will activate hover mode automatically when the wearer is about to die from a fall.").getBoolean(this.defaults.emergencyHoverMode);
}
}
/**
 * Serialises this jetpack's tunable flight parameters into {@code tag}.
 * A value is only written when its default is non-null, i.e. when the
 * jetpack type actually declares that setting; this mirrors the guards
 * used when the values are read back in {@code readConfigFromNBT}.
 */
@Override
protected void writeConfigToNBT(NBTTagCompound tag) {
    super.writeConfigToNBT(tag);
    if (this.defaults.speedVertical != null) {
        tag.setDouble("SpeedVertical", this.speedVertical);
    }
    if (this.defaults.accelVertical != null) {
        tag.setDouble("AccelVertical", this.accelVertical);
    }
    if (this.defaults.speedVerticalHover != null) {
        tag.setDouble("SpeedVerticalHover", this.speedVerticalHover);
    }
    if (this.defaults.speedVerticalHoverSlow != null) {
        tag.setDouble("SpeedVerticalHoverSlow", this.speedVerticalHoverSlow);
    }
    if (this.defaults.speedSideways != null) {
        tag.setDouble("SpeedSideways", this.speedSideways);
    }
    if (this.defaults.sprintSpeedModifier != null) {
        tag.setDouble("SprintSpeedModifier", this.sprintSpeedModifier);
    }
    if (this.defaults.sprintFuelModifier != null) {
        tag.setDouble("SprintFuelModifier", this.sprintFuelModifier);
    }
    if (this.defaults.emergencyHoverMode != null) {
        tag.setBoolean("EmergencyHoverMode", this.emergencyHoverMode);
    }
}
/**
 * Restores this jetpack's tunable flight parameters from {@code tag}.
 * A value is only read back when its default is non-null, matching the
 * guards used in {@code writeConfigToNBT}; settings the jetpack type
 * does not declare are left untouched.
 */
@Override
protected void readConfigFromNBT(NBTTagCompound tag) {
    super.readConfigFromNBT(tag);
    if (this.defaults.speedVertical != null) {
        this.speedVertical = tag.getDouble("SpeedVertical");
    }
    if (this.defaults.accelVertical != null) {
        this.accelVertical = tag.getDouble("AccelVertical");
    }
    if (this.defaults.speedVerticalHover != null) {
        this.speedVerticalHover = tag.getDouble("SpeedVerticalHover");
    }
    if (this.defaults.speedVerticalHoverSlow != null) {
        this.speedVerticalHoverSlow = tag.getDouble("SpeedVerticalHoverSlow");
    }
    if (this.defaults.speedSideways != null) {
        this.speedSideways = tag.getDouble("SpeedSideways");
    }
    if (this.defaults.sprintSpeedModifier != null) {
        this.sprintSpeedModifier = tag.getDouble("SprintSpeedModifier");
    }
    if (this.defaults.sprintFuelModifier != null) {
        this.sprintFuelModifier = tag.getDouble("SprintFuelModifier");
    }
    if (this.defaults.emergencyHoverMode != null) {
        this.emergencyHoverMode = tag.getBoolean("EmergencyHoverMode");
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lexmodelbuilding.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request to start migrating an Amazon Lex V1 bot to Amazon Lex V2.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lex-models-2017-04-19/StartMigration" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StartMigrationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the Amazon Lex V1 bot being migrated to Amazon Lex V2. */
    private String v1BotName;

    /** The version of the V1 bot to migrate: {@code $LATEST} or any numbered version. */
    private String v1BotVersion;

    /**
     * The name of the target Amazon Lex V2 bot. If the V2 bot doesn't exist yet, the {@code CREATE_NEW} migration
     * strategy must be used; if it already exists, {@code UPDATE_EXISTING} must be used to change its contents.
     */
    private String v2BotName;

    /** The IAM role that Amazon Lex uses to run the Amazon Lex V2 bot. */
    private String v2BotRole;

    /**
     * The strategy used to conduct the migration. {@code CREATE_NEW} creates a new Amazon Lex V2 bot and migrates the
     * V1 bot into it. {@code UPDATE_EXISTING} overwrites the existing V2 bot's metadata and the locale being migrated;
     * other locales are untouched, and a missing locale is created.
     */
    private String migrationStrategy;

    /**
     * Sets the name of the Amazon Lex V1 bot that you are migrating to Amazon Lex V2.
     *
     * @param v1BotName
     *        the V1 bot name
     */
    public void setV1BotName(String v1BotName) {
        this.v1BotName = v1BotName;
    }

    /**
     * @return the name of the Amazon Lex V1 bot that you are migrating to Amazon Lex V2
     */
    public String getV1BotName() {
        return this.v1BotName;
    }

    /**
     * Fluent form of {@link #setV1BotName(String)}.
     *
     * @param v1BotName
     *        the V1 bot name
     * @return this request, so that method calls can be chained together
     */
    public StartMigrationRequest withV1BotName(String v1BotName) {
        setV1BotName(v1BotName);
        return this;
    }

    /**
     * Sets the version of the bot to migrate to Amazon Lex V2. You can migrate the <code>$LATEST</code> version as
     * well as any numbered version.
     *
     * @param v1BotVersion
     *        the V1 bot version
     */
    public void setV1BotVersion(String v1BotVersion) {
        this.v1BotVersion = v1BotVersion;
    }

    /**
     * @return the version of the bot to migrate to Amazon Lex V2 (<code>$LATEST</code> or a numbered version)
     */
    public String getV1BotVersion() {
        return this.v1BotVersion;
    }

    /**
     * Fluent form of {@link #setV1BotVersion(String)}.
     *
     * @param v1BotVersion
     *        the V1 bot version
     * @return this request, so that method calls can be chained together
     */
    public StartMigrationRequest withV1BotVersion(String v1BotVersion) {
        setV1BotVersion(v1BotVersion);
        return this;
    }

    /**
     * Sets the name of the Amazon Lex V2 bot that you are migrating the Amazon Lex V1 bot to. If the V2 bot doesn't
     * exist, you must use the <code>CREATE_NEW</code> migration strategy; if it exists, you must use
     * <code>UPDATE_EXISTING</code> to change its contents.
     *
     * @param v2BotName
     *        the target V2 bot name
     */
    public void setV2BotName(String v2BotName) {
        this.v2BotName = v2BotName;
    }

    /**
     * @return the name of the Amazon Lex V2 bot that you are migrating the Amazon Lex V1 bot to
     */
    public String getV2BotName() {
        return this.v2BotName;
    }

    /**
     * Fluent form of {@link #setV2BotName(String)}.
     *
     * @param v2BotName
     *        the target V2 bot name
     * @return this request, so that method calls can be chained together
     */
    public StartMigrationRequest withV2BotName(String v2BotName) {
        setV2BotName(v2BotName);
        return this;
    }

    /**
     * Sets the IAM role that Amazon Lex uses to run the Amazon Lex V2 bot.
     *
     * @param v2BotRole
     *        the IAM role ARN
     */
    public void setV2BotRole(String v2BotRole) {
        this.v2BotRole = v2BotRole;
    }

    /**
     * @return the IAM role that Amazon Lex uses to run the Amazon Lex V2 bot
     */
    public String getV2BotRole() {
        return this.v2BotRole;
    }

    /**
     * Fluent form of {@link #setV2BotRole(String)}.
     *
     * @param v2BotRole
     *        the IAM role ARN
     * @return this request, so that method calls can be chained together
     */
    public StartMigrationRequest withV2BotRole(String v2BotRole) {
        setV2BotRole(v2BotRole);
        return this;
    }

    /**
     * Sets the strategy used to conduct the migration: <code>CREATE_NEW</code> creates a new Amazon Lex V2 bot and
     * migrates the V1 bot into it; <code>UPDATE_EXISTING</code> overwrites the existing V2 bot metadata and the locale
     * being migrated (other locales are untouched; a missing locale is created).
     *
     * @param migrationStrategy
     *        the migration strategy value
     * @see MigrationStrategy
     */
    public void setMigrationStrategy(String migrationStrategy) {
        this.migrationStrategy = migrationStrategy;
    }

    /**
     * @return the strategy used to conduct the migration
     * @see MigrationStrategy
     */
    public String getMigrationStrategy() {
        return this.migrationStrategy;
    }

    /**
     * Fluent form of {@link #setMigrationStrategy(String)}.
     *
     * @param migrationStrategy
     *        the migration strategy value
     * @return this request, so that method calls can be chained together
     * @see MigrationStrategy
     */
    public StartMigrationRequest withMigrationStrategy(String migrationStrategy) {
        setMigrationStrategy(migrationStrategy);
        return this;
    }

    /**
     * Fluent form of {@link #setMigrationStrategy(String)} taking the typed enum. Note that the enum is dereferenced
     * directly, so passing {@code null} throws a {@link NullPointerException}.
     *
     * @param migrationStrategy
     *        the migration strategy; must not be {@code null}
     * @return this request, so that method calls can be chained together
     * @see MigrationStrategy
     */
    public StartMigrationRequest withMigrationStrategy(MigrationStrategy migrationStrategy) {
        this.migrationStrategy = migrationStrategy.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getV1BotName() != null)
            sb.append("V1BotName: ").append(getV1BotName()).append(",");
        if (getV1BotVersion() != null)
            sb.append("V1BotVersion: ").append(getV1BotVersion()).append(",");
        if (getV2BotName() != null)
            sb.append("V2BotName: ").append(getV2BotName()).append(",");
        if (getV2BotRole() != null)
            sb.append("V2BotRole: ").append(getV2BotRole()).append(",");
        if (getMigrationStrategy() != null)
            sb.append("MigrationStrategy: ").append(getMigrationStrategy());
        sb.append("}");
        return sb.toString();
    }

    /** Null-safe equality: {@code true} when both are {@code null} or {@code a.equals(b)}. */
    private static boolean equalOrBothNull(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /** Null-safe hash: 0 for {@code null}, otherwise {@code o.hashCode()}. */
    private static int hashOrZero(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof StartMigrationRequest))
            return false;
        StartMigrationRequest other = (StartMigrationRequest) obj;
        return equalOrBothNull(getV1BotName(), other.getV1BotName())
                && equalOrBothNull(getV1BotVersion(), other.getV1BotVersion())
                && equalOrBothNull(getV2BotName(), other.getV2BotName())
                && equalOrBothNull(getV2BotRole(), other.getV2BotRole())
                && equalOrBothNull(getMigrationStrategy(), other.getMigrationStrategy());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as before, so hash values are unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + hashOrZero(getV1BotName());
        hashCode = prime * hashCode + hashOrZero(getV1BotVersion());
        hashCode = prime * hashCode + hashOrZero(getV2BotName());
        hashCode = prime * hashCode + hashOrZero(getV2BotRole());
        hashCode = prime * hashCode + hashOrZero(getMigrationStrategy());
        return hashCode;
    }

    @Override
    public StartMigrationRequest clone() {
        return (StartMigrationRequest) super.clone();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package nl.gridline.zieook.model;
import java.io.Serializable;
import java.util.Objects;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

import nl.gridline.zieook.api.JSonParent;
/**
 * A user rating carrying the full {@link CollectionItem} being rated, together with
 * its provenance (content provider, collection, source, recommender) and timing data.
 * <p />
 * Project zieook-api-data<br />
 * RatingFull.java created 23 dec. 2011
 * <p />
 * Copyright, all rights reserved 2011 GridLine Amsterdam
 * @author <a href="mailto:job@gridline.nl">Job</a>
 * @version $Revision:$, $Date:$
 */
@XmlRootElement(name = "user_rating")
@XmlAccessorType(XmlAccessType.FIELD)
public class RatingFull implements Serializable, JSonParent
{
	private static final long serialVersionUID = 8660871604558628788L;

	// The rating value given by the user.
	@XmlElement
	private double rating;
	@XmlElement(required = true)
	private Long user;
	// The full item being rated. NOTE(review): not copied by RatingFull(Rating) —
	// presumably Rating only holds an item id; callers must call setItem() themselves.
	@XmlElement(required = true)
	private CollectionItem item;
	@XmlElement(required = false)
	private String collection;
	// Content provider identifier.
	@XmlElement(required = false)
	private String cp;
	@XmlElement(required = false)
	private String source;
	// Timestamp of the rating (epoch value — units not visible here; TODO confirm s vs ms).
	@XmlElement(required = false)
	private Long date;
	@XmlElement(required = false)
	private Long count;
	@XmlElement(required = false)
	private String recommender;

	public RatingFull()
	{
		// no-arg constructor (required by JAXB)
	}

	/**
	 * Copies all scalar fields from the given {@link Rating}. The {@link #item}
	 * field is NOT populated here and must be set separately via {@link #setItem}.
	 * @param rating the rating to copy from
	 */
	public RatingFull(Rating rating)
	{
		user = rating.getUser();
		collection = rating.getCollection();
		cp = rating.getCp();
		source = rating.getSource();
		date = rating.getDate();
		count = rating.getCount();
		this.rating = rating.getRating();
		recommender = rating.getRecommender();
	}

	/**
	 * @return The rating.
	 */
	public double getRating()
	{
		return rating;
	}

	/**
	 * @param rating The rating to set.
	 */
	public void setRating(double rating)
	{
		this.rating = rating;
	}

	/**
	 * @return The user.
	 */
	public Long getUser()
	{
		return user;
	}

	/**
	 * @param user The user to set.
	 */
	public void setUser(Long user)
	{
		this.user = user;
	}

	/**
	 * @return The item.
	 */
	public CollectionItem getItem()
	{
		return item;
	}

	/**
	 * @param item The item to set.
	 */
	public void setItem(CollectionItem item)
	{
		this.item = item;
	}

	/**
	 * @return The collection.
	 */
	public String getCollection()
	{
		return collection;
	}

	/**
	 * @param collection The collection to set.
	 */
	public void setCollection(String collection)
	{
		this.collection = collection;
	}

	/**
	 * @return The source.
	 */
	public String getSource()
	{
		return source;
	}

	/**
	 * @param source The source to set.
	 */
	public void setSource(String source)
	{
		this.source = source;
	}

	/**
	 * @return The date.
	 */
	public Long getDate()
	{
		return date;
	}

	/**
	 * @param date The date to set.
	 */
	public void setDate(Long date)
	{
		this.date = date;
	}

	/**
	 * @return The count.
	 */
	public Long getCount()
	{
		return count;
	}

	/**
	 * @param count The count to set.
	 */
	public void setCount(Long count)
	{
		this.count = count;
	}

	/**
	 * @return The content provider.
	 */
	public String getCp()
	{
		return cp;
	}

	/**
	 * @param cp The content provider to set.
	 */
	public void setCp(String cp)
	{
		this.cp = cp;
	}

	/**
	 * @return The recommender.
	 */
	public String getRecommender()
	{
		return recommender;
	}

	/**
	 * @param recommender The recommender to set.
	 */
	public void setRecommender(String recommender)
	{
		this.recommender = recommender;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString()
	{
		// Fixed: previously 'item=' was not space-separated from 'count', and
		// rating/cp/recommender were missing from the output.
		return new StringBuilder(" user=").append(user).append(" collection=").append(collection).append(" cp=")
				.append(cp).append(" source=").append(source).append(" date=").append(date).append(" count=")
				.append(count).append(" rating=").append(rating).append(" recommender=").append(recommender)
				.append(" item=").append(item).toString();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see nl.gridline.zieook.api.JSonParent#toJSON()
	 */
	@Override
	public String toJSON()
	{
		return ModelConstants.toJSON(this);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode()
	{
		// Fixed: 'recommender' was previously omitted from both hashCode() and
		// equals(); all value fields now participate so the two stay consistent.
		return Objects.hash(collection, count, cp, date, item, rating, recommender, source, user);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object obj)
	{
		if (this == obj)
		{
			return true;
		}
		if (obj == null || getClass() != obj.getClass())
		{
			return false;
		}
		RatingFull other = (RatingFull) obj;
		// Same bit-pattern comparison for the primitive double as before.
		return Double.doubleToLongBits(rating) == Double.doubleToLongBits(other.rating)
				&& Objects.equals(collection, other.collection) && Objects.equals(count, other.count)
				&& Objects.equals(cp, other.cp) && Objects.equals(date, other.date)
				&& Objects.equals(item, other.item) && Objects.equals(recommender, other.recommender)
				&& Objects.equals(source, other.source) && Objects.equals(user, other.user);
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.