gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Function2D.java
* Copyright (C) 2006 Tao Chen, Kin Man Poon, Yi Wang, and Nevin L. Zhang
*/
package voltric.util;
import voltric.variables.DiscreteVariable;
import java.util.ArrayList;
//import hlcm.Function2V;
/**
* This class provides an implementation for two-dimensional tabular functions,
* namely, matrices.
*
* @author Yi Wang
*
*/
public class Function2D extends Function {

    /**
     * the shortcut to the only two variables in this function. There is a
     * requirement that _x>_y.
     */
    protected DiscreteVariable _x, _y;

    /**
     * <p>
     * Constructs a function of the specified array of variables.
     * </p>
     *
     * <p>
     * Note: Only function classes are supposed to call this method.
     * </p>
     *
     * @param variables
     *            array of variables to be involved. There are two Variables
     *            sorted in the Variable array.
     */
    protected Function2D(DiscreteVariable[] variables) {
        super(variables);

        _x = _variables[0];
        _y = _variables[1];
    }

    /**
     * <p>
     * Constructs a function with all its internal data structures specified.
     * </p>
     *
     * <p>
     * Note: Only function classes are supposed to call this method.
     * </p>
     *
     * @param variables
     *            array of variables in new function. There are two Variables in
     *            the Variable array.
     * @param cells
     *            array of cells in new function.
     * @param magnitudes
     *            array of magnitudes for variables in new function.
     */
    protected Function2D(DiscreteVariable[] variables, double[] cells, int[] magnitudes) {
        super(variables, cells, magnitudes);

        _x = _variables[0];
        _y = _variables[1];
    }

    /**
     * Sets the cell value addressed by the two (variable, state) pairs.
     *
     * @param var1 first variable
     * @param i state of the first variable
     * @param var2 second variable
     * @param j state of the second variable
     * @param value new cell value
     */
    public void setCell(DiscreteVariable var1, int i, DiscreteVariable var2, int j, double value) {
        ArrayList<DiscreteVariable> variables = new ArrayList<DiscreteVariable>(2);
        ArrayList<Integer> states = new ArrayList<Integer>(2);
        variables.add(var1);
        states.add(i);
        variables.add(var2);
        states.add(j);
        super.setCell(variables, states, value);
    }

    /*
     * (non-Javadoc)
     *
     * @see org.latlab.util.Function#normalize(org.latlab.util.Variable)
     */
    @Override
    public final boolean normalize(DiscreteVariable variable) {
        // argument variable must be either of the variables in this function
        if (!variable.equals(_x) && !variable.equals(_y))
            throw new IllegalArgumentException("Argument variable must be either of the variables in this function");

        boolean hasZero = false;

        int xCard = _x.getCardinality();
        int yCard = _y.getCardinality();
        int index;
        double sum;

        // NOTE: branch with equals(..) — not == — so that the axis chosen here
        // is always consistent with the guard above, even when an equal but
        // distinct DiscreteVariable instance is passed in.
        if (variable.equals(_x)) {
            // uniform probability that may be used when a column sums to zero
            double uniform = 1.0 / xCard;

            for (int i = 0; i < yCard; i++) {
                // computes sum over the x axis (stride yCard: row-major layout
                // with _x as the leading variable)
                index = i;
                sum = 0.0;
                for (int j = 0; j < xCard; j++) {
                    sum += _cells[index];
                    index += yCard;
                }

                // normalizes; falls back to uniform when the sum is zero
                index = i;
                if (sum != 0.0) {
                    for (int j = 0; j < xCard; j++) {
                        _cells[index] /= sum;
                        index += yCard;
                    }
                } else {
                    for (int j = 0; j < xCard; j++) {
                        _cells[index] = uniform;
                        index += yCard;
                    }
                    hasZero = true;
                }
            }
        } else {
            // uniform probability that may be used when a row sums to zero
            double uniform = 1.0 / yCard;

            index = 0;
            for (int i = 0; i < xCard; i++) {
                // computes sum over the contiguous y axis
                sum = 0.0;
                for (int j = 0; j < yCard; j++) {
                    sum += _cells[index++];
                }

                // normalizes; falls back to uniform when the sum is zero
                index -= yCard;
                if (sum != 0.0) {
                    for (int j = 0; j < yCard; j++) {
                        _cells[index++] /= sum;
                    }
                } else {
                    for (int j = 0; j < yCard; j++) {
                        _cells[index++] = uniform;
                    }
                    hasZero = true;
                }
            }
        }

        return hasZero;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.latlab.util.Function#project(org.latlab.util.Variable, int)
     */
    @Override
    public Function project(DiscreteVariable variable, int state) {
        // argument variable must be either of the variables in this function
        if (!variable.equals(_x) && !variable.equals(_y))
            throw new IllegalArgumentException("Argument variable must be either of the variables in this function");

        // state must be valid
        if (!variable.isValid(state))
            throw new IllegalArgumentException("state is invalid");

        // result is an one-dimensional function
        DiscreteVariable[] variables;
        double[] cells;
        int[] magnitudes = new int[] { 1 };

        // equals(..) keeps the branch consistent with the guard above
        if (variable.equals(_x)) {
            variables = new DiscreteVariable[] { _y };

            // fixing _x = state selects one contiguous row of cells
            int yCard = _y.getCardinality();
            cells = new double[yCard];
            System.arraycopy(_cells, state * yCard, cells, 0, yCard);
        } else {
            variables = new DiscreteVariable[] { _x };

            // fixing _y = state selects a strided column of cells
            int xCard = _x.getCardinality();
            int yCard = _y.getCardinality();
            cells = new double[xCard];
            int index = state;
            for (int i = 0; i < xCard; i++) {
                cells[i] = _cells[index];
                index += yCard;
            }
        }

        return (new Function1D(variables, cells, magnitudes));
    }

    /*
     * (non-Javadoc)
     *
     * @see org.latlab.util.Function#sumOut(org.latlab.util.Variable)
     */
    @Override
    public Function sumOut(DiscreteVariable variable) {
        // argument variable must be either of the variables in this function
        if (!variable.equals(_x) && !variable.equals(_y))
            throw new IllegalArgumentException("Argument variable must be either of the variables in this function");

        // result is an one-dimensional function
        DiscreteVariable[] variables;
        double[] cells;
        int[] magnitudes = new int[] { 1 };

        int xCard = _x.getCardinality();
        int yCard = _y.getCardinality();

        // equals(..) keeps the branch consistent with the guard above
        if (variable.equals(_x)) {
            variables = new DiscreteVariable[] { _y };
            cells = new double[yCard];

            // accumulate each column across all rows
            int index = 0;
            for (int i = 0; i < xCard; i++) {
                for (int j = 0; j < yCard; j++) {
                    cells[j] += _cells[index++];
                }
            }
        } else {
            variables = new DiscreteVariable[] { _x };
            cells = new double[xCard];

            // accumulate each row across all columns
            int index = 0;
            for (int i = 0; i < xCard; i++) {
                for (int j = 0; j < yCard; j++) {
                    cells[i] += _cells[index++];
                }
            }
        }

        return (new Function1D(variables, cells, magnitudes));
    }

    /**
     * Returns the product between this Function2D and another function. The
     * multiplication is delegated to <code>Function1D.times(Function)</code> if
     * the argument is a Function1D and they share a common Variable.
     *
     * @param function
     *            another factor
     * @return the product between this Function2D and another function.
     * @see Function1D#times(Function)
     */
    @Override
    public final Function times(Function function) {
        if (function instanceof Function1D && contains(function._variables[0])) {
            return ((Function1D) function).times(this);
        } else if (function instanceof Function2D
                && _x == function._variables[0] && _y == function._variables[1]) {
            // same variables in the same order: cell-wise product
            Function result = this.clone();
            for (int i = 0; i < getDomainSize(); i++) {
                result._cells[i] *= function._cells[i];
            }
            return result;
        } else {
            return super.times(function);
        }
    }

    /**
     * <p>
     * Multiplies this function, in place, by the argument function. Note that
     * this function must contain the argument function in terms of the
     * variables.
     * </p>
     *
     * @param function
     *            multiplier function.
     */
    @Override
    public final void multiply(Function function) {
        if (function.getDimension() == 0) {
            // scalar multiplier
            multiply(function._cells[0]);
        } else if (function instanceof Function1D) {
            int xCard = _x.getCardinality();
            int yCard = _y.getCardinality();
            int index = 0;

            if (_x == ((Function1D) function)._x) {
                // multiplier varies along the x axis
                for (int i = 0; i < xCard; i++) {
                    for (int j = 0; j < yCard; j++) {
                        _cells[index] *= function._cells[i];
                        index++;
                    }
                }
            } else {
                // multiplier varies along the y axis
                for (int i = 0; i < xCard; i++) {
                    for (int j = 0; j < yCard; j++) {
                        _cells[index] *= function._cells[j];
                        index++;
                    }
                }
            }
        } else {
            // same-shaped 2-d multiplier: cell-wise product
            for (int i = 0; i < getDomainSize(); i++) {
                _cells[i] *= function._cells[i];
            }
        }
    }

    /**
     * <p>
     * Divides this function, in place, by the argument function. Note that
     * this function must contain the argument function in terms of the
     * variables. Also note that the argument function should contain NO zero
     * cell at all.
     * </p>
     *
     * @param function
     *            divisor function.
     */
    @Override
    public final void divide(Function function) {
        if (function.getDimension() == 0) {
            // scalar divisor
            divide(function._cells[0]);
        } else if (function instanceof Function1D) {
            int xCard = _x.getCardinality();
            int yCard = _y.getCardinality();
            int index = 0;

            if (_x == ((Function1D) function)._x) {
                // divisor varies along the x axis
                for (int i = 0; i < xCard; i++) {
                    for (int j = 0; j < yCard; j++) {
                        _cells[index] /= function._cells[i];
                        index++;
                    }
                }
            } else {
                // divisor varies along the y axis
                for (int i = 0; i < xCard; i++) {
                    for (int j = 0; j < yCard; j++) {
                        _cells[index] /= function._cells[j];
                        index++;
                    }
                }
            }
        } else {
            // same-shaped 2-d divisor: cell-wise division
            for (int i = 0; i < getDomainSize(); i++) {
                _cells[i] /= function._cells[i];
            }
        }
    }

    /**
     * Returns the quotient of this function divided by the argument function,
     * leaving this function untouched. The divisor should contain no zero cell.
     *
     * @param function
     *            divisor function.
     * @return a new function holding the cell-wise quotient.
     */
    @Override
    public final Function myDivide(Function function) {
        if (function.getDimension() == 0) {
            // scalar divisor: delegate to myDivide(double)
            return myDivide(function._cells[0]);
        }
        else if (function instanceof Function1D) {
            Function result = this.clone();
            int xCard = _x.getCardinality();
            int yCard = _y.getCardinality();
            int index = 0;

            if (_x == ((Function1D) function)._x) {
                // divisor varies along the x axis
                for (int i = 0; i < xCard; i++) {
                    for (int j = 0; j < yCard; j++) {
                        result._cells[index] /= function._cells[i];
                        index++;
                    }
                }
            } else {
                // divisor varies along the y axis
                for (int i = 0; i < xCard; i++) {
                    for (int j = 0; j < yCard; j++) {
                        result._cells[index] /= function._cells[j];
                        index++;
                    }
                }
            }
            return result;
        } else {
            // same-shaped 2-d divisor: cell-wise quotient
            Function result = this.clone();
            for (int i = 0; i < getDomainSize(); i++) {
                result._cells[i] /= function._cells[i];
            }
            return result;
        }
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.program.model.lang;
import java.io.*;
import java.util.*;
import java.util.stream.Collectors;
import org.antlr.runtime.*;
import org.antlr.runtime.tree.CommonTreeNodeStream;
import generic.stl.VectorSTL;
import ghidra.app.plugin.processors.sleigh.*;
import ghidra.app.plugin.processors.sleigh.symbol.Symbol;
import ghidra.app.plugin.processors.sleigh.symbol.SymbolTable;
import ghidra.app.plugin.processors.sleigh.symbol.UseropSymbol;
import ghidra.app.plugin.processors.sleigh.symbol.VarnodeSymbol;
import ghidra.app.plugin.processors.sleigh.template.*;
import ghidra.pcode.utils.MessageFormattingUtils;
import ghidra.pcodeCPort.address.Address;
import ghidra.pcodeCPort.context.SleighError;
import ghidra.pcodeCPort.sleighbase.SleighBase;
import ghidra.pcodeCPort.slgh_compile.*;
import ghidra.pcodeCPort.slghsymbol.*;
import ghidra.pcodeCPort.slghsymbol.EndSymbol;
import ghidra.pcodeCPort.slghsymbol.OperandSymbol;
import ghidra.pcodeCPort.slghsymbol.StartSymbol;
import ghidra.pcodeCPort.space.*;
import ghidra.pcodeCPort.xml.DocumentStorage;
import ghidra.program.model.address.*;
import ghidra.sleigh.grammar.*;
import ghidra.sleigh.grammar.SleighParser_SemanticParser.semantic_return;
import ghidra.util.exception.AssertException;
/**
* This class is intended to parse p-code snippets, typically from compiler specification files
* or extensions. This is outside the normal SLEIGH compilation process, and the parser is built
* on top of an existing SleighLanguage.
*/
public class PcodeParser extends PcodeCompile {

	// Symbol source for name lookups; either supplied directly or wrapped
	// around a SleighLanguage via PcodeTranslate.
	private SleighBase sleigh;
	// Used to map compiled ConstTpl spaces back to AddressSpace objects.
	// NOTE(review): only assigned in the public SleighLanguage constructor;
	// appears to remain null on the protected path — confirm callers.
	private AddressFactory addrFactory;
	// Next free offset in the unique space for temporary varnodes.
	private long tempbase;
	// Parser-local symbols (inst_start etc. plus anything added via addSymbol).
	private HashMap<String, SleighSymbol> symbolMap = new HashMap<>();

	//record symbols added so that they can be removed to reset the parser
	private HashSet<String> currentSymbols = new HashSet<>();

	protected PcodeParser(SleighBase sleigh) {
		this.sleigh = sleigh;
		initializeSymbols();
	}

	/**
	 * Build parser from an existing SleighLanguage.
	 * @param language is the existing language
	 * @param ubase is the starting offset for allocating temporary registers
	 */
	public PcodeParser(SleighLanguage language, long ubase) {
		addrFactory = language.getAddressFactory();
		sleigh = new PcodeTranslate(language, ubase);
		initializeSymbols();
	}

	/**
	 * Seed the parser-local symbol map with the built-in instruction symbols
	 * (inst_start, inst_next, inst_ref, inst_dest), all in the constant space.
	 */
	private void initializeSymbols() {
		tempbase = sleigh.getUniqueBase();

		Location internalLoc = Location.INTERNALLY_DEFINED;
		symbolMap.put("inst_start", new StartSymbol(internalLoc, "inst_start", getConstantSpace()));
		symbolMap.put("inst_next", new EndSymbol(internalLoc, "inst_next", getConstantSpace()));
		symbolMap.put("inst_ref", new FlowRefSymbol(internalLoc, "inst_ref", getConstantSpace()));
		symbolMap.put("inst_dest",
			new FlowDestSymbol(internalLoc, "inst_dest", getConstantSpace()));
	}

	/**
	 * Inject a symbol representing an "operand" to the pcode snippet. This puts a placeholder in the
	 * resulting template, which gets filled in with the context specific storage locations when final
	 * p-code is generated
	 * @param loc is location information for the operand
	 * @param name of operand symbol
	 * @param index to use for the placeholder
	 */
	public void addOperand(Location loc, String name, int index) {
		OperandSymbol sym = new OperandSymbol(loc, name, index, null);
		addSymbol(sym);
	}

	/**
	 * Register a symbol with the parser. Symbols already known to the
	 * underlying language (or previously added) must be the identical object,
	 * otherwise the name is a duplicate and an error is raised.
	 */
	@Override
	public void addSymbol(SleighSymbol sym) {
		SleighSymbol s = sleigh.findSymbol(sym.getName());
		if (s == null) {
			s = symbolMap.get(sym.getName());
		}
		if (s != null) {
			if (s != sym) {
				throw new SleighError("Duplicate symbol name: " + sym.getName() +
					" (previously defined at " + s.location + ")", sym.getLocation());
			}
		}
		else {
			symbolMap.put(sym.getName(), sym);
			currentSymbols.add(sym.getName());
		}
	}

	/**
	 * Remove every symbol added since construction (or the last clear),
	 * resetting the parser for a fresh snippet. Built-in symbols are kept.
	 */
	public void clearSymbols() {
		for (String symbol : currentSymbols) {
			symbolMap.remove(symbol);
		}
		currentSymbols.clear();
	}

	/**
	 * @return the next unallocated offset in the unique (temporary) space
	 */
	public long getNextTempOffset() {
		return tempbase;
	}

	/**
	 * Reserve a block of the unique space for one temporary varnode.
	 * @return the base offset of the reserved block
	 */
	@Override
	public long allocateTemp() {
		long base = tempbase;
		tempbase = base + SleighBase.MAX_UNIQUE_SIZE;
		return base;
	}

	/**
	 * Macros are a full-SLEIGH feature; snippet parsing rejects them outright.
	 */
	@Override
	public VectorSTL<ghidra.pcodeCPort.semantics.OpTpl> createMacroUse(Location location,
			MacroSymbol sym, VectorSTL<ExprTree> param) {
		throw new SleighError("Pcode snippet parsing does not support use of macros", location);
	}

	/**
	 * Look up a symbol, preferring parser-local definitions over those of the
	 * underlying language.
	 */
	@Override
	public SleighSymbol findSymbol(String nm) {
		SleighSymbol sym = symbolMap.get(nm);
		if (sym != null) {
			return sym;
		}
		return PcodeParser.this.sleigh.findSymbol(nm);
	}

	@Override
	public AddrSpace getConstantSpace() {
		return sleigh.getConstantSpace();
	}

	@Override
	public AddrSpace getDefaultSpace() {
		return sleigh.getDefaultSpace();
	}

	@Override
	public AddrSpace getUniqueSpace() {
		return sleigh.getUniqueSpace();
	}

	@Override
	public void recordNop(Location location) {
		// No NOP statistics collected for snippet parsing
	}

	// Make sure label symbols are used properly: every placed label must be
	// referenced and every referenced label must be placed. Returns the
	// accumulated error messages ("" when there are none).
	private String checkLabels() {
		List<String> errors = new ArrayList<>();
		for (SleighSymbol sym : symbolMap.values()) {
			if (sym.getType() != symbol_type.label_symbol) {
				continue;
			}
			LabelSymbol labsym = (LabelSymbol) sym;
			if (labsym.getRefCount() == 0) {
				errors.add(MessageFormattingUtils.format(labsym.location,
					String.format("Label <%s> was placed but never used", sym.getName())));
			}
			else if (!labsym.isPlaced()) {
				errors.add(MessageFormattingUtils.format(labsym.location,
					String.format("Label <%s> was referenced but never placed", sym.getName())));
			}
		}

		return errors.stream().collect(Collectors.joining(" "));
	}

	/**
	 * Validate the compiled rtl (label usage, variable sizes, no delay slot,
	 * no export) and translate it to the SLEIGH runtime ConstructTpl form.
	 * Throws SleighException on any validation failure.
	 */
	private ConstructTpl buildConstructor(ghidra.pcodeCPort.semantics.ConstructTpl rtl) {
		String errstring = "";
		if (rtl != null) {
			errstring = checkLabels();
			if ((errstring.length() == 0) && (!propagateSize(rtl))) {
				errstring = " Could not resolve at least 1 variable size";
			}
			if ((errstring.length() == 0) && rtl.delaySlot() != 0) { // Delay slot is present in this fragment
				errstring = " delayslot not permitted in pcode fragment";
			}
			if (rtl.getResult() != null) {
				errstring = " export not permitted in pcode fragment";
			}
		}
		if (errstring.length() != 0) {
			throw new SleighException(errstring);
		}
		return translateConstructTpl(rtl);
	}

	/**
	 * This class wraps on existing SleighLanguage with the SleighBase interface expected by
	 * PcodeCompile. It populates the symbol table with user-defined operations and the global
	 * VarnodeSymbol objects, which typically includes all the general purpose registers.
	 */
	private static class PcodeTranslate extends SleighBase {

		/**
		 * Mirror the language's address spaces into this SleighBase, inserting
		 * the constant and OTHER spaces first. Spaces with unique index < 2
		 * are skipped (those slots are taken by the two just inserted).
		 */
		private void copySpaces(SleighLanguage language) {
			insertSpace(new ConstantSpace(this));
			insertSpace(
				new OtherSpace(this, SpaceNames.OTHER_SPACE_NAME, SpaceNames.OTHER_SPACE_INDEX));

			AddressSpace[] spaces = language.getAddressFactory().getAllAddressSpaces();
			for (AddressSpace spc : spaces) {
				if (spc.getUnique() < 2) {
					continue;
				}
				AddrSpace resSpace;
				int sz = spc.getSize();
				if (spc instanceof SegmentedAddressSpace) {
					// TODO: SegmentedAddressSpace shouldn't really return 21
					sz = 32;
				}
				if (sz > 64) {
					sz = 64;
				}
				int bytesize = (sz + 7) / 8; // Convert bits to bytes
				switch (spc.getType()) {
					case AddressSpace.TYPE_UNIQUE:
						resSpace = new UniqueSpace(this, spc.getUnique(), 0);
						break;
					case AddressSpace.TYPE_OTHER:
						resSpace = new OtherSpace(this, spc.getName(), spc.getUnique());
						break;
					case AddressSpace.TYPE_RAM:
						resSpace = new AddrSpace(this, spacetype.IPTR_PROCESSOR, spc.getName(),
							bytesize, spc.getAddressableUnitSize(), spc.getUnique(),
							AddrSpace.hasphysical, 1);
						break;
					case AddressSpace.TYPE_REGISTER:
						resSpace = new AddrSpace(this, spacetype.IPTR_PROCESSOR, spc.getName(),
							bytesize, spc.getAddressableUnitSize(), spc.getUnique(),
							AddrSpace.hasphysical, 0);
						break;
					default:
						resSpace = null;
				}
				// NOTE(review): an unrecognized space type stops the copy loop
				// entirely (break, not continue) — remaining spaces are dropped.
				if (resSpace == null) {
					break;
				}
				insertSpace(resSpace);
			}
			setDefaultSpace(language.getDefaultSpace().getUnique());
		}

		/**
		 * Populate the predefined symbol table for the parser from the given SLEIGH language.
		 * We only use user-defined op symbols and varnode symbols.
		 * @param language is the SLEIGH language
		 */
		private void copySymbols(SleighLanguage language) {
			SymbolTable langTable = language.getSymbolTable();
			symtab.addScope(); // Global scope
			for (Symbol sym : langTable.getSymbolList()) {
				if (sym instanceof UseropSymbol) {
					UserOpSymbol cloneSym = new UserOpSymbol(null, sym.getName());
					cloneSym.setIndex(((UseropSymbol) sym).getIndex());
					symtab.addSymbol(cloneSym);
				}
				else if (sym instanceof VarnodeSymbol) {
					VarnodeData vData = ((VarnodeSymbol) sym).getFixedVarnode();
					// the context register is internal and must not be exposed
					if ("contextreg".equals(sym.getName())) {
						continue;
					}
					ghidra.pcodeCPort.slghsymbol.VarnodeSymbol cloneSym;
					AddrSpace base = getSpace(vData.space.getUnique());
					cloneSym = new ghidra.pcodeCPort.slghsymbol.VarnodeSymbol(null, sym.getName(),
						base, vData.offset, vData.size);
					symtab.addSymbol(cloneSym);
				}
			}
		}

		public PcodeTranslate(SleighLanguage language, long ubase) {
			super();
			target_endian = language.isBigEndian() ? 1 : 0;
			alignment = 0;
			setUniqueBase(ubase);

			copySpaces(language);
			copySymbols(language);
			// make every copied space addressable by name from the snippet
			for (int i = 0; i < numSpaces(); i++) {
				AddrSpace space = getSpace(i);
				symtab.addSymbol(new SpaceSymbol(null, space));
			}
		}

		@Override
		public void initialize(DocumentStorage store) {
			// Unused
		}

		@Override
		public int printAssembly(PrintStream s, int size, Address baseaddr) {
			// Not meaningful for snippet parsing
			return 0;
		}

		@Override
		public int instructionLength(Address baseaddr) {
			// Not meaningful for snippet parsing
			return 0;
		}
	}

	/**
	 * Translate a compiler-side ConstructTpl into the runtime form, converting
	 * the optional result handle and every op in the op vector.
	 */
	public ConstructTpl translateConstructTpl(
			ghidra.pcodeCPort.semantics.ConstructTpl constructTpl) {
		HandleTpl handle = null;
		if (constructTpl.getResult() != null) {
			handle = translateHandleTpl(constructTpl.getResult());
		}
		OpTpl[] vec = new OpTpl[constructTpl.getOpvec().size()];
		for (int i = 0; i < vec.length; ++i) {
			vec[i] = translateOpTpl(constructTpl.getOpvec().get(i));
		}
		return new ConstructTpl(vec, handle, constructTpl.numLabels());
	}

	/**
	 * Translate a compiler-side HandleTpl, converting each constituent ConstTpl.
	 */
	public HandleTpl translateHandleTpl(ghidra.pcodeCPort.semantics.HandleTpl handleTpl) {
		return new HandleTpl(translateConstTpl(handleTpl.getSpace()),
			translateConstTpl(handleTpl.getSize()), translateConstTpl(handleTpl.getPtrSpace()),
			translateConstTpl(handleTpl.getPtrOffset()), translateConstTpl(handleTpl.getPtrSize()),
			translateConstTpl(handleTpl.getTempSpace()),
			translateConstTpl(handleTpl.getTempOffset()));
	}

	/**
	 * Translate a compiler-side OpTpl (opcode, optional output, inputs).
	 */
	public OpTpl translateOpTpl(ghidra.pcodeCPort.semantics.OpTpl opTpl) {
		VarnodeTpl output = null;
		if (opTpl.getOut() != null) {
			output = translateVarnodeTpl(opTpl.getOut());
		}
		VarnodeTpl[] input = new VarnodeTpl[opTpl.numInput()];
		for (int i = 0; i < input.length; ++i) {
			input[i] = translateVarnodeTpl(opTpl.getIn(i));
		}
		return new OpTpl(opTpl.getOpcode().ordinal(), output, input);
	}

	/**
	 * Translate a compiler-side VarnodeTpl (space, offset, size triple).
	 */
	public VarnodeTpl translateVarnodeTpl(ghidra.pcodeCPort.semantics.VarnodeTpl varnodeTpl) {
		return new VarnodeTpl(translateConstTpl(varnodeTpl.getSpace()),
			translateConstTpl(varnodeTpl.getOffset()), translateConstTpl(varnodeTpl.getSize()));
	}

	/**
	 * Translate a compiler-side ConstTpl, resolving its AddrSpace (if any) to
	 * the corresponding AddressSpace by name via the address factory.
	 */
	public ConstTpl translateConstTpl(ghidra.pcodeCPort.semantics.ConstTpl constTpl) {
		AddrSpace spc = constTpl.getSpace();
		AddressSpace resSpace = null;
		if (spc != null) {
			resSpace = addrFactory.getAddressSpace(spc.getName());
		}
		int select = 0;
		ghidra.pcodeCPort.semantics.ConstTpl.v_field field = constTpl.getSelect();
		if (field != null) {
			select = field.ordinal();
		}
		return new ConstTpl(constTpl.getType().ordinal(), constTpl.getReal(), resSpace,
			constTpl.getHandleIndex(), select);
	}

	/**
	 * Compile pcode semantic statements.
	 * @param pcodeStatements is the raw source to parse
	 * @param srcFile source filename from which pcodeStatements came (
	 * @param srcLine line number in srcFile corresponding to pcodeStatements
	 * @return ConstructTpl. A null may be returned or
	 * an exception thrown if parsing/compiling fails (see application log for errors).
	 * @throws SleighException pcode compile error
	 */
	public ConstructTpl compilePcode(String pcodeStatements, String srcFile, int srcLine)
			throws SleighException {
		LineArrayListWriter writer = null;
		try {
			writer = new LineArrayListWriter();
			ParsingEnvironment env = new ParsingEnvironment(writer);

			// inject pcode statement lines into writer (needed for error reporting)
			BufferedReader r = new BufferedReader(new StringReader(pcodeStatements));
			String line;
			while ((line = r.readLine()) != null) {
				writer.write(line);
				writer.newLine();
			}

			CharStream input = new ANTLRStringStream(writer.toString());
			env.getLocator().registerLocation(input.getLine(), new Location(srcFile, srcLine));

			// lex/parse the snippet in SEMANTIC mode
			SleighLexer lex = new SleighLexer(input);
			lex.setEnv(env);
			UnbufferedTokenStream tokens = new UnbufferedTokenStream(lex);
			SleighParser parser = new SleighParser(tokens);
			parser.setEnv(env);
			parser.setLexer(lex);
			lex.pushMode(SleighRecognizerConstants.SEMANTIC);
			semantic_return semantic = parser.semantic();
			lex.popMode();

			// walk the AST, compiling it against this parser's symbols
			CommonTreeNodeStream nodes = new CommonTreeNodeStream(semantic.getTree());
			nodes.setTokenStream(tokens);
			// ANTLRUtil.debugNodeStream(nodes, System.out);
			SleighCompiler walker = new SleighCompiler(nodes);

			SectionVector rtl = walker.semantic(env, null, this, semantic.getTree(), false, false);

			if (getErrors() != 0) {
				return null;
			}

			ConstructTpl result = null;
			if (rtl != null) {
				result = buildConstructor(rtl.getMainSection());
			}
			return result;
		}
		catch (IOException e) {
			throw new AssertException(); // unexpected condition
		}
		catch (RecognitionException e) {
			throw new SleighException("Semantic compilation error: " + e.getMessage(), e);
		}
		catch (BailoutException e) {
			throw new SleighException("Unrecoverable error(s), halting compilation", e);
		}
		catch (NullPointerException e) {
			// NOTE(review): NPE is treated as a compiler bail-out rather than a bug
			throw new SleighException("Unrecoverable error(s), halting compilation", e);
		}
		finally {
			if (writer != null) {
				try {
					writer.close();
				}
				catch (IOException e) {
					// squash!!! we tried
				}
			}
		}
	}

	/**
	 * Named sections are a full-SLEIGH feature; snippet parsing rejects them.
	 */
	@Override
	public SectionSymbol newSectionSymbol(Location where, String text) {
		throw new SleighError("Pcode snippet parsing does not support use of sections", where);
	}

	@Override
	public VectorSTL<ghidra.pcodeCPort.semantics.OpTpl> createCrossBuild(Location where,
			ghidra.pcodeCPort.semantics.VarnodeTpl v, SectionSymbol second) {
		throw new SleighError("Pcode snippet parsing does not support use of sections", where);
	}

	@Override
	public SectionVector standaloneSection(ghidra.pcodeCPort.semantics.ConstructTpl main) {
		// Create SectionVector for just the main rtl section with no named sections
		SectionVector res = new SectionVector(main, null);
		return res;
	}

	@Override
	public SectionVector firstNamedSection(ghidra.pcodeCPort.semantics.ConstructTpl main,
			SectionSymbol sym) {
		throw new SleighError("Pcode snippet parsing does not support use of sections",
			sym.location);
	}

	@Override
	public SectionVector nextNamedSection(SectionVector vec,
			ghidra.pcodeCPort.semantics.ConstructTpl section, SectionSymbol sym) {
		throw new SleighError("Pcode snippet parsing does not support use of sections",
			sym.location);
	}

	@Override
	public SectionVector finalNamedSection(SectionVector vec,
			ghidra.pcodeCPort.semantics.ConstructTpl section) {
		throw new SleighError("Pcode snippet parsing does not support use of sections", null); // can never get here
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.buffer;
import com.facebook.presto.OutputBuffers;
import com.facebook.presto.OutputBuffers.OutputBufferId;
import com.facebook.presto.block.BlockAssertions;
import com.facebook.presto.execution.StateMachine;
import com.facebook.presto.memory.context.SimpleLocalMemoryContext;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.BigintType;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.stream.Collectors;
import static com.facebook.presto.OutputBuffers.BROADCAST_PARTITION_ID;
import static com.facebook.presto.OutputBuffers.BufferType.ARBITRARY;
import static com.facebook.presto.OutputBuffers.createInitialEmptyOutputBuffers;
import static com.facebook.presto.execution.buffer.BufferResult.emptyResults;
import static com.facebook.presto.execution.buffer.BufferState.OPEN;
import static com.facebook.presto.execution.buffer.BufferState.TERMINAL_BUFFER_STATES;
import static com.facebook.presto.execution.buffer.TestClientBuffer.assertBufferResultEquals;
import static com.facebook.presto.execution.buffer.TestClientBuffer.getFuture;
import static com.facebook.presto.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde;
import static com.facebook.presto.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.concurrent.MoreFutures.tryGetFutureValue;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestArbitraryOutputBuffer
{
private static final PagesSerde PAGES_SERDE = testingPagesSerde();
private static final Duration NO_WAIT = new Duration(0, MILLISECONDS);
private static final Duration MAX_WAIT = new Duration(1, SECONDS);
private static final DataSize BUFFERED_PAGE_SIZE = new DataSize(PAGES_SERDE.serialize(createPage(42)).getRetainedSizeInBytes(), BYTE);
private static final String TASK_INSTANCE_ID = "task-instance-id";
private static final ImmutableList<BigintType> TYPES = ImmutableList.of(BIGINT);
private static final OutputBufferId FIRST = new OutputBufferId(0);
private static final OutputBufferId SECOND = new OutputBufferId(1);
private ScheduledExecutorService stateNotificationExecutor;
@BeforeClass
public void setUp()
{
    // Five daemon threads are plenty for delivering buffer state-change
    // notifications during these tests.
    ScheduledExecutorService executor = newScheduledThreadPool(5, daemonThreadsNamed("test-%s"));
    stateNotificationExecutor = executor;
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
    // Stop the notification threads; clearing the field first makes a
    // repeated invocation a no-op.
    ScheduledExecutorService executor = stateNotificationExecutor;
    stateNotificationExecutor = null;
    if (executor != null) {
        executor.shutdownNow();
    }
}
@Test
public void testInvalidConstructorArg()
{
    // A zero-capacity buffer must be rejected whether or not the buffer ids
    // are finalized. The fail(..) messages name the exception actually
    // expected by the catch clauses (IllegalArgumentException).
    try {
        createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, BROADCAST_PARTITION_ID).withNoMoreBufferIds(), new DataSize(0, BYTE));
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException ignored) {
    }
    try {
        createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), new DataSize(0, BYTE));
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException ignored) {
    }
}
@Test
public void testSimple()
{
OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(ARBITRARY);
ArbitraryOutputBuffer buffer = createArbitraryBuffer(outputBuffers, sizeOfPages(10));
// add three items
for (int i = 0; i < 3; i++) {
addPage(buffer, createPage(i));
}
outputBuffers = createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, BROADCAST_PARTITION_ID);
// add a queue
buffer.setOutputBuffers(outputBuffers);
assertQueueState(buffer, 3, FIRST, 0, 0);
// get the three elements
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
// pages not acknowledged yet so state is the same
assertQueueState(buffer, 0, FIRST, 3, 0);
// acknowledge first three pages
buffer.get(FIRST, 3, sizeOfPages(1)).cancel(true);
// pages now acknowledged
assertQueueState(buffer, 0, FIRST, 0, 3);
// fill the buffer, so that it has 10 buffered pages
for (int i = 3; i < 13; i++) {
addPage(buffer, createPage(i));
}
// there is a pending read from above so one page will be assigned to the first buffer
assertQueueState(buffer, 9, FIRST, 1, 3);
// try to add one more page, which should block
ListenableFuture<?> future = enqueuePage(buffer, createPage(13));
assertFalse(future.isDone());
assertQueueState(buffer, 10, FIRST, 1, 3);
// remove a page
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 3, sizeOfPages(1), NO_WAIT), bufferResult(3, createPage(3)));
// page not acknowledged yet so sent count is the same
assertQueueState(buffer, 10, FIRST, 1, 3);
// we should still be blocked
assertFalse(future.isDone());
//
// add another buffer and verify it sees buffered pages
outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
buffer.setOutputBuffers(outputBuffers);
assertQueueState(buffer, 10, SECOND, 0, 0);
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(10), NO_WAIT), bufferResult(0,
createPage(4),
createPage(5),
createPage(6),
createPage(7),
createPage(8),
createPage(9),
createPage(10),
createPage(11),
createPage(12),
createPage(13)));
// page not acknowledged yet so sent count is still zero
assertQueueState(buffer, 0, SECOND, 10, 0);
// acknowledge the 10 pages
buffer.get(SECOND, 10, sizeOfPages(10)).cancel(true);
assertQueueState(buffer, 0, SECOND, 0, 10);
//
// tell shared buffer there will be no more queues
outputBuffers = outputBuffers.withNoMoreBufferIds();
buffer.setOutputBuffers(outputBuffers);
// buffers should see the same stats and the blocked page future from above should be done
assertQueueState(buffer, 0, FIRST, 1, 3);
assertQueueState(buffer, 0, SECOND, 0, 10);
assertFutureIsDone(future);
// add 3 more pages, buffers always show the same stats
addPage(buffer, createPage(14));
addPage(buffer, createPage(15));
addPage(buffer, createPage(16));
assertQueueState(buffer, 2, FIRST, 1, 3);
assertQueueState(buffer, 2, SECOND, 1, 10);
// pull one page from the second buffer
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 10, sizeOfPages(1), NO_WAIT), bufferResult(10, createPage(14)));
assertQueueState(buffer, 2, FIRST, 1, 3);
assertQueueState(buffer, 2, SECOND, 1, 10);
// acknowledge the page in the first buffer and pull remaining ones
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 4, sizeOfPages(10), NO_WAIT), bufferResult(4, createPage(15), createPage(16)));
assertQueueState(buffer, 0, FIRST, 2, 4);
assertQueueState(buffer, 0, SECOND, 1, 10);
//
// finish the buffer
assertFalse(buffer.isFinished());
buffer.setNoMorePages();
assertQueueState(buffer, 0, FIRST, 2, 4);
assertQueueState(buffer, 0, SECOND, 1, 10);
// not fully finished until all pages are consumed
assertFalse(buffer.isFinished());
// acknowledge the pages from the first buffer; buffer should not close automatically
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 6, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 6, true));
assertQueueState(buffer, 0, FIRST, 0, 6);
assertQueueState(buffer, 0, SECOND, 1, 10);
assertFalse(buffer.isFinished());
// finish first queue
buffer.abort(FIRST);
assertQueueClosed(buffer, 0, FIRST, 6);
assertQueueState(buffer, 0, SECOND, 1, 10);
assertFalse(buffer.isFinished());
// acknowledge a page from the second queue; queue should not close automatically
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 11, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 11, true));
assertQueueState(buffer, 0, SECOND, 0, 11);
assertFalse(buffer.isFinished());
// finish second queue
buffer.abort(SECOND);
assertQueueClosed(buffer, 0, FIRST, 6);
assertQueueClosed(buffer, 0, SECOND, 11);
assertFinished(buffer);
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 6, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 6, true));
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 11, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 11, true));
}
// Verifies the writer back-pressure contract: with capacity for two pages,
// a third enqueue must return a future that is not immediately done.
@Test
public void testBufferFull()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(2));
// Add two pages, buffer is full
addPage(buffer, createPage(1));
addPage(buffer, createPage(2));
// third page is blocked (enqueuePage asserts the returned future is not done)
enqueuePage(buffer, createPage(3));
}
// Verifies read idempotence: repeating a get() at the same sequence id returns
// the same pages until the client acknowledges them by advancing the token.
@Test
public void testDuplicateRequests()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
// add three items
for (int i = 0; i < 3; i++) {
addPage(buffer, createPage(i));
}
// add a queue
assertQueueState(buffer, 3, FIRST, 0, 0);
// get the three elements
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
// pages not acknowledged yet so state is the same
assertQueueState(buffer, 0, FIRST, 3, 0);
// get the three elements again
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
// pages not acknowledged yet so state is the same
assertQueueState(buffer, 0, FIRST, 3, 0);
// acknowledge the pages
buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
// attempt to get the three elements again
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
// pages are acknowledged
assertQueueState(buffer, 0, FIRST, 0, 3);
}
// Verifies that declaring a new buffer id after noMoreBufferIds has been set
// is rejected. setOutputBuffers throws IllegalArgumentException here, so the
// fail() message names that exception (it previously claimed
// IllegalStateException, which would mislead anyone debugging a failure).
@Test
public void testAddQueueAfterCreation()
{
    ArbitraryOutputBuffer buffer = createArbitraryBuffer(
            createInitialEmptyOutputBuffers(ARBITRARY)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));

    assertFalse(buffer.isFinished());

    try {
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY)
                .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                .withNoMoreBufferIds());
        fail("Expected IllegalArgumentException from addQueue after noMoreQueues has been called");
    }
    catch (IllegalArgumentException ignored) {
    }
}
// Verifies that pages enqueued after setNoMorePages() are silently dropped:
// no pages are buffered or sent afterwards.
@Test
public void testAddAfterFinish()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
buffer.setNoMorePages();
addPage(buffer, createPage(0));
addPage(buffer, createPage(1));
// nothing was accepted, so nothing was sent
assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
}
// Verifies that setting noMoreBufferIds is idempotent (repeated calls are
// accepted), but that introducing a NEW buffer id afterwards is rejected with
// IllegalArgumentException. The fail() message names the exception actually
// caught (it previously claimed IllegalStateException).
@Test
public void testAddQueueAfterNoMoreQueues()
{
    ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
    assertFalse(buffer.isFinished());

    // tell buffer no more queues will be added
    buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withNoMoreBufferIds());
    assertFalse(buffer.isFinished());

    // set no more queues a second time to assure that we don't get an exception or such
    buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withNoMoreBufferIds());
    assertFalse(buffer.isFinished());

    // set no more queues a third time to assure that we don't get an exception or such
    buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withNoMoreBufferIds());
    assertFalse(buffer.isFinished());

    try {
        OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(ARBITRARY)
                .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                .withNoMoreBufferIds();
        buffer.setOutputBuffers(outputBuffers);
        fail("Expected IllegalArgumentException from addQueue after noMoreQueues has been called");
    }
    catch (IllegalArgumentException ignored) {
    }
}
// Verifies that pages enqueued after destroy() are silently dropped:
// no pages are buffered or sent afterwards.
@Test
public void testAddAfterDestroy()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
buffer.destroy();
addPage(buffer, createPage(0));
addPage(buffer, createPage(1));
// nothing was accepted, so nothing was sent
assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
}
// Verifies that a read issued before the client buffer is declared stays
// pending and is completed as soon as a page becomes available.
@Test
public void testGetBeforeCreate()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
assertFalse(buffer.isFinished());
// get a page from a buffer that doesn't exist yet
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
assertFalse(future.isDone());
// add a page and verify the future is complete
addPage(buffer, createPage(33));
assertTrue(future.isDone());
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));
}
// Verifies that reading from a buffer id that was never declared fails fast
// once the final buffer set is known (expected exception declared on @Test).
@Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = "No more buffers already set")
public void testUseUndeclaredBufferAfterFinalBuffersSet()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
assertFalse(buffer.isFinished());
// get a page from a buffer that was not declared, which will fail
buffer.get(SECOND, 0L, sizeOfPages(1));
}
// Verifies that aborting a buffer before it is declared completes any pending
// read with empty results, and that the buffer stays closed even after it is
// later declared and pages arrive.
@Test
public void testAbortBeforeCreate()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
assertFalse(buffer.isFinished());
// get a page from a buffer that doesn't exist yet
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
assertFalse(future.isDone());
// abort that buffer, and verify the future is finished
buffer.abort(FIRST);
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
// add a page; the aborted buffer must not receive it
addPage(buffer, createPage(33));
// add the buffer and verify we did not get the page
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, 0));
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
// Verifies writer back-pressure with multiple declared client buffers:
// capacity is shared, so a third page still blocks.
@Test
public void testFullBufferBlocksWriter()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(2));
// Add two pages, buffer is full
addPage(buffer, createPage(1));
addPage(buffer, createPage(2));
// third page is blocked (enqueuePage asserts the returned future is not done)
enqueuePage(buffer, createPage(3));
}
// Verifies abort semantics: an aborted client cannot acknowledge its in-flight
// page (the page is lost), and remaining clients continue from the next page.
@Test
public void testAbort()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
// fill the buffer
for (int i = 0; i < 10; i++) {
addPage(buffer, createPage(i));
}
buffer.setNoMorePages();
// add one output buffer
OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, 0);
buffer.setOutputBuffers(outputBuffers);
// read a page from the first buffer
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
// abort buffer, and verify page cannot be acknowledged
buffer.abort(FIRST);
assertQueueClosed(buffer, 9, FIRST, 0);
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
outputBuffers = outputBuffers.withBuffer(SECOND, 0).withNoMoreBufferIds();
buffer.setOutputBuffers(outputBuffers);
// first page is lost because the first buffer was aborted
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(1)));
buffer.abort(SECOND);
assertQueueClosed(buffer, 0, SECOND, 0);
assertFinished(buffer);
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
// Verifies that finishing the producer while all client queues are empty
// leaves the queues closable with zero pages sent.
@Test
public void testFinishClosesEmptyQueues()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
// finish while queues are empty
buffer.setNoMorePages();
assertQueueState(buffer, 0, FIRST, 0, 0);
assertQueueState(buffer, 0, SECOND, 0, 0);
buffer.abort(FIRST);
buffer.abort(SECOND);
assertQueueClosed(buffer, 0, FIRST, 0);
assertQueueClosed(buffer, 0, SECOND, 0);
}
// Verifies that aborting a client buffer completes that client's pending read
// with empty results instead of leaving it blocked forever.
@Test
public void testAbortFreesReader()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, 0));
assertFalse(buffer.isFinished());
// attempt to get a page
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
// verify we are waiting for a page
assertFalse(future.isDone());
// add one item
addPage(buffer, createPage(0));
// verify we got one page
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
// attempt to get another page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
// abort the buffer
buffer.abort(FIRST);
assertQueueClosed(buffer, 0, FIRST, 1);
// verify the future completed
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
}
// Verifies that closing the client's queue releases a blocked reader:
// the pending read future completes with empty results.
@Test
public void testFinishFreesReader()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, 0));
assertFalse(buffer.isFinished());
// attempt to get a page
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
// verify we are waiting for a page
assertFalse(future.isDone());
// add one item
addPage(buffer, createPage(0));
// verify we got one page
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
// attempt to get another page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
// finish the buffer
assertQueueState(buffer, 0, FIRST, 0, 1);
buffer.abort(FIRST);
assertQueueClosed(buffer, 0, FIRST, 1);
// verify the future completed
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
}
// Verifies that setNoMorePages() unblocks writers that were waiting on a full
// buffer, and that the buffer only finishes once all pages are consumed and
// the client queue is closed.
@Test
public void testFinishFreesWriter()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(5));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, 0)
.withNoMoreBufferIds());
assertFalse(buffer.isFinished());
// fill the buffer
for (int i = 0; i < 5; i++) {
addPage(buffer, createPage(i));
}
// enqueue two additional pages; both block because the buffer is full
ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
// get and acknowledge one page
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
buffer.get(FIRST, 1, sizeOfPages(100)).cancel(true);
// verify we are still blocked because the buffer is full
assertFalse(firstEnqueuePage.isDone());
assertFalse(secondEnqueuePage.isDone());
// finish the query
buffer.setNoMorePages();
assertFalse(buffer.isFinished());
// verify futures are complete
assertFutureIsDone(firstEnqueuePage);
assertFutureIsDone(secondEnqueuePage);
// get and acknowledge the remaining pages (1 through 6)
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(100), NO_WAIT),
bufferResult(1, createPage(1), createPage(2), createPage(3), createPage(4), createPage(5), createPage(6)));
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 7, sizeOfPages(100), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 7, true));
// verify not finished
assertFalse(buffer.isFinished());
// finish the queue
buffer.abort(FIRST);
// verify finished
assertFinished(buffer);
}
// Verifies that destroy() completes a blocked reader's pending future
// with empty results.
@Test
public void testDestroyFreesReader()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(5));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, 0)
.withNoMoreBufferIds());
assertFalse(buffer.isFinished());
// attempt to get a page
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
// verify we are waiting for a page
assertFalse(future.isDone());
// add one page
addPage(buffer, createPage(0));
// verify we got one page
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
// attempt to get another page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
// destroy the buffer
buffer.destroy();
assertQueueClosed(buffer, 0, FIRST, 1);
// verify the future completed
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
}
// Verifies that destroy() (query cancellation) unblocks writers that were
// waiting on a full buffer and transitions the buffer to finished.
@Test
public void testDestroyFreesWriter()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(5));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, 0)
.withNoMoreBufferIds());
assertFalse(buffer.isFinished());
// fill the buffer
for (int i = 0; i < 5; i++) {
addPage(buffer, createPage(i));
}
// add two pages to the buffer queue; both block because the buffer is full
ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
// get and acknowledge one page
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
// verify we are still blocked because the buffer is full
assertFalse(firstEnqueuePage.isDone());
assertFalse(secondEnqueuePage.isDone());
// destroy the buffer (i.e., cancel the query)
buffer.destroy();
assertFinished(buffer);
// verify the futures are completed
assertFutureIsDone(firstEnqueuePage);
assertFutureIsDone(secondEnqueuePage);
}
// Verifies that fail() deliberately does NOT complete pending reads:
// readers stay blocked so the coordinator (not a data request) reports
// the failure to clients.
@Test
public void testFailDoesNotFreeReader()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(5));
assertFalse(buffer.isFinished());
// attempt to get a page
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
// verify we are waiting for a page
assertFalse(future.isDone());
// add one page
addPage(buffer, createPage(0));
// verify we got one page
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
// attempt to get another page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
// fail the buffer
buffer.fail();
// future should have not finished
assertFalse(future.isDone());
// attempt to get another page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
}
// Verifies that fail() unblocks writers waiting on a full buffer, while the
// buffer itself does not report finished (it is failed, not finished).
@Test
public void testFailFreesWriter()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(
createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(5));
assertFalse(buffer.isFinished());
// fill the buffer
for (int i = 0; i < 5; i++) {
addPage(buffer, createPage(i));
}
// add two pages to the buffer queue; both block because the buffer is full
ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
// get and acknowledge one page
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
// verify we are still blocked because the buffer is full
assertFalse(firstEnqueuePage.isDone());
assertFalse(secondEnqueuePage.isDone());
// fail the buffer (i.e., cancel the query)
buffer.fail();
assertFalse(buffer.isFinished());
// verify the futures are completed
assertFutureIsDone(firstEnqueuePage);
assertFutureIsDone(secondEnqueuePage);
}
// Verifies that buffers may still be declared after fail(), but reads against
// them stay blocked (consistent with testFailDoesNotFreeReader).
@Test
public void testAddBufferAfterFail()
{
OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, BROADCAST_PARTITION_ID);
ArbitraryOutputBuffer buffer = createArbitraryBuffer(outputBuffers, sizeOfPages(5));
assertFalse(buffer.isFinished());
// attempt to get a page
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
// verify we are waiting for a page
assertFalse(future.isDone());
// add one page
addPage(buffer, createPage(0));
// verify we got one page
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
// fail the buffer
buffer.fail();
// add a buffer
outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
buffer.setOutputBuffers(outputBuffers);
// attempt to get page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
future = buffer.get(SECOND, 0, sizeOfPages(10));
assertFalse(future.isDone());
// set no more buffers
outputBuffers = outputBuffers.withNoMoreBufferIds();
buffer.setOutputBuffers(outputBuffers);
// attempt to get page, and verify we are blocked
future = buffer.get(FIRST, 1, sizeOfPages(10));
assertFalse(future.isDone());
future = buffer.get(SECOND, 0, sizeOfPages(10));
assertFalse(future.isDone());
}
// Verifies the buffer only reports finished after the last pages are
// acknowledged (here via abort of the client queue), not merely after
// no-more-pages and a successful read.
@Test
public void testBufferCompletion()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(5));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY)
.withBuffer(FIRST, 0)
.withNoMoreBufferIds());
assertFalse(buffer.isFinished());
// fill the buffer
List<Page> pages = new ArrayList<>();
for (int i = 0; i < 5; i++) {
Page page = createPage(i);
addPage(buffer, page);
pages.add(page);
}
buffer.setNoMorePages();
// get and acknowledge 5 pages
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(5), MAX_WAIT), bufferResult(0, pages));
// buffer is not finished
assertFalse(buffer.isFinished());
// there are no more pages and no more buffers, but buffer is not finished because it didn't receive an acknowledgement yet
assertFalse(buffer.isFinished());
// ask the buffer to finish
buffer.abort(FIRST);
// verify that the buffer is finished
assertTrue(buffer.isFinished());
}
// Verifies that setNoMorePages() completes a pending read immediately,
// and that subsequent reads also complete without blocking.
@Test
public void testNoMorePagesFreesReader()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, 0));
assertFalse(buffer.isFinished());
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
assertFalse(future.isDone());
buffer.setNoMorePages();
assertTrue(future.isDone());
assertTrue(buffer.get(FIRST, 0, sizeOfPages(10)).isDone());
}
// Verifies that the producer may finish before the final buffer set is known:
// a buffer declared afterwards drains the pages, and a buffer declared after
// everything is consumed sees immediately-empty, finished results.
@Test
public void testFinishBeforeNoMoreBuffers()
{
ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10));
// fill the buffer
for (int i = 0; i < 3; i++) {
addPage(buffer, createPage(i));
}
buffer.setNoMorePages();
assertFalse(buffer.isFinished());
// add one output buffer
OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(ARBITRARY).withBuffer(FIRST, 0);
buffer.setOutputBuffers(outputBuffers);
assertFalse(buffer.isFinished());
// read a page from the first buffer
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
assertFalse(buffer.isFinished());
// read remaining pages from the first buffer and acknowledge
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(10), NO_WAIT), bufferResult(1, createPage(1), createPage(2)));
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 3, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 3, true));
assertFalse(buffer.isFinished());
// finish first queue
buffer.abort(FIRST);
assertQueueClosed(buffer, 0, FIRST, 3);
assertFinished(buffer);
// add another buffer after finish
outputBuffers = outputBuffers.withBuffer(SECOND, 0);
buffer.setOutputBuffers(outputBuffers);
// verify second buffer has no results
assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
// Issues a read against the buffer and waits up to maxWait for the result.
private static BufferResult getBufferResult(OutputBuffer buffer, OutputBufferId bufferId, long sequenceId, DataSize maxSize, Duration maxWait)
{
    return getFuture(buffer.get(bufferId, sequenceId, maxSize), maxWait);
}
// Enqueues a single page and asserts the write blocked (future not yet done);
// used by tests that expect back-pressure. Returns the blocked future.
private static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page)
{
ListenableFuture<?> future = buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page)));
assertFalse(future.isDone());
return future;
}
// Enqueues a single page and asserts the write completed immediately
// (i.e., the buffer had capacity).
private static void addPage(OutputBuffer buffer, Page page)
{
assertTrue(buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page))).isDone(), "Expected add page to not block");
}
// Asserts the buffer's bookkeeping for one client queue:
//   unassignedPages - pages buffered but not yet assigned to any client
//                     (total buffered minus the sum assigned across clients)
//   bufferedPages   - pages currently assigned to bufferId awaiting ack
//   pagesSent       - pages already acknowledged by bufferId
private static void assertQueueState(
OutputBuffer buffer,
int unassignedPages,
OutputBufferId bufferId,
int bufferedPages,
int pagesSent)
{
OutputBufferInfo outputBufferInfo = buffer.getInfo();
// pages assigned to some client, summed over all clients
long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum();
assertEquals(
outputBufferInfo.getTotalBufferedPages() - assignedPages,
unassignedPages,
"unassignedPages");
BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream()
.filter(info -> info.getBufferId().equals(bufferId))
.findAny()
.orElse(null);
// compare the full BufferInfo snapshot for this client
assertEquals(
bufferInfo,
new BufferInfo(
bufferId,
false,
bufferedPages,
pagesSent,
new PageBufferInfo(
bufferId.getId(),
bufferedPages,
sizeOfPages(bufferedPages).toBytes(),
bufferedPages + pagesSent, // every page has one row
bufferedPages + pagesSent)));
}
// Asserts that the client queue identified by bufferId is closed: zero pages
// buffered, the expected pagesSent count, and the finished flag set. Also
// checks the number of still-unassigned pages in the buffer overall.
@SuppressWarnings("ConstantConditions")
private static void assertQueueClosed(OutputBuffer buffer, int unassignedPages, OutputBufferId bufferId, int pagesSent)
{
OutputBufferInfo outputBufferInfo = buffer.getInfo();
long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum();
assertEquals(
outputBufferInfo.getTotalBufferedPages() - assignedPages,
unassignedPages,
"unassignedPages");
BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream()
.filter(info -> info.getBufferId().equals(bufferId))
.findAny()
.orElse(null);
assertEquals(bufferInfo.getBufferedPages(), 0);
assertEquals(bufferInfo.getPagesSent(), pagesSent);
assertTrue(bufferInfo.isFinished());
}
// Creates an ArbitraryOutputBuffer with the given capacity and applies the
// initial OutputBuffers declaration before returning it.
private ArbitraryOutputBuffer createArbitraryBuffer(OutputBuffers buffers, DataSize dataSize)
{
ArbitraryOutputBuffer buffer = new ArbitraryOutputBuffer(
TASK_INSTANCE_ID,
new StateMachine<>("bufferState", stateNotificationExecutor, OPEN, TERMINAL_BUFFER_STATES),
dataSize,
() -> new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext()),
stateNotificationExecutor);
buffer.setOutputBuffers(buffers);
return buffer;
}
// Asserts the buffer as a whole is finished and that every client queue
// reports finished with nothing left buffered.
private static void assertFinished(OutputBuffer buffer)
{
assertTrue(buffer.isFinished());
for (BufferInfo bufferInfo : buffer.getInfo().getBuffers()) {
assertTrue(bufferInfo.isFinished());
assertEquals(bufferInfo.getBufferedPages(), 0);
}
}
// Waits up to 5 seconds for the future to complete, then asserts it is done.
private static void assertFutureIsDone(Future<?> future)
{
tryGetFutureValue(future, 5, SECONDS);
assertTrue(future.isDone());
}
// Builds an expected BufferResult from a non-empty varargs list of pages;
// delegates to the List overload.
private static BufferResult bufferResult(long token, Page firstPage, Page... otherPages)
{
    ImmutableList.Builder<Page> pages = ImmutableList.builder();
    pages.add(firstPage);
    pages.add(otherPages);
    return bufferResult(token, pages.build());
}
// Builds the expected BufferResult for a read starting at 'token': the next
// token is token + pages.size(), completion flag is false, and each page is
// serialized with the shared PAGES_SERDE.
private static BufferResult bufferResult(long token, List<Page> pages)
{
checkArgument(!pages.isEmpty(), "pages is empty");
return new BufferResult(
TASK_INSTANCE_ID,
token,
token + pages.size(),
false,
pages.stream()
.map(PAGES_SERDE::serialize)
.collect(Collectors.toList()));
}
// Creates a single-row test page whose only block holds the given long value.
private static Page createPage(int id)
{
return new Page(BlockAssertions.createLongsBlock(id));
}
// Returns the total byte size of 'count' standard test pages.
private static DataSize sizeOfPages(int count)
{
    long totalBytes = BUFFERED_PAGE_SIZE.toBytes() * count;
    return new DataSize(totalBytes, BYTE);
}
}
| |
package nflpicks.model;
/**
*
* Represents a pick a player made for a game. Pretty much the whole point
* of all this crap. If the team is null, that means the player hasn't made a
* pick for the game yet.
*
* @author albundy
*
*/
/**
 * Represents a pick a player made for a game. If {@link #team} is null,
 * the player has not made a pick for the game yet.
 *
 * The {@link #result} field could be derived by comparing the picked team
 * to the game's winner; it is stored here for convenience so this model
 * object stays "dumb" and carries no game logic.
 */
public class Pick {

    // Database id of the pick.
    protected int id;

    // The game the pick is for.
    protected Game game;

    // The player who is making the pick.
    protected Player player;

    // The team they picked; null means no pick made yet.
    protected Team team;

    // Outcome of the pick (stored, not derived - see class comment).
    protected String result;

    public Pick(){
    }

    /**
     * Convenience constructor that sets every field without separate
     * setter calls.
     *
     * @param id database id of the pick
     * @param game the game the pick is for
     * @param player the player making the pick
     * @param pickedTeam the team they picked (may be null)
     * @param result the outcome of the pick (may be null)
     */
    public Pick(int id, Game game, Player player, Team pickedTeam, String result){
        this.id = id;
        this.game = game;
        this.player = player;
        this.team = pickedTeam;
        this.result = result;
    }

    public int getId(){
        return id;
    }

    public void setId(int id){
        this.id = id;
    }

    public Game getGame() {
        return game;
    }

    public void setGame(Game game) {
        this.game = game;
    }

    public Player getPlayer() {
        return player;
    }

    public void setPlayer(Player player) {
        this.player = player;
    }

    public Team getTeam() {
        return team;
    }

    public void setTeam(Team team) {
        this.team = team;
    }

    public String getResult(){
        return result;
    }

    public void setResult(String result){
        this.result = result;
    }

    /**
     * Combines the hash codes of all fields, consistent with
     * {@link #equals(Object)}: equal picks always produce equal hashes.
     */
    @Override
    public int hashCode(){
        // Objects.hash handles null fields and applies the standard
        // 31-based combining internally.
        return java.util.Objects.hash(id, game, player, team, result);
    }

    /**
     * Returns true if the given object is a Pick with the same values for
     * every field. Null-safe on all reference fields.
     */
    @Override
    public boolean equals(Object object){
        if (object == this){
            return true;
        }

        // instanceof is false for null, so no separate null check is needed.
        if (!(object instanceof Pick)){
            return false;
        }

        Pick otherPick = (Pick)object;

        return id == otherPick.getId()
                && java.util.Objects.equals(game, otherPick.getGame())
                && java.util.Objects.equals(player, otherPick.getPlayer())
                && java.util.Objects.equals(team, otherPick.getTeam())
                && java.util.Objects.equals(result, otherPick.getResult());
    }

    /**
     * Human-readable dump of all fields, mainly for logging and debugging.
     */
    @Override
    public String toString(){
        String thisObjectAsAString = "id = " + id +
                ", game = " + game +
                ", player = " + player +
                ", team = " + team +
                ", result = " + result;

        return thisObjectAsAString;
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.world;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.google.common.collect.Multiset;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.World;
import net.minecraft.world.WorldManager;
import net.minecraft.world.WorldProvider;
import net.minecraft.world.WorldProviderEnd;
import net.minecraft.world.WorldProviderHell;
import net.minecraft.world.WorldProviderSurface;
import net.minecraft.world.WorldServer;
import net.minecraft.world.WorldServerMulti;
import net.minecraft.world.storage.ISaveHandler;
import net.minecraft.world.storage.SaveHandler;
import org.apache.logging.log4j.Level;
import org.spongepowered.api.world.Dimension;
import org.spongepowered.api.world.DimensionTypes;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.SpongeImplFactory;
import org.spongepowered.common.interfaces.IMixinEntityPlayerMP;
import org.spongepowered.common.interfaces.IMixinMinecraftServer;
import org.spongepowered.common.interfaces.world.IMixinWorldProvider;
import org.spongepowered.common.registry.type.world.DimensionRegistryModule;
import java.io.File;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
public class DimensionManager {
public static final Hashtable<Integer, Class<? extends WorldProvider>> providers = new Hashtable<>();
public static final Hashtable<Integer, Boolean> spawnSettings = new Hashtable<>();
public static final Hashtable<Integer, Integer> dimensions = new Hashtable<>();
public static final Hashtable<Integer, WorldServer> worlds = new Hashtable<>();
public static final ConcurrentMap<World, World> weakWorldMap = new MapMaker().weakKeys().weakValues().makeMap();
public static final ArrayList<Integer> unloadQueue = Lists.newArrayList();
public static final BitSet dimensionMap = new BitSet(Long.SIZE << 4);
public static final Multiset<Integer> leakedWorlds = HashMultiset.create();
public static boolean hasInit = false;
static {
init();
}
public static void init() {
if (hasInit) {
return;
}
hasInit = true;
registerProviderType(0, WorldProviderSurface.class, true);
registerProviderType(-1, WorldProviderHell.class, true);
registerProviderType(1, WorldProviderEnd.class, true);
registerDimension(0, 0);
registerDimension(-1, -1);
registerDimension(1, 1);
}
public static boolean registerProviderType(int id, Class<? extends WorldProvider> provider, boolean keepLoaded) {
if (providers.containsKey(id)) {
return false;
}
// register dimension type
String worldType;
switch (id) {
case -1:
worldType = "nether";
break;
case 0:
worldType = "overworld";
break;
case 1:
worldType = "the_end";
break;
default: // modded
worldType = provider.getSimpleName().toLowerCase();
worldType = worldType.replace("worldprovider", "");
worldType = worldType.replace("provider", "");
}
// Grab provider name if available
try {
WorldProvider worldProvider = provider.newInstance();
worldType = worldProvider.getDimensionName().toLowerCase().replace(" ", "_").replace("[^A-Za-z0-9_]", "");
} catch (Exception e) {
// ignore
}
DimensionRegistryModule.getInstance().registerAdditionalCatalog(new SpongeDimensionType(worldType, keepLoaded, provider, id));
providers.put(id, provider);
spawnSettings.put(id, keepLoaded);
return true;
}
public static int getProviderType(int dim) {
if (!dimensions.containsKey(dim)) {
throw new IllegalArgumentException(String.format("Could not get provider type for dimension %d, does not exist", dim));
}
return dimensions.get(dim);
}
public static WorldProvider createProviderFor(int dim) {
try {
if (dimensions.containsKey(dim)) {
WorldProvider provider = providers.get(getProviderType(dim)).newInstance();
((IMixinWorldProvider) provider).setDimension(dim);
return provider;
} else {
throw new RuntimeException(String.format("No WorldProvider bound for dimension %d", dim));
}
} catch (Exception e) {
SpongeImpl.getLogger().log(Level.ERROR, String.format("An error occurred trying to create an instance of WorldProvider %d (%s)",
dim, providers.get(getProviderType(dim)).getSimpleName()), e);
throw new RuntimeException(e);
}
}
public static boolean shouldLoadSpawn(int dim) {
int id = getProviderType(dim);
return spawnSettings.containsKey(id) && spawnSettings.get(id);
}
public static void loadDimensionDataMap(NBTTagCompound compound) {
dimensionMap.clear();
if (compound == null) {
dimensions.keySet().stream().filter(id -> id >= 0).forEach(dimensionMap::set);
} else {
int[] intArray = compound.getIntArray("DimensionArray");
for (int i = 0; i < intArray.length; i++) {
for (int j = 0; j < Integer.SIZE; j++) {
dimensionMap.set(i * Integer.SIZE + j, (intArray[i] & (1 << j)) != 0);
}
}
}
}
public static NBTTagCompound saveDimensionDataMap() {
int[] data = new int[(dimensionMap.length() + Integer.SIZE - 1) / Integer.SIZE];
NBTTagCompound dimMap = new NBTTagCompound();
for (int i = 0; i < data.length; i++) {
int val = 0;
for (int j = 0; j < Integer.SIZE; j++) {
val |= dimensionMap.get(i * Integer.SIZE + j) ? (1 << j) : 0;
}
data[i] = val;
}
dimMap.setIntArray("DimensionArray", data);
return dimMap;
}
public static Integer[] getIDs() {
return worlds.keySet().toArray(new Integer[worlds.size()]); //Only loaded dims, since usually used to cycle through loaded worlds
}
public static Integer[] getStaticDimensionIDs() {
return dimensions.keySet().toArray(new Integer[dimensions.keySet().size()]);
}
public static WorldServer getWorldFromDimId(int id) {
return worlds.get(id);
}
public static boolean unloadWorldFromDimId(int id) {
WorldServer world = worlds.get(id);
if (world == null) {
return true;
}
if (!world.playerEntities.isEmpty()) {
return false;
}
if (((org.spongepowered.api.world.World) world).doesKeepSpawnLoaded()) {
return false;
}
unloadQueue.add(id);
return true;
}
public static void setWorld(int id, WorldServer world) {
if (world != null) {
worlds.put(id, world);
weakWorldMap.put(world, world);
((IMixinMinecraftServer) MinecraftServer.getServer()).getWorldTickTimes().put(id, new long[100]);
SpongeImpl.getLogger().info("Loading dimension {} ({}) ({})", id, world.getWorldInfo().getWorldName(), world.getMinecraftServer());
} else {
worlds.remove(id);
((IMixinMinecraftServer) MinecraftServer.getServer()).getWorldTickTimes().remove(id);
SpongeImpl.getLogger().info("Unloading dimension {}", id);
}
ArrayList<WorldServer> tmp = new ArrayList<>();
if (worlds.get(0) != null) {
tmp.add(worlds.get(0));
}
if (worlds.get(-1) != null) {
tmp.add(worlds.get(-1));
}
if (worlds.get(1) != null) {
tmp.add(worlds.get(1));
}
for (Map.Entry<Integer, WorldServer> entry : worlds.entrySet()) {
int dim = entry.getKey();
if (dim >= -1 && dim <= 1) {
continue;
}
tmp.add(entry.getValue());
}
MinecraftServer.getServer().worldServers = tmp.toArray(new WorldServer[tmp.size()]);
}
public static WorldServer[] getWorlds() {
return worlds.values().toArray(new WorldServer[worlds.size()]);
}
public static boolean isDimensionRegistered(int dim) {
return dimensions.containsKey(dim);
}
public static void registerDimension(int id, int providerType) {
if (!providers.containsKey(providerType)) {
throw new IllegalArgumentException(
String.format("Failed to register dimension for id %d, provider type %d does not exist", id, providerType));
}
if (dimensions.containsKey(id)) {
throw new IllegalArgumentException(String.format("Failed to register dimension for id %d, One is already registered", id));
}
dimensions.put(id, providerType);
if (id >= 0) {
dimensionMap.set(id);
}
}
public static int getNextFreeDimId() {
int next = 0;
while (true) {
next = dimensionMap.nextClearBit(next);
if (dimensions.containsKey(next)) {
dimensionMap.set(next);
} else {
return next;
}
}
}
public static File getCurrentSaveRootDirectory() {
if (DimensionManager.getWorldFromDimId(0) != null) {
return DimensionManager.getWorldFromDimId(0).getSaveHandler().getWorldDirectory();
} else if (MinecraftServer.getServer() != null) {
MinecraftServer srv = MinecraftServer.getServer();
SaveHandler saveHandler = (SaveHandler) srv.getActiveAnvilConverter().getSaveLoader(srv.getFolderName(), false);
return saveHandler.getWorldDirectory();
} else {
return null;
}
}
public static void initDimension(int dim) {
WorldServer overworld = getWorldFromDimId(0);
if (overworld == null) {
throw new RuntimeException("Cannot Hotload Dim: Overworld is not Loaded!");
}
try {
DimensionManager.getProviderType(dim);
} catch (Exception e) {
SpongeImpl.getLogger().log(Level.ERROR, "Cannot Hotload Dim: " + e.getMessage());
return; // If a provider hasn't been registered then we can't hotload the dim
}
MinecraftServer mcServer = overworld.getMinecraftServer();
ISaveHandler savehandler = overworld.getSaveHandler();
WorldServer world =
(dim == 0 ? overworld : (WorldServer) (new WorldServerMulti(mcServer, savehandler, dim, overworld, mcServer.theProfiler).init()));
world.addWorldAccess(new WorldManager(mcServer, world));
SpongeImpl.postEvent(SpongeImplFactory.createLoadWorldEvent((org.spongepowered.api.world.World) world));
if (!mcServer.isSinglePlayer()) {
world.getWorldInfo().setGameType(mcServer.getGameType());
}
mcServer.setDifficultyForAllWorlds(mcServer.getDifficulty());
}
public static int getClientDimensionToSend(int dim, WorldServer worldserver, EntityPlayerMP playerIn) {
if (!((IMixinEntityPlayerMP) playerIn).usesCustomClient()) {
if (((Dimension) worldserver.provider).getType().equals(DimensionTypes.NETHER)) {
dim = -1;
} else if (((Dimension) worldserver.provider).getType().equals(DimensionTypes.THE_END)) {
dim = 1;
} else {
dim = 0;
}
}
return dim;
}
public static void sendDimensionRegistration(WorldServer worldserver, EntityPlayerMP playerIn, int dim) {
// // register dimension on client-side
// FMLEmbeddedChannel serverChannel = NetworkRegistry.INSTANCE.getChannel("FORGE", Side.SERVER);
// serverChannel.attr(FMLOutboundHandler.FML_MESSAGETARGET).set(FMLOutboundHandler.OutboundTarget.PLAYER);
// serverChannel.attr(FMLOutboundHandler.FML_MESSAGETARGETARGS).set(playerIn);
// serverChannel.writeOutbound(new ForgeMessage.DimensionRegisterMessage(dimension,
// ((SpongeDimensionType) ((Dimension) worldserver.provider).getType()).getDimensionTypeId()));
}
}
| |
package vgrechka;
import net.ttddyy.dsproxy.ExecutionInfo;
import net.ttddyy.dsproxy.QueryInfo;
import net.ttddyy.dsproxy.StatementType;
import net.ttddyy.dsproxy.listener.logging.AbstractQueryLogEntryCreator;
import net.ttddyy.dsproxy.proxy.ParameterSetOperation;
import org.fusesource.jansi.Ansi;
import vgrechka.db.BaseSQLiteAppConfig;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import static vgrechka.Jvm_back_platformKt.getBackPlatform;
/**
 * Query log entry creator that renders a compact "[SQL] query (params...)"
 * line, optionally wrapped in cyan ANSI color when the data-source proxy
 * settings request colorful output.
 *
 * <p>Derived from datasource-proxy's default entry creator; the verbose
 * Name/Time/Success/Type/Batch/QuerySize/BatchSize sections were removed from
 * {@link #getLogEntry} (dead commented-out code deleted), but the protected
 * writer methods are retained for subclasses.
 *
 * @author Tadaya Tsuyukubo
 * @since 1.4
 */
public class FreakingQueryLogEntryCreator extends AbstractQueryLogEntryCreator {

    private static final String LINE_SEPARATOR = System.getProperty("line.separator");

    // Retained for API compatibility; the compact format produced by
    // getLogEntry currently ignores this flag.
    private boolean multiline = false;

    /**
     * Builds one log line: optional ANSI cyan, the "[SQL] " prefix, every
     * query text, then one parenthesized parameter group per batch entry.
     *
     * @param execInfo execution info (statement type decides param rendering)
     * @param queryInfoList queries executed in this statement/batch
     * @param writeDataSourceName ignored by this compact format
     * @return the formatted log entry
     */
    @Override
    public String getLogEntry(ExecutionInfo execInfo, List<QueryInfo> queryInfoList, boolean writeDataSourceName) {
        final StringBuilder sb = new StringBuilder();
        BaseSQLiteAppConfig.DataProxySourceSettings config =
                getBackPlatform().getSpringctx().getBean(BaseSQLiteAppConfig.DataProxySourceSettings.class);
        if (config.getColorful()) {
            sb.append(Ansi.ansi().fgCyan());
        }
        sb.append("[SQL] ");
        for (QueryInfo queryInfo : queryInfoList) {
            sb.append(formatQuery(queryInfo.getQuery()));
        }
        sb.append(" ");
        // Prepared statements render values only; callable statements render
        // key=value pairs.
        boolean isPrepared = execInfo.getStatementType() == StatementType.PREPARED;
        for (QueryInfo queryInfo : queryInfoList) {
            for (List<ParameterSetOperation> parameters : queryInfo.getParametersList()) {
                SortedMap<String, String> paramMap = getParametersToDisplay(parameters);
                if (isPrepared) {
                    writeParamsForSinglePreparedEntry(sb, paramMap, execInfo, queryInfoList);
                } else {
                    writeParamsForSingleCallableEntry(sb, paramMap, execInfo, queryInfoList);
                }
            }
        }
        chompIfEndWith(sb, ',');
        if (config.getColorful()) {
            sb.append(Ansi.ansi().reset());
        }
        return sb.toString();
    }

    /**
     * Write datasource name when enabled.
     *
     * <p>default: Name: myDS,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeDataSourceNameEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        String name = execInfo.getDataSourceName();
        sb.append("Name:");
        sb.append(name == null ? "" : name);
        sb.append(", ");
    }

    /**
     * Write elapsed time.
     *
     * <p>default: Time: 123,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeTimeEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("Time:");
        sb.append(execInfo.getElapsedTime());
        sb.append(", ");
    }

    /**
     * Write query result whether successful or not.
     *
     * <p>default: Success: True,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeResultEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("Success:");
        sb.append(execInfo.isSuccess() ? "True" : "False");
        sb.append(", ");
    }

    /**
     * Write statement type.
     *
     * <p>default: Type: Prepared,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeTypeEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("Type:");
        sb.append(getStatementType(execInfo.getStatementType()));
        sb.append(", ");
    }

    /**
     * Write whether batch execution or not.
     *
     * <p>default: Batch: True,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeBatchEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("Batch:");
        sb.append(execInfo.isBatch() ? "True" : "False");
        sb.append(", ");
    }

    /**
     * Write query size.
     *
     * <p>default: QuerySize: 1,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeQuerySizeEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("QuerySize:");
        sb.append(queryInfoList.size());
        sb.append(", ");
    }

    /**
     * Write batch size.
     *
     * <p>default: BatchSize: 1,
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeBatchSizeEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("BatchSize:");
        sb.append(execInfo.getBatchSize());
        sb.append(", ");
    }

    /**
     * Write queries. In this compact implementation the surrounding
     * {@code Query:[...]} wrapper is intentionally omitted; the method only
     * trims a trailing comma.
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeQueriesEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        chompIfEndWith(sb, ',');
    }

    /**
     * Callback method to allow alternating given query for logging.
     *
     * Subclass can override this method to change the given query.
     * For example, it can call BasicFormatterImpl in hibernate to format the query.
     *
     * @param query a query to format
     * @return formatted query
     * @since 1.4.1
     */
    protected String formatQuery(String query) {
        return query;
    }

    /**
     * Write query parameters.
     *
     * <p>default for prepared: Params:[(foo,100),(bar,101)],
     * <p>default for callable: Params:[(1=foo,key=100),(1=bar,key=101)],
     *
     * @param sb StringBuilder to write
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.3.3
     */
    protected void writeParamsEntry(StringBuilder sb, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        boolean isPrepared = execInfo.getStatementType() == StatementType.PREPARED;
        sb.append("Params:[");
        for (QueryInfo queryInfo : queryInfoList) {
            for (List<ParameterSetOperation> parameters : queryInfo.getParametersList()) {
                SortedMap<String, String> paramMap = getParametersToDisplay(parameters);
                // parameters per batch.
                //   for prepared: (val1,val2,...)
                //   for callable: (key1=val1,key2=val2,...)
                if (isPrepared) {
                    writeParamsForSinglePreparedEntry(sb, paramMap, execInfo, queryInfoList);
                } else {
                    writeParamsForSingleCallableEntry(sb, paramMap, execInfo, queryInfoList);
                }
            }
        }
        chompIfEndWith(sb, ',');
        sb.append("]");
    }

    /**
     * Write query parameters for PreparedStatement.
     *
     * <p>default: Params:[(foo,100),(bar,101)],
     *
     * @param sb StringBuilder to write
     * @param paramMap sorted parameters map
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.4
     */
    protected void writeParamsForSinglePreparedEntry(StringBuilder sb, SortedMap<String, String> paramMap, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("(");
        for (Map.Entry<String, String> paramEntry : paramMap.entrySet()) {
            sb.append(paramEntry.getValue());
            sb.append(",");
        }
        chompIfEndWith(sb, ',');
        sb.append("),");
    }

    /**
     * Write parameters for single execution.
     *
     * <p>default: (1=foo,bar=100),
     *
     * @param sb StringBuilder to write
     * @param paramMap sorted parameters map
     * @param execInfo execution info
     * @param queryInfoList query info list
     * @since 1.4
     */
    protected void writeParamsForSingleCallableEntry(StringBuilder sb, SortedMap<String, String> paramMap, ExecutionInfo execInfo, List<QueryInfo> queryInfoList) {
        sb.append("(");
        for (Map.Entry<String, String> paramEntry : paramMap.entrySet()) {
            sb.append(paramEntry.getKey());
            sb.append("=");
            sb.append(paramEntry.getValue());
            sb.append(",");
        }
        chompIfEndWith(sb, ',');
        sb.append("),");
    }

    /**
     * Enable multiline output in {@link #getLogEntry(ExecutionInfo, List, boolean)}.
     *
     * @param multiline return multi lined log entry when true is set
     * @since 1.4.1
     */
    public void setMultiline(boolean multiline) {
        this.multiline = multiline;
    }

    /**
     * @return true if multiline output is enabled
     * @since 1.4.1
     */
    public boolean isMultiline() {
        return multiline;
    }
}
| |
/*
* Copyright 2005-2007 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.identity.core.dao;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.IdentityRegistryResources;
import org.wso2.carbon.identity.core.model.XMPPSettingsDO;
import org.wso2.carbon.registry.core.Collection;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.utils.Transaction;
public class XMPPSettingsDAO extends AbstractDAO<XMPPSettingsDO> {

    protected Log log = LogFactory.getLog(XMPPSettingsDAO.class);

    /**
     * Creates a DAO bound to the given registry.
     *
     * @param registry registry used to persist XMPP settings
     */
    public XMPPSettingsDAO(Registry registry) {
        this.registry = registry;
    }

    /**
     * Converts a registry resource into an {@link XMPPSettingsDO}.
     *
     * @param resource the registry resource, may be {@code null}
     * @return the populated settings object, or {@code null} when resource is null
     */
    protected XMPPSettingsDO resourceToObject(Resource resource) {
        XMPPSettingsDO xmppSettingsDO = null;
        if (resource != null) {
            xmppSettingsDO = new XMPPSettingsDO();
            xmppSettingsDO.setXmppServer(resource
                    .getProperty(IdentityRegistryResources.XMPP_SERVER));
            xmppSettingsDO.setXmppUserName(resource
                    .getProperty(IdentityRegistryResources.XMPP_USERNAME));
            xmppSettingsDO.setUserCode(resource
                    .getProperty(IdentityRegistryResources.XMPP_USERCODE));
            // FIX: the original called getProperty(...).trim() directly, which
            // throws NullPointerException when the property is absent from the
            // resource. A missing property now reads as "disabled".
            xmppSettingsDO.setXmppEnabled(
                    isPropertyTrue(resource, IdentityRegistryResources.XMPP_ENABLED));
            xmppSettingsDO.setPINEnabled(
                    isPropertyTrue(resource, IdentityRegistryResources.XMPP_PIN_ENABLED));
        }
        return xmppSettingsDO;
    }

    /**
     * Null-safe check that a resource property equals "true" after trimming.
     *
     * @param resource resource to read from
     * @param propertyName name of the boolean-valued property
     * @return {@code true} only when the property exists and trims to "true"
     */
    private static boolean isPropertyTrue(Resource resource, String propertyName) {
        String value = resource.getProperty(propertyName);
        return value != null && value.trim().equals("true");
    }

    /**
     * Adding XMPP Settings corresponding to a user. A no-op when settings
     * already exist for the user.
     *
     * @param userId user whose settings are stored under XMPP_SETTINGS_ROOT + userId
     * @param xmppServer XMPP server address
     * @param xmppUserName XMPP user name
     * @param xmppUserCode user code/PIN
     * @param enabled whether XMPP-based auth is enabled
     * @param pinEnabled whether PIN verification is enabled
     * @throws IdentityException on registry failures
     */
    public void addXmppSettings(String userId, String xmppServer, String xmppUserName,
            String xmppUserCode, boolean enabled, boolean pinEnabled) throws IdentityException {
        String path = null;
        Resource resource = null;
        Collection userResource = null;
        String xmppEnabled = Boolean.toString(enabled);
        String isPINEnabled = Boolean.toString(pinEnabled);
        try {
            // NOTE(review): if userId is null, path stays null and the
            // resourceExists(null) call below is registry-implementation
            // dependent — confirm callers always pass a non-null userId.
            if (userId != null) {
                path = IdentityRegistryResources.XMPP_SETTINGS_ROOT + userId;
            }
            if (registry.resourceExists(path)) {
                if (log.isInfoEnabled()) {
                    log.info("XMPP Settings already exists for user " + userId);
                }
                return;
            }
            resource = registry.newResource();
            resource.addProperty(IdentityRegistryResources.XMPP_SERVER, xmppServer);
            resource.addProperty(IdentityRegistryResources.XMPP_USERNAME, xmppUserName);
            resource.addProperty(IdentityRegistryResources.XMPP_USERCODE, xmppUserCode);
            resource.addProperty(IdentityRegistryResources.XMPP_ENABLED, xmppEnabled);
            resource.addProperty(IdentityRegistryResources.XMPP_PIN_ENABLED, isPINEnabled);
            // Only manage the transaction here when no outer transaction exists.
            boolean transactionStarted = Transaction.isStarted();
            try {
                if (!transactionStarted) {
                    registry.beginTransaction();
                }
                registry.put(path, resource);
                if (!registry.resourceExists(RegistryConstants.PROFILES_PATH + userId)) {
                    userResource = registry.newCollection();
                    registry.put(RegistryConstants.PROFILES_PATH + userId, userResource);
                }
                registry.addAssociation(RegistryConstants.PROFILES_PATH + userId, path,
                        IdentityRegistryResources.ASSOCIATION_USER_XMPP_SETTINGS);
                if (!transactionStarted) {
                    registry.commitTransaction();
                }
            } catch (Exception e) {
                if (!transactionStarted) {
                    registry.rollbackTransaction();
                }
                if (e instanceof RegistryException) {
                    throw (RegistryException) e;
                } else {
                    throw new IdentityException("Error occured while adding XMPP Settings", e);
                }
            }
            if (log.isInfoEnabled()) {
                log.info("XMPP Settings for " + userId + " added successfully.");
            }
        } catch (RegistryException e) {
            log.error("Error occured while adding XMPP Settings.", e);
            throw new IdentityException("Error occured while adding XMPP Settings.", e);
        }
    }

    /**
     * Update XMPP Settings of a user. A no-op when no settings exist.
     *
     * @param userId user whose settings are updated
     * @param xmppServer XMPP server address
     * @param xmppUserName XMPP user name
     * @param xmppUserCode user code/PIN
     * @param enabled whether XMPP-based auth is enabled
     * @param pinEnabled whether PIN verification is enabled
     * @throws IdentityException on registry failures
     */
    public void updateXmppSettings(String userId, String xmppServer, String xmppUserName,
            String xmppUserCode, boolean enabled, boolean pinEnabled) throws IdentityException {
        String path = null;
        Resource resource = null;
        String xmppEnabled = Boolean.toString(enabled);
        String isPINEnabled = Boolean.toString(pinEnabled);
        try {
            if (userId != null) {
                path = IdentityRegistryResources.XMPP_SETTINGS_ROOT + userId;
            }
            if (!registry.resourceExists(path)) {
                if (log.isInfoEnabled()) {
                    log.info("XMPP Settings does not exist for the user " + userId);
                }
                return;
            }
            resource = registry.get(path);
            resource.setProperty(IdentityRegistryResources.XMPP_SERVER, xmppServer);
            resource.setProperty(IdentityRegistryResources.XMPP_USERNAME, xmppUserName);
            resource.setProperty(IdentityRegistryResources.XMPP_USERCODE, xmppUserCode);
            resource.setProperty(IdentityRegistryResources.XMPP_ENABLED, xmppEnabled);
            resource.setProperty(IdentityRegistryResources.XMPP_PIN_ENABLED, isPINEnabled);
            registry.put(path, resource);
            if (log.isInfoEnabled()) {
                log.info("XMPP Settings are updated for the user " + userId);
            }
        } catch (RegistryException e) {
            log.error("Error occured while updating the XMPP Settings.", e);
            throw new IdentityException("Error occured while updating the XMPP Settings.", e);
        }
    }

    /**
     * retrieve XMPP Settings of a user by providing the userId
     *
     * @param userId user to look up
     * @return the settings, or {@code null} when none exist or lookup fails
     */
    public XMPPSettingsDO getXmppSettings(String userId) {
        XMPPSettingsDO xmppSettings = null;
        try {
            if (registry.resourceExists(IdentityRegistryResources.XMPP_SETTINGS_ROOT + userId)) {
                xmppSettings = resourceToObject(registry
                        .get(IdentityRegistryResources.XMPP_SETTINGS_ROOT + userId));
            }
        } catch (RegistryException e) {
            // Best-effort read: callers treat null as "no settings".
            log.error("Cannot retrieve the XMPP Settings for the user " + userId, e);
        }
        return xmppSettings;
    }

    /**
     * Checks whether the given user has enabled XMPP based multifactor auth.
     *
     * @param userId user to check
     * @return {@code true} only when settings exist and XMPP is enabled
     */
    public boolean isXmppSettingsEnabled(String userId) {
        boolean isEnabled = false;
        XMPPSettingsDO xmppSettings;
        try {
            if (registry.resourceExists(IdentityRegistryResources.XMPP_SETTINGS_ROOT + userId)) {
                xmppSettings = resourceToObject(registry
                        .get(IdentityRegistryResources.XMPP_SETTINGS_ROOT + userId));
                isEnabled = xmppSettings.isXmppEnabled();
            }
        } catch (RegistryException e) {
            log.error("Error when checking the availability of the user " + userId, e);
        }
        return isEnabled;
    }

    /**
     * Checks whether XMPP settings exist for the given user.
     *
     * @param userId user to check
     * @return {@code true} when a settings resource exists; {@code false}
     *         when absent or the registry lookup fails
     */
    public boolean hasXmppSettings(String userId) {
        boolean hasSettings = false;
        try {
            hasSettings = registry.resourceExists(IdentityRegistryResources.XMPP_SETTINGS_ROOT
                    + userId);
        } catch (RegistryException e) {
            log.error("Error when checking the availability of the user " + userId, e);
        }
        return hasSettings;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.jaxws.marshaller.impl.alt;
import org.apache.axis2.jaxws.ExceptionFactory;
import org.apache.axis2.jaxws.description.EndpointDescription;
import org.apache.axis2.jaxws.description.EndpointInterfaceDescription;
import org.apache.axis2.jaxws.description.OperationDescription;
import org.apache.axis2.jaxws.description.ParameterDescription;
import org.apache.axis2.jaxws.i18n.Messages;
import org.apache.axis2.jaxws.marshaller.MethodMarshaller;
import org.apache.axis2.jaxws.message.Message;
import org.apache.axis2.jaxws.message.Protocol;
import org.apache.axis2.jaxws.message.factory.MessageFactory;
import org.apache.axis2.jaxws.registry.FactoryRegistry;
import org.apache.axis2.jaxws.runtime.description.marshal.MarshalServiceRuntimeDescription;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.jws.soap.SOAPBinding.Style;
import javax.xml.namespace.QName;
import javax.xml.ws.WebServiceException;
import java.util.List;
import java.util.TreeSet;
/**
* The Doc/Lit Wrapped Minimal Marshaller is used when 1) The web service is Doc/Lit Wrapped, and 2)
* The wrapper and fault bean objects are missing (hence the term 'Minimal')
*/
public class DocLitWrappedMinimalMethodMarshaller implements MethodMarshaller {
private static Log log = LogFactory.getLog(DocLitWrappedMinimalMethodMarshaller.class);
/** Default constructor; the marshaller keeps no per-instance state. */
public DocLitWrappedMinimalMethodMarshaller() {
    super();
}
/**
 * Marshals the Java signature arguments of a doc/lit wrapped "minimal"
 * operation (no generated wrapper bean) into a request {@link Message}.
 *
 * @param signatureArguments the method arguments; must be non-null and contain
 *        no null elements when the operation declares parameters (JAX-WS 3.6.2.3)
 * @param operationDesc description of the invoked operation
 * @return the marshalled request message
 * @throws WebServiceException wrapping any failure, or directly for null arguments
 */
public Message marshalRequest(Object[] signatureArguments, OperationDescription operationDesc)
        throws WebServiceException {
    EndpointInterfaceDescription ed = operationDesc.getEndpointInterfaceDescription();
    EndpointDescription endpointDesc = ed.getEndpointDescription();
    Protocol protocol = Protocol.getProtocolForBinding(endpointDesc.getClientBindingID());
    // Note all exceptions are caught and rethrown with a WebServiceException
    try {
        // Sample Document message
        // ..
        // <soapenv:body>
        //    <m:operation>
        //      <param>hello</param>
        //    </m:operation>
        // </soapenv:body>
        //
        // Important points.
        //   1) There is no operation element under the body.
        //   2) The data blocks are located underneath the body.
        //   3) The name of the data block (m:operation) is defined by the schema and match the name of the operation.
        //      This is called the wrapper element. The wrapper element has a corresponding JAXB element pojo.
        //   4) The parameters (m:param) are child elements of the wrapper element.
        //   5) NOTE: For doc/literal wrapped "minimal", the wrapper JAXB element pojo is missing.
        // Get the operation information
        ParameterDescription[] pds = operationDesc.getParameterDescriptions();
        MarshalServiceRuntimeDescription marshalDesc =
                MethodMarshallerUtils.getMarshalDesc(endpointDesc);
        TreeSet<String> packages = marshalDesc.getPackages();
        // TODO This needs more work. We need to check inside holders of input params. We also
        // may want to exclude header params from this check
        // Validate input parameters for operation and make sure no input parameters are null.
        // As per JAXWS Specification section 3.6.2.3 if a null value is passes as an argument
        // to a method then an implementation MUST throw WebServiceException.
        if (pds.length > 0) {
            if (signatureArguments == null) {
                throw ExceptionFactory.makeWebServiceException(
                        Messages.getMessage("NullParamErr1", operationDesc.getJavaMethodName()));
            }
            if (signatureArguments != null) {
                for (Object argument : signatureArguments) {
                    if (argument == null) {
                        throw ExceptionFactory.makeWebServiceException(
                                Messages.getMessage("NullParamErr1", operationDesc.getJavaMethodName()));
                    }
                }
            }
        }
        // Create the message
        MessageFactory mf = (MessageFactory) FactoryRegistry.getFactory(MessageFactory.class);
        Message m = mf.create(protocol);
        // Indicate the style and wrapper element name.  This triggers the message to
        // put the data blocks underneath the wrapper element.
        // NOTE: setStyle/setIndirection must be applied before the blocks are
        // written by toMessage below — the order of these calls is significant.
        m.setStyle(Style.DOCUMENT);
        m.setIndirection(1);
        m.setOperationElement(getRequestWrapperQName(operationDesc));
        // The input object represent the signature arguments.
        // Signature arguments are both holders and non-holders
        // Convert the signature into a list of JAXB objects for marshalling
        List<PDElement> pdeList =
                MethodMarshallerUtils.getPDElements(marshalDesc,
                        pds,
                        signatureArguments,
                        true,   // input
                        true,   // doc/lit wrapped
                        true);  // NOTE(review): original comment said "false" but the value is
                                // true — confirm the intended meaning of this flag in
                                // MethodMarshallerUtils.getPDElements
        // We want to use "by Java Type" marshalling for all objects, since the
        // wrapper/fault beans are missing in the "minimal" case.
        for (PDElement pde : pdeList) {
            ParameterDescription pd = pde.getParam();
            Class type = pd.getParameterActualType();
            pde.setByJavaTypeClass(type);
        }
        // Put values onto the message
        MethodMarshallerUtils.toMessage(pdeList, m, packages);
        // Enable SWA for nested SwaRef attachments
        if (operationDesc.hasRequestSwaRefAttachments()) {
            m.setDoingSWA(true);
        }
        return m;
    } catch (Exception e) {
        throw ExceptionFactory.makeWebServiceException(e);
    }
}
/**
 * Unmarshals the incoming request Message into the signature arguments of the
 * target Java method. Each parameter is unmarshalled "by Java type" because
 * there is no wrapper JAXB class for this style.
 *
 * @param message incoming request message
 * @param operationDesc description of the operation being invoked
 * @return the signature arguments to pass to the endpoint method
 * @throws WebServiceException if any argument is null (per JAX-WS 3.6.2.3) or
 *         if any unmarshalling error occurs
 */
public Object[] demarshalRequest(Message message, OperationDescription operationDesc)
        throws WebServiceException {
    EndpointInterfaceDescription interfaceDesc = operationDesc.getEndpointInterfaceDescription();
    EndpointDescription endpointDesc = interfaceDesc.getEndpointDescription();
    // Every exception below is caught and rethrown as a WebServiceException.
    try {
        // The request is RPC-shaped, e.g.:
        //
        //   <soapenv:body>
        //     <m:op xmlns:m="urn://api">
        //       <param xsi:type="data:foo" >...</param>
        //     </m:op>
        //   </soapenv:body>
        //
        // Key points:
        //   1) RPC places an operation element (named after the wsdl operation)
        //      directly under the body.
        //   2) The data blocks live underneath that operation element (in doc/lit
        //      they would sit directly under the body).
        //   3) Each data block's name (param) comes from the wsdl:part, not the
        //      schema, and is unqualified per WSI-BP.
        //   4) Each data block's type (data:foo) is defined by schema, so JAXB
        //      type rendering applies.
        //   5) We always send an xsi:type, but other vendors may not.

        // Gather the operation metadata.
        ParameterDescription[] paramDescs = operationDesc.getParameterDescriptions();
        MarshalServiceRuntimeDescription marshalDesc =
                MethodMarshallerUtils.getMarshalDesc(endpointDesc);
        TreeSet<String> packages = marshalDesc.getPackages();

        // Document style, but the data blocks sit one level below the
        // operation element (indirection of 1).
        message.setStyle(Style.DOCUMENT);
        message.setIndirection(1);

        // Unmarshal every parameter "by Java type".
        Class[] javaTypes = new Class[paramDescs.length];
        int index = 0;
        for (ParameterDescription paramDesc : paramDescs) {
            javaTypes[index++] = paramDesc.getParameterActualType();
        }

        // Pull the parameter values out of the message...
        List<PDElement> pdElements = MethodMarshallerUtils.getPDElements(paramDescs,
                message,
                packages,
                true,  // input
                false,
                javaTypes); // unmarshal by type because there is no wrapper

        // ...and build the method's signature arguments from them.
        Object[] sigArguments =
                MethodMarshallerUtils.createRequestSignatureArgs(paramDescs, pdElements);

        // TODO This needs more work. We need to check inside holders of input
        // params, and we may want to exclude header params from this check.
        // Per JAX-WS spec section 3.6.2.3, a null argument MUST surface as a
        // WebServiceException.
        if (sigArguments != null) {
            for (Object argument : sigArguments) {
                if (argument == null) {
                    throw ExceptionFactory.makeWebServiceException(
                            Messages.getMessage("NullParamErr2", operationDesc.getJavaMethodName()));
                }
            }
        }
        return sigArguments;
    } catch (Exception e) {
        throw ExceptionFactory.makeWebServiceException(e);
    }
}
/**
 * Marshals the return value and the output (holder) values into a response
 * Message. The message is built with Style.DOCUMENT and an indirection of 1,
 * i.e. the data blocks are placed under the response wrapper element.
 *
 * @param returnObject value returned by the endpoint method; must be non-null
 *        unless the method is void
 * @param signatureArgs the arguments of the invoked method; holders among them
 *        carry the output values
 * @param operationDesc description of the invoked operation
 * @param protocol protocol of the request; if null the binding's protocol is used
 * @return the marshalled response Message
 * @throws WebServiceException if the return value is null for a non-void
 *         method, or if any marshalling error occurs
 */
public Message marshalResponse(Object returnObject, Object[] signatureArgs,
OperationDescription operationDesc, Protocol protocol)
throws WebServiceException {
EndpointInterfaceDescription ed = operationDesc.getEndpointInterfaceDescription();
EndpointDescription endpointDesc = ed.getEndpointDescription();
// We want to respond with the same protocol as the request.
// If the protocol is null, then use the Protocol defined by the binding
if (protocol == null) {
protocol = Protocol.getProtocolForBinding(endpointDesc.getBindingType());
}
// Note all exceptions are caught and rethrown with a WebServiceException
try {
// Sample RPC message
// ..
// <soapenv:body>
// <m:opResponse xmlns:m="urn://api">
// <param xsi:type="data:foo" >...</param>
// </m:opResponse>
// </soapenv:body>
//
// Important points.
// 1) RPC has an operation element under the body. This is the name of the
// wsdl operation.
// 2) The data blocks are located underneath the operation element. (In doc/lit
// the data elements are underneath the body.)
// 3) The name of the data blocks (param) are defined by the wsdl:part not the
// schema. Note that it is unqualified.
// 4) The type of the data block (data:foo) is defined by schema (thus there is
// JAXB type rendering. Since we are using JAXB to marshal the data,
// we always generate an xsi:type attribute. This is an implementation detail
// and is not defined by any spec.
// Get the operation information
ParameterDescription[] pds = operationDesc.getParameterDescriptions();
MarshalServiceRuntimeDescription marshalDesc =
MethodMarshallerUtils.getMarshalDesc(endpointDesc);
TreeSet<String> packages = marshalDesc.getPackages();
// Create the message
MessageFactory mf = (MessageFactory)FactoryRegistry.getFactory(MessageFactory.class);
Message m = mf.create(protocol);
// Indicate the style and wrapper element name. This triggers the message to
// put the data blocks underneath the operation element
m.setStyle(Style.DOCUMENT);
m.setIndirection(1);
QName responseOp = getResponseWrapperQName(operationDesc);
m.setOperationElement(responseOp);
// Put the return object onto the message
Class returnType = operationDesc.getResultActualType();
String returnNS = null;
String returnLocalPart = null;
// The element name of the return value differs depending on whether the
// result is rendered as a header block or as a body part.
if (operationDesc.isResultHeader()) {
returnNS = operationDesc.getResultTargetNamespace();
returnLocalPart = operationDesc.getResultName();
} else {
returnNS = operationDesc.getResultTargetNamespace();
returnLocalPart = operationDesc.getResultPartName();
}
if (returnType != void.class) {
// TODO should we allow null if the return is a header?
// Validate the return value: per JAX-WS spec section 3.6.2.3, a null
// value passed as an argument/return MUST surface as a WebServiceException.
if (returnObject == null) {
throw ExceptionFactory.makeWebServiceException(
Messages.getMessage("NullParamErr3",operationDesc.getJavaMethodName()));
}
Element returnElement = null;
QName returnQName = new QName(returnNS, returnLocalPart);
// If the JAXB type carries @XmlRootElement the object can be marshalled
// directly; otherwise an explicit declared-type Element wrapper is needed.
if (marshalDesc.getAnnotationDesc(returnType).hasXmlRootElement()) {
returnElement = new Element(returnObject, returnQName);
} else {
returnElement = new Element(returnObject, returnQName, returnType);
}
MethodMarshallerUtils.toMessage(returnElement,
returnType,
operationDesc.isListType(),
marshalDesc,
m,
returnType, // force marshal by type
operationDesc.isResultHeader());
}
// Convert the holder objects into a list of JAXB objects for marshalling
List<PDElement> pdeList =
MethodMarshallerUtils.getPDElements(marshalDesc,
pds,
signatureArgs,
false, // output
true, // doc/lit wrapped
false); // not rpc
// We want to use "by Java Type" marshalling for
// all objects
for (PDElement pde : pdeList) {
ParameterDescription pd = pde.getParam();
Class type = pd.getParameterActualType();
pde.setByJavaTypeClass(type);
}
// TODO Should we check for null output body values? Should we check for null output header values ?
// Put values onto the message
MethodMarshallerUtils.toMessage(pdeList, m, packages);
// Enable SWA for nested SwaRef attachments
if (operationDesc.hasResponseSwaRefAttachments()) {
m.setDoingSWA(true);
}
return m;
} catch (Exception e) {
throw ExceptionFactory.makeWebServiceException(e);
}
}
/**
 * Unmarshals the response Message: extracts the return value (from the body
 * or, when the webresult is a header, from that header) and populates the
 * output Holder objects within {@code signatureArgs}.
 *
 * @param message incoming response message
 * @param signatureArgs the client-side signature arguments; holders among them
 *        are updated in place with the output values
 * @param operationDesc description of the invoked operation
 * @return the demarshalled return value (null only for void operations)
 * @throws WebServiceException if the return value is null for a non-void
 *         method, or if any unmarshalling error occurs
 */
public Object demarshalResponse(Message message, Object[] signatureArgs,
OperationDescription operationDesc)
throws WebServiceException {
EndpointInterfaceDescription ed = operationDesc.getEndpointInterfaceDescription();
EndpointDescription endpointDesc = ed.getEndpointDescription();
// Note all exceptions are caught and rethrown with a WebServiceException
try {
// Sample RPC message
// ..
// <soapenv:body>
// <m:opResponse xmlns:m="urn://api">
// <param xsi:type="data:foo" >...</param>
// </m:opResponse>
// </soapenv:body>
//
// Important points.
// 1) RPC has an operation element under the body. This is the name of the
// wsdl operation.
// 2) The data blocks are located underneath the operation element. (In doc/lit
// the data elements are underneath the body.)
// 3) The name of the data blocks (param) are defined by the wsdl:part not the
// schema. Note that it is unqualified per WSI-BP
// 4) The type of the data block (data:foo) is defined by schema (thus there is
// JAXB type rendering.
// 5) We always send an xsi:type, but other vendor's may not.
// Get the operation information
ParameterDescription[] pds = operationDesc.getParameterDescriptions();
MarshalServiceRuntimeDescription marshalDesc =
MethodMarshallerUtils.getMarshalDesc(endpointDesc);
TreeSet<String> packages = marshalDesc.getPackages();
// Indicate that the style is Document, with the data blocks one level
// below the operation element (indirection of 1).
message.setStyle(Style.DOCUMENT);
message.setIndirection(1);
// Get the return value.
Class returnType = operationDesc.getResultActualType();
Object returnValue = null;
// Tracks whether the return value occupies a body block; the later
// getPDElements call must skip over it when true.
boolean hasReturnInBody = false;
if (returnType != void.class) {
// If the webresult is in the header, we need the name of the header so that we can find it.
Element returnElement = null;
if (operationDesc.isResultHeader()) {
returnElement = MethodMarshallerUtils.getReturnElement(packages,
message,
returnType,
operationDesc.isListType(),
true, // is a header
operationDesc.getResultTargetNamespace(),
// header ns
operationDesc.getResultPartName(), // header local part
MethodMarshallerUtils.numOutputBodyParams(pds) > 0);
} else {
returnElement = MethodMarshallerUtils.getReturnElement(packages,
message,
returnType,
operationDesc.isListType(),
false, // not a header
null,
null,
MethodMarshallerUtils.numOutputBodyParams(pds) > 0);
hasReturnInBody = true;
}
returnValue = returnElement.getTypeValue();
// TODO should we allow null if the return is a header?
// Validate the return value: per JAX-WS spec section 3.6.2.3, a null
// value MUST surface as a WebServiceException.
if (returnValue == null) {
throw ExceptionFactory.makeWebServiceException(
Messages.getMessage("NullParamErr3",operationDesc.getJavaMethodName()));
}
}
// We want to use "by Java Type" unmarshalling for
// all objects
Class[] javaTypes = new Class[pds.length];
for (int i = 0; i < pds.length; i++) {
ParameterDescription pd = pds[i];
Class type = pd.getParameterActualType();
javaTypes[i] = type;
}
// Unmarshall the ParamValues from the Message
List<PDElement> pvList = MethodMarshallerUtils.getPDElements(pds,
message,
packages,
false, // output
hasReturnInBody,
javaTypes); // unmarshal by type
// TODO Should we check for null output body values? Should we check for null output header values ?
// Populate the response Holders
MethodMarshallerUtils.updateResponseSignatureArgs(pds, pvList, signatureArgs);
return returnValue;
} catch (Exception e) {
throw ExceptionFactory.makeWebServiceException(e);
}
}
/**
 * Marshals a Throwable raised by the endpoint into a fault Message.
 *
 * @param throwable the exception to render as a fault
 * @param operationDesc description of the invoked operation
 * @param protocol protocol of the request; if null the binding's protocol is used
 * @return a Message containing the marshalled fault
 * @throws WebServiceException if the fault itself cannot be marshalled
 */
public Message marshalFaultResponse(Throwable throwable,
        OperationDescription operationDesc, Protocol protocol)
        throws WebServiceException {
    EndpointInterfaceDescription interfaceDesc = operationDesc.getEndpointInterfaceDescription();
    EndpointDescription endpointDesc = interfaceDesc.getEndpointDescription();
    MarshalServiceRuntimeDescription marshalDesc =
            MethodMarshallerUtils.getMarshalDesc(endpointDesc);
    TreeSet<String> packages = marshalDesc.getPackages();
    // Reply with the protocol of the request; when none was supplied, fall
    // back to the protocol implied by the endpoint's binding.
    if (protocol == null) {
        protocol = Protocol.getProtocolForBinding(endpointDesc.getBindingType());
    }
    // Any exception raised while building the fault is itself normalized
    // into a WebServiceException.
    try {
        // Create the outbound message and delegate the fault rendering to the
        // shared marshalling utilities.
        MessageFactory factory = (MessageFactory) FactoryRegistry.getFactory(MessageFactory.class);
        Message outMessage = factory.create(protocol);
        MethodMarshallerUtils.marshalFaultResponse(throwable,
                marshalDesc,
                operationDesc,
                outMessage);
        return outMessage;
    } catch (Exception e) {
        throw ExceptionFactory.makeWebServiceException(e);
    }
}
/**
 * Demarshals a fault Message back into the Throwable it represents.
 *
 * @param message incoming fault message
 * @param operationDesc description of the invoked operation
 * @return the reconstructed Throwable
 * @throws WebServiceException if the fault cannot be demarshalled
 */
public Throwable demarshalFaultResponse(Message message, OperationDescription operationDesc)
        throws WebServiceException {
    EndpointInterfaceDescription interfaceDesc = operationDesc.getEndpointInterfaceDescription();
    EndpointDescription endpointDesc = interfaceDesc.getEndpointDescription();
    MarshalServiceRuntimeDescription marshalDesc =
            MethodMarshallerUtils.getMarshalDesc(endpointDesc);
    // Any failure while reading the fault is rethrown as a WebServiceException.
    try {
        return MethodMarshallerUtils.demarshalFaultResponse(operationDesc, marshalDesc, message);
    } catch (Exception e) {
        throw ExceptionFactory.makeWebServiceException(e);
    }
}
/**
 * Builds the QName of the request wrapper element for the given operation.
 *
 * @param opDesc operation description supplying the wrapper local name and
 *        target namespace
 * @return request wrapper qname, built with the "dlwmin" prefix so that an
 *         actual namespace prefix is serialized rather than a default namespace
 */
private static QName getRequestWrapperQName(OperationDescription opDesc) {
    // Note: the previous version first assigned opDesc.getName() to the result
    // and then immediately overwrote it; that dead store has been removed.
    String localPart = opDesc.getRequestWrapperLocalName();
    String uri = opDesc.getRequestWrapperTargetNamespace();
    String prefix = "dlwmin"; // Prefer using an actual prefix
    return new QName(uri, localPart, prefix);
}
/**
 * Builds the QName of the response wrapper element for the given operation.
 * (The previous javadoc incorrectly said "request wrapper qname".)
 *
 * @param opDesc operation description supplying the wrapper local name and
 *        target namespace
 * @return response wrapper qname, built with the "dlwmin" prefix so that an
 *         actual namespace prefix is serialized rather than a default namespace
 */
private static QName getResponseWrapperQName(OperationDescription opDesc) {
    // Note: the previous version first assigned opDesc.getName() to the result
    // and then immediately overwrote it; that dead store has been removed.
    String localPart = opDesc.getResponseWrapperLocalName();
    String uri = opDesc.getResponseWrapperTargetNamespace();
    String prefix = "dlwmin"; // Prefer using an actual prefix
    return new QName(uri, localPart, prefix);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.List;
/**
 * A restricted {@link SearchContext} used to fetch hits for a sub-scope of a
 * search request (for example inner hits, where per-bucket top hits are
 * returned). It keeps its own query/fetch results and its own fetch-phase
 * settings (from/size, sort, field names, script/fielddata fields, source
 * filtering, highlighting, explain, version, track-scores), while most other
 * mutators inherited from the wrapped context are rejected with
 * {@link UnsupportedOperationException}.
 */
public class SubSearchContext extends FilteredSearchContext {
// By default return 3 hits per bucket. A higher default would make the response really large by default, since
// the top hits are returned per bucket.
private final static int DEFAULT_SIZE = 3;
private int from;
private int size = DEFAULT_SIZE;
private Sort sort;
// Results owned by this sub context (not shared with the wrapped context).
private final FetchSearchResult fetchSearchResult;
private final QuerySearchResult querySearchResult;
// Doc ids selected for the fetch phase, plus the window into that array.
private int[] docIdsToLoad;
private int docsIdsToLoadFrom;
private int docsIdsToLoadSize;
private List<String> fieldNames;
private FieldDataFieldsContext fieldDataFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private SearchContextHighlight highlight;
private boolean explain;
private boolean trackScores;
private boolean version;
private InnerHitsContext innerHitsContext;
public SubSearchContext(SearchContext context) {
super(context);
this.fetchSearchResult = new FetchSearchResult();
this.querySearchResult = new QuerySearchResult();
}
// No-op: this sub context owns no closable resources; the wrapped context
// manages its own lifecycle.
@Override
protected void doClose() {
}
// No-op: preprocessing is performed on the top-level context.
@Override
public void preProcess() {
}
// The mutators below are part of the SearchContext interface but do not
// apply to a sub context; they fail fast instead of silently ignoring input.
@Override
public Filter searchFilter(String[] types) {
throw new UnsupportedOperationException("this context should be read only");
}
@Override
public SearchContext searchType(SearchType searchType) {
throw new UnsupportedOperationException("this context should be read only");
}
@Override
public SearchContext queryBoost(float queryBoost) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public SearchContext scroll(Scroll scroll) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public SearchContext aggregations(SearchContextAggregations aggregations) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public SearchContextHighlight highlight() {
return highlight;
}
@Override
public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}
@Override
public void suggest(SuggestionSearchContext suggest) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public void addRescore(RescoreSearchContext rescore) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public boolean hasFieldDataFields() {
return fieldDataFields != null;
}
// Lazily created on first access.
@Override
public FieldDataFieldsContext fieldDataFields() {
if (fieldDataFields == null) {
fieldDataFields = new FieldDataFieldsContext();
}
return this.fieldDataFields;
}
@Override
public boolean hasScriptFields() {
return scriptFields != null;
}
// Lazily created on first access.
@Override
public ScriptFieldsContext scriptFields() {
if (scriptFields == null) {
scriptFields = new ScriptFieldsContext();
}
return this.scriptFields;
}
@Override
public boolean sourceRequested() {
return fetchSourceContext != null && fetchSourceContext.fetchSource();
}
@Override
public boolean hasFetchSourceContext() {
return fetchSourceContext != null;
}
@Override
public FetchSourceContext fetchSourceContext() {
return fetchSourceContext;
}
@Override
public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
this.fetchSourceContext = fetchSourceContext;
return this;
}
@Override
public void timeoutInMillis(long timeoutInMillis) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public void terminateAfter(int terminateAfter) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public SearchContext minimumScore(float minimumScore) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public SearchContext sort(Sort sort) {
this.sort = sort;
return this;
}
@Override
public Sort sort() {
return sort;
}
@Override
public SearchContext trackScores(boolean trackScores) {
this.trackScores = trackScores;
return this;
}
@Override
public boolean trackScores() {
return trackScores;
}
@Override
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public SearchContext updateRewriteQuery(Query rewriteQuery) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public int from() {
return from;
}
@Override
public SearchContext from(int from) {
this.from = from;
return this;
}
@Override
public int size() {
return size;
}
@Override
public SearchContext size(int size) {
this.size = size;
return this;
}
@Override
public boolean hasFieldNames() {
return fieldNames != null;
}
// Lazily created on first access; mutable so callers can add names.
@Override
public List<String> fieldNames() {
if (fieldNames == null) {
fieldNames = Lists.newArrayList();
}
return fieldNames;
}
// Explicitly requests "no stored fields" (an immutable empty list), which is
// distinct from the null/unset state reported by hasFieldNames().
@Override
public void emptyFieldNames() {
this.fieldNames = ImmutableList.of();
}
@Override
public boolean explain() {
return explain;
}
@Override
public void explain(boolean explain) {
this.explain = explain;
}
@Override
public void groupStats(List<String> groupStats) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public boolean version() {
return version;
}
@Override
public void version(boolean version) {
this.version = version;
}
@Override
public int[] docIdsToLoad() {
return docIdsToLoad;
}
@Override
public int docIdsToLoadFrom() {
return docsIdsToLoadFrom;
}
@Override
public int docIdsToLoadSize() {
return docsIdsToLoadSize;
}
@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
this.docIdsToLoad = docIdsToLoad;
this.docsIdsToLoadFrom = docsIdsToLoadFrom;
this.docsIdsToLoadSize = docsIdsToLoadSize;
return this;
}
@Override
public void accessed(long accessTime) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public void keepAlive(long keepAlive) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public void lastEmittedDoc(ScoreDoc doc) {
throw new UnsupportedOperationException("Not supported");
}
@Override
public QuerySearchResult queryResult() {
return querySearchResult;
}
@Override
public FetchSearchResult fetchResult() {
return fetchSearchResult;
}
// Lazily built lookup over this context's mapper/fielddata services.
private SearchLookup searchLookup;
@Override
public SearchLookup lookup() {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService(), fieldData(), request().types());
}
return searchLookup;
}
@Override
public Counter timeEstimateCounter() {
throw new UnsupportedOperationException("Not supported");
}
@Override
public void innerHits(InnerHitsContext innerHitsContext) {
this.innerHitsContext = innerHitsContext;
}
@Override
public InnerHitsContext innerHits() {
return innerHitsContext;
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.resourcemanager.v3;
import static com.google.cloud.resourcemanager.v3.TagBindingsClient.ListTagBindingsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.resourcenames.ResourceName;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Unit tests for {@link TagBindingsClient}, generated by gapic-generator-java.
 * Each test points the client at an in-process mock gRPC service
 * ({@code MockTagBindings}), primes the mock with a canned response or
 * exception, invokes a client method, and then verifies the returned value,
 * the request that reached the mock, and that the standard API client header
 * was sent.
 *
 * NOTE(review): this is generated code; manual edits may be overwritten when
 * the test is regenerated.
 */
@Generated("by gapic-generator-java")
public class TagBindingsClientTest {
private static MockServiceHelper mockServiceHelper;
private static MockTagBindings mockTagBindings;
private LocalChannelProvider channelProvider;
private TagBindingsClient client;
// Starts the shared in-process mock gRPC server once for the whole class.
@BeforeClass
public static void startStaticServer() {
mockTagBindings = new MockTagBindings();
mockServiceHelper =
new MockServiceHelper(
UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockTagBindings));
mockServiceHelper.start();
}
@AfterClass
public static void stopServer() {
mockServiceHelper.stop();
}
// Resets the mock and builds a fresh client (no real credentials) per test.
@Before
public void setUp() throws IOException {
mockServiceHelper.reset();
channelProvider = mockServiceHelper.createChannelProvider();
TagBindingsSettings settings =
TagBindingsSettings.newBuilder()
.setTransportChannelProvider(channelProvider)
.setCredentialsProvider(NoCredentialsProvider.create())
.build();
client = TagBindingsClient.create(settings);
}
@After
public void tearDown() throws Exception {
client.close();
}
// Happy path: listTagBindings with a ResourceName parent returns the canned page.
@Test
public void listTagBindingsTest() throws Exception {
TagBinding responsesElement = TagBinding.newBuilder().build();
ListTagBindingsResponse expectedResponse =
ListTagBindingsResponse.newBuilder()
.setNextPageToken("")
.addAllTagBindings(Arrays.asList(responsesElement))
.build();
mockTagBindings.addResponse(expectedResponse);
ResourceName parent = FolderName.of("[FOLDER]");
ListTagBindingsPagedResponse pagedListResponse = client.listTagBindings(parent);
List<TagBinding> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getTagBindingsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockTagBindings.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListTagBindingsRequest actualRequest = ((ListTagBindingsRequest) actualRequests.get(0));
Assert.assertEquals(parent.toString(), actualRequest.getParent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Error path: a gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException.
@Test
public void listTagBindingsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockTagBindings.addException(exception);
try {
ResourceName parent = FolderName.of("[FOLDER]");
client.listTagBindings(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
// Same as listTagBindingsTest but using the String-parent overload.
@Test
public void listTagBindingsTest2() throws Exception {
TagBinding responsesElement = TagBinding.newBuilder().build();
ListTagBindingsResponse expectedResponse =
ListTagBindingsResponse.newBuilder()
.setNextPageToken("")
.addAllTagBindings(Arrays.asList(responsesElement))
.build();
mockTagBindings.addResponse(expectedResponse);
String parent = "parent-995424086";
ListTagBindingsPagedResponse pagedListResponse = client.listTagBindings(parent);
List<TagBinding> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getTagBindingsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockTagBindings.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListTagBindingsRequest actualRequest = ((ListTagBindingsRequest) actualRequests.get(0));
Assert.assertEquals(parent, actualRequest.getParent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listTagBindingsExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockTagBindings.addException(exception);
try {
String parent = "parent-995424086";
client.listTagBindings(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
// Happy path for the long-running create operation: the mock returns an
// already-completed Operation wrapping the expected TagBinding.
@Test
public void createTagBindingTest() throws Exception {
TagBinding expectedResponse =
TagBinding.newBuilder()
.setName(TagBindingName.of("[TAG_BINDING]").toString())
.setParent("parent-995424086")
.setTagValue("tagValue-772697609")
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("createTagBindingTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockTagBindings.addResponse(resultOperation);
TagBinding tagBinding = TagBinding.newBuilder().build();
TagBinding actualResponse = client.createTagBindingAsync(tagBinding).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockTagBindings.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateTagBindingRequest actualRequest = ((CreateTagBindingRequest) actualRequests.get(0));
Assert.assertEquals(tagBinding, actualRequest.getTagBinding());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// For async (long-running) calls the failure arrives via ExecutionException;
// the cause is checked to be InvalidArgumentException.
@Test
public void createTagBindingExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockTagBindings.addException(exception);
try {
TagBinding tagBinding = TagBinding.newBuilder().build();
client.createTagBindingAsync(tagBinding).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
// Happy path for the long-running delete operation (TagBindingName overload).
@Test
public void deleteTagBindingTest() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteTagBindingTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockTagBindings.addResponse(resultOperation);
TagBindingName name = TagBindingName.of("[TAG_BINDING]");
client.deleteTagBindingAsync(name).get();
List<AbstractMessage> actualRequests = mockTagBindings.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteTagBindingRequest actualRequest = ((DeleteTagBindingRequest) actualRequests.get(0));
Assert.assertEquals(name.toString(), actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void deleteTagBindingExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockTagBindings.addException(exception);
try {
TagBindingName name = TagBindingName.of("[TAG_BINDING]");
client.deleteTagBindingAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
// Same as deleteTagBindingTest but using the String-name overload.
@Test
public void deleteTagBindingTest2() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteTagBindingTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockTagBindings.addResponse(resultOperation);
String name = "name3373707";
client.deleteTagBindingAsync(name).get();
List<AbstractMessage> actualRequests = mockTagBindings.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteTagBindingRequest actualRequest = ((DeleteTagBindingRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void deleteTagBindingExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockTagBindings.addException(exception);
try {
String name = "name3373707";
client.deleteTagBindingAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.namespace;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.pulsar.broker.cache.LocalZooKeeperCacheService.LOCAL_POLICIES_ROOT;
import static org.apache.pulsar.broker.web.PulsarWebResource.joinPath;
import static org.apache.pulsar.common.naming.NamespaceBundleFactory.getBundlesData;
import static org.apache.pulsar.zookeeper.ZooKeeperCache.cacheTimeOutInSec;
import java.net.URI;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.pulsar.broker.PulsarServerException;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.ServiceConfiguration;
import org.apache.pulsar.broker.admin.AdminResource;
import org.apache.pulsar.broker.loadbalance.LoadManager;
import org.apache.pulsar.broker.loadbalance.ResourceUnit;
import org.apache.pulsar.broker.lookup.LookupResult;
import org.apache.pulsar.broker.service.BrokerServiceException.ServerMetadataException;
import org.apache.pulsar.broker.service.BrokerServiceException.ServiceUnitNotReadyException;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.common.lookup.data.LookupData;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.naming.NamespaceBundle;
import org.apache.pulsar.common.naming.NamespaceBundleFactory;
import org.apache.pulsar.common.naming.NamespaceBundles;
import org.apache.pulsar.common.naming.NamespaceName;
import org.apache.pulsar.common.naming.ServiceUnitId;
import org.apache.pulsar.common.policies.NamespaceIsolationPolicy;
import org.apache.pulsar.common.policies.data.BrokerAssignment;
import org.apache.pulsar.common.policies.data.BundlesData;
import org.apache.pulsar.common.policies.data.LocalPolicies;
import org.apache.pulsar.common.policies.data.NamespaceOwnershipStatus;
import org.apache.pulsar.common.policies.impl.NamespaceIsolationPolicies;
import org.apache.pulsar.common.util.Codec;
import org.apache.pulsar.common.util.ObjectMapperFactory;
import org.apache.pulsar.policies.data.loadbalancer.ServiceLookupData;
import org.apache.zookeeper.AsyncCallback.StatCallback;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.hash.Hashing;
/**
* The <code>NamespaceService</code> provides resource ownership lookup as well as resource ownership claiming services
* for the <code>PulsarService</code>.
* <p/>
* The <code>PulsarService</code> relies on this service for resource ownership operations.
* <p/>
* The focus of this phase is to bring up the system and be able to iterate and improve the services effectively.
* <p/>
*
* @see org.apache.pulsar.broker.PulsarService
*/
public class NamespaceService {
public enum AddressType {
BROKER_URL, LOOKUP_URL
}
private static final Logger LOG = LoggerFactory.getLogger(NamespaceService.class);
private final ServiceConfiguration config;
private final AtomicReference<LoadManager> loadManager;
private final PulsarService pulsar;
private final OwnershipCache ownershipCache;
private final NamespaceBundleFactory bundleFactory;
private int uncountedNamespaces;
private final String host;
private static final int BUNDLE_SPLIT_RETRY_LIMIT = 7;
public static final String SLA_NAMESPACE_PROPERTY = "sla-monitor";
public static final Pattern HEARTBEAT_NAMESPACE_PATTERN = Pattern.compile("pulsar/[^/]+/([^:]+:\\d+)");
public static final Pattern SLA_NAMESPACE_PATTERN = Pattern.compile(SLA_NAMESPACE_PROPERTY + "/[^/]+/([^:]+:\\d+)");
public static final String HEARTBEAT_NAMESPACE_FMT = "pulsar/%s/%s:%s";
public static final String SLA_NAMESPACE_FMT = SLA_NAMESPACE_PROPERTY + "/%s/%s:%s";
/**
* Default constructor.
*
* @throws PulsarServerException
*/
public NamespaceService(PulsarService pulsar) {
this.pulsar = pulsar;
host = pulsar.getAdvertisedAddress();
this.config = pulsar.getConfiguration();
this.loadManager = pulsar.getLoadManager();
ServiceUnitZkUtils.initZK(pulsar.getLocalZkCache().getZooKeeper(), pulsar.getBrokerServiceUrl());
this.bundleFactory = new NamespaceBundleFactory(pulsar, Hashing.crc32());
this.ownershipCache = new OwnershipCache(pulsar, bundleFactory);
}
public CompletableFuture<Optional<LookupResult>> getBrokerServiceUrlAsync(TopicName topic,
boolean authoritative) {
return getBundleAsync(topic)
.thenCompose(bundle -> findBrokerServiceUrl(bundle, authoritative, false /* read-only */));
}
public CompletableFuture<NamespaceBundle> getBundleAsync(TopicName topic) {
return bundleFactory.getBundlesAsync(topic.getNamespaceObject())
.thenApply(bundles -> bundles.findBundle(topic));
}
public NamespaceBundle getBundle(TopicName topicName) throws Exception {
return bundleFactory.getBundles(topicName.getNamespaceObject()).findBundle(topicName);
}
public int getBundleCount(NamespaceName namespace) throws Exception {
return bundleFactory.getBundles(namespace).size();
}
private NamespaceBundle getFullBundle(NamespaceName fqnn) throws Exception {
return bundleFactory.getFullBundle(fqnn);
}
/**
* Return the URL of the broker who's owning a particular service unit.
*
* If the service unit is not owned, return an empty optional
*/
public Optional<URL> getWebServiceUrl(ServiceUnitId suName, boolean authoritative, boolean isRequestHttps, boolean readOnly)
throws Exception {
if (suName instanceof TopicName) {
TopicName name = (TopicName) suName;
if (LOG.isDebugEnabled()) {
LOG.debug("Getting web service URL of topic: {} - auth: {}", name, authoritative);
}
return this.internalGetWebServiceUrl(getBundle(name), authoritative, isRequestHttps, readOnly).get();
}
if (suName instanceof NamespaceName) {
return this.internalGetWebServiceUrl(getFullBundle((NamespaceName) suName), authoritative, isRequestHttps, readOnly).get();
}
if (suName instanceof NamespaceBundle) {
return this.internalGetWebServiceUrl((NamespaceBundle) suName, authoritative, isRequestHttps, readOnly).get();
}
throw new IllegalArgumentException("Unrecognized class of NamespaceBundle: " + suName.getClass().getName());
}
private CompletableFuture<Optional<URL>> internalGetWebServiceUrl(NamespaceBundle bundle, boolean authoritative,
boolean isRequestHttps, boolean readOnly) {
return findBrokerServiceUrl(bundle, authoritative, readOnly).thenApply(lookupResult -> {
if (lookupResult.isPresent()) {
try {
LookupData lookupData = lookupResult.get().getLookupData();
final String redirectUrl = isRequestHttps ? lookupData.getHttpUrlTls() : lookupData.getHttpUrl();
return Optional.of(new URL(redirectUrl));
} catch (Exception e) {
// just log the exception, nothing else to do
LOG.warn("internalGetWebServiceUrl [{}]", e.getMessage(), e);
}
}
return Optional.empty();
});
}
/**
* Register all the bootstrap name spaces including the heartbeat namespace
*
* @return
* @throws PulsarServerException
*/
public void registerBootstrapNamespaces() throws PulsarServerException {
// ensure that we own the heartbeat namespace
if (registerNamespace(getHeartbeatNamespace(host, config), true)) {
this.uncountedNamespaces++;
LOG.info("added heartbeat namespace name in local cache: ns={}", getHeartbeatNamespace(host, config));
}
// we may not need strict ownership checking for bootstrap names for now
for (String namespace : config.getBootstrapNamespaces()) {
if (registerNamespace(namespace, false)) {
LOG.info("added bootstrap namespace name in local cache: ns={}", namespace);
}
}
}
/**
* Tried to registers a namespace to this instance
*
* @param namespace
* @param ensureOwned
* @return
* @throws PulsarServerException
* @throws Exception
*/
private boolean registerNamespace(String namespace, boolean ensureOwned) throws PulsarServerException {
String myUrl = pulsar.getBrokerServiceUrl();
try {
NamespaceName nsname = NamespaceName.get(namespace);
String otherUrl = null;
NamespaceBundle nsFullBundle = null;
// all pre-registered namespace is assumed to have bundles disabled
nsFullBundle = bundleFactory.getFullBundle(nsname);
// v2 namespace will always use full bundle object
otherUrl = ownershipCache.tryAcquiringOwnership(nsFullBundle).get().getNativeUrl();
if (myUrl.equals(otherUrl)) {
if (nsFullBundle != null) {
// preload heartbeat namespace
pulsar.loadNamespaceTopics(nsFullBundle);
}
return true;
}
String msg = String.format("namespace already owned by other broker : ns=%s expected=%s actual=%s",
namespace, myUrl, otherUrl);
// ignore if not be owned for now
if (!ensureOwned) {
LOG.info(msg);
return false;
}
// should not happen
throw new IllegalStateException(msg);
} catch (Exception e) {
LOG.error(e.getMessage(), e);
throw new PulsarServerException(e);
}
}
/**
* Main internal method to lookup and setup ownership of service unit to a broker
*
* @param bundle
* @param authoritative
* @param readOnly
* @return
* @throws PulsarServerException
*/
private CompletableFuture<Optional<LookupResult>> findBrokerServiceUrl(NamespaceBundle bundle, boolean authoritative,
boolean readOnly) {
if (LOG.isDebugEnabled()) {
LOG.debug("findBrokerServiceUrl: {} - read-only: {}", bundle, readOnly);
}
CompletableFuture<Optional<LookupResult>> future = new CompletableFuture<>();
// First check if we or someone else already owns the bundle
ownershipCache.getOwnerAsync(bundle).thenAccept(nsData -> {
if (!nsData.isPresent()) {
// No one owns this bundle
if (readOnly) {
// Do not attempt to acquire ownership
future.complete(Optional.empty());
} else {
// Now, no one owns the namespace yet. Hence, we will try to dynamically assign it
pulsar.getExecutor().execute(() -> {
searchForCandidateBroker(bundle, future, authoritative);
});
}
} else if (nsData.get().isDisabled()) {
future.completeExceptionally(
new IllegalStateException(String.format("Namespace bundle %s is being unloaded", bundle)));
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Namespace bundle {} already owned by {} ", bundle, nsData);
}
future.complete(Optional.of(new LookupResult(nsData.get())));
}
}).exceptionally(exception -> {
LOG.warn("Failed to check owner for bundle {}: {}", bundle, exception.getMessage(), exception);
future.completeExceptionally(exception);
return null;
});
return future;
}
private void searchForCandidateBroker(NamespaceBundle bundle,
CompletableFuture<Optional<LookupResult>> lookupFuture, boolean authoritative) {
String candidateBroker = null;
try {
// check if this is Heartbeat or SLAMonitor namespace
candidateBroker = checkHeartbeatNamespace(bundle);
if (candidateBroker == null) {
String broker = getSLAMonitorBrokerName(bundle);
// checking if the broker is up and running
if (broker != null && isBrokerActive(broker)) {
candidateBroker = broker;
}
}
if (candidateBroker == null) {
if (!this.loadManager.get().isCentralized() || pulsar.getLeaderElectionService().isLeader()) {
Optional<String> availableBroker = getLeastLoadedFromLoadManager(bundle);
if (!availableBroker.isPresent()) {
lookupFuture.complete(Optional.empty());
return;
}
candidateBroker = availableBroker.get();
} else {
if (authoritative) {
// leader broker already assigned the current broker as owner
candidateBroker = pulsar.getWebServiceAddress();
} else {
// forward to leader broker to make assignment
candidateBroker = pulsar.getLeaderElectionService().getCurrentLeader().getServiceUrl();
}
}
}
} catch (Exception e) {
LOG.warn("Error when searching for candidate broker to acquire {}: {}", bundle, e.getMessage(), e);
lookupFuture.completeExceptionally(e);
return;
}
try {
checkNotNull(candidateBroker);
if (pulsar.getWebServiceAddress().equals(candidateBroker)) {
// Load manager decided that the local broker should try to become the owner
ownershipCache.tryAcquiringOwnership(bundle).thenAccept(ownerInfo -> {
if (ownerInfo.isDisabled()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Namespace bundle {} is currently being unloaded", bundle);
}
lookupFuture.completeExceptionally(new IllegalStateException(
String.format("Namespace bundle %s is currently being unloaded", bundle)));
} else {
// Found owner for the namespace bundle
// Schedule the task to pre-load topics
pulsar.loadNamespaceTopics(bundle);
lookupFuture.complete(Optional.of(new LookupResult(ownerInfo)));
}
}).exceptionally(exception -> {
LOG.warn("Failed to acquire ownership for namespace bundle {}: ", bundle, exception.getMessage(),
exception);
lookupFuture.completeExceptionally(new PulsarServerException(
"Failed to acquire ownership for namespace bundle " + bundle, exception));
return null;
});
} else {
// Load managed decider some other broker should try to acquire ownership
if (LOG.isDebugEnabled()) {
LOG.debug("Redirecting to broker {} to acquire ownership of bundle {}", candidateBroker, bundle);
}
// Now setting the redirect url
createLookupResult(candidateBroker)
.thenAccept(lookupResult -> lookupFuture.complete(Optional.of(lookupResult)))
.exceptionally(ex -> {
lookupFuture.completeExceptionally(ex);
return null;
});
}
} catch (Exception e) {
LOG.warn("Error in trying to acquire namespace bundle ownership for {}: {}", bundle, e.getMessage(), e);
lookupFuture.completeExceptionally(e);
}
}
protected CompletableFuture<LookupResult> createLookupResult(String candidateBroker) throws Exception {
CompletableFuture<LookupResult> lookupFuture = new CompletableFuture<>();
try {
checkArgument(StringUtils.isNotBlank(candidateBroker), "Lookup broker can't be null " + candidateBroker);
URI uri = new URI(candidateBroker);
String path = String.format("%s/%s:%s", LoadManager.LOADBALANCE_BROKERS_ROOT, uri.getHost(),
uri.getPort());
pulsar.getLocalZkCache().getDataAsync(path, pulsar.getLoadManager().get().getLoadReportDeserializer()).thenAccept(reportData -> {
if (reportData.isPresent()) {
ServiceLookupData lookupData = reportData.get();
lookupFuture.complete(new LookupResult(lookupData.getWebServiceUrl(),
lookupData.getWebServiceUrlTls(), lookupData.getPulsarServiceUrl(),
lookupData.getPulsarServiceUrlTls()));
} else {
lookupFuture.completeExceptionally(new KeeperException.NoNodeException(path));
}
}).exceptionally(ex -> {
lookupFuture.completeExceptionally(ex);
return null;
});
} catch (Exception e) {
lookupFuture.completeExceptionally(e);
}
return lookupFuture;
}
private boolean isBrokerActive(String candidateBroker) throws KeeperException, InterruptedException {
Set<String> activeNativeBrokers = pulsar.getLocalZkCache().getChildren(LoadManager.LOADBALANCE_BROKERS_ROOT);
for (String brokerHostPort : activeNativeBrokers) {
if (candidateBroker.equals("http://" + brokerHostPort)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Broker {} found for SLA Monitoring Namespace", brokerHostPort);
}
return true;
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Broker not found for SLA Monitoring Namespace {}",
candidateBroker + ":" + config.getWebServicePort());
}
return false;
}
/**
* Helper function to encapsulate the logic to invoke between old and new load manager
*
* @return
* @throws Exception
*/
private Optional<String> getLeastLoadedFromLoadManager(ServiceUnitId serviceUnit) throws Exception {
Optional<ResourceUnit> leastLoadedBroker = loadManager.get().getLeastLoaded(serviceUnit);
if (!leastLoadedBroker.isPresent()) {
LOG.warn("No broker is available for {}", serviceUnit);
return Optional.empty();
}
String lookupAddress = leastLoadedBroker.get().getResourceId();
if (LOG.isDebugEnabled()) {
LOG.debug("{} : redirecting to the least loaded broker, lookup address={}", pulsar.getWebServiceAddress(),
lookupAddress);
}
return Optional.of(lookupAddress);
}
public void unloadNamespaceBundle(NamespaceBundle bundle) throws Exception {
unloadNamespaceBundle(bundle, 5, TimeUnit.MINUTES);
}
public void unloadNamespaceBundle(NamespaceBundle bundle, long timeout, TimeUnit timeoutUnit) throws Exception {
checkNotNull(ownershipCache.getOwnedBundle(bundle)).handleUnloadRequest(pulsar, timeout, timeoutUnit);
}
public Map<String, NamespaceOwnershipStatus> getOwnedNameSpacesStatus() throws Exception {
NamespaceIsolationPolicies nsIsolationPolicies = this.getLocalNamespaceIsolationPolicies();
Map<String, NamespaceOwnershipStatus> ownedNsStatus = new HashMap<String, NamespaceOwnershipStatus>();
for (OwnedBundle nsObj : this.ownershipCache.getOwnedBundles().values()) {
NamespaceOwnershipStatus nsStatus = this.getNamespaceOwnershipStatus(nsObj,
nsIsolationPolicies.getPolicyByNamespace(nsObj.getNamespaceBundle().getNamespaceObject()));
ownedNsStatus.put(nsObj.getNamespaceBundle().toString(), nsStatus);
}
return ownedNsStatus;
}
private NamespaceOwnershipStatus getNamespaceOwnershipStatus(OwnedBundle nsObj,
NamespaceIsolationPolicy nsIsolationPolicy) {
NamespaceOwnershipStatus nsOwnedStatus = new NamespaceOwnershipStatus(BrokerAssignment.shared, false,
nsObj.isActive());
if (nsIsolationPolicy == null) {
// no matching policy found, this namespace must be an uncontrolled one and using shared broker
return nsOwnedStatus;
}
// found corresponding policy, set the status to controlled
nsOwnedStatus.is_controlled = true;
if (nsIsolationPolicy.isPrimaryBroker(pulsar.getAdvertisedAddress())) {
nsOwnedStatus.broker_assignment = BrokerAssignment.primary;
} else if (nsIsolationPolicy.isSecondaryBroker(pulsar.getAdvertisedAddress())) {
nsOwnedStatus.broker_assignment = BrokerAssignment.secondary;
}
return nsOwnedStatus;
}
private NamespaceIsolationPolicies getLocalNamespaceIsolationPolicies() throws Exception {
String localCluster = pulsar.getConfiguration().getClusterName();
return pulsar.getConfigurationCache().namespaceIsolationPoliciesCache()
.get(AdminResource.path("clusters", localCluster, "namespaceIsolationPolicies")).orElseGet(() -> {
// the namespace isolation policies are empty/undefined = an empty object
return new NamespaceIsolationPolicies();
});
}
public boolean isNamespaceBundleDisabled(NamespaceBundle bundle) throws Exception {
try {
// Does ZooKeeper says that the namespace is disabled?
CompletableFuture<Optional<NamespaceEphemeralData>> nsDataFuture = ownershipCache.getOwnerAsync(bundle);
if (nsDataFuture != null) {
Optional<NamespaceEphemeralData> nsData = nsDataFuture.getNow(null);
if (nsData != null && nsData.isPresent()) {
return nsData.get().isDisabled();
} else {
return false;
}
} else {
// if namespace is not owned, it is not considered disabled
return false;
}
} catch (Exception e) {
LOG.warn("Exception in getting ownership info for service unit {}: {}", bundle, e.getMessage(), e);
}
return false;
}
/**
* 1. split the given bundle into two bundles 2. assign ownership of both the bundles to current broker 3. update
* policies with newly created bundles into LocalZK 4. disable original bundle and refresh the cache.
*
* It will call splitAndOwnBundleOnceAndRetry to do the real retry work, which will retry "retryTimes".
*
* @param bundle
* @return
* @throws Exception
*/
public CompletableFuture<Void> splitAndOwnBundle(NamespaceBundle bundle, boolean unload)
throws Exception {
final CompletableFuture<Void> unloadFuture = new CompletableFuture<>();
final AtomicInteger counter = new AtomicInteger(BUNDLE_SPLIT_RETRY_LIMIT);
splitAndOwnBundleOnceAndRetry(bundle, unload, counter, unloadFuture);
return unloadFuture;
}
void splitAndOwnBundleOnceAndRetry(NamespaceBundle bundle,
boolean unload,
AtomicInteger counter,
CompletableFuture<Void> unloadFuture) {
CompletableFuture<NamespaceBundles> updateFuture = new CompletableFuture<>();
final Pair<NamespaceBundles, List<NamespaceBundle>> splittedBundles = bundleFactory.splitBundles(bundle,
2 /* by default split into 2 */);
// Split and updateNamespaceBundles. Update may fail because of concurrent write to Zookeeper.
if (splittedBundles != null) {
checkNotNull(splittedBundles.getLeft());
checkNotNull(splittedBundles.getRight());
checkArgument(splittedBundles.getRight().size() == 2, "bundle has to be split in two bundles");
NamespaceName nsname = bundle.getNamespaceObject();
if (LOG.isDebugEnabled()) {
LOG.debug("[{}] splitAndOwnBundleOnce: {}, counter: {}, 2 bundles: {}, {}",
nsname.toString(), bundle.getBundleRange(), counter.get(),
splittedBundles != null ? splittedBundles.getRight().get(0).getBundleRange() : "null splittedBundles",
splittedBundles != null ? splittedBundles.getRight().get(1).getBundleRange() : "null splittedBundles");
}
try {
// take ownership of newly split bundles
for (NamespaceBundle sBundle : splittedBundles.getRight()) {
checkNotNull(ownershipCache.tryAcquiringOwnership(sBundle));
}
updateNamespaceBundles(nsname, splittedBundles.getLeft(),
(rc, path, zkCtx, stat) -> {
if (rc == Code.OK.intValue()) {
// invalidate cache as zookeeper has new split
// namespace bundle
bundleFactory.invalidateBundleCache(bundle.getNamespaceObject());
updateFuture.complete(splittedBundles.getLeft());
} else if (rc == Code.BADVERSION.intValue()) {
KeeperException keeperException = KeeperException.create(KeeperException.Code.get(rc));
String msg = format("failed to update namespace policies [%s], NamespaceBundle: %s " +
"due to %s, counter: %d",
nsname.toString(), bundle.getBundleRange(),
keeperException.getMessage(), counter.get());
LOG.warn(msg);
updateFuture.completeExceptionally(new ServerMetadataException(keeperException));
} else {
String msg = format("failed to update namespace policies [%s], NamespaceBundle: %s due to %s",
nsname.toString(), bundle.getBundleRange(),
KeeperException.create(KeeperException.Code.get(rc)).getMessage());
LOG.warn(msg);
updateFuture.completeExceptionally(new ServiceUnitNotReadyException(msg));
}
});
} catch (Exception e) {
String msg = format("failed to acquire ownership of split bundle for namespace [%s], %s",
nsname.toString(), e.getMessage());
LOG.warn(msg, e);
updateFuture.completeExceptionally(new ServiceUnitNotReadyException(msg));
}
} else {
String msg = format("bundle %s not found under namespace", bundle.toString());
LOG.warn(msg);
updateFuture.completeExceptionally(new ServiceUnitNotReadyException(msg));
}
// If success updateNamespaceBundles, then do invalidateBundleCache and unload.
// Else retry splitAndOwnBundleOnceAndRetry.
updateFuture.whenCompleteAsync((r, t)-> {
if (t != null) {
// retry several times on BadVersion
if ((t instanceof ServerMetadataException) && (counter.decrementAndGet() >= 0)) {
pulsar.getOrderedExecutor().submit(
() -> splitAndOwnBundleOnceAndRetry(bundle, unload, counter, unloadFuture));
} else {
// Retry enough, or meet other exception
String msg2 = format(" %s not success update nsBundles, counter %d, reason %s",
bundle.toString(), counter.get(), t.getMessage());
LOG.warn(msg2);
unloadFuture.completeExceptionally(new ServiceUnitNotReadyException(msg2));
}
return;
}
// success updateNamespaceBundles
try {
// disable old bundle in memory
getOwnershipCache().updateBundleState(bundle, false);
// update bundled_topic cache for load-report-generation
pulsar.getBrokerService().refreshTopicToStatsMaps(bundle);
loadManager.get().setLoadReportForceUpdateFlag();
if (unload) {
// unload new split bundles
r.getBundles().forEach(splitBundle -> {
try {
unloadNamespaceBundle(splitBundle);
} catch (Exception e) {
LOG.warn("Failed to unload split bundle {}", splitBundle, e);
throw new RuntimeException("Failed to unload split bundle " + splitBundle, e);
}
});
}
unloadFuture.complete(null);
} catch (Exception e) {
String msg1 = format(
"failed to disable bundle %s under namespace [%s] with error %s",
bundle.getNamespaceObject().toString(), bundle.toString(), e.getMessage());
LOG.warn(msg1, e);
unloadFuture.completeExceptionally(new ServiceUnitNotReadyException(msg1));
}
return;
}, pulsar.getOrderedExecutor());
}
/**
* Update new bundle-range to LocalZk (create a new node if not present).
* Update may fail because of concurrent write to Zookeeper.
*
* @param nsname
* @param nsBundles
* @param callback
* @throws Exception
*/
private void updateNamespaceBundles(NamespaceName nsname, NamespaceBundles nsBundles, StatCallback callback)
throws Exception {
checkNotNull(nsname);
checkNotNull(nsBundles);
String path = joinPath(LOCAL_POLICIES_ROOT, nsname.toString());
Optional<LocalPolicies> policies = pulsar.getLocalZkCacheService().policiesCache().get(path);
if (!policies.isPresent()) {
// if policies is not present into localZk then create new policies
this.pulsar.getLocalZkCacheService().createPolicies(path, false).get(cacheTimeOutInSec, SECONDS);
}
long version = nsBundles.getVersion();
LocalPolicies local = new LocalPolicies();
local.bundles = getBundlesData(nsBundles);
byte[] data = ObjectMapperFactory.getThreadLocal().writeValueAsBytes(local);
this.pulsar.getLocalZkCache().getZooKeeper()
.setData(path, data, Math.toIntExact(version), callback, null);
// invalidate namespace's local-policies
this.pulsar.getLocalZkCacheService().policiesCache().invalidate(path);
}
public OwnershipCache getOwnershipCache() {
return ownershipCache;
}
public int getTotalServiceUnitsLoaded() {
return ownershipCache.getOwnedBundles().size() - this.uncountedNamespaces;
}
public Set<NamespaceBundle> getOwnedServiceUnits() {
return ownershipCache.getOwnedBundles().values().stream().map(OwnedBundle::getNamespaceBundle)
.collect(Collectors.toSet());
}
public boolean isServiceUnitOwned(ServiceUnitId suName) throws Exception {
if (suName instanceof TopicName) {
return isTopicOwned((TopicName) suName);
}
if (suName instanceof NamespaceName) {
return isNamespaceOwned((NamespaceName) suName);
}
if (suName instanceof NamespaceBundle) {
return ownershipCache.isNamespaceBundleOwned((NamespaceBundle) suName);
}
throw new IllegalArgumentException("Invalid class of NamespaceBundle: " + suName.getClass().getName());
}
public boolean isServiceUnitActive(TopicName topicName) {
try {
return ownershipCache.getOwnedBundle(getBundle(topicName)).isActive();
} catch (Exception e) {
LOG.warn("Unable to find OwnedBundle for topic - [{}]", topicName);
return false;
}
}
private boolean isNamespaceOwned(NamespaceName fqnn) throws Exception {
return ownershipCache.getOwnedBundle(getFullBundle(fqnn)) != null;
}
private CompletableFuture<Boolean> isTopicOwnedAsync(TopicName topic) {
return getBundleAsync(topic).thenApply(bundle -> ownershipCache.isNamespaceBundleOwned(bundle));
}
private boolean isTopicOwned(TopicName topicName) throws Exception {
return ownershipCache.getOwnedBundle(getBundle(topicName)) != null;
}
public void removeOwnedServiceUnit(NamespaceName nsName) throws Exception {
ownershipCache.removeOwnership(getFullBundle(nsName)).get(cacheTimeOutInSec, SECONDS);
bundleFactory.invalidateBundleCache(nsName);
}
public void removeOwnedServiceUnit(NamespaceBundle nsBundle) throws Exception {
ownershipCache.removeOwnership(nsBundle).get(cacheTimeOutInSec, SECONDS);
bundleFactory.invalidateBundleCache(nsBundle.getNamespaceObject());
}
public void removeOwnedServiceUnits(NamespaceName nsName, BundlesData bundleData) throws Exception {
ownershipCache.removeOwnership(bundleFactory.getBundles(nsName, bundleData)).get(cacheTimeOutInSec, SECONDS);
bundleFactory.invalidateBundleCache(nsName);
}
public NamespaceBundleFactory getNamespaceBundleFactory() {
return bundleFactory;
}
public ServiceUnitId getServiceUnitId(TopicName topicName) throws Exception {
return getBundle(topicName);
}
public List<String> getListOfTopics(NamespaceName namespaceName) throws Exception {
List<String> topics = Lists.newArrayList();
// For every topic there will be a managed ledger created.
try {
String path = String.format("/managed-ledgers/%s/persistent", namespaceName);
if (LOG.isDebugEnabled()) {
LOG.debug("Getting children from managed-ledgers now: {}", path);
}
for (String topic : pulsar.getLocalZkCacheService().managedLedgerListCache().get(path)) {
topics.add(String.format("persistent://%s/%s", namespaceName, Codec.decode(topic)));
}
} catch (KeeperException.NoNodeException e) {
// NoNode means there are no persistent topics for this namespace
}
topics.sort(null);
return topics;
}
public Optional<NamespaceEphemeralData> getOwner(NamespaceBundle bundle) throws Exception {
// if there is no znode for the service unit, it is not owned by any broker
return getOwnerAsync(bundle).get(cacheTimeOutInSec, SECONDS);
}
public CompletableFuture<Optional<NamespaceEphemeralData>> getOwnerAsync(NamespaceBundle bundle) {
return ownershipCache.getOwnerAsync(bundle);
}
public void unloadSLANamespace() throws Exception {
PulsarAdmin adminClient = null;
String namespaceName = getSLAMonitorNamespace(host, config);
LOG.info("Checking owner for SLA namespace {}", namespaceName);
NamespaceBundle nsFullBundle = getFullBundle(NamespaceName.get(namespaceName));
if (!getOwner(nsFullBundle).isPresent()) {
// No one owns the namespace so no point trying to unload it
// Next lookup will assign the bundle to this broker.
return;
}
LOG.info("Trying to unload SLA namespace {}", namespaceName);
adminClient = pulsar.getAdminClient();
adminClient.namespaces().unload(namespaceName);
LOG.info("Namespace {} unloaded successfully", namespaceName);
}
public static String getHeartbeatNamespace(String host, ServiceConfiguration config) {
return String.format(HEARTBEAT_NAMESPACE_FMT, config.getClusterName(), host, config.getWebServicePort());
}
public static String getSLAMonitorNamespace(String host, ServiceConfiguration config) {
return String.format(SLA_NAMESPACE_FMT, config.getClusterName(), host, config.getWebServicePort());
}
public static String checkHeartbeatNamespace(ServiceUnitId ns) {
Matcher m = HEARTBEAT_NAMESPACE_PATTERN.matcher(ns.getNamespaceObject().toString());
if (m.matches()) {
LOG.debug("SLAMonitoring namespace matched the lookup namespace {}", ns.getNamespaceObject().toString());
return String.format("http://%s", m.group(1));
} else {
return null;
}
}
public static String getSLAMonitorBrokerName(ServiceUnitId ns) {
Matcher m = SLA_NAMESPACE_PATTERN.matcher(ns.getNamespaceObject().toString());
if (m.matches()) {
return String.format("http://%s", m.group(1));
} else {
return null;
}
}
/**
 * Attempts to register (acquire ownership of) this broker's SLA monitoring
 * namespace.
 *
 * @return {@code true} if this broker acquired ownership of the namespace
 * @throws PulsarServerException if the registration attempt fails
 */
public boolean registerSLANamespace() throws PulsarServerException {
    // The namespace name is a pure format of host/config; compute it once.
    String slaNamespace = getSLAMonitorNamespace(host, config);
    boolean isNameSpaceRegistered = registerNamespace(slaNamespace, false);
    if (isNameSpaceRegistered) {
        this.uncountedNamespaces++;
        if (LOG.isDebugEnabled()) {
            LOG.debug("Added SLA Monitoring namespace name in local cache: ns={}", slaNamespace);
        }
    } else if (LOG.isDebugEnabled()) {
        LOG.debug("SLA Monitoring not owned by the broker: ns={}", slaNamespace);
    }
    return isNameSpaceRegistered;
}
}
| |
/*-------------------------------------------------------------------------
*
* Copyright (c) 2004-2014, PostgreSQL Global Development Group
*
*
*-------------------------------------------------------------------------
*/
package org.postgresql.test.jdbc2.optional;
import org.postgresql.PGConnection;
import org.postgresql.jdbc2.optional.ConnectionPool;
import org.postgresql.ds.PGConnectionPoolDataSource;
import org.postgresql.test.TestUtil;
import javax.sql.*;
import java.net.Socket;
import java.net.SocketImpl;
import java.sql.*;
import java.util.*;
import java.io.*;
/**
* Tests for the ConnectionPoolDataSource and PooledConnection
* implementations. They are tested together because the only client
* interface to the PooledConnection is through the CPDS.
*
* @author Aaron Mulder (ammulder@chariotsolutions.com)
*/
public class ConnectionPoolTest extends BaseDataSourceTest
{
// Every PooledConnection handed out by getPooledConnection(); closed in tearDown().
private ArrayList connections = new ArrayList();
/**
 * Constructor required by JUnit
 */
public ConnectionPoolTest(String name)
{
super(name);
}
/**
 * Creates and configures a ConnectionPool
 */
protected void initializeDataSource()
{
if (bds == null)
{
bds = new ConnectionPool();
setupDataSource(bds);
}
}
protected void tearDown() throws Exception
{
// Best-effort cleanup of every pooled connection opened during the test.
for (Iterator i = connections.iterator(); i.hasNext(); ) {
PooledConnection c = (PooledConnection) i.next();
try {
c.close();
} catch (Exception ex) {
// close throws nullptr or other evil things if the connection
// is already closed
}
}
}
/**
 * Though the normal client interface is to grab a Connection, in
 * order to test the middleware/server interface, we need to deal
 * with PooledConnections. Some tests use each.
 */
protected PooledConnection getPooledConnection() throws SQLException
{
initializeDataSource();
// we need to recast to PGConnectionPool rather than
// jdbc.optional.ConnectionPool because our ObjectFactory
// returns only the top level class, not the specific
// jdbc2/jdbc3 implementations.
PooledConnection c = ((PGConnectionPoolDataSource)bds).getPooledConnection();
connections.add(c);
return c;
}
/**
 * Instead of just fetching a Connection from the ConnectionPool,
 * get a PooledConnection, add a listener to close it when the
 * Connection is closed, and then get the Connection. Without
 * the listener the PooledConnection (and thus the physical connection)
 * would never be closed. Probably not a disaster during testing, but
 * you never know.
 */
protected Connection getDataSourceConnection() throws SQLException
{
initializeDataSource();
final PooledConnection pc = getPooledConnection();
// Since the pooled connection won't be reused in these basic tests, close it when the connection is closed
pc.addConnectionEventListener(new ConnectionEventListener()
{
public void connectionClosed(ConnectionEvent event)
{
try
{
pc.close();
}
catch (SQLException e)
{
fail("Unable to close PooledConnection: " + e);
}
}
public void connectionErrorOccurred(ConnectionEvent event)
{
}
}
);
return pc.getConnection();
}
/**
 * Makes sure that if you get a connection from a PooledConnection,
 * close it, and then get another one, you're really using the same
 * physical connection. Depends on the implementation of toString
 * for the connection handle.
 */
public void testPoolReuse()
{
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
String name = con.toString();
con.close();
con = pc.getConnection();
String name2 = con.toString();
con.close();
pc.close();
assertTrue("Physical connection doesn't appear to be reused across PooledConnection wrappers", name.equals(name2));
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Makes sure that when you request a connection from the
 * PooledConnection, any previous connection it might have given
 * out is closed. See JDBC 2.0 Optional Package spec section
 * 6.2.3
 */
public void testPoolCloseOldWrapper()
{
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
Connection con2 = pc.getConnection();
try
{
con.createStatement();
fail("Original connection wrapper should be closed when new connection wrapper is generated");
}
catch (SQLException e)
{
}
con2.close();
pc.close();
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Makes sure that if you get two connection wrappers from the same
 * PooledConnection, they are different, even though they represent
 * the same physical connection. See JDBC 2.0 Optional Package spec
 * section 6.2.2
 */
public void testPoolNewWrapper()
{
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
Connection con2 = pc.getConnection();
con2.close();
pc.close();
assertTrue("Two calls to PooledConnection.getConnection should not return the same connection wrapper", con != con2);
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Makes sure that exactly one close event is fired for each time a
 * connection handle is closed. Also checks that events are not
 * fired after a given handle has been closed once.
 */
public void testCloseEvent()
{
try
{
PooledConnection pc = getPooledConnection();
CountClose cc = new CountClose();
pc.addConnectionEventListener(cc);
con = pc.getConnection();
assertTrue(cc.getCount() == 0);
assertTrue(cc.getErrorCount() == 0);
con.close();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
con = pc.getConnection();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
con.close();
assertTrue(cc.getCount() == 2);
assertTrue(cc.getErrorCount() == 0);
// a double close shouldn't fire additional events
con.close();
assertTrue(cc.getCount() == 2);
assertTrue(cc.getErrorCount() == 0);
pc.close();
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Makes sure that close events are not fired after a listener has
 * been removed.
 */
public void testNoCloseEvent()
{
try
{
PooledConnection pc = getPooledConnection();
CountClose cc = new CountClose();
pc.addConnectionEventListener(cc);
con = pc.getConnection();
assertTrue(cc.getCount() == 0);
assertTrue(cc.getErrorCount() == 0);
con.close();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
pc.removeConnectionEventListener(cc);
con = pc.getConnection();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
con.close();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Makes sure that a listener can be removed while dispatching
 * events. Sometimes this causes a ConcurrentModificationException
 * or something.
 */
public void testInlineCloseEvent()
{
try
{
PooledConnection pc = getPooledConnection();
RemoveClose rc1 = new RemoveClose();
RemoveClose rc2 = new RemoveClose();
RemoveClose rc3 = new RemoveClose();
pc.addConnectionEventListener(rc1);
pc.addConnectionEventListener(rc2);
pc.addConnectionEventListener(rc3);
con = pc.getConnection();
con.close();
con = pc.getConnection();
con.close();
}
catch (Exception e)
{
fail(e.getMessage());
}
}
/**
 * Tests that a close event is not generated when a connection
 * handle is closed automatically due to a new connection handle
 * being opened for the same PooledConnection. See JDBC 2.0
 * Optional Package spec section 6.3
 */
public void testAutomaticCloseEvent()
{
try
{
PooledConnection pc = getPooledConnection();
CountClose cc = new CountClose();
pc.addConnectionEventListener(cc);
con = pc.getConnection();
assertTrue(cc.getCount() == 0);
assertTrue(cc.getErrorCount() == 0);
con.close();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
con = pc.getConnection();
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
// Open a 2nd connection, causing the first to be closed. No event should be generated.
Connection con2 = pc.getConnection();
assertTrue("Connection handle was not closed when new handle was opened", con.isClosed());
assertTrue(cc.getCount() == 1);
assertTrue(cc.getErrorCount() == 0);
con2.close();
assertTrue(cc.getCount() == 2);
assertTrue(cc.getErrorCount() == 0);
pc.close();
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Makes sure the isClosed method on a connection wrapper does what
 * you'd expect. Checks the usual case, as well as automatic
 * closure when a new handle is opened on the same physical connection.
 */
public void testIsClosed()
{
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
assertTrue(!con.isClosed());
con.close();
assertTrue(con.isClosed());
con = pc.getConnection();
Connection con2 = pc.getConnection();
assertTrue(con.isClosed());
assertTrue(!con2.isClosed());
con2.close();
assertTrue(con.isClosed());
pc.close();
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Make sure that close status of pooled connection reflect the one
 * of the underlying physical connection.
 * @throws Exception
 */
public void testBackendIsClosed() throws Exception
{
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
assertTrue(!con.isClosed());
int pid = ((PGConnection) con).getBackendPID();
// Kill the backend process from a separate, privileged connection.
Connection adminCon = TestUtil.openPrivilegedDB();
try
{
Statement statement = adminCon.createStatement();
statement.executeQuery("SELECT pg_terminate_backend("+pid+")");
}
finally
{
TestUtil.closeDB(adminCon);
}
try
{
Statement statement = con.createStatement();
statement.executeQuery("SELECT 1");
fail("The connection should not be opened anymore. An exception was expected");
}
catch (SQLException e)
{
// this is expected as the connection has been forcibly closed from backend
}
assertTrue(con.isClosed());
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Ensures that a statement generated by a proxied connection returns the
 * proxied connection from getConnection() [not the physical connection].
 */
public void testStatementConnection() {
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
Statement s = con.createStatement();
Connection conRetrieved = s.getConnection();
assertTrue(con.getClass().equals(conRetrieved.getClass()));
assertTrue(con.equals(conRetrieved));
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Ensures that the Statement proxy generated by the Connection handle
 * throws the correct kind of exception.
 */
public void testStatementProxy() {
Statement s = null;
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
s = con.createStatement();
}
catch (SQLException e)
{
fail(e.getMessage());
}
try
{
s.executeQuery("SELECT * FROM THIS_TABLE_SHOULD_NOT_EXIST");
fail("An SQL exception was not thrown that should have been");
}
catch (SQLException e)
{
; // This is the expected and correct path
}
catch (Exception e)
{
fail("bad exception; was expecting SQLException, not" +
e.getClass().getName());
}
}
/**
 * Ensures that a prepared statement generated by a proxied connection
 * returns the proxied connection from getConnection() [not the physical
 * connection].
 */
public void testPreparedStatementConnection() {
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
PreparedStatement s = con.prepareStatement("select 'x'");
Connection conRetrieved = s.getConnection();
assertTrue(con.getClass().equals(conRetrieved.getClass()));
assertTrue(con.equals(conRetrieved));
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Ensures that a callable statement generated by a proxied connection
 * returns the proxied connection from getConnection() [not the physical
 * connection].
 */
public void testCallableStatementConnection() {
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
CallableStatement s = con.prepareCall("select 'x'");
Connection conRetrieved = s.getConnection();
assertTrue(con.getClass().equals(conRetrieved.getClass()));
assertTrue(con.equals(conRetrieved));
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Ensure that a statement created from a pool can be used
 * like any other statement in regard to pg extensions.
 */
public void testStatementsProxyPGStatement() {
try
{
PooledConnection pc = getPooledConnection();
con = pc.getConnection();
Statement s = con.createStatement();
boolean b = ((org.postgresql.PGStatement)s).isUseServerPrepare();
PreparedStatement ps = con.prepareStatement("select 'x'");
b = ((org.postgresql.PGStatement)ps).isUseServerPrepare();
CallableStatement cs = con.prepareCall("select 'x'");
b = ((org.postgresql.PGStatement)cs).isUseServerPrepare();
}
catch (SQLException e)
{
fail(e.getMessage());
}
}
/**
 * Helper class to remove a listener during event dispatching.
 */
private class RemoveClose implements ConnectionEventListener
{
public void connectionClosed(ConnectionEvent event)
{
((PooledConnection)event.getSource()).removeConnectionEventListener(this);
}
public void connectionErrorOccurred(ConnectionEvent event)
{
((PooledConnection)event.getSource()).removeConnectionEventListener(this);
}
}
/**
 * Helper class that implements the event listener interface, and
 * counts the number of events it sees.
 */
private class CountClose implements ConnectionEventListener
{
private int count = 0, errorCount = 0;
public void connectionClosed(ConnectionEvent event)
{
count++;
}
public void connectionErrorOccurred(ConnectionEvent event)
{
errorCount++;
}
public int getCount()
{
return count;
}
public int getErrorCount()
{
return errorCount;
}
public void clear()
{
count = errorCount = 0;
}
}
// Verifies that the data source round-trips through Java serialization
// with all its configured properties intact.
public void testSerializable() throws IOException, ClassNotFoundException
{
ConnectionPool pool = new ConnectionPool();
pool.setDefaultAutoCommit(false);
pool.setServerName("db.myhost.com");
pool.setDatabaseName("mydb");
pool.setUser("user");
pool.setPassword("pass");
pool.setPortNumber(1111);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(pool);
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
ObjectInputStream ois = new ObjectInputStream(bais);
ConnectionPool pool2 = (ConnectionPool)ois.readObject();
assertEquals(pool.isDefaultAutoCommit(), pool2.isDefaultAutoCommit());
assertEquals(pool.getServerName(), pool2.getServerName());
assertEquals(pool.getDatabaseName(), pool2.getDatabaseName());
assertEquals(pool.getUser(), pool2.getUser());
assertEquals(pool.getPassword(), pool2.getPassword());
assertEquals(pool.getPortNumber(), pool2.getPortNumber());
}
}
| |
/*
* Copyright 2011-2013, by Vladimir Kostyukov and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): Evgenia Krivova
* Pavel Kalaidin
* Jakob Moellers
* Ewald Grusk
* Yuriy Drozd
* Maxim Samoylov
*
*/
package org.la4j.matrix;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Random;
import org.la4j.decomposition.MatrixDecompositor;
import org.la4j.factory.Factory;
import org.la4j.inversion.MatrixInvertor;
import org.la4j.matrix.functor.AdvancedMatrixPredicate;
import org.la4j.matrix.functor.MatrixAccumulator;
import org.la4j.matrix.functor.MatrixFunction;
import org.la4j.matrix.functor.MatrixPredicate;
import org.la4j.matrix.functor.MatrixProcedure;
import org.la4j.vector.Vector;
public abstract class AbstractMatrix implements Matrix {
// Number of rows in this matrix (validated non-negative by the constructor).
protected int rows;
// Number of columns in this matrix (validated non-negative by the constructor).
protected int columns;
// Factory used to build result matrices/vectors for operations that are
// called without an explicit Factory argument.
protected Factory factory;
/**
 * Creates an empty 0x0 matrix backed by the given factory.
 */
protected AbstractMatrix(Factory factory) {
this(factory, 0, 0);
}
/**
 * Creates a matrix of the given shape backed by the given factory.
 * Dimensions must be non-negative.
 */
protected AbstractMatrix(Factory factory, int rows, int columns) {
ensureDimensionsAreNotNegative(rows, columns);
this.factory = factory;
this.rows = rows;
this.columns = columns;
}
/**
 * Overwrites every entry of this matrix with the given scalar value.
 */
@Override
public void assign(double value) {
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            set(row, col, value);
        }
    }
}
/**
 * Returns the number of rows of this matrix.
 */
@Override
public int rows() {
return rows;
}
/**
 * Returns the number of columns of this matrix.
 */
@Override
public int columns() {
return columns;
}
/**
 * Copies row {@code i} of this matrix into a new vector built by the
 * default factory.
 */
@Override
public Vector getRow(int i) {
    return getRow(i, factory);
}
/**
 * Copies row {@code i} of this matrix into a new vector built by the
 * given factory.
 */
@Override
public Vector getRow(int i, Factory factory) {
    ensureFactoryIsNotNull(factory);
    Vector row = factory.createVector(columns);
    for (int col = 0; col < columns; col++) {
        row.set(col, get(i, col));
    }
    return row;
}
/**
 * Copies column {@code j} of this matrix into a new vector built by the
 * default factory.
 */
@Override
public Vector getColumn(int j) {
    return getColumn(j, factory);
}
/**
 * Copies column {@code j} of this matrix into a new vector built by the
 * given factory.
 */
@Override
public Vector getColumn(int j, Factory factory) {
    ensureFactoryIsNotNull(factory);
    Vector column = factory.createVector(rows);
    for (int row = 0; row < rows; row++) {
        column.set(row, get(row, j));
    }
    return column;
}
/**
 * Overwrites row {@code i} of this matrix with the entries of the given
 * vector.
 *
 * @throws IllegalArgumentException if the vector is null or its length does
 *         not match the number of columns
 */
@Override
public void setRow(int i, Vector row) {
    if (row == null) {
        throw new IllegalArgumentException("Row can't be null.");
    }
    if (row.length() != columns) {
        throw new IllegalArgumentException("Wrong row length: "
                                           + row.length());
    }
    for (int col = 0; col < row.length(); col++) {
        set(i, col, row.get(col));
    }
}
/**
 * Overwrites column {@code j} of this matrix with the entries of the given
 * vector.
 *
 * @throws IllegalArgumentException if the vector is null or its length does
 *         not match the number of rows
 */
@Override
public void setColumn(int j, Vector column) {
    if (column == null) {
        throw new IllegalArgumentException("Column can't be null.");
    }
    if (column.length() != rows) {
        throw new IllegalArgumentException("Wrong column length: "
                                           + column.length());
    }
    for (int row = 0; row < column.length(); row++) {
        set(row, j, column.get(row));
    }
}
/**
 * Exchanges rows {@code i} and {@code j} in place; a no-op when i == j.
 */
@Override
public void swapRows(int i, int j) {
    if (i != j) {
        Vector first = getRow(i);
        Vector second = getRow(j);
        setRow(i, second);
        setRow(j, first);
    }
}
/**
 * Exchanges columns {@code i} and {@code j} in place; a no-op when i == j.
 */
@Override
public void swapColumns(int i, int j) {
    if (i != j) {
        Vector first = getColumn(i);
        Vector second = getColumn(j);
        setColumn(i, second);
        setColumn(j, first);
    }
}
/**
 * Returns the transpose of this matrix, built by the default factory.
 */
@Override
public Matrix transpose() {
    return transpose(factory);
}
/**
 * Returns the transpose of this matrix, built by the given factory:
 * result[j][i] == this[i][j].
 */
@Override
public Matrix transpose(Factory factory) {
    ensureFactoryIsNotNull(factory);
    Matrix transposed = factory.createMatrix(columns, rows);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            transposed.set(col, row, get(row, col));
        }
    }
    return transposed;
}
/**
 * Returns this matrix rotated 90 degrees, built by the default factory.
 */
public Matrix rotate() {
    return rotate(factory);
}
/**
 * Returns this matrix rotated 90 degrees, built by the given factory:
 * entry (i, j) moves to (j, rows - 1 - i).
 */
public Matrix rotate(Factory factory) {
    ensureFactoryIsNotNull(factory);
    Matrix rotated = factory.createMatrix(columns, rows);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            rotated.set(col, rows - 1 - row, get(row, col));
        }
    }
    return rotated;
}
// private double determinantByCrout() {
// Matrix that = copy();
//
// BigDecimal big;
// BigDecimal sum;
// BigDecimal cur;
// BigDecimal t;
// int sign = 1;
//
// for (int i = 0; i < rows; i++) {
// boolean nonzero = false;
// for (int j = 0; j < columns; j++)
// if (Math.abs(that.get(i, j)) > Matrices.EPS) {
// nonzero = true;
// }
// if (!nonzero) {
// return 0;
// }
// }
//
// for (int j = 0; j < columns; j++) {
//
// for (int i = 0; i < j; i++) {
// sum = new BigDecimal(that.get(i, j));
// for (int k = 0; k < i; k++) {
// sum = sum.subtract(new BigDecimal(that.get(i, k)).multiply(
// new BigDecimal(that.get(k, j))));
// }
// that.set(i, j, sum.doubleValue());
// }
//
// big = new BigDecimal(BigInteger.ZERO);
// int imax = -1;
// for (int i = j; i < rows; i++) {
// sum = new BigDecimal(that.get(i, j));
// for (int k = 0; k < j; k++) {
// sum = sum.subtract(new BigDecimal(that.get(i, k)).multiply(
// new BigDecimal(that.get(k, j))));
// }
// that.set(i, j, sum.doubleValue());
// cur = sum.abs();
// if (cur.compareTo(big) > 0) {
// big = cur;
// imax = i;
// }
// }
//
// if (j != imax) {
// for (int k = 0; k < rows; k++) {
// t = new BigDecimal(that.get(j, k));
// that.set(j, k, that.get(imax, k));
// that.set(imax, k, t.doubleValue());
// }
// sign = -sign;
// }
//
// if (j != rows - 1) {
// for (int i = j + 1; i < rows; i++) {
// if (Math.abs(that.get(j, j)) < Matrices.EPS) {
// return 0.0;
// } else {
// that.set(i, j, (new BigDecimal(that.get(i, j)).divide
// (new BigDecimal(that.get(j, j)), Matrices.ROUND_FACTOR,
// RoundingMode.CEILING)).doubleValue());
// }
// }
// }
// }
//
// return sign * that.diagonalProduct();
// }
/**
 * Computes the determinant of this (square) matrix.
 *
 * Sizes 0..3 use closed-form cofactor formulas. Larger matrices use an
 * LU(P) decomposition: det = product of U's diagonal times the sign of the
 * permutation encoded by the pivot matrix P.
 *
 * @return the determinant
 * @throws IllegalStateException if the matrix is not square
 */
@Override
public double determinant() {
if (rows != columns) {
throw new IllegalStateException("Can't compute determinant for " +
"non-square matrix.");
}
if (rows == 0) {
return 0.0;
} else if (rows == 1) {
return get(0, 0);
} else if (rows == 2) {
return get(0, 0) * get(1, 1) -
get(0, 1) * get(1, 0);
} else if (rows == 3) {
// Rule of Sarrus for the 3x3 case.
return get(0, 0) * get(1, 1) * get(2, 2) +
get(0, 1) * get(1, 2) * get(2, 0) +
get(0, 2) * get(1, 0) * get(2, 1) -
get(0, 2) * get(1, 1) * get(2, 0) -
get(0, 1) * get(1, 0) * get(2, 2) -
get(0, 0) * get(1, 2) * get(2, 1);
}
Matrix lup[] = decompose(Matrices.LU_DECOMPOSITOR);
Matrix u = lup[Matrices.LU_U];
Matrix p = lup[Matrices.LU_P];
double result = u.diagonalProduct();
// TODO: we can do that in O(n log n)
// just google: "counting inversions divide and conqueror"
// Decode the permutation: row i of P has a single positive entry at
// column permutations[i].
int permutations[] = new int[p.rows()];
for (int i = 0; i < p.rows(); i++) {
for (int j = 0; j < p.columns(); j++) {
if (p.get(i, j) > 0.0) {
permutations[i] = j;
break;
}
}
}
// The permutation's parity (count of inversions, O(n^2) here) gives the
// sign contributed by the row exchanges.
int sign = 1;
for (int i = 0; i < permutations.length; i++) {
for (int j = i + 1; j < permutations.length; j++) {
if (permutations[j] < permutations[i]) {
sign *= -1;
}
}
}
return sign * result;
}
/**
 * Computes the rank of this matrix as the number of singular values that
 * exceed a size-dependent tolerance (SVD-based).
 *
 * @return the numerical rank; 0 for an empty matrix
 */
@Override
public int rank() {
if (rows == 0 || columns == 0) {
return 0;
}
// TODO:
// handle small (1x1, 1xn, nx1, 2x2, 2xn, nx2, 3x3, 3xn, nx3)
// matrices without SVD
Matrix usv[] = decompose(Matrices.SINGULAR_VALUE_DECOMPOSITOR);
Matrix s = usv[Matrices.SVD_S];
// Standard numerical-rank tolerance scaled by the largest singular value
// (assumes s.get(0, 0) is the largest — TODO confirm the decompositor
// returns singular values in descending order).
double tolerance = Math.max(rows, columns) * s.get(0, 0) * Matrices.EPS;
int result = 0;
for (int i = 0; i < s.rows(); i++) {
if (s.get(i, i) > tolerance) {
result++;
}
}
return result;
}
/**
 * Raises this (square) matrix to the {@code n}-th power using the default
 * factory.
 */
public Matrix power(int n) {
    return power(n, factory);
}
/**
 * Raises this (square) matrix to the {@code n}-th power by binary
 * exponentiation (exponentiation by squaring), using O(log n) matrix
 * multiplications.
 *
 * @param n the non-negative exponent; {@code n == 0} yields the identity
 * @param factory factory used to create the identity/result matrices
 * @return this matrix raised to the power {@code n}
 * @throws IllegalArgumentException if {@code n} is negative
 */
public Matrix power(int n, Factory factory) {
    if (n < 0) {
        // Fixed message: n == 0 is accepted (returns identity); only
        // negative exponents are rejected. The old text claimed the
        // exponent "has to be larger than 0".
        throw new IllegalArgumentException(
                "The exponent can not be negative.");
    }
    Matrix result = factory.createIdentityMatrix(rows);
    Matrix that = this;
    while (n > 0) {
        if (n % 2 == 1) {
            result = result.multiply(that);
        }
        n /= 2;
        that = that.multiply(that);
    }
    return result;
}
/**
 * Returns this matrix scaled by {@code value}, built by the default factory.
 */
@Override
public Matrix multiply(double value) {
    return multiply(value, factory);
}
/**
 * Returns this matrix scaled by {@code value}, built by the given factory.
 */
@Override
public Matrix multiply(double value, Factory factory) {
    ensureFactoryIsNotNull(factory);
    Matrix scaled = blank(factory);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            scaled.set(row, col, get(row, col) * value);
        }
    }
    return scaled;
}
/**
 * Computes the matrix-vector product, built by the default factory.
 */
@Override
public Vector multiply(Vector vector) {
    return multiply(vector, factory);
}
/**
 * Computes the matrix-vector product, built by the given factory.
 *
 * @throws IllegalArgumentException if the vector is null or its length does
 *         not match this matrix's column count
 */
@Override
public Vector multiply(Vector vector, Factory factory) {
    ensureFactoryIsNotNull(factory);
    if (vector == null) {
        throw new IllegalArgumentException("Vector can't be null.");
    }
    if (columns != vector.length()) {
        throw new IllegalArgumentException("Wrong vector length: "
                                           + vector.length());
    }
    Vector product = factory.createVector(rows);
    for (int row = 0; row < rows; row++) {
        // Dot product of this row with the input vector.
        double dot = 0;
        for (int col = 0; col < columns; col++) {
            dot += get(row, col) * vector.get(col);
        }
        product.set(row, dot);
    }
    return product;
}
/**
 * Computes the matrix-matrix product, built by the default factory.
 */
@Override
public Matrix multiply(Matrix matrix) {
    return multiply(matrix, factory);
}
/**
 * Computes the matrix-matrix product, built by the given factory.
 *
 * @throws IllegalArgumentException if the matrix is null or its row count
 *         does not match this matrix's column count
 */
@Override
public Matrix multiply(Matrix matrix, Factory factory) {
    ensureFactoryIsNotNull(factory);
    if (matrix == null) {
        throw new IllegalArgumentException("Matrix can't be null.");
    }
    if (columns != matrix.rows()) {
        throw new IllegalArgumentException("Wrong matrix dimensions: "
                                           + matrix.rows() + "x"
                                           + matrix.columns());
    }
    Matrix product = factory.createMatrix(rows, matrix.columns());
    // Materialize each column of the right operand once and reuse it for
    // every row of this matrix.
    for (int col = 0; col < matrix.columns(); col++) {
        Vector rhsColumn = matrix.getColumn(col);
        for (int row = 0; row < rows; row++) {
            double dot = 0;
            for (int k = 0; k < columns; k++) {
                dot += get(row, k) * rhsColumn.get(k);
            }
            product.set(row, col, dot);
        }
    }
    return product;
}
/**
 * Returns this matrix with {@code value} subtracted from every entry,
 * built by the default factory.
 */
@Override
public Matrix subtract(double value) {
return subtract(value, factory);
}
/**
 * Returns this matrix with {@code value} subtracted from every entry;
 * implemented as addition of the negated scalar.
 */
@Override
public Matrix subtract(double value, Factory factory) {
return add(-value, factory);
}
/**
 * Computes the entry-wise difference of this matrix and the given one,
 * built by the default factory.
 */
@Override
public Matrix subtract(Matrix matrix) {
    return subtract(matrix, factory);
}
/**
 * Computes the entry-wise difference of this matrix and the given one,
 * built by the given factory.
 *
 * @throws IllegalArgumentException if the matrix is null or its shape
 *         differs from this matrix's shape
 */
@Override
public Matrix subtract(Matrix matrix, Factory factory) {
    ensureFactoryIsNotNull(factory);
    if (matrix == null) {
        throw new IllegalArgumentException("Matrix can't be null.");
    }
    if (rows != matrix.rows() || columns != matrix.columns()) {
        throw new IllegalArgumentException("Wrong matrix dimensions: "
                                           + matrix.rows() + "x"
                                           + matrix.columns());
    }
    Matrix difference = blank(factory);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            difference.set(row, col, get(row, col) - matrix.get(row, col));
        }
    }
    return difference;
}
/**
 * Returns this matrix with {@code value} added to every entry, built by
 * the default factory.
 */
@Override
public Matrix add(double value) {
    return add(value, factory);
}
/**
 * Returns this matrix with {@code value} added to every entry, built by
 * the given factory.
 */
@Override
public Matrix add(double value, Factory factory) {
    ensureFactoryIsNotNull(factory);
    Matrix shifted = blank(factory);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            shifted.set(row, col, get(row, col) + value);
        }
    }
    return shifted;
}
/**
 * Computes the entry-wise sum of this matrix and the given one, built by
 * the default factory.
 */
@Override
public Matrix add(Matrix matrix) {
    return add(matrix, factory);
}
/**
 * Computes the entry-wise sum of this matrix and the given one, built by
 * the given factory.
 *
 * @throws IllegalArgumentException if the matrix is null or its shape
 *         differs from this matrix's shape
 */
@Override
public Matrix add(Matrix matrix, Factory factory) {
    ensureFactoryIsNotNull(factory);
    if (matrix == null) {
        throw new IllegalArgumentException("Matrix can't be null.");
    }
    if (rows != matrix.rows() || columns != matrix.columns()) {
        throw new IllegalArgumentException("Wrong matrix dimensions: "
                                           + matrix.rows() + "x"
                                           + matrix.columns());
    }
    Matrix sum = blank(factory);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            sum.set(row, col, get(row, col) + matrix.get(row, col));
        }
    }
    return sum;
}
/**
 * Returns this matrix with every entry divided by {@code value}, built by
 * the default factory.
 */
@Override
public Matrix divide(double value) {
return divide(value, factory);
}
/**
 * Returns this matrix with every entry divided by {@code value};
 * implemented as multiplication by the reciprocal. Note: value == 0
 * produces infinities/NaNs rather than throwing.
 */
@Override
public Matrix divide(double value, Factory factory) {
return multiply(1.0 / value, factory);
}
/**
 * Computes the Kronecker product of this matrix with the given one, built
 * by the default factory.
 */
@Override
public Matrix kroneckerProduct(Matrix matrix) {
return kroneckerProduct(matrix, factory);
}
/**
 * Computes the Kronecker product: an (rows*p) x (columns*q) matrix where
 * each entry a[i][j] of this matrix is replaced by the block
 * a[i][j] * matrix.
 *
 * @throws IllegalArgumentException if the matrix is null
 */
@Override
public Matrix kroneckerProduct(Matrix matrix, Factory factory) {
ensureFactoryIsNotNull(factory);
if (matrix == null) {
throw new IllegalArgumentException("Matrix can't be null.");
}
int n = rows() * matrix.rows();
int m = columns() * matrix.columns();
Matrix result = factory.createMatrix(n, m);
int p = matrix.rows();
int q = matrix.columns();
// (i / p, j / q) indexes the entry of this matrix; (i % p, j % q)
// indexes within the replicated block.
for (int i = 0; i < n; i++) {
for (int j = 0; j < m; j++) {
result.set(i, j, get(i / p, j / q) * matrix.get(i % p, j % q));
}
}
return result;
}
/**
 * Computes the trace: the sum of the main-diagonal entries.
 */
@Override
public double trace() {
    double sum = 0.0;
    for (int d = 0; d < rows; d++) {
        sum += get(d, d);
    }
    return sum;
}
/**
 * Computes the product of the main-diagonal entries. Accumulates in
 * BigDecimal to limit floating-point drift, then rounds to ROUND_FACTOR
 * decimal places with CEILING rounding before converting back to double.
 */
@Override
public double diagonalProduct() {
BigDecimal result = BigDecimal.ONE;
for (int i = 0; i < rows; i++) {
result = result.multiply(BigDecimal.valueOf(get(i, i)));
}
return result.setScale(Matrices.ROUND_FACTOR,
RoundingMode.CEILING).doubleValue();
}
/**
 * Computes the product of all matrix entries via a fold with a product
 * accumulator seeded with 1.
 */
@Override
public double product() {
return fold(Matrices.asProductAccumulator(1));
}
/**
 * Computes the Hadamard (entry-wise) product of this matrix and the given
 * one, built by the default factory.
 */
@Override
public Matrix hadamardProduct(Matrix matrix) {
    return hadamardProduct(matrix, factory);
}
/**
 * Computes the Hadamard (entry-wise) product of this matrix and the given
 * one, built by the given factory.
 *
 * @throws IllegalArgumentException if the matrix is null or the shapes differ
 */
@Override
public Matrix hadamardProduct(Matrix matrix, Factory factory) {
    ensureFactoryIsNotNull(factory);
    if (matrix == null) {
        throw new IllegalArgumentException("Matrix can not be null.");
    }
    if ((columns != matrix.columns()) || (rows != matrix.rows())) {
        throw new IllegalArgumentException(
                "Matrices dimensions are not equal: " + matrix.rows() + "x"
                + matrix.columns() + " not equal to " + rows + "x"
                + columns);
    }
    Matrix product = factory.createMatrix(rows, columns);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            product.set(row, col, matrix.get(row, col) * get(row, col));
        }
    }
    return product;
}
/**
 * Computes the sum of all matrix entries via a fold with a sum accumulator
 * seeded with 0.
 */
@Override
public double sum() {
return fold(Matrices.asSumAccumulator(0));
}
/**
 * Reduces this matrix to upper-triangular form, built by the default
 * factory.
 */
@Override
public Matrix triangularize() {
return triangularize(factory);
}
/**
 * Reduces this matrix to upper-triangular form by Gaussian elimination
 * without pivoting; already-triangular matrices are returned as a copy.
 *
 * NOTE(review): a zero pivot get(i, i) causes division by zero (Inf/NaN
 * entries) since no row exchange is performed — confirm callers only pass
 * matrices with non-zero leading pivots.
 */
@Override
public Matrix triangularize(Factory factory) {
ensureFactoryIsNotNull(factory);
if (is(Matrices.UPPER_TRIANGULAR_MATRIX)
|| is(Matrices.LOWER_TRIANGULAR_MARTIX)) {
return copy(factory);
}
Matrix result = factory.createMatrix(rows, columns);
for (int i = 0; i < rows; i++) {
for (int j = i + 1; j < rows; j++) {
// Eliminate entry (j, i) by subtracting c times row i.
double c = get(j, i) / get(i, i);
for (int k = i; k < columns; k++) {
if (k == i) {
result.set(j, k, 0.0);
} else {
result.set(j, k, get(j, k) - (get(i, k) * c));
}
}
}
}
return result;
}
/**
 * Decomposes this matrix with the given decompositor, using the default
 * factory for the resulting matrices.
 */
@Override
public Matrix[] decompose(MatrixDecompositor decompositor) {
return decompose(decompositor, factory);
}
/**
 * Decomposes this matrix with the given decompositor and factory.
 */
@Override
public Matrix[] decompose(MatrixDecompositor decompositor,
Factory factory) {
return decompositor.decompose(this, factory);
}
/**
 * Inverts this matrix with the given invertor, using the default factory.
 */
@Override
public Matrix inverse(MatrixInvertor invertor) {
return inverse(invertor, factory);
}
/**
 * Inverts this matrix with the given invertor and factory.
 */
@Override
public Matrix inverse(MatrixInvertor invertor, Factory factory) {
return invertor.inverse(this, factory);
}
/**
 * Creates an all-zero matrix of the same shape, built by the default
 * factory.
 */
@Override
public Matrix blank() {
return blank(factory);
}
/**
 * Creates an all-zero matrix of the same shape, built by the given factory.
 */
@Override
public Matrix blank(Factory factory) {
ensureFactoryIsNotNull(factory);
return factory.createMatrix(rows, columns);
}
/**
 * Creates a copy of this matrix, built by the default factory.
 */
@Override
public Matrix copy() {
return copy(factory);
}
/**
 * Creates a copy of this matrix, built by the given factory.
 */
@Override
public Matrix copy(Factory factory) {
ensureFactoryIsNotNull(factory);
return factory.createMatrix(this);
}
/**
 * Resizes this matrix to the given shape, built by the default factory.
 */
@Override
public Matrix resize(int rows, int columns) {
return resize(rows, columns, factory);
}
/**
 * Resizes the row dimension only, keeping the current column count.
 */
@Override
public Matrix resizeRows(int rows) {
return resize(rows, columns, factory);
}
/**
 * Resizes the row dimension only, using the given factory.
 */
@Override
public Matrix resizeRows(int rows, Factory factory) {
return resize(rows, columns, factory);
}
/**
 * Resizes the column dimension only, keeping the current row count.
 */
@Override
public Matrix resizeColumns(int columns) {
return resize(rows, columns, factory);
}
/**
 * Resizes the column dimension only, using the given factory.
 */
@Override
public Matrix resizeColumns(int columns, Factory factory) {
return resize(rows, columns, factory);
}
/**
 * Resizes this matrix to the given shape, built by the given factory.
 * Entries in the overlapping region are copied; any new cells are left at
 * the factory's default (zero) value.
 */
@Override
public Matrix resize(int rows, int columns, Factory factory) {
    ensureFactoryIsNotNull(factory);
    Matrix resized = factory.createMatrix(rows, columns);
    int copyRows = Math.min(rows, this.rows);
    int copyCols = Math.min(columns, this.columns);
    for (int i = 0; i < copyRows; i++) {
        for (int j = 0; j < copyCols; j++) {
            resized.set(i, j, get(i, j));
        }
    }
    return resized;
}
/**
 * Returns a randomly shuffled copy of this matrix, built by the default
 * factory.
 */
public Matrix shuffle() {
return shuffle(factory);
}
/**
 * Returns a randomly shuffled copy of this matrix: each entry (i, j) is
 * swapped with a random entry at or "after" it in both dimensions.
 * Non-deterministic — a fresh unseeded Random is used per call.
 *
 * NOTE(review): this swaps an independent random row AND column index per
 * cell, which is not the classic flat Fisher-Yates over all rows*columns
 * positions — confirm the intended distribution.
 */
public Matrix shuffle(Factory factory) {
ensureFactoryIsNotNull(factory);
Matrix result = copy(factory);
// Conduct Fisher-Yates shuffle
Random rnd = new Random();
for (int i = 0; i < rows; i++) {
for (int j = 0; j < columns; j++) {
int ii = rnd.nextInt(rows - i) + i;
int jj = rnd.nextInt(columns - j) + j;
double a = result.get(ii, jj);
result.set(ii, jj, result.get(i, j));
result.set(i, j, a);
}
}
return result;
}
/**
 * Copies the half-open sub-matrix [fromRow, untilRow) x
 * [fromColumn, untilColumn), built by the default factory.
 */
@Override
public Matrix slice(int fromRow, int fromColumn, int untilRow,
int untilColumn) {
    return slice(fromRow, fromColumn, untilRow, untilColumn, factory);
}
/**
 * Copies the half-open sub-matrix [fromRow, untilRow) x
 * [fromColumn, untilColumn), built by the given factory.
 */
@Override
public Matrix slice(int fromRow, int fromColumn, int untilRow,
int untilColumn, Factory factory) {
    ensureFactoryIsNotNull(factory);
    Matrix sub = factory.createMatrix(untilRow - fromRow,
                                      untilColumn - fromColumn);
    for (int row = fromRow; row < untilRow; row++) {
        for (int col = fromColumn; col < untilColumn; col++) {
            sub.set(row - fromRow, col - fromColumn, get(row, col));
        }
    }
    return sub;
}
/**
 * Copies the top-left sub-matrix up to (but excluding) the given row and
 * column, built by the default factory.
 */
@Override
public Matrix sliceTopLeft(int untilRow, int untilColumn) {
return slice(0, 0, untilRow, untilColumn, factory);
}
/**
 * Copies the top-left sub-matrix up to (but excluding) the given row and
 * column, built by the given factory.
 */
@Override
public Matrix sliceTopLeft(int untilRow, int untilColumn, Factory factory) {
return slice(0, 0, untilRow, untilColumn, factory);
}
/**
 * Copies the bottom-right sub-matrix starting at the given row and column,
 * built by the default factory.
 */
@Override
public Matrix sliceBottomRight(int fromRow, int fromColumn) {
    return slice(fromRow, fromColumn, rows, columns, factory);
}
/**
 * Copies the bottom-right sub-matrix starting at the given row and column,
 * built by the given factory.
 */
@Override
public Matrix sliceBottomRight(int fromRow, int fromColumn, Factory fac) {
    // BUG FIX: this overload previously ignored its 'fac' argument and
    // always used the instance's default factory.
    return slice(fromRow, fromColumn, rows, columns, fac);
}
/**
 * Returns the factory this matrix uses for operations called without an
 * explicit Factory argument.
 */
@Override
public Factory factory() {
return factory;
}
/**
 * Applies the procedure to every entry of this matrix in row-major order.
 */
@Override
public void each(MatrixProcedure procedure) {
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            procedure.apply(row, col, get(row, col));
        }
    }
}
/** Applies the given procedure to every cell in row {@code i}. */
@Override
public void eachInRow(int i, MatrixProcedure procedure) {
    for (int col = 0; col < columns; col++) {
        procedure.apply(i, col, get(i, col));
    }
}
/** Applies the given procedure to every cell in column {@code j}. */
@Override
public void eachInColumn(int j, MatrixProcedure procedure) {
    for (int row = 0; row < rows; row++) {
        procedure.apply(row, j, get(row, j));
    }
}
/**
 * Applies the given procedure to every cell whose magnitude exceeds
 * {@code Matrices.EPS} (i.e. every "non-zero" cell), in row-major order.
 */
@Override
public void eachNonZero(MatrixProcedure procedure) {
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            double value = get(row, col);
            if (Math.abs(value) > Matrices.EPS) {
                procedure.apply(row, col, value);
            }
        }
    }
}
/** Applies the given procedure to every non-zero cell in row {@code i}. */
@Override
public void eachNonZeroInRow(int i, MatrixProcedure procedure) {
    for (int col = 0; col < columns; col++) {
        double value = get(i, col);
        if (Math.abs(value) > Matrices.EPS) {
            procedure.apply(i, col, value);
        }
    }
}
/** Applies the given procedure to every non-zero cell in column {@code j}. */
@Override
public void eachNonZeroInColumn(int j, MatrixProcedure procedure) {
    for (int row = 0; row < rows; row++) {
        double value = get(row, j);
        if (Math.abs(value) > Matrices.EPS) {
            procedure.apply(row, j, value);
        }
    }
}
/**
 * Returns a new matrix obtained by applying the given function to every
 * cell, built with this matrix's own factory.
 */
@Override
public Matrix transform(MatrixFunction function) {
    return transform(function, factory);
}
/**
 * Returns a new matrix obtained by applying the given function to every
 * cell of this matrix, built with the given factory. This matrix is not
 * modified.
 */
@Override
public Matrix transform(MatrixFunction function, Factory factory) {
    Matrix out = blank(factory);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            out.set(row, col, function.evaluate(row, col, get(row, col)));
        }
    }
    return out;
}
/**
 * Returns a copy of this matrix in which only cell (i, j) has been
 * transformed by the given function, built with this matrix's own factory.
 */
@Override
public Matrix transform(int i, int j, MatrixFunction function) {
    return transform(i, j, function, factory);
}
/**
 * Returns a copy of this matrix in which only cell (i, j) has been
 * transformed by the given function, built with the given factory.
 */
@Override
public Matrix transform(int i, int j, MatrixFunction function,
        Factory factory) {
    Matrix clone = copy(factory);
    double updated = function.evaluate(i, j, clone.get(i, j));
    clone.set(i, j, updated);
    return clone;
}
/** Transforms every cell of this matrix in place with the given function. */
@Override
public void update(MatrixFunction function) {
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            set(row, col, function.evaluate(row, col, get(row, col)));
        }
    }
}
/** Transforms the single cell (i, j) in place with the given function. */
@Override
public void update(int i, int j, MatrixFunction function) {
    double current = get(i, j);
    set(i, j, function.evaluate(i, j, current));
}
/**
 * Feeds every cell (row-major) into the accumulator and returns the
 * accumulated value.
 */
@Override
public double fold(MatrixAccumulator accumulator) {
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            accumulator.update(row, col, get(row, col));
        }
    }
    return accumulator.accumulate();
}
/** Folds row {@code i} through the accumulator and returns the result. */
@Override
public double foldRow(int i, MatrixAccumulator accumulator) {
    for (int col = 0; col < columns; col++) {
        accumulator.update(i, col, get(i, col));
    }
    return accumulator.accumulate();
}
/** Folds column {@code j} through the accumulator and returns the result. */
@Override
public double foldColumn(int j, MatrixAccumulator accumulator) {
    for (int row = 0; row < rows; row++) {
        accumulator.update(row, j, get(row, j));
    }
    return accumulator.accumulate();
}
/**
 * Checks whether this matrix satisfies the given predicate: the shape test
 * first (delegated to an AdvancedMatrixPredicate when available, otherwise
 * requiring a non-empty matrix), then every cell. Iteration stops as soon
 * as the predicate fails.
 */
@Override
public boolean is(MatrixPredicate predicate) {
    boolean holds;
    if (predicate instanceof AdvancedMatrixPredicate) {
        holds = ((AdvancedMatrixPredicate) predicate).test(rows, columns);
    } else {
        holds = rows > 0 && columns > 0;
    }
    for (int row = 0; holds && row < rows; row++) {
        for (int col = 0; holds && col < columns; col++) {
            holds = predicate.test(row, col, get(row, col));
        }
    }
    return holds;
}
/**
 * Returns this matrix itself as its "unsafe" (unchecked-access) view;
 * this implementation performs no extra wrapping.
 */
@Override
public Matrix unsafe() {
    return this;
}
/**
 * Computes a hash over all cells using the classic 17/37 scheme.
 * NOTE(review): each value is truncated to a long before hashing, so the
 * fractional part is ignored — only approximately consistent with the
 * epsilon-based equals; confirm this is intentional.
 */
@Override
public int hashCode() {
    int hash = 17;
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            long bits = (long) get(row, col);
            hash = 37 * hash + (int) (bits ^ (bits >>> 32));
        }
    }
    return hash;
}
/**
 * Compares two matrices cell-by-cell. Two cells are considered equal when
 * they are bitwise equal, when their absolute difference is below
 * {@code Matrices.EPS}, or when their relative difference is below
 * {@code Matrices.EPS}. Returns early on the first mismatch.
 */
@Override
public boolean equals(Object object) {
    if (this == object) {
        return true;
    }
    // instanceof is false for null, covering the null check as well.
    if (!(object instanceof Matrix)) {
        return false;
    }
    Matrix other = (Matrix) object;
    if (rows != other.rows() || columns != other.columns()) {
        return false;
    }
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            double a = get(row, col);
            double b = other.get(row, col);
            if (a == b) {
                continue;
            }
            double diff = Math.abs(a - b);
            // Negated "close" checks keep NaN behaving as "not equal",
            // exactly as the original chained ternaries did.
            boolean close = diff < Matrices.EPS
                    || diff / Math.max(Math.abs(a), Math.abs(b))
                            < Matrices.EPS;
            if (!close) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Renders the matrix as rows of fixed-precision values, right-aligned per
 * column. The width of each column is sized to its widest value.
 */
@Override
public String toString() {
    final int precision = 3;
    // First pass: compute a print width for every column.
    int[] widths = new int[columns];
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            double value = get(row, col);
            // digits + precision (+1 for the sign of values in (-1, 0)) + 2
            int size = String.valueOf((long) value).length()
                    + precision + (value < 0 && value > -1.0 ? 1 : 0) + 2;
            if (size > widths[col]) {
                widths[col] = size;
            }
        }
    }
    // Second pass: format each cell into its column width.
    StringBuilder out = new StringBuilder();
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < columns; col++) {
            out.append(String.format("%" + Integer.toString(widths[col])
                    + "." + precision + "f", get(row, col)));
        }
        out.append("\n");
    }
    return out.toString();
}
/**
 * Guard: rejects a null factory argument.
 *
 * @throws IllegalArgumentException if {@code factory} is null.
 */
protected void ensureFactoryIsNotNull(Factory factory) {
    if (factory != null) {
        return;
    }
    throw new IllegalArgumentException("Factory can't be null.");
}
/**
 * Guard: rejects negative matrix dimensions.
 *
 * @throws IllegalArgumentException if either dimension is negative.
 */
protected void ensureDimensionsAreNotNegative(int rows, int columns) {
    if (rows >= 0 && columns >= 0) {
        return;
    }
    throw new IllegalArgumentException("Wrong matrix dimensions: "
            + rows + "x" + columns);
}
}
| |
package com.konkerlabs.platform.registry.test.business.services;
import com.konkerlabs.platform.registry.business.model.*;
import com.konkerlabs.platform.registry.business.model.validation.CommonValidations;
import com.konkerlabs.platform.registry.business.repositories.AlertTriggerRepository;
import com.konkerlabs.platform.registry.business.repositories.DeviceModelRepository;
import com.konkerlabs.platform.registry.business.repositories.LocationRepository;
import com.konkerlabs.platform.registry.business.repositories.TenantRepository;
import com.konkerlabs.platform.registry.business.services.api.AlertTriggerService;
import com.konkerlabs.platform.registry.business.services.api.AlertTriggerService.Validations;
import com.konkerlabs.platform.registry.business.services.api.ApplicationService;
import com.konkerlabs.platform.registry.business.services.api.ServiceResponse;
import com.konkerlabs.platform.registry.config.EmailConfig;
import com.konkerlabs.platform.registry.config.EventStorageConfig;
import com.konkerlabs.platform.registry.config.PubServerConfig;
import com.konkerlabs.platform.registry.test.base.*;
import com.lordofthejars.nosqlunit.annotation.UsingDataSet;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.time.Instant;
import java.util.List;
import static com.konkerlabs.platform.registry.test.base.matchers.ServiceResponseMatchers.hasErrorMessage;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
/**
 * Integration tests for {@code AlertTriggerService}: listing, lookup (by
 * guid, by name, by location/device-model/type), removal, update and save,
 * run against Mongo-backed fixtures under the "konker" tenant.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {
        MongoTestConfiguration.class,
        EventRepositoryTestConfiguration.class,
        MongoBillingTestConfiguration.class,
        EmailConfig.class,
        SpringMailTestConfiguration.class,
        BusinessTestConfiguration.class,
        PubServerConfig.class,
        EventStorageConfig.class})
@UsingDataSet(locations = {"/fixtures/tenants.json", "/fixtures/applications.json", "/fixtures/device-model.json", "/fixtures/locations.json"})
public class AlertTriggerServiceTest extends BusinessLayerTestSupport {
    @Rule
    public ExpectedException thrown = ExpectedException.none();
    @Autowired
    private AlertTriggerService alertTriggerService;
    @Autowired
    private AlertTriggerRepository alertTriggerRepository;
    @Autowired
    private TenantRepository tenantRepository;
    @Autowired
    private DeviceModelRepository deviceModelRepository;
    @Autowired
    private LocationRepository locationRepository;
    // Fixture objects persisted in setUp() and shared by every test.
    private AlertTrigger triggerA;
    private AlertTrigger triggerB;
    private DeviceModel deviceModel;
    private Location locationA;
    private Location locationB;
    private Application application;
    private Tenant currentTenant;

    /**
     * Persists one application, one device model, two locations (BR and CL)
     * and two SILENCE triggers — triggerA (BR, 100 min) and triggerB
     * (CL, 200 min) — under the "konker" tenant.
     */
    @Before
    public void setUp() {
        currentTenant = tenantRepository.findByDomainName("konker");
        application = Application.builder()
                .name("smartffkonker")
                .friendlyName("Konker Smart Frig")
                .description("Konker Smart Frig - take pic, tells temperature")
                .tenant(currentTenant)
                .qualifier("konker")
                .registrationDate(Instant.ofEpochMilli(1453320973747L))
                .build();
        deviceModel = DeviceModel.builder()
                .guid("7d51c242-81db-11e6-a8c2-0746f908e887")
                .name("SmartFF")
                .description("Smart ff model")
                .application(application)
                .defaultModel(true)
                .tenant(currentTenant)
                .build();
        deviceModel = deviceModelRepository.save(deviceModel);
        locationA = Location.builder()
                .guid("3bc07c9e-eb48-4c92-97a8-d9c662d1bfcd")
                .name("BR")
                .description("Brazil")
                .application(application)
                .defaultLocation(true)
                .tenant(currentTenant)
                .build();
        locationA = locationRepository.save(locationA);
        locationB = Location.builder()
                .guid("b9cc9543-9230-4c63-a3bf-aaa1e47ffcf4")
                .name("CL")
                .description("Chile")
                .application(application)
                .defaultLocation(false)
                .tenant(currentTenant)
                .build();
        locationB = locationRepository.save(locationB);
        triggerA = AlertTrigger.builder().build();
        triggerA.setGuid("95a79b96-6193-4d13-a85e-8bafc3a44837");
        triggerA.setName("silence a");
        triggerA.setTenant(currentTenant);
        triggerA.setApplication(application);
        triggerA.setDeviceModel(deviceModel);
        triggerA.setLocation(locationA);
        triggerA.setType(AlertTrigger.AlertTriggerType.SILENCE);
        triggerA.setMinutes(100);
        triggerA = alertTriggerRepository.save(triggerA);
        triggerB = AlertTrigger.builder().build();
        triggerB.setGuid("a702273d-dfca-4ca7-b61b-ed4f7b4cfb8e");
        triggerB.setName("silence b");
        triggerB.setTenant(currentTenant);
        triggerB.setApplication(application);
        triggerB.setDeviceModel(deviceModel);
        triggerB.setLocation(locationB);
        triggerB.setType(AlertTrigger.AlertTriggerType.SILENCE);
        triggerB.setMinutes(200);
        triggerB = alertTriggerRepository.save(triggerB);
    }

    // Expects both fixture triggers back; the assertions rely on the
    // listing order matching the insertion order (BR first, then CL).
    @Test
    public void shouldListByTenantAndApplication() {
        ServiceResponse<List<AlertTrigger>> serviceResponse = alertTriggerService.listByTenantAndApplication(currentTenant, application);
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().size(), is(2));
        assertThat(serviceResponse.getResult().get(0).getLocation().getName(), is("BR"));
        assertThat(serviceResponse.getResult().get(0).getType(), is(AlertTrigger.AlertTriggerType.SILENCE));
        assertThat(serviceResponse.getResult().get(1).getLocation().getName(), is("CL"));
        assertThat(serviceResponse.getResult().get(1).getType(), is(AlertTrigger.AlertTriggerType.SILENCE));
    }

    @Test
    public void shouldTryListByTenantAndApplicationWithNullTenant() {
        ServiceResponse<List<AlertTrigger>> serviceResponse = alertTriggerService.listByTenantAndApplication(null, application);
        assertThat(serviceResponse, hasErrorMessage(CommonValidations.TENANT_NULL.getCode()));
    }

    @Test
    public void shouldTryListByTenantAndApplicationWithNullApplication() {
        ServiceResponse<List<AlertTrigger>> serviceResponse = alertTriggerService.listByTenantAndApplication(currentTenant, null);
        assertThat(serviceResponse, hasErrorMessage(ApplicationService.Validations.APPLICATION_NULL.getCode()));
    }

    @Test
    public void shouldFindByTenantAndApplicationAndGuid() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByTenantAndApplicationAndGuid(currentTenant, application, triggerA.getGuid());
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getGuid(), is(triggerA.getGuid()));
    }

    @Test
    public void shouldTryFindByTenantAndApplicationAndGuidNonExistingTrigger() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByTenantAndApplicationAndGuid(currentTenant, application, "000-000");
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_NOT_FOUND.getCode()));
    }

    /***************************** findByTenantAndApplicationAndName *****************************/
    // NOTE(review): despite its name, this test exercises update() with a
    // null application rather than findByTenantAndApplicationAndName —
    // presumably it was meant to call the find method; confirm the intent.
    @Test
    public void shouldTryFindByTenantAndApplicationAndNameNullApplication() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                null,
                triggerA.getGuid(),
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(ApplicationService.Validations.APPLICATION_NULL.getCode()));
    }

    @Test
    public void shouldFindByTenantAndApplicationAndName() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByTenantAndApplicationAndName(
                currentTenant,
                application,
                triggerA.getName()
        );
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getGuid(), is(triggerA.getGuid()));
    }

    @Test
    public void shouldTryFindNonExistingByTenantAndApplicationAndName() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByTenantAndApplicationAndName(
                currentTenant,
                application,
                "invalid name"
        );
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_NOT_FOUND.getCode()));
    }

    /***************************** findByLocationDeviceModelAndType *****************************/
    @Test
    public void shouldTryFindByLocationDeviceModelAndTypeNullTenant() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                null,
                application,
                locationA,
                deviceModel,
                AlertTrigger.AlertTriggerType.SILENCE);
        assertThat(serviceResponse, hasErrorMessage(CommonValidations.TENANT_NULL.getCode()));
    }

    @Test
    public void shouldTryFindByLocationDeviceModelAndTypeNullApplication() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                currentTenant,
                null,
                locationA,
                deviceModel,
                AlertTrigger.AlertTriggerType.SILENCE);
        assertThat(serviceResponse, hasErrorMessage(ApplicationService.Validations.APPLICATION_NULL.getCode()));
    }

    @Test
    public void shouldTryFindByLocationDeviceModelAndTypeNullLocation() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                currentTenant,
                application,
                null,
                deviceModel,
                AlertTrigger.AlertTriggerType.SILENCE);
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_INVALID_LOCATION.getCode()));
    }

    @Test
    public void shouldTryFindByLocationDeviceModelAndTypeNullDeviceModel() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                currentTenant,
                application,
                locationA,
                null,
                AlertTrigger.AlertTriggerType.SILENCE);
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_INVALID_DEVICE_MODEL.getCode()));
    }

    @Test
    public void shouldTryFindByLocationDeviceModelAndTypeNullAlertTriggerType() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                currentTenant,
                application,
                locationA,
                deviceModel,
                null);
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_INVALID_TYPE.getCode()));
    }

    @Test
    public void shouldFindByLocationDeviceModelAndType() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                currentTenant,
                application,
                locationA,
                deviceModel,
                AlertTrigger.AlertTriggerType.SILENCE);
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getGuid(), is(triggerA.getGuid()));
    }

    // A location with no matching trigger yields an OK response whose
    // result is null (no error message).
    @Test
    public void shouldFindByLocationDeviceModelAndTypeReturningNullResult() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.findByLocationDeviceModelAndType(
                currentTenant,
                application,
                Location.builder().id("nullResult").build(),
                deviceModel,
                AlertTrigger.AlertTriggerType.SILENCE);
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult(), nullValue());
    }

    /***************************** remove *****************************/
    @Test
    public void shouldTryRemoveWithNullGuid() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.remove(
                currentTenant,
                application,
                null
        );
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_GUID_NULL.getCode()));
    }

    @Test
    public void shouldTryRemoveWithInvalidGuid() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.remove(
                currentTenant,
                application,
                "invalid-guid"
        );
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_NOT_FOUND.getCode()));
    }

    @Test
    public void shouldRemove() {
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.remove(
                currentTenant,
                application,
                triggerA.getGuid()
        );
        assertThat(serviceResponse.isOk(), is(true));
    }

    /***************************** update *****************************/
    @Test
    public void shouldTryUpdateWithoutTenant() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                null,
                application,
                triggerA.getGuid(),
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(CommonValidations.TENANT_NULL.getCode()));
    }

    @Test
    public void shouldTryUpdateWithNullGuid() {
        AlertTrigger alertTrigger = AlertTrigger.
                builder().
                build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                null,
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_GUID_NULL.getCode()));
    }

    // Model validation (missing type) is expected to fire before the
    // guid lookup, hence the invalid guid still yields INVALID_TYPE.
    @Test
    public void shouldTryUpdateWithoutType() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                "invalid-guid",
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(AlertTrigger.Validations.INVALID_TYPE.getCode()));
    }

    @Test
    public void shouldTryUpdateWithInvalidGuid() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.CUSTOM)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                "invalid-guid",
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_NOT_FOUND.getCode()));
    }

    @Test
    public void shouldTryUpdateWithInvalidName() {
        AlertTrigger alertTrigger = AlertTrigger.
                builder().
                build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                "invalid-guid",
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(AlertTrigger.Validations.NAME_INVALID.getCode()));
    }

    // A SILENCE trigger without minutes set is rejected on update.
    @Test
    public void shouldUpdateWithInvalidMinutes() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence 42")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                triggerA.getGuid(),
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(AlertTrigger.Validations.INVALID_MINUTES_VALUE.getCode()));
    }

    // NOTE(review): the assertion expects minutes to stay at the stored
    // value (100) even though 42 was submitted — presumably minutes are
    // not overwritten when updating to a CUSTOM trigger; confirm.
    @Test
    public void shouldUpdateCustom() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence 42")
                .description("forty two")
                .type(AlertTrigger.AlertTriggerType.CUSTOM)
                .minutes(42)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                triggerA.getGuid(),
                alertTrigger
        );
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getDescription(), is("forty two"));
        assertThat(serviceResponse.getResult().getMinutes(), is(100));
    }

    @Test
    public void shouldUpdateSilence() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence 42")
                .description("forty two")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .deviceModel(deviceModel)
                .location(locationA)
                .minutes(42)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.update(
                currentTenant,
                application,
                triggerA.getGuid(),
                alertTrigger
        );
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getDescription(), is("forty two"));
        assertThat(serviceResponse.getResult().getMinutes(), is(42));
    }

    /***************************** save *****************************/
    @Test
    public void shouldTrySaveWithoutTenant() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.save(
                null,
                application,
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(CommonValidations.TENANT_NULL.getCode()));
    }

    @Test
    public void shouldTrySaveSilenceWithInvalidMinutes() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.save(
                currentTenant,
                application,
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(AlertTrigger.Validations.INVALID_MINUTES_VALUE.getCode()));
    }

    @Test
    public void shouldTrySaveWithExistingName() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name(triggerA.getName())
                .type(AlertTrigger.AlertTriggerType.CUSTOM)
                .minutes(200)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.save(
                currentTenant,
                application,
                alertTrigger
        );
        assertThat(serviceResponse, hasErrorMessage(Validations.ALERT_TRIGGER_ALREADY_EXISTS.getCode()));
    }

    @Test
    public void shouldSaveCustom() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.CUSTOM)
                .minutes(200)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.save(
                currentTenant,
                application,
                alertTrigger
        );
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getGuid(), Matchers.notNullValue());
    }

    @Test
    public void shouldSaveSilenceTrigger() {
        AlertTrigger alertTrigger = AlertTrigger
                .builder()
                .name("silence")
                .type(AlertTrigger.AlertTriggerType.SILENCE)
                .deviceModel(deviceModel)
                .location(locationA)
                .minutes(200)
                .build();
        ServiceResponse<AlertTrigger> serviceResponse = alertTriggerService.save(
                currentTenant,
                application,
                alertTrigger
        );
        assertThat(serviceResponse.isOk(), is(true));
        assertThat(serviceResponse.getResult().getGuid(), Matchers.notNullValue());
    }
}
| |
/**
* Copyright (C) 2013 The Android Open Source Project
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.why168.volley.toolbox;
import android.content.Context;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.ViewGroup.LayoutParams;
import android.widget.ImageView;
import com.github.why168.volley.VolleyError;
import com.github.why168.volley.toolbox.ImageLoader.*;
/**
* Handles fetching an image from a URL as well as the life-cycle of the
* associated request.
*/
public class NetworkImageView extends ImageView {
    /** The URL of the network image to load */
    private String mUrl;
    /**
     * Resource ID of the image to be used as a placeholder until the network image is loaded.
     */
    private int mDefaultImageId;
    /**
     * Resource ID of the image to be used if the network response fails.
     */
    private int mErrorImageId;
    /** Local copy of the ImageLoader. */
    private ImageLoader mImageLoader;
    /** Current ImageContainer. (either in-flight or finished) */
    private ImageContainer mImageContainer;

    public NetworkImageView(Context context) {
        this(context, null);
    }

    public NetworkImageView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public NetworkImageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets URL of the image that should be loaded into this view. Note that calling this will
     * immediately either set the cached image (if available) or the default image specified by
     * {@link NetworkImageView#setDefaultImageResId(int)} on the view.
     *
     * NOTE: If applicable, {@link NetworkImageView#setDefaultImageResId(int)} and
     * {@link NetworkImageView#setErrorImageResId(int)} should be called prior to calling
     * this function.
     *
     * @param url The URL that should be loaded into this ImageView.
     * @param imageLoader ImageLoader that will be used to make the request.
     */
    public void setImageUrl(String url, ImageLoader imageLoader) {
        mUrl = url;
        mImageLoader = imageLoader;
        // The URL has potentially changed. See if we need to load it.
        loadImageIfNecessary(false);
    }

    /**
     * Sets the default image resource ID to be used for this view until the attempt to load it
     * completes.
     */
    public void setDefaultImageResId(int defaultImage) {
        mDefaultImageId = defaultImage;
    }

    /**
     * Sets the error image resource ID to be used for this view in the event that the image
     * requested fails to load.
     */
    public void setErrorImageResId(int errorImage) {
        mErrorImageId = errorImage;
    }

    /**
     * Loads the image for the view if it isn't already loaded. Cancels any
     * in-flight request for a different URL before issuing a new one, and
     * defers the bind when invoked during a layout pass to avoid a nested
     * requestLayout.
     *
     * @param isInLayoutPass True if this was invoked from a layout pass, false otherwise.
     */
    void loadImageIfNecessary(final boolean isInLayoutPass) {
        int width = getWidth();
        int height = getHeight();
        ScaleType scaleType = getScaleType();
        // WRAP_CONTENT dimensions are treated as "unbounded" (0) below.
        boolean wrapWidth = false, wrapHeight = false;
        if (getLayoutParams() != null) {
            wrapWidth = getLayoutParams().width == LayoutParams.WRAP_CONTENT;
            wrapHeight = getLayoutParams().height == LayoutParams.WRAP_CONTENT;
        }
        // if the view's bounds aren't known yet, and this is not a wrap-content/wrap-content
        // view, hold off on loading the image.
        boolean isFullyWrapContent = wrapWidth && wrapHeight;
        if (width == 0 && height == 0 && !isFullyWrapContent) {
            return;
        }
        // if the URL to be loaded in this view is empty, cancel any old requests and clear the
        // currently loaded image.
        if (TextUtils.isEmpty(mUrl)) {
            if (mImageContainer != null) {
                mImageContainer.cancelRequest();
                mImageContainer = null;
            }
            setDefaultImageOrNull();
            return;
        }
        // if there was an old request in this view, check if it needs to be canceled.
        if (mImageContainer != null && mImageContainer.getRequestUrl() != null) {
            if (mImageContainer.getRequestUrl().equals(mUrl)) {
                // if the request is from the same URL, return.
                return;
            } else {
                // if there is a pre-existing request, cancel it if it's fetching a different URL.
                mImageContainer.cancelRequest();
                setDefaultImageOrNull();
            }
        }
        // Calculate the max image width / height to use while ignoring WRAP_CONTENT dimens.
        int maxWidth = wrapWidth ? 0 : width;
        int maxHeight = wrapHeight ? 0 : height;
        // The pre-existing content of this view didn't match the current URL. Load the new image
        // from the network.
        ImageContainer newContainer = mImageLoader.get(mUrl,
                new ImageListener() {
                    @Override
                    public void onErrorResponse(VolleyError error) {
                        // Show the error image (if configured); otherwise leave
                        // whatever is currently displayed untouched.
                        if (mErrorImageId != 0) {
                            setImageResource(mErrorImageId);
                        }
                    }

                    @Override
                    public void onResponse(final ImageContainer response, boolean isImmediate) {
                        // If this was an immediate response that was delivered inside of a layout
                        // pass do not set the image immediately as it will trigger a requestLayout
                        // inside of a layout. Instead, defer setting the image by posting back to
                        // the main thread.
                        if (isImmediate && isInLayoutPass) {
                            post(new Runnable() {
                                @Override
                                public void run() {
                                    onResponse(response, false);
                                }
                            });
                            return;
                        }
                        if (response.getBitmap() != null) {
                            setImageBitmap(response.getBitmap());
                        } else if (mDefaultImageId != 0) {
                            // No bitmap yet (e.g. cache miss) — show the placeholder.
                            setImageResource(mDefaultImageId);
                        }
                    }
                }, maxWidth, maxHeight, scaleType);
        // update the ImageContainer to be the new bitmap container.
        mImageContainer = newContainer;
    }

    /** Shows the configured placeholder, or clears the image when none is set. */
    private void setDefaultImageOrNull() {
        if (mDefaultImageId != 0) {
            setImageResource(mDefaultImageId);
        } else {
            setImageBitmap(null);
        }
    }

    // The view's size is only known after layout, so (re)try loading here.
    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        loadImageIfNecessary(true);
    }

    @Override
    protected void onDetachedFromWindow() {
        if (mImageContainer != null) {
            // If the view was bound to an image request, cancel it and clear
            // out the image from the view.
            mImageContainer.cancelRequest();
            setImageBitmap(null);
            // also clear out the container so we can reload the image if necessary.
            mImageContainer = null;
        }
        super.onDetachedFromWindow();
    }

    @Override
    protected void drawableStateChanged() {
        super.drawableStateChanged();
        invalidate();
    }
}
| |
package cn.zxl.mvc.common.struts;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import net.sf.json.JsonConfig;
import org.apache.log4j.Logger;
import org.apache.struts2.interceptor.ServletRequestAware;
import org.apache.struts2.interceptor.ServletResponseAware;
import org.apache.struts2.interceptor.SessionAware;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.web.context.ServletContextAware;
import cn.zxl.common.ArrayUtil;
import cn.zxl.common.BeanUtil;
import cn.zxl.common.LogUtil;
import cn.zxl.common.ReflectUtil;
import cn.zxl.mvc.common.action.ContentType;
import cn.zxl.mvc.common.action.ContentType.Extension;
import cn.zxl.mvc.common.action.ErrorMessage;
import cn.zxl.orm.common.CycleSupport;
import com.opensymphony.xwork2.ActionSupport;
import com.opensymphony.xwork2.ModelDriven;
public abstract class AbstractStrutsAction<T> extends ActionSupport implements ApplicationContextAware, ServletContextAware, ServletRequestAware, ServletResponseAware, SessionAware, ModelDriven<T> {
private static final long serialVersionUID = -5236789144099156153L;
private static final Logger LOGGER = LogUtil.logger(AbstractStrutsAction.class);
private static final String DEFAULT_CHARSET = "UTF-8";
private static final int SUCCESS_CODE = 200;
private static final int ERROR_CODE = 500;
private static final String CODE_KEY = "code";
private static final String MESSAGE_KEY = "message";
private static final String SUCCESS_MESSAGE = "success";
protected static final String LIST = "list";
private Class<T> clazz;
protected T entity;
private ApplicationContext applicationContext;
private ServletContext servletContext;
private HttpServletRequest servletRequest;
private HttpServletResponse servletResponse;
private Map<String, Object> session;
@SuppressWarnings("unchecked")
public AbstractStrutsAction() {
    super();
    try {
        // Resolve the concrete entity type T from the subclass' generic
        // signature, and pre-instantiate it for ModelDriven support when
        // it is a concrete class.
        clazz = ((Class<T>) ReflectUtil.getParameterizedType(getClass()));
        if (!Modifier.isAbstract(clazz.getModifiers())) {
            entity = clazz.newInstance();
        }
    } catch (InstantiationException instantiationException) {
        LogUtil.error(LOGGER, "Instantiation error", instantiationException);
    } catch (IllegalAccessException illegalAccessException) {
        LogUtil.error(LOGGER, "Instantiation error", illegalAccessException);
    } catch (NullPointerException nullPointerException) {
        // NOTE(review): presumably guards against ReflectUtil returning
        // null when no type parameter is declared — confirm; catching NPE
        // as control flow is otherwise an anti-pattern.
        LogUtil.error(LOGGER, "Instantiation error", nullPointerException);
    }
}
/**
 * ModelDriven contract: exposes the pre-instantiated entity as the
 * request model (Struts populates it from request parameters).
 */
@Override
public T getModel() {
    return entity;
}
/** Returns the entity instance bound to this action (may be null for abstract T). */
public T getEntity() {
    return entity;
}
/** Returns the concrete entity class resolved in the constructor. */
protected Class<T> getClazz() {
    return clazz;
}
/** ServletContextAware injection point. */
@Override
public void setServletContext(ServletContext servletContext) {
    this.servletContext = servletContext;
}
/** ApplicationContextAware injection point. */
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
    this.applicationContext = applicationContext;
}
/** ServletRequestAware injection point. */
@Override
public void setServletRequest(HttpServletRequest servletRequest) {
    this.servletRequest = servletRequest;
}
/** ServletResponseAware injection point. */
@Override
public void setServletResponse(HttpServletResponse servletResponse) {
    this.servletResponse = servletResponse;
}
/** SessionAware injection point: receives the session attribute map. */
@Override
public void setSession(Map<String, Object> session) {
    this.session = session;
}
/** Returns the injected ServletContext. */
protected ServletContext getServletContext() {
    return servletContext;
}
/** Returns the injected Spring ApplicationContext. */
protected ApplicationContext getApplicationContext() throws BeansException {
    return applicationContext;
}
/** Returns the injected HttpServletRequest. */
protected HttpServletRequest getHttpServletRequest() {
    return servletRequest;
}
protected void setRequestAttribute(String name, Object value) {
servletRequest.setAttribute(name, value);
}
protected void setRequestJsonAttribute(String name, Object value) {
if (!BeanUtil.isEmpty(value)) {
setRequestAttribute(name, toJSONObject(value, jsonConfigForPageData(value.getClass())));
}
}
protected void setRequestJsonAttribute(String name, Collection<?> collection) {
if (!ArrayUtil.isEmpty(collection)) {
setRequestAttribute(name, toJSONArray(collection, jsonConfigForPageData(collection.iterator().next().getClass())));
}
}
protected void setRequestJsonAttribute(String name, Object[] array) {
if (!ArrayUtil.isEmpty(array)) {
setRequestAttribute(name, toJSONArray(array, jsonConfigForPageData(array[0].getClass())));
}
}
protected HttpServletResponse getHttpServletResponse() {
return servletResponse;
}
protected Map<String, Object> getSession() {
return session;
}
protected Object getSession(String key) {
return session.get(key);
}
protected Object getBean(String name) {
return applicationContext.getBean(name);
}
protected Object getServletContextAttribute(String name) {
return servletContext.getAttribute(name);
}
protected void setServletContextAttribute(String name, Object value) {
servletContext.setAttribute(name, value);
}
/**
 * AJAX helper methods: each writes a response body directly to the servlet
 * response, bypassing normal Struts result rendering.
 */
// Writes the string form of a primitive/wrapper value as plain text.
protected void ajaxPrimitiveType(Object primitiveObject) {
    if (primitiveObject == null) {
        // NOTE(review): a null argument is reported with the generic NO_RESULT
        // message — confirm this is the intended error for callers.
        throw new RuntimeException(ErrorMessage.NO_RESULT.getErrorMessage());
    }
    ajax(primitiveObject.toString());
}
// Writes a single object as a JSON object (library-default config).
protected void ajaxJson(Object object) {
    ajaxJson(object, null);
}
// Writes a collection as a JSON array (library-default config).
protected void ajaxList(Collection<?> collection) {
    ajaxList(collection, null);
}
protected void ajaxJson(Object object, JsonConfig jsonConfig) {
    ajax(toJSONObject(object, jsonConfig));
}
protected void ajaxList(Collection<?> collection, JsonConfig jsonConfig) {
    ajax(toJSONArray(collection, jsonConfig));
}
// Writes the success envelope {CODE_KEY: SUCCESS_CODE, "message": "success"}.
protected void ajaxSuccess() {
    Map<String, Object> result = new HashMap<String, Object>();
    result.put(CODE_KEY, SUCCESS_CODE);
    result.put(MESSAGE_KEY, SUCCESS_MESSAGE);
    ajax(toJSONObject(result));
}
/**
 * Logs the error (with the optional cause) and writes the error envelope
 * {CODE_KEY: ERROR_CODE, "message": message} to the response.
 * The message is mandatory; the exception may be null.
 */
protected void ajaxError(String message, Exception exception) {
    if (message == null) {
        throw new NullPointerException("the arg [message] is required!");
    }
    if (exception != null) {
        LogUtil.error(LOGGER, message, exception);
    } else {
        LogUtil.error(LOGGER, message);
    }
    Map<String, Object> result = new HashMap<String, Object>();
    result.put(CODE_KEY, ERROR_CODE);
    result.put(MESSAGE_KEY, message);
    ajax(toJSONObject(result));
}
/**
 * JSON serialization helpers built on json-lib. When no JsonConfig is supplied
 * the library defaults are used.
 */
protected String toJSONObject(Object object) {
    return toJSONObject(object, null);
}
protected String toJSONObject(Object object, JsonConfig jsonConfig) {
    if (jsonConfig == null) {
        return JSONObject.fromObject(object).toString();
    }
    return JSONObject.fromObject(object, jsonConfig).toString();
}
protected String toJSONArray(Collection<?> collection) {
    return toJSONArray(collection, null);
}
protected String toJSONArray(Collection<?> collection, JsonConfig jsonConfig) {
    if (jsonConfig == null) {
        return JSONArray.fromObject(collection).toString();
    }
    return JSONArray.fromObject(collection, jsonConfig).toString();
}
protected String toJSONArray(Object[] array) {
    return toJSONArray(array, null);
}
protected String toJSONArray(Object[] array, JsonConfig jsonConfig) {
    if (jsonConfig == null) {
        return JSONArray.fromObject(array).toString();
    }
    return JSONArray.fromObject(array, jsonConfig).toString();
}
// Writes plain-text content with the default charset.
private void ajax(String content) {
    ajax(content, ContentType.getContentType(Extension.txt), DEFAULT_CHARSET);
}
/**
 * Writes the given content to the servlet response with the supplied
 * content type and character encoding, then closes the writer.
 *
 * Fixes over the previous version: (1) if getWriter() itself throws, the
 * finally block no longer NPEs on a null writer; (2) the underlying
 * IOException is chained as the cause instead of being discarded.
 */
private void ajax(String content, String contentType, String charset) {
    servletResponse.setCharacterEncoding(charset);
    servletResponse.setContentType(contentType);
    PrintWriter printWriter = null;
    try {
        printWriter = servletResponse.getWriter();
        printWriter.write(content);
        printWriter.flush();
    } catch (IOException ioException) {
        // Preserve the root cause for diagnosis.
        throw new RuntimeException(ErrorMessage.NO_RESULT.getErrorMessage(), ioException);
    } finally {
        if (printWriter != null) {
            printWriter.close();
        }
    }
}
// Streams raw bytes with a null charset (response default encoding).
protected final void ajaxStream(byte[] bytes) {
    ajaxStream(bytes, null);
}
/**
 * Streams raw bytes to the servlet response as a binary download.
 *
 * Fixes over the previous version: (1) if getOutputStream() itself throws,
 * the finally block no longer NPEs on a null stream; (2) IOExceptions are
 * rethrown with their cause chained instead of discarded.
 */
protected final void ajaxStream(byte[] bytes, String charset) {
    servletResponse.setCharacterEncoding(charset);
    servletResponse.setContentType(ContentType.getContentType(Extension.all));
    OutputStream outputStream = null;
    try {
        outputStream = servletResponse.getOutputStream();
        outputStream.write(bytes, 0, bytes.length);
        outputStream.flush();
    } catch (IOException ioException) {
        throw new RuntimeException(ErrorMessage.NO_RESULT.getErrorMessage(), ioException);
    } finally {
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException e) {
                throw new RuntimeException(ErrorMessage.NO_RESULT.getErrorMessage(), e);
            }
        }
    }
}
/**
 * Returns the entity's custom JsonConfig when it implements CycleSupport,
 * otherwise null (library defaults apply).
 *
 * Fix: 'entity' is null when the resolved type is abstract (see constructor),
 * so the old entity.getClass() call could NPE; instanceof is null-safe and
 * otherwise equivalent to CycleSupport.class.isAssignableFrom(entity.getClass()).
 */
protected JsonConfig jsonConfigForPageData() {
    return entity instanceof CycleSupport ? ((CycleSupport) entity).jsonConfig() : null;
}
/**
 * Same as above but for an arbitrary type: instantiates it to obtain the
 * config. Any reflection failure is logged and degrades to the default (null).
 */
protected JsonConfig jsonConfigForPageData(Class<?> clazz) {
    try {
        return CycleSupport.class.isAssignableFrom(clazz) ? ((CycleSupport) clazz.newInstance()).jsonConfig() : null;
    } catch (Exception e) {
        // Best-effort: fall back to the default JsonConfig rather than failing the request.
        LogUtil.warn(LOGGER, "new instance error", e);
        return null;
    }
}
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Description:
// Main Entry point
// ----------------------------------------------------------------------------
// Change History:
// 2010/07/18 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.servers.aspicore;
import java.lang.*;
import java.util.*;
import java.io.*;
import java.net.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.dbtypes.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;
public class Main
{

    // ------------------------------------------------------------------------

    /* command-line argument keys */
    public static final String ARG_HELP[] = new String[] { "help" , "h" };
    public static final String ARG_CMD_PORT[] = new String[] { "command", "cmd" };
    public static final String ARG_START[] = new String[] { "start" };

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    // NOTE(review): presumably invoked reflectively by the DCServerFactory
    // loading mechanism — confirm before renaming or removing.
    public static String DCServerFactory_LoadName() { return Main.getServerContextName(); }

    /* return server config */
    // The device-code name of this DC server.
    public static String getServerName()
    {
        return Constants.DEVICE_CODE;
    }

    // Runtime context name for this server; may differ from the raw server
    // name when overridden in the runtime configuration.
    public static String getServerContextName()
    {
        return RTConfig.getContextName(Main.getServerName());
    }

    /* return server config */
    // Lazily-loaded server configuration; remote logging is started the first
    // time it is fetched. Not synchronized — assumed to be touched only from
    // the single startup thread (TODO confirm).
    private static DCServerConfig dcServerCfg = null;
    public static DCServerConfig getServerConfig()
    {
        if (dcServerCfg == null) {
            dcServerCfg = DCServerFactory.getServerConfig(Main.getServerContextName());
            DCServerConfig.startRemoteLogging(dcServerCfg);
        }
        return dcServerCfg;
    }

    // ------------------------------------------------------------------------

    /* get server TCP ports (first check command-line, then config file) */
    // Returns null (after logging an error) when no DCServerConfig is found;
    // callers must tolerate the null.
    public static int[] getTcpPorts()
    {
        DCServerConfig dcs = getServerConfig();
        if (dcs != null) {
            return dcs.getTcpPorts();
        } else {
            Print.logError("DCServerConfig not found for server: " + getServerName());
            return null;
        }
    }

    /* get server UDP ports (first check command-line, then config file) */
    // Same null-on-missing-config contract as getTcpPorts().
    public static int[] getUdpPorts()
    {
        DCServerConfig dcs = getServerConfig();
        if (dcs != null) {
            return dcs.getUdpPorts();
        } else {
            Print.logError("DCServerConfig not found for server: " + getServerName());
            return null;
        }
    }

    /* get server ports (first check command-line, then config file, then default) */
    // Falls back to the "-command"/"-cmd" command-line argument (default 0)
    // when no server config is available.
    public static int getCommandDispatcherPort()
    {
        DCServerConfig dcs = getServerConfig();
        if (dcs != null) {
            return dcs.getCommandDispatcherPort();
        } else {
            return RTConfig.getInt(ARG_CMD_PORT,0);
        }
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    // Human-readable list of unique-ID prefixes for the startup banner;
    // empty string when no server config is found.
    public static String getUniqueIDPrefixList()
    {
        DCServerConfig dcsc = Main.getServerConfig();
        if (dcsc != null) {
            return DCServerFactory.getUniquePrefixString(dcsc.getUniquePrefix());
        } else {
            return "";
        }
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    // Main entry point

    /* display usage and exit */
    // Prints the optional message plus usage text, then terminates the JVM
    // with exit code 1 (never returns).
    private static void usage(String msg)
    {
        // NOTE(review): getTcpPorts()/getUdpPorts() may return null here;
        // presumably StringTools.join tolerates a null array — confirm.
        String tcp = StringTools.join(getTcpPorts(),",");
        String udp = StringTools.join(getUdpPorts(),",");
        /* print message */
        if (msg != null) {
            Print.logInfo(msg);
        }
        /* print usage */
        Print.logInfo("");
        Print.logInfo("Usage:");
        Print.logInfo("  java ... " + Main.class.getName() + " {options}");
        Print.logInfo("Options:");
        Print.logInfo("  [-h[elp]]           Print this help");
        Print.logInfo("  [-port=<p>[,<p>]]   Server TCP/UDP port(s) to listen");
        Print.logInfo("  [-tcp=<p>[,<p>]]    Server TCP port(s) to listen on [dft="+tcp+"]");
        Print.logInfo("  [-udp=<p>[,<p>]]    Server UDP port(s) to listen on [dft="+udp+"]");
        Print.logInfo("  -start              Start server on the specified port");
        Print.logInfo("");
        /* exit */
        System.exit(1);
    }

    /* main entry point */
    // Startup order matters: runtime/DB config must be initialized before the
    // packet-handler/server config constants are read.
    public static void main(String argv[])
    {
        /* configure server for MySQL data store */
        DBConfig.cmdLineInit(argv,false);  // main
        DBConfig.check_GTS_HOME();

        /* init configuration constants */
        TrackClientPacketHandler.configInit();
        TrackServer.configInit();

        /* header */
        String SEP = "--------------------------------------------------------------------------";
        Print.logInfo(SEP);
        Print.logInfo(Constants.TITLE_NAME);
        Print.logInfo(Constants.COPYRIGHT);
        Print.logInfo("Version: " +
            Constants.VERSION +
            " [" + org.opengts.Version.getCompileTime() + "]"
            );
        Print.logInfo("Unique-ID Prefix(s)   : " + Main.getUniqueIDPrefixList());
        Print.logInfo("Minimum Speed (KPH)   : " + TrackClientPacketHandler.MINIMUM_SPEED_KPH);
        Print.logInfo("Estimate GPS Odometer : " + TrackClientPacketHandler.ESTIMATE_ODOMETER);
        Print.logInfo("Simulate Arrive/Depart: " + TrackClientPacketHandler.SIMEVENT_GEOZONES);
        Print.logInfo("TCP Idle Timeout      : " + TrackServer.getTcpIdleTimeout() + " ms");
        Print.logInfo("TCP Packet Timeout    : " + TrackServer.getTcpPacketTimeout() + " ms");
        Print.logInfo("TCP Session Timeout   : " + TrackServer.getTcpSessionTimeout() + " ms");
        Print.logInfo("UDP Idle Timeout      : " + TrackServer.getUdpIdleTimeout() + " ms");
        Print.logInfo("UDP Packet Timeout    : " + TrackServer.getUdpPacketTimeout() + " ms");
        Print.logInfo("UDP Session Timeout   : " + TrackServer.getUdpSessionTimeout() + " ms");
        Print.logInfo(SEP);

        /* explicit help? */
        if (RTConfig.getBoolean(ARG_HELP,false)) {
            Main.usage("Help ...");
            // control doesn't reach here
        }

        /* make sure the DB is properly initialized */
        if (!DBAdmin.verifyTablesExist()) {
            Print.logFatal("MySQL database has not yet been properly initialized");
            System.exit(1);
        }

        /* start server */
        if (RTConfig.getBoolean(ARG_START,false)) {

            /* start port listeners */
            try {
                int tcpPorts[] = getTcpPorts();
                int udpPorts[] = getUdpPorts();
                int commandPort = getCommandDispatcherPort();
                TrackServer.startTrackServer(tcpPorts, udpPorts, commandPort);
            } catch (Throwable t) { // trap any server exception
                // NOTE(review): only t.toString() is logged — the stack trace is
                // lost; consider a logging variant that records the Throwable.
                Print.logError("Error: " + t);
            }
            Print.logInfo(SEP);

            /* wait here forever while the server is running in a thread */
            // Listener threads are daemons of TrackServer; this keeps the
            // process alive with an hourly no-op wakeup.
            while (true) {
                try { Thread.sleep(60L * 60L * 1000L); } catch (Throwable t) {}
            }
            // control never reaches here
        }

        /* display usage */
        Main.usage("Missing '-start' ...");
        // control doesn't reach here
        System.exit(99);
    }

}
| |
package com.haskforce.settings;
import com.haskforce.utils.ExecUtil;
import com.haskforce.utils.GuiUtil;
import com.haskforce.utils.NotificationUtil;
import com.intellij.compiler.options.CompilerConfigurable;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.TextAccessor;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import scala.runtime.AbstractFunction1;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.Arrays;
/**
 * The "Haskell Compiler" section in Preferences->Compiler.
 *
 * Binds the Swing form components to {@link HaskellBuildSettings} and keeps the
 * Stack vs. Cabal build configurations mutually exclusive in the UI.
 */
public class HaskellCompilerConfigurable extends CompilerConfigurable {
    public static final String HASKELL_COMPILER_ID = "Haskell compiler";

    // Swing components.
    // NOTE(review): these fields are presumably bound by name to a GUI-designer
    // .form file — do not rename them without updating the form.
    private JPanel mainPanel;
    // GHC Binary components.
    private TextFieldWithBrowseButton ghcPath;
    private JLabel ghcVersion;
    // Cabal binary components.
    private TextFieldWithBrowseButton cabalPath;
    private JLabel cabalVersion;
    // Cabal configure flags
    private com.intellij.ui.RawCommandLineEditor cabalFlags;
    // Build configuration components.
    private JCheckBox profilingBuild;
    private JCheckBox cabalSandbox;
    private JCheckBox installCabalDependencies;
    private JCheckBox enableTests;
    private JRadioButton buildWithCabal;
    private TextFieldWithBrowseButton stackPath;
    private JRadioButton buildWithStack;
    private JLabel stackVersion;
    private TextFieldWithBrowseButton stackFile;
    private com.intellij.ui.RawCommandLineEditor stackFlags;
    // Groups the two "build with" radio buttons so only one can be selected.
    private ButtonGroup buildWith = new ButtonGroup();

    // Data container for settings.
    private final HaskellBuildSettings mySettings;
    @SuppressWarnings("FieldCanBeLocal")
    private final Project myProject;

    /**
     * Loads the persisted build settings into the form fields, attaches the
     * file-chooser listeners, and wires up the Stack/Cabal radio buttons.
     */
    public HaskellCompilerConfigurable(@NotNull final Project inProject) {
        super(inProject);
        myProject = inProject;
        mySettings = HaskellBuildSettings.getInstance(myProject);

        stackPath.setText(mySettings.getStackPath());
        GuiUtil.addFolderListener(stackPath, "stack");

        stackFile.setText(mySettings.getStackFile());
        // Restrict the stack.yaml chooser to yaml/yml files.
        GuiUtil.addFolderListener(stackFile, "stack.yaml", inProject, new Condition<VirtualFile>() {
            @Override
            public boolean value(VirtualFile virtualFile) {
                String ext = virtualFile.getExtension();
                return ext != null && Arrays.asList("yaml", "yml").contains(ext.toLowerCase());
            }
        });

        ghcPath.setText(mySettings.getGhcPath());
        GuiUtil.addFolderListener(ghcPath, "ghc");

        cabalPath.setText(mySettings.getCabalPath());
        GuiUtil.addFolderListener(cabalPath, "cabal");

        cabalSandbox.setSelected(mySettings.isCabalSandboxEnabled());
        installCabalDependencies.setSelected(mySettings.isInstallCabalDependenciesEnabled());
        enableTests.setSelected(mySettings.isEnableTestsEnabled());

        initializeBuildWithButtons();
        updateVersionInfoFields();
    }

    // Sets up mutual exclusion between the Stack and Cabal build modes and
    // enables/disables the associated field groups accordingly.
    private void initializeBuildWithButtons() {
        buildWith.add(buildWithStack);
        buildWith.add(buildWithCabal);
        boolean stackEnabled = mySettings.isStackEnabled();
        buildWithStack.setSelected(stackEnabled);
        setEnabledStackFields(stackEnabled);
        // Cabal and Stack can't be enabled simultaneously, prefer Stack.
        boolean cabalEnabled = !stackEnabled && mySettings.isCabalEnabled();
        buildWithCabal.setSelected(cabalEnabled);
        setEnabledCabalFields(cabalEnabled);
        buildWithStack.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                setEnabledStackFields(true);
            }
        });
        buildWithCabal.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                setEnabledCabalFields(true);
            }
        });
    }

    /**
     * The setEnabledStack/CabalFields methods will toggle the other fields
     * as enabled so that the Stack and Cabal fields won't be enabled simultaneously.
     */
    private void setEnabledStackFields(boolean enabled) {
        setEnabledStackFields(enabled, true);
    }

    // 'toggle' guards against infinite mutual recursion: the cross-call is
    // made with toggle=false so it does not call back.
    private void setEnabledStackFields(boolean enabled, boolean toggle) {
        stackPath.setEnabled(enabled);
        stackFlags.setEnabled(enabled);
        stackFile.setEnabled(enabled);
        if (toggle) setEnabledCabalFields(!enabled, false);
    }

    private void setEnabledCabalFields(boolean enabled) {
        setEnabledCabalFields(enabled, true);
    }

    // See setEnabledStackFields(boolean, boolean) for the meaning of 'toggle'.
    private void setEnabledCabalFields(boolean enabled, boolean toggle) {
        ghcPath.setEnabled(enabled);
        cabalPath.setEnabled(enabled);
        cabalFlags.setEnabled(enabled);
        profilingBuild.setEnabled(enabled);
        installCabalDependencies.setEnabled(enabled);
        cabalSandbox.setEnabled(enabled);
        enableTests.setEnabled(enabled);
        if (toggle) setEnabledStackFields(!enabled, false);
    }

    @NotNull
    @Override
    public String getId() {
        return HASKELL_COMPILER_ID;
    }

    @Nullable
    @Override
    public Runnable enableSearch(String s) {
        return null;
    }

    @Nls
    @Override
    public String getDisplayName() {
        return HASKELL_COMPILER_ID;
    }

    @Nullable
    @Override
    public String getHelpTopic() {
        return null;
    }

    /**
     * Constructs the compiler panel in Settings->Compiler. Also responsible
     * for filling in previous values or constructing sane default values.
     */
    @Nullable
    @Override
    public JComponent createComponent() {
        return mainPanel;
    }

    /**
     * Enables the apply button if anything changed.
     */
    @Override
    public boolean isModified() {
        return !(ghcCabalStackUnchanged() &&
                cabalFlags.getText().equals(mySettings.getCabalFlags()) &&
                profilingBuild.isSelected() == mySettings.isProfilingEnabled() &&
                buildWithCabal.isSelected() == mySettings.isCabalEnabled() &&
                cabalSandbox.isSelected() == mySettings.isCabalSandboxEnabled() &&
                installCabalDependencies.isSelected() == mySettings.isInstallCabalDependenciesEnabled() &&
                enableTests.isSelected() == mySettings.isEnableTestsEnabled() &&
                buildWithStack.isSelected() == mySettings.isStackEnabled() &&
                stackFlags.getText().equals(mySettings.getStackFlags()));
    }

    /**
     * Returns true if the ghc, cabal, stack and stack.yaml paths are unchanged.
     */
    private boolean ghcCabalStackUnchanged() {
        return ghcPath.getText().equals(mySettings.getGhcPath()) &&
                cabalPath.getText().equals(mySettings.getCabalPath()) &&
                stackPath.getText().equals(mySettings.getStackPath()) &&
                stackFile.getText().equals(mySettings.getStackFile());
    }

    /**
     * Triggered when the user pushes the apply button.
     * Validates first so invalid paths are never persisted.
     */
    @Override
    public void apply() throws ConfigurationException {
        validate();
        saveState();
        updateVersionInfoFields();
    }

    /**
     * Triggered when the user pushes the cancel button.
     */
    @Override
    public void reset() {
        restoreState();
    }

    // Nothing to dispose: components are owned by the form/panel.
    @Override
    public void disposeUIResources() {
    }

    /**
     * Persistent save of the current state.
     */
    private void saveState() {
        // Save to disk and to communicate with build server.
        mySettings.setProfilingBuild(profilingBuild.isSelected());
        mySettings.setUseCabal(buildWithCabal.isSelected() && !buildWithStack.isSelected());
        mySettings.setUseCabalSandbox(cabalSandbox.isSelected());
        mySettings.setInstallCabalDependencies(installCabalDependencies.isSelected());
        mySettings.setEnableTests(enableTests.isSelected());
        mySettings.setGhcPath(ghcPath.getText());
        mySettings.setCabalPath(cabalPath.getText());
        mySettings.setCabalFlags(cabalFlags.getText());
        mySettings.setUseStack(buildWithStack.isSelected());
        mySettings.setStackPath(stackPath.getText());
        mySettings.setStackFlags(stackFlags.getText());
        mySettings.setStackFile(stackFile.getText());
    }

    // Validates only the tools needed by the selected build mode.
    private void validate() throws ConfigurationException {
        if (buildWithCabal.isSelected()) {
            validateExecutable("cabal", cabalPath);
            validateExecutable("ghc", ghcPath);
        }
        if (buildWithStack.isSelected()) {
            validateExecutable("stack", stackPath);
            validateFileExists("stack.yaml", stackFile);
        }
    }

    /**
     * Accepts an absolute executable path, or a path relative to the project base.
     * NOTE(review): the project-relative fallback only checks exists(), not
     * canExecute() — confirm whether that asymmetry is intentional.
     */
    private void validateExecutable(String name, TextAccessor field) throws ConfigurationException {
        if (new File(field.getText()).canExecute() || new File(myProject.getBasePath(), field.getText()).exists()) return;
        throw new ConfigurationException("Not a valid '" + name + "' executable: '" + field.getText() + "'");
    }

    // Accepts an absolute path, or a path relative to the project base.
    private void validateFileExists(String name, TextAccessor field) throws ConfigurationException {
        if (new File(field.getText()).exists() || new File(myProject.getBasePath(), field.getText()).exists()) return;
        throw new ConfigurationException("'" + name + "' file does not exist: '" + field.getText() + "'");
    }

    /**
     * Updates the version info fields for all files configured.
     */
    private void updateVersionInfoFields() {
        updateVersionInfoField("ghc", ghcPath.getText(), "--numeric-version", ghcVersion);
        updateVersionInfoField("cabal", cabalPath.getText(), "--numeric-version", cabalVersion);
        updateVersionInfoField("stack", stackPath.getText(), "--numeric-version", stackVersion);
    }

    // Runs "<exePath> <versionFlag>" and writes the output into the label;
    // failures are surfaced as an IDE error notification instead of thrown.
    private void updateVersionInfoField(final String name, String exePath, String versionFlag,
                                        final JLabel versionField) {
        if (exePath.isEmpty()) {
            versionField.setText("");
            return;
        }
        ExecUtil.readCommandLine(null, exePath, versionFlag).fold(
                new AbstractFunction1<ExecUtil.ExecError, Void>() {
                    @Override
                    public Void apply(ExecUtil.ExecError e) {
                        NotificationUtil.displaySimpleNotification(
                                NotificationType.ERROR, myProject, name, e.getMessage()
                        );
                        return null;
                    }
                },
                new AbstractFunction1<String, Void>() {
                    @Override
                    public Void apply(String version) {
                        versionField.setText(version);
                        return null;
                    }
                }
        );
    }

    /**
     * Restore components to the initial state.
     */
    private void restoreState() {
        ghcPath.setText(mySettings.getGhcPath());
        cabalPath.setText(mySettings.getCabalPath());
        cabalFlags.setText(mySettings.getCabalFlags());
        profilingBuild.setSelected(mySettings.isProfilingEnabled());
        buildWithCabal.setSelected(mySettings.isCabalEnabled());
        cabalSandbox.setSelected(mySettings.isCabalSandboxEnabled());
        installCabalDependencies.setSelected(mySettings.isInstallCabalDependenciesEnabled());
        enableTests.setSelected(mySettings.isEnableTestsEnabled());
        buildWithStack.setSelected(mySettings.isStackEnabled());
        stackPath.setText(mySettings.getStackPath());
        stackFlags.setText(mySettings.getStackFlags());
        stackFile.setText(mySettings.getStackFile());
    }
}
| |
package edu.hm.hafner.analysis;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.*;
import edu.hm.hafner.analysis.ModuleDetector.FileSystem;
import edu.hm.hafner.util.ResourceTest;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* Tests the class {@link ModuleDetector}.
*/
@SuppressFBWarnings("DMI_HARDCODED_ABSOLUTE_FILENAME")
class ModuleDetectorTest extends ResourceTest {
private static final String MANIFEST = "MANIFEST.MF";
private static final String MANIFEST_NAME = "MANIFEST-NAME.MF";
private static final File ROOT = new File("/tmp");
private static final String PREFIX = normalizeRoot();
private static String normalizeRoot() {
return ROOT.getAbsolutePath().replace("\\", "/") + "/";
}
private static final int NO_RESULT = 0;
private static final String PATH_PREFIX_MAVEN = "path/to/maven";
private static final String PATH_PREFIX_OSGI = "path/to/osgi";
private static final String PATH_PREFIX_ANT = "path/to/ant";
private static final String EXPECTED_MAVEN_MODULE = "ADT Business Logic";
private static final String EXPECTED_ANT_MODULE = "checkstyle";
private static final String EXPECTED_OSGI_MODULE = "de.faktorlogik.prototyp";
private InputStream read(final String fileName) {
return asInputStream(fileName);
}
@Test
void shouldIdentifyModuleByReadingOsgiBundle() {
FileSystem factory = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(new String[]{PATH_PREFIX_OSGI + ModuleDetector.OSGI_BUNDLE});
when(stub.create(anyString())).thenReturn(read(MANIFEST));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_OSGI + "/something.txt")))
.isEqualTo(EXPECTED_OSGI_MODULE);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_OSGI + "/in/between/something.txt")))
.isEqualTo(EXPECTED_OSGI_MODULE);
assertThat(detector.guessModuleName(PREFIX + "/path/to/something.txt"))
.isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIdentifyModuleByReadingOsgiBundleWithVendorInL10nProperties() {
FileSystem factory = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(new String[]{PATH_PREFIX_OSGI + ModuleDetector.OSGI_BUNDLE});
when(stub.create(anyString())).thenReturn(read(MANIFEST), read("l10n.properties"));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
String expectedName = "de.faktorlogik.prototyp (My Vendor)";
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_OSGI + "/something.txt")))
.isEqualTo(expectedName);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_OSGI + "/in/between/something.txt")))
.isEqualTo(expectedName);
assertThat(detector.guessModuleName(PREFIX + "/path/to/something.txt"))
.isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIdentifyModuleByReadingOsgiBundleWithManifestName() {
FileSystem fileSystem = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(
new String[]{PATH_PREFIX_OSGI + ModuleDetector.OSGI_BUNDLE});
when(stub.create(anyString())).thenReturn(read(MANIFEST_NAME), read("l10n.properties"));
});
ModuleDetector detector = new ModuleDetector(ROOT, fileSystem);
String expectedName = "My Bundle";
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_OSGI + "/something.txt")))
.isEqualTo(expectedName);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_OSGI + "/in/between/something.txt")))
.isEqualTo(expectedName);
assertThat(detector.guessModuleName(PREFIX + "/path/to/something.txt"))
.isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIdentifyModuleByReadingMavenPom() {
FileSystem factory = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(
new String[]{PATH_PREFIX_MAVEN + ModuleDetector.MAVEN_POM});
when(stub.create(anyString())).thenAnswer(fileName -> read(ModuleDetector.MAVEN_POM));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_MAVEN + "/something.txt"))).isEqualTo(
EXPECTED_MAVEN_MODULE);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_MAVEN + "/in/between/something.txt"))).isEqualTo(
EXPECTED_MAVEN_MODULE);
assertThat(detector.guessModuleName(PREFIX + "/path/to/something.txt")).isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIdentifyModuleByReadingMavenPomWithoutName() {
FileSystem factory = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(new String[]{PATH_PREFIX_MAVEN + ModuleDetector.MAVEN_POM});
when(stub.create(anyString())).thenAnswer(filename -> read("no-name-pom.xml"));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
String artifactId = "com.avaloq.adt.core";
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_MAVEN + "/something.txt")))
.isEqualTo(artifactId);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_MAVEN + "/in/between/something.txt")))
.isEqualTo(artifactId);
assertThat(detector.guessModuleName(PREFIX + "/path/to/something.txt"))
.isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIdentifyModuleByReadingAntProjectFile() {
FileSystem factory = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(new String[]{PATH_PREFIX_ANT + ModuleDetector.ANT_PROJECT});
when(stub.create(anyString())).thenAnswer(filename -> read(ModuleDetector.ANT_PROJECT));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_ANT + "/something.txt")))
.isEqualTo(EXPECTED_ANT_MODULE);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_ANT + "/in/between/something.txt")))
.isEqualTo(EXPECTED_ANT_MODULE);
assertThat(detector.guessModuleName(PREFIX + "/path/to/something.txt"))
.isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIgnoreExceptionsDuringParsing() {
FileSystem fileSystem = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(new String[NO_RESULT]);
when(stub.create(anyString())).thenThrow(new FileNotFoundException("File not found"));
});
ModuleDetector detector = new ModuleDetector(ROOT, fileSystem);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_ANT + "/something.txt")))
.isEqualTo(StringUtils.EMPTY);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_MAVEN + "/something.txt")))
.isEqualTo(StringUtils.EMPTY);
}
@Test
void shouldIdentifyModuleIfThereAreMoreEntries() {
FileSystem factory = createFileSystemStub(stub -> {
String ant = PATH_PREFIX_ANT + ModuleDetector.ANT_PROJECT;
String maven = PATH_PREFIX_MAVEN + ModuleDetector.MAVEN_POM;
when(stub.find(any(), anyString())).thenReturn(new String[]{ant, maven});
when(stub.create(PREFIX + ant)).thenReturn(read(ModuleDetector.ANT_PROJECT));
when(stub.create(PREFIX + maven)).thenAnswer(filename -> read(ModuleDetector.MAVEN_POM));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_ANT + "/something.txt")))
.isEqualTo(EXPECTED_ANT_MODULE);
assertThat(detector.guessModuleName(PREFIX + (PATH_PREFIX_MAVEN + "/something.txt")))
.isEqualTo(EXPECTED_MAVEN_MODULE);
}
@Test
void shouldEnsureThatMavenHasPrecedenceOverAnt() {
String prefix = "/prefix/";
String ant = prefix + ModuleDetector.ANT_PROJECT;
String maven = prefix + ModuleDetector.MAVEN_POM;
verifyOrder(prefix, ant, maven, new String[]{ant, maven});
verifyOrder(prefix, ant, maven, new String[]{maven, ant});
}
private void verifyOrder(final String prefix, final String ant, final String maven, final String[] foundFiles) {
FileSystem factory = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(foundFiles);
when(stub.create(ant)).thenReturn(read(ModuleDetector.ANT_PROJECT));
when(stub.create(maven)).thenAnswer(filename -> read(ModuleDetector.MAVEN_POM));
});
ModuleDetector detector = new ModuleDetector(ROOT, factory);
assertThat(detector.guessModuleName(prefix + "/something.txt")).isEqualTo(EXPECTED_MAVEN_MODULE);
}
@Test
void shouldEnsureThatOsgiHasPrecedenceOverMavenAndAnt() {
String prefix = "/prefix/";
String ant = prefix + ModuleDetector.ANT_PROJECT;
String maven = prefix + ModuleDetector.MAVEN_POM;
String osgi = prefix + ModuleDetector.OSGI_BUNDLE;
verifyOrder(prefix, ant, maven, osgi, ant, maven, osgi);
verifyOrder(prefix, ant, maven, osgi, ant, osgi, maven);
verifyOrder(prefix, ant, maven, osgi, maven, ant, osgi);
verifyOrder(prefix, ant, maven, osgi, maven, osgi, ant);
verifyOrder(prefix, ant, maven, osgi, osgi, ant, maven);
verifyOrder(prefix, ant, maven, osgi, osgi, maven, osgi);
}
private void verifyOrder(final String prefix, final String ant, final String maven, final String osgi,
final String... foundFiles) {
FileSystem fileSystem = createFileSystemStub(stub -> {
when(stub.find(any(), anyString())).thenReturn(foundFiles);
when(stub.create(ant)).thenAnswer(filename -> read(ModuleDetector.ANT_PROJECT));
when(stub.create(maven)).thenAnswer(filename -> read(ModuleDetector.MAVEN_POM));
when(stub.create(osgi)).thenAnswer(filename -> read(MANIFEST));
when(stub.create(prefix + "/" + ModuleDetector.PLUGIN_PROPERTIES)).thenAnswer(filename -> createEmptyStream());
when(stub.create(prefix + "/" + ModuleDetector.BUNDLE_PROPERTIES)).thenAnswer(filename -> createEmptyStream());
});
ModuleDetector detector = new ModuleDetector(ROOT, fileSystem);
assertThat(detector.guessModuleName(prefix + "/something.txt")).isEqualTo(EXPECTED_OSGI_MODULE);
}
private InputStream createEmptyStream() {
try {
return IOUtils.toInputStream("", "UTF-8");
}
catch (IOException ignored) {
return null;
}
}
/**
 * Creates a Mockito mock of {@link FileSystem} and lets the caller configure
 * its behavior via the given {@link Stub} lambda.
 *
 * @param stub configures the mock; may throw {@link FileNotFoundException}
 * @return the configured mock
 */
private FileSystem createFileSystemStub(final Stub stub) {
    try {
        FileSystem fileSystem = mock(FileSystem.class);
        stub.apply(fileSystem);
        return fileSystem;
    }
    catch (FileNotFoundException exception) {
        // Stubbing itself must not fail; treat it as a broken test setup.
        throw new AssertionError(exception);
    }
}
/**
 * Stubs the {@link PackageDetectors.FileSystem} using a lambda.
 * The functional method declares {@link FileNotFoundException} so that
 * stubbing code can call file-reading helpers without a try/catch block.
 * NOTE(review): the link above names PackageDetectors.FileSystem, but the
 * stub here configures the FileSystem used by ModuleDetector — confirm the
 * reference target.
 */
@FunctionalInterface
private interface Stub {
    void apply(FileSystem f) throws FileNotFoundException;
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: max
* Date: Jun 4, 2002
* Time: 8:27:13 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij.openapi.editor.impl;
import com.intellij.diagnostic.Dumpable;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.FoldingListener;
import com.intellij.openapi.editor.ex.FoldingModelEx;
import com.intellij.openapi.editor.ex.PrioritizedInternalDocumentListener;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.ModificationTracker;
import com.intellij.util.DocumentUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.awt.*;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Default implementation of the editor folding model: owns all fold regions of
 * an {@link EditorImpl}, tracks their collapsed/expanded state, keeps them in
 * sync with document changes, and restores caret position and scrolling after
 * batch folding operations.
 * <p>
 * All mutating operations must happen on the EDT and inside a batch folding
 * operation (see {@link #runBatchFoldingOperation(Runnable)}).
 */
public class FoldingModelImpl implements FoldingModelEx, PrioritizedInternalDocumentListener, Dumpable, ModificationTracker {
  // NOTE(review): the logger category says "EditorFoldingModelImpl" while the
  // class is FoldingModelImpl — looks like a leftover name; confirm before
  // changing the string (log configuration may depend on it).
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.editor.impl.EditorFoldingModelImpl");
  // Per-caret logical position remembered when a region collapses over the
  // caret, so the caret can be restored when that region expands again.
  private static final Key<SavedCaretPosition> SAVED_CARET_POSITION = Key.create("saved.position.before.folding");
  // Marks a caret whose position must be re-applied once the current batch
  // folding operation finishes.
  private static final Key<Boolean> MARK_FOR_UPDATE = Key.create("marked.for.position.update");
  private final List<FoldingListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  private boolean myIsFoldingEnabled;
  private final EditorImpl myEditor;
  // Interval tree of the region range markers; package-private for FoldRegionImpl.
  final RangeMarkerTree<FoldRegionImpl> myRegionTree;
  // Cached hierarchy/offset structure over the regions (top-level regions,
  // folded-line counts, etc.).
  private final FoldRegionsTree myFoldTree;
  private TextAttributes myFoldTextAttributes;
  private boolean myIsBatchFoldingProcessing;
  private boolean myDoNotCollapseCaret;
  // True when the current batch operation actually changed some region.
  private boolean myFoldRegionsProcessed;
  // Vertical distance of the caret from the viewport top, saved before a batch
  // operation so the view can be scrolled back afterwards.
  private int mySavedCaretShift;
  private final MultiMap<FoldingGroup, FoldRegion> myGroups = new MultiMap<>();
  private boolean myDocumentChangeProcessed = true;
  // Counts expansions only; used as the modification count of this model.
  private final AtomicLong myExpansionCounter = new AtomicLong();

  public FoldingModelImpl(@NotNull EditorImpl editor) {
    myEditor = editor;
    myIsFoldingEnabled = true;
    myIsBatchFoldingProcessing = false;
    myDoNotCollapseCaret = false;
    myRegionTree = new RangeMarkerTree<>(editor.getDocument());
    myFoldTree = new FoldRegionsTree() {
      @Override
      protected boolean isFoldingEnabled() {
        return FoldingModelImpl.this.isFoldingEnabled();
      }
    };
    myFoldRegionsProcessed = false;
    refreshSettings();
  }

  @Override
  @NotNull
  public List<FoldRegion> getGroupedRegions(@NotNull FoldingGroup group) {
    return (List<FoldRegion>)myGroups.get(group);
  }

  @Override
  public void clearDocumentRangesModificationStatus() {
    assertIsDispatchThreadForEditor();
    myFoldTree.clearDocumentRangesModificationStatus();
  }

  @Override
  public boolean hasDocumentRegionChangedFor(@NotNull FoldRegion region) {
    assertReadAccess();
    return region instanceof FoldRegionImpl && ((FoldRegionImpl)region).hasDocumentRegionChanged();
  }

  /**
   * Returns the region of the group with the smallest start offset.
   *
   * @param child a region known to belong to the group; used only for the
   *              diagnostic message if the group is unexpectedly empty
   */
  @NotNull
  FoldRegion getFirstRegion(@NotNull FoldingGroup group, @NotNull FoldRegion child) {
    final List<FoldRegion> regions = getGroupedRegions(group);
    if (regions.isEmpty()) {
      final boolean inAll = Arrays.asList(getAllFoldRegions()).contains(child);
      throw new AssertionError("Folding group without children; the known child is in all: " + inAll);
    }
    FoldRegion main = regions.get(0);
    for (int i = 1; i < regions.size(); i++) {
      FoldRegion region = regions.get(i);
      if (main.getStartOffset() > region.getStartOffset()) {
        main = region;
      }
    }
    return main;
  }

  /** Returns the largest end offset among the valid regions of the group (0 if none are valid). */
  public int getEndOffset(@NotNull FoldingGroup group) {
    final List<FoldRegion> regions = getGroupedRegions(group);
    int endOffset = 0;
    for (FoldRegion region : regions) {
      if (region.isValid()) {
        endOffset = Math.max(endOffset, region.getEndOffset());
      }
    }
    return endOffset;
  }

  /** Re-reads the placeholder text attributes from the current color scheme. */
  void refreshSettings() {
    myFoldTextAttributes = myEditor.getColorsScheme().getAttributes(EditorColors.FOLDED_TEXT_ATTRIBUTES);
  }

  @Override
  public boolean isFoldingEnabled() {
    return myIsFoldingEnabled;
  }

  @Override
  public boolean isOffsetCollapsed(int offset) {
    assertReadAccess();
    return getCollapsedRegionAtOffset(offset) != null;
  }

  // Like isOffsetCollapsed(), but excludes the case where the offset is exactly
  // at the start of the collapsed region (i.e. the offset must be strictly inside).
  private boolean isOffsetInsideCollapsedRegion(int offset) {
    assertReadAccess();
    FoldRegion region = getCollapsedRegionAtOffset(offset);
    return region != null && region.getStartOffset() < offset;
  }

  private static void assertIsDispatchThreadForEditor() {
    ApplicationManager.getApplication().assertIsDispatchThread();
  }

  private static void assertReadAccess() {
    ApplicationManager.getApplication().assertReadAccessAllowed();
  }

  // Foreign FoldRegion implementations cannot be stored in myRegionTree.
  private static void assertOurRegion(FoldRegion region) {
    if (!(region instanceof FoldRegionImpl)) {
      throw new IllegalArgumentException("Only regions created by this instance of FoldingModel are accepted");
    }
  }

  @Override
  public void setFoldingEnabled(boolean isEnabled) {
    assertIsDispatchThreadForEditor();
    myIsFoldingEnabled = isEnabled;
  }

  @Override
  public FoldRegion addFoldRegion(int startOffset, int endOffset, @NotNull String placeholderText) {
    FoldRegion region = createFoldRegion(startOffset, endOffset, placeholderText, null, false);
    if (region == null) return null;
    if (!addFoldRegion(region)) {
      // Registration failed; dispose so the range marker does not leak.
      region.dispose();
      return null;
    }
    return region;
  }

  @Override
  public boolean addFoldRegion(@NotNull final FoldRegion region) {
    assertIsDispatchThreadForEditor();
    assertOurRegion(region);
    if (!isFoldingEnabled()) {
      return false;
    }
    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be added or removed inside batchFoldProcessing() only.");
      return false;
    }
    // Reject regions whose boundaries would split a surrogate pair.
    if (!region.isValid() ||
        DocumentUtil.isInsideSurrogatePair(myEditor.getDocument(), region.getStartOffset()) ||
        DocumentUtil.isInsideSurrogatePair(myEditor.getDocument(), region.getEndOffset())) {
      return false;
    }
    myFoldRegionsProcessed = true;
    if (myFoldTree.addRegion(region)) {
      final FoldingGroup group = region.getGroup();
      if (group != null) {
        myGroups.putValue(group, region);
      }
      notifyListenersOnFoldRegionStateChange(region);
      return true;
    }
    return false;
  }

  @Override
  public void runBatchFoldingOperation(@NotNull Runnable operation) {
    runBatchFoldingOperation(operation, false, true);
  }

  @Override
  public void runBatchFoldingOperation(@NotNull Runnable operation, boolean moveCaret) {
    runBatchFoldingOperation(operation, false, moveCaret);
  }

  /**
   * Runs {@code operation} as a (possibly nested) batch folding operation.
   * Only the outermost invocation saves the caret's viewport shift beforehand
   * and performs the post-processing (caret/selection/scroll restoration)
   * afterwards — and only if some region actually changed.
   */
  private void runBatchFoldingOperation(@NotNull Runnable operation, final boolean dontCollapseCaret, final boolean moveCaret) {
    assertIsDispatchThreadForEditor();
    boolean oldDontCollapseCaret = myDoNotCollapseCaret;
    // Sticky within nesting: once set by an outer call it stays set for inner ones.
    myDoNotCollapseCaret |= dontCollapseCaret;
    boolean oldBatchFlag = myIsBatchFoldingProcessing;
    if (!oldBatchFlag) {
      ((ScrollingModelImpl)myEditor.getScrollingModel()).finishAnimation();
      mySavedCaretShift = myEditor.visibleLineToY(myEditor.getCaretModel().getVisualPosition().line) - myEditor.getScrollingModel().getVerticalScrollOffset();
    }
    myIsBatchFoldingProcessing = true;
    try {
      operation.run();
    }
    finally {
      if (!oldBatchFlag) {
        myIsBatchFoldingProcessing = false;
        if (myFoldRegionsProcessed) {
          notifyBatchFoldingProcessingDone(moveCaret);
          myFoldRegionsProcessed = false;
        }
      }
      myDoNotCollapseCaret = oldDontCollapseCaret;
    }
  }

  @Override
  public void runBatchFoldingOperationDoNotCollapseCaret(@NotNull final Runnable operation) {
    runBatchFoldingOperation(operation, true, true);
  }

  /**
   * Disables caret position adjustment after batch folding operation is finished.
   * Should be called from inside batch operation runnable.
   */
  void flushCaretShift() {
    mySavedCaretShift = -1;
  }

  @Override
  @NotNull
  public FoldRegion[] getAllFoldRegions() {
    assertReadAccess();
    return myFoldTree.fetchAllRegions();
  }

  @Override
  @Nullable
  public FoldRegion getCollapsedRegionAtOffset(int offset) {
    return myFoldTree.fetchOutermost(offset);
  }

  @Nullable
  @Override
  public FoldRegion getFoldRegion(int startOffset, int endOffset) {
    assertReadAccess();
    return myFoldTree.getRegionAt(startOffset, endOffset);
  }

  /** Returns the outermost collapsed region whose placeholder covers the given screen point, if any. */
  @Override
  @Nullable
  public FoldRegion getFoldingPlaceholderAt(@NotNull Point p) {
    assertReadAccess();
    LogicalPosition pos = myEditor.xyToLogicalPosition(p);
    int line = pos.line;
    // Points below the last line cannot hit a placeholder.
    if (line >= myEditor.getDocument().getLineCount()) return null;
    int offset = myEditor.logicalPositionToOffset(pos);
    return myFoldTree.fetchOutermost(offset);
  }

  @Override
  public void removeFoldRegion(@NotNull final FoldRegion region) {
    assertIsDispatchThreadForEditor();
    assertOurRegion(region);
    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be added or removed inside batchFoldProcessing() only.");
    }
    // Expand first (without notification) so listeners observe a consistent state change.
    ((FoldRegionImpl)region).setExpanded(true, false);
    notifyListenersOnFoldRegionStateChange(region);
    final FoldingGroup group = region.getGroup();
    if (group != null) {
      myGroups.remove(group, region);
    }
    myFoldTree.removeRegion(region);
    myFoldRegionsProcessed = true;
    region.dispose();
  }

  void removeRegionFromTree(@NotNull FoldRegionImpl region) {
    myRegionTree.removeInterval(region);
  }

  public void dispose() {
    doClearFoldRegions();
    myRegionTree.dispose();
  }

  @Override
  public void clearFoldRegions() {
    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be added or removed inside batchFoldProcessing() only.");
      return;
    }
    FoldRegion[] regions = getAllFoldRegions();
    // Only removing a collapsed region is an observable state change.
    for (FoldRegion region : regions) {
      if (!region.isExpanded()) {
        notifyListenersOnFoldRegionStateChange(region);
        myFoldRegionsProcessed = true;
      }
    }
    doClearFoldRegions();
  }

  private void doClearFoldRegions() {
    myGroups.clear();
    myFoldTree.clear();
  }

  void expandFoldRegion(@NotNull FoldRegion region, boolean notify) {
    assertIsDispatchThreadForEditor();
    if (region.isExpanded() || region.shouldNeverExpand()) return;
    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be collapsed or expanded inside batchFoldProcessing() only.");
    }
    for (Caret caret : myEditor.getCaretModel().getAllCarets()) {
      SavedCaretPosition savedPosition = caret.getUserData(SAVED_CARET_POSITION);
      if (savedPosition != null && savedPosition.isUpToDate(myEditor)) {
        int savedOffset = myEditor.logicalPositionToOffset(savedPosition.position);
        FoldRegion[] allCollapsed = myFoldTree.fetchCollapsedAt(savedOffset);
        // If this is the only region hiding the saved caret position, mark the
        // caret so its position is restored after the batch operation.
        if (allCollapsed.length == 1 && allCollapsed[0] == region) {
          caret.putUserData(MARK_FOR_UPDATE, Boolean.TRUE);
        }
      }
    }
    myFoldRegionsProcessed = true;
    myExpansionCounter.incrementAndGet();
    ((FoldRegionImpl) region).setExpandedInternal(true);
    if (notify) notifyListenersOnFoldRegionStateChange(region);
  }

  void collapseFoldRegion(@NotNull FoldRegion region, boolean notify) {
    assertIsDispatchThreadForEditor();
    if (!region.isExpanded()) return;
    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be collapsed or expanded inside batchFoldProcessing() only.");
    }
    List<Caret> carets = myEditor.getCaretModel().getAllCarets();
    // First pass: if any caret sits inside the region and collapsing over the
    // caret is forbidden, bail out without collapsing.
    for (Caret caret : carets) {
      LogicalPosition caretPosition = caret.getLogicalPosition();
      int caretOffset = myEditor.logicalPositionToOffset(caretPosition);
      if (FoldRegionsTree.contains(region, caretOffset)) {
        if (myDoNotCollapseCaret) return;
      }
    }
    // Second pass: remember the position of each caret the region will hide.
    for (Caret caret : carets) {
      int caretOffset = caret.getOffset();
      if (FoldRegionsTree.contains(region, caretOffset)) {
        SavedCaretPosition savedPosition = caret.getUserData(SAVED_CARET_POSITION);
        if (savedPosition == null || !savedPosition.isUpToDate(myEditor)) {
          caret.putUserData(SAVED_CARET_POSITION, new SavedCaretPosition(caret));
        }
      }
    }
    myFoldRegionsProcessed = true;
    ((FoldRegionImpl) region).setExpandedInternal(false);
    if (notify) notifyListenersOnFoldRegionStateChange(region);
  }

  /**
   * Post-processing after the outermost batch folding operation: rebuilds the
   * fold tree, notifies listeners, repaints, repositions carets that ended up
   * inside collapsed regions (or whose saved positions became visible again),
   * fixes up selections, and restores the caret's viewport shift.
   */
  private void notifyBatchFoldingProcessingDone(final boolean moveCaretFromCollapsedRegion) {
    rebuild();
    for (FoldingListener listener : myListeners) {
      listener.onFoldProcessingEnd();
    }
    myEditor.updateCaretCursor();
    myEditor.recalculateSizeAndRepaint();
    myEditor.getGutterComponentEx().updateSize();
    myEditor.getGutterComponentEx().repaint();
    myEditor.invokeDelayedErrorStripeRepaint();
    for (Caret caret : myEditor.getCaretModel().getAllCarets()) {
      // There is a possible case that caret position is already visual position aware. But visual position depends on number of folded
      // logical lines as well, hence, we can't be sure that target logical position defines correct visual position because fold
      // regions have just changed. Hence, we use 'raw' logical position instead.
      LogicalPosition caretPosition = caret.getLogicalPosition().withoutVisualPositionInfo();
      int caretOffset = myEditor.logicalPositionToOffset(caretPosition);
      int selectionStart = caret.getSelectionStart();
      int selectionEnd = caret.getSelectionEnd();
      LogicalPosition positionToUse = null;
      int offsetToUse = -1;
      FoldRegion collapsed = myFoldTree.fetchOutermost(caretOffset);
      SavedCaretPosition savedPosition = caret.getUserData(SAVED_CARET_POSITION);
      boolean markedForUpdate = caret.getUserData(MARK_FOR_UPDATE) != null;
      if (savedPosition != null && savedPosition.isUpToDate(myEditor)) {
        int savedOffset = myEditor.logicalPositionToOffset(savedPosition.position);
        FoldRegion collapsedAtSaved = myFoldTree.fetchOutermost(savedOffset);
        if (collapsedAtSaved == null) {
          // The saved position is visible again — restore it.
          positionToUse = savedPosition.position;
        }
        else {
          // Still hidden — move to the start of the region that hides it.
          offsetToUse = collapsedAtSaved.getStartOffset();
        }
      }
      if (collapsed != null && positionToUse == null) {
        positionToUse = myEditor.offsetToLogicalPosition(collapsed.getStartOffset());
      }
      if ((markedForUpdate || moveCaretFromCollapsedRegion) && caret.isUpToDate()) {
        if (offsetToUse >= 0) {
          caret.moveToOffset(offsetToUse);
        }
        else if (positionToUse != null) {
          caret.moveToLogicalPosition(positionToUse);
        }
        else {
          caret.moveToLogicalPosition(caretPosition);
        }
      }
      // NOTE(review): this writes back the value that was just read, so it looks
      // like a no-op; possibly it was meant to clear the key — confirm intent.
      caret.putUserData(SAVED_CARET_POSITION, savedPosition);
      caret.putUserData(MARK_FOR_UPDATE, null);
      if (isOffsetInsideCollapsedRegion(selectionStart) || isOffsetInsideCollapsedRegion(selectionEnd)) {
        caret.removeSelection();
      } else if (selectionStart < myEditor.getDocument().getTextLength()) {
        caret.setSelection(selectionStart, selectionEnd);
      }
    }
    if (mySavedCaretShift > 0) {
      // Scroll so that the caret keeps its previous distance from the viewport top.
      final ScrollingModel scrollingModel = myEditor.getScrollingModel();
      scrollingModel.disableAnimation();
      scrollingModel.scrollVertically(myEditor.visibleLineToY(myEditor.getCaretModel().getVisualPosition().line) - mySavedCaretShift);
      scrollingModel.enableAnimation();
    }
  }

  @Override
  public void rebuild() {
    // During a bulk update the document state is transient; the rebuild happens
    // in onBulkDocumentUpdateFinished() instead.
    if (!myEditor.getDocument().isInBulkUpdate()) {
      myFoldTree.rebuild();
    }
  }

  public boolean isInBatchFoldingOperation() {
    return myIsBatchFoldingProcessing;
  }

  private void updateCachedOffsets() {
    myFoldTree.updateCachedOffsets();
  }

  public int getFoldedLinesCountBefore(int offset) {
    if (!myDocumentChangeProcessed && myEditor.getDocument().isInEventsHandling()) {
      // There is a possible case that this method is called on document update before fold regions are recalculated.
      // We return zero in such situations then.
      return 0;
    }
    return myFoldTree.getFoldedLinesCountBefore(offset);
  }

  int getTotalNumberOfFoldedLines() {
    if (!myDocumentChangeProcessed && myEditor.getDocument().isInEventsHandling()) {
      // There is a possible case that this method is called on document update before fold regions are recalculated.
      // We return zero in such situations then.
      return 0;
    }
    return myFoldTree.getTotalNumberOfFoldedLines();
  }

  @Override
  @Nullable
  public FoldRegion[] fetchTopLevel() {
    return myFoldTree.fetchTopLevel();
  }

  @NotNull
  public FoldRegion[] fetchCollapsedAt(int offset) {
    return myFoldTree.fetchCollapsedAt(offset);
  }

  @Override
  public boolean intersectsRegion (int startOffset, int endOffset) {
    return myFoldTree.intersectsRegion(startOffset, endOffset);
  }

  public FoldRegion[] fetchVisible() {
    return myFoldTree.fetchVisible();
  }

  @Override
  public int getLastCollapsedRegionBefore(int offset) {
    return myFoldTree.getLastTopLevelIndexBefore(offset);
  }

  @Override
  public TextAttributes getPlaceholderAttributes() {
    return myFoldTextAttributes;
  }

  /** Drops the saved pre-collapse position of the given caret. */
  void flushCaretPosition(@NotNull Caret caret) {
    caret.putUserData(SAVED_CARET_POSITION, null);
  }

  void onBulkDocumentUpdateStarted() {
    myFoldTree.clearCachedValues();
  }

  void onBulkDocumentUpdateFinished() {
    myFoldTree.rebuild();
  }

  @Override
  public void beforeDocumentChange(DocumentEvent event) {
    myDocumentChangeProcessed = false;
  }

  @Override
  public void documentChanged(DocumentEvent event) {
    try {
      if (!((DocumentEx)event.getDocument()).isInBulkUpdate()) {
        updateCachedOffsets();
      }
    }
    finally {
      myDocumentChangeProcessed = true;
    }
  }

  @Override
  public void moveTextHappened(int start, int end, int base) {
    if (!myEditor.getDocument().isInBulkUpdate()) {
      myFoldTree.rebuild();
    }
  }

  @Override
  public int getPriority() {
    return EditorDocumentPriorities.FOLD_MODEL;
  }

  /**
   * Creates a region and registers its range marker, but does not add it to
   * the fold tree — see {@link #addFoldRegion(FoldRegion)} for that.
   */
  @Override
  public FoldRegion createFoldRegion(int startOffset,
                                     int endOffset,
                                     @NotNull String placeholder,
                                     @Nullable FoldingGroup group,
                                     boolean neverExpands) {
    FoldRegionImpl region = new FoldRegionImpl(myEditor, startOffset, endOffset, placeholder, group, neverExpands);
    myRegionTree.addInterval(region, startOffset, endOffset, false, false, 0);
    LOG.assertTrue(region.isValid());
    return region;
  }

  @Override
  public void addListener(@NotNull final FoldingListener listener, @NotNull Disposable parentDisposable) {
    myListeners.add(listener);
    // Auto-unsubscribe when the parent is disposed.
    Disposer.register(parentDisposable, () -> myListeners.remove(listener));
  }

  private void notifyListenersOnFoldRegionStateChange(@NotNull FoldRegion foldRegion) {
    for (FoldingListener listener : myListeners) {
      listener.onFoldRegionStateChange(foldRegion);
    }
  }

  @NotNull
  @Override
  public String dumpState() {
    return Arrays.toString(myFoldTree.fetchTopLevel());
  }

  @Override
  public String toString() {
    return dumpState();
  }

  @Override
  public long getModificationCount() {
    return myExpansionCounter.get();
  }

  @TestOnly
  public void validateState() {
    // No valid region may start or end inside a surrogate pair.
    for (FoldRegion region : getAllFoldRegions()) {
      LOG.assertTrue (!region.isValid() ||
                      !DocumentUtil.isInsideSurrogatePair(myEditor.getDocument(), region.getStartOffset()) &&
                      !DocumentUtil.isInsideSurrogatePair(myEditor.getDocument(), region.getEndOffset()));
    }
  }

  /** Caret position captured before a region collapsed over it, tied to a document stamp. */
  private static class SavedCaretPosition {
    private final LogicalPosition position;
    private final long docStamp;

    private SavedCaretPosition(Caret caret) {
      position = caret.getLogicalPosition().withoutVisualPositionInfo();
      docStamp = caret.getEditor().getDocument().getModificationStamp();
    }

    // A saved position is only meaningful while the document is unchanged.
    private boolean isUpToDate(Editor editor) {
      return docStamp == editor.getDocument().getModificationStamp();
    }
  }
}
| |
/*
* Apache 2.0 License
*
* Copyright (c) Sebastian Katzer 2017
*
* This file contains Original Code and/or Modifications of Original Code
* as defined in and that are subject to the Apache License
* Version 2.0 (the 'License'). You may not use this file except in
* compliance with the License. Please obtain a copy of the License at
* http://opensource.org/licenses/Apache-2.0/ and read it before using this
* file.
*
* The Original Code and all software distributed under the License are
* distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
* INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
* Please see the License for the specific language governing rights and
* limitations under the License.
*/
// codebeat:disable[TOO_MANY_FUNCTIONS]
package de.appplant.cordova.plugin.notification;
import android.annotation.SuppressLint;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.content.Context;
import android.content.SharedPreferences;
import android.media.AudioAttributes;
import android.media.RingtoneManager;
import android.service.notification.StatusBarNotification;
import android.support.v4.app.NotificationManagerCompat;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import de.appplant.cordova.plugin.badge.BadgeImpl;
import static android.os.Build.VERSION.SDK_INT;
import static android.os.Build.VERSION_CODES.M;
import static android.os.Build.VERSION_CODES.O;
import static android.support.v4.app.NotificationCompat.PRIORITY_MIN;
import static android.support.v4.app.NotificationCompat.PRIORITY_LOW;
import static android.support.v4.app.NotificationCompat.PRIORITY_DEFAULT;
import static android.support.v4.app.NotificationCompat.PRIORITY_HIGH;
import static android.support.v4.app.NotificationCompat.PRIORITY_MAX;
import static android.support.v4.app.NotificationManagerCompat.IMPORTANCE_MIN;
import static android.support.v4.app.NotificationManagerCompat.IMPORTANCE_LOW;
import static android.support.v4.app.NotificationManagerCompat.IMPORTANCE_DEFAULT;
import static android.support.v4.app.NotificationManagerCompat.IMPORTANCE_HIGH;
import static de.appplant.cordova.plugin.notification.Notification.PREF_KEY_ID;
import static de.appplant.cordova.plugin.notification.Notification.Type.TRIGGERED;
import de.appplant.cordova.plugin.notification.Options;
/**
* Central way to access all or single local notifications set by specific
* state like triggered or scheduled. Offers shortcut ways to schedule,
* cancel or clear local notifications.
*/
/**
 * Central way to access all or single local notifications set by specific
 * state like triggered or scheduled. Offers shortcut ways to schedule,
 * cancel or clear local notifications.
 * <p>
 * Notification options are persisted as JSON in a shared-preferences file
 * keyed by the notification ID (see {@link #getPrefs()}).
 */
public final class Manager {

    static final String DEFAULT_CHANNEL_ID = "default-channel-id";

    static final String DEFAULT_CHANNEL_DESCRIPTION = "Default channel";

    // The application context
    private Context context;

    /**
     * Constructor
     *
     * @param context Application context
     */
    private Manager(Context context) {
        this.context = context;
        //createDefaultChannel();
    }

    /**
     * Static method to retrieve class instance.
     * Note: this creates a new instance on every call; it is not a singleton.
     *
     * @param context Application context
     */
    public static Manager getInstance(Context context) {
        return new Manager(context);
    }

    /**
     * Check if app has local notification permission.
     */
    public boolean hasPermission () {
        return getNotCompMgr().areNotificationsEnabled();
    }

    /**
     * Schedule local notification specified by request.
     *
     * @param request  Set of notification options.
     * @param receiver Receiver to handle the trigger event.
     */
    public Notification schedule (Request request, Class<?> receiver) {
        Options      options = request.getOptions();
        Notification toast   = new Notification(context, options);
        toast.schedule(request, receiver);
        return toast;
    }

    /**
     * TODO: temporary
     * Creates the notification channel for the given options on Android O+,
     * mapping the legacy priority to a channel importance. Does nothing if a
     * channel with that ID already exists (channel settings are immutable once
     * created).
     */
    @SuppressLint("WrongConstant")
    public void createChannel(Options options) {
        NotificationManager mgr = getNotMgr();
        int importance = IMPORTANCE_DEFAULT;
        // Channels exist only since Android O.
        if (SDK_INT < O)
            return;
        NotificationChannel channel = mgr.getNotificationChannel(options.getChannel());
        if (channel != null)
            return;
        switch (options.getPrio()) {
            case PRIORITY_MIN:
                importance = IMPORTANCE_MIN;
                break;
            case PRIORITY_LOW:
                importance = IMPORTANCE_LOW;
                break;
            case PRIORITY_DEFAULT:
                importance = IMPORTANCE_DEFAULT;
                break;
            case PRIORITY_HIGH:
                importance = IMPORTANCE_HIGH;
                break;
            case PRIORITY_MAX:
                // NOTE(review): PRIORITY_MAX also maps to IMPORTANCE_HIGH —
                // presumably deliberate (no IMPORTANCE_MAX import here); confirm.
                importance = IMPORTANCE_HIGH;
                break;
        }
        channel = new NotificationChannel(
                options.getChannel(), options.getChannelDescription(), importance);
        if(!options.isSilent() && importance > IMPORTANCE_DEFAULT) channel.setBypassDnd(true);
        if(!options.isWithoutLights()) channel.enableLights(true);
        if(options.isWithVibration()) {
            channel.enableVibration(true);
        } else {
            // A zero-length pattern with vibration enabled — presumably the
            // workaround to suppress the default vibration on O+; confirm.
            channel.setVibrationPattern(new long[]{ 0 });
            channel.enableVibration(true);
        }
        channel.setLightColor(options.getLedColor());
        if(options.isWithoutSound()) {
            channel.setSound(null, null);
        } else {
            AudioAttributes audioAttributes = new AudioAttributes.Builder()
                    .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
                    .setUsage(AudioAttributes.USAGE_NOTIFICATION).build();
            if(options.isWithDefaultSound()) {
                channel.setSound(RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION), audioAttributes);
            } else {
                channel.setSound(options.getSound(), audioAttributes);
            }
        }
        mgr.createNotificationChannel(channel);
    }

    /**
     * Update local notification specified by ID.
     *
     * @param id       The notification ID.
     * @param updates  JSON object with notification options.
     * @param receiver Receiver to handle the trigger event.
     *
     * @return null if no notification with that ID exists.
     */
    public Notification update (int id, JSONObject updates, Class<?> receiver) {
        Notification notification = get(id);
        if (notification == null)
            return null;
        notification.update(updates, receiver);
        return notification;
    }

    /**
     * Clear local notification specified by ID.
     *
     * @param id The notification ID.
     *
     * @return the cleared notification, or null if none with that ID exists.
     */
    public Notification clear (int id) {
        Notification toast = get(id);
        if (toast != null) {
            toast.clear();
        }
        return toast;
    }

    /**
     * Clear all local notifications.
     */
    public void clearAll () {
        // Only triggered notifications are visible in the status bar.
        List<Notification> toasts = getByType(TRIGGERED);
        for (Notification toast : toasts) {
            toast.clear();
        }
        getNotCompMgr().cancelAll();
        setBadge(0);
    }

    /**
     * Clear local notification specified by ID.
     *
     * @param id The notification ID
     *
     * @return the cancelled notification, or null if none with that ID exists.
     */
    public Notification cancel (int id) {
        Notification toast = get(id);
        if (toast != null) {
            toast.cancel();
        }
        return toast;
    }

    /**
     * Cancel all local notifications.
     */
    public void cancelAll () {
        List<Notification> notifications = getAll();
        for (Notification notification : notifications) {
            notification.cancel();
        }
        getNotCompMgr().cancelAll();
        setBadge(0);
    }

    /**
     * All local notifications IDs.
     * IDs are the numeric keys of the shared-preferences store; keys that do
     * not parse as integers are skipped.
     */
    public List<Integer> getIds() {
        Set<String> keys = getPrefs().getAll().keySet();
        List<Integer> ids = new ArrayList<Integer>();
        for (String key : keys) {
            try {
                ids.add(Integer.parseInt(key));
            } catch (NumberFormatException e) {
                e.printStackTrace();
            }
        }
        return ids;
    }

    /**
     * All local notification IDs for given type.
     * Triggered = currently active in the status bar; scheduled = all stored
     * IDs minus the active ones.
     *
     * @param type The notification life cycle type
     */
    public List<Integer> getIdsByType(Notification.Type type) {
        if (type == Notification.Type.ALL)
            return getIds();
        StatusBarNotification[] activeToasts = getActiveNotifications();
        List<Integer> activeIds = new ArrayList<Integer>();
        for (StatusBarNotification toast : activeToasts) {
            activeIds.add(toast.getId());
        }
        if (type == TRIGGERED)
            return activeIds;
        List<Integer> ids = getIds();
        ids.removeAll(activeIds);
        return ids;
    }

    /**
     * List of local notifications with matching ID.
     * IDs without stored options are silently skipped.
     *
     * @param ids Set of notification IDs.
     */
    private List<Notification> getByIds(List<Integer> ids) {
        List<Notification> toasts = new ArrayList<Notification>();
        for (int id : ids) {
            Notification toast = get(id);
            if (toast != null) {
                toasts.add(toast);
            }
        }
        return toasts;
    }

    /**
     * List of all local notification.
     */
    public List<Notification> getAll() {
        return getByIds(getIds());
    }

    /**
     * List of local notifications from given type.
     *
     * @param type The notification life cycle type
     */
    private List<Notification> getByType(Notification.Type type) {
        if (type == Notification.Type.ALL)
            return getAll();
        List<Integer> ids = getIdsByType(type);
        return getByIds(ids);
    }

    /**
     * List of properties from all local notifications.
     */
    public List<JSONObject> getOptions() {
        return getOptionsById(getIds());
    }

    /**
     * List of properties from local notifications with matching ID.
     *
     * @param ids Set of notification IDs
     */
    public List<JSONObject> getOptionsById(List<Integer> ids) {
        List<JSONObject> toasts = new ArrayList<JSONObject>();
        for (int id : ids) {
            Options options = getOptions(id);
            if (options != null) {
                toasts.add(options.getDict());
            }
        }
        return toasts;
    }

    /**
     * List of properties from all local notifications from given type.
     *
     * @param type
     *      The notification life cycle type
     */
    public List<JSONObject> getOptionsByType(Notification.Type type) {
        ArrayList<JSONObject> options = new ArrayList<JSONObject>();
        List<Notification> notifications = getByType(type);
        for (Notification notification : notifications) {
            options.add(notification.getOptions().getDict());
        }
        return options;
    }

    /**
     * Get local notification options.
     *
     * @param id Notification ID.
     *
     * @return null if could not found.
     */
    public Options getOptions(int id) {
        SharedPreferences prefs = getPrefs();
        String toastId = Integer.toString(id);
        if (!prefs.contains(toastId))
            return null;
        try {
            String json = prefs.getString(toastId, null);
            JSONObject dict = new JSONObject(json);
            return new Options(context, dict);
        } catch (JSONException e) {
            // Corrupt stored JSON is treated the same as a missing entry.
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Get existent local notification.
     *
     * @param id Notification ID.
     *
     * @return null if could not found.
     */
    public Notification get(int id) {
        Options options = getOptions(id);
        if (options == null)
            return null;
        return new Notification(context, options);
    }

    /**
     * Set the badge number of the app icon.
     *
     * @param badge The badge number.
     */
    public void setBadge (int badge) {
        if (badge == 0) {
            new BadgeImpl(context).clearBadge();
        } else {
            new BadgeImpl(context).setBadge(badge);
        }
    }

    /**
     * Get all active status bar notifications.
     * Returns an empty array below Android M, where the API is unavailable.
     */
    StatusBarNotification[] getActiveNotifications() {
        if (SDK_INT >= M) {
            return getNotMgr().getActiveNotifications();
        } else {
            return new StatusBarNotification[0];
        }
    }

    /**
     * Shared private preferences for the application.
     */
    private SharedPreferences getPrefs () {
        return context.getSharedPreferences(PREF_KEY_ID, Context.MODE_PRIVATE);
    }

    /**
     * Notification manager for the application.
     */
    private NotificationManager getNotMgr() {
        return (NotificationManager) context.getSystemService(
                Context.NOTIFICATION_SERVICE);
    }

    /**
     * Notification compat manager for the application.
     */
    public NotificationManagerCompat getNotCompMgr() {
        return NotificationManagerCompat.from(context);
    }

}
// codebeat:enable[TOO_MANY_FUNCTIONS]
| |
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.control;
import org.junit.Assert;
import org.junit.Test;
import org.kaaproject.kaa.common.dto.admin.UserDto;
import org.kaaproject.kaa.common.dto.event.EventClassDto;
import org.kaaproject.kaa.common.dto.event.EventClassFamilyDto;
import org.kaaproject.kaa.common.dto.event.EventClassFamilyVersionDto;
import org.kaaproject.kaa.common.dto.event.EventClassType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* The Class ControlServerEventClassFamilyIT.
*/
public class ControlServerEventClassFamilyIT extends AbstractTestControlServer {
/* (non-Javadoc)
 * @see org.kaaproject.kaa.server.control.AbstractTestControlServer#createTenantAdminNeeded()
 */
@Override
protected boolean createTenantAdminNeeded() {
    // Disable automatic creation: tests create their own tenant admin where
    // required (see testGetEventClassFamiliesByTenantId).
    return false;
}
/* (non-Javadoc)
 * @see org.kaaproject.kaa.server.control.AbstractTestControlServer#createTenantDeveloperNeeded()
 */
@Override
protected boolean createTenantDeveloperNeeded() {
    // Event class family tests do not require a tenant developer user.
    return false;
}
/**
* Test create event class family.
*
* @throws Exception the exception
*/
@Test
public void testCreateEventClassFamily() throws Exception {
EventClassFamilyDto eventClassFamily = createEventClassFamily();
Assert.assertFalse(strIsEmpty(eventClassFamily.getId()));
}
/**
* Test get event class family.
*
* @throws Exception the exception
*/
@Test
public void testGetEventClassFamily() throws Exception {
EventClassFamilyDto eventClassFamily = createEventClassFamily();
EventClassFamilyDto storedEventClassFamily = client.getEventClassFamilyById(eventClassFamily.getId());
Assert.assertNotNull(storedEventClassFamily);
assertEventClassFamiliesEquals(eventClassFamily, storedEventClassFamily);
}
/**
* Test get event class families by tenant id.
*
* @throws Exception the exception
*/
@Test
public void testGetEventClassFamiliesByTenantId() throws Exception {
List<EventClassFamilyDto> eventClassFamilies = new ArrayList<>(10);
UserDto tenant = createTenantAdmin(tenantAdminUser);
loginTenantAdmin(tenantAdminUser);
for (int i = 0; i < 10; i++) {
EventClassFamilyDto eventClassFamily = createEventClassFamily(tenant.getId(), "" + i);
eventClassFamilies.add(eventClassFamily);
}
Collections.sort(eventClassFamilies, new IdComparator());
List<EventClassFamilyDto> storedEventClassFamilies = client.getEventClassFamilies();
Collections.sort(storedEventClassFamilies, new IdComparator());
Assert.assertEquals(eventClassFamilies.size(), storedEventClassFamilies.size());
for (int i = 0; i < eventClassFamilies.size(); i++) {
EventClassFamilyDto eventClassFamily = eventClassFamilies.get(i);
EventClassFamilyDto storedEventClassFamily = storedEventClassFamilies.get(i);
assertEventClassFamiliesEquals(eventClassFamily, storedEventClassFamily);
}
}
/**
* Test update event class family.
*
* @throws Exception the exception
*/
@Test
public void testUpdateEventClassFamily() throws Exception {
EventClassFamilyDto eventClassFamily = createEventClassFamily();
eventClassFamily.setName(generateString(EVENT_CLASS_FAMILY));
EventClassFamilyDto updatedEventClassFamily = client
.editEventClassFamily(eventClassFamily);
assertEventClassFamiliesEquals(updatedEventClassFamily, eventClassFamily);
}
/**
* Test get event classes by family id version and type.
*
* @throws Exception the exception
*/
@Test
public void testGetEventClassesByFamilyIdVersionAndType() throws Exception {
UserDto tenant = createTenantAdmin(tenantAdminUser);
loginTenantAdmin(tenantAdminUser);
EventClassFamilyDto eventClassFamily = createEventClassFamily(tenant.getId());
EventClassFamilyVersionDto eventClassFamilyVersion = createEventClassFamilyVersion(tenant.getId());
client.addEventClassFamilyVersion(eventClassFamily.getId(), eventClassFamilyVersion);
List<EventClassDto> eventClasses = client.getEventClassesByFamilyIdVersionAndType(eventClassFamily.getId(), 1, EventClassType.EVENT);
Assert.assertNotNull(eventClasses);
Assert.assertEquals(1, eventClasses.size());
eventClassFamilyVersion = client.getEventClassFamilyVersionsById(eventClassFamily.getId()).get(0);
for (EventClassDto eventClass : eventClasses) {
Assert.assertEquals(eventClassFamilyVersion.getId(), eventClass.getEcfvId());
Assert.assertEquals(1, eventClass.getVersion());
}
}
/**
* Test duplicate event class family name.
*
* @throws Exception the exception
*/
@Test
public void testDuplicateEventClassFamilyName() throws Exception {
org.kaaproject.kaa.common.dto.admin.UserDto tenant = createTenantAdmin(tenantAdminUser);
loginTenantAdmin(tenantAdminUser);
EventClassFamilyDto eventClassFamily = createEventClassFamily(tenant.getId());
final EventClassFamilyDto secondEventClassFamily = createEventClassFamily(tenant.getId(), "test");
secondEventClassFamily.setName(eventClassFamily.getName());
checkBadRequest(new TestRestCall() {
@Override
public void executeRestCall() throws Exception {
client.editEventClassFamily(secondEventClassFamily);
}
});
}
/**
* Test add event class family schema.
*
* @throws Exception the exception
*/
@Test
public void testAddEventClassFamilyVersion() throws Exception {
UserDto tenantAdmin = createTenantAdmin(tenantAdminUser);
loginTenantAdmin(tenantAdminUser);
EventClassFamilyDto eventClassFamily = createEventClassFamily(tenantAdmin.getTenantId());
EventClassFamilyVersionDto eventClassFamilyVersion = createEventClassFamilyVersion(tenantAdmin.getTenantId());
client.addEventClassFamilyVersion(eventClassFamily.getId(), eventClassFamilyVersion);
List<EventClassFamilyVersionDto> schemas = eventClassService.findEventClassFamilyVersionsByEcfId(eventClassFamily.getId());
Assert.assertNotNull(schemas);
Assert.assertEquals(1, schemas.size());
EventClassFamilyVersionDto eventSchema = schemas.get(0);
Assert.assertNotNull(eventSchema);
Assert.assertEquals(1, eventSchema.getVersion());
eventClassFamilyVersion = createEventClassFamilyVersion(tenantAdmin.getTenantId());
client.addEventClassFamilyVersion(eventClassFamily.getId(), eventClassFamilyVersion);
schemas = eventClassService.findEventClassFamilyVersionsByEcfId(eventClassFamily.getId());
Assert.assertNotNull(schemas);
Assert.assertEquals(2, schemas.size());
eventSchema = schemas.get(1);
Assert.assertNotNull(eventSchema);
Assert.assertEquals(2, eventSchema.getVersion());
}
/**
* Test duplicate event class family fqns.
*
* @throws Exception the exception
*/
@Test
public void testDuplicateEventClassFamilyFqns() throws Exception {
org.kaaproject.kaa.common.dto.admin.UserDto tenant = createTenantAdmin(tenantAdminUser);
loginTenantAdmin(tenantAdminUser);
EventClassFamilyDto eventClassFamily = createEventClassFamily(tenant.getTenantId());
final EventClassFamilyVersionDto eventClassFamilyVersion = createEventClassFamilyVersion(tenant.getTenantId());
loginTenantAdmin(tenantAdminUser);
client.addEventClassFamilyVersion(eventClassFamily.getId(), eventClassFamilyVersion);
checkBadRequest(new TestRestCall() {
@Override
public void executeRestCall() throws Exception {
createEventClassFamily(tenant.getTenantId());
}
});
}
/**
* Assert event class families equals.
*
* @param eventClassFamily the event class family
* @param storedEventClassFamily the stored event class family
*/
private void assertEventClassFamiliesEquals(EventClassFamilyDto eventClassFamily, EventClassFamilyDto storedEventClassFamily) {
Assert.assertEquals(eventClassFamily.getId(), storedEventClassFamily.getId());
Assert.assertEquals(eventClassFamily.getName(), storedEventClassFamily.getName());
Assert.assertEquals(eventClassFamily.getTenantId(), storedEventClassFamily.getTenantId());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query.continuous;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import javax.cache.Cache;
import javax.cache.configuration.CacheEntryListenerConfiguration;
import javax.cache.configuration.Factory;
import javax.cache.event.CacheEntryCreatedListener;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryEventFilter;
import javax.cache.event.CacheEntryExpiredListener;
import javax.cache.event.CacheEntryListener;
import javax.cache.event.CacheEntryRemovedListener;
import javax.cache.event.CacheEntryUpdatedListener;
import javax.cache.event.EventType;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheEntryEventSerializableFilter;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.query.CacheQueryEntryEvent;
import org.apache.ignite.cache.query.ContinuousQuery;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.GridCacheEntryEx;
import org.apache.ignite.internal.processors.cache.GridCacheManagerAdapter;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.database.CacheDataRow;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicAbstractUpdateFuture;
import org.apache.ignite.internal.processors.continuous.GridContinuousHandler;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.CI2;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteAsyncCallback;
import org.apache.ignite.lang.IgniteOutClosure;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.plugin.security.SecurityPermission;
import org.apache.ignite.resources.LoggerResource;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static javax.cache.event.EventType.CREATED;
import static javax.cache.event.EventType.EXPIRED;
import static javax.cache.event.EventType.REMOVED;
import static javax.cache.event.EventType.UPDATED;
import static org.apache.ignite.events.EventType.EVT_CACHE_QUERY_OBJECT_READ;
import static org.apache.ignite.internal.GridTopic.TOPIC_CACHE;
/**
* Continuous queries manager.
*/
public class CacheContinuousQueryManager extends GridCacheManagerAdapter {
/** */
private static final byte CREATED_FLAG = 0b0001;
/** */
private static final byte UPDATED_FLAG = 0b0010;
/** */
private static final byte REMOVED_FLAG = 0b0100;
/** */
private static final byte EXPIRED_FLAG = 0b1000;
/** */
private static final long BACKUP_ACK_FREQ = 5000;
/** Listeners. */
private final ConcurrentMap<UUID, CacheContinuousQueryListener> lsnrs = new ConcurrentHashMap8<>();
/** Listeners count. */
private final AtomicInteger lsnrCnt = new AtomicInteger();
/** Internal entries listeners. */
private final ConcurrentMap<UUID, CacheContinuousQueryListener> intLsnrs = new ConcurrentHashMap8<>();
/** Internal listeners count. */
private final AtomicInteger intLsnrCnt = new AtomicInteger();
/** Query sequence number for message topic. */
private final AtomicLong seq = new AtomicLong();
/** JCache listeners. */
private final ConcurrentMap<CacheEntryListenerConfiguration, JCacheQuery> jCacheLsnrs =
new ConcurrentHashMap8<>();
/** Ordered topic prefix. */
private String topicPrefix;
    /** {@inheritDoc} */
    @Override protected void start0() throws IgniteCheckedException {
        // Append cache name to the topic so each cache gets its own ordered message topic.
        topicPrefix = "CONTINUOUS_QUERY" + (cctx.name() == null ? "" : "_" + cctx.name());
        if (cctx.affinityNode()) {
            // Affinity nodes keep backup queues, so they must process batch acks
            // from clients and periodically clean up acknowledged backup entries.
            cctx.io().addHandler(cctx.cacheId(), CacheContinuousQueryBatchAck.class,
                new CI2<UUID, CacheContinuousQueryBatchAck>() {
                    @Override public void apply(UUID uuid, CacheContinuousQueryBatchAck msg) {
                        CacheContinuousQueryListener lsnr = lsnrs.get(msg.routineId());
                        // Listener may already be unregistered; ack is then simply dropped.
                        if (lsnr != null)
                            lsnr.cleanupBackupQueue(msg.updateCntrs());
                    }
                });
            // Periodic cleanup of acknowledged backup queue entries.
            cctx.time().schedule(new BackupCleaner(lsnrs, cctx.kernalContext()), BACKUP_ACK_FREQ, BACKUP_ACK_FREQ);
        }
    }
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override protected void onKernalStart0() throws IgniteCheckedException {
Iterable<CacheEntryListenerConfiguration> cfgs = cctx.config().getCacheEntryListenerConfigurations();
if (cfgs != null) {
for (CacheEntryListenerConfiguration cfg : cfgs)
executeJCacheQuery(cfg, true, false);
}
}
/** {@inheritDoc} */
@Override protected void onKernalStop0(boolean cancel) {
super.onKernalStop0(cancel);
for (JCacheQuery lsnr : jCacheLsnrs.values()) {
try {
lsnr.cancel();
}
catch (IgniteCheckedException e) {
if (log.isDebugEnabled())
log.debug("Failed to stop JCache entry listener: " + e.getMessage());
}
}
}
    /**
     * Notifies listeners that an update for the given key is skipped: the entry
     * carries no values, only the advancing update counter, so per-partition
     * event ordering stays consistent on the listener side.
     *
     * @param lsnrs Listeners to notify.
     * @param key Entry key.
     * @param partId Partition id.
     * @param updCntr Updated counter.
     * @param primary Primary.
     * @param topVer Topology version.
     */
    public void skipUpdateEvent(Map<UUID, CacheContinuousQueryListener> lsnrs,
        KeyCacheObject key,
        int partId,
        long updCntr,
        boolean primary,
        AffinityTopologyVersion topVer) {
        assert lsnrs != null;
        for (CacheContinuousQueryListener lsnr : lsnrs.values()) {
            // Both new and old values are null: this is a filtered/skipped update marker.
            CacheContinuousQueryEntry e0 = new CacheContinuousQueryEntry(
                cctx.cacheId(),
                UPDATED,
                key,
                null,
                null,
                lsnr.keepBinary(),
                partId,
                updCntr,
                topVer);
            CacheContinuousQueryEvent evt = new CacheContinuousQueryEvent<>(
                cctx.kernalContext().cache().jcache(cctx.name()), cctx, e0);
            lsnr.skipUpdateEvent(evt, topVer, primary);
        }
    }
/**
* @param internal Internal entry flag (internal key or not user cache).
* @param preload Whether update happened during preloading.
* @return Registered listeners.
*/
@Nullable public Map<UUID, CacheContinuousQueryListener> updateListeners(
boolean internal,
boolean preload) {
if (preload && !internal)
return null;
ConcurrentMap<UUID, CacheContinuousQueryListener> lsnrCol;
if (internal)
lsnrCol = intLsnrCnt.get() > 0 ? intLsnrs : null;
else
lsnrCol = lsnrCnt.get() > 0 ? lsnrs : null;
return F.isEmpty(lsnrCol) ? null : lsnrCol;
}
/**
* @param key Key.
* @param newVal New value.
* @param oldVal Old value.
* @param internal Internal entry (internal key or not user cache).
* @param partId Partition.
* @param primary {@code True} if called on primary node.
* @param preload Whether update happened during preloading.
* @param updateCntr Update counter.
* @param fut Dht atomic future.
* @param topVer Topology version.
* @throws IgniteCheckedException In case of error.
*/
public void onEntryUpdated(
KeyCacheObject key,
CacheObject newVal,
CacheObject oldVal,
boolean internal,
int partId,
boolean primary,
boolean preload,
long updateCntr,
@Nullable GridDhtAtomicAbstractUpdateFuture fut,
AffinityTopologyVersion topVer
) throws IgniteCheckedException {
Map<UUID, CacheContinuousQueryListener> lsnrCol = updateListeners(internal, preload);
if (lsnrCol != null) {
onEntryUpdated(
lsnrCol,
key,
newVal,
oldVal,
internal,
partId,
primary,
preload,
updateCntr,
fut,
topVer);
}
}
    /**
     * Notifies the given listeners about an entry update, classifying the event
     * as CREATED/UPDATED/REMOVED from the presence of the new/old values.
     *
     * @param lsnrCol Listeners to notify.
     * @param key Key.
     * @param newVal New value.
     * @param oldVal Old value.
     * @param internal Internal entry (internal key or not user cache),
     * @param partId Partition.
     * @param primary {@code True} if called on primary node.
     * @param preload Whether update happened during preloading.
     * @param updateCntr Update counter.
     * @param topVer Topology version.
     * @param fut Dht atomic future.
     * @throws IgniteCheckedException In case of error.
     */
    public void onEntryUpdated(
        Map<UUID, CacheContinuousQueryListener> lsnrCol,
        KeyCacheObject key,
        CacheObject newVal,
        CacheObject oldVal,
        boolean internal,
        int partId,
        boolean primary,
        boolean preload,
        long updateCntr,
        @Nullable GridDhtAtomicAbstractUpdateFuture fut,
        AffinityTopologyVersion topVer)
        throws IgniteCheckedException
    {
        assert key != null;
        assert lsnrCol != null;
        boolean hasNewVal = newVal != null;
        boolean hasOldVal = oldVal != null;
        // Neither value present: propagate only the update counter so listeners keep ordering.
        if (!hasNewVal && !hasOldVal) {
            skipUpdateEvent(lsnrCol, key, partId, updateCntr, primary, topVer);
            return;
        }
        // No new value -> entry removed; no old value -> entry created; both -> updated.
        EventType evtType = !hasNewVal ? REMOVED : !hasOldVal ? CREATED : UPDATED;
        boolean initialized = false;
        boolean recordIgniteEvt = primary && !internal && cctx.gridEvents().isRecordable(EVT_CACHE_QUERY_OBJECT_READ);
        for (CacheContinuousQueryListener lsnr : lsnrCol.values()) {
            // During preload only listeners that asked for existing entries are notified.
            if (preload && !lsnr.notifyExisting())
                continue;
            // Values are unwrapped/unmarshalled lazily, once, on first interested listener.
            if (!initialized) {
                if (lsnr.oldValueRequired()) {
                    oldVal = (CacheObject)cctx.unwrapTemporary(oldVal);
                    if (oldVal != null)
                        oldVal.finishUnmarshal(cctx.cacheObjectContext(), cctx.deploy().globalLoader());
                }
                if (newVal != null)
                    newVal.finishUnmarshal(cctx.cacheObjectContext(), cctx.deploy().globalLoader());
                initialized = true;
            }
            CacheContinuousQueryEntry e0 = new CacheContinuousQueryEntry(
                cctx.cacheId(),
                evtType,
                key,
                newVal,
                lsnr.oldValueRequired() ? oldVal : null,
                lsnr.keepBinary(),
                partId,
                updateCntr,
                topVer);
            CacheContinuousQueryEvent evt = new CacheContinuousQueryEvent<>(
                cctx.kernalContext().cache().jcache(cctx.name()), cctx, e0);
            lsnr.onEntryUpdated(evt, primary, recordIgniteEvt, fut);
        }
    }
    /**
     * Notifies registered (non-internal) listeners that an entry expired.
     * Fired on primary nodes and on all nodes of replicated caches.
     *
     * @param e Entry.
     * @param key Key.
     * @param oldVal Old value.
     * @throws IgniteCheckedException In case of error.
     */
    public void onEntryExpired(GridCacheEntryEx e, KeyCacheObject key, CacheObject oldVal)
        throws IgniteCheckedException {
        assert e != null;
        assert key != null;
        // Internal entries never produce user-visible expiration events.
        if (e.isInternal())
            return;
        ConcurrentMap<UUID, CacheContinuousQueryListener> lsnrCol = lsnrCnt.get() > 0 ? lsnrs : null;
        if (F.isEmpty(lsnrCol))
            return;
        boolean primary = cctx.affinity().primaryByPartition(cctx.localNode(), e.partition(), AffinityTopologyVersion.NONE);
        if (cctx.isReplicated() || primary) {
            boolean recordIgniteEvt = cctx.gridEvents().isRecordable(EVT_CACHE_QUERY_OBJECT_READ);
            boolean initialized = false;
            for (CacheContinuousQueryListener lsnr : lsnrCol.values()) {
                // Old value is unwrapped/unmarshalled lazily, once, on first listener.
                if (!initialized) {
                    if (lsnr.oldValueRequired())
                        oldVal = (CacheObject)cctx.unwrapTemporary(oldVal);
                    if (oldVal != null)
                        oldVal.finishUnmarshal(cctx.cacheObjectContext(), cctx.deploy().globalLoader());
                    initialized = true;
                }
                // Update counter -1 and null topology version mark an expiration event.
                CacheContinuousQueryEntry e0 = new CacheContinuousQueryEntry(
                    cctx.cacheId(),
                    EXPIRED,
                    key,
                    null,
                    lsnr.oldValueRequired() ? oldVal : null,
                    lsnr.keepBinary(),
                    e.partition(),
                    -1,
                    null);
                CacheContinuousQueryEvent evt = new CacheContinuousQueryEvent(
                    cctx.kernalContext().cache().jcache(cctx.name()), cctx, e0);
                lsnr.onEntryUpdated(evt, primary, recordIgniteEvt, null);
            }
        }
    }
    /**
     * Executes a user continuous query, choosing the V2 handler when a remote
     * filter factory is supplied and the legacy handler otherwise.
     *
     * @param locLsnr Local listener.
     * @param rmtFilter Remote filter (used only when {@code rmtFilterFactory} is {@code null}).
     * @param rmtFilterFactory Remote filter factory, may be {@code null}.
     * @param bufSize Buffer size.
     * @param timeInterval Time interval.
     * @param autoUnsubscribe Auto unsubscribe flag.
     * @param loc Local flag.
     * @param keepBinary Keep binary flag.
     * @param includeExpired Whether to deliver expiration events.
     * @return Continuous routine ID.
     * @throws IgniteCheckedException In case of error.
     */
    public UUID executeQuery(final CacheEntryUpdatedListener locLsnr,
        @Nullable final CacheEntryEventSerializableFilter rmtFilter,
        @Nullable final Factory<? extends CacheEntryEventFilter> rmtFilterFactory,
        int bufSize,
        long timeInterval,
        boolean autoUnsubscribe,
        boolean loc,
        final boolean keepBinary,
        final boolean includeExpired) throws IgniteCheckedException
    {
        IgniteOutClosure<CacheContinuousQueryHandler> clsr;
        if (rmtFilterFactory != null)
            // Factory-based filter: use the V2 handler.
            clsr = new IgniteOutClosure<CacheContinuousQueryHandler>() {
                @Override public CacheContinuousQueryHandler apply() {
                    CacheContinuousQueryHandler hnd;
                    hnd = new CacheContinuousQueryHandlerV2(
                        cctx.name(),
                        TOPIC_CACHE.topic(topicPrefix, cctx.localNodeId(), seq.getAndIncrement()),
                        locLsnr,
                        rmtFilterFactory,
                        true,
                        false,
                        !includeExpired,
                        false,
                        null);
                    return hnd;
                }
            };
        else
            // Plain serializable filter: use the legacy handler.
            clsr = new IgniteOutClosure<CacheContinuousQueryHandler>() {
                @Override public CacheContinuousQueryHandler apply() {
                    return new CacheContinuousQueryHandler(
                        cctx.name(),
                        TOPIC_CACHE.topic(topicPrefix, cctx.localNodeId(), seq.getAndIncrement()),
                        locLsnr,
                        rmtFilter,
                        true,
                        false,
                        !includeExpired,
                        false);
                }
            };
        return executeQuery0(
            locLsnr,
            clsr,
            bufSize,
            timeInterval,
            autoUnsubscribe,
            false,
            false,
            loc,
            keepBinary,
            false);
    }
    /**
     * Executes an internal (system) continuous query with default page size,
     * time interval and auto-unsubscribe settings.
     *
     * @param locLsnr Local listener.
     * @param rmtFilter Remote filter.
     * @param loc Local flag.
     * @param notifyExisting Notify existing flag.
     * @param ignoreClassNotFound Whether to ignore class-not-found errors during filter deployment.
     * @return Continuous routine ID.
     * @throws IgniteCheckedException In case of error.
     */
    public UUID executeInternalQuery(final CacheEntryUpdatedListener<?, ?> locLsnr,
        final CacheEntryEventSerializableFilter rmtFilter,
        final boolean loc,
        final boolean notifyExisting,
        final boolean ignoreClassNotFound)
        throws IgniteCheckedException
    {
        return executeQuery0(
            locLsnr,
            new IgniteOutClosure<CacheContinuousQueryHandler>() {
                @Override public CacheContinuousQueryHandler apply() {
                    return new CacheContinuousQueryHandler(
                        cctx.name(),
                        TOPIC_CACHE.topic(topicPrefix, cctx.localNodeId(), seq.getAndIncrement()),
                        locLsnr,
                        rmtFilter,
                        true,
                        false,
                        true,
                        ignoreClassNotFound);
                }
            },
            ContinuousQuery.DFLT_PAGE_SIZE,
            ContinuousQuery.DFLT_TIME_INTERVAL,
            ContinuousQuery.DFLT_AUTO_UNSUBSCRIBE,
            true,
            notifyExisting,
            loc,
            false,
            false);
    }
/**
* @param routineId Consume ID.
*/
public void cancelInternalQuery(UUID routineId) {
try {
cctx.kernalContext().continuous().stopRoutine(routineId).get();
}
catch (IgniteCheckedException | IgniteException e) {
if (log.isDebugEnabled())
log.debug("Failed to stop internal continuous query: " + e.getMessage());
}
}
    /**
     * Registers and starts a JCache continuous query for the given listener
     * configuration. On start failure the registration is rolled back.
     *
     * @param cfg Listener configuration.
     * @param onStart Whether listener is created on node start.
     * @param keepBinary Keep binary flag.
     * @throws IgniteCheckedException If failed.
     */
    public void executeJCacheQuery(CacheEntryListenerConfiguration cfg, boolean onStart, boolean keepBinary)
        throws IgniteCheckedException {
        JCacheQuery lsnr = new JCacheQuery(cfg, onStart, keepBinary);
        JCacheQuery old = jCacheLsnrs.putIfAbsent(cfg, lsnr);
        // Each configuration may be registered at most once.
        if (old != null)
            throw new IllegalArgumentException("Listener is already registered for configuration: " + cfg);
        try {
            lsnr.execute();
        }
        catch (IgniteCheckedException e) {
            // Roll back the registration so a retry with the same cfg is possible.
            cancelJCacheQuery(cfg);
            throw e;
        }
    }
/**
* @param cfg Listener configuration.
* @throws IgniteCheckedException In case of error.
*/
public void cancelJCacheQuery(CacheEntryListenerConfiguration cfg) throws IgniteCheckedException {
JCacheQuery lsnr = jCacheLsnrs.remove(cfg);
if (lsnr != null)
lsnr.cancel();
}
/**
* @param topVer Topology version.
*/
public void beforeExchange(AffinityTopologyVersion topVer) {
for (CacheContinuousQueryListener lsnr : lsnrs.values())
lsnr.flushBackupQueue(cctx.kernalContext(), topVer);
}
/**
* Partition evicted callback.
*
* @param part Partition number.
*/
public void onPartitionEvicted(int part) {
for (CacheContinuousQueryListener lsnr : lsnrs.values())
lsnr.onPartitionEvicted(part);
for (CacheContinuousQueryListener lsnr : intLsnrs.values())
lsnr.onPartitionEvicted(part);
}
    /**
     * Core routine that starts a continuous query: applies security checks,
     * configures the handler, registers the routine cluster-wide and, when
     * requested, replays existing entries to the local listener.
     *
     * @param locLsnr Local listener.
     * @param clsr Closure to create CacheContinuousQueryHandler.
     * @param bufSize Buffer size.
     * @param timeInterval Time interval.
     * @param autoUnsubscribe Auto unsubscribe flag.
     * @param internal Internal flag.
     * @param notifyExisting Notify existing flag.
     * @param loc Local flag.
     * @param keepBinary Keep binary flag.
     * @param onStart Waiting topology exchange.
     * @return Continuous routine ID.
     * @throws IgniteCheckedException In case of error.
     */
    private UUID executeQuery0(CacheEntryUpdatedListener locLsnr,
        IgniteOutClosure<CacheContinuousQueryHandler> clsr,
        int bufSize,
        long timeInterval,
        boolean autoUnsubscribe,
        boolean internal,
        boolean notifyExisting,
        boolean loc,
        final boolean keepBinary,
        boolean onStart) throws IgniteCheckedException
    {
        // Continuous queries require read permission on the cache.
        cctx.checkSecurity(SecurityPermission.CACHE_READ);
        int taskNameHash = !internal && cctx.kernalContext().security().enabled() ?
            cctx.kernalContext().job().currentTaskNameHash() : 0;
        // Local query on a replicated affinity node never needs primary checks.
        boolean skipPrimaryCheck = loc && cctx.config().getCacheMode() == CacheMode.REPLICATED && cctx.affinityNode();
        final CacheContinuousQueryHandler hnd = clsr.apply();
        hnd.taskNameHash(taskNameHash);
        hnd.skipPrimaryCheck(skipPrimaryCheck);
        hnd.notifyExisting(notifyExisting);
        hnd.internal(internal);
        hnd.keepBinary(keepBinary);
        hnd.localCache(cctx.isLocal());
        // Local and LOCAL-mode queries run only on this node; otherwise honor the cache node filter.
        IgnitePredicate<ClusterNode> pred = (loc || cctx.config().getCacheMode() == CacheMode.LOCAL) ?
            F.nodeForNodeId(cctx.localNodeId()) : cctx.config().getNodeFilter();
        assert pred != null : cctx.config();
        UUID id = cctx.kernalContext().continuous().startRoutine(
            hnd,
            internal && loc,
            bufSize,
            timeInterval,
            autoUnsubscribe,
            pred).get();
        try {
            // For user queries started after node start, wait until topology is stable.
            if (hnd.isQuery() && cctx.userCache() && !onStart)
                hnd.waitTopologyFuture(cctx.kernalContext());
        }
        catch (IgniteCheckedException e) {
            // Failed to wait for topology: stop the just-started routine before rethrowing.
            log.warning("Failed to start continuous query.", e);
            cctx.kernalContext().continuous().stopRoutine(id);
            throw new IgniteCheckedException("Failed to start continuous query.", e);
        }
        if (notifyExisting) {
            // Replay current cache contents to the local listener as CREATED events.
            final Iterator<CacheDataRow> it = cctx.offheap().iterator(true, true, AffinityTopologyVersion.NONE);
            locLsnr.onUpdated(new Iterable<CacheEntryEvent>() {
                @Override public Iterator<CacheEntryEvent> iterator() {
                    return new Iterator<CacheEntryEvent>() {
                        // Next event to return; null when the underlying iterator is drained.
                        private CacheContinuousQueryEvent next;
                        {
                            advance();
                        }
                        @Override public boolean hasNext() {
                            return next != null;
                        }
                        @Override public CacheEntryEvent next() {
                            if (!hasNext())
                                throw new NoSuchElementException();
                            CacheEntryEvent next0 = next;
                            advance();
                            return next0;
                        }
                        @Override public void remove() {
                            throw new UnsupportedOperationException();
                        }
                        /** Moves to the next row that passes the handler's event filter. */
                        private void advance() {
                            next = null;
                            while (next == null) {
                                if (!it.hasNext())
                                    break;
                                CacheDataRow e = it.next();
                                CacheContinuousQueryEntry entry = new CacheContinuousQueryEntry(
                                    cctx.cacheId(),
                                    CREATED,
                                    e.key(),
                                    e.value(),
                                    null,
                                    keepBinary,
                                    0,
                                    -1,
                                    null);
                                next = new CacheContinuousQueryEvent<>(
                                    cctx.kernalContext().cache().jcache(cctx.name()),
                                    cctx, entry);
                                if (hnd.getEventFilter() != null && !hnd.getEventFilter().evaluate(next))
                                    next = null;
                            }
                        }
                    };
                }
            });
        }
        return id;
    }
    /**
     * Lazily wraps the cache iterator into CREATED events, applying the
     * optional filter as entries are consumed.
     *
     * @param keepBinary Keep binary flag.
     * @param filter Filter.
     * @return Iterable for events created for existing cache entries.
     * @throws IgniteCheckedException If failed.
     */
    public Iterable<CacheEntryEvent<?, ?>> existingEntries(final boolean keepBinary, final CacheEntryEventFilter filter)
        throws IgniteCheckedException {
        final Iterator<Cache.Entry<?, ?>> it = cctx.cache().igniteIterator(keepBinary);
        final Cache cache = cctx.kernalContext().cache().jcache(cctx.name());
        return new Iterable<CacheEntryEvent<?, ?>>() {
            @Override public Iterator<CacheEntryEvent<?, ?>> iterator() {
                return new Iterator<CacheEntryEvent<?, ?>>() {
                    // Next event to return; null when the underlying iterator is drained.
                    private CacheQueryEntryEvent<?, ?> next;
                    {
                        advance();
                    }
                    @Override public boolean hasNext() {
                        return next != null;
                    }
                    @Override public CacheEntryEvent<?, ?> next() {
                        if (!hasNext())
                            throw new NoSuchElementException();
                        CacheEntryEvent next0 = next;
                        advance();
                        return next0;
                    }
                    @Override public void remove() {
                        throw new UnsupportedOperationException();
                    }
                    /** Moves to the next cache entry that passes the filter. */
                    private void advance() {
                        next = null;
                        while (next == null) {
                            if (!it.hasNext())
                                break;
                            Cache.Entry e = it.next();
                            next = new CacheEntryEventImpl(
                                cache,
                                CREATED,
                                e.getKey(),
                                e.getValue());
                            if (filter != null && !filter.evaluate(next))
                                next = null;
                        }
                    }
                };
            }
        };
    }
/**
* @param lsnrId Listener ID.
* @param lsnr Listener.
* @param internal Internal flag.
* @return Whether listener was actually registered.
*/
GridContinuousHandler.RegisterStatus registerListener(UUID lsnrId,
CacheContinuousQueryListener lsnr,
boolean internal) {
boolean added;
if (internal) {
added = intLsnrs.putIfAbsent(lsnrId, lsnr) == null;
if (added)
intLsnrCnt.incrementAndGet();
}
else {
added = lsnrs.putIfAbsent(lsnrId, lsnr) == null;
if (added) {
lsnrCnt.incrementAndGet();
lsnr.onExecution();
}
}
return added ? GridContinuousHandler.RegisterStatus.REGISTERED : GridContinuousHandler.RegisterStatus.NOT_REGISTERED;
}
/**
* @param internal Internal flag.
* @param id Listener ID.
*/
void unregisterListener(boolean internal, UUID id) {
CacheContinuousQueryListener lsnr;
if (internal) {
if ((lsnr = intLsnrs.remove(id)) != null) {
intLsnrCnt.decrementAndGet();
lsnr.onUnregister();
}
}
else {
if ((lsnr = lsnrs.remove(id)) != null) {
lsnrCnt.decrementAndGet();
lsnr.onUnregister();
}
}
}
    /**
     * Wrapper that turns a JCache {@link CacheEntryListenerConfiguration} into
     * a continuous query routine and tracks its lifecycle.
     */
    private class JCacheQuery {
        /** Listener configuration this query was created from. */
        private final CacheEntryListenerConfiguration cfg;
        /** {@code True} if created during cache start (cfg already in cache configuration). */
        private final boolean onStart;
        /** Keep binary flag passed through to the query handler. */
        private final boolean keepBinary;
        /** ID of the started continuous routine; {@code null} until {@link #execute()} succeeds. */
        private volatile UUID routineId;
        /**
         * @param cfg Listener configuration.
         * @param onStart {@code True} if executed on cache start.
         * @param keepBinary Keep binary flag.
         */
        private JCacheQuery(CacheEntryListenerConfiguration cfg, boolean onStart, boolean keepBinary) {
            this.cfg = cfg;
            this.onStart = onStart;
            this.keepBinary = keepBinary;
        }
        /**
         * Creates the local listener, derives the event-type mask from the
         * listener's implemented sub-interfaces, and starts the routine.
         *
         * @throws IgniteCheckedException In case of error.
         */
        @SuppressWarnings("unchecked")
        void execute() throws IgniteCheckedException {
            // Dynamically added configurations must be recorded in the cache configuration.
            if (!onStart)
                cctx.config().addCacheEntryListenerConfiguration(cfg);
            CacheEntryListener locLsnrImpl = (CacheEntryListener)cfg.getCacheEntryListenerFactory().create();
            if (locLsnrImpl == null)
                throw new IgniteCheckedException("Local CacheEntryListener is mandatory and can't be null.");
            // Build a bit mask of event types from the sub-interfaces the listener implements.
            byte types = 0;
            types |= locLsnrImpl instanceof CacheEntryCreatedListener ? CREATED_FLAG : 0;
            types |= locLsnrImpl instanceof CacheEntryUpdatedListener ? UPDATED_FLAG : 0;
            types |= locLsnrImpl instanceof CacheEntryRemovedListener ? REMOVED_FLAG : 0;
            types |= locLsnrImpl instanceof CacheEntryExpiredListener ? EXPIRED_FLAG : 0;
            if (types == 0)
                throw new IgniteCheckedException("Listener must implement one of CacheEntryListener sub-interfaces.");
            final byte types0 = types;
            final CacheEntryUpdatedListener locLsnr = new JCacheQueryLocalListener(
                locLsnrImpl,
                log);
            routineId = executeQuery0(
                locLsnr,
                new IgniteOutClosure<CacheContinuousQueryHandler>() {
                    @Override public CacheContinuousQueryHandler apply() {
                        CacheContinuousQueryHandler hnd;
                        Factory<CacheEntryEventFilter> rmtFilterFactory = cfg.getCacheEntryEventFilterFactory();
                        if (rmtFilterFactory != null)
                            hnd = new CacheContinuousQueryHandlerV2(
                                cctx.name(),
                                TOPIC_CACHE.topic(topicPrefix, cctx.localNodeId(), seq.getAndIncrement()),
                                locLsnr,
                                rmtFilterFactory,
                                cfg.isOldValueRequired(),
                                cfg.isSynchronous(),
                                false,
                                false,
                                types0);
                        else {
                            JCacheQueryRemoteFilter jCacheFilter;
                            CacheEntryEventFilter filter = null;
                            // NOTE(review): 'rmtFilterFactory' is null in this branch, so the
                            // check below can never pass and 'filter' always stays null here —
                            // confirm whether the Serializable validation was meant to run elsewhere.
                            if (rmtFilterFactory != null) {
                                filter = rmtFilterFactory.create();
                                if (!(filter instanceof Serializable))
                                    throw new IgniteException("Topology has nodes of the old versions. " +
                                        "In this case EntryEventFilter must implement java.io.Serializable " +
                                        "interface. Filter: " + filter);
                            }
                            jCacheFilter = new JCacheQueryRemoteFilter(filter, types0);
                            hnd = new CacheContinuousQueryHandler(
                                cctx.name(),
                                TOPIC_CACHE.topic(topicPrefix, cctx.localNodeId(), seq.getAndIncrement()),
                                locLsnr,
                                jCacheFilter,
                                cfg.isOldValueRequired(),
                                cfg.isSynchronous(),
                                false,
                                false);
                        }
                        return hnd;
                    }
                },
                ContinuousQuery.DFLT_PAGE_SIZE,
                ContinuousQuery.DFLT_TIME_INTERVAL,
                ContinuousQuery.DFLT_AUTO_UNSUBSCRIBE,
                false,
                false,
                false,
                keepBinary,
                onStart
            );
        }
        /**
         * Stops the routine (if started) and removes the configuration from
         * the cache configuration.
         *
         * @throws IgniteCheckedException In case of error.
         */
        @SuppressWarnings("unchecked")
        void cancel() throws IgniteCheckedException {
            UUID routineId0 = routineId;
            if (routineId0 != null)
                cctx.kernalContext().continuous().stopRoutine(routineId0).get();
            cctx.config().removeCacheEntryListenerConfiguration(cfg);
        }
    }
    /**
     * Adapter that dispatches continuous query events to the user-provided
     * JCache listener, invoking the sub-interface method matching each
     * event's type.
     */
    static class JCacheQueryLocalListener<K, V> implements CacheEntryUpdatedListener<K, V> {
        /** User-provided JCache listener. */
        final CacheEntryListener<K, V> impl;
        /** Logger. */
        private final IgniteLogger log;
        /**
         * @param impl Listener.
         * @param log Logger.
         */
        JCacheQueryLocalListener(CacheEntryListener<K, V> impl, IgniteLogger log) {
            assert impl != null;
            assert log != null;
            this.impl = impl;
            this.log = log;
        }
        /** {@inheritDoc} */
        @Override public void onUpdated(Iterable<CacheEntryEvent<? extends K, ? extends V>> evts) {
            for (CacheEntryEvent<? extends K, ? extends V> evt : evts) {
                try {
                    // Route each event to the matching sub-interface callback.
                    // The asserts hold because events are pre-filtered by the
                    // type mask built from the listener's implemented interfaces.
                    switch (evt.getEventType()) {
                        case CREATED:
                            assert impl instanceof CacheEntryCreatedListener : evt;
                            ((CacheEntryCreatedListener<K, V>)impl).onCreated(singleton(evt));
                            break;
                        case UPDATED:
                            assert impl instanceof CacheEntryUpdatedListener : evt;
                            ((CacheEntryUpdatedListener<K, V>)impl).onUpdated(singleton(evt));
                            break;
                        case REMOVED:
                            assert impl instanceof CacheEntryRemovedListener : evt;
                            ((CacheEntryRemovedListener<K, V>)impl).onRemoved(singleton(evt));
                            break;
                        case EXPIRED:
                            assert impl instanceof CacheEntryExpiredListener : evt;
                            ((CacheEntryExpiredListener<K, V>)impl).onExpired(singleton(evt));
                            break;
                        default:
                            throw new IllegalStateException("Unknown type: " + evt.getEventType());
                    }
                }
                catch (Exception e) {
                    // User listener failures must not break event delivery for remaining events.
                    U.error(log, "CacheEntryListener failed: " + e);
                }
            }
        }
        /**
         * @param evt Event.
         * @return Singleton iterable.
         */
        @SuppressWarnings("unchecked")
        private Iterable<CacheEntryEvent<? extends K, ? extends V>> singleton(
            CacheEntryEvent<? extends K, ? extends V> evt) {
            assert evt instanceof CacheContinuousQueryEvent;
            Collection<CacheEntryEvent<? extends K, ? extends V>> evts = new ArrayList<>(1);
            evts.add(evt);
            return evts;
        }
        /**
         * @return {@code True} if listener should be executed in non-system thread.
         */
        protected boolean async() {
            return U.hasAnnotation(impl, IgniteAsyncCallback.class);
        }
    }
/**
 * JCache remote filter adapter: wraps an optional user
 * {@link CacheEntryEventFilter} together with an event-type bitmask.
 * For handler version 2.0 this filter should not be serialized.
 */
protected static class JCacheQueryRemoteFilter implements CacheEntryEventSerializableFilter, Externalizable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Wrapped user filter; {@code null} means only the type mask is checked. */
    protected CacheEntryEventFilter impl;

    /** Bitmask of accepted event types (CREATED/UPDATED/REMOVED/EXPIRED flags). */
    private byte types;

    /** Injected logger. */
    @LoggerResource
    private IgniteLogger log;

    /**
     * For {@link Externalizable}.
     */
    public JCacheQueryRemoteFilter() {
        // no-op.
    }

    /**
     * @param impl Filter (may be {@code null}).
     * @param types Types bitmask; must be non-zero.
     */
    JCacheQueryRemoteFilter(@Nullable CacheEntryEventFilter impl, byte types) {
        assert types != 0;

        this.impl = impl;
        this.types = types;
    }

    /** {@inheritDoc} */
    @Override public boolean evaluate(CacheEntryEvent evt) {
        try {
            // Event passes if its type bit is enabled AND the user filter
            // (when present) accepts it.
            return (types & flag(evt.getEventType())) != 0 && (impl == null || impl.evaluate(evt));
        }
        catch (Exception e) {
            // Fail open: a broken user filter is logged but events are not
            // silently dropped.
            U.error(log, "CacheEntryEventFilter failed: " + e);

            return true;
        }
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        // Field order must match readExternal().
        out.writeObject(impl);
        out.writeByte(types);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        impl = (CacheEntryEventFilter)in.readObject();
        types = in.readByte();
    }

    /**
     * @return {@code True} if filter should be executed in non-system thread.
     */
    protected boolean async() {
        return U.hasAnnotation(impl, IgniteAsyncCallback.class);
    }

    /**
     * Maps a JCache event type to its internal bitmask flag.
     *
     * @param evtType Type.
     * @return Flag value.
     */
    private byte flag(EventType evtType) {
        switch (evtType) {
            case CREATED:
                return CREATED_FLAG;

            case UPDATED:
                return UPDATED_FLAG;

            case REMOVED:
                return REMOVED_FLAG;

            case EXPIRED:
                return EXPIRED_FLAG;

            default:
                throw new IllegalStateException("Unknown type: " + evtType);
        }
    }
}
/**
 * Periodic task that flushes the backup event queue: asks every registered
 * continuous-query listener to acknowledge its timed-out backup entries.
 */
private static final class BackupCleaner implements Runnable {
    /** Listeners. */
    private final Map<UUID, CacheContinuousQueryListener> lsnrs;

    /** Context. */
    private final GridKernalContext ctx;

    /**
     * @param lsnrs Listeners.
     * @param ctx Kernal context.
     */
    public BackupCleaner(Map<UUID, CacheContinuousQueryListener> lsnrs, GridKernalContext ctx) {
        this.lsnrs = lsnrs;
        this.ctx = ctx;
    }

    /** {@inheritDoc} */
    @Override public void run() {
        // Every registered listener gets a chance to acknowledge on timeout.
        for (CacheContinuousQueryListener lsnr : lsnrs.values())
            lsnr.acknowledgeBackupOnTimeout(ctx);
    }
}
/**
 * Minimal {@link CacheQueryEntryEvent} implementation carrying only a key and
 * a (new) value: old values are never available and the partition update
 * counter is not tracked.
 */
public static class CacheEntryEventImpl extends CacheQueryEntryEvent {
    /** */
    private static final long serialVersionUID = 0L;

    /** Entry key. */
    @GridToStringInclude(sensitive = true)
    private Object key;

    /** Entry value. */
    @GridToStringInclude(sensitive = true)
    private Object val;

    /**
     * @param src Event source.
     * @param evtType Event type.
     * @param key Key.
     * @param val Value.
     */
    public CacheEntryEventImpl(Cache src, EventType evtType, Object key, Object val) {
        super(src, evtType);

        this.key = key;
        this.val = val;
    }

    /** {@inheritDoc} */
    @Override public long getPartitionUpdateCounter() {
        return 0; // Not tracked for this event type.
    }

    /** {@inheritDoc} */
    @Override public Object getOldValue() {
        return null; // Old values are never carried.
    }

    /** {@inheritDoc} */
    @Override public boolean isOldValueAvailable() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public Object getKey() {
        return key;
    }

    /** {@inheritDoc} */
    @Override public Object getValue() {
        return val;
    }

    /** {@inheritDoc} */
    @Override public Object unwrap(Class cls) {
        // Guard clause: only this class or one of its supertypes can be the
        // unwrap target.
        if (!cls.isAssignableFrom(getClass()))
            throw new IllegalArgumentException("Unwrapping to class is not supported: " + cls);

        return cls.cast(this);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(CacheEntryEventImpl.class, this);
    }
}
}
| |
package com.google.firebase.quickstart.auth.java;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.android.material.snackbar.Snackbar;
import com.google.firebase.FirebaseException;
import com.google.firebase.FirebaseTooManyRequestsException;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseAuthInvalidCredentialsException;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.auth.PhoneAuthCredential;
import com.google.firebase.auth.PhoneAuthProvider;
import com.google.firebase.quickstart.auth.R;
import com.google.firebase.quickstart.auth.databinding.ActivityPhoneAuthBinding;
import java.util.concurrent.TimeUnit;
/**
 * Demonstrates Firebase phone-number sign-in: requesting an SMS verification
 * code, verifying a manually entered code, resending the code, and signing in
 * with the resulting PhoneAuthCredential.
 *
 * NOTE(review): the [START ...]/[END ...] comment markers are documentation
 * snippet anchors consumed by external tooling — do not remove or move them.
 */
public class PhoneAuthActivity extends AppCompatActivity implements
        View.OnClickListener {

    private static final String TAG = "PhoneAuthActivity";

    // Instance-state key tracking whether an SMS verification is in flight.
    private static final String KEY_VERIFY_IN_PROGRESS = "key_verify_in_progress";

    // UI states consumed by updateUI(...).
    private static final int STATE_INITIALIZED = 1;
    private static final int STATE_CODE_SENT = 2;
    private static final int STATE_VERIFY_FAILED = 3;
    private static final int STATE_VERIFY_SUCCESS = 4;
    private static final int STATE_SIGNIN_FAILED = 5;
    private static final int STATE_SIGNIN_SUCCESS = 6;

    // [START declare_auth]
    private FirebaseAuth mAuth;
    // [END declare_auth]

    // True after a code is requested and until verification completes or
    // fails; persisted across process death so verification can resume in
    // onStart().
    private boolean mVerificationInProgress = false;

    // Verification ID delivered by onCodeSent(); combined with the user's
    // code to build a PhoneAuthCredential.
    private String mVerificationId;

    // Token from onCodeSent() allowing the code to be re-sent to the number.
    private PhoneAuthProvider.ForceResendingToken mResendToken;

    private PhoneAuthProvider.OnVerificationStateChangedCallbacks mCallbacks;

    private ActivityPhoneAuthBinding mBinding;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mBinding = ActivityPhoneAuthBinding.inflate(getLayoutInflater());
        setContentView(mBinding.getRoot());

        // Restore instance state
        if (savedInstanceState != null) {
            onRestoreInstanceState(savedInstanceState);
        }

        // Assign click listeners
        mBinding.buttonStartVerification.setOnClickListener(this);
        mBinding.buttonVerifyPhone.setOnClickListener(this);
        mBinding.buttonResend.setOnClickListener(this);
        mBinding.signOutButton.setOnClickListener(this);

        // [START initialize_auth]
        // Initialize Firebase Auth
        mAuth = FirebaseAuth.getInstance();
        // [END initialize_auth]

        // Initialize phone auth callbacks
        // [START phone_auth_callbacks]
        mCallbacks = new PhoneAuthProvider.OnVerificationStateChangedCallbacks() {

            @Override
            public void onVerificationCompleted(PhoneAuthCredential credential) {
                // This callback will be invoked in two situations:
                // 1 - Instant verification. In some cases the phone number can be instantly
                //     verified without needing to send or enter a verification code.
                // 2 - Auto-retrieval. On some devices Google Play services can automatically
                //     detect the incoming verification SMS and perform verification without
                //     user action.
                Log.d(TAG, "onVerificationCompleted:" + credential);
                // [START_EXCLUDE silent]
                mVerificationInProgress = false;
                // [END_EXCLUDE]

                // [START_EXCLUDE silent]
                // Update the UI and attempt sign in with the phone credential
                updateUI(STATE_VERIFY_SUCCESS, credential);
                // [END_EXCLUDE]
                signInWithPhoneAuthCredential(credential);
            }

            @Override
            public void onVerificationFailed(FirebaseException e) {
                // This callback is invoked when an invalid request for verification is made,
                // for instance if the phone number format is not valid.
                Log.w(TAG, "onVerificationFailed", e);
                // [START_EXCLUDE silent]
                mVerificationInProgress = false;
                // [END_EXCLUDE]

                if (e instanceof FirebaseAuthInvalidCredentialsException) {
                    // Invalid request
                    // [START_EXCLUDE]
                    mBinding.fieldPhoneNumber.setError("Invalid phone number.");
                    // [END_EXCLUDE]
                } else if (e instanceof FirebaseTooManyRequestsException) {
                    // The SMS quota for the project has been exceeded
                    // [START_EXCLUDE]
                    Snackbar.make(findViewById(android.R.id.content), "Quota exceeded.",
                            Snackbar.LENGTH_SHORT).show();
                    // [END_EXCLUDE]
                }

                // Show a message and update the UI
                // [START_EXCLUDE]
                updateUI(STATE_VERIFY_FAILED);
                // [END_EXCLUDE]
            }

            @Override
            public void onCodeSent(@NonNull String verificationId,
                                   @NonNull PhoneAuthProvider.ForceResendingToken token) {
                // The SMS verification code has been sent to the provided phone number, we
                // now need to ask the user to enter the code and then construct a credential
                // by combining the code with a verification ID.
                Log.d(TAG, "onCodeSent:" + verificationId);

                // Save verification ID and resending token so we can use them later
                mVerificationId = verificationId;
                mResendToken = token;

                // [START_EXCLUDE]
                // Update UI
                updateUI(STATE_CODE_SENT);
                // [END_EXCLUDE]
            }
        };
        // [END phone_auth_callbacks]
    }

    // [START on_start_check_user]
    @Override
    public void onStart() {
        super.onStart();
        // Check if user is signed in (non-null) and update UI accordingly.
        FirebaseUser currentUser = mAuth.getCurrentUser();
        updateUI(currentUser);

        // [START_EXCLUDE]
        // Resume a verification that was interrupted by process death.
        if (mVerificationInProgress && validatePhoneNumber()) {
            startPhoneNumberVerification(mBinding.fieldPhoneNumber.getText().toString());
        }
        // [END_EXCLUDE]
    }
    // [END on_start_check_user]

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putBoolean(KEY_VERIFY_IN_PROGRESS, mVerificationInProgress);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        mVerificationInProgress = savedInstanceState.getBoolean(KEY_VERIFY_IN_PROGRESS);
    }

    // Requests an SMS verification code for the given phone number.
    private void startPhoneNumberVerification(String phoneNumber) {
        // [START start_phone_auth]
        PhoneAuthProvider.getInstance().verifyPhoneNumber(
                phoneNumber,        // Phone number to verify
                60,                 // Timeout duration
                TimeUnit.SECONDS,   // Unit of timeout
                this,               // Activity (for callback binding)
                mCallbacks);        // OnVerificationStateChangedCallbacks
        // [END start_phone_auth]

        mVerificationInProgress = true;
    }

    // Builds a credential from the verification ID and user-entered code,
    // then attempts sign-in.
    private void verifyPhoneNumberWithCode(String verificationId, String code) {
        // [START verify_with_code]
        PhoneAuthCredential credential = PhoneAuthProvider.getCredential(verificationId, code);
        // [END verify_with_code]
        signInWithPhoneAuthCredential(credential);
    }

    // [START resend_verification]
    private void resendVerificationCode(String phoneNumber,
                                        PhoneAuthProvider.ForceResendingToken token) {
        PhoneAuthProvider.getInstance().verifyPhoneNumber(
                phoneNumber,        // Phone number to verify
                60,                 // Timeout duration
                TimeUnit.SECONDS,   // Unit of timeout
                this,               // Activity (for callback binding)
                mCallbacks,         // OnVerificationStateChangedCallbacks
                token);             // ForceResendingToken from callbacks
    }
    // [END resend_verification]

    // [START sign_in_with_phone]
    private void signInWithPhoneAuthCredential(PhoneAuthCredential credential) {
        mAuth.signInWithCredential(credential)
                .addOnCompleteListener(this, new OnCompleteListener<AuthResult>() {
                    @Override
                    public void onComplete(@NonNull Task<AuthResult> task) {
                        if (task.isSuccessful()) {
                            // Sign in success, update UI with the signed-in user's information
                            Log.d(TAG, "signInWithCredential:success");

                            FirebaseUser user = task.getResult().getUser();
                            // [START_EXCLUDE]
                            updateUI(STATE_SIGNIN_SUCCESS, user);
                            // [END_EXCLUDE]
                        } else {
                            // Sign in failed, display a message and update the UI
                            Log.w(TAG, "signInWithCredential:failure", task.getException());
                            if (task.getException() instanceof FirebaseAuthInvalidCredentialsException) {
                                // The verification code entered was invalid
                                // [START_EXCLUDE silent]
                                mBinding.fieldVerificationCode.setError("Invalid code.");
                                // [END_EXCLUDE]
                            }
                            // [START_EXCLUDE silent]
                            // Update UI
                            updateUI(STATE_SIGNIN_FAILED);
                            // [END_EXCLUDE]
                        }
                    }
                });
    }
    // [END sign_in_with_phone]

    private void signOut() {
        mAuth.signOut();
        updateUI(STATE_INITIALIZED);
    }

    // --- updateUI overloads: each delegates to the full three-argument form. ---

    private void updateUI(int uiState) {
        updateUI(uiState, mAuth.getCurrentUser(), null);
    }

    private void updateUI(FirebaseUser user) {
        if (user != null) {
            updateUI(STATE_SIGNIN_SUCCESS, user);
        } else {
            updateUI(STATE_INITIALIZED);
        }
    }

    private void updateUI(int uiState, FirebaseUser user) {
        updateUI(uiState, user, null);
    }

    private void updateUI(int uiState, PhoneAuthCredential cred) {
        updateUI(uiState, null, cred);
    }

    // Central UI state machine: first applies the per-state field/button
    // enablement, then toggles the signed-in vs. signed-out layout.
    private void updateUI(int uiState, FirebaseUser user, PhoneAuthCredential cred) {
        switch (uiState) {
            case STATE_INITIALIZED:
                // Initialized state, show only the phone number field and start button
                enableViews(mBinding.buttonStartVerification, mBinding.fieldPhoneNumber);
                disableViews(mBinding.buttonVerifyPhone, mBinding.buttonResend, mBinding.fieldVerificationCode);
                mBinding.detail.setText(null);
                break;
            case STATE_CODE_SENT:
                // Code sent state, show the verification field and the resend button
                enableViews(mBinding.buttonVerifyPhone, mBinding.buttonResend, mBinding.fieldPhoneNumber, mBinding.fieldVerificationCode);
                disableViews(mBinding.buttonStartVerification);
                mBinding.detail.setText(R.string.status_code_sent);
                break;
            case STATE_VERIFY_FAILED:
                // Verification has failed, show all options
                enableViews(mBinding.buttonStartVerification, mBinding.buttonVerifyPhone, mBinding.buttonResend, mBinding.fieldPhoneNumber,
                        mBinding.fieldVerificationCode);
                mBinding.detail.setText(R.string.status_verification_failed);
                break;
            case STATE_VERIFY_SUCCESS:
                // Verification has succeeded, proceed to firebase sign in
                disableViews(mBinding.buttonStartVerification, mBinding.buttonVerifyPhone, mBinding.buttonResend, mBinding.fieldPhoneNumber,
                        mBinding.fieldVerificationCode);
                mBinding.detail.setText(R.string.status_verification_succeeded);

                // Set the verification text based on the credential
                if (cred != null) {
                    if (cred.getSmsCode() != null) {
                        mBinding.fieldVerificationCode.setText(cred.getSmsCode());
                    } else {
                        mBinding.fieldVerificationCode.setText(R.string.instant_validation);
                    }
                }
                break;
            case STATE_SIGNIN_FAILED:
                // No-op, handled by sign-in check
                mBinding.detail.setText(R.string.status_sign_in_failed);
                break;
            case STATE_SIGNIN_SUCCESS:
                // No-op, handled by sign-in check
                break;
        }

        if (user == null) {
            // Signed out
            mBinding.phoneAuthFields.setVisibility(View.VISIBLE);
            mBinding.signedInButtons.setVisibility(View.GONE);

            mBinding.status.setText(R.string.signed_out);
        } else {
            // Signed in
            mBinding.phoneAuthFields.setVisibility(View.GONE);
            mBinding.signedInButtons.setVisibility(View.VISIBLE);

            enableViews(mBinding.fieldPhoneNumber, mBinding.fieldVerificationCode);
            mBinding.fieldPhoneNumber.setText(null);
            mBinding.fieldVerificationCode.setText(null);

            mBinding.status.setText(R.string.signed_in);
            mBinding.detail.setText(getString(R.string.firebase_status_fmt, user.getUid()));
        }
    }

    // Returns true when the phone number field is non-empty; sets an inline
    // error otherwise. NOTE(review): presence-only check — format validation
    // is left to the Firebase backend.
    private boolean validatePhoneNumber() {
        String phoneNumber = mBinding.fieldPhoneNumber.getText().toString();
        if (TextUtils.isEmpty(phoneNumber)) {
            mBinding.fieldPhoneNumber.setError("Invalid phone number.");
            return false;
        }

        return true;
    }

    private void enableViews(View... views) {
        for (View v : views) {
            v.setEnabled(true);
        }
    }

    private void disableViews(View... views) {
        for (View v : views) {
            v.setEnabled(false);
        }
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.buttonStartVerification:
                if (!validatePhoneNumber()) {
                    return;
                }

                startPhoneNumberVerification(mBinding.fieldPhoneNumber.getText().toString());
                break;
            case R.id.buttonVerifyPhone:
                String code = mBinding.fieldVerificationCode.getText().toString();
                if (TextUtils.isEmpty(code)) {
                    mBinding.fieldVerificationCode.setError("Cannot be empty.");
                    return;
                }

                verifyPhoneNumberWithCode(mVerificationId, code);
                break;
            case R.id.buttonResend:
                resendVerificationCode(mBinding.fieldPhoneNumber.getText().toString(), mResendToken);
                break;
            case R.id.signOutButton:
                signOut();
                break;
        }
    }
}
| |
package com.planet_ink.coffee_mud.Races;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * The Flower race: a rooted, golem-like vegetation race. Flowers cannot
 * move, speak, or taste, never hunger, and receive large bonuses to several
 * saving throws. Extends {@link Vine}.
 */
public class Flower extends Vine
{
	@Override
	public String ID()
	{
		return "Flower";
	}

	/** Localized race name, resolved once at class-load time. */
	private final static String localizedStaticName = CMLib.lang().L("Flower");

	@Override
	public String name()
	{
		return localizedStaticName;
	}

	// Minimum male height.
	@Override
	public int shortestMale()
	{
		return 4;
	}

	// Minimum female height.
	@Override
	public int shortestFemale()
	{
		return 4;
	}

	@Override
	public int heightVariance()
	{
		return 5;
	}

	// Minimum weight; combined with weightVariance() flowers weigh 1-2.
	@Override
	public int lightestWeight()
	{
		return 1;
	}

	@Override
	public int weightVariance()
	{
		return 1;
	}

	// All wear-location bits set: flowers cannot wear any equipment.
	@Override
	public long forbiddenWornBits()
	{
		return Integer.MAX_VALUE;
	}

	/** Localized racial category, resolved once at class-load time. */
	private final static String localizedStaticRacialCat = CMLib.lang().L("Vegetation");

	@Override
	public String racialCategory()
	{
		return localizedStaticRacialCat;
	}

	// Flowers can "breathe" any atmosphere.
	@Override
	public int[] getBreathables()
	{
		return breatheAnythingArray;
	}

	// Body-part counts in the standard Race body-part order:
	//                                an ey ea he ne ar ha to le fo no gi mo wa ta wi
	// NOTE(review): 8 "arms"/"hands" presumably model stems/petals — confirm
	// against the Race interface's body-part constants.
	private static final int[] parts={0 ,0 ,0 ,0 ,0 ,8 ,8 ,1 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0 };

	@Override
	public int[] bodyMask()
	{
		return parts;
	}

	// Shared, lazily-populated resource list; guarded by synchronized(resources)
	// in myResources().
	protected static Vector<RawMaterial> resources = new Vector<RawMaterial>();

	// Available only through skills, in fantasy-themed areas.
	@Override
	public int availabilityCode()
	{
		return Area.THEME_FANTASY | Area.THEME_SKILLONLYMASK;
	}

	@Override
	public void affectPhyStats(Physical affected, PhyStats affectableStats)
	{
		// Golem-like disposition; cannot speak, taste, or move.
		affectableStats.setDisposition(affectableStats.disposition()|PhyStats.IS_GOLEM);
		affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_SPEAK|PhyStats.CAN_NOT_TASTE|PhyStats.CAN_NOT_MOVE);
	}

	@Override
	public void affectCharState(MOB affectedMOB, CharState affectableState)
	{
		// Effectively never hungry; the current state is synced to the
		// adjusted max so hunger messages never trigger.
		affectableState.setHunger((Integer.MAX_VALUE/2)+10);
		affectedMOB.curState().setHunger(affectableState.getHunger());
	}

	@Override
	public void affectCharStats(MOB affectedMOB, CharStats affectableStats)
	{
		super.affectCharStats(affectedMOB, affectableStats);
		// Neuter gender plus +100 to a battery of saving throws.
		affectableStats.setStat(CharStats.STAT_GENDER,'N');
		affectableStats.setStat(CharStats.STAT_SAVE_POISON,affectableStats.getStat(CharStats.STAT_SAVE_POISON)+100);
		affectableStats.setStat(CharStats.STAT_SAVE_MIND,affectableStats.getStat(CharStats.STAT_SAVE_MIND)+100);
		affectableStats.setStat(CharStats.STAT_SAVE_GAS,affectableStats.getStat(CharStats.STAT_SAVE_GAS)+100);
		affectableStats.setStat(CharStats.STAT_SAVE_PARALYSIS,affectableStats.getStat(CharStats.STAT_SAVE_PARALYSIS)+100);
		affectableStats.setStat(CharStats.STAT_SAVE_UNDEAD,affectableStats.getStat(CharStats.STAT_SAVE_UNDEAD)+100);
		affectableStats.setStat(CharStats.STAT_SAVE_DISEASE,affectableStats.getStat(CharStats.STAT_SAVE_DISEASE)+100);
	}

	@Override
	public String arriveStr()
	{
		return "floats in";
	}

	@Override
	public String leaveStr()
	{
		return "floats";
	}

	@Override
	public Weapon myNaturalWeapon()
	{
		// Lazily built natural attack.
		// NOTE(review): lazy init is unsynchronized — presumably safe under
		// the engine's threading model; confirm before relying on it.
		if(naturalWeapon==null)
		{
			naturalWeapon=CMClass.getWeapon("StdWeapon");
			naturalWeapon.setName(L("a nasty bloom"));
			naturalWeapon.setRanges(0,3);
			naturalWeapon.setMaterial(RawMaterial.RESOURCE_FLOWERS);
			naturalWeapon.setUsesRemaining(1000);
			naturalWeapon.setWeaponDamageType(Weapon.TYPE_BASHING);
		}
		return naturalWeapon;
	}

	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		// Veto any movement attempt by the flower itself — it is rooted.
		if((myHost!=null)
		&&(myHost instanceof MOB)
		&&(msg.amISource((MOB)myHost)))
		{
			if((msg.targetMinor()==CMMsg.TYP_LEAVE)
			||(msg.sourceMinor()==CMMsg.TYP_ADVANCE)
			||(msg.sourceMinor()==CMMsg.TYP_RECALL)
			||(msg.sourceMinor()==CMMsg.TYP_RETREAT))
			{
				msg.source().tell(L("You can't really go anywhere -- you are rooted!"));
				return false;
			}
		}
		return super.okMessage(myHost,msg);
	}

	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		// Flowers get an extra recovery tick each MOB tick.
		if((tickID==Tickable.TICKID_MOB)&&(ticking instanceof MOB))
			CMLib.combat().recoverTick((MOB)ticking);
		return true;
	}

	/**
	 * Builds a condition description keyed off the MOB's hit-point percentage,
	 * using flower-specific wording (petals, shredding).
	 */
	@Override
	public String healthText(MOB viewer, MOB mob)
	{
		final double pct=(CMath.div(mob.curState().getHitPoints(),mob.maxState().getHitPoints()));

		if(pct<.10)
			return L("^r@x1^r is near destruction!^N",mob.name(viewer));
		else
		if(pct<.20)
			return L("^r@x1^r is massively shredded and damaged.^N",mob.name(viewer));
		else
		if(pct<.30)
			return L("^r@x1^r is extremely shredded and damaged.^N",mob.name(viewer));
		else
		if(pct<.40)
			return L("^y@x1^y is very shredded and damaged.^N",mob.name(viewer));
		else
		if(pct<.50)
			return L("^y@x1^y is shredded and damaged.^N",mob.name(viewer));
		else
		if(pct<.60)
			return L("^p@x1^p is shredded and slightly damaged.^N",mob.name(viewer));
		else
		if(pct<.70)
			return L("^p@x1^p has lost numerous petals.^N",mob.name(viewer));
		else
		if(pct<.80)
			return L("^g@x1^g has lost some petals.^N",mob.name(viewer));
		else
		if(pct<.90)
			return L("^g@x1^g has lost a few petals.^N",mob.name(viewer));
		else
		if(pct<.99)
			return L("^g@x1^g is no longer in perfect condition.^N",mob.name(viewer));
		else
			return L("^c@x1^c is in perfect condition.^N",mob.name(viewer));
	}

	@Override
	public DeadBody getCorpseContainer(MOB mob, Room room)
	{
		final DeadBody body = super.getCorpseContainer(mob, room);
		if(body != null)
		{
			// Flower corpses are herb material rather than the default.
			body.setMaterial(RawMaterial.RESOURCE_HERBS);
		}
		return body;
	}

	@Override
	public List<RawMaterial> myResources()
	{
		// Lazily populate the shared, static resource list exactly once.
		synchronized(resources)
		{
			if(resources.size()==0)
			{
				resources.addElement(makeResource
				(L("some petals"),RawMaterial.RESOURCE_HERBS));
			}
		}
		return resources;
	}
}
| |
package galaxy;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.util.Random;
/**
 * Renders a procedurally generated, shaded planet sprite into a 400x400 ARGB
 * image using layered Perlin noise (surface, clouds, atmosphere halo) and
 * displays it via {@code NebulaStormGalaxyGfx.showImage}.
 */
public class Spherize
{
    public static void main(String[] args)
    {
        int W = 400, H = 400;
        BufferedImage im = new BufferedImage(W, H, BufferedImage.TYPE_INT_ARGB);

        long t1 = System.currentTimeMillis();

        Random rand = new Random();

        float cx = W/2, cy = H/2;
        final float maxrad = W/2;

        // Fractal noise setup: each octave doubles the frequency and halves
        // the amplitude.
        int OCTAVES = 5;
        final int RAD = W/2;
        int oct[] = new int[OCTAVES];
        double fac[] = new double[OCTAVES];
        oct[0] = 1; fac[0] = 1;
        // Slightly above the theoretical noise sum so pow() never overshoots 1.
        double MAXVAL = 1.01;
        double offx = rand.nextDouble() * 3, offy = rand.nextDouble() * 3;
        for (int i = 1; i < OCTAVES; i++)
        {
            oct[i] = oct[i-1]*2;
            fac[i] = fac[i-1]*.5;
            MAXVAL += fac[i];
        }

        int planetAtmosColor =
            Color.HSBtoRGB(rand.nextFloat(), rand.nextFloat() * .2f + .7f, .9f);
        int planetSurfaceColor1 =
            Color.HSBtoRGB(rand.nextFloat(), rand.nextFloat() * .2f + .2f, .9f);
        // Currently unused, but kept so the sequence of rand draws (and thus
        // the planet generated for a given seed) stays unchanged.
        int planetSurfaceColor2 =
            Color.HSBtoRGB(rand.nextFloat(), rand.nextFloat() * .2f + .7f, .9f);
        float planetFeatureSize = 2 + rand.nextFloat() * 5;
        float planetWaterLevel = .1f + rand.nextFloat() * .4f;

        // Random light direction, normalized to unit length.
        float sun_dir[] = {
            rand.nextFloat(), rand.nextFloat(), rand.nextFloat(),
        };
        norm_vec(sun_dir);

        for (int x = 0; x < W; x++)
        {
            for (int y = 0; y < H; y++)
            {
                // Spherize: remap the radial distance so the flat noise
                // texture appears wrapped around a sphere.
                float dx = x - cx;
                float dy = y - cy;
                float r = (float) Math.sqrt(dx*dx + dy*dy);
                float r1 = r;
                float r_n = r/maxrad;
                if (r < maxrad)
                {
                    r1 = (float) (Math.asin(r_n) * 2 / Math.PI) * maxrad;
                }

                // Texture-space sample position. BUGFIX: guard r == 0 — at the
                // exact center pixel dx = dy = 0, so r1*dx/r was 0/0 = NaN and
                // poisoned the noise lookup for that pixel.
                double x2 = (r > 0) ? r1 * dx/r + cx : cx;
                double y2 = (r > 0) ? r1 * dy/r + cy : cy;
                x2 = x2*planetFeatureSize/RAD + offx;
                y2 = y2*planetFeatureSize/RAD + offy;

                if (r > maxrad)
                    im.setRGB(x, y, 0xff000000); // Outside the disc: opaque black.
                else
                {
                    // Diffuse shading from the implied sphere normal, softened
                    // through the Perlin fade curve.
                    float nx = dx/RAD;
                    float ny = dy/RAD;
                    float nz = (float) Math.sqrt(1-nx*nx-ny*ny);
                    float bright = (nx*sun_dir[0]+ny*sun_dir[1]+nz*sun_dir[2]);
                    bright = (float) ImprovedPerlin.fade(bright);
                    if (bright > .1f)
                    {
                        // Surface: water below the threshold, land otherwise.
                        int surface_rgb = 0;
                        double n = 0;
                        for (int i = 0; i < OCTAVES; i++)
                        {
                            n += ImprovedPerlin.noise(x2*oct[i], y2*oct[i], .5)*fac[i];
                        }
                        if (n < planetWaterLevel)
                            surface_rgb = 0xff0000af;
                        else
                            surface_rgb = planetSurfaceColor1;

                        // Clouds: turbulence (sum of |noise|) with a second
                        // noise lookup warping the sample domain.
                        n = 0;
                        for (int i = 0; i < OCTAVES; i++)
                        {
                            n +=
                                Math.abs(
                                    ImprovedPerlin.noise(
                                        x2*oct[i] + ImprovedPerlin.noise(x2*oct[i], y2*oct[i], 3.5)*1
                                        , y2*oct[i]*(i==0?3:1) + ImprovedPerlin.noise(x2*oct[i], y2*oct[i], 4.5)*1
                                        , 1.5)*fac[i]
                                );
                        }
                        float cloud = 0;
                        if (n >.5f)
                            cloud = (float)Math.pow((n-.5)/(MAXVAL-.5), .1);
                        if (cloud > 0)
                            surface_rgb = blend(surface_rgb, planetAtmosColor, cloud);

                        // Atmosphere halo over the outer 10% of the radius.
                        if (r_n > .9)
                            surface_rgb =
                                blend(planetAtmosColor, surface_rgb,
                                    (float)Math.pow((1-r_n)/.1f, .5)
                                );

                        // Apply shading last so it darkens everything uniformly.
                        surface_rgb =
                            blend(0xff000000, surface_rgb,
                                bright
                            );

                        im.setRGB(x, y, surface_rgb);
                    }
                    else
                        im.setRGB(x, y, 0xff000000); // Night side: opaque black.
                }
            }
        }
        long t2 = System.currentTimeMillis();
        System.out.println("Took "+(t2-t1)+"ms");
        NebulaStormGalaxyGfx.showImage(im);
    }

    /**
     * Normalizes a 3-component vector in place to unit length.
     *
     * @param v vector of length 3; modified in place.
     */
    private static void norm_vec(float[] v)
    {
        float r = (float) Math.sqrt(v[0]*v[0]+v[1]*v[1]+v[2]*v[2]);
        v[0] /= r;
        v[1] /= r;
        v[2] /= r;
    }

    /**
     * Linearly interpolates two ARGB colors channel by channel.
     *
     * @param c1 color returned at ratio 0.
     * @param c2 color returned at ratio 1.
     * @param ratio blend factor, clamped into [0, 1].
     * @return the interpolated ARGB color.
     */
    public static int blend( int c1, int c2, float ratio ) {
        if ( ratio > 1f ) ratio = 1f;
        else if ( ratio < 0f ) ratio = 0f;
        float iRatio = 1.0f - ratio;

        int a1 = (c1 >> 24 & 0xff);
        int r1 = ((c1 & 0xff0000) >> 16);
        int g1 = ((c1 & 0xff00) >> 8);
        int b1 = (c1 & 0xff);

        int a2 = (c2 >> 24 & 0xff);
        int r2 = ((c2 & 0xff0000) >> 16);
        int g2 = ((c2 & 0xff00) >> 8);
        int b2 = (c2 & 0xff);

        int a = (int)((a1 * iRatio) + (a2 * ratio));
        int r = (int)((r1 * iRatio) + (r2 * ratio));
        int g = (int)((g1 * iRatio) + (g2 * ratio));
        int b = (int)((b1 * iRatio) + (b2 * ratio));

        return (a << 24 | r << 16 | g << 8 | b );
    }
}
| |
/*
* Copyright 2010-2015 Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.errorhandling;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.psiutils.ExceptionUtils;
import com.siyeh.ig.psiutils.LibraryUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.*;
public class TooBroadThrowsInspectionBase extends BaseInspection {
@SuppressWarnings({"PublicField"})
public boolean onlyWarnOnRootExceptions = false;
@SuppressWarnings({"PublicField", "UnusedDeclaration"})
public boolean ignoreInTestCode = false; // keep for compatibility
@SuppressWarnings("PublicField")
public boolean ignoreLibraryOverrides = false;
@SuppressWarnings("PublicField")
public boolean ignoreThrown = false;
@Override
@NotNull
public String getID() {
    // Explicit ID differing from the class name; kept stable so existing
    // suppressions and inspection profiles continue to match.
    return "OverlyBroadThrowsClause";
}
@Override
@NotNull
public String getDisplayName() {
    // Human-readable name shown in the inspection settings UI.
    return InspectionGadgetsBundle.message("overly.broad.throws.clause.display.name");
}
/**
 * Builds the problem message listing the exception types masked by the broad
 * throws declaration. A single masked type uses the singular message; several
 * types list all but the last comma-separated, with the last passed
 * separately so the message can join them grammatically.
 *
 * infos[0] is the {@code List<SmartTypePointer>} of masked types.
 */
@Override
@NotNull
protected String buildErrorString(Object... infos) {
    @SuppressWarnings("unchecked")
    final List<SmartTypePointer> typesMasked = (List<SmartTypePointer>)infos[0];
    final PsiType firstType = typesMasked.get(0).getType();
    final String firstTypeString = firstType != null ? firstType.getPresentableText() : "";
    if (typesMasked.size() == 1) {
        return InspectionGadgetsBundle.message(
            "overly.broad.throws.clause.problem.descriptor1",
            firstTypeString);
    }
    else {
        // Join all but the last type with ", " — StringBuilder instead of
        // repeated string concatenation in the loop.
        final StringBuilder typesMaskedString = new StringBuilder(firstTypeString);
        final int lastTypeIndex = typesMasked.size() - 1;
        for (int i = 1; i < lastTypeIndex; i++) {
            final PsiType psiType = typesMasked.get(i).getType();
            if (psiType != null) {
                typesMaskedString.append(", ").append(psiType.getPresentableText());
            }
        }
        final PsiType psiType = typesMasked.get(lastTypeIndex).getType();
        final String lastTypeString = psiType != null ? psiType.getPresentableText() : "";
        return InspectionGadgetsBundle.message("overly.broad.throws.clause.problem.descriptor2",
                                               typesMaskedString.toString(), lastTypeString);
    }
}
@Override
public JComponent createOptionsPanel() {
    // Checkbox order is the order shown in the settings UI.
    final MultipleCheckboxOptionsPanel panel = new MultipleCheckboxOptionsPanel(this);
    // NOTE(review): reuses the "too.broad.catch.option" message key for the
    // root-exceptions checkbox — presumably intentional wording reuse; verify.
    panel.addCheckbox(InspectionGadgetsBundle.message("too.broad.catch.option"), "onlyWarnOnRootExceptions");
    panel.addCheckbox(InspectionGadgetsBundle.message("ignore.exceptions.declared.on.library.override.option"), "ignoreLibraryOverrides");
    panel.addCheckbox(InspectionGadgetsBundle.message("overly.broad.throws.clause.ignore.thrown.option"), "ignoreThrown");
    return panel;
}
@NotNull
@Override
protected InspectionGadgetsFix buildFix(Object... infos) {
    // infos[0]: the masked exception types to declare instead;
    // infos[1]: whether the original (broad) throws entry must be kept.
    final Collection<SmartTypePointer> maskedExceptions = (Collection<SmartTypePointer>)infos[0];
    final Boolean originalNeeded = (Boolean)infos[1];
    return new AddThrowsClauseFix(maskedExceptions, originalNeeded.booleanValue());
}
/**
 * Quick fix that adds the masked (more specific) exception types to the
 * throws list, optionally removing the overly broad entry first.
 */
private static class AddThrowsClauseFix extends InspectionGadgetsFix {
    /** Masked exception types to add to the throws list. */
    private final Collection<SmartTypePointer> types;

    /** Whether the original (broad) throws entry must be preserved. */
    private final boolean originalNeeded;

    AddThrowsClauseFix(Collection<SmartTypePointer> types, boolean originalNeeded) {
        this.types = types;
        this.originalNeeded = originalNeeded;
    }

    @Override
    @NotNull
    public String getName() {
        // Two wordings: "add more specific" vs. "replace with more specific".
        if (originalNeeded) {
            return InspectionGadgetsBundle.message("overly.broad.throws.clause.quickfix1");
        }
        else {
            return InspectionGadgetsBundle.message("overly.broad.throws.clause.quickfix2");
        }
    }

    @NotNull
    @Override
    public String getFamilyName() {
        return "Fix 'throws' clause";
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) throws IncorrectOperationException {
        // The highlighted element is the broad throws reference; its parent
        // must be the throws PsiReferenceList.
        final PsiElement element = descriptor.getPsiElement();
        final PsiElement parent = element.getParent();
        if (!(parent instanceof PsiReferenceList)) {
            return;
        }
        final PsiReferenceList referenceList = (PsiReferenceList)parent;
        final PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
        // Delete the broad entry first (when not needed), then append the
        // masked class types to the same list.
        if (!originalNeeded) {
            element.delete();
        }
        for (SmartTypePointer type : types) {
            final PsiType psiType = type.getType();
            if (psiType instanceof PsiClassType) {
                final PsiJavaCodeReferenceElement referenceElement = factory.createReferenceElementByType((PsiClassType)psiType);
                referenceList.add(referenceElement);
            }
        }
    }
}
@Override
public BaseInspectionVisitor buildVisitor() {
    // Creates the visitor that performs the actual throws-clause analysis.
    return new TooBroadThrowsVisitor();
}
/**
 * Visitor that flags {@code throws} declarations which are broader than the
 * exceptions actually thrown by the method body.
 */
private class TooBroadThrowsVisitor extends BaseInspectionVisitor {

    @Override
    public void visitMethod(PsiMethod method) {
        super.visitMethod(method);
        final PsiReferenceList throwsList = method.getThrowsList();
        // Skip synthetic (non-physical) throws lists.
        if (!throwsList.isPhysical()) {
            return;
        }
        final PsiJavaCodeReferenceElement[] throwsReferences = throwsList.getReferenceElements();
        if (throwsReferences.length == 0) {
            return;
        }
        // Abstract/interface methods have no body to analyze.
        final PsiCodeBlock body = method.getBody();
        if (body == null) {
            return;
        }
        if (ignoreLibraryOverrides && LibraryUtil.isOverrideOfLibraryMethod(method)) {
            return;
        }
        final Set<PsiType> exceptionsThrown = ExceptionUtils.calculateExceptionsThrown(body);
        final PsiClassType[] referencedExceptions = throwsList.getReferencedTypes();
        final Set<PsiType> exceptionsDeclared = new HashSet<PsiType>(referencedExceptions.length);
        ContainerUtil.addAll(exceptionsDeclared, referencedExceptions);
        // Hoisted out of the loop: the pointer manager is loop-invariant.
        final SmartTypePointerManager pointerManager = SmartTypePointerManager.getInstance(body.getProject());
        for (int i = 0; i < referencedExceptions.length; i++) {
            final PsiClassType referencedException = referencedExceptions[i];
            if (onlyWarnOnRootExceptions && !ExceptionUtils.isGenericExceptionClass(referencedException)) {
                continue;
            }
            // Thrown exceptions that are narrower than (masked by) the declared one
            // and not themselves declared.
            final List<SmartTypePointer> exceptionsMasked = new ArrayList<SmartTypePointer>();
            for (PsiType exceptionThrown : exceptionsThrown) {
                if (referencedException.isAssignableFrom(exceptionThrown) && !exceptionsDeclared.contains(exceptionThrown)) {
                    exceptionsMasked.add(pointerManager.createSmartTypePointer(exceptionThrown));
                }
            }
            if (exceptionsMasked.isEmpty()) {
                continue;
            }
            final PsiJavaCodeReferenceElement throwsReference = throwsReferences[i];
            // The broad entry must stay when the body really throws it directly.
            final boolean originalNeeded = exceptionsThrown.contains(referencedException);
            if (ignoreThrown && originalNeeded) {
                continue;
            }
            registerError(throwsReference, exceptionsMasked, Boolean.valueOf(originalNeeded), throwsReference);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.datastreamer;
import java.io.Serializable;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import javax.cache.CacheException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.cache.CacheServerNotFoundException;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
/**
 * Tests for {@code IgniteDataStreamerImpl}.
 */
public class DataStreamerImplSelfTest extends GridCommonAbstractTest {
    /** IP finder shared by the discovery SPI of every grid started here. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Number of keys to load via data streamer. */
    private static final int KEYS_COUNT = 1000;

    /** Started grid counter; used to leave the 5th node without a cache. */
    private static int cnt;

    /** When {@code true}, cacheConfiguration() installs an always-false node filter. */
    private static volatile boolean noNodesFilter;

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        super.afterTest();
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);
        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();
        discoSpi.setIpFinder(IP_FINDER);
        cfg.setDiscoverySpi(discoSpi);
        // Fourth node goes without cache.
        if (cnt < 4)
            cfg.setCacheConfiguration(cacheConfiguration());
        cnt++;
        return cfg;
    }

    /**
     * Streams data while the whole topology is stopped from another thread and
     * verifies that closing the streamer does not raise a NullPointerException.
     *
     * @throws Exception If failed.
     */
    public void testNullPointerExceptionUponDataStreamerClosing() throws Exception {
        startGrids(5);
        final CyclicBarrier barrier = new CyclicBarrier(2);
        // Concurrent thread that stops all grids once both parties reach the barrier.
        multithreadedAsync(new Callable<Object>() {
            @Override public Object call() throws Exception {
                U.awaitQuiet(barrier);
                G.stopAll(true);
                return null;
            }
        }, 1);
        Ignite g4 = grid(4);
        IgniteDataStreamer<Object, Object> dataLdr = g4.dataStreamer(null);
        dataLdr.perNodeBufferSize(32);
        for (int i = 0; i < 100000; i += 2) {
            dataLdr.addData(i, i);
            dataLdr.removeData(i + 1);
        }
        U.awaitQuiet(barrier);
        info("Closing data streamer.");
        try {
            dataLdr.close(true);
        }
        catch (CacheException | IllegalStateException ignore) {
            // This is ok to ignore this exception as test is racy by its nature -
            // grid is stopping in different thread.
        }
    }

    /**
     * Data streamer should correctly load entries from HashMap in case of grids with more than one node
     * and with GridOptimizedMarshaller that requires serializable.
     *
     * @throws Exception If failed.
     */
    public void testAddDataFromMap() throws Exception {
        cnt = 0;
        startGrids(2);
        Ignite g0 = grid(0);
        IgniteDataStreamer<Integer, String> dataLdr = g0.dataStreamer(null);
        Map<Integer, String> map = U.newHashMap(KEYS_COUNT);
        for (int i = 0; i < KEYS_COUNT; i++)
            map.put(i, String.valueOf(i));
        dataLdr.addData(map);
        dataLdr.close();
        Random rnd = new Random();
        IgniteCache<Integer, String> c = g0.cache(null);
        // Spot-check random keys; each value must equal the key's string form.
        for (int i = 0; i < KEYS_COUNT; i++) {
            Integer k = rnd.nextInt(KEYS_COUNT);
            String v = c.get(k);
            assertEquals(k.toString(), v);
        }
    }

    /**
     * Test logging on {@code DataStreamer.addData()} method when cache has no data nodes
     *
     * @throws Exception If fail.
     */
    public void testNoDataNodesOnClose() throws Exception {
        boolean failed = false;
        cnt = 0;
        noNodesFilter = true;
        try {
            Ignite ignite = startGrid(1);
            try (IgniteDataStreamer<Integer, String> streamer = ignite.dataStreamer(null)) {
                streamer.addData(1, "1");
            }
            catch (CacheException ex) {
                // Expected: no server node passes the node filter.
                failed = true;
            }
        }
        finally {
            noNodesFilter = false;
            assertTrue(failed);
        }
    }

    /**
     * Test logging on {@code DataStreamer.addData()} method when cache has no data nodes
     *
     * @throws Exception If fail.
     */
    public void testNoDataNodesOnFlush() throws Exception {
        boolean failed = false;
        cnt = 0;
        noNodesFilter = true;
        try {
            Ignite ignite = startGrid(1);
            IgniteFuture fut = null;
            try (IgniteDataStreamer<Integer, String> streamer = ignite.dataStreamer(null)) {
                fut = streamer.addData(1, "1");
                streamer.flush();
            }
            catch (IllegalStateException ex) {
                // The future returned before the failure must also report it.
                try {
                    fut.get();
                    fail("DataStreamer ignores failed streaming.");
                }
                catch (CacheServerNotFoundException ignored) {
                    // No-op.
                }
                failed = true;
            }
        }
        finally {
            noNodesFilter = false;
            assertTrue(failed);
        }
    }

    /**
     * Gets cache configuration.
     *
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration() {
        CacheConfiguration cacheCfg = defaultCacheConfiguration();
        cacheCfg.setCacheMode(PARTITIONED);
        cacheCfg.setBackups(1);
        cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
        // When set, no node hosts the cache - the "no data nodes" tests rely on this.
        if (noNodesFilter)
            cacheCfg.setNodeFilter(F.alwaysFalse());
        return cacheCfg;
    }

    /**
     * Simple serializable value object for streaming tests.
     */
    private static class TestObject implements Serializable {
        /** Wrapped value; also serves as the hash code. */
        private int val;

        /**
         */
        private TestObject() {
            // No-op.
        }

        /**
         * @param val Value.
         */
        private TestObject(int val) {
            this.val = val;
        }

        public Integer val() {
            return val;
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return val;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object obj) {
            return obj instanceof TestObject && ((TestObject)obj).val == val;
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2009 TopQuadrant, Inc.
* All rights reserved.
*******************************************************************************/
package org.topbraid.spin.arq;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.topbraid.spin.model.print.StringPrintContext;
import org.topbraid.spin.model.update.Update;
import org.topbraid.spin.system.ExtraPrefixes;
import org.topbraid.spin.util.JenaUtil;
import org.topbraid.spin.util.SPINExpressions;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.shared.impl.PrefixMappingImpl;
import com.hp.hpl.jena.sparql.core.DatasetImpl;
import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP;
import com.hp.hpl.jena.sparql.syntax.ElementNamedGraph;
import com.hp.hpl.jena.sparql.syntax.ElementVisitorBase;
import com.hp.hpl.jena.sparql.syntax.ElementWalker;
import com.hp.hpl.jena.update.UpdateFactory;
import com.hp.hpl.jena.update.UpdateRequest;
/**
 * A singleton that can create ARQ SPARQL Queries and QueryExecution
 * objects. SPIN API users should use the provided methods here.
 *
 * @author Holger Knublauch
 */
public class ARQFactory {

    private static ARQFactory singleton = new ARQFactory();

    /**
     * Caches parsable query strings for each SPIN Command or expression Node.
     */
    private Map<Node,String> node2String = new ConcurrentHashMap<Node,String>();

    /**
     * Caches Jena query objects for each SPIN Command or expression String.
     */
    private Map<String,Query> string2Query = new ConcurrentHashMap<String,Query>();

    /**
     * Caches Jena UpdateRequest objects for each SPIN Command or expression String.
     */
    private Map<String,UpdateRequest> string2Update = new ConcurrentHashMap<String,UpdateRequest>();

    // Caching is on by default; see setUseCaches(boolean).
    private boolean useCaches = true;

    /**
     * Gets the singleton instance of this class.
     * @return the singleton
     */
    public static ARQFactory get() {
        return singleton;
    }

    /**
     * Convenience method to get a named graph from the current ARQFactory's Dataset.
     * @param graphURI the URI of the graph to get
     * @return the named graph or null
     */
    public static Model getNamedModel(String graphURI) {
        return ARQFactory.get().getDataset(null).getNamedModel(graphURI);
    }

    /**
     * Changes the singleton to some subclass.
     * @param value the new ARQFactory (not null)
     */
    public static void set(ARQFactory value) {
        ARQFactory.singleton = value;
    }

    /**
     * Can be overloaded to install extra things such as Lucene indices to all
     * local QueryExecutions generated by this factory.
     * Does nothing by default.
     * @param qexec the QueryExecution to modify
     */
    protected void adjustQueryExecution(QueryExecution qexec) {
    }

    /**
     * Programmatically resets any cached queries.
     */
    public void clearCaches() {
        node2String.clear();
        string2Query.clear();
        string2Update.clear();
    }

    /**
     * Converts a given SPIN Command (Query or Update) into a parsable String.
     * This method is the recommended way of doing this task as it uses a cache
     * to prevent duplicate computations.
     * @param spinCommand the SPIN Command to convert to String
     * @return the String
     */
    public String createCommandString(org.topbraid.spin.model.Command spinCommand) {
        String result = node2String.get(spinCommand.asNode());
        if(result == null) {
            String text = ARQ2SPIN.getTextOnly(spinCommand);
            if(text != null) {
                // The command carries its original text: just prepend prefix declarations.
                result = ARQFactory.get().createPrefixDeclarations(spinCommand.getModel()) + text;
            }
            else {
                // Otherwise serialize the SPIN RDF structure back into SPARQL text.
                StringPrintContext p = new StringPrintContext();
                p.setUsePrefixes(false);
                p.setPrintPrefixes(false);
                spinCommand.print(p);
                result = p.getString();
            }
            if(useCaches) {
                node2String.put(spinCommand.asNode(), result);
            }
        }
        return result;
    }

    /**
     * Converts a SPIN expression node into a parsable SPARQL expression string,
     * using the node cache when caching is enabled.
     * @param expression the SPIN expression to convert
     * @return the expression string
     */
    public String createExpressionString(RDFNode expression) {
        String result = node2String.get(expression.asNode());
        if(result == null) {
            StringPrintContext p = new StringPrintContext();
            p.setUsePrefixes(false);
            SPINExpressions.printExpressionString(p, expression, false, false, expression.getModel().getGraph().getPrefixMapping());
            result = p.getString();
            if(useCaches) {
                node2String.put(expression.asNode(), result);
            }
        }
        return result;
    }

    /**
     * Creates a SELECT Query that evaluates a given SPIN expression node.
     * @param expression the SPIN expression
     * @return the Query
     */
    public Query createExpressionQuery(RDFNode expression) {
        String queryString = createExpressionString(expression);
        return createExpressionQuery(queryString);
    }

    /**
     * Creates a SELECT Query that evaluates a given SPARQL expression string,
     * using the query cache when caching is enabled.
     * @param expression the SPARQL expression string
     * @return the Query
     */
    public Query createExpressionQuery(String expression) {
        Query result = string2Query.get(expression);
        if(result == null) {
            // Wrap the bare expression into a minimal SELECT so ARQ can parse it.
            String queryString = "SELECT (" + expression + ") WHERE {}";
            result = doCreateQuery(queryString);
            if(useCaches) {
                string2Query.put(expression, result);
            }
        }
        return result;
    }

    /**
     * Same as <code>createPrefixDeclarations(model, true)</code>.
     * @param model the Model to create prefix declarations for
     * @return the prefix declarations
     */
    public String createPrefixDeclarations(Model model) {
        return createPrefixDeclarations(model, true);
    }

    /**
     * Creates SPARQL prefix declarations for a given Model.
     * @param model the Model to get the prefixes from
     * @param includeExtraPrefixes true to also include implicit prefixes like afn
     * @return the prefix declarations
     */
    public String createPrefixDeclarations(Model model, boolean includeExtraPrefixes) {
        StringBuffer queryString = new StringBuffer();
        String defaultNamespace = JenaUtil.getNsPrefixURI(model, "");
        if(defaultNamespace != null) {
            queryString.append("PREFIX : <" + defaultNamespace + ">\n");
        }
        if(includeExtraPrefixes) {
            // Extra prefixes are only appended when the model does not already define them.
            Map<String,String> extraPrefixes = ExtraPrefixes.getExtraPrefixes();
            for(String prefix : extraPrefixes.keySet()) {
                String ns = extraPrefixes.get(prefix);
                perhapsAppend(queryString, prefix, ns, model);
            }
        }
        Iterator<String> prefixes = model.getNsPrefixMap().keySet().iterator();
        while (prefixes.hasNext()) {
            String prefix = prefixes.next();
            String namespace = JenaUtil.getNsPrefixURI(model, prefix);
            if(prefix.length() > 0 && namespace != null) {
                queryString.append("PREFIX " + prefix + ": <" + namespace + ">\n");
            }
        }
        return queryString.toString();
    }

    /**
     * Converts a SPIN Query object into a ARQ Query.
     * This method is the recommended way for this conversion -
     * it uses a cache to retrieve queries that it has already seen before.
     * @param spinQuery the SPIN query
     * @return the ARQ Query
     */
    public Query createQuery(org.topbraid.spin.model.Query spinQuery) {
        String queryString = createCommandString(spinQuery);
        return createQuery(queryString);
    }

    /**
     * Parses a query string into an ARQ Query, using the query cache when
     * caching is enabled.
     * @param queryString the parsable query string
     * @return the ARQ Query
     */
    public Query createQuery(String queryString) {
        Query result = string2Query.get(queryString);
        if(result == null) {
            result = doCreateQuery(queryString);
            if(useCaches) {
                string2Query.put(queryString, result);
            }
        }
        return result;
    }

    /**
     * Same as <code>doCreateQuery(queryString, null)</code>, i.e. without an
     * initial PrefixMapping.
     * @param queryString the parsable query string
     * @return the ARQ Query
     */
    public Query doCreateQuery(String queryString) {
        return doCreateQuery(queryString, null);
    }

    /**
     * Creates the "physical" Jena Query instance.
     * Can be overloaded to create engine-specific Query objects such as those
     * for AllegroGraph.
     * @param queryString the parsable query string
     * @param prefixMapping an optional PrefixMapping to initialize the Query with
     *        (this object may be modified)
     * @return the ARQ Query object
     */
    protected Query doCreateQuery(String queryString, PrefixMapping prefixMapping) {
        Query query = new Query();
        if(prefixMapping != null) {
            query.setPrefixMapping(prefixMapping);
        }
        return QueryFactory.parse(query, queryString, null, getSyntax());
    }

    /**
     * Creates a new Query from a partial query (possibly lacking
     * PREFIX declarations), using the ARQ syntax specified by <code>getSyntax</code>.
     * @param model the Model to operate on
     * @param partialQuery the (partial) query string
     * @return the Query
     */
    public Query createQuery(Model model, String partialQuery) {
        PrefixMapping pm = new PrefixMappingImpl();
        String defaultNamespace = JenaUtil.getNsPrefixURI(model, "");
        if(defaultNamespace != null) {
            pm.setNsPrefix("", defaultNamespace);
        }
        // Extra prefixes are added first so model-level prefixes can override them below.
        Map<String,String> extraPrefixes = ExtraPrefixes.getExtraPrefixes();
        for(String prefix : extraPrefixes.keySet()) {
            String ns = extraPrefixes.get(prefix);
            if(ns != null && pm.getNsPrefixURI(prefix) == null) {
                pm.setNsPrefix(prefix, ns);
            }
        }
        for(String prefix : model.getNsPrefixMap().keySet()) {
            String namespace = JenaUtil.getNsPrefixURI(model, prefix);
            if(prefix.length() > 0 && namespace != null) {
                pm.setNsPrefix(prefix, namespace);
            }
        }
        return doCreateQuery(partialQuery, pm);
    }

    /**
     * Creates a QueryExecution for a given Query in a given Model,
     * with no initial bindings.
     * The implementation basically uses Jena's QueryExecutionFactory
     * but with the option to use different Dataset as specified by
     * <code>getDataset(model)</code>.
     * @param query the Query
     * @param model the Model to query
     * @return a QueryExecution
     */
    public QueryExecution createQueryExecution(Query query, Model model) {
        return createQueryExecution(query, model, null);
    }

    /**
     * Creates a QueryExecution for a given Query in a given Model, with
     * some given initial bindings.
     * The implementation basically uses Jena's QueryExecutionFactory
     * but with the option to use different Dataset as specified by
     * <code>getDataset(model)</code>.
     * @param query the Query
     * @param model the Model to query
     * @param initialBinding the initial variable bindings or null
     * @return a QueryExecution
     */
    public QueryExecution createQueryExecution(Query query, Model model, QuerySolution initialBinding) {
        Dataset dataset = getDataset(model);
        if(dataset != null) {
            return createQueryExecution(query, dataset, initialBinding);
        }
        else {
            QueryExecution qexec = QueryExecutionFactory.create(query, model, initialBinding);
            adjustQueryExecution(qexec);
            return qexec;
        }
    }

    /**
     * Creates a QueryExecution on a Dataset with no initial bindings.
     * @param query the Query
     * @param dataset the Dataset to query
     * @return a QueryExecution
     */
    public QueryExecution createQueryExecution(Query query, Dataset dataset) {
        return createQueryExecution(query, dataset, null);
    }

    /**
     * Creates a QueryExecution on a Dataset with optional initial bindings.
     * @param query the Query
     * @param dataset the Dataset to query
     * @param initialBinding the initial variable bindings or null
     * @return a QueryExecution
     */
    public QueryExecution createQueryExecution(Query query, Dataset dataset, QuerySolution initialBinding) {
        // Honor FROM / FROM NAMED clauses by wrapping the dataset accordingly.
        if(!query.getGraphURIs().isEmpty() || !query.getNamedGraphURIs().isEmpty()) {
            dataset = new FromDataset(dataset, query);
        }
        QueryExecution qexec = QueryExecutionFactory.create(query, dataset, initialBinding);
        adjustQueryExecution(qexec);
        return qexec;
    }

    /**
     * Creates a remote QueryExecution on a given Query.
     * @param query the Query to execute
     * @return a remote QueryExecution
     */
    public QueryEngineHTTP createRemoteQueryExecution(Query query) {
        List<String> graphURIs = query.getGraphURIs();
        return createRemoteQueryExecution(query, graphURIs);
    }

    /**
     * Creates a remote QueryExecution against the SPARQL service named by the
     * first graph URI.
     * NOTE(review): assumes graphURIs is non-empty - an empty list would throw
     * IndexOutOfBoundsException; confirm callers guarantee this.
     * @param query the Query to execute
     * @param graphURIs the graph URIs; the first names the service endpoint
     * @return a remote QueryExecution
     */
    public QueryEngineHTTP createRemoteQueryExecution(Query query, List<String> graphURIs) {
        String service = graphURIs.get(0);
        String serviceAsURI = service;
        if(service.endsWith("/sparql")) {
            // Strip the trailing "/sparql" segment to derive the default graph URI.
            serviceAsURI = service.substring(0, service.lastIndexOf('/'));
        }
        return createRemoteQueryExecution(service, query, Collections.singletonList(serviceAsURI), graphURIs, null, null);
    }

    /**
     * Creates a remote QueryExecution with full control over graphs and auth.
     * @param service the SPARQL service endpoint URL
     * @param query the Query to execute
     * @param defaultGraphURIs default graph URIs (may be empty)
     * @param namedGraphURIs named graph URIs (may be empty)
     * @param user optional user name for basic authentication, or null
     * @param password password matching the user, or null
     * @return a remote QueryExecution
     */
    public QueryEngineHTTP createRemoteQueryExecution(
            String service,
            Query query,
            List<String> defaultGraphURIs,
            List<String> namedGraphURIs,
            String user,
            char[] password) {
        QueryEngineHTTP qexec = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(service, query);
        if( defaultGraphURIs.size() > 0 ) {
            qexec.setDefaultGraphURIs(defaultGraphURIs);
        }
        if( namedGraphURIs.size() > 0 ) {
            qexec.setNamedGraphURIs(namedGraphURIs);
        }
        if( user != null ) {
            qexec.setBasicAuthentication(user, password);
        }
        return qexec;
    }

    /**
     * Parses an UpdateRequest from a string, using the update cache when
     * caching is enabled.
     * @param parsableString the parsable SPARQL Update string
     * @return the UpdateRequest
     */
    public UpdateRequest createUpdateRequest(String parsableString) {
        UpdateRequest result = string2Update.get(parsableString);
        if(result == null) {
            result = UpdateFactory.create(parsableString);
            if(useCaches) {
                string2Update.put(parsableString, result);
            }
        }
        return result;
    }

    /**
     * Converts a SPIN Update into a Jena UpdateRequest via its command string.
     * @param spinUpdate the SPIN Update
     * @return the UpdateRequest
     */
    public UpdateRequest createUpdateRequest(Update spinUpdate) {
        String string = createCommandString(spinUpdate);
        return createUpdateRequest(string);
    }

    /**
     * Specifies a Dataset that shall be used for query execution.
     * Returns a new DatasetImpl by default but may be overloaded in subclasses.
     * For example, TopBraid delegates this to the currently open Graphs.
     * @param defaultModel the default Model of the Dataset
     * @return the Dataset or null
     */
    public Dataset getDataset(Model defaultModel) {
        return new DatasetImpl(defaultModel);
    }

    /**
     * Gets a list of named graphs (GRAPH elements) mentioned in a given
     * Query.
     * @param query the Query to traverse
     * @return a List of those GRAPHs
     */
    public static List<String> getNamedGraphURIs(Query query) {
        final List<String> results = new LinkedList<String>();
        ElementWalker.walk(query.getQueryPattern(), new ElementVisitorBase() {
            @Override
            public void visit(ElementNamedGraph el) {
                Node node = el.getGraphNameNode();
                if(node != null && node.isURI()) {
                    String uri = node.getURI();
                    // De-duplicate while preserving first-seen order.
                    if(!results.contains(uri)) {
                        results.add(uri);
                    }
                }
            }
        });
        return results;
    }

    /**
     * The ARQ Syntax used by default: Syntax.syntaxARQ.
     * @return the default syntax
     */
    public Syntax getSyntax() {
        return Syntax.syntaxARQ;
    }

    /**
     * Checks whether this factory currently uses its caches.
     * @return true if caching is enabled
     */
    public boolean isUsingCaches() {
        return useCaches;
    }

    /**
     * Appends a PREFIX declaration unless the model already declares the prefix.
     */
    private static void perhapsAppend(StringBuffer queryString, String prefix, String namespace, Model model) {
        if(model.getNsPrefixURI(prefix) == null && namespace != null) {
            queryString.append("PREFIX ");
            queryString.append(prefix);
            queryString.append(": <");
            queryString.append(namespace);
            queryString.append(">\n");
        }
    }

    /**
     * Tells the ARQFactory whether to use caches for the various createXY functions.
     * These are on by default.
     * @param value false to switch caches off
     */
    public void setUseCaches(boolean value) {
        this.useCaches = value;
    }
}
| |
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject.internal;
import com.google.inject.TypeLiteral;
import com.google.inject.internal.util.SourceProvider;
import com.google.inject.matcher.AbstractMatcher;
import com.google.inject.matcher.Matcher;
import com.google.inject.matcher.Matchers;
import com.google.inject.spi.TypeConverter;
import com.google.inject.spi.TypeConverterBinding;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
/**
 * Handles {@code Binder.convertToTypes} commands.
 *
 * @author crazybob@google.com (Bob Lee)
 * @author jessewilson@google.com (Jesse Wilson)
 */
final class TypeConverterBindingProcessor extends AbstractProcessor {

  TypeConverterBindingProcessor(Errors errors) {
    super(errors);
  }

  /** Installs default converters for primitives, enums, and class literals. */
  static void prepareBuiltInConverters(InjectorImpl injector) {
    // Configure type converters.
    convertToPrimitiveType(injector, int.class, Integer.class);
    convertToPrimitiveType(injector, long.class, Long.class);
    convertToPrimitiveType(injector, boolean.class, Boolean.class);
    convertToPrimitiveType(injector, byte.class, Byte.class);
    convertToPrimitiveType(injector, short.class, Short.class);
    convertToPrimitiveType(injector, float.class, Float.class);
    convertToPrimitiveType(injector, double.class, Double.class);
    convertToClass(
        injector,
        Character.class,
        new TypeConverter() {
          @Override
          public Object convert(String value, TypeLiteral<?> toType) {
            value = value.trim();
            if (value.length() != 1) {
              throw new RuntimeException("Length != 1.");
            }
            return value.charAt(0);
          }

          @Override
          public String toString() {
            return "TypeConverter<Character>";
          }
        });
    convertToClasses(
        injector,
        Matchers.subclassesOf(Enum.class),
        new TypeConverter() {
          @Override
          @SuppressWarnings("unchecked")
          public Object convert(String value, TypeLiteral<?> toType) {
            return Enum.valueOf((Class) toType.getRawType(), value);
          }

          @Override
          public String toString() {
            return "TypeConverter<E extends Enum<E>>";
          }
        });
    internalConvertToTypes(
        injector,
        new AbstractMatcher<TypeLiteral<?>>() {
          @Override
          public boolean matches(TypeLiteral<?> typeLiteral) {
            return typeLiteral.getRawType() == Class.class;
          }

          @Override
          public String toString() {
            return "Class<?>";
          }
        },
        new TypeConverter() {
          @Override
          @SuppressWarnings("unchecked")
          public Object convert(String value, TypeLiteral<?> toType) {
            try {
              return Class.forName(value);
            } catch (ClassNotFoundException e) {
              // Keep the original message but preserve the cause for debugging
              // (previously only e.getMessage() survived).
              throw new RuntimeException(e.getMessage(), e);
            }
          }

          @Override
          public String toString() {
            return "TypeConverter<Class<?>>";
          }
        });
  }

  /**
   * Registers a converter for {@code wrapperType} that delegates to its static
   * {@code parseXxx(String)} method (e.g. {@code Integer.parseInt}).
   *
   * @param primitiveType primitive whose name selects the parse method
   * @param wrapperType wrapper class declaring the parse method
   */
  private static <T> void convertToPrimitiveType(
      InjectorImpl injector, Class<T> primitiveType, final Class<T> wrapperType) {
    try {
      final Method parser =
          wrapperType.getMethod("parse" + capitalize(primitiveType.getName()), String.class);
      TypeConverter typeConverter =
          new TypeConverter() {
            @Override
            @SuppressWarnings("unchecked")
            public Object convert(String value, TypeLiteral<?> toType) {
              try {
                return parser.invoke(null, value);
              } catch (IllegalAccessException e) {
                throw new AssertionError(e);
              } catch (InvocationTargetException e) {
                // Keep the parse failure's message but preserve it as the cause
                // (previously only the message survived).
                Throwable cause = e.getTargetException();
                throw new RuntimeException(cause.getMessage(), cause);
              }
            }

            @Override
            public String toString() {
              return "TypeConverter<" + wrapperType.getSimpleName() + ">";
            }
          };
      convertToClass(injector, wrapperType, typeConverter);
    } catch (NoSuchMethodException e) {
      throw new AssertionError(e);
    }
  }

  /** Registers a converter for exactly one target class. */
  private static <T> void convertToClass(
      InjectorImpl injector, Class<T> type, TypeConverter converter) {
    convertToClasses(injector, Matchers.identicalTo(type), converter);
  }

  /** Registers a converter for all raw classes accepted by {@code typeMatcher}. */
  private static void convertToClasses(
      InjectorImpl injector, final Matcher<? super Class<?>> typeMatcher, TypeConverter converter) {
    internalConvertToTypes(
        injector,
        new AbstractMatcher<TypeLiteral<?>>() {
          @Override
          public boolean matches(TypeLiteral<?> typeLiteral) {
            Type type = typeLiteral.getType();
            if (!(type instanceof Class)) {
              return false;
            }
            Class<?> clazz = (Class<?>) type;
            return typeMatcher.matches(clazz);
          }

          @Override
          public String toString() {
            return typeMatcher.toString();
          }
        },
        converter);
  }

  /** Adds the converter binding to the injector's state. */
  private static void internalConvertToTypes(
      InjectorImpl injector, Matcher<? super TypeLiteral<?>> typeMatcher, TypeConverter converter) {
    injector.state.addConverter(
        new TypeConverterBinding(SourceProvider.UNKNOWN_SOURCE, typeMatcher, converter));
  }

  @Override
  public Boolean visit(TypeConverterBinding command) {
    injector.state.addConverter(
        new TypeConverterBinding(
            command.getSource(), command.getTypeMatcher(), command.getTypeConverter()));
    return true;
  }

  /** Upper-cases the first character, e.g. "int" -> "Int" for "parseInt". */
  private static String capitalize(String s) {
    if (s.length() == 0) {
      return s;
    }
    char first = s.charAt(0);
    char capitalized = Character.toUpperCase(first);
    return (first == capitalized) ? s : capitalized + s.substring(1);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.gridmix;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.TaskAttemptInfo;
import org.apache.hadoop.tools.rumen.TaskInfo;
import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Tests Gridmix's {@link Statistics} class.
*/
public class TestGridmixStatistics {
/**
 * Test {@link Statistics.JobStats}.
 */
@Test
@SuppressWarnings("deprecation")
public void testJobStats() throws Exception {
    Job job = new Job() {};
    JobStats jobStats = new JobStats(1, 2, job);

    // Constructor arguments must be reflected verbatim.
    assertEquals("Incorrect num-maps", 1, jobStats.getNoOfMaps());
    assertEquals("Incorrect num-reds", 2, jobStats.getNoOfReds());
    assertTrue("Incorrect job", job == jobStats.getJob());

    // No status has been attached yet.
    assertNull("Unexpected job status", jobStats.getJobStatus());

    // add a new status
    JobStatus newStatus = new JobStatus();
    jobStats.updateJobStatus(newStatus);
    assertNotNull("Missing job status", jobStats.getJobStatus());
    assertTrue("Incorrect job status", newStatus == jobStats.getJobStatus());
}
/**
 * Builds a stub {@link JobStory} that reports only the given task counts;
 * every other accessor returns {@code null} (or {@code 0}).
 */
private static JobStory getCustomJobStory(final int numMaps,
                                          final int numReds) {
    return new JobStory() {
        // The only meaningful values carried by this stub.
        @Override
        public int getNumberMaps() {
            return numMaps;
        }

        @Override
        public int getNumberReduces() {
            return numReds;
        }

        // Everything below is unused by the tests and intentionally blank.
        @Override
        public InputSplit[] getInputSplits() {
            return null;
        }

        @Override
        public JobConf getJobConf() {
            return null;
        }

        @Override
        public JobID getJobID() {
            return null;
        }

        @Override
        public String getName() {
            return null;
        }

        @Override
        public String getQueueName() {
            return null;
        }

        @Override
        public String getUser() {
            return null;
        }

        @Override
        public Values getOutcome() {
            return null;
        }

        @Override
        public long getSubmissionTime() {
            return 0;
        }

        @Override
        public TaskAttemptInfo getMapTaskAttemptInfoAdjusted(int arg0, int arg1,
            int arg2) {
            return null;
        }

        @Override
        public TaskAttemptInfo getTaskAttemptInfo(TaskType arg0, int arg1,
            int arg2) {
            return null;
        }

        @Override
        public TaskInfo getTaskInfo(TaskType arg0, int arg1) {
            return null;
        }
    };
}
/**
 * Test {@link Statistics}.
 */
@Test
@SuppressWarnings("deprecation")
public void testStatistics() throws Exception {
    // test job stats generation
    Configuration conf = new Configuration();
    // test dummy jobs like data-generation etc
    Job job = new Job(conf) {
    };
    JobStats stats = Statistics.generateJobStats(job, null);
    // With no JobStory the task counts are expected to be -1 (unknown).
    testJobStats(stats, -1, -1, null, job);
    // add a job desc with 2 map and 1 reduce task
    conf.setInt(GridmixJob.GRIDMIX_JOB_SEQ, 1);
    // test dummy jobs like data-generation etc
    job = new Job(conf) {
    };
    JobStory zjob = getCustomJobStory(2, 1);
    stats = Statistics.generateJobStats(job, zjob);
    testJobStats(stats, 2, 1, null, job);
    // add a job status
    JobStatus jStatus = new JobStatus();
    stats.updateJobStatus(jStatus);
    testJobStats(stats, 2, 1, jStatus, job);
    // start the statistics
    CountDownLatch startFlag = new CountDownLatch(1); // prevents the collector
                                                      // thread from starting
    Statistics statistics = new Statistics(new JobConf(), 0, startFlag);
    statistics.start();
    // Nothing submitted yet.
    testClusterStats(0, 0, 0);
    // add to the statistics object
    statistics.addJobStats(stats);
    testClusterStats(2, 1, 1);
    // add another job
    JobStory zjob2 = getCustomJobStory(10, 5);
    conf.setInt(GridmixJob.GRIDMIX_JOB_SEQ, 2);
    job = new Job(conf) {
    };
    JobStats stats2 = Statistics.generateJobStats(job, zjob2);
    statistics.addJobStats(stats2);
    testClusterStats(12, 6, 2);
    // finish off one job; its tasks are subtracted from the cluster totals
    statistics.add(stats2);
    testClusterStats(2, 1, 1);
    // finish off the other job
    statistics.add(stats);
    testClusterStats(0, 0, 0);
    statistics.shutdown();
}
// test the job stats
private static void testJobStats(JobStats stats, int numMaps, int numReds,
JobStatus jStatus, Job job) {
assertEquals("Incorrect num map tasks", numMaps, stats.getNoOfMaps());
assertEquals("Incorrect num reduce tasks", numReds, stats.getNoOfReds());
if (job != null) {
assertNotNull("Missing job", job);
}
// check running job
assertTrue("Incorrect job", job == stats.getJob());
if (jStatus != null) {
assertNotNull("Missing job status", jStatus);
}
// check job stats
assertTrue("Incorrect job status", jStatus == stats.getJobStatus());
}
// test the cluster stats
private static void testClusterStats(int numSubmittedMapTasks,
int numSubmittedReduceTasks,
int numSubmittedJobs) {
assertEquals("Incorrect count of total number of submitted map tasks",
numSubmittedMapTasks, ClusterStats.getSubmittedMapTasks());
assertEquals("Incorrect count of total number of submitted reduce tasks",
numSubmittedReduceTasks,
ClusterStats.getSubmittedReduceTasks());
assertEquals("Incorrect submitted jobs",
numSubmittedJobs, ClusterStats.getRunningJobStats().size());
}
}
| |
/**
*
*/
package com.stoneworks.gui;
import java.awt.Color;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import edu.umd.cs.piccolo.PCanvas;
import edu.umd.cs.piccolo.PLayer;
import edu.umd.cs.piccolo.PNode;
import edu.umd.cs.piccolo.event.PDragSequenceEventHandler;
import edu.umd.cs.piccolo.event.PInputEvent;
import edu.umd.cs.piccolo.util.PDimension;
import edu.umd.cs.piccolo.util.PPaintContext;
import edu.umd.cs.piccolox.nodes.P3DRect;
/**
* The Birds Eye View Class
*/
public class BirdsEyeView extends PCanvas implements PropertyChangeListener {

    private static final long serialVersionUID = -1113897215388843460L;

    /**
     * The node that highlights the area of the viewed canvas that is
     * currently visible in its camera.
     */
    PNode areaVisiblePNode;

    /**
     * The canvas that is being viewed.
     */
    PCanvas viewedCanvas;

    /**
     * Listener attached to the viewed canvas' camera so this view can
     * refresh itself whenever the viewed canvas changes.
     */
    PropertyChangeListener changeListener;

    /**
     * Number of layers handed to this view via {@link #connect} /
     * {@link #addLayer}.
     */
    int layerCount;

    /**
     * Creates a new instance of a BirdsEyeView.
     */
    public BirdsEyeView() {
        // show a hand cursor over the view so users know it is draggable
        this.addMouseMotionListener(new MouseMotionAdapter() {
            @Override
            public void mouseMoved(MouseEvent e) {
                BirdsEyeView.this.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
            }
        });
        // refresh this view whenever the viewed canvas' camera changes
        this.changeListener = new PropertyChangeListener() {
            public void propertyChange(PropertyChangeEvent evt) {
                BirdsEyeView.this.updateFromViewed();
            }
        };
        // semi-transparent rectangle marking the currently visible area
        this.areaVisiblePNode = new P3DRect();
        this.areaVisiblePNode.setPaint(new Color(128, 128, 255));
        this.areaVisiblePNode.setTransparency(.6f);
        this.areaVisiblePNode.setBounds(0, 0, 100, 100);
        this.getCamera().addChild(this.areaVisiblePNode);
        // dragging the visible-area rectangle pans the viewed canvas
        this.getCamera().addInputEventListener(new PDragSequenceEventHandler() {
            @Override
            protected void startDrag(PInputEvent e) {
                // only start a drag when it begins on the area rectangle
                if (e.getPickedNode() == BirdsEyeView.this.areaVisiblePNode) {
                    super.startDrag(e);
                }
            }

            @Override
            protected void drag(PInputEvent e) {
                // pan the viewed camera opposite to the drag delta
                PDimension dim = e.getDelta();
                BirdsEyeView.this.viewedCanvas.getCamera().translateView(0 - dim.getWidth(),
                        0 - dim.getHeight());
            }
        });
        // this canvas itself must neither pan nor zoom
        this.removeInputEventListener(this.getPanEventHandler());
        this.removeInputEventListener(this.getZoomEventHandler());
        this.setDefaultRenderQuality(PPaintContext.LOW_QUALITY_RENDERING);
    }

    /**
     * Attaches this view to the given canvas and mirrors the given layers.
     *
     * @param canvas the canvas to observe
     * @param viewed_layers layers of that canvas to show in this view
     */
    public void connect(PCanvas canvas, PLayer[] viewed_layers) {
        this.viewedCanvas = canvas;
        this.layerCount = 0;
        this.viewedCanvas.getCamera().addPropertyChangeListener(this.changeListener);
        for (this.layerCount = 0; this.layerCount < viewed_layers.length; ++this.layerCount) {
            this.getCamera().addLayer(this.layerCount, viewed_layers[this.layerCount]);
        }
    }

    /**
     * Add a layer to list of viewed layers
     */
    public void addLayer(PLayer new_layer) {
        this.getCamera().addLayer(new_layer);
        this.layerCount++;
    }

    /**
     * Remove the layer from the viewed layers
     */
    public void removeLayer(PLayer old_layer) {
        this.getCamera().removeLayer(old_layer);
        this.layerCount--;
    }

    /**
     * Stop the birds eye view from receiving events from the viewed canvas
     * and remove all layers.
     */
    public void disconnect() {
        this.viewedCanvas.getCamera().removePropertyChangeListener(this.changeListener);
        // BUGFIX: iterate from the end. The previous ascending loop
        // (for i = 0; i < getLayerCount(); ++i) shifted the remaining layer
        // indices after each removal, so it skipped every other layer.
        for (int i = this.getCamera().getLayerCount() - 1; i >= 0; --i) {
            this.getCamera().removeLayer(i);
        }
        // keep the bookkeeping counter consistent with the (now empty) camera
        this.layerCount = 0;
    }

    /**
     * This method will get called when the viewed canvas changes
     */
    public void propertyChange(PropertyChangeEvent event) {
        this.updateFromViewed();
    }

    /**
     * Reads the state of the viewed canvas and updates the visible-area
     * rectangle of this BirdsEyeView. May also be called from outside code.
     */
    public void updateFromViewed() {
        double viewedX;
        double viewedY;
        double viewedHeight;
        double viewedWidth;
        // viewed camera bounds: upper-left (ul) and lower-right (lr) corners
        double ul_camera_x = this.viewedCanvas.getCamera().getViewBounds().getX();
        double ul_camera_y = this.viewedCanvas.getCamera().getViewBounds().getY();
        double lr_camera_x = ul_camera_x
                + this.viewedCanvas.getCamera().getViewBounds().getWidth();
        double lr_camera_y = ul_camera_y
                + this.viewedCanvas.getCamera().getViewBounds().getHeight();
        // full bounds of everything shown in this view
        Rectangle2D drag_bounds = this.getCamera().getUnionOfLayerFullBounds();
        double ul_layer_x = drag_bounds.getX();
        double ul_layer_y = drag_bounds.getY();
        double lr_layer_x = drag_bounds.getX() + drag_bounds.getWidth();
        double lr_layer_y = drag_bounds.getY() + drag_bounds.getHeight();
        // The visible rectangle is the intersection of camera and layer
        // bounds: the upper-left corner takes the GREATER of the two values
        // (the original comments had lesser/greater swapped)...
        if (ul_camera_x < ul_layer_x) {
            viewedX = ul_layer_x;
        } else {
            viewedX = ul_camera_x;
        }
        // same for y
        if (ul_camera_y < ul_layer_y) {
            viewedY = ul_layer_y;
        } else {
            viewedY = ul_camera_y;
        }
        // ...and the lower-right corner takes the LESSER of the two values
        if (lr_camera_x < lr_layer_x) {
            viewedWidth = lr_camera_x - viewedX;
        } else {
            viewedWidth = lr_layer_x - viewedX;
        }
        // same for height
        if (lr_camera_y < lr_layer_y) {
            viewedHeight = lr_camera_y - viewedY;
        } else {
            viewedHeight = lr_layer_y - viewedY;
        }
        Rectangle2D bounds = new Rectangle2D.Double(viewedX, viewedY,
                viewedWidth, viewedHeight);
        bounds = this.getCamera().viewToLocal(bounds);
        this.areaVisiblePNode.setBounds(bounds);
        // keep the birds eye view centered
        this.getCamera().animateViewToCenterBounds(drag_bounds, true, 0);
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.inspections.unusedLocal;
import com.intellij.codeInsight.controlflow.ControlFlowUtil;
import com.intellij.codeInsight.controlflow.Instruction;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.util.InspectionMessage;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyPsiBundle;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.controlflow.ReadWriteInstruction;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.Scope;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.inspections.PyInspectionExtension;
import com.jetbrains.python.inspections.PyInspectionVisitor;
import com.jetbrains.python.inspections.quickfix.*;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.search.PyOverridingMethodsSearch;
import com.jetbrains.python.psi.search.PySuperMethodsSearch;
import com.jetbrains.python.pyi.PyiUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Control-flow based inspection pass that collects unused local symbols
 * (local variables, parameters, local functions and local classes) per
 * Python scope, then reports them with quick fixes via
 * {@link #registerProblems()}. Write accesses are gathered first; every
 * read access then "rescues" the corresponding writes from the unused set.
 */
public final class PyUnusedLocalInspectionVisitor extends PyInspectionVisitor {
  // inspection options, mirroring the user-facing settings
  private final boolean myIgnoreTupleUnpacking;
  private final boolean myIgnoreLambdaParameters;
  private final boolean myIgnoreRangeIterationVariables;
  private final boolean myIgnoreVariablesStartingWithUnderscore;
  // elements written but (so far) never proven to be read
  private final HashSet<PsiElement> myUnusedElements;
  // elements proven to be read at least once
  private final HashSet<PsiElement> myUsedElements;
  public PyUnusedLocalInspectionVisitor(@NotNull ProblemsHolder holder,
                                        @NotNull LocalInspectionToolSession session,
                                        boolean ignoreTupleUnpacking,
                                        boolean ignoreLambdaParameters,
                                        boolean ignoreRangeIterationVariables,
                                        boolean ignoreVariablesStartingWithUnderscore) {
    super(holder, session);
    myIgnoreTupleUnpacking = ignoreTupleUnpacking;
    myIgnoreLambdaParameters = ignoreLambdaParameters;
    myIgnoreRangeIterationVariables = ignoreRangeIterationVariables;
    myIgnoreVariablesStartingWithUnderscore = ignoreVariablesStartingWithUnderscore;
    myUnusedElements = new HashSet<>();
    myUsedElements = new HashSet<>();
  }
  @Override
  public void visitPyFunction(final @NotNull PyFunction node) {
    // @overload stubs only declare signatures; skip them
    if (!PyiUtil.isOverload(node, myTypeEvalContext)) {
      processScope(node);
    }
  }
  @Override
  public void visitPyLambdaExpression(final @NotNull PyLambdaExpression node) {
    processScope(node);
  }
  @Override
  public void visitPyClass(@NotNull PyClass node) {
    processScope(node);
  }
  // Analyzes one scope: gather write accesses (candidates for "unused"),
  // then reads that mark candidates as used.
  private void processScope(final ScopeOwner owner) {
    if (owner.getContainingFile() instanceof PyExpressionCodeFragment) {
      return;
    }
    // a call to locals() can observe any local name, so writes in such a
    // scope must not be considered unused; classes skip write collection too
    if (!(owner instanceof PyClass) && !callsLocals(owner)) {
      collectAllWrites(owner);
    }
    collectUsedReads(owner);
  }
  @Override
  public void visitPyStringLiteralExpression(@NotNull PyStringLiteralExpression pyString) {
    // references inside injected fragments (e.g. languages injected into a
    // string literal) count as reads of the surrounding scope's names
    final ScopeOwner owner = ScopeUtil.getScopeOwner(pyString);
    if (owner != null && !(owner instanceof PsiFile)) {
      final PsiElement instrAnchor = getControlFlowAnchorForString(pyString);
      if (instrAnchor == null) return;
      final Instruction[] instructions = ControlFlowCache.getControlFlow(owner).getInstructions();
      final int startInstruction = ControlFlowUtil.findInstructionNumberByElement(instructions, instrAnchor);
      if (startInstruction < 0) return;
      final Project project = pyString.getProject();
      final List<Pair<PsiElement, TextRange>> pairs = InjectedLanguageManager.getInstance(project).getInjectedPsiFiles(pyString);
      if (pairs != null) {
        for (Pair<PsiElement, TextRange> pair : pairs) {
          pair.getFirst().accept(new PyRecursiveElementVisitor() {
            @Override
            public void visitPyReferenceExpression(@NotNull PyReferenceExpression expr) {
              // only unqualified names refer to locals; for qualified
              // references, analyze the qualifier instead
              final PyExpression qualifier = expr.getQualifier();
              if (qualifier != null) {
                qualifier.accept(this);
                return;
              }
              final String name = expr.getName();
              if (name != null) {
                analyzeReadsInScope(name, owner, instructions, startInstruction, pyString);
              }
            }
          });
        }
      }
    }
  }
  // Finds the element that represents the given string in the scope's CFG.
  @Nullable
  private static PsiElement getControlFlowAnchorForString(@NotNull PyStringLiteralExpression host) {
    final PsiElement comprehensionPart = PsiTreeUtil.findFirstParent(host, element -> {
      // Any comprehension component and its result are represented as children expressions of the comprehension element.
      // Only they have respective nodes in CFG and thus can be used as anchors
      return element instanceof PyExpression && element.getParent() instanceof PyComprehensionElement;
    });
    if (comprehensionPart != null) {
      return comprehensionPart;
    }
    return PsiTreeUtil.getParentOfType(host, PyStatement.class);
  }
  // Registers every write access in the scope as potentially unused, unless
  // one of the listed exemptions applies.
  private void collectAllWrites(ScopeOwner owner) {
    final Instruction[] instructions = ControlFlowCache.getControlFlow(owner).getInstructions();
    for (Instruction instruction : instructions) {
      final PsiElement element = instruction.getElement();
      if (element instanceof PyFunction && owner instanceof PyFunction) {
        // a nested function definition counts as a "write" of its name;
        // unknown decorators may register it elsewhere, so skip those
        if (PyKnownDecoratorUtil.hasUnknownDecorator((PyFunction)element, myTypeEvalContext)) {
          continue;
        }
        if (!myUsedElements.contains(element)) {
          myUnusedElements.add(element);
        }
      }
      else if (instruction instanceof ReadWriteInstruction) {
        final ReadWriteInstruction readWriteInstruction = (ReadWriteInstruction)instruction;
        final ReadWriteInstruction.ACCESS access = readWriteInstruction.getAccess();
        if (!access.isWriteAccess()) {
          continue;
        }
        final String name = readWriteInstruction.getName();
        // Ignore empty, wildcards, global and nonlocal names
        final Scope scope = ControlFlowCache.getScope(owner);
        if (name == null || PyNames.UNDERSCORE.equals(name) || scope.isGlobal(name) || scope.isNonlocal(name)) {
          continue;
        }
        // qualified targets (obj.attr = ...) are attribute writes, not locals
        if (element instanceof PyTargetExpression && ((PyTargetExpression)element).isQualified()) {
          continue;
        }
        // Ignore underscore-prefixed parameters
        if (name.startsWith(PyNames.UNDERSCORE) && element instanceof PyParameter) {
          continue;
        }
        // Ignore elements out of scope
        if (element == null || !PsiTreeUtil.isAncestor(owner, element, false)) {
          continue;
        }
        // Ignore arguments of import statement
        if (PyImportStatementNavigator.getImportStatementByElement(element) != null) {
          continue;
        }
        // augmented assignment (x += 1) both reads and writes; handled as read
        if (PyAugAssignmentStatementNavigator.getStatementByTarget(element) != null) {
          continue;
        }
        if (parameterInMethodWithFixedSignature(owner, element)) {
          continue;
        }
        // bare annotations (x: int) declare without assigning
        if (PyTypeDeclarationStatementNavigator.isTypeDeclarationTarget(element)) {
          continue;
        }
        if (!myUsedElements.contains(element)) {
          myUnusedElements.add(element);
        }
      }
    }
  }
  // True for parameters of special methods whose signature is dictated by
  // the runtime (e.g. __exit__) — their parameters cannot be removed.
  private static boolean parameterInMethodWithFixedSignature(@NotNull ScopeOwner owner, @NotNull PsiElement element) {
    if (owner instanceof PyFunction && element instanceof PyParameter) {
      final PyFunction function = (PyFunction)owner;
      final String functionName = function.getName();
      final LanguageLevel level = LanguageLevel.forElement(function);
      final Map<String, PyNames.BuiltinDescription> builtinMethods =
        function.getContainingClass() != null ? PyNames.getBuiltinMethods(level) : PyNames.getModuleBuiltinMethods(level);
      return !PyNames.INIT.equals(functionName) && builtinMethods.containsKey(functionName);
    }
    return false;
  }
  // For each read access in the scope, walk the CFG backwards and mark the
  // reaching writes of that name as used.
  private void collectUsedReads(final ScopeOwner owner) {
    final Instruction[] instructions = ControlFlowCache.getControlFlow(owner).getInstructions();
    for (int i = 0; i < instructions.length; i++) {
      final Instruction instruction = instructions[i];
      if (instruction instanceof ReadWriteInstruction) {
        final ReadWriteInstruction readWriteInstruction = (ReadWriteInstruction)instruction;
        final ReadWriteInstruction.ACCESS access = readWriteInstruction.getAccess();
        if (!access.isReadAccess()) {
          continue;
        }
        final String name = readWriteInstruction.getName();
        if (name == null) {
          continue;
        }
        final PsiElement element = instruction.getElement();
        // Ignore elements out of scope
        if (element == null || !PsiTreeUtil.isAncestor(owner, element, false)) {
          continue;
        }
        final int startInstruction;
        if (access.isWriteAccess()) {
          // read+write (augmented assignment): start the backward walk from
          // the whole statement, not from the target itself
          final PyAugAssignmentStatement augAssignmentStatement = PyAugAssignmentStatementNavigator.getStatementByTarget(element);
          startInstruction = ControlFlowUtil.findInstructionNumberByElement(instructions, augAssignmentStatement);
        }
        else {
          startInstruction = i;
        }
        analyzeReadsInScope(name, owner, instructions, startInstruction, PyUtil.as(element, PyReferenceExpression.class));
      }
    }
  }
  // Backward CFG walk from startInstruction: every write of `name` reached
  // on some path is moved from the unused set to the used set.
  private void analyzeReadsInScope(@NotNull String name,
                                   @NotNull ScopeOwner owner,
                                   Instruction @NotNull [] instructions,
                                   int startInstruction,
                                   @Nullable PsiElement scopeAnchor) {
    // Check if the element is declared out of scope, mark all out of scope write accesses as used
    if (scopeAnchor != null) {
      final ScopeOwner declOwner = ScopeUtil.getDeclarationScopeOwner(scopeAnchor, name);
      if (declOwner != null && declOwner != owner) {
        final Collection<PsiElement> writeElements = ScopeUtil.getElementsOfAccessType(name, declOwner, ReadWriteInstruction.ACCESS.WRITE);
        for (PsiElement e : writeElements) {
          myUsedElements.add(e);
          myUnusedElements.remove(e);
        }
      }
    }
    ControlFlowUtil.iteratePrev(startInstruction, instructions, inst -> {
      final PsiElement instElement = inst.getElement();
      // Mark function as used
      if (instElement instanceof PyFunction) {
        if (name.equals(((PyFunction)instElement).getName())){
          myUsedElements.add(instElement);
          myUnusedElements.remove(instElement);
          return ControlFlowUtil.Operation.CONTINUE;
        }
      }
      // Mark write access as used
      else if (inst instanceof ReadWriteInstruction) {
        final ReadWriteInstruction rwInstruction = (ReadWriteInstruction)inst;
        if (rwInstruction.getAccess().isWriteAccess() && name.equals(rwInstruction.getName())) {
          // Look up higher in CFG for actual definitions
          if (instElement != null && PyTypeDeclarationStatementNavigator.isTypeDeclarationTarget(instElement)) {
            return ControlFlowUtil.Operation.NEXT;
          }
          // For elements in scope
          if (instElement != null && PsiTreeUtil.isAncestor(owner, instElement, false)) {
            myUsedElements.add(instElement);
            myUnusedElements.remove(instElement);
          }
          return ControlFlowUtil.Operation.CONTINUE;
        }
      }
      return ControlFlowUtil.Operation.NEXT;
    });
  }
  // Control-flow exception used to abort the locals() search early.
  static class DontPerformException extends RuntimeException {}
  // True when the scope (excluding nested functions) calls locals().
  private static boolean callsLocals(final ScopeOwner owner) {
    try {
      owner.acceptChildren(new PyRecursiveElementVisitor(){
        @Override
        public void visitPyCallExpression(final @NotNull PyCallExpression node) {
          final PyExpression callee = node.getCallee();
          if (callee != null && "locals".equals(callee.getName())){
            throw new DontPerformException();
          }
          node.acceptChildren(this); // look at call expr in arguments
        }
        @Override
        public void visitPyFunction(final @NotNull PyFunction node) {
          // stop here
        }
      });
    }
    catch (DontPerformException e) {
      return true;
    }
    return false;
  }
  /**
   * Reports all collected unused elements, choosing the warning message and
   * quick fixes appropriate to the element kind (function, class, parameter
   * or variable). Must be called after the visiting pass has completed.
   */
  public void registerProblems() {
    final List<PyInspectionExtension> filters = PyInspectionExtension.EP_NAME.getExtensionList();
    // Register problems
    final Set<PyFunction> functionsWithInheritors = new HashSet<>();
    final Map<PyFunction, Boolean> emptyFunctions = new HashMap<>();
    for (PsiElement element : myUnusedElements) {
      // extensions may suppress individual elements
      boolean ignoreUnused = false;
      for (PyInspectionExtension filter : filters) {
        if (filter.ignoreUnused(element, myTypeEvalContext)) {
          ignoreUnused = true;
        }
      }
      if (ignoreUnused) continue;
      if (element instanceof PyFunction) {
        // Local function
        final PsiElement nameIdentifier = ((PyFunction)element).getNameIdentifier();
        registerWarning(nameIdentifier == null ? element : nameIdentifier,
                        PyPsiBundle.message("INSP.unused.locals.local.function.isnot.used",
                                            ((PyFunction)element).getName()), new PyRemoveStatementQuickFix());
      }
      else if (element instanceof PyClass) {
        // Local class
        final PyClass cls = (PyClass)element;
        final PsiElement name = cls.getNameIdentifier();
        registerWarning(name != null ? name : element,
                        PyPsiBundle.message("INSP.unused.locals.local.class.isnot.used", cls.getName()), new PyRemoveStatementQuickFix());
      }
      else {
        // Local variable or parameter
        String name = element.getText();
        if (element instanceof PyNamedParameter || element.getParent() instanceof PyNamedParameter) {
          PyNamedParameter namedParameter = element instanceof PyNamedParameter
                                            ? (PyNamedParameter) element
                                            : (PyNamedParameter) element.getParent();
          name = namedParameter.getName();
          // When function is inside a class, first parameter may be either self or cls which is always 'used'.
          if (namedParameter.isSelf()) {
            continue;
          }
          if (myIgnoreLambdaParameters && PsiTreeUtil.getParentOfType(element, PyCallable.class) instanceof PyLambdaExpression) {
            continue;
          }
          boolean mayBeField = false;
          PyClass containingClass = null;
          PyParameterList paramList = PsiTreeUtil.getParentOfType(element, PyParameterList.class);
          if (paramList != null && paramList.getParent() instanceof PyFunction) {
            final PyFunction func = (PyFunction) paramList.getParent();
            containingClass = func.getContainingClass();
            // an unused __init__ parameter can be turned into a field
            if (containingClass != null &&
                PyUtil.isInitMethod(func) &&
                !namedParameter.isKeywordContainer() &&
                !namedParameter.isPositionalContainer()) {
              mayBeField = true;
            }
            else if (ignoreUnusedParameters(func, functionsWithInheritors)) {
              continue;
            }
            // parameters of empty method bodies are not reported (stubs),
            // unless they could become a field
            if (func.asMethod() != null) {
              Boolean isEmpty = emptyFunctions.get(func);
              if (isEmpty == null) {
                isEmpty = PyUtil.isEmptyFunction(func);
                emptyFunctions.put(func, isEmpty);
              }
              if (isEmpty && !mayBeField) {
                continue;
              }
            }
          }
          // a keyword-only parameter right after a bare '*' cannot be removed
          // if it is the only one (the '*' would be left dangling)
          boolean canRemove = !(PsiTreeUtil.getPrevSiblingOfType(element, PyParameter.class) instanceof PySingleStarParameter) ||
                              PsiTreeUtil.getNextSiblingOfType(element, PyParameter.class) != null;
          final List<LocalQuickFix> fixes = new ArrayList<>();
          if (mayBeField) {
            fixes.add(new AddFieldQuickFix(name, name, containingClass.getName(), false));
          }
          if (canRemove) {
            fixes.add(new PyRemoveParameterQuickFix());
          }
          registerWarning(element, PyPsiBundle.message("INSP.unused.locals.parameter.isnot.used", name), fixes.toArray(LocalQuickFix.EMPTY_ARRAY));
        }
        else {
          if (myIgnoreVariablesStartingWithUnderscore && element.getText().startsWith(PyNames.UNDERSCORE)) continue;
          if (myIgnoreTupleUnpacking && isTupleUnpacking(element)) continue;
          final String warningMsg = PyPsiBundle.message("INSP.unused.locals.local.variable.isnot.used", name);
          // choose the quick fix matching the syntactic position of the
          // variable: for-loop target, comprehension target, except target,
          // with target, assignment target, or plain reference
          final PyForStatement forStatement = PyForStatementNavigator.getPyForStatementByIterable(element);
          if (forStatement != null) {
            if (!myIgnoreRangeIterationVariables || !isRangeIteration(forStatement)) {
              registerWarning(element, warningMsg, new ReplaceWithWildCard());
            }
            continue;
          }
          if (isComprehensionTarget(element)) {
            registerWarning(element, warningMsg, new ReplaceWithWildCard());
            continue;
          }
          final PyExceptPart exceptPart = PyExceptPartNavigator.getPyExceptPartByTarget(element);
          if (exceptPart != null) {
            registerWarning(element, warningMsg, new PyRemoveExceptionTargetQuickFix());
            continue;
          }
          final PyWithItem withItem = PsiTreeUtil.getParentOfType(element, PyWithItem.class);
          if (withItem != null && PsiTreeUtil.isAncestor(withItem.getTarget(), element, false)) {
            if (withItem.getTarget() == element) {
              registerWarning(element, warningMsg, new PyRemoveWithTargetQuickFix());
            }
            else {
              registerWarning(element, warningMsg, new ReplaceWithWildCard());
            }
            continue;
          }
          final PyAssignmentStatement assignmentStatement = PsiTreeUtil.getParentOfType(element, PyAssignmentStatement.class);
          if (assignmentStatement != null && !PsiTreeUtil.isAncestor(assignmentStatement.getAssignedValue(), element, false)) {
            if (assignmentStatement.getLeftHandSideExpression() == element) {
              // Single assignment target (unused = value)
              registerWarning(element, warningMsg, new PyRemoveAssignmentStatementTargetQuickFix(), new PyRemoveStatementQuickFix());
            }
            else if (ArrayUtil.contains(element, assignmentStatement.getRawTargets())) {
              // Chained assignment target (used = unused = value)
              registerWarning(element, warningMsg, new PyRemoveAssignmentStatementTargetQuickFix());
            }
            else {
              // Unpacking (used, unused = value)
              registerWarning(element, warningMsg, new ReplaceWithWildCard());
            }
            continue;
          }
          registerWarning(element, warningMsg);
        }
      }
    }
  }
  // True when the element is the loop variable of some comprehension's "for".
  private static boolean isComprehensionTarget(@NotNull PsiElement element) {
    final PyComprehensionElement comprehensionExpr = PsiTreeUtil.getParentOfType(element, PyComprehensionElement.class);
    if (comprehensionExpr == null) return false;
    return ContainerUtil.exists(comprehensionExpr.getForComponents(),
                                it -> PsiTreeUtil.isAncestor(it.getIteratorVariable(), element, false));
  }
  // True when the for-loop iterates over the builtin range()/xrange().
  private boolean isRangeIteration(@NotNull PyForStatement forStatement) {
    final PyExpression source = forStatement.getForPart().getSource();
    if (!(source instanceof PyCallExpression)) {
      return false;
    }
    final PyCallExpression expr = (PyCallExpression)source;
    if (expr.isCalleeText("range", "xrange")) {
      // confirm the callee actually resolves to the builtin, not a shadow
      final PyResolveContext resolveContext = PyResolveContext.defaultContext(myTypeEvalContext);
      final PyBuiltinCache builtinCache = PyBuiltinCache.getInstance(forStatement);
      return ContainerUtil.exists(expr.multiResolveCalleeFunction(resolveContext), builtinCache::isBuiltin);
    }
    return false;
  }
  // True when unused parameters of this method should not be reported: the
  // method overrides or is overridden elsewhere, so its signature is fixed.
  // Found functions are cached in functionsWithInheritors (searches are costly).
  private boolean ignoreUnusedParameters(PyFunction func, Set<PyFunction> functionsWithInheritors) {
    if (functionsWithInheritors.contains(func)) {
      return true;
    }
    if (!PyUtil.isInitMethod(func) && PySuperMethodsSearch.search(func, myTypeEvalContext).findFirst() != null ||
        PyOverridingMethodsSearch.search(func, true).findFirst() != null) {
      functionsWithInheritors.add(func);
      return true;
    }
    return false;
  }
  // True when the element is part of a tuple unpacking where at least one
  // sibling target IS used (then the unused ones are deliberately named).
  private boolean isTupleUnpacking(PsiElement element) {
    if (!(element instanceof PyTargetExpression)) {
      return false;
    }
    // Handling of the star expressions
    PsiElement parent = element.getParent();
    if (parent instanceof PyStarExpression){
      element = parent;
      parent = element.getParent();
    }
    if (parent instanceof PyTupleExpression) {
      // if all the items of the tuple are unused, we still highlight all of them; if some are unused, we ignore
      final PyTupleExpression tuple = (PyTupleExpression)parent;
      for (PyExpression expression : tuple.getElements()) {
        if (expression instanceof PyStarExpression){
          if (!myUnusedElements.contains(((PyStarExpression)expression).getExpression())){
            return true;
          }
        } else if (!myUnusedElements.contains(expression)) {
          return true;
        }
      }
    }
    return false;
  }
  // Helper: register a problem with the "unused symbol" highlighting style.
  private void registerWarning(@NotNull PsiElement element, @InspectionMessage String msg, LocalQuickFix @NotNull... quickfixes) {
    registerProblem(element, msg, ProblemHighlightType.LIKE_UNUSED_SYMBOL, null, quickfixes);
  }
  // Quick fix replacing an unused loop/unpacking target with '_'.
  private static class ReplaceWithWildCard implements LocalQuickFix {
    @Override
    @NotNull
    public String getFamilyName() {
      return PyPsiBundle.message("INSP.unused.locals.replace.with.wildcard");
    }
    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiElement psiElement = descriptor.getPsiElement();
      // generate a dummy "for _ in ..." statement and reuse its '_' target
      final PyFile pyFile = (PyFile) PyElementGenerator.getInstance(psiElement.getProject()).createDummyFile(LanguageLevel.getDefault(),
                                                                                                            "for _ in tuples:\n pass"
      );
      final PyExpression target = ((PyForStatement)pyFile.getStatements().get(0)).getForPart().getTarget();
      if (target != null) {
        psiElement.replace(target);
      }
    }
  }
}
| |
package com.cedricziel.idea.fluid.variables.provider;
import com.cedricziel.idea.fluid.extensionPoints.VariableProvider;
import com.cedricziel.idea.fluid.lang.FluidLanguage;
import com.cedricziel.idea.fluid.lang.psi.*;
import com.cedricziel.idea.fluid.util.FluidTypeResolver;
import com.cedricziel.idea.fluid.variables.FluidVariable;
import com.intellij.codeInsight.completion.CompletionParameters;
import com.intellij.lang.Language;
import com.intellij.lang.html.HTMLLanguage;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.XmlRecursiveElementWalkingVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.ProcessingContext;
import com.jetbrains.php.PhpIndex;
import com.jetbrains.php.lang.psi.resolve.types.PhpType;
import gnu.trove.THashMap;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import java.util.*;
public class InTemplateDeclarationVariableProvider implements VariableProvider {
private static boolean containsLanguage(@NotNull Language language, @NotNull PsiElement psiElement) {
PsiFile containingFile = psiElement.getContainingFile();
if (containingFile == null) {
return false;
}
FileViewProvider viewProvider = containingFile.getViewProvider();
return viewProvider.getLanguages().contains(language);
}
private static PsiElement extractLanguagePsiElementForElementAtPosition(@NotNull Language language, @NotNull PsiElement psiElement, int offset) {
FileViewProvider viewProvider = psiElement.getContainingFile().getViewProvider();
PsiFile psi = viewProvider.getPsi(language);
PsiElement elementAt = psi.findElementAt(offset);
if (elementAt == null) {
return null;
}
return elementAt.getParent();
}
@NotNull
private static Collection<String> collectForArrayScopeVariablesFoo(@NotNull Project project, @NotNull Collection<String> typeName, @NotNull FluidVariable psiVariable) {
Collection<String> previousElements = psiVariable.getTypes();
String[] strings = typeName.toArray(new String[0]);
for (int i = 1; i <= strings.length - 1; i++) {
previousElements = FluidTypeResolver.resolveFluidMethodName(project, previousElements, strings[i]);
// we can stop on empty list
if (previousElements.size() == 0) {
return Collections.emptyList();
}
}
return previousElements;
}
    /**
     * Registers the loop variable declared by an enclosing HTML
     * {@code <f:for each="{items}" as="item">} tag: the "as" variable gets the
     * element types of the "each" collection (PHP array types with their
     * {@code []} suffix stripped). No-op when there is no enclosing f:for tag
     * or the "each" variable is unknown.
     */
    private static void collectForArrayScopeVariables(PsiElement psiElement, Map<String, FluidVariable> globalVars) {
        if (!containsLanguage(HTMLLanguage.INSTANCE, psiElement)) {
            return;
        }
        PsiFile psi = extractLanguagePsiForElement(HTMLLanguage.INSTANCE, psiElement);
        if (psi == null) {
            return;
        }
        // NOTE(review): offset - 2 appears to step back over a prefix before
        // the element's own offset — confirm against the completion contributor
        PsiElement elementAt = psi.findElementAt(psiElement.getTextOffset() - 2);
        // climb to the innermost enclosing <f:for> tag in the HTML tree
        PsiElement fForTag = PsiTreeUtil.findFirstParent(elementAt, psiElement2 -> PlatformPatterns
            .psiElement(XmlTag.class)
            .withName(PlatformPatterns.string().oneOf("f:for"))
            .accepts(psiElement2)
        );
        if (!(fForTag instanceof XmlTag)) {
            return;
        }
        XmlTag fForElement = (XmlTag) fForTag;
        XmlAttribute eachAttribute = fForElement.getAttribute("each");
        if (eachAttribute == null) {
            return;
        }
        if (eachAttribute.getValueElement() == null) {
            return;
        }
        // strip surrounding quotes/braces from the each="{...}" value, then
        // keep only the root identifier of a dotted path
        String variableName = StringUtils.trim(StringUtils.stripStart(StringUtils.stripEnd(eachAttribute.getValueElement().getText(), "}\""), "\"{"));
        if (variableName.split("\\.").length > 0) {
            variableName = variableName.split("\\.")[0];
        }
        // the iterated variable must already be known
        if (!globalVars.containsKey(variableName)) {
            return;
        }
        XmlAttribute asAttribute = fForElement.getAttribute("as");
        if (asAttribute == null) {
            return;
        }
        PhpType phpType = new PhpType();
        Collection<String> forTagInIdentifierString = FluidTypeResolver.getForTagIdentifierAsString(fForElement);
        if (forTagInIdentifierString.size() > 1) {
            // nested resolve
            String rootElement = forTagInIdentifierString.iterator().next();
            if (globalVars.containsKey(rootElement)) {
                FluidVariable psiVariable = globalVars.get(rootElement);
                for (String arrayType : collectForArrayScopeVariablesFoo(psiElement.getProject(), forTagInIdentifierString, psiVariable)) {
                    phpType.add(arrayType);
                }
            }
        } else {
            // add single "for" var
            for (String s : globalVars.get(variableName).getTypes()) {
                phpType.add(s);
            }
        }
        String scopeVariable = asAttribute.getValue();
        // find array types; since they are phptypes they ends with []
        Set<String> types = new HashSet<>();
        for (String arrayType : PhpIndex.getInstance(psiElement.getProject()).completeType(psiElement.getProject(), phpType, new HashSet<>()).getTypes()) {
            if (arrayType.endsWith("[]")) {
                types.add(arrayType.substring(0, arrayType.length() - 2));
            }
        }
        // we already have same variable in scope, so merge types
        if (globalVars.containsKey(scopeVariable)) {
            globalVars.get(scopeVariable).getTypes().addAll(types);
        } else {
            globalVars.put(scopeVariable, new FluidVariable(types, asAttribute.getValueElement()));
        }
    }
/**
 * Resolves the PSI tree of {@code language} inside the (multi-language) file containing
 * {@code psiElement}, but only when that tree actually has an element at the same text offset.
 *
 * @param language   the language whose PSI tree should be extracted
 * @param psiElement the element whose file and offset are used for the lookup
 * @return the language-specific {@link PsiFile}, or {@code null} when no element of that
 *         language exists at the offset of {@code psiElement}
 */
private static PsiFile extractLanguagePsiForElement(@NotNull Language language, @NotNull PsiElement psiElement) {
    PsiFile languagePsi = psiElement.getContainingFile().getViewProvider().getPsi(language);
    // Only hand the file back when the requested language covers this exact offset.
    return languagePsi.findElementAt(psiElement.getTextOffset()) != null ? languagePsi : null;
}
/**
 * Completion entry point: collects every Fluid variable visible around the caret position
 * into {@code variableMap}.
 */
@Override
public void provide(@NotNull CompletionParameters parameters, ProcessingContext context, Map<String, FluidVariable> variableMap) {
    getVariablesFromAroundElement(parameters.getPosition(), variableMap);
}
// Aggregates variables from all supported declaration styles around the given element.
// NOTE: the order of the putAll calls is significant — later sources override earlier
// ones on name clashes, and the "for"-scope pass at the end merges into the map in place.
private void getVariablesFromAroundElement(PsiElement psiElement, Map<String, FluidVariable> variableMap) {
    variableMap.putAll(collectXmlViewHelperSetVariables(psiElement));   // <f:variable name="..."> tags
    variableMap.putAll(collectInlineViewHelperSetVariables(psiElement)); // inline {f:variable(name: ...)} expressions
    variableMap.putAll(collectXmlMapViewHelperSetVariables(psiElement)); // <f:alias map="{...}"> keys
    collectForArrayScopeVariables(psiElement, variableMap);              // <f:for each="..." as="..."> scope vars
}
// Element-based entry point; delegates to the same collection logic as the
// completion-based overload above.
@Override
public void provide(@NotNull PsiElement element, Map<String, FluidVariable> variableMap) {
    getVariablesFromAroundElement(element, variableMap);
}
/**
 * Collects variables declared via inline Fluid expressions like {@code {f:variable(name: 'foo')}}.
 *
 * @return the discovered variables, or an empty map when the element is not inside Fluid content
 */
private Map<String, FluidVariable> collectInlineViewHelperSetVariables(@NotNull PsiElement psiElement) {
    if (containsLanguage(FluidLanguage.INSTANCE, psiElement)) {
        PsiFile fluidPsi = extractLanguagePsiForElement(FluidLanguage.INSTANCE, psiElement);
        if (fluidPsi != null) {
            InlineFVariableVisitor visitor = new InlineFVariableVisitor();
            fluidPsi.accept(visitor);
            return visitor.variables;
        }
    }
    return new THashMap<>();
}
/**
 * Collects variables declared via {@code <f:variable name="...">} XML tags.
 *
 * @return the discovered variables, or an empty map when the element is not inside HTML content
 */
private Map<String, FluidVariable> collectXmlViewHelperSetVariables(@NotNull PsiElement psiElement) {
    if (containsLanguage(HTMLLanguage.INSTANCE, psiElement)) {
        PsiFile htmlPsi = extractLanguagePsiForElement(HTMLLanguage.INSTANCE, psiElement);
        if (htmlPsi != null) {
            XmlVariableVisitor visitor = new XmlVariableVisitor();
            htmlPsi.accept(visitor);
            return visitor.variables;
        }
    }
    return new THashMap<>();
}
/**
 * Collects variables declared via the {@code map} argument of {@code <f:alias map="{...}">} tags.
 *
 * <p>The parameter is now annotated {@code @NotNull} for consistency with the sibling
 * collectors ({@code collectXmlViewHelperSetVariables}, {@code collectInlineViewHelperSetVariables}),
 * which already declare the same contract.
 *
 * @return the discovered variables, or an empty map when the element is not inside HTML content
 */
private Map<String, FluidVariable> collectXmlMapViewHelperSetVariables(@NotNull PsiElement psiElement) {
    if (!containsLanguage(HTMLLanguage.INSTANCE, psiElement)) {
        return new THashMap<>();
    }
    PsiFile psi = extractLanguagePsiForElement(HTMLLanguage.INSTANCE, psiElement);
    if (psi == null) {
        return new THashMap<>();
    }
    XmlFAliasVisitor visitor = new XmlFAliasVisitor();
    psi.accept(visitor);
    return visitor.variables;
}
/**
 * Walks an XML/HTML tree and records every {@code <f:variable name="...">} declaration
 * into {@link #variables}, keyed by the declared name.
 */
private static class XmlVariableVisitor extends XmlRecursiveElementWalkingVisitor {
    public Map<String, FluidVariable> variables = new THashMap<>();

    @Override
    public void visitXmlTag(XmlTag tag) {
        if ("f:variable".equals(tag.getName())) {
            String name = tag.getAttributeValue("name");
            // Ignore declarations without a usable name attribute.
            if (name != null && name.length() > 0) {
                variables.put(name, new FluidVariable(name));
            }
        }
        super.visitXmlTag(tag);
    }
}
/**
 * Walks an XML/HTML tree and records each key of the {@code map} argument of
 * {@code <f:alias map="{key: value, ...}">} tags as a variable.
 */
private static class XmlFAliasVisitor extends XmlRecursiveElementWalkingVisitor {
    public Map<String, FluidVariable> variables = new THashMap<>();

    @Override
    public void visitXmlTag(XmlTag tag) {
        if ("f:alias".equals(tag.getName())) {
            collectMapKeys(tag);
        }
        super.visitXmlTag(tag);
    }

    // Extracts the Fluid array literal backing the "map" attribute and registers each key.
    private void collectMapKeys(XmlTag tag) {
        XmlAttribute mapAttribute = tag.getAttribute("map");
        if (mapAttribute == null) {
            return;
        }
        XmlAttributeValue valueElement = mapAttribute.getValueElement();
        if (valueElement == null) {
            return;
        }
        // Jump into the Fluid language tree one character past the opening quote.
        int offset = valueElement.getValueTextRange().getStartOffset() + 1;
        PsiElement fluidElement = extractLanguagePsiElementForElementAtPosition(FluidLanguage.INSTANCE, tag, offset);
        FluidArrayCreationExpr fluidArray = (FluidArrayCreationExpr) PsiTreeUtil.findFirstParent(fluidElement, x -> x instanceof FluidArrayCreationExpr);
        if (fluidArray == null) {
            return;
        }
        fluidArray.getArrayKeyList().forEach(arrayKey -> {
            PsiElement firstChild = arrayKey.getFirstChild();
            // Quoted keys are unwrapped via their string contents; anything else keeps its raw text.
            String key = firstChild instanceof FluidStringLiteral
                ? ((FluidStringLiteral) firstChild).getContents()
                : arrayKey.getText();
            variables.put(key, new FluidVariable(key));
        });
    }
}
/**
 * Walks a Fluid tree and records every inline {@code f:variable} view-helper call,
 * e.g. {@code {f:variable(name: 'foo')}}, keyed by the declared name.
 */
private static class InlineFVariableVisitor extends FluidRecursiveWalkingVisitor {
    public Map<String, FluidVariable> variables = new THashMap<>();

    @Override
    public void visitViewHelperExpr(@NotNull FluidViewHelperExpr o) {
        if (o.getPresentableName().equals("f:variable")) {
            registerVariableName(o);
        }
        super.visitViewHelperExpr(o);
    }

    // Pulls the "name" argument out of the view-helper call and registers it as a variable.
    private void registerVariableName(FluidViewHelperExpr expr) {
        FluidViewHelperArgumentList argumentList = expr.getViewHelperArgumentList();
        if (argumentList == null) {
            return;
        }
        FluidViewHelperArgument argument = argumentList.getArgument("name");
        if (argument == null || argument.getArgumentValue() == null) {
            return;
        }
        FluidLiteral literal = argument.getArgumentValue().getLiteral();
        if (literal == null) {
            return;
        }
        // Quoted names are unwrapped via their string contents; anything else keeps its raw text.
        String name = literal instanceof FluidStringLiteral
            ? ((FluidStringLiteral) literal).getContents()
            : literal.getText();
        variables.put(name, new FluidVariable(name));
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ilm;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.license.XPackLicenseState;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static org.elasticsearch.xpack.core.ilm.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY;
/**
* We cache the currently executing ILM phase in the index metadata so the ILM execution for managed indices is not irrecoverably
* interrupted by a concurrent update policy that, say, would remove the current execution phase altogether.
* <p>
 * This class contains a series of methods that help manage the cached ILM phase.
*/
public final class PhaseCacheManagement {

    private static final Logger logger = LogManager.getLogger(PhaseCacheManagement.class);

    /** Utility class; all members are static. */
    private PhaseCacheManagement() {
    }

    /**
     * Rereads the phase JSON for the given index, returning a new cluster state.
     */
    public static ClusterState refreshPhaseDefinition(final ClusterState state, final String index,
                                                      final LifecyclePolicyMetadata updatedPolicy) {
        final IndexMetadata idxMeta = state.metadata().index(index);
        Metadata.Builder metadataBuilder = Metadata.builder(state.metadata());
        refreshPhaseDefinition(metadataBuilder, idxMeta, updatedPolicy);
        return ClusterState.builder(state).metadata(metadataBuilder).build();
    }

    /**
     * Rereads the phase JSON for the given index, and updates the provided metadata.
     */
    public static void refreshPhaseDefinition(final Metadata.Builder metadataBuilder, final IndexMetadata idxMeta,
                                              final LifecyclePolicyMetadata updatedPolicy) {
        String index = idxMeta.getIndex().getName();
        assert eligibleToCheckForRefresh(idxMeta) : "index " + index + " is missing crucial information needed to refresh phase definition";

        logger.trace("[{}] updating cached phase definition for policy [{}]", index, updatedPolicy.getName());
        LifecycleExecutionState currentExState = LifecycleExecutionState.fromIndexMetadata(idxMeta);

        // Serialize the new policy's definition of the phase the index is currently in,
        // and store it back into the index's custom ILM metadata.
        String currentPhase = currentExState.getPhase();
        PhaseExecutionInfo pei = new PhaseExecutionInfo(updatedPolicy.getName(),
            updatedPolicy.getPolicy().getPhases().get(currentPhase), updatedPolicy.getVersion(), updatedPolicy.getModifiedDate());

        LifecycleExecutionState newExState = LifecycleExecutionState.builder(currentExState)
            .setPhaseDefinition(Strings.toString(pei, false, false))
            .build();

        metadataBuilder.put(IndexMetadata.builder(idxMeta)
            .putCustom(ILM_CUSTOM_METADATA_KEY, newExState.asMap()));
    }

    /**
     * Ensure that we have the minimum amount of metadata necessary to check for cache phase
     * refresh. This includes:
     * - An execution state
     * - Existing phase definition JSON
     * - A current step key
     * - A current phase in the step key
     * - Not currently in the ERROR step
     */
    public static boolean eligibleToCheckForRefresh(final IndexMetadata metadata) {
        LifecycleExecutionState executionState = LifecycleExecutionState.fromIndexMetadata(metadata);
        if (executionState == null || executionState.getPhaseDefinition() == null) {
            return false;
        }

        Step.StepKey currentStepKey = LifecycleExecutionState.getCurrentStepKey(executionState);
        if (currentStepKey == null || currentStepKey.getPhase() == null) {
            return false;
        }

        return ErrorStep.NAME.equals(currentStepKey.getName()) == false;
    }

    /**
     * For the given new policy, returns a new cluster with all updateable indices' phase JSON refreshed.
     */
    public static ClusterState updateIndicesForPolicy(final ClusterState state, final NamedXContentRegistry xContentRegistry,
                                                      final Client client, final LifecyclePolicy oldPolicy,
                                                      final LifecyclePolicyMetadata newPolicy, XPackLicenseState licenseState) {
        Metadata.Builder mb = Metadata.builder(state.metadata());
        if (updateIndicesForPolicy(mb, state, xContentRegistry, client, oldPolicy, newPolicy, licenseState)) {
            return ClusterState.builder(state).metadata(mb).build();
        }
        return state;
    }

    /**
     * For the given new policy, update the provided metadata to reflect the refreshed phase JSON for all updateable indices.
     * Returns true if any indices were updated and false otherwise.
     * Users of this API should consider the returned value and only create a new {@link ClusterState} if `true` is returned.
     */
    public static boolean updateIndicesForPolicy(final Metadata.Builder mb, final ClusterState currentState,
                                                 final NamedXContentRegistry xContentRegistry, final Client client,
                                                 final LifecyclePolicy oldPolicy, final LifecyclePolicyMetadata newPolicy,
                                                 final XPackLicenseState licenseState) {
        assert oldPolicy.getName().equals(newPolicy.getName()) : "expected both policies to have the same id but they were: [" +
            oldPolicy.getName() + "] vs. [" + newPolicy.getName() + "]";

        // No need to update anything if the policies are identical in contents
        if (oldPolicy.equals(newPolicy.getPolicy())) {
            logger.debug("policy [{}] is unchanged and no phase definition refresh is needed", oldPolicy.getName());
            return false;
        }

        // Narrow down to the indices managed by this policy whose cached phase can be safely refreshed.
        final List<IndexMetadata> indicesThatCanBeUpdated =
            StreamSupport.stream(Spliterators.spliteratorUnknownSize(currentState.metadata().indices().valuesIt(), 0), false)
                .filter(meta -> newPolicy.getName().equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(meta.getSettings())))
                .filter(meta -> isIndexPhaseDefinitionUpdatable(xContentRegistry, client, meta, newPolicy.getPolicy(), licenseState))
                .collect(Collectors.toList());

        final List<String> refreshedIndices = new ArrayList<>(indicesThatCanBeUpdated.size());
        for (IndexMetadata index : indicesThatCanBeUpdated) {
            try {
                refreshPhaseDefinition(mb, index, newPolicy);
                refreshedIndices.add(index.getIndex().getName());
            } catch (Exception e) {
                // Fixed: log the index name rather than the IndexMetadata object, whose default
                // toString is not the index name.
                logger.warn(new ParameterizedMessage("[{}] unable to refresh phase definition for updated policy [{}]",
                    index.getIndex().getName(), newPolicy.getName()), e);
            }
        }
        logger.debug("refreshed policy [{}] phase definition for [{}] indices", newPolicy.getName(), refreshedIndices.size());
        return refreshedIndices.size() > 0;
    }

    /**
     * Returns 'true' if the index's cached phase JSON can be safely reread, 'false' otherwise.
     */
    public static boolean isIndexPhaseDefinitionUpdatable(final NamedXContentRegistry xContentRegistry, final Client client,
                                                          final IndexMetadata metadata, final LifecyclePolicy newPolicy,
                                                          final XPackLicenseState licenseState) {
        final String index = metadata.getIndex().getName();
        if (eligibleToCheckForRefresh(metadata) == false) {
            logger.debug("[{}] does not contain enough information to check for eligibility of refreshing phase", index);
            return false;
        }
        final String policyId = newPolicy.getName();

        final LifecycleExecutionState executionState = LifecycleExecutionState.fromIndexMetadata(metadata);
        final Step.StepKey currentStepKey = LifecycleExecutionState.getCurrentStepKey(executionState);
        final String currentPhase = currentStepKey.getPhase();

        // The new policy must still contain the exact step the index is currently on.
        final Set<Step.StepKey> newStepKeys = newPolicy.toSteps(client, licenseState).stream()
            .map(Step::getKey)
            .collect(Collectors.toCollection(LinkedHashSet::new));

        if (newStepKeys.contains(currentStepKey) == false) {
            // The index is on a step that doesn't exist in the new policy, we
            // can't safely re-read the JSON
            logger.debug("[{}] updated policy [{}] does not contain the current step key [{}], so the policy phase will not be refreshed",
                index, policyId, currentStepKey);
            return false;
        }

        final String phaseDef = executionState.getPhaseDefinition();
        final Set<Step.StepKey> oldStepKeys = readStepKeys(xContentRegistry, client, phaseDef, currentPhase, licenseState);
        if (oldStepKeys == null) {
            logger.debug("[{}] unable to parse phase definition for cached policy [{}], policy phase will not be refreshed",
                index, policyId);
            return false;
        }

        final Set<Step.StepKey> oldPhaseStepKeys = oldStepKeys.stream()
            .filter(sk -> currentPhase.equals(sk.getPhase()))
            .collect(Collectors.toCollection(LinkedHashSet::new));

        final PhaseExecutionInfo phaseExecutionInfo = new PhaseExecutionInfo(policyId, newPolicy.getPhases().get(currentPhase), 1L, 1L);
        final String peiJson = Strings.toString(phaseExecutionInfo);

        final Set<Step.StepKey> newPhaseStepKeys = readStepKeys(xContentRegistry, client, peiJson, currentPhase, licenseState);
        if (newPhaseStepKeys == null) {
            logger.debug(new ParameterizedMessage("[{}] unable to parse phase definition for policy [{}] " +
                "to determine if it could be refreshed", index, policyId));
            return false;
        }

        if (newPhaseStepKeys.equals(oldPhaseStepKeys)) {
            // The new and old phase have the same stepkeys for this current phase, so we can
            // refresh the definition because we know it won't change the execution flow.
            logger.debug("[{}] updated policy [{}] contains the same phase step keys and can be refreshed", index, policyId);
            return true;
        } else {
            logger.debug("[{}] updated policy [{}] has different phase step keys and will NOT refresh phase " +
                    "definition as it differs too greatly. old: {}, new: {}",
                index, policyId, oldPhaseStepKeys, newPhaseStepKeys);
            return false;
        }
    }

    /**
     * Parse the {@code phaseDef} phase definition to get the stepkeys for the given phase.
     * If there is an error parsing or if the phase definition is missing the required
     * information, returns null.
     */
    @Nullable
    public static Set<Step.StepKey> readStepKeys(final NamedXContentRegistry xContentRegistry, final Client client,
                                                 final String phaseDef, final String currentPhase, final XPackLicenseState licenseState) {
        final PhaseExecutionInfo phaseExecutionInfo;
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(xContentRegistry,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, phaseDef)) {
            phaseExecutionInfo = PhaseExecutionInfo.parse(parser, currentPhase);
        } catch (Exception e) {
            logger.trace(new ParameterizedMessage("exception reading step keys checking for refreshability, phase definition: {}",
                phaseDef), e);
            return null;
        }

        if (phaseExecutionInfo == null || phaseExecutionInfo.getPhase() == null) {
            return null;
        }

        return phaseExecutionInfo.getPhase().getActions().values().stream()
            .flatMap(a -> a.toSteps(client, phaseExecutionInfo.getPhase().getName(), null, licenseState).stream())
            .map(Step::getKey)
            .collect(Collectors.toCollection(LinkedHashSet::new));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3native;
import static org.apache.hadoop.fs.s3native.NativeS3FileSystem.PATH_DELIMITER;
import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_BUFFER_DIR_KEY;
import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.addDeprecatedConfigKeys;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Time;
/**
* <p>
* A stub implementation of {@link NativeFileSystemStore} for testing
* {@link NativeS3FileSystem} without actually connecting to S3.
* </p>
*/
public class InMemoryNativeFileSystemStore implements NativeFileSystemStore {

  static {
    // Add the deprecated config keys
    addDeprecatedConfigKeys();
  }

  private Configuration conf;

  // Keys are stored sorted so list() can emulate S3's lexicographic listing order.
  private SortedMap<String, FileMetadata> metadataMap =
      new TreeMap<String, FileMetadata>();
  private SortedMap<String, byte[]> dataMap = new TreeMap<String, byte[]>();

  @Override
  public void initialize(URI uri, Configuration conf) throws IOException {
    this.conf = conf;
  }

  @Override
  public void storeEmptyFile(String key) throws IOException {
    metadataMap.put(key, new FileMetadata(key, 0, Time.now()));
    dataMap.put(key, new byte[0]);
  }

  @Override
  public void storeFile(String key, File file, byte[] md5Hash)
    throws IOException {

    // Read the whole file into memory; md5Hash is ignored by this stub.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buf = new byte[8192];
    int numRead;
    BufferedInputStream in = null;
    try {
      in = new BufferedInputStream(new FileInputStream(file));
      while ((numRead = in.read(buf)) >= 0) {
        out.write(buf, 0, numRead);
      }
    } finally {
      if (in != null) {
        in.close();
      }
    }
    metadataMap.put(key,
        new FileMetadata(key, file.length(), Time.now()));
    dataMap.put(key, out.toByteArray());
  }

  @Override
  public InputStream retrieve(String key) throws IOException {
    return retrieve(key, 0);
  }

  @Override
  public InputStream retrieve(String key, long byteRangeStart)
    throws IOException {

    // NOTE(review): a missing key yields data == null and an NPE below — TODO confirm
    // whether callers guarantee the key exists before retrieving.
    byte[] data = dataMap.get(key);
    File file = createTempFile();
    BufferedOutputStream out = null;
    try {
      out = new BufferedOutputStream(new FileOutputStream(file));
      out.write(data, (int) byteRangeStart,
          data.length - (int) byteRangeStart);
    } finally {
      if (out != null) {
        out.close();
      }
    }
    return new FileInputStream(file);
  }

  /**
   * Creates a self-deleting temp file in the configured S3 buffer directory,
   * creating the directory first if needed.
   */
  private File createTempFile() throws IOException {
    File dir = new File(conf.get(S3_NATIVE_BUFFER_DIR_KEY));
    if (!dir.exists() && !dir.mkdirs()) {
      throw new IOException("Cannot create S3 buffer directory: " + dir);
    }
    File result = File.createTempFile("test-", ".tmp", dir);
    result.deleteOnExit();
    return result;
  }

  @Override
  public FileMetadata retrieveMetadata(String key) throws IOException {
    return metadataMap.get(key);
  }

  @Override
  public PartialListing list(String prefix, int maxListingLength)
      throws IOException {
    return list(prefix, maxListingLength, null, false);
  }

  @Override
  public PartialListing list(String prefix, int maxListingLength,
      String priorLastKey, boolean recursive) throws IOException {

    return list(prefix, recursive ? null : PATH_DELIMITER, maxListingLength, priorLastKey);
  }

  /**
   * Lists keys under {@code prefix}. With a delimiter, keys containing the
   * delimiter past the prefix are collapsed into common prefixes (directory
   * emulation); without one, all matching keys are returned flat.
   *
   * NOTE(review): priorLastKey is accepted but never used to skip already-listed
   * keys — TODO confirm whether resumed (paginated) listings are exercised by tests.
   */
  private PartialListing list(String prefix, String delimiter,
      int maxListingLength, String priorLastKey) throws IOException {

    if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
      prefix += PATH_DELIMITER;
    }

    List<FileMetadata> metadata = new ArrayList<FileMetadata>();
    SortedSet<String> commonPrefixes = new TreeSet<String>();
    for (String key : dataMap.keySet()) {
      if (key.startsWith(prefix)) {
        if (delimiter == null) {
          metadata.add(retrieveMetadata(key));
        } else {
          int delimIndex = key.indexOf(delimiter, prefix.length());
          if (delimIndex == -1) {
            metadata.add(retrieveMetadata(key));
          } else {
            String commonPrefix = key.substring(0, delimIndex);
            commonPrefixes.add(commonPrefix);
          }
        }
      }
      if (metadata.size() + commonPrefixes.size() == maxListingLength) {
        // Fixed: the truncated listing was previously constructed but never
        // returned, so maxListingLength was silently ignored. Returning it with
        // the current key as priorLastKey lets the caller resume the listing.
        return new PartialListing(key, metadata.toArray(new FileMetadata[0]),
            commonPrefixes.toArray(new String[0]));
      }
    }

    // A null priorLastKey signals the listing is complete.
    return new PartialListing(null, metadata.toArray(new FileMetadata[0]),
        commonPrefixes.toArray(new String[0]));
  }

  @Override
  public void delete(String key) throws IOException {
    metadataMap.remove(key);
    dataMap.remove(key);
  }

  @Override
  public void copy(String srcKey, String dstKey) throws IOException {
    // NOTE(review): copying a nonexistent srcKey stores null entries — TODO confirm
    // callers only copy existing keys.
    metadataMap.put(dstKey, metadataMap.get(srcKey));
    dataMap.put(dstKey, dataMap.get(srcKey));
  }

  @Override
  public void purge(String prefix) throws IOException {
    Iterator<Entry<String, FileMetadata>> i =
        metadataMap.entrySet().iterator();
    while (i.hasNext()) {
      Entry<String, FileMetadata> entry = i.next();
      if (entry.getKey().startsWith(prefix)) {
        dataMap.remove(entry.getKey());
        i.remove();
      }
    }
  }

  @Override
  public void dump() throws IOException {
    System.out.println(metadataMap.values());
    System.out.println(dataMap.keySet());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.operators;
import static org.apache.flink.util.Preconditions.checkArgument;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.KeyedStateStore;
import org.apache.flink.api.common.state.State;
import org.apache.flink.api.common.state.StateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MetricOptions;
import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.checkpoint.CheckpointOptions.CheckpointType;
import org.apache.flink.runtime.metrics.groups.OperatorMetricGroup;
import org.apache.flink.runtime.state.AbstractKeyedStateBackend;
import org.apache.flink.runtime.state.CheckpointStreamFactory;
import org.apache.flink.runtime.state.DefaultKeyedStateStore;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.KeyGroupRangeAssignment;
import org.apache.flink.runtime.state.KeyGroupStatePartitionStreamProvider;
import org.apache.flink.runtime.state.KeyGroupsList;
import org.apache.flink.runtime.state.KeyedStateBackend;
import org.apache.flink.runtime.state.KeyedStateCheckpointOutputStream;
import org.apache.flink.runtime.state.KeyedStateHandle;
import org.apache.flink.runtime.state.OperatorStateBackend;
import org.apache.flink.runtime.state.OperatorStateHandle;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.StateInitializationContextImpl;
import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.runtime.state.StateSnapshotContextSynchronousImpl;
import org.apache.flink.runtime.state.StreamStateHandle;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.LatencyMarker;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;
import org.apache.flink.streaming.runtime.tasks.StreamTask;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for all stream operators. Operators that contain a user function should extend the class
* {@link AbstractUdfStreamOperator} instead (which is a specialized subclass of this class).
*
* <p>For concrete implementations, one of the following two interfaces must also be implemented, to
* mark the operator as unary or binary:
* {@link OneInputStreamOperator} or {@link TwoInputStreamOperator}.
*
* <p>Methods of {@code StreamOperator} are guaranteed not to be called concurrently. Also, if using
* the timer service, timer callbacks are also guaranteed not to be called concurrently with
* methods on {@code StreamOperator}.
*
* @param <OUT> The output type of the operator
*/
@PublicEvolving
public abstract class AbstractStreamOperator<OUT>
implements StreamOperator<OUT>, Serializable, KeyContext {
private static final long serialVersionUID = 1L;

/** The logger used by the operator class and its subclasses. */
protected static final Logger LOG = LoggerFactory.getLogger(AbstractStreamOperator.class);

// ----------- configuration properties -------------

// A sane default for most operators
protected ChainingStrategy chainingStrategy = ChainingStrategy.HEAD;

// ---------------- runtime fields ------------------

/** The task that contains this operator (and other operators in the same chain). */
private transient StreamTask<?, ?> container;

/** The configuration of this operator within the enclosing stream task; set in setup(). */
protected transient StreamConfig config;

/** The output through which this operator emits records downstream (wrapped for counting in setup()). */
protected transient Output<StreamRecord<OUT>> output;

/** The runtime context for UDFs. */
private transient StreamingRuntimeContext runtimeContext;

// ----------------- general state -------------------

/** The factory that give this operator access to checkpoint storage. */
private transient CheckpointStreamFactory checkpointStreamFactory;

// ---------------- key/value state ------------------

/**
 * {@code KeySelector} for extracting a key from an element being processed. This is used to
 * scope keyed state to a key. This is null if the operator is not a keyed operator.
 *
 * <p>This is for elements from the first input.
 */
private transient KeySelector<?, ?> stateKeySelector1;

/**
 * {@code KeySelector} for extracting a key from an element being processed. This is used to
 * scope keyed state to a key. This is null if the operator is not a keyed operator.
 *
 * <p>This is for elements from the second input.
 */
private transient KeySelector<?, ?> stateKeySelector2;

/** Backend for keyed state. This might be empty if we're not on a keyed stream. */
private transient AbstractKeyedStateBackend<?> keyedStateBackend;

/** Keyed state store view on the keyed backend. */
private transient DefaultKeyedStateStore keyedStateStore;

// ---------------- operator state ------------------

/** Operator state backend / store. */
private transient OperatorStateBackend operatorStateBackend;

// --------------- Metrics ---------------------------

/** Metric group for the operator. */
protected transient MetricGroup metrics;

/** Gauge reporting latency statistics of latency markers passing this operator; registered in setup(). */
protected transient LatencyGauge latencyGauge;

// ---------------- time handler ------------------

/** Manages timers/time services for keyed operators; lazily created in initializeState(). */
private transient InternalTimeServiceManager<?, ?> timeServiceManager;

// ---------------- two-input operator watermarks ------------------

// We keep track of watermarks from both inputs, the combined input is the minimum
// Once the minimum advances we emit a new watermark for downstream operators
private long combinedWatermark = Long.MIN_VALUE;
private long input1Watermark = Long.MIN_VALUE;
private long input2Watermark = Long.MIN_VALUE;

// ------------------------------------------------------------------------
//  Life Cycle
// ------------------------------------------------------------------------
/**
 * Wires this operator into its containing task: registers metrics, wraps the output
 * for record counting, configures latency tracking, and creates the runtime context
 * and key selectors. Must run before any state initialization or element processing.
 */
@Override
public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
	this.container = containingTask;
	this.config = config;

	// Register with the task's metric group and wrap the output so emitted records are counted.
	this.metrics = container.getEnvironment().getMetricGroup().addOperator(config.getOperatorName());
	OperatorMetricGroup operatorMetrics = (OperatorMetricGroup) this.metrics;
	this.output = new CountingOutput(output, operatorMetrics.getIOMetricGroup().getNumRecordsOutCounter());
	if (config.isChainStart()) {
		operatorMetrics.getIOMetricGroup().reuseInputMetricsForTask();
	}
	if (config.isChainEnd()) {
		operatorMetrics.getIOMetricGroup().reuseOutputMetricsForTask();
	}

	// Latency tracking: fall back to the default history size for non-positive configured values.
	Configuration taskManagerConfig = container.getEnvironment().getTaskManagerInfo().getConfiguration();
	int historySize = taskManagerConfig.getInteger(MetricOptions.LATENCY_HISTORY_SIZE);
	if (historySize <= 0) {
		LOG.warn("{} has been set to a value equal or below 0: {}. Using default.", MetricOptions.LATENCY_HISTORY_SIZE, historySize);
		historySize = MetricOptions.LATENCY_HISTORY_SIZE.defaultValue();
	}
	this.latencyGauge = this.metrics.gauge("latency", new LatencyGauge(historySize));

	this.runtimeContext = new StreamingRuntimeContext(this, container.getEnvironment(), container.getAccumulatorMap());
	this.stateKeySelector1 = config.getStatePartitioner(0, getUserCodeClassloader());
	this.stateKeySelector2 = config.getStatePartitioner(1, getUserCodeClassloader());
}
/** Returns the metric group this operator registers its metrics with (set in setup()). */
@Override
public MetricGroup getMetricGroup() {
	return this.metrics;
}
/**
 * Initializes operator and keyed state, creating the time service manager if a keyed
 * backend exists, and — when {@code stateHandles} is non-null — restores managed/raw
 * operator state, raw keyed state, and finally any legacy (pre-repartitionable) state.
 *
 * NOTE: the statement order here is load-bearing: keyed state must be initialized
 * before the time service manager is created, and the operator state backend must be
 * initialized before the StateInitializationContext is built from it.
 */
@Override
public final void initializeState(OperatorStateHandles stateHandles) throws Exception {
	Collection<KeyedStateHandle> keyedStateHandlesRaw = null;
	Collection<OperatorStateHandle> operatorStateHandlesRaw = null;
	Collection<OperatorStateHandle> operatorStateHandlesBackend = null;
	// A null handle set means we are starting fresh rather than restoring.
	boolean restoring = null != stateHandles;
	initKeyedState(); //TODO we should move the actual initialization of this from StreamTask to this class
	if (getKeyedStateBackend() != null && timeServiceManager == null) {
		timeServiceManager = new InternalTimeServiceManager<>(
			getKeyedStateBackend().getNumberOfKeyGroups(),
			getKeyedStateBackend().getKeyGroupRange(),
			this,
			getRuntimeContext().getProcessingTimeService());
	}
	if (restoring) {
		//pass directly
		operatorStateHandlesBackend = stateHandles.getManagedOperatorState();
		operatorStateHandlesRaw = stateHandles.getRawOperatorState();
		if (null != getKeyedStateBackend()) {
			//only use the keyed state if it is meant for us (aka head operator)
			keyedStateHandlesRaw = stateHandles.getRawKeyedState();
		}
	}
	checkpointStreamFactory = container.createCheckpointStreamFactory(this);
	initOperatorState(operatorStateHandlesBackend);
	StateInitializationContext initializationContext = new StateInitializationContextImpl(
		restoring, // information whether we restore or start for the first time
		operatorStateBackend, // access to operator state backend
		keyedStateStore, // access to keyed state backend
		keyedStateHandlesRaw, // access to keyed state stream
		operatorStateHandlesRaw, // access to operator state stream
		getContainingTask().getCancelables()); // access to register streams for canceling
	// Hook for subclasses/user functions to initialize their own state.
	initializeState(initializationContext);
	if (restoring) {
		// finally restore the legacy state in case we are
		// migrating from a previous Flink version.
		restoreStreamCheckpointed(stateHandles);
	}
}
/**
 * Restores legacy, non-repartitionable operator state, used when migrating a job
 * from an older Flink version. A no-op when the handles carry no legacy state.
 *
 * @param stateHandles state gathered from a previous execution attempt.
 * @throws Exception if legacy state exists but this operator cannot restore it,
 *         or if the restore itself fails.
 * @deprecated Non-repartitionable operator state that has been deprecated.
 * Can be removed when we remove the APIs for non-repartitionable operator state.
 */
@Deprecated
private void restoreStreamCheckpointed(OperatorStateHandles stateHandles) throws Exception {
    StreamStateHandle state = stateHandles.getLegacyOperatorState();
    if (null != state) {
        if (this instanceof CheckpointedRestoringOperator) {
            LOG.debug("Restore state of task {} in chain ({}).",
                stateHandles.getOperatorChainIndex(), getContainingTask().getName());

            FSDataInputStream is = state.openInputStream();
            try {
                // Register the stream so it is closed if the task is canceled mid-restore.
                getContainingTask().getCancelables().registerClosable(is);
                ((CheckpointedRestoringOperator) this).restoreState(is);
            } finally {
                getContainingTask().getCancelables().unregisterClosable(is);
                is.close();
            }
        } else {
            // Fixed: the message previously named StreamCheckpointedOperator, but the
            // interface actually tested above is CheckpointedRestoringOperator.
            throw new Exception(
                "Found legacy operator state for operator that does not implement CheckpointedRestoringOperator.");
        }
    }
}
/**
 * This method is called immediately before any elements are processed, it should contain the
 * operator's initialization logic, e.g. state initialization.
 *
 * <p>The default implementation does nothing.
 *
 * @throws Exception An exception in this method causes the operator to fail.
 */
@Override
public void open() throws Exception {}
/**
 * Creates the keyed state backend (and its user-facing {@code DefaultKeyedStateStore})
 * if this operator is keyed, which is indicated by the presence of a key serializer
 * in the stream config. Non-keyed operators leave both fields null.
 *
 * @throws IllegalStateException wrapping any failure of backend creation.
 */
private void initKeyedState() {
    try {
        TypeSerializer<Object> keySerializer = config.getStateKeySerializer(getUserCodeClassloader());
        // create a keyed state backend if there is keyed state, as indicated by the presence of a key serializer
        if (null != keySerializer) {
            // Key-group range this subtask owns, derived from max parallelism,
            // current parallelism and this subtask's index.
            KeyGroupRange subTaskKeyGroupRange = KeyGroupRangeAssignment.computeKeyGroupRangeForOperatorIndex(
                container.getEnvironment().getTaskInfo().getMaxNumberOfParallelSubtasks(),
                container.getEnvironment().getTaskInfo().getNumberOfParallelSubtasks(),
                container.getEnvironment().getTaskInfo().getIndexOfThisSubtask());

            this.keyedStateBackend = container.createKeyedStateBackend(
                keySerializer,
                // The maximum parallelism == number of key group
                container.getEnvironment().getTaskInfo().getMaxNumberOfParallelSubtasks(),
                subTaskKeyGroupRange);

            this.keyedStateStore = new DefaultKeyedStateStore(keyedStateBackend, getExecutionConfig());
        }
    } catch (Exception e) {
        throw new IllegalStateException("Could not initialize keyed state backend.", e);
    }
}
/**
 * Creates the operator (non-keyed) state backend, optionally restoring it from
 * the given handles.
 *
 * @param operatorStateHandles handles to restore from, or null for a fresh start.
 * @throws IllegalStateException wrapping any failure of backend creation.
 */
private void initOperatorState(Collection<OperatorStateHandle> operatorStateHandles) {
    try {
        // create an operator state backend
        this.operatorStateBackend = container.createOperatorStateBackend(this, operatorStateHandles);
    } catch (Exception e) {
        throw new IllegalStateException("Could not initialize operator state backend.", e);
    }
}
/**
 * This method is called after all records have been added to the operators via the methods
 * {@link OneInputStreamOperator#processElement(StreamRecord)}, or
 * {@link TwoInputStreamOperator#processElement1(StreamRecord)} and
 * {@link TwoInputStreamOperator#processElement2(StreamRecord)}.
 *
 * <p>The method is expected to flush all remaining buffered data. Exceptions during this
 * flushing of buffered data should be propagated, in order to cause the operation to be
 * recognized as failed, because the last data items are not processed properly.
 *
 * @throws Exception An exception in this method causes the operator to fail.
 */
@Override
public void close() throws Exception {}
/**
 * This method is called at the very end of the operator's life, both in the case of a successful
 * completion of the operation, and in the case of a failure and canceling.
 *
 * <p>This method is expected to make a thorough effort to release all resources
 * that the operator has acquired.
 */
@Override
public void dispose() throws Exception {
    // Dispose both backends; operator state first, mirroring the order in which
    // state was initialized. Either backend may be null for stateless operators.
    if (operatorStateBackend != null) {
        operatorStateBackend.dispose();
    }
    if (keyedStateBackend != null) {
        keyedStateBackend.dispose();
    }
}
/**
 * Takes a snapshot of this operator's state: raw keyed/operator streams via the
 * snapshot context, plus asynchronous futures for the managed keyed and operator
 * backends. On any failure the partially started snapshot is cancelled before
 * rethrowing.
 *
 * @throws Exception if the snapshot could not be completed; any cancellation
 *         failure is attached as a suppressed exception.
 */
@Override
public final OperatorSnapshotResult snapshotState(long checkpointId, long timestamp, CheckpointOptions checkpointOptions) throws Exception {
    // Non-keyed operators report an empty key-group range for the raw keyed stream.
    KeyGroupRange keyGroupRange = null != keyedStateBackend ?
        keyedStateBackend.getKeyGroupRange() : KeyGroupRange.EMPTY_KEY_GROUP_RANGE;

    OperatorSnapshotResult snapshotInProgress = new OperatorSnapshotResult();

    CheckpointStreamFactory factory = getCheckpointStreamFactory(checkpointOptions);

    try (StateSnapshotContextSynchronousImpl snapshotContext = new StateSnapshotContextSynchronousImpl(
        checkpointId,
        timestamp,
        factory,
        keyGroupRange,
        getContainingTask().getCancelables())) {

        // Subclass hook writes raw state (e.g. timers) into the context's streams.
        snapshotState(snapshotContext);

        snapshotInProgress.setKeyedStateRawFuture(snapshotContext.getKeyedStateStreamFuture());
        snapshotInProgress.setOperatorStateRawFuture(snapshotContext.getOperatorStateStreamFuture());

        if (null != operatorStateBackend) {
            snapshotInProgress.setOperatorStateManagedFuture(
                operatorStateBackend.snapshot(checkpointId, timestamp, factory, checkpointOptions));
        }

        if (null != keyedStateBackend) {
            snapshotInProgress.setKeyedStateManagedFuture(
                keyedStateBackend.snapshot(checkpointId, timestamp, factory, checkpointOptions));
        }
    } catch (Exception snapshotException) {
        // Best-effort cleanup of whatever part of the snapshot already started;
        // cleanup failures must not mask the original cause.
        try {
            snapshotInProgress.cancel();
        } catch (Exception e) {
            snapshotException.addSuppressed(e);
        }

        throw new Exception("Could not complete snapshot " + checkpointId + " for operator " +
            getOperatorName() + '.', snapshotException);
    }

    return snapshotInProgress;
}
/**
 * Stream operators with state, which want to participate in a snapshot need to override this hook method.
 *
 * <p>The default implementation writes the timer service's state for every local
 * key group into the raw keyed state stream (keyed operators only).
 *
 * @param context context that provides information and means required for taking a snapshot
 * @throws Exception if the raw keyed stream cannot be opened or written.
 */
public void snapshotState(StateSnapshotContext context) throws Exception {
    if (getKeyedStateBackend() != null) {
        KeyedStateCheckpointOutputStream out;

        try {
            out = context.getRawKeyedOperatorStateOutput();
        } catch (Exception exception) {
            throw new Exception("Could not open raw keyed operator state stream for " +
                getOperatorName() + '.', exception);
        }

        try {
            KeyGroupsList allKeyGroups = out.getKeyGroupList();
            for (int keyGroupIdx : allKeyGroups) {
                // Each key group is framed explicitly so it can be restored selectively.
                out.startNewKeyGroup(keyGroupIdx);

                timeServiceManager.snapshotStateForKeyGroup(
                    new DataOutputViewStreamWrapper(out), keyGroupIdx);
            }
        } catch (Exception exception) {
            throw new Exception("Could not write timer service of " + getOperatorName() +
                " to checkpoint state stream.", exception);
        } finally {
            // A close failure is only logged: the snapshot data itself was written,
            // and throwing here would mask a primary exception from the try block.
            try {
                out.close();
            } catch (Exception closeException) {
                LOG.warn("Could not close raw keyed operator state stream for {}. This " +
                    "might have prevented deleting some state data.", getOperatorName(), closeException);
            }
        }
    }
}
/**
 * Writes legacy, non-repartitionable operator state for operators still implementing
 * {@code StreamCheckpointedOperator}; returns null for all other operators.
 *
 * @return a handle to the written legacy state, or null if this operator has none.
 * @deprecated Non-repartitionable operator state that has been deprecated.
 * Can be removed when we remove the APIs for non-repartitionable operator state.
 */
@SuppressWarnings("deprecation")
@Deprecated
@Override
public StreamStateHandle snapshotLegacyOperatorState(long checkpointId, long timestamp, CheckpointOptions checkpointOptions) throws Exception {
    if (this instanceof StreamCheckpointedOperator) {
        CheckpointStreamFactory factory = getCheckpointStreamFactory(checkpointOptions);

        final CheckpointStreamFactory.CheckpointStateOutputStream outStream =
            factory.createCheckpointStateOutputStream(checkpointId, timestamp);

        // Register so the stream is closed if the task gets canceled mid-snapshot.
        getContainingTask().getCancelables().registerClosable(outStream);

        try {
            ((StreamCheckpointedOperator) this).snapshotState(outStream, checkpointId, timestamp);
            return outStream.closeAndGetHandle();
        }
        finally {
            getContainingTask().getCancelables().unregisterClosable(outStream);
            // Idempotent if closeAndGetHandle() already closed the stream.
            outStream.close();
        }
    } else {
        return null;
    }
}
/**
 * Stream operators with state which can be restored need to override this hook method.
 *
 * <p>The default implementation restores the timer services from the raw keyed
 * state streams (keyed operators only).
 *
 * @param context context that allows to register different states.
 * @throws Exception on restore failure, including a key group outside the local range.
 */
public void initializeState(StateInitializationContext context) throws Exception {
    if (getKeyedStateBackend() != null) {
        KeyGroupsList localKeyGroupRange = getKeyedStateBackend().getKeyGroupRange();

        // and then initialize the timer services
        for (KeyGroupStatePartitionStreamProvider streamProvider : context.getRawKeyedStateInputs()) {
            int keyGroupIdx = streamProvider.getKeyGroupId();

            // Sanity check: state redistribution must only hand us our own key groups.
            checkArgument(localKeyGroupRange.contains(keyGroupIdx),
                "Key Group " + keyGroupIdx + " does not belong to the local range.");

            timeServiceManager.restoreStateForKeyGroup(
                new DataInputViewStreamWrapper(streamProvider.getStream()),
                keyGroupIdx, getUserCodeClassloader());
        }
    }
}
@Override
public void notifyOfCompletedCheckpoint(long checkpointId) throws Exception {
    // Only the keyed backend needs the notification (e.g. to release resources
    // tied to the now-confirmed checkpoint); the operator backend is not notified here.
    if (keyedStateBackend != null) {
        keyedStateBackend.notifyCheckpointComplete(checkpointId);
    }
}
/**
 * Returns a checkpoint stream factory for the provided options.
 *
 * <p>For {@link CheckpointType#FULL_CHECKPOINT} this returns the shared
 * factory of this operator.
 *
 * <p>For {@link CheckpointType#SAVEPOINT} it creates a custom factory per
 * savepoint.
 *
 * @param checkpointOptions Options for the checkpoint
 * @return Checkpoint stream factory for the checkpoints
 * @throws IOException Failures while creating a new stream factory are forwarded
 */
@VisibleForTesting
CheckpointStreamFactory getCheckpointStreamFactory(CheckpointOptions checkpointOptions) throws IOException {
    final CheckpointType checkpointType = checkpointOptions.getCheckpointType();

    // Savepoints get a dedicated factory targeting the requested location.
    if (checkpointType == CheckpointType.SAVEPOINT) {
        return container.createSavepointStreamFactory(this, checkpointOptions.getTargetLocation());
    }
    // Regular checkpoints share this operator's factory.
    if (checkpointType == CheckpointType.FULL_CHECKPOINT) {
        return checkpointStreamFactory;
    }
    throw new IllegalStateException("Unknown checkpoint type " + checkpointType);
}
// ------------------------------------------------------------------------
//  Properties and Services
// ------------------------------------------------------------------------
/**
 * Gets the execution config defined on the execution environment of the job to which this
 * operator belongs.
 *
 * @return The job's execution config.
 */
public ExecutionConfig getExecutionConfig() {
    return container.getExecutionConfig();
}
/** Returns this operator's stream configuration. */
public StreamConfig getOperatorConfig() {
    return config;
}
/** Returns the task that contains (and drives) this operator. */
public StreamTask<?, ?> getContainingTask() {
    return container;
}
/** Returns the class loader for user code shipped with the job. */
public ClassLoader getUserCodeClassloader() {
    return container.getUserCodeClassLoader();
}
/**
 * Return the operator name. If the runtime context has been set, then the task name with
 * subtask index is returned. Otherwise, the simple class name is returned.
 *
 * @return If runtime context is set, then return task name with subtask index. Otherwise return
 * 	simple class name.
 */
protected String getOperatorName() {
    if (runtimeContext != null) {
        return runtimeContext.getTaskNameWithSubtasks();
    } else {
        return getClass().getSimpleName();
    }
}
/**
 * Returns a context that allows the operator to query information about the execution and also
 * to interact with systems such as broadcast variables and managed state. This also allows
 * to register timers.
 */
public StreamingRuntimeContext getRuntimeContext() {
    return runtimeContext;
}
/** Returns the keyed state backend, or null for non-keyed operators. */
@SuppressWarnings("unchecked")
public <K> KeyedStateBackend<K> getKeyedStateBackend() {
    // Unchecked cast: the caller chooses K; the backend's actual key type is erased here.
    return (KeyedStateBackend<K>) keyedStateBackend;
}
/** Returns the operator (non-keyed) state backend. */
public OperatorStateBackend getOperatorStateBackend() {
    return operatorStateBackend;
}
/**
 * Returns the {@link ProcessingTimeService} responsible for getting the current
 * processing time and registering timers.
 */
protected ProcessingTimeService getProcessingTimeService() {
    return container.getProcessingTimeService();
}
/**
 * Creates a partitioned state handle, using the state backend configured for this task.
 *
 * <p>Convenience overload that scopes the state to the void namespace.
 *
 * @throws IllegalStateException Thrown, if the key/value state was already initialized.
 * @throws Exception Thrown, if the state backend cannot create the key/value state.
 */
protected <S extends State> S getPartitionedState(StateDescriptor<S, ?> stateDescriptor) throws Exception {
    return getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescriptor);
}
/**
 * Creates (or returns a previously created) keyed state for the given descriptor,
 * scoped by the given namespace serializer.
 *
 * @throws IllegalStateException if this operator is not keyed (no keyed state backend).
 * @throws Exception if the backend cannot create the state.
 */
protected <N, S extends State, T> S getOrCreateKeyedState(
        TypeSerializer<N> namespaceSerializer,
        StateDescriptor<S, T> stateDescriptor) throws Exception {

    // Fixed: previously this null-checked keyedStateStore while dereferencing
    // keyedStateBackend. The two are assigned together in initKeyedState(), but
    // guarding the field that is actually used removes the latent NPE.
    if (keyedStateBackend != null) {
        return keyedStateBackend.getOrCreateKeyedState(namespaceSerializer, stateDescriptor);
    }
    else {
        // Also fixed a missing space between sentences in the concatenated message.
        throw new IllegalStateException("Cannot create partitioned state. " +
            "The keyed state backend has not been set. " +
            "This indicates that the operator is not partitioned/keyed.");
    }
}
/**
 * Creates a partitioned state handle, using the state backend configured for this task.
 *
 * @param namespace the namespace the state is scoped to.
 * @param namespaceSerializer serializer for the namespace type.
 * @param stateDescriptor descriptor identifying the state.
 * @throws IllegalStateException Thrown, if the key/value state was already initialized.
 * @throws Exception Thrown, if the state backend cannot create the key/value state.
 */
protected <S extends State, N> S getPartitionedState(
        N namespace,
        TypeSerializer<N> namespaceSerializer,
        StateDescriptor<S, ?> stateDescriptor) throws Exception {
    /*
    TODO: NOTE: This method does a lot of work caching / retrieving states just to update the namespace.
    This method should be removed for the sake of namespaces being lazily fetched from the keyed
    state backend, or being set on the state directly.
    */
    // NOTE(review): guards keyedStateStore but calls keyedStateBackend — the two
    // fields are set together in initKeyedState(), so this holds in practice.
    if (keyedStateStore != null) {
        return keyedStateBackend.getPartitionedState(namespace, namespaceSerializer, stateDescriptor);
    } else {
        throw new RuntimeException("Cannot create partitioned state. The keyed state " +
            "backend has not been set. This indicates that the operator is not " +
            "partitioned/keyed.");
    }
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void setKeyContextElement1(StreamRecord record) throws Exception {
    // First input: derive the current key from the record, if input 1 is keyed.
    setKeyContextElement(record, stateKeySelector1);
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void setKeyContextElement2(StreamRecord record) throws Exception {
    // Second input: derive the current key from the record, if input 2 is keyed.
    setKeyContextElement(record, stateKeySelector2);
}
// Common path for both inputs; a null selector means that input is not keyed,
// and the key context is left untouched.
private <T> void setKeyContextElement(StreamRecord<T> record, KeySelector<T, ?> selector) throws Exception {
    if (selector != null) {
        Object key = selector.getKey(record.getValue());
        setCurrentKey(key);
    }
}
/**
 * Sets the key that all subsequent keyed state accesses are scoped to.
 * A no-op for non-keyed operators.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public void setCurrentKey(Object key) {
    if (keyedStateBackend != null) {
        try {
            // need to work around type restrictions
            @SuppressWarnings("unchecked,rawtypes")
            AbstractKeyedStateBackend rawBackend = (AbstractKeyedStateBackend) keyedStateBackend;

            rawBackend.setCurrentKey(key);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred while setting the current key context.", e);
        }
    }
}
/**
 * Returns the key currently set on the keyed state backend.
 *
 * @throws UnsupportedOperationException if this operator is not keyed.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getCurrentKey() {
    if (keyedStateBackend != null) {
        return keyedStateBackend.getCurrentKey();
    } else {
        // Fixed typo in the user-facing message: "retrieven" -> "retrieved".
        throw new UnsupportedOperationException("Key can only be retrieved on KeyedStream.");
    }
}
/** Returns the user-facing keyed state store, or null for non-keyed operators. */
public KeyedStateStore getKeyedStateStore() {
    return keyedStateStore;
}
// ------------------------------------------------------------------------
//  Context and chaining properties
// ------------------------------------------------------------------------
@Override
public final void setChainingStrategy(ChainingStrategy strategy) {
    this.chainingStrategy = strategy;
}
@Override
public final ChainingStrategy getChainingStrategy() {
    return chainingStrategy;
}
// ------------------------------------------------------------------------
//  Metrics
// ------------------------------------------------------------------------
// ------- One input stream
public void processLatencyMarker(LatencyMarker latencyMarker) throws Exception {
    reportOrForwardLatencyMarker(latencyMarker);
}
// ------- Two input stream
public void processLatencyMarker1(LatencyMarker latencyMarker) throws Exception {
    reportOrForwardLatencyMarker(latencyMarker);
}
public void processLatencyMarker2(LatencyMarker latencyMarker) throws Exception {
    reportOrForwardLatencyMarker(latencyMarker);
}
// Records the marker's latency in this operator's gauge, then forwards it downstream.
protected void reportOrForwardLatencyMarker(LatencyMarker marker) {
    // all operators are tracking latencies
    this.latencyGauge.reportLatency(marker, false);

    // everything except sinks forwards latency markers
    this.output.emitLatencyMarker(marker);
}
// ----------------------- Helper classes -----------------------
/**
 * The gauge uses a HashMap internally to avoid classloading issues when accessing
 * the values using JMX.
 */
protected static class LatencyGauge implements Gauge<Map<String, HashMap<String, Double>>> {
    // Per-source sliding-window statistics. Plain HashMap: concurrent access from the
    // metrics reporter is tolerated via the retry loop in getValue() below.
    private final Map<LatencySourceDescriptor, DescriptiveStatistics> latencyStats = new HashMap<>();
    // Sliding-window size for each source's statistics (from MetricOptions.LATENCY_HISTORY_SIZE).
    private final int historySize;

    LatencyGauge(int historySize) {
        this.historySize = historySize;
    }

    public void reportLatency(LatencyMarker marker, boolean isSink) {
        // Non-sinks aggregate over all subtasks of a source (subtask index ignored).
        LatencySourceDescriptor sourceDescriptor = LatencySourceDescriptor.of(marker, !isSink);
        DescriptiveStatistics sourceStats = latencyStats.get(sourceDescriptor);
        if (sourceStats == null) {
            // Lazily create a window of the configured history size for this source.
            sourceStats = new DescriptiveStatistics(this.historySize);
            latencyStats.put(sourceDescriptor, sourceStats);
        }
        long now = System.currentTimeMillis();
        // Latency = wall-clock delta since the marker was emitted at the source.
        sourceStats.addValue(now - marker.getMarkedTime());
    }

    @Override
    public Map<String, Double>> getValue() {
        while (true) {
            try {
                Map<String, HashMap<String, Double>> ret = new HashMap<>();
                for (Map.Entry<LatencySourceDescriptor, DescriptiveStatistics> source : latencyStats.entrySet()) {
                    HashMap<String, Double> sourceStatistics = new HashMap<>(6);
                    sourceStatistics.put("max", source.getValue().getMax());
                    sourceStatistics.put("mean", source.getValue().getMean());
                    sourceStatistics.put("min", source.getValue().getMin());
                    sourceStatistics.put("p50", source.getValue().getPercentile(50));
                    sourceStatistics.put("p95", source.getValue().getPercentile(95));
                    sourceStatistics.put("p99", source.getValue().getPercentile(99));
                    ret.put(source.getKey().toString(), sourceStatistics);
                }
                return ret;
                // Concurrent access onto the "latencyStats" map could cause
                // ConcurrentModificationExceptions. To avoid unnecessary blocking
                // of the reportLatency() method, we retry this operation until
                // it succeeds.
            } catch (ConcurrentModificationException ignore) {
                LOG.debug("Unable to report latency statistics", ignore);
            }
        }
    }
}
/**
 * Identifier for a latency source: a source vertex plus (optionally) one of its
 * parallel subtasks. Used as a key in {@link LatencyGauge}'s statistics map.
 */
private static class LatencySourceDescriptor {

    /**
     * A unique ID identifying a logical source in Flink.
     */
    private final int vertexID;

    /**
     * Identifier for parallel subtasks of a logical source.
     */
    private final int subtaskIndex;

    /**
     * Creates a {@code LatencySourceDescriptor} from a given {@code LatencyMarker}.
     *
     * @param marker The latency marker to extract the LatencySourceDescriptor from.
     * @param ignoreSubtaskIndex Set to true to ignore the subtask index, to treat the latencies
     *     from all the parallel instances of a source as the same.
     * @return A LatencySourceDescriptor for the given marker.
     */
    public static LatencySourceDescriptor of(LatencyMarker marker, boolean ignoreSubtaskIndex) {
        // -1 is the sentinel for "all subtasks of this source".
        final int subtask = ignoreSubtaskIndex ? -1 : marker.getSubtaskIndex();
        return new LatencySourceDescriptor(marker.getVertexID(), subtask);
    }

    private LatencySourceDescriptor(int vertexID, int subtaskIndex) {
        this.vertexID = vertexID;
        this.subtaskIndex = subtaskIndex;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final LatencySourceDescriptor other = (LatencySourceDescriptor) o;
        return vertexID == other.vertexID && subtaskIndex == other.subtaskIndex;
    }

    @Override
    public int hashCode() {
        // Same value as the conventional (31 * vertexID + subtaskIndex) accumulation.
        return 31 * vertexID + subtaskIndex;
    }

    @Override
    public String toString() {
        return "LatencySourceDescriptor{" +
            "vertexID=" + vertexID +
            ", subtaskIndex=" + subtaskIndex +
            '}';
    }
}
/**
 * Wrapping {@link Output} that updates metrics on the number of emitted elements.
 *
 * <p>All calls delegate to the wrapped output; only the two {@code collect}
 * variants additionally increment the records-out counter. Watermarks and
 * latency markers are forwarded without being counted.
 */
public class CountingOutput implements Output<StreamRecord<OUT>> {
    private final Output<StreamRecord<OUT>> output;
    private final Counter numRecordsOut;

    public CountingOutput(Output<StreamRecord<OUT>> output, Counter counter) {
        this.output = output;
        this.numRecordsOut = counter;
    }

    @Override
    public void emitWatermark(Watermark mark) {
        output.emitWatermark(mark);
    }

    @Override
    public void emitLatencyMarker(LatencyMarker latencyMarker) {
        output.emitLatencyMarker(latencyMarker);
    }

    @Override
    public void collect(StreamRecord<OUT> record) {
        numRecordsOut.inc();
        output.collect(record);
    }

    @Override
    public <X> void collect(OutputTag<X> outputTag, StreamRecord<X> record) {
        // Side-output records count toward the same records-out metric.
        numRecordsOut.inc();
        output.collect(outputTag, record);
    }

    @Override
    public void close() {
        output.close();
    }
}
// ------------------------------------------------------------------------
//  Watermark handling
// ------------------------------------------------------------------------
/**
 * Returns a {@link InternalTimerService} that can be used to query current processing time
 * and event time and to set timers. An operator can have several timer services, where
 * each has its own namespace serializer. Timer services are differentiated by the string
 * key that is given when requesting them, if you call this method with the same key
 * multiple times you will get the same timer service instance in subsequent requests.
 *
 * <p>Timers are always scoped to a key, the currently active key of a keyed stream operation.
 * When a timer fires, this key will also be set as the currently active key.
 *
 * <p>Each timer has attached metadata, the namespace. Different timer services
 * can have a different namespace type. If you don't need namespace differentiation you
 * can use {@link VoidNamespaceSerializer} as the namespace serializer.
 *
 * @param name The name of the requested timer service. If no service exists under the given
 *     name a new one will be created and returned.
 * @param namespaceSerializer {@code TypeSerializer} for the timer namespace.
 * @param triggerable The {@link Triggerable} that should be invoked when timers fire
 *
 * @param <N> The type of the timer namespace.
 */
public <K, N> InternalTimerService<N> getInternalTimerService(
        String name,
        TypeSerializer<N> namespaceSerializer,
        Triggerable<K, N> triggerable) {
    // Fails fast for non-keyed operators or an uninitialized time service manager.
    checkTimerServiceInitialization();

    // the following casting is to overcome type restrictions.
    TypeSerializer<K> keySerializer = (TypeSerializer<K>) getKeyedStateBackend().getKeySerializer();
    InternalTimeServiceManager<K, N> keyedTimeServiceHandler = (InternalTimeServiceManager<K, N>) timeServiceManager;
    return keyedTimeServiceHandler.getInternalTimerService(name, keySerializer, namespaceSerializer, triggerable);
}
// Advances event time for all timer services (firing due timers), then forwards
// the watermark downstream.
public void processWatermark(Watermark mark) throws Exception {
    if (timeServiceManager != null) {
        timeServiceManager.advanceWatermark(mark);
    }
    output.emitWatermark(mark);
}
/**
 * Precondition check for timer usage: requires a keyed context and an
 * initialized time service manager; throws otherwise.
 */
private void checkTimerServiceInitialization() {
    if (getKeyedStateBackend() == null) {
        throw new UnsupportedOperationException("Timers can only be used on keyed operators.");
    }
    if (timeServiceManager == null) {
        throw new RuntimeException("The timer service has not been initialized.");
    }
}
/**
 * Handles a watermark from input 1 of a two-input operator: the combined
 * watermark is the minimum over both inputs and only advances monotonically.
 */
public void processWatermark1(Watermark mark) throws Exception {
    input1Watermark = mark.getTimestamp();
    final long candidate = Math.min(input1Watermark, input2Watermark);
    if (candidate > combinedWatermark) {
        combinedWatermark = candidate;
        processWatermark(new Watermark(candidate));
    }
}
/**
 * Handles a watermark from input 2; see {@link #processWatermark1(Watermark)}.
 */
public void processWatermark2(Watermark mark) throws Exception {
    input2Watermark = mark.getTimestamp();
    final long candidate = Math.min(input1Watermark, input2Watermark);
    if (candidate > combinedWatermark) {
        combinedWatermark = candidate;
        processWatermark(new Watermark(candidate));
    }
}
/** Test hook: total number of registered processing-time timers (0 if no timer service). */
@VisibleForTesting
public int numProcessingTimeTimers() {
    if (timeServiceManager == null) {
        return 0;
    }
    return timeServiceManager.numProcessingTimeTimers();
}
/** Test hook: total number of registered event-time timers (0 if no timer service). */
@VisibleForTesting
public int numEventTimeTimers() {
    if (timeServiceManager == null) {
        return 0;
    }
    return timeServiceManager.numEventTimeTimers();
}
}
| |
package org.apache.maven.execution;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.RepositoryCache;
import org.apache.maven.monitor.event.EventDispatcher;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.ProjectBuildingRequest;
import org.apache.maven.settings.Settings;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.eclipse.aether.RepositorySystemSession;
/**
* A Maven execution session.
*
* @author Jason van Zyl
*/
public class MavenSession
implements Cloneable
{
    // The request that configured this build (goals, properties, repositories, ...).
    private MavenExecutionRequest request;

    // Aggregated outcome of the build.
    private MavenExecutionResult result;

    // Aether session used for artifact resolution.
    private RepositorySystemSession repositorySession;

    // Legacy execution properties; only set by the deprecated constructors.
    private Properties executionProperties;

    // The project currently being built; updated as the reactor advances.
    private MavenProject currentProject;

    /**
     * These projects have already been topologically sorted in the {@link org.apache.maven.Maven} component before
     * being passed into the session. This is also the potentially constrained set of projects by using --projects
     * on the command line.
     */
    private List<MavenProject> projects;

    /**
     * The full set of projects before any potential constraining by --projects. Useful in the case where you want to
     * build a smaller set of projects but perform other operations in the context of your reactor.
     */
    private List<MavenProject> allProjects;

    // The project at the root of the reactor (the one marked as execution root).
    private MavenProject topLevelProject;

    private ProjectDependencyGraph projectDependencyGraph;

    // True when the reactor is built with multiple threads.
    private boolean parallel;

    // projectId -> pluginKey -> plugin context map; concurrent because plugins may
    // run in parallel (see getPluginContext).
    private final Map<String, Map<String, Map<String, Object>>> pluginContextsByProjectAndPluginKey =
        new ConcurrentHashMap<>();
    /**
     * Installs the (already topologically sorted) reactor projects. The current
     * project starts as the first project in the list; the top-level project is
     * the one flagged as execution root, falling back to the first project if
     * none is flagged.
     *
     * @param projects sorted reactor projects; may be empty but not null.
     */
    public void setProjects( List<MavenProject> projects )
    {
        if ( !projects.isEmpty() )
        {
            this.currentProject = projects.get( 0 );
            this.topLevelProject = currentProject;
            for ( MavenProject project : projects )
            {
                if ( project.isExecutionRoot() )
                {
                    topLevelProject = project;
                    break;
                }
            }
        }
        else
        {
            this.currentProject = null;
            this.topLevelProject = null;
        }
        this.projects = projects;
    }
    /** Returns the local artifact repository configured on the request. */
    public ArtifactRepository getLocalRepository()
    {
        return request.getLocalRepository();
    }

    /** Returns the goals/phases the user asked to execute. */
    public List<String> getGoals()
    {
        return request.getGoals();
    }

    /**
     * Gets the user properties to use for interpolation and profile activation. The user properties have been
     * configured directly by the user on his discretion, e.g. via the {@code -Dkey=value} parameter on the command
     * line.
     *
     * @return The user properties, never {@code null}.
     */
    public Properties getUserProperties()
    {
        return request.getUserProperties();
    }

    /**
     * Gets the system properties to use for interpolation and profile activation. The system properties are collected
     * from the runtime environment like {@link System#getProperties()} and environment variables.
     *
     * @return The system properties, never {@code null}.
     */
    public Properties getSystemProperties()
    {
        return request.getSystemProperties();
    }

    /** Returns the effective settings (adapter over the request in the modern constructors). */
    public Settings getSettings()
    {
        return settings;
    }
    /** Returns the (possibly --projects-constrained) sorted reactor projects. */
    public List<MavenProject> getProjects()
    {
        return projects;
    }

    /** Returns the directory Maven was invoked from. */
    public String getExecutionRootDirectory()
    {
        return request.getBaseDirectory();
    }

    public MavenExecutionRequest getRequest()
    {
        return request;
    }

    public void setCurrentProject( MavenProject currentProject )
    {
        this.currentProject = currentProject;
    }

    public MavenProject getCurrentProject()
    {
        return currentProject;
    }

    /**
     * Returns the request's project building request with this session's repository
     * session applied. Note this mutates (and returns) the request's own instance.
     */
    public ProjectBuildingRequest getProjectBuildingRequest()
    {
        return request.getProjectBuildingRequest().setRepositorySession( getRepositorySession() );
    }

    public List<String> getPluginGroups()
    {
        return request.getPluginGroups();
    }

    public boolean isOffline()
    {
        return request.isOffline();
    }

    public MavenProject getTopLevelProject()
    {
        return topLevelProject;
    }

    public MavenExecutionResult getResult()
    {
        return result;
    }
// Backward compat
public Map<String, Object> getPluginContext( PluginDescriptor plugin, MavenProject project )
{
String projectKey = project.getId();
Map<String, Map<String, Object>> pluginContextsByKey = pluginContextsByProjectAndPluginKey.get( projectKey );
if ( pluginContextsByKey == null )
{
pluginContextsByKey = new ConcurrentHashMap<>();
pluginContextsByProjectAndPluginKey.put( projectKey, pluginContextsByKey );
}
String pluginKey = plugin.getPluginLookupKey();
Map<String, Object> pluginContext = pluginContextsByKey.get( pluginKey );
if ( pluginContext == null )
{
pluginContext = new ConcurrentHashMap<>();
pluginContextsByKey.put( pluginKey, pluginContext );
}
return pluginContext;
}
    public ProjectDependencyGraph getProjectDependencyGraph()
    {
        return projectDependencyGraph;
    }

    public void setProjectDependencyGraph( ProjectDependencyGraph projectDependencyGraph )
    {
        this.projectDependencyGraph = projectDependencyGraph;
    }

    /** Returns the configured reactor failure behavior (e.g. fail-fast/at-end/never). */
    public String getReactorFailureBehavior()
    {
        return request.getReactorFailureBehavior();
    }
    /**
     * Returns a shallow copy of this session (fields such as {@code request} and
     * the projects list are shared with the original, per {@link Object#clone()}).
     */
    @Override
    public MavenSession clone()
    {
        try
        {
            return (MavenSession) super.clone();
        }
        catch ( CloneNotSupportedException e )
        {
            // Cannot happen: this class implements Cloneable.
            throw new RuntimeException( "Bug", e );
        }
    }

    public Date getStartTime()
    {
        return request.getStartTime();
    }
    /** Whether the reactor is being built with multiple threads. */
    public boolean isParallel()
    {
        return parallel;
    }

    public void setParallel( boolean parallel )
    {
        this.parallel = parallel;
    }

    /** Returns the Aether repository session used for artifact resolution. */
    public RepositorySystemSession getRepositorySession()
    {
        return repositorySession;
    }
    // Lookup of reactor projects by id; populated externally via setProjectMap.
    private Map<String, MavenProject> projectMap;

    public void setProjectMap( Map<String, MavenProject> projectMap )
    {
        this.projectMap = projectMap;
    }

    /** This is a provisional method and may be removed */
    public List<MavenProject> getAllProjects()
    {
        return allProjects;
    }

    /** This is a provisional method and may be removed */
    public void setAllProjects( List<MavenProject> allProjects )
    {
        this.allProjects = allProjects;
    }
    /*if_not[MAVEN4]*/
    //
    // Deprecated
    //

    // Plexus container kept only for the deprecated constructors below.
    private PlexusContainer container;

    private final Settings settings;

    @Deprecated
    /** @deprecated This appears not to be used anywhere within Maven itself. */
    // NOTE(review): the javadoc sits after the annotation, so tools may not attach it
    // to the method; left as-is to avoid a code change in a doc-only edit.
    public Map<String, MavenProject> getProjectMap()
    {
        return projectMap;
    }
@Deprecated
public MavenSession( PlexusContainer container, RepositorySystemSession repositorySession,
MavenExecutionRequest request, MavenExecutionResult result )
{
this.container = container;
this.request = request;
this.result = result;
this.settings = new SettingsAdapter( request );
this.repositorySession = repositorySession;
}
@Deprecated
public MavenSession( PlexusContainer container, MavenExecutionRequest request, MavenExecutionResult result,
MavenProject project )
{
this( container, request, result, Arrays.asList( new MavenProject[]{project} ) );
}
/**
 * Creates a session from legacy Maven 2.x style parameters, without user properties.
 * Delegates to the ten-argument constructor, passing {@code null} user properties.
 *
 * @param container the Plexus container
 * @param settings the settings to use
 * @param localRepository the local artifact repository
 * @param eventDispatcher accepted for signature compatibility; ignored by the delegate
 * @param unused accepted for signature compatibility; ignored by the delegate
 * @param goals the goals to execute
 * @param executionRootDir the root directory of the build, may be {@code null}
 * @param executionProperties the execution properties
 * @param startTime the build start time
 */
@Deprecated
@SuppressWarnings( "checkstyle:parameternumber" )
public MavenSession( PlexusContainer container, Settings settings, ArtifactRepository localRepository,
EventDispatcher eventDispatcher, ReactorManager unused, List<String> goals,
String executionRootDir, Properties executionProperties, Date startTime )
{
this( container, settings, localRepository, eventDispatcher, unused, goals, executionRootDir,
executionProperties, null, startTime );
}
/**
 * Creates a session from legacy Maven 2.x style parameters. The event dispatcher and
 * reactor manager arguments are accepted for signature compatibility but never used;
 * a fresh {@link DefaultMavenExecutionRequest} is synthesized from the remaining arguments.
 *
 * @param container the Plexus container
 * @param settings the settings to use
 * @param localRepository the local artifact repository, stored on the synthesized request
 * @param eventDispatcher unused
 * @param unused unused
 * @param goals the goals to execute, stored on the synthesized request
 * @param executionRootDir the root directory of the build, may be {@code null}
 * @param executionProperties the execution properties, stored directly on this session
 * @param userProperties the user properties, stored on the synthesized request
 * @param startTime the build start time, stored on the synthesized request
 */
@Deprecated
@SuppressWarnings( "checkstyle:parameternumber" )
public MavenSession( PlexusContainer container, Settings settings, ArtifactRepository localRepository,
EventDispatcher eventDispatcher, ReactorManager unused, List<String> goals,
String executionRootDir, Properties executionProperties, Properties userProperties,
Date startTime )
{
this.container = container;
this.settings = settings;
this.executionProperties = executionProperties;
this.request = new DefaultMavenExecutionRequest();
this.request.setUserProperties( userProperties );
this.request.setLocalRepository( localRepository );
this.request.setGoals( goals );
// Only wrap the root directory in a File when one was actually supplied.
this.request.setBaseDirectory( ( executionRootDir != null ) ? new File( executionRootDir ) : null );
this.request.setStartTime( startTime );
}
/**
 * Creates a session for the given list of projects; the {@link Settings} are adapted
 * from the execution request.
 *
 * @param container the Plexus container
 * @param request the execution request driving this build
 * @param result the execution result to be populated
 * @param projects the projects to build
 */
@Deprecated
public MavenSession( PlexusContainer container, MavenExecutionRequest request, MavenExecutionResult result,
List<MavenProject> projects )
{
this.container = container;
this.request = request;
this.result = result;
this.settings = new SettingsAdapter( request );
setProjects( projects );
}
/**
 * Gets the projects of this session.
 *
 * @return the projects, as returned by {@link #getProjects()}
 * @deprecated use {@link #getProjects()} instead; this method simply delegates to it
 */
@Deprecated
public List<MavenProject> getSortedProjects()
{
return getProjects();
}
/**
 * Always returns {@code null}.
 *
 * @return {@code null}
 */
@Deprecated
//
// Used by Tycho and will break users and force them to upgrade to Maven 3.1 so we should really leave
// this here, possibly indefinitely.
//
public RepositoryCache getRepositoryCache()
{
return null;
}
/**
 * Always returns {@code null}; kept only for binary compatibility with legacy callers.
 *
 * @return {@code null}
 */
@Deprecated
public EventDispatcher getEventDispatcher()
{
return null;
}
/**
 * Indicates whether POMs are read from the filesystem, delegating to
 * {@code MavenExecutionRequest.isProjectPresent()}.
 *
 * @return {@code true} if a project is present in the execution request
 */
@Deprecated
public boolean isUsingPOMsFromFilesystem()
{
return request.isProjectPresent();
}
/**
 * Gets the merged system and user properties for this execution, computing the merge
 * lazily on first call. NOTE(review): the lazy initialization is not synchronized —
 * presumably only called from a single thread; confirm before relying on it concurrently.
 *
 * @deprecated Use either {@link #getUserProperties()} or {@link #getSystemProperties()}.
 */
@Deprecated
public Properties getExecutionProperties()
{
if ( executionProperties == null )
{
// User properties are applied last so they override system properties of the same name.
executionProperties = new Properties();
executionProperties.putAll( request.getSystemProperties() );
executionProperties.putAll( request.getUserProperties() );
}
return executionProperties;
}
/**
 * Gets the Plexus container backing the deprecated lookup methods.
 *
 * @return the Plexus container
 */
@Deprecated
public PlexusContainer getContainer()
{
return container;
}
/**
 * Looks up a component by role, delegating to the Plexus container.
 *
 * @param role the component role
 * @return the component instance
 * @throws ComponentLookupException if no component could be found for the role
 */
@Deprecated
public Object lookup( String role )
throws ComponentLookupException
{
return container.lookup( role );
}
/**
 * Looks up a component by role and hint, delegating to the Plexus container.
 *
 * @param role the component role
 * @param roleHint the role hint distinguishing implementations
 * @return the component instance
 * @throws ComponentLookupException if no component could be found for the role/hint
 */
@Deprecated
public Object lookup( String role, String roleHint )
throws ComponentLookupException
{
return container.lookup( role, roleHint );
}
/**
 * Looks up all components implementing a role, delegating to the Plexus container.
 *
 * @param role the component role
 * @return the matching component instances
 * @throws ComponentLookupException if the lookup fails
 */
@Deprecated
public List<Object> lookupList( String role )
throws ComponentLookupException
{
return container.lookupList( role );
}
/**
 * Looks up all components implementing a role, keyed by role hint, delegating to the
 * Plexus container.
 *
 * @param role the component role
 * @return the matching component instances keyed by hint
 * @throws ComponentLookupException if the lookup fails
 */
@Deprecated
public Map<String, Object> lookupMap( String role )
throws ComponentLookupException
{
return container.lookupMap( role );
}
/*end[MAVEN4]*/
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import static java.util.Collections.emptySet;
/**
* Allows to asynchronously fetch shard related data from other nodes for allocation, without blocking
* the cluster update thread.
* <p>
* The async fetch logic maintains a map of which nodes are being fetched from in an async manner,
* and once the results are back, it makes sure to schedule a reroute to make sure those results will
* be taken into account.
*/
public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Releasable {
/**
 * An action that lists the relevant shard data that needs to be fetched.
 */
public interface Lister<NodesResponse extends BaseNodesResponse<NodeResponse>, NodeResponse extends BaseNodeResponse> {
void list(ShardId shardId, @Nullable String customDataPath, DiscoveryNode[] nodes, ActionListener<NodesResponse> listener);
}
protected final Logger logger;
protected final String type;
protected final ShardId shardId;
protected final String customDataPath;
private final Lister<BaseNodesResponse<T>, T> action;
// Per-node fetch state, keyed by node id; mutated only under "this" (all mutators are synchronized).
private final Map<String, NodeEntry<T>> cache = new HashMap<>();
// Nodes excluded from allocation this round; carried across async rounds until a full result is built.
private final Set<String> nodesToIgnore = new HashSet<>();
// Monotonic round counter used to detect and discard stale async responses.
private final AtomicLong round = new AtomicLong();
private boolean closed;
@SuppressWarnings("unchecked")
protected AsyncShardFetch(Logger logger, String type, ShardId shardId, String customDataPath,
Lister<? extends BaseNodesResponse<T>, T> action) {
this.logger = logger;
this.type = type;
this.shardId = Objects.requireNonNull(shardId);
this.customDataPath = Objects.requireNonNull(customDataPath);
this.action = (Lister<BaseNodesResponse<T>, T>) action;
}
@Override
public synchronized void close() {
this.closed = true;
}
/**
 * Returns the number of async fetches that are currently ongoing.
 */
public synchronized int getNumberOfInFlightFetches() {
int count = 0;
for (NodeEntry<T> nodeEntry : cache.values()) {
if (nodeEntry.isFetching()) {
count++;
}
}
return count;
}
/**
 * Fetches the data for the relevant shard. If there are any ongoing async fetches going on, or new ones have
 * been initiated by this call, the result will have no data.
 * <p>
 * The ignoreNodes are nodes that are supposed to be ignored for this round, since fetching is async, we need
 * to keep them around and make sure we add them back when all the responses are fetched and returned.
 */
public synchronized FetchResult<T> fetchData(DiscoveryNodes nodes, Set<String> ignoreNodes) {
if (closed) {
throw new IllegalStateException(shardId + ": can't fetch data on closed async fetch");
}
nodesToIgnore.addAll(ignoreNodes);
fillShardCacheWithDataNodes(cache, nodes);
List<NodeEntry<T>> nodesToFetch = findNodesToFetch(cache);
if (nodesToFetch.isEmpty() == false) {
// mark all nodes as fetching and go ahead and async fetch them
// use a unique round id to detect stale responses in processAsyncFetch
final long fetchingRound = round.incrementAndGet();
for (NodeEntry<T> nodeEntry : nodesToFetch) {
nodeEntry.markAsFetching(fetchingRound);
}
DiscoveryNode[] discoNodesToFetch = nodesToFetch.stream().map(NodeEntry::getNodeId).map(nodes::get)
.toArray(DiscoveryNode[]::new);
asyncFetch(discoNodesToFetch, fetchingRound);
}
// if we are still fetching, return null to indicate it
if (hasAnyNodeFetching(cache)) {
return new FetchResult<>(shardId, null, emptySet());
} else {
// nothing to fetch, yay, build the return value
Map<DiscoveryNode, T> fetchData = new HashMap<>();
Set<String> failedNodes = new HashSet<>();
for (Iterator<Map.Entry<String, NodeEntry<T>>> it = cache.entrySet().iterator(); it.hasNext(); ) {
Map.Entry<String, NodeEntry<T>> entry = it.next();
String nodeId = entry.getKey();
NodeEntry<T> nodeEntry = entry.getValue();
DiscoveryNode node = nodes.get(nodeId);
if (node != null) {
if (nodeEntry.isFailed()) {
// if its failed, remove it from the list of nodes, so if this run doesn't work
// we try again next round to fetch it again
it.remove();
failedNodes.add(nodeEntry.getNodeId());
} else {
if (nodeEntry.getValue() != null) {
fetchData.put(node, nodeEntry.getValue());
}
}
}
}
Set<String> allIgnoreNodes = Set.copyOf(nodesToIgnore);
// clear the nodes to ignore, we had a successful run in fetching everything we can
// we need to try them if another full run is needed
nodesToIgnore.clear();
// if at least one node failed, make sure to have a protective reroute
// here, just in case this round won't find anything, and we need to retry fetching data
if (failedNodes.isEmpty() == false || allIgnoreNodes.isEmpty() == false) {
reroute(shardId, "nodes failed [" + failedNodes.size() + "], ignored [" + allIgnoreNodes.size() + "]");
}
return new FetchResult<>(shardId, fetchData, allIgnoreNodes);
}
}
/**
 * Called by the response handler of the async action to fetch data. Verifies that its still working
 * on the same cache generation, otherwise the results are discarded. It then goes and fills the relevant data for
 * the shard (response + failures), issuing a reroute at the end of it to make sure there will be another round
 * of allocations taking this new data into account.
 */
protected synchronized void processAsyncFetch(List<T> responses, List<FailedNodeException> failures, long fetchingRound) {
if (closed) {
// we are closed, no need to process this async fetch at all
logger.trace("{} ignoring fetched [{}] results, already closed", shardId, type);
return;
}
logger.trace("{} processing fetched [{}] results", shardId, type);
if (responses != null) {
for (T response : responses) {
NodeEntry<T> nodeEntry = cache.get(response.getNode().getId());
if (nodeEntry != null) {
if (nodeEntry.getFetchingRound() != fetchingRound) {
assert nodeEntry.getFetchingRound() > fetchingRound : "node entries only replaced by newer rounds";
// fixed: arguments were previously (nodeId, type), swapped relative to the
// "for [{}] from node {}" placeholders used consistently elsewhere in this class
logger.trace("{} received response for [{}] from node {} for an older fetching round (expected: {} but was: {})",
shardId, type, nodeEntry.getNodeId(), nodeEntry.getFetchingRound(), fetchingRound);
} else if (nodeEntry.isFailed()) {
logger.trace("{} node {} has failed for [{}] (failure [{}])", shardId, nodeEntry.getNodeId(), type,
nodeEntry.getFailure());
} else {
// if the entry is there, for the right fetching round and not marked as failed already, process it
logger.trace("{} marking {} as done for [{}], result is [{}]", shardId, nodeEntry.getNodeId(), type, response);
nodeEntry.doneFetching(response);
}
}
}
}
if (failures != null) {
for (FailedNodeException failure : failures) {
logger.trace("{} processing failure {} for [{}]", shardId, failure, type);
NodeEntry<T> nodeEntry = cache.get(failure.nodeId());
if (nodeEntry != null) {
if (nodeEntry.getFetchingRound() != fetchingRound) {
assert nodeEntry.getFetchingRound() > fetchingRound : "node entries only replaced by newer rounds";
// fixed: arguments were previously (nodeId, type), swapped relative to the placeholders
logger.trace("{} received failure for [{}] from node {} for an older fetching round (expected: {} but was: {})",
shardId, type, nodeEntry.getNodeId(), nodeEntry.getFetchingRound(), fetchingRound);
} else if (nodeEntry.isFailed() == false) {
// if the entry is there, for the right fetching round and not marked as failed already, process it
Throwable unwrappedCause = ExceptionsHelper.unwrapCause(failure.getCause());
// if the request got rejected or timed out, we need to try it again next time...
if (unwrappedCause instanceof EsRejectedExecutionException ||
unwrappedCause instanceof ReceiveTimeoutTransportException ||
unwrappedCause instanceof ElasticsearchTimeoutException) {
nodeEntry.restartFetching();
} else {
logger.warn(() -> new ParameterizedMessage("{}: failed to list shard for {} on node [{}]",
shardId, type, failure.nodeId()), failure);
nodeEntry.doneFetching(failure.getCause());
}
}
}
}
}
reroute(shardId, "post_response");
}
/**
 * Implement this in order to schedule another round that causes a call to fetch data.
 */
protected abstract void reroute(ShardId shardId, String reason);
/**
 * Clear cache for node, ensuring next fetch will fetch a fresh copy.
 */
synchronized void clearCacheForNode(String nodeId) {
cache.remove(nodeId);
}
/**
 * Fills the shard fetched data with new (data) nodes and a fresh NodeEntry, and removes from
 * it nodes that are no longer part of the state.
 */
private void fillShardCacheWithDataNodes(Map<String, NodeEntry<T>> shardCache, DiscoveryNodes nodes) {
// verify that all current data nodes are there
for (ObjectObjectCursor<String, DiscoveryNode> cursor : nodes.getDataNodes()) {
DiscoveryNode node = cursor.value;
if (shardCache.containsKey(node.getId()) == false) {
shardCache.put(node.getId(), new NodeEntry<T>(node.getId()));
}
}
// remove nodes that are no longer part of the data nodes set
shardCache.keySet().removeIf(nodeId -> nodes.nodeExists(nodeId) == false);
}
/**
 * Finds all the nodes that need to be fetched. Those are nodes that have no
 * data, and are not in fetch mode.
 */
private List<NodeEntry<T>> findNodesToFetch(Map<String, NodeEntry<T>> shardCache) {
List<NodeEntry<T>> nodesToFetch = new ArrayList<>();
for (NodeEntry<T> nodeEntry : shardCache.values()) {
if (nodeEntry.hasData() == false && nodeEntry.isFetching() == false) {
nodesToFetch.add(nodeEntry);
}
}
return nodesToFetch;
}
/**
 * Are there any nodes that are fetching data?
 */
private boolean hasAnyNodeFetching(Map<String, NodeEntry<T>> shardCache) {
for (NodeEntry<T> nodeEntry : shardCache.values()) {
if (nodeEntry.isFetching()) {
return true;
}
}
return false;
}
/**
 * Async fetches data for the provided shard with the set of nodes that need to be fetched from.
 */
// visible for testing
void asyncFetch(final DiscoveryNode[] nodes, long fetchingRound) {
logger.trace("{} fetching [{}] from {}", shardId, type, nodes);
action.list(shardId, customDataPath, nodes, new ActionListener<BaseNodesResponse<T>>() {
@Override
public void onResponse(BaseNodesResponse<T> response) {
processAsyncFetch(response.getNodes(), response.failures(), fetchingRound);
}
@Override
public void onFailure(Exception e) {
// a total failure of the list action: mark every targeted node as failed with the same cause
List<FailedNodeException> failures = new ArrayList<>(nodes.length);
for (final DiscoveryNode node: nodes) {
failures.add(new FailedNodeException(node.getId(), "total failure in fetching", e));
}
processAsyncFetch(null, failures, fetchingRound);
}
});
}
/**
 * The result of a fetch operation. Make sure to first check {@link #hasData()} before
 * fetching the actual data.
 */
public static class FetchResult<T extends BaseNodeResponse> {
private final ShardId shardId;
private final Map<DiscoveryNode, T> data;
private final Set<String> ignoreNodes;
public FetchResult(ShardId shardId, Map<DiscoveryNode, T> data, Set<String> ignoreNodes) {
this.shardId = shardId;
this.data = data;
this.ignoreNodes = ignoreNodes;
}
/**
 * Does the result actually contain data? If not, then there are ongoing fetch
 * operations happening, and the caller should wait for them.
 */
public boolean hasData() {
return data != null;
}
/**
 * Returns the actual data, note, make sure to check {@link #hasData()} first and
 * only use this when there is an actual data.
 */
public Map<DiscoveryNode, T> getData() {
assert data != null : "getData should only be called if there is data to be fetched, please check hasData first";
return this.data;
}
/**
 * Process any changes needed to the allocation based on this fetch result.
 */
public void processAllocation(RoutingAllocation allocation) {
for (String ignoreNode : ignoreNodes) {
allocation.addIgnoreShardForNode(shardId, ignoreNode);
}
}
}
/**
 * A node entry, holding the state of the fetched data for a specific shard
 * for a given node.
 */
static class NodeEntry<T> {
private final String nodeId;
private boolean fetching;
@Nullable
private T value;
// distinguishes "value not yet fetched" from a legitimately null fetched value
private boolean valueSet;
private Throwable failure;
private long fetchingRound;
NodeEntry(String nodeId) {
this.nodeId = nodeId;
}
String getNodeId() {
return this.nodeId;
}
boolean isFetching() {
return fetching;
}
void markAsFetching(long fetchingRound) {
assert fetching == false : "double marking a node as fetching";
this.fetching = true;
this.fetchingRound = fetchingRound;
}
void doneFetching(T value) {
assert fetching : "setting value but not in fetching mode";
assert failure == null : "setting value when failure already set";
this.valueSet = true;
this.value = value;
this.fetching = false;
}
void doneFetching(Throwable failure) {
assert fetching : "setting value but not in fetching mode";
assert valueSet == false : "setting failure when already set value";
assert failure != null : "setting failure can't be null";
this.failure = failure;
this.fetching = false;
}
void restartFetching() {
assert fetching : "restarting fetching, but not in fetching mode";
assert valueSet == false : "value can't be set when restarting fetching";
assert failure == null : "failure can't be set when restarting fetching";
this.fetching = false;
}
boolean isFailed() {
return failure != null;
}
boolean hasData() {
return valueSet || failure != null;
}
Throwable getFailure() {
assert hasData() : "getting failure when data has not been fetched";
return failure;
}
@Nullable
T getValue() {
assert failure == null : "trying to fetch value, but its marked as failed, check isFailed";
assert valueSet : "value is not set, hasn't been fetched yet";
return value;
}
long getFetchingRound() {
return fetchingRound;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration2;
import static org.junit.Assert.assertEquals;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.Duration;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Properties;
import org.apache.commons.configuration2.ex.ConversionException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Tests {@link ImmutableConfiguration} default methods.
*/
public class TestDefaultImmutableConfiguration {
/** Tests default methods. This class MUST NOT override the default methods! */
private class MapImmutableConfiguration implements ImmutableConfiguration {
Map<String, Object> map = new HashMap<>();
@Override
public boolean containsKey(final String key) {
// Super is not a default method.
return false;
}
@Override
public <T> T get(final Class<T> cls, final String key) {
// Super is not a default method.
return null;
}
@Override
public <T> T get(final Class<T> cls, final String key, final T defaultValue) {
// Super is not a default method.
return null;
}
@Override
public Object getArray(final Class<?> cls, final String key) {
// Super is not a default method.
return null;
}
@Override
public Object getArray(final Class<?> cls, final String key, final Object defaultValue) {
// Super is not a default method.
return null;
}
@Override
public BigDecimal getBigDecimal(final String key) {
// Super is not a default method.
return null;
}
@Override
public BigDecimal getBigDecimal(final String key, final BigDecimal defaultValue) {
// Super is not a default method.
return null;
}
@Override
public BigInteger getBigInteger(final String key) {
// Super is not a default method.
return null;
}
@Override
public BigInteger getBigInteger(final String key, final BigInteger defaultValue) {
// Super is not a default method.
return null;
}
@Override
public boolean getBoolean(final String key) {
// Super is not a default method.
return false;
}
@Override
public boolean getBoolean(final String key, final boolean defaultValue) {
// Super is not a default method.
return false;
}
@Override
public Boolean getBoolean(final String key, final Boolean defaultValue) {
// Super is not a default method.
return null;
}
@Override
public byte getByte(final String key) {
// Super is not a default method.
return 0;
}
@Override
public byte getByte(final String key, final byte defaultValue) {
// Super is not a default method.
return 0;
}
@Override
public Byte getByte(final String key, final Byte defaultValue) {
// Super is not a default method.
return null;
}
@Override
public <T> Collection<T> getCollection(final Class<T> cls, final String key, final Collection<T> target) {
// Super is not a default method.
return null;
}
@Override
public <T> Collection<T> getCollection(final Class<T> cls, final String key, final Collection<T> target, final Collection<T> defaultValue) {
// Super is not a default method.
return null;
}
@Override
public double getDouble(final String key) {
// Super is not a default method.
return 0;
}
@Override
public double getDouble(final String key, final double defaultValue) {
// Super is not a default method.
return 0;
}
@Override
public Double getDouble(final String key, final Double defaultValue) {
// Super is not a default method.
return null;
}
@Override
public String getEncodedString(final String key) {
// Super is not a default method.
return null;
}
@Override
public String getEncodedString(final String key, final ConfigurationDecoder decoder) {
// Super is not a default method.
return null;
}
@Override
public float getFloat(final String key) {
// Super is not a default method.
return 0;
}
@Override
public float getFloat(final String key, final float defaultValue) {
// Super is not a default method.
return 0;
}
@Override
public Float getFloat(final String key, final Float defaultValue) {
// Super is not a default method.
return null;
}
@Override
public int getInt(final String key) {
// Super is not a default method.
return 0;
}
@Override
public int getInt(final String key, final int defaultValue) {
// Super is not a default method.
return 0;
}
@Override
public Integer getInteger(final String key, final Integer defaultValue) {
// Super is not a default method.
return null;
}
@Override
public Iterator<String> getKeys() {
// Super is not a default method.
return null;
}
@Override
public Iterator<String> getKeys(final String prefix) {
// Super is not a default method.
return null;
}
@Override
public <T> List<T> getList(final Class<T> cls, final String key) {
// Super is not a default method.
return null;
}
@Override
public <T> List<T> getList(final Class<T> cls, final String key, final List<T> defaultValue) {
// Super is not a default method.
return null;
}
@Override
public List<Object> getList(final String key) {
// Super is not a default method.
return null;
}
@Override
public List<Object> getList(final String key, final List<?> defaultValue) {
// Super is not a default method.
return null;
}
@Override
public long getLong(final String key) {
// Super is not a default method.
return 0;
}
@Override
public long getLong(final String key, final long defaultValue) {
// Super is not a default method.
return 0;
}
@Override
public Long getLong(final String key, final Long defaultValue) {
// Super is not a default method.
return null;
}
@Override
public Properties getProperties(final String key) {
// Super is not a default method.
return null;
}
@Override
public Object getProperty(final String key) {
// Super is not a default method.
return map.get(key);
}
@Override
public short getShort(final String key) {
// Super is not a default method.
return 0;
}
@Override
public short getShort(final String key, final short defaultValue) {
// Super is not a default method.
return 0;
}
@Override
public Short getShort(final String key, final Short defaultValue) {
// Super is not a default method.
return null;
}
@Override
public String getString(final String key) {
return Objects.toString(map.get(key), null);
}
@Override
public String getString(final String key, final String defaultValue) {
// Super is not a default method.
return null;
}
@Override
public String[] getStringArray(final String key) {
// Super is not a default method.
return null;
}
@Override
public ImmutableConfiguration immutableSubset(final String prefix) {
// Super is not a default method.
return null;
}
@Override
public boolean isEmpty() {
// Super is not a default method.
return false;
}
@Override
public int size() {
// Super is not a default method.
return 0;
}
}
private final MapImmutableConfiguration config = new MapImmutableConfiguration();
@Before
@After
public void clearMap() {
config.map.clear();
}
@Test
public void testGetDuration() {
final Duration d = Duration.ofSeconds(1);
config.map.put("durationD", d.toString());
final Duration oneD = Duration.ofSeconds(1);
final Duration twoD = Duration.ofSeconds(2);
assertEquals("This returns 1(Duration)", oneD, config.getDuration("durationD"));
assertEquals("This returns 1(Duration)", oneD, config.getDuration("durationD", twoD));
assertEquals("This returns 2(default Duration)", twoD, config.getDuration("numberNotInConfig", twoD));
assertEquals("This returns 1(Duration)", oneD, config.getDuration("durationD", twoD));
}
@Test(expected = ConversionException.class)
public void testGetDurationIncompatibleType() {
config.map.put("test.empty", "");
config.getDuration("test.empty");
}
@Test(expected = NoSuchElementException.class)
public void testGetDurationUnknown() {
config.getDuration("numberNotInConfig");
}
}
| |
/**
* Copyright 2011 Nube Technologies
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package co.nubetech.hiho.job;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
import org.junit.Test;
import co.nubetech.hiho.common.HIHOException;
import co.nubetech.hiho.common.HihoTestCase;
public class TestExportToDB extends HihoTestCase {
final static Logger logger = Logger
.getLogger(co.nubetech.hiho.job.TestExportToDB.class);
/** Every mandatory property supplied: checkMandatoryConfs must complete without throwing. */
@Test
public void testCheckMandatoryConfsForValidValues() throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -jdbcDriver must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForDriverClass() throws HIHOException {
final String[] arguments = {
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -jdbcUrl must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForUrlProperty() throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -jdbcUsername must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForUsernameProperty()
throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -jdbcPassword must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForPasswordProperty()
throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -delimiter must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsFordelimiter() throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -numberOfMappers must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForNumberOfMappers() throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -tableName must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForTableName() throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
/** Omitting -columnNames must make checkMandatoryConfs throw HIHOException. */
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForColumnNames() throws HIHOException {
final String[] arguments = {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-inputPath", "/input1,/input2"};
final ExportToDB job = new ExportToDB();
final Configuration configuration = new Configuration();
job.populateConfiguration(arguments, configuration);
job.checkMandatoryConfs(configuration);
}
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForInputPath() throws HIHOException {
String[] args = new String[] {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department"};
ExportToDB exportToDB = new ExportToDB();
Configuration conf = new Configuration();
exportToDB.populateConfiguration(args, conf);
exportToDB.checkMandatoryConfs(conf);
}
@Test
public void testExportToDBWithValidValues() throws Exception{
final String inputData1 = "Xavier Wilson,1,99999,ASE,IT\n" +
"Drake Mckinney,1,99999,SSE,IT";
final String inputData2 = "Zephania Bauer,2,99999,PM,IT";
createTextFileInHDFS(inputData1, "/input", "testFile1.txt");
createTextFileInHDFS(inputData2, "/input", "testFile2.txt");
String[] args = new String[] {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input"};
int res = ToolRunner.run(createJobConf(), new ExportToDB(), args);
assertEquals(0, res);
String userName = "root";
String password = "newpwd";
String url = "jdbc:mysql://localhost/hiho";
Connection conn;
Statement stmt;
try {
Class.forName("com.mysql.jdbc.Driver").newInstance();
conn = DriverManager.getConnection(url, userName, password);
stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("select * from employee where name='Xavier Wilson'");
boolean isRsExists = false;
while(rs.next()){
assertEquals(rs.getString("name"),"Xavier Wilson");
assertEquals(rs.getLong("genderId"),1l);
assertEquals(rs.getLong("dateId"),99999l);
assertEquals(rs.getString("designation"),"ASE");
assertEquals(rs.getString("department"),"IT");
isRsExists = true;
}
assertTrue(isRsExists);
stmt.executeUpdate("delete from employee where dateId=99999");
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void testExportToDBWithValidValuesNullEntry() throws Exception{
final String inputData1 = "Xavier Wilson,1,99999, ,IT\n" +
"Drake Mckinney,1,99999,ASE ,IT";
final String inputData2 = "Zephania Bauer,2,99999,PM,IT";
createTextFileInHDFS(inputData1, "/input", "testFile1.txt");
createTextFileInHDFS(inputData2, "/input", "testFile2.txt");
String[] args = new String[] {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input"};
int res = ToolRunner.run(createJobConf(), new ExportToDB(), args);
assertEquals(0, res);
String userName = "root";
String password = "newpwd";
String url = "jdbc:mysql://localhost/hiho";
Connection conn;
Statement stmt;
try {
Class.forName("com.mysql.jdbc.Driver").newInstance();
conn = DriverManager.getConnection(url, userName, password);
stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("select * from employee where name='Xavier Wilson'");
boolean isRsExists = false;
while(rs.next()){
assertEquals(rs.getString("name"),"Xavier Wilson");
assertEquals(rs.getLong("genderId"),1l);
assertEquals(rs.getLong("dateId"),99999l);
assertEquals(rs.getString("designation"),null);
assertEquals(rs.getString("department"),"IT");
isRsExists = true;
}
assertTrue(isRsExists);
stmt.executeUpdate("delete from employee where dateId=99999");
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void testExportToDBWithUnequalLengthOfColumns() throws Exception{
final String inputData1 = "Xavier Wilson,1,99999,ASE,IT\n" +
"Drake Mckinney,1,99999,SSE,IT,425";
final String inputData2 = "Zephania Bauer,2,99999,PM,IT";
createTextFileInHDFS(inputData1, "/input", "testFile1.txt");
createTextFileInHDFS(inputData2, "/input", "testFile2.txt");
String[] args = new String[] {
"-jdbcDriver", "com.mysql.jdbc.Driver",
"-jdbcUrl", "jdbc:mysql://localhost:3306/hiho",
"-jdbcUsername", "root",
"-jdbcPassword", "newpwd",
"-delimiter", ",",
"-numberOfMappers", "1",
"-tableName", "employee",
"-columnNames", "name,genderId,dateId,designation,department",
"-inputPath", "/input"};
int res = ToolRunner.run(createJobConf(), new ExportToDB(), args);
assertEquals(1, res);
}
}
| |
package org.springframework.security.provisioning;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserCache;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.cache.NullUserCache;
import org.springframework.security.core.userdetails.jdbc.JdbcDaoImpl;
import org.springframework.context.ApplicationContextException;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.jdbc.core.PreparedStatementSetter;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.util.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
/**
* Jdbc user management service, based on the same table structure as its parent class,
* <tt>JdbcDaoImpl</tt>.
* <p>
* Provides CRUD operations for both users and groups. Note that if the
* {@link #setEnableAuthorities(boolean) enableAuthorities} property is set to false,
* calls to createUser, updateUser and deleteUser will not store the authorities from the
* <tt>UserDetails</tt> or delete authorities for the user. Since this class cannot
* differentiate between authorities which were loaded for an individual or for a group of
* which the individual is a member, it's important that you take this into account when
* using this implementation for managing your users.
*
* @author Luke Taylor
* @since 2.0
*/
public class JdbcUserDetailsManager extends JdbcDaoImpl implements UserDetailsManager,
		GroupManager {
	// ~ Static fields/initializers
	// =====================================================================================

	// UserDetailsManager SQL
	public static final String DEF_CREATE_USER_SQL = "insert into users (username, password, enabled) values (?,?,?)";
	public static final String DEF_DELETE_USER_SQL = "delete from users where username = ?";
	public static final String DEF_UPDATE_USER_SQL = "update users set password = ?, enabled = ? where username = ?";
	public static final String DEF_INSERT_AUTHORITY_SQL = "insert into authorities (username, authority) values (?,?)";
	public static final String DEF_DELETE_USER_AUTHORITIES_SQL = "delete from authorities where username = ?";
	public static final String DEF_USER_EXISTS_SQL = "select username from users where username = ?";
	public static final String DEF_CHANGE_PASSWORD_SQL = "update users set password = ? where username = ?";

	// GroupManager SQL
	public static final String DEF_FIND_GROUPS_SQL = "select group_name from groups";
	public static final String DEF_FIND_USERS_IN_GROUP_SQL = "select username from group_members gm, groups g "
			+ "where gm.group_id = g.id" + " and g.group_name = ?";
	public static final String DEF_INSERT_GROUP_SQL = "insert into groups (group_name) values (?)";
	public static final String DEF_FIND_GROUP_ID_SQL = "select id from groups where group_name = ?";
	public static final String DEF_INSERT_GROUP_AUTHORITY_SQL = "insert into group_authorities (group_id, authority) values (?,?)";
	public static final String DEF_DELETE_GROUP_SQL = "delete from groups where id = ?";
	public static final String DEF_DELETE_GROUP_AUTHORITIES_SQL = "delete from group_authorities where group_id = ?";
	public static final String DEF_DELETE_GROUP_MEMBERS_SQL = "delete from group_members where group_id = ?";
	public static final String DEF_RENAME_GROUP_SQL = "update groups set group_name = ? where group_name = ?";
	public static final String DEF_INSERT_GROUP_MEMBER_SQL = "insert into group_members (group_id, username) values (?,?)";
	public static final String DEF_DELETE_GROUP_MEMBER_SQL = "delete from group_members where group_id = ? and username = ?";
	public static final String DEF_GROUP_AUTHORITIES_QUERY_SQL = "select g.id, g.group_name, ga.authority "
			+ "from groups g, group_authorities ga "
			+ "where g.group_name = ? "
			+ "and g.id = ga.group_id ";
	public static final String DEF_DELETE_GROUP_AUTHORITY_SQL = "delete from group_authorities where group_id = ? and authority = ?";

	// ~ Instance fields
	// ================================================================================================

	protected final Log logger = LogFactory.getLog(getClass());

	// Each statement defaults to the DEF_* constant above and is overridable
	// through the corresponding setter for non-standard schemas.
	private String createUserSql = DEF_CREATE_USER_SQL;
	private String deleteUserSql = DEF_DELETE_USER_SQL;
	private String updateUserSql = DEF_UPDATE_USER_SQL;
	private String createAuthoritySql = DEF_INSERT_AUTHORITY_SQL;
	private String deleteUserAuthoritiesSql = DEF_DELETE_USER_AUTHORITIES_SQL;
	private String userExistsSql = DEF_USER_EXISTS_SQL;
	private String changePasswordSql = DEF_CHANGE_PASSWORD_SQL;
	private String findAllGroupsSql = DEF_FIND_GROUPS_SQL;
	private String findUsersInGroupSql = DEF_FIND_USERS_IN_GROUP_SQL;
	private String insertGroupSql = DEF_INSERT_GROUP_SQL;
	private String findGroupIdSql = DEF_FIND_GROUP_ID_SQL;
	private String insertGroupAuthoritySql = DEF_INSERT_GROUP_AUTHORITY_SQL;
	private String deleteGroupSql = DEF_DELETE_GROUP_SQL;
	private String deleteGroupAuthoritiesSql = DEF_DELETE_GROUP_AUTHORITIES_SQL;
	private String deleteGroupMembersSql = DEF_DELETE_GROUP_MEMBERS_SQL;
	private String renameGroupSql = DEF_RENAME_GROUP_SQL;
	private String insertGroupMemberSql = DEF_INSERT_GROUP_MEMBER_SQL;
	private String deleteGroupMemberSql = DEF_DELETE_GROUP_MEMBER_SQL;
	private String groupAuthoritiesSql = DEF_GROUP_AUTHORITIES_QUERY_SQL;
	private String deleteGroupAuthoritySql = DEF_DELETE_GROUP_AUTHORITY_SQL;

	// Optional: used only to re-authenticate the current user in changePassword().
	private AuthenticationManager authenticationManager;

	// Defaults to a no-op cache; entries are evicted after every mutation so
	// callers never see stale UserDetails.
	private UserCache userCache = new NullUserCache();

	// ~ Methods
	// ========================================================================================================

	protected void initDao() throws ApplicationContextException {
		if (authenticationManager == null) {
			logger.info("No authentication manager set. Reauthentication of users when changing passwords will "
					+ "not be performed.");
		}
		super.initDao();
	}

	// ~ UserDetailsManager implementation
	// ==============================================================================

	/**
	 * Inserts a new user row and, when {@code enableAuthorities} is set,
	 * one authority row per granted authority.
	 */
	public void createUser(final UserDetails user) {
		validateUserDetails(user);
		getJdbcTemplate().update(createUserSql, new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setString(1, user.getUsername());
				ps.setString(2, user.getPassword());
				ps.setBoolean(3, user.isEnabled());
			}
		});

		if (getEnableAuthorities()) {
			insertUserAuthorities(user);
		}
	}

	/**
	 * Updates password/enabled for an existing user, rewrites the user's
	 * authorities when {@code enableAuthorities} is set, and evicts the user
	 * from the cache.
	 */
	public void updateUser(final UserDetails user) {
		validateUserDetails(user);
		getJdbcTemplate().update(updateUserSql, new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setString(1, user.getPassword());
				ps.setBoolean(2, user.isEnabled());
				ps.setString(3, user.getUsername());
			}
		});

		if (getEnableAuthorities()) {
			deleteUserAuthorities(user.getUsername());
			insertUserAuthorities(user);
		}

		userCache.removeUserFromCache(user.getUsername());
	}

	// Inserts one authorities row per GrantedAuthority of the user.
	private void insertUserAuthorities(UserDetails user) {
		for (GrantedAuthority auth : user.getAuthorities()) {
			getJdbcTemplate().update(createAuthoritySql, user.getUsername(),
					auth.getAuthority());
		}
	}

	/**
	 * Deletes the user (and, when {@code enableAuthorities} is set, the
	 * user's authority rows first, to satisfy referential integrity) and
	 * evicts the user from the cache.
	 */
	public void deleteUser(String username) {
		if (getEnableAuthorities()) {
			deleteUserAuthorities(username);
		}
		getJdbcTemplate().update(deleteUserSql, username);
		userCache.removeUserFromCache(username);
	}

	private void deleteUserAuthorities(String username) {
		getJdbcTemplate().update(deleteUserAuthoritiesSql, username);
	}

	/**
	 * Changes the password of the user bound to the current security
	 * context. If an {@link AuthenticationManager} has been configured, the
	 * user is first re-authenticated with {@code oldPassword}; otherwise the
	 * old password is not checked. The context authentication is refreshed
	 * and the user is evicted from the cache.
	 *
	 * @throws AccessDeniedException if no Authentication is present in the
	 * security context (indicates a programming error in the caller).
	 */
	public void changePassword(String oldPassword, String newPassword)
			throws AuthenticationException {
		Authentication currentUser = SecurityContextHolder.getContext()
				.getAuthentication();

		if (currentUser == null) {
			// This would indicate bad coding somewhere
			throw new AccessDeniedException(
					"Can't change password as no Authentication object found in context "
							+ "for current user.");
		}

		String username = currentUser.getName();

		// If an authentication manager has been set, re-authenticate the user with the
		// supplied password.
		if (authenticationManager != null) {
			logger.debug("Reauthenticating user '" + username
					+ "' for password change request.");

			authenticationManager.authenticate(new UsernamePasswordAuthenticationToken(
					username, oldPassword));
		}
		else {
			logger.debug("No authentication manager set. Password won't be re-checked.");
		}

		logger.debug("Changing password for user '" + username + "'");

		getJdbcTemplate().update(changePasswordSql, newPassword, username);

		SecurityContextHolder.getContext().setAuthentication(
				createNewAuthentication(currentUser, newPassword));

		userCache.removeUserFromCache(username);
	}

	/**
	 * Builds a replacement Authentication after a password change by
	 * reloading the user. The credentials are deliberately set to null so
	 * the (new) password is not retained in the security context.
	 */
	protected Authentication createNewAuthentication(Authentication currentAuth,
			String newPassword) {
		UserDetails user = loadUserByUsername(currentAuth.getName());

		UsernamePasswordAuthenticationToken newAuthentication = new UsernamePasswordAuthenticationToken(
				user, null, user.getAuthorities());
		newAuthentication.setDetails(currentAuth.getDetails());

		return newAuthentication;
	}

	/**
	 * Returns whether exactly one user row exists for the given username.
	 *
	 * @throws IncorrectResultSizeDataAccessException if more than one row
	 * matches (duplicate usernames indicate a corrupt users table).
	 */
	public boolean userExists(String username) {
		List<String> users = getJdbcTemplate().queryForList(userExistsSql,
				new String[] { username }, String.class);

		if (users.size() > 1) {
			throw new IncorrectResultSizeDataAccessException(
					"More than one user found with name '" + username + "'", 1);
		}

		return users.size() == 1;
	}

	// ~ GroupManager implementation
	// ====================================================================================

	public List<String> findAllGroups() {
		return getJdbcTemplate().queryForList(findAllGroupsSql, String.class);
	}

	public List<String> findUsersInGroup(String groupName) {
		Assert.hasText(groupName);
		return getJdbcTemplate().queryForList(findUsersInGroupSql,
				new String[] { groupName }, String.class);
	}

	/**
	 * Inserts a group row, looks up its generated id, then inserts one
	 * group_authorities row per supplied authority.
	 */
	public void createGroup(final String groupName,
			final List<GrantedAuthority> authorities) {
		Assert.hasText(groupName);
		Assert.notNull(authorities);

		logger.debug("Creating new group '" + groupName + "' with authorities "
				+ AuthorityUtils.authorityListToSet(authorities));

		getJdbcTemplate().update(insertGroupSql, groupName);

		final int groupId = findGroupId(groupName);

		for (GrantedAuthority a : authorities) {
			final String authority = a.getAuthority();
			getJdbcTemplate().update(insertGroupAuthoritySql,
					new PreparedStatementSetter() {
						public void setValues(PreparedStatement ps) throws SQLException {
							ps.setInt(1, groupId);
							ps.setString(2, authority);
						}
					});
		}
	}

	/**
	 * Deletes a group along with its memberships and authorities. Child rows
	 * (members, authorities) are removed before the group row itself.
	 */
	public void deleteGroup(String groupName) {
		logger.debug("Deleting group '" + groupName + "'");
		Assert.hasText(groupName);

		final int id = findGroupId(groupName);
		PreparedStatementSetter groupIdPSS = new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setInt(1, id);
			}
		};
		getJdbcTemplate().update(deleteGroupMembersSql, groupIdPSS);
		getJdbcTemplate().update(deleteGroupAuthoritiesSql, groupIdPSS);
		getJdbcTemplate().update(deleteGroupSql, groupIdPSS);
	}

	public void renameGroup(String oldName, String newName) {
		logger.debug("Changing group name from '" + oldName + "' to '" + newName + "'");
		Assert.hasText(oldName);
		Assert.hasText(newName);

		getJdbcTemplate().update(renameGroupSql, newName, oldName);
	}

	/**
	 * Adds a membership row and evicts the user from the cache (group
	 * membership may change the user's effective authorities).
	 */
	public void addUserToGroup(final String username, final String groupName) {
		logger.debug("Adding user '" + username + "' to group '" + groupName + "'");
		Assert.hasText(username);
		Assert.hasText(groupName);

		final int id = findGroupId(groupName);

		getJdbcTemplate().update(insertGroupMemberSql, new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setInt(1, id);
				ps.setString(2, username);
			}
		});

		userCache.removeUserFromCache(username);
	}

	/**
	 * Removes a membership row and evicts the user from the cache.
	 */
	public void removeUserFromGroup(final String username, final String groupName) {
		// Fixed log message: previously said "to group" for a removal.
		logger.debug("Removing user '" + username + "' from group '" + groupName + "'");
		Assert.hasText(username);
		Assert.hasText(groupName);

		final int id = findGroupId(groupName);

		getJdbcTemplate().update(deleteGroupMemberSql, new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setInt(1, id);
				ps.setString(2, username);
			}
		});

		userCache.removeUserFromCache(username);
	}

	/**
	 * Loads the authorities assigned to a group; each row's authority column
	 * is prefixed with {@code getRolePrefix()}.
	 */
	public List<GrantedAuthority> findGroupAuthorities(String groupName) {
		logger.debug("Loading authorities for group '" + groupName + "'");
		Assert.hasText(groupName);

		return getJdbcTemplate().query(groupAuthoritiesSql, new String[] { groupName },
				new RowMapper<GrantedAuthority>() {
					public GrantedAuthority mapRow(ResultSet rs, int rowNum)
							throws SQLException {
						String roleName = getRolePrefix() + rs.getString(3);

						return new SimpleGrantedAuthority(roleName);
					}
				});
	}

	public void removeGroupAuthority(String groupName, final GrantedAuthority authority) {
		logger.debug("Removing authority '" + authority + "' from group '" + groupName
				+ "'");
		Assert.hasText(groupName);
		Assert.notNull(authority);

		final int id = findGroupId(groupName);

		getJdbcTemplate().update(deleteGroupAuthoritySql, new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setInt(1, id);
				ps.setString(2, authority.getAuthority());
			}
		});
	}

	public void addGroupAuthority(final String groupName, final GrantedAuthority authority) {
		logger.debug("Adding authority '" + authority + "' to group '" + groupName + "'");
		Assert.hasText(groupName);
		Assert.notNull(authority);

		final int id = findGroupId(groupName);

		getJdbcTemplate().update(insertGroupAuthoritySql, new PreparedStatementSetter() {
			public void setValues(PreparedStatement ps) throws SQLException {
				ps.setInt(1, id);
				ps.setString(2, authority.getAuthority());
			}
		});
	}

	// Resolves a group name to its numeric id via findGroupIdSql.
	private int findGroupId(String group) {
		return getJdbcTemplate().queryForObject(findGroupIdSql, Integer.class, group);
	}

	public void setAuthenticationManager(AuthenticationManager authenticationManager) {
		this.authenticationManager = authenticationManager;
	}

	public void setCreateUserSql(String createUserSql) {
		Assert.hasText(createUserSql);
		this.createUserSql = createUserSql;
	}

	public void setDeleteUserSql(String deleteUserSql) {
		Assert.hasText(deleteUserSql);
		this.deleteUserSql = deleteUserSql;
	}

	public void setUpdateUserSql(String updateUserSql) {
		Assert.hasText(updateUserSql);
		this.updateUserSql = updateUserSql;
	}

	public void setCreateAuthoritySql(String createAuthoritySql) {
		Assert.hasText(createAuthoritySql);
		this.createAuthoritySql = createAuthoritySql;
	}

	public void setDeleteUserAuthoritiesSql(String deleteUserAuthoritiesSql) {
		Assert.hasText(deleteUserAuthoritiesSql);
		this.deleteUserAuthoritiesSql = deleteUserAuthoritiesSql;
	}

	public void setUserExistsSql(String userExistsSql) {
		Assert.hasText(userExistsSql);
		this.userExistsSql = userExistsSql;
	}

	public void setChangePasswordSql(String changePasswordSql) {
		Assert.hasText(changePasswordSql);
		this.changePasswordSql = changePasswordSql;
	}

	public void setFindAllGroupsSql(String findAllGroupsSql) {
		Assert.hasText(findAllGroupsSql);
		this.findAllGroupsSql = findAllGroupsSql;
	}

	public void setFindUsersInGroupSql(String findUsersInGroupSql) {
		Assert.hasText(findUsersInGroupSql);
		this.findUsersInGroupSql = findUsersInGroupSql;
	}

	public void setInsertGroupSql(String insertGroupSql) {
		Assert.hasText(insertGroupSql);
		this.insertGroupSql = insertGroupSql;
	}

	public void setFindGroupIdSql(String findGroupIdSql) {
		Assert.hasText(findGroupIdSql);
		this.findGroupIdSql = findGroupIdSql;
	}

	public void setInsertGroupAuthoritySql(String insertGroupAuthoritySql) {
		Assert.hasText(insertGroupAuthoritySql);
		this.insertGroupAuthoritySql = insertGroupAuthoritySql;
	}

	public void setDeleteGroupSql(String deleteGroupSql) {
		Assert.hasText(deleteGroupSql);
		this.deleteGroupSql = deleteGroupSql;
	}

	public void setDeleteGroupAuthoritiesSql(String deleteGroupAuthoritiesSql) {
		Assert.hasText(deleteGroupAuthoritiesSql);
		this.deleteGroupAuthoritiesSql = deleteGroupAuthoritiesSql;
	}

	public void setDeleteGroupMembersSql(String deleteGroupMembersSql) {
		Assert.hasText(deleteGroupMembersSql);
		this.deleteGroupMembersSql = deleteGroupMembersSql;
	}

	public void setRenameGroupSql(String renameGroupSql) {
		Assert.hasText(renameGroupSql);
		this.renameGroupSql = renameGroupSql;
	}

	public void setInsertGroupMemberSql(String insertGroupMemberSql) {
		Assert.hasText(insertGroupMemberSql);
		this.insertGroupMemberSql = insertGroupMemberSql;
	}

	public void setDeleteGroupMemberSql(String deleteGroupMemberSql) {
		Assert.hasText(deleteGroupMemberSql);
		this.deleteGroupMemberSql = deleteGroupMemberSql;
	}

	public void setGroupAuthoritiesSql(String groupAuthoritiesSql) {
		Assert.hasText(groupAuthoritiesSql);
		this.groupAuthoritiesSql = groupAuthoritiesSql;
	}

	public void setDeleteGroupAuthoritySql(String deleteGroupAuthoritySql) {
		Assert.hasText(deleteGroupAuthoritySql);
		this.deleteGroupAuthoritySql = deleteGroupAuthoritySql;
	}

	/**
	 * Optionally sets the UserCache if one is in use in the application. This allows the
	 * user to be removed from the cache after updates have taken place to avoid stale
	 * data.
	 *
	 * @param userCache the cache used by the AuthenticationManager.
	 */
	public void setUserCache(UserCache userCache) {
		Assert.notNull(userCache, "userCache cannot be null");
		this.userCache = userCache;
	}

	// Rejects users with an empty username or invalid authorities list.
	private void validateUserDetails(UserDetails user) {
		Assert.hasText(user.getUsername(), "Username may not be empty or null");
		validateAuthorities(user.getAuthorities());
	}

	// Rejects a null list, null entries, or entries with an empty authority string.
	private void validateAuthorities(Collection<? extends GrantedAuthority> authorities) {
		Assert.notNull(authorities, "Authorities list must not be null");

		for (GrantedAuthority authority : authorities) {
			Assert.notNull(authority, "Authorities list contains a null entry");
			Assert.hasText(authority.getAuthority(),
					"getAuthority() method must return a non-empty string");
		}
	}
}
| |
package app;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import java.awt.Color;
import java.awt.Font;
import javax.swing.JButton;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JTextPane;
public class MainGui extends JFrame{
	// --- Player 1 row: name field, money display and action buttons ---
	// player*/p*Money are package-private — presumably read elsewhere in the
	// app package; TODO confirm before tightening visibility.
	JTextArea player1;
	JTextArea p1Money;
	private JButton p1Add;
	private JButton p1Sub;
	private JButton p1Trans;
	private JButton p1Set;
	// --- Player 2 row ---
	JTextArea player2;
	JTextArea p2Money;
	private JButton p2Add;
	private JButton p2Sub;
	private JButton p2Trans;
	private JButton p2Set;
	// --- Player 3 row ---
	JTextArea player3;
	JTextArea p3Money;
	private JButton p3Add;
	private JButton p3Sub;
	private JButton p3Trans;
	private JButton p3Set;
	// --- Player 4 row ---
	JTextArea player4;
	JTextArea p4Money;
	private JButton p4Add;
	private JButton p4Sub;
	private JButton p4Trans;
	private JButton p4Set;
	// --- Static label panes (player and money captions) ---
	private JTextPane P1Text;
	private JTextPane P2Text;
	private JTextPane P3Text;
	private JTextPane P4Text;
	private JTextPane MoneyText;
	private JTextPane MoneyText2;
	private JTextPane MoneyText3;
	private JTextPane MoneyText4;
	// --- Character description boxes, one per player ---
	private JTextArea CharacterDescrip;
	private JTextArea CharacterDescrip2;
	private JTextArea CharacterDescrip3;
	private JTextArea CharacterDescrip4;
	// --- Dice-rolling controls ---
	private JTextPane times;
	private JTextPane txtSides;
	private JButton RollDice;
	// --- Shared note-taking area (scrollable) ---
	private JTextArea NoteBox;
	private JScrollPane NoteBoxSP;
	private JTextPane NotePane;
	// Package-private dice inputs — presumably read by the roll handler;
	// TODO confirm against the rest of the class.
	JTextPane RollNum;
	JTextPane RollSides;
public MainGui() {
setFont(new Font("Times New Roman", Font.BOLD, 14));
setTitle("Board Game Tracker");
setSize(630, 425);
setResizable(false);
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
getContentPane().setBackground(Color.BLACK);
setBackground(Color.RED);
getContentPane().setLayout(null);
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
}
catch (UnsupportedLookAndFeelException e) {
e.printStackTrace();
System.exit(1);
}
catch (ClassNotFoundException e) {
e.printStackTrace();
System.exit(1);
}
catch (InstantiationException e) {
e.printStackTrace();
System.exit(1);
}
catch (IllegalAccessException e) {
e.printStackTrace();
System.exit(1);
}
player1 = new JTextArea();
player1.setToolTipText("<html>This field holds the name of the first player.</html>");
player1.setBounds(10, 30, 300, 20);
player1.setFont(new Font("Times New Roman", Font.BOLD, 14));
player1.setBackground(Color.LIGHT_GRAY);
player1.setText("Player 1");
player1.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
player1.setText("");
}
});
getContentPane().add(player1);
p1Money = new JTextArea();
p1Money.setToolTipText("<html>This field holds the value of the first player's money.\r\n<br> This can only be changed with the \"Add\", \"Subtract\", \"Transfer\", and \"Set\" <br>buttons.</html>");
p1Money.setForeground(Color.BLACK);
p1Money.setBounds(320, 30, 80, 20);
p1Money.setEditable(false);
p1Money.setFont(new Font("Times New Roman", Font.BOLD, 14));
p1Money.setBackground(Color.DARK_GRAY);
p1Money.setText("Money");
getContentPane().add(p1Money);
p1Add = new JButton("Add");
p1Add.setToolTipText("<html>This button will add the value input with the first player's money.</html>");
p1Add.setBounds(20, 60, 90, 20);
p1Add.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p1Add.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String addValue = inputBox("How much would you like to add to " + p1Money.getText() + "?");
try{
addButton(e, p1Money, Integer.parseInt(addValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p1Add);
p1Sub = new JButton("Subtract");
p1Sub.setToolTipText("<html>This button will subtract the value input from the first player's money.</html>");
p1Sub.setBounds(120, 60, 90, 20);
p1Sub.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p1Sub.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String subValue = inputBox("How much would you like to subtract from " + p1Money.getText() + "?");
try{
subButton(e, p1Money, Integer.parseInt(subValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p1Sub);
p1Trans = new JButton("Transfer");
p1Trans.setToolTipText("<html>This button will give the input player an input ammount of dollars.<br> The value input will be added to the input player's money and<br>subtracted from the first player's money.</html>");
p1Trans.setBounds(220, 60, 90, 20);
p1Trans.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p1Trans.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
transButton(e, p1Money);
}
});
getContentPane().add(p1Trans);
p1Set = new JButton("Set");
p1Set.setToolTipText("<html>This button will set first player's money to the value input.</html>");
p1Set.setBounds(320, 60, 90, 20);
p1Set.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p1Set.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String setValue = JOptionPane.showInputDialog("What would you like to set the value to?");
setButton(e, p1Money, setValue);
}
});
getContentPane().add(p1Set);
player2 = new JTextArea();
player2.setToolTipText("<html>This field holds the name of the second player.</html>");
player2.setBounds(10, 110, 300, 20);
player2.setBackground(Color.LIGHT_GRAY);
player2.setFont(new Font("Times New Roman", Font.BOLD, 14));
player2.setText("Player 2");
player2.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
player2.setText("");
}
});
getContentPane().add(player2);
p2Money = new JTextArea();
p2Money.setToolTipText("<html>This field holds the value of the second player's money.\r\n<br> This can only be changed with the \"Add\", \"Subtract\", \"Transfer\", and \"Set\" <br>buttons.</html>");
p2Money.setForeground(Color.BLACK);
p2Money.setBounds(320, 110, 80, 20);
p2Money.setEditable(false);
p2Money.setFont(new Font("Times New Roman", Font.BOLD, 14));
p2Money.setBackground(Color.DARK_GRAY);
p2Money.setText("Money");
getContentPane().add(p2Money);
p2Add = new JButton("Add");
p2Add.setToolTipText("<html>This button will add the value input with <br>the second player's money.</html>");
p2Add.setBounds(20, 140, 90, 20);
p2Add.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p2Add.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String addValue = inputBox("How much would you like to add to " + p2Money.getText() + "?");
try{
addButton(e, p2Money, Integer.parseInt(addValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p2Add);
p2Sub = new JButton("Subtract");
p2Sub.setToolTipText("<html>This button will subtract the value input <br>from the second player's money.</html>");
p2Sub.setBounds(120, 140, 90, 20);
p2Sub.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p2Sub.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String subValue = inputBox("How much would you like to subtract from " + p2Money.getText() + "?");
try{
subButton(e, p2Money, Integer.parseInt(subValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p2Sub);
p2Trans = new JButton("Transfer");
p2Trans.setToolTipText("<html>This button will give the input player an input ammount of dollars.<br> The value input will be added to the input player's money and<br>subtracted from the second player's money.</html>");
p2Trans.setBounds(220, 140, 90, 20);
p2Trans.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p2Trans.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
transButton(e, p2Money);
}
});
getContentPane().add(p2Trans);
p2Set = new JButton("Set");
p2Set.setToolTipText("<html>This button will set second player's money to the value input.</html>");
p2Set.setBounds(320, 140, 90, 20);
p2Set.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p2Set.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String setValue = JOptionPane.showInputDialog("What would you like to set the value to?");
setButton(e, p2Money, setValue);
}
});
getContentPane().add(p2Set);
player3 = new JTextArea();
player3.setToolTipText("<html>This field holds the name of the third player.</html>");
player3.setBounds(10, 190, 300, 20);
player3.setText("Player 3");
player3.setFont(new Font("Times New Roman", Font.BOLD, 14));
player3.setBackground(Color.LIGHT_GRAY);
player3.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
player3.setText("");
}
});
getContentPane().add(player3);
p3Money = new JTextArea();
p3Money.setToolTipText("<html>This field holds the value of the third player's money.\r\n<br> This can only be changed with the \"Add\", \"Subtract\", \"Transfer\", and \"Set\" <br>buttons.</html>");
p3Money.setForeground(Color.BLACK);
p3Money.setBounds(320, 190, 80, 20);
p3Money.setText("Money");
p3Money.setFont(new Font("Times New Roman", Font.BOLD, 14));
p3Money.setEditable(false);
p3Money.setBackground(Color.DARK_GRAY);
getContentPane().add(p3Money);
p3Add = new JButton("Add");
p3Add.setToolTipText("<html>This button will add the value input with<br>the third player's money.</html>");
p3Add.setBounds(20, 220, 90, 20);
p3Add.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p3Add.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String addValue = inputBox("How much would you like to add to " + p3Money.getText() + "?");
try{
addButton(e, p3Money, Integer.parseInt(addValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p3Add);
p3Sub = new JButton("Subtract");
p3Sub.setToolTipText("<html>This button will subtract the value input from <br>the third player's money.</html>");
p3Sub.setBounds(120, 220, 90, 20);
p3Sub.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p3Sub.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String subValue = inputBox("How much would you like to subtract from " + p3Money.getText() + "?");
try{
subButton(e, p3Money, Integer.parseInt(subValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p3Sub);
p3Trans = new JButton("Transfer");
p3Trans.setToolTipText("<html>This button will give the input player an input ammount of dollars.<br> The value input will be added to the input player's money and<br>subtracted from the third player's money.</html>");
p3Trans.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
}
});
p3Trans.setBounds(220, 220, 90, 20);
p3Trans.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p3Trans.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
transButton(e, p3Money);
}
});
getContentPane().add(p3Trans);
p3Set = new JButton("Set");
p3Set.setToolTipText("<html>This button will set third player's money to the value input.</html>");
p3Set.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String setValue = JOptionPane.showInputDialog("What would you like to set the value to?");
setButton(e, p3Money, setValue);
}
});
p3Set.setBounds(320, 220, 90, 20);
p3Set.setFont(new Font("Times New Roman", Font.PLAIN, 15));
getContentPane().add(p3Set);
player4 = new JTextArea();
player4.setToolTipText("<html>This field holds the name of the fourth player.</html>");
player4.setText("Player 4");
player4.setFont(new Font("Times New Roman", Font.BOLD, 14));
player4.setBackground(Color.LIGHT_GRAY);
player4.setBounds(10, 270, 300, 20);
player4.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
player4.setText("");
}
});
getContentPane().add(player4);
p4Money = new JTextArea();
p4Money.setToolTipText("<html>This field holds the value of the fourth player's money.\r\n<br> This can only be changed with the \"Add\", \"Subtract\", \"Transfer\", and \"Set\" <br>buttons.</html>");
p4Money.setForeground(Color.BLACK);
p4Money.setText("Money");
p4Money.setFont(new Font("Times New Roman", Font.BOLD, 14));
p4Money.setEditable(false);
p4Money.setBackground(Color.DARK_GRAY);
p4Money.setBounds(320, 270, 80, 20);
getContentPane().add(p4Money);
p4Add = new JButton("Add");
p4Add.setToolTipText("<html>This button will add the value input with<br>the fourth player's money.</html>");
p4Add.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p4Add.setBounds(20, 300, 90, 20);
p4Add.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String addValue = inputBox("How much would you like to add to " + p4Money.getText() + "?");
try{
addButton(e, p4Money, Integer.parseInt(addValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p4Add);
p4Sub = new JButton("Subtract");
p4Sub.setToolTipText("<html>This button will subtract the value input from <br>the first player's money.</html>");
p4Sub.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p4Sub.setBounds(120, 300, 90, 20);
p4Sub.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String subValue = inputBox("How much would you like to subtract from " + p4Money.getText() + "?");
try{
subButton(e, p4Money, Integer.parseInt(subValue));
} catch (NumberFormatException ex){
error("That is not a number!");
}
}
});
getContentPane().add(p4Sub);
p4Trans = new JButton("Transfer");
p4Trans.setToolTipText("<html>This button will give the input player an input ammount of dollars.<br> The value input will be added to the input player's money and<br>subtracted from the fourth player's money.</html>");
p4Trans.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p4Trans.setBounds(220, 300, 90, 20);
p4Trans.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
transButton(e, p4Money);
}
});
getContentPane().add(p4Trans);
p4Set = new JButton("Set");
p4Set.setToolTipText("<html>This button will set fourth player's money to the value input.</html>");
p4Set.setFont(new Font("Times New Roman", Font.PLAIN, 15));
p4Set.setBounds(320, 300, 90, 20);
p4Set.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String setValue = JOptionPane.showInputDialog("What would you like to set the value to?");
setButton(e, p4Money, setValue);
}
});
getContentPane().add(p4Set);
P1Text = new JTextPane();
P1Text.setBackground(Color.BLACK);
P1Text.setForeground(Color.GRAY);
P1Text.setEnabled(false);
P1Text.setEditable(false);
P1Text.setFont(new Font("Times New Roman", Font.PLAIN, 12));
P1Text.setText("Player 1:");
P1Text.setBounds(10, 11, 54, 20);
getContentPane().add(P1Text);
P2Text = new JTextPane();
P2Text.setText("Player 2:");
P2Text.setForeground(Color.GRAY);
P2Text.setFont(new Font("Times New Roman", Font.PLAIN, 12));
P2Text.setEnabled(false);
P2Text.setEditable(false);
P2Text.setBackground(Color.BLACK);
P2Text.setBounds(10, 91, 54, 20);
getContentPane().add(P2Text);
P3Text = new JTextPane();
P3Text.setText("Player 3:");
P3Text.setForeground(Color.GRAY);
P3Text.setFont(new Font("Times New Roman", Font.PLAIN, 12));
P3Text.setEnabled(false);
P3Text.setEditable(false);
P3Text.setBackground(Color.BLACK);
P3Text.setBounds(10, 171, 54, 20);
getContentPane().add(P3Text);
P4Text = new JTextPane();
P4Text.setText("Player 4:");
P4Text.setForeground(Color.GRAY);
P4Text.setFont(new Font("Times New Roman", Font.PLAIN, 12));
P4Text.setEnabled(false);
P4Text.setEditable(false);
P4Text.setBackground(Color.BLACK);
P4Text.setBounds(10, 251, 54, 20);
getContentPane().add(P4Text);
MoneyText = new JTextPane();
MoneyText.setText("Money/Points:");
MoneyText.setForeground(Color.GRAY);
MoneyText.setFont(new Font("Times New Roman", Font.PLAIN, 12));
MoneyText.setEnabled(false);
MoneyText.setEditable(false);
MoneyText.setBackground(Color.BLACK);
MoneyText.setBounds(320, 11, 80, 20);
getContentPane().add(MoneyText);
MoneyText2 = new JTextPane();
MoneyText2.setText("Money/Points:");
MoneyText2.setForeground(Color.GRAY);
MoneyText2.setFont(new Font("Times New Roman", Font.PLAIN, 12));
MoneyText2.setEnabled(false);
MoneyText2.setEditable(false);
MoneyText2.setBackground(Color.BLACK);
MoneyText2.setBounds(320, 91, 80, 20);
getContentPane().add(MoneyText2);
MoneyText3 = new JTextPane();
MoneyText3.setText("Money/Points:");
MoneyText3.setForeground(Color.GRAY);
MoneyText3.setFont(new Font("Times New Roman", Font.PLAIN, 12));
MoneyText3.setEnabled(false);
MoneyText3.setEditable(false);
MoneyText3.setBackground(Color.BLACK);
MoneyText3.setBounds(320, 171, 80, 20);
getContentPane().add(MoneyText3);
MoneyText4 = new JTextPane();
MoneyText4.setText("Money/Points:");
MoneyText4.setForeground(Color.GRAY);
MoneyText4.setFont(new Font("Times New Roman", Font.PLAIN, 12));
MoneyText4.setEnabled(false);
MoneyText4.setEditable(false);
MoneyText4.setBackground(Color.BLACK);
MoneyText4.setBounds(320, 251, 80, 20);
getContentPane().add(MoneyText4);
CharacterDescrip = new JTextArea();
CharacterDescrip.setToolTipText("<html>This field holds a short description <br>of the first player's character.</html>");
CharacterDescrip.setFont(new Font("Times New Roman", Font.BOLD, 12));
CharacterDescrip.setBackground(Color.GRAY);
CharacterDescrip.setForeground(Color.BLACK);
CharacterDescrip.setText("P1 Character");
CharacterDescrip.setBounds(80, 10, 200, 20);
CharacterDescrip.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
CharacterDescrip.setText("");
}
});
getContentPane().add(CharacterDescrip);
CharacterDescrip2 = new JTextArea();
CharacterDescrip2.setToolTipText("<html>This field holds a short description <br>of the second player's character.</html>");
CharacterDescrip2.setFont(new Font("Times New Roman", Font.BOLD, 12));
CharacterDescrip2.setText("P2 Character");
CharacterDescrip2.setForeground(Color.BLACK);
CharacterDescrip2.setBackground(Color.GRAY);
CharacterDescrip2.setBounds(80, 90, 200, 20);
CharacterDescrip2.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
CharacterDescrip2.setText("");
}
});
getContentPane().add(CharacterDescrip2);
CharacterDescrip3 = new JTextArea();
CharacterDescrip3.setToolTipText("<html>This field holds a short description <br>of the third player's character.</html>");
CharacterDescrip3.setFont(new Font("Times New Roman", Font.BOLD, 12));
CharacterDescrip3.setText("P3 Character");
CharacterDescrip3.setForeground(Color.BLACK);
CharacterDescrip3.setBackground(Color.GRAY);
CharacterDescrip3.setBounds(80, 170, 200, 20);
CharacterDescrip3.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
CharacterDescrip3.setText("");
}
});
getContentPane().add(CharacterDescrip3);
CharacterDescrip4 = new JTextArea();
CharacterDescrip4.setToolTipText("<html>This field holds a short description <br>of the fourth player's character.</html>");
CharacterDescrip4.setFont(new Font("Times New Roman", Font.BOLD, 12));
CharacterDescrip4.setText("P4 Character");
CharacterDescrip4.setForeground(Color.BLACK);
CharacterDescrip4.setBackground(Color.GRAY);
CharacterDescrip4.setBounds(80, 250, 200, 20);
CharacterDescrip4.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent e){
CharacterDescrip4.setText("");
}
});
getContentPane().add(CharacterDescrip4);
RollDice = new JButton("Roll Dice");
RollDice.setToolTipText("<html>This button will roll a dice with an input number of<br>faces an input number of times.<br>The number of faces and the number of times are<br>input in the \"Time(s)\" field and the \"Sides\" field.</html>");
RollDice.setFont(new Font("Times New Roman", Font.PLAIN, 15));
RollDice.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
rollButton(e);
}
});
RollDice.setBounds(220, 350, 90, 20);
getContentPane().add(RollDice);
NoteBox = new JTextArea();
NoteBox.setToolTipText("<html>This field will hold notes you may need to set during the game.</html>");
NoteBoxSP = new JScrollPane(NoteBox);
NoteBoxSP.setToolTipText("<html>This field will hold notes you may need to set during the game.</html>");
NoteBox.setWrapStyleWord(true);
NoteBox.setLineWrap(true);
NoteBox.setText("Click to add notes.");
NoteBox.setFont(new Font("Times New Roman", Font.PLAIN, 12));
NoteBox.setBounds(420, 30, 190, 340);
NoteBoxSP.setWheelScrollingEnabled(true);
NoteBoxSP.getVerticalScrollBar();
NoteBoxSP.setBounds(420, 30, 190, 340);
getContentPane().add(NoteBoxSP);
NotePane = new JTextPane();
NotePane.setFont(new Font("Times New Roman", Font.PLAIN, 12));
NotePane.setEnabled(false);
NotePane.setEditable(false);
NotePane.setBackground(Color.BLACK);
NotePane.setForeground(Color.LIGHT_GRAY);
NotePane.setText("Notes:");
NotePane.setBounds(420, 10, 60, 20);
getContentPane().add(NotePane);
RollNum = new JTextPane();
RollNum.setToolTipText("<html>This field holds the amount of times you want to roll the dice.</html>");
RollNum.setFont(new Font("Times New Roman", Font.PLAIN, 13));
RollNum.setText("1");
RollNum.setBounds(90, 350, 25, 20);
getContentPane().add(RollNum);
RollSides = new JTextPane();
RollSides.setToolTipText("<html>This field holds the amount of sides your die has.</html>");
RollSides.setText("6");
RollSides.setBounds(170, 350, 30, 20);
getContentPane().add(RollSides);
times = new JTextPane();
times.setEditable(false);
times.setEnabled(false);
times.setBackground(Color.BLACK);
times.setForeground(Color.GRAY);
times.setFont(new Font("Times New Roman", Font.PLAIN, 12));
times.setText("Time(s):");
times.setBounds(55, 330, 55, 20);
getContentPane().add(times);
txtSides = new JTextPane();
txtSides.setText("Sides:");
txtSides.setForeground(Color.GRAY);
txtSides.setFont(new Font("Times New Roman", Font.PLAIN, 12));
txtSides.setEnabled(false);
txtSides.setEditable(false);
txtSides.setBackground(Color.BLACK);
txtSides.setBounds(145, 330, 40, 20);
getContentPane().add(txtSides);
}
/**
 * Sets a player's money field to the supplied raw input value.
 * Parsing/validation of the input is delegated to {@link AppMechanics#set}.
 *
 * @param e          the triggering button event (unused; kept for listener call sites)
 * @param text       the money text area to overwrite (one of p1Money..p4Money)
 * @param valueToSet the raw string entered by the user
 */
public void setButton(ActionEvent e, JTextArea text, String valueToSet) {
AppMechanics.set(text, valueToSet, this);
}
/**
 * Adds the given amount to the money value displayed in the player's field.
 * The arithmetic and field update are delegated to {@link AppMechanics#add}.
 *
 * @param e          the triggering button event (unused; kept for listener call sites)
 * @param text       the money text area to update (one of p1Money..p4Money)
 * @param valueToAdd the amount to add; callers parse it from user input before calling
 */
public void addButton(ActionEvent e, JTextArea text, int valueToAdd) {
AppMechanics.add(text, valueToAdd, this);
}
/**
 * Starts a money transfer involving the given player's money field.
 * Prompting for the target player/amount and the arithmetic are handled by
 * {@link AppMechanics#transfer} (per the button tooltips, the amount is added
 * to another player and subtracted from this one — confirm in AppMechanics).
 *
 * @param e    the triggering button event (unused; kept for listener call sites)
 * @param text the money text area of the player the transfer is taken from
 */
public void transButton(ActionEvent e, JTextArea text) {
AppMechanics.transfer(text, this);
}
/**
 * Shows a modal error dialog titled "Error" with the given message.
 * Used by the Add/Subtract listeners when the user input is not a number.
 *
 * @param error the message text to display
 */
public void error(String error){
JOptionPane.showMessageDialog(this, error, "Error", JOptionPane.ERROR_MESSAGE);
}
/**
 * Prompts the user with a modal input dialog titled "Input" and returns what
 * they typed, or {@code null} if they cancelled.
 *
 * <p>Bug fix: the previous code used the three-argument
 * {@code showInputDialog(parent, message, initialSelectionValue)} overload, so
 * the literal "Input" was pre-filled into the text field instead of being used
 * as the dialog title. The four-argument overload below makes "Input" the
 * title (mirroring how {@link #error(String)} titles its dialog "Error").
 *
 * @param text the prompt message shown to the user
 * @return the user's input, or {@code null} if the dialog was cancelled
 */
public String inputBox(String text){
return JOptionPane.showInputDialog(this, text, "Input", JOptionPane.QUESTION_MESSAGE);
}
/**
 * Subtracts the given amount from the money value displayed in the player's field.
 * The arithmetic and field update are delegated to {@link AppMechanics#subtract}.
 *
 * @param e          the triggering button event (unused; kept for listener call sites)
 * @param text       the money text area to update (one of p1Money..p4Money)
 * @param valueToSub the amount to subtract; callers parse it from user input before calling
 */
public void subButton(ActionEvent e, JTextArea text, int valueToSub) {
AppMechanics.subtract(text, valueToSub, this);
}
/**
 * Performs a dice roll via {@link AppMechanics#rollDice}; per the Roll Dice
 * button tooltip, the number of rolls and sides are read from the RollNum and
 * RollSides fields (confirm in AppMechanics).
 *
 * @param e the triggering button event (unused; kept for listener call sites)
 */
public void rollButton(ActionEvent e){
AppMechanics.rollDice(this);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.kafka.pubsub;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.errors.AuthorizationException;
import org.apache.kafka.common.errors.OutOfOrderSequenceException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyDescriptor.Builder;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.FlowFileFilters;
import org.apache.nifi.processor.util.StandardValidators;
import javax.xml.bind.DatatypeConverter;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import static org.apache.nifi.expression.ExpressionLanguageScope.FLOWFILE_ATTRIBUTES;
import static org.apache.nifi.expression.ExpressionLanguageScope.NONE;
import static org.apache.nifi.expression.ExpressionLanguageScope.VARIABLE_REGISTRY;
import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.FAILURE_STRATEGY;
import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.FAILURE_STRATEGY_ROLLBACK;
@Tags({"Apache", "Kafka", "Put", "Send", "Message", "PubSub", "2.0"})
@CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka using the Kafka 2.0 Producer API."
+ "The messages to send may be individual FlowFiles or may be delimited, using a "
+ "user-specified delimiter, such as a new-line. "
+ "The complementary NiFi processor for fetching messages is ConsumeKafka_2_0.")
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@DynamicProperty(name = "The name of a Kafka configuration property.", value = "The value of a given Kafka configuration property.",
description = "These properties will be added on the Kafka configuration after loading any provided configuration properties."
+ " In the event a dynamic property represents a property that was already set, its value will be ignored and WARN message logged."
+ " For the list of available Kafka properties please refer to: http://kafka.apache.org/documentation.html#configuration. ",
expressionLanguageScope = VARIABLE_REGISTRY)
@WritesAttribute(attribute = "msg.count", description = "The number of messages that were sent to Kafka for this FlowFile. This attribute is added only to "
+ "FlowFiles that are routed to success. If the <Message Demarcator> Property is not set, this will always be 1, but if the Property is set, it may "
+ "be greater than 1.")
public class PublishKafka_2_0 extends AbstractProcessor {
protected static final String MSG_COUNT = "msg.count";
static final AllowableValue DELIVERY_REPLICATED = new AllowableValue("all", "Guarantee Replicated Delivery",
"FlowFile will be routed to failure unless the message is replicated to the appropriate "
+ "number of Kafka Nodes according to the Topic configuration");
static final AllowableValue DELIVERY_ONE_NODE = new AllowableValue("1", "Guarantee Single Node Delivery",
"FlowFile will be routed to success if the message is received by a single Kafka node, "
+ "whether or not it is replicated. This is faster than <Guarantee Replicated Delivery> "
+ "but can result in data loss if a Kafka node crashes");
static final AllowableValue DELIVERY_BEST_EFFORT = new AllowableValue("0", "Best Effort",
"FlowFile will be routed to success after successfully writing the content to a Kafka node, "
+ "without waiting for a response. This provides the best performance but may result in data loss.");
static final AllowableValue ROUND_ROBIN_PARTITIONING = new AllowableValue(Partitioners.RoundRobinPartitioner.class.getName(),
Partitioners.RoundRobinPartitioner.class.getSimpleName(),
"Messages will be assigned partitions in a round-robin fashion, sending the first message to Partition 1, "
+ "the next Partition to Partition 2, and so on, wrapping as necessary.");
static final AllowableValue RANDOM_PARTITIONING = new AllowableValue("org.apache.kafka.clients.producer.internals.DefaultPartitioner",
"DefaultPartitioner", "Messages will be assigned to random partitions.");
static final AllowableValue EXPRESSION_LANGUAGE_PARTITIONING = new AllowableValue(Partitioners.ExpressionLanguagePartitioner.class.getName(), "Expression Language Partitioner",
"Interprets the <Partition> property as Expression Language that will be evaluated against each FlowFile. This Expression will be evaluated once against the FlowFile, " +
"so all Records in a given FlowFile will go to the same partition.");
static final AllowableValue UTF8_ENCODING = new AllowableValue("utf-8", "UTF-8 Encoded", "The key is interpreted as a UTF-8 Encoded string.");
static final AllowableValue HEX_ENCODING = new AllowableValue("hex", "Hex Encoded",
"The key is interpreted as arbitrary binary data that is encoded using hexadecimal characters with uppercase letters.");
static final PropertyDescriptor TOPIC = new Builder()
.name("topic")
.displayName("Topic Name")
.description("The name of the Kafka Topic to publish to.")
.required(true)
.addValidator(StandardValidators.NON_BLANK_VALIDATOR)
.expressionLanguageSupported(FLOWFILE_ATTRIBUTES)
.build();
static final PropertyDescriptor DELIVERY_GUARANTEE = new Builder()
.name(ProducerConfig.ACKS_CONFIG)
.displayName("Delivery Guarantee")
.description("Specifies the requirement for guaranteeing that a message is sent to Kafka. Corresponds to Kafka's 'acks' property.")
.required(true)
.expressionLanguageSupported(NONE)
.allowableValues(DELIVERY_BEST_EFFORT, DELIVERY_ONE_NODE, DELIVERY_REPLICATED)
.defaultValue(DELIVERY_REPLICATED.getValue())
.build();
static final PropertyDescriptor METADATA_WAIT_TIME = new Builder()
.name(ProducerConfig.MAX_BLOCK_MS_CONFIG)
.displayName("Max Metadata Wait Time")
.description("The amount of time publisher will wait to obtain metadata or wait for the buffer to flush during the 'send' call before failing the "
+ "entire 'send' call. Corresponds to Kafka's 'max.block.ms' property")
.required(true)
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(VARIABLE_REGISTRY)
.defaultValue("5 sec")
.build();
static final PropertyDescriptor ACK_WAIT_TIME = new Builder()
.name("ack.wait.time")
.displayName("Acknowledgment Wait Time")
.description("After sending a message to Kafka, this indicates the amount of time that we are willing to wait for a response from Kafka. "
+ "If Kafka does not acknowledge the message within this time period, the FlowFile will be routed to 'failure'.")
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(NONE)
.required(true)
.defaultValue("5 secs")
.build();
static final PropertyDescriptor MAX_REQUEST_SIZE = new Builder()
.name("max.request.size")
.displayName("Max Request Size")
.description("The maximum size of a request in bytes. Corresponds to Kafka's 'max.request.size' property and defaults to 1 MB (1048576).")
.required(true)
.addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
.defaultValue("1 MB")
.build();
static final PropertyDescriptor KEY = new Builder()
.name("kafka-key")
.displayName("Kafka Key")
.description("The Key to use for the Message. "
+ "If not specified, the flow file attribute 'kafka.key' is used as the message key, if it is present."
+ "Beware that setting Kafka key and demarcating at the same time may potentially lead to many Kafka messages with the same key."
+ "Normally this is not a problem as Kafka does not enforce or assume message and key uniqueness. Still, setting the demarcator and Kafka key at the same time poses a risk of "
+ "data loss on Kafka. During a topic compaction on Kafka, messages will be deduplicated based on this key.")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(FLOWFILE_ATTRIBUTES)
.build();
static final PropertyDescriptor KEY_ATTRIBUTE_ENCODING = new Builder()
.name("key-attribute-encoding")
.displayName("Key Attribute Encoding")
.description("FlowFiles that are emitted have an attribute named '" + KafkaProcessorUtils.KAFKA_KEY + "'. This property dictates how the value of the attribute should be encoded.")
.required(true)
.defaultValue(UTF8_ENCODING.getValue())
.allowableValues(UTF8_ENCODING, HEX_ENCODING)
.build();
static final PropertyDescriptor MESSAGE_DEMARCATOR = new Builder()
.name("message-demarcator")
.displayName("Message Demarcator")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(FLOWFILE_ATTRIBUTES)
.description("Specifies the string (interpreted as UTF-8) to use for demarcating multiple messages within "
+ "a single FlowFile. If not specified, the entire content of the FlowFile will be used as a single message. If specified, the "
+ "contents of the FlowFile will be split on this delimiter and each section sent as a separate Kafka message. "
+ "To enter special character such as 'new line' use CTRL+Enter or Shift+Enter, depending on your OS.")
.build();
static final PropertyDescriptor PARTITION_CLASS = new Builder()
.name(ProducerConfig.PARTITIONER_CLASS_CONFIG)
.displayName("Partitioner class")
.description("Specifies which class to use to compute a partition id for a message. Corresponds to Kafka's 'partitioner.class' property.")
.allowableValues(ROUND_ROBIN_PARTITIONING, RANDOM_PARTITIONING, EXPRESSION_LANGUAGE_PARTITIONING)
.defaultValue(RANDOM_PARTITIONING.getValue())
.required(false)
.build();
static final PropertyDescriptor PARTITION = new Builder()
.name("partition")
.displayName("Partition")
.description("Specifies which Partition Records will go to.")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(FLOWFILE_ATTRIBUTES)
.build();
static final PropertyDescriptor COMPRESSION_CODEC = new Builder()
.name(ProducerConfig.COMPRESSION_TYPE_CONFIG)
.displayName("Compression Type")
.description("This parameter allows you to specify the compression codec for all data generated by this producer.")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.allowableValues("none", "gzip", "snappy", "lz4")
.defaultValue("none")
.build();
static final PropertyDescriptor ATTRIBUTE_NAME_REGEX = new Builder()
.name("attribute-name-regex")
.displayName("Attributes to Send as Headers (Regex)")
.description("A Regular Expression that is matched against all FlowFile attribute names. "
+ "Any attribute whose name matches the regex will be added to the Kafka messages as a Header. "
+ "If not specified, no FlowFile attributes will be added as headers.")
.addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
.expressionLanguageSupported(NONE)
.required(false)
.build();
static final PropertyDescriptor USE_TRANSACTIONS = new Builder()
.name("use-transactions")
.displayName("Use Transactions")
.description("Specifies whether or not NiFi should provide Transactional guarantees when communicating with Kafka. If there is a problem sending data to Kafka, "
+ "and this property is set to false, then the messages that have already been sent to Kafka will continue on and be delivered to consumers. "
+ "If this is set to true, then the Kafka transaction will be rolled back so that those messages are not available to consumers. Setting this to true "
+ "requires that the <Delivery Guarantee> property be set to \"Guarantee Replicated Delivery.\"")
.expressionLanguageSupported(NONE)
.allowableValues("true", "false")
.defaultValue("true")
.required(true)
.build();
static final PropertyDescriptor TRANSACTIONAL_ID_PREFIX = new Builder()
.name("transactional-id-prefix")
.displayName("Transactional Id Prefix")
.description("When Use Transaction is set to true, KafkaProducer config 'transactional.id' will be a generated UUID and will be prefixed with this string.")
.expressionLanguageSupported(VARIABLE_REGISTRY)
.addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR)
.required(false)
.build();
static final PropertyDescriptor MESSAGE_HEADER_ENCODING = new Builder()
.name("message-header-encoding")
.displayName("Message Header Encoding")
.description("For any attribute that is added as a message header, as configured via the <Attributes to Send as Headers> property, "
+ "this property indicates the Character Encoding to use for serializing the headers.")
.addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
.defaultValue("UTF-8")
.required(false)
.build();
static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("FlowFiles for which all content was sent to Kafka.")
.build();
static final Relationship REL_FAILURE = new Relationship.Builder()
.name("failure")
.description("Any FlowFile that cannot be sent to Kafka will be routed to this Relationship")
.build();
private static final List<PropertyDescriptor> PROPERTIES;
private static final Set<Relationship> RELATIONSHIPS;
private volatile PublisherPool publisherPool = null;
static {
    // Supported properties: the shared Kafka properties first, then this
    // processor's own descriptors, in display order.
    final List<PropertyDescriptor> descriptorList =
            new ArrayList<>(KafkaProcessorUtils.getCommonPropertyDescriptors());
    Collections.addAll(descriptorList,
            TOPIC,
            DELIVERY_GUARANTEE,
            FAILURE_STRATEGY,
            USE_TRANSACTIONS,
            TRANSACTIONAL_ID_PREFIX,
            ATTRIBUTE_NAME_REGEX,
            MESSAGE_HEADER_ENCODING,
            KEY,
            KEY_ATTRIBUTE_ENCODING,
            MESSAGE_DEMARCATOR,
            MAX_REQUEST_SIZE,
            ACK_WAIT_TIME,
            METADATA_WAIT_TIME,
            PARTITION_CLASS,
            PARTITION,
            COMPRESSION_CODEC);
    PROPERTIES = Collections.unmodifiableList(descriptorList);

    // The processor routes each FlowFile to exactly one of these.
    final Set<Relationship> relationshipSet = new HashSet<>();
    Collections.addAll(relationshipSet, REL_SUCCESS, REL_FAILURE);
    RELATIONSHIPS = Collections.unmodifiableSet(relationshipSet);
}
/**
 * Returns the fixed set of relationships (success, failure) built in the
 * static initializer; the set is unmodifiable and shared.
 */
@Override
public Set<Relationship> getRelationships() {
return RELATIONSHIPS;
}
/**
 * Returns the fixed, ordered list of supported property descriptors built in
 * the static initializer; the list is unmodifiable and shared.
 */
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return PROPERTIES;
}
/**
 * Creates a descriptor for a user-added dynamic property. Dynamic properties
 * are passed straight through to the Kafka producer configuration, validated
 * against {@link ProducerConfig} and evaluated against the variable registry.
 *
 * @param propertyDescriptorName the name of the dynamic property the user typed
 * @return a dynamic, validated descriptor for that Kafka configuration key
 */
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    final Builder dynamicPropertyBuilder = new Builder()
            .name(propertyDescriptorName)
            .description("Specifies the value for '" + propertyDescriptorName + "' Kafka Configuration.")
            .dynamic(true)
            .expressionLanguageSupported(VARIABLE_REGISTRY)
            .addValidator(new KafkaProcessorUtils.KafkaConfigValidator(ProducerConfig.class));
    return dynamicPropertyBuilder.build();
}
/**
 * Validates cross-property constraints beyond per-property validators:
 * <ul>
 *   <li>transactions require the "all"/replicated delivery guarantee, and</li>
 *   <li>the Expression Language partitioner requires the Partition property.</li>
 * </ul>
 *
 * @param validationContext supplies the currently configured property values
 * @return the common-property validation results plus any violations found here
 */
@Override
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
    final List<ValidationResult> results =
            new ArrayList<>(KafkaProcessorUtils.validateCommonProperties(validationContext));

    // Transactions only work when Kafka acknowledges full replication ("acks=all").
    final boolean transactionsEnabled = validationContext.getProperty(USE_TRANSACTIONS).asBoolean();
    final String configuredGuarantee = validationContext.getProperty(DELIVERY_GUARANTEE).getValue();
    if (transactionsEnabled && !DELIVERY_REPLICATED.getValue().equals(configuredGuarantee)) {
        results.add(new ValidationResult.Builder()
            .subject("Delivery Guarantee")
            .valid(false)
            .explanation("In order to use Transactions, the Delivery Guarantee must be \"Guarantee Replicated Delivery.\" "
                + "Either change the <Use Transactions> property or the <Delivery Guarantee> property.")
            .build());
    }

    // The EL partitioner evaluates the <Partition> property, so it must be set.
    final String configuredPartitioner = validationContext.getProperty(PARTITION_CLASS).getValue();
    if (EXPRESSION_LANGUAGE_PARTITIONING.getValue().equals(configuredPartitioner)
            && validationContext.getProperty(PARTITION).getValue() == null) {
        results.add(new ValidationResult.Builder()
            .subject("Partition")
            .valid(false)
            .explanation("The <Partition> property must be specified if using the Expression Language Partitioning class")
            .build());
    }

    return results;
}
/**
 * Lazily creates the shared {@link PublisherPool} on first use.
 * {@code synchronized} guarantees that concurrent onTrigger threads
 * build at most one pool.
 */
private synchronized PublisherPool getPublisherPool(final ProcessContext context) {
    if (publisherPool == null) {
        publisherPool = createPublisherPool(context);
    }
    return publisherPool;
}
/**
 * Builds a new {@link PublisherPool} from the processor configuration.
 * Overridable so tests can substitute a pool implementation.
 *
 * @param context the process context holding the configured properties
 * @return a pool configured with serializers, size/timeout limits,
 *         optional transactions, and header-attribute matching
 */
protected PublisherPool createPublisherPool(final ProcessContext context) {
    // Assemble the raw Kafka producer configuration first.
    final Map<String, Object> producerConfig = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ProducerConfig.class, producerConfig);
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());

    final int maxRequestSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
    producerConfig.put("max.request.size", String.valueOf(maxRequestSize));

    final long ackWaitMillis = context.getProperty(ACK_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS).longValue();

    // Only attributes matching this regex (if set) are sent as Kafka message headers.
    final String attributeNameRegex = context.getProperty(ATTRIBUTE_NAME_REGEX).getValue();
    final Pattern attributePattern = (attributeNameRegex == null) ? null : Pattern.compile(attributeNameRegex);

    final boolean transactional = context.getProperty(USE_TRANSACTIONS).asBoolean();
    final String txIdPrefix = context.getProperty(TRANSACTIONAL_ID_PREFIX).evaluateAttributeExpressions().getValue();
    final Supplier<String> txIdSupplier = KafkaProcessorUtils.getTransactionalIdSupplier(txIdPrefix);

    final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
    final Charset headerCharset = Charset.forName(charsetName);

    return new PublisherPool(producerConfig, getLogger(), maxRequestSize, ackWaitMillis,
        transactional, txIdSupplier, attributePattern, headerCharset);
}
/**
 * Framework callback invoked when the processor is stopped: closes the
 * publisher pool (if one was created) and clears the reference so a
 * fresh pool is built on the next start.
 */
@OnStopped
public void closePool() {
    final PublisherPool pool = publisherPool;
    if (pool != null) {
        pool.close();
    }
    publisherPool = null;
}
/**
 * Publishes a batch of FlowFiles to Kafka, optionally inside a single Kafka
 * transaction. On failure the batch is routed according to the configured
 * failure strategy (rollback to the input queue or transfer to REL_FAILURE).
 *
 * @param context the process context with the configured properties
 * @param session the session supplying the FlowFile batch
 * @throws ProcessException propagated from the framework on session errors
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final boolean useDemarcator = context.getProperty(MESSAGE_DEMARCATOR).isSet();
    // Grab up to ~500 FlowFiles / 250 KB per trigger so one publisher lease
    // handles a whole batch.
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(250, DataUnit.KB, 500));
    if (flowFiles.isEmpty()) {
        return;
    }
    final PublisherPool pool = getPublisherPool(context);
    if (pool == null) {
        context.yield();
        return;
    }
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();
    final PublishFailureStrategy failureStrategy = getFailureStrategy(context);
    final long startTime = System.nanoTime();
    // The lease is AutoCloseable; closing it returns the publisher to the pool.
    try (final PublisherLease lease = pool.obtainPublisher()) {
        try {
            if (useTransactions) {
                lease.beginTransaction();
            }
            // Send each FlowFile to Kafka asynchronously.
            for (final FlowFile flowFile : flowFiles) {
                if (!isScheduled()) {
                    // If stopped, re-queue FlowFile instead of sending it
                    if (useTransactions) {
                        // Abort everything: roll back the session (re-queues all
                        // FlowFiles) and the Kafka transaction, then bail out.
                        session.rollback();
                        lease.rollback();
                        return;
                    }
                    // Non-transactional: re-queue just this FlowFile and keep going.
                    session.transfer(flowFile);
                    continue;
                }
                final byte[] messageKey = getMessageKey(flowFile, context);
                final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(flowFile).getValue();
                // The demarcator splits FlowFile content into multiple Kafka messages;
                // null means the whole content is one message.
                final byte[] demarcatorBytes;
                if (useDemarcator) {
                    demarcatorBytes = context.getProperty(MESSAGE_DEMARCATOR).evaluateAttributeExpressions(flowFile).getValue().getBytes(StandardCharsets.UTF_8);
                } else {
                    demarcatorBytes = null;
                }
                final Integer partition = getPartition(context, flowFile);
                session.read(flowFile, new InputStreamCallback() {
                    @Override
                    public void process(final InputStream rawIn) throws IOException {
                        try (final InputStream in = new BufferedInputStream(rawIn)) {
                            lease.publish(flowFile, in, messageKey, demarcatorBytes, topic, partition);
                        }
                    }
                });
            }
            // Complete the send
            final PublishResult publishResult = lease.complete();
            if (publishResult.isFailure()) {
                getLogger().info("Failed to send FlowFile to kafka; transferring to specified failure strategy");
                failureStrategy.routeFlowFiles(session, flowFiles);
                return;
            }
            // Transfer any successful FlowFiles.
            final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
            for (FlowFile success : flowFiles) {
                final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(success).getValue();
                final int msgCount = publishResult.getSuccessfulMessageCount(success);
                success = session.putAttribute(success, MSG_COUNT, String.valueOf(msgCount));
                session.adjustCounter("Messages Sent", msgCount, true);
                final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, topic);
                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
                session.transfer(success, REL_SUCCESS);
            }
        } catch (final ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
            // Fatal producer states: poison the lease so the pool discards it
            // rather than reusing a broken producer.
            lease.poison();
            getLogger().error("Failed to send messages to Kafka; will yield Processor and transfer FlowFiles to specified failure strategy");
            failureStrategy.routeFlowFiles(session, flowFiles);
            context.yield();
        }
    }
}
/**
 * Resolves the configured failure strategy: either roll the whole session
 * back (FlowFiles return to the input queue) or transfer the batch to the
 * failure relationship.
 */
private PublishFailureStrategy getFailureStrategy(final ProcessContext context) {
    final String strategy = context.getProperty(FAILURE_STRATEGY).getValue();
    if (FAILURE_STRATEGY_ROLLBACK.getValue().equals(strategy)) {
        return (session, flowFiles) -> session.rollback();
    }
    return (session, flowFiles) -> session.transfer(flowFiles, REL_FAILURE);
}
/**
 * Resolves the Kafka message key for a FlowFile. The explicit Key property
 * wins; otherwise the {@code kafka.key} attribute is used. The resulting
 * string is encoded either as UTF-8 or decoded from hex, per the
 * Key Attribute Encoding property.
 *
 * @return the key bytes, or {@code null} when no key is available
 */
private byte[] getMessageKey(final FlowFile flowFile, final ProcessContext context) {
    final String rawKey = context.getProperty(KEY).isSet()
        ? context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue()
        : flowFile.getAttribute(KafkaProcessorUtils.KAFKA_KEY);
    if (rawKey == null) {
        return null;
    }
    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    if (UTF8_ENCODING.getValue().equals(keyEncoding)) {
        return rawKey.getBytes(StandardCharsets.UTF_8);
    }
    // Any other encoding setting is treated as hexadecimal.
    return DatatypeConverter.parseHexBinary(rawKey);
}
/**
 * Computes an explicit partition for the FlowFile when the Expression
 * Language partitioner is selected; otherwise returns {@code null} so the
 * producer's default partitioning applies. The partition value's hash
 * ({@code Objects.hashCode}, 0 for null) is returned unchanged, matching
 * the original behavior.
 */
private Integer getPartition(final ProcessContext context, final FlowFile flowFile) {
    final String partitionClass = context.getProperty(PARTITION_CLASS).getValue();
    if (!EXPRESSION_LANGUAGE_PARTITIONING.getValue().equals(partitionClass)) {
        return null;
    }
    final String partitionValue = context.getProperty(PARTITION).evaluateAttributeExpressions(flowFile).getValue();
    return Objects.hashCode(partitionValue);
}
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.kie.services.impl;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.Function;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;
import org.jbpm.kie.services.impl.model.ProcessInstanceWithVarsDesc;
import org.jbpm.kie.services.impl.model.UserTaskInstanceWithPotOwnerDesc;
import org.jbpm.services.api.query.model.QueryParam;
import org.jbpm.shared.services.impl.QueryManager;
import org.jbpm.shared.services.impl.TransactionalCommandService;
import org.jbpm.shared.services.impl.commands.QueryNameCommand;
import org.kie.api.runtime.query.QueryContext;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
/**
 * Base implementation for the advanced runtime-data query services: builds
 * native SQL that filters process instances / user tasks by attributes,
 * process variables, task variables and potential owners, then loads the
 * matching rows through named queries.
 *
 * <p>Fixes over the previous revision:
 * <ul>
 *   <li>{@code queryProcessUserTasksByVariables} no longer mutates the
 *       caller-supplied attribute list (the old {@code removeIf} on the
 *       argument both surprised callers and threw
 *       {@code UnsupportedOperationException} for immutable lists);</li>
 *   <li>{@code computeVarValueParameter} now uses its {@code name} parameter
 *       instead of silently ignoring it (all call sites pass
 *       {@code expr.getColumn()}, so the produced names are unchanged).</li>
 * </ul>
 */
public abstract class AbstractAdvanceRuntimeDataServiceImpl {

    /** Named-query parameter holding the list of ids to fetch. */
    private static final String ID_LIST = "idList";

    private EntityManagerFactory emf;
    private TransactionalCommandService commandService;

    public AbstractAdvanceRuntimeDataServiceImpl() {
        // Register the named queries used by the collect* methods below.
        QueryManager.get().addNamedQueries("META-INF/Servicesorm.xml");
    }

    public void setCommandService(TransactionalCommandService commandService) {
        this.commandService = commandService;
    }

    public void setEmf(EntityManagerFactory emf) {
        this.emf = emf;
    }

    /**
     * Queries process instances by attributes and process variables only
     * (no task joins).
     */
    protected List<org.jbpm.services.api.model.ProcessInstanceWithVarsDesc> queryProcessByVariables(List<QueryParam> attributes,
                                                                                                    List<QueryParam> processVariables,
                                                                                                    int processType,
                                                                                                    String varPrefix,
                                                                                                    QueryContext queryContext) {
        BiFunction<StringBuilder, StringBuilder, String> mainSQLProducer = (derivedTables, globalWhere) -> "SELECT DISTINCT pil.processInstanceId " +
                " FROM ProcessInstanceLog pil \n " +
                derivedTables +
                " WHERE pil.processType = :processType " + globalWhere +
                " ORDER BY pil.processInstanceId ASC ";
        return queryProcessUserTasksByVariables(attributes, processVariables, emptyList(), emptyList(), processType, varPrefix, queryContext, mainSQLProducer, this::collectProcessData);
    }

    /**
     * Queries process instances that also match task-level criteria. A MODE
     * attribute of "HISTORY" switches the task source from the runtime Task
     * table to the AuditTaskImpl history table.
     */
    protected List<org.jbpm.services.api.model.ProcessInstanceWithVarsDesc> queryProcessByVariablesAndTask(List<QueryParam> attributes,
                                                                                                           List<QueryParam> processVariables,
                                                                                                           List<QueryParam> taskVariables,
                                                                                                           List<String> owners,
                                                                                                           int processType,
                                                                                                           String varPrefix,
                                                                                                           QueryContext queryContext) {
        BiFunction<StringBuilder, StringBuilder, String> mainSQLProducer;
        Optional<QueryParam> param = findQueryParamMode(attributes);
        if (param.isPresent() && param.get().getObjectValue().equals("HISTORY")) {
            mainSQLProducer = (derivedTables, globalWhere) -> "SELECT DISTINCT pil.processInstanceId " +
                    " FROM AuditTaskImpl task " +
                    " INNER JOIN ProcessInstanceLog pil ON pil.processInstanceId = task.processInstanceId \n " +
                    derivedTables +
                    " WHERE pil.processType = :processType " + globalWhere +
                    " ORDER BY pil.processInstanceId ASC ";
        } else {
            mainSQLProducer = (derivedTables, globalWhere) -> "SELECT DISTINCT pil.processInstanceId " +
                    " FROM Task task " +
                    " INNER JOIN ProcessInstanceLog pil ON pil.processInstanceId = task.processInstanceId \n " +
                    derivedTables +
                    " WHERE pil.processType = :processType " + globalWhere +
                    " ORDER BY pil.processInstanceId ASC ";
        }
        return queryProcessUserTasksByVariables(attributes, processVariables, taskVariables, owners, processType, varPrefix, queryContext, mainSQLProducer, this::collectProcessData);
    }

    /**
     * Queries user tasks by attributes, process/task variables and potential
     * owners. MODE=HISTORY selects audit tasks; otherwise runtime tasks.
     */
    protected List<org.jbpm.services.api.model.UserTaskInstanceWithPotOwnerDesc> queryUserTasksByVariables(List<QueryParam> attributes,
                                                                                                           List<QueryParam> processVariables,
                                                                                                           List<QueryParam> taskVariables,
                                                                                                           List<String> owners,
                                                                                                           int processType,
                                                                                                           String varPrefix,
                                                                                                           QueryContext queryContext) {
        BiFunction<StringBuilder, StringBuilder, String> mainSQLProducer;
        Optional<QueryParam> param = findQueryParamMode(attributes);
        if (param.isPresent() && param.get().getObjectValue().equals("HISTORY")) {
            mainSQLProducer = (derivedTables, globalWhere) -> "SELECT DISTINCT task.taskId as id " +
                    " FROM AuditTaskImpl task " +
                    " INNER JOIN ProcessInstanceLog pil ON pil.processInstanceId = task.processInstanceId \n " +
                    derivedTables +
                    " WHERE pil.processType = :processType " + globalWhere +
                    " ORDER BY task.taskId ASC ";
            return queryProcessUserTasksByVariables(attributes, processVariables, taskVariables, owners, processType, varPrefix, queryContext, mainSQLProducer, this::collectHistoryUserTaskData);
        } else {
            mainSQLProducer = (derivedTables, globalWhere) -> "SELECT DISTINCT task.id " +
                    " FROM Task task " +
                    " INNER JOIN ProcessInstanceLog pil ON pil.processInstanceId = task.processInstanceId \n " +
                    derivedTables +
                    " WHERE pil.processType = :processType " + globalWhere +
                    " ORDER BY task.id ASC ";
            return queryProcessUserTasksByVariables(attributes, processVariables, taskVariables, owners, processType, varPrefix, queryContext, mainSQLProducer, this::collectRuntimeUserTaskData);
        }
    }

    /** Finds the pseudo-parameter whose operator is "MODE", if present. */
    private Optional<QueryParam> findQueryParamMode(List<QueryParam> params) {
        return params.stream().filter(e -> e.getOperator().equals("MODE")).findFirst();
    }

    /**
     * Core query driver: builds the id-selection SQL from the supplied
     * producer, binds every variable/owner/attribute parameter, pages the
     * result, and hands the matching ids to {@code dataCollector}.
     *
     * @param mainSQLproducer builds the outer SELECT from the derived-table
     *        joins and the global WHERE fragment
     * @param dataCollector loads full result objects for the selected ids
     * @return the collected results, or an empty list when nothing matches
     */
    protected <R> List<R> queryProcessUserTasksByVariables(List<QueryParam> attributesArg,
                                                           List<QueryParam> processVariablesArg,
                                                           List<QueryParam> taskVariablesArg,
                                                           List<String> ownersArg,
                                                           int processType,
                                                           String varPrefix,
                                                           QueryContext queryContext,
                                                           BiFunction<StringBuilder, StringBuilder, String> mainSQLproducer,
                                                           BiFunction<List<Number>, String, List<R>> dataCollector) {
        // Work on a defensive copy: the previous code called removeIf on the
        // caller's list, mutating it as a side effect and failing outright on
        // immutable lists (e.g. List.of(...) or Collections.emptyList()).
        List<QueryParam> attributes = attributesArg != null ? new ArrayList<>(attributesArg) : new ArrayList<>();
        attributes.removeIf(param -> param.getOperator().equals("MODE"));
        List<QueryParam> processVariables = processVariablesArg != null ? processVariablesArg : emptyList();
        List<QueryParam> taskVariables = taskVariablesArg != null ? taskVariablesArg : emptyList();
        List<String> owners = ownersArg != null ? ownersArg : emptyList();

        StringBuilder globalWhere = new StringBuilder();
        StringBuilder derivedTables = new StringBuilder();
        // Tasks must carry ALL requested task variables (count >= distinct names).
        if (!taskVariables.isEmpty()) {
            String where = computeVariableExpression(taskVariables, "V", "name", "value");
            derivedTables.append("INNER JOIN (\n" +
                    " SELECT taskId \n" +
                    " FROM TaskVariableImpl \n" +
                    " WHERE " + where + " \n" +
                    " GROUP BY taskId \n" +
                    " HAVING COUNT(*) >= :NUMBER_OF_TASKVARS \n" +
                    ") TABLE_TASK_VAR ON TABLE_TASK_VAR.taskId = task.id \n");
        }
        // For process variables the self-join keeps only the LATEST value of
        // each variable (A2.id IS NULL means no later log entry exists).
        if (!processVariables.isEmpty()) {
            String where = computeVariableExpression(processVariables, "P", "A1.variableId", "A1.value");
            derivedTables.append("INNER JOIN (" +
                    "SELECT A1.processInstanceId \n" +
                    "FROM VariableInstanceLog A1 \n" +
                    "LEFT JOIN VariableInstanceLog A2 ON A1.processId = A2.processId AND A1.processInstanceId = A2.processInstanceId AND A1.variableInstanceId = A2.variableInstanceId AND A2.id > A1.id \n" +
                    "WHERE A2.id IS NULL AND (" + where + ") " +
                    "GROUP BY A1.processInstanceId " +
                    "HAVING COUNT(*) = :NUMBER_OF_PROCVARS " +
                    ") TABLE_PROC_VAR ON TABLE_PROC_VAR.processInstanceId = pil.processInstanceId \n");
        }
        // Tasks must have ALL listed potential owners assigned.
        if (!owners.isEmpty()) {
            derivedTables.append("INNER JOIN ( \n" +
                    " SELECT DISTINCT po.task_id \n" +
                    " FROM PeopleAssignments_PotOwners po \n" +
                    " WHERE po.entity_id IN (:owners) \n" +
                    " GROUP BY po.task_id \n" +
                    " HAVING COUNT(po.entity_id) = :num_owners \n" +
                    ") pot ON pot.task_id = task.id ");
        }
        attributes.stream().forEach((expr) -> globalWhere.append(" AND " + computeExpression(expr, expr.getColumn(), ":ATTR_" + expr.getColumn())));
        String procSQLString = mainSQLproducer.apply(derivedTables, globalWhere);

        List<Number> ids = emptyList();
        EntityManager entityManager = emf.createEntityManager();
        try {
            Query query = entityManager.createNativeQuery(procSQLString);
            // Bind the variable-name parameters, then the value parameters
            // (value params exist only for operators that take an operand).
            taskVariables.stream().forEach(var -> {
                String nameParam = computeVarNameParameter("V", var.getColumn());
                query.setParameter(nameParam, var.getColumn());
            });
            taskVariables.stream().filter(e -> e.getObjectValue() != null).forEach(var -> {
                String valueParam = computeVarValueParameter(var, "V", var.getColumn());
                query.setParameter(valueParam, var.getObjectValue());
            });
            if (!taskVariables.isEmpty()) {
                query.setParameter("NUMBER_OF_TASKVARS", taskVariables.stream().map(QueryParam::getColumn).distinct().count());
            }
            // Process-variable names are stored with the prefix prepended.
            processVariables.stream().forEach(var -> {
                String nameParam = computeVarNameParameter("P", var.getColumn());
                query.setParameter(nameParam, varPrefix + var.getColumn());
            });
            processVariables.stream().filter(e -> e.getObjectValue() != null).forEach(var -> {
                String valueParam = computeVarValueParameter(var, "P", var.getColumn());
                query.setParameter(valueParam, var.getObjectValue());
            });
            if (!processVariables.isEmpty()) {
                query.setParameter("NUMBER_OF_PROCVARS", processVariables.stream().map(QueryParam::getColumn).distinct().count());
            }
            if (!owners.isEmpty()) {
                List<String> distinctOwners = owners.stream().distinct().collect(toList());
                query.setParameter("num_owners", distinctOwners.size());
                query.setParameter("owners", distinctOwners);
            }
            attributes.stream().filter(e -> e.getObjectValue() != null).forEach(entry -> query.setParameter("ATTR_" + entry.getColumn(), entry.getObjectValue()));
            query.setParameter("processType", processType);
            addPagination(query, queryContext);
            ids = query.getResultList();
            if (ids.isEmpty()) {
                return emptyList();
            }
        } finally {
            if (entityManager.isOpen()) {
                entityManager.close();
            }
        }
        return dataCollector.apply(ids, varPrefix);
    }

    /**
     * Loads process descriptors plus their variables for the given ids.
     * Relies on both named queries returning rows ordered consistently by
     * process-instance id so the single merge pass below lines them up.
     */
    private List<org.jbpm.services.api.model.ProcessInstanceWithVarsDesc> collectProcessData(List<Number> ids, String varPrefix) {
        List<Object[]> procRows = commandService.execute(new QueryNameCommand<List<Object[]>>("GetProcessInstanceByIdList", singletonMap(ID_LIST, ids)));
        List<Object[]> varRows = commandService.execute(new QueryNameCommand<List<Object[]>>("GetVariablesByProcessInstanceIdList", singletonMap(ID_LIST, ids)));
        int currentVarIdx = 0;
        List<org.jbpm.services.api.model.ProcessInstanceWithVarsDesc> data = new ArrayList<>();
        for (Object[] row : procRows) {
            ProcessInstanceWithVarsDesc pwv = toProcessInstanceWithVarsDesc(row);
            Map<String, Object> vars = new HashMap<>();
            pwv.setVariables(vars);
            Map<String, Object> extra = new HashMap<>();
            pwv.setExtraData(extra);
            while (currentVarIdx < varRows.size() && row[0].equals(varRows.get(currentVarIdx)[0])) {
                String name = (String) varRows.get(currentVarIdx)[1];
                // Prefixed variables are "extra data"; the prefix is stripped.
                if (!varPrefix.isEmpty() && name.startsWith(varPrefix)) {
                    extra.put(name.substring(varPrefix.length()), varRows.get(currentVarIdx)[2]);
                } else {
                    vars.put(name, varRows.get(currentVarIdx)[2]);
                }
                currentVarIdx++;
            }
            data.add(pwv);
        }
        return data;
    }

    /**
     * Builds the OR-joined "(name = :N AND value &lt;op&gt; :V ...)" fragments for a
     * set of variable conditions, one parenthesized group per variable name.
     */
    private String computeVariableExpression(List<QueryParam> params, String prefix, String varField, String valueField) {
        // we get the variable names
        List<String> vars = params.stream().map(QueryParam::getColumn).distinct().collect(toList());
        List<String> conditions = new ArrayList<>();
        for (String var : vars) {
            StringBuilder condition = new StringBuilder();
            String nameParam = computeVarNameParameter(prefix, var);
            condition.append("(" + varField + " = :" + nameParam);
            // get the conditions for this variables
            List<QueryParam> varParams = params.stream().filter(e -> e.getColumn().equals(var)).collect(toList());
            varParams.stream().forEach(expr -> {
                String valueParam = computeVarValueParameter(expr, prefix, expr.getColumn());
                condition.append(" AND " + computeExpression(expr, valueField, ":" + valueParam));
            });
            condition.append(")");
            conditions.add(condition.toString());
        }
        return String.join(" OR ", conditions);
    }

    /** Parameter name for a variable-name bind, e.g. "V_NAME_foo". */
    private String computeVarNameParameter(String prefix, String name) {
        return prefix + "_NAME_" + name;
    }

    /**
     * Parameter name for a variable-value bind, e.g. "P_VALUE_EQUALS_TO_foo".
     * Previously the {@code name} argument was ignored in favor of
     * {@code expr.getColumn()}; every call site passes the column as
     * {@code name}, so using it directly produces identical names.
     */
    private String computeVarValueParameter(QueryParam expr, String prefix, String name) {
        return prefix + "_VALUE_" + expr.getOperator() + "_" + name;
    }

    /**
     * Renders a single comparison for the given operator. Operands are bind
     * parameters, never inlined values, so the SQL stays injection-safe.
     *
     * @throws UnsupportedOperationException for unknown operators
     */
    private String computeExpression(QueryParam expr, String leftOperand, String rightOperand) {
        switch (expr.getOperator()) {
            case "IS_NULL":
                return leftOperand + " IS NULL ";
            case "NOT_NULL":
                return leftOperand + " IS NOT NULL ";
            case "IN":
                return leftOperand + " IN (" + rightOperand + ") ";
            case "NOT_IN":
                return leftOperand + " NOT IN (" + rightOperand + ") ";
            case "TYPE":
                return " type = " + rightOperand + " ";
            case "EQUALS_TO":
                return leftOperand + " = " + rightOperand + " ";
            case "NOT_EQUALS_TO":
                return leftOperand + " <> " + rightOperand + " ";
            case "LIKE_TO":
                return leftOperand + " LIKE " + rightOperand + " ";
            default:
                throw new UnsupportedOperationException("Queryparam: " + expr + " not supported");
        }
    }

    /** Maps a GetProcessInstanceByIdList row to its descriptor. */
    private ProcessInstanceWithVarsDesc toProcessInstanceWithVarsDesc(Object[] row) {
        return new ProcessInstanceWithVarsDesc(((Number) row[0]).longValue(),
                                               (String) row[1],
                                               (String) row[2],
                                               (String) row[3],
                                               ((Number) row[4]).intValue(),
                                               (String) row[5],
                                               (Date) row[6],
                                               (String) row[7],
                                               (String) row[8]);
    }

    /** Applies offset/limit paging; a non-positive count means "no paging". */
    private void addPagination(Query query, QueryContext context) {
        if (context.getCount() > 0) {
            query.setFirstResult(context.getOffset());
            query.setMaxResults(context.getCount());
        }
    }

    private List<org.jbpm.services.api.model.UserTaskInstanceWithPotOwnerDesc> collectRuntimeUserTaskData(List<Number> ids, String varPrefix) {
        return collectUserTaskData("GetTasksByIdList", this::toUserTaskInstanceWithPotOwnerDesc, ids, varPrefix);
    }

    private List<org.jbpm.services.api.model.UserTaskInstanceWithPotOwnerDesc> collectHistoryUserTaskData(List<Number> ids, String varPrefix) {
        return collectUserTaskData("GetHistoryTasksByIdList", this::toHistoryUserTaskInstanceWithPotOwnerDesc, ids, varPrefix);
    }

    /**
     * Loads task descriptors and merges in task variables, potential owners
     * and process variables. Like {@code collectProcessData}, this assumes
     * every named query returns rows grouped in the same task-id order.
     *
     * @param taskRetriever name of the query producing the base task rows
     * @param mapper converts a base row into a task descriptor
     */
    private List<org.jbpm.services.api.model.UserTaskInstanceWithPotOwnerDesc> collectUserTaskData(String taskRetriever, Function<Object[], UserTaskInstanceWithPotOwnerDesc> mapper, List<Number> ids, String varPrefix) {
        // query data
        List<Object[]> taskRows = commandService.execute(new QueryNameCommand<List<Object[]>>(taskRetriever, singletonMap(ID_LIST, ids)));
        List<Object[]> varRows = commandService.execute(new QueryNameCommand<List<Object[]>>("GetTaskVariablesByTaskIdList", singletonMap(ID_LIST, ids)));
        List<Object[]> potRows = commandService.execute(new QueryNameCommand<List<Object[]>>("GetPotentialOwnersByTaskIdList", singletonMap(ID_LIST, ids)));
        List<Object[]> varProcSQLRows = commandService.execute(new QueryNameCommand<List<Object[]>>("GetProcessVariablesByTaskIdList", singletonMap(ID_LIST, ids)));
        int currentVarIdx = 0;
        int currentPotIdx = 0;
        int currentVarProcIdx = 0;
        List<org.jbpm.services.api.model.UserTaskInstanceWithPotOwnerDesc> data = new ArrayList<>();
        for (Object[] row : taskRows) {
            UserTaskInstanceWithPotOwnerDesc pwv = mapper.apply(row);
            // Column [1] is the variable type flag: 0 = input, otherwise output.
            while (currentVarIdx < varRows.size() && row[0].equals(varRows.get(currentVarIdx)[0])) {
                if (((Number) varRows.get(currentVarIdx)[1]).intValue() == 0) {
                    pwv.addInputdata((String) varRows.get(currentVarIdx)[2], varRows.get(currentVarIdx)[3]);
                } else {
                    pwv.addOutputdata((String) varRows.get(currentVarIdx)[2], varRows.get(currentVarIdx)[3]);
                }
                currentVarIdx++;
            }
            // Replace any owners the mapper may have set with the queried ones.
            pwv.getPotentialOwners().clear();
            while (currentPotIdx < potRows.size() && row[0].equals(potRows.get(currentPotIdx)[0])) {
                pwv.addPotOwner((String) potRows.get(currentPotIdx)[1]);
                currentPotIdx++;
            }
            while (currentVarProcIdx < varProcSQLRows.size() && row[0].equals(varProcSQLRows.get(currentVarProcIdx)[0])) {
                String name = (String) varProcSQLRows.get(currentVarProcIdx)[1];
                Object value = varProcSQLRows.get(currentVarProcIdx)[2];
                if (!varPrefix.isEmpty() && name.startsWith(varPrefix)) {
                    pwv.addExtraData(name.substring(varPrefix.length()), value);
                } else {
                    pwv.addProcessVariable(name, value);
                }
                currentVarProcIdx++;
            }
            data.add(pwv);
        }
        return data;
    }

    /** Maps a runtime-task row; columns documented inline. */
    private UserTaskInstanceWithPotOwnerDesc toUserTaskInstanceWithPotOwnerDesc(Object[] row) {
        return new UserTaskInstanceWithPotOwnerDesc(
                ((Number) row[0]).longValue(), // id
                (String) row[1], // task name
                (String) row[2], // formName
                (String) row[3], // subject
                (String) row[4], // actualOwner_id
                (String) null, // potOwner
                (String) row[5], // correlationKey
                (Date) row[6], // createdOn
                (String) row[7], // createdBy
                (Date) row[8], // expiration time
                (Date) null, // lastModificationDate
                (String) null, // lastModificationUser
                ((Number) row[9]).intValue(), //priority
                ((String) row[10]), // Status
                ((Number) row[11]).longValue(), // processInstanceId
                (String) row[12], // processId
                (String) row[13], // deployment Id
                (String) row[14] // instance description
        );
    }

    /** Maps a history (audit) task row; audit rows carry fewer columns. */
    private UserTaskInstanceWithPotOwnerDesc toHistoryUserTaskInstanceWithPotOwnerDesc(Object[] row) {
        return new UserTaskInstanceWithPotOwnerDesc(
                ((Number) row[0]).longValue(), // id
                (String) row[1], // task name
                (String) null, // formName
                (String) null, // subject
                (String) row[2], // actualOwner_id
                (String) null, // potOwner
                (String) row[3], // correlationKey
                (Date) row[4], // createdOn
                (String) row[5], // createdBy
                (Date) null, // expiration time
                (Date) null, // lastModificationDate
                (String) null, // lastModificationUser
                (Integer) null, //priority
                (String) row[6], // Status
                ((Number) row[7]).longValue(), // processInstanceId
                (String) row[8], // processId
                (String) row[9], // deployment Id
                (String) row[10] // instance description
        );
    }

    /**
     * Translates external column names to internal ones via the supplied
     * table. Unknown columns map to {@code null} (passed through unchanged
     * into the new QueryParam). TASK_OWNER is special-cased in MODE queries
     * because the task alias exposes the column as {@code task.actualOwner}.
     */
    protected List<QueryParam> translate(Map<String, String> translationTable, List<QueryParam> attributes) {
        if (attributes == null) {
            return emptyList();
        }
        List<QueryParam> translated = new ArrayList<>();
        for (QueryParam entry : attributes) {
            String column = translationTable.get(entry.getColumn());
            // small correction for this column as it is not called the same
            if (entry.getColumn() != null && entry.getColumn().equals("TASK_OWNER") && findQueryParamMode(attributes).isPresent()) {
                column = "task.actualOwner";
            }
            translated.add(new QueryParam(column, entry.getOperator(), entry.getValue()));
        }
        return translated;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions;
import com.intellij.ide.AboutPopupDescriptionProvider;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.nls.NlsMessages;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.AppUIUtil;
import com.intellij.ui.HyperlinkAdapter;
import com.intellij.ui.HyperlinkLabel;
import com.intellij.ui.LicensingFacade;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.scale.ScaleContext;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.JBFont;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.components.BorderLayoutPanel;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.awt.*;
import java.awt.datatransfer.StringSelection;
import java.awt.event.ActionEvent;
import java.text.DateFormat;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.List;
/**
* @author Konstantin Bulenkov
*/
public class AboutDialog extends DialogWrapper {
private final List<String> myInfo = new ArrayList<>();
public AboutDialog(@Nullable Project project) {
this(project, false);
}
public AboutDialog(Project project, boolean showDebugInfo) {
super(project, false);
String appName = ApplicationNamesInfo.getInstance().getFullProductName();
setResizable(false);
setTitle(IdeBundle.message("about.popup.about.app", appName));
init();
new DumbAwareAction() {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
copyAboutInfoToClipboard();
close(OK_EXIT_CODE);
}
}.registerCustomShortcutSet(CustomShortcutSet.fromString("meta C", "control C"), getContentPanel(), getDisposable());
}
@Override
protected @Nullable JComponent createCenterPanel() {
Icon appIcon = AppUIUtil.loadApplicationIcon(ScaleContext.create(), 60);
Box box = getText();
JLabel icon = new JLabel(appIcon);
icon.setVerticalAlignment(SwingConstants.TOP);
icon.setBorder(JBUI.Borders.empty(20, 12, 0, 24));
box.setBorder(JBUI.Borders.empty(20,0,0,20));
return JBUI.Panels.simplePanel()
.addToLeft(icon)
.addToCenter(box);
}
@Override
protected void createDefaultActions() {
super.createDefaultActions();
myCancelAction = new DialogWrapperAction(IdeBundle.message("button.copy")) {
{
putValue(Action.SHORT_DESCRIPTION, IdeBundle.message("description.copy.text.to.clipboard"));
}
@Override
protected void doAction(ActionEvent e) {
copyAboutInfoToClipboard();
close(OK_EXIT_CODE);
}
};
}
private void copyAboutInfoToClipboard() {
try {
CopyPasteManager.getInstance().setContents(new StringSelection(getExtendedAboutText()));
}
catch (Exception ignore) { }
}
public String getExtendedAboutText() {
return StringUtil.join(myInfo, "\n") + "\n" + AboutPopup.getExtraInfo();
}
@NonNls
private Box getText() {
Box lines = Box.createVerticalBox();
ApplicationInfoEx appInfo = ApplicationInfoEx.getInstanceEx();
String appName = appInfo.getFullApplicationName(); //NON-NLS
String edition = ApplicationNamesInfo.getInstance().getEditionName();
if (edition != null) appName += " (" + edition + ")";
addLine(lines, appName, JBFont.h3().asBold());
lines.add(Box.createVerticalStrut(10));
String buildInfo = IdeBundle.message("about.box.build.number", appInfo.getBuild().asString());
String buildInfoNonLocalized = MessageFormat.format("Build #{0}", appInfo.getBuild().asString());
Date timestamp = appInfo.getBuildDate().getTime();
if (appInfo.getBuild().isSnapshot()) {
String time = new SimpleDateFormat("HH:mm").format(timestamp);
buildInfo += IdeBundle.message("about.box.build.date.time", NlsMessages.formatDateLong(timestamp), time);
buildInfoNonLocalized += MessageFormat.format(", built on {0} at {1}",
DateFormat.getDateInstance(DateFormat.LONG, Locale.US).format(timestamp), time);
}
else {
buildInfo += IdeBundle.message("about.box.build.date", NlsMessages.formatDateLong(timestamp));
buildInfoNonLocalized += MessageFormat.format(", built on {0}",
DateFormat.getDateInstance(DateFormat.LONG, Locale.US).format(timestamp));
}
addLineWithoutLog(lines, buildInfo);
myInfo.add(buildInfoNonLocalized);
addEmptyLine(lines);
LicensingFacade la = LicensingFacade.getInstance();
if (la != null) {
final String licensedTo = la.getLicensedToMessage(); //NON-NLS
if (licensedTo != null) {
addLine(lines, licensedTo);
}
la.getLicenseRestrictionsMessages()
.forEach(text -> addLine(lines, text)); //NON-NLS
}
addEmptyLine(lines);
Properties properties = System.getProperties();
String javaVersion = properties.getProperty("java.runtime.version", properties.getProperty("java.version", "unknown"));
String arch = properties.getProperty("os.arch", "");
String jreInfo = IdeBundle.message("about.box.jre", javaVersion, arch);
addLineWithoutLog(lines, jreInfo);
myInfo.add(MessageFormat.format("Runtime version: {0} {1}", javaVersion, arch));
String vmVersion = properties.getProperty("java.vm.name", "unknown");
String vmVendor = properties.getProperty("java.vendor", "unknown");
String vmVendorInfo = IdeBundle.message("about.box.vm", vmVersion, vmVendor);
addLineWithoutLog(lines, vmVendorInfo);
myInfo.add(MessageFormat.format("VM: {0} by {1}", vmVersion, vmVendor));
addEmptyLine(lines);
//Print extra information from plugins
ExtensionPointName<AboutPopupDescriptionProvider> ep = new ExtensionPointName<>("com.intellij.aboutPopupDescriptionProvider");
for (AboutPopupDescriptionProvider aboutInfoProvider : ep.getExtensions()) {
String description = aboutInfoProvider.getDescription(); //NON-NLS
if (description != null) {
addLineWithoutLog(lines, description);
addEmptyLine(lines);
}
}
//Link to open-source projects
HyperlinkLabel openSourceSoftware = new HyperlinkLabel();
//noinspection DialogTitleCapitalization
openSourceSoftware.setTextWithHyperlink(IdeBundle.message("about.box.powered.by.open.source"));
openSourceSoftware.addHyperlinkListener(new HyperlinkAdapter() {
@Override
protected void hyperlinkActivated(HyperlinkEvent e) {
AboutPopup.showOpenSoftwareSources(ObjectUtils.notNull(AboutPopup.loadThirdPartyLibraries(), ""));
}
});
openSourceSoftware.setFont(getDefaultTextFont());
JBLabel poweredBy = new JBLabel(IdeBundle.message("about.box.powered.by") + " ").withFont(getDefaultTextFont());
BorderLayoutPanel panel = JBUI.Panels.simplePanel(openSourceSoftware).addToLeft(poweredBy);
panel.setAlignmentX(Component.LEFT_ALIGNMENT);
lines.add(panel);
//Copyright
addLineWithoutLog(lines, AboutPopup.getCopyrightText());
addEmptyLine(lines);
return lines;
}
/**
 * Returns the font shared by all plain text lines in the About popup.
 */
private static JBFont getDefaultTextFont() {
    final JBFont textFont = JBFont.medium();
    return textFont;
}
/**
 * Appends a fixed-height vertical gap to {@code box}, visually separating text sections.
 */
private static void addEmptyLine(Box box) {
    final Component spacer = Box.createVerticalStrut(18);
    box.add(spacer);
}
/**
 * Adds a text line in the given font and records it in {@code myInfo}.
 */
private void addLine(JComponent panel, @NlsContexts.Label String text, JBFont font) {
    final boolean recordInInfo = true;
    addLine(panel, text, font, recordInInfo);
}
/**
 * Adds a text line to {@code panel}; when {@code log} is true the raw text is
 * also appended to {@code myInfo} (the copyable plain-text summary).
 */
private void addLine(JComponent panel, @NlsContexts.Label String text, JBFont font, boolean log) {
    final JBLabel line = new JBLabel(text).withFont(font);
    panel.add(line);
    if (!log) {
        return;
    }
    myInfo.add(text);
}
/**
 * Adds a text line in the default font without recording it in {@code myInfo}.
 */
private void addLineWithoutLog(JComponent panel, @NlsContexts.Label String text) {
    final JBFont font = getDefaultTextFont();
    addLine(panel, text, font, false);
}
/**
 * Adds a text line in the default font, recording it in {@code myInfo}.
 */
private void addLine(JComponent panel, @NlsContexts.Label String text) {
    final JBFont font = getDefaultTextFont();
    addLine(panel, text, font);
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Andreas Alanko, Emil Nilsson, Sony Mobile Communications AB.
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.sonymobile.jenkins.plugins.gitlab.gitlabauth.acl;
import java.util.ArrayList;
import java.util.List;
import com.sonymobile.gitlab.model.GitLabAccessLevel;
/**
* Used to create a permission identity.
*
* @author Andreas Alanko
*/
/**
 * Used to create a permission identity.
 *
 * <p>An identity is a (type, id) pair, e.g. {@code GitLab:OWNER} or {@code User:alice}.
 * Instances are immutable.
 *
 * @author Andreas Alanko
 */
public class GitLabPermissionIdentity implements Comparable<GitLabPermissionIdentity> {
    /** The visual name of this identity. */
    public final String displayName;
    /** The identifier of this identity. */
    public final String id;
    /** The type of this identity. */
    public final IdentityType type;

    private GitLabPermissionIdentity(String displayName, String id, IdentityType type) {
        this.displayName = displayName;
        this.id = id;
        this.type = type;
    }

    private GitLabPermissionIdentity(GitLabAccessLevel accessLevel) {
        this(accessLevel.toString(), accessLevel.name(), IdentityType.GITLAB);
    }

    private GitLabPermissionIdentity(JenkinsAccessLevel accessLevel) {
        this(accessLevel.displayName, accessLevel.name(), IdentityType.JENKINS);
    }

    /**
     * Creates a permission identity for a user with the given username.
     *
     * @param username the username
     * @return the permission identity
     */
    public static GitLabPermissionIdentity user(String username) {
        return new GitLabPermissionIdentity(username, username, IdentityType.USER);
    }

    /**
     * Creates a permission identity for a group with the given group name.
     *
     * @param groupName the group name
     * @return the permission identity
     */
    public static GitLabPermissionIdentity group(String groupName) {
        return new GitLabPermissionIdentity(groupName, groupName, IdentityType.GROUP);
    }

    @Override
    public String toString() {
        return type + ":" + id;
    }

    /**
     * Checks if this object is equal to the given one.
     *
     * Two identities are equal when both their type and id are equal;
     * displayName is deliberately not part of equality.
     *
     * @return true if this and object are equal
     */
    @Override
    public boolean equals(Object object) {
        // instanceof is false for null, so no separate null check is needed.
        if (object instanceof GitLabPermissionIdentity) {
            GitLabPermissionIdentity other = (GitLabPermissionIdentity) object;
            return type.equals(other.type) && id.equals(other.id);
        }
        return false;
    }

    /**
     * Returns a hash code consistent with {@link #equals(Object)}, which
     * compares type and id only.
     *
     * @return the hash code
     */
    @Override
    public int hashCode() {
        return 31 * type.hashCode() + id.hashCode();
    }

    /**
     * Compares this permission identity object with the given.
     *
     * Identities of a "lower" type ordinal sort as greater (reversed ordinal
     * order); within the GITLAB and JENKINS types a lower access-level ordinal
     * likewise sorts as greater; other types fall back to lexicographic id order.
     *
     * Return a positive value if this is considered greater than the given.
     * Return 0 if objects are equal.
     * Return a negative value if this is considered lesser than the given.
     *
     * @param object the object to be compared
     * @return an integer with the result
     */
    @Override
    public int compareTo(GitLabPermissionIdentity object) {
        // Arguments are reversed on purpose: lower ordinal means "greater".
        int byType = Integer.compare(object.type.ordinal(), type.ordinal());
        if (byType != 0) {
            return byType;
        }
        if (type == IdentityType.GITLAB) {
            return Integer.compare(
                    GitLabAccessLevel.getAccessLevelWithName(object.id).ordinal(),
                    GitLabAccessLevel.getAccessLevelWithName(id).ordinal());
        } else if (type == IdentityType.JENKINS) {
            return Integer.compare(
                    JenkinsAccessLevel.getAccessLevelWithName(object.id).ordinal(),
                    JenkinsAccessLevel.getAccessLevelWithName(id).ordinal());
        } else {
            return id.compareTo(object.id);
        }
    }

    /**
     * Gets all static identities such as Jenkins:admin and GitLab:Owner.
     *
     * The boolean getGitLabIdentities specifies if the static GitLab identities
     * should be included.
     *
     * No user or group identities will be included.
     *
     * @param getGitLabIdentities if GitLab identities should be included
     * @return a list with permission identities
     */
    public static List<GitLabPermissionIdentity> getGlobalStaticPermissionIdentities(boolean getGitLabIdentities) {
        List<GitLabPermissionIdentity> list = new ArrayList<GitLabPermissionIdentity>();
        if (getGitLabIdentities) {
            for (GitLabAccessLevel accessLevel : GitLabAccessLevel.values()) {
                if (accessLevel != GitLabAccessLevel.NONE) {
                    // Insert at the front so the list ends up in reverse enum order.
                    list.add(0, getGitLabIdentityFromAccessLevel(accessLevel));
                }
            }
        }
        for (JenkinsAccessLevel accessLevel : JenkinsAccessLevel.values()) {
            list.add(0, getJenkinsIdentityFromAccessLevel(accessLevel));
        }
        return list;
    }

    /**
     * Gets the permission identity for the given GitLab access level.
     *
     * @param accessLevel the access level
     * @return the permission identity
     */
    public static GitLabPermissionIdentity getGitLabIdentityFromAccessLevel(GitLabAccessLevel accessLevel) {
        switch (accessLevel) {
            case OWNER:
                return GITLAB_OWNER;
            case MASTER:
                return GITLAB_MASTER;
            case DEVELOPER:
                return GITLAB_DEVELOPER;
            case REPORTER:
                return GITLAB_REPORTER;
            case GUEST:
                return GITLAB_GUEST;
            default:
                return new GitLabPermissionIdentity(GitLabAccessLevel.NONE);
        }
    }

    /**
     * Gets the permission identity for the given Jenkins access level.
     *
     * @param accessLevel the access level
     * @return the permission identity
     */
    public static GitLabPermissionIdentity getJenkinsIdentityFromAccessLevel(JenkinsAccessLevel accessLevel) {
        switch (accessLevel) {
            case ADMIN:
                return JENKINS_ADMIN;
            case LOGGED_IN:
                return JENKINS_LOGGED_IN;
            default:
                return JENKINS_ANONYMOUS;
        }
    }

    public static final GitLabPermissionIdentity JENKINS_ADMIN =
            new GitLabPermissionIdentity(JenkinsAccessLevel.ADMIN);
    public static final GitLabPermissionIdentity JENKINS_LOGGED_IN =
            new GitLabPermissionIdentity(JenkinsAccessLevel.LOGGED_IN);
    public static final GitLabPermissionIdentity JENKINS_ANONYMOUS =
            new GitLabPermissionIdentity(JenkinsAccessLevel.ANONYMOUS);
    public static final GitLabPermissionIdentity GITLAB_OWNER =
            new GitLabPermissionIdentity(GitLabAccessLevel.OWNER);
    public static final GitLabPermissionIdentity GITLAB_MASTER =
            new GitLabPermissionIdentity(GitLabAccessLevel.MASTER);
    public static final GitLabPermissionIdentity GITLAB_DEVELOPER =
            new GitLabPermissionIdentity(GitLabAccessLevel.DEVELOPER);
    public static final GitLabPermissionIdentity GITLAB_REPORTER =
            new GitLabPermissionIdentity(GitLabAccessLevel.REPORTER);
    public static final GitLabPermissionIdentity GITLAB_GUEST =
            new GitLabPermissionIdentity(GitLabAccessLevel.GUEST);

    /** Enum for different identity types. */
    public enum IdentityType {
        GROUP("Group"),
        USER("User"),
        GITLAB("GitLab"),
        JENKINS("Jenkins");

        /** The display name of the enum. */
        public final String displayName;

        private IdentityType(String displayName) {
            this.displayName = displayName;
        }
    }
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.info.hive;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Represents a Hive table structure.
* @see Builder
* @since 0.8.1
*/
/**
 * Represents a Hive table structure.
 * @see Builder
 * @since 0.8.1
 */
public class TableInfo {

    private static final String ID_NAME = "name";

    private static final String ID_COLUMNS = "columns";

    private static final String ID_COMMENT = "comment";

    private static final String ID_ROW = "row";

    private static final String ID_STORAGE = "storage";

    private static final String ID_PROPERTIES = "properties";

    @JsonProperty(ID_NAME)
    private final String name;

    @JsonProperty(ID_COLUMNS)
    private final List<ColumnInfo> columns;

    @JsonProperty(ID_COMMENT)
    private final String comment;

    @JsonProperty(ID_ROW)
    private final RowFormatInfo rowFormat;

    @JsonProperty(ID_STORAGE)
    private final StorageFormatInfo storageFormat;

    @JsonProperty(ID_PROPERTIES)
    @JsonInclude(Include.NON_EMPTY)
    private final Map<String, String> properties;

    /**
     * Creates a new instance.
     * Collections are defensively copied and wrapped as unmodifiable;
     * a null or empty input becomes the shared empty collection.
     * @param name the table name
     * @param columns the fields
     * @param comment the table comment (nullable)
     * @param rowFormat the row format (nullable)
     * @param storageFormat the storage format (nullable)
     * @param properties the table properties (nullable)
     */
    @JsonCreator
    public TableInfo(
            @JsonProperty(ID_NAME) String name,
            @JsonProperty(ID_COLUMNS) List<ColumnInfo> columns,
            @JsonProperty(ID_COMMENT) String comment,
            @JsonProperty(ID_ROW) RowFormatInfo rowFormat,
            @JsonProperty(ID_STORAGE) StorageFormatInfo storageFormat,
            @JsonProperty(ID_PROPERTIES) Map<String, String> properties) {
        this.name = name;
        this.columns = Optional.ofNullable(columns)
                .filter(it -> !it.isEmpty())
                .map(ArrayList::new)
                .map(Collections::unmodifiableList)
                .orElse(Collections.emptyList());
        this.comment = comment;
        this.rowFormat = rowFormat;
        this.storageFormat = storageFormat;
        this.properties = Optional.ofNullable(properties)
                .filter(it -> !it.isEmpty())
                .map(LinkedHashMap::new)
                .map(Collections::unmodifiableMap)
                .orElse(Collections.emptyMap());
    }

    /**
     * Returns the table name.
     * @return the table name
     */
    public String getName() {
        return name;
    }

    /**
     * Returns the columns.
     * @return the columns (unmodifiable, never {@code null})
     */
    public List<ColumnInfo> getColumns() {
        return columns;
    }

    /**
     * Returns the table comment.
     * @return the table comment, or {@code null} if it is not specified
     */
    public String getComment() {
        return comment;
    }

    /**
     * Returns the table row format.
     * @return the table row format, or {@code null} if it is not specified
     */
    public RowFormatInfo getRowFormat() {
        return rowFormat;
    }

    /**
     * Returns the table storage format.
     * @return the table storage format, or {@code null} if it is not specified
     */
    public StorageFormatInfo getStorageFormat() {
        return storageFormat;
    }

    /**
     * Returns the table properties.
     * @return the table properties (unmodifiable), or an empty map if it is not specified
     */
    public Map<String, String> getProperties() {
        return properties;
    }

    @Override
    public int hashCode() {
        // NOTE(review): 'comment' is excluded here, mirroring equals(); appears
        // intentional (comments don't affect table identity) -- confirm.
        final int prime = 31;
        int result = 1;
        result = prime * result + Objects.hashCode(name);
        result = prime * result + Objects.hashCode(columns);
        result = prime * result + Objects.hashCode(rowFormat);
        result = prime * result + Objects.hashCode(storageFormat);
        result = prime * result + Objects.hashCode(properties);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        TableInfo other = (TableInfo) obj;
        // 'comment' is excluded, consistent with hashCode().
        return Objects.equals(name, other.name)
                && Objects.equals(columns, other.columns)
                && Objects.equals(rowFormat, other.rowFormat)
                && Objects.equals(storageFormat, other.storageFormat)
                && Objects.equals(properties, other.properties);
    }

    @Override
    public String toString() {
        return String.format(
                "Table(%s)", //$NON-NLS-1$
                getName());
    }

    /**
     * Provides {@link TableInfo}.
     * @since 0.8.1
     */
    @FunctionalInterface
    public interface Provider {

        /**
         * Returns the {@link TableInfo} of this object.
         * @return the table schema
         */
        TableInfo getSchema();
    }

    /**
     * A builder for {@link TableInfo}.
     * @since 0.8.1
     */
    public static class Builder {

        private final String name;

        private final List<ColumnInfo> columns = new ArrayList<>();

        private final Map<String, String> properties = new LinkedHashMap<>();

        private String comment;

        private RowFormatInfo rowFormat;

        private StorageFormatInfo storageFormat;

        /**
         * Creates a new instance.
         * @param name the table name
         */
        public Builder(String name) {
            this.name = name;
        }

        /**
         * Adds a column.
         * @param info a new entry
         * @return this
         */
        public Builder withColumn(ColumnInfo info) {
            columns.add(info);
            return this;
        }

        /**
         * Adds a column.
         * @param columnName the column name
         * @param columnType the column type
         * @return this
         */
        public Builder withColumn(String columnName, FieldType columnType) {
            return withColumn(new ColumnInfo(columnName, columnType));
        }

        /**
         * Adds a column.
         * @param columnName the column name
         * @param columnType the column type name (must be a plain type)
         * @return this
         */
        public Builder withColumn(String columnName, FieldType.TypeName columnType) {
            return withColumn(new ColumnInfo(columnName, PlainType.of(columnType)));
        }

        /**
         * Sets the table comment.
         * @param text the comment text
         * @return this
         */
        public Builder withComment(String text) {
            comment = text;
            return this;
        }

        /**
         * Sets the row format.
         * @param info the row format information
         * @return this
         */
        public Builder withRowFormat(RowFormatInfo info) {
            rowFormat = info;
            return this;
        }

        /**
         * Sets the storage format.
         * @param info the storage format information
         * @return this
         */
        public Builder withStorageFormat(StorageFormatInfo info) {
            storageFormat = info;
            return this;
        }

        /**
         * Sets the storage format.
         * @param info the storage format information
         * @return this
         */
        public Builder withStorageFormat(StorageFormatInfo.FormatKind info) {
            return withStorageFormat(BuiltinStorageFormatInfo.of(info));
        }

        /**
         * Adds a table property.
         * @param key the property key
         * @param value the property value
         * @return this
         */
        public Builder withProperty(String key, String value) {
            properties.put(key, value);
            return this;
        }

        /**
         * Adds table properties.
         * @param additions the property map
         * @return this
         */
        public Builder withProperties(Map<String, String> additions) {
            properties.putAll(additions);
            return this;
        }

        /**
         * Builds a {@link TableInfo}.
         * @return the built object
         */
        public TableInfo build() {
            return new TableInfo(name, columns, comment, rowFormat, storageFormat, properties);
        }
    }
}
| |
/*
* Copyright 2013, The Sporting Exchange Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.betfair.platform.application;
import com.betfair.baseline.v2.BaselineSyncClient;
import com.betfair.baseline.v2.enumerations.SomeComplexObjectEnumParameterEnum;
import com.betfair.baseline.v2.exception.SimpleException;
import com.betfair.baseline.v2.to.*;
import com.betfair.cougar.api.LogExtension;
import com.betfair.cougar.api.LoggableEvent;
import com.betfair.cougar.api.RequestContext;
import com.betfair.cougar.api.RequestUUID;
import com.betfair.cougar.api.fault.CougarApplicationException;
import com.betfair.cougar.api.geolocation.GeoLocationDetails;
import com.betfair.cougar.api.security.IdentityChain;
import com.betfair.cougar.core.api.ev.ExecutionObserver;
import com.betfair.cougar.core.api.ev.ExecutionResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
/**
* Very basic class to test remote connectivity for client cougar to cougar calculation
*/
/**
 * Very basic class to test remote connectivity for client cougar to cougar calculation.
 * <p>
 * Triggered by the Spring {@link ContextRefreshedEvent}: runs a sequence of
 * smoke tests (simple call, timeout, async, exception, complex map, multithreaded)
 * against the configured {@link BaselineSyncClient}.
 */
public class CougarToCougarCommsTester implements ApplicationListener {
    private final static Logger LOGGER = LoggerFactory.getLogger(CougarToCougarCommsTester.class);

    /** Client used for all baseline service calls; injected via {@link #setClient}. */
    private BaselineSyncClient client;

    /** Minimal RequestContext stub: only geolocation is populated (lazily, from the local host). */
    private RequestContext ctx = new RequestContext() {
        private GeoLocationDetails geoDetails;

        @Override
        public GeoLocationDetails getLocation() {
            if (geoDetails == null) {
                try {
                    final List<String> thisAddress = Collections.singletonList(InetAddress.getLocalHost().getHostAddress());
                    geoDetails = new GeoLocationDetails() {
                        @Override
                        public String getCountry() {
                            return "UK";
                        }

                        @Override
                        public String getLocation() {
                            return null;
                        }

                        @Override
                        public String getInferredCountry() {
                            return null;
                        }

                        @Override
                        public String getRemoteAddr() {
                            return thisAddress.get(0);
                        }

                        @Override
                        public List<String> getResolvedAddresses() {
                            return thisAddress;
                        }

                        @Override
                        public boolean isLowConfidenceGeoLocation() {
                            return false;
                        }
                    };
                } catch (UnknownHostException ignored) {
                    // Best effort only: if the local address cannot be resolved,
                    // geoDetails stays null and the lookup is retried next call.
                }
            }
            return geoDetails;
        }

        @Override
        public void trace(String msg, Object... args) {
        }

        @Override
        public void addEventLogRecord(LoggableEvent record) {
        }

        @Override
        public void setRequestLogExtension(LogExtension extension) {
        }

        @Override
        public void setConnectedObjectLogExtension(LogExtension extension) {
        }

        @Override
        public LogExtension getConnectedObjectLogExtension() {
            return null;
        }

        @Override
        public IdentityChain getIdentity() {
            return null;
        }

        @Override
        public RequestUUID getRequestUUID() {
            return null;
        }

        @Override
        public Date getReceivedTime() {
            return null;
        }

        @Override
        public Date getRequestTime() {
            return null;
        }

        @Override
        public boolean traceLoggingEnabled() {
            return false;
        }

        @Override
        public int getTransportSecurityStrengthFactor() {
            return 0;
        }

        @Override
        public boolean isTransportSecure() {
            return false;
        }
    };

    /** Observer that logs any received result. */
    private ExecutionObserver obs = new ExecutionObserver() {
        @Override
        public void onResult(ExecutionResult result) {
            // NOTE(review): result.getResult() is assumed non-null here -- confirm.
            LOGGER.info("Result received: [" + result.getResult().toString() + "]");
        }
    };

    /** Observer that decodes a byte-array body response as UTF-8 and prints it. */
    private ExecutionObserver byteObs = new ExecutionObserver() {
        @Override
        public void onResult(ExecutionResult executionResult) {
            ByteOperationResponseObject obj = ((ByteOperationResponseObject) executionResult.getResult());
            String result = null;
            try {
                result = new String(obj.getBodyParameter(), "utf-8");
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is guaranteed by the JLS, but don't swallow silently if it ever happens.
                LOGGER.error("Failed to decode byte response as UTF-8", e);
            }
            System.out.println(result);
        }
    };

    /** Observer for void operations: the result is expected to be null. */
    private ExecutionObserver voidObs = new ExecutionObserver() {
        @Override
        public void onResult(ExecutionResult result) {
            LOGGER.info("Void result received this valid here should be null [" + result.getResult() + "]");
        }
    };

    /**
     * Runs the full smoke-test sequence once the Spring context has been refreshed.
     */
    public void onApplicationEvent(ApplicationEvent event) {
        if (event instanceof ContextRefreshedEvent) {
            System.out.println(new Date() + ": SIMPLE TEST:");
            simple();
            System.out.println(new Date() + ": SIMPLE DONE");
            System.out.println(new Date() + ": SIMPLE TEST:");
            timeout();
            System.out.println(new Date() + ": SIMPLE DONE");
            System.out.println(new Date() + ": ASYNC TEST:");
            async();
            System.out.println(new Date() + ": ASYNC DONE");
            System.out.println(new Date() + ": EXCEPTION TEST:");
            exception();
            System.out.println(new Date() + ": EXCEPTION DONE");
            System.out.println("SIMPLE TEST:");
            run();
            try {
                Thread.sleep(15000);
            } catch (InterruptedException e) {
                // Restore the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            }
            System.out.println("MT TEST:");
            multithreadedInstanceTest();
        }
    }

    /**
     * Exercises a simple synchronous GET call.
     */
    public void simple() {
        try {
            String message = getClient().testSimpleGet(ctx, "client foo").getMessage();
            System.out.println("testSimpleGet() returned " + message);
        } catch (Exception ex) {
            System.err.println("EXCEPTION RECEIVED: " + ex.getMessage());
            ex.printStackTrace();
        }
    }

    /**
     * Exercises a call that sleeps longer than its timeout (2s sleep, 1s timeout).
     */
    public void timeout() {
        try {
            getClient().testSleep(ctx, 2000L, 1000L);
        } catch (InterruptedException e) {
            // Restore the interrupt status; this is a test harness so also report it.
            Thread.currentThread().interrupt();
            LOGGER.error("Interrupted while waiting for testSleep", e);
        } catch (TimeoutException e) {
            // Expected outcome: the sleep exceeds the timeout.
            LOGGER.error("testSleep timed out (expected)", e);
        } catch (SimpleException e) {
            LOGGER.error("testSleep failed", e);
        }
    }

    /**
     * Placeholder for the async test; the actual call is currently disabled.
     */
    public void async() {
        try {
            // getClient().testSleep(ctx, 1000L);
            System.out.println("testGetTimeout() returned");
        } catch (Exception ex) {
            System.err.println("EXCEPTION RECEIVED: " + ex.getMessage());
            ex.printStackTrace();
        }
    }

    /**
     * Exercises the service's application-exception path.
     */
    public void exception() {
        try {
            getClient().testException(ctx, "Unauthorised", "SUSPENDED");
        } catch (CougarApplicationException cax) {
            System.out.println("Expected Application Exception: " + cax.getExceptionCode() + ", responseCode: " + cax.getResponseCode());
        } catch (Exception ex) {
            System.err.println("EXCEPTION RECEIVED: " + ex.getMessage());
            ex.printStackTrace();
        }
    }

    /**
     * Exercises a simple GET plus an operation taking a complex map body parameter.
     */
    public void run() {
        try {
            System.out.println("testSimpleGet() returned " + getClient().testSimpleGet(ctx, "FORWARD:foo").getMessage());
            SomeComplexObject sco = new SomeComplexObject();
            sco.setDateTimeParameter(new Date());
            sco.setEnumParameter(SomeComplexObjectEnumParameterEnum.BAR);
            sco.setListParameter(new ArrayList<String>() {{
                add("bob");
            }});
            sco.setStringParameter("Foo");
            Map<String, SomeComplexObject> m = new HashMap<String, SomeComplexObject>();
            m.put("wibble", sco);
            BodyParamComplexMapObject obj = new BodyParamComplexMapObject();
            obj.setComplexMap(m);
            ComplexMapOperationResponseObject response = getClient().complexMapOperation(ctx, obj);
            System.out.println(response);
        } catch (Exception ex) {
            System.err.println("EXCEPTION RECEIVED: ");
            ex.printStackTrace();
        }
    }

    /**
     * Spawns 10 threads which each loop forever echoing a distinct value and
     * verifying the response matches what was sent.
     */
    public void multithreadedInstanceTest() {
        for (int i = 0; i < 10; i++) {
            final String parrot = String.valueOf(i);
            Thread t = new Thread(new Runnable() {
                @Override
                public void run() {
                    final String expected = parrot;
                    try {
                        while (true) {
                            long time = System.currentTimeMillis();
                            SimpleResponse response = getClient().testSimpleGet(ctx, parrot);
                            System.out.println("MT response: " + response);
                            if (!response.getMessage().equals(expected)) {
                                // Echo mismatch: the response was routed to the wrong caller.
                                System.out.println("WTF!");
                            }
                        }
                    } catch (SimpleException e) {
                        LOGGER.error("An exception occurred", e);
                    }
                }
            }, "ARSE-" + i);
            t.start();
        }
    }

    /**
     * Returns the baseline client used for all calls.
     * @return the client
     */
    public BaselineSyncClient getClient() {
        return client;
    }

    /**
     * Sets the baseline client used for all calls.
     * @param client the client
     */
    public void setClient(BaselineSyncClient client) {
        this.client = client;
    }
}
| |
/*
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer.index.e2e;
import static com.opengamma.strata.basics.currency.Currency.JPY;
import static com.opengamma.strata.basics.date.DayCounts.ACT_ACT_ISDA;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.data.Offset.offset;
import java.time.LocalDate;
import org.junit.jupiter.api.Test;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.CurrencyAmount;
import com.opengamma.strata.basics.currency.FxMatrix;
import com.opengamma.strata.basics.date.DaysAdjustment;
import com.opengamma.strata.basics.date.HolidayCalendarId;
import com.opengamma.strata.basics.index.IborIndex;
import com.opengamma.strata.basics.index.IborIndices;
import com.opengamma.strata.basics.schedule.RollConventions;
import com.opengamma.strata.basics.value.Rounding;
import com.opengamma.strata.collect.DoubleArrayMath;
import com.opengamma.strata.collect.array.DoubleArray;
import com.opengamma.strata.market.curve.CurveMetadata;
import com.opengamma.strata.market.curve.CurveName;
import com.opengamma.strata.market.curve.Curves;
import com.opengamma.strata.market.curve.InterpolatedNodalCurve;
import com.opengamma.strata.market.curve.interpolator.CurveInterpolator;
import com.opengamma.strata.market.curve.interpolator.CurveInterpolators;
import com.opengamma.strata.market.param.CurrencyParameterSensitivities;
import com.opengamma.strata.market.sensitivity.PointSensitivities;
import com.opengamma.strata.pricer.index.DiscountingIborFutureProductPricer;
import com.opengamma.strata.pricer.index.DiscountingIborFutureTradePricer;
import com.opengamma.strata.pricer.rate.ImmutableRatesProvider;
import com.opengamma.strata.product.SecurityId;
import com.opengamma.strata.product.TradedPrice;
import com.opengamma.strata.product.index.IborFuture;
import com.opengamma.strata.product.index.ResolvedIborFuture;
import com.opengamma.strata.product.index.ResolvedIborFutureTrade;
/**
* End to end test on JPY-dominated trades.
* <p>
* The trades involve futures contract on 3m Euroyen TIBOR.
*/
public class IborFuturesJpyEnd2EndTest {
private static final ReferenceData REF_DATA = ReferenceData.standard();
private static final double ONE_PERCENT = 1e-2;
private static final double ONE_BASIS_POINT = 1e-4;
private static final double HUNDRED = 100d;
private static final double TOL = 1e-10;
private static final LocalDate TRADE_DATE = LocalDate.of(2016, 2, 10);
private static final LocalDate VALUATION = TRADE_DATE;
private static final double NOTIONAL = 100_000_000D;
private static final long QUANTITY = 1L;
private static final Rounding ROUNDING = Rounding.ofFractionalDecimalPlaces(2, 2);
private static final IborIndex TIBOR_EUROYEN_3M = IborIndices.JPY_TIBOR_EUROYEN_3M;
private static final HolidayCalendarId CALENDAR = TIBOR_EUROYEN_3M.getFixingCalendar();
// curve
private static final CurveInterpolator INTERPOLATOR = CurveInterpolators.LINEAR;
private static final double[] TIMES_FWD = new double[] {0.25956284153005466, 0.3442622950819672, 0.4262295081967213,
0.5109289617486339, 0.5956284153005464, 0.6775956284153005, 0.7622950819672131, 0.8442622950819673,
0.9290665468972228, 1.0139980537465378, 1.509888464705442, 2.013998053746538, 3.013998053746538,
4.0136612021857925, 5.013998053746538, 6.013998053746538, 7.013998053746538, 8.013661202185792, 9.013998053746537,
10.013998053746537, 12.013661202185792, 15.013998053746537, 20.01366120218579, 25.013998053746537,
30.013998053746537, 35.01399805374654, 40.013661202185794};
private static final double[] RATES_FWD = new double[] {0.0011675730858146669, 0.0013523368085561177,
0.001131049534280983, 8.583742384839034E-4, 8.470484635395976E-4, 6.767689351179286E-4, 5.413609808841509E-4,
4.4761361391297197E-4, 3.289892481714955E-4, 2.2424609900293199E-4, -5.55328245806963E-4, -5.582260143032516E-4,
-0.0013213322970379335, -9.99374212934137E-4, -6.786337899984415E-4, -1.0429800511592125E-4, 4.58740553910201E-4,
0.0010754297421556789, 0.0017620501351558286, 0.0024797272826230436, 0.003796406741560559, 0.005230486692524101,
0.009225065993903405, 0.01128357115778175, 0.012172498030710542, 0.012953839426947904, 0.013546707965288615};
private static final CurveName NAME_FWD = CurveName.of("fwdCurve");
private static final CurveMetadata META_FWD = Curves.zeroRates(NAME_FWD, ACT_ACT_ISDA);
private static final InterpolatedNodalCurve CURVE_FWD =
InterpolatedNodalCurve.of(META_FWD, DoubleArray.copyOf(TIMES_FWD), DoubleArray.copyOf(RATES_FWD), INTERPOLATOR);
private static final ImmutableRatesProvider RATES_PROVIDER = ImmutableRatesProvider.builder(VALUATION)
.fxRateProvider(FxMatrix.empty())
.iborIndexCurve(TIBOR_EUROYEN_3M, CURVE_FWD)
.build();
// futures in March 2016
private static final LocalDate REFERENCE_MAR = RollConventions.IMM.adjust(LocalDate.of(2016, 3, 1));
private static final LocalDate LAST_TRADE_MAR = DaysAdjustment.ofBusinessDays(-2, CALENDAR).adjust(REFERENCE_MAR, REF_DATA);
private static final SecurityId FUTURE_SECURITY_ID_MAR = SecurityId.of("OG-Ticker", "EUROYEN3M-FUT-MAR");
private static final ResolvedIborFuture FUTURE_PRODUCT_MAR = IborFuture.builder()
.securityId(FUTURE_SECURITY_ID_MAR)
.currency(JPY)
.notional(NOTIONAL)
.lastTradeDate(LAST_TRADE_MAR)
.index(TIBOR_EUROYEN_3M)
.rounding(ROUNDING)
.build()
.resolve(REF_DATA);
private static final double REF_PRICE_MAR = 99.9d;
private static final double REF_PRICE_MAR_DECIMAL = REF_PRICE_MAR * ONE_PERCENT;
private static final ResolvedIborFutureTrade FUTURE_TRADE_MAR = ResolvedIborFutureTrade.builder()
.product(FUTURE_PRODUCT_MAR)
.quantity(QUANTITY)
.tradedPrice(TradedPrice.of(TRADE_DATE, REF_PRICE_MAR_DECIMAL))
.build();
// futures in June 2016
private static final LocalDate REFERENCE_JUN = RollConventions.IMM.adjust(LocalDate.of(2016, 6, 1));
private static final LocalDate LAST_TRADE_JUN = DaysAdjustment.ofBusinessDays(-2, CALENDAR).adjust(REFERENCE_JUN, REF_DATA);
private static final SecurityId FUTURE_SECURITY_ID_JUN = SecurityId.of("OG-Ticker", "EUROYEN3M-FUT-JUN");
private static final ResolvedIborFuture FUTURE_PRODUCT_JUN = IborFuture.builder()
.securityId(FUTURE_SECURITY_ID_JUN)
.currency(JPY)
.notional(NOTIONAL)
.lastTradeDate(LAST_TRADE_JUN)
.index(TIBOR_EUROYEN_3M)
.rounding(ROUNDING)
.build()
.resolve(REF_DATA);
private static final double REF_PRICE_JUN = 100d;
private static final double REF_PRICE_JUN_DECIMAL = REF_PRICE_JUN * ONE_PERCENT;
private static final ResolvedIborFutureTrade FUTURE_TRADE_JUN = ResolvedIborFutureTrade.builder()
.product(FUTURE_PRODUCT_JUN)
.quantity(QUANTITY)
.tradedPrice(TradedPrice.of(TRADE_DATE, REF_PRICE_JUN_DECIMAL))
.build();
// futures in September 2016
private static final LocalDate REFERENCE_SEP = RollConventions.IMM.adjust(LocalDate.of(2016, 9, 1));
private static final LocalDate LAST_TRADE_SEP = DaysAdjustment.ofBusinessDays(-2, CALENDAR).adjust(REFERENCE_SEP, REF_DATA);
private static final SecurityId FUTURE_SECURITY_ID_SEP = SecurityId.of("OG-Ticker", "EUROYEN3M-FUT-SEP");
private static final ResolvedIborFuture FUTURE_PRODUCT_SEP = IborFuture.builder()
.securityId(FUTURE_SECURITY_ID_SEP)
.currency(JPY)
.notional(NOTIONAL)
.lastTradeDate(LAST_TRADE_SEP)
.index(TIBOR_EUROYEN_3M)
.rounding(ROUNDING)
.build()
.resolve(REF_DATA);
private static final double REF_PRICE_SEP = 100.075d;
private static final double REF_PRICE_SEP_DECIMAL = REF_PRICE_SEP * ONE_PERCENT;
private static final ResolvedIborFutureTrade FUTURE_TRADE_SEP = ResolvedIborFutureTrade.builder()
.product(FUTURE_PRODUCT_SEP)
.quantity(QUANTITY)
.tradedPrice(TradedPrice.of(TRADE_DATE, REF_PRICE_SEP_DECIMAL))
.build();
// futures in June 2017
private static final LocalDate REFERENCE_JUN_MID = RollConventions.IMM.adjust(LocalDate.of(2017, 6, 1));
private static final LocalDate LAST_TRADE_JUN_MID =
DaysAdjustment.ofBusinessDays(-2, CALENDAR).adjust(REFERENCE_JUN_MID, REF_DATA);
private static final SecurityId FUTURE_SECURITY_ID_JUN_MID = SecurityId.of("OG-Ticker", "EUROYEN3M-FUT-JUN_MID");
private static final ResolvedIborFuture FUTURE_PRODUCT_JUN_MID = IborFuture.builder()
.securityId(FUTURE_SECURITY_ID_JUN_MID)
.currency(JPY)
.notional(NOTIONAL)
.lastTradeDate(LAST_TRADE_JUN_MID)
.index(TIBOR_EUROYEN_3M)
.rounding(ROUNDING)
.build()
.resolve(REF_DATA);
private static final double REF_PRICE_JUN_MID = 100.165d;
private static final double REF_PRICE_JUN_MID_DECIMAL = REF_PRICE_JUN_MID * ONE_PERCENT;
private static final ResolvedIborFutureTrade FUTURE_TRADE_JUN_MID = ResolvedIborFutureTrade.builder()
.product(FUTURE_PRODUCT_JUN_MID)
.quantity(QUANTITY)
.tradedPrice(TradedPrice.of(TRADE_DATE, REF_PRICE_JUN_MID_DECIMAL))
.build();
// futures in March 2020
private static final LocalDate REFERENCE_MAR_LONG = RollConventions.IMM.adjust(LocalDate.of(2020, 3, 1));
private static final LocalDate LAST_TRADE_MAR_LONG =
DaysAdjustment.ofBusinessDays(-2, CALENDAR).adjust(REFERENCE_MAR_LONG, REF_DATA);
private static final SecurityId FUTURE_SECURITY_ID_MAR_LONG = SecurityId.of("OG-Ticker", "EUROYEN3M-FUT-MAR_LONG");
private static final ResolvedIborFuture FUTURE_PRODUCT_MAR_LONG = IborFuture.builder()
.securityId(FUTURE_SECURITY_ID_MAR_LONG)
.currency(JPY)
.notional(NOTIONAL)
.lastTradeDate(LAST_TRADE_MAR_LONG)
.index(TIBOR_EUROYEN_3M)
.rounding(ROUNDING)
.build()
.resolve(REF_DATA);
private static final double REF_PRICE_MAR_LONG = 99.815d;
private static final double REF_PRICE_MAR_LONG_DECIMAL = REF_PRICE_MAR_LONG * ONE_PERCENT;
private static final ResolvedIborFutureTrade FUTURE_TRADE_MAR_LONG = ResolvedIborFutureTrade.builder()
.product(FUTURE_PRODUCT_MAR_LONG)
.quantity(QUANTITY)
.tradedPrice(TradedPrice.of(TRADE_DATE, REF_PRICE_MAR_LONG_DECIMAL))
.build();
// pricers
private static final DiscountingIborFutureProductPricer PRODUCT_PRICER = DiscountingIborFutureProductPricer.DEFAULT;
private static final DiscountingIborFutureTradePricer TRADE_PRICER = DiscountingIborFutureTradePricer.DEFAULT;
@Test
public void price() {
// March 2016
double priceMar = PRODUCT_PRICER.price(FUTURE_PRODUCT_MAR, RATES_PROVIDER) * HUNDRED;
double priceMarRounded = FUTURE_PRODUCT_MAR.getRounding().round(priceMar);
assertThat(priceMar).isCloseTo(99.86104632448779, offset(TOL * HUNDRED));
assertThat(priceMarRounded).isCloseTo(99.86, offset(TOL * HUNDRED));
// June 2016
double priceJun = PRODUCT_PRICER.price(FUTURE_PRODUCT_JUN, RATES_PROVIDER) * HUNDRED;
double priceJunRounded = FUTURE_PRODUCT_JUN.getRounding().round(priceJun);
assertThat(priceJun).isCloseTo(99.98475152702353, offset(TOL * HUNDRED));
assertThat(priceJunRounded).isCloseTo(99.985, offset(TOL * HUNDRED));
// September 2016
double priceSep = PRODUCT_PRICER.price(FUTURE_PRODUCT_SEP, RATES_PROVIDER) * HUNDRED;
double priceSepRounded = FUTURE_PRODUCT_SEP.getRounding().round(priceSep);
assertThat(priceSep).isCloseTo(100.05224158750461, offset(TOL * HUNDRED));
assertThat(priceSepRounded).isCloseTo(100.05, offset(TOL * HUNDRED));
// June 2017
double priceJunMid = PRODUCT_PRICER.price(FUTURE_PRODUCT_JUN_MID, RATES_PROVIDER) * HUNDRED;
double priceJunMidRounded = FUTURE_PRODUCT_JUN_MID.getRounding().round(priceJunMid);
assertThat(priceJunMid).isCloseTo(100.18108895230915, offset(TOL * HUNDRED));
assertThat(priceJunMidRounded).isCloseTo(100.18, offset(TOL * HUNDRED));
// March 2020
double priceMarLong = PRODUCT_PRICER.price(FUTURE_PRODUCT_MAR_LONG, RATES_PROVIDER) * HUNDRED;
double priceMarLongRounded = FUTURE_PRODUCT_MAR_LONG.getRounding().round(priceMarLong);
assertThat(priceMarLong).isCloseTo(99.9582733152131, offset(TOL * HUNDRED));
assertThat(priceMarLongRounded).isCloseTo(99.96, offset(TOL * HUNDRED));
}
  @Test
  public void priceSensitivity() {
    // Price sensitivity bumped to a one-basis-point move and quoted in percent
    // (HUNDRED * ONE_BASIS_POINT). Expected arrays are hard-coded regression
    // values on the forward curve (NAME_FWD, JPY); only the curve nodes relevant
    // to each future's rate period are non-zero.
    // March 2016
    PointSensitivities pointMar =
        PRODUCT_PRICER.priceSensitivity(FUTURE_PRODUCT_MAR, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiMar = RATES_PROVIDER.parameterSensitivity(pointMar);
    double[] sensiFwdMar = new double[] {0.003743310260261194, -0.01313010637003998, -4.527622886220682E-4, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiMar.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdMar, TOL)).isTrue();
    // June 2016
    PointSensitivities pointJun =
        PRODUCT_PRICER.priceSensitivity(FUTURE_PRODUCT_JUN, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiJun = RATES_PROVIDER.parameterSensitivity(pointJun);
    double[] sensiFwdJun = new double[] {0.0, 0.01347165823324645, 0.0, 0.0, -0.023308107101966076, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiJun.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdJun, TOL)).isTrue();
    // September 2016
    PointSensitivities pointSep =
        PRODUCT_PRICER.priceSensitivity(FUTURE_PRODUCT_SEP, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiSep = RATES_PROVIDER.parameterSensitivity(pointSep);
    double[] sensiFwdSep = new double[] {0.0, 0.0, 0.0, 0.0, 0.01936692513656471, 0.0048417312841411864, 0.0,
        -0.027462515988551, -0.006580907103066675, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiSep.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdSep, TOL)).isTrue();
    // June 2017
    PointSensitivities pointJunMid =
        PRODUCT_PRICER.priceSensitivity(FUTURE_PRODUCT_JUN_MID, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiJunMid = RATES_PROVIDER.parameterSensitivity(pointJunMid);
    double[] sensiFwdJunMid = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.016154080854008976,
        -0.013340017892182532, -0.012672512226590141, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiJunMid.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdJunMid, TOL)).isTrue();
    // March 2020
    PointSensitivities pointMarLong =
        PRODUCT_PRICER.priceSensitivity(FUTURE_PRODUCT_MAR_LONG, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiMarLong = RATES_PROVIDER.parameterSensitivity(pointMarLong);
    double[] sensiFwdMarLong = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.03382389130551987, -0.043661005746776824, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiMarLong.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdMarLong, TOL)).isTrue();
  }
@Test
public void presentValue() {
// March 2016
CurrencyAmount pvMar = TRADE_PRICER.presentValue(FUTURE_TRADE_MAR, RATES_PROVIDER, REF_PRICE_MAR_DECIMAL);
assertThat(pvMar.getAmount()).isCloseTo(-9738.418878056109, offset(TOL * NOTIONAL));
// June 2016
CurrencyAmount pvJun = TRADE_PRICER.presentValue(FUTURE_TRADE_JUN, RATES_PROVIDER, REF_PRICE_JUN_DECIMAL);
assertThat(pvJun.getAmount()).isCloseTo(-3812.1182441189885, offset(TOL * NOTIONAL));
// September 2016
CurrencyAmount pvSep = TRADE_PRICER.presentValue(FUTURE_TRADE_SEP, RATES_PROVIDER, REF_PRICE_SEP_DECIMAL);
assertThat(pvSep.getAmount()).isCloseTo(-5689.603123847395, offset(TOL * NOTIONAL));
// June 2017
CurrencyAmount pvJunMid = TRADE_PRICER.presentValue(FUTURE_TRADE_JUN_MID, RATES_PROVIDER, REF_PRICE_JUN_MID_DECIMAL);
assertThat(pvJunMid.getAmount()).isCloseTo(4022.2380772829056, offset(TOL * NOTIONAL));
// March 2020
CurrencyAmount pvMarLong = TRADE_PRICER.presentValue(FUTURE_TRADE_MAR_LONG, RATES_PROVIDER, REF_PRICE_MAR_LONG_DECIMAL);
assertThat(pvMarLong.getAmount()).isCloseTo(35818.328803278506, offset(TOL * NOTIONAL));
}
  @Test
  public void presentValueSensitivity() {
    // Present value sensitivity (JPY) to a one-basis-point move. Expected arrays
    // are hard-coded regression values on the forward curve (NAME_FWD, JPY);
    // only the curve nodes relevant to each future's rate period are non-zero.
    // March 2016
    PointSensitivities pointMar =
        TRADE_PRICER.presentValueSensitivity(FUTURE_TRADE_MAR, RATES_PROVIDER).multipliedBy(ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiMar = RATES_PROVIDER.parameterSensitivity(pointMar);
    double[] sensiFwdMar = new double[] {935.8275650652985, -3282.5265925099943, -113.19057215551703, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiMar.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdMar, TOL)).isTrue();
    // June 2016
    PointSensitivities pointJun =
        TRADE_PRICER.presentValueSensitivity(FUTURE_TRADE_JUN, RATES_PROVIDER).multipliedBy(ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiJun = RATES_PROVIDER.parameterSensitivity(pointJun);
    double[] sensiFwdJun = new double[] {0.0, 3367.914558311612, 0.0, 0.0, -5827.0267754915185, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiJun.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdJun, TOL)).isTrue();
    // September 2016
    PointSensitivities pointSep =
        TRADE_PRICER.presentValueSensitivity(FUTURE_TRADE_SEP, RATES_PROVIDER).multipliedBy(ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiSep = RATES_PROVIDER.parameterSensitivity(pointSep);
    double[] sensiFwdSep = new double[] {0.0, 0.0, 0.0, 0.0, 4841.731284141179, 1210.432821035297, 0.0,
        -6865.62899713775, -1645.2267757666687, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiSep.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdSep, TOL)).isTrue();
    // June 2017
    PointSensitivities pointJunMid =
        TRADE_PRICER.presentValueSensitivity(FUTURE_TRADE_JUN_MID, RATES_PROVIDER).multipliedBy(ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiJunMid = RATES_PROVIDER.parameterSensitivity(pointJunMid);
    double[] sensiFwdJunMid = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4038.520213502244,
        -3335.0044730456357, -3168.128056647536, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiJunMid.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdJunMid, TOL)).isTrue();
    // March 2020
    PointSensitivities pointMarLong =
        TRADE_PRICER.presentValueSensitivity(FUTURE_TRADE_MAR_LONG, RATES_PROVIDER).multipliedBy(ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiMarLong = RATES_PROVIDER.parameterSensitivity(pointMarLong);
    double[] sensiFwdMarLong = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        8455.972826379962, -10915.251436694207, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiMarLong.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdMarLong, TOL)).isTrue();
  }
@Test
public void parSpread() {
// March 2016
double psMar = TRADE_PRICER.parSpread(FUTURE_TRADE_MAR, RATES_PROVIDER, REF_PRICE_MAR_DECIMAL) * HUNDRED;
assertThat(psMar).isCloseTo(-0.038953675512221064, offset(TOL * HUNDRED));
// June 2016
double psJun = TRADE_PRICER.parSpread(FUTURE_TRADE_JUN, RATES_PROVIDER, REF_PRICE_JUN_DECIMAL) * HUNDRED;
assertThat(psJun).isCloseTo(-0.01524847297647014, offset(TOL * HUNDRED));
// September 2016
double psSep = TRADE_PRICER.parSpread(FUTURE_TRADE_SEP, RATES_PROVIDER, REF_PRICE_SEP_DECIMAL) * HUNDRED;
assertThat(psSep).isCloseTo(-0.022758412495393898, offset(TOL * HUNDRED));
// June 2017
double psJunMid = TRADE_PRICER.parSpread(FUTURE_TRADE_JUN_MID, RATES_PROVIDER, REF_PRICE_JUN_MID_DECIMAL) * HUNDRED;
assertThat(psJunMid).isCloseTo(0.01608895230913454, offset(TOL * HUNDRED));
// March 2020
double psMarLong = TRADE_PRICER.parSpread(FUTURE_TRADE_MAR_LONG, RATES_PROVIDER, REF_PRICE_MAR_LONG_DECIMAL) * HUNDRED;
assertThat(psMarLong).isCloseTo(0.14327331521311049, offset(TOL * HUNDRED));
}
  @Test
  public void parSpreadSensitivity() {
    // Par spread sensitivity to a one-basis-point move, quoted in percent.
    // The expected arrays are identical to those in priceSensitivity() above:
    // the par spread moves one-for-one with the price.
    // March 2016
    PointSensitivities pointMar =
        TRADE_PRICER.parSpreadSensitivity(FUTURE_TRADE_MAR, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiMar = RATES_PROVIDER.parameterSensitivity(pointMar);
    double[] sensiFwdMar = new double[] {0.003743310260261194, -0.01313010637003998, -4.527622886220682E-4, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiMar.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdMar, TOL)).isTrue();
    // June 2016
    PointSensitivities pointJun =
        TRADE_PRICER.parSpreadSensitivity(FUTURE_TRADE_JUN, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiJun = RATES_PROVIDER.parameterSensitivity(pointJun);
    double[] sensiFwdJun = new double[] {0.0, 0.01347165823324645, 0.0, 0.0, -0.023308107101966076, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiJun.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdJun, TOL)).isTrue();
    // September 2016
    PointSensitivities pointSep =
        TRADE_PRICER.parSpreadSensitivity(FUTURE_TRADE_SEP, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiSep = RATES_PROVIDER.parameterSensitivity(pointSep);
    double[] sensiFwdSep = new double[] {0.0, 0.0, 0.0, 0.0, 0.01936692513656471, 0.0048417312841411864, 0.0,
        -0.027462515988551, -0.006580907103066675, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiSep.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdSep, TOL)).isTrue();
    // June 2017
    PointSensitivities pointJunMid =
        TRADE_PRICER.parSpreadSensitivity(FUTURE_TRADE_JUN_MID, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiJunMid = RATES_PROVIDER.parameterSensitivity(pointJunMid);
    double[] sensiFwdJunMid = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.016154080854008976,
        -0.013340017892182532, -0.012672512226590141, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiJunMid.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdJunMid,
        TOL)).isTrue();
    // March 2020
    PointSensitivities pointMarLong =
        TRADE_PRICER.parSpreadSensitivity(FUTURE_TRADE_MAR_LONG, RATES_PROVIDER).multipliedBy(HUNDRED * ONE_BASIS_POINT);
    CurrencyParameterSensitivities sensiMarLong = RATES_PROVIDER.parameterSensitivity(pointMarLong);
    double[] sensiFwdMarLong = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.03382389130551987, -0.043661005746776824, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    assertThat(DoubleArrayMath.fuzzyEquals(
        sensiMarLong.getSensitivity(NAME_FWD, JPY).getSensitivity().toArray(), sensiFwdMarLong, TOL)).isTrue();
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
public class ComparisonTests extends ScriptTestCase {
    /**
     * {@code ==} with {@code def} operands on both sides: numeric operands compare
     * by value after numeric promotion, booleans by value (null-safe — comparing a
     * boolean against null yields false rather than throwing), and reference types
     * by value equality (maps with equal content compare equal).
     */
    public void testDefEq() {
        // mixed integral widths promote to a common type before comparing
        assertEquals(true, exec("def x = (byte)7; def y = (int)7; return x == y"));
        assertEquals(true, exec("def x = (short)6; def y = (int)6; return x == y"));
        assertEquals(true, exec("def x = (char)5; def y = (int)5; return x == y"));
        assertEquals(true, exec("def x = (int)4; def y = (int)4; return x == y"));
        assertEquals(false, exec("def x = (long)5; def y = (int)3; return x == y"));
        assertEquals(false, exec("def x = (float)6; def y = (int)2; return x == y"));
        assertEquals(false, exec("def x = (double)7; def y = (int)1; return x == y"));
        // mixed integral/floating operands promote to floating point
        assertEquals(true, exec("def x = (byte)7; def y = (double)7; return x == y"));
        assertEquals(true, exec("def x = (short)6; def y = (double)6; return x == y"));
        assertEquals(true, exec("def x = (char)5; def y = (double)5; return x == y"));
        assertEquals(true, exec("def x = (int)4; def y = (double)4; return x == y"));
        assertEquals(false, exec("def x = (long)5; def y = (double)3; return x == y"));
        assertEquals(false, exec("def x = (float)6; def y = (double)2; return x == y"));
        assertEquals(false, exec("def x = (double)7; def y = (double)1; return x == y"));
        // booleans, including comparisons against null on either side
        assertEquals(false, exec("def x = false; def y = true; return x == y"));
        assertEquals(false, exec("def x = true; def y = false; return x == y"));
        assertEquals(false, exec("def x = true; def y = null; return x == y"));
        assertEquals(false, exec("def x = null; def y = true; return x == y"));
        assertEquals(true, exec("def x = true; def y = true; return x == y"));
        assertEquals(true, exec("def x = false; def y = false; return x == y"));
        // reference types: == compares content, not identity
        assertEquals(true, exec("def x = new HashMap(); def y = new HashMap(); return x == y"));
        assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); return x == y"));
        assertEquals(true, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x == y"));
        assertEquals(true, exec("def x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x == y"));
    }
    /**
     * Same expectations as {@link #testDefEq()} but with a statically typed left
     * operand and a {@code def} right operand, exercising the typed-LHS dispatch.
     * (No null-LHS boolean case: a primitive {@code boolean} cannot hold null.)
     */
    public void testDefEqTypedLHS() {
        // integral LHS vs def int RHS
        assertEquals(true, exec("byte x = (byte)7; def y = (int)7; return x == y"));
        assertEquals(true, exec("short x = (short)6; def y = (int)6; return x == y"));
        assertEquals(true, exec("char x = (char)5; def y = (int)5; return x == y"));
        assertEquals(true, exec("int x = (int)4; def y = (int)4; return x == y"));
        assertEquals(false, exec("long x = (long)5; def y = (int)3; return x == y"));
        assertEquals(false, exec("float x = (float)6; def y = (int)2; return x == y"));
        assertEquals(false, exec("double x = (double)7; def y = (int)1; return x == y"));
        // numeric LHS vs def double RHS
        assertEquals(true, exec("byte x = (byte)7; def y = (double)7; return x == y"));
        assertEquals(true, exec("short x = (short)6; def y = (double)6; return x == y"));
        assertEquals(true, exec("char x = (char)5; def y = (double)5; return x == y"));
        assertEquals(true, exec("int x = (int)4; def y = (double)4; return x == y"));
        assertEquals(false, exec("long x = (long)5; def y = (double)3; return x == y"));
        assertEquals(false, exec("float x = (float)6; def y = (double)2; return x == y"));
        assertEquals(false, exec("double x = (double)7; def y = (double)1; return x == y"));
        // boolean LHS, including a null def RHS (compares false, does not throw)
        assertEquals(false, exec("boolean x = false; def y = true; return x == y"));
        assertEquals(false, exec("boolean x = true; def y = false; return x == y"));
        assertEquals(false, exec("boolean x = true; def y = null; return x == y"));
        assertEquals(true, exec("boolean x = true; def y = true; return x == y"));
        assertEquals(true, exec("boolean x = false; def y = false; return x == y"));
        // reference types: content equality
        assertEquals(true, exec("Map x = new HashMap(); def y = new HashMap(); return x == y"));
        assertEquals(false, exec("Map x = new HashMap(); x.put(3, 3); def y = new HashMap(); return x == y"));
        assertEquals(true, exec("Map x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x == y"));
        assertEquals(true, exec("Map x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x == y"));
    }
    /**
     * Same expectations as {@link #testDefEq()} but with a {@code def} left operand
     * and a statically typed right operand, exercising the typed-RHS dispatch.
     * (No null-RHS boolean case: a primitive {@code boolean} cannot hold null.)
     */
    public void testDefEqTypedRHS() {
        // def LHS vs typed int RHS
        assertEquals(true, exec("def x = (byte)7; int y = (int)7; return x == y"));
        assertEquals(true, exec("def x = (short)6; int y = (int)6; return x == y"));
        assertEquals(true, exec("def x = (char)5; int y = (int)5; return x == y"));
        assertEquals(true, exec("def x = (int)4; int y = (int)4; return x == y"));
        assertEquals(false, exec("def x = (long)5; int y = (int)3; return x == y"));
        assertEquals(false, exec("def x = (float)6; int y = (int)2; return x == y"));
        assertEquals(false, exec("def x = (double)7; int y = (int)1; return x == y"));
        // def LHS vs typed double RHS
        assertEquals(true, exec("def x = (byte)7; double y = (double)7; return x == y"));
        assertEquals(true, exec("def x = (short)6; double y = (double)6; return x == y"));
        assertEquals(true, exec("def x = (char)5; double y = (double)5; return x == y"));
        assertEquals(true, exec("def x = (int)4; double y = (double)4; return x == y"));
        assertEquals(false, exec("def x = (long)5; double y = (double)3; return x == y"));
        assertEquals(false, exec("def x = (float)6; double y = (double)2; return x == y"));
        assertEquals(false, exec("def x = (double)7; double y = (double)1; return x == y"));
        // booleans, including a null def LHS (compares false, does not throw)
        assertEquals(false, exec("def x = false; boolean y = true; return x == y"));
        assertEquals(false, exec("def x = true; boolean y = false; return x == y"));
        assertEquals(false, exec("def x = null; boolean y = true; return x == y"));
        assertEquals(true, exec("def x = true; boolean y = true; return x == y"));
        assertEquals(true, exec("def x = false; boolean y = false; return x == y"));
        // reference types: content equality
        assertEquals(true, exec("def x = new HashMap(); Map y = new HashMap(); return x == y"));
        assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); Map y = new HashMap(); return x == y"));
        assertEquals(true, exec("def x = new HashMap(); x.put(3, 3); Map y = new HashMap(); y.put(3, 3); return x == y"));
        assertEquals(true, exec("def x = new HashMap(); Map y = x; x.put(3, 3); y.put(3, 3); return x == y"));
    }
    /**
     * {@code ===} (shallow/reference equality) with def operands: equal-content maps
     * are NOT {@code ===}; only the aliased map is. Numeric operands of different
     * runtime types are never {@code ===} even when numerically equal (no promotion),
     * while {@code (int)4 === (int)4} is true — presumably value comparison for
     * same-type primitives; confirm against the Painless operator spec.
     */
    public void testDefEqr() {
        assertEquals(false, exec("def x = (byte)7; def y = (int)7; return x === y"));
        assertEquals(false, exec("def x = (short)6; def y = (int)6; return x === y"));
        assertEquals(false, exec("def x = (char)5; def y = (int)5; return x === y"));
        assertEquals(true, exec("def x = (int)4; def y = (int)4; return x === y"));
        assertEquals(false, exec("def x = (long)5; def y = (int)3; return x === y"));
        assertEquals(false, exec("def x = (float)6; def y = (int)2; return x === y"));
        assertEquals(false, exec("def x = (double)7; def y = (int)1; return x === y"));
        assertEquals(false, exec("def x = false; def y = true; return x === y"));
        // distinct map instances are never ===, regardless of content
        assertEquals(false, exec("def x = new HashMap(); def y = new HashMap(); return x === y"));
        assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); return x === y"));
        assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x === y"));
        // the same instance via an alias IS ===
        assertEquals(true, exec("def x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x === y"));
    }
    /**
     * {@code !=} with def operands on both sides — the exact negation of the
     * {@code ==} cases in {@link #testDefEq()}.
     * NOTE(review): unlike testDefEq, there are no null-operand cases here —
     * confirm that gap in coverage is intentional.
     */
    public void testDefNe() {
        // numeric operands: promoted value comparison, negated
        assertEquals(false, exec("def x = (byte)7; def y = (int)7; return x != y"));
        assertEquals(false, exec("def x = (short)6; def y = (int)6; return x != y"));
        assertEquals(false, exec("def x = (char)5; def y = (int)5; return x != y"));
        assertEquals(false, exec("def x = (int)4; def y = (int)4; return x != y"));
        assertEquals(true, exec("def x = (long)5; def y = (int)3; return x != y"));
        assertEquals(true, exec("def x = (float)6; def y = (int)2; return x != y"));
        assertEquals(true, exec("def x = (double)7; def y = (int)1; return x != y"));
        assertEquals(false, exec("def x = (byte)7; def y = (double)7; return x != y"));
        assertEquals(false, exec("def x = (short)6; def y = (double)6; return x != y"));
        assertEquals(false, exec("def x = (char)5; def y = (double)5; return x != y"));
        assertEquals(false, exec("def x = (int)4; def y = (double)4; return x != y"));
        assertEquals(true, exec("def x = (long)5; def y = (double)3; return x != y"));
        assertEquals(true, exec("def x = (float)6; def y = (double)2; return x != y"));
        assertEquals(true, exec("def x = (double)7; def y = (double)1; return x != y"));
        // reference types: != is negated content equality
        assertEquals(false, exec("def x = new HashMap(); def y = new HashMap(); return x != y"));
        assertEquals(true, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); return x != y"));
        assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x != y"));
        assertEquals(false, exec("def x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x != y"));
        // booleans
        assertEquals(false, exec("def x = true; def y = true; return x != y"));
        assertEquals(true, exec("def x = true; def y = false; return x != y"));
        assertEquals(true, exec("def x = false; def y = true; return x != y"));
        assertEquals(false, exec("def x = false; def y = false; return x != y"));
    }
    /**
     * {@code !=} with a statically typed left operand and a {@code def} right
     * operand — the negation of {@link #testDefEqTypedLHS()} minus the null case.
     */
    public void testDefNeTypedLHS() {
        // integral LHS vs def int RHS
        assertEquals(false, exec("byte x = (byte)7; def y = (int)7; return x != y"));
        assertEquals(false, exec("short x = (short)6; def y = (int)6; return x != y"));
        assertEquals(false, exec("char x = (char)5; def y = (int)5; return x != y"));
        assertEquals(false, exec("int x = (int)4; def y = (int)4; return x != y"));
        assertEquals(true, exec("long x = (long)5; def y = (int)3; return x != y"));
        assertEquals(true, exec("float x = (float)6; def y = (int)2; return x != y"));
        assertEquals(true, exec("double x = (double)7; def y = (int)1; return x != y"));
        // numeric LHS vs def double RHS
        assertEquals(false, exec("byte x = (byte)7; def y = (double)7; return x != y"));
        assertEquals(false, exec("short x = (short)6; def y = (double)6; return x != y"));
        assertEquals(false, exec("char x = (char)5; def y = (double)5; return x != y"));
        assertEquals(false, exec("int x = (int)4; def y = (double)4; return x != y"));
        assertEquals(true, exec("long x = (long)5; def y = (double)3; return x != y"));
        assertEquals(true, exec("float x = (float)6; def y = (double)2; return x != y"));
        assertEquals(true, exec("double x = (double)7; def y = (double)1; return x != y"));
        // reference types: negated content equality
        assertEquals(false, exec("Map x = new HashMap(); def y = new HashMap(); return x != y"));
        assertEquals(true, exec("Map x = new HashMap(); x.put(3, 3); def y = new HashMap(); return x != y"));
        assertEquals(false, exec("Map x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x != y"));
        assertEquals(false, exec("Map x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x != y"));
        // booleans
        assertEquals(false, exec("boolean x = true; def y = true; return x != y"));
        assertEquals(true, exec("boolean x = true; def y = false; return x != y"));
        assertEquals(true, exec("boolean x = false; def y = true; return x != y"));
        assertEquals(false, exec("boolean x = false; def y = false; return x != y"));
    }
    /**
     * {@code !=} with a {@code def} left operand and a statically typed right
     * operand — the negation of {@link #testDefEqTypedRHS()} minus the null case.
     */
    public void testDefNeTypedRHS() {
        // def LHS vs typed int RHS
        assertEquals(false, exec("def x = (byte)7; int y = (int)7; return x != y"));
        assertEquals(false, exec("def x = (short)6; int y = (int)6; return x != y"));
        assertEquals(false, exec("def x = (char)5; int y = (int)5; return x != y"));
        assertEquals(false, exec("def x = (int)4; int y = (int)4; return x != y"));
        assertEquals(true, exec("def x = (long)5; int y = (int)3; return x != y"));
        assertEquals(true, exec("def x = (float)6; int y = (int)2; return x != y"));
        assertEquals(true, exec("def x = (double)7; int y = (int)1; return x != y"));
        // def LHS vs typed double RHS
        assertEquals(false, exec("def x = (byte)7; double y = (double)7; return x != y"));
        assertEquals(false, exec("def x = (short)6; double y = (double)6; return x != y"));
        assertEquals(false, exec("def x = (char)5; double y = (double)5; return x != y"));
        assertEquals(false, exec("def x = (int)4; double y = (double)4; return x != y"));
        assertEquals(true, exec("def x = (long)5; double y = (double)3; return x != y"));
        assertEquals(true, exec("def x = (float)6; double y = (double)2; return x != y"));
        assertEquals(true, exec("def x = (double)7; double y = (double)1; return x != y"));
        // reference types: negated content equality
        assertEquals(false, exec("def x = new HashMap(); Map y = new HashMap(); return x != y"));
        assertEquals(true, exec("def x = new HashMap(); x.put(3, 3); Map y = new HashMap(); return x != y"));
        assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); Map y = new HashMap(); y.put(3, 3); return x != y"));
        assertEquals(false, exec("def x = new HashMap(); Map y = x; x.put(3, 3); y.put(3, 3); return x != y"));
        // booleans
        assertEquals(false, exec("def x = true; boolean y = true; return x != y"));
        assertEquals(true, exec("def x = true; boolean y = false; return x != y"));
        assertEquals(true, exec("def x = false; boolean y = true; return x != y"));
        assertEquals(false, exec("def x = false; boolean y = false; return x != y"));
    }
    /**
     * {@code !==} (negated shallow/reference equality) with def operands — the exact
     * negation of the cases in {@link #testDefEqr()}.
     */
    public void testDefNer() {
        assertEquals(true, exec("def x = (byte)7; def y = (int)7; return x !== y"));
        assertEquals(true, exec("def x = (short)6; def y = (int)6; return x !== y"));
        assertEquals(true, exec("def x = (char)5; def y = (int)5; return x !== y"));
        assertEquals(false, exec("def x = (int)4; def y = (int)4; return x !== y"));
        assertEquals(true, exec("def x = (long)5; def y = (int)3; return x !== y"));
        assertEquals(true, exec("def x = (float)6; def y = (int)2; return x !== y"));
        assertEquals(true, exec("def x = (double)7; def y = (int)1; return x !== y"));
        // distinct map instances are always !==, regardless of content
        assertEquals(true, exec("def x = new HashMap(); def y = new HashMap(); return x !== y"));
        assertEquals(true, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); return x !== y"));
        assertEquals(true, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x !== y"));
        // the same instance via an alias is not !==
        assertEquals(false, exec("def x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x !== y"));
    }
    /**
     * {@code <} with def operands on both sides: operands are numerically promoted
     * and compared by value. Inputs are arranged so LHS < RHS for the first three
     * rows of each group, equal on the fourth, and LHS > RHS thereafter.
     */
    public void testDefLt() {
        // integral operands promoted to int
        assertEquals(true, exec("def x = (byte)1; def y = (int)7; return x < y"));
        assertEquals(true, exec("def x = (short)2; def y = (int)6; return x < y"));
        assertEquals(true, exec("def x = (char)3; def y = (int)5; return x < y"));
        assertEquals(false, exec("def x = (int)4; def y = (int)4; return x < y"));
        assertEquals(false, exec("def x = (long)5; def y = (int)3; return x < y"));
        assertEquals(false, exec("def x = (float)6; def y = (int)2; return x < y"));
        assertEquals(false, exec("def x = (double)7; def y = (int)1; return x < y"));
        // mixed integral/floating operands promoted to floating point
        assertEquals(true, exec("def x = (byte)1; def y = (double)7; return x < y"));
        assertEquals(true, exec("def x = (short)2; def y = (double)6; return x < y"));
        assertEquals(true, exec("def x = (char)3; def y = (double)5; return x < y"));
        assertEquals(false, exec("def x = (int)4; def y = (double)4; return x < y"));
        assertEquals(false, exec("def x = (long)5; def y = (double)3; return x < y"));
        assertEquals(false, exec("def x = (float)6; def y = (double)2; return x < y"));
        assertEquals(false, exec("def x = (double)7; def y = (double)1; return x < y"));
    }
    /**
     * Same expectations as {@link #testDefLt()} but with a statically typed left
     * operand and a {@code def} right operand.
     */
    public void testDefLtTypedLHS() {
        // typed integral LHS vs def int RHS
        assertEquals(true, exec("byte x = (byte)1; def y = (int)7; return x < y"));
        assertEquals(true, exec("short x = (short)2; def y = (int)6; return x < y"));
        assertEquals(true, exec("char x = (char)3; def y = (int)5; return x < y"));
        assertEquals(false, exec("int x = (int)4; def y = (int)4; return x < y"));
        assertEquals(false, exec("long x = (long)5; def y = (int)3; return x < y"));
        assertEquals(false, exec("float x = (float)6; def y = (int)2; return x < y"));
        assertEquals(false, exec("double x = (double)7; def y = (int)1; return x < y"));
        // typed numeric LHS vs def double RHS
        assertEquals(true, exec("byte x = (byte)1; def y = (double)7; return x < y"));
        assertEquals(true, exec("short x = (short)2; def y = (double)6; return x < y"));
        assertEquals(true, exec("char x = (char)3; def y = (double)5; return x < y"));
        assertEquals(false, exec("int x = (int)4; def y = (double)4; return x < y"));
        assertEquals(false, exec("long x = (long)5; def y = (double)3; return x < y"));
        assertEquals(false, exec("float x = (float)6; def y = (double)2; return x < y"));
        assertEquals(false, exec("double x = (double)7; def y = (double)1; return x < y"));
    }
public void testDefLtTypedRHS() {
assertEquals(true, exec("def x = (byte)1; int y = (int)7; return x < y"));
assertEquals(true, exec("def x = (short)2; int y = (int)6; return x < y"));
assertEquals(true, exec("def x = (char)3; int y = (int)5; return x < y"));
assertEquals(false, exec("def x = (int)4; int y = (int)4; return x < y"));
assertEquals(false, exec("def x = (long)5; int y = (int)3; return x < y"));
assertEquals(false, exec("def x = (float)6; int y = (int)2; return x < y"));
assertEquals(false, exec("def x = (double)7; int y = (int)1; return x < y"));
assertEquals(true, exec("def x = (byte)1; double y = (double)7; return x < y"));
assertEquals(true, exec("def x = (short)2; double y = (double)6; return x < y"));
assertEquals(true, exec("def x = (char)3; double y = (double)5; return x < y"));
assertEquals(false, exec("def x = (int)4; double y = (double)4; return x < y"));
assertEquals(false, exec("def x = (long)5; double y = (double)3; return x < y"));
assertEquals(false, exec("def x = (float)6; double y = (double)2; return x < y"));
assertEquals(false, exec("def x = (double)7; double y = (double)1; return x < y"));
}
public void testDefLte() {
assertEquals(true, exec("def x = (byte)1; def y = (int)7; return x <= y"));
assertEquals(true, exec("def x = (short)2; def y = (int)6; return x <= y"));
assertEquals(true, exec("def x = (char)3; def y = (int)5; return x <= y"));
assertEquals(true, exec("def x = (int)4; def y = (int)4; return x <= y"));
assertEquals(false, exec("def x = (long)5; def y = (int)3; return x <= y"));
assertEquals(false, exec("def x = (float)6; def y = (int)2; return x <= y"));
assertEquals(false, exec("def x = (double)7; def y = (int)1; return x <= y"));
assertEquals(true, exec("def x = (byte)1; def y = (double)7; return x <= y"));
assertEquals(true, exec("def x = (short)2; def y = (double)6; return x <= y"));
assertEquals(true, exec("def x = (char)3; def y = (double)5; return x <= y"));
assertEquals(true, exec("def x = (int)4; def y = (double)4; return x <= y"));
assertEquals(false, exec("def x = (long)5; def y = (double)3; return x <= y"));
assertEquals(false, exec("def x = (float)6; def y = (double)2; return x <= y"));
assertEquals(false, exec("def x = (double)7; def y = (double)1; return x <= y"));
}
public void testDefLteTypedLHS() {
assertEquals(true, exec("byte x = (byte)1; def y = (int)7; return x <= y"));
assertEquals(true, exec("short x = (short)2; def y = (int)6; return x <= y"));
assertEquals(true, exec("char x = (char)3; def y = (int)5; return x <= y"));
assertEquals(true, exec("int x = (int)4; def y = (int)4; return x <= y"));
assertEquals(false, exec("long x = (long)5; def y = (int)3; return x <= y"));
assertEquals(false, exec("float x = (float)6; def y = (int)2; return x <= y"));
assertEquals(false, exec("double x = (double)7; def y = (int)1; return x <= y"));
assertEquals(true, exec("byte x = (byte)1; def y = (double)7; return x <= y"));
assertEquals(true, exec("short x = (short)2; def y = (double)6; return x <= y"));
assertEquals(true, exec("char x = (char)3; def y = (double)5; return x <= y"));
assertEquals(true, exec("int x = (int)4; def y = (double)4; return x <= y"));
assertEquals(false, exec("long x = (long)5; def y = (double)3; return x <= y"));
assertEquals(false, exec("float x = (float)6; def y = (double)2; return x <= y"));
assertEquals(false, exec("double x = (double)7; def y = (double)1; return x <= y"));
}
public void testDefLteTypedRHS() {
assertEquals(true, exec("def x = (byte)1; int y = (int)7; return x <= y"));
assertEquals(true, exec("def x = (short)2; int y = (int)6; return x <= y"));
assertEquals(true, exec("def x = (char)3; int y = (int)5; return x <= y"));
assertEquals(true, exec("def x = (int)4; int y = (int)4; return x <= y"));
assertEquals(false, exec("def x = (long)5; int y = (int)3; return x <= y"));
assertEquals(false, exec("def x = (float)6; int y = (int)2; return x <= y"));
assertEquals(false, exec("def x = (double)7; int y = (int)1; return x <= y"));
assertEquals(true, exec("def x = (byte)1; double y = (double)7; return x <= y"));
assertEquals(true, exec("def x = (short)2; double y = (double)6; return x <= y"));
assertEquals(true, exec("def x = (char)3; double y = (double)5; return x <= y"));
assertEquals(true, exec("def x = (int)4; double y = (double)4; return x <= y"));
assertEquals(false, exec("def x = (long)5; double y = (double)3; return x <= y"));
assertEquals(false, exec("def x = (float)6; double y = (double)2; return x <= y"));
assertEquals(false, exec("def x = (double)7; double y = (double)1; return x <= y"));
}
public void testDefGt() {
assertEquals(false, exec("def x = (byte)1; def y = (int)7; return x > y"));
assertEquals(false, exec("def x = (short)2; def y = (int)6; return x > y"));
assertEquals(false, exec("def x = (char)3; def y = (int)5; return x > y"));
assertEquals(false, exec("def x = (int)4; def y = (int)4; return x > y"));
assertEquals(true, exec("def x = (long)5; def y = (int)3; return x > y"));
assertEquals(true, exec("def x = (float)6; def y = (int)2; return x > y"));
assertEquals(true, exec("def x = (double)7; def y = (int)1; return x > y"));
assertEquals(false, exec("def x = (byte)1; def y = (double)7; return x > y"));
assertEquals(false, exec("def x = (short)2; def y = (double)6; return x > y"));
assertEquals(false, exec("def x = (char)3; def y = (double)5; return x > y"));
assertEquals(false, exec("def x = (int)4; def y = (double)4; return x > y"));
assertEquals(true, exec("def x = (long)5; def y = (double)3; return x > y"));
assertEquals(true, exec("def x = (float)6; def y = (double)2; return x > y"));
assertEquals(true, exec("def x = (double)7; def y = (double)1; return x > y"));
}
public void testDefGtTypedLHS() {
assertEquals(false, exec("byte x = (byte)1; def y = (int)7; return x > y"));
assertEquals(false, exec("short x = (short)2; def y = (int)6; return x > y"));
assertEquals(false, exec("char x = (char)3; def y = (int)5; return x > y"));
assertEquals(false, exec("int x = (int)4; def y = (int)4; return x > y"));
assertEquals(true, exec("long x = (long)5; def y = (int)3; return x > y"));
assertEquals(true, exec("float x = (float)6; def y = (int)2; return x > y"));
assertEquals(true, exec("double x = (double)7; def y = (int)1; return x > y"));
assertEquals(false, exec("byte x = (byte)1; def y = (double)7; return x > y"));
assertEquals(false, exec("short x = (short)2; def y = (double)6; return x > y"));
assertEquals(false, exec("char x = (char)3; def y = (double)5; return x > y"));
assertEquals(false, exec("int x = (int)4; def y = (double)4; return x > y"));
assertEquals(true, exec("long x = (long)5; def y = (double)3; return x > y"));
assertEquals(true, exec("float x = (float)6; def y = (double)2; return x > y"));
assertEquals(true, exec("double x = (double)7; def y = (double)1; return x > y"));
}
public void testDefGtTypedRHS() {
assertEquals(false, exec("def x = (byte)1; int y = (int)7; return x > y"));
assertEquals(false, exec("def x = (short)2; int y = (int)6; return x > y"));
assertEquals(false, exec("def x = (char)3; int y = (int)5; return x > y"));
assertEquals(false, exec("def x = (int)4; int y = (int)4; return x > y"));
assertEquals(true, exec("def x = (long)5; int y = (int)3; return x > y"));
assertEquals(true, exec("def x = (float)6; int y = (int)2; return x > y"));
assertEquals(true, exec("def x = (double)7; int y = (int)1; return x > y"));
assertEquals(false, exec("def x = (byte)1; double y = (double)7; return x > y"));
assertEquals(false, exec("def x = (short)2; double y = (double)6; return x > y"));
assertEquals(false, exec("def x = (char)3; double y = (double)5; return x > y"));
assertEquals(false, exec("def x = (int)4; double y = (double)4; return x > y"));
assertEquals(true, exec("def x = (long)5; double y = (double)3; return x > y"));
assertEquals(true, exec("def x = (float)6; double y = (double)2; return x > y"));
assertEquals(true, exec("def x = (double)7; double y = (double)1; return x > y"));
}
public void testDefGte() {
assertEquals(false, exec("def x = (byte)1; def y = (int)7; return x >= y"));
assertEquals(false, exec("def x = (short)2; def y = (int)6; return x >= y"));
assertEquals(false, exec("def x = (char)3; def y = (int)5; return x >= y"));
assertEquals(true, exec("def x = (int)4; def y = (int)4; return x >= y"));
assertEquals(true, exec("def x = (long)5; def y = (int)3; return x >= y"));
assertEquals(true, exec("def x = (float)6; def y = (int)2; return x >= y"));
assertEquals(true, exec("def x = (double)7; def y = (int)1; return x >= y"));
assertEquals(false, exec("def x = (byte)1; def y = (double)7; return x >= y"));
assertEquals(false, exec("def x = (short)2; def y = (double)6; return x >= y"));
assertEquals(false, exec("def x = (char)3; def y = (double)5; return x >= y"));
assertEquals(true, exec("def x = (int)4; def y = (double)4; return x >= y"));
assertEquals(true, exec("def x = (long)5; def y = (double)3; return x >= y"));
assertEquals(true, exec("def x = (float)6; def y = (double)2; return x >= y"));
assertEquals(true, exec("def x = (double)7; def y = (double)1; return x >= y"));
}
public void testDefGteTypedLHS() {
assertEquals(false, exec("byte x = (byte)1; def y = (int)7; return x >= y"));
assertEquals(false, exec("short x = (short)2; def y = (int)6; return x >= y"));
assertEquals(false, exec("char x = (char)3; def y = (int)5; return x >= y"));
assertEquals(true, exec("int x = (int)4; def y = (int)4; return x >= y"));
assertEquals(true, exec("long x = (long)5; def y = (int)3; return x >= y"));
assertEquals(true, exec("float x = (float)6; def y = (int)2; return x >= y"));
assertEquals(true, exec("double x = (double)7; def y = (int)1; return x >= y"));
assertEquals(false, exec("byte x = (byte)1; def y = (double)7; return x >= y"));
assertEquals(false, exec("short x = (short)2; def y = (double)6; return x >= y"));
assertEquals(false, exec("char x = (char)3; def y = (double)5; return x >= y"));
assertEquals(true, exec("int x = (int)4; def y = (double)4; return x >= y"));
assertEquals(true, exec("long x = (long)5; def y = (double)3; return x >= y"));
assertEquals(true, exec("float x = (float)6; def y = (double)2; return x >= y"));
assertEquals(true, exec("double x = (double)7; def y = (double)1; return x >= y"));
}
public void testDefGteTypedRHS() {
assertEquals(false, exec("def x = (byte)1; int y = (int)7; return x >= y"));
assertEquals(false, exec("def x = (short)2; int y = (int)6; return x >= y"));
assertEquals(false, exec("def x = (char)3; int y = (int)5; return x >= y"));
assertEquals(true, exec("def x = (int)4; int y = (int)4; return x >= y"));
assertEquals(true, exec("def x = (long)5; int y = (int)3; return x >= y"));
assertEquals(true, exec("def x = (float)6; int y = (int)2; return x >= y"));
assertEquals(true, exec("def x = (double)7; int y = (int)1; return x >= y"));
assertEquals(false, exec("def x = (byte)1; double y = (double)7; return x >= y"));
assertEquals(false, exec("def x = (short)2; double y = (double)6; return x >= y"));
assertEquals(false, exec("def x = (char)3; double y = (double)5; return x >= y"));
assertEquals(true, exec("def x = (int)4; double y = (double)4; return x >= y"));
assertEquals(true, exec("def x = (long)5; double y = (double)3; return x >= y"));
assertEquals(true, exec("def x = (float)6; double y = (double)2; return x >= y"));
assertEquals(true, exec("def x = (double)7; double y = (double)1; return x >= y"));
}
public void testInstanceOf() {
assertEquals(true, exec("int x = 5; return x instanceof int"));
assertEquals(true, exec("int x = 5; return x instanceof Number"));
assertEquals(true, exec("int x = 5; return x instanceof Integer"));
assertEquals(true, exec("int x = 5; return x instanceof def"));
assertEquals(true, exec("int x = 5; return x instanceof Object"));
assertEquals(true, exec("def x = 5; return x instanceof int"));
assertEquals(true, exec("def x = 5; return x instanceof def"));
assertEquals(true, exec("def x = 5; return x instanceof Object"));
assertEquals(true, exec("def x = 5; return x instanceof Integer"));
assertEquals(true, exec("def x = 5; return x instanceof Number"));
assertEquals(false, exec("def x = 5; return x instanceof float"));
assertEquals(false, exec("def x = 5; return x instanceof Map"));
assertEquals(true, exec("List l = new ArrayList(); return l instanceof List"));
assertEquals(false, exec("List l = null; return l instanceof List"));
assertEquals(true, exec("List l = new ArrayList(); return l instanceof Collection"));
assertEquals(false, exec("List l = new ArrayList(); return l instanceof Map"));
assertEquals(true, exec("int[] x = new int[] { 5 }; return x instanceof int[]"));
assertEquals(false, exec("int[] x = new int[] { 5 }; return x instanceof float[]"));
assertEquals(false, exec("int[] x = new int[] { 5 }; return x instanceof int[][]"));
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationActivationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.AppIconScheme;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.util.IconUtil;
import com.intellij.util.containers.HashMap;
import com.intellij.util.ui.ImageUtil;
import com.intellij.util.ui.UIUtil;
import org.apache.commons.imaging.ImageWriteException;
import org.apache.commons.imaging.common.BinaryOutputStream;
import org.jetbrains.annotations.NotNull;
import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.Area;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RoundRectangle2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.ByteOrder;
import java.util.Map;
/**
 * Decorates the application's dock (macOS) or taskbar (Windows 7+) icon with a
 * progress bar, text/OK badges and user-attention requests; all other platforms
 * receive a no-op implementation. Obtain the singleton via {@link #getInstance()}.
 */
public abstract class AppIcon {
  private static final Logger LOG = Logger.getInstance(AppIcon.class);
  // Lazily created per-OS singleton instance.
  private static AppIcon ourIcon;
  /**
   * Returns the implementation matching the current OS.
   * NOTE(review): the lazy initialization is unsynchronized — presumably this is
   * only first reached from the EDT; confirm before relying on thread-safety.
   */
  @NotNull
  public static AppIcon getInstance() {
    if (ourIcon == null) {
      if (SystemInfo.isMac) {
        ourIcon = new MacAppIcon();
      }
      else if (SystemInfo.isWin7OrNewer) {
        ourIcon = new Win7AppIcon();
      }
      else {
        ourIcon = new EmptyIcon();
      }
    }
    return ourIcon;
  }
  /** Shows progress (0..1) for {@code processId}; returns whether the update was applied. */
  public abstract boolean setProgress(Project project, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk);
  /** Removes the progress indicator owned by {@code processId}; returns whether anything changed. */
  public abstract boolean hideProgress(Project project, Object processId);
  /** Shows a short error text badge on the icon; {@code null} clears it. */
  public abstract void setErrorBadge(Project project, String text);
  /** Shows or hides the "OK" badge on the icon. */
  public abstract void setOkBadge(Project project, boolean visible);
  /** Asks the OS to draw the user's attention to the application (e.g. dock bounce / taskbar flash). */
  public abstract void requestAttention(Project project, boolean critical);
  /** Brings the application window of the given frame to the foreground. */
  public abstract void requestFocus(IdeFrame frame);
  /**
   * Common gatekeeping layer: checks registry switches and whether the application
   * is already active (decorations are only useful while in the background), then
   * delegates to the platform-specific {@code _xxx} methods.
   */
  private static abstract class BaseIcon extends AppIcon {
    // Listener that clears all decorations when the app is activated; installed lazily.
    private ApplicationActivationListener myAppListener;
    // Identity of the process whose progress is currently being displayed, if any.
    protected Object myCurrentProcessId;
    // Last progress value painted; used to throttle repaints.
    protected double myLastValue;
    @Override
    public final boolean setProgress(Project project, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
      // Draw only when in background, the feature is enabled, and no other process owns the bar.
      if (!isAppActive() && Registry.is("ide.appIcon.progress") && (myCurrentProcessId == null || myCurrentProcessId.equals(processId))) {
        return _setProgress(getIdeFrame(project), processId, scheme, value, isOk);
      }
      else {
        return false;
      }
    }
    @Override
    public final boolean hideProgress(Project project, Object processId) {
      // Hiding is allowed even while the app is active (used by the activation listener).
      if (Registry.is("ide.appIcon.progress")) {
        return _hideProgress(getIdeFrame(project), processId);
      }
      else {
        return false;
      }
    }
    @Override
    public final void setErrorBadge(Project project, String text) {
      if (!isAppActive() && Registry.is("ide.appIcon.badge")) {
        // Error badge and OK badge are mutually exclusive.
        _setOkBadge(getIdeFrame(project), false);
        _setTextBadge(getIdeFrame(project), text);
      }
    }
    @Override
    public final void setOkBadge(Project project, boolean visible) {
      if (!isAppActive() && Registry.is("ide.appIcon.badge")) {
        // OK badge replaces any text badge.
        _setTextBadge(getIdeFrame(project), null);
        _setOkBadge(getIdeFrame(project), visible);
      }
    }
    @Override
    public final void requestAttention(Project project, boolean critical) {
      if (!isAppActive() && Registry.is("ide.appIcon.requestAttention")) {
        _requestAttention(getIdeFrame(project), critical);
      }
    }
    // Platform hooks implemented by subclasses; called only after the checks above.
    public abstract boolean _setProgress(IdeFrame frame, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk);
    public abstract boolean _hideProgress(IdeFrame frame, Object processId);
    public abstract void _setTextBadge(IdeFrame frame, String text);
    public abstract void _setOkBadge(IdeFrame frame, boolean visible);
    public abstract void _requestAttention(IdeFrame frame, boolean critical);
    /** Resolves the frame the decoration should be applied to; may return null (macOS uses the app-wide dock). */
    protected abstract IdeFrame getIdeFrame(Project project);
    /** Lazily installs the activation listener and reports whether the application is currently active. */
    private boolean isAppActive() {
      Application app = ApplicationManager.getApplication();
      if (app != null && myAppListener == null) {
        myAppListener = new ApplicationActivationListener() {
          @Override
          public void applicationActivated(IdeFrame ideFrame) {
            // App came to the foreground: all decorations are obsolete now.
            hideProgress(ideFrame.getProject(), myCurrentProcessId);
            _setOkBadge(ideFrame, false);
            _setTextBadge(ideFrame, null);
          }
        };
        app.getMessageBus().connect().subscribe(ApplicationActivationListener.TOPIC, myAppListener);
      }
      return app != null && app.isActive();
    }
  }
  /**
   * macOS implementation. Talks to {@code com.apple.eawt.Application} purely via
   * reflection so the class loads on non-Apple JVMs; missing methods are ignored.
   * All painting is done onto a copy of the original dock icon image.
   */
  @SuppressWarnings("UseJBColor")
  static class MacAppIcon extends BaseIcon {
    // Pristine dock icon image, fetched once and reused as the base for all decorations.
    private BufferedImage myAppImage;
    // Per-process canvases so successive progress updates draw over the same image.
    private Map<Object, AppImage> myProgressImagesCache = new HashMap<>();
    /** Fetches and caches the current dock icon image; null when unavailable. */
    private BufferedImage getAppImage() {
      assertIsDispatchThread();
      try {
        if (myAppImage != null) return myAppImage;
        Object app = getApp();
        Image appImage = (Image)getAppMethod("getDockIconImage").invoke(app);
        if (appImage == null) return null;
        myAppImage = ImageUtil.toBufferedImage(appImage);
      }
      catch (NoSuchMethodException e) {
        return null;
      }
      catch (Exception e) {
        LOG.error(e);
      }
      return myAppImage;
    }
    @Override
    public void _setTextBadge(IdeFrame frame, String text) {
      assertIsDispatchThread();
      try {
        // Dock badge text; null clears it.
        getAppMethod("setDockIconBadge", String.class).invoke(getApp(), text);
      }
      catch (NoSuchMethodException ignored) { }
      catch (Exception e) {
        LOG.error(e);
      }
    }
    @Override
    public void requestFocus(IdeFrame frame) {
      assertIsDispatchThread();
      try {
        getAppMethod("requestForeground", boolean.class).invoke(getApp(), true);
      }
      catch (NoSuchMethodException ignored) { }
      catch (Exception e) {
        LOG.error(e);
      }
    }
    @Override
    public void _requestAttention(IdeFrame frame, boolean critical) {
      assertIsDispatchThread();
      try {
        // critical == true keeps the dock icon bouncing until acknowledged.
        getAppMethod("requestUserAttention", boolean.class).invoke(getApp(), critical);
      }
      catch (NoSuchMethodException ignored) { }
      catch (Exception e) {
        LOG.error(e);
      }
    }
    @Override
    protected IdeFrame getIdeFrame(Project project) {
      // The dock icon is application-wide; no frame needed.
      return null;
    }
    @Override
    public boolean _hideProgress(IdeFrame frame, Object processId) {
      assertIsDispatchThread();
      if (getAppImage() == null) return false;
      if (myCurrentProcessId != null && !myCurrentProcessId.equals(processId)) return false;
      // Restore the pristine icon and drop the per-process canvas.
      setDockIcon(getAppImage());
      myProgressImagesCache.remove(myCurrentProcessId);
      myCurrentProcessId = null;
      myLastValue = 0;
      return true;
    }
    @Override
    public void _setOkBadge(IdeFrame frame, boolean visible) {
      assertIsDispatchThread();
      if (getAppImage() == null) return;
      AppImage img = createAppImage();
      if (visible) {
        Icon okIcon = AllIcons.Mac.AppIconOk512;
        int myImgWidth = img.myImg.getWidth();
        if (myImgWidth != 128) {
          // NOTE(review): integer division — the scale factor truncates for widths
          // that are not multiples of 128; confirm this is intended.
          okIcon = IconUtil.scale(okIcon, myImgWidth / 128);
        }
        // Paint the badge into the top-right corner.
        int x = myImgWidth - okIcon.getIconWidth();
        int y = 0;
        okIcon.paintIcon(JOptionPane.getRootFrame(), img.myG2d, x, y);
      }
      setDockIcon(img.myImg);
    }
    // white 80% transparent
    private static Color PROGRESS_BACKGROUND_COLOR = new Color(255, 255, 255, 217);
    private static Color PROGRESS_OUTLINE_COLOR = new Color(140, 139, 140);
    @Override
    public boolean _setProgress(IdeFrame frame, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
      assertIsDispatchThread();
      if (getAppImage() == null) return false;
      myCurrentProcessId = processId;
      // Skip regressions and sub-2% increments to limit dock repaints.
      if (myLastValue > value) return true;
      if (Math.abs(myLastValue - value) < 0.02d) return true;
      try {
        // Bar geometry is proportional to the icon size, anchored near the bottom.
        double progressHeight = (myAppImage.getHeight() * 0.13);
        double xInset = (myAppImage.getWidth() * 0.05);
        double yInset = (myAppImage.getHeight() * 0.15);
        final double width = myAppImage.getWidth() - xInset * 2;
        final double y = myAppImage.getHeight() - progressHeight - yInset;
        // Rounded outline one pixel larger than the fill on every side.
        Area borderArea = new Area( new RoundRectangle2D.Double(
          xInset - 1, y - 1, width + 2, progressHeight + 2,
          (progressHeight + 2), (progressHeight + 2
        )));
        Area backgroundArea = new Area(new Rectangle2D.Double(xInset, y, width, progressHeight));
        backgroundArea.intersect(borderArea);
        // Filled portion scales with the current progress value.
        Area progressArea = new Area(new Rectangle2D.Double(xInset + 1, y + 1,(width - 2) * value, progressHeight - 1));
        progressArea.intersect(borderArea);
        AppImage appImg = myProgressImagesCache.get(myCurrentProcessId);
        if (appImg == null) myProgressImagesCache.put(myCurrentProcessId, appImg = createAppImage());
        appImg.myG2d.setColor(PROGRESS_BACKGROUND_COLOR);
        appImg.myG2d.fill(backgroundArea);
        final Color color = isOk ? scheme.getOkColor() : scheme.getErrorColor();
        appImg.myG2d.setColor(color);
        appImg.myG2d.fill(progressArea);
        appImg.myG2d.setColor(PROGRESS_OUTLINE_COLOR);
        appImg.myG2d.draw(backgroundArea);
        appImg.myG2d.draw(borderArea);
        setDockIcon(appImg.myImg);
        myLastValue = value;
      }
      catch (Exception e) {
        LOG.error(e);
      }
      finally {
        // NOTE(review): the owner id is cleared after every update, so any process
        // may paint the next increment — verify this is the intended ownership model.
        myCurrentProcessId = null;
      }
      return true;
    }
    /** Creates a fresh ARGB copy of the dock icon together with its graphics context. */
    private AppImage createAppImage() {
      BufferedImage appImage = getAppImage();
      assert appImage != null;
      @SuppressWarnings("UndesirableClassUsage")
      BufferedImage current = new BufferedImage(appImage.getWidth(), appImage.getHeight(), BufferedImage.TYPE_INT_ARGB);
      Graphics2D g = current.createGraphics();
      g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
      UIUtil.drawImage(g, appImage, 0, 0, null);
      return new AppImage(current, g);
    }
    /** Pairs a mutable icon image with the graphics context used to draw on it. */
    private static class AppImage {
      BufferedImage myImg;
      Graphics2D myG2d;
      AppImage(BufferedImage img, Graphics2D g2d) {
        myImg = img;
        myG2d = g2d;
      }
    }
    /** Pushes the given image to the dock via reflection; errors are logged, not thrown. */
    static void setDockIcon(BufferedImage image) {
      try {
        getAppMethod("setDockIconImage", Image.class).invoke(getApp(), image);
      }
      catch (Exception e) {
        LOG.error(e);
      }
    }
    private static Method getAppMethod(final String name, Class... args) throws NoSuchMethodException, ClassNotFoundException {
      return getAppClass().getMethod(name, args);
    }
    private static Object getApp() throws NoSuchMethodException, ClassNotFoundException, InvocationTargetException, IllegalAccessException {
      return getAppClass().getMethod("getApplication").invoke(null);
    }
    private static Class<?> getAppClass() throws ClassNotFoundException {
      return Class.forName("com.apple.eawt.Application");
    }
  }
  /**
   * Windows 7+ implementation backed by {@code Win7TaskBar}. Badges are rendered
   * to an in-memory ICO because the taskbar overlay API expects icon handles.
   */
  @SuppressWarnings("UseJBColor")
  private static class Win7AppIcon extends BaseIcon {
    @Override
    public boolean _setProgress(IdeFrame frame, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
      myCurrentProcessId = processId;
      // Throttle: ignore changes smaller than 2%.
      if (Math.abs(myLastValue - value) < 0.02d) {
        return true;
      }
      try {
        if (isValid(frame)) {
          Win7TaskBar.setProgress(frame, value, isOk);
        }
      }
      catch (Throwable e) {
        LOG.error(e);
      }
      myLastValue = value;
      myCurrentProcessId = null;
      return true;
    }
    @Override
    public boolean _hideProgress(IdeFrame frame, Object processId) {
      if (myCurrentProcessId != null && !myCurrentProcessId.equals(processId)) {
        return false;
      }
      try {
        if (isValid(frame)) {
          Win7TaskBar.hideProgress(frame);
        }
      }
      catch (Throwable e) {
        LOG.error(e);
      }
      myCurrentProcessId = null;
      myLastValue = 0;
      return true;
    }
    /**
     * Serializes {@code src} as a single-image 32-bit ICO with an explicit AND
     * (transparency) mask, little-endian per the ICO format. The input must be
     * an ARGB image so the alpha channel can drive the mask.
     */
    private static void writeTransparentIco(BufferedImage src, OutputStream os)
      throws ImageWriteException, IOException {
      LOG.assertTrue(BufferedImage.TYPE_INT_ARGB == src.getType() || BufferedImage.TYPE_4BYTE_ABGR == src.getType());
      int bitCount = 32;
      BinaryOutputStream bos = new BinaryOutputStream(os, ByteOrder.LITTLE_ENDIAN);
      try {
        // XOR (color) scanline, padded to a 4-byte boundary.
        int scanline_size = (bitCount * src.getWidth() + 7) / 8;
        if ((scanline_size % 4) != 0)
          scanline_size += 4 - (scanline_size % 4); // pad scanline to 4 byte size.
        // AND (1-bit transparency) scanline, also 4-byte padded.
        int t_scanline_size = (src.getWidth() + 7) / 8;
        if ((t_scanline_size % 4) != 0)
          t_scanline_size += 4 - (t_scanline_size % 4); // pad scanline to 4 byte size.
        // 40 = BITMAPINFOHEADER size; image data holds both XOR and AND masks.
        int imageSize = 40 + src.getHeight() * scanline_size + src.getHeight() * t_scanline_size;
        // ICONDIR
        bos.write2Bytes(0); // reserved
        bos.write2Bytes(1); // 1=ICO, 2=CUR
        bos.write2Bytes(1); // count
        // ICONDIRENTRY
        int iconDirEntryWidth = src.getWidth();
        int iconDirEntryHeight = src.getHeight();
        if (iconDirEntryWidth > 255 || iconDirEntryHeight > 255) {
          // 0 in the directory entry means "256 or larger" per the ICO format.
          iconDirEntryWidth = 0;
          iconDirEntryHeight = 0;
        }
        bos.write(iconDirEntryWidth);
        bos.write(iconDirEntryHeight);
        bos.write(0);
        bos.write(0); // reserved
        bos.write2Bytes(1); // color planes
        bos.write2Bytes(bitCount);
        bos.write4Bytes(imageSize);
        bos.write4Bytes(22); // image offset
        // BITMAPINFOHEADER
        bos.write4Bytes(40); // size
        bos.write4Bytes(src.getWidth());
        // Height is doubled: it covers the XOR mask plus the AND mask.
        bos.write4Bytes(2 * src.getHeight());
        bos.write2Bytes(1); // planes
        bos.write2Bytes(bitCount);
        bos.write4Bytes(0); // compression
        bos.write4Bytes(0); // image size
        bos.write4Bytes(0); // x pixels per meter
        bos.write4Bytes(0); // y pixels per meter
        bos.write4Bytes(0); // colors used, 0 = (1 << bitCount) (ignored)
        bos.write4Bytes(0); // colors important
        int bit_cache = 0;
        int bits_in_cache = 0;
        int row_padding = scanline_size - (bitCount * src.getWidth() + 7) / 8;
        // XOR mask: BGRA pixel data written bottom-up, as the format requires.
        for (int y = src.getHeight() - 1; y >= 0; y--) {
          for (int x = 0; x < src.getWidth(); x++) {
            int argb = src.getRGB(x, y);
            bos.write(0xff & argb);
            bos.write(0xff & (argb >> 8));
            bos.write(0xff & (argb >> 16));
            bos.write(0xff & (argb >> 24));
          }
          for (int x = 0; x < row_padding; x++)
            bos.write(0);
        }
        int t_row_padding = t_scanline_size - (src.getWidth() + 7) / 8;
        // AND mask: one bit per pixel, 1 = fully transparent; packed MSB-first.
        for (int y = src.getHeight() - 1; y >= 0; y--) {
          for (int x = 0; x < src.getWidth(); x++) {
            int argb = src.getRGB(x, y);
            int alpha = 0xff & (argb >> 24);
            bit_cache <<= 1;
            if (alpha == 0)
              bit_cache |= 1;
            bits_in_cache++;
            if (bits_in_cache >= 8) {
              bos.write(0xff & bit_cache);
              bit_cache = 0;
              bits_in_cache = 0;
            }
          }
          // Flush the partially filled byte at the end of the row.
          if (bits_in_cache > 0) {
            bit_cache <<= (8 - bits_in_cache);
            bos.write(0xff & bit_cache);
            bit_cache = 0;
            bits_in_cache = 0;
          }
          for (int x = 0; x < t_row_padding; x++)
            bos.write(0);
        }
      }
      finally {
        try {
          bos.close();
        } catch (IOException ignored) { }
      }
    }
    private static Color errorBadgeShadowColor = new Color(0,0,0,102);
    private static Color errorBadgeMainColor = new Color(255,98,89);
    private static Color errorBadgeTextBackgroundColor = new Color(0,0,0,39);
    @Override
    public void _setTextBadge(IdeFrame frame, String text) {
      if (!isValid(frame)) {
        return;
      }
      Object icon = null;
      if (text != null) {
        try {
          // Render a 16x16 red badge with the given text onto the taskbar overlay.
          int size = 16;
          BufferedImage image = UIUtil.createImage(frame.getComponent(), size, size, BufferedImage.TYPE_INT_ARGB);
          Graphics2D g = image.createGraphics();
          int shadowRadius = 16;
          g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
          g.setPaint(errorBadgeShadowColor);
          g.fillRoundRect(size / 2 - shadowRadius / 2, size / 2 - shadowRadius / 2, shadowRadius, shadowRadius, size, size);
          int mainRadius = 14;
          g.setPaint(errorBadgeMainColor);
          g.fillRoundRect(size / 2 - mainRadius / 2, size / 2 - mainRadius / 2, mainRadius, mainRadius, size, size);
          Font font = g.getFont();
          g.setFont(new Font(font.getName(), Font.BOLD, 9));
          FontMetrics fontMetrics = g.getFontMetrics();
          int textWidth = fontMetrics.stringWidth(text);
          int textHeight = UIUtil.getHighestGlyphHeight(text, font, g);
          g.setPaint(errorBadgeTextBackgroundColor);
          g.fillOval( size / 2 - textWidth / 2, size / 2 - textHeight / 2, textWidth, textHeight);
          g.setColor(Color.white);
          g.drawString(text, size / 2 - textWidth / 2, size / 2 - fontMetrics.getHeight() / 2 + fontMetrics.getAscent());
          ByteArrayOutputStream bytes = new ByteArrayOutputStream();
          writeTransparentIco(image, bytes);
          icon = Win7TaskBar.createIcon(bytes.toByteArray());
        }
        catch (Throwable e) {
          LOG.error(e);
        }
      }
      try {
        // Third argument: dispose the freshly created icon handle after use.
        Win7TaskBar.setOverlayIcon(frame, icon, icon != null);
      }
      catch (Throwable e) {
        LOG.error(e);
      }
    }
    // Cached taskbar-overlay handle for the OK badge; built once, reused forever.
    private Object myOkIcon;
    @Override
    public void _setOkBadge(IdeFrame frame, boolean visible) {
      if (!isValid(frame)) {
        return;
      }
      Object icon = null;
      if (visible) {
        synchronized (Win7AppIcon.class) {
          if (myOkIcon == null) {
            try {
              BufferedImage image = ImageIO.read(getClass().getResource("/mac/appIconOk512.png"));
              ByteArrayOutputStream bytes = new ByteArrayOutputStream();
              writeTransparentIco(image, bytes);
              myOkIcon = Win7TaskBar.createIcon(bytes.toByteArray());
            }
            catch (Throwable e) {
              LOG.error(e);
              myOkIcon = null;
            }
          }
          icon = myOkIcon;
        }
      }
      try {
        // dispose == false: presumably because myOkIcon is cached and reused — confirm.
        Win7TaskBar.setOverlayIcon(frame, icon, false);
      }
      catch (Throwable e) {
        LOG.error(e);
      }
    }
    @Override
    public void _requestAttention(IdeFrame frame, boolean critical) {
      try {
        if (isValid(frame)) {
          Win7TaskBar.attention(frame, critical);
        }
      }
      catch (Throwable e) {
        LOG.error(e);
      }
    }
    @Override
    protected IdeFrame getIdeFrame(Project project) {
      return WindowManager.getInstance().getIdeFrame(project);
    }
    @Override
    public void requestFocus(IdeFrame frame) { }
    /** A frame can only be decorated while it is displayable (has a native peer). */
    private static boolean isValid(IdeFrame frame) {
      return frame != null && ((Component)frame).isDisplayable();
    }
  }
  /** Fallback for platforms without taskbar/dock integration: every operation is a no-op. */
  private static class EmptyIcon extends AppIcon {
    @Override
    public boolean setProgress(Project project, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
      return false;
    }
    @Override
    public boolean hideProgress(Project project, Object processId) {
      return false;
    }
    @Override
    public void setErrorBadge(Project project, String text) { }
    @Override
    public void setOkBadge(Project project, boolean visible) { }
    @Override
    public void requestAttention(Project project, boolean critical) { }
    @Override
    public void requestFocus(IdeFrame frame) { }
  }
  /** Asserts EDT access where an Application exists (skipped in unit-test mode); falls back to AWT check otherwise. */
  private static void assertIsDispatchThread() {
    Application app = ApplicationManager.getApplication();
    if (app != null) {
      if (!app.isUnitTestMode()) {
        app.assertIsDispatchThread();
      }
    }
    else {
      assert EventQueue.isDispatchThread();
    }
  }
}
| |
package com.github.i49.hibiscus.validation;
import static com.github.i49.hibiscus.schema.SchemaComponents.*;
import static org.junit.Assert.*;
import org.junit.Test;
import com.github.i49.hibiscus.common.Bound;
import com.github.i49.hibiscus.common.TypeId;
import com.github.i49.hibiscus.problems.InclusiveLowerBoundProblem;
import com.github.i49.hibiscus.problems.InclusiveUpperBoundProblem;
import com.github.i49.hibiscus.problems.ExclusiveUpperBoundProblem;
import com.github.i49.hibiscus.problems.ExclusiveLowerBoundProblem;
import com.github.i49.hibiscus.problems.Problem;
import com.github.i49.hibiscus.problems.TypeMismatchProblem;
import com.github.i49.hibiscus.problems.NoSuchEnumeratorProblem;
import com.github.i49.hibiscus.schema.Schema;
import java.io.StringReader;
import java.math.BigDecimal;
import java.util.Set;
import javax.json.JsonNumber;
import static com.github.i49.hibiscus.validation.CustomAssertions.*;
public class NumberValidationTest {
/**
 * Tests of various kinds of values accepted by the {@code number()} type.
 */
public static class NumberValueTest {

    /** A positive decimal number must validate without problems. */
    @Test
    public void positiveNumber() { // renamed from "postiveNumber" (typo)
        String json = "[123.45]";
        Schema schema = schema(array(number()));
        JsonValidator validator = new BasicJsonValidator(schema);
        ValidationResult result = validator.validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** A negative decimal number must validate without problems. */
    @Test
    public void negativeNumber() {
        String json = "[-123.45]";
        Schema schema = schema(array(number()));
        JsonValidator validator = new BasicJsonValidator(schema);
        ValidationResult result = validator.validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** Zero must validate without problems. */
    @Test
    public void zero() {
        String json = "[0.0]";
        Schema schema = schema(array(number()));
        JsonValidator validator = new BasicJsonValidator(schema);
        ValidationResult result = validator.validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** An integral value is also a valid number. */
    @Test
    public void integralNumber() {
        String json = "[123]";
        Schema schema = schema(array(number()));
        JsonValidator validator = new BasicJsonValidator(schema);
        ValidationResult result = validator.validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }
}
/**
 * Tests of values whose type does not match the declared {@code number()} type.
 */
public static class TypeMismatchTest {

    /**
     * A JSON string must be reported as a type mismatch against the number() type.
     *
     * <p>Fix: the schema previously declared {@code bool()}, which looks like a
     * copy-paste slip in this number-focused suite; the expected type here is
     * {@code number()}. The assertions (actual type is STRING) are unchanged.</p>
     */
    @Test
    public void notNumberButString() {
        String json = "[\"123.45\"]";
        Schema schema = schema(array(number()));
        JsonValidator validator = new BasicJsonValidator(schema);
        ValidationResult result = validator.validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        Problem p = result.getProblems().get(0);
        assertTrue(p instanceof TypeMismatchProblem);
        assertEquals(TypeId.STRING, ((TypeMismatchProblem)p).getActualType());
        assertNotNull(p.getDescription());
    }
}
/**
 * Tests of the {@code enumeration()} facet of the {@code number()} type.
 */
public static class EnumerationTest {

    /** An empty enumeration rejects every value. */
    @Test
    public void notExistInNone() {
        String json = "[12.34]";
        Schema schema = schema(array(number().enumeration()));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof NoSuchEnumeratorProblem);
        NoSuchEnumeratorProblem problem = (NoSuchEnumeratorProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("12.34"), ((JsonNumber)problem.getCauseValue()).bigDecimalValue());
        Set<Object> enumerators = problem.getEnumerators();
        assertEquals(0, enumerators.size());
        assertNotNull(problem.getDescription());
    }

    /** A value matching the single enumerator is accepted. */
    @Test
    public void existInOne() {
        String json = "[12.34]";
        Schema schema = schema(array(number().enumeration(new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** A value differing from the single enumerator is reported. */
    @Test
    public void notExistInOne() {
        String json = "[12.34]";
        Schema schema = schema(array(number().enumeration(new BigDecimal("56.78"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof NoSuchEnumeratorProblem);
        NoSuchEnumeratorProblem problem = (NoSuchEnumeratorProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("12.34"), ((JsonNumber)problem.getCauseValue()).bigDecimalValue());
        Set<Object> enumerators = problem.getEnumerators();
        assertEquals(1, enumerators.size());
        assertNotNull(problem.getDescription());
    }

    /** A value matching one of several enumerators is accepted. */
    @Test
    public void existInMany() {
        String json = "[12.34]";
        Schema schema = schema(array(number().enumeration(new BigDecimal("56.78"), new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** A value matching none of several enumerators is reported. */
    @Test
    public void notExistInMany() {
        String json = "[3.14]";
        Schema schema = schema(array(number().enumeration(new BigDecimal("12.34"), new BigDecimal("56.78"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof NoSuchEnumeratorProblem);
        NoSuchEnumeratorProblem problem = (NoSuchEnumeratorProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("3.14"), ((JsonNumber)problem.getCauseValue()).bigDecimalValue());
        Set<Object> enumerators = problem.getEnumerators();
        assertEquals(2, enumerators.size());
        assertNotNull(problem.getDescription());
    }
}
/**
 * Tests of the {@code minInclusive()} facet of the {@code number()} type.
 */
public static class MinInclusiveTest {

    /** A value strictly below the inclusive minimum is reported. */
    @Test
    public void lessThanMinimum() {
        String json = "[12.33]";
        Schema schema = schema(array(number().minInclusive(new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof InclusiveLowerBoundProblem);
        InclusiveLowerBoundProblem problem = (InclusiveLowerBoundProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("12.33"), problem.getCauseValue().bigDecimalValue());
        Bound<BigDecimal> bound = problem.getBound();
        assertFalse(bound.isExclusive());
        assertEquals(new BigDecimal("12.34"), bound.getValue());
        assertNotNull(problem.getDescription());
    }

    /** A value numerically equal to the inclusive minimum is accepted. */
    @Test
    public void equalToMinimum() {
        String json = "[12.340]";
        Schema schema = schema(array(number().minInclusive(new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** A value above the inclusive minimum is accepted. */
    @Test
    public void moreThanMinimum() {
        String json = "[12.35]";
        Schema schema = schema(array(number().minInclusive(new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }
}
/**
 * Tests of the {@code minExclusive()} facet of the {@code number()} type.
 */
public static class MinExclusiveTest {

    /** A value numerically equal to the exclusive minimum is reported. */
    @Test
    public void equalToMinimum() {
        String json = "[12.340]";
        Schema schema = schema(array(number().minExclusive(new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof ExclusiveLowerBoundProblem);
        ExclusiveLowerBoundProblem problem = (ExclusiveLowerBoundProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("12.340"), problem.getCauseValue().bigDecimalValue());
        Bound<BigDecimal> bound = problem.getBound();
        assertTrue(bound.isExclusive());
        assertEquals(new BigDecimal("12.34"), bound.getValue());
        assertNotNull(problem.getDescription());
    }

    /** A value strictly above the exclusive minimum is accepted. */
    @Test
    public void moreThanMinimum() {
        String json = "[12.35]";
        Schema schema = schema(array(number().minExclusive(new BigDecimal("12.34"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }
}
/**
 * Tests of the {@code maxInclusive()} facet of the {@code number()} type.
 */
public static class MaxInclusiveTest {

    /** A value numerically equal to the inclusive maximum is accepted. */
    @Test
    public void equalToMaximum() {
        String json = "[56.780]";
        Schema schema = schema(array(number().maxInclusive(new BigDecimal("56.78"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** A value strictly above the inclusive maximum is reported. */
    @Test
    public void moreThanMaximum() {
        String json = "[56.79]";
        Schema schema = schema(array(number().maxInclusive(new BigDecimal("56.78"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof InclusiveUpperBoundProblem);
        InclusiveUpperBoundProblem problem = (InclusiveUpperBoundProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("56.79"), problem.getCauseValue().bigDecimalValue());
        Bound<BigDecimal> bound = problem.getBound();
        assertFalse(bound.isExclusive());
        assertEquals(new BigDecimal("56.78"), bound.getValue());
        assertNotNull(problem.getDescription());
    }
}
/**
 * Tests of the {@code maxExclusive()} facet of the {@code number()} type.
 */
public static class MaxExclusiveTest {

    /** A value strictly below the exclusive maximum is accepted. */
    @Test
    public void lessThanMaximum() {
        String json = "[56.77]";
        Schema schema = schema(array(number().maxExclusive(new BigDecimal("56.78"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertFalse(result.hasProblems());
    }

    /** A value numerically equal to the exclusive maximum is reported. */
    @Test
    public void equalToMaximum() {
        String json = "[56.780]";
        Schema schema = schema(array(number().maxExclusive(new BigDecimal("56.78"))));
        ValidationResult result = new BasicJsonValidator(schema).validate(new StringReader(json));
        assertResultValid(result, json);
        assertEquals(1, result.getProblems().size());
        assertTrue(result.getProblems().get(0) instanceof ExclusiveUpperBoundProblem);
        ExclusiveUpperBoundProblem problem = (ExclusiveUpperBoundProblem)result.getProblems().get(0);
        assertEquals(new BigDecimal("56.780"), problem.getCauseValue().bigDecimalValue());
        Bound<BigDecimal> bound = problem.getBound();
        assertTrue(bound.isExclusive());
        assertEquals(new BigDecimal("56.78"), bound.getValue());
        assertNotNull(problem.getDescription());
    }
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import static com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression.COMPRESSED;
import static com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression.UNCOMPRESSED;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.RunfilesSupport;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.config.CompilationMode;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException;
import com.google.devtools.build.lib.rules.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.rules.cpp.CppConfiguration;
import com.google.devtools.build.lib.rules.cpp.CppHelper;
import com.google.devtools.build.lib.rules.cpp.LinkerInput;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgs.ClasspathType;
import com.google.devtools.build.lib.rules.java.ProguardHelper.ProguardOutput;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
/**
* An implementation of java_binary.
*/
public class JavaBinary implements RuleConfiguredTargetFactory {
// Runfiles directory under which C++ runtime libraries are symlinked (see
// collectDefaultRunfiles), so the Java binary need not embed crosstool paths.
private static final PathFragment CPP_RUNTIMES = new PathFragment("_cpp_runtimes");

// Language/dialect-specific behavior injected by the concrete rule definition.
private final JavaSemantics semantics;

protected JavaBinary(JavaSemantics semantics) {
    this.semantics = semantics;
}
@Override
// Builds the configured target for a java_binary rule: sets up compilation,
// the launcher, deploy jars (optionally Proguard-processed), runfiles, and the
// providers exposed to dependent rules. Returns null if rule errors were
// reported. NOTE(review): statement order matters throughout — actions and
// builders are wired incrementally; do not reorder without care.
public ConfiguredTarget create(RuleContext ruleContext)
    throws InterruptedException, RuleErrorException {
    final JavaCommon common = new JavaCommon(ruleContext, semantics);
    DeployArchiveBuilder deployArchiveBuilder = new DeployArchiveBuilder(semantics, ruleContext);
    Runfiles.Builder runfilesBuilder = new Runfiles.Builder(
        ruleContext.getWorkspaceName(), ruleContext.getConfiguration().legacyExternalRunfiles());
    List<String> jvmFlags = new ArrayList<>();
    JavaTargetAttributes.Builder attributesBuilder = common.initCommon();
    attributesBuilder.addClassPathResources(
        ruleContext.getPrerequisiteArtifacts("classpath_resources", Mode.TARGET).list());
    List<String> userJvmFlags = JavaCommon.getJvmFlags(ruleContext);
    ruleContext.checkSrcsSamePackage(true);
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    if (!createExecutable) {
        // TODO(cushon): disallow combining launcher=JDK_LAUNCHER_LABEL with create_executable=0
        // and use isAttributeExplicitlySpecified here
        Label launcherAttribute = ruleContext.attributes().get("launcher", BuildType.LABEL);
        if (launcherAttribute != null
            && !launcherAttribute.equals(semantics.getJdkLauncherLabel())) {
            ruleContext.ruleError("launcher specified but create_executable is false");
        }
    }
    List<TransitiveInfoCollection> deps =
        // Do not remove <TransitiveInfoCollection>: workaround for Java 7 type inference.
        Lists.<TransitiveInfoCollection>newArrayList(
            common.targetsTreatedAsDeps(ClasspathType.COMPILE_ONLY));
    semantics.checkRule(ruleContext, common);
    semantics.checkForProtoLibraryAndJavaProtoLibraryOnSameProto(ruleContext, common);
    // originalMainClass is kept because addCoverageSupport below may replace mainClass;
    // the deploy manifest still needs the user-declared one.
    String mainClass = semantics.getMainClass(ruleContext, common.getSrcsArtifacts());
    String originalMainClass = mainClass;
    if (ruleContext.hasErrors()) {
        return null;
    }
    // Collect the transitive dependencies.
    JavaCompilationHelper helper = new JavaCompilationHelper(
        ruleContext, semantics, common.getJavacOpts(), attributesBuilder);
    helper.addLibrariesToAttributes(deps);
    helper.addProvidersToAttributes(
        JavaCommon.compilationArgsFromSources(ruleContext), /* isNeverLink */ false);
    attributesBuilder.addNativeLibraries(
        collectNativeLibraries(common.targetsTreatedAsDeps(ClasspathType.BOTH)));
    // deploy_env is valid for java_binary, but not for java_test.
    if (ruleContext.getRule().isAttrDefined("deploy_env", BuildType.LABEL_LIST)) {
        for (JavaRuntimeClasspathProvider envTarget : ruleContext.getPrerequisites(
            "deploy_env", Mode.TARGET, JavaRuntimeClasspathProvider.class)) {
            attributesBuilder.addExcludedArtifacts(envTarget.getRuntimeClasspath());
        }
    }
    Artifact srcJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_SOURCE_JAR);
    Artifact classJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_CLASS_JAR);
    ImmutableList<Artifact> srcJars = ImmutableList.of(srcJar);
    CppConfiguration cppConfiguration = ruleContext.getConfiguration().getFragment(
        CppConfiguration.class);
    // Strip native launchers by default only for opt builds with fission enabled.
    boolean stripAsDefault = cppConfiguration.useFission()
        && cppConfiguration.getCompilationMode() == CompilationMode.OPT;
    Artifact launcher = semantics.getLauncher(ruleContext, common, deployArchiveBuilder,
        runfilesBuilder, jvmFlags, attributesBuilder, stripAsDefault);
    // When stripping by default, also build an unstripped launcher + deploy jar variant.
    DeployArchiveBuilder unstrippedDeployArchiveBuilder = null;
    Artifact unstrippedLauncher = null;
    if (stripAsDefault) {
        unstrippedDeployArchiveBuilder = new DeployArchiveBuilder(semantics, ruleContext);
        unstrippedLauncher = semantics.getLauncher(ruleContext, common,
            unstrippedDeployArchiveBuilder, runfilesBuilder, jvmFlags, attributesBuilder,
            false /* shouldStrip */);
    }
    JavaCompilationArtifacts.Builder javaArtifactsBuilder = new JavaCompilationArtifacts.Builder();
    Artifact instrumentationMetadata =
        helper.createInstrumentationMetadata(classJar, javaArtifactsBuilder);
    NestedSetBuilder<Artifact> filesBuilder = NestedSetBuilder.stableOrder();
    Artifact executable = null;
    if (createExecutable) {
        executable = ruleContext.createOutputArtifact(); // the artifact for the rule itself
        filesBuilder.add(classJar).add(executable);
        if (ruleContext.getConfiguration().isCodeCoverageEnabled()) {
            // Coverage may substitute a different main class (a coverage runner).
            mainClass = semantics.addCoverageSupport(helper, attributesBuilder,
                executable, instrumentationMetadata, javaArtifactsBuilder, mainClass);
        }
    } else {
        filesBuilder.add(classJar);
    }
    JavaTargetAttributes attributes = helper.getAttributes();
    List<Artifact> nativeLibraries = attributes.getNativeLibraries();
    if (!nativeLibraries.isEmpty()) {
        jvmFlags.add("-Djava.library.path="
            + JavaCommon.javaLibraryPath(nativeLibraries, ruleContext.getRule().getWorkspaceName()));
    }
    JavaConfiguration javaConfig = ruleContext.getFragment(JavaConfiguration.class);
    if (attributes.hasMessages()) {
        helper.setTranslations(
            semantics.translate(ruleContext, javaConfig, attributes.getMessages()));
    }
    if (attributes.hasSourceFiles() || attributes.hasSourceJars()
        || attributes.hasResources() || attributes.hasClassPathResources()) {
        // We only want to add a jar to the classpath of a dependent rule if it has content.
        javaArtifactsBuilder.addRuntimeJar(classJar);
    }
    Artifact outputDepsProto = helper.createOutputDepsProtoArtifact(classJar, javaArtifactsBuilder);
    JavaCompilationArtifacts javaArtifacts = javaArtifactsBuilder.build();
    common.setJavaCompilationArtifacts(javaArtifacts);
    Artifact manifestProtoOutput = helper.createManifestProtoOutput(classJar);
    // The gensrc jar is created only if the target uses annotation processing. Otherwise,
    // it is null, and the source jar action will not depend on the compile action.
    Artifact genSourceJar = null;
    Artifact genClassJar = null;
    if (helper.usesAnnotationProcessing()) {
        genClassJar = helper.createGenJar(classJar);
        genSourceJar = helper.createGensrcJar(classJar);
        helper.createGenJarAction(classJar, manifestProtoOutput, genClassJar);
    }
    helper.createCompileAction(
        classJar, manifestProtoOutput, genSourceJar, outputDepsProto, instrumentationMetadata);
    helper.createSourceJarAction(srcJar, genSourceJar);
    common.setClassPathFragment(
        new ClasspathConfiguredFragment(
            javaArtifacts, attributes, false, helper.getBootclasspathOrDefault()));
    // Collect the action inputs for the runfiles collector here because we need to access the
    // analysis environment, and that may no longer be safe when the runfiles collector runs.
    Iterable<Artifact> dynamicRuntimeActionInputs =
        CppHelper.getToolchain(ruleContext).getDynamicRuntimeLinkInputs();
    Iterables.addAll(jvmFlags,
        semantics.getJvmFlags(ruleContext, common.getSrcsArtifacts(), userJvmFlags));
    if (ruleContext.hasErrors()) {
        return null;
    }
    if (createExecutable) {
        // Create a shell stub for a Java application
        semantics.createStubAction(ruleContext, common, jvmFlags, executable, mainClass,
            JavaCommon.getJavaBinSubstitution(ruleContext, launcher));
    }
    NestedSet<Artifact> transitiveSourceJars = collectTransitiveSourceJars(common, srcJar);
    // TODO(bazel-team): if (getOptions().sourceJars) then make this a dummy prerequisite for the
    // DeployArchiveAction ? Needs a few changes there as we can't pass inputs
    SingleJarActionBuilder.createSourceJarAction(ruleContext,
        ImmutableMap.<PathFragment, Artifact>of(), transitiveSourceJars.toCollection(),
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_DEPLOY_SOURCE_JAR));
    RuleConfiguredTargetBuilder builder =
        new RuleConfiguredTargetBuilder(ruleContext);
    builder.add(
        JavaPrimaryClassProvider.class,
        new JavaPrimaryClassProvider(
            semantics.getPrimaryClass(ruleContext, common.getSrcsArtifacts())));
    semantics.addProviders(ruleContext, common, jvmFlags, classJar, srcJar,
        genClassJar, genSourceJar, ImmutableMap.<Artifact, Artifact>of(),
        filesBuilder, builder);
    Artifact deployJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_DEPLOY_JAR);
    boolean runProguard = applyProguardIfRequested(
        ruleContext, deployJar, common.getBootClasspath(), mainClass, semantics, filesBuilder);
    NestedSet<Artifact> filesToBuild = filesBuilder.build();
    // Need not include normal runtime classpath in runfiles if Proguard is used because _deploy.jar
    // is used as classpath instead. Keeping runfiles unchanged has however the advantage that
    // manually running executable without --singlejar works (although it won't depend on Proguard).
    collectDefaultRunfiles(runfilesBuilder, ruleContext, common, javaArtifacts, filesToBuild,
        launcher, dynamicRuntimeActionInputs);
    Runfiles defaultRunfiles = runfilesBuilder.build();
    RunfilesSupport runfilesSupport = null;
    if (createExecutable) {
        List<String> extraArgs =
            new ArrayList<>(semantics.getExtraArguments(ruleContext, common.getSrcsArtifacts()));
        if (runProguard) {
            // Instead of changing the classpath written into the wrapper script, pass --singlejar when
            // running the script (which causes the deploy.jar written by Proguard to be used instead of
            // the normal classpath). It's a bit odd to do this b/c manually running the script wouldn't
            // use Proguard's output unless --singlejar is explicitly supplied. On the other hand the
            // behavior of the script is more consistent: the (proguarded) deploy.jar is only used with
            // --singlejar. Moreover, people will almost always run tests using blaze test, which does
            // use Proguard's output thanks to this extra arg when enabled. Also, it's actually hard to
            // get the classpath changed in the wrapper script (would require calling
            // JavaCommon.setClasspathFragment with a new fragment at the *end* of this method because
            // the classpath is evaluated lazily when generating the wrapper script) and the wrapper
            // script would essentially have an if (--singlejar was set), set classpath to deploy jar,
            // otherwise, set classpath to deploy jar.
            extraArgs.add("--wrapper_script_flag=--singlejar");
        }
        runfilesSupport =
            RunfilesSupport.withExecutable(ruleContext, defaultRunfiles, executable, extraArgs);
    }
    RunfilesProvider runfilesProvider = RunfilesProvider.withData(
        defaultRunfiles,
        new Runfiles.Builder(
            ruleContext.getWorkspaceName(),
            ruleContext.getConfiguration().legacyExternalRunfiles())
            .merge(runfilesSupport)
            .build());
    ImmutableList<String> deployManifestLines =
        getDeployManifestLines(ruleContext, originalMainClass);
    // When running Proguard:
    // (1) write single jar to intermediate destination; Proguard will write _deploy.jar file
    // (2) Don't depend on runfiles to avoid circular dependency, since _deploy.jar is itself part
    //     of runfiles when Proguard runs (because executable then needs it) and _deploy.jar depends
    //     on this single jar.
    // (3) Don't bother with compression since Proguard will write the final jar anyways
    deployArchiveBuilder
        .setOutputJar(
            runProguard
                ? ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_MERGED_JAR)
                : deployJar)
        .setJavaStartClass(mainClass)
        .setDeployManifestLines(deployManifestLines)
        .setAttributes(attributes)
        .addRuntimeJars(javaArtifacts.getRuntimeJars())
        .setIncludeBuildData(true)
        .setRunfilesMiddleman(
            runProguard || runfilesSupport == null ? null : runfilesSupport.getRunfilesMiddleman())
        .setCompression(runProguard ? UNCOMPRESSED : COMPRESSED)
        .setLauncher(launcher)
        .build();
    Artifact unstrippedDeployJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_UNSTRIPPED_BINARY_DEPLOY_JAR);
    if (stripAsDefault) {
        unstrippedDeployArchiveBuilder
            .setOutputJar(unstrippedDeployJar)
            .setJavaStartClass(mainClass)
            .setDeployManifestLines(deployManifestLines)
            .setAttributes(attributes)
            .addRuntimeJars(javaArtifacts.getRuntimeJars())
            .setIncludeBuildData(true)
            .setRunfilesMiddleman(
                runfilesSupport == null ? null : runfilesSupport.getRunfilesMiddleman())
            .setCompression(COMPRESSED)
            .setLauncher(unstrippedLauncher);
        unstrippedDeployArchiveBuilder.build();
    } else {
        // Write an empty file as the name_deploy.jar.unstripped when the default output jar is not
        // stripped.
        ruleContext.registerAction(
            new FileWriteAction(ruleContext.getActionOwner(), unstrippedDeployJar, "", false));
    }
    common.addTransitiveInfoProviders(builder, filesToBuild, classJar);
    common.addGenJarsProvider(builder, genClassJar, genSourceJar);
    return builder
        .setFilesToBuild(filesToBuild)
        .add(JavaRuleOutputJarsProvider.class, JavaRuleOutputJarsProvider.builder()
            .addOutputJar(classJar, null /* iJar */, srcJar)
            .setJdeps(outputDepsProto)
            .build())
        .add(RunfilesProvider.class, runfilesProvider)
        .setRunfilesSupport(runfilesSupport, executable)
        .add(
            JavaRuntimeClasspathProvider.class,
            new JavaRuntimeClasspathProvider(common.getRuntimeClasspath()))
        .add(
            JavaSourceInfoProvider.class,
            JavaSourceInfoProvider.fromJavaTargetAttributes(attributes, semantics))
        .add(
            JavaSourceJarsProvider.class, new JavaSourceJarsProvider(transitiveSourceJars, srcJars))
        .addOutputGroup(JavaSemantics.SOURCE_JARS_OUTPUT_GROUP, transitiveSourceJars)
        .build();
}
// Note: the deploy jar itself is wired up in create() above — it is made dependent on the
// runfiles middleman when an executable is created, and is intentionally excluded from
// files-to-build so it is only built when explicitly requested.
/**
 * Returns the manifest lines for the deploy jar: the user-supplied
 * {@code deploy_manifest_lines} attribute, plus a {@code Coverage-Main-Class}
 * entry (the original, pre-coverage main class) when coverage is enabled.
 */
private ImmutableList<String> getDeployManifestLines(RuleContext ruleContext,
    String originalMainClass) {
    ImmutableList.Builder<String> lines = ImmutableList.builder();
    lines.addAll(ruleContext.attributes().get("deploy_manifest_lines", Type.STRING_LIST));
    if (ruleContext.getConfiguration().isCodeCoverageEnabled()) {
        lines.add("Coverage-Main-Class: " + originalMainClass);
    }
    return lines.build();
}
// Populates the runfiles builder with everything this binary needs at runtime:
// files to build, runtime jars, launcher handling, dependency runfiles,
// coverage instrumentation, the runtime classpath, and (when the JDK comes
// from the source tree) the JDK files plus C++ runtime symlinks.
// NOTE(review): the order of additions is preserved as-is; Runfiles building
// may be order-sensitive — do not reorder.
private void collectDefaultRunfiles(Runfiles.Builder builder, RuleContext ruleContext,
    JavaCommon common, JavaCompilationArtifacts javaArtifacts, NestedSet<Artifact> filesToBuild,
    Artifact launcher, Iterable<Artifact> dynamicRuntimeActionInputs) {
    // Convert to iterable: filesToBuild has a different order.
    builder.addArtifacts((Iterable<Artifact>) filesToBuild);
    builder.addArtifacts(javaArtifacts.getRuntimeJars());
    if (launcher != null) {
        final TransitiveInfoCollection defaultLauncher =
            JavaHelper.launcherForTarget(semantics, ruleContext);
        final Artifact defaultLauncherArtifact =
            JavaHelper.launcherArtifactForTarget(semantics, ruleContext);
        if (!defaultLauncherArtifact.equals(launcher)) {
            builder.addArtifact(launcher);
            // N.B. The "default launcher" referred to here is the launcher target specified through
            // an attribute or flag. We wish to retain the runfiles of the default launcher, *except*
            // for the original cc_binary artifact, because we've swapped it out with our custom
            // launcher. Hence, instead of calling builder.addTarget(), or adding an odd method
            // to Runfiles.Builder, we "unravel" the call and manually add things to the builder.
            // Because the NestedSet representing each target's launcher runfiles is re-built here,
            // we may see increased memory consumption for representing the target's runfiles.
            Runfiles runfiles =
                defaultLauncher.getProvider(RunfilesProvider.class)
                    .getDefaultRunfiles();
            NestedSetBuilder<Artifact> unconditionalArtifacts = NestedSetBuilder.compileOrder();
            for (Artifact a : runfiles.getUnconditionalArtifacts()) {
                if (!a.equals(defaultLauncherArtifact)) {
                    unconditionalArtifacts.add(a);
                }
            }
            builder.addTransitiveArtifacts(unconditionalArtifacts.build());
            builder.addSymlinks(runfiles.getSymlinks());
            builder.addRootSymlinks(runfiles.getRootSymlinks());
            builder.addPruningManifests(runfiles.getPruningManifests());
        } else {
            builder.addTarget(defaultLauncher, RunfilesProvider.DEFAULT_RUNFILES);
        }
    }
    semantics.addRunfilesForBinary(ruleContext, launcher, builder);
    builder.addRunfiles(ruleContext, RunfilesProvider.DEFAULT_RUNFILES);
    builder.add(ruleContext, JavaRunfilesProvider.TO_RUNFILES);
    // runtime_deps contribute both their Java-specific and their default runfiles.
    List<? extends TransitiveInfoCollection> runtimeDeps =
        ruleContext.getPrerequisites("runtime_deps", Mode.TARGET);
    builder.addTargets(runtimeDeps, JavaRunfilesProvider.TO_RUNFILES);
    builder.addTargets(runtimeDeps, RunfilesProvider.DEFAULT_RUNFILES);
    semantics.addDependenciesForRunfiles(ruleContext, builder);
    if (ruleContext.getConfiguration().isCodeCoverageEnabled()) {
        Artifact instrumentedJar = javaArtifacts.getInstrumentedJar();
        if (instrumentedJar != null) {
            builder.addArtifact(instrumentedJar);
        }
    }
    builder.addArtifacts((Iterable<Artifact>) common.getRuntimeClasspath());
    // Add the JDK files if it comes from the source repository (see java_stub_template.txt).
    TransitiveInfoCollection javabaseTarget = ruleContext.getPrerequisite(":jvm", Mode.HOST);
    if (javabaseTarget != null) {
        builder.addArtifacts(
            (Iterable<Artifact>) javabaseTarget.getProvider(FileProvider.class).getFilesToBuild());
        // Add symlinks to the C++ runtime libraries under a path that can be built
        // into the Java binary without having to embed the crosstool, gcc, and grte
        // version information contained within the libraries' package paths.
        for (Artifact lib : dynamicRuntimeActionInputs) {
            PathFragment path = CPP_RUNTIMES.getRelative(lib.getExecPath().getBaseName());
            builder.addSymlink(path, lib);
        }
    }
}
/**
 * Returns this target's own source jar merged with the transitive source jars
 * of all its dependencies, in stable order.
 */
private NestedSet<Artifact> collectTransitiveSourceJars(JavaCommon common, Artifact srcJar) {
    NestedSetBuilder<Artifact> sourceJars = NestedSetBuilder.stableOrder();
    sourceJars.add(srcJar);
    for (JavaSourceJarsProvider provider : common.getDependencies(JavaSourceJarsProvider.class)) {
        sourceJars.addTransitive(provider.getTransitiveSourceJars());
    }
    return sourceJars.build();
}
/**
 * Collects the native libraries in the transitive closure of the deps.
 *
 * @param deps the dependencies to be included as roots of the transitive closure
 * @return the native libraries found in the transitive closure of the deps
 */
public static Collection<Artifact> collectNativeLibraries(
    Iterable<? extends TransitiveInfoCollection> deps) {
    NestedSet<LinkerInput> inputs =
        new NativeLibraryNestedSetBuilder().addJavaTargets(deps).build();
    ImmutableList.Builder<Artifact> libraries = ImmutableList.builder();
    for (LinkerInput input : inputs) {
        libraries.add(input.getArtifact());
    }
    return libraries.build();
}
/**
 * This method uses {@link ProguardHelper#applyProguardIfRequested} to create a proguard action
 * if necessary and adds any artifacts created by proguard to the given {@code filesBuilder}.
 * This is convenience to make sure the proguarded Jar is included in the files to build, which is
 * necessary because the Jar written by proguard is used at runtime.
 * If this method returns {@code true} the Proguard is being used and we need to use a
 * {@link DeployArchiveBuilder} to write the input artifact assumed by
 * {@link ProguardHelper#applyProguardIfRequested}.
 */
private static boolean applyProguardIfRequested(RuleContext ruleContext, Artifact deployJar,
    ImmutableList<Artifact> bootclasspath, String mainClassName, JavaSemantics semantics,
    NestedSetBuilder<Artifact> filesBuilder) throws InterruptedException {
    // Proguard is restricted to *_test rules so Proguard never tries to proguard itself.
    if (ruleContext.getRule().getRuleClass().endsWith("_test")) {
        ProguardOutput output =
            JavaBinaryProguardHelper.INSTANCE.applyProguardIfRequested(
                ruleContext, deployJar, bootclasspath, mainClassName, semantics);
        if (output != null) {
            output.addAllToSet(filesBuilder);
            return true;
        }
    }
    return false;
}
// Proguard integration specialized for java_binary/java_test: locates the
// Proguard executable via the :proguard prerequisite and generates the
// rule-specific Proguard spec from the main class.
private static class JavaBinaryProguardHelper extends ProguardHelper {
    // Stateless, so a single shared instance suffices.
    static final JavaBinaryProguardHelper INSTANCE = new JavaBinaryProguardHelper();
    @Override
    @Nullable
    protected FilesToRunProvider findProguard(RuleContext ruleContext) {
        // TODO(bazel-team): Find a way to use Proguard specified in android_sdk rules
        return ruleContext.getExecutablePrerequisite(":proguard", Mode.HOST);
    }
    @Override
    protected ImmutableList<Artifact> collectProguardSpecsForRule(
        RuleContext ruleContext, ImmutableList<Artifact> bootclasspath, String mainClassName) {
        return ImmutableList.of(generateSpecForJavaBinary(ruleContext, mainClassName));
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.TopologyTestDriverWrapper;
import org.apache.kafka.streams.TopologyWrapper;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.processor.internals.InternalTopologyBuilder;
import org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.streams.test.ConsumerRecordFactory;
import org.apache.kafka.test.MockProcessor;
import org.apache.kafka.test.MockProcessorSupplier;
import org.apache.kafka.test.StreamsTestUtils;
import org.junit.Test;
import java.util.Properties;
import static java.util.Arrays.asList;
import static org.apache.kafka.test.StreamsTestUtils.getMetricByName;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Tests for the KTable source node: basic materialization and forwarding, skipped-record
 * metrics/logging for null keys, value-getter lookups (including timestamps and deletions),
 * and old-value forwarding semantics with and without {@code enableSendingOldValues()}.
 */
public class KTableSourceTest {
    private final Consumed<String, String> stringConsumed = Consumed.with(Serdes.String(), Serdes.String());
    private final ConsumerRecordFactory<String, String> recordFactory =
        new ConsumerRecordFactory<>(new StringSerializer(), new StringSerializer(), 0L);
    private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());

    /** Every input record, including tombstones (null values), is forwarded with its timestamp. */
    @Test
    public void testKTable() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";

        final KTable<String, Integer> table1 = builder.table(topic1, Consumed.with(Serdes.String(), Serdes.Integer()));

        final MockProcessorSupplier<String, Integer> supplier = new MockProcessorSupplier<>();
        table1.toStream().process(supplier);

        final ConsumerRecordFactory<String, Integer> integerFactory =
            new ConsumerRecordFactory<>(new StringSerializer(), new IntegerSerializer(), 0L);
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            driver.pipeInput(integerFactory.create(topic1, "A", 1, 10L));
            driver.pipeInput(integerFactory.create(topic1, "B", 2, 11L));
            driver.pipeInput(integerFactory.create(topic1, "C", 3, 12L));
            driver.pipeInput(integerFactory.create(topic1, "D", 4, 13L));
            driver.pipeInput(integerFactory.create(topic1, "A", null, 14L));
            driver.pipeInput(integerFactory.create(topic1, "B", null, 15L));
        }

        assertEquals(
            asList("A:1 (ts: 10)", "B:2 (ts: 11)", "C:3 (ts: 12)", "D:4 (ts: 13)", "A:null (ts: 14)", "B:null (ts: 15)"),
            supplier.theCapturedProcessor().processed);
    }

    /** A record with a null key is skipped, bumping the metric and logging a warning. */
    @Test
    public void kTableShouldLogAndMeterOnSkippedRecords() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic = "topic";
        builder.table(topic, stringConsumed);

        final LogCaptureAppender appender = LogCaptureAppender.createAndRegister();
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            driver.pipeInput(recordFactory.create(topic, null, "value"));
            LogCaptureAppender.unregister(appender);

            assertEquals(1.0, getMetricByName(driver.metrics(), "skipped-records-total", "stream-metrics").metricValue());
            assertThat(appender.getMessages(), hasItem("Skipping record due to null key. topic=[topic] partition=[0] offset=[0]"));
        }
    }

    /**
     * The value getter of a materialized table reflects the latest value and timestamp per key,
     * including out-of-order timestamps and deletions via tombstones.
     */
    @Test
    public void testValueGetter() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";

        @SuppressWarnings("unchecked")
        final KTableImpl<String, String, String> table1 =
            (KTableImpl<String, String, String>) builder.table(topic1, stringConsumed, Materialized.as("store"));

        final Topology topology = builder.build();
        final KTableValueGetterSupplier<String, String> getterSupplier1 = table1.valueGetterSupplier();

        final InternalTopologyBuilder topologyBuilder = TopologyWrapper.getInternalTopologyBuilder(topology);
        topologyBuilder.connectProcessorAndStateStores(table1.name, getterSupplier1.storeNames());

        // Reuse the Topology built (and wired up) above instead of calling builder.build() a
        // second time; the getter's state-store connections were made on this topology's
        // internal builder.
        try (final TopologyTestDriverWrapper driver = new TopologyTestDriverWrapper(topology, props)) {
            final KTableValueGetter<String, String> getter1 = getterSupplier1.get();
            getter1.init(driver.setCurrentNodeForProcessorContext(table1.name));

            driver.pipeInput(recordFactory.create(topic1, "A", "01", 10L));
            driver.pipeInput(recordFactory.create(topic1, "B", "01", 20L));
            driver.pipeInput(recordFactory.create(topic1, "C", "01", 15L));

            assertEquals(ValueAndTimestamp.make("01", 10L), getter1.get("A"));
            assertEquals(ValueAndTimestamp.make("01", 20L), getter1.get("B"));
            assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C"));

            driver.pipeInput(recordFactory.create(topic1, "A", "02", 30L));
            driver.pipeInput(recordFactory.create(topic1, "B", "02", 5L));

            assertEquals(ValueAndTimestamp.make("02", 30L), getter1.get("A"));
            assertEquals(ValueAndTimestamp.make("02", 5L), getter1.get("B"));
            assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C"));

            driver.pipeInput(recordFactory.create(topic1, "A", "03", 29L));

            assertEquals(ValueAndTimestamp.make("03", 29L), getter1.get("A"));
            assertEquals(ValueAndTimestamp.make("02", 5L), getter1.get("B"));
            assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C"));

            // Tombstones remove the keys from the store.
            driver.pipeInput(recordFactory.create(topic1, "A", (String) null, 50L));
            driver.pipeInput(recordFactory.create(topic1, "B", (String) null, 3L));

            assertNull(getter1.get("A"));
            assertNull(getter1.get("B"));
            assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C"));
        }
    }

    /** Without enableSendingOldValues(), downstream always sees {@code (new<-null)} pairs. */
    @Test
    public void testNotSendingOldValue() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";

        @SuppressWarnings("unchecked")
        final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, stringConsumed);

        final MockProcessorSupplier<String, Integer> supplier = new MockProcessorSupplier<>();
        final Topology topology = builder.build().addProcessor("proc1", supplier, table1.name);

        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
            final MockProcessor<String, Integer> proc1 = supplier.theCapturedProcessor();

            driver.pipeInput(recordFactory.create(topic1, "A", "01", 10L));
            driver.pipeInput(recordFactory.create(topic1, "B", "01", 20L));
            driver.pipeInput(recordFactory.create(topic1, "C", "01", 15L));
            proc1.checkAndClearProcessResult("A:(01<-null) (ts: 10)", "B:(01<-null) (ts: 20)", "C:(01<-null) (ts: 15)");

            driver.pipeInput(recordFactory.create(topic1, "A", "02", 8L));
            driver.pipeInput(recordFactory.create(topic1, "B", "02", 22L));
            proc1.checkAndClearProcessResult("A:(02<-null) (ts: 8)", "B:(02<-null) (ts: 22)");

            driver.pipeInput(recordFactory.create(topic1, "A", "03", 12L));
            proc1.checkAndClearProcessResult("A:(03<-null) (ts: 12)");

            driver.pipeInput(recordFactory.create(topic1, "A", (String) null, 15L));
            driver.pipeInput(recordFactory.create(topic1, "B", (String) null, 20L));
            proc1.checkAndClearProcessResult("A:(null<-null) (ts: 15)", "B:(null<-null) (ts: 20)");
        }
    }

    /** With enableSendingOldValues(), downstream sees {@code (new<-old)} pairs on updates. */
    @Test
    public void testSendingOldValue() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";

        @SuppressWarnings("unchecked")
        final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, stringConsumed);
        table1.enableSendingOldValues();
        assertTrue(table1.sendingOldValueEnabled());

        final MockProcessorSupplier<String, Integer> supplier = new MockProcessorSupplier<>();
        final Topology topology = builder.build().addProcessor("proc1", supplier, table1.name);

        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
            final MockProcessor<String, Integer> proc1 = supplier.theCapturedProcessor();

            driver.pipeInput(recordFactory.create(topic1, "A", "01", 10L));
            driver.pipeInput(recordFactory.create(topic1, "B", "01", 20L));
            driver.pipeInput(recordFactory.create(topic1, "C", "01", 15L));
            proc1.checkAndClearProcessResult("A:(01<-null) (ts: 10)", "B:(01<-null) (ts: 20)", "C:(01<-null) (ts: 15)");

            driver.pipeInput(recordFactory.create(topic1, "A", "02", 8L));
            driver.pipeInput(recordFactory.create(topic1, "B", "02", 22L));
            proc1.checkAndClearProcessResult("A:(02<-01) (ts: 8)", "B:(02<-01) (ts: 22)");

            driver.pipeInput(recordFactory.create(topic1, "A", "03", 12L));
            proc1.checkAndClearProcessResult("A:(03<-02) (ts: 12)");

            driver.pipeInput(recordFactory.create(topic1, "A", (String) null, 15L));
            driver.pipeInput(recordFactory.create(topic1, "B", (String) null, 20L));
            proc1.checkAndClearProcessResult("A:(null<-03) (ts: 15)", "B:(null<-02) (ts: 20)");
        }
    }
}
| |
/*
* Copyright 2014 Bernd Vogt and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sourcepit.osgifier.maven.impl;
import static org.junit.Assert.assertThat;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.hamcrest.core.Is;
import org.hamcrest.core.IsEqual;
import org.hamcrest.core.IsNull;
import org.junit.Test;
import org.sourcepit.common.manifest.osgi.Version;
import org.sourcepit.common.manifest.osgi.VersionRange;
/**
 * Tests for {@code MavenToOSGiUtils}: conversion of Maven version strings and Maven version
 * ranges into their OSGi counterparts, including zero-padding of missing segments, qualifier
 * normalization, and the subtle boundary/qualifier differences between Maven's and OSGi's
 * range-inclusion semantics (documented extensively inline below).
 */
public class MavenToOSGiUtilsTest {
    /**
     * Maven version -> OSGi version conversion: missing segments are zero-padded, qualifiers are
     * appended as the fourth OSGi segment, and unparsable versions return {@code null} unless the
     * boolean flag is set (presumably "fall back to a 0.0.0.&lt;qualifier&gt; version" — TODO
     * confirm against MavenToOSGiUtils).
     */
    @Test
    public void testToVersion() throws Exception {
        // assertEquals("1.0.0", JavaUtil.cleanupVersion("1"));
        // assertEquals("1.1.0", JavaUtil.cleanupVersion("1.1"));
        // assertEquals("0.0.0.murks", JavaUtil.cleanupVersion("murks"));
        // assertEquals("1.0.0.v200192827", JavaUtil.cleanupVersion("1.v200192827"));
        // assertEquals("1.0.0.SNAPSHOT", JavaUtil.cleanupVersion("1-SNAPSHOT"));
        // assertEquals("1.0.0.SNAPSHOT", JavaUtil.cleanupVersion("1.0.SNAPSHOT"));

        Version version = MavenToOSGiUtils.toVersion("1", false);
        assertThat("1.0.0", IsEqual.equalTo(version.toString()));
        assertThat("1", IsEqual.equalTo(version.toMinimalString()));

        version = MavenToOSGiUtils.toVersion("1.1", false);
        assertThat("1.1.0", IsEqual.equalTo(version.toString()));
        assertThat("1.1", IsEqual.equalTo(version.toMinimalString()));

        version = MavenToOSGiUtils.toVersion("murks", false);
        assertThat(version, IsNull.nullValue());

        version = MavenToOSGiUtils.toVersion("murks", true);
        assertThat("0.0.0.murks", IsEqual.equalTo(version.toString()));
        assertThat("0.0.0.murks", IsEqual.equalTo(version.toMinimalString()));

        version = MavenToOSGiUtils.toVersion("1.v200192827", false);
        assertThat("1.0.0.v200192827", IsEqual.equalTo(version.toString()));
        assertThat("1.0.0.v200192827", IsEqual.equalTo(version.toMinimalString()));

        version = MavenToOSGiUtils.toVersion("1-SNAPSHOT", false);
        assertThat("1.0.0.SNAPSHOT", IsEqual.equalTo(version.toString()));
        assertThat("1.0.0.SNAPSHOT", IsEqual.equalTo(version.toMinimalString()));

        version = MavenToOSGiUtils.toVersion("1.0.0-SNAPSHOT", false);
        assertThat("1.0.0.SNAPSHOT", IsEqual.equalTo(version.toString()));
        assertThat("1.0.0.SNAPSHOT", IsEqual.equalTo(version.toMinimalString()));
    }

    /**
     * Maven version range -> OSGi version range conversion. The inline tables below contrast the
     * range semantics of Eclipse/OSGi, Maven dependency mediation, and the Maven Enforcer plugin;
     * the paired {@code osgi_*} / {@code mavn_*} assertions demonstrate where the two models
     * agree and (marked {@code // !!!} / {@code // FIX}) where they diverge.
     */
    @Test
    public void testToVersionRange() throws Exception {
        // From eclipse version range doc
        // 1.0 Version 1.0
        // [1.0,2.0) Versions 1.0 (included) to 2.0 (not included)
        // [1.0,2.0] Versions 1.0 to 2.0 (both included)
        // [1.5,) Versions 1.5 and higher
        // (,1.0],[1.2,) Versions up to 1.0 (included) and 1.2 or higher
        String mVersionRange;
        VersionRange oVersionRange;

        mVersionRange = "[1.0.0,2.0.0)";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1.0.0,2.0.0)"));

        mVersionRange = "[1.0,2.2]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1.0,2.2]"));

        mVersionRange = "[1,2)";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1,2)"));

        // Maven meta-versions collapse to the unbounded OSGi range "0".
        mVersionRange = "LATEST";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("0"));

        mVersionRange = "RELEASE";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("0"));

        mVersionRange = "1.0";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.0"));

        mVersionRange = "1.0-RC1";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.0.0.RC1"));

        // Open-ended ranges become simple minimum versions in OSGi.
        mVersionRange = "[1.5,)";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.5"));

        // Multi-set ranges: only the last set survives the conversion.
        mVersionRange = "(,1.0],[1.2,)";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.2"));

        // Qualifiers on range boundaries are cut (see "CUT QUALIFIERS" note below).
        mVersionRange = "[1.0.0-RC1,2.0.0)";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1.0.0,2.0.0)"));

        mVersionRange = "[1,2-RC2]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1,2]"));

        // Maven Version Range Spec
        // http://docs.codehaus.org/display/MAVEN/Dependency+Mediation+and+Conflict+Resolution#DependencyMediationandConflictResolution-DependencyVersionRanges
        // Proposed syntax:
        // Range | Meaning
        // (,1.0] : x <= 1.0
        // 1.0 : "Soft" requirement on 1.0 (just a recommendation - helps select the correct version if it matches all
        // ranges)
        // [1.0] : Hard requirement on 1.0
        // [1.2,1.3] : 1.2 <= x <= 1.3
        // [1.0,2.0) : 1.0 <= x < 2.0
        // [1.5,) : x >= 1.5
        // (,1.0],[1.2,) : x <= 1.0 or x >= 1.2. Multiple sets are comma-separated
        // (,1.1),(1.1,) : This excludes 1.1 if it is known not to work in combination with this library

        // Maven Enforcer Plugin
        // http://maven.apache.org/plugins/maven-enforcer-plugin/rules/versionRanges.html
        // Range Meaning
        // 1.0 x >= 1.0 * The default Maven meaning for 1.0 is everything (,) but with 1.0 recommended. Obviously this
        // doesn't work for enforcing versions here, so it has been redefined as a minimum version.
        // (,1.0] x <= 1.0
        // (,1.0) x < 1.0
        // [1.0] x == 1.0
        // [1.0,) x >= 1.0
        // (1.0,) x > 1.0
        // (1.0,2.0) 1.0 < x < 2.0
        // [1.0,2.0] 1.0 <= x <= 2.0
        // (,1.0],[1.2,) x <= 1.0 or x >= 1.2. Multiple sets are comma-separated
        // (,1.1),(1.1,) x != 1.1

        // OSGi Spec
        // Example Predicate
        // [1.2.3, 4.5.6) 1.2.3 <= x < 4.5.6
        // [1.2.3, 4.5.6] 1.2.3 <= x <= 4.5.6
        // (1.2.3, 4.5.6) 1.2.3 < x < 4.5.6
        // (1.2.3, 4.5.6] 1.2.3 < x <= 4.5.6
        // 1.2.3 1.2.3 <= x

        // Version (Maven Syntax) :: Maven : Enforcer : OSGi :: Osgifier (OSGi Syntax)
        // 1.0 :: * : x >= 1.0 : x >= 1.0 :: 1.0
        // (,1.0] :: x <= 1.0 : 0 < x <= 1.0 : 0 < x <= 1.0 :: [,1.0]

        // Mvn (,1] -> OSGi [,1]
        osgi_assertIsNotIncluded("(,1]", "0"); // !!!
        osgi_assertIsIncluded("[,1]", "0");
        mavn_assertIsIncluded("(,1]", "0");
        mVersionRange = "(,1]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[,1]"));

        // Mvn [,1] -> OSGi [,1]
        osgi_assertIsIncluded("[,1]", "0");
        mavn_assertIsIncluded("[,1]", "0");
        mVersionRange = "[,1]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[,1]"));

        // Mvn [0,1] -> OSGi [0,1]
        osgi_assertIsIncluded("[0,1]", "0");
        mavn_assertIsIncluded("[0,1]", "0");
        mVersionRange = "[0,1]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[0,1]"));

        // Mvn (0,1] -> OSGi (0,1] or (,1]
        osgi_assertIsNotIncluded("(,1]", "0");
        osgi_assertIsIncluded("(,1]", "0.1");
        osgi_assertIsNotIncluded("(0,1]", "0");
        osgi_assertIsIncluded("(0,1]", "0.0.1");
        mavn_assertIsNotIncluded("(0,1]", "0");
        mavn_assertIsIncluded("(0,1]", "0.0.1");
        mVersionRange = "(0,1]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("(0,1]"));

        // Mvn (1,2] -> OSGi (1,2]
        osgi_assertIsNotIncluded("(1,2]", "1");
        osgi_assertIsIncluded("(1,2]", "1.0.1");
        mavn_assertIsNotIncluded("(1,2]", "1");
        mavn_assertIsIncluded("(1,2]", "1.0.1");
        mVersionRange = "(1,2]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("(1,2]"));

        // Mvn [1,2] -> OSGi [1,2]
        osgi_assertIsIncluded("[1,2]", "1");
        mavn_assertIsIncluded("[1,2]", "1");
        mVersionRange = "[1,2]";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1,2]"));

        // Mvn [1-RC1,2) -> OSGi [1.0.0.RC1,2)
        // assertThat(VersionRange.parse("[1.0.0.RC1,2)").includes(Version.parse("1");
        osgi_assertIsIncluded("[1,2)", "1.0.0");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.RC1");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.RC2");
        osgi_assertIsIncluded("[1,2)", "1.0.0");
        osgi_assertIsIncluded("[1,2)", "1.0.0.A");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.a");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.fooooooo");
        osgi_assertIsIncluded("[1,2)", "1.0.0.AAA");
        osgi_assertIsIncluded("[1,2)", "1.0.0.AAAA");
        osgi_assertIsIncluded("[1,2)", "1.0.0.Z");
        osgi_assertIsIncluded("[1,2)", "1.0.0.SNAPSHOT");
        osgi_assertIsIncluded("[1,2)", "1.0.0.121212");

        // maven magic
        // assertThat(VersionRange.parse("[1.0.0.RC1,2)").includes(Version.parse("1.0.0.beta")), Is.is(false));
        // assertThat(VersionRange.parse("[1.0.0.RC1,2)").includes(Version.parse("1.0.0.alpha")), Is.is(false));
        // assertThat(VersionRange.parse("[1.0.0.RC1,2)").includes(Version.parse("1.0.0.ALPHA")), Is.is(false));

        osgi_assertIsIncluded("[1,2)", "1.0.0");
        osgi_assertIsIncluded("[1,2)", "1.0.0.RC1");
        osgi_assertIsIncluded("[1,2)", "1.0.0.RC2");
        osgi_assertIsIncluded("[1,2)", "1.0.0");
        osgi_assertIsIncluded("[1,2)", "1.0.0.A");
        osgi_assertIsIncluded("[1,2)", "1.0.0.a");
        osgi_assertIsIncluded("[1,2)", "1.0.0.fooooooo");
        osgi_assertIsIncluded("[1,2)", "1.0.0.AAA");
        osgi_assertIsIncluded("[1,2)", "1.0.0.AAAA");
        osgi_assertIsIncluded("[1,2)", "1.0.0.Z");
        osgi_assertIsIncluded("[1,2)", "1.0.0.SNAPSHOT");
        osgi_assertIsIncluded("[1,2)", "1.0.0.121212");

        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.RC1");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.RC2");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0.A");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0.a");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0.fooooooo");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0.AAA");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0.AAAA");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.Z");
        osgi_assertIsIncluded("[1.0.0.RC1,2)", "1.0.0.SNAPSHOT");
        osgi_assertIsIncluded("[1.0.0,2)", "1.0.0.121212");

        mavn_assertIsIncluded("[1-RC1,2)", "1");
        mavn_assertIsIncluded("[1-RC1,2)", "1-RC1");
        mavn_assertIsIncluded("[1-RC1,2)", "1-RC2");
        mavn_assertIsIncluded("[1-RC1,2)", "1.0.0");
        mavn_assertIsIncluded("[1-RC1,2)", "1-A");
        mavn_assertIsIncluded("[1-RC1,2)", "1-a");
        mavn_assertIsIncluded("[1-RC1,2)", "1-fooooooo");
        mavn_assertIsIncluded("[1-RC1,2)", "1-AAA");
        mavn_assertIsIncluded("[1-RC1,2)", "1-AAAA");
        mavn_assertIsIncluded("[1-RC1,2)", "1-Z");
        mavn_assertIsIncluded("[1-RC1,2)", "1-SNAPSHOT");
        mavn_assertIsIncluded("[1-RC1,2)", "1-121212");

        // Mvn [1-cc,2) -> OSGi [1.0.0.cc,2)
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.RC1");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.RC2");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.A");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.a");
        osgi_assertIsIncluded("[1.0.0.cc,2)", "1.0.0.fooooooo");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.AAA");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.AAAA");
        osgi_assertIsIncluded("[1.0.0.cc,2)", "1.0.0.z");
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.SNAPSHOT");

        mavn_assertIsNotIncluded("[1-cc,2)", "1");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-RC1");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-RC2");
        mavn_assertIsNotIncluded("[1-cc,2)", "1.0.0");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-A");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-a");
        mavn_assertIsIncluded("[1-cc,2)", "1-fooooooo");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-AAA");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-AAAA");
        mavn_assertIsIncluded("[1-cc,2)", "1-z");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-SNAPSHOT");

        // maven magic
        // FIX: Version to lower
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.Z");
        mavn_assertIsIncluded("[1-cc,2)", "1-Z");
        // FIX: Version remove snapshot
        osgi_assertIsIncluded("[1.0.0.cc,2)", "1.0.0.snapshot");
        mavn_assertIsNotIncluded("[1-cc,2)", "1-snapshot");
        osgi_assertIsNotIncluded("[1.0.0.zz,2)", "1.0.0.snapshot");
        mavn_assertIsNotIncluded("[1-zz,2)", "1-snapshot");

        // FIX ?????????????????????
        osgi_assertIsNotIncluded("[1.0.0.cc,2)", "1.0.0.121212");
        mavn_assertIsIncluded("[1-cc,2)", "1-121212");
        osgi_assertIsIncluded("[1.0.0.121212,2)", "1.0.0.cc");
        mavn_assertIsNotIncluded("[1-121212,2)", "1-cc");

        // maven magic see org.apache.maven.artifact.versioning.ComparableVersion.StringItem.QUALIFIERS = { "alpha",
        // "beta", "milestone", "rc", "snapshot", "", "sp" };
        mavn_assertIsNotIncluded("[1-RC1,2)", "1-alpha");
        mavn_assertIsNotIncluded("[1-RC1,2)", "1-ALPHA");
        mavn_assertIsNotIncluded("[1-RC1,2)", "1-beta");

        // / ----------------> CUT QUALIFIERS WHILE CONVERTING RANGES!!!!!!
        mVersionRange = "[1-RC1,2)";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("[1,2)"));

        // (,1.0) :: x < 1.0 : x < 1.0 : x < 1.0 :: [0,1.0)
        // [1.0] :: x == 1.0: x == 1.0 : x == 1.0 :: [1.0,1.0]

        // how to handle snapshots?
        // assertEquals("[1.5,2)", JavaUtil.createVersionRange(null, "1.5-SNAPSHOT"));
        // assertEquals("[1.5.2,1.6)", JavaUtil.createVersionRange("1.5.2", null));
        // assertEquals("[1.5.2,1.6)", JavaUtil.createVersionRange(null, "1.5.2"));
        // assertEquals("[1.5.2,1.6)", JavaUtil.createVersionRange(null, "1.5.2-SNAPSHOT"));
        // assertEquals("[1.0.0,1.1)", JavaUtil.createVersionRange("1.0.0.v2011", null));
        mavn_assertIsIncluded("1.0-SNAPSHOT", "1.0-SNAPSHOT");
        osgi_assertIsIncluded("1.0.0.SNAPSHOT", "1.0.0.SNAPSHOT");
        mVersionRange = "1.0-SNAPSHOT";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        // assertThat(oVersionRange.toString(), IsEqual.equalTo("[1.0,2)"));
        // or
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.0.0.SNAPSHOT"));
        // ?

        // Qualifier case is preserved as-is on bare versions.
        mVersionRange = "1.0-SnApShOt";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.0.0.SnApShOt"));

        mVersionRange = "1.0-ABC-SNAPSHOT";
        oVersionRange = MavenToOSGiUtils.toVersionRange(mVersionRange);
        assertThat(oVersionRange.toString(), IsEqual.equalTo("1.0.0.ABC-SNAPSHOT"));
    }

    /** Asserts that the Maven version range contains the Maven version. */
    private static void mavn_assertIsIncluded(String versionRange, String version)
        throws InvalidVersionSpecificationException {
        assertThat(newMvnVersionRange(versionRange).containsVersion(newMvnVersion(version)), Is.is(true));
    }

    /** Asserts that the Maven version range does NOT contain the Maven version. */
    private static void mavn_assertIsNotIncluded(String versionRange, String version)
        throws InvalidVersionSpecificationException {
        assertThat(newMvnVersionRange(versionRange).containsVersion(newMvnVersion(version)), Is.is(false));
    }

    /** Asserts that the OSGi version range includes the OSGi version. */
    private static void osgi_assertIsIncluded(String versionRange, String version)
        throws InvalidVersionSpecificationException {
        assertThat(VersionRange.parse(versionRange).includes(Version.parse(version)), Is.is(true));
    }

    /** Asserts that the OSGi version range does NOT include the OSGi version. */
    private static void osgi_assertIsNotIncluded(String versionRange, String version)
        throws InvalidVersionSpecificationException {
        assertThat(VersionRange.parse(versionRange).includes(Version.parse(version)), Is.is(false));
    }

    /** Parses a Maven artifact version. */
    private static DefaultArtifactVersion newMvnVersion(String version) {
        return new DefaultArtifactVersion(version);
    }

    /** Parses a Maven version-range spec (e.g. "[1.0,2.0)"). */
    private static org.apache.maven.artifact.versioning.VersionRange newMvnVersionRange(String versionRange)
        throws InvalidVersionSpecificationException {
        return org.apache.maven.artifact.versioning.VersionRange.createFromVersionSpec(versionRange);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo.builders;
import org.apache.lucene.geo.Line;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LineString;
import org.elasticsearch.common.geo.GeoShapeType;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.common.geo.GeoUtils.normalizeLat;
import static org.elasticsearch.common.geo.GeoUtils.normalizeLon;
public class LineStringBuilder extends ShapeBuilder<JtsGeometry, LineStringBuilder> {
public static final GeoShapeType TYPE = GeoShapeType.LINESTRING;
/**
* Construct a new LineString.
* Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring)
* a LineString must contain two or more coordinates
* @param coordinates the initial list of coordinates
* @throws IllegalArgumentException if there are less then two coordinates defined
*/
public LineStringBuilder(List<Coordinate> coordinates) {
super(coordinates);
if (coordinates.size() < 2) {
throw new IllegalArgumentException("invalid number of points in LineString (found [" + coordinates.size()+ "] - must be >= 2)");
}
}
public LineStringBuilder(CoordinatesBuilder coordinates) {
this(coordinates.build());
}
/**
* Read from a stream.
*/
public LineStringBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
coordinatesToXcontent(builder, false);
builder.endObject();
return builder;
}
/**
* Closes the current lineString by adding the starting point as the end point.
* This will have no effect if starting and end point are already the same.
*/
public LineStringBuilder close() {
Coordinate start = coordinates.get(0);
Coordinate end = coordinates.get(coordinates.size() - 1);
if(start.x != end.x || start.y != end.y) {
coordinates.add(start);
}
return this;
}
@Override
public GeoShapeType type() {
return TYPE;
}
@Override
public int numDimensions() {
if (coordinates == null || coordinates.isEmpty()) {
throw new IllegalStateException("unable to get number of dimensions, " +
"LineString has not yet been initialized");
}
return Double.isNaN(coordinates.get(0).z) ? 2 : 3;
}
@Override
public JtsGeometry buildS4J() {
Coordinate[] coordinates = this.coordinates.toArray(new Coordinate[this.coordinates.size()]);
Geometry geometry;
if(wrapdateline) {
ArrayList<LineString> strings = decomposeS4J(FACTORY, coordinates, new ArrayList<LineString>());
if(strings.size() == 1) {
geometry = strings.get(0);
} else {
LineString[] linestrings = strings.toArray(new LineString[strings.size()]);
geometry = FACTORY.createMultiLineString(linestrings);
}
} else {
geometry = FACTORY.createLineString(coordinates);
}
return jtsGeometry(geometry);
}
@Override
public Object buildLucene() {
// decompose linestrings crossing dateline into array of Lines
Coordinate[] coordinates = this.coordinates.toArray(new Coordinate[this.coordinates.size()]);
if (wrapdateline) {
ArrayList<Line> linestrings = decomposeLucene(coordinates, new ArrayList<>());
if (linestrings.size() == 1) {
return linestrings.get(0);
} else {
return linestrings.toArray(new Line[linestrings.size()]);
}
}
return new Line(Arrays.stream(coordinates).mapToDouble(i->normalizeLat(i.y)).toArray(),
Arrays.stream(coordinates).mapToDouble(i->normalizeLon(i.x)).toArray());
}
static ArrayList<LineString> decomposeS4J(GeometryFactory factory, Coordinate[] coordinates, ArrayList<LineString> strings) {
for(Coordinate[] part : decompose(+DATELINE, coordinates)) {
for(Coordinate[] line : decompose(-DATELINE, part)) {
strings.add(factory.createLineString(line));
}
}
return strings;
}
static ArrayList<Line> decomposeLucene(Coordinate[] coordinates, ArrayList<Line> lines) {
for (Coordinate[] part : decompose(+DATELINE, coordinates)) {
for (Coordinate[] line : decompose(-DATELINE, part)) {
lines.add(new Line(Arrays.stream(line).mapToDouble(i->normalizeLat(i.y)).toArray(),
Arrays.stream(line).mapToDouble(i->normalizeLon(i.x)).toArray()));
}
}
return lines;
}
/**
 * Decompose a linestring given as an array of coordinates at a vertical line.
 *
 * Note: this method mutates the {@code coordinates} array it is given (it
 * overwrites vertices with intersection points as it walks the string).
 *
 * @param dateline x-axis intercept of the vertical line
 * @param coordinates coordinates forming the linestring
 * @return array of linestrings given as coordinate arrays
 */
private static Coordinate[][] decompose(double dateline, Coordinate[] coordinates) {
    // Index into `coordinates` where the part currently being collected starts.
    int offset = 0;
    ArrayList<Coordinate[]> parts = new ArrayList<>();
    // Horizontal shift applied to the current part: a part starting beyond
    // +/-DATELINE is translated back by a full world width in shift().
    double shift = coordinates[0].x > DATELINE ? DATELINE : (coordinates[0].x < -DATELINE ? -DATELINE : 0);
    for (int i = 1; i < coordinates.length; i++) {
        // Parametric position where segment (i-1, i) crosses the vertical line;
        // NaN when the segment does not cross it.
        double t = intersection(coordinates[i-1], coordinates[i], dateline);
        if(!Double.isNaN(t)) {
            Coordinate[] part;
            if(t<1) {
                // Crossing strictly inside the segment: the part ends at the
                // intersection point, and the next part starts from it.
                part = Arrays.copyOfRange(coordinates, offset, i+1);
                part[part.length-1] = Edge.position(coordinates[i-1], coordinates[i], t);
                // Write the intersection back into the source array so the next
                // part picks it up as its first vertex.
                // NOTE(review): the index `offset+i-1` equals `i-1` only while
                // offset == 0; for later parts it points past vertex i-1 —
                // confirm this is intended and not an off-by-offset bug.
                coordinates[offset+i-1] = Edge.position(coordinates[i-1], coordinates[i], t);
                shift(shift, part);
                offset = i-1;
                shift = coordinates[i].x > DATELINE ? DATELINE : (coordinates[i].x < -DATELINE ? -DATELINE : 0);
            } else {
                // Crossing exactly on vertex i: close the part at that vertex.
                part = shift(shift, Arrays.copyOfRange(coordinates, offset, i+1));
                offset = i;
            }
            parts.add(part);
        }
    }
    if(offset == 0) {
        // No crossing at all: the whole linestring is one (possibly shifted) part.
        parts.add(shift(shift, coordinates));
    } else if(offset < coordinates.length-1) {
        // Tail remaining after the last crossing.
        Coordinate[] part = Arrays.copyOfRange(coordinates, offset, coordinates.length);
        parts.add(shift(shift, part));
    }
    return parts.toArray(new Coordinate[parts.size()][]);
}
// Translates every coordinate by a full world width (2 * shift) along x,
// in place, and returns the same array. A zero shift is a no-op.
private static Coordinate[] shift(double shift, Coordinate...coordinates) {
    if (shift == 0) {
        return coordinates;
    }
    for (int idx = 0; idx < coordinates.length; idx++) {
        Coordinate original = coordinates[idx];
        coordinates[idx] = new Coordinate(original.x - 2 * shift, original.y);
    }
    return coordinates;
}
}
| |
package org.oauthsimple.model;
import java.io.Serializable;
import org.oauthsimple.utils.Preconditions;
/**
 * Represents an OAuth token (either request or access token) and its secret,
 * extended by mcxiaoke with expiresAt, uid and userName fields.
 *
 * Equality and hash code are based on the (token, secret) pair only; the
 * auxiliary fields (expiresAt, uid, userName, rawResponse) do not take part.
 *
 * @author Pablo Fernandez
 * @author mcxiaoke
 */
public class OAuthToken implements Serializable {

    private static final long serialVersionUID = 715000866082812683L;

    /**
     * Factory method that returns an empty token (token = "", secret = "").
     *
     * Useful for two-legged OAuth.
     */
    public static OAuthToken EMPTY() {
        return new OAuthToken("", "");
    }

    private String token;
    private String secret; // oauth 1.0: secret, oauth 2.0: refresh token
    private long expiresAt; // expiry timestamp; 0 means unknown/not set
    private long uid; // numeric id of the owning user, if the provider supplies one
    private String userName;
    private String rawResponse; // raw token-exchange response body, if kept

    /**
     * Copy constructor; copies every field of the given token.
     *
     * @param token token to copy, must not be null
     */
    public OAuthToken(OAuthToken token) {
        this(token.getToken(), token.getSecret(), token.getExpiresAt(), token
                .getUid(), token.getUserName(), token.getRawResponse());
    }

    /**
     * Copies all fields from the given token, but only when it is non-null and
     * not empty; otherwise this token is left unchanged.
     *
     * @param token token to copy values from; may be null
     */
    public void update(OAuthToken token) {
        if (token != null && !token.isEmpty()) {
            this.token = token.getToken();
            this.secret = token.getSecret();
            this.expiresAt = token.getExpiresAt();
            this.uid = token.getUid();
            this.userName = token.getUserName();
            this.rawResponse = token.getRawResponse();
        }
    }

    /**
     * Replaces the token and secret, but only when both new values are
     * non-null and non-empty; otherwise this token is left unchanged.
     *
     * @param token new token value
     * @param secret new secret value
     */
    public void update(String token, String secret) {
        if (isNotEmpty(token) && isNotEmpty(secret)) {
            this.token = token;
            this.secret = secret;
        }
    }

    /**
     * Default constructor.
     *
     * @param token
     *            token value. Can't be null.
     * @param secret
     *            token secret. Can't be null.
     */
    public OAuthToken(String token, String secret) {
        // null selects the (String, String, String rawResponse) overload
        this(token, secret, null);
    }

    public OAuthToken(String token, String secret, long expiresAt) {
        this(token, secret, expiresAt, 0L, null, null);
    }

    public OAuthToken(String token, String secret, long expiresAt, long uid) {
        this(token, secret, expiresAt, uid, null, null);
    }

    public OAuthToken(String token, String secret, long expiresAt, long uid,
            String rawResponse) {
        this(token, secret, expiresAt, uid, null, rawResponse);
    }

    /**
     * Full constructor; all other constructors delegate here.
     *
     * @param token token value, must not be null
     * @param secret token secret, must not be null
     * @param expiresAt expiry timestamp, 0 when unknown
     * @param uid numeric user id, 0 when unknown
     * @param userName user name, may be null
     * @param rawResponse raw provider response, may be null
     */
    public OAuthToken(String token, String secret, long expiresAt, long uid,
            String userName, String rawResponse) {
        Preconditions.checkNotNull(token, "Token can't be null");
        Preconditions.checkNotNull(secret, "Secret can't be null");
        this.token = token;
        this.secret = secret;
        this.expiresAt = expiresAt;
        this.uid = uid;
        this.userName = userName;
        this.rawResponse = rawResponse;
    }

    public OAuthToken(String token, String secret, long expiresAt,
            String rawResponse) {
        this(token, secret, expiresAt, 0L, null, rawResponse);
    }

    public OAuthToken(String token, String secret, String rawResponse) {
        this(token, secret, 0L, rawResponse);
    }

    public String getToken() {
        return token;
    }

    public void setToken(String token) {
        this.token = token;
    }

    public String getSecret() {
        return secret;
    }

    public void setSecret(String secret) {
        this.secret = secret;
    }

    public long getExpiresAt() {
        return expiresAt;
    }

    public void setExpiresAt(long expiresAt) {
        this.expiresAt = expiresAt;
    }

    public long getUid() {
        return uid;
    }

    public void setUid(long uid) {
        this.uid = uid;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getRawResponse() {
        return rawResponse;
    }

    public void setRawResponse(String rawResponse) {
        this.rawResponse = rawResponse;
    }

    /**
     * Returns true if the token is empty (token = "", secret = "")
     */
    public boolean isEmpty() {
        return "".equals(this.token) && "".equals(this.secret);
    }

    private static boolean isNotEmpty(String text) {
        return text != null && text.length() > 0;
    }

    private static boolean isNullOrEmpty(String text) {
        // Fixed: the original used `text == null && text.length() == 0`, which
        // could never be true and threw a NullPointerException when text was
        // null (the && short-circuit does not protect the length() call there).
        return text == null || text.length() == 0;
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): only token and secret participate.
        final int prime = 31;
        int result = 1;
        result = prime * result + ((secret == null) ? 0 : secret.hashCode());
        result = prime * result + ((token == null) ? 0 : token.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OAuthToken other = (OAuthToken) obj;
        if (secret == null) {
            if (other.secret != null)
                return false;
        } else if (!secret.equals(other.secret))
            return false;
        if (token == null) {
            if (other.token != null)
                return false;
        } else if (!token.equals(other.token))
            return false;
        return true;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("OAuthToken [token=");
        builder.append(token);
        builder.append(", secret=");
        builder.append(secret);
        builder.append(", expiresAt=");
        builder.append(expiresAt);
        builder.append(", uid=");
        builder.append(uid);
        builder.append(", userName=");
        builder.append(userName);
        builder.append(", rawResponse=");
        builder.append(rawResponse);
        builder.append("]");
        return builder.toString();
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpledb.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetAttributesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the domain in which to perform the operation. */
    private String domainName;

    /** The name of the item. */
    private String itemName;

    /** The names of the attributes. */
    private com.amazonaws.internal.SdkInternalList<String> attributeNames;

    /**
     * Determines whether or not strong consistency should be enforced when data is read from SimpleDB. If
     * <code>true</code>, any data previously written to SimpleDB will be returned. Otherwise, results will be
     * consistent eventually, and the client may not see data that was written immediately before your read.
     */
    private Boolean consistentRead;

    /**
     * Default constructor. Callers should use the setters or the fluent with-style setters to populate the request
     * after construction.
     */
    public GetAttributesRequest() {
    }

    /**
     * Constructs a new GetAttributesRequest for the given domain and item. Use the setters or with-style setters for
     * any additional members.
     *
     * @param domainName
     *        The name of the domain in which to perform the operation.
     * @param itemName
     *        The name of the item.
     */
    public GetAttributesRequest(String domainName, String itemName) {
        setDomainName(domainName);
        setItemName(itemName);
    }

    /**
     * @param domainName
     *        The name of the domain in which to perform the operation.
     */
    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    /**
     * @return The name of the domain in which to perform the operation.
     */
    public String getDomainName() {
        return domainName;
    }

    /**
     * Fluent variant of {@link #setDomainName(String)}.
     *
     * @param domainName
     *        The name of the domain in which to perform the operation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetAttributesRequest withDomainName(String domainName) {
        setDomainName(domainName);
        return this;
    }

    /**
     * @param itemName
     *        The name of the item.
     */
    public void setItemName(String itemName) {
        this.itemName = itemName;
    }

    /**
     * @return The name of the item.
     */
    public String getItemName() {
        return itemName;
    }

    /**
     * Fluent variant of {@link #setItemName(String)}.
     *
     * @param itemName
     *        The name of the item.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetAttributesRequest withItemName(String itemName) {
        setItemName(itemName);
        return this;
    }

    /**
     * Returns the attribute-name list, lazily creating (and storing) an empty list on first access. The returned list
     * is the live internal list, mirroring the generated SDK behavior.
     *
     * @return The names of the attributes.
     */
    public java.util.List<String> getAttributeNames() {
        if (attributeNames == null) {
            attributeNames = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return attributeNames;
    }

    /**
     * Replaces the attribute-name list with a copy of the given collection, or clears it when null is passed.
     *
     * @param attributeNames
     *        The names of the attributes.
     */
    public void setAttributeNames(java.util.Collection<String> attributeNames) {
        this.attributeNames = (attributeNames == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(attributeNames);
    }

    /**
     * Appends the given attribute names to the existing list (if any). Use
     * {@link #setAttributeNames(java.util.Collection)} or {@link #withAttributeNames(java.util.Collection)} to replace
     * the existing values instead.
     *
     * @param attributeNames
     *        The names of the attributes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetAttributesRequest withAttributeNames(String... attributeNames) {
        if (this.attributeNames == null) {
            setAttributeNames(new com.amazonaws.internal.SdkInternalList<String>(attributeNames.length));
        }
        java.util.Collections.addAll(this.attributeNames, attributeNames);
        return this;
    }

    /**
     * Fluent variant of {@link #setAttributeNames(java.util.Collection)}; replaces the existing values.
     *
     * @param attributeNames
     *        The names of the attributes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetAttributesRequest withAttributeNames(java.util.Collection<String> attributeNames) {
        setAttributeNames(attributeNames);
        return this;
    }

    /**
     * @param consistentRead
     *        Determines whether or not strong consistency should be enforced when data is read from SimpleDB. If
     *        <code>true</code>, any data previously written to SimpleDB will be returned. Otherwise, results will be
     *        consistent eventually, and the client may not see data that was written immediately before your read.
     */
    public void setConsistentRead(Boolean consistentRead) {
        this.consistentRead = consistentRead;
    }

    /**
     * @return Determines whether or not strong consistency should be enforced when data is read from SimpleDB.
     */
    public Boolean getConsistentRead() {
        return consistentRead;
    }

    /**
     * Fluent variant of {@link #setConsistentRead(Boolean)}.
     *
     * @param consistentRead
     *        Determines whether or not strong consistency should be enforced when data is read from SimpleDB.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetAttributesRequest withConsistentRead(Boolean consistentRead) {
        setConsistentRead(consistentRead);
        return this;
    }

    /**
     * @return Determines whether or not strong consistency should be enforced when data is read from SimpleDB.
     */
    public Boolean isConsistentRead() {
        return consistentRead;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDomainName() != null) {
            sb.append("DomainName: ").append(getDomainName()).append(",");
        }
        if (getItemName() != null) {
            sb.append("ItemName: ").append(getItemName()).append(",");
        }
        if (getAttributeNames() != null) {
            sb.append("AttributeNames: ").append(getAttributeNames()).append(",");
        }
        if (getConsistentRead() != null) {
            sb.append("ConsistentRead: ").append(getConsistentRead());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetAttributesRequest)) {
            return false;
        }
        GetAttributesRequest other = (GetAttributesRequest) obj;
        return java.util.Objects.equals(getDomainName(), other.getDomainName())
                && java.util.Objects.equals(getItemName(), other.getItemName())
                && java.util.Objects.equals(getAttributeNames(), other.getAttributeNames())
                && java.util.Objects.equals(getConsistentRead(), other.getConsistentRead());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same 31-based accumulation as the generated
        // prime-multiplier loop, so hash values are unchanged.
        return java.util.Objects.hash(getDomainName(), getItemName(), getAttributeNames(), getConsistentRead());
    }

    @Override
    public GetAttributesRequest clone() {
        return (GetAttributesRequest) super.clone();
    }
}
| |
/*
* SilverFoxServer: massive multiplayer game server for Flash, ...
* VERSION:3.0
* PUBLISH DATE:2015-9-2
* GITHUB:github.com/wdmir/521266750_qq_com
* UPDATES AND DOCUMENTATION AT: http://www.silverfoxserver.net
* COPYRIGHT 2009-2015 SilverFoxServer.NET. All rights reserved.
* MAIL:521266750@qq.com
*/
package org.json;
import java.io.IOException;
import java.io.Writer;
/*
Copyright (c) 2006 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
 * JSONWriter provides a quick and convenient way of producing JSON text.
 * The texts produced strictly conform to JSON syntax rules. No whitespace is
 * added, so the results are ready for transmission or storage. Each instance of
 * JSONWriter can produce one JSON text.
 * <p>
 * A JSONWriter instance provides a <code>value</code> method for appending
 * values to the
 * text, and a <code>key</code>
 * method for adding keys before values in objects. There are <code>array</code>
 * and <code>endArray</code> methods that make and bound array values, and
 * <code>object</code> and <code>endObject</code> methods which make and bound
 * object values. All of these methods return the JSONWriter instance,
 * permitting a cascade style. For example, <pre>
 * new JSONWriter(myWriter)
 *     .object()
 *         .key("JSON")
 *         .value("Hello, World!")
 *     .endObject();</pre> which writes <pre>
 * {"JSON":"Hello, World!"}</pre>
 * <p>
 * The first method called must be <code>array</code> or <code>object</code>.
 * There are no methods for adding commas or colons. JSONWriter adds them for
 * you. Objects and arrays can be nested up to 200 levels deep (the value of
 * the <code>maxdepth</code> constant below).
 * <p>
 * This can sometimes be easier than using a JSONObject to build a string.
 * @author JSON.org
 * @version 2011-11-24
 */
public class JSONWriter {
    // Maximum nesting depth of arrays/objects; exceeding it throws from push().
    private static final int maxdepth = 200;

    /**
     * The comma flag determines if a comma should be output before the next
     * value.
     */
    private boolean comma;

    /**
     * The current mode. Values:
     * 'a' (array),
     * 'd' (done),
     * 'i' (initial),
     * 'k' (key),
     * 'o' (object).
     */
    protected char mode;

    /**
     * The object/array stack. Each entry is a JSONObject recording the keys
     * already written at that object level, or null for an array level.
     */
    private final JSONObject stack[];

    /**
     * The stack top index. A value of 0 indicates that the stack is empty.
     */
    private int top;

    /**
     * The writer that will receive the output.
     */
    protected Writer writer;

    /**
     * Make a fresh JSONWriter. It can be used to build one JSON text.
     */
    public JSONWriter(Writer w) {
        this.comma = false;
        this.mode = 'i';
        this.stack = new JSONObject[maxdepth];
        this.top = 0;
        this.writer = w;
    }

    /**
     * Append a value.
     * @param string A string value.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    private JSONWriter append(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null pointer");
        }
        if (this.mode == 'o' || this.mode == 'a') {
            try {
                // A comma is only needed before an array element; in object
                // mode key() has already written the separating comma.
                if (this.comma && this.mode == 'a') {
                    this.writer.write(',');
                }
                this.writer.write(string);
            } catch (IOException e) {
                throw new JSONException(e);
            }
            if (this.mode == 'o') {
                // A value closes a key/value pair; the next token must be a key.
                this.mode = 'k';
            }
            this.comma = true;
            return this;
        }
        throw new JSONException("Value out of sequence.");
    }

    /**
     * Begin appending a new array. All values until the balancing
     * <code>endArray</code> will be appended to this array. The
     * <code>endArray</code> method must be called to mark the array's end.
     * @return this
     * @throws JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter array() throws JSONException {
        if (this.mode == 'i' || this.mode == 'o' || this.mode == 'a') {
            this.push(null);
            this.append("[");
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced array.");
    }

    /**
     * End something.
     * @param mode Mode
     * @param c Closing character
     * @return this
     * @throws JSONException If unbalanced.
     */
    private JSONWriter end(char mode, char c) throws JSONException {
        if (this.mode != mode) {
            throw new JSONException(mode == 'a'
                ? "Misplaced endArray."
                : "Misplaced endObject.");
        }
        this.pop(mode);
        try {
            this.writer.write(c);
        } catch (IOException e) {
            throw new JSONException(e);
        }
        this.comma = true;
        return this;
    }

    /**
     * End an array. This method must be called to balance calls to
     * <code>array</code>.
     * @return this
     * @throws JSONException If incorrectly nested.
     */
    public JSONWriter endArray() throws JSONException {
        return this.end('a', ']');
    }

    /**
     * End an object. This method must be called to balance calls to
     * <code>object</code>.
     * @return this
     * @throws JSONException If incorrectly nested.
     */
    public JSONWriter endObject() throws JSONException {
        // 'k' is the expected mode because a completed key/value pair leaves
        // the writer waiting for the next key.
        return this.end('k', '}');
    }

    /**
     * Append a key. The key will be associated with the next value. In an
     * object, every value must be preceded by a key.
     * @param string A key string.
     * @return this
     * @throws JSONException If the key is out of place. For example, keys
     * do not belong in arrays or if the key is null.
     */
    public JSONWriter key(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null key.");
        }
        if (this.mode == 'k') {
            try {
                // putOnce throws if this key was already used at this level,
                // so duplicate keys are rejected.
                this.stack[this.top - 1].putOnce(string, Boolean.TRUE);
                if (this.comma) {
                    this.writer.write(',');
                }
                this.writer.write(JSONObject.quote(string));
                this.writer.write(':');
                this.comma = false;
                this.mode = 'o';
                return this;
            } catch (IOException e) {
                throw new JSONException(e);
            }
        }
        throw new JSONException("Misplaced key.");
    }

    /**
     * Begin appending a new object. All keys and values until the balancing
     * <code>endObject</code> will be appended to this object. The
     * <code>endObject</code> method must be called to mark the object's end.
     * @return this
     * @throws JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter object() throws JSONException {
        if (this.mode == 'i') {
            this.mode = 'o';
        }
        if (this.mode == 'o' || this.mode == 'a') {
            this.append("{");
            this.push(new JSONObject());
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced object.");
    }

    /**
     * Pop an array or object scope.
     * @param c The scope to close.
     * @throws JSONException If nesting is wrong.
     */
    private void pop(char c) throws JSONException {
        if (this.top <= 0) {
            throw new JSONException("Nesting error.");
        }
        // A null stack entry marks an array scope; non-null marks an object.
        char m = this.stack[this.top - 1] == null ? 'a' : 'k';
        if (m != c) {
            throw new JSONException("Nesting error.");
        }
        this.top -= 1;
        this.mode = this.top == 0
            ? 'd'
            : this.stack[this.top - 1] == null
                ? 'a'
                : 'k';
    }

    /**
     * Push an array or object scope.
     * @param jo The scope to open; null for an array, a JSONObject for an object.
     * @throws JSONException If nesting is too deep.
     */
    private void push(JSONObject jo) throws JSONException {
        if (this.top >= maxdepth) {
            throw new JSONException("Nesting too deep.");
        }
        this.stack[this.top] = jo;
        this.mode = jo == null ? 'a' : 'k';
        this.top += 1;
    }

    /**
     * Append either the value <code>true</code> or the value
     * <code>false</code>.
     * @param b A boolean.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(boolean b) throws JSONException {
        return this.append(b ? "true" : "false");
    }

    /**
     * Append a double value.
     * @param d A double.
     * @return this
     * @throws JSONException If the number is not finite.
     */
    public JSONWriter value(double d) throws JSONException {
        // NOTE(review): new Double(d) is deprecated boxing in modern JDKs;
        // behavior is unchanged, but Double.valueOf would be preferred.
        return this.value(new Double(d));
    }

    /**
     * Append a long value.
     * @param l A long.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(long l) throws JSONException {
        return this.append(Long.toString(l));
    }

    /**
     * Append an object value.
     * @param object The object to append. It can be null, or a Boolean, Number,
     * String, JSONObject, or JSONArray, or an object that implements JSONString.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(Object object) throws JSONException {
        return this.append(JSONObject.valueToString(object));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hystrix.processor;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandKey;
import com.netflix.hystrix.HystrixCommandProperties;
import com.netflix.hystrix.HystrixThreadPoolKey;
import com.netflix.hystrix.HystrixThreadPoolProperties;
import org.apache.camel.CamelContext;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.Processor;
import org.apache.camel.model.CircuitBreakerDefinition;
import org.apache.camel.model.HystrixConfigurationDefinition;
import org.apache.camel.model.Model;
import org.apache.camel.reifier.ProcessorReifier;
import org.apache.camel.spi.BeanIntrospection;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.support.PropertyBindingSupport;
import org.apache.camel.util.function.Suppliers;
import static org.apache.camel.support.CamelContextHelper.lookup;
import static org.apache.camel.support.CamelContextHelper.mandatoryLookup;
public class HystrixReifier extends ProcessorReifier<CircuitBreakerDefinition> {
/**
 * Creates a reifier that turns the given CircuitBreaker EIP definition into a
 * Hystrix-backed processor.
 *
 * @param definition the circuit-breaker model node to reify
 */
public HystrixReifier(CircuitBreakerDefinition definition) {
    super(definition);
}
/**
 * Builds the HystrixProcessor for this circuit breaker: the wrapped child
 * processor, an optional fallback processor, and the Hystrix command/thread
 * pool setters derived from the resolved configuration.
 */
@Override
public Processor createProcessor(RouteContext routeContext) throws Exception {
    // create the regular and fallback processors
    Processor processor = createChildProcessor(routeContext, true);
    Processor fallback = null;
    if (definition.getOnFallback() != null) {
        fallback = ProcessorReifier.reifier(definition.getOnFallback()).createProcessor(routeContext);
    }

    // merge the effective Hystrix configuration for this node
    final HystrixConfigurationDefinition config = buildHystrixConfiguration(routeContext.getCamelContext());
    final String id = getId(definition, routeContext);

    // group and thread pool keys to use they can be configured on configRef and config, so look there first, and if none then use default
    String groupKey = config.getGroupKey();
    String threadPoolKey = config.getThreadPoolKey();

    if (groupKey == null) {
        groupKey = HystrixConfigurationDefinition.DEFAULT_GROUP_KEY;
    }

    if (threadPoolKey == null) {
        // by default use the thread pool from the group
        threadPoolKey = groupKey;
    }

    // use the node id as the command key
    HystrixCommandKey hcCommandKey = HystrixCommandKey.Factory.asKey(id);
    HystrixCommandKey hcFallbackCommandKey = HystrixCommandKey.Factory.asKey(id + "-fallback");

    // use the configured group key
    HystrixCommandGroupKey hcGroupKey = HystrixCommandGroupKey.Factory.asKey(groupKey);
    HystrixThreadPoolKey tpKey = HystrixThreadPoolKey.Factory.asKey(threadPoolKey);

    // create setter using the default options
    HystrixCommand.Setter setter = HystrixCommand.Setter.withGroupKey(hcGroupKey)
        .andCommandKey(hcCommandKey)
        .andThreadPoolKey(tpKey);

    HystrixCommandProperties.Setter commandSetter = HystrixCommandProperties.Setter();
    setter.andCommandPropertiesDefaults(commandSetter);

    HystrixThreadPoolProperties.Setter threadPoolSetter = HystrixThreadPoolProperties.Setter();
    setter.andThreadPoolPropertiesDefaults(threadPoolSetter);

    // apply the shared configuration onto the command/thread-pool setters
    configureHystrix(commandSetter, threadPoolSetter, config);

    // create setter for fallback via network
    HystrixCommand.Setter fallbackSetter = null;
    boolean fallbackViaNetwork = definition.getOnFallback() != null && parseBoolean(routeContext, definition.getOnFallback().getFallbackViaNetwork());
    if (fallbackViaNetwork) {
        // use a different thread pool that is for fallback (should never use the same thread pool as the regular command)
        HystrixThreadPoolKey tpFallbackKey = HystrixThreadPoolKey.Factory.asKey(threadPoolKey + "-fallback");

        fallbackSetter = HystrixCommand.Setter.withGroupKey(hcGroupKey)
            .andCommandKey(hcFallbackCommandKey)
            .andThreadPoolKey(tpFallbackKey);

        HystrixCommandProperties.Setter commandFallbackSetter = HystrixCommandProperties.Setter();
        fallbackSetter.andCommandPropertiesDefaults(commandFallbackSetter);

        HystrixThreadPoolProperties.Setter fallbackThreadPoolSetter = HystrixThreadPoolProperties.Setter();
        fallbackSetter.andThreadPoolPropertiesDefaults(fallbackThreadPoolSetter);

        // at first configure any shared options
        configureHystrix(commandFallbackSetter, fallbackThreadPoolSetter, config);
    }

    return new HystrixProcessor(hcGroupKey, hcCommandKey, hcFallbackCommandKey, setter, fallbackSetter, processor, fallback, fallbackViaNetwork);
}
private void configureHystrix(HystrixCommandProperties.Setter command, HystrixThreadPoolProperties.Setter threadPool, HystrixConfigurationDefinition config) {
// command
if (config.getCircuitBreakerEnabled() != null) {
command.withCircuitBreakerEnabled(Boolean.parseBoolean(config.getCircuitBreakerEnabled()));
}
if (config.getCircuitBreakerErrorThresholdPercentage() != null) {
command.withCircuitBreakerErrorThresholdPercentage(Integer.parseInt(config.getCircuitBreakerErrorThresholdPercentage()));
}
if (config.getCircuitBreakerForceClosed() != null) {
command.withCircuitBreakerForceClosed(Boolean.parseBoolean(config.getCircuitBreakerForceClosed()));
}
if (config.getCircuitBreakerForceOpen() != null) {
command.withCircuitBreakerForceOpen(Boolean.parseBoolean(config.getCircuitBreakerForceOpen()));
}
if (config.getCircuitBreakerRequestVolumeThreshold() != null) {
command.withCircuitBreakerRequestVolumeThreshold(Integer.parseInt(config.getCircuitBreakerRequestVolumeThreshold()));
}
if (config.getCircuitBreakerSleepWindowInMilliseconds() != null) {
command.withCircuitBreakerSleepWindowInMilliseconds(Integer.parseInt(config.getCircuitBreakerSleepWindowInMilliseconds()));
}
if (config.getExecutionIsolationSemaphoreMaxConcurrentRequests() != null) {
command.withExecutionIsolationSemaphoreMaxConcurrentRequests(Integer.parseInt(config.getExecutionIsolationSemaphoreMaxConcurrentRequests()));
}
if (config.getExecutionIsolationStrategy() != null) {
command.withExecutionIsolationStrategy(HystrixCommandProperties.ExecutionIsolationStrategy.valueOf(config.getExecutionIsolationStrategy()));
}
if (config.getExecutionIsolationThreadInterruptOnTimeout() != null) {
command.withExecutionIsolationThreadInterruptOnTimeout(Boolean.parseBoolean(config.getExecutionIsolationThreadInterruptOnTimeout()));
}
if (config.getExecutionTimeoutInMilliseconds() != null) {
command.withExecutionTimeoutInMilliseconds(Integer.parseInt(config.getExecutionTimeoutInMilliseconds()));
}
if (config.getExecutionTimeoutEnabled() != null) {
command.withExecutionTimeoutEnabled(Boolean.parseBoolean(config.getExecutionTimeoutEnabled()));
}
if (config.getFallbackIsolationSemaphoreMaxConcurrentRequests() != null) {
command.withFallbackIsolationSemaphoreMaxConcurrentRequests(Integer.parseInt(config.getFallbackIsolationSemaphoreMaxConcurrentRequests()));
}
if (config.getFallbackEnabled() != null) {
command.withFallbackEnabled(Boolean.parseBoolean(config.getFallbackEnabled()));
}
if (config.getMetricsHealthSnapshotIntervalInMilliseconds() != null) {
command.withMetricsHealthSnapshotIntervalInMilliseconds(Integer.parseInt(config.getMetricsHealthSnapshotIntervalInMilliseconds()));
}
if (config.getMetricsRollingPercentileBucketSize() != null) {
command.withMetricsRollingPercentileBucketSize(Integer.parseInt(config.getMetricsRollingPercentileBucketSize()));
}
if (config.getMetricsRollingPercentileEnabled() != null) {
command.withMetricsRollingPercentileEnabled(Boolean.parseBoolean(config.getMetricsRollingPercentileEnabled()));
}
if (config.getMetricsRollingPercentileWindowInMilliseconds() != null) {
command.withMetricsRollingPercentileWindowInMilliseconds(Integer.parseInt(config.getMetricsRollingPercentileWindowInMilliseconds()));
}
if (config.getMetricsRollingPercentileWindowBuckets() != null) {
command.withMetricsRollingPercentileWindowBuckets(Integer.parseInt(config.getMetricsRollingPercentileWindowBuckets()));
}
if (config.getMetricsRollingStatisticalWindowInMilliseconds() != null) {
command.withMetricsRollingStatisticalWindowInMilliseconds(Integer.parseInt(config.getMetricsRollingStatisticalWindowInMilliseconds()));
}
if (config.getMetricsRollingStatisticalWindowBuckets() != null) {
command.withMetricsRollingStatisticalWindowBuckets(Integer.parseInt(config.getMetricsRollingStatisticalWindowBuckets()));
}
if (config.getRequestLogEnabled() != null) {
command.withRequestLogEnabled(Boolean.parseBoolean(config.getRequestLogEnabled()));
}
if (config.getCorePoolSize() != null) {
threadPool.withCoreSize(Integer.parseInt(config.getCorePoolSize()));
}
if (config.getMaximumSize() != null) {
threadPool.withMaximumSize(Integer.parseInt(config.getMaximumSize()));
}
if (config.getKeepAliveTime() != null) {
threadPool.withKeepAliveTimeMinutes(Integer.parseInt(config.getKeepAliveTime()));
}
if (config.getMaxQueueSize() != null) {
threadPool.withMaxQueueSize(Integer.parseInt(config.getMaxQueueSize()));
}
if (config.getQueueSizeRejectionThreshold() != null) {
threadPool.withQueueSizeRejectionThreshold(Integer.parseInt(config.getQueueSizeRejectionThreshold()));
}
if (config.getThreadPoolRollingNumberStatisticalWindowInMilliseconds() != null) {
threadPool.withMetricsRollingStatisticalWindowInMilliseconds(Integer.parseInt(config.getThreadPoolRollingNumberStatisticalWindowInMilliseconds()));
}
if (config.getThreadPoolRollingNumberStatisticalWindowBuckets() != null) {
threadPool.withMetricsRollingStatisticalWindowBuckets(Integer.parseInt(config.getThreadPoolRollingNumberStatisticalWindowBuckets()));
}
if (config.getAllowMaximumSizeToDivergeFromCoreSize() != null) {
threadPool.withAllowMaximumSizeToDivergeFromCoreSize(Boolean.parseBoolean(config.getAllowMaximumSizeToDivergeFromCoreSize()));
}
}
// *******************************
// Helpers
// *******************************
HystrixConfigurationDefinition buildHystrixConfiguration(CamelContext camelContext) throws Exception {
Map<String, Object> properties = new HashMap<>();
// Extract properties from default configuration, the one configured on
// camel context takes the precedence over those in the registry
loadProperties(camelContext, properties, Suppliers.firstNotNull(
() -> camelContext.getExtension(Model.class).getHystrixConfiguration(null),
() -> lookup(camelContext, HystrixConstants.DEFAULT_HYSTRIX_CONFIGURATION_ID, HystrixConfigurationDefinition.class))
);
// Extract properties from referenced configuration, the one configured
// on camel context takes the precedence over those in the registry
if (definition.getConfigurationRef() != null) {
final String ref = definition.getConfigurationRef();
loadProperties(camelContext, properties, Suppliers.firstNotNull(
() -> camelContext.getExtension(Model.class).getHystrixConfiguration(ref),
() -> mandatoryLookup(camelContext, ref, HystrixConfigurationDefinition.class))
);
}
// Extract properties from local configuration
loadProperties(camelContext, properties, Optional.ofNullable(definition.getHystrixConfiguration()));
// Extract properties from definition
BeanIntrospection beanIntrospection = camelContext.adapt(ExtendedCamelContext.class).getBeanIntrospection();
beanIntrospection.getProperties(definition, properties, null, false);
HystrixConfigurationDefinition config = new HystrixConfigurationDefinition();
// Apply properties to a new configuration
PropertyBindingSupport.bindProperties(camelContext, config, properties);
return config;
}
private void loadProperties(CamelContext camelContext, Map<String, Object> properties, Optional<?> optional) {
BeanIntrospection beanIntrospection = camelContext.adapt(ExtendedCamelContext.class).getBeanIntrospection();
optional.ifPresent(bean -> beanIntrospection.getProperties(bean, properties, null, false));
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.explorer.project;
import com.eas.designer.application.project.ClientType;
import com.eas.designer.application.project.AppServerType;
import com.eas.designer.application.project.PlatypusProjectSettings;
import com.eas.util.StringUtils;
import java.beans.PropertyChangeSupport;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.logging.Level;
import org.openide.ErrorManager;
import org.openide.filesystems.FileObject;
import org.openide.util.EditableProperties;
/**
* The facade class for the settings of a project.
*
* @author vv
*/
public class PlatypusProjectSettingsImpl implements PlatypusProjectSettings {
public static final int DEFAULT_PLATYPUS_SERVER_PORT = 8500;
public static final int CLIENT_APP_DEFAULT_DEBUG_PORT = 8900;
public static final int SERVER_APP_DEFAULT_DEBUG_PORT = 8901;
public static final Level DEFAULT_LOG_LEVEL = Level.INFO;
public static final String PROJECT_SETTINGS_FILE = "project.properties"; //NOI18N
public static final String PROJECT_PRIVATE_SETTINGS_FILE = "private.properties"; //NOI18N
public static final String PROJECT_DISPLAY_NAME_KEY = "projectDisplayName"; //NOI18N
public static final String RUN_ELEMENT_KEY = "runElement"; //NOI18N
public static final String DEFAULT_DATA_SOURCE_ELEMENT_KEY = "defaultDataSource"; //NOI18N
public static final String RUN_USER_KEY = "runUser"; //NOI18N
public static final String RUN_PASSWORD_KEY = "runPassword"; //NOI18N
public static final String RUN_CLIENT_OPTIONS_KEY = "runClientOptions"; //NOI18N
public static final String RUN_CLIENT_VM_OPTIONS_KEY = "runClientVmOptions"; //NOI18N
public static final String RUN_SERVER_OPTIONS_KEY = "runServerOptions"; //NOI18N
public static final String RUN_SERVER_VM_OPTIONS_KEY = "runServerVmOptions"; //NOI18N
public static final String SERVER_PORT_KEY = "serverPort";//NOI18N
public static final String CLIENT_URL_KEY = "clientUrl";//NOI18N
public static final String NOT_START_SERVER_KEY = "notStartServer"; //NOI18N
public static final String DEBUG_CLIENT_PORT_KEY = "debugClientPort"; //NOI18N
public static final String DEBUG_SERVER_PORT_KEY = "debugServerPort"; //NOI18N
public static final String CLIENT_LOG_LEVEL = "clientLogLevel"; //NOI18N
public static final String SERVER_LOG_LEVEL = "serverLogLevel"; //NOI18N
public static final String J2EE_SERVER_ID_KEY = "j2eeServerId"; //NOI18N
public static final String SERVER_CONTEXT_KEY = "context";//NOI18N
public static final String ENABLE_SECURITY_REALM_KEY = "enableSecurityRealm";//NOI18N
public static final String CLIENT_TYPE_KEY = "clientType"; //NOI18N
public static final String SERVER_TYPE_KEY = "serverType"; //NOI18N
protected static final String START_JS_FILE_TEMPLATE = "" //NOI18N
+ "/**\n" //NOI18N
+ " * Do not edit this file manually, it will be overwritten by\n" //NOI18N
+ " * Platypus Application Designer.\n" //NOI18N
+ " */\n" //NOI18N
+ "// this === global\n" //NOI18N
+ "(function () {\n" //NOI18N
+ " function ready() {\n" //NOI18N
+ " P.cacheBust = true;\n"
+ " P.require('%s', function(){\n" //NOI18N
+ " %s"//NOI18N
+ " %s"//NOI18N
+ " }, function(e){\n" //NOI18N
+ " P.Logger.severe(e);\n"
+ " if(document){\n"
+ " var messageParagraph = document.createElement('p');\n"
+ " document.body.appendChild(messageParagraph);\n"
+ " messageParagraph.innerHTML = 'An error occured while require(\\'%s\\'). Error: ' + e;\n"
+ " messageParagraph.style.margin = '10px';\n"
+ " messageParagraph.style.fontFamily = 'Arial';\n"
+ " messageParagraph.style.fontSize = '14pt';\n"
+ " }\n" //NOI18N
+ " });\n"//NOI18N
+ " }\n"//NOI18N
+ " if(!this.P) {\n" //NOI18N
+ " this.P = {};\n" //NOI18N
+ " P.ready = ready;\n" //NOI18N
+ " } else {\n" //NOI18N
+ " ready();\n" //NOI18N
+ " }\n" //NOI18N
+ "})();\n"; //NOI18N
protected final FileObject projectDir;
protected final PropertyChangeSupport changeSupport = new PropertyChangeSupport(this);
protected EditableProperties projectProperties;
protected EditableProperties projectPrivateProperties;
private boolean projectPropertiesIsDirty;
private boolean projectPrivatePropertiesIsDirty;
public PlatypusProjectSettingsImpl(FileObject aProjectDir) throws Exception {
if (aProjectDir == null) {
throw new IllegalArgumentException("Project directory file object is null."); //NOI18N
}
projectDir = aProjectDir;
projectProperties = new EditableProperties(false);
try (InputStream is = getProjectSettingsFileObject().getInputStream()) {
projectProperties.load(is);
}
projectPrivateProperties = new EditableProperties(false);
try (InputStream is = getProjectPrivateSettingsFileObject().getInputStream()) {
projectPrivateProperties.load(is);
}
}
/**
* Gets the project's display name.
*
* @return title for the project
*/
@Override
public String getDisplayName() {
return projectProperties.get(PROJECT_DISPLAY_NAME_KEY);
}
/**
* Sets the project's display name.
*
* @param aValue title for the project
*/
@Override
public void setDisplayName(String aValue) {
if (aValue == null) {
throw new NullPointerException("The Display name parameter cannot be null."); // NOI18N
}
String oldValue = getDisplayName();
projectProperties.setProperty(PROJECT_DISPLAY_NAME_KEY, aValue);
projectPropertiesIsDirty = true;
changeSupport.firePropertyChange(PROJECT_DISPLAY_NAME_KEY, oldValue, aValue);
}
/**
* Gets default application element to run.
*
* @return application element name
*/
@Override
public String getRunElement() {
return projectProperties.get(RUN_ELEMENT_KEY);
}
/**
* Sets default application element to run.
*
* @param aValue application element name
* @throws java.lang.Exception
*/
@Override
public void setRunElement(String aValue) throws Exception {
String oldValue = getRunElement();
if (oldValue == null ? aValue != null : !oldValue.equals(aValue)) {
if (aValue != null && !aValue.isEmpty()) {
projectProperties.setProperty(RUN_ELEMENT_KEY, aValue);
projectPropertiesIsDirty = true;
} else {
projectProperties.remove(RUN_ELEMENT_KEY);
}
changeSupport.firePropertyChange(RUN_ELEMENT_KEY, oldValue, aValue);
}
}
/**
* Get the default data source name
*
* @return string of the default data source name
*/
@Override
public String getDefaultDataSourceName() {
return projectPrivateProperties.get(DEFAULT_DATA_SOURCE_ELEMENT_KEY);
}
/**
* Sets the default data source name for a project
*
* @param aValue a default data source name
*/
@Override
public void setDefaultDatasourceName(String aValue) {
String oldValue = getDefaultDataSourceName();
if (aValue != null) {
projectPrivateProperties.setProperty(DEFAULT_DATA_SOURCE_ELEMENT_KEY, aValue);
} else {
projectPrivateProperties.remove(DEFAULT_DATA_SOURCE_ELEMENT_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(DEFAULT_DATA_SOURCE_ELEMENT_KEY, oldValue, aValue);
}
/**
* Gets username for the Platypus user to login on application run.
*
* @return Platypus user name
*/
@Override
public String getRunUser() {
return projectPrivateProperties.get(RUN_USER_KEY);
}
/**
* Sets username for the Platypus user to login on application run.
*
* @param aValue Platypus user name
*/
@Override
public void setRunUser(String aValue) {
String oldValue = getRunUser();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_USER_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_USER_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_USER_KEY, oldValue, aValue);
}
/**
* Gets password for the Platypus user to login on application run.
*
* @return Platypus user name
*/
@Override
public String getRunPassword() {
return projectPrivateProperties.get(RUN_PASSWORD_KEY);
}
/**
* Sets password for the Platypus user to login on application run.
*
* @param aValue Platypus user name
*/
@Override
public void setRunPassword(String aValue) {
String oldValue = getRunPassword();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_PASSWORD_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_PASSWORD_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_PASSWORD_KEY, oldValue, aValue);
}
/**
* Gets optional parameters provided to Platypus Client.
*
* @return parameters string
*/
@Override
public String getRunClientOptions() {
return projectPrivateProperties.get(RUN_CLIENT_OPTIONS_KEY);
}
/**
* Sets optional parameters provided to Platypus Client.
*
* @param aValue
*/
@Override
public void setClientOptions(String aValue) {
String oldValue = getRunClientOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_CLIENT_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_CLIENT_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_CLIENT_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets JVM options provided to Platypus Client.
*
* @return parameters string
*/
@Override
public String getRunClientVmOptions() {
return projectPrivateProperties.get(RUN_CLIENT_VM_OPTIONS_KEY);
}
/**
* Sets JVM options provided to Platypus Client.
*
* @param aValue
*/
@Override
public void setClientVmOptions(String aValue) {
String oldValue = getRunClientVmOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_CLIENT_VM_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_CLIENT_VM_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_CLIENT_VM_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets optional parameters provided to Platypus Application Server.
*
* @return parameters string
*/
@Override
public String getRunServerOptions() {
return projectPrivateProperties.get(RUN_SERVER_OPTIONS_KEY);
}
/**
* Sets optional parameters provided to Platypus Application Server.
*
* @param aValue
*/
@Override
public void setServerOptions(String aValue) {
String oldValue = getRunServerOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_SERVER_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_SERVER_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_SERVER_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets JVM options provided to Platypus Application Server.
*
* @return parameters string
*/
@Override
public String getRunServerVmOptions() {
return projectPrivateProperties.get(RUN_SERVER_VM_OPTIONS_KEY);
}
/**
* Sets JVM options provided to Platypus Application Server.
*
* @param aValue
*/
@Override
public void setServerVmOptions(String aValue) {
String oldValue = getRunServerVmOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_SERVER_VM_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_SERVER_VM_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_SERVER_VM_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets application server's host.
*
* @return Url string
*/
@Override
public String getClientUrl() {
return projectPrivateProperties.get(CLIENT_URL_KEY);
}
/**
* Sets application's server host.
*
* @param aValue Url string
*/
@Override
public void setClientUrl(String aValue) {
String oldValue = getClientUrl();
if (aValue != null) {
projectPrivateProperties.setProperty(CLIENT_URL_KEY, aValue);
} else {
projectPrivateProperties.remove(CLIENT_URL_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(CLIENT_URL_KEY, oldValue, aValue);
}
/**
* Gets application's server port.
*
* @return server port
*/
@Override
public int getServerPort() {
return StringUtils.parseInt(projectPrivateProperties.get(SERVER_PORT_KEY), DEFAULT_PLATYPUS_SERVER_PORT);
}
/**
* Sets application's server port.
*
* @param aValue server port
*/
@Override
public void setServerPort(int aValue) {
int oldValue = getServerPort();
projectPrivateProperties.setProperty(SERVER_PORT_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_PORT_KEY, oldValue, aValue);
}
/**
* Checks if NOT to start local development application server on
* application run.
*
* @return true not to start server
*/
@Override
public boolean isNotStartServer() {
return Boolean.valueOf(projectPrivateProperties.get(NOT_START_SERVER_KEY));
}
/**
* Sets flag NOT to start local development application server on
* application run.
*
* @param aValue true not to start server
*/
@Override
public void setNotStartServer(boolean aValue) {
boolean oldValue = isNotStartServer();
projectPrivateProperties.setProperty(NOT_START_SERVER_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(NOT_START_SERVER_KEY, oldValue, aValue);
}
/**
* Gets JMX debugging port for Platypus Client on local computer on
* development if null or empty, use default value.
*
* @return JMX debugging port
*/
@Override
public int getDebugClientPort() {
return StringUtils.parseInt(projectPrivateProperties.get(DEBUG_CLIENT_PORT_KEY), CLIENT_APP_DEFAULT_DEBUG_PORT);
}
/**
* Sets JMX debugging port for Platypus Client on local computer on
* development.
*
* @param aValue JMX debugging port
*/
@Override
public void setDebugClientPort(int aValue) {
int oldValue = getDebugClientPort();
projectPrivateProperties.setProperty(DEBUG_CLIENT_PORT_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(DEBUG_CLIENT_PORT_KEY, oldValue, aValue);
}
/**
* Gets JMX debugging port for Platypus Application Server on local computer
* on development if null or empty, use default value.
*
* @return JMX debugging port
*/
@Override
public int getDebugServerPort() {
return StringUtils.parseInt(projectPrivateProperties.get(DEBUG_SERVER_PORT_KEY), SERVER_APP_DEFAULT_DEBUG_PORT);
}
/**
* Sets JMX debugging port for Platypus Application Server on local computer
* on development.
*
* @param aValue JMX debugging port
*/
@Override
public void setDebugServerPort(int aValue) {
int oldValue = getDebugServerPort();
projectPrivateProperties.setProperty(DEBUG_SERVER_PORT_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(DEBUG_SERVER_PORT_KEY, oldValue, aValue);
}
/**
* Gets J2EE server instance ID.
*
* @return J2EE server ID
*/
@Override
public String getJ2eeServerId() {
return projectPrivateProperties.get(J2EE_SERVER_ID_KEY);
}
/**
* Sets J2EE server instance ID.
*
* @param aValue J2EE server ID
*/
@Override
public void setJ2eeServerId(String aValue) {
String oldValue = getJ2eeServerId();
if (aValue != null) {
projectPrivateProperties.setProperty(J2EE_SERVER_ID_KEY, aValue);
} else {
projectPrivateProperties.remove(J2EE_SERVER_ID_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(J2EE_SERVER_ID_KEY, oldValue, aValue);
}
/**
* Gets application's context name.
*
* @return The name of the context string
*/
@Override
public String getServerContext() {
return projectProperties.get(SERVER_CONTEXT_KEY);
}
/**
* Sets application's context name.
*
* @param aValue The name of the context string
*/
@Override
public void setServerContext(String aValue) {
String oldValue = getServerContext();
if (aValue != null) {
projectProperties.setProperty(SERVER_CONTEXT_KEY, aValue);
} else {
projectProperties.remove(SERVER_CONTEXT_KEY);
}
projectPropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_CONTEXT_KEY, oldValue, aValue);
}
/**
* Checks if security realm to be configured on J2EE server startup.
*
* @return true to enable configure security realm
*/
@Override
public boolean isSecurityRealmEnabled() {
return Boolean.valueOf(projectPrivateProperties.get(ENABLE_SECURITY_REALM_KEY));
}
/**
* Sets if security realm to be configured on J2EE server startup.
*
* @param aValue true to enable configure security realm
*/
@Override
public void setSecurityRealmEnabled(boolean aValue) {
boolean oldValue = isSecurityRealmEnabled();
projectPrivateProperties.setProperty(ENABLE_SECURITY_REALM_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(ENABLE_SECURITY_REALM_KEY, oldValue, aValue);
}
/**
* Gets client type to be run.
*
* @return ClientType instance
*/
@Override
public ClientType getRunClientType() {
ClientType val = ClientType.getById(projectPrivateProperties.get(CLIENT_TYPE_KEY));
return val != null ? val : ClientType.PLATYPUS_CLIENT;
}
/**
* Sets client type to be run.
*
* @param aValue ClientType instance
*/
@Override
public void setRunClientType(ClientType aValue) {
ClientType oldValue = getRunClientType();
if (aValue != null) {
projectPrivateProperties.setProperty(CLIENT_TYPE_KEY, aValue.getId());
} else {
projectPrivateProperties.remove(CLIENT_TYPE_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(CLIENT_TYPE_KEY, aValue, oldValue);
}
/**
* Gets application server type to be run.
*
* @return AppServerType instance
*/
@Override
public AppServerType getRunAppServerType() {
AppServerType val = AppServerType.getById(projectPrivateProperties.get(SERVER_TYPE_KEY));
return val != null ? val : AppServerType.NONE;
}
/**
* Sets application server type to be run.
*
* @param aValue AppServerType instance
*/
@Override
public void setRunAppServerType(AppServerType aValue) {
AppServerType oldValue = getRunAppServerType();
if (aValue != null) {
projectPrivateProperties.setProperty(SERVER_TYPE_KEY, aValue.getId());
} else {
projectPrivateProperties.remove(SERVER_TYPE_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_TYPE_KEY, aValue, oldValue);
}
@Override
public void save() throws Exception {
if (projectPropertiesIsDirty) {
try (OutputStream os = getProjectSettingsFileObject().getOutputStream()) {
projectProperties.store(os);
}
projectPropertiesIsDirty = false;
}
if (projectPrivatePropertiesIsDirty) {
try (OutputStream os = getProjectPrivateSettingsFileObject().getOutputStream()) {
projectPrivateProperties.store(os);
}
projectPrivatePropertiesIsDirty = false;
}
}
@Override
public PropertyChangeSupport getChangeSupport() {
return changeSupport;
}
protected final FileObject getProjectSettingsFileObject() {
FileObject fo = projectDir.getFileObject(PROJECT_SETTINGS_FILE);
if (fo == null) {
try {
fo = projectDir.createData(PROJECT_SETTINGS_FILE);
} catch (IOException ex) {
ErrorManager.getDefault().notify(ex);
}
}
return fo;
}
protected final FileObject getProjectPrivateSettingsFileObject() {
FileObject fo = projectDir.getFileObject(PROJECT_PRIVATE_SETTINGS_FILE);
if (fo == null) {
try {
fo = projectDir.createData(PROJECT_PRIVATE_SETTINGS_FILE);
} catch (IOException ex) {
ErrorManager.getDefault().notify(ex);
}
}
return fo;
}
/**
* Gets the log level for Platypus Client.
*
* @return Log level value
*/
@Override
public Level getClientLogLevel() {
String logLevel = projectPrivateProperties.get(CLIENT_LOG_LEVEL);
if (logLevel == null || logLevel.isEmpty()) {
return DEFAULT_LOG_LEVEL;
}
try {
return Level.parse(logLevel);
} catch (IllegalArgumentException ex) {
return DEFAULT_LOG_LEVEL;
}
}
/**
* Sets a log level for Platypus Client.
*
* @param aValue Log level value
*/
@Override
public void setClientLogLevel(Level aValue) {
Level oldValue = getClientLogLevel();
if (aValue != null) {
projectPrivateProperties.setProperty(CLIENT_LOG_LEVEL, aValue.getName());
} else {
projectPrivateProperties.remove(CLIENT_LOG_LEVEL);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(CLIENT_LOG_LEVEL, aValue, oldValue);
}
/**
* Gets the log level for Platypus Server.
*
* @return Log level value
*/
@Override
public Level getServerLogLevel() {
String logLevel = projectPrivateProperties.get(SERVER_LOG_LEVEL);
if (logLevel == null || logLevel.isEmpty()) {
return DEFAULT_LOG_LEVEL;
}
try {
return Level.parse(logLevel);
} catch (IllegalArgumentException ex) {
return DEFAULT_LOG_LEVEL;
}
}
/**
* Sets a log level for Platypus Server.
*
* @param aValue Log level value
*/
@Override
public void setServerLogLevel(Level aValue) {
Level oldValue = getServerLogLevel();
if (aValue != null) {
projectPrivateProperties.setProperty(SERVER_LOG_LEVEL, aValue.getName());
} else {
projectPrivateProperties.remove(SERVER_LOG_LEVEL);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_LOG_LEVEL, aValue, oldValue);
}
}
| |
package gov.nih.nci.evs.api.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import gov.nih.nci.evs.api.model.AssociationEntry;
import gov.nih.nci.evs.api.model.Concept;
import gov.nih.nci.evs.api.model.ConceptMinimal;
import gov.nih.nci.evs.api.model.IncludeParam;
import gov.nih.nci.evs.api.model.Terminology;
import gov.nih.nci.evs.api.model.TerminologyMetadata;
import gov.nih.nci.evs.api.properties.StardogProperties;
import gov.nih.nci.evs.api.support.es.ElasticLoadConfig;
import gov.nih.nci.evs.api.support.es.ElasticObject;
import gov.nih.nci.evs.api.util.HierarchyUtils;
import gov.nih.nci.evs.api.util.MainTypeHierarchy;
import gov.nih.nci.evs.api.util.TerminologyUtils;
/**
* The implementation for {@link ElasticLoadService}.
*
* @author Arun
*/
@Service
public class StardogElasticLoadServiceImpl extends BaseLoaderService {
  /** The logger. */
  private static final Logger logger = LoggerFactory.getLogger(StardogElasticLoadServiceImpl.class);

  /** The directory concepts are downloaded to. */
  @Value("${nci.evs.bulkload.conceptsDir}")
  private String CONCEPTS_OUT_DIR;

  /** The lock file name. */
  @Value("${nci.evs.bulkload.lockFile}")
  private String LOCK_FILE;

  /** The number of concepts fetched from Stardog per batch. */
  @Value("${nci.evs.bulkload.downloadBatchSize}")
  private int DOWNLOAD_BATCH_SIZE;

  /** The number of concepts sent to Elasticsearch per indexing task. */
  @Value("${nci.evs.bulkload.indexBatchSize}")
  private int INDEX_BATCH_SIZE;

  /** The Spring environment. */
  @Autowired
  Environment env;

  /** The Elasticsearch operations service instance. */
  @Autowired
  ElasticOperationsService operationsService;

  /** The sparql query manager service. */
  @Autowired
  private SparqlQueryManagerService sparqlQueryManagerService;

  /** The stardog properties. */
  @Autowired
  StardogProperties stardogProperties;

  /** The main type hierarchy. */
  @Autowired
  MainTypeHierarchy mainTypeHierarchy;

  /** The sparql query service implementation. */
  @Autowired
  private SparqlQueryManagerServiceImpl sparqlQueryManagerServiceImpl;
/* see superclass */
@Override
public int loadConcepts(ElasticLoadConfig config, Terminology terminology,
HierarchyUtils hierarchy) throws IOException {
logger.debug("ElasticLoadServiceImpl::load() - index = {}, type = {}",
terminology.getIndexName(), ElasticOperationsService.CONCEPT_TYPE);
boolean result =
operationsService.createIndex(terminology.getIndexName(), config.isForceDeleteIndex());
if (result) {
operationsService.getElasticsearchOperations().putMapping(terminology.getIndexName(),
ElasticOperationsService.CONCEPT_TYPE, Concept.class);
}
logger.info("Getting all concepts");
List<Concept> allConcepts = sparqlQueryManagerService.getAllConcepts(terminology);
try {
// download concepts and upload to es in real time
logger.info("Loading in real time");
loadConceptsRealTime(allConcepts, terminology, hierarchy);
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw new IOException(e);
}
return allConcepts.size();
}
/**
 * Loads concepts directly from stardog in batches and bulk-indexes each batch
 * on a fixed thread pool.
 *
 * <p>Concepts are read in chunks of {@code DOWNLOAD_BATCH_SIZE} and indexed in
 * sub-chunks of {@code INDEX_BATCH_SIZE}; a shared latch tracks overall task
 * completion for progress reporting.</p>
 *
 * @param allConcepts all concepts to load
 * @param terminology the terminology
 * @param hierarchy the hierarchy
 * @throws Exception if the batch-size precondition fails, a read or index
 *           task fails, or the load is interrupted
 */
private void loadConceptsRealTime(List<Concept> allConcepts, Terminology terminology,
  HierarchyUtils hierarchy) throws Exception {
  logger.info(" download batch size = " + DOWNLOAD_BATCH_SIZE);
  logger.info(" index batch size = " + INDEX_BATCH_SIZE);
  // Check assumptions
  if (DOWNLOAD_BATCH_SIZE < INDEX_BATCH_SIZE) {
    throw new Exception("The download batch size must not be less than the index batch size");
  }
  if (CollectionUtils.isEmpty(allConcepts)) {
    logger.warn("Unable to load. No concepts found!");
    return;
  }
  logger.info(" Initialize main type hierarchy");
  mainTypeHierarchy.initialize(terminology);
  logger.info(" Total concepts to load: {}", allConcepts.size());

  // Integer batch arithmetic (previously boxed Double math).
  final int total = allConcepts.size();
  // Overall number of index tasks = ceil(total / INDEX_BATCH_SIZE).
  final int taskSize = (total + INDEX_BATCH_SIZE - 1) / INDEX_BATCH_SIZE;
  final CountDownLatch latch = new CountDownLatch(taskSize);
  final ExecutorService executor = Executors.newFixedThreadPool(10);
  int start = 0;
  int end = DOWNLOAD_BATCH_SIZE;
  try {
    while (start < total) {
      if (total - start <= DOWNLOAD_BATCH_SIZE) {
        end = total;
      }
      logger.info(" Processing {} to {}", start + 1, end);
      logger.info(" start reading {} to {}", start + 1, end);
      final List<Concept> concepts = sparqlQueryManagerService
          .getConcepts(allConcepts.subList(start, end), terminology, hierarchy);
      logger.info(" finish reading {} to {}", start + 1, end);
      logger.info(" start computing extensions {} to {}", start + 1, end);
      // BUG FIX: this was stream().peek(...).count(). Since Java 9, count()
      // may skip traversal entirely for a sized stream, silently dropping the
      // peek side effect — extensions would then never be computed. Use a
      // plain loop so the side effect is guaranteed.
      for (final Concept concept : concepts) {
        concept.setExtensions(mainTypeHierarchy.getExtensions(concept));
      }
      logger.info(" finish computing extensions {} to {}", start + 1, end);

      // Split this download batch into index-sized tasks.
      int indexStart = 0;
      int indexEnd = INDEX_BATCH_SIZE;
      final int indexTotal = concepts.size();
      final List<Future<Void>> futures = new ArrayList<>();
      while (indexStart < indexTotal) {
        if (indexTotal - indexStart <= INDEX_BATCH_SIZE) {
          indexEnd = indexTotal;
        }
        futures.add(executor.submit(
            new ConceptLoadTask(concepts.subList(indexStart, indexEnd), start + indexStart,
                start + indexEnd, terminology.getIndexName(), latch, taskSize)));
        indexStart = indexEnd;
        indexEnd = indexEnd + INDEX_BATCH_SIZE;
      }
      // Look for exceptions
      for (final Future<Void> future : futures) {
        // This throws an exception if the callable had an issue
        future.get();
      }
      start = end;
      end = end + DOWNLOAD_BATCH_SIZE;
    }
    latch.await();
    logger.info(" shutdown");
    executor.shutdown();
    logger.info(" await termination");
    executor.awaitTermination(30, TimeUnit.SECONDS);
  } catch (Exception e) {
    // On failure, stop outstanding tasks before propagating.
    logger.info(" shutdown now");
    executor.shutdownNow();
    logger.info(" await termination");
    executor.awaitTermination(30, TimeUnit.SECONDS);
    throw e;
  }
  logger.info("Done loading concepts!");
}
/**
 * Recursively attaches subset links throughout a subset hierarchy.
 *
 * @param subset the subset concept at the root of this sub-hierarchy
 * @param subsetLinks map of concept code to link suffix
 * @param subsetPrefix prefix prepended to each link suffix
 */
private void addSubsetLinks(Concept subset, Map<String, String> subsetLinks, String subsetPrefix) {
  final String code = subset.getCode();
  if (subsetLinks.containsKey(code)) {
    subset.setSubsetLink(subsetPrefix + subsetLinks.get(code));
  }
  // Descend into every child subset.
  subset.getChildren().forEach(child -> addSubsetLinks(child, subsetLinks, subsetPrefix));
}
/* see superclass */
@Override
public void loadObjects(ElasticLoadConfig config, Terminology terminology,
  HierarchyUtils hierarchy) throws IOException {
  final String indexName = terminology.getObjectIndexName();
  logger.info("Loading Elastic Objects");
  logger.debug("object index name: {}", indexName);
  boolean result = operationsService.createIndex(indexName, config.isForceDeleteIndex());
  logger.debug("index result: {}", result);

  // hierarchy
  ElasticObject hierarchyObject = new ElasticObject("hierarchy");
  hierarchyObject.setHierarchy(hierarchy);
  indexObject(hierarchyObject, indexName);
  logger.info(" Hierarchy loaded");

  // synonym sources
  List<ConceptMinimal> synonymSources = sparqlQueryManagerService.getSynonymSources(terminology);
  ElasticObject ssObject = new ElasticObject("synonym_sources");
  ssObject.setConceptMinimals(synonymSources);
  indexObject(ssObject, indexName);
  logger.info(" Synonym Sources loaded");

  // qualifiers
  List<Concept> qualifiers =
      sparqlQueryManagerService.getAllQualifiers(terminology, new IncludeParam("full"));
  ElasticObject conceptsObject = new ElasticObject("qualifiers");
  conceptsObject.setConcepts(qualifiers);
  indexObject(conceptsObject, indexName);
  logger.info(" Qualifiers loaded");

  // properties
  List<Concept> properties =
      sparqlQueryManagerService.getAllProperties(terminology, new IncludeParam("full"));
  ElasticObject propertiesObject = new ElasticObject("properties");
  propertiesObject.setConcepts(properties);
  indexObject(propertiesObject, indexName);
  logger.info(" Properties loaded");

  // associations (also reused below for association entries)
  List<Concept> associations =
      sparqlQueryManagerService.getAllAssociations(terminology, new IncludeParam("full"));
  ElasticObject associationsObject = new ElasticObject("associations");
  associationsObject.setConcepts(associations);
  indexObject(associationsObject, indexName);
  logger.info(" Associations loaded");

  // roles
  List<Concept> roles =
      sparqlQueryManagerService.getAllRoles(terminology, new IncludeParam("full"));
  ElasticObject rolesObject = new ElasticObject("roles");
  rolesObject.setConcepts(roles);
  indexObject(rolesObject, indexName);
  logger.info(" Roles loaded");

  // synonymTypes
  List<Concept> synonymTypes =
      sparqlQueryManagerService.getAllSynonymTypes(terminology, new IncludeParam("full"));
  ElasticObject synonymTypesObject = new ElasticObject("synonymTypes");
  synonymTypesObject.setConcepts(synonymTypes);
  indexObject(synonymTypesObject, indexName);
  logger.info(" Synonym Types loaded");

  // definitionTypes
  List<Concept> definitionTypes =
      sparqlQueryManagerService.getAllDefinitionTypes(terminology, new IncludeParam("full"));
  ElasticObject definitionTypesObject = new ElasticObject("definitionTypes");
  definitionTypesObject.setConcepts(definitionTypes);
  indexObject(definitionTypesObject, indexName);
  logger.info(" Definition Types loaded");

  // subsets (with subset links resolved from terminology metadata)
  List<Concept> subsets = sparqlQueryManagerServiceImpl.getAllSubsets(terminology);
  ElasticObject subsetsObject = new ElasticObject("subsets");
  for (Concept subset : subsets) {
    addSubsetLinks(subset, terminology.getMetadata().getSubsetLinks(),
        terminology.getMetadata().getSubsetPrefix());
  }
  subsetsObject.setConcepts(subsets);
  indexObject(subsetsObject, indexName);
  logger.info(" Subsets loaded");

  // associationEntries
  for (Concept association : associations) {
    logger.info(association.getName());
    // NOTE(review): Concept_In_Subset is deliberately skipped here —
    // presumably handled via the subsets object above; confirm intent.
    if (association.getName().equals("Concept_In_Subset")) {
      continue;
    }
    List<AssociationEntry> entries =
        sparqlQueryManagerService.getAssociationEntries(terminology, association);
    ElasticObject associationEntriesObject =
        new ElasticObject("associationEntries_" + association.getName());
    logger.info(" add associationEntries_" + association.getName() + " = " + entries.size());
    associationEntriesObject.setAssociationEntries(entries);
    indexObject(associationEntriesObject, indexName);
  }
  logger.info(" Association Entries loaded");
  logger.info("Done loading Elastic Objects!");
}

/**
 * Indexes a single {@link ElasticObject} document into the object index.
 * Extracted to remove the nine identical index invocations above.
 *
 * @param object the object to index
 * @param indexName the target index name
 * @throws IOException if indexing fails
 */
private void indexObject(final ElasticObject object, final String indexName) throws IOException {
  operationsService.index(object, indexName, ElasticOperationsService.OBJECT_TYPE,
      ElasticObject.class);
}
/**
 * Task to load a batch of concepts to elasticsearch.
 *
 * <p>Each task bulk-indexes its sublist of concepts and counts down the shared
 * latch in a {@code finally} block, so the loader awaiting the latch cannot
 * hang even when a batch fails.</p>
 *
 * @author Arun
 */
private class ConceptLoadTask implements Callable<Void> {

  /** The logger. */
  private final Logger taskLogger = LoggerFactory.getLogger(ConceptLoadTask.class);

  /** The concepts to index; nulled out after the attempt so the batch can be GC'd. */
  @SuppressWarnings("rawtypes")
  private List concepts;

  /** Start index for the task (used for log messages, 1-based in output). */
  private int startIndex;

  /** End index for the task (used for log messages). */
  private int endIndex;

  /** The elasticsearch index to write to. */
  private String indexName;

  /** Shared latch counted down exactly once per task. */
  private CountDownLatch latch;

  /** Total number of tasks; used only to compute the progress percentage. */
  private int taskSize;

  /**
   * Instantiates a {@link ConceptLoadTask} from the specified parameters.
   *
   * @param concepts the concepts to bulk index
   * @param start the start index (for logging)
   * @param end the end index (for logging)
   * @param indexName the index name
   * @param latch the shared count down latch
   * @param taskSize the total number of tasks
   * @throws Exception the exception
   */
  @SuppressWarnings("rawtypes")
  public ConceptLoadTask(List concepts, int start, int end, String indexName,
      CountDownLatch latch, int taskSize) throws Exception {
    this.concepts = concepts;
    this.startIndex = start;
    this.endIndex = end;
    this.indexName = indexName;
    this.latch = latch;
    this.taskSize = taskSize;
  }

  /* see superclass */
  @Override
  public Void call() throws Exception {
    try {
      taskLogger.info(" start loading concepts: {} to {}", startIndex + 1, endIndex);
      operationsService.bulkIndex(concepts, indexName, ElasticOperationsService.CONCEPT_TYPE,
          Concept.class);
      // Progress is derived from how many tasks have already counted down.
      int progress = (int) Math.floor((1.0 - 1.0 * latch.getCount() / taskSize) * 100);
      taskLogger.info(" finish loading concepts: {} to {} ({}% complete)", startIndex + 1,
          endIndex, progress);
    } catch (Throwable e) {
      throw new Exception(e);
    } finally {
      // Release the batch for GC and always count down so the loader
      // awaiting the latch cannot hang on failure.
      concepts = null;
      latch.countDown();
    }
    return null;
  }
}
/* see superclass */
@Override
public Terminology getTerminology(ApplicationContext app, ElasticLoadConfig config,
  String filepath, String terminology, boolean forceDelete) throws Exception {
  TerminologyUtils termUtils = app.getBean(TerminologyUtils.class);
  final Terminology term = termUtils.getTerminology(config.getTerminology(), false);
  // Attempt to read the config, if anything goes wrong
  // the config file is probably not there
  final String resource = "metadata/" + term.getTerminology() + ".json";
  // BUG FIX: the classpath stream was previously leaked (never closed), and a
  // missing resource surfaced as an opaque NullPointerException. Use
  // try-with-resources and an explicit null check instead.
  try (final java.io.InputStream in =
      term.getClass().getClassLoader().getResourceAsStream(resource)) {
    if (in == null) {
      throw new IOException("Metadata resource not found = " + resource);
    }
    TerminologyMetadata metadata =
        new ObjectMapper().readValue(IOUtils.toString(in, "UTF-8"), TerminologyMetadata.class);
    term.setMetadata(metadata);
  } catch (Exception e) {
    throw new Exception("Unexpected error trying to load = " + resource, e);
  }
  // Compute tags because this is the new terminology
  // Do this AFTER setting terminology metadata, which is needed
  termUtils.setTags(term, stardogProperties.getDb());
  return term;
}
/* see superclass */
// Thin delegate: hierarchy construction is owned by the sparql query service.
@Override
public HierarchyUtils getHierarchyUtils(Terminology term)
  throws JsonParseException, JsonMappingException, IOException {
  return sparqlQueryManagerService.getHierarchyUtils(term);
}
}
| |
/*
* Copyright (C) 2012 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package retrofit2;
import static java.util.Collections.unmodifiableList;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.net.URL;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import javax.annotation.Nullable;
import okhttp3.HttpUrl;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.HTTP;
import retrofit2.http.Header;
import retrofit2.http.Url;
/**
* Retrofit adapts a Java interface to HTTP calls by using annotations on the declared methods to
* define how requests are made. Create instances using {@linkplain Builder the builder} and pass
* your interface to {@link #create} to generate an implementation.
*
* <p>For example,
*
* <pre><code>
* Retrofit retrofit = new Retrofit.Builder()
* .baseUrl("https://api.example.com/")
* .addConverterFactory(GsonConverterFactory.create())
* .build();
*
* MyApi api = retrofit.create(MyApi.class);
* Response<User> user = api.getUser().execute();
* </code></pre>
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public final class Retrofit {
  // Parsed reflection data per service method, cached so repeated invocations
  // of the same method skip annotation parsing. ConcurrentHashMap gives
  // lock-free reads on the hot path (see loadServiceMethod).
  private final Map<Method, ServiceMethod<?>> serviceMethodCache = new ConcurrentHashMap<>();

  final okhttp3.Call.Factory callFactory;
  final HttpUrl baseUrl;
  final List<Converter.Factory> converterFactories;
  final int defaultConverterFactoriesSize;
  final List<CallAdapter.Factory> callAdapterFactories;
  final int defaultCallAdapterFactoriesSize;
  final @Nullable Executor callbackExecutor;
  final boolean validateEagerly;

  Retrofit(
      okhttp3.Call.Factory callFactory,
      HttpUrl baseUrl,
      List<Converter.Factory> converterFactories,
      int defaultConverterFactoriesSize,
      List<CallAdapter.Factory> callAdapterFactories,
      int defaultCallAdapterFactoriesSize,
      @Nullable Executor callbackExecutor,
      boolean validateEagerly) {
    this.callFactory = callFactory;
    this.baseUrl = baseUrl;
    this.converterFactories = converterFactories; // Copy+unmodifiable at call site.
    this.defaultConverterFactoriesSize = defaultConverterFactoriesSize;
    this.callAdapterFactories = callAdapterFactories; // Copy+unmodifiable at call site.
    this.defaultCallAdapterFactoriesSize = defaultCallAdapterFactoriesSize;
    this.callbackExecutor = callbackExecutor;
    this.validateEagerly = validateEagerly;
  }

  /**
   * Create an implementation of the API endpoints defined by the {@code service} interface.
   *
   * <p>The relative path for a given method is obtained from an annotation on the method describing
   * the request type. The built-in methods are {@link retrofit2.http.GET GET}, {@link
   * retrofit2.http.PUT PUT}, {@link retrofit2.http.POST POST}, {@link retrofit2.http.PATCH PATCH},
   * {@link retrofit2.http.HEAD HEAD}, {@link retrofit2.http.DELETE DELETE} and {@link
   * retrofit2.http.OPTIONS OPTIONS}. You can use a custom HTTP method with {@link HTTP @HTTP}. For
   * a dynamic URL, omit the path on the annotation and annotate the first parameter with {@link
   * Url @Url}.
   *
   * <p>Method parameters can be used to replace parts of the URL by annotating them with {@link
   * retrofit2.http.Path @Path}. Replacement sections are denoted by an identifier surrounded by
   * curly braces (e.g., "{foo}"). To add items to the query string of a URL use {@link
   * retrofit2.http.Query @Query}.
   *
   * <p>The body of a request is denoted by the {@link retrofit2.http.Body @Body} annotation. The
   * object will be converted to request representation by one of the {@link Converter.Factory}
   * instances. A {@link RequestBody} can also be used for a raw representation.
   *
   * <p>Alternative request body formats are supported by method annotations and corresponding
   * parameter annotations:
   *
   * <ul>
   *   <li>{@link retrofit2.http.FormUrlEncoded @FormUrlEncoded} - Form-encoded data with key-value
   *       pairs specified by the {@link retrofit2.http.Field @Field} parameter annotation.
   *   <li>{@link retrofit2.http.Multipart @Multipart} - RFC 2388-compliant multipart data with
   *       parts specified by the {@link retrofit2.http.Part @Part} parameter annotation.
   * </ul>
   *
   * <p>Additional static headers can be added for an endpoint using the {@link
   * retrofit2.http.Headers @Headers} method annotation. For per-request control over a header
   * annotate a parameter with {@link Header @Header}.
   *
   * <p>By default, methods return a {@link Call} which represents the HTTP request. The generic
   * parameter of the call is the response body type and will be converted by one of the {@link
   * Converter.Factory} instances. {@link ResponseBody} can also be used for a raw representation.
   * {@link Void} can be used if you do not care about the body contents.
   *
   * <p>For example:
   *
   * <pre>
   * public interface CategoryService {
   *   @POST("category/{cat}/")
   *   Call&lt;List&lt;Item&gt;&gt; categoryList(@Path("cat") String a, @Query("page") int b);
   * }
   * </pre>
   */
  @SuppressWarnings("unchecked") // Single-interface proxy creation guarded by parameter safety.
  public <T> T create(final Class<T> service) {
    validateServiceInterface(service);
    return (T)
        Proxy.newProxyInstance(
            service.getClassLoader(),
            new Class<?>[] {service},
            new InvocationHandler() {
              // Shared sentinel for zero-arg methods; reflection passes null args.
              private final Object[] emptyArgs = new Object[0];

              @Override
              public @Nullable Object invoke(Object proxy, Method method, @Nullable Object[] args)
                  throws Throwable {
                // If the method is a method from Object then defer to normal invocation.
                if (method.getDeclaringClass() == Object.class) {
                  return method.invoke(this, args);
                }
                args = args != null ? args : emptyArgs;
                Platform platform = Platform.get();
                // Default interface methods run as-is; everything else is an
                // annotated HTTP endpoint parsed into a ServiceMethod.
                return platform.isDefaultMethod(method)
                    ? platform.invokeDefaultMethod(method, service, proxy, args)
                    : loadServiceMethod(method).invoke(args);
              }
            });
  }

  private void validateServiceInterface(Class<?> service) {
    if (!service.isInterface()) {
      throw new IllegalArgumentException("API declarations must be interfaces.");
    }

    // Walk the whole interface hierarchy breadth-first to reject type
    // parameters anywhere, not just on the service interface itself.
    Deque<Class<?>> check = new ArrayDeque<>(1);
    check.add(service);
    while (!check.isEmpty()) {
      Class<?> candidate = check.removeFirst();
      if (candidate.getTypeParameters().length != 0) {
        StringBuilder message =
            new StringBuilder("Type parameters are unsupported on ").append(candidate.getName());
        if (candidate != service) {
          message.append(" which is an interface of ").append(service.getName());
        }
        throw new IllegalArgumentException(message.toString());
      }
      Collections.addAll(check, candidate.getInterfaces());
    }

    if (validateEagerly) {
      // Parse every non-default, non-static method up front so configuration
      // errors surface at create() time rather than first invocation.
      Platform platform = Platform.get();
      for (Method method : service.getDeclaredMethods()) {
        if (!platform.isDefaultMethod(method) && !Modifier.isStatic(method.getModifiers())) {
          loadServiceMethod(method);
        }
      }
    }
  }

  ServiceMethod<?> loadServiceMethod(Method method) {
    // Fast path: racy read is safe because the map is concurrent and entries
    // are immutable once published.
    ServiceMethod<?> result = serviceMethodCache.get(method);
    if (result != null) return result;

    // Slow path: double-check under the lock so each method is parsed once.
    synchronized (serviceMethodCache) {
      result = serviceMethodCache.get(method);
      if (result == null) {
        result = ServiceMethod.parseAnnotations(this, method);
        serviceMethodCache.put(method, result);
      }
    }
    return result;
  }

  /**
   * The factory used to create {@linkplain okhttp3.Call OkHttp calls} for sending HTTP requests.
   * Typically an instance of {@link OkHttpClient}.
   */
  public okhttp3.Call.Factory callFactory() {
    return callFactory;
  }

  /** The API base URL. */
  public HttpUrl baseUrl() {
    return baseUrl;
  }

  /**
   * Returns a list of the factories tried when creating a {@linkplain #callAdapter(Type,
   * Annotation[]) call adapter}.
   */
  public List<CallAdapter.Factory> callAdapterFactories() {
    return callAdapterFactories;
  }

  /**
   * Returns the {@link CallAdapter} for {@code returnType} from the available {@linkplain
   * #callAdapterFactories() factories}.
   *
   * @throws IllegalArgumentException if no call adapter available for {@code type}.
   */
  public CallAdapter<?, ?> callAdapter(Type returnType, Annotation[] annotations) {
    return nextCallAdapter(null, returnType, annotations);
  }

  /**
   * Returns the {@link CallAdapter} for {@code returnType} from the available {@linkplain
   * #callAdapterFactories() factories} except {@code skipPast}.
   *
   * @throws IllegalArgumentException if no call adapter available for {@code type}.
   */
  public CallAdapter<?, ?> nextCallAdapter(
      @Nullable CallAdapter.Factory skipPast, Type returnType, Annotation[] annotations) {
    Objects.requireNonNull(returnType, "returnType == null");
    Objects.requireNonNull(annotations, "annotations == null");

    // indexOf(null) is -1, so start is 0 when skipPast is null.
    int start = callAdapterFactories.indexOf(skipPast) + 1;
    for (int i = start, count = callAdapterFactories.size(); i < count; i++) {
      CallAdapter<?, ?> adapter = callAdapterFactories.get(i).get(returnType, annotations, this);
      if (adapter != null) {
        return adapter;
      }
    }

    // No factory matched: build a diagnostic listing skipped and tried factories.
    StringBuilder builder =
        new StringBuilder("Could not locate call adapter for ").append(returnType).append(".\n");
    if (skipPast != null) {
      builder.append("  Skipped:");
      for (int i = 0; i < start; i++) {
        builder.append("\n   * ").append(callAdapterFactories.get(i).getClass().getName());
      }
      builder.append('\n');
    }
    builder.append("  Tried:");
    for (int i = start, count = callAdapterFactories.size(); i < count; i++) {
      builder.append("\n   * ").append(callAdapterFactories.get(i).getClass().getName());
    }
    throw new IllegalArgumentException(builder.toString());
  }

  /**
   * Returns an unmodifiable list of the factories tried when creating a {@linkplain
   * #requestBodyConverter(Type, Annotation[], Annotation[]) request body converter}, a {@linkplain
   * #responseBodyConverter(Type, Annotation[]) response body converter}, or a {@linkplain
   * #stringConverter(Type, Annotation[]) string converter}.
   */
  public List<Converter.Factory> converterFactories() {
    return converterFactories;
  }

  /**
   * Returns a {@link Converter} for {@code type} to {@link RequestBody} from the available
   * {@linkplain #converterFactories() factories}.
   *
   * @throws IllegalArgumentException if no converter available for {@code type}.
   */
  public <T> Converter<T, RequestBody> requestBodyConverter(
      Type type, Annotation[] parameterAnnotations, Annotation[] methodAnnotations) {
    return nextRequestBodyConverter(null, type, parameterAnnotations, methodAnnotations);
  }

  /**
   * Returns a {@link Converter} for {@code type} to {@link RequestBody} from the available
   * {@linkplain #converterFactories() factories} except {@code skipPast}.
   *
   * @throws IllegalArgumentException if no converter available for {@code type}.
   */
  public <T> Converter<T, RequestBody> nextRequestBodyConverter(
      @Nullable Converter.Factory skipPast,
      Type type,
      Annotation[] parameterAnnotations,
      Annotation[] methodAnnotations) {
    Objects.requireNonNull(type, "type == null");
    Objects.requireNonNull(parameterAnnotations, "parameterAnnotations == null");
    Objects.requireNonNull(methodAnnotations, "methodAnnotations == null");

    // indexOf(null) is -1, so start is 0 when skipPast is null.
    int start = converterFactories.indexOf(skipPast) + 1;
    for (int i = start, count = converterFactories.size(); i < count; i++) {
      Converter.Factory factory = converterFactories.get(i);
      Converter<?, RequestBody> converter =
          factory.requestBodyConverter(type, parameterAnnotations, methodAnnotations, this);
      if (converter != null) {
        //noinspection unchecked
        return (Converter<T, RequestBody>) converter;
      }
    }

    // No factory matched: build a diagnostic listing skipped and tried factories.
    StringBuilder builder =
        new StringBuilder("Could not locate RequestBody converter for ").append(type).append(".\n");
    if (skipPast != null) {
      builder.append("  Skipped:");
      for (int i = 0; i < start; i++) {
        builder.append("\n   * ").append(converterFactories.get(i).getClass().getName());
      }
      builder.append('\n');
    }
    builder.append("  Tried:");
    for (int i = start, count = converterFactories.size(); i < count; i++) {
      builder.append("\n   * ").append(converterFactories.get(i).getClass().getName());
    }
    throw new IllegalArgumentException(builder.toString());
  }

  /**
   * Returns a {@link Converter} for {@link ResponseBody} to {@code type} from the available
   * {@linkplain #converterFactories() factories}.
   *
   * @throws IllegalArgumentException if no converter available for {@code type}.
   */
  public <T> Converter<ResponseBody, T> responseBodyConverter(Type type, Annotation[] annotations) {
    return nextResponseBodyConverter(null, type, annotations);
  }

  /**
   * Returns a {@link Converter} for {@link ResponseBody} to {@code type} from the available
   * {@linkplain #converterFactories() factories} except {@code skipPast}.
   *
   * @throws IllegalArgumentException if no converter available for {@code type}.
   */
  public <T> Converter<ResponseBody, T> nextResponseBodyConverter(
      @Nullable Converter.Factory skipPast, Type type, Annotation[] annotations) {
    Objects.requireNonNull(type, "type == null");
    Objects.requireNonNull(annotations, "annotations == null");

    // indexOf(null) is -1, so start is 0 when skipPast is null.
    int start = converterFactories.indexOf(skipPast) + 1;
    for (int i = start, count = converterFactories.size(); i < count; i++) {
      Converter<ResponseBody, ?> converter =
          converterFactories.get(i).responseBodyConverter(type, annotations, this);
      if (converter != null) {
        //noinspection unchecked
        return (Converter<ResponseBody, T>) converter;
      }
    }

    // No factory matched: build a diagnostic listing skipped and tried factories.
    StringBuilder builder =
        new StringBuilder("Could not locate ResponseBody converter for ")
            .append(type)
            .append(".\n");
    if (skipPast != null) {
      builder.append("  Skipped:");
      for (int i = 0; i < start; i++) {
        builder.append("\n   * ").append(converterFactories.get(i).getClass().getName());
      }
      builder.append('\n');
    }
    builder.append("  Tried:");
    for (int i = start, count = converterFactories.size(); i < count; i++) {
      builder.append("\n   * ").append(converterFactories.get(i).getClass().getName());
    }
    throw new IllegalArgumentException(builder.toString());
  }

  /**
   * Returns a {@link Converter} for {@code type} to {@link String} from the available {@linkplain
   * #converterFactories() factories}.
   */
  public <T> Converter<T, String> stringConverter(Type type, Annotation[] annotations) {
    Objects.requireNonNull(type, "type == null");
    Objects.requireNonNull(annotations, "annotations == null");

    for (int i = 0, count = converterFactories.size(); i < count; i++) {
      Converter<?, String> converter =
          converterFactories.get(i).stringConverter(type, annotations, this);
      if (converter != null) {
        //noinspection unchecked
        return (Converter<T, String>) converter;
      }
    }

    // Nothing matched. Resort to default converter which just calls toString().
    //noinspection unchecked
    return (Converter<T, String>) BuiltInConverters.ToStringConverter.INSTANCE;
  }

  /**
   * The executor used for {@link Callback} methods on a {@link Call}. This may be {@code null}, in
   * which case callbacks should be made synchronously on the background thread.
   */
  public @Nullable Executor callbackExecutor() {
    return callbackExecutor;
  }

  public Builder newBuilder() {
    return new Builder(this);
  }

  /**
   * Build a new {@link Retrofit}.
   *
   * <p>Calling {@link #baseUrl} is required before calling {@link #build()}. All other methods are
   * optional.
   */
  public static final class Builder {
    private @Nullable okhttp3.Call.Factory callFactory;
    private @Nullable HttpUrl baseUrl;
    private final List<Converter.Factory> converterFactories = new ArrayList<>();
    private final List<CallAdapter.Factory> callAdapterFactories = new ArrayList<>();
    private @Nullable Executor callbackExecutor;
    private boolean validateEagerly;

    public Builder() {}

    Builder(Retrofit retrofit) {
      callFactory = retrofit.callFactory;
      baseUrl = retrofit.baseUrl;

      // Do not add the default BuiltIntConverters and platform-aware converters added by build().
      // (Index 0 is always the BuiltInConverters instance, hence i starts at 1.)
      for (int i = 1,
              size = retrofit.converterFactories.size() - retrofit.defaultConverterFactoriesSize;
          i < size;
          i++) {
        converterFactories.add(retrofit.converterFactories.get(i));
      }

      // Do not add the default, platform-aware call adapters added by build().
      for (int i = 0,
              size =
                  retrofit.callAdapterFactories.size() - retrofit.defaultCallAdapterFactoriesSize;
          i < size;
          i++) {
        callAdapterFactories.add(retrofit.callAdapterFactories.get(i));
      }

      callbackExecutor = retrofit.callbackExecutor;
      validateEagerly = retrofit.validateEagerly;
    }

    /**
     * The HTTP client used for requests.
     *
     * <p>This is a convenience method for calling {@link #callFactory}.
     */
    public Builder client(OkHttpClient client) {
      return callFactory(Objects.requireNonNull(client, "client == null"));
    }

    /**
     * Specify a custom call factory for creating {@link Call} instances.
     *
     * <p>Note: Calling {@link #client} automatically sets this value.
     */
    public Builder callFactory(okhttp3.Call.Factory factory) {
      this.callFactory = Objects.requireNonNull(factory, "factory == null");
      return this;
    }

    /**
     * Set the API base URL.
     *
     * @see #baseUrl(HttpUrl)
     */
    public Builder baseUrl(URL baseUrl) {
      Objects.requireNonNull(baseUrl, "baseUrl == null");
      return baseUrl(HttpUrl.get(baseUrl.toString()));
    }

    /**
     * Set the API base URL.
     *
     * @see #baseUrl(HttpUrl)
     */
    public Builder baseUrl(String baseUrl) {
      Objects.requireNonNull(baseUrl, "baseUrl == null");
      return baseUrl(HttpUrl.get(baseUrl));
    }

    /**
     * Set the API base URL.
     *
     * <p>The specified endpoint values (such as with {@link GET @GET}) are resolved against this
     * value using {@link HttpUrl#resolve(String)}. The behavior of this matches that of an {@code
     * <a href="">} link on a website resolving on the current URL.
     *
     * <p><b>Base URLs should always end in {@code /}.</b>
     *
     * <p>A trailing {@code /} ensures that endpoints values which are relative paths will correctly
     * append themselves to a base which has path components.
     *
     * <p><b>Correct:</b><br>
     * Base URL: http://example.com/api/<br>
     * Endpoint: foo/bar/<br>
     * Result: http://example.com/api/foo/bar/
     *
     * <p><b>Incorrect:</b><br>
     * Base URL: http://example.com/api<br>
     * Endpoint: foo/bar/<br>
     * Result: http://example.com/foo/bar/
     *
     * <p>This method enforces that {@code baseUrl} has a trailing {@code /}.
     *
     * <p><b>Endpoint values which contain a leading {@code /} are absolute.</b>
     *
     * <p>Absolute values retain only the host from {@code baseUrl} and ignore any specified path
     * components.
     *
     * <p>Base URL: http://example.com/api/<br>
     * Endpoint: /foo/bar/<br>
     * Result: http://example.com/foo/bar/
     *
     * <p>Base URL: http://example.com/<br>
     * Endpoint: /foo/bar/<br>
     * Result: http://example.com/foo/bar/
     *
     * <p><b>Endpoint values may be a full URL.</b>
     *
     * <p>Values which have a host replace the host of {@code baseUrl} and values also with a scheme
     * replace the scheme of {@code baseUrl}.
     *
     * <p>Base URL: http://example.com/<br>
     * Endpoint: https://github.com/square/retrofit/<br>
     * Result: https://github.com/square/retrofit/
     *
     * <p>Base URL: http://example.com<br>
     * Endpoint: //github.com/square/retrofit/<br>
     * Result: http://github.com/square/retrofit/ (note the scheme stays 'http')
     */
    public Builder baseUrl(HttpUrl baseUrl) {
      Objects.requireNonNull(baseUrl, "baseUrl == null");
      // A trailing '/' yields an empty final path segment.
      List<String> pathSegments = baseUrl.pathSegments();
      if (!"".equals(pathSegments.get(pathSegments.size() - 1))) {
        throw new IllegalArgumentException("baseUrl must end in /: " + baseUrl);
      }
      this.baseUrl = baseUrl;
      return this;
    }

    /** Add converter factory for serialization and deserialization of objects. */
    public Builder addConverterFactory(Converter.Factory factory) {
      converterFactories.add(Objects.requireNonNull(factory, "factory == null"));
      return this;
    }

    /**
     * Add a call adapter factory for supporting service method return types other than {@link
     * Call}.
     */
    public Builder addCallAdapterFactory(CallAdapter.Factory factory) {
      callAdapterFactories.add(Objects.requireNonNull(factory, "factory == null"));
      return this;
    }

    /**
     * The executor on which {@link Callback} methods are invoked when returning {@link Call} from
     * your service method.
     *
     * <p>Note: {@code executor} is not used for {@linkplain #addCallAdapterFactory custom method
     * return types}.
     */
    public Builder callbackExecutor(Executor executor) {
      this.callbackExecutor = Objects.requireNonNull(executor, "executor == null");
      return this;
    }

    /** Returns a modifiable list of call adapter factories. */
    public List<CallAdapter.Factory> callAdapterFactories() {
      return this.callAdapterFactories;
    }

    /** Returns a modifiable list of converter factories. */
    public List<Converter.Factory> converterFactories() {
      return this.converterFactories;
    }

    /**
     * When calling {@link #create} on the resulting {@link Retrofit} instance, eagerly validate the
     * configuration of all methods in the supplied interface.
     */
    public Builder validateEagerly(boolean validateEagerly) {
      this.validateEagerly = validateEagerly;
      return this;
    }

    /**
     * Create the {@link Retrofit} instance using the configured values.
     *
     * <p>Note: If neither {@link #client} nor {@link #callFactory} is called a default {@link
     * OkHttpClient} will be created and used.
     */
    public Retrofit build() {
      if (baseUrl == null) {
        throw new IllegalStateException("Base URL required.");
      }

      Platform platform = Platform.get();

      okhttp3.Call.Factory callFactory = this.callFactory;
      if (callFactory == null) {
        callFactory = new OkHttpClient();
      }

      Executor callbackExecutor = this.callbackExecutor;
      if (callbackExecutor == null) {
        callbackExecutor = platform.defaultCallbackExecutor();
      }

      // Make a defensive copy of the adapters and add the default Call adapter.
      List<CallAdapter.Factory> callAdapterFactories = new ArrayList<>(this.callAdapterFactories);
      List<? extends CallAdapter.Factory> defaultCallAdapterFactories =
          platform.createDefaultCallAdapterFactories(callbackExecutor);
      callAdapterFactories.addAll(defaultCallAdapterFactories);

      // Make a defensive copy of the converters.
      List<? extends Converter.Factory> defaultConverterFactories =
          platform.createDefaultConverterFactories();
      int defaultConverterFactoriesSize = defaultConverterFactories.size();
      List<Converter.Factory> converterFactories =
          new ArrayList<>(1 + this.converterFactories.size() + defaultConverterFactoriesSize);

      // Add the built-in converter factory first. This prevents overriding its behavior but also
      // ensures correct behavior when using converters that consume all types.
      converterFactories.add(new BuiltInConverters());
      converterFactories.addAll(this.converterFactories);
      converterFactories.addAll(defaultConverterFactories);

      return new Retrofit(
          callFactory,
          baseUrl,
          unmodifiableList(converterFactories),
          defaultConverterFactoriesSize,
          unmodifiableList(callAdapterFactories),
          defaultCallAdapterFactories.size(),
          callbackExecutor,
          validateEagerly);
    }
  }
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
 *        http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.olingo.odata2.janos.processor.ref.jpa;
import org.apache.http.HttpResponse;
import org.apache.olingo.odata2.api.commons.HttpContentType;
import org.junit.Before;
import org.junit.Test;
import static org.custommonkey.xmlunit.XMLAssert.assertXpathExists;
import static org.junit.Assert.assertFalse;
/**
* Tests employing the reference scenario reading the metadata document in XML format
*
*/
public class MetadataTest extends AbstractRefXmlTest {
  // Metadata document shared by all assertions; refreshed before each test.
  // NOTE(review): static field written from a non-static @Before method -- this works because
  // JUnit creates a fresh instance per test, but an instance field would express it better.
  private static String payload;

  /** Fetches the $metadata document so the XPath checks below can run against it. */
  @Before
  public void prepare() throws Exception {
    payload = getBody(callUri("$metadata"));
  }

  /** Checks media type and non-emptiness of $metadata, plus error codes for bad requests. */
  @Test
  public void metadataDocument() throws Exception {
    final HttpResponse response = callUri("$metadata");
    checkMediaType(response, HttpContentType.APPLICATION_XML_UTF8);
    assertFalse(getBody(response).isEmpty());
    // An unknown system resource must yield 404; $metadata does not support $format=atom -> 400.
    notFound("$invalid");
    badRequest("$metadata?$format=atom");
  }

  /** Verifies the EDMX envelope: version, data-services version, and schema namespace. */
  @Test
  public void testGeneral() throws Exception {
    assertXpathExists("/edmx:Edmx[@Version='1.0']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices[@m:DataServiceVersion='1.0']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema[@Namespace='RefScenario']", payload);
  }

  /** Verifies keys, properties, and navigation properties of every entity type. */
  @Test
  public void testEntityTypes() throws Exception {
    // Employee
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and @m:HasStream='true']", payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and @m:HasStream='true']/edm:Key",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and " +
            "@m:HasStream='true']/edm:Key/edm:PropertyRef[@Name='EmployeeId']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and" +
            " @m:HasStream='true']/edm:Property[@Name='EmployeeId' and @Type='Edm.String' and @Nullable='false']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and" +
            " @m:HasStream='true']/edm:Property[@Name='EmployeeName' and @Type='Edm.String']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and" +
            " @m:HasStream='true']/edm:Property[@Name='Location' and @Type='RefScenario.c_Location']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and " +
            "@m:HasStream='true']/edm:Property[@Name='Age' and @Type='Edm.Int32']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and " +
            "@m:HasStream='true']/edm:Property[@Name='EntryDate' and @Type='Edm.DateTime' and " +
            "@Nullable='true']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and " +
            "@m:HasStream='true']/edm:Property[@Name='ImageUrl' and @Type='Edm.String']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and" +
            " @m:HasStream='true']/edm:NavigationProperty[@Name='ne_Manager' and " +
            "@Relationship='RefScenario.ManagerEmployees' and @FromRole='r_Employees' and @ToRole='r_Manager']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and" +
            " @m:HasStream='true']/edm:NavigationProperty[@Name='ne_Team' and " +
            "@Relationship='RefScenario.TeamEmployees' and @FromRole='r_Employees' and @ToRole='r_Team']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Employee' and " +
            "@m:HasStream='true']/edm:NavigationProperty[@Name='ne_Room' and " +
            "@Relationship='RefScenario.r_Employees_2_r_Room' and @FromRole='r_Employees' and @ToRole='r_Room']",
        payload);
    // Team
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Team' and @BaseType='RefScenario.Base']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Team' and " +
            "@BaseType='RefScenario.Base']/edm:Property[@Name='IsScrumTeam' and " +
            "@Type='Edm.Boolean' and @Nullable='true']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Team' and " +
            "@BaseType='RefScenario.Base']/edm:NavigationProperty[@Name='nt_Employees' and " +
            "@Relationship='RefScenario.TeamEmployees' and @FromRole='r_Team' and @ToRole='r_Employees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Team' and " +
            "@BaseType='RefScenario.Base']/edm:NavigationProperty[@Name='SubTeam' and " +
            "@Relationship='RefScenario.Team_2_r_SubTeam' and @FromRole='Team' and @ToRole='r_SubTeam']",
        payload);
    // Room
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Room' and @BaseType='RefScenario.Base']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Room' and " +
            "@BaseType='RefScenario.Base']/edm:Property[@Name='Seats' and @Type='Edm.Int32']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Room' and " +
            "@BaseType='RefScenario.Base']/edm:Property[@Name='Version' and @Type='Edm.Int32']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Room' and" +
            " @BaseType='RefScenario.Base']/edm:NavigationProperty[@Name='nr_Employees' and " +
            "@Relationship='RefScenario.r_Employees_2_r_Room' and @FromRole='r_Room' and @ToRole='r_Employees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Room' and " +
            "@BaseType='RefScenario.Base']/edm:NavigationProperty[@Name='nr_Building' and " +
            "@Relationship='RefScenario.BuildingRooms' and @FromRole='r_Rooms' and @ToRole='r_Building']",
        payload);
    // Manager
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Manager' and " +
        "@BaseType='RefScenario.Employee']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Manager' and " +
        "@BaseType='RefScenario.Employee']/edm:NavigationProperty[@Name='nm_Employees' and " +
        "@Relationship='RefScenario.ManagerEmployees' and @FromRole='r_Manager' and @ToRole='r_Employees']",
        payload);
    // Building
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']/edm:Key", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']/edm:Key/edm" +
        ":PropertyRef[@Name='Id']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']" +
        "/edm:Property[@Name='Id' and @Type='Edm.Int32' and @Nullable='false']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']" +
        "/edm:Property[@Name='Name' and @Type='Edm.String']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']/" +
        "edm:Property[@Name='Image' and @Type='Edm.Binary']", payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Building']" +
            "/edm:NavigationProperty[@Name='nb_Rooms' and @Relationship='RefScenario.BuildingRooms' " +
            "and @FromRole='r_Building' and @ToRole='r_Rooms']", payload);
    // Base (abstract base type for Team and Room)
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Base' and @Abstract='true']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Base' and @Abstract='true']/edm:Key", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Base' and @Abstract='true']" +
        "/edm:Key/edm:PropertyRef[@Name='Id']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Base' and @Abstract='true']" +
        "/edm:Property[@Name='Id' and @Type='Edm.String' and @Nullable='false']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityType[@Name='Base' and @Abstract='true']" +
        "/edm:Property[@Name='Name' and @Type='Edm.String']", payload);
  }

  /** Verifies the two complex types, c_Location and the nested c_City. */
  @Test
  public void testComplexTypes() throws Exception {
    // Location
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:ComplexType[@Name='c_Location']", payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:ComplexType[@Name='c_Location']/edm:Property[@Name='City' and " +
            "@Type='RefScenario.c_City']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:ComplexType[@Name='c_Location']/edm:Property[@Name='Country' " +
            "and @Type='Edm.String']",
        payload);
    // City (nested complex type referenced from c_Location)
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:ComplexType[@Name='c_City']", payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:ComplexType[@Name='c_City']/edm:Property[@Name='PostalCode' " +
            "and @Type='Edm.String']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:ComplexType[@Name='c_City']/edm:Property[@Name='CityName' " +
            "and @Type='Edm.String']",
        payload);
  }

  /** Verifies every association and its two ends (type, multiplicity, role). */
  @Test
  public void testAssociation() throws Exception {
    // ManagerEmployees
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='ManagerEmployees']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='ManagerEmployees']" +
        "/edm:End[@Type='RefScenario.Employee' and @Multiplicity='*' and @Role='r_Employees']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='ManagerEmployees']" +
        "/edm:End[@Type='RefScenario.Manager' and @Multiplicity='1' and @Role='r_Manager']", payload);
    // TeamEmployees
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='TeamEmployees']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='TeamEmployees']" +
        "/edm:End[@Type='RefScenario.Employee' and @Multiplicity='*' and @Role='r_Employees']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='TeamEmployees']" +
        "/edm:End[@Type='RefScenario.Team' and @Multiplicity='1' and @Role='r_Team']", payload);
    // Team_2_r_SubTeam (self-association of Team)
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='Team_2_r_SubTeam']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='Team_2_r_SubTeam']" +
        "/edm:End[@Type='RefScenario.Team' and @Multiplicity='1' and @Role='Team']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='Team_2_r_SubTeam']" +
        "/edm:End[@Type='RefScenario.Team' and @Multiplicity='1' and @Role='r_SubTeam']", payload);
    // RoomEmployees
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='r_Employees_2_r_Room']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='r_Employees_2_r_Room']" +
        "/edm:End[@Type='RefScenario.Employee' and @Multiplicity='*' and @Role='r_Employees']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='r_Employees_2_r_Room']" +
        "/edm:End[@Type='RefScenario.Room' and @Multiplicity='1' and @Role='r_Room']", payload);
    // BuildingRooms
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='BuildingRooms']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='BuildingRooms']" +
        "/edm:End[@Type='RefScenario.Building' and @Multiplicity='1' and @Role='r_Building']", payload);
    assertXpathExists("/edmx:Edmx/edmx:DataServices/edm:Schema/edm:Association[@Name='BuildingRooms']" +
        "/edm:End[@Type='RefScenario.Room' and @Multiplicity='*' and @Role='r_Rooms']", payload);
  }

  /** Verifies the default entity container, its entity sets, and association sets. */
  @Test
  public void testEntityContainer() throws Exception {
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']", payload);
    // EntitySets
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and "
            +
            "@m:IsDefaultEntityContainer='true']/edm:EntitySet[@Name='Employees' and " +
            "@EntityType='RefScenario.Employee']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:EntitySet[@Name='Teams' and @EntityType='RefScenario.Team']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:EntitySet[@Name='Rooms' and @EntityType='RefScenario.Room']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:EntitySet[@Name='Managers' and @EntityType='RefScenario.Manager']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and "
            +
            "@m:IsDefaultEntityContainer='true']/edm:EntitySet[@Name='Buildings' and " +
            "@EntityType='RefScenario.Building']",
        payload);
    // AssociationSets
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='ManagerEmployees' and " +
            "@Association='RefScenario.ManagerEmployees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='ManagerEmployees' and " +
            "@Association='RefScenario.ManagerEmployees']/edm:End[@EntitySet='Managers' and @Role='r_Manager']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='ManagerEmployees' and " +
            "@Association='RefScenario.ManagerEmployees']/edm:End[@EntitySet='Employees' and @Role='r_Employees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='TeamEmployees' and " +
            "@Association='RefScenario.TeamEmployees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='TeamEmployees' and " +
            "@Association='RefScenario.TeamEmployees']/edm:End[@EntitySet='Teams' and @Role='r_Team']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='TeamEmployees' and " +
            "@Association='RefScenario.TeamEmployees']/edm:End[@EntitySet='Employees' and @Role='r_Employees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='Team_2_r_SubTeam' and " +
            "@Association='RefScenario.Team_2_r_SubTeam']/edm:End[@EntitySet='Teams' and @Role='Team']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='Team_2_r_SubTeam' and " +
            "@Association='RefScenario.Team_2_r_SubTeam']/edm:End[@EntitySet='Teams' and @Role='r_SubTeam']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='r_Employees_2_r_Room' and " +
            "@Association='RefScenario.r_Employees_2_r_Room']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='r_Employees_2_r_Room' and " +
            "@Association='RefScenario.r_Employees_2_r_Room']/edm:End[@EntitySet='Rooms' and @Role='r_Room']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='r_Employees_2_r_Room' and " +
            "@Association='RefScenario.r_Employees_2_r_Room']/edm:End[@EntitySet='Employees' and @Role='r_Employees']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='BuildingRooms' and " +
            "@Association='RefScenario.BuildingRooms']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='BuildingRooms' and " +
            "@Association='RefScenario.BuildingRooms']/edm:End[@EntitySet='Buildings' and @Role='r_Building']",
        payload);
    assertXpathExists(
        "/edmx:Edmx/edmx:DataServices/edm:Schema/edm:EntityContainer[@Name='DefaultContainer' and " +
            "@m:IsDefaultEntityContainer='true']/edm:AssociationSet[@Name='BuildingRooms' and " +
            "@Association='RefScenario.BuildingRooms']/edm:End[@EntitySet='Rooms' and @Role='r_Rooms']",
        payload);
  }
}
| |
/*
* This file is part of the Jikes RVM project (http://jikesrvm.org).
*
* This file is licensed to You under the Common Public License (CPL);
* You may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.opensource.org/licenses/cpl1.0.php
*
* See the COPYRIGHT.txt file distributed with this work for information
* regarding copyright ownership.
*/
package org.jikesrvm.scheduler;
import java.lang.instrument.Instrumentation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.jikesrvm.VM;
import org.jikesrvm.Callbacks;
import org.jikesrvm.CommandLineArgs;
import org.jikesrvm.classloader.Atom;
import org.jikesrvm.classloader.RVMClass;
import org.jikesrvm.classloader.RVMClassLoader;
import org.jikesrvm.classloader.RVMMethod;
import org.jikesrvm.classloader.TypeReference;
import org.jikesrvm.runtime.Reflection;
import org.vmmagic.pragma.Entrypoint;
/**
* Thread in which user's "main" program runs.
*/
public final class MainThread extends Thread {
  // Command line: args[0] is the main class name, args[1..N] are forwarded to main().
  private final String[] args;
  // -javaagent: argument strings (the "-javaagent:" prefix has already been stripped).
  private final String[] agents;
  // Resolved in run() after class loading; exposed via getMainMethod().
  private RVMMethod mainMethod;
  // Set to true as the first action of run().
  protected boolean launched = false;
  // Enables verbose tracing of the startup sequence.
  private static final boolean dbg = false;

  /**
   * Create "main" thread.
   * Taken: args[0] = name of class containing "main" method
   *        args[1..N] = parameters to pass to "main" method
   */
  public MainThread(String[] args) {
    super("MainThread");
    setDaemon(false); // NB otherwise we inherit the boot threads daemon status
    this.agents = CommandLineArgs.getJavaAgentArgs();
    this.args = args;
    if (dbg) {
      VM.sysWriteln("MainThread(args.length == ", args.length, "): constructor done");
    }
  }

  /**
   * Runs every configured -javaagent premain hook, loading agent classes through
   * the given class loader.
   *
   * @param cl the application class loader used to load agent classes
   */
  private void runAgents(ClassLoader cl) {
    if (agents.length > 0) {
      Instrumentation instrumenter = null;
      if (VM.BuildForGnuClasspath) {
        try {
          // Obtain the Instrumentation instance reflectively from GNU Classpath support.
          instrumenter = (Instrumentation)Class.forName("gnu.java.lang.JikesRVMSupport")
            .getMethod("createInstrumentation").invoke(null);
          java.lang.JikesRVMSupport.initializeInstrumentation(instrumenter);
        } catch (Exception _) {
          // NOTE(review): instrumentation setup is best-effort -- failures are silently
          // ignored here, so agents then run with a null Instrumentation. Also, the
          // identifier '_' is reserved from Java 9 onward; rename if building with a newer JDK.
        }
      }
      for (String agent : agents) {
        /*
         * Parse agent string according to the form
         * given in the java.lang.instrumentation package
         * documentation:
         * jarpath[=options]
         *
         * (The -javaagent: part of the agent options has
         * already been stripped)
         */
        int equalsIndex = agent.indexOf('=');
        String agentJar;
        String agentOptions;
        if (equalsIndex != -1) {
          agentJar = agent.substring(0, equalsIndex);
          agentOptions = agent.substring(equalsIndex + 1);
        } else {
          agentJar = agent;
          agentOptions = "";
        }
        runAgent(instrumenter, cl, agentJar, agentOptions);
      }
    }
  }

  /**
   * Loads a single agent jar, reads its Premain-Class manifest entry, and invokes
   * {@code premain(String, Instrumentation)} on that class.
   *
   * <p>Exits the VM with a bogus-command-line status when the jar or manifest entry
   * is missing or unreadable.
   *
   * @param instrumenter the Instrumentation instance passed to premain (may be null)
   * @param cl the class loader used to load the agent class
   * @param agentJar path to the agent jar file
   * @param agentOptions option string following '=' in the agent argument ("" if none)
   */
  private static void runAgent(Instrumentation instrumenter, ClassLoader cl, String agentJar, String agentOptions) {
    Manifest mf = null;
    try {
      JarFile jf = new JarFile(agentJar);
      mf = jf.getManifest();
    } catch (Exception e) {
      VM.sysWriteln("vm: IO Exception opening JAR file ", agentJar, ": ", e.getMessage());
      VM.sysExit(VM.EXIT_STATUS_BOGUS_COMMAND_LINE_ARG);
    }
    if (mf == null) {
      VM.sysWriteln("The jar file is missing the manifest: ", agentJar);
      VM.sysExit(VM.EXIT_STATUS_BOGUS_COMMAND_LINE_ARG);
    }
    String agentClassName = mf.getMainAttributes().getValue("Premain-Class");
    if (agentClassName == null) {
      VM.sysWriteln("The jar file is missing the Premain-Class manifest entry for the agent class: ", agentJar);
      VM.sysExit(VM.EXIT_STATUS_BOGUS_COMMAND_LINE_ARG);
    }
    // TODO: By this stage all agent jars and classes they reference via their manifest
    // should presumably be available to the class loader -- confirm.
    try {
      Class<?> agentClass = cl.loadClass(agentClassName);
      Method agentPremainMethod = agentClass.getMethod("premain", new Class<?>[]{String.class, Instrumentation.class});
      agentPremainMethod.invoke(null, new Object[]{agentOptions, instrumenter});
    } catch (InvocationTargetException e) {
      // According to the spec, exceptions from premain() can be ignored
    } catch (Throwable e) {
      VM.sysWriteln("Failed to run the agent's premain: " + e.getMessage());
      e.printStackTrace();
      // NOTE(review): exits with status 0 even though the premain invocation failed --
      // confirm a non-zero status is not intended here.
      System.exit(0);
    }
  }

  /** Returns the resolved "main" method, or null before run() has located it. */
  RVMMethod getMainMethod() {
    return mainMethod;
  }

  /**
   * Run "main" thread.
   *
   * This code could be made a little shorter by relying on Reflection
   * to do the classloading and compilation. We intentionally do it here
   * to give us a chance to provide error messages that are specific to
   * not being able to find the main class the user wants to run.
   * This may be a little silly, since it results in code duplication
   * just to provide debug messages in a place where very little is actually
   * likely to go wrong, but there you have it....
   */
  @Override
  @Entrypoint
  public void run() {
    launched = true;
    if (dbg) VM.sysWriteln("MainThread.run() starting ");
    // Set up application class loader
    ClassLoader cl = RVMClassLoader.getApplicationClassLoader();
    setContextClassLoader(cl);
    // Run -javaagent premain hooks before loading the main class.
    runAgents(cl);
    if (dbg) VM.sysWrite("[MainThread.run() loading class to run... ");
    // find method to run
    // load class specified by args[0]
    RVMClass cls = null;
    try {
      Atom mainAtom = Atom.findOrCreateUnicodeAtom(args[0].replace('.', '/'));
      TypeReference mainClass = TypeReference.findOrCreate(cl, mainAtom.descriptorFromClassName());
      cls = mainClass.resolve().asClass();
      cls.resolve();
      cls.instantiate();
      cls.initialize();
    } catch (NoClassDefFoundError e) {
      if (dbg) VM.sysWrite("failed.]");
      // no such class
      VM.sysWrite(e + "\n");
      return;
    }
    if (dbg) VM.sysWriteln("loaded.]");
    // find "main" method
    //
    mainMethod = cls.findMainMethod();
    if (mainMethod == null) {
      // no such method
      VM.sysWrite(cls + " doesn't have a \"public static void main(String[])\" method to execute\n");
      return;
    }
    if (dbg) VM.sysWrite("[MainThread.run() making arg list... ");
    // create "main" argument list: everything after the class name
    //
    String[] mainArgs = new String[args.length - 1];
    for (int i = 0, n = mainArgs.length; i < n; ++i) {
      mainArgs[i] = args[i + 1];
    }
    if (dbg) VM.sysWriteln("made.]");
    if (dbg) VM.sysWrite("[MainThread.run() compiling main(String[])... ");
    // Compile main() eagerly before announcing startup completion.
    mainMethod.compile();
    if (dbg) VM.sysWriteln("compiled.]");
    // Notify other clients that the startup is complete.
    //
    Callbacks.notifyStartup();
    if (dbg) VM.sysWriteln("[MainThread.run() invoking \"main\" method... ");
    // invoke "main" method with argument list
    Reflection.invoke(mainMethod, null, new Object[]{mainArgs}, false);
    if (dbg) VM.sysWriteln(" MainThread.run(): \"main\" method completed.]");
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2017_10_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in LoadBalancerFrontendIPConfigurations.
*/
public class LoadBalancerFrontendIPConfigurationsInner {
    /** The Retrofit service to perform REST calls; created once in the constructor. */
    private LoadBalancerFrontendIPConfigurationsService service;
    /** The service client containing this operation class (supplies subscription, language, user agent). */
    private NetworkManagementClientImpl client;
/**
* Initializes an instance of LoadBalancerFrontendIPConfigurationsInner.
*
* @param retrofit the Retrofit instance built from a Retrofit Builder.
* @param client the instance of the service client containing this operation class.
*/
public LoadBalancerFrontendIPConfigurationsInner(Retrofit retrofit, NetworkManagementClientImpl client) {
this.service = retrofit.create(LoadBalancerFrontendIPConfigurationsService.class);
this.client = client;
}
    /**
     * The interface defining all the services for LoadBalancerFrontendIPConfigurations to be
     * used by Retrofit to perform actually REST calls.
     */
    interface LoadBalancerFrontendIPConfigurationsService {
        // Lists every frontend IP configuration of a load balancer (first page).
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2017_10_01.LoadBalancerFrontendIPConfigurations list" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations")
        Observable<Response<ResponseBody>> list(@Path("resourceGroupName") String resourceGroupName, @Path("loadBalancerName") String loadBalancerName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // Gets a single frontend IP configuration by name.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2017_10_01.LoadBalancerFrontendIPConfigurations get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations/{frontendIPConfigurationName}")
        Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("loadBalancerName") String loadBalancerName, @Path("frontendIPConfigurationName") String frontendIPConfigurationName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // Follows a server-provided nextLink URL to fetch a subsequent page of results.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2017_10_01.LoadBalancerFrontendIPConfigurations listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
/**
* Gets all the load balancer frontend IP configurations.
*
* @param resourceGroupName The name of the resource group.
* @param loadBalancerName The name of the load balancer.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the PagedList<FrontendIPConfigurationInner> object if successful.
*/
public PagedList<FrontendIPConfigurationInner> list(final String resourceGroupName, final String loadBalancerName) {
ServiceResponse<Page<FrontendIPConfigurationInner>> response = listSinglePageAsync(resourceGroupName, loadBalancerName).toBlocking().single();
return new PagedList<FrontendIPConfigurationInner>(response.body()) {
@Override
public Page<FrontendIPConfigurationInner> nextPage(String nextPageLink) {
return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
}
};
}
    /**
     * Gets all the load balancer frontend IP configurations.
     *
     * @param resourceGroupName The name of the resource group.
     * @param loadBalancerName The name of the load balancer.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<FrontendIPConfigurationInner>> listAsync(final String resourceGroupName, final String loadBalancerName, final ListOperationCallback<FrontendIPConfigurationInner> serviceCallback) {
        // The Func1 supplies the "next page" fetcher that AzureServiceFuture invokes while paging.
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(resourceGroupName, loadBalancerName),
            new Func1<String, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
/**
* Gets all the load balancer frontend IP configurations.
*
* @param resourceGroupName The name of the resource group.
* @param loadBalancerName The name of the load balancer.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<FrontendIPConfigurationInner> object
*/
public Observable<Page<FrontendIPConfigurationInner>> listAsync(final String resourceGroupName, final String loadBalancerName) {
return listWithServiceResponseAsync(resourceGroupName, loadBalancerName)
.map(new Func1<ServiceResponse<Page<FrontendIPConfigurationInner>>, Page<FrontendIPConfigurationInner>>() {
@Override
public Page<FrontendIPConfigurationInner> call(ServiceResponse<Page<FrontendIPConfigurationInner>> response) {
return response.body();
}
});
}
    /**
     * Gets all the load balancer frontend IP configurations.
     *
     * @param resourceGroupName The name of the resource group.
     * @param loadBalancerName The name of the load balancer.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;FrontendIPConfigurationInner&gt; object
     */
    public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> listWithServiceResponseAsync(final String resourceGroupName, final String loadBalancerName) {
        // Emit the first page, then recursively concatenate every following page
        // until a page carries no nextPageLink.
        return listSinglePageAsync(resourceGroupName, loadBalancerName)
            .concatMap(new Func1<ServiceResponse<Page<FrontendIPConfigurationInner>>, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> call(ServiceResponse<Page<FrontendIPConfigurationInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: stop the recursion.
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
/**
 * Gets one page of the load balancer frontend IP configurations.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<FrontendIPConfigurationInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> listSinglePageAsync(final String resourceGroupName, final String loadBalancerName) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (loadBalancerName == null) {
throw new IllegalArgumentException("Parameter loadBalancerName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
final String apiVersion = "2017-10-01";
return service.list(resourceGroupName, loadBalancerName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> call(Response<ResponseBody> response) {
try {
// Deserialize the raw HTTP response; re-wrap PageImpl as the public Page type.
ServiceResponse<PageImpl<FrontendIPConfigurationInner>> result = listDelegate(response);
return Observable.just(new ServiceResponse<Page<FrontendIPConfigurationInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Maps the raw Retrofit response to a typed page. HTTP 200 is deserialized as a
// page of results; every other status is converted to a CloudException.
private ServiceResponse<PageImpl<FrontendIPConfigurationInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient()
            .responseBuilderFactory()
            .<PageImpl<FrontendIPConfigurationInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<FrontendIPConfigurationInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
}
/**
 * Gets load balancer frontend IP configuration (blocking).
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param frontendIPConfigurationName The name of the frontend IP configuration.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the FrontendIPConfigurationInner object if successful.
 */
public FrontendIPConfigurationInner get(String resourceGroupName, String loadBalancerName, String frontendIPConfigurationName) {
    // Block on the async variant and hand back the deserialized body.
    ServiceResponse<FrontendIPConfigurationInner> response =
        getWithServiceResponseAsync(resourceGroupName, loadBalancerName, frontendIPConfigurationName).toBlocking().single();
    return response.body();
}
/**
 * Gets load balancer frontend IP configuration, reporting the result through a callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param frontendIPConfigurationName The name of the frontend IP configuration.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<FrontendIPConfigurationInner> getAsync(String resourceGroupName, String loadBalancerName, String frontendIPConfigurationName, final ServiceCallback<FrontendIPConfigurationInner> serviceCallback) {
    Observable<ServiceResponse<FrontendIPConfigurationInner>> responseObservable =
        getWithServiceResponseAsync(resourceGroupName, loadBalancerName, frontendIPConfigurationName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Gets load balancer frontend IP configuration.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param frontendIPConfigurationName The name of the frontend IP configuration.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the FrontendIPConfigurationInner object
 */
public Observable<FrontendIPConfigurationInner> getAsync(String resourceGroupName, String loadBalancerName, String frontendIPConfigurationName) {
    // Strip the ServiceResponse wrapper and surface only the payload.
    final Func1<ServiceResponse<FrontendIPConfigurationInner>, FrontendIPConfigurationInner> unwrapBody =
        new Func1<ServiceResponse<FrontendIPConfigurationInner>, FrontendIPConfigurationInner>() {
            @Override
            public FrontendIPConfigurationInner call(ServiceResponse<FrontendIPConfigurationInner> response) {
                return response.body();
            }
        };
    return getWithServiceResponseAsync(resourceGroupName, loadBalancerName, frontendIPConfigurationName).map(unwrapBody);
}
/**
 * Gets load balancer frontend IP configuration.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param frontendIPConfigurationName The name of the frontend IP configuration.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the FrontendIPConfigurationInner object
 */
public Observable<ServiceResponse<FrontendIPConfigurationInner>> getWithServiceResponseAsync(String resourceGroupName, String loadBalancerName, String frontendIPConfigurationName) {
    // Validation order (and messages) must stay stable for callers that match on them.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (loadBalancerName == null) {
        throw new IllegalArgumentException("Parameter loadBalancerName is required and cannot be null.");
    }
    if (frontendIPConfigurationName == null) {
        throw new IllegalArgumentException("Parameter frontendIPConfigurationName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2017-10-01";
    // Turn the raw HTTP response into a typed ServiceResponse, or an error.
    final Func1<Response<ResponseBody>, Observable<ServiceResponse<FrontendIPConfigurationInner>>> deserialize =
        new Func1<Response<ResponseBody>, Observable<ServiceResponse<FrontendIPConfigurationInner>>>() {
            @Override
            public Observable<ServiceResponse<FrontendIPConfigurationInner>> call(Response<ResponseBody> response) {
                try {
                    return Observable.just(getDelegate(response));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        };
    return service.get(resourceGroupName, loadBalancerName, frontendIPConfigurationName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(deserialize);
}
// Maps the raw Retrofit response to a typed result. HTTP 200 is deserialized as
// a FrontendIPConfigurationInner; every other status becomes a CloudException.
private ServiceResponse<FrontendIPConfigurationInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient()
            .responseBuilderFactory()
            .<FrontendIPConfigurationInner, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<FrontendIPConfigurationInner>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
}
/**
 * Gets all the load balancer frontend IP configurations (blocking), starting
 * from a continuation link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList<FrontendIPConfigurationInner> object if successful.
 */
public PagedList<FrontendIPConfigurationInner> listNext(final String nextPageLink) {
    Page<FrontendIPConfigurationInner> firstPage = listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
    // Lazily fetch subsequent pages as the PagedList is iterated.
    return new PagedList<FrontendIPConfigurationInner>(firstPage) {
        @Override
        public Page<FrontendIPConfigurationInner> nextPage(String link) {
            return listNextSinglePageAsync(link).toBlocking().single().body();
        }
    };
}
/**
 * Gets all the load balancer frontend IP configurations starting from a
 * continuation link, reporting each page through the supplied callback.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<FrontendIPConfigurationInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<FrontendIPConfigurationInner>> serviceFuture, final ListOperationCallback<FrontendIPConfigurationInner> serviceCallback) {
    // Page-turner invoked by the future whenever a nextLink is still available.
    final Func1<String, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>> nextPageFetcher =
        new Func1<String, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> call(String link) {
                return listNextSinglePageAsync(link);
            }
        };
    return AzureServiceFuture.fromPageResponse(listNextSinglePageAsync(nextPageLink), nextPageFetcher, serviceCallback);
}
/**
 * Gets all the load balancer frontend IP configurations starting from a
 * continuation link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<FrontendIPConfigurationInner> object
 */
public Observable<Page<FrontendIPConfigurationInner>> listNextAsync(final String nextPageLink) {
    // Unwrap each ServiceResponse, keeping only the page payload.
    final Func1<ServiceResponse<Page<FrontendIPConfigurationInner>>, Page<FrontendIPConfigurationInner>> unwrapBody =
        new Func1<ServiceResponse<Page<FrontendIPConfigurationInner>>, Page<FrontendIPConfigurationInner>>() {
            @Override
            public Page<FrontendIPConfigurationInner> call(ServiceResponse<Page<FrontendIPConfigurationInner>> response) {
                return response.body();
            }
        };
    return listNextWithServiceResponseAsync(nextPageLink).map(unwrapBody);
}
/**
 * Gets all the load balancer frontend IP configurations starting from a
 * continuation link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<FrontendIPConfigurationInner> object
 */
public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
    // Recursively follow nextLinks so every remaining page is emitted in order.
    final Func1<ServiceResponse<Page<FrontendIPConfigurationInner>>, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>> chainNextPages =
        new Func1<ServiceResponse<Page<FrontendIPConfigurationInner>>, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> call(ServiceResponse<Page<FrontendIPConfigurationInner>> page) {
                final String next = page.body().nextPageLink();
                if (next == null) {
                    // Last page: emit it and complete.
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(next));
            }
        };
    return listNextSinglePageAsync(nextPageLink).concatMap(chainNextPages);
}
/**
 * Gets one page of the load balancer frontend IP configurations from a
 * continuation link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<FrontendIPConfigurationInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> listNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    // The nextLink returned by the service is already the complete request URL;
    // the previous String.format("%s", nextPageLink) was a no-op and was dropped.
    final String nextUrl = nextPageLink;
    return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<FrontendIPConfigurationInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize, then re-wrap PageImpl as the public Page type.
                    ServiceResponse<PageImpl<FrontendIPConfigurationInner>> result = listNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<FrontendIPConfigurationInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
// Maps the raw Retrofit response of a continuation request to a typed page.
// HTTP 200 is deserialized as a page; every other status becomes a CloudException.
private ServiceResponse<PageImpl<FrontendIPConfigurationInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient()
            .responseBuilderFactory()
            .<PageImpl<FrontendIPConfigurationInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<FrontendIPConfigurationInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
}
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.presentation.ims.qti.editor;
import java.util.List;
import org.olat.lms.ims.qti.editor.QTIEditorPackageEBL;
import org.olat.lms.ims.qti.objects.ChoiceQuestion;
import org.olat.lms.ims.qti.objects.ChoiceResponse;
import org.olat.lms.ims.qti.objects.Item;
import org.olat.lms.ims.qti.objects.Material;
import org.olat.lms.ims.qti.objects.Mattext;
import org.olat.lms.ims.qti.objects.Question;
import org.olat.lms.ims.qti.objects.Response;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.Component;
import org.olat.presentation.framework.core.components.form.flexible.impl.components.SimpleFormErrorText;
import org.olat.presentation.framework.core.components.velocity.VelocityContainer;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.ControllerEventListener;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.presentation.framework.core.control.controller.BasicController;
import org.olat.presentation.framework.core.control.generic.closablewrapper.CloseableModalController;
import org.olat.presentation.framework.core.control.generic.modal.DialogBoxController;
import org.olat.presentation.framework.core.control.generic.modal.DialogBoxUIFactory;
import org.olat.presentation.framework.core.translator.PackageUtil;
import org.olat.presentation.framework.core.translator.Translator;
import org.olat.system.event.Event;
/**
 * Edit controller for choice-based QTI items. Renders a single-choice tab by
 * default and switches to the multiple-choice or Kprim tab depending on the
 * question type, handles response reordering/adding/deleting, and persists
 * score settings submitted from the velocity forms.
 *
 * Initial Date: Oct 21, 2004 <br>
 *
 * @author mike
 * @author oliver.buehler@agility-informatik.ch
 */
public class ChoiceItemController extends BasicController implements ControllerEventListener {
/*
 * Logging, Velocity
 */
private static final String PACKAGE = PackageUtil.getPackageName(ChoiceItemController.class);
private static final String VC_ROOT = PackageUtil.getPackageVelocityRoot(PACKAGE);
// Main velocity view; the page is swapped per question type in the constructor.
private VelocityContainer main;
private Translator trnsltr;
// QTI item currently being edited.
private Item item;
private final QTIEditorPackageEBL qtiPackage;
// Confirmation dialog for deleting a response; user object holds the position index.
private DialogBoxController delYesNoCtrl;
private final boolean restrictedEdit;
// Exactly one of editQuestion/editResponse is non-null while the material
// form is open; used in event() to route the before-change notification.
private Material editQuestion;
private Response editResponse;
private CloseableModalController dialogCtr;
private MaterialFormController matFormCtr;
/**
 * Creates the controller and selects the velocity page matching the item's
 * question type (SC default, MC, or Kprim).
 *
 * @param item the QTI item to edit
 * @param qtiPackage the editor package used for media URLs and serialization
 * @param trnsltr translator for form texts
 * @param ureq current user request
 * @param wControl window control
 * @param restrictedEdit true when only limited editing is allowed
 */
public ChoiceItemController(final Item item, final QTIEditorPackageEBL qtiPackage, final Translator trnsltr, final UserRequest ureq, final WindowControl wControl,
final boolean restrictedEdit) {
super(ureq, wControl);
this.restrictedEdit = restrictedEdit;
this.item = item;
this.qtiPackage = qtiPackage;
this.trnsltr = trnsltr;
main = new VelocityContainer("scitem", VC_ROOT + "/tab_scItem.html", trnsltr, this);
main.contextPut("question", item.getQuestion());
main.contextPut("isSurveyMode", qtiPackage.getQTIDocument().isSurvey() ? "true" : "false");
main.contextPut("isRestrictedEdit", restrictedEdit ? Boolean.TRUE : Boolean.FALSE);
main.contextPut("mediaBaseURL", qtiPackage.getMediaBaseURL());
if (item.getQuestion().getType() == Question.TYPE_MC) {
main.setPage(VC_ROOT + "/tab_mcItem.html");
} else if (item.getQuestion().getType() == Question.TYPE_KPRIM) {
main.setPage(VC_ROOT + "/tab_kprimItem.html");
}
putInitialPanel(main);
}
/**
 * Dispatches UI commands from the velocity page: response reordering
 * (up/down), opening the material editors (editq/editr), adding/deleting
 * choices, and the per-type submit commands (ssc/smc/skprim). Every handled
 * command ends with the QTI document being serialized.
 */
@Override
protected void event(final UserRequest ureq, final Component source, final Event event) {
if (source == main) {
// olat::: as: improve easy fix since almost all operations change the main vc.
main.setDirty(true);
main.contextRemove("formError");
final String cmd = event.getCommand();
final String sPosid = ureq.getParameter("posid");
int posid = 0;
if (sPosid != null) {
// NOTE(review): a non-numeric "posid" would throw NumberFormatException here — confirm the view only emits integers.
posid = Integer.parseInt(sPosid);
}
if (cmd.equals("up")) {
// Move the response one position towards the top.
if (posid > 0) {
final List<Response> elements = item.getQuestion().getResponses();
final Response obj = elements.remove(posid);
elements.add(posid - 1, obj);
}
} else if (cmd.equals("down")) {
// Move the response one position towards the bottom.
final List<Response> elements = item.getQuestion().getResponses();
if (posid < elements.size() - 1) {
final Response obj = elements.remove(posid);
elements.add(posid + 1, obj);
}
} else if (cmd.equals("editq")) {
// Open the material editor for the question text.
editQuestion = item.getQuestion().getQuestion();
displayMaterialFormController(ureq, editQuestion, restrictedEdit);
} else if (cmd.equals("editr")) {
// Open the material editor for the selected response's content.
editResponse = ((Response) item.getQuestion().getResponses().get(posid));
final Material responseMat = editResponse.getContent();
displayMaterialFormController(ureq, responseMat, restrictedEdit);
} else if (cmd.equals("addchoice")) {
final ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
final List<Response> choices = question.getResponses();
final ChoiceResponse newChoice = new ChoiceResponse();
newChoice.getContent().add(new Mattext(trnsltr.translate("newresponsetext")));
newChoice.setCorrect(false);
newChoice.setPoints(-1f); // default value is negative to make sure
// people understand the meaning of this value
choices.add(newChoice);
} else if (cmd.equals("del")) {
// Ask for confirmation first; actual removal happens in the controller event handler.
delYesNoCtrl = activateYesNoDialog(ureq, "", trnsltr.translate("confirm.delete.element"), null);
// NOTE(review): new Integer(...) is a deprecated boxing constructor; Integer.valueOf would be preferred.
delYesNoCtrl.setUserObject(new Integer(posid));
} else if (cmd.equals("ssc")) { // submit sc
// Single-choice submit: validate score, then mark exactly one choice correct.
float score = 0;
try {
score = Float.parseFloat(ureq.getParameter("single_score"));
if (score <= 0) {
main.contextPut("formError", true);
main.put("formErrorText", new SimpleFormErrorText("questionform.validation.ssc.score", translate("questionform.validation.ssc.score")));
return;
}
} catch (NumberFormatException ex) {
main.contextPut("formError", true);
main.put("formErrorText", new SimpleFormErrorText("questionform.validation.score.nan", translate("questionform.validation.score.nan")));
return;
}
final ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
final List<Response> q_choices = question.getResponses();
boolean correctChoiceDefined = false;
final String correctChoice = ureq.getParameter("correctChoice");
for (int i = 0; i < q_choices.size(); i++) {
final Response choice = q_choices.get(i);
if (correctChoice != null && correctChoice.equals("value_q" + i)) {
choice.setCorrect(true);
choice.setPoints(score);
correctChoiceDefined = true;
} else {
choice.setCorrect(false);
}
}
if (!correctChoiceDefined) {
// TODO Oliver
showWarning("questionform.validation.ssc.selection");
return;
}
question.setSingleCorrectScore(score);
question.setMaxValue(score);
} else if (cmd.equals("smc")) { // submit mc
// Multiple-choice submit: valuation method, per-choice correctness/points, min/max.
final ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
// NOTE(review): getParameter("valuation_method") could be null and would NPE here — confirm the form always submits it.
question.setSingleCorrect(ureq.getParameter("valuation_method").equals("single"));
if (question.isSingleCorrect()) {
float score = 0;
try {
score = Float.parseFloat(ureq.getParameter("single_score"));
if (score <= 0) {
main.contextPut("formError", true);
main.put("formErrorText", new SimpleFormErrorText("questionform.validation.smc.score", translate("questionform.validation.smc.score")));
return;
}
} catch (NumberFormatException ex) {
main.contextPut("formError", true);
main.put("formErrorText", new SimpleFormErrorText("questionform.validation.score.nan", translate("questionform.validation.score.nan")));
return;
}
question.setSingleCorrectScore(score);
}
final List<Response> choices = question.getResponses();
boolean hasZeroPointChoice = false;
for (int i = 0; i < choices.size(); i++) {
final Response choice = choices.get(i);
if (ureq.getParameter("value_q" + i) != null && ureq.getParameter("value_q" + i).equalsIgnoreCase("true")) {
choice.setCorrect(true);
} else {
choice.setCorrect(false);
}
// setPoints accepts the raw request string; presumably parsed inside Response — verify.
choice.setPoints(ureq.getParameter("points_q" + i));
if (choice.getPoints() == 0) {
hasZeroPointChoice = true;
}
}
if (hasZeroPointChoice && !question.isSingleCorrect()) {
getWindowControl().setInfo(trnsltr.translate("editor.info.mc.zero.points"));
}
// set min/max before single_correct score
// will be corrected by single_correct score afterwards
question.setMinValue(ureq.getParameter("min_value"));
question.setMaxValue(ureq.getParameter("max_value"));
} else if (cmd.equals("skprim")) { // submit kprim
// Kprim submit: each of the four choices is worth a quarter of the max value.
float maxValue = 0;
try {
maxValue = Float.parseFloat(ureq.getParameter("max_value"));
} catch (final NumberFormatException e) {
// invalid input, set maxValue 0
}
final ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
final List<Response> q_choices = question.getResponses();
for (int i = 0; i < q_choices.size(); i++) {
final String correctChoice = ureq.getParameter("correctChoice_q" + i);
final ChoiceResponse choice = (ChoiceResponse) q_choices.get(i);
choice.setPoints(maxValue / 4);
if ("correct".equals(correctChoice)) {
choice.setCorrect(true);
} else {
choice.setCorrect(false);
}
}
question.setMaxValue(maxValue);
}
// Persist every change straight away.
qtiPackage.serializeQTIDocument();
}
}
/**
 * Handles events from child controllers: material-form change notifications
 * (forwarded as NodeBeforeChangeEvent), material-form completion/cancel
 * (dialog teardown), and the delete-confirmation dialog.
 */
@Override
protected void event(final UserRequest ureq, final Controller controller, final Event event) {
if (controller == matFormCtr) {
if (event instanceof QTIObjectBeforeChangeEvent) {
final QTIObjectBeforeChangeEvent qobce = (QTIObjectBeforeChangeEvent) event;
final NodeBeforeChangeEvent nce = new NodeBeforeChangeEvent();
if (editQuestion != null) {
// Question material is being changed.
nce.setNewQuestionMaterial(qobce.getContent());
nce.setItemIdent(item.getIdent());
nce.setQuestionIdent(editQuestion.getId());
nce.setMatIdent(qobce.getId());
fireEvent(ureq, nce);
} else if (editResponse != null) {
// Response material is being changed.
nce.setNewResponseMaterial(qobce.getContent());
nce.setItemIdent(item.getIdent());
nce.setResponseIdent(editResponse.getIdent());
nce.setMatIdent(qobce.getId());
fireEvent(ureq, nce);
}
} else if (event == Event.DONE_EVENT || event == Event.CANCELLED_EVENT) {
if (event == Event.DONE_EVENT) {
// serialize document
qtiPackage.serializeQTIDocument();
// force rerendering of view
main.setDirty(true);
editQuestion = null;
editResponse = null;
}
// dispose controllers
dialogCtr.deactivate();
dialogCtr.dispose();
dialogCtr = null;
matFormCtr.dispose();
matFormCtr = null;
}
} else if (controller == dialogCtr) {
if (event == Event.CANCELLED_EVENT) {
dialogCtr.dispose();
dialogCtr = null;
matFormCtr.dispose();
matFormCtr = null;
}
} else if (controller == delYesNoCtrl) {
if (DialogBoxUIFactory.isYesEvent(event)) {
// Confirmed delete: remove the response at the stored position.
item.getQuestion().getResponses().remove(((Integer) delYesNoCtrl.getUserObject()).intValue());
main.setDirty(true);// repaint
}
}
}
/**
 * Displays the MaterialFormController in a closable box.
 *
 * @param ureq
 * @param mat the material to edit
 * @param isRestrictedEditMode true when only limited editing is allowed
 */
private void displayMaterialFormController(final UserRequest ureq, final Material mat, final boolean isRestrictedEditMode) {
matFormCtr = new MaterialFormController(ureq, getWindowControl(), mat, qtiPackage, isRestrictedEditMode);
matFormCtr.addControllerListener(this);
dialogCtr = new CloseableModalController(getWindowControl(), "close", matFormCtr.getInitialComponent());
matFormCtr.addControllerListener(dialogCtr);
dialogCtr.activate();
}
/**
 * Releases references and disposes any still-open child controllers.
 */
@Override
protected void doDispose() {
main = null;
item = null;
trnsltr = null;
if (dialogCtr != null) {
dialogCtr.dispose();
dialogCtr = null;
}
if (matFormCtr != null) {
matFormCtr.dispose();
matFormCtr = null;
}
}
}
| |
package org.motechproject.nms.rejectionhandler.domain;
import org.motechproject.mds.annotations.Entity;
import org.motechproject.mds.annotations.Field;
@Entity(tableName = "nms_mother_rejects")
public class MotherImportRejection {
// ---------------------------------------------------------------------------
// Persisted columns of a rejected mother import record (table
// "nms_mother_rejects"). Field names appear to mirror the columns of the
// upstream mother import feed — TODO confirm against the importer/CSV spec.
// Many date fields are stored as raw strings, presumably in the source
// system's format; verify before parsing.
// ---------------------------------------------------------------------------
// --- location hierarchy (state -> district -> taluka -> block -> facility -> village) ---
@Field
private Long stateId;
@Field
private Long districtId;
@Field
private String districtName;
@Field
private String talukaId;
@Field
private String talukaName;
@Field
private Long healthBlockId;
@Field
private String healthBlockName;
@Field
private Long phcId;
@Field
private String phcName;
@Field
private Long subcentreId;
@Field
private String subcentreName;
@Field
private Long villageId;
@Field
private String villageName;
@Field
private Integer yr;
@Field
private String gPVillage;
// --- beneficiary identity and contact details ---
@Field
private String address;
@Field
private String idNo;
@Field
private String name;
@Field
private String husbandName;
@Field
private String phoneNumberWhom;
@Field
private String birthDate;
@Field
private String jSYBeneficiary;
@Field
private String caste;
// --- assigned health workers ---
@Field
private String subcenterName1;
@Field
private String aNMName;
@Field
private String aNMPhone;
@Field
private String ashaName;
@Field
private String ashaPhone;
@Field
private String deliveryLnkFacility;
@Field
private String facilityName;
// --- antenatal care milestones (dates kept as strings) ---
@Field
private String lmpDate;
@Field
private String aNC1Date;
@Field
private String aNC2Date;
@Field
private String aNC3Date;
@Field
private String aNC4Date;
@Field
private String tT1Date;
@Field
private String tT2Date;
@Field
private String tTBoosterDate;
@Field
private String iFA100GivenDate;
@Field
private String anemia;
@Field
private String aNCComplication;
@Field
private String rTISTI;
// --- delivery and postnatal details ---
@Field
private String dlyDate;
@Field
private String dlyPlaceHomeType;
@Field
private String dlyPlacePublic;
@Field
private String dlyPlacePrivate;
@Field
private String dlyType;
@Field
private String dlyComplication;
@Field
private String dischargeDate;
@Field
private String jSYPaidDate;
@Field
private String abortion;
@Field
private String pNCHomeVisit;
@Field
private String pNCComplication;
@Field
private String pPCMethod;
@Field
private String pNCCheckup;
// --- delivery outcomes: up to four children ---
@Field
private Integer outcomeNos;
@Field
private String child1Name;
@Field
private String child1Sex;
@Field
private Double child1Wt;
@Field
private String child1Brestfeeding;
@Field
private String child2Name;
@Field
private String child2Sex;
@Field
private Double child2Wt;
@Field
private String child2Brestfeeding;
@Field
private String child3Name;
@Field
private String child3Sex;
@Field
private Double child3Wt;
@Field
private String child3Brestfeeding;
@Field
private String child4Name;
@Field
private String child4Sex;
@Field
private Double child4Wt;
@Field
private String child4Brestfeeding;
// --- record bookkeeping from the source system ---
@Field
private Integer age;
@Field
private String mTHRREGDATE;
@Field
private String lastUpdateDate;
@Field
private String remarks;
@Field
private Integer aNMID;
@Field
private Integer aSHAID;
@Field
private Boolean callAns;
@Field
private Integer noCallReason;
@Field
private Integer noPhoneReason;
@Field
private Integer createdBy;
@Field
private Integer updatedBy;
// NOTE(review): an Integer looks too small to hold a 12-digit Aadhaar number — confirm intended content.
@Field
private Integer aadharNo;
@Field
private Integer bPLAPL;
@Field
private Integer eID;
@Field
private String eIDTime;
@Field
private Integer entryType;
@Field
private String registrationNo;
@Field
private Long caseNo;
@Field
private String mobileNo;
@Field
private String abortionType;
@Field
private String deliveryOutcomes;
@Field
private String execDate;
// --- rejection metadata added by this module ---
@Field
private Boolean accepted;
@Field
private String rejectionReason;
@Field
private String source;
@Field
private String action;
// ---------------------------------------------------------------------------
// Plain JavaBean accessors for the fields above; no validation or conversion
// is performed. Note the lower-camel getter names for acronym-prefixed fields
// (e.g. getgPVillage, getaNMName) follow JavaBeans capitalization of the
// underlying field names.
// ---------------------------------------------------------------------------
public Long getStateId() {
return stateId;
}
public void setStateId(Long stateId) {
this.stateId = stateId;
}
public Long getDistrictId() {
return districtId;
}
public void setDistrictId(Long districtId) {
this.districtId = districtId;
}
public String getDistrictName() {
return districtName;
}
public void setDistrictName(String districtName) {
this.districtName = districtName;
}
public String getTalukaId() {
return talukaId;
}
public void setTalukaId(String talukaId) {
this.talukaId = talukaId;
}
public String getTalukaName() {
return talukaName;
}
public void setTalukaName(String talukaName) {
this.talukaName = talukaName;
}
public Long getHealthBlockId() {
return healthBlockId;
}
public void setHealthBlockId(Long healthBlockId) {
this.healthBlockId = healthBlockId;
}
public String getHealthBlockName() {
return healthBlockName;
}
public void setHealthBlockName(String healthBlockName) {
this.healthBlockName = healthBlockName;
}
public Long getPhcId() {
return phcId;
}
public void setPhcId(Long phcId) {
this.phcId = phcId;
}
public String getPhcName() {
return phcName;
}
public void setPhcName(String phcName) {
this.phcName = phcName;
}
public Long getSubcentreId() {
return subcentreId;
}
public void setSubcentreId(Long subcentreId) {
this.subcentreId = subcentreId;
}
public String getSubcentreName() {
return subcentreName;
}
public void setSubcentreName(String subcentreName) {
this.subcentreName = subcentreName;
}
public Long getVillageId() {
return villageId;
}
public void setVillageId(Long villageId) {
this.villageId = villageId;
}
public String getVillageName() {
return villageName;
}
public void setVillageName(String villageName) {
this.villageName = villageName;
}
public Integer getYr() {
return yr;
}
public void setYr(Integer yr) {
this.yr = yr;
}
public String getgPVillage() {
return gPVillage;
}
public void setgPVillage(String gPVillage) {
this.gPVillage = gPVillage;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getIdNo() {
return idNo;
}
public void setIdNo(String idNo) {
this.idNo = idNo;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getHusbandName() {
return husbandName;
}
public void setHusbandName(String husbandName) {
this.husbandName = husbandName;
}
public String getPhoneNumberWhom() {
return phoneNumberWhom;
}
public void setPhoneNumberWhom(String phoneNumberWhom) {
this.phoneNumberWhom = phoneNumberWhom;
}
public String getBirthDate() {
return birthDate;
}
public void setBirthDate(String birthDate) {
this.birthDate = birthDate;
}
public String getjSYBeneficiary() {
return jSYBeneficiary;
}
public void setjSYBeneficiary(String jSYBeneficiary) {
this.jSYBeneficiary = jSYBeneficiary;
}
public String getCaste() {
return caste;
}
public void setCaste(String caste) {
this.caste = caste;
}
public String getSubcenterName1() {
return subcenterName1;
}
public void setSubcenterName1(String subcenterName1) {
this.subcenterName1 = subcenterName1;
}
public String getaNMName() {
return aNMName;
}
public void setaNMName(String aNMName) {
this.aNMName = aNMName;
}
public String getaNMPhone() {
return aNMPhone;
}
public void setaNMPhone(String aNMPhone) {
this.aNMPhone = aNMPhone;
}
public String getAshaName() {
return ashaName;
}
public void setAshaName(String ashaName) {
this.ashaName = ashaName;
}
public String getAshaPhone() {
return ashaPhone;
}
public void setAshaPhone(String ashaPhone) {
this.ashaPhone = ashaPhone;
}
public String getDeliveryLnkFacility() {
return deliveryLnkFacility;
}
public void setDeliveryLnkFacility(String deliveryLnkFacility) {
this.deliveryLnkFacility = deliveryLnkFacility;
}
public String getFacilityName() {
return facilityName;
}
public void setFacilityName(String facilityName) {
this.facilityName = facilityName;
}
public String getLmpDate() {
return lmpDate;
}
public void setLmpDate(String lmpDate) {
this.lmpDate = lmpDate;
}
public String getaNC1Date() {
return aNC1Date;
}
public void setaNC1Date(String aNC1Date) {
this.aNC1Date = aNC1Date;
}
public String getaNC2Date() {
return aNC2Date;
}
public void setaNC2Date(String aNC2Date) {
this.aNC2Date = aNC2Date;
}
public String getaNC3Date() {
return aNC3Date;
}
public void setaNC3Date(String aNC3Date) {
this.aNC3Date = aNC3Date;
}
public String getaNC4Date() {
return aNC4Date;
}
public void setaNC4Date(String aNC4Date) {
this.aNC4Date = aNC4Date;
}
public String gettT1Date() {
return tT1Date;
}
public void settT1Date(String tT1Date) {
this.tT1Date = tT1Date;
}
public String gettT2Date() {
return tT2Date;
}
public void settT2Date(String tT2Date) {
this.tT2Date = tT2Date;
}
public String gettTBoosterDate() {
return tTBoosterDate;
}
public void settTBoosterDate(String tTBoosterDate) {
this.tTBoosterDate = tTBoosterDate;
}
public String getiFA100GivenDate() {
return iFA100GivenDate;
}
public void setiFA100GivenDate(String iFA100GivenDate) {
this.iFA100GivenDate = iFA100GivenDate;
}
public String getAnemia() {
return anemia;
}
public void setAnemia(String anemia) {
this.anemia = anemia;
}
public String getaNCComplication() {
return aNCComplication;
}
public void setaNCComplication(String aNCComplication) {
this.aNCComplication = aNCComplication;
}
public String getrTISTI() {
return rTISTI;
}
public void setrTISTI(String rTISTI) {
this.rTISTI = rTISTI;
}
public String getDlyDate() {
return dlyDate;
}
public void setDlyDate(String dlyDate) {
this.dlyDate = dlyDate;
}
public String getDlyPlaceHomeType() {
return dlyPlaceHomeType;
}
public void setDlyPlaceHomeType(String dlyPlaceHomeType) {
this.dlyPlaceHomeType = dlyPlaceHomeType;
}
public String getDlyPlacePublic() {
return dlyPlacePublic;
}
public void setDlyPlacePublic(String dlyPlacePublic) {
this.dlyPlacePublic = dlyPlacePublic;
}
public String getDlyPlacePrivate() {
return dlyPlacePrivate;
}
public void setDlyPlacePrivate(String dlyPlacePrivate) {
this.dlyPlacePrivate = dlyPlacePrivate;
}
public String getDlyType() {
return dlyType;
}
public void setDlyType(String dlyType) {
this.dlyType = dlyType;
}
public String getDlyComplication() {
return dlyComplication;
}
public void setDlyComplication(String dlyComplication) {
this.dlyComplication = dlyComplication;
}
public String getDischargeDate() {
return dischargeDate;
}
public void setDischargeDate(String dischargeDate) {
this.dischargeDate = dischargeDate;
}
public String getjSYPaidDate() {
return jSYPaidDate;
}
public void setjSYPaidDate(String jSYPaidDate) {
this.jSYPaidDate = jSYPaidDate;
}
public String getAbortion() {
return abortion;
}
public void setAbortion(String abortion) {
this.abortion = abortion;
}
public String getpNCHomeVisit() {
return pNCHomeVisit;
}
public void setpNCHomeVisit(String pNCHomeVisit) {
this.pNCHomeVisit = pNCHomeVisit;
}
public String getpNCComplication() {
return pNCComplication;
}
public void setpNCComplication(String pNCComplication) {
this.pNCComplication = pNCComplication;
}
public String getpPCMethod() {
return pPCMethod;
}
public void setpPCMethod(String pPCMethod) {
this.pPCMethod = pPCMethod;
}
public String getpNCCheckup() {
return pNCCheckup;
}
public void setpNCCheckup(String pNCCheckup) {
this.pNCCheckup = pNCCheckup;
}
public Integer getOutcomeNos() {
return outcomeNos;
}
public void setOutcomeNos(Integer outcomeNos) {
this.outcomeNos = outcomeNos;
}
public String getChild1Name() {
return child1Name;
}
public void setChild1Name(String child1Name) {
this.child1Name = child1Name;
}
public String getChild1Sex() {
return child1Sex;
}
public void setChild1Sex(String child1Sex) {
this.child1Sex = child1Sex;
}
public Double getChild1Wt() {
return child1Wt;
}
public void setChild1Wt(Double child1Wt) {
this.child1Wt = child1Wt;
}
public String getChild1Brestfeeding() {
return child1Brestfeeding;
}
public void setChild1Brestfeeding(String child1Brestfeeding) {
this.child1Brestfeeding = child1Brestfeeding;
}
public String getChild2Name() {
return child2Name;
}
public void setChild2Name(String child2Name) {
this.child2Name = child2Name;
}
public String getChild2Sex() {
return child2Sex;
}
public void setChild2Sex(String child2Sex) {
this.child2Sex = child2Sex;
}
public Double getChild2Wt() {
return child2Wt;
}
public void setChild2Wt(Double child2Wt) {
this.child2Wt = child2Wt;
}
public String getChild2Brestfeeding() {
return child2Brestfeeding;
}
public void setChild2Brestfeeding(String child2Brestfeeding) {
this.child2Brestfeeding = child2Brestfeeding;
}
public String getChild3Name() {
return child3Name;
}
public void setChild3Name(String child3Name) {
this.child3Name = child3Name;
}
public String getChild3Sex() {
return child3Sex;
}
public void setChild3Sex(String child3Sex) {
this.child3Sex = child3Sex;
}
public Double getChild3Wt() {
return child3Wt;
}
public void setChild3Wt(Double child3Wt) {
this.child3Wt = child3Wt;
}
public String getChild3Brestfeeding() {
return child3Brestfeeding;
}
public void setChild3Brestfeeding(String child3Brestfeeding) {
this.child3Brestfeeding = child3Brestfeeding;
}
public String getChild4Name() {
return child4Name;
}
public void setChild4Name(String child4Name) {
this.child4Name = child4Name;
}
public String getChild4Sex() {
return child4Sex;
}
public void setChild4Sex(String child4Sex) {
this.child4Sex = child4Sex;
}
public Double getChild4Wt() {
return child4Wt;
}
public void setChild4Wt(Double child4Wt) {
this.child4Wt = child4Wt;
}
public String getChild4Brestfeeding() {
return child4Brestfeeding;
}
public void setChild4Brestfeeding(String child4Brestfeeding) {
this.child4Brestfeeding = child4Brestfeeding;
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
public String getmTHRREGDATE() {
return mTHRREGDATE;
}
public void setmTHRREGDATE(String mTHRREGDATE) {
this.mTHRREGDATE = mTHRREGDATE;
}
public String getLastUpdateDate() {
return lastUpdateDate;
}
public void setLastUpdateDate(String lastUpdateDate) {
this.lastUpdateDate = lastUpdateDate;
}
public String getRemarks() {
return remarks;
}
public void setRemarks(String remarks) {
this.remarks = remarks;
}
public Integer getaNMID() {
return aNMID;
}
public void setaNMID(Integer aNMID) {
this.aNMID = aNMID;
}
public Integer getaSHAID() {
return aSHAID;
}
public void setaSHAID(Integer aSHAID) {
this.aSHAID = aSHAID;
}
public Boolean getCallAns() {
return callAns;
}
public void setCallAns(Boolean callAns) {
this.callAns = callAns;
}
public Integer getNoCallReason() {
return noCallReason;
}
public void setNoCallReason(Integer noCallReason) {
this.noCallReason = noCallReason;
}
public Integer getNoPhoneReason() {
return noPhoneReason;
}
public void setNoPhoneReason(Integer noPhoneReason) {
this.noPhoneReason = noPhoneReason;
}
public Integer getCreatedBy() {
return createdBy;
}
public void setCreatedBy(Integer createdBy) {
this.createdBy = createdBy;
}
public Integer getUpdatedBy() {
return updatedBy;
}
public void setUpdatedBy(Integer updatedBy) {
this.updatedBy = updatedBy;
}
public Integer getAadharNo() {
return aadharNo;
}
public void setAadharNo(Integer aadharNo) {
this.aadharNo = aadharNo;
}
public Integer getbPLAPL() {
return bPLAPL;
}
public void setbPLAPL(Integer bPLAPL) {
this.bPLAPL = bPLAPL;
}
public Integer geteID() {
return eID;
}
public void seteID(Integer eID) {
this.eID = eID;
}
public String geteIDTime() {
return eIDTime;
}
public void seteIDTime(String eIDTime) {
this.eIDTime = eIDTime;
}
public Integer getEntryType() {
return entryType;
}
public void setEntryType(Integer entryType) {
this.entryType = entryType;
}
public String getRegistrationNo() {
return registrationNo;
}
public void setRegistrationNo(String registrationNo) {
this.registrationNo = registrationNo;
}
public Long getCaseNo() {
return caseNo;
}
public void setCaseNo(Long caseNo) {
this.caseNo = caseNo;
}
public String getMobileNo() {
return mobileNo;
}
public void setMobileNo(String mobileNo) {
this.mobileNo = mobileNo;
}
public String getAbortionType() {
return abortionType;
}
public void setAbortionType(String abortionType) {
this.abortionType = abortionType;
}
public String getDeliveryOutcomes() {
return deliveryOutcomes;
}
public void setDeliveryOutcomes(String deliveryOutcomes) {
this.deliveryOutcomes = deliveryOutcomes;
}
public String getExecDate() {
return execDate;
}
public void setExecDate(String execDate) {
this.execDate = execDate;
}
public Boolean getAccepted() {
return accepted;
}
public void setAccepted(Boolean accepted) {
this.accepted = accepted;
}
public String getRejectionReason() {
return rejectionReason;
}
public void setRejectionReason(String rejectionReason) {
this.rejectionReason = rejectionReason;
}
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
public String getAction() {
return action;
}
public void setAction(String action) {
this.action = action;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core;
import static org.apache.jackrabbit.core.RepositoryImpl.SYSTEM_ROOT_NODE_ID;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.JCR_BASEVERSION;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.JCR_ISCHECKEDOUT;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.JCR_PREDECESSORS;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.JCR_ROOTVERSION;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.JCR_VERSIONHISTORY;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.MIX_VERSIONABLE;
import static org.apache.jackrabbit.spi.commons.name.NameConstants.MIX_REFERENCEABLE;
import java.util.Calendar;
import java.util.HashSet;
import java.util.Set;
import javax.jcr.ItemNotFoundException;
import javax.jcr.RepositoryException;
import org.apache.jackrabbit.core.id.NodeId;
import org.apache.jackrabbit.core.id.PropertyId;
import org.apache.jackrabbit.core.persistence.PersistenceManager;
import org.apache.jackrabbit.core.state.ChangeLog;
import org.apache.jackrabbit.core.state.ChildNodeEntry;
import org.apache.jackrabbit.core.state.ItemStateException;
import org.apache.jackrabbit.core.state.NodeState;
import org.apache.jackrabbit.core.version.InconsistentVersioningState;
import org.apache.jackrabbit.core.version.InternalVersion;
import org.apache.jackrabbit.core.version.InternalVersionHistory;
import org.apache.jackrabbit.core.version.InternalVersionManagerImpl;
import org.apache.jackrabbit.core.version.VersionHistoryInfo;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.NameFactory;
import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl;
import org.apache.jackrabbit.util.ISO8601;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tool for checking for and optionally fixing consistency issues in a
* repository. Currently this class only contains a simple versioning
* recovery feature for
* <a href="https://issues.apache.org/jira/browse/JCR-2551">JCR-2551</a>.
*/
class RepositoryChecker {

    /**
     * Logger instance.
     */
    private static final Logger log =
        LoggerFactory.getLogger(RepositoryChecker.class);

    /** Persistence manager of the workspace being checked. */
    private final PersistenceManager workspace;

    /** Buffered (not yet persisted) fixes for the workspace. */
    private final ChangeLog workspaceChanges;

    /** Buffered fixes for the versioning workspace. */
    private final ChangeLog vworkspaceChanges;

    private final InternalVersionManagerImpl versionManager;

    // maximum size of changelog when running in "fixImmediately" mode;
    // once exceeded, pending fixes are flushed to keep memory bounded
    private static final long CHUNKSIZE = 256;

    // number of nodes affected by pending changes
    private long dirtyNodes = 0;

    // total nodes checked, and nodes with problems found
    private long totalNodes = 0;
    private long brokenNodes = 0;

    // start time of the current check run (epoch millis), for duration reporting
    private long startTime;

    public RepositoryChecker(PersistenceManager workspace,
            InternalVersionManagerImpl versionManager) {
        this.workspace = workspace;
        this.workspaceChanges = new ChangeLog();
        this.vworkspaceChanges = new ChangeLog();
        this.versionManager = versionManager;
    }

    /**
     * Checks the node with the given id (and optionally its subtree) for
     * versioning inconsistencies.
     *
     * @param id node to start from
     * @param recurse whether to descend into child nodes
     * @param fixImmediately whether to persist fixes as soon as the pending
     *        changelog exceeds {@link #CHUNKSIZE}, and once more at the end
     * @throws RepositoryException if a node cannot be accessed
     */
    public void check(NodeId id, boolean recurse, boolean fixImmediately)
            throws RepositoryException {
        log.info("Starting RepositoryChecker");
        startTime = System.currentTimeMillis();
        internalCheck(id, recurse, fixImmediately);
        if (fixImmediately) {
            internalFix(true);
        }
        log.info("RepositoryChecker finished; checked {} nodes in {}ms, problems found: {}",
                totalNodes, System.currentTimeMillis() - startTime, brokenNodes);
    }

    private void internalCheck(NodeId id, boolean recurse,
            boolean fixImmediately) throws RepositoryException {
        try {
            log.debug("Checking consistency of node {}", id);
            totalNodes += 1;

            NodeState state = workspace.load(id);
            checkVersionHistory(state);

            // flush periodically so the pending changelog stays bounded
            if (fixImmediately && dirtyNodes > CHUNKSIZE) {
                internalFix(false);
            }

            if (recurse) {
                for (ChildNodeEntry child : state.getChildNodeEntries()) {
                    // never descend into the virtual /jcr:system subtree
                    if (!SYSTEM_ROOT_NODE_ID.equals(child.getId())) {
                        internalCheck(child.getId(), recurse, fixImmediately);
                    }
                }
            }
        } catch (ItemStateException e) {
            throw new RepositoryException("Unable to access node " + id, e);
        }
    }

    /**
     * Persists the fixes buffered in the given changelog, if any.
     *
     * @param pm persistence manager to store into
     * @param changes buffered fixes; reset after a successful store
     * @param store human-readable store name, for log messages
     * @param verbose whether to log even when there is nothing to fix
     * @throws RepositoryException if storing the fixes fails
     */
    private void fix(PersistenceManager pm, ChangeLog changes, String store,
            boolean verbose) throws RepositoryException {
        if (changes.hasUpdates()) {
            log.warn("Fixing {} inconsistencies: {}", store, changes);
            try {
                pm.store(changes);
                changes.reset();
            } catch (ItemStateException e) {
                String message = "Failed to fix " + store
                    + " inconsistencies (aborting)";
                log.error(message, e);
                throw new RepositoryException(message, e);
            }
        } else {
            if (verbose) {
                log.info("No {} inconsistencies found", store);
            }
        }
    }

    /** Persists all fixes found so far. */
    public void fix() throws RepositoryException {
        internalFix(true);
    }

    private void internalFix(boolean verbose) throws RepositoryException {
        fix(workspace, workspaceChanges, "workspace", verbose);
        fix(versionManager.getPersistenceManager(), vworkspaceChanges,
                "versioning workspace", verbose);
        dirtyNodes = 0;
    }

    /**
     * Verifies that the version history referenced by the given node is
     * intact (history resolvable, all versions and their frozen nodes
     * present, root version exists). On any inconsistency the versioning
     * references are removed from the node via
     * {@link #removeVersionHistoryReferences(NodeState, NodeId)}.
     */
    private void checkVersionHistory(NodeState node) {

        String message = null;
        NodeId nid = node.getNodeId();
        boolean isVersioned = node.hasPropertyName(JCR_VERSIONHISTORY);

        NodeId vhid = null;

        try {
            String type = isVersioned ? "in-use" : "candidate";
            log.debug("Checking " + type + " version history of node {}", nid);

            String intro = "Removing references to an inconsistent " + type
                + " version history of node " + nid;

            message = intro + " (getting the VersionInfo)";
            VersionHistoryInfo vhi = versionManager.getVersionHistoryInfoForNode(node);
            if (vhi != null) {
                // get the version history's node ID as early as possible
                // so we can attempt a fixup even when the next call fails
                vhid = vhi.getVersionHistoryId();
            }

            message = intro + " (getting the InternalVersionHistory)";

            InternalVersionHistory vh = null;

            try {
                vh = versionManager.getVersionHistoryOfNode(nid);
            }
            catch (ItemNotFoundException ex) {
                // it's ok if we get here if the node didn't claim to be versioned
                if (isVersioned) {
                    throw ex;
                }
            }

            if (vh == null) {
                if (isVersioned) {
                    message = intro + " (getVersionHistoryOfNode returned null)";
                    throw new InconsistentVersioningState(message);
                }
            } else {
                vhid = vh.getId();

                // additional checks, see JCR-3101

                message = intro + " (getting the version names failed)";
                Name[] versionNames = vh.getVersionNames();
                boolean seenRoot = false;

                for (Name versionName : versionNames) {
                    seenRoot |= JCR_ROOTVERSION.equals(versionName);

                    log.debug("Checking version history of node {}, version {}", nid, versionName);

                    message = intro + " (getting version " + versionName + " failed)";
                    InternalVersion v = vh.getVersion(versionName);

                    message = intro + " (frozen node of version " + v.getId() + " missing)";
                    if (null == v.getFrozenNode()) {
                        throw new InconsistentVersioningState(message);
                    }
                }

                if (!seenRoot) {
                    message = intro + " (root version is missing)";
                    throw new InconsistentVersioningState(message);
                }
            }
        } catch (InconsistentVersioningState e) {
            log.info(message, e);
            NodeId nvhid = e.getVersionHistoryNodeId();
            if (nvhid != null) {
                if (vhid != null && !nvhid.equals(vhid)) {
                    log.error("vhrid returned with InconsistentVersioningState does not match the id we already had: "
                            + vhid + " vs " + nvhid);
                }
                vhid = nvhid;
            }
            removeVersionHistoryReferences(node, vhid);
        } catch (Exception e) {
            log.info(message, e);
            removeVersionHistoryReferences(node, vhid);
        }
    }

    // un-versions the node, and potentially moves the version history away
    private void removeVersionHistoryReferences(NodeState node, NodeId vhid) {
        dirtyNodes += 1;
        brokenNodes += 1;

        NodeState modified =
            new NodeState(node, NodeState.STATUS_EXISTING_MODIFIED, true);

        Set<Name> mixins = new HashSet<Name>(node.getMixinTypeNames());
        if (mixins.remove(MIX_VERSIONABLE)) {
            // we are keeping jcr:uuid, so we need to make sure the type info stays valid
            mixins.add(MIX_REFERENCEABLE);
            modified.setMixinTypeNames(mixins);
        }

        removeProperty(modified, JCR_VERSIONHISTORY);
        removeProperty(modified, JCR_BASEVERSION);
        removeProperty(modified, JCR_PREDECESSORS);
        removeProperty(modified, JCR_ISCHECKEDOUT);

        workspaceChanges.modified(modified);

        if (vhid != null) {
            // attempt to rename the version history, so it doesn't interfere with
            // a future attempt to put the node under version control again
            // (see JCR-3115)

            log.info("trying to rename version history of node {}", node.getId());

            NameFactory nf = NameFactoryImpl.getInstance();

            // Name of VHR in parent folder is ID of versionable node
            Name vhrname = nf.create(Name.NS_DEFAULT_URI, node.getId().toString());

            try {
                NodeState vhrState = versionManager.getPersistenceManager().load(vhid);
                NodeState vhrParentState = versionManager.getPersistenceManager().load(vhrState.getParentId());

                if (vhrParentState.hasChildNodeEntry(vhrname)) {
                    NodeState modifiedParent = (NodeState) vworkspaceChanges.get(vhrState.getParentId());
                    if (modifiedParent == null) {
                        modifiedParent = new NodeState(vhrParentState, NodeState.STATUS_EXISTING_MODIFIED, true);
                    }

                    Calendar now = Calendar.getInstance();
                    String appendme = " (disconnected by RepositoryChecker on "
                        + ISO8601.format(now) + ")";
                    modifiedParent.renameChildNodeEntry(vhid,
                            nf.create(vhrname.getNamespaceURI(), vhrname.getLocalName() + appendme));
                    vworkspaceChanges.modified(modifiedParent);
                }
                else {
                    log.info("child node entry {} for version history not found inside parent folder.", vhrname);
                }
            } catch (Exception ex) {
                log.error("while trying to rename the version history", ex);
            }
        }
    }

    /**
     * Removes the named property from the node state and records the
     * deletion of its persisted state, if any.
     */
    private void removeProperty(NodeState node, Name name) {
        if (node.hasPropertyName(name)) {
            node.removePropertyName(name);
            try {
                workspaceChanges.deleted(workspace.load(
                        new PropertyId(node.getNodeId(), name)));
            } catch (ItemStateException ignored) {
                // best-effort: the property apparently has no persisted
                // state, so there is nothing to record as deleted
            }
        }
    }
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.diagram.core.util.ViewUtil;
import org.eclipse.gmf.runtime.diagram.ui.commands.DeferredLayoutCommand;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.commands.SetViewMutabilityCommand;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CanonicalEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.requests.CreateViewRequest;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.notation.Node;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ForEachMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramUpdater;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbNodeDescriptor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
/**
* @generated
*/
public class MediatorFlowMediatorFlowCompartment5CanonicalEditPolicy extends CanonicalEditPolicy {
/**
 * Activates the child edit parts before running the canonical refresh, so
 * that they have registered their event listeners by the time views are
 * synchronized.
 *
 * @generated NOT
 */
protected void refreshOnActivate() {
    for (Object child : getHost().getChildren()) {
        ((EditPart) child).activate();
    }
    super.refreshOnActivate();
}
/**
 * Returns the containment feature this compartment keeps synchronized with
 * the diagram: {@code MediatorFlow#children}.
 *
 * @generated
 */
protected EStructuralFeature getFeatureToSynchronize() {
return EsbPackage.eINSTANCE.getMediatorFlow_Children();
}
/**
 * Collects the semantic (model) elements that should be represented as
 * children of this compartment, as reported by {@link EsbDiagramUpdater}.
 *
 * @generated NOT
 */
@SuppressWarnings("rawtypes")
protected List getSemanticChildrenList() {
    View compartmentView = (View) getHost().getModel();
    List<EObject> semanticChildren = new LinkedList<EObject>();
    for (EsbNodeDescriptor descriptor : EsbDiagramUpdater
            .getMediatorFlowMediatorFlowCompartment_7035SemanticChildren(compartmentView)) {
        semanticChildren.add(descriptor.getModelElement());
    }
    return semanticChildren;
}
/**
 * A view is orphaned when it is one of the element kinds managed by this
 * compartment but its semantic element is no longer among the expected
 * semantic children.
 *
 * @generated NOT
 */
protected boolean isOrphaned(Collection<EObject> semanticChildren, final View view) {
    if (!isMyDiagramElement(view)) {
        return false;
    }
    return !semanticChildren.contains(view.getElement());
}
/**
 * Tells whether the given notation view is one of the element kinds this
 * compartment is responsible for (mediators, endpoints, sequences and
 * connector nodes). Views with any other visual ID are ignored by the
 * canonical refresh.
 *
 * @generated
 */
private boolean isMyDiagramElement(View view) {
int visualID = EsbVisualIDRegistry.getVisualID(view);
switch (visualID) {
case DropMediatorEditPart.VISUAL_ID:
case PropertyMediatorEditPart.VISUAL_ID:
case ThrottleMediatorEditPart.VISUAL_ID:
case FilterMediatorEditPart.VISUAL_ID:
case LogMediatorEditPart.VISUAL_ID:
case EnrichMediatorEditPart.VISUAL_ID:
case XSLTMediatorEditPart.VISUAL_ID:
case SwitchMediatorEditPart.VISUAL_ID:
case SequenceEditPart.VISUAL_ID:
case EventMediatorEditPart.VISUAL_ID:
case EntitlementMediatorEditPart.VISUAL_ID:
case ClassMediatorEditPart.VISUAL_ID:
case SpringMediatorEditPart.VISUAL_ID:
case ScriptMediatorEditPart.VISUAL_ID:
case FaultMediatorEditPart.VISUAL_ID:
case XQueryMediatorEditPart.VISUAL_ID:
case CommandMediatorEditPart.VISUAL_ID:
case DBLookupMediatorEditPart.VISUAL_ID:
case DBReportMediatorEditPart.VISUAL_ID:
case SmooksMediatorEditPart.VISUAL_ID:
case SendMediatorEditPart.VISUAL_ID:
case HeaderMediatorEditPart.VISUAL_ID:
case CloneMediatorEditPart.VISUAL_ID:
case CacheMediatorEditPart.VISUAL_ID:
case IterateMediatorEditPart.VISUAL_ID:
case CalloutMediatorEditPart.VISUAL_ID:
case TransactionMediatorEditPart.VISUAL_ID:
case RMSequenceMediatorEditPart.VISUAL_ID:
case RuleMediatorEditPart.VISUAL_ID:
case OAuthMediatorEditPart.VISUAL_ID:
case AggregateMediatorEditPart.VISUAL_ID:
case StoreMediatorEditPart.VISUAL_ID:
case BuilderMediatorEditPart.VISUAL_ID:
case CallTemplateMediatorEditPart.VISUAL_ID:
case PayloadFactoryMediatorEditPart.VISUAL_ID:
case EnqueueMediatorEditPart.VISUAL_ID:
case URLRewriteMediatorEditPart.VISUAL_ID:
case ValidateMediatorEditPart.VISUAL_ID:
case RouterMediatorEditPart.VISUAL_ID:
case ConditionalRouterMediatorEditPart.VISUAL_ID:
case BAMMediatorEditPart.VISUAL_ID:
case BeanMediatorEditPart.VISUAL_ID:
case EJBMediatorEditPart.VISUAL_ID:
case DefaultEndPointEditPart.VISUAL_ID:
case AddressEndPointEditPart.VISUAL_ID:
case FailoverEndPointEditPart.VISUAL_ID:
case RecipientListEndPointEditPart.VISUAL_ID:
case WSDLEndPointEditPart.VISUAL_ID:
case NamedEndpointEditPart.VISUAL_ID:
case LoadBalanceEndPointEditPart.VISUAL_ID:
case APIResourceEndpointEditPart.VISUAL_ID:
case AddressingEndpointEditPart.VISUAL_ID:
case HTTPEndpointEditPart.VISUAL_ID:
case TemplateEndpointEditPart.VISUAL_ID:
case CloudConnectorEditPart.VISUAL_ID:
case CloudConnectorOperationEditPart.VISUAL_ID:
case LoopBackMediatorEditPart.VISUAL_ID:
case RespondMediatorEditPart.VISUAL_ID:
case CallMediatorEditPart.VISUAL_ID:
case DataMapperMediatorEditPart.VISUAL_ID:
case FastXSLTMediatorEditPart.VISUAL_ID:
case ForEachMediatorEditPart.VISUAL_ID:
case PublishEventMediatorEditPart.VISUAL_ID:
return true;
}
return false;
}
/**
 * Synchronizes this compartment's notation children with the semantic
 * children reported by {@link EsbDiagramUpdater} for compartment 7035:
 * views whose semantic element and type hint still match a descriptor are
 * kept, unmatched views are deleted, and missing views are created (then a
 * deferred layout is run when more than one view was created).
 * @generated
 */
protected void refreshSemantic() {
    // Nothing to synchronize if the semantic element cannot be resolved.
    if (resolveSemanticElement() == null) {
        return;
    }
    // Adapters for any views created during this refresh; needed later for
    // post-processing, layout and mutability handling.
    LinkedList<IAdaptable> createdViews = new LinkedList<IAdaptable>();
    // Desired semantic children, as computed from the model.
    List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
            .getMediatorFlowMediatorFlowCompartment_7035SemanticChildren((View) getHost().getModel());
    LinkedList<View> orphaned = new LinkedList<View>();
    // we care to check only views we recognize as ours
    LinkedList<View> knownViewChildren = new LinkedList<View>();
    for (View v : getViewChildren()) {
        if (isMyDiagramElement(v)) {
            knownViewChildren.add(v);
        }
    }
    // alternative to #cleanCanonicalSemanticChildren(getViewChildren(), semanticChildren)
    //
    // iteration happens over list of desired semantic elements, trying to find best matching View, while original CEP
    // iterates views, potentially losing view (size/bounds) information - i.e. if there are few views to reference same EObject, only last one
    // to answer isOrphaned == true will be used for the domain element representation, see #cleanCanonicalSemanticChildren()
    //
    // Phase 1: pair each descriptor with an existing view whose semantic
    // element AND type hint both match; such descriptors need no new view.
    for (Iterator<EsbNodeDescriptor> descriptorsIterator = childDescriptors.iterator(); descriptorsIterator
            .hasNext();) {
        EsbNodeDescriptor next = descriptorsIterator.next();
        String hint = EsbVisualIDRegistry.getType(next.getVisualID());
        LinkedList<View> perfectMatch = new LinkedList<View>(); // both semanticElement and hint match that of NodeDescriptor
        for (View childView : getViewChildren()) {
            EObject semanticElement = childView.getElement();
            if (next.getModelElement().equals(semanticElement)) {
                if (hint.equals(childView.getType())) {
                    perfectMatch.add(childView);
                    // actually, can stop iteration over view children here, but
                    // may want to use not the first view but last one as a 'real' match (the way original CEP does
                    // with its trick with viewToSemanticMap inside #cleanCanonicalSemanticChildren
                }
            }
        }
        if (perfectMatch.size() > 0) {
            descriptorsIterator.remove(); // precise match found no need to create anything for the NodeDescriptor
            // use only one view (first or last?), keep rest as orphaned for further consideration
            knownViewChildren.remove(perfectMatch.getFirst());
        }
    }
    // those left in knownViewChildren are subject to removal - they are our diagram elements we didn't find match to,
    // or those we have potential matches to, and thus need to be recreated, preserving size/location information.
    orphaned.addAll(knownViewChildren);
    //
    // Phase 2: build a view-creation descriptor for every semantic child
    // still lacking a matching view.
    ArrayList<CreateViewRequest.ViewDescriptor> viewDescriptors = new ArrayList<CreateViewRequest.ViewDescriptor>(
            childDescriptors.size());
    for (EsbNodeDescriptor next : childDescriptors) {
        String hint = EsbVisualIDRegistry.getType(next.getVisualID());
        IAdaptable elementAdapter = new CanonicalElementAdapter(next.getModelElement(), hint);
        CreateViewRequest.ViewDescriptor descriptor = new CreateViewRequest.ViewDescriptor(elementAdapter,
                Node.class, hint, ViewUtil.APPEND, false, host().getDiagramPreferencesHint());
        viewDescriptors.add(descriptor);
    }
    // Phase 3: delete orphans BEFORE creating replacements (order matters for
    // the canonical policy's bookkeeping).
    boolean changed = deleteViews(orphaned.iterator());
    //
    CreateViewRequest request = getCreateViewRequest(viewDescriptors);
    Command cmd = getCreateViewCommand(request);
    if (cmd != null && cmd.canExecute()) {
        // The host view must be mutable for the create command to take effect.
        SetViewMutabilityCommand.makeMutable(new EObjectAdapter(host().getNotationView())).execute();
        executeCommand(cmd);
        @SuppressWarnings("unchecked")
        List<IAdaptable> nl = (List<IAdaptable>) request.getNewObject();
        createdViews.addAll(nl);
    }
    // Phase 4: notify subclasses if anything was deleted or created.
    if (changed || createdViews.size() > 0) {
        postProcessRefreshSemantic(createdViews);
    }
    if (createdViews.size() > 1) {
        // perform a layout of the container
        DeferredLayoutCommand layoutCmd = new DeferredLayoutCommand(host().getEditingDomain(), createdViews, host());
        executeCommand(new ICommandProxy(layoutCmd));
    }
    // Freeze the new views again now that the refresh is complete.
    makeViewsImmutable(createdViews);
}
}/*package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.diagram.core.util.ViewUtil;
import org.eclipse.gmf.runtime.diagram.ui.commands.DeferredLayoutCommand;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.commands.SetViewMutabilityCommand;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CanonicalEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.requests.CreateViewRequest;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.notation.Node;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramUpdater;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbNodeDescriptor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
*/
/**
* @generated
*/
/*
public class MediatorFlowMediatorFlowCompartment5CanonicalEditPolicy extends
CanonicalEditPolicy {
*//**
* @generated
*/
/*
protected void refreshOnActivate() {
// Need to activate editpart children before invoking the canonical refresh for EditParts to add event listeners
List<?> c = getHost().getChildren();
for (int i = 0; i < c.size(); i++) {
((EditPart) c.get(i)).activate();
}
super.refreshOnActivate();
}
*//**
* @generated
*/
/*
protected EStructuralFeature getFeatureToSynchronize() {
return EsbPackage.eINSTANCE.getMediatorFlow_Children();
}
*//**
* @generated
*/
/*
@SuppressWarnings("rawtypes")
protected List getSemanticChildrenList() {
View viewObject = (View) getHost().getModel();
LinkedList<EObject> result = new LinkedList<EObject>();
List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
.getMediatorFlowMediatorFlowCompartment_7018SemanticChildren(viewObject);
for (EsbNodeDescriptor d : childDescriptors) {
result.add(d.getModelElement());
}
return result;
}
*//**
* @generated
*/
/*
protected boolean isOrphaned(Collection<EObject> semanticChildren,
final View view) {
return isMyDiagramElement(view)
&& !semanticChildren.contains(view.getElement());
}
*//**
* @generated
*/
/*
private boolean isMyDiagramElement(View view) {
int visualID = EsbVisualIDRegistry.getVisualID(view);
switch (visualID) {
case DropMediatorEditPart.VISUAL_ID:
case PropertyMediatorEditPart.VISUAL_ID:
case ThrottleMediatorEditPart.VISUAL_ID:
case FilterMediatorEditPart.VISUAL_ID:
case LogMediatorEditPart.VISUAL_ID:
case EnrichMediatorEditPart.VISUAL_ID:
case XSLTMediatorEditPart.VISUAL_ID:
case SwitchMediatorEditPart.VISUAL_ID:
case SequenceEditPart.VISUAL_ID:
case EventMediatorEditPart.VISUAL_ID:
case EntitlementMediatorEditPart.VISUAL_ID:
case ClassMediatorEditPart.VISUAL_ID:
case SpringMediatorEditPart.VISUAL_ID:
case ScriptMediatorEditPart.VISUAL_ID:
case FaultMediatorEditPart.VISUAL_ID:
case XQueryMediatorEditPart.VISUAL_ID:
case CommandMediatorEditPart.VISUAL_ID:
case DBLookupMediatorEditPart.VISUAL_ID:
case DBReportMediatorEditPart.VISUAL_ID:
case SmooksMediatorEditPart.VISUAL_ID:
case SendMediatorEditPart.VISUAL_ID:
case HeaderMediatorEditPart.VISUAL_ID:
case CloneMediatorEditPart.VISUAL_ID:
case CacheMediatorEditPart.VISUAL_ID:
case IterateMediatorEditPart.VISUAL_ID:
case CalloutMediatorEditPart.VISUAL_ID:
case TransactionMediatorEditPart.VISUAL_ID:
case RMSequenceMediatorEditPart.VISUAL_ID:
case RuleMediatorEditPart.VISUAL_ID:
case OAuthMediatorEditPart.VISUAL_ID:
case AggregateMediatorEditPart.VISUAL_ID:
case StoreMediatorEditPart.VISUAL_ID:
case BuilderMediatorEditPart.VISUAL_ID:
case CallTemplateMediatorEditPart.VISUAL_ID:
case PayloadFactoryMediatorEditPart.VISUAL_ID:
case EnqueueMediatorEditPart.VISUAL_ID:
return true;
}
return false;
}
*//**
* @generated
*/
/*
protected void refreshSemantic() {
if (resolveSemanticElement() == null) {
return;
}
LinkedList<IAdaptable> createdViews = new LinkedList<IAdaptable>();
List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
.getMediatorFlowMediatorFlowCompartment_7018SemanticChildren((View) getHost()
.getModel());
LinkedList<View> orphaned = new LinkedList<View>();
// we care to check only views we recognize as ours
LinkedList<View> knownViewChildren = new LinkedList<View>();
for (View v : getViewChildren()) {
if (isMyDiagramElement(v)) {
knownViewChildren.add(v);
}
}
// alternative to #cleanCanonicalSemanticChildren(getViewChildren(), semanticChildren)
//
// iteration happens over list of desired semantic elements, trying to find best matching View, while original CEP
// iterates views, potentially losing view (size/bounds) information - i.e. if there are few views to reference same EObject, only last one
// to answer isOrphaned == true will be used for the domain element representation, see #cleanCanonicalSemanticChildren()
for (Iterator<EsbNodeDescriptor> descriptorsIterator = childDescriptors
.iterator(); descriptorsIterator.hasNext();) {
EsbNodeDescriptor next = descriptorsIterator.next();
String hint = EsbVisualIDRegistry.getType(next.getVisualID());
LinkedList<View> perfectMatch = new LinkedList<View>(); // both semanticElement and hint match that of NodeDescriptor
for (View childView : getViewChildren()) {
EObject semanticElement = childView.getElement();
if (next.getModelElement().equals(semanticElement)) {
if (hint.equals(childView.getType())) {
perfectMatch.add(childView);
// actually, can stop iteration over view children here, but
// may want to use not the first view but last one as a 'real' match (the way original CEP does
// with its trick with viewToSemanticMap inside #cleanCanonicalSemanticChildren
}
}
}
if (perfectMatch.size() > 0) {
descriptorsIterator.remove(); // precise match found no need to create anything for the NodeDescriptor
// use only one view (first or last?), keep rest as orphaned for further consideration
knownViewChildren.remove(perfectMatch.getFirst());
}
}
// those left in knownViewChildren are subject to removal - they are our diagram elements we didn't find match to,
// or those we have potential matches to, and thus need to be recreated, preserving size/location information.
orphaned.addAll(knownViewChildren);
//
ArrayList<CreateViewRequest.ViewDescriptor> viewDescriptors = new ArrayList<CreateViewRequest.ViewDescriptor>(
childDescriptors.size());
for (EsbNodeDescriptor next : childDescriptors) {
String hint = EsbVisualIDRegistry.getType(next.getVisualID());
IAdaptable elementAdapter = new CanonicalElementAdapter(
next.getModelElement(), hint);
CreateViewRequest.ViewDescriptor descriptor = new CreateViewRequest.ViewDescriptor(
elementAdapter, Node.class, hint, ViewUtil.APPEND, false,
host().getDiagramPreferencesHint());
viewDescriptors.add(descriptor);
}
boolean changed = deleteViews(orphaned.iterator());
//
CreateViewRequest request = getCreateViewRequest(viewDescriptors);
Command cmd = getCreateViewCommand(request);
if (cmd != null && cmd.canExecute()) {
SetViewMutabilityCommand.makeMutable(
new EObjectAdapter(host().getNotationView())).execute();
executeCommand(cmd);
@SuppressWarnings("unchecked")
List<IAdaptable> nl = (List<IAdaptable>) request.getNewObject();
createdViews.addAll(nl);
}
if (changed || createdViews.size() > 0) {
postProcessRefreshSemantic(createdViews);
}
if (createdViews.size() > 1) {
// perform a layout of the container
DeferredLayoutCommand layoutCmd = new DeferredLayoutCommand(host()
.getEditingDomain(), createdViews, host());
executeCommand(new ICommandProxy(layoutCmd));
}
makeViewsImmutable(createdViews);
}
}
*/
| |
package com.fincatto.nfe310.classes.nota;
import org.junit.Assert;
import org.junit.Test;
import com.fincatto.nfe310.FabricaDeObjetosFake;
import com.fincatto.nfe310.classes.NFEndereco;
import com.fincatto.nfe310.classes.NFUnidadeFederativa;
/**
 * Unit tests for {@code NFEndereco}: field length validation, getter
 * round-trips, mandatory-field checks on {@code toString()} and the
 * generated XML layout.
 */
public class NFEnderecoTest {

    // Valid 60-character sample values shared across the tests.
    private static final String LOGRADOURO = "NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV";
    private static final String NUMERO = "11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y";
    private static final String COMPLEMENTO = "Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw";
    private static final String BAIRRO = "67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU";
    private static final String DESCRICAO_MUNICIPIO = "s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG";
    private static final String CEP = "88095550";
    private static final String CODIGO_MUNICIPIO = "9999999";
    private static final String CODIGO_PAIS = "999";
    private static final String TELEFONE = "12345678901324";
    // One character past the 60-character limit: must be rejected.
    private static final String TEXTO_61 = NUMERO + "1";
    private static final String BAIRRO_61 = BAIRRO + "1";

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirBairroComTamanhoInvalido() {
        try {
            new NFEndereco().setBairro("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setBairro(BAIRRO_61);
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirTelefoneComTamanhoInvalido() {
        try {
            new NFEndereco().setTelefone("12345");
        } catch (final IllegalStateException e) {
            new NFEndereco().setTelefone("123456789012345");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirNumeroComTamanhoInvalido() {
        try {
            new NFEndereco().setNumero("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setNumero(TEXTO_61);
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirLogradouroComTamanhoInvalido() {
        try {
            new NFEndereco().setLogradouro("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setLogradouro(TEXTO_61);
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirDescricaoMunicipioComTamanhoInvalido() {
        try {
            new NFEndereco().setDescricaoMunicipio("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setDescricaoMunicipio(TEXTO_61);
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirComplementoComTamanhoInvalido() {
        try {
            new NFEndereco().setComplemento("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setComplemento(TEXTO_61);
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirCodigoMunicipioComTamanhoInvalido() {
        try {
            new NFEndereco().setCodigoMunicipio("999999");
        } catch (final IllegalStateException e) {
            new NFEndereco().setCodigoMunicipio("10000000");
        }
    }

    @Test
    public void deveObterCepComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setCep(CEP);
        Assert.assertEquals(CEP, endereco.getCep());
    }

    @Test
    public void deveObterBairroComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("Principal");
        Assert.assertEquals("Principal", endereco.getBairro());
    }

    @Test
    public void deveObterCodigoMunicipioComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        Assert.assertEquals(CODIGO_MUNICIPIO, endereco.getCodigoMunicipio());
    }

    @Test
    public void deveObterCodigoPaisComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setCodigoPais(CODIGO_PAIS);
        Assert.assertEquals(CODIGO_PAIS, endereco.getCodigoPais());
    }

    @Test
    public void deveObterComplementoComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setComplemento(COMPLEMENTO);
        Assert.assertEquals(COMPLEMENTO, endereco.getComplemento());
    }

    @Test
    public void deveObterDescricaoMunicipioComoFoiSetado() {
        // Any 60-character text works here; COMPLEMENTO is reused as sample data.
        final NFEndereco endereco = new NFEndereco();
        endereco.setDescricaoMunicipio(COMPLEMENTO);
        Assert.assertEquals(COMPLEMENTO, endereco.getDescricaoMunicipio());
    }

    @Test
    public void deveObterLogradouroComoFoiSetado() {
        // Any 60-character text works here; COMPLEMENTO is reused as sample data.
        final NFEndereco endereco = new NFEndereco();
        endereco.setLogradouro(COMPLEMENTO);
        Assert.assertEquals(COMPLEMENTO, endereco.getLogradouro());
    }

    @Test
    public void deveObterNumeroComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setNumero(NUMERO);
        Assert.assertEquals(NUMERO, endereco.getNumero());
    }

    @Test
    public void deveObterTelefoneComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setTelefone(TELEFONE);
        Assert.assertEquals(TELEFONE, endereco.getTelefone());
    }

    @Test
    public void deveObterUfComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setUf(NFUnidadeFederativa.SC);
        Assert.assertEquals("SC", endereco.getUf());
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirBairroNulo() {
        // Every mandatory field except bairro is set; toString() must fail.
        final NFEndereco endereco = new NFEndereco();
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirCepNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirCodigoMunicipioNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test
    public void devePermitirCodigoPaisNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test
    public void devePermitirComplementoNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirDescricaoMunicipioNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirLogradouroNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirNumeroNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setTelefone(TELEFONE);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test
    public void devePermitirTelefoneNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setUf(NFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirUFNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro(BAIRRO);
        endereco.setCep(CEP);
        endereco.setCodigoMunicipio(CODIGO_MUNICIPIO);
        endereco.setCodigoPais(CODIGO_PAIS);
        endereco.setComplemento(COMPLEMENTO);
        endereco.setDescricaoMunicipio(DESCRICAO_MUNICIPIO);
        endereco.setLogradouro(LOGRADOURO);
        endereco.setNumero(NUMERO);
        endereco.setTelefone(TELEFONE);
        endereco.toString();
    }

    @Test
    public void deveGerarXMLDeAcordoComOPadraoEstabelecido() {
        // UF "RS" and cPais "1058" come from FabricaDeObjetosFake, not from
        // the shared constants above.
        final String xmlEsperado = "<NFEndereco><xLgr>" + LOGRADOURO + "</xLgr><nro>" + NUMERO
                + "</nro><xCpl>" + COMPLEMENTO + "</xCpl><xBairro>" + BAIRRO + "</xBairro><cMun>"
                + CODIGO_MUNICIPIO + "</cMun><xMun>" + DESCRICAO_MUNICIPIO + "</xMun><UF>RS</UF><CEP>"
                + CEP + "</CEP><cPais>1058</cPais><fone>" + TELEFONE + "</fone></NFEndereco>";
        Assert.assertEquals(xmlEsperado, FabricaDeObjetosFake.getNFEndereco().toString());
    }
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.impl;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import org.apache.commons.io.IOUtils;
import org.onlab.util.Tools;
import org.onosproject.cluster.PartitionId;
import org.onosproject.store.cluster.messaging.MessagingException;
import org.onosproject.store.cluster.messaging.MessagingService;
import com.google.common.base.MoreObjects;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
import static com.google.common.base.Preconditions.checkNotNull;
import io.atomix.catalyst.serializer.SerializationException;
import io.atomix.catalyst.transport.Address;
import io.atomix.catalyst.transport.Connection;
import io.atomix.catalyst.transport.MessageHandler;
import io.atomix.catalyst.transport.TransportException;
import io.atomix.catalyst.util.Assert;
import io.atomix.catalyst.util.Listener;
import io.atomix.catalyst.util.Listeners;
import io.atomix.catalyst.util.ReferenceCounted;
import io.atomix.catalyst.util.concurrent.ThreadContext;
/**
* {@link Connection} implementation for CopycatTransport.
*/
public class CopycatTransportConnection implements Connection {
private final Listeners<Throwable> exceptionListeners = new Listeners<>();
private final Listeners<Connection> closeListeners = new Listeners<>();
static final byte SUCCESS = 0x03;
static final byte FAILURE = 0x04;
private final long connectionId;
private final CopycatTransport.Mode mode;
private final Address remoteAddress;
private final MessagingService messagingService;
private final String outboundMessageSubject;
private final String inboundMessageSubject;
private final ThreadContext context;
private final Map<Class<?>, InternalHandler> handlers = Maps.newConcurrentMap();
CopycatTransportConnection(long connectionId,
CopycatTransport.Mode mode,
PartitionId partitionId,
Address address,
MessagingService messagingService,
ThreadContext context) {
this.connectionId = connectionId;
this.mode = checkNotNull(mode);
this.remoteAddress = checkNotNull(address);
this.messagingService = checkNotNull(messagingService);
if (mode == CopycatTransport.Mode.CLIENT) {
this.outboundMessageSubject = String.format("onos-copycat-%s", partitionId);
this.inboundMessageSubject = String.format("onos-copycat-%s-%d", partitionId, connectionId);
} else {
this.outboundMessageSubject = String.format("onos-copycat-%s-%d", partitionId, connectionId);
this.inboundMessageSubject = String.format("onos-copycat-%s", partitionId);
}
this.context = checkNotNull(context);
}
public void setBidirectional() {
messagingService.registerHandler(inboundMessageSubject, (sender, payload) -> {
try (DataInputStream input = new DataInputStream(new ByteArrayInputStream(payload))) {
if (input.readLong() != connectionId) {
throw new IllegalStateException("Invalid connection Id");
}
return handle(IOUtils.toByteArray(input));
} catch (IOException e) {
Throwables.propagate(e);
return null;
}
});
}
@Override
public <T, U> CompletableFuture<U> send(T message) {
ThreadContext context = ThreadContext.currentContextOrThrow();
CompletableFuture<U> result = new CompletableFuture<>();
try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
new DataOutputStream(baos).writeLong(connectionId);
context.serializer().writeObject(message, baos);
if (message instanceof ReferenceCounted) {
((ReferenceCounted<?>) message).release();
}
messagingService.sendAndReceive(CopycatTransport.toEndpoint(remoteAddress),
outboundMessageSubject,
baos.toByteArray(),
context.executor())
.whenComplete((r, e) -> {
Throwable wrappedError = e;
if (e != null) {
Throwable rootCause = Throwables.getRootCause(e);
if (MessagingException.class.isAssignableFrom(rootCause.getClass())) {
wrappedError = new TransportException(e);
}
}
handleResponse(r, wrappedError, result, context);
});
} catch (SerializationException | IOException e) {
result.completeExceptionally(e);
}
return result;
}
private <T> void handleResponse(byte[] response,
Throwable error,
CompletableFuture<T> future,
ThreadContext context) {
if (error != null) {
context.execute(() -> future.completeExceptionally(error));
return;
}
checkNotNull(response);
InputStream input = new ByteArrayInputStream(response);
try {
byte status = (byte) input.read();
if (status == FAILURE) {
Throwable t = context.serializer().readObject(input);
context.execute(() -> future.completeExceptionally(t));
} else {
context.execute(() -> future.complete(context.serializer().readObject(input)));
}
} catch (IOException e) {
context.execute(() -> future.completeExceptionally(e));
}
}
@Override
public <T, U> Connection handler(Class<T> type, MessageHandler<T, U> handler) {
Assert.notNull(type, "type");
handlers.put(type, new InternalHandler(handler, ThreadContext.currentContextOrThrow()));
return null;
}
public CompletableFuture<byte[]> handle(byte[] message) {
try {
Object request = context.serializer().readObject(new ByteArrayInputStream(message));
InternalHandler handler = handlers.get(request.getClass());
if (handler == null) {
return Tools.exceptionalFuture(new IllegalStateException(
"No handler registered for " + request.getClass()));
}
return handler.handle(request).handle((result, error) -> {
try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
baos.write(error != null ? FAILURE : SUCCESS);
context.serializer().writeObject(error != null ? error : result, baos);
return baos.toByteArray();
} catch (IOException e) {
Throwables.propagate(e);
return null;
}
});
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
/**
 * Registers a listener to be notified of connection exceptions.
 *
 * @param listener the exception consumer.
 * @return a handle that can be used to deregister the listener.
 */
@Override
public Listener<Throwable> exceptionListener(Consumer<Throwable> listener) {
    return exceptionListeners.add(listener);
}
/**
 * Registers a listener to be notified when this connection closes.
 *
 * @param listener the close consumer.
 * @return a handle that can be used to deregister the listener.
 */
@Override
public Listener<Connection> closeListener(Consumer<Connection> listener) {
    return closeListeners.add(listener);
}
/**
 * Closes this connection: notifies close listeners first, then, on the
 * client side only, unregisters the inbound message handler.
 *
 * @return an already-completed future.
 */
@Override
public CompletableFuture<Void> close() {
    closeListeners.forEach(listener -> listener.accept(this));
    if (mode == CopycatTransport.Mode.CLIENT) {
        messagingService.unregisterHandler(inboundMessageSubject);
    }
    return CompletableFuture.completedFuture(null);
}
/** Hash code derived from the connection id only, consistent with equals(). */
@Override
public int hashCode() {
    return Objects.hash(connectionId);
}
/**
 * Two connections are equal exactly when they carry the same connection id.
 */
@Override
public boolean equals(Object other) {
    return other instanceof CopycatTransportConnection
            && connectionId == ((CopycatTransportConnection) other).connectionId;
}
/** Renders as the class name plus the connection id, for logging. */
@Override
public String toString() {
    return MoreObjects.toStringHelper(getClass())
            .add("id", connectionId)
            .toString();
}
/**
 * Pairs a registered (raw-typed) message handler with the thread context it
 * was registered on, so the handler is always invoked on that context.
 */
@SuppressWarnings("rawtypes")
private final class InternalHandler {

    private final MessageHandler handler;
    private final ThreadContext context;

    private InternalHandler(MessageHandler handler, ThreadContext context) {
        this.handler = handler;
        this.context = context;
    }

    /**
     * Dispatches {@code message} to the handler on its registration context
     * and bridges the handler's future into the returned one.
     */
    @SuppressWarnings("unchecked")
    public CompletableFuture<Object> handle(Object message) {
        CompletableFuture<Object> future = new CompletableFuture<>();
        context.execute(() -> handler.handle(message).whenComplete((result, error) -> {
            if (error == null) {
                future.complete(result);
            } else {
                // Raw handler type erases the error to Object; it is always a Throwable.
                future.completeExceptionally((Throwable) error);
            }
        }));
        return future;
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.fieldstats;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.fieldstats.IndexConstraint;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
 * Integration tests for the field stats API: per-field min/max aggregation
 * over random documents, cluster- vs index-level responses, behavior on
 * incompatible field types, and min/max index-constraint filtering.
 */
public class FieldStatsIntegrationIT extends ESIntegTestCase {

    public void testRandom() throws Exception {
        // Bug fix: the mapping previously declared "double", "type=double" twice.
        assertAcked(prepareCreate("test").addMapping(
            "test", "string", "type=text", "date", "type=date", "double", "type=double",
            "float", "type=float", "long", "type=long", "integer", "type=integer", "short", "type=short", "byte", "type=byte"
        ));
        ensureGreen("test");
        // Expected extremes, tracked while indexing random documents.
        long minByte = Byte.MAX_VALUE;
        long maxByte = Byte.MIN_VALUE;
        long minShort = Short.MAX_VALUE;
        long maxShort = Short.MIN_VALUE;
        long minInt = Integer.MAX_VALUE;
        long maxInt = Integer.MIN_VALUE;
        long minLong = Long.MAX_VALUE;
        long maxLong = Long.MIN_VALUE;
        double minFloat = Float.MAX_VALUE;
        double maxFloat = Float.MIN_VALUE;
        double minDouble = Double.MAX_VALUE;
        double maxDouble = Double.MIN_VALUE;
        // Start points for string comparison (tracked but not asserted below).
        String minString = new String(Character.toChars(1114111));
        String maxString = "0";

        int numDocs = scaledRandomIntBetween(128, 1024);
        List<IndexRequestBuilder> request = new ArrayList<>(numDocs);
        for (int doc = 0; doc < numDocs; doc++) {
            byte b = randomByte();
            minByte = Math.min(minByte, b);
            maxByte = Math.max(maxByte, b);
            short s = randomShort();
            minShort = Math.min(minShort, s);
            maxShort = Math.max(maxShort, s);
            int i = randomInt();
            minInt = Math.min(minInt, i);
            maxInt = Math.max(maxInt, i);
            long l = randomLong();
            minLong = Math.min(minLong, l);
            maxLong = Math.max(maxLong, l);
            float f = randomFloat();
            minFloat = Math.min(minFloat, f);
            maxFloat = Math.max(maxFloat, f);
            double d = randomDouble();
            minDouble = Math.min(minDouble, d);
            maxDouble = Math.max(maxDouble, d);
            String str = randomRealisticUnicodeOfLength(3);
            if (str.compareTo(minString) < 0) {
                minString = str;
            }
            if (str.compareTo(maxString) > 0) {
                maxString = str;
            }
            request.add(client().prepareIndex("test", "test", Integer.toString(doc))
                    .setSource("byte", b, "short", s, "integer", i, "long", l, "float", f, "double", d, "string", str)
            );
        }
        indexRandom(true, false, request);

        FieldStatsResponse response = client().prepareFieldStats().setFields("byte", "short", "integer", "long", "float", "double", "string").get();
        assertAllSuccessful(response);
        // Each field is present in every document, so density is 100%.
        for (FieldStats stats : response.getAllFieldStats().values()) {
            assertThat(stats.getMaxDoc(), equalTo((long) numDocs));
            assertThat(stats.getDocCount(), equalTo((long) numDocs));
            assertThat(stats.getDensity(), equalTo(100));
        }
        assertThat(response.getAllFieldStats().get("byte").getMinValue(), equalTo(minByte));
        assertThat(response.getAllFieldStats().get("byte").getMaxValue(), equalTo(maxByte));
        assertThat(response.getAllFieldStats().get("short").getMinValue(), equalTo(minShort));
        assertThat(response.getAllFieldStats().get("short").getMaxValue(), equalTo(maxShort));
        assertThat(response.getAllFieldStats().get("integer").getMinValue(), equalTo(minInt));
        assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(maxInt));
        assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(minLong));
        assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(maxLong));
        assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(minFloat));
        assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(maxFloat));
        assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(minDouble));
        assertThat(response.getAllFieldStats().get("double").getMaxValue(), equalTo(maxDouble));
    }

    public void testFieldStatsIndexLevel() throws Exception {
        assertAcked(prepareCreate("test1").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test2").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test3").addMapping(
                "test", "value", "type=long"
        ));
        ensureGreen("test1", "test2", "test3");
        indexRange("test1", -10, 100);
        indexRange("test2", 101, 200);
        indexRange("test3", 201, 300);

        // default: merged stats under the synthetic "_all" key.
        FieldStatsResponse response = client().prepareFieldStats().setFields("value").get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10L));
        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300L));
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L));

        // Level: cluster — same shape as the default.
        response = client().prepareFieldStats().setFields("value").setLevel("cluster").get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10L));
        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300L));
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L));

        // Level: indices — per-index stats, no merged stats.
        response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(3));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10L));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));

        // Illegal level option is rejected at request validation time.
        try {
            client().prepareFieldStats().setFields("value").setLevel("illegal").get();
            fail();
        } catch (ActionRequestValidationException e) {
            assertThat(e.getMessage(), equalTo("Validation Failed: 1: invalid level option [illegal];"));
        }
    }

    public void testIncompatibleFieldTypes() {
        assertAcked(prepareCreate("test1").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test2").addMapping(
                "test", "value", "type=text"
        ));
        ensureGreen("test1", "test2");
        client().prepareIndex("test1", "test").setSource("value", 1L).get();
        client().prepareIndex("test1", "test").setSource("value", 2L).get();
        client().prepareIndex("test2", "test").setSource("value", "a").get();
        client().prepareIndex("test2", "test").setSource("value", "b").get();
        refresh();

        // Merging long and text stats for the same field name must fail.
        try {
            client().prepareFieldStats().setFields("value").get();
            fail();
        } catch (IllegalStateException e){
            assertThat(e.getMessage(), containsString("trying to merge the field stats of field [value]"));
        }

        // Per-index stats still work, each index with its own value type.
        FieldStatsResponse response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
        assertAllSuccessful(response);
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(new BytesRef("a")));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(new BytesRef("b")));
    }

    public void testFieldStatsFiltering() throws Exception {
        assertAcked(prepareCreate("test1").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test2").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test3").addMapping(
                "test", "value", "type=long"
        ));
        ensureGreen("test1", "test2", "test3");
        indexRange("test1", -10, 100);
        indexRange("test2", 101, 200);
        indexRange("test3", 201, 300);

        // min >= 200 && max <= 300 -> only test3 qualifies.
        FieldStatsResponse response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MIN, GTE, "200"), new IndexConstraint("value", MAX , LTE, "300"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));

        // max <= 200 -> test1 and test2.
        response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MAX, LTE, "200"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10L));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));

        // min >= 100 -> test2 and test3.
        response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MIN, GTE, "100"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));

        // Windows that no index satisfies produce empty results.
        response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-20"), new IndexConstraint("value", MAX, LT, "-10"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
        response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-100"), new IndexConstraint("value", MAX, LTE, "-20"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

        // Both bounds must hold for the same index.
        response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MIN, GTE, "100"), new IndexConstraint("value", MAX, LTE, "200"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
        response = client().prepareFieldStats()
                .setFields("value")
                .setIndexContraints(new IndexConstraint("value", MIN, GTE, "150"), new IndexConstraint("value", MAX, LTE, "300"))
                .setLevel("indices")
                .get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
    }

    public void testIncompatibleFilter() throws Exception {
        assertAcked(prepareCreate("test1").addMapping(
                "test", "value", "type=long"
        ));
        indexRange("test1", -10, 100);
        // A non-numeric constraint value on a long field must be rejected.
        try {
            client().prepareFieldStats()
                    .setFields("value")
                    .setIndexContraints(new IndexConstraint("value", MAX, LTE, "abc"))
                    .setLevel("indices")
                    .get();
            fail("exception should have been thrown, because value abc is incompatible");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("For input string: \"abc\""));
        }
    }

    /** Indexes one document per value in [from, to] into the given index. */
    private void indexRange(String index, long from, long to) throws Exception {
        List<IndexRequestBuilder> requests = new ArrayList<>();
        for (long value = from; value <= to; value++) {
            requests.add(client().prepareIndex(index, "test").setSource("value", value));
        }
        indexRandom(true, false, requests);
    }
}
| |
/* $Id$ */
package ibis.ipl.impl;
import ibis.io.IbisIOException;
import ibis.ipl.Credentials;
import ibis.ipl.IbisCapabilities;
import ibis.ipl.IbisConfigurationException;
import ibis.ipl.IbisCreationFailedException;
import ibis.ipl.IbisFactory;
import ibis.ipl.IbisProperties;
import ibis.ipl.IbisStarter;
import ibis.ipl.MessageUpcall;
import ibis.ipl.NoSuchPropertyException;
import ibis.ipl.PortType;
import ibis.ipl.ReceivePortConnectUpcall;
import ibis.ipl.RegistryEventHandler;
import ibis.ipl.SendPortDisconnectUpcall;
import ibis.ipl.registry.Registry;
import ibis.ipl.support.management.ManagementClient;
import ibis.ipl.support.vivaldi.Coordinates;
import ibis.ipl.support.vivaldi.VivaldiClient;
import ibis.util.TypedProperties;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This implementation of the {@link ibis.ipl.Ibis} interface is a base class,
* to be extended by specific Ibis implementations.
*/
public abstract class Ibis implements ibis.ipl.Ibis // , IbisMBean
{
// property to uniquely identify an Ibis locally, even when it has not
// joined the registry yet
public static final String ID_PROPERTY = "ibis.local.id";
/** Debugging output. */
private static final Logger logger = LoggerFactory
.getLogger("ibis.ipl.impl.Ibis");
/** The IbisCapabilities as specified by the user. */
private final IbisCapabilities capabilities;
/** List of port types given by the user */
private final PortType[] portTypes;
private final IbisStarter starter;
/**
* Properties, as given to
* {@link ibis.ipl.IbisFactory#createIbis(IbisCapabilities, Properties, boolean, RegistryEventHandler, PortType...)}
* .
*/
protected TypedProperties properties;
/** The Ibis registry. */
private final Registry registry;
/** Management Client */
private final ManagementClient managementClient;
/** Vivaldi Client */
private final VivaldiClient vivaldiClient;
/** Identifies this Ibis instance in the registry. */
public final IbisIdentifier ident;
/** Set when {@link #end()} is called. */
private boolean ended = false;
/** The receiveports running on this Ibis instance. */
private HashMap<String, ReceivePort> receivePorts;
/** The sendports running on this Ibis instance. */
private HashMap<String, SendPort> sendPorts;
private HashMap<ibis.ipl.IbisIdentifier, Long> sentBytesPerIbis = null;
private HashMap<ibis.ipl.IbisIdentifier, Long> receivedBytesPerIbis = null;
/** Counter for allocating names for anonymous sendports. */
private static int send_counter = 0;
/** Counter for allocating names for anonymous receiveports. */
private static int receive_counter = 0;
/** Total number of messages send by closed send ports */
private long outgoingMessageCount = 0;
/** Total number of messages received by closed receive ports */
private long incomingMessageCount = 0;
/** Total number of bytes written to messages closed send ports */
private long bytesWritten = 0;
/** Total number of bytes send by closed send ports */
private long bytesSent = 0;
/** Total number of bytes read by closed receive ports */
private long bytesReceived = 0;
/** Total number of bytes read from messages (for closed received ports) */
private long bytesRead = 0;
/**
 * Returns the version, consisting of both the generic implementation
 * version, and the "actual" implementation version (from the starter).
 *
 * @return the combined version string.
 * @throws Exception when either version component cannot be determined.
 */
private String getImplementationVersion() throws Exception {
    String genericVersion = Ibis.class.getPackage()
            .getImplementationVersion();
    // --Roelof on android the implementation version from the manifest gets
    // overwritten with a default implementation version of "0.0". This is
    // not the value we're searching for.
    if (genericVersion == null || genericVersion.equals("0.0")) {
        // try to get version from IPL_MANIFEST properties
        genericVersion = IbisFactory
                .getManifestProperty("implementation.version");
    }
    logger.debug("Version of Generic Ibis = " + genericVersion);
    if (genericVersion == null
            || starter.getImplementationVersion() == null) {
        throw new Exception("cannot get version for ibis");
    }
    return genericVersion + starter.getImplementationVersion();
}
/**
 * Constructs an <code>Ibis</code> instance with the specified parameters.
 *
 * @param registryHandler
 *            the registryHandler.
 * @param capabilities
 *            the capabilities.
 * @param credentials
 *            credentials passed to the registry when joining.
 * @param applicationTag
 *            an application level tag for this Ibis instance
 * @param portTypes
 *            the port types requested for this ibis implementation.
 * @param userProperties
 *            the properties as provided by the Ibis factory.
 * @param starter
 *            the starter that is creating this Ibis instance.
 * @throws IbisCreationFailedException
 *             when the registry, vivaldi client, or management client
 *             could not be created.
 */
protected Ibis(RegistryEventHandler registryHandler,
        IbisCapabilities capabilities, Credentials credentials,
        byte[] applicationTag, PortType[] portTypes,
        Properties userProperties, IbisStarter starter)
        throws IbisCreationFailedException {
    if (capabilities == null) {
        throw new IbisConfigurationException("capabilities not specified");
    }
    this.capabilities = capabilities;
    this.portTypes = portTypes;
    this.starter = starter;
    this.properties = new TypedProperties();
    // bottom up add properties, starting with hard coded ones
    properties.addProperties(IbisProperties.getHardcodedProperties());
    properties.addProperties(userProperties);
    // set unique ID for this Ibis.
    properties.setProperty(ID_PROPERTY, UUID.randomUUID().toString());
    if (logger.isDebugEnabled()) {
        logger.debug("Ibis constructor: properties = " + properties);
    }
    receivePorts = new HashMap<String, ReceivePort>();
    sendPorts = new HashMap<String, SendPort>();
    if (registryHandler != null) {
        // Only install wrapper if user actually has an event handler.
        // Otherwise, registry downcalls won't work. There needs to be another
        // way to let an Ibis know of died Ibises. --Ceriel
        registryHandler = new RegistryEventHandlerWrapper(registryHandler, this);
    }
    try {
        registry = Registry.createRegistry(this.capabilities,
                registryHandler, properties, getData(),
                getImplementationVersion(), applicationTag, credentials);
    } catch (IbisConfigurationException e) {
        // Configuration errors are passed through unchanged.
        throw e;
    } catch (Throwable e) {
        throw new IbisCreationFailedException("Could not create registry",
                e);
    }
    ident = registry.getIbisIdentifier();
    // Optional support clients, each enabled by a boolean property.
    if (properties.getBooleanProperty("ibis.vivaldi")) {
        try {
            vivaldiClient = new VivaldiClient(properties, registry);
        } catch (Exception e) {
            throw new IbisCreationFailedException(
                    "Could not create vivaldi client", e);
        }
    } else {
        vivaldiClient = null;
    }
    if (properties.getBooleanProperty("ibis.bytescount")) {
        sentBytesPerIbis = new HashMap<ibis.ipl.IbisIdentifier, Long>();
        receivedBytesPerIbis = new HashMap<ibis.ipl.IbisIdentifier, Long>();
    }
    if (properties.getBooleanProperty("ibis.managementclient")) {
        try {
            managementClient = new ManagementClient(properties, this);
        } catch (Throwable e) {
            throw new IbisCreationFailedException(
                    "Could not create management client", e);
        }
    } else {
        managementClient = null;
    }
    /*
     * // add bean to JMX try { MBeanServer mbs =
     * ManagementFactory.getPlatformMBeanServer(); ObjectName name = new
     * ObjectName("ibis.ipl.impl:type=Ibis"); mbs.registerMBean(this, name);
     * } catch (Exception e) { logger.warn("cannot registry MBean", e); }
     */
}
/** Called when an Ibis instance died; kills all connections with it. */
void died(ibis.ipl.IbisIdentifier corpse) {
    killConnections(corpse);
}
/** Called when an Ibis instance left the pool; kills all connections with it. */
void left(ibis.ipl.IbisIdentifier leftIbis) {
    killConnections(leftIbis);
}
/**
 * Kills all send- and receive-port connections with the given Ibis.
 * Exceptions from individual ports are logged (at debug level) and ignored
 * so that one failing port does not prevent cleaning up the others.
 *
 * @param corpse the Ibis instance whose connections must be killed.
 */
protected void killConnections(ibis.ipl.IbisIdentifier corpse) {
    // Snapshot both port tables while holding the lock; the kill calls
    // themselves run outside the synchronized block.
    SendPort[] sendSnapshot;
    ReceivePort[] receiveSnapshot;
    synchronized (this) {
        sendSnapshot = sendPorts.values().toArray(new SendPort[sendPorts.size()]);
        receiveSnapshot = receivePorts.values().toArray(new ReceivePort[receivePorts.size()]);
    }
    for (SendPort port : sendSnapshot) {
        try {
            port.killConnectionsWith(corpse);
        } catch (Throwable e) {
            if (logger.isDebugEnabled()) {
                logger.debug("Got exception from killConnectionsWith", e);
            }
        }
    }
    for (ReceivePort port : receiveSnapshot) {
        try {
            port.killConnectionsWith(corpse);
        } catch (Throwable e) {
            if (logger.isDebugEnabled()) {
                logger.debug("Got exception from killConnectionsWith", e);
            }
        }
    }
}
/**
 * Returns the current Ibis version, combining the starter's nickname and
 * its IPL version.
 *
 * @return the ibis version.
 */
public String getVersion() {
    return starter.getNickName() + "-" + starter.getIplVersion();
}
/** Returns the registry of this Ibis instance. */
public ibis.ipl.Registry registry() {
    return registry;
}
/** Returns the identifier of this Ibis instance in the registry. */
public ibis.ipl.IbisIdentifier identifier() {
    return ident;
}
/**
 * Returns the properties of this Ibis instance.
 *
 * NOTE(review): new Properties(p) uses p as *defaults*: getProperty() on
 * the result works, but its keySet()/entrySet() are empty. Presumably
 * callers only use getProperty(); confirm before changing.
 */
public Properties properties() {
    return new Properties(properties);
}
/**
 * Ends this Ibis instance: leaves the registry, stops the management and
 * vivaldi clients if present, then calls the implementation-specific
 * {@link #quit()}. Idempotent: only the first call has any effect.
 *
 * @throws IOException when leaving the registry fails.
 */
public void end() throws IOException {
    // Flip the flag under the lock so concurrent calls return early.
    synchronized (this) {
        if (ended) {
            return;
        }
        ended = true;
    }
    try {
        registry.leave();
    } catch (Throwable e) {
        throw new IbisIOException("Registry: leave failed ", e);
    }
    if (managementClient != null) {
        managementClient.end();
    }
    if (vivaldiClient != null) {
        vivaldiClient.end();
    }
    quit();
}
/**
 * Polls for messages. Implementations may override.
 *
 * @throws IOException may be thrown by implementations.
 */
public void poll() throws IOException {
    // Default has empty implementation.
}
/**
 * Registers a receive port under its name.
 *
 * @param p the receive port to register.
 * @throws IOException when a receive port with the same name already exists.
 */
synchronized void register(ReceivePort p) throws IOException {
    // Receive port names must be unique within this Ibis instance.
    if (receivePorts.containsKey(p.name)) {
        throw new IOException("Multiple instances of receiveport named "
                + p.name);
    }
    receivePorts.put(p.name, p);
}
/**
 * Removes a receive port from the registration table and folds its
 * statistics into the per-instance totals. A port that was never
 * registered (or already removed) is ignored.
 *
 * @param p the receive port to deregister.
 */
synchronized void deRegister(ReceivePort p) {
    boolean wasRegistered = receivePorts.remove(p.name) != null;
    if (wasRegistered) {
        // add statistics for this receive port to "total" statistics
        incomingMessageCount += p.getMessageCount();
        bytesReceived += p.getBytesReceived();
        bytesRead += p.getBytesRead();
    }
}
/**
 * Registers a send port under its name.
 *
 * @param p the send port to register.
 * @throws IOException when a send port with the same name already exists.
 */
synchronized void register(SendPort p) throws IOException {
    // Send port names must be unique within this Ibis instance.
    if (sendPorts.containsKey(p.name)) {
        throw new IOException("Multiple instances of sendport named "
                + p.name);
    }
    sendPorts.put(p.name, p);
}
/**
 * Removes a send port from the registration table and folds its statistics
 * into the per-instance totals. A port that was never registered (or
 * already removed) is ignored.
 *
 * @param p the send port to deregister.
 */
synchronized void deRegister(SendPort p) {
    boolean wasRegistered = sendPorts.remove(p.name) != null;
    if (wasRegistered) {
        // add statistics for this sendport to "total" statistics
        outgoingMessageCount += p.getMessageCount();
        bytesSent += p.getBytesSent();
        bytesWritten += p.getBytesWritten();
    }
}
/**
 * Adds {@code cnt} sent bytes to the per-Ibis statistics for each
 * destination in {@code idents}. No-op unless "ibis.bytescount" was
 * enabled at construction time.
 *
 * Bug fix: the previous version mutated {@code cnt} inside the loop, so
 * with multiple destinations a later destination's stored total wrongly
 * included earlier destinations' old totals. Each destination now gets
 * {@code cnt} added to its own previous total only.
 *
 * @param cnt number of bytes sent.
 * @param idents the receive ports the bytes were sent to.
 */
synchronized void addSentPerIbis(long cnt, ibis.ipl.ReceivePortIdentifier[] idents) {
    if (sentBytesPerIbis == null) {
        return;
    }
    for (ibis.ipl.ReceivePortIdentifier rp : idents) {
        ibis.ipl.IbisIdentifier i = rp.ibisIdentifier();
        Long oldval = sentBytesPerIbis.get(i);
        long total = (oldval == null) ? cnt : cnt + oldval.longValue();
        // Long.valueOf instead of the deprecated Long(long) constructor.
        sentBytesPerIbis.put(i, Long.valueOf(total));
    }
}
/**
 * Adds {@code cnt} received bytes to the per-Ibis statistics for each
 * origin in {@code idents}. No-op unless "ibis.bytescount" was enabled at
 * construction time.
 *
 * Bug fix: the previous version mutated {@code cnt} inside the loop, so
 * with multiple origins a later origin's stored total wrongly included
 * earlier origins' old totals. Each origin now gets {@code cnt} added to
 * its own previous total only.
 *
 * @param cnt number of bytes received.
 * @param idents the send ports the bytes were received from.
 */
synchronized void addReceivedPerIbis(long cnt, ibis.ipl.SendPortIdentifier[] idents) {
    if (receivedBytesPerIbis == null) {
        return;
    }
    for (ibis.ipl.SendPortIdentifier sp : idents) {
        ibis.ipl.IbisIdentifier i = sp.ibisIdentifier();
        Long oldval = receivedBytesPerIbis.get(i);
        long total = (oldval == null) ? cnt : cnt + oldval.longValue();
        // Long.valueOf instead of the deprecated Long(long) constructor.
        receivedBytesPerIbis.put(i, Long.valueOf(total));
    }
}
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Public methods, may be called by Ibis implementations.
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
/**
 * Returns the receiveport with the specified name, or <code>null</code> if
 * not present.
 *
 * Synchronized because the port table is also mutated by the synchronized
 * (de)register methods.
 *
 * @param name
 *            the name of the receiveport.
 * @return the receiveport.
 */
public synchronized ReceivePort findReceivePort(String name) {
    return receivePorts.get(name);
}
/**
 * Returns the sendport with the specified name, or <code>null</code> if not
 * present.
 *
 * Synchronized because the port table is also mutated by the synchronized
 * (de)register methods.
 *
 * @param name
 *            the name of the sendport.
 * @return the sendport.
 */
public synchronized SendPort findSendPort(String name) {
    return sendPorts.get(name);
}
/**
 * Creates a receive port identifier for the given port name and Ibis.
 *
 * @param name the receive port name.
 * @param id the Ibis identifier.
 * @return the new receive port identifier.
 */
public ReceivePortIdentifier createReceivePortIdentifier(String name,
        IbisIdentifier id) {
    return new ReceivePortIdentifier(name, id);
}
/**
 * Creates a send port identifier for the given port name and Ibis.
 *
 * @param name the send port name.
 * @param id the Ibis identifier.
 * @return the new send port identifier.
 */
public SendPortIdentifier createSendPortIdentifier(String name,
        IbisIdentifier id) {
    return new SendPortIdentifier(name, id);
}
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Protected methods, to be implemented by Ibis implementations.
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
/**
 * Implementation-dependent part of the {@link #end()} implementation.
 * Invoked last by {@link #end()}, after the registry has been left and the
 * support clients have been stopped.
 */
protected abstract void quit();
/**
 * This method should provide the implementation-dependent data of the Ibis
 * identifier for this Ibis instance. This method gets called from the Ibis
 * constructor, which passes the result to the registry when joining.
 *
 * @exception IOException
 *                may be thrown in case of trouble.
 * @return the implementation-dependent data, as a byte array.
 */
protected abstract byte[] getData() throws IOException;
/**
 * Creates an anonymous send port of the given type (no name, no upcall,
 * no properties).
 *
 * @param tp the port type.
 * @return the new send port.
 * @throws IOException when the port could not be created.
 */
public ibis.ipl.SendPort createSendPort(PortType tp) throws IOException {
    return createSendPort(tp, null, null, null);
}
/**
 * Creates a named send port of the given type (no upcall, no properties).
 *
 * @param tp the port type.
 * @param name the name of the send port.
 * @return the new send port.
 * @throws IOException when the port could not be created.
 */
public ibis.ipl.SendPort createSendPort(PortType tp, String name)
        throws IOException {
    return createSendPort(tp, name, null, null);
}
/**
 * Verifies that the given port type is one of the port types specified
 * when this Ibis instance was created.
 *
 * @param tp the port type to check.
 * @exception IbisConfigurationException is thrown when the port type was
 *            not specified when creating this Ibis instance.
 */
private void matchPortType(PortType tp) {
    for (PortType p : portTypes) {
        if (tp.equals(p)) {
            // Match found; no need to scan the remaining port types.
            return;
        }
    }
    throw new IbisConfigurationException("PortType \"" + tp
            + "\" not specified when creating this Ibis instance");
}
/**
 * Creates a send port of the given type, validating the requested features
 * against the port type's capabilities before delegating to
 * {@link #doCreateSendPort}.
 *
 * @param tp the port type.
 * @param name the port name; when null, an anonymous name is generated.
 * @param cU disconnect upcall handler, may be null.
 * @param properties port-level properties, may be null.
 * @return the new send port.
 * @throws IOException when the port could not be created.
 */
public ibis.ipl.SendPort createSendPort(PortType tp, String name,
        SendPortDisconnectUpcall cU, Properties properties)
        throws IOException {
    if (tp.hasCapability(PortType.CONNECTION_ULTRALIGHT)) {
        if (tp.hasCapability(PortType.CONNECTION_UPCALLS)) {
            // Fixed message typo: "to not support" -> "do not support".
            throw new IbisConfigurationException(
                    "Ultralight connections do not support connection upcalls");
        }
        if (tp.hasCapability(PortType.COMMUNICATION_RELIABLE)) {
            throw new IbisConfigurationException(
                    "Ultralight connections do not support reliability");
        }
        if (tp.hasCapability(PortType.COMMUNICATION_FIFO)) {
            throw new IbisConfigurationException(
                    "Ultralight connections do not support FIFO message ordering");
        }
    }
    if (cU != null) {
        if (!tp.hasCapability(PortType.CONNECTION_UPCALLS)) {
            throw new IbisConfigurationException(
                    "no connection upcalls requested for this port type");
        }
    }
    if (name == null) {
        // Allocate an anonymous name; counter is class-wide, hence the lock.
        synchronized (this.getClass()) {
            name = "anonymous send port " + send_counter++;
        }
    }
    matchPortType(tp);
    return doCreateSendPort(tp, name, cU, properties);
}
/**
 * Creates a {@link ibis.ipl.SendPort} of the specified port type. Called by
 * {@link #createSendPort(PortType, String, SendPortDisconnectUpcall, Properties)}
 * after the requested features have been validated.
 *
 * @param tp
 *            the port type.
 * @param name
 *            the name of this sendport.
 * @param cU
 *            object implementing the
 *            {@link SendPortDisconnectUpcall#lostConnection(ibis.ipl.SendPort, ReceivePortIdentifier, Throwable)}
 *            method.
 * @param properties
 *            the port properties.
 * @return the new sendport.
 * @exception java.io.IOException
 *                is thrown when the port could not be created.
 */
protected abstract ibis.ipl.SendPort doCreateSendPort(PortType tp,
        String name, SendPortDisconnectUpcall cU, Properties properties)
        throws IOException;
/**
 * Creates a named receive port for explicit receive (no upcalls).
 *
 * @param tp the port type.
 * @param name the name of the receive port.
 * @return the new receive port.
 * @throws IOException when the port could not be created.
 */
public ibis.ipl.ReceivePort createReceivePort(PortType tp, String name)
        throws IOException {
    return createReceivePort(tp, name, null, null, null);
}
/**
 * Creates a named receive port with a message upcall handler.
 *
 * @param tp the port type.
 * @param name the name of the receive port.
 * @param u the message upcall handler.
 * @return the new receive port.
 * @throws IOException when the port could not be created.
 */
public ibis.ipl.ReceivePort createReceivePort(PortType tp, String name,
        MessageUpcall u) throws IOException {
    return createReceivePort(tp, name, u, null, null);
}
/**
 * Creates a named receive port with a connection upcall handler.
 *
 * @param tp the port type.
 * @param name the name of the receive port.
 * @param cU the connection upcall handler.
 * @return the new receive port.
 * @throws IOException when the port could not be created.
 */
public ibis.ipl.ReceivePort createReceivePort(PortType tp, String name,
        ReceivePortConnectUpcall cU) throws IOException {
    return createReceivePort(tp, name, null, cU, null);
}
/**
 * Creates a named {@link ibis.ipl.ReceivePort} of the specified port type,
 * validating the requested capabilities against the supplied handlers:
 * ultralight connections exclude connection upcalls, reliability and FIFO
 * ordering; a message upcall handler requires an upcall-receive capability,
 * and its absence requires explicit receive.
 *
 * @param tp         the port type.
 * @param name       the name of this receiveport, or null for a generated
 *                   anonymous name.
 * @param u          the message upcall handler, or null for explicit receive.
 * @param cU         object implementing the connection upcalls, or null.
 * @param properties the port properties.
 * @return the new receiveport.
 * @exception java.io.IOException is thrown when the port could not be created.
 */
public ibis.ipl.ReceivePort createReceivePort(PortType tp, String name,
        MessageUpcall u, ReceivePortConnectUpcall cU, Properties properties)
        throws IOException {
    if (tp.hasCapability(PortType.CONNECTION_ULTRALIGHT)) {
        if (tp.hasCapability(PortType.CONNECTION_UPCALLS)) {
            // Fixed message typo: "to not support" -> "do not support"
            // (matches the wording of the sibling messages below).
            throw new IbisConfigurationException(
                    "Ultralight connections do not support connection upcalls");
        }
        if (tp.hasCapability(PortType.COMMUNICATION_RELIABLE)) {
            throw new IbisConfigurationException(
                    "Ultralight connections do not support reliability");
        }
        if (tp.hasCapability(PortType.COMMUNICATION_FIFO)) {
            throw new IbisConfigurationException(
                    "Ultralight connections do not support FIFO message ordering");
        }
    }
    if (cU != null) {
        if (!tp.hasCapability(PortType.CONNECTION_UPCALLS)) {
            throw new IbisConfigurationException(
                    "no connection upcalls requested for this port type");
        }
    }
    if (u != null) {
        if (!tp.hasCapability(PortType.RECEIVE_AUTO_UPCALLS)
                && !tp.hasCapability(PortType.RECEIVE_POLL_UPCALLS)) {
            throw new IbisConfigurationException(
                    "no message upcalls requested for this port type");
        }
    } else {
        if (!tp.hasCapability(PortType.RECEIVE_EXPLICIT)) {
            throw new IbisConfigurationException(
                    "no explicit receive requested for this port type");
        }
    }
    if (name == null) {
        // Generate a unique anonymous name; the counter is shared per class.
        synchronized (this.getClass()) {
            name = "anonymous receive port " + receive_counter++;
        }
    }
    matchPortType(tp);
    return doCreateReceivePort(tp, name, u, cU, properties);
}
/**
 * Creates a named {@link ibis.ipl.ReceivePort} of the specified port type,
 * with upcall based communication. New connections will not be accepted
 * until {@link ibis.ipl.ReceivePort#enableConnections()} is invoked. This
 * is done to avoid upcalls during initialization. When a new connection
 * request arrives, or when a connection is lost, a ConnectUpcall is
 * performed.
 *
 * <p>All capability/argument checking and anonymous-name generation has
 * already been done by {@code createReceivePort} before this method is
 * invoked, so implementations may assume a validated, non-null name.
 *
 * @param tp
 *            the port type.
 * @param name
 *            the name of this receiveport.
 * @param u
 *            the upcall handler.
 * @param cU
 *            object implementing <code>gotConnection</code>() and
 *            <code>lostConnection</code>() upcalls.
 * @param properties
 *            the port properties.
 * @return the new receiveport.
 * @exception java.io.IOException
 *                is thrown when the port could not be created.
 */
protected abstract ibis.ipl.ReceivePort doCreateReceivePort(PortType tp,
        String name, MessageUpcall u, ReceivePortConnectUpcall cU,
        Properties properties) throws IOException;
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Protected management methods, can be overridden/used in implementations
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
/**
 * Looks up a single gathered-statistics value by key.
 *
 * @param key the property name.
 * @return the property value.
 * @throws NoSuchPropertyException when the key is unknown.
 */
public String getManagementProperty(String key)
        throws NoSuchPropertyException {
    final String value = managementProperties().get(key);
    if (value != null) {
        return value;
    }
    throw new NoSuchPropertyException("property \"" + key
            + "\" not found");
}
/**
 * Returns the total number of messages sent: the count accumulated from
 * closed ports plus the live counts of all currently open send ports.
 */
public synchronized long getOutgoingMessageCount() {
    long total = this.outgoingMessageCount;
    for (final SendPort port : sendPorts.values()) {
        total += port.getMessageCount();
    }
    return total;
}
/**
 * Returns the total number of bytes sent on the network: the count
 * accumulated from closed ports plus the live counts of all currently
 * open send ports.
 */
public synchronized long getBytesSent() {
    long total = this.bytesSent;
    for (final SendPort port : sendPorts.values()) {
        total += port.getBytesSent();
    }
    return total;
}
/**
 * Returns the total number of bytes written to messages: the count
 * accumulated from closed ports plus the live counts of all currently
 * open send ports.
 */
public synchronized long getBytesWritten() {
    long total = this.bytesWritten;
    for (final SendPort port : sendPorts.values()) {
        total += port.getBytesWritten();
    }
    return total;
}
/**
 * Returns the total number of messages received: the count accumulated
 * from closed ports plus the live counts of all currently open receive
 * ports.
 */
public synchronized long getIncomingMessageCount() {
    long total = this.incomingMessageCount;
    for (final ReceivePort port : receivePorts.values()) {
        total += port.getMessageCount();
    }
    return total;
}
/**
 * Returns the total number of bytes received from the network: the count
 * accumulated from closed ports plus the live counts of all currently
 * open receive ports.
 */
public synchronized long getBytesReceived() {
    long total = this.bytesReceived;
    for (final ReceivePort port : receivePorts.values()) {
        total += port.getBytesReceived();
    }
    return total;
}
/**
 * Returns the total number of bytes read from messages: the count
 * accumulated from closed ports plus the live counts of all currently open
 * receive ports.
 */
public synchronized long getBytesRead() {
    long bytesRead = this.bytesRead;
    // also add numbers for current receive ports
    for (ReceivePort receivePort : receivePorts.values()) {
        // Bug fix: this previously summed receivePort.getBytesReceived()
        // (apparent copy-paste from getBytesReceived()), conflating the
        // bytes-received statistic with bytes-read. Use the per-port
        // read counter to match the aggregated this.bytesRead field.
        bytesRead += receivePort.getBytesRead();
    }
    return bytesRead;
}
/**
 * Returns the identifiers of all Ibis instances that any of our open send
 * ports is currently connected to, without duplicates.
 *
 * @ibis.experimental
 */
public synchronized ibis.ipl.IbisIdentifier[] connectedTo() {
    final HashSet<ibis.ipl.IbisIdentifier> peers = new HashSet<ibis.ipl.IbisIdentifier>();
    for (final SendPort sendPort : sendPorts.values()) {
        for (final ibis.ipl.ReceivePortIdentifier rp : sendPort.connectedTo()) {
            peers.add(rp.ibisIdentifier());
        }
    }
    return peers.toArray(new ibis.ipl.IbisIdentifier[0]);
}
/**
 * Returns the current Vivaldi network coordinates, or null when no
 * Vivaldi client is active.
 *
 * @ibis.experimental
 */
public Coordinates getVivaldiCoordinates() {
    return vivaldiClient == null ? null : vivaldiClient.getCoordinates();
}
/**
 * Returns a snapshot of the bytes-sent statistics keyed by destination
 * Ibis, or null when this statistic is not being collected.
 *
 * @ibis.experimental
 */
public synchronized Map<ibis.ipl.IbisIdentifier, Long> getSentBytesPerIbis() {
    if (sentBytesPerIbis != null) {
        // Defensive copy so callers cannot observe later mutations.
        return new HashMap<ibis.ipl.IbisIdentifier, Long>(sentBytesPerIbis);
    }
    return null;
}
/**
 * Returns a snapshot of the bytes-received statistics keyed by source
 * Ibis, or null when this statistic is not being collected.
 *
 * @ibis.experimental
 */
public synchronized Map<ibis.ipl.IbisIdentifier, Long> getReceivedBytesPerIbis() {
    if (receivedBytesPerIbis != null) {
        // Defensive copy so callers cannot observe later mutations.
        return new HashMap<ibis.ipl.IbisIdentifier, Long>(receivedBytesPerIbis);
    }
    return null;
}
/**
 * Returns the names of the elections this Ibis instance has won,
 * delegating to the registry.
 *
 * @ibis.experimental
 */
public String[] wonElections() {
    return registry.wonElections();
}
/**
 * Aggregates the connection types of all open receive ports into a single
 * map keyed by remote Ibis; the type sets of multiple ports talking to the
 * same Ibis are merged.
 *
 * @ibis.experimental
 */
public synchronized Map<ibis.ipl.IbisIdentifier, Set<String>> getReceiverConnectionTypes() {
    final Map<ibis.ipl.IbisIdentifier, Set<String>> merged =
            new HashMap<ibis.ipl.IbisIdentifier, Set<String>>();
    for (final ReceivePort port : receivePorts.values()) {
        for (final Entry<IbisIdentifier, Set<String>> entry
                : port.getConnectionTypes().entrySet()) {
            Set<String> types = merged.get(entry.getKey());
            if (types == null) {
                types = new HashSet<String>();
                merged.put(entry.getKey(), types);
            }
            types.addAll(entry.getValue());
        }
    }
    return merged;
}
/**
 * Aggregates the connection types of all open send ports into a single map
 * keyed by remote Ibis; the type sets of multiple ports talking to the
 * same Ibis are merged.
 *
 * @ibis.experimental
 */
public synchronized Map<ibis.ipl.IbisIdentifier, Set<String>> getSenderConnectionTypes() {
    final Map<ibis.ipl.IbisIdentifier, Set<String>> merged =
            new HashMap<ibis.ipl.IbisIdentifier, Set<String>>();
    for (final SendPort port : sendPorts.values()) {
        for (final Entry<IbisIdentifier, Set<String>> entry
                : port.getConnectionTypes().entrySet()) {
            Set<String> types = merged.get(entry.getKey());
            if (types == null) {
                types = new HashSet<String>();
                merged.put(entry.getKey(), types);
            }
            types.addAll(entry.getValue());
        }
    }
    return merged;
}
/**
 * Gathers the current traffic statistics into a fresh string-valued map
 * suitable for management/JMX consumption.
 */
public synchronized Map<String, String> managementProperties() {
    final Map<String, String> stats = new HashMap<String, String>();
    // put gathered statistics in the map
    stats.put("outgoingMessageCount", String.valueOf(getOutgoingMessageCount()));
    stats.put("bytesWritten", String.valueOf(getBytesWritten()));
    stats.put("bytesSent", String.valueOf(getBytesSent()));
    stats.put("incomingMessageCount", String.valueOf(getIncomingMessageCount()));
    stats.put("bytesReceived", String.valueOf(getBytesReceived()));
    stats.put("bytesRead", String.valueOf(getBytesRead()));
    return stats;
}
/**
 * Pretty-prints the gathered traffic statistics to the given stream,
 * converting byte counts to megabytes, and flushes the stream.
 *
 * @param stream destination for the report.
 */
public void printManagementProperties(PrintStream stream) {
    // Dividing by a power of two is exact in double arithmetic, so this
    // single-divisor form matches the original successive divisions.
    final double bytesPerMb = 1024.0 * 1024.0;
    stream.format("Messages Sent: %d\n", getOutgoingMessageCount());
    stream.format("Data written to messages: %.2f Mb\n",
            getBytesWritten() / bytesPerMb);
    stream.format("Data sent out on network: %.2f Mb\n",
            getBytesSent() / bytesPerMb);
    stream.format("Messages Received: %d\n", getIncomingMessageCount());
    stream.format("Data received from network: %.2f Mb\n",
            getBytesReceived() / bytesPerMb);
    stream.format("Data read from messages: %.2f Mb\n",
            getBytesRead() / bytesPerMb);
    stream.flush();
}
/**
 * Default implementation: this Ibis exposes only read-only statistics.
 *
 * @throws NoSuchPropertyException always, unless overridden.
 */
public void setManagementProperties(Map<String, String> properties)
        throws NoSuchPropertyException {
    // override if an Ibis _can_ set properties
    throw new NoSuchPropertyException("cannot set any properties");
}
/**
 * Default implementation: this Ibis exposes only read-only statistics.
 *
 * @throws NoSuchPropertyException always, unless overridden.
 */
public void setManagementProperty(String key, String value)
        throws NoSuchPropertyException {
    // override if an Ibis _can_ set properties
    throw new NoSuchPropertyException("cannot set any properties");
}
// jmx function
/** Returns the string form of this Ibis instance's identifier. */
public String getIdentifier() {
    return ident.toString();
}
}
| |
/*
* Copyright 2012 Benjamin Glatzel <benjamin.glatzel@me.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.primitives;
import gnu.trove.iterator.TIntIterator;
import javax.vecmath.Vector3f;
import javax.vecmath.Vector4f;
import org.lwjgl.BufferUtils;
import org.terasology.math.Direction;
import org.terasology.math.Region3i;
import org.terasology.math.Side;
import org.terasology.math.Vector3i;
import org.terasology.performanceMonitor.PerformanceMonitor;
import org.terasology.world.MiniatureChunk;
import org.terasology.world.WorldBiomeProvider;
import org.terasology.world.WorldView;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockPart;
import org.terasology.world.chunks.Chunk;
/**
* Generates tessellated chunk meshes from chunks.
*
* @author Benjamin Glatzel <benjamin.glatzel@me.com>
*/
public final class ChunkTessellator {

    // Number of vertex-array rebuilds performed since startup (shared across instances).
    private static int _statVertexArrayUpdateCount = 0;

    // Supplies per-column temperature/humidity used for block color offsets.
    private WorldBiomeProvider biomeProvider;

    /**
     * Per-vertex rendering hints written into the vertex buffer.
     *
     * <p>NOTE(review): despite the "flags" name the values are sequential
     * integers (1..5), not bit masks, and only a single value is stored per
     * vertex — confirm the shader-side interpretation before combining them.
     */
    public enum ChunkVertexFlags {
        BLOCK_HINT_WATER(1),
        BLOCK_HINT_LAVA(2),
        BLOCK_HINT_GRASS(3),
        BLOCK_HINT_WAVING(4),
        BLOCK_HINT_WAVING_BLOCK(5);

        private int value;

        private ChunkVertexFlags(int value) {
            this.value = value;
        }

        public int getValue() {
            return value;
        }
    }

    public ChunkTessellator(WorldBiomeProvider biomeProvider) {
        this.biomeProvider = biomeProvider;
    }

    /**
     * Builds a mesh for a horizontal slice of the chunk at the given chunk
     * position, then fills the mesh's final interleaved buffers.
     *
     * @param worldView      view used to look up blocks and light values.
     * @param chunkPos       chunk coordinates (scaled to world space internally).
     * @param meshHeight     number of vertical block layers to tessellate.
     * @param verticalOffset first y level (inclusive) of the slice.
     * @return the generated mesh.
     */
    public ChunkMesh generateMesh(WorldView worldView, Vector3i chunkPos, int meshHeight, int verticalOffset) {
        PerformanceMonitor.startActivity("GenerateMesh");
        ChunkMesh mesh = new ChunkMesh();
        // World-space coordinates of the chunk's origin block.
        Vector3i chunkOffset = new Vector3i(chunkPos.x * Chunk.SIZE_X, chunkPos.y * Chunk.SIZE_Y, chunkPos.z * Chunk.SIZE_Z);
        for (int x = 0; x < Chunk.SIZE_X; x++) {
            for (int z = 0; z < Chunk.SIZE_Z; z++) {
                // Biome data varies only per column, so sample it outside the y loop.
                float biomeTemp = biomeProvider.getTemperatureAt(chunkOffset.x + x, chunkOffset.z + z);
                float biomeHumidity = biomeProvider.getHumidityAt(chunkOffset.x + x, chunkOffset.z + z);
                for (int y = verticalOffset; y < verticalOffset + meshHeight; y++) {
                    Block block = worldView.getBlock(x, y, z);
                    // Air/invisible blocks contribute no geometry.
                    if (block == null || block.isInvisible())
                        continue;
                    generateBlockVertices(worldView, mesh, x, y, z, biomeTemp, biomeHumidity);
                }
            }
        }
        generateOptimizedBuffers(worldView, mesh);
        _statVertexArrayUpdateCount++;
        PerformanceMonitor.endActivity();
        return mesh;
    }

    /**
     * Builds a mesh for a stand-alone miniature chunk. A temporary one-chunk
     * WorldView is used for block/light lookups; biome temperature and
     * humidity are fixed at 0.
     *
     * @param miniatureChunk the chunk to tessellate in full.
     * @return the generated mesh.
     */
    public ChunkMesh generateMinaturizedMesh(MiniatureChunk miniatureChunk) {
        PerformanceMonitor.startActivity("GenerateMinuatureMesh");
        ChunkMesh mesh = new ChunkMesh();
        MiniatureChunk[] chunks = { miniatureChunk };
        WorldView localWorldView = new WorldView(chunks, Region3i.createFromCenterExtents(Vector3i.zero(), Vector3i.zero()), Vector3i.zero());
        localWorldView.setChunkSize(new Vector3i(MiniatureChunk.CHUNK_SIZE));
        for (int x = 0; x < MiniatureChunk.SIZE_X; x++) {
            for (int z = 0; z < MiniatureChunk.SIZE_Z; z++) {
                for (int y = 0; y < MiniatureChunk.SIZE_Y; y++) {
                    Block block = miniatureChunk.getBlock(x,y,z);
                    if (block == null || block.isInvisible())
                        continue;
                    // No biome variation for miniatures: temp = humidity = 0.
                    generateBlockVertices(localWorldView, mesh, x, y, z, 0.0f, 0.0f);
                }
            }
        }
        generateOptimizedBuffers(localWorldView, mesh);
        _statVertexArrayUpdateCount++;
        PerformanceMonitor.endActivity();
        return mesh;
    }

    /**
     * Packs the per-element vertex attribute lists into one interleaved
     * float buffer per vertex element, computing per-vertex lighting on the
     * way, and copies the index lists into int buffers.
     *
     * <p>Interleaved layout per vertex (16 floats): position xyz, tex uv,
     * flags, light (sun/block/occlusion), color rgba, normal xyz. The byte
     * size below matches: vertices.size() * 2 covers position plus the
     * 3-float lighting block.
     */
    private void generateOptimizedBuffers(WorldView worldView, ChunkMesh mesh) {
        PerformanceMonitor.startActivity("OptimizeBuffers");
        for (int j = 0; j < mesh._vertexElements.length; j++) {
            // Vertices double to account for light info
            mesh._vertexElements[j].finalVertices = BufferUtils.createByteBuffer(mesh._vertexElements[j].vertices.size() * 2 * 4 + mesh._vertexElements[j].tex.size() * 4 + mesh._vertexElements[j].flags.size() * 4 + mesh._vertexElements[j].color.size() * 4 + mesh._vertexElements[j].normals.size() * 4);
            int cTex = 0;
            int cColor = 0;
            int cFlags = 0;
            // One vertex per iteration: 3 position floats, 2 tex floats,
            // 4 color floats, 1 flags value.
            for (int i = 0; i < mesh._vertexElements[j].vertices.size(); i += 3, cTex += 2, cColor += 4, cFlags++) {
                Vector3f vertexPos = new Vector3f(mesh._vertexElements[j].vertices.get(i), mesh._vertexElements[j].vertices.get(i + 1), mesh._vertexElements[j].vertices.get(i + 2));
                mesh._vertexElements[j].finalVertices.putFloat(vertexPos.x);
                mesh._vertexElements[j].finalVertices.putFloat(vertexPos.y);
                mesh._vertexElements[j].finalVertices.putFloat(vertexPos.z);
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].tex.get(cTex));
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].tex.get(cTex + 1));
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].flags.get(cFlags));
                // result = {sunlight, block light, ambient occlusion}, see
                // calcLightingValuesForVertexPos.
                float[] result = new float[3];
                Vector3f normal = new Vector3f(mesh._vertexElements[j].normals.get(i), mesh._vertexElements[j].normals.get(i+1), mesh._vertexElements[j].normals.get(i+2));
                calcLightingValuesForVertexPos(worldView, vertexPos, result, normal);
                mesh._vertexElements[j].finalVertices.putFloat(result[0]);
                mesh._vertexElements[j].finalVertices.putFloat(result[1]);
                mesh._vertexElements[j].finalVertices.putFloat(result[2]);
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].color.get(cColor));
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].color.get(cColor + 1));
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].color.get(cColor + 2));
                mesh._vertexElements[j].finalVertices.putFloat(mesh._vertexElements[j].color.get(cColor + 3));
                mesh._vertexElements[j].finalVertices.putFloat(normal.x);
                mesh._vertexElements[j].finalVertices.putFloat(normal.y);
                mesh._vertexElements[j].finalVertices.putFloat(normal.z);
            }
            mesh._vertexElements[j].finalIndices = BufferUtils.createIntBuffer(mesh._vertexElements[j].indices.size());
            TIntIterator indexIterator = mesh._vertexElements[j].indices.iterator();
            while (indexIterator.hasNext()) {
                mesh._vertexElements[j].finalIndices.put(indexIterator.next());
            }
            // Prepare both buffers for reading by the renderer.
            mesh._vertexElements[j].finalVertices.flip();
            mesh._vertexElements[j].finalIndices.flip();
        }
        PerformanceMonitor.endActivity();
    }

    /**
     * Computes smoothed lighting for a vertex by sampling the world around it.
     *
     * <p>Samples 8 sunlight and 8 block-light values at the corners offset by
     * ±0.1 (y +0.8/-0.1) around the vertex, and 4 blocks displaced 0.8 along
     * the dominant normal axis for occlusion. Non-zero light samples are
     * averaged and normalized by 15 (the maximum light level implied by the
     * divisor); occlusion combines 0.40^n for opaque shadow casters and
     * 0.80^n for translucent ("billboard") ones.
     *
     * @param output receives {sunlight, block light, ambient occlusion},
     *               each in [0, 1].
     */
    private void calcLightingValuesForVertexPos(WorldView worldView, Vector3f vertexPos, float[] output, Vector3f normal) {
        PerformanceMonitor.startActivity("calcLighting");
        float[] lights = new float[8];
        float[] blockLights = new float[8];
        Block[] blocks = new Block[4];
        PerformanceMonitor.startActivity("gatherLightInfo");
        // Choose the occlusion sample plane from the dominant normal direction.
        Direction dir = Direction.inDirection(normal);
        switch (dir) {
            case LEFT:
            case RIGHT:
                blocks[0] = worldView.getBlock((vertexPos.x + 0.8f * normal.x), (vertexPos.y + 0.1f), (vertexPos.z + 0.1f));
                blocks[1] = worldView.getBlock((vertexPos.x + 0.8f * normal.x), (vertexPos.y + 0.1f), (vertexPos.z - 0.1f));
                blocks[2] = worldView.getBlock((vertexPos.x + 0.8f * normal.x), (vertexPos.y - 0.1f), (vertexPos.z - 0.1f));
                blocks[3] = worldView.getBlock((vertexPos.x + 0.8f * normal.x), (vertexPos.y - 0.1f), (vertexPos.z + 0.1f));
                break;
            case FORWARD:
            case BACKWARD:
                blocks[0] = worldView.getBlock((vertexPos.x + 0.1f), (vertexPos.y + 0.1f), (vertexPos.z + 0.8f * normal.z));
                blocks[1] = worldView.getBlock((vertexPos.x + 0.1f), (vertexPos.y - 0.1f), (vertexPos.z + 0.8f * normal.z));
                blocks[2] = worldView.getBlock((vertexPos.x - 0.1f), (vertexPos.y - 0.1f), (vertexPos.z + 0.8f * normal.z));
                blocks[3] = worldView.getBlock((vertexPos.x - 0.1f), (vertexPos.y + 0.1f), (vertexPos.z + 0.8f * normal.z));
                break;
            default:
                // UP/DOWN (and any other direction): sample along the y axis.
                blocks[0] = worldView.getBlock((vertexPos.x + 0.1f), (vertexPos.y + 0.8f * normal.y), (vertexPos.z + 0.1f));
                blocks[1] = worldView.getBlock((vertexPos.x + 0.1f), (vertexPos.y + 0.8f * normal.y), (vertexPos.z - 0.1f));
                blocks[2] = worldView.getBlock((vertexPos.x - 0.1f), (vertexPos.y + 0.8f * normal.y), (vertexPos.z - 0.1f));
                blocks[3] = worldView.getBlock((vertexPos.x - 0.1f), (vertexPos.y + 0.8f * normal.y), (vertexPos.z + 0.1f));
                break;
        }
        lights[0] = worldView.getSunlight((vertexPos.x + 0.1f), (vertexPos.y + 0.8f), (vertexPos.z + 0.1f));
        lights[1] = worldView.getSunlight((vertexPos.x + 0.1f), (vertexPos.y + 0.8f), (vertexPos.z - 0.1f));
        lights[2] = worldView.getSunlight((vertexPos.x - 0.1f), (vertexPos.y + 0.8f), (vertexPos.z - 0.1f));
        lights[3] = worldView.getSunlight((vertexPos.x - 0.1f), (vertexPos.y + 0.8f), (vertexPos.z + 0.1f));
        lights[4] = worldView.getSunlight((vertexPos.x + 0.1f), (vertexPos.y - 0.1f), (vertexPos.z + 0.1f));
        lights[5] = worldView.getSunlight((vertexPos.x + 0.1f), (vertexPos.y - 0.1f), (vertexPos.z - 0.1f));
        lights[6] = worldView.getSunlight((vertexPos.x - 0.1f), (vertexPos.y - 0.1f), (vertexPos.z - 0.1f));
        lights[7] = worldView.getSunlight((vertexPos.x - 0.1f), (vertexPos.y - 0.1f), (vertexPos.z + 0.1f));
        blockLights[0] = worldView.getLight((vertexPos.x + 0.1f), (vertexPos.y + 0.8f), (vertexPos.z + 0.1f));
        blockLights[1] = worldView.getLight((vertexPos.x + 0.1f), (vertexPos.y + 0.8f), (vertexPos.z - 0.1f));
        blockLights[2] = worldView.getLight((vertexPos.x - 0.1f), (vertexPos.y + 0.8f), (vertexPos.z - 0.1f));
        blockLights[3] = worldView.getLight((vertexPos.x - 0.1f), (vertexPos.y + 0.8f), (vertexPos.z + 0.1f));
        blockLights[4] = worldView.getLight((vertexPos.x + 0.1f), (vertexPos.y - 0.1f), (vertexPos.z + 0.1f));
        blockLights[5] = worldView.getLight((vertexPos.x + 0.1f), (vertexPos.y - 0.1f), (vertexPos.z - 0.1f));
        blockLights[6] = worldView.getLight((vertexPos.x - 0.1f), (vertexPos.y - 0.1f), (vertexPos.z - 0.1f));
        blockLights[7] = worldView.getLight((vertexPos.x - 0.1f), (vertexPos.y - 0.1f), (vertexPos.z + 0.1f));
        PerformanceMonitor.endActivity();
        float resultLight = 0;
        float resultBlockLight = 0;
        int counterLight = 0;
        int counterBlockLight = 0;
        int occCounter = 0;
        int occCounterBillboard = 0;
        for (int i = 0; i < 8; i++) {
            // Only non-zero samples contribute to the averages below.
            if (lights[i] > 0) {
                resultLight += lights[i];
                counterLight++;
            }
            if (blockLights[i] > 0) {
                resultBlockLight += blockLights[i];
                counterBlockLight++;
            }
            if (i < 4) {
                Block b = blocks[i];
                if (b.isShadowCasting() && !b.isTranslucent()) {
                    occCounter++;
                } else if (b.isShadowCasting()) {
                    occCounterBillboard++;
                }
            }
        }
        double resultAmbientOcclusion = (Math.pow(0.40, occCounter) + Math.pow(0.80, occCounterBillboard)) / 2.0;
        if (counterLight == 0)
            output[0] = 0;
        else
            output[0] = resultLight / counterLight / 15f;
        if (counterBlockLight == 0)
            output[1] = 0;
        else
            output[1] = resultBlockLight / counterBlockLight / 15f;
        output[2] = (float) resultAmbientOcclusion;
        PerformanceMonitor.endActivity();
    }

    /**
     * Appends the geometry for a single block to the mesh: the optional
     * center mesh part, then every side face that is visible given its
     * neighbor. Liquids get extra handling for lowered surfaces.
     *
     * @param temp biome temperature for color offsets.
     * @param hum  biome humidity for color offsets.
     */
    private void generateBlockVertices(WorldView view, ChunkMesh mesh, int x, int y, int z, float temp, float hum) {
        Block block = view.getBlock(x, y, z);
        int vertexFlags = 0;
        // TODO: Needs review since the new per-vertex flags introduce a lot of special scenarios
        if (block.getURI().toString().equals("engine:water")) {
            vertexFlags = ChunkVertexFlags.BLOCK_HINT_WATER.getValue();
        } else if (block.getURI().toString().equals("engine:lava")) {
            vertexFlags = ChunkVertexFlags.BLOCK_HINT_LAVA.getValue();
        } else if (block.isWaving() && block.isDoubleSided()) {
            vertexFlags = ChunkVertexFlags.BLOCK_HINT_WAVING.getValue();
        } else if (block.isWaving() && !block.isDoubleSided()) {
            vertexFlags = ChunkVertexFlags.BLOCK_HINT_WAVING_BLOCK.getValue();
        }
        /*
         * Determine the render process.
         */
        ChunkMesh.RENDER_TYPE renderType = ChunkMesh.RENDER_TYPE.TRANSLUCENT;
        if (!block.isTranslucent())
            renderType = ChunkMesh.RENDER_TYPE.OPAQUE;
        // TODO: Review special case, or alternatively compare uris.
        if (block.getURI().toString().equals("engine:water") || block.getURI().toString().equals("engine:ice"))
            renderType = ChunkMesh.RENDER_TYPE.WATER_AND_ICE;
        if (block.isDoubleSided())
            renderType = ChunkMesh.RENDER_TYPE.BILLBOARD;
        if (block.getMeshPart(BlockPart.CENTER) != null) {
            Vector4f colorOffset = block.calcColorOffsetFor(BlockPart.CENTER, temp, hum);
            block.getMeshPart(BlockPart.CENTER).appendTo(mesh, x, y, z, colorOffset, renderType.getIndex(), vertexFlags);
        }
        // Visibility of each of the six faces against its direct neighbor.
        boolean[] drawDir = new boolean[6];
        for (Side side : Side.values()) {
            Vector3i offset = side.getVector3i();
            Block blockToCheck = view.getBlock(x + offset.x, y + offset.y, z + offset.z);
            drawDir[side.ordinal()] = isSideVisibleForBlockTypes(blockToCheck, block, side);
        }
        // Never render the underside of the bottom-most layer.
        if (y == 0) {
            drawDir[Side.BOTTOM.ordinal()] = false;
        }
        // If the block is lowered, some more faces may have to be drawn
        if (block.isLiquid()) {
            // Draw horizontal sides if visible from below
            for (Side side : Side.horizontalSides()) {
                Vector3i offset = side.getVector3i();
                Block adjacentBelow = view.getBlock(x + offset.x, y - 1, z + offset.z);
                Block adjacent = view.getBlock(x + offset.x, y, z + offset.z);
                Block below = view.getBlock(x, y - 1, z);
                drawDir[side.ordinal()] |= (isSideVisibleForBlockTypes(adjacentBelow, block, side) && !isSideVisibleForBlockTypes(below, adjacent, side.reverse()));
            }
            // Draw the top if below a non-lowered block
            // TODO: Don't need to render the top if each side and the block above each side are either liquid or opaque solids.
            Block blockToCheck = view.getBlock(x, y + 1, z);
            drawDir[Side.TOP.ordinal()] |= !blockToCheck.isLiquid();
            Block bottomBlock = view.getBlock(x, y - 1, z);
            // Over air or more liquid, use the lowered liquid mesh instead of
            // the regular side meshes and stop here.
            if (bottomBlock.isLiquid() || bottomBlock.getId() == 0x0) {
                for (Side dir : Side.values()) {
                    if (drawDir[dir.ordinal()]) {
                        Vector4f colorOffset = block.calcColorOffsetFor(BlockPart.fromSide(dir), temp, hum);
                        block.getLoweredLiquidMesh(dir).appendTo(mesh, x, y, z, colorOffset, renderType.getIndex(), vertexFlags);
                    }
                }
                return;
            }
        }
        for (Side dir : Side.values()) {
            if (drawDir[dir.ordinal()]) {
                Vector4f colorOffset = block.calcColorOffsetFor(BlockPart.fromSide(dir), temp, hum);
                // TODO: Needs review since the new per-vertex flags introduce a lot of special scenarios
                // Don't mask grass on the top or bottom side...
                if (block.getURI().toString().equals("engine:grass")) {
                    vertexFlags = (dir != Side.TOP && dir != Side.BOTTOM) ? ChunkVertexFlags.BLOCK_HINT_GRASS.getValue() : 0;
                }
                block.getMeshPart(BlockPart.fromSide(dir)).appendTo(mesh, x, y, z, colorOffset, renderType.getIndex(), vertexFlags);
            }
        }
    }

    /**
     * Returns true if the side should be rendered adjacent to the second side provided.
     *
     * @param blockToCheck The block to check (the neighbor in the side's direction)
     * @param currentBlock The current block
     * @param side         The side of currentBlock being considered
     * @return True if the side is visible for the given block types
     */
    private boolean isSideVisibleForBlockTypes(Block blockToCheck, Block currentBlock, Side side) {
        if (currentBlock.getMeshPart(BlockPart.fromSide(side)) == null) return false;
        // Liquids can be transparent but there should be no visible adjacent faces
        if (currentBlock.isLiquid() && blockToCheck.isLiquid()) return false;
        // Draw faces adjacent to animated blocks (which are of different types)
        //if (blockToCheck.isWaving() && !blockToCheck.isDoubleSided() && currentBlock.getId() != blockToCheck.getId()) return true;
        // Visible when the neighbor is air (id 0), does not cover the shared
        // face fully, or is translucent while we are not.
        return blockToCheck.getId() == 0x0 ||
                !blockToCheck.isFullSide(side.reverse()) ||
                (!currentBlock.isTranslucent() && blockToCheck.isTranslucent());
    }

    /** Returns how many vertex-array rebuilds have been performed. */
    public static int getVertexArrayUpdateCount() {
        return _statVertexArrayUpdateCount;
    }
}
| |
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.incrementaldeployment;
import android.app.Application;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.util.ArrayMap;
import android.util.Log;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* A stub application that patches the class loader, then replaces itself with the real application
* by applying a liberal amount of reflection on Android internals.
*
* <p>This is, of course, terribly error-prone. Most of this code was tested with API versions
* 8, 10, 14, 15, 16, 17, 18, 19 and 21 on the Android emulator, a Nexus 5 running Lollipop LRX22C
* and a Samsung GT-I5800 running Froyo XWJPE. The exception is {@code monkeyPatchAssetManagers},
* which only works on Kitkat and Lollipop.
*
* <p>Note that due to a bug in Dalvik, this only works on Kitkat if ART is the Java runtime.
*
* <p>Unfortunately, if this does not work, we don't have a fallback mechanism: as soon as we
* build the APK with this class as the Application, we are committed to going through with it.
*
* <p>This class should use as few other classes as possible before the class loader is patched
* because any class loaded before it cannot be incrementally deployed.
*/
public class StubApplication extends Application {
// On-device root under which incrementally deployed dexes/resources are pushed.
private static final String INCREMENTAL_DEPLOYMENT_DIR = "/data/local/tmp/incrementaldeployment";
// Fully qualified class name of the real Application, read from stub_application_data.txt.
private final String realClassName;
// Android package name of the app, read from stub_application_data.txt.
private final String packageName;
// Path to externally deployed resources (resources.ap_ file or "resources" dir);
// presumably populated from getExternalResourceFile() — the assignment is outside this view.
private String externalResourceFile;
// The real Application instance that replaces this stub during monkey-patching.
private Application realApplication;
public StubApplication() {
String[] stubApplicationData = getResourceAsString("stub_application_data.txt").split("\n");
realClassName = stubApplicationData[0];
packageName = stubApplicationData[1];
Log.v("StubApplication", String.format(
"StubApplication created. Android package is %s, real application class is %s.",
packageName, realClassName));
}
/**
 * Locates externally deployed resources for this package: prefers the
 * packed "resources.ap_" file, falls back to a "resources" directory.
 *
 * @return the path found, or null when neither exists.
 */
private String getExternalResourceFile() {
    final String base = INCREMENTAL_DEPLOYMENT_DIR + "/" + packageName + "/";
    String candidate = base + "resources.ap_";
    if (!new File(candidate).isFile()) {
        candidate = base + "resources";
        if (!new File(candidate).isDirectory()) {
            Log.v("StubApplication", "Cannot find external resources, not patching them in");
            return null;
        }
    }
    Log.v("StubApplication", "Found external resources at " + candidate);
    return candidate;
}
/**
 * Lists the paths of all *.dex files deployed for the given package.
 *
 * @param packageName the Android package to look up.
 * @return paths of every .dex file in the package's dex directory.
 * @throws IllegalStateException when the dex directory cannot be listed.
 */
private List<String> getDexList(String packageName) {
    final File dexDirectory =
            new File(INCREMENTAL_DEPLOYMENT_DIR + "/" + packageName + "/dex");
    final File[] dexes = dexDirectory.listFiles();
    if (dexes == null) {
        throw new IllegalStateException(".dex directory does not exist");
    }
    final List<String> result = new ArrayList<>();
    for (File dex : dexes) {
        if (dex.getName().endsWith(".dex")) {
            result.add(dex.getPath());
        }
    }
    return result;
}
/**
 * Reads a classpath resource fully into a UTF-8 string.
 *
 * @param resource the resource name, resolved against this class's loader.
 * @return the resource contents decoded as UTF-8.
 * @throws IllegalStateException when the resource is missing or unreadable.
 */
private String getResourceAsString(String resource) {
    InputStream resourceStream = null;
    // try-with-resources would be much nicer, but that requires SDK level 19, and we want this code
    // to be compatible with earlier Android versions
    try {
        resourceStream = getClass().getClassLoader().getResourceAsStream(resource);
        if (resourceStream == null) {
            // Bug fix: getResourceAsStream returns null for a missing
            // resource, which previously surfaced as a bare NPE on read().
            throw new IllegalStateException("Resource not found: " + resource);
        }
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int length = 0;
        while ((length = resourceStream.read(buffer)) != -1) {
            baos.write(buffer, 0, length);
        }
        String result = new String(baos.toByteArray(), "UTF-8");
        return result;
    } catch (IOException e) {
        throw new IllegalStateException(e);
    } finally {
        if (resourceStream != null) {
            try {
                resourceStream.close();
            } catch (IOException e) {
                // Not much we can do here
            }
        }
    }
}
/**
 * Replaces every reference to this stub Application inside ActivityThread
 * and its LoadedApk/PackageInfo entries with {@code realApplication}, and
 * redirects each package's mResDir to {@code externalResourceFile} when one
 * was found. Relies entirely on reflection into Android internals; see the
 * class comment for the API versions this was tested on.
 *
 * @throws IllegalStateException wrapping any reflection failure, since
 *         there is no fallback once the stub is baked into the APK.
 */
@SuppressWarnings("unchecked") // Lots of conversions with generic types
private void monkeyPatchApplication() {
    // StubApplication is created by reflection in Application#handleBindApplication() ->
    // LoadedApk#makeApplication(), and its return value is used to set the Application field in all
    // sorts of Android internals.
    //
    // Fortunately, Application#onCreate() is called quite soon after, so what we do is monkey
    // patch in the real Application instance in StubApplication#onCreate().
    //
    // A few places directly use the created Application instance (as opposed to the fields it is
    // eventually stored in). Fortunately, it's easy to forward those to the actual real
    // Application class.
    try {
        // Find the ActivityThread instance for the current thread
        Class<?> activityThread = Class.forName("android.app.ActivityThread");
        Method m = activityThread.getMethod("currentActivityThread");
        m.setAccessible(true);
        Object currentActivityThread = m.invoke(null);
        // Find the mInitialApplication field of the ActivityThread to the real application
        Field mInitialApplication = activityThread.getDeclaredField("mInitialApplication");
        mInitialApplication.setAccessible(true);
        Application initialApplication = (Application) mInitialApplication.get(currentActivityThread);
        if (initialApplication == StubApplication.this) {
            mInitialApplication.set(currentActivityThread, realApplication);
        }
        // Replace all instance of the stub application in ActivityThread#mAllApplications with the
        // real one
        Field mAllApplications = activityThread.getDeclaredField("mAllApplications");
        mAllApplications.setAccessible(true);
        List<Application> allApplications = (List<Application>) mAllApplications
            .get(currentActivityThread);
        for (int i = 0; i < allApplications.size(); i++) {
            if (allApplications.get(i) == StubApplication.this) {
                allApplications.set(i, realApplication);
            }
        }
        // Figure out how loaded APKs are stored.
        // API version 8 has PackageInfo, 10 has LoadedApk. 9, I don't know.
        Class<?> loadedApkClass;
        try {
            loadedApkClass = Class.forName("android.app.LoadedApk");
        } catch (ClassNotFoundException e) {
            loadedApkClass = Class.forName("android.app.ActivityThread$PackageInfo");
        }
        Field mApplication = loadedApkClass.getDeclaredField("mApplication");
        mApplication.setAccessible(true);
        Field mResDir = loadedApkClass.getDeclaredField("mResDir");
        mResDir.setAccessible(true);
        // 10 doesn't have this field, 14 does. Fortunately, there are not many Honeycomb devices
        // floating around.
        Field mLoadedApk = null;
        try {
            mLoadedApk = Application.class.getDeclaredField("mLoadedApk");
        } catch (NoSuchFieldException e) {
            // According to testing, it's okay to ignore this.
        }
        // Enumerate all LoadedApk (or PackageInfo) fields in ActivityThread#mPackages and
        // ActivityThread#mResourcePackages and do two things:
        //   - Replace the Application instance in its mApplication field with the real one
        //   - Replace mResDir to point to the external resource file instead of the .apk. This is
        //     used as the asset path for new Resources objects.
        //   - Set Application#mLoadedApk to the found LoadedApk instance
        for (String fieldName : new String[] { "mPackages", "mResourcePackages" }) {
            Field field = activityThread.getDeclaredField(fieldName);
            field.setAccessible(true);
            Object value = field.get(currentActivityThread);
            for (Map.Entry<String, WeakReference<?>> entry :
                    ((Map<String, WeakReference<?>>) value).entrySet()) {
                Object loadedApk = entry.getValue().get();
                // The map holds weak references; skip entries already collected.
                if (loadedApk == null) {
                    continue;
                }
                if (mApplication.get(loadedApk) == StubApplication.this) {
                    mApplication.set(loadedApk, realApplication);
                    if (externalResourceFile != null) {
                        mResDir.set(loadedApk, externalResourceFile);
                    }
                    if (mLoadedApk != null) {
                        mLoadedApk.set(realApplication, loadedApk);
                    }
                }
            }
        }
    } catch (IllegalAccessException | NoSuchFieldException | NoSuchMethodException |
            ClassNotFoundException | InvocationTargetException e) {
        throw new IllegalStateException(e);
    }
}
/**
 * Replaces the {@code AssetManager} of every live {@code Resources} object with a new
 * instance that reads from {@link #externalResourceFile}, so resource lookups resolve
 * against the externally pushed resources instead of the installed .apk.
 * Uses hidden framework APIs via reflection; any reflective failure is fatal.
 */
private void monkeyPatchExistingResources() {
    // No external resource file was found, so there is nothing to patch.
    if (externalResourceFile == null) {
        return;
    }
    try {
        // Create a new AssetManager instance and point it to the resources installed under
        // /sdcard
        AssetManager newAssetManager = AssetManager.class.getConstructor().newInstance();
        Method mAddAssetPath = AssetManager.class.getDeclaredMethod("addAssetPath", String.class);
        mAddAssetPath.setAccessible(true);
        // addAssetPath returns a non-zero "cookie" on success and 0 on failure.
        if (((int) mAddAssetPath.invoke(newAssetManager, externalResourceFile)) == 0) {
            throw new IllegalStateException("Could not create new AssetManager");
        }
        // Kitkat needs this method call, Lollipop doesn't. However, it doesn't seem to cause any harm
        // in L, so we do it unconditionally.
        Method mEnsureStringBlocks = AssetManager.class.getDeclaredMethod("ensureStringBlocks");
        mEnsureStringBlocks.setAccessible(true);
        mEnsureStringBlocks.invoke(newAssetManager);
        // Find the singleton instance of ResourcesManager
        Class<?> clazz = Class.forName("android.app.ResourcesManager");
        Method mGetInstance = clazz.getDeclaredMethod("getInstance");
        mGetInstance.setAccessible(true);
        Object resourcesManager = mGetInstance.invoke(null);
        Field mAssets = Resources.class.getDeclaredField("mAssets");
        mAssets.setAccessible(true);
        // Iterate over all known Resources objects
        Field fMActiveResources = clazz.getDeclaredField("mActiveResources");
        fMActiveResources.setAccessible(true);
        @SuppressWarnings("unchecked")
        ArrayMap<?, WeakReference<Resources>> arrayMap =
                (ArrayMap<?, WeakReference<Resources>>) fMActiveResources.get(resourcesManager);
        for (WeakReference<Resources> wr : arrayMap.values()) {
            Resources resources = wr.get();
            // Set the AssetManager of the Resources instance to our brand new one
            mAssets.set(resources, newAssetManager);
            // Force the Resources object to rebuild its internal state from the new assets.
            resources.updateConfiguration(resources.getConfiguration(), resources.getDisplayMetrics());
        }
    } catch (IllegalAccessException | NoSuchFieldException | NoSuchMethodException |
            ClassNotFoundException | InvocationTargetException | InstantiationException e) {
        throw new IllegalStateException(e);
    }
}
/**
 * Injects the incrementally deployed dex files into this process's class loader and then
 * instantiates the real {@code Application} subclass named by {@link #realClassName},
 * storing it in {@link #realApplication}.
 *
 * @param codeCacheDir directory used by the class loader for optimized dex output
 */
private void instantiateRealApplication(String codeCacheDir) {
    // Locate the externally pushed resource file up front; the patching steps use it later.
    externalResourceFile = getExternalResourceFile();

    // Make the freshly deployed dex files visible to the stub's class loader.
    IncrementalClassLoader.inject(
        StubApplication.class.getClassLoader(),
        packageName,
        codeCacheDir,
        getDexList(packageName));

    try {
        @SuppressWarnings("unchecked")
        Class<? extends Application> applicationClass =
            (Class<? extends Application>) Class.forName(realClassName);
        // Instantiate via the no-arg constructor, exactly as the framework would.
        realApplication = applicationClass.getConstructor().newInstance();
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
@Override
protected void attachBaseContext(Context context) {
    instantiateRealApplication(context.getCacheDir().getPath());

    // This is called from ActivityThread#handleBindApplication() -> LoadedApk#makeApplication().
    // Application#mApplication is changed right after this call, so the monkey patching cannot
    // happen here yet; instead, just forward the callback to the real Application instance.
    super.attachBaseContext(context);
    try {
        // attachBaseContext is protected, so invoke it reflectively on the real instance.
        Method attach =
            ContextWrapper.class.getDeclaredMethod("attachBaseContext", Context.class);
        attach.setAccessible(true);
        attach.invoke(realApplication, context);
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
@Override
public void onCreate() {
    // By now Application#mApplication has been set by the framework, so the bookkeeping in
    // ActivityThread/LoadedApk can be rewritten to point at the real Application.
    monkeyPatchApplication();
    // Swap in the AssetManager backed by the external resource file, if one exists.
    monkeyPatchExistingResources();
    super.onCreate();
    // Forward the lifecycle callback to the real Application instance.
    realApplication.onCreate();
}
}
| |
/**
*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*
*/
package com.microsoft.azure.management.compute.samples;
import com.jcraft.jsch.JSchException;
import com.microsoft.azure.management.Azure;
import com.microsoft.azure.management.compute.CachingTypes;
import com.microsoft.azure.management.compute.Disk;
import com.microsoft.azure.management.compute.DiskSkuTypes;
import com.microsoft.azure.management.compute.KnownLinuxVirtualMachineImage;
import com.microsoft.azure.management.compute.OperatingSystemTypes;
import com.microsoft.azure.management.compute.Snapshot;
import com.microsoft.azure.management.compute.VirtualMachine;
import com.microsoft.azure.management.compute.VirtualMachineCustomImage;
import com.microsoft.azure.management.compute.VirtualMachineDataDisk;
import com.microsoft.azure.management.compute.VirtualMachineScaleSet;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetSkuTypes;
import com.microsoft.azure.management.compute.VirtualMachineSizeTypes;
import com.microsoft.azure.management.network.LoadBalancer;
import com.microsoft.azure.management.network.Network;
import com.microsoft.azure.management.network.PublicIpAddress;
import com.microsoft.azure.management.network.TransportProtocol;
import com.microsoft.azure.management.resources.fluentcore.arm.Region;
import com.microsoft.azure.management.resources.fluentcore.utils.SdkContext;
import com.microsoft.azure.management.samples.SSHShell;
import com.microsoft.azure.management.samples.Utils;
import com.microsoft.rest.LogLevel;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * This sample will not be published; it exists just to ensure our blog is honest.
 */
public final class ManageManagedDisks {
    /**
     * Main function which runs the actual sample.
     * @param azure instance of the azure client
     * @return true if sample runs successfully
     */
    public static boolean runSample(Azure azure) {
        final Region region = Region.US_EAST;
        final String rgName = Utils.createRandomName("rgCOMV");
        final String userName = "tirekicker";
        // NOTE(review): sample-only SSH public key; harmless to publish, but do not reuse.
        final String sshkey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfSPC2K7LZcFKEO+/t3dzmQYtrJFZNxOsbVgOVKietqHyvmYGHEC0J2wPdAqQ/63g/hhAEFRoyehM+rbeDri4txB3YFfnOK58jqdkyXzupWqXzOrlKY4Wz9SKjjN765+dqUITjKRIaAip1Ri137szRg71WnrmdP3SphTRlCx1Bk2nXqWPsclbRDCiZeF8QOTi4JqbmJyK5+0UqhqYRduun8ylAwKKQJ1NJt85sYIHn9f1Rfr6Tq2zS0wZ7DHbZL+zB5rSlAr8QyUdg/GQD+cmSs6LvPJKL78d6hMGk84ARtFo4A79ovwX/Fj01znDQkU6nJildfkaolH2rWFG/qttD azjava@javalib.com";
        try {
            // ::==Create a VM
            // Create a virtual machine with an implicit Managed OS disk and explicit Managed data disk
            System.out.println("Creating VM [with an implicit Managed OS disk and explicit Managed data disk]");
            final String linuxVM1Name = SdkContext.randomResourceName("vm" + "-", 18);
            final String linuxVM1Pip = SdkContext.randomResourceName("pip" + "-", 18);
            VirtualMachine linuxVM1 = azure.virtualMachines()
                    .define(linuxVM1Name)
                    .withRegion(region)
                    .withNewResourceGroup(rgName)
                    .withNewPrimaryNetwork("10.0.0.0/28")
                    .withPrimaryPrivateIpAddressDynamic()
                    .withNewPrimaryPublicIpAddress(linuxVM1Pip)
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername(userName)
                    .withSsh(sshkey)
                    .withNewDataDisk(100)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();
            System.out.println("Created VM [with an implicit Managed OS disk and explicit Managed data disk]");
            // Creation is simplified with implicit creation of managed disks without specifying all the
            // disk details. You will notice that you do not require storage accounts.
            // ::== Update the VM
            // Create a VMSS with implicit managed OS disks and explicit managed data disks
            System.out.println("Creating VMSS [with implicit managed OS disks and explicit managed data disks]");
            final String vmScaleSetName = SdkContext.randomResourceName("vmss" + "-", 18);
            VirtualMachineScaleSet vmScaleSet = azure.virtualMachineScaleSets()
                    .define(vmScaleSetName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D5_V2)
                    .withExistingPrimaryNetworkSubnet(prepareNetwork(azure, region, rgName), "subnet1")
                    .withExistingPrimaryInternetFacingLoadBalancer(prepareLoadBalancer(azure, region, rgName))
                    .withoutPrimaryInternalLoadBalancer()
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername("tirekicker")
                    .withSsh(sshkey)
                    .withNewDataDisk(100)
                    .withNewDataDisk(100, 1, CachingTypes.READ_WRITE)
                    .withNewDataDisk(100, 2, CachingTypes.READ_ONLY)
                    .withCapacity(3)
                    .create();
            System.out.println("Created VMSS [with implicit managed OS disks and explicit managed data disks]");
            // Create an empty disk and attach to a VM (Manage Virtual Machine With Disk)
            System.out.println("Creating empty data disk [to attach to a VM]");
            final String diskName = SdkContext.randomResourceName("dsk" + "-", 18);
            Disk dataDisk = azure.disks().define(diskName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withData()
                    .withSizeInGB(50)
                    .create();
            System.out.println("Created empty data disk [to attach to a VM]");
            System.out.println("Creating VM [with new managed data disks and disk attached]");
            final String linuxVM2Name = SdkContext.randomResourceName("vm" + "-", 10);
            final String linuxVM2Pip = SdkContext.randomResourceName("pip" + "-", 18);
            VirtualMachine linuxVM2 = azure.virtualMachines().define(linuxVM2Name)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withNewPrimaryNetwork("10.0.0.0/28")
                    .withPrimaryPrivateIpAddressDynamic()
                    .withNewPrimaryPublicIpAddress(linuxVM2Pip)
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername(userName)
                    .withSsh(sshkey)
                    // Begin: Managed data disks
                    .withNewDataDisk(100)
                    .withNewDataDisk(100, 1, CachingTypes.READ_WRITE)
                    .withExistingDataDisk(dataDisk)
                    // End: Managed data disks
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();
            System.out.println("Created VM [with new managed data disks and disk attached]");
            // Update the VM: detaching requires the VM to be deallocated first.
            System.out.println("De-allocating VM");
            linuxVM2.deallocate();
            System.out.println("De-allocated VM");
            System.out.println("Updating VM [by detaching a disk and adding empty disk]");
            linuxVM2.update()
                    .withoutDataDisk(2)
                    .withNewDataDisk(200)
                    .apply();
            System.out.println("Updated VM [by detaching a disk and adding empty disk]");
            // Create a VM from an image (Create Virtual Machine Using Custom Image from VM)
            System.out.println("Preparing specialized virtual machine with un-managed disk");
            final VirtualMachine linuxVM = prepareSpecializedUnmanagedVirtualMachine(azure, region, rgName);
            System.out.println("Prepared specialized virtual machine with un-managed disk");
            System.out.println("Creating custom image from specialized virtual machine");
            final String customImageName = SdkContext.randomResourceName("cimg" + "-", 10);
            VirtualMachineCustomImage virtualMachineCustomImage = azure.virtualMachineCustomImages()
                    .define(customImageName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .fromVirtualMachine(linuxVM) // from a deallocated and generalized VM
                    .create();
            System.out.println("Created custom image from specialized virtual machine");
            System.out.println("Creating VM [from custom image]");
            final String linuxVM3Name = SdkContext.randomResourceName("vm" + "-", 10);
            VirtualMachine linuxVM3 = azure.virtualMachines().define(linuxVM3Name)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withNewPrimaryNetwork("10.0.0.0/28")
                    .withPrimaryPrivateIpAddressDynamic()
                    .withoutPrimaryPublicIpAddress()
                    .withLinuxCustomImage(virtualMachineCustomImage.id())
                    .withRootUsername(userName)
                    .withSsh(sshkey)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();
            System.out.println("Created VM [from custom image]");
            // Create a VM from a VHD (Create Virtual Machine Using Specialized VHD)
            // The source VM must be deleted first so its OS VHD can be re-attached.
            final String linuxVmName4 = SdkContext.randomResourceName("vm" + "-", 10);
            final String specializedVhd = linuxVM.osUnmanagedDiskVhdUri();
            azure.virtualMachines().deleteById(linuxVM.id());
            System.out.println("Creating VM [by attaching un-managed disk]");
            VirtualMachine linuxVM4 = azure.virtualMachines().define(linuxVmName4)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withNewPrimaryNetwork("10.0.0.0/28")
                    .withPrimaryPrivateIpAddressDynamic()
                    .withoutPrimaryPublicIpAddress()
                    .withSpecializedOsUnmanagedDisk(specializedVhd, OperatingSystemTypes.LINUX)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();
            System.out.println("Created VM [by attaching un-managed disk]");
            // Create a Snapshot (Create Virtual Machine Using Specialized Disks from Snapshot)
            System.out.println("Preparing specialized virtual machine with managed disks");
            final VirtualMachine linuxVM5 = prepareSpecializedManagedVirtualMachine(azure, region, rgName);
            Disk osDisk = azure.disks().getById(linuxVM5.osDiskId());
            List<Disk> dataDisks = new ArrayList<>();
            for (VirtualMachineDataDisk disk : linuxVM5.dataDisks().values()) {
                Disk d = azure.disks().getById(disk.id());
                dataDisks.add(d);
            }
            System.out.println("Prepared specialized virtual machine with managed disks");
            // Delete the VM; the managed disks survive and are snapshotted below.
            System.out.println("Deleting VM: " + linuxVM5.id());
            azure.virtualMachines().deleteById(linuxVM5.id());
            System.out.println("Deleted the VM: " + linuxVM5.id());
            System.out.println("Creating snapshot [from managed OS disk]");
            // Create a managed snapshot for an OS disk
            final String managedOSSnapshotName = SdkContext.randomResourceName("snp" + "-", 10);
            Snapshot osSnapshot = azure.snapshots().define(managedOSSnapshotName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withLinuxFromDisk(osDisk)
                    .create();
            System.out.println("Created snapshot [from managed OS disk]");
            System.out.println("Creating managed OS disk [from snapshot]");
            // Create a managed disk from the managed snapshot for the OS disk
            final String managedNewOSDiskName = SdkContext.randomResourceName("dsk" + "-", 10);
            Disk newOSDisk = azure.disks().define(managedNewOSDiskName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withLinuxFromSnapshot(osSnapshot)
                    .withSizeInGB(100)
                    .create();
            System.out.println("Created managed OS disk [from snapshot]");
            System.out.println("Creating managed data snapshot [from managed data disk]");
            // Create a managed snapshot for a data disk
            final String managedDataDiskSnapshotName = SdkContext.randomResourceName("dsk" + "-", 10);
            Snapshot dataSnapshot = azure.snapshots().define(managedDataDiskSnapshotName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withDataFromDisk(dataDisks.get(0))
                    .withSku(DiskSkuTypes.STANDARD_LRS)
                    .create();
            System.out.println("Created managed data snapshot [from managed data disk]");
            System.out.println("Creating managed data disk [from managed snapshot]");
            // Create a managed disk from the managed snapshot for the data disk
            final String managedNewDataDiskName = SdkContext.randomResourceName("dsk" + "-", 10);
            Disk newDataDisk = azure.disks().define(managedNewDataDiskName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withData()
                    .fromSnapshot(dataSnapshot)
                    .create();
            System.out.println("Created managed data disk [from managed snapshot]");
            System.out.println("Creating VM [with specialized OS managed disk]");
            final String linuxVm6Name = SdkContext.randomResourceName("vm" + "-", 10);
            VirtualMachine linuxVM6 = azure.virtualMachines().define(linuxVm6Name)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withNewPrimaryNetwork("10.0.0.0/28")
                    .withPrimaryPrivateIpAddressDynamic()
                    .withoutPrimaryPublicIpAddress()
                    .withSpecializedOsDisk(newOSDisk, OperatingSystemTypes.LINUX)
                    .withExistingDataDisk(newDataDisk)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();
            System.out.println("Created VM [with specialized OS managed disk]");
            // ::== Migrate a VM to managed disks with a single reboot
            System.out.println("Creating VM [with un-managed disk for migration]");
            final String linuxVM7Name = SdkContext.randomResourceName("vm" + "-", 10);
            final String linuxVM7Pip = SdkContext.randomResourceName("pip" + "-", 18);
            VirtualMachine linuxVM7 = azure.virtualMachines().define(linuxVM7Name)
                    .withRegion(region)
                    .withNewResourceGroup(rgName)
                    .withNewPrimaryNetwork("10.0.0.0/28")
                    .withPrimaryPrivateIpAddressDynamic()
                    .withNewPrimaryPublicIpAddress(linuxVM7Pip)
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername("tirekicker")
                    .withSsh(sshkey)
                    .withUnmanagedDisks() // uses storage accounts
                    .withNewUnmanagedDataDisk(100)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();
            System.out.println("Created VM [with un-managed disk for migration]");
            // Conversion to managed disks requires the VM to be deallocated.
            System.out.println("De-allocating VM :" + linuxVM7.id());
            linuxVM7.deallocate();
            System.out.println("De-allocated VM :" + linuxVM7.id());
            System.out.println("Migrating VM");
            linuxVM7.convertToManaged();
            System.out.println("Migrated VM");
            return true;
        } catch (Exception f) {
            System.out.println(f.getMessage());
            f.printStackTrace();
        } finally {
            // Best-effort cleanup: delete everything created by this sample.
            try {
                System.out.println("Deleting Resource Group: " + rgName);
                azure.resourceGroups().deleteByName(rgName);
                System.out.println("Deleted Resource Group: " + rgName);
            } catch (NullPointerException npe) {
                System.out.println("Did not create any resources in Azure. No clean up is necessary");
            } catch (Exception g) {
                g.printStackTrace();
            }
        }
        return false;
    }
    /**
     * Main entry point.
     * @param args the parameters
     */
    public static void main(String[] args) {
        try {
            //=============================================================
            // Authenticate using the credential file pointed to by AZURE_AUTH_LOCATION.
            final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION"));
            Azure azure = Azure.configure()
                    .withLogLevel(LogLevel.BASIC)
                    .authenticate(credFile)
                    .withDefaultSubscription();
            // Print selected subscription
            System.out.println("Selected subscription: " + azure.subscriptionId());
            runSample(azure);
        } catch (Exception e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
    }
    /**
     * Creates a Linux VM backed by un-managed (storage-account) disks, de-provisions the
     * agent, then deallocates and generalizes it so it can serve as a custom-image source.
     */
    private static VirtualMachine prepareSpecializedUnmanagedVirtualMachine(Azure azure, Region region, String rgName) {
        final String userName = "tirekicker";
        final String password = "12NewPA$$w0rd!";
        final String linuxVmName1 = SdkContext.randomResourceName("vm" + "-", 10);
        final String publicIpDnsLabel = SdkContext.randomResourceName("pip" + "-", 20);
        VirtualMachine linuxVM = azure.virtualMachines().define(linuxVmName1)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withNewPrimaryNetwork("10.0.0.0/28")
                .withPrimaryPrivateIpAddressDynamic()
                .withNewPrimaryPublicIpAddress(publicIpDnsLabel)
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername(userName)
                .withRootPassword(password)
                .withUnmanagedDisks()
                .defineUnmanagedDataDisk("disk-1")
                    .withNewVhd(100)
                    .withLun(1)
                    .attach()
                .defineUnmanagedDataDisk("disk-2")
                    .withNewVhd(50)
                    .withLun(2)
                    .attach()
                .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                .create();
        // De-provision the virtual machine
        deprovisionAgentInLinuxVM(linuxVM.getPrimaryPublicIpAddress().fqdn(), 22, userName, password);
        System.out.println("Deallocate VM: " + linuxVM.id());
        linuxVM.deallocate();
        System.out.println("Deallocated VM: " + linuxVM.id() + "; state = " + linuxVM.powerState());
        System.out.println("Generalize VM: " + linuxVM.id());
        linuxVM.generalize();
        System.out.println("Generalized VM: " + linuxVM.id());
        return linuxVM;
    }
    /**
     * Creates a Linux VM backed by managed disks, de-provisions the agent, then deallocates
     * and generalizes it so its disks can be snapshotted as "specialized" sources.
     */
    private static VirtualMachine prepareSpecializedManagedVirtualMachine(Azure azure, Region region, String rgName) {
        final String userName = "tirekicker";
        final String password = "12NewPA$$w0rd!";
        final String linuxVmName1 = SdkContext.randomResourceName("vm" + "-", 10);
        final String publicIpDnsLabel = SdkContext.randomResourceName("pip" + "-", 20);
        VirtualMachine linuxVM = azure.virtualMachines().define(linuxVmName1)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withNewPrimaryNetwork("10.0.0.0/28")
                .withPrimaryPrivateIpAddressDynamic()
                .withNewPrimaryPublicIpAddress(publicIpDnsLabel)
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername(userName)
                .withRootPassword(password)
                .withNewDataDisk(100)
                .withNewDataDisk(200)
                .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                .create();
        // De-provision the virtual machine
        deprovisionAgentInLinuxVM(linuxVM.getPrimaryPublicIpAddress().fqdn(), 22, userName, password);
        System.out.println("Deallocate VM: " + linuxVM.id());
        linuxVM.deallocate();
        System.out.println("Deallocated VM: " + linuxVM.id() + "; state = " + linuxVM.powerState());
        System.out.println("Generalize VM: " + linuxVM.id());
        linuxVM.generalize();
        System.out.println("Generalized VM: " + linuxVM.id());
        return linuxVM;
    }
    /**
     * SSHes into the VM and runs {@code waagent -deprovision+user} to strip machine-specific
     * state before generalization. Failures are logged and otherwise ignored (best effort).
     */
    private static void deprovisionAgentInLinuxVM(String host, int port, String userName, String password) {
        SSHShell shell = null;
        try {
            System.out.println("Trying to de-provision: " + host);
            shell = SSHShell.open(host, port, userName, password);
            List<String> deprovisionCommand = new ArrayList<>();
            deprovisionCommand.add("sudo waagent -deprovision+user --force");
            String output = shell.runCommands(deprovisionCommand);
            System.out.println(output);
        } catch (JSchException jSchException) {
            System.out.println(jSchException.getMessage());
        } catch (IOException ioException) {
            System.out.println(ioException.getMessage());
        } catch (Exception exception) {
            System.out.println(exception.getMessage());
        } finally {
            if (shell != null) {
                shell.close();
            }
        }
    }
    /** Creates the virtual network (with one subnet) used by the scale set sample. */
    private static Network prepareNetwork(Azure azure, Region region, String rgName) {
        final String vnetName = SdkContext.randomResourceName("vnet", 24);
        Network network = azure.networks().define(vnetName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withAddressSpace("172.16.0.0/16")
                .defineSubnet("subnet1")
                    .withAddressPrefix("172.16.1.0/24")
                    .attach()
                .create();
        return network;
    }
    /**
     * Creates an Internet-facing load balancer with two backends, HTTP/HTTPS probes and
     * rules, plus NAT pools for SSH (port 22) and TELNET (port 23) access to the VMs.
     */
    private static LoadBalancer prepareLoadBalancer(Azure azure, Region region, String rgName) {
        final String loadBalancerName1 = SdkContext.randomResourceName("intlb" + "-", 18);
        final String frontendName = loadBalancerName1 + "-FE1";
        final String backendPoolName1 = loadBalancerName1 + "-BAP1";
        final String backendPoolName2 = loadBalancerName1 + "-BAP2";
        final String httpProbe = "httpProbe";
        final String httpsProbe = "httpsProbe";
        final String httpLoadBalancingRule = "httpRule";
        final String httpsLoadBalancingRule = "httpsRule";
        final String natPool50XXto22 = "natPool50XXto22";
        final String natPool60XXto23 = "natPool60XXto23";
        final String publicIpName = "pip-" + loadBalancerName1;
        PublicIpAddress publicIpAddress = azure.publicIpAddresses().define(publicIpName)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withLeafDomainLabel(publicIpName)
                .create();
        LoadBalancer loadBalancer = azure.loadBalancers().define(loadBalancerName1)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .definePublicFrontend(frontendName)
                    .withExistingPublicIpAddress(publicIpAddress)
                    .attach()
                // Add two backends, one per rule
                .defineBackend(backendPoolName1)
                    .attach()
                .defineBackend(backendPoolName2)
                    .attach()
                // Add two probes, one per rule
                .defineHttpProbe(httpProbe)
                    .withRequestPath("/")
                    .withPort(80)
                    .attach()
                .defineHttpProbe(httpsProbe)
                    .withRequestPath("/")
                    .withPort(443)
                    .attach()
                // Add two rules that use the above backends and probes
                .defineLoadBalancingRule(httpLoadBalancingRule)
                    .withProtocol(TransportProtocol.TCP)
                    .withFrontend(frontendName)
                    .withFrontendPort(80)
                    .withProbe(httpProbe)
                    .withBackend(backendPoolName1)
                    .attach()
                .defineLoadBalancingRule(httpsLoadBalancingRule)
                    .withProtocol(TransportProtocol.TCP)
                    .withFrontend(frontendName)
                    .withFrontendPort(443)
                    .withProbe(httpsProbe)
                    .withBackend(backendPoolName2)
                    .attach()
                // Add nat pools to enable direct VM connectivity for
                // SSH to port 22 and TELNET to port 23
                .defineInboundNatPool(natPool50XXto22)
                    .withProtocol(TransportProtocol.TCP)
                    .withFrontend(frontendName)
                    .withFrontendPortRange(5000, 5099)
                    .withBackendPort(22)
                    .attach()
                .defineInboundNatPool(natPool60XXto23)
                    .withProtocol(TransportProtocol.TCP)
                    .withFrontend(frontendName)
                    .withFrontendPortRange(6000, 6099)
                    .withBackendPort(23)
                    .attach()
                .create();
        return loadBalancer;
    }
    // Utility class: prevent instantiation.
    private ManageManagedDisks() {
    }
}
| |
/**
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package parquet.io;
import java.util.Arrays;
import java.util.List;
import parquet.Log;
import parquet.column.ColumnWriteStore;
import parquet.column.ColumnWriter;
import parquet.column.impl.ColumnReadStoreImpl;
import parquet.column.page.PageReadStore;
import parquet.io.api.Binary;
import parquet.io.api.RecordConsumer;
import parquet.io.api.RecordMaterializer;
import parquet.schema.MessageType;
/**
* Message level of the IO structure
*
*
* @author Julien Le Dem
*
*/
public class MessageColumnIO extends GroupColumnIO {
    private static final Log logger = Log.getLog(MessageColumnIO.class);
    private static final boolean DEBUG = Log.DEBUG;

    // Primitive (leaf) columns of the schema, indexed by their column id; set via setLeaves().
    private List<PrimitiveColumnIO> leaves;

    // Whether records are validated against the schema when reading/writing.
    private final boolean validating;

    MessageColumnIO(MessageType messageType, boolean validating) {
        super(messageType, null, 0);
        this.validating = validating;
    }

    public List<String[]> getColumnNames() {
        return super.getColumnNames();
    }

    public <T> RecordReader<T> getRecordReader(PageReadStore columns, RecordMaterializer<T> recordMaterializer) {
        return new RecordReaderImplementation<T>(this, recordMaterializer, validating, new ColumnReadStoreImpl(columns));
    }

    /**
     * RecordConsumer that shreds incoming records into columns, tracking the current
     * repetition level (r) and field index per nesting level as the record is traversed.
     */
    private class MessageColumnIORecordConsumer extends RecordConsumer {
        // The ColumnIO node currently being visited in the schema tree.
        private ColumnIO currentColumnIO;
        // Current nesting depth within the record (0 = message level).
        private int currentLevel = 0;
        // currentIndex[level]: index of the next expected child field at that level.
        private final int[] currentIndex;
        // r[level]: repetition level to use for the next value written at that level.
        private final int[] r;
        // Column writers indexed by leaf column id.
        private final ColumnWriter[] columnWriter;

        public MessageColumnIORecordConsumer(ColumnWriteStore columns) {
            int maxDepth = 0;
            this.columnWriter = new ColumnWriter[MessageColumnIO.this.getLeaves().size()];
            for (PrimitiveColumnIO primitiveColumnIO : MessageColumnIO.this.getLeaves()) {
                maxDepth = Math.max(maxDepth, primitiveColumnIO.getFieldPath().length);
                columnWriter[primitiveColumnIO.getId()] = columns.getColumnWriter(primitiveColumnIO.getColumnDescriptor());
            }
            // Per-level state arrays sized by the deepest field path in the schema.
            currentIndex = new int[maxDepth];
            r = new int[maxDepth];
        }

        // Debug helper: logs the current traversal state and sanity-checks the repetition level.
        public void printState() {
            log(currentLevel+", "+currentIndex[currentLevel]+": "+Arrays.toString(currentColumnIO.getFieldPath())+" r:"+r[currentLevel]);
            if (r[currentLevel] > currentColumnIO.getRepetitionLevel()) {
                // sanity check
                throw new InvalidRecordException(r[currentLevel]+"(r) > "+currentColumnIO.getRepetitionLevel()+" ( schema r)");
            }
        }

        // Logs a message indented proportionally to the current nesting depth.
        private void log(Object m) {
            String indent = "";
            for (int i = 0; i<currentLevel; ++i) {
                indent += "  ";
            }
            logger.debug(indent + m);
        }

        @Override
        public void startMessage() {
            if (DEBUG) log("< MESSAGE START >");
            // Reset traversal state to the root of the schema.
            currentColumnIO = MessageColumnIO.this;
            r[0] = 0;
            currentIndex[0] = 0;
            if (DEBUG) printState();
        }

        @Override
        public void endMessage() {
            // Emit nulls for any top-level fields that were never written in this record.
            writeNullForMissingFields(((GroupColumnIO)currentColumnIO).getChildrenCount() - 1);
            if (DEBUG) log("< MESSAGE END >");
            if (DEBUG) printState();
        }

        @Override
        public void startField(String field, int index) {
            try {
                if (DEBUG) log("startField("+field+", "+index+")");
                // Fields must arrive in schema order; null out any skipped siblings first.
                writeNullForMissingFields(index - 1);
                currentColumnIO = ((GroupColumnIO)currentColumnIO).getChild(index);
                currentIndex[currentLevel] = index;
                if (DEBUG) printState();
            } catch (RuntimeException e) {
                throw new ParquetEncodingException("error starting field " + field + " at " + index, e);
            }
        }

        // Writes nulls for all children from the current index up to 'to' (inclusive),
        // using the current repetition level and the parent's definition level.
        private void writeNullForMissingFields(final int to) {
            final int from = currentIndex[currentLevel];
            for (;currentIndex[currentLevel]<=to; ++currentIndex[currentLevel]) {
                try {
                    ColumnIO undefinedField = ((GroupColumnIO)currentColumnIO).getChild(currentIndex[currentLevel]);
                    int d = currentColumnIO.getDefinitionLevel();
                    if (DEBUG) log(Arrays.toString(undefinedField.getFieldPath())+".writeNull("+r[currentLevel]+","+d+")");
                    writeNull(undefinedField, r[currentLevel], d);
                } catch (RuntimeException e) {
                    throw new ParquetEncodingException("error while writing nulls from " + from + " to " + to + ". current index: "+currentIndex[currentLevel], e);
                }
            }
        }

        // Recursively writes a null at (r, d) to every leaf column under the given field.
        private void writeNull(ColumnIO undefinedField, int r, int d) {
            if (undefinedField.getType().isPrimitive()) {
                columnWriter[((PrimitiveColumnIO)undefinedField).getId()].writeNull(r, d);
            } else {
                GroupColumnIO groupColumnIO = (GroupColumnIO)undefinedField;
                int childrenCount = groupColumnIO.getChildrenCount();
                for (int i = 0; i < childrenCount; i++) {
                    writeNull(groupColumnIO.getChild(i), r, d);
                }
            }
        }

        // After a value is written, subsequent repeats use the current node's repetition level.
        private void setRepetitionLevel() {
            r[currentLevel] = currentColumnIO.getRepetitionLevel();
            if (DEBUG) log("r: "+r[currentLevel]);
        }

        @Override
        public void endField(String field, int index) {
            if (DEBUG) log("endField("+field+", "+index+")");
            currentColumnIO = currentColumnIO.getParent();
            // Advance past this field; restore the parent's repetition level.
            currentIndex[currentLevel] = index + 1;
            r[currentLevel] = currentLevel == 0 ? 0 : r[currentLevel - 1];
            if (DEBUG) printState();
        }

        @Override
        public void startGroup() {
            if (DEBUG) log("startGroup()");
            // Descend one level, inheriting the parent's repetition level.
            ++ currentLevel;
            r[currentLevel] = r[currentLevel - 1];
            currentIndex[currentLevel] = 0;
            if (DEBUG) printState();
        }

        @Override
        public void endGroup() {
            if (DEBUG) log("endGroup()");
            // Null out any of the group's fields that were never written, then ascend.
            int lastIndex = ((GroupColumnIO)currentColumnIO).getChildrenCount() - 1;
            writeNullForMissingFields(lastIndex);
            -- currentLevel;
            setRepetitionLevel();
            if (DEBUG) printState();
        }

        @Override
        public void addInteger(int value) {
            if (DEBUG) log("addInt("+value+")");
            getColumnWriter().write(value, r[currentLevel], currentColumnIO.getDefinitionLevel());
            setRepetitionLevel();
            if (DEBUG) printState();
        }

        // Returns the writer for the current (primitive) column.
        private ColumnWriter getColumnWriter() {
            return columnWriter[((PrimitiveColumnIO)currentColumnIO).getId()];
        }

        @Override
        public void addLong(long value) {
            if (DEBUG) log("addLong("+value+")");
            getColumnWriter().write(value, r[currentLevel], currentColumnIO.getDefinitionLevel());
            setRepetitionLevel();
            if (DEBUG) printState();
        }

        @Override
        public void addBoolean(boolean value) {
            if (DEBUG) log("addBoolean("+value+")");
            getColumnWriter().write(value, r[currentLevel], currentColumnIO.getDefinitionLevel());
            setRepetitionLevel();
            if (DEBUG) printState();
        }

        @Override
        public void addBinary(Binary value) {
            if (DEBUG) log("addBinary("+value.length()+" bytes)");
            getColumnWriter().write(value, r[currentLevel], currentColumnIO.getDefinitionLevel());
            setRepetitionLevel();
            if (DEBUG) printState();
        }

        @Override
        public void addFloat(float value) {
            if (DEBUG) log("addFloat("+value+")");
            getColumnWriter().write(value, r[currentLevel], currentColumnIO.getDefinitionLevel());
            setRepetitionLevel();
            if (DEBUG) printState();
        }

        @Override
        public void addDouble(double value) {
            if (DEBUG) log("addDouble("+value+")");
            getColumnWriter().write(value, r[currentLevel], currentColumnIO.getDefinitionLevel());
            setRepetitionLevel();
            if (DEBUG) printState();
        }
    }

    /**
     * Returns a RecordConsumer that writes records to the given column store,
     * optionally wrapped with logging (DEBUG) and schema validation (validating).
     */
    public RecordConsumer getRecordWriter(ColumnWriteStore columns) {
        RecordConsumer recordWriter = new MessageColumnIORecordConsumer(columns);
        if (DEBUG) recordWriter = new RecordConsumerLoggingWrapper(recordWriter);
        return validating ? new ValidatingRecordConsumer(recordWriter, getType()) : recordWriter;
    }

    // Computes repetition/definition levels for the whole tree starting at the root.
    void setLevels() {
        setLevels(0, 0, new String[0], new int[0], Arrays.<ColumnIO>asList(this), Arrays.<ColumnIO>asList(this));
    }

    void setLeaves(List<PrimitiveColumnIO> leaves) {
        this.leaves = leaves;
    }

    public List<PrimitiveColumnIO> getLeaves() {
        return this.leaves;
    }

    @Override
    public MessageType getType() {
        return (MessageType)super.getType();
    }
}
| |
package org.knowm.xchange.dto;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.knowm.xchange.currency.CurrencyPair;
/**
 * Data object representing an order.
 *
 * <p>Identity ({@link #equals(Object)} / {@link #hashCode()}) is based on the immutable fields
 * (type, original amount, currency pair, id, timestamp); mutable execution state (status, fills,
 * fee, flags, leverage) is deliberately excluded.
 */
public abstract class Order implements Serializable {

  private static final long serialVersionUID = -8132103343647993249L;

  /** Order type i.e. bid or ask */
  private final OrderType type;

  /** Amount to be ordered / amount that was ordered */
  private final BigDecimal originalAmount;

  /** The currency pair */
  private final CurrencyPair currencyPair;

  /** An identifier that uniquely identifies the order */
  private final String id;

  /** The timestamp on the order according to the exchange's server, null if not provided */
  private final Date timestamp;

  /** Any applicable order flags */
  private final Set<IOrderFlags> flags = new HashSet<>();

  /** Status of order during its lifecycle */
  private OrderStatus status;

  /** Amount to be ordered / amount that has been matched against order on the order book/filled */
  private BigDecimal cumulativeAmount;

  /** Weighted average price of the fills in the order */
  private BigDecimal averagePrice;

  /** The total of the fees incurred for all transactions related to this order */
  private BigDecimal fee;

  /** The leverage to use for margin related to this order */
  private String leverage = null;

  /**
   * @param type Either BID (buying) or ASK (selling)
   * @param originalAmount The amount to trade
   * @param currencyPair The identifier (e.g. BTC/USD)
   * @param id An id (usually provided by the exchange)
   * @param timestamp the absolute time for this order according to the exchange's server, null if
   *     not provided
   */
  public Order(
      OrderType type,
      BigDecimal originalAmount,
      CurrencyPair currencyPair,
      String id,
      Date timestamp) {
    this(type, originalAmount, currencyPair, id, timestamp, null, null, null, null);
  }

  /**
   * @param type Either BID (buying) or ASK (selling)
   * @param originalAmount The amount to trade
   * @param currencyPair The identifier (e.g. BTC/USD)
   * @param id An id (usually provided by the exchange)
   * @param timestamp the absolute time for this order according to the exchange's server, null if
   *     not provided
   * @param averagePrice the average price of the fills belonging to the order
   * @param cumulativeAmount the amount that has been filled
   * @param fee the fee associated with this order
   * @param status the status of the order at the exchange
   */
  public Order(
      OrderType type,
      BigDecimal originalAmount,
      CurrencyPair currencyPair,
      String id,
      Date timestamp,
      BigDecimal averagePrice,
      BigDecimal cumulativeAmount,
      BigDecimal fee,
      OrderStatus status) {
    this.type = type;
    this.originalAmount = originalAmount;
    this.currencyPair = currencyPair;
    this.id = id;
    this.timestamp = timestamp;
    this.averagePrice = averagePrice;
    this.cumulativeAmount = cumulativeAmount;
    this.fee = fee;
    this.status = status;
  }

  /** Null-safe plain-string rendering for {@link #toString()} (avoids scientific notation). */
  private static String print(BigDecimal value) {
    return value == null ? null : value.toPlainString();
  }

  /**
   * The total of the fees incurred for all transactions related to this order
   *
   * @return null if this information is not available on the order level on the given exchange in
   *     which case you will have to navigate trades which filled this order to calculate it
   */
  public BigDecimal getFee() {
    return fee;
  }

  public void setFee(BigDecimal fee) {
    this.fee = fee;
  }

  /** @return The type (BID or ASK) */
  public OrderType getType() {
    return type;
  }

  /**
   * @return The status (PENDING_NEW, NEW, PARTIALLY_FILLED, FILLED, PENDING_CANCEL, CANCELED,
   *     PENDING_REPLACE, REPLACED, STOPPED, REJECTED or EXPIRED)
   */
  public OrderStatus getStatus() {
    return status;
  }

  /** The amount to trade */
  public BigDecimal getOriginalAmount() {
    return originalAmount;
  }

  /** The amount that has been filled */
  public BigDecimal getCumulativeAmount() {
    return cumulativeAmount;
  }

  public void setCumulativeAmount(BigDecimal cumulativeAmount) {
    this.cumulativeAmount = cumulativeAmount;
  }

  /**
   * @return the filled amount expressed in the counter currency (cumulative amount multiplied by
   *     average price), or null when either value is missing or the average price is not positive
   */
  public BigDecimal getCumulativeCounterAmount() {
    if (cumulativeAmount != null && averagePrice != null && averagePrice.compareTo(BigDecimal.ZERO) > 0) {
      return cumulativeAmount.multiply(averagePrice);
    }
    return null;
  }

  /** @return The remaining order amount (the full original amount when no fill is recorded) */
  public BigDecimal getRemainingAmount() {
    if (cumulativeAmount != null && originalAmount != null) {
      return originalAmount.subtract(cumulativeAmount);
    }
    return originalAmount;
  }

  /**
   * The average price of the fills in the order.
   *
   * @return null if this information is not available on the order level on the given exchange in
   *     which case you will have to navigate trades which filled this order to calculate it
   */
  public BigDecimal getAveragePrice() {
    return averagePrice;
  }

  public void setAveragePrice(BigDecimal averagePrice) {
    this.averagePrice = averagePrice;
  }

  public CurrencyPair getCurrencyPair() {
    return currencyPair;
  }

  /** @return A unique identifier (normally provided by the exchange) */
  public String getId() {
    return id;
  }

  public Date getTimestamp() {
    return timestamp;
  }

  /**
   * @return the live internal flag set; mutations of the returned set directly modify this order
   */
  public Set<IOrderFlags> getOrderFlags() {
    return flags;
  }

  /** Replaces all flags on this order with the given set; passing null clears all flags. */
  public void setOrderFlags(Set<IOrderFlags> flags) {
    this.flags.clear();
    if (flags != null) {
      this.flags.addAll(flags);
    }
  }

  public boolean hasFlag(IOrderFlags flag) {
    return flags.contains(flag);
  }

  public void addOrderFlag(IOrderFlags flag) {
    flags.add(flag);
  }

  public void setOrderStatus(OrderStatus status) {
    this.status = status;
  }

  public String getLeverage() {
    return leverage;
  }

  public void setLeverage(String leverage) {
    this.leverage = leverage;
  }

  @Override
  public String toString() {
    return "Order [type="
        + type
        + ", originalAmount="
        + print(originalAmount)
        + ", cumulativeAmount="
        + print(cumulativeAmount)
        + ", averagePrice="
        + print(averagePrice)
        + ", fee="
        + print(fee)
        + ", currencyPair="
        + currencyPair
        + ", id="
        + id
        + ", timestamp="
        + timestamp
        + ", status="
        + status
        + ", flags="
        + flags
        + "]";
  }

  /**
   * {@inheritDoc}
   *
   * <p>Kept consistent with {@link #equals(Object)}: equals compares amounts numerically with
   * {@code compareTo}, so the amount contribution is normalized with
   * {@link BigDecimal#stripTrailingZeros()} — otherwise orders with numerically equal amounts of
   * different scale (e.g. 1.0 vs 1.00) would be equal yet hash differently, violating the
   * hashCode contract.
   */
  @Override
  public int hashCode() {
    int hash = 7;
    hash = 83 * hash + (this.type != null ? this.type.hashCode() : 0);
    hash = 83 * hash
        + (this.originalAmount != null ? this.originalAmount.stripTrailingZeros().hashCode() : 0);
    hash = 83 * hash + (this.currencyPair != null ? this.currencyPair.hashCode() : 0);
    hash = 83 * hash + (this.id != null ? this.id.hashCode() : 0);
    hash = 83 * hash + (this.timestamp != null ? this.timestamp.hashCode() : 0);
    return hash;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final Order other = (Order) obj;
    if (this.type != other.type) {
      return false;
    }
    // Amounts are compared numerically so that scale differences are ignored.
    if ((this.originalAmount == null)
        ? (other.originalAmount != null)
        : this.originalAmount.compareTo(other.originalAmount) != 0) {
      return false;
    }
    if ((this.currencyPair == null)
        ? (other.currencyPair != null)
        : !this.currencyPair.equals(other.currencyPair)) {
      return false;
    }
    if ((this.id == null) ? (other.id != null) : !this.id.equals(other.id)) {
      return false;
    }
    if (this.timestamp != other.timestamp
        && (this.timestamp == null || !this.timestamp.equals(other.timestamp))) {
      return false;
    }
    return true;
  }

  public enum OrderType {
    /** Buying order (the trader is providing the counter currency) */
    BID,
    /** Selling order (the trader is providing the base currency) */
    ASK,
    /**
     * This is to close a short position when trading crypto currency derivatives such as swaps,
     * futures for CFD's.
     */
    EXIT_ASK,
    /**
     * This is to close a long position when trading crypto currency derivatives such as swaps,
     * futures for CFD's.
     */
    EXIT_BID;

    /** @return the opposite side of this order type (BID&lt;-&gt;ASK, EXIT_BID&lt;-&gt;EXIT_ASK) */
    public OrderType getOpposite() {
      switch (this) {
        case BID:
          return ASK;
        case ASK:
          return BID;
        case EXIT_ASK:
          return EXIT_BID;
        case EXIT_BID:
          return EXIT_ASK;
        default:
          return null;
      }
    }
  }

  public enum OrderStatus {
    /** Initial order when instantiated */
    PENDING_NEW,
    /** Initial order when placed on the order book at exchange */
    NEW,
    /** Partially matched against opposite order on order book at exchange */
    PARTIALLY_FILLED,
    /** Fully matched against opposite order on order book at exchange */
    FILLED,
    /** Waiting to be removed from order book at exchange */
    PENDING_CANCEL,
    /** Order was partially canceled at exchange */
    PARTIALLY_CANCELED,
    /** Removed from order book at exchange */
    CANCELED,
    /** Waiting to be replaced by another order on order book at exchange */
    PENDING_REPLACE,
    /** Order has been replaced by another order on order book at exchange */
    REPLACED,
    /** Order has been triggered at stop price */
    STOPPED,
    /** Order has been rejected by exchange and not placed on order book */
    REJECTED,
    /** Order has expired its time to live or trading session and been removed from order book */
    EXPIRED,
    /**
     * The exchange returned a state which is not in the exchange's API documentation. The state of
     * the order cannot be confirmed.
     */
    UNKNOWN;

    /** Returns true for final {@link OrderStatus} */
    public boolean isFinal() {
      switch (this) {
        case FILLED:
        case PARTIALLY_CANCELED: // Cancelled, partially-executed order is final status.
        case CANCELED:
        case REPLACED:
        case STOPPED:
        case REJECTED:
        case EXPIRED:
          return true;
        default:
          return false;
      }
    }

    /** Returns true when open {@link OrderStatus} */
    public boolean isOpen() {
      switch (this) {
        case PENDING_NEW:
        case NEW:
        case PARTIALLY_FILLED:
          return true;
        default:
          return false;
      }
    }
  }

  /** Marker interface for exchange-specific order flags. */
  public interface IOrderFlags {}

  /** Fluent builder base class for {@link Order} subclasses. */
  public abstract static class Builder {

    protected final Set<IOrderFlags> flags = new HashSet<>();
    protected OrderType orderType;
    protected BigDecimal originalAmount;
    protected BigDecimal cumulativeAmount;
    // Kept for subclasses that derive the cumulative amount from original - remaining.
    protected BigDecimal remainingAmount;
    protected CurrencyPair currencyPair;
    protected String id;
    protected Date timestamp;
    protected BigDecimal averagePrice;
    protected OrderStatus status;
    protected BigDecimal fee;

    protected Builder(OrderType orderType, CurrencyPair currencyPair) {
      this.orderType = orderType;
      this.currencyPair = currencyPair;
    }

    public Builder orderType(OrderType orderType) {
      this.orderType = orderType;
      return this;
    }

    public Builder orderStatus(OrderStatus status) {
      this.status = status;
      return this;
    }

    public Builder originalAmount(BigDecimal originalAmount) {
      this.originalAmount = originalAmount;
      return this;
    }

    public Builder cumulativeAmount(BigDecimal cumulativeAmount) {
      this.cumulativeAmount = cumulativeAmount;
      return this;
    }

    public Builder fee(BigDecimal fee) {
      this.fee = fee;
      return this;
    }

    public Builder remainingAmount(BigDecimal remainingAmount) {
      this.remainingAmount = remainingAmount;
      return this;
    }

    public Builder averagePrice(BigDecimal averagePrice) {
      this.averagePrice = averagePrice;
      return this;
    }

    public Builder currencyPair(CurrencyPair currencyPair) {
      this.currencyPair = currencyPair;
      return this;
    }

    public Builder id(String id) {
      this.id = id;
      return this;
    }

    public Builder timestamp(Date timestamp) {
      this.timestamp = timestamp;
      return this;
    }

    public Builder flags(Set<IOrderFlags> flags) {
      this.flags.addAll(flags);
      return this;
    }

    public Builder flag(IOrderFlags flag) {
      this.flags.add(flag);
      return this;
    }

    public abstract Order build();
  }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.core.http.rest;
import com.azure.core.annotation.ResumeOperation;
import com.azure.core.exception.HttpResponseException;
import com.azure.core.exception.UnexpectedLengthException;
import com.azure.core.http.ContentType;
import com.azure.core.http.HttpHeaders;
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.HttpRequest;
import com.azure.core.http.HttpResponse;
import com.azure.core.http.policy.CookiePolicy;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;
import com.azure.core.implementation.AccessibleByteArrayOutputStream;
import com.azure.core.implementation.TypeUtil;
import com.azure.core.implementation.http.UnexpectedExceptionInformation;
import com.azure.core.implementation.serializer.HttpResponseDecoder;
import com.azure.core.implementation.serializer.HttpResponseDecoder.HttpDecodedResponse;
import com.azure.core.util.Base64Url;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.UrlBuilder;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.serializer.JacksonAdapter;
import com.azure.core.util.serializer.SerializerAdapter;
import com.azure.core.util.serializer.SerializerEncoding;
import com.azure.core.util.tracing.TracerProxy;
import reactor.core.Exceptions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Signal;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* Type to create a proxy implementation for an interface describing REST API methods.
*
* RestProxy can create proxy implementations for interfaces with methods that return deserialized Java objects as well
* as asynchronous Single objects that resolve to a deserialized Java object.
*/
public final class RestProxy implements InvocationHandler {
private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0);
private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes.";
private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes.";
private final ClientLogger logger = new ClientLogger(RestProxy.class);
private final HttpPipeline httpPipeline;
private final SerializerAdapter serializer;
private final SwaggerInterfaceParser interfaceParser;
private final HttpResponseDecoder decoder;
private final ResponseConstructorsCache responseConstructorsCache;
/**
* Create a RestProxy.
*
* @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests.
* @param serializer the serializer that will be used to convert response bodies to POJOs.
* @param interfaceParser the parser that contains information about the interface describing REST API methods that
* this RestProxy "implements".
*/
private RestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) {
    this.httpPipeline = httpPipeline;
    this.serializer = serializer;
    this.interfaceParser = interfaceParser;
    // The response decoder shares the proxy's serializer.
    this.decoder = new HttpResponseDecoder(this.serializer);
    // Cache of Response subclass constructors, so reflection lookup happens once per type.
    this.responseConstructorsCache = new ResponseConstructorsCache();
}
/**
* Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this
* RestProxy was created to "implement".
*
* @param method the method to get a SwaggerMethodParser for
* @return the SwaggerMethodParser for the provided method
*/
private SwaggerMethodParser getMethodParser(Method method) {
    // Delegates to the interface parser, which owns per-method parser lookup.
    return interfaceParser.getMethodParser(method);
}
/**
* Send the provided request asynchronously, applying any request policies provided to the HttpClient instance.
*
* @param request the HTTP request to send
* @param contextData the context
* @return a {@link Mono} that emits HttpResponse asynchronously
*/
public Mono<HttpResponse> send(HttpRequest request, Context contextData) {
    // Delegates to the configured pipeline; all policies run as part of this call.
    return httpPipeline.send(request, contextData);
}
/**
 * Entry point for every call made through the proxy: parses the invoked interface method,
 * builds and sends the HTTP request, and adapts the response to the method's return type.
 */
@Override
public Object invoke(Object proxy, final Method method, Object[] args) {
    try {
        // @ResumeOperation methods are rejected up front — the resume operation isn't supported.
        if (method.isAnnotationPresent(ResumeOperation.class)) {
            throw logger.logExceptionAsError(Exceptions.propagate(
                new Exception("The resume operation isn't supported.")));
        }
        final SwaggerMethodParser methodParser = getMethodParser(method);
        final HttpRequest request = createHttpRequest(methodParser, args);
        // Seed the call context with the caller method name, then (optionally) a tracing span.
        Context context = methodParser.setContext(args)
            .addData("caller-method", methodParser.getFullyQualifiedMethodName());
        context = startTracingSpan(method, context);
        // Wrap the body so emitted bytes are checked against the Content-Length header.
        if (request.getBody() != null) {
            request.setBody(validateLength(request));
        }
        final Mono<HttpResponse> asyncResponse = send(request, context);
        Mono<HttpDecodedResponse> asyncDecodedResponse = this.decoder.decode(asyncResponse, methodParser);
        // Adapt the decoded response (sync/async, Response wrapper, raw body, ...) to the return type.
        return handleRestReturnType(asyncDecodedResponse, methodParser, methodParser.getReturnType(), context);
    } catch (IOException e) {
        // Serializing the request body failed.
        throw logger.logExceptionAsError(Exceptions.propagate(e));
    }
}
/**
 * Wraps the request body so that the number of bytes it emits is validated against the
 * request's {@code Content-Length} header: emits {@link UnexpectedLengthException} if the body
 * produces more or fewer bytes than declared.
 *
 * <p>If the request carries no {@code Content-Length} header, the body is returned unvalidated
 * instead of throwing a {@code NullPointerException} from {@code Long.parseLong(null)} (which the
 * previous implementation did). A malformed header value still surfaces as
 * {@link NumberFormatException}, since it indicates a programming error by the request builder.
 *
 * @param request the request whose body should be length-validated
 * @return the validating body flux (empty when the request has no body)
 */
static Flux<ByteBuffer> validateLength(final HttpRequest request) {
    final Flux<ByteBuffer> bbFlux = request.getBody();
    if (bbFlux == null) {
        return Flux.empty();
    }
    final String contentLength = request.getHeaders().getValue("Content-Length");
    if (contentLength == null || contentLength.isEmpty()) {
        // No declared length to validate against (e.g. chunked transfer); pass the body through.
        return bbFlux;
    }
    final long expectedLength = Long.parseLong(contentLength);
    // Defer so each subscription gets its own running total.
    return Flux.defer(() -> {
        final long[] currentTotalLength = new long[1];
        // VALIDATION_BUFFER is appended as a sentinel marking end-of-body, at which point the
        // accumulated count must match the expected length exactly.
        return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> {
            if (buffer == null) {
                return;
            }
            if (buffer == VALIDATION_BUFFER) {
                if (expectedLength != currentTotalLength[0]) {
                    sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL,
                        currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                } else {
                    sink.complete();
                }
                return;
            }
            currentTotalLength[0] += buffer.remaining();
            // Fail fast as soon as the body exceeds the declared length.
            if (currentTotalLength[0] > expectedLength) {
                sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
                    currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                return;
            }
            sink.next(buffer);
        });
    });
}
/**
* Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing
* additional context information.
*
* @param method Service method being called.
* @param context Context information about the current service call.
* @return The updated context containing the span context.
*/
/**
 * Starts a tracing span named {@code ServiceName.methodName} for the current service call.
 * When tracing is disabled the context is returned untouched.
 *
 * @param method service method being called
 * @param context context information about the current service call
 * @return the context updated with the started span
 */
private Context startTracingSpan(Method method, Context context) {
    if (!TracerProxy.isTracingEnabled()) {
        return context;
    }
    final String spanName = String.format("%s.%s", interfaceParser.getServiceName(), method.getName());
    final Context namedContext = TracerProxy.setSpanName(spanName, context);
    return TracerProxy.start(spanName, namedContext);
}
/**
* Create a HttpRequest for the provided Swagger method using the provided arguments.
*
* @param methodParser the Swagger method parser to use
* @param args the arguments to use to populate the method's annotation values
* @return a HttpRequest
* @throws IOException thrown if the body contents cannot be serialized
*/
private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException {
    // Sometimes people pass in a full URL for the value of their PathParam annotated argument.
    // This definitely happens in paging scenarios. In that case, just use the full URL and
    // ignore the Host annotation.
    final String path = methodParser.setPath(args);
    final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path);
    final UrlBuilder urlBuilder;
    if (pathUrlBuilder.getScheme() != null) {
        // Absolute URL supplied as the path — use it verbatim.
        urlBuilder = pathUrlBuilder;
    } else {
        urlBuilder = new UrlBuilder();
        methodParser.setSchemeAndHost(args, urlBuilder);
        // Set the path after host, concatenating the path segment in the host.
        if (path != null && !path.isEmpty() && !"/".equals(path)) {
            String hostPath = urlBuilder.getPath();
            // Replace rather than concatenate when the host has no meaningful path of its own,
            // or when the "path" is actually a full URL (contains "://").
            if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains("://")) {
                urlBuilder.setPath(path);
            } else {
                urlBuilder.setPath(hostPath + "/" + path);
            }
        }
    }
    methodParser.setEncodedQueryParameters(args, urlBuilder);
    final URL url = urlBuilder.toUrl();
    // configRequest attaches the serialized body and the Content-Type/Content-Length headers.
    final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url),
        methodParser, args);
    // Headers from Swagger method arguments always take precedence over inferred headers from body types.
    HttpHeaders httpHeaders = request.getHeaders();
    methodParser.setHeaders(args, httpHeaders);
    return request;
}
/**
 * Attaches the serialized request body (from the @BodyParam argument) plus the matching
 * Content-Type and Content-Length headers to the given request.
 */
@SuppressWarnings("unchecked")
private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser,
                                  final Object[] args) throws IOException {
    final Object bodyContentObject = methodParser.setBody(args);
    if (bodyContentObject == null) {
        request.getHeaders().put("Content-Length", "0");
    } else {
        // We read the content type from the @BodyParam annotation.
        String contentType = methodParser.getBodyContentType();
        // If this is null or empty, the service interface definition is incomplete and should
        // be fixed to ensure correct definitions are applied. Until then, infer a default:
        // raw byte[]/String payloads default to octet-stream, everything else to JSON.
        if (contentType == null || contentType.isEmpty()) {
            if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) {
                contentType = ContentType.APPLICATION_OCTET_STREAM;
            } else {
                contentType = ContentType.APPLICATION_JSON;
            }
        }
        request.getHeaders().put("Content-Type", contentType);
        // TODO(jogiles) this feels hacky: JSON detection scans the media-type parts (e.g.
        // "application/json; charset=utf-8") for an exact application/json segment.
        boolean isJson = false;
        final String[] contentTypeParts = contentType.split(";");
        for (final String contentTypePart : contentTypeParts) {
            if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) {
                isJson = true;
                break;
            }
        }
        if (isJson) {
            // JSON bodies are serialized eagerly so the exact Content-Length can be set.
            ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
            serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream);
            request.setHeader("Content-Length", String.valueOf(stream.size()));
            request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
        } else if (FluxUtil.isFluxByteBuffer(methodParser.getBodyJavaType())) {
            // Content-Length or Transfer-Encoding: chunked must be provided by a user-specified header when a
            // Flowable<byte[]> is given for the body.
            request.setBody((Flux<ByteBuffer>) bodyContentObject);
        } else if (bodyContentObject instanceof byte[]) {
            request.setBody((byte[]) bodyContentObject);
        } else if (bodyContentObject instanceof String) {
            final String bodyContentString = (String) bodyContentObject;
            if (!bodyContentString.isEmpty()) {
                request.setBody(bodyContentString);
            }
        } else if (bodyContentObject instanceof ByteBuffer) {
            request.setBody(Flux.just((ByteBuffer) bodyContentObject));
        } else {
            // Fallback: serialize using the encoding implied by the request's own headers.
            ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
            serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream);
            request.setHeader("Content-Length", String.valueOf(stream.size()));
            request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
        }
    }
    return request;
}
/**
 * Async wrapper around {@link #ensureExpectedStatus(HttpDecodedResponse, SwaggerMethodParser)}:
 * validates each emitted decoded response's status code.
 */
private Mono<HttpDecodedResponse> ensureExpectedStatus(final Mono<HttpDecodedResponse> asyncDecodedResponse,
                                                       final SwaggerMethodParser methodParser) {
    return asyncDecodedResponse
        .flatMap(response -> ensureExpectedStatus(response, methodParser));
}
/**
 * Builds the service-specific exception for a response with a disallowed status code.
 * Falls back to an {@link IOException} (carrying the reflective failure as its cause) when the
 * declared exception type cannot be instantiated.
 *
 * @param exception metadata describing the exception type and its body type
 * @param httpResponse the raw HTTP response
 * @param responseContent the raw response body bytes, may be null/empty
 * @param responseDecodedContent the decoded response body, may be null
 * @return the exception to emit to the caller
 */
private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception,
                                                        final HttpResponse httpResponse,
                                                        final byte[] responseContent,
                                                        final Object responseDecodedContent) {
    final int responseStatusCode = httpResponse.getStatusCode();
    final String contentType = httpResponse.getHeaderValue("Content-Type");

    // Summarize the body for the exception message; never dump binary payloads.
    final String bodyRepresentation;
    if ("application/octet-stream".equalsIgnoreCase(contentType)) {
        bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)";
    } else if (responseContent == null || responseContent.length == 0) {
        bodyRepresentation = "(empty body)";
    } else {
        bodyRepresentation = "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\"";
    }

    try {
        final Constructor<? extends HttpResponseException> exceptionConstructor =
            exception.getExceptionType().getConstructor(String.class, HttpResponse.class,
                exception.getExceptionBodyType());
        return exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation,
            httpResponse,
            responseDecodedContent);
    } catch (ReflectiveOperationException e) {
        final String message = "Status code " + responseStatusCode + ", but an instance of "
            + exception.getExceptionType().getCanonicalName() + " cannot be created."
            + " Response body: " + bodyRepresentation;
        return new IOException(message, e);
    }
}
/**
* Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status
 * code' OR (2) emits the provided response if its status code is allowed.
*
* 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[]
* of additional allowed status codes.
*
* @param decodedResponse The HttpResponse to check.
* @param methodParser The method parser that contains information about the service interface method that initiated
* the HTTP request.
* @return An async-version of the provided decodedResponse.
*/
private Mono<HttpDecodedResponse> ensureExpectedStatus(final HttpDecodedResponse decodedResponse,
                                                       final SwaggerMethodParser methodParser) {
    final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode();
    final Mono<HttpDecodedResponse> asyncResult;
    if (!methodParser.isExpectedResponseStatusCode(responseStatusCode)) {
        // Disallowed status: read the error body and raise the service-specific exception.
        Mono<byte[]> bodyAsBytes = decodedResponse.getSourceResponse().getBodyAsByteArray();
        asyncResult = bodyAsBytes.flatMap((Function<byte[], Mono<HttpDecodedResponse>>) responseContent -> {
            // bodyAsString() emits non-empty string, now look for decoded version of same string.
            Mono<Object> decodedErrorBody = decodedResponse.getDecodedBody(responseContent);
            return decodedErrorBody
                .flatMap((Function<Object, Mono<HttpDecodedResponse>>) responseDecodedErrorObject -> {
                    // decodedBody() emits 'responseDecodedErrorObject', the successfully decoded
                    // exception body object.
                    Throwable exception =
                        instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                            decodedResponse.getSourceResponse(),
                            responseContent,
                            responseDecodedErrorObject);
                    return Mono.error(exception);
                })
                .switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
                    // decodedBody() emits empty, indicating the body could not be decoded:
                    // create the exception with the un-decodable content string and without an
                    // exception body object.
                    Throwable exception =
                        instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                            decodedResponse.getSourceResponse(),
                            responseContent,
                            null);
                    return Mono.error(exception);
                }));
        }).switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
            // bodyAsString() emits empty, indicating no body: create the exception with an empty
            // content string and no exception body object.
            Throwable exception =
                instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                    decodedResponse.getSourceResponse(),
                    null,
                    null);
            return Mono.error(exception);
        }));
    } else {
        // Expected status: pass the decoded response straight through.
        asyncResult = Mono.just(decodedResponse);
    }
    return asyncResult;
}
/**
 * Adapts the decoded response to the method's declared entity type: either a {@code Response}
 * wrapper (built via {@link #createResponse}) or a bare deserialized body.
 */
private Mono<?> handleRestResponseReturnType(final HttpDecodedResponse response,
                                             final SwaggerMethodParser methodParser,
                                             final Type entityType) {
    if (!TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) {
        // Bare body type. For now we're just throwing if the Maybe didn't emit a value.
        return handleBodyReturnType(response, methodParser, entityType);
    }
    final Type bodyType = TypeUtil.getRestResponseBodyType(entityType);
    if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) {
        // Void body: drain the network stream, then build a body-less Response.
        return response.getSourceResponse().getBody().ignoreElements()
            .then(createResponse(response, entityType, null));
    }
    return handleBodyReturnType(response, methodParser, bodyType)
        .flatMap(bodyAsObject -> createResponse(response, entityType, bodyAsObject))
        .switchIfEmpty(Mono.defer((Supplier<Mono<Response<?>>>) () ->
            createResponse(response, entityType, null)));
}
/**
 * Instantiates the concrete {@code Response} wrapper for the given entity type via the cached
 * reflective constructor, substituting the base implementations for the bare interfaces.
 */
@SuppressWarnings("unchecked")
private Mono<Response<?>> createResponse(HttpDecodedResponse response, Type entityType, Object bodyAsObject) {
    // determine the type of response class. If the type is the 'RestResponse' interface, we will use the
    // 'RestResponseBase' class instead.
    Class<? extends Response<?>> cls = (Class<? extends Response<?>>) TypeUtil.getRawClass(entityType);
    if (cls.equals(Response.class)) {
        cls = (Class<? extends Response<?>>) (Object) ResponseBase.class;
    } else if (cls.equals(PagedResponse.class)) {
        cls = (Class<? extends Response<?>>) (Object) PagedResponseBase.class;
        // PagedResponseBase requires the body to be a Page; fail loudly otherwise.
        if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) {
            throw logger.logExceptionAsError(new RuntimeException(
                "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class));
        }
    }
    // Constructor lookup is cached per response class to avoid repeated reflection.
    Constructor<? extends Response<?>> ctr = this.responseConstructorsCache.get(cls);
    if (ctr != null) {
        return this.responseConstructorsCache.invoke(ctr, response, bodyAsObject);
    } else {
        return Mono.error(new RuntimeException("Cannot find suitable constructor for class " + cls));
    }
}
/**
 * Produces the deserialized body for the given entity type. Special cases, in order:
 * HEAD + boolean (success flag from the status code), raw {@code byte[]} (optionally
 * base64url-decoded), streaming {@code Flux<ByteBuffer>}, then general decoded-body handling.
 */
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
                                     final SwaggerMethodParser methodParser, final Type entityType) {
    final int responseStatusCode = response.getSourceResponse().getStatusCode();
    final Type returnValueWireType = methodParser.getReturnValueWireType();

    final boolean booleanEntity = TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.TYPE)
        || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class);
    if (methodParser.getHttpMethod() == HttpMethod.HEAD && booleanEntity) {
        // HEAD with a boolean return: "success" means any 2xx status.
        return Mono.just((responseStatusCode / 100) == 2);
    }

    if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
        Mono<byte[]> bodyBytes = response.getSourceResponse().getBodyAsByteArray();
        if (returnValueWireType == Base64Url.class) {
            // The wire carries base64url text; decode it before handing back raw bytes.
            bodyBytes = bodyBytes.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
        }
        return bodyBytes;
    }

    if (FluxUtil.isFluxByteBuffer(entityType)) {
        // Streaming return type: expose the raw body flux without buffering.
        return Mono.just(response.getSourceResponse().getBody());
    }

    // Everything else: Mono<Object> or Mono<Page<T>> from the decoded body.
    return response.getDecodedBody((byte[]) null);
}
/**
* Handle the provided asynchronous HTTP response and return the deserialized value.
*
* @param asyncHttpDecodedResponse the asynchronous HTTP response to the original HTTP request
* @param methodParser the SwaggerMethodParser that the request originates from
* @param returnType the type of value that will be returned
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return the deserialized result
*/
private Object handleRestReturnType(final Mono<HttpDecodedResponse> asyncHttpDecodedResponse,
                                    final SwaggerMethodParser methodParser,
                                    final Type returnType,
                                    final Context context) {
    // Validate the status code, end the tracing span on every terminal signal, and make the
    // tracing context available to downstream operators via the subscriber context.
    final Mono<HttpDecodedResponse> asyncExpectedResponse =
        ensureExpectedStatus(asyncHttpDecodedResponse, methodParser)
            .doOnEach(RestProxy::endTracingSpan)
            .subscriberContext(reactor.util.context.Context.of("TRACING_CONTEXT", context));

    final Object result;
    if (TypeUtil.isTypeOrSubTypeOf(returnType, Mono.class)) {
        final Type monoTypeParam = TypeUtil.getTypeArgument(returnType);
        if (TypeUtil.isTypeOrSubTypeOf(monoTypeParam, Void.class)) {
            // ProxyMethod ReturnType: Mono<Void>
            result = asyncExpectedResponse.then();
        } else {
            // ProxyMethod ReturnType: Mono<? extends RestResponseBase<?, ?>>
            result = asyncExpectedResponse.flatMap(response ->
                handleRestResponseReturnType(response, methodParser, monoTypeParam));
        }
    } else if (FluxUtil.isFluxByteBuffer(returnType)) {
        // ProxyMethod ReturnType: Flux<ByteBuffer>
        result = asyncExpectedResponse.flatMapMany(ar -> ar.getSourceResponse().getBody());
    } else if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType,
        Void.class)) {
        // ProxyMethod ReturnType: Void — synchronous, so block until the call completes.
        asyncExpectedResponse.block();
        result = null;
    } else {
        // ProxyMethod ReturnType: T where T != async (Mono, Flux) or sync Void.
        // Block the deserialization until a value T is received.
        result = asyncExpectedResponse
            .flatMap(httpResponse -> handleRestResponseReturnType(httpResponse, methodParser, returnType))
            .block();
    }
    return result;
}
// This handles each onX for the response mono.
// The signal indicates the status and contains the metadata we need to end the tracing span.
/**
 * Terminal-signal handler for the response mono: ends the tracing span started in
 * {@link #startTracingSpan}, recording the status code (or the error) of the call.
 */
private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
    if (!TracerProxy.isTracingEnabled()) {
        return;
    }
    // onComplete/onSubscribe carry no response or error information needed to end the span.
    if (signal.isOnComplete() || signal.isOnSubscribe()) {
        return;
    }
    // The tracing context was attached to the mono's subscriber context by handleRestReturnType.
    final Optional<Context> tracingContext = signal.getContext().getOrEmpty("TRACING_CONTEXT");
    if (!tracingContext.isPresent()) {
        return;
    }

    int statusCode = 0;
    Throwable throwable = null;
    if (signal.hasValue()) {
        // onNext carries the decoded response and therefore the status code.
        statusCode = signal.get().getSourceResponse().getStatusCode();
    } else if (signal.hasError()) {
        // onError: only HttpResponseException carries a status code.
        throwable = signal.getThrowable();
        if (throwable instanceof HttpResponseException) {
            statusCode = ((HttpResponseException) throwable).getResponse().getStatusCode();
        }
    }
    TracerProxy.end(statusCode, throwable, tracingContext.get());
}
/**
 * Create an instance of the default serializer: the shared Jackson-based
 * {@link SerializerAdapter}.
 *
 * @return the default serializer
 */
private static SerializerAdapter createDefaultSerializer() {
    return JacksonAdapter.createDefaultSerializerAdapter();
}
/**
 * Create the default HttpPipeline, without any credentials policy.
 *
 * @return the default HttpPipeline
 */
private static HttpPipeline createDefaultPipeline() {
    return createDefaultPipeline(null);
}
/**
 * Create the default HttpPipeline: user-agent, retry and cookie policies, optionally
 * followed by the supplied credentials policy.
 *
 * @param credentialsPolicy the credentials policy factory to use to apply authentication to the pipeline;
 *     may be null, in which case no authentication is applied
 * @return the default HttpPipeline
 */
private static HttpPipeline createDefaultPipeline(HttpPipelinePolicy credentialsPolicy) {
    final List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
    pipelinePolicies.add(new UserAgentPolicy());
    pipelinePolicies.add(new RetryPolicy());
    pipelinePolicies.add(new CookiePolicy());
    if (credentialsPolicy != null) {
        pipelinePolicies.add(credentialsPolicy);
    }
    final HttpPipelinePolicy[] policyArray = pipelinePolicies.toArray(new HttpPipelinePolicy[0]);
    return new HttpPipelineBuilder().policies(policyArray).build();
}
/**
 * Create a proxy implementation of the provided Swagger interface.
 *
 * <p>Uses the default pipeline (user-agent, retry, cookie policies) and the default
 * Jackson-based serializer.</p>
 *
 * @param swaggerInterface the Swagger interface to provide a proxy implementation for
 * @param <A> the type of the Swagger interface
 * @return a proxy implementation of the provided Swagger interface
 */
public static <A> A create(Class<A> swaggerInterface) {
    return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer());
}
/**
 * Create a proxy implementation of the provided Swagger interface.
 *
 * <p>Uses the default Jackson-based serializer.</p>
 *
 * @param swaggerInterface the Swagger interface to provide a proxy implementation for
 * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
 * @param <A> the type of the Swagger interface
 * @return a proxy implementation of the provided Swagger interface
 */
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) {
    return create(swaggerInterface, httpPipeline, createDefaultSerializer());
}
/**
 * Create a proxy implementation of the provided Swagger interface.
 *
 * @param swaggerInterface the Swagger interface to provide a proxy implementation for
 * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
 * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies
 * @param <A> the type of the Swagger interface.
 * @return a proxy implementation of the provided Swagger interface
 */
@SuppressWarnings("unchecked")
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) {
    // Parse the interface's Swagger annotations once up front; the parser is shared by
    // all invocations routed through this RestProxy instance.
    final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer);
    final RestProxy restProxy = new RestProxy(httpPipeline, serializer, interfaceParser);
    // The unchecked cast is safe: the dynamic proxy is created with exactly this interface.
    return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface},
        restProxy);
}
}
| |
/*
* Copyright The Sett Ltd, 2005 to 2014.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thesett.aima.logic.fol.wam.machine;
import java.util.HashMap;
import java.util.Map;
import com.thesett.aima.logic.fol.LinkageException;
import com.thesett.aima.logic.fol.VariableAndFunctorInternerImpl;
import com.thesett.aima.logic.fol.wam.compiler.WAMCallPoint;
import com.thesett.aima.logic.fol.wam.compiler.WAMCompiledPredicate;
import com.thesett.aima.logic.fol.wam.compiler.WAMCompiledQuery;
import com.thesett.aima.logic.fol.wam.compiler.WAMReservedLabel;
import com.thesett.common.util.doublemaps.SymbolTable;
/**
 * WAMBaseMachine provides the basic services common to all WAM machines. This consists of managing the interning name
 * tables for functors and variables, and managing the call table for the entry addresses of procedures.
 *
 * <pre><p/><table id="crc"><caption>CRC Card</caption>
 * <tr><th> Responsibilities <th> Collaborations
 * <tr><td> Provide symbol table for functor names.
 * <tr><td> Provide symbol table for variable names.
 * <tr><td> Store and retrieve the entry points to byte code procedures.
 * </table></pre>
 *
 * @author Rupert Smith
 */
public abstract class WAMBaseMachine extends VariableAndFunctorInternerImpl implements WAMMachine, WAMCodeView
{
    /** Used for debugging. */
    /* private static final Logger log = Logger.getLogger(WAMBaseMachine.class.getName()); */

    /** The symbol table key for call points. */
    protected static final String SYMKEY_CALLPOINTS = "call_points";

    /** Holds the symbol table. */
    protected SymbolTable<Integer, String, Object> symbolTable;

    /** Holds the reverse symbol table to look up names by addresses. */
    protected Map<Integer, Integer> reverseTable = new HashMap<Integer, Integer>();

    /**
     * Creates the base machine, providing variable and functor symbol tables.
     *
     * @param symbolTable The symbol table.
     */
    protected WAMBaseMachine(SymbolTable<Integer, String, Object> symbolTable)
    {
        super("WAM_Variable_Namespace", "WAM_Functor_Namespace");
        this.symbolTable = symbolTable;
    }

    // NOTE(review): the 'emmitCode' spelling is inherited from the WAMMachine interface and
    // cannot be corrected here without breaking that contract.

    /** {@inheritDoc} */
    public abstract void emmitCode(WAMCompiledPredicate predicate) throws LinkageException;

    /** {@inheritDoc} */
    public abstract void emmitCode(WAMCompiledQuery query) throws LinkageException;

    /** {@inheritDoc} */
    public abstract void emmitCode(int offset, int address);

    /**
     * Extracts the raw byte code from the machine for a given call table entry.
     *
     * @param callPoint The call table entry giving the location and length of the code.
     *
     * @return The byte code at the specified location.
     */
    public abstract byte[] retrieveCode(WAMCallPoint callPoint);

    /**
     * Looks up the offset of the start of the code for the named functor.
     *
     * @param functorName The interned name of the functor to find the start address of the code for.
     *
     * @return The call table entry of the functors code within the code area of the machine, or an invalid entry
     *         (entry point -1, length 0) if the functor is not known to the machine.
     */
    public WAMCallPoint resolveCallPoint(int functorName)
    {
        /*log.fine("public WAMCallPoint resolveCallPoint(int functorName): called");*/

        WAMCallPoint result = (WAMCallPoint) symbolTable.get(functorName, SYMKEY_CALLPOINTS);

        if (result == null)
        {
            // Unknown functor: signal it with a negative entry point rather than returning null.
            result = new WAMCallPoint(-1, 0, functorName);
        }

        return result;
    }

    /** {@inheritDoc} */
    public void reserveReferenceToLabel(int labelName, int offset)
    {
        // Record the referencing offset against the label, to be filled in once the label
        // is resolved (see resolveLabelPoint).
        getOrCreateReservedLabel(labelName).referenceList.add(offset);
    }

    /** {@inheritDoc} */
    public void resolveLabelPoint(int labelName, int address)
    {
        // Create the label with resolved address, if it does not already exist.
        WAMReservedLabel label = getOrCreateReservedLabel(labelName);
        label.entryPoint = address;

        // Fill in all references to the label with the correct value. This does nothing if the label was just
        // created.
        for (Integer offset : label.referenceList)
        {
            emmitCode(offset, label.entryPoint);
        }

        // Keep a reverse lookup from address to label name.
        reverseTable.put(address, labelName);
    }

    /** {@inheritDoc} */
    public Integer getNameForAddress(int address)
    {
        return reverseTable.get(address);
    }

    /**
     * Resets the machine, to its initial state. This should clear any programs from the machine, and clear all of its
     * stacks and heaps.
     */
    public void reset()
    {
        // Clear the entire symbol table and the reverse address lookup.
        symbolTable.clear();
        reverseTable.clear();
    }

    /**
     * Records the offset of the start of the code for the named functor.
     *
     * @param functorName The interned name of the functor to find the start address of the code for.
     * @param offset      The offset of the start of the functors code within the code area.
     * @param length      The size of the code to set the address for.
     *
     * @return The call table entry for the functors code within the code area of the machine.
     */
    protected WAMCallPoint setCodeAddress(int functorName, int offset, int length)
    {
        WAMCallPoint entry = new WAMCallPoint(offset, length, functorName);
        symbolTable.put(functorName, SYMKEY_CALLPOINTS, entry);

        // Keep a reverse lookup from address to functor name.
        reverseTable.put(offset, functorName);

        return entry;
    }

    /**
     * Records the id of an internal function for the named functor. The method name uses the word 'address' but this
     * is not really accurate, the address field is used to hold an id of the internal function to be invoked. This
     * method differs from {@link #setCodeAddress(int, int, int)}, as it does not set the reverse mapping from the
     * address to the functor name, since an address is not really being used.
     *
     * @param functorName The interned name of the functor to associate the internal function with.
     * @param id          The id of the internal function to be invoked for the functor.
     *
     * @return The call table entry for the functors code within the code area of the machine.
     */
    protected WAMCallPoint setInternalCodeAddress(int functorName, int id)
    {
        WAMCallPoint entry = new WAMCallPoint(id, 0, functorName);
        symbolTable.put(functorName, SYMKEY_CALLPOINTS, entry);

        return entry;
    }

    /**
     * Fetches the reserved label with the given name from the symbol table, creating and registering it first when it
     * does not exist yet. Extracted from {@link #reserveReferenceToLabel(int, int)} and
     * {@link #resolveLabelPoint(int, int)}, which previously duplicated this lookup-or-create logic.
     *
     * @param labelName The interned name of the label.
     *
     * @return The existing or newly created reserved label.
     */
    private WAMReservedLabel getOrCreateReservedLabel(int labelName)
    {
        WAMReservedLabel label = (WAMReservedLabel) symbolTable.get(labelName, SYMKEY_CALLPOINTS);

        if (label == null)
        {
            label = new WAMReservedLabel(labelName);
            symbolTable.put(labelName, SYMKEY_CALLPOINTS, label);
        }

        return label;
    }
}
| |
package carbon.widget;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PointF;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresApi;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewAnimationUtils;
import android.view.ViewGroup;
import android.view.ViewOutlineProvider;
import android.view.animation.Interpolator;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.animation.ValueAnimator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import carbon.Carbon;
import carbon.R;
import carbon.animation.AnimUtils;
import carbon.animation.AnimatedView;
import carbon.animation.StateAnimator;
import carbon.drawable.ripple.RippleDrawable;
import carbon.drawable.ripple.RippleView;
import carbon.internal.ElevationComparator;
import carbon.internal.MatrixHelper;
import carbon.internal.Reveal;
import carbon.shadow.Shadow;
import carbon.shadow.ShadowGenerator;
import carbon.shadow.ShadowShape;
import carbon.shadow.ShadowView;
import static com.nineoldandroids.view.animation.AnimatorProxy.NEEDS_PROXY;
import static com.nineoldandroids.view.animation.AnimatorProxy.wrap;
/**
* Created by Marcin on 2015-04-01.
* <p/>
* Carbon version of a drawer layout with support for shadows, ripples and other material features.
* Not really useful, but added for sake of completeness.
*/
public class DrawerLayout extends android.support.v4.widget.DrawerLayout implements ShadowView, RippleView, TouchMarginView, StateAnimatorView, AnimatedView, InsetView, CornerView, MaxSizeView, RevealView {
// Optional hook that may intercept every touch event before normal dispatch.
private OnTouchListener onDispatchTouchListener;

/** Creates the drawer layout using the carbon_drawerLayoutStyle theme attribute. */
public DrawerLayout(Context context) {
    super(context, null, R.attr.carbon_drawerLayoutStyle);
    initDrawerLayout(null, R.attr.carbon_drawerLayoutStyle);
}

/** Creates the drawer layout from XML, wrapping the context with the carbon theme when one is set. */
public DrawerLayout(Context context, AttributeSet attrs) {
    super(Carbon.getThemedContext(context, attrs, R.styleable.DrawerLayout, R.attr.carbon_drawerLayoutStyle, R.styleable.DrawerLayout_carbon_theme), attrs, R.attr.carbon_drawerLayoutStyle);
    initDrawerLayout(attrs, R.attr.carbon_drawerLayoutStyle);
}

/** Creates the drawer layout from XML with an explicit default style attribute. */
public DrawerLayout(Context context, AttributeSet attrs, int defStyleAttr) {
    super(Carbon.getThemedContext(context, attrs, R.styleable.DrawerLayout, defStyleAttr, R.styleable.DrawerLayout_carbon_theme), attrs, defStyleAttr);
    initDrawerLayout(attrs, defStyleAttr);
}
// Styleable attribute id groups consumed by the Carbon.init* helpers in initDrawerLayout.
private static int[] rippleIds = new int[]{
    R.styleable.DrawerLayout_carbon_rippleColor,
    R.styleable.DrawerLayout_carbon_rippleStyle,
    R.styleable.DrawerLayout_carbon_rippleHotspot,
    R.styleable.DrawerLayout_carbon_rippleRadius
};
private static int[] animationIds = new int[]{
    R.styleable.DrawerLayout_carbon_inAnimation,
    R.styleable.DrawerLayout_carbon_outAnimation
};
private static int[] touchMarginIds = new int[]{
    R.styleable.DrawerLayout_carbon_touchMargin,
    R.styleable.DrawerLayout_carbon_touchMarginLeft,
    R.styleable.DrawerLayout_carbon_touchMarginTop,
    R.styleable.DrawerLayout_carbon_touchMarginRight,
    R.styleable.DrawerLayout_carbon_touchMarginBottom
};
private static int[] insetIds = new int[]{
    R.styleable.DrawerLayout_carbon_inset,
    R.styleable.DrawerLayout_carbon_insetLeft,
    R.styleable.DrawerLayout_carbon_insetTop,
    R.styleable.DrawerLayout_carbon_insetRight,
    R.styleable.DrawerLayout_carbon_insetBottom,
    R.styleable.DrawerLayout_carbon_insetColor
};
private static int[] maxSizeIds = new int[]{
    R.styleable.DrawerLayout_carbon_maxWidth,
    R.styleable.DrawerLayout_carbon_maxHeight,
};
private static int[] elevationIds = new int[]{
    R.styleable.DrawerLayout_carbon_elevation,
    R.styleable.DrawerLayout_carbon_elevationShadowColor
};
/**
 * Reads the carbon styleable attributes and initializes ripple, elevation, animations,
 * touch margins, insets, max size and corner radius for this view.
 */
private void initDrawerLayout(AttributeSet attrs, int defStyleAttr) {
    TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.DrawerLayout, defStyleAttr, R.style.carbon_DrawerLayout);

    Carbon.initRippleDrawable(this, a, rippleIds);
    Carbon.initElevation(this, a, elevationIds);
    Carbon.initAnimations(this, a, animationIds);
    Carbon.initTouchMargin(this, a, touchMarginIds);
    Carbon.initInset(this, a, insetIds);
    Carbon.initMaxSize(this, a, maxSizeIds);
    setCornerRadius(a.getDimension(R.styleable.DrawerLayout_carbon_cornerRadius, 0));

    a.recycle();

    // Children are drawn in a custom order (by elevation, see getChildDrawingOrder).
    setChildrenDrawingOrderEnabled(true);
    // Allow drawing into the padding area (used by insets and masking effects).
    setClipToPadding(false);
}
// Shared paint used for masking, insets and shadows.
private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG);
// Set by draw(); tells dispatchDraw() whether draw() already applied the corner/reveal masks.
private boolean drawCalled = false;
// Active software circular-reveal state, or null when no reveal is running.
Reveal reveal;
/**
 * Starts a circular reveal animation centered at (x, y), growing from startRadius to
 * finishRadius. On Lollipop+ the native ViewAnimationUtils reveal is used, wrapped in a
 * nineoldandroids Animator adapter; on older devices a software mask path is animated
 * instead and applied in draw()/dispatchDraw().
 */
@Override
public Animator startReveal(int x, int y, float startRadius, float finishRadius) {
    if (Build.VERSION.SDK_INT > Build.VERSION_CODES.KITKAT_WATCH) {
        // Native circular reveal, adapted to the nineoldandroids Animator interface so
        // callers get a uniform return type on all API levels.
        android.animation.Animator circularReveal = ViewAnimationUtils.createCircularReveal(this, x, y, startRadius, finishRadius);
        circularReveal.start();
        return new Animator() {
            @Override
            @RequiresApi(api = Build.VERSION_CODES.HONEYCOMB)
            public long getStartDelay() {
                return circularReveal.getStartDelay();
            }

            @Override
            @RequiresApi(api = Build.VERSION_CODES.HONEYCOMB)
            public void setStartDelay(long startDelay) {
                circularReveal.setStartDelay(startDelay);
            }

            @Override
            @RequiresApi(api = Build.VERSION_CODES.HONEYCOMB)
            public Animator setDuration(long duration) {
                circularReveal.setDuration(duration);
                return this;
            }

            @Override
            @RequiresApi(api = Build.VERSION_CODES.HONEYCOMB)
            public long getDuration() {
                return circularReveal.getDuration();
            }

            @Override
            @RequiresApi(api = Build.VERSION_CODES.HONEYCOMB)
            public void setInterpolator(Interpolator value) {
                circularReveal.setInterpolator(value);
            }

            @Override
            @RequiresApi(api = Build.VERSION_CODES.HONEYCOMB)
            public boolean isRunning() {
                return circularReveal.isRunning();
            }
        };
    } else {
        // Software reveal: animate the radius and rebuild the clip mask every frame.
        reveal = new Reveal(x, y, startRadius);
        ValueAnimator animator = ValueAnimator.ofFloat(startRadius, finishRadius);
        animator.setDuration(Carbon.getDefaultRevealDuration());
        animator.addUpdateListener(animation -> {
            reveal.radius = (float) animation.getAnimatedValue();
            reveal.mask.reset();
            // Clamp the radius to at least 1 so the mask path is never empty.
            reveal.mask.addCircle(reveal.x, reveal.y, Math.max(reveal.radius, 1), Path.Direction.CW);
            postInvalidate();
        });
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationCancel(Animator animation) {
                reveal = null;
            }

            @Override
            public void onAnimationEnd(Animator animation) {
                reveal = null;
            }
        });
        animator.start();
        return animator;
    }
}
/**
 * Draws the children, applying the pre-Lollipop corner/reveal masks here only when draw()
 * did not already do it (drawCalled is false, e.g. when the framework skipped draw() and
 * invoked dispatchDraw() directly).
 */
@Override
protected void dispatchDraw(@NonNull Canvas canvas) {
    boolean r = reveal != null;
    boolean c = cornerRadius > 0;
    // draw not called, we have to handle corners here
    if (!drawCalled && (r || c) && getWidth() > 0 && getHeight() > 0 && Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT_WATCH) {
        // Render into an offscreen layer, then erase the masked areas with CLEAR below.
        int saveCount = canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.FULL_COLOR_LAYER_SAVE_FLAG);

        if (r) {
            // Restrict drawing to the reveal circle's bounding box first.
            int saveCount2 = canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.CLIP_SAVE_FLAG);
            canvas.clipRect(reveal.x - reveal.radius, reveal.y - reveal.radius, reveal.x + reveal.radius, reveal.y + reveal.radius);
            internalDispatchDraw(canvas);
            canvas.restoreToCount(saveCount2);
        } else {
            internalDispatchDraw(canvas);
        }

        // Punch out the corner and reveal mask paths from the layer.
        paint.setXfermode(Carbon.CLEAR_MODE);
        if (c)
            canvas.drawPath(cornersMask, paint);
        if (r)
            canvas.drawPath(reveal.mask, paint);
        canvas.restoreToCount(saveCount);
        paint.setXfermode(null);
    } else {
        internalDispatchDraw(canvas);
    }
    drawCalled = false;
}
/**
 * Shared child-drawing pass: sorts the child list by elevation, lets the superclass draw
 * the children, then paints the over-style ripple and the inset bars on top.
 */
private void internalDispatchDraw(@NonNull Canvas canvas) {
    // Sort so getChildDrawingOrder() yields elevation-based drawing order.
    Collections.sort(getViews(), new ElevationComparator());

    super.dispatchDraw(canvas);
    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Over)
        rippleDrawable.draw(canvas);

    // Paint opaque bars over the inset edges when an inset color is configured.
    if (insetColor != 0) {
        paint.setColor(insetColor);
        paint.setAlpha(255);
        if (insetLeft != 0)
            canvas.drawRect(0, 0, insetLeft, getHeight(), paint);
        if (insetTop != 0)
            canvas.drawRect(0, 0, getWidth(), insetTop, paint);
        if (insetRight != 0)
            canvas.drawRect(getWidth() - insetRight, 0, getWidth(), getHeight(), paint);
        if (insetBottom != 0)
            canvas.drawRect(0, getHeight() - insetBottom, getWidth(), getHeight(), paint);
    }
}
// Scratch rect for child bounds calculations.
RectF childRect = new RectF();

/**
 * Draws a child, first rendering its shadow (when software shadows apply: pre-Lollipop, or
 * whenever the child has a custom shadow color) and its borderless ripple on this parent's
 * canvas, transformed into the child's position.
 */
@Override
protected boolean drawChild(@NonNull Canvas canvas, @NonNull View child, long drawingTime) {
    // TODO: why isShown() returns false after being reattached?
    if (child instanceof ShadowView && (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT_WATCH || ((ShadowView) child).getElevationShadowColor() != null)) {
        ShadowView shadowView = (ShadowView) child;
        shadowView.drawShadow(canvas);
    }

    if (child instanceof RippleView) {
        RippleView rippleView = (RippleView) child;
        RippleDrawable rippleDrawable = rippleView.getRippleDrawable();
        if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless) {
            // Borderless ripples draw outside the child's bounds, so they are rendered
            // here on the parent's canvas, shifted into the child's coordinate space.
            int saveCount = canvas.save(Canvas.MATRIX_SAVE_FLAG);
            canvas.translate(child.getLeft(), child.getTop());
            canvas.concat(MatrixHelper.getMatrix(child));
            rippleDrawable.draw(canvas);
            canvas.restoreToCount(saveCount);
        }
    }

    return super.drawChild(canvas, child, drawingTime);
}
@Override
protected int getChildDrawingOrder(int childCount, int child) {
    // 'views' is declared elsewhere in this class; presumably it is the elevation-sorted
    // child list maintained alongside internalDispatchDraw — confirm against getViews().
    return views != null ? indexOfChild(views.get(child)) : child;
}

protected boolean isTransformedTouchPointInView(float x, float y, View child, PointF outLocalPoint) {
    // Hit-test against the child's hit rect (for carbon views this may include touch
    // margins, see getHitRect); outLocalPoint is not populated here.
    final Rect frame = new Rect();
    child.getHitRect(frame);
    return frame.contains((int) x, (int) y);
}
// -------------------------------
// corners
// -------------------------------

// Corner radius in pixels; 0 means square corners.
private float cornerRadius;
// Inverse-winding path used to punch out the corners on pre-Lollipop devices.
private Path cornersMask;

/** Returns the current corner radius in pixels. */
public float getCornerRadius() {
    return cornerRadius;
}

/**
 * Sets the corner radius in pixels. The mask/outline is rebuilt on the next layout pass
 * (see onLayout/initCorners).
 */
public void setCornerRadius(float cornerRadius) {
    this.cornerRadius = cornerRadius;
    invalidateShadow();
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
    super.onLayout(changed, left, top, right, bottom);

    if (!changed)
        return;

    // Bounds changed: the cached shadow bitmap is stale.
    invalidateShadow();

    if (getWidth() == 0 || getHeight() == 0)
        return;

    // Rebuild the corner mask/outline for the new size.
    initCorners();

    if (rippleDrawable != null)
        rippleDrawable.setBounds(0, 0, getWidth(), getHeight());
}
/**
 * Rebuilds the corner clipping for the current size: on Lollipop+ a hardware outline is
 * used; on older devices an inverse-winding mask path is built and punched out during
 * draw(). Called from onLayout once the view has a non-zero size.
 */
private void initCorners() {
    if (cornerRadius > 0) {
        // Clamp the radius so rounded corners never exceed a semicircle.
        cornerRadius = Math.min(cornerRadius, Math.min(getWidth(), getHeight()) / 2.0f);

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // Hardware clipping via the view outline.
            setClipToOutline(true);
            setOutlineProvider(ShadowShape.viewOutlineProvider);
        } else {
            // Software clipping: erase everything outside the rounded rect (inverse winding).
            cornersMask = new Path();
            cornersMask.addRoundRect(new RectF(0, 0, getWidth(), getHeight()), cornerRadius, cornerRadius, Path.Direction.CW);
            cornersMask.setFillType(Path.FillType.INVERSE_WINDING);
        }
    } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        // No rounding requested: restore the default rectangular outline.
        // (The original code repeated the SDK_INT >= LOLLIPOP check inside this branch,
        // which was redundant — the branch condition already guarantees it.)
        setOutlineProvider(ViewOutlineProvider.BOUNDS);
    }
}
/**
 * Pre-Lollipop software corner/reveal masking: the view is rendered into an offscreen
 * layer and the corner/reveal mask paths are erased from it with a CLEAR xfermode. Sets
 * drawCalled so dispatchDraw() does not apply the masks a second time.
 */
@Override
public void draw(@NonNull Canvas canvas) {
    drawCalled = true;
    boolean r = reveal != null;
    boolean c = cornerRadius > 0;
    if ((r || c) && getWidth() > 0 && getHeight() > 0 && Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT_WATCH) {
        int saveCount = canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.FULL_COLOR_LAYER_SAVE_FLAG);

        if (r) {
            // Restrict drawing to the reveal circle's bounding box first.
            int saveCount2 = canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.CLIP_SAVE_FLAG);
            canvas.clipRect(reveal.x - reveal.radius, reveal.y - reveal.radius, reveal.x + reveal.radius, reveal.y + reveal.radius);
            super.draw(canvas);
            canvas.restoreToCount(saveCount2);
        } else {
            super.draw(canvas);
        }

        // Punch out the corner and reveal mask paths from the layer.
        paint.setXfermode(Carbon.CLEAR_MODE);
        if (c)
            canvas.drawPath(cornersMask, paint);
        if (r)
            canvas.drawPath(reveal.mask, paint);
        canvas.restoreToCount(saveCount);
        paint.setXfermode(null);
    } else {
        super.draw(canvas);
    }
}
// -------------------------------
// ripple
// -------------------------------

// Current ripple drawable; drawn over the content, borderless (by the parent), or as background.
private RippleDrawable rippleDrawable;

@Override
public boolean dispatchTouchEvent(@NonNull MotionEvent event) {
    // Give the external dispatch listener a chance to consume the event first.
    if (onDispatchTouchListener != null && onDispatchTouchListener.onTouch(this, event))
        return true;
    // Anchor the ripple at the initial touch position.
    if (rippleDrawable != null && event.getAction() == MotionEvent.ACTION_DOWN)
        rippleDrawable.setHotspot(event.getX(), event.getY());
    return super.dispatchTouchEvent(event);
}
@Override
public RippleDrawable getRippleDrawable() {
    return rippleDrawable;
}

/**
 * Installs a new ripple drawable, detaching the previous one. Background-style ripples
 * wrap the view's background drawable, which is restored or replaced accordingly.
 */
@Override
public void setRippleDrawable(RippleDrawable newRipple) {
    // Detach the old ripple; if it wrapped the background, put the plain background back.
    if (rippleDrawable != null) {
        rippleDrawable.setCallback(null);
        if (rippleDrawable.getStyle() == RippleDrawable.Style.Background)
            super.setBackgroundDrawable(rippleDrawable.getBackground());
    }

    if (newRipple != null) {
        newRipple.setCallback(this);
        newRipple.setBounds(0, 0, getWidth(), getHeight());
        if (newRipple.getStyle() == RippleDrawable.Style.Background)
            super.setBackgroundDrawable((Drawable) newRipple);
    }

    rippleDrawable = newRipple;
}

@Override
protected boolean verifyDrawable(@NonNull Drawable who) {
    // Accept the ripple as a valid animation callback source in addition to the defaults.
    return super.verifyDrawable(who) || rippleDrawable == who;
}
// The invalidate/postInvalidate overrides below also invalidate the parent, because
// borderless ripples, shadows and rounded-corner effects of this view are rendered on
// the parent's canvas and must be redrawn together with this view.

@Override
public void invalidateDrawable(@NonNull Drawable drawable) {
    super.invalidateDrawable(drawable);
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).invalidate();

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).invalidate();
}

@Override
public void invalidate(@NonNull Rect dirty) {
    super.invalidate(dirty);
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).invalidate(dirty);

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).invalidate(dirty);
}

@Override
public void invalidate(int l, int t, int r, int b) {
    super.invalidate(l, t, r, b);
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).invalidate(l, t, r, b);

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).invalidate(l, t, r, b);
}

@Override
public void invalidate() {
    super.invalidate();
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).invalidate();

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).invalidate();
}

@Override
public void postInvalidateDelayed(long delayMilliseconds) {
    super.postInvalidateDelayed(delayMilliseconds);
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).postInvalidateDelayed(delayMilliseconds);

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).postInvalidateDelayed(delayMilliseconds);
}

@Override
public void postInvalidateDelayed(long delayMilliseconds, int left, int top, int right, int bottom) {
    super.postInvalidateDelayed(delayMilliseconds, left, top, right, bottom);
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).postInvalidateDelayed(delayMilliseconds, left, top, right, bottom);

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).postInvalidateDelayed(delayMilliseconds, left, top, right, bottom);
}

@Override
public void postInvalidate() {
    super.postInvalidate();
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).postInvalidate();

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).postInvalidate();
}

@Override
public void postInvalidate(int left, int top, int right, int bottom) {
    super.postInvalidate(left, top, right, bottom);
    if (getParent() == null || !(getParent() instanceof View))
        return;

    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Borderless)
        ((View) getParent()).postInvalidate(left, top, right, bottom);

    if (getElevation() > 0 || getCornerRadius() > 0)
        ((View) getParent()).postInvalidate(left, top, right, bottom);
}
@Override
public void setBackground(Drawable background) {
    // Route through setBackgroundDrawable so ripple backgrounds are handled uniformly.
    setBackgroundDrawable(background);
}

@Override
public void setBackgroundDrawable(Drawable background) {
    // Setting a RippleDrawable as the background installs it as the view's ripple instead.
    if (background instanceof RippleDrawable) {
        setRippleDrawable((RippleDrawable) background);
        return;
    }

    // A plain background replaces any background-style ripple.
    if (rippleDrawable != null && rippleDrawable.getStyle() == RippleDrawable.Style.Background) {
        rippleDrawable.setCallback(null);
        rippleDrawable = null;
    }
    super.setBackgroundDrawable(background);
}
// -------------------------------
// elevation
// -------------------------------

// Base elevation in pixels.
private float elevation = 0;
// Additional Z translation on top of the base elevation.
private float translationZ = 0;
// Cached software shadow for the current total elevation; regenerated when it changes.
private Shadow shadow;
// Optional shadow tint; when set, native elevation is forced to 0 and the shadow is drawn in software.
private ColorStateList shadowColor;
private PorterDuffColorFilter shadowColorFilter;
// Scratch rect used when masking the shadow under a translucent background.
private RectF shadowMaskRect = new RectF();

@Override
public float getElevation() {
    return elevation;
}

@Override
public synchronized void setElevation(float elevation) {
    if (elevation == this.elevation)
        return;
    // On Lollipop+ delegate to native elevation, unless a custom shadow color forces the
    // software shadow path (native elevation is then kept at 0).
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
        super.setElevation(shadowColor == null ? elevation : 0);
    this.elevation = elevation;
    // The software shadow is drawn by the parent, so the parent must redraw.
    if (getParent() != null)
        ((View) getParent()).postInvalidate();
}

@Override
public float getTranslationZ() {
    return translationZ;
}

public synchronized void setTranslationZ(float translationZ) {
    if (translationZ == this.translationZ)
        return;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
        super.setTranslationZ(shadowColor == null ? translationZ : 0);
    this.translationZ = translationZ;
    if (getParent() != null)
        ((View) getParent()).postInvalidate();
}
@Override
public ShadowShape getShadowShape() {
    // NOTE(review): cornerRadius (float) is compared against the integer division
    // getWidth() / 2, so a circular view with odd width may not be detected as CIRCLE —
    // confirm whether this is intentional.
    if (cornerRadius == getWidth() / 2 && getWidth() == getHeight())
        return ShadowShape.CIRCLE;
    if (cornerRadius > 0)
        return ShadowShape.ROUND_RECT;
    return ShadowShape.RECT;
}

@Override
public void setEnabled(boolean enabled) {
    // Plain pass-through; kept as an explicit override.
    super.setEnabled(enabled);
}

@Override
public boolean hasShadow() {
    // A shadow is drawn only for a laid-out view with a non-negligible total Z.
    return getElevation() + getTranslationZ() >= 0.01f && getWidth() > 0 && getHeight() > 0;
}
/**
 * Draws this view's software shadow onto the given (parent) canvas, applying the view's
 * transform. The shadow bitmap is cached per effective elevation and regenerated on change.
 */
@Override
public void drawShadow(Canvas canvas) {
    // Fade the shadow with the view's alpha combined with its background/tint alpha.
    float alpha = getAlpha() * Carbon.getDrawableAlpha(getBackground()) / 255.0f * Carbon.getBackgroundTintAlpha(this) / 255.0f;
    if (alpha == 0)
        return;

    if (!hasShadow())
        return;

    float z = getElevation() + getTranslationZ();
    if (shadow == null || shadow.elevation != z)
        shadow = ShadowGenerator.generateShadow(this, z);

    int saveCount = 0;
    boolean maskShadow = getBackground() != null && alpha != 1;
    if (maskShadow)
        saveCount = canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.ALL_SAVE_FLAG);

    paint.setAlpha((int) (Shadow.ALPHA * alpha));

    Matrix matrix = MatrixHelper.getMatrix(this);
    // The shadow is drawn twice: once offset downward by z/2 and once in place —
    // presumably key and ambient shadow passes (confirm against ShadowGenerator).
    canvas.save(Canvas.MATRIX_SAVE_FLAG);
    canvas.translate(this.getLeft(), this.getTop() + z / 2);
    canvas.concat(matrix);
    shadow.draw(canvas, this, paint, shadowColorFilter);
    canvas.restore();

    canvas.save(Canvas.MATRIX_SAVE_FLAG);
    canvas.translate(this.getLeft(), this.getTop());
    canvas.concat(matrix);
    shadow.draw(canvas, this, paint, shadowColorFilter);
    canvas.restore();

    if (maskShadow) {
        // Punch the view's own (rounded) bounds out of the shadow layer so a translucent
        // view does not show its shadow through itself.
        canvas.translate(this.getLeft(), this.getTop());
        canvas.concat(matrix);
        paint.setXfermode(Carbon.CLEAR_MODE);
        shadowMaskRect.set(0, 0, getWidth(), getHeight());
        canvas.drawRoundRect(shadowMaskRect, cornerRadius, cornerRadius, paint);
        paint.setXfermode(null);
        canvas.restoreToCount(saveCount);
    }
}
@Override
public void invalidateShadow() {
    // Drop the cached shadow bitmap and ask the parent (which draws the shadow) to redraw.
    shadow = null;
    if (getParent() != null && getParent() instanceof View)
        ((View) getParent()).postInvalidate();
}

/**
 * Sets a tint for the software shadow. A non-null color disables native elevation so the
 * tinted shadow can be drawn in software instead.
 */
@Override
public void setElevationShadowColor(ColorStateList shadowColor) {
    this.shadowColor = shadowColor;
    shadowColorFilter = shadowColor != null ? new PorterDuffColorFilter(shadowColor.getColorForState(getDrawableState(), shadowColor.getDefaultColor()), PorterDuff.Mode.MULTIPLY) : Shadow.DEFAULT_FILTER;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
        super.setElevation(shadowColor == null ? elevation : 0);
}

@Override
public void setElevationShadowColor(int color) {
    shadowColor = ColorStateList.valueOf(color);
    shadowColorFilter = new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY);
    // A tint is always set here, so native elevation is unconditionally disabled.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
        super.setElevation(0);
}

@Override
public ColorStateList getElevationShadowColor() {
    return shadowColor;
}
// -------------------------------
// touch margin
// -------------------------------

// Extra touchable area around the view's bounds, in pixels per edge.
private Rect touchMargin = new Rect();

@Override
public void setTouchMargin(int left, int top, int right, int bottom) {
    touchMargin.set(left, top, right, bottom);
}

@Override
public void setTouchMarginLeft(int margin) {
    touchMargin.left = margin;
}

@Override
public void setTouchMarginTop(int margin) {
    touchMargin.top = margin;
}

@Override
public void setTouchMarginRight(int margin) {
    touchMargin.right = margin;
}

@Override
public void setTouchMarginBottom(int margin) {
    touchMargin.bottom = margin;
}

@Override
public Rect getTouchMargin() {
    return touchMargin;
}

/** Expands the reported hit rect by the touch margins, enlarging the touchable area. */
public void getHitRect(@NonNull Rect outRect) {
    // touchMargin is initialized at declaration, so this null check is purely defensive.
    if (touchMargin == null) {
        super.getHitRect(outRect);
        return;
    }
    outRect.set(getLeft() - touchMargin.left, getTop() - touchMargin.top, getRight() + touchMargin.right, getBottom() + touchMargin.bottom);
}
// -------------------------------
// state animators
// -------------------------------

// Animates visual properties in response to drawable-state changes; fed from
// drawableStateChanged() below.
private StateAnimator stateAnimator = new StateAnimator(this);

@Override
public StateAnimator getStateAnimator() {
    return stateAnimator;
}
@Override
protected void drawableStateChanged() {
    super.drawableStateChanged();
    // Forward the new drawable state to the ripple (unless it only paints the
    // background) and to the state animator.
    int[] state = getDrawableState();
    RippleDrawable ripple = rippleDrawable;
    if (ripple != null && ripple.getStyle() != RippleDrawable.Style.Background)
        ripple.setState(state);
    StateAnimator sa = stateAnimator;
    if (sa != null)
        sa.setState(state);
}
// -------------------------------
// animations
// -------------------------------

// Styles played when the view is shown/hidden via setVisibility(); None disables.
private AnimUtils.Style inAnim = AnimUtils.Style.None, outAnim = AnimUtils.Style.None;
// Currently running show/hide animator; null when idle.
private Animator animator;
public void setVisibility(final int visibility) {
    // Becoming visible (or a hide animation is still in flight): cancel any
    // running animator, optionally play the "in" animation, and show immediately.
    if (visibility == View.VISIBLE && (getVisibility() != View.VISIBLE || animator != null)) {
        if (animator != null)
            animator.cancel();
        if (inAnim != AnimUtils.Style.None) {
            animator = AnimUtils.animateIn(this, inAnim, new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator a) {
                    animator = null;
                    clearAnimation();
                }
            });
        }
        super.setVisibility(visibility);
    } else if (visibility != View.VISIBLE && (getVisibility() == View.VISIBLE || animator != null)) {
        // Becoming hidden: with no "out" animation hide immediately; otherwise
        // play it and apply the new visibility only if the animation ran to
        // completion (fraction == 1, i.e. it was not cancelled mid-flight).
        if (animator != null)
            animator.cancel();
        if (outAnim == AnimUtils.Style.None) {
            super.setVisibility(visibility);
            return;
        }
        animator = AnimUtils.animateOut(this, outAnim, new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator a) {
                if (((ValueAnimator) a).getAnimatedFraction() == 1)
                    // Qualified super call bypasses this override so the hide is
                    // not re-animated.
                    DrawerLayout.super.setVisibility(visibility);
                animator = null;
                clearAnimation();
            }
        });
    }
}
/**
 * Changes visibility without playing the configured in/out animations.
 */
public void setVisibilityImmediate(final int visibility) {
    super.setVisibility(visibility);
}

/**
 * Returns the currently running show/hide animator, or null when idle.
 */
public Animator getAnimator() {
    return animator;
}

public AnimUtils.Style getOutAnimation() {
    return outAnim;
}

public void setOutAnimation(AnimUtils.Style outAnim) {
    this.outAnim = outAnim;
}

public AnimUtils.Style getInAnimation() {
    return inAnim;
}

public void setInAnimation(AnimUtils.Style inAnim) {
    this.inAnim = inAnim;
}
// -------------------------------
// insets
// -------------------------------

// INSET_NULL (declared elsewhere in this class) marks "not set yet"; the first
// fitSystemWindows() pass fills unset sides from the system insets.
int insetLeft = INSET_NULL, insetTop = INSET_NULL, insetRight = INSET_NULL, insetBottom = INSET_NULL;
int insetColor;
private OnInsetsChangedListener onInsetsChangedListener;

public int getInsetColor() {
    return insetColor;
}

public void setInsetColor(int insetsColor) {
    this.insetColor = insetsColor;
}

public void setInset(int left, int top, int right, int bottom) {
    insetLeft = left;
    insetTop = top;
    insetRight = right;
    insetBottom = bottom;
}

public int getInsetLeft() {
    return insetLeft;
}

public void setInsetLeft(int insetLeft) {
    this.insetLeft = insetLeft;
}

public int getInsetTop() {
    return insetTop;
}

public void setInsetTop(int insetTop) {
    this.insetTop = insetTop;
}

public int getInsetRight() {
    return insetRight;
}

public void setInsetRight(int insetRight) {
    this.insetRight = insetRight;
}

public int getInsetBottom() {
    return insetBottom;
}

public void setInsetBottom(int insetBottom) {
    this.insetBottom = insetBottom;
}

@Override
protected boolean fitSystemWindows(@NonNull Rect insets) {
    // Sides still at the INSET_NULL sentinel adopt the system-provided inset;
    // explicitly configured sides win over the system values.
    if (insetLeft == INSET_NULL)
        insetLeft = insets.left;
    if (insetTop == INSET_NULL)
        insetTop = insets.top;
    if (insetRight == INSET_NULL)
        insetRight = insets.right;
    if (insetBottom == INSET_NULL)
        insetBottom = insets.bottom;
    // Rewrite the rect so children see the effective (possibly overridden) insets.
    insets.set(insetLeft, insetTop, insetRight, insetBottom);
    if (onInsetsChangedListener != null)
        onInsetsChangedListener.onInsetsChanged();
    postInvalidate();
    return super.fitSystemWindows(insets);
}

public void setOnInsetsChangedListener(OnInsetsChangedListener onInsetsChangedListener) {
    this.onInsetsChangedListener = onInsetsChangedListener;
}
// -------------------------------
// ViewGroup utils
// -------------------------------

// NOTE(review): getViews() reuses this single list across calls, so a previously
// returned list is cleared and refilled by the next call — confirm callers never
// hold on to the result. Not thread-safe.
List<View> views = new ArrayList<>();

public List<View> getViews() {
    views.clear();
    for (int i = 0; i < getChildCount(); i++)
        views.add(getChildAt(i));
    return views;
}

public void setOnDispatchTouchListener(OnTouchListener onDispatchTouchListener) {
    this.onDispatchTouchListener = onDispatchTouchListener;
}
/**
 * Collects every descendant view whose id equals the given id, in breadth-first
 * order. Unlike findViewById, this returns all matches rather than the first.
 */
public List<View> findViewsById(int id) {
    List<View> matches = new ArrayList<>();
    List<ViewGroup> pending = new ArrayList<>();
    pending.add(this);
    while (!pending.isEmpty()) {
        ViewGroup current = pending.remove(0);
        int childCount = current.getChildCount();
        for (int i = 0; i < childCount; i++) {
            View child = current.getChildAt(i);
            if (child.getId() == id)
                matches.add(child);
            if (child instanceof ViewGroup)
                pending.add((ViewGroup) child);
        }
    }
    return matches;
}
/**
 * Collects every descendant view whose tag equals the given tag, in
 * breadth-first order.
 *
 * @param tag the tag to match; a null tag matches nothing
 * @return all matching descendants (possibly empty, never null)
 */
public List<View> findViewsWithTag(Object tag) {
    List<View> result = new ArrayList<>();
    // Guard: the original tag.equals(...) call below would NPE on a null tag;
    // treat null as "no match" instead.
    if (tag == null)
        return result;
    List<ViewGroup> groups = new ArrayList<>();
    groups.add(this);
    while (!groups.isEmpty()) {
        ViewGroup group = groups.remove(0);
        for (int i = 0; i < group.getChildCount(); i++) {
            View child = group.getChildAt(i);
            if (tag.equals(child.getTag()))
                result.add(child);
            if (child instanceof ViewGroup)
                groups.add((ViewGroup) child);
        }
    }
    return result;
}
// -------------------------------
// maximum width & height
// -------------------------------

// Upper bounds applied in onMeasure(); MAX_VALUE means "unbounded".
int maxWidth = Integer.MAX_VALUE, maxHeight = Integer.MAX_VALUE;

@Override
public int getMaximumWidth() {
    return maxWidth;
}

@Override
public void setMaximumWidth(int maxWidth) {
    this.maxWidth = maxWidth;
    requestLayout();
}

@Override
public int getMaximumHeight() {
    return maxHeight;
}

@Override
public void setMaximumHeight(int maxHeight) {
    this.maxHeight = maxHeight;
    requestLayout();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    // If the first pass exceeded a configured maximum, clamp the offending
    // dimension(s) to EXACTLY the maximum and re-measure once.
    boolean tooWide = getMeasuredWidth() > maxWidth;
    boolean tooTall = getMeasuredHeight() > maxHeight;
    if (tooWide || tooTall) {
        if (tooWide)
            widthMeasureSpec = MeasureSpec.makeMeasureSpec(maxWidth, MeasureSpec.EXACTLY);
        if (tooTall)
            heightMeasureSpec = MeasureSpec.makeMeasureSpec(maxHeight, MeasureSpec.EXACTLY);
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
}
// -------------------------------
// transformations
// -------------------------------
// Each property delegates to wrap(this) when NEEDS_PROXY is set (presumably a
// pre-honeycomb animation proxy — TODO confirm against the wrap() helper),
// otherwise to the framework implementation. Setters additionally invalidate
// the parent when this view casts a shadow (elevation + translationZ > 0),
// since the parent draws the shadow.

public float getAlpha() {
    return NEEDS_PROXY ? wrap(this).getAlpha() : super.getAlpha();
}

public void setAlpha(float alpha) {
    if (NEEDS_PROXY) {
        wrap(this).setAlpha(alpha);
    } else {
        super.setAlpha(alpha);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getPivotX() {
    return NEEDS_PROXY ? wrap(this).getPivotX() : super.getPivotX();
}

public void setPivotX(float pivotX) {
    if (NEEDS_PROXY) {
        wrap(this).setPivotX(pivotX);
    } else {
        super.setPivotX(pivotX);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getPivotY() {
    return NEEDS_PROXY ? wrap(this).getPivotY() : super.getPivotY();
}

public void setPivotY(float pivotY) {
    if (NEEDS_PROXY) {
        wrap(this).setPivotY(pivotY);
    } else {
        super.setPivotY(pivotY);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getRotation() {
    return NEEDS_PROXY ? wrap(this).getRotation() : super.getRotation();
}

public void setRotation(float rotation) {
    if (NEEDS_PROXY) {
        wrap(this).setRotation(rotation);
    } else {
        super.setRotation(rotation);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getRotationX() {
    return NEEDS_PROXY ? wrap(this).getRotationX() : super.getRotationX();
}

public void setRotationX(float rotationX) {
    if (NEEDS_PROXY) {
        wrap(this).setRotationX(rotationX);
    } else {
        super.setRotationX(rotationX);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getRotationY() {
    return NEEDS_PROXY ? wrap(this).getRotationY() : super.getRotationY();
}

public void setRotationY(float rotationY) {
    if (NEEDS_PROXY) {
        wrap(this).setRotationY(rotationY);
    } else {
        super.setRotationY(rotationY);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getScaleX() {
    return NEEDS_PROXY ? wrap(this).getScaleX() : super.getScaleX();
}

public void setScaleX(float scaleX) {
    if (NEEDS_PROXY) {
        wrap(this).setScaleX(scaleX);
    } else {
        super.setScaleX(scaleX);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getScaleY() {
    return NEEDS_PROXY ? wrap(this).getScaleY() : super.getScaleY();
}

public void setScaleY(float scaleY) {
    if (NEEDS_PROXY) {
        wrap(this).setScaleY(scaleY);
    } else {
        super.setScaleY(scaleY);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getTranslationX() {
    return NEEDS_PROXY ? wrap(this).getTranslationX() : super.getTranslationX();
}

public void setTranslationX(float translationX) {
    if (NEEDS_PROXY) {
        wrap(this).setTranslationX(translationX);
    } else {
        super.setTranslationX(translationX);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getTranslationY() {
    return NEEDS_PROXY ? wrap(this).getTranslationY() : super.getTranslationY();
}

public void setTranslationY(float translationY) {
    if (NEEDS_PROXY) {
        wrap(this).setTranslationY(translationY);
    } else {
        super.setTranslationY(translationY);
    }
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}

public float getX() {
    return NEEDS_PROXY ? wrap(this).getX() : super.getX();
}
public void setX(float x) {
    if (NEEDS_PROXY) {
        wrap(this).setX(x);
    } else {
        super.setX(x);
    }
    // Consistency fix: every sibling transformation setter (setY, setTranslationX,
    // setScaleX, ...) invalidates the shadow-drawing parent when this view casts
    // a shadow; setX was the only one missing it, leaving a stale shadow after a
    // horizontal move.
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}
public float getY() {
    return NEEDS_PROXY ? wrap(this).getY() : super.getY();
}

public void setY(float y) {
    if (NEEDS_PROXY) {
        wrap(this).setY(y);
    } else {
        super.setY(y);
    }
    // The parent draws this view's shadow, so repaint it when one is visible.
    if (elevation + translationZ > 0 && getParent() != null && getParent() instanceof View)
        ((View) getParent()).invalidate();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shindig.gadgets.servlet;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import org.apache.shindig.auth.SecurityToken;
import org.apache.shindig.auth.SecurityTokenCodec;
import org.apache.shindig.auth.SecurityTokenException;
import org.apache.shindig.common.uri.Uri;
import org.apache.shindig.gadgets.Gadget;
import org.apache.shindig.gadgets.GadgetContext;
import org.apache.shindig.gadgets.RenderingContext;
import org.apache.shindig.gadgets.http.HttpResponse;
import org.apache.shindig.gadgets.process.ProcessingException;
import org.apache.shindig.gadgets.process.Processor;
import org.apache.shindig.gadgets.spec.Feature;
import org.apache.shindig.gadgets.spec.GadgetSpec;
import org.apache.shindig.gadgets.spec.LinkSpec;
import org.apache.shindig.gadgets.spec.ModulePrefs;
import org.apache.shindig.gadgets.spec.UserPref;
import org.apache.shindig.gadgets.spec.View;
import org.apache.shindig.gadgets.spec.UserPref.EnumValuePair;
import org.apache.shindig.gadgets.uri.IframeUriManager;
import org.apache.shindig.protocol.conversion.BeanDelegator;
import org.apache.shindig.protocol.conversion.BeanFilter;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Service that interfaces with the system to provide information about gadgets.
 *
 * @since 2.0.0
 */
public class GadgetsHandlerService {

    // Map shindig data classes to the API interfaces they are exposed through.
    @VisibleForTesting
    static final Map<Class<?>, Class<?>> apiClasses =
        new ImmutableMap.Builder<Class<?>, Class<?>>()
            .put(View.class, GadgetsHandlerApi.View.class)
            .put(UserPref.class, GadgetsHandlerApi.UserPref.class)
            .put(EnumValuePair.class, GadgetsHandlerApi.EnumValuePair.class)
            .put(ModulePrefs.class, GadgetsHandlerApi.ModulePrefs.class)
            .put(Feature.class, GadgetsHandlerApi.Feature.class)
            .put(LinkSpec.class, GadgetsHandlerApi.LinkSpec.class)
            // Enums
            .put(View.ContentType.class, GadgetsHandlerApi.ViewContentType.class)
            .put(UserPref.DataType.class, GadgetsHandlerApi.UserPrefDataType.class)
            .build();

    // Provide mapping for internal enums to api enums
    @VisibleForTesting
    static final Map<Enum<?>, Enum<?>> enumConversionMap =
        new ImmutableMap.Builder<Enum<?>, Enum<?>>()
            // View.ContentType mapping
            .putAll(BeanDelegator.createDefaultEnumMap(View.ContentType.class,
                GadgetsHandlerApi.ViewContentType.class))
            // UserPref.DataType mapping
            .putAll(BeanDelegator.createDefaultEnumMap(UserPref.DataType.class,
                GadgetsHandlerApi.UserPrefDataType.class))
            .build();

    protected final Processor processor;
    protected final IframeUriManager iframeUriManager;
    protected final SecurityTokenCodec securityTokenCodec;
    protected final BeanDelegator beanDelegator;
    protected final BeanFilter beanFilter;

    @Inject
    public GadgetsHandlerService(Processor processor,
        IframeUriManager iframeUriManager, SecurityTokenCodec securityTokenCodec,
        BeanFilter beanFilter) {
        this.processor = processor;
        this.iframeUriManager = iframeUriManager;
        this.securityTokenCodec = securityTokenCodec;
        this.beanFilter = beanFilter;
        this.beanDelegator = new BeanDelegator(apiClasses, enumConversionMap);
    }

    /**
     * Get gadget metadata information and iframe url. Supports filtering of fields.
     *
     * @param request request parameters
     * @return gadget metadata and iframe url
     * @throws ProcessingException if a required parameter is missing or the gadget
     *         cannot be processed
     */
    public GadgetsHandlerApi.MetadataResponse getMetadata(GadgetsHandlerApi.MetadataRequest request)
        throws ProcessingException {
        // Typo fix: error messages previously read "paramater".
        if (request.getUrl() == null) {
            throw new ProcessingException("Missing url parameter", HttpResponse.SC_BAD_REQUEST);
        }
        if (request.getContainer() == null) {
            throw new ProcessingException("Missing container parameter", HttpResponse.SC_BAD_REQUEST);
        }
        if (request.getFields() == null) {
            throw new ProcessingException("Missing fields parameter", HttpResponse.SC_BAD_REQUEST);
        }
        Set<String> fields = beanFilter.processBeanFields(request.getFields());

        GadgetContext context = new MetadataGadgetContext(request);
        Gadget gadget = processor.process(context);
        // Only compute the iframe url when the caller asked for it (or for all fields).
        String iframeUrl =
            (fields.contains("iframeurl") || fields.contains(BeanFilter.ALL_FIELDS)) ?
                iframeUriManager.makeRenderingUri(gadget).toString() : null;
        return createMetadataResponse(context.getUrl(), gadget.getSpec(), iframeUrl, fields);
    }

    /**
     * Create security token.
     *
     * @param request token parameters (gadget, owner and viewer)
     * @return security token response, filtered by the requested fields
     * @throws SecurityTokenException if token encoding fails
     * @throws ProcessingException if a required parameter is missing
     */
    public GadgetsHandlerApi.TokenResponse getToken(GadgetsHandlerApi.TokenRequest request)
        throws SecurityTokenException, ProcessingException {
        if (request.getUrl() == null) {
            throw new ProcessingException("Missing url parameter", HttpResponse.SC_BAD_REQUEST);
        }
        if (request.getContainer() == null) {
            throw new ProcessingException("Missing container parameter", HttpResponse.SC_BAD_REQUEST);
        }
        if (request.getFields() == null) {
            throw new ProcessingException("Missing fields parameter", HttpResponse.SC_BAD_REQUEST);
        }
        Set<String> fields = beanFilter.processBeanFields(request.getFields());

        SecurityToken tokenData = convertToken(request.getToken(), request.getContainer(),
            request.getUrl().toString());
        String token = securityTokenCodec.encodeToken(tokenData);
        return createTokenResponse(request.getUrl(), token, fields);
    }

    /**
     * GadgetContext for metadata request. Used by the gadget processor.
     */
    protected class MetadataGadgetContext extends GadgetContext {

        private final GadgetsHandlerApi.MetadataRequest request;
        private final SecurityToken token;

        public MetadataGadgetContext(GadgetsHandlerApi.MetadataRequest request) {
            this.request = request;
            this.token = convertToken(
                request.getToken(), request.getContainer(), request.getUrl().toString());
        }

        @Override
        public Uri getUrl() {
            return request.getUrl();
        }

        @Override
        public String getContainer() {
            return request.getContainer();
        }

        @Override
        public RenderingContext getRenderingContext() {
            return RenderingContext.METADATA;
        }

        @Override
        public int getModuleId() {
            return 1;
        }

        @Override
        public Locale getLocale() {
            return request.getLocale();
        }

        @Override
        public boolean getIgnoreCache() {
            return request.getIgnoreCache();
        }

        @Override
        public boolean getDebug() {
            return request.getDebug();
        }

        @Override
        public String getView() {
            return request.getView();
        }

        @Override
        public SecurityToken getToken() {
            return token;
        }
    }

    /**
     * Wraps raw token data as a SecurityToken delegate with container/app info
     * filled in. Returns null when no token data was supplied.
     */
    private SecurityToken convertToken(GadgetsHandlerApi.TokenData token,
        String container, String url) {
        if (token == null) {
            return null;
        }
        return beanDelegator.createDelegator(token, SecurityToken.class,
            ImmutableMap.<String, Object>of("container", container,
                "appid", url, "appurl", url));
    }

    /**
     * Builds a minimal response carrying only the url and an error message.
     */
    public GadgetsHandlerApi.BaseResponse createBaseResponse(Uri url, String error) {
        return beanDelegator.createDelegator(error, GadgetsHandlerApi.BaseResponse.class,
            ImmutableMap.<String, Object>of("url", url, "error", error));
    }

    // Builds the metadata response from the gadget spec, filtered to the requested fields.
    private GadgetsHandlerApi.MetadataResponse createMetadataResponse(
        Uri url, GadgetSpec spec, String iframeUrl, Set<String> fields) {
        return (GadgetsHandlerApi.MetadataResponse) beanFilter.createFilteredBean(
            beanDelegator.createDelegator(spec, GadgetsHandlerApi.MetadataResponse.class,
                ImmutableMap.<String, Object>of(
                    "url", url, "error", BeanDelegator.NULL,
                    "iframeurl", BeanDelegator.nullable(iframeUrl))),
            fields);
    }

    // Builds the token response, filtered to the requested fields.
    private GadgetsHandlerApi.TokenResponse createTokenResponse(
        Uri url, String token, Set<String> fields) {
        return (GadgetsHandlerApi.TokenResponse) beanFilter.createFilteredBean(
            beanDelegator.createDelegator("empty", GadgetsHandlerApi.TokenResponse.class,
                ImmutableMap.<String, Object>of("url", url, "error", BeanDelegator.NULL,
                    "token", BeanDelegator.nullable(token))),
            fields);
    }
}
| |
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.doclets.internal.toolkit.builders;
import java.util.*;
import com.sun.javadoc.*;
import com.sun.tools.doclets.internal.toolkit.*;
import com.sun.tools.doclets.internal.toolkit.util.*;
/**
 * Builds documentation for annotation type fields.
 *
 * <p><b>This is NOT part of any supported API.
 * If you write code that depends on this, you do so at your own risk.
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 *
 * @author Bhavesh Patel
 * @since 1.8
 */
public class AnnotationTypeFieldBuilder extends AbstractMemberBuilder {

    /**
     * The annotation type whose members are being documented.
     */
    protected ClassDoc classDoc;

    /**
     * The visible members for the given class.
     */
    protected VisibleMemberMap visibleMemberMap;

    /**
     * The writer to output the member documentation.
     */
    protected AnnotationTypeFieldWriter writer;

    /**
     * The list of members being documented.
     */
    protected List<ProgramElementDoc> members;

    /**
     * The index of the current member that is being documented at this point
     * in time.
     */
    protected int currentMemberIndex;

    /**
     * Construct a new AnnotationTypeFieldsBuilder.
     *
     * @param context the build context.
     * @param classDoc the class whose members are being documented.
     * @param writer the doclet specific writer.
     * @param memberType the type of member that is being documented.
     */
    protected AnnotationTypeFieldBuilder(Context context,
            ClassDoc classDoc,
            AnnotationTypeFieldWriter writer,
            int memberType) {
        super(context);
        this.classDoc = classDoc;
        this.writer = writer;
        this.visibleMemberMap = new VisibleMemberMap(classDoc, memberType,
            configuration);
        // Copy so that sorting below does not mutate the map's own list.
        this.members = new ArrayList<ProgramElementDoc>(
            this.visibleMemberMap.getMembersFor(classDoc));
        if (configuration.getMemberComparator() != null) {
            Collections.sort(this.members, configuration.getMemberComparator());
        }
    }

    /**
     * Construct a new AnnotationTypeFieldBuilder.
     *
     * @param context the build context.
     * @param classDoc the class whose members are being documented.
     * @param writer the doclet specific writer.
     * @return a builder configured for annotation type fields.
     */
    public static AnnotationTypeFieldBuilder getInstance(
            Context context, ClassDoc classDoc,
            AnnotationTypeFieldWriter writer) {
        return new AnnotationTypeFieldBuilder(context, classDoc,
            writer, VisibleMemberMap.ANNOTATION_TYPE_FIELDS);
    }

    /**
     * {@inheritDoc}
     */
    public String getName() {
        return "AnnotationTypeFieldDetails";
    }

    /**
     * Returns a list of members that will be documented for the given class.
     * This information can be used for doclet specific documentation
     * generation.
     *
     * @param classDoc the {@link ClassDoc} we want to check.
     * @return a list of members that will be documented.
     */
    public List<ProgramElementDoc> members(ClassDoc classDoc) {
        return visibleMemberMap.getMembersFor(classDoc);
    }

    /**
     * Returns the visible member map for the members of this class.
     *
     * @return the visible member map for the members of this class.
     */
    public VisibleMemberMap getVisibleMemberMap() {
        return visibleMemberMap;
    }

    /**
     * Returns whether this builder has any members to document.
     *
     * @return true if there is at least one member to document.
     */
    public boolean hasMembersToDocument() {
        return members.size() > 0;
    }

    /**
     * Build the annotation type field documentation.
     *
     * @param node the XML element that specifies which components to document
     * @param memberDetailsTree the content tree to which the documentation will be added
     */
    public void buildAnnotationTypeField(XMLNode node, Content memberDetailsTree) {
        buildAnnotationTypeMember(node, memberDetailsTree);
    }

    /**
     * Build the member documentation.
     *
     * @param node the XML element that specifies which components to document
     * @param memberDetailsTree the content tree to which the documentation will be added
     */
    public void buildAnnotationTypeMember(XMLNode node, Content memberDetailsTree) {
        // No-op when no doclet-specific writer was supplied.
        if (writer == null) {
            return;
        }
        int size = members.size();
        if (size > 0) {
            writer.addAnnotationFieldDetailsMarker(memberDetailsTree);
            for (currentMemberIndex = 0; currentMemberIndex < size;
                    currentMemberIndex++) {
                Content detailsTree = writer.getMemberTreeHeader();
                writer.addAnnotationDetailsTreeHeader(classDoc, detailsTree);
                Content annotationDocTree = writer.getAnnotationDocTreeHeader(
                        (MemberDoc) members.get(currentMemberIndex),
                        detailsTree);
                // Delegates to the build* methods below via the XML build plan.
                buildChildren(node, annotationDocTree);
                detailsTree.addContent(writer.getAnnotationDoc(
                        annotationDocTree, (currentMemberIndex == size - 1)));
                memberDetailsTree.addContent(writer.getAnnotationDetails(detailsTree));
            }
        }
    }

    /**
     * Build the signature.
     *
     * @param node the XML element that specifies which components to document
     * @param annotationDocTree the content tree to which the documentation will be added
     */
    public void buildSignature(XMLNode node, Content annotationDocTree) {
        annotationDocTree.addContent(
                writer.getSignature((MemberDoc) members.get(currentMemberIndex)));
    }

    /**
     * Build the deprecation information.
     *
     * @param node the XML element that specifies which components to document
     * @param annotationDocTree the content tree to which the documentation will be added
     */
    public void buildDeprecationInfo(XMLNode node, Content annotationDocTree) {
        writer.addDeprecated((MemberDoc) members.get(currentMemberIndex),
                annotationDocTree);
    }

    /**
     * Build the comments for the member. Do nothing if
     * {@link Configuration#nocomment} is set to true.
     *
     * @param node the XML element that specifies which components to document
     * @param annotationDocTree the content tree to which the documentation will be added
     */
    public void buildMemberComments(XMLNode node, Content annotationDocTree) {
        if(! configuration.nocomment){
            writer.addComments((MemberDoc) members.get(currentMemberIndex),
                    annotationDocTree);
        }
    }

    /**
     * Build the tag information.
     *
     * @param node the XML element that specifies which components to document
     * @param annotationDocTree the content tree to which the documentation will be added
     */
    public void buildTagInfo(XMLNode node, Content annotationDocTree) {
        writer.addTags((MemberDoc) members.get(currentMemberIndex),
                annotationDocTree);
    }

    /**
     * Return the annotation type field writer for this builder.
     *
     * @return the annotation type field writer for this builder.
     */
    public AnnotationTypeFieldWriter getWriter() {
        return writer;
    }
}
| |
package org.drools.core.command.runtime.rule;
import org.drools.core.command.impl.ContextImpl;
import org.drools.core.command.impl.DefaultCommandService;
import org.drools.core.command.impl.FixedKnowledgeCommandContext;
import org.drools.core.common.InternalFactHandle;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import org.kie.api.runtime.rule.FactHandle;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@SuppressWarnings("unchecked")
public class GetFactHandlesCommandTest {

    private StatefulKnowledgeSession ksession;
    private DefaultCommandService commandService;
    private Random random = new Random();

    @Before
    public void setup() {
        KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        ksession = kbase.newStatefulKnowledgeSession();
        FixedKnowledgeCommandContext kContext
            = new FixedKnowledgeCommandContext( new ContextImpl( "ksession", null ), null, null, this.ksession, null );
        commandService = new DefaultCommandService(kContext);
    }

    @After
    public void cleanUp() {
        ksession.dispose();
    }

    @Test
    public void getEmptyFactHandlesTest() {
        GetFactHandlesCommand command = new GetFactHandlesCommand();
        Object result = commandService.execute(command);
        if( result instanceof Collection<?> ) {
            assertNotNull(result);
            assertTrue(((Collection<?>) result).isEmpty());
        }
        else {
            fail("result of command was NOT a collection of FactHandles");
        }
    }

    @Test
    public void getOneFactHandleTest() {
        String randomFact = "" + random.nextLong();
        ksession.insert(randomFact);
        GetFactHandlesCommand command = new GetFactHandlesCommand();
        Object result = commandService.execute(command);

        verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result);
    }

    @Test
    public void getMultipleFactHandleTest() {
        HashSet<String> factSet = new HashSet<String>();
        int numFacts = 4;
        for( int i = 0; i < numFacts; ++i ) {
            factSet.add("" + random.nextInt());
        }
        for( String fact : factSet ) {
            ksession.insert(fact);
        }

        GetFactHandlesCommand command = new GetFactHandlesCommand();
        Object result = commandService.execute(command);

        verifyThatCollectionContainsTheseFactHandle(factSet, result);
    }

    @Test
    public void getEmptyDisconnectedFactHandlesTest() {
        GetFactHandlesCommand command = new GetFactHandlesCommand(true);
        Object result = commandService.execute(command);
        if( result instanceof Collection<?> ) {
            assertNotNull(result);
            assertTrue(((Collection<?>) result).isEmpty());
        }
        else {
            fail("result of command was NOT a collection of FactHandles");
        }
    }

    @Test
    public void getOneDisconnectedFactHandleTest() {
        System.out.println( Thread.currentThread().getStackTrace()[1].getMethodName() );
        String randomFact = "" + random.nextLong();
        ksession.insert(randomFact);

        // Retrieve and verify fact handle collections
        GetFactHandlesCommand command = new GetFactHandlesCommand(false);
        Object result = commandService.execute(command);
        verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result);
        FactHandle factHandle = (FactHandle) ((Collection<FactHandle>) result).toArray()[0];

        command = new GetFactHandlesCommand(false);
        result = commandService.execute(command);
        verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result);
        FactHandle connectedFactHandle = (FactHandle) ((Collection<FactHandle>) result).toArray()[0];

        command = new GetFactHandlesCommand(true);
        result = commandService.execute(command);
        verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result);
        FactHandle disconnectedFactHandle = (FactHandle) ((Collection<FactHandle>) result).toArray()[0];

        // A connected handle is the same reference; a disconnected one is a copy.
        assertTrue( factHandle == connectedFactHandle );
        assertTrue( ! (factHandle == disconnectedFactHandle) );
    }

    @Test
    public void getMultipleDisconnectedFactHandleTest() {
        System.out.println( "\nTest: " + Thread.currentThread().getStackTrace()[1].getMethodName() );
        HashSet<String> factSet = new HashSet<String>();
        int numFacts = 4;
        for( int i = 0; i < numFacts; ++i ) {
            factSet.add("" + random.nextInt());
        }
        for( String fact : factSet ) {
            ksession.insert(fact);
        }

        GetFactHandlesCommand command = new GetFactHandlesCommand(false);
        Object result = commandService.execute(command);
        verifyThatCollectionContainsTheseFactHandle(factSet, result);
        Collection<FactHandle> factHandles = ((Collection<FactHandle>) result);

        command = new GetFactHandlesCommand(false);
        result = commandService.execute(command);
        verifyThatCollectionContainsTheseFactHandle(factSet, result);
        Collection<FactHandle> connectedFactHandles = ((Collection<FactHandle>) result);

        command = new GetFactHandlesCommand(true);
        result = commandService.execute(command);
        verifyThatCollectionContainsTheseFactHandle(factSet, result);
        Collection<FactHandle> disconnectedFactHandles = ((Collection<FactHandle>) result);

        // Every connected handle must be reference-identical to one of the original
        // handles. (The original code wrapped this in a redundant outer loop that
        // repeated the same idempotent pass connectedFactHandles.size() times.)
        HashSet<FactHandle> factHandlesCopy = new HashSet<FactHandle>(factHandles);
        for( Object connectedFact : connectedFactHandles ) {
            Iterator<FactHandle> iter = factHandlesCopy.iterator();
            while( iter.hasNext() ) {
                Object fact = iter.next();
                if( fact == connectedFact ) {
                    iter.remove();
                }
            }
        }
        assertTrue( factHandlesCopy.isEmpty() );

        // Every disconnected handle must be a distinct reference from all originals.
        for( Object disconnectedFact : disconnectedFactHandles ) {
            for( Object fact : factHandles ) {
                assertTrue( ! (fact == disconnectedFact) );
            }
        }
        assertTrue( factHandles.size() == disconnectedFactHandles.size() );
    }

    /**
     * Helper methods
     */
    private void verifyThatCollectionContains1FactHandleWithThisFact(String fact, Object collection) {
        if( collection instanceof Collection<?> ) {
            Collection<FactHandle> factHandles = null;
            try {
                factHandles = (Collection<FactHandle>) collection;
            }
            catch( Exception e ) {
                // Typo fix: message previously read "Colleciton".
                fail( "Collection was not a Collection<FactHandle> " + e.getMessage());
            }
            assertTrue(! factHandles.isEmpty());
            assertTrue(factHandles.size() == 1);
            InternalFactHandle factHandle = (InternalFactHandle) factHandles.toArray()[0];
            assertTrue(fact.equals(factHandle.getObject()));
        }
        else {
            fail("result of command was NOT a collection of FactHandles");
        }
    }

    private void verifyThatCollectionContainsTheseFactHandle(HashSet<String> factSet, Object collection) {
        // Work on a copy so the caller's set is not consumed.
        factSet = (HashSet<String>) factSet.clone();
        if( collection instanceof Collection<?> ) {
            Collection<FactHandle> factHandles = (Collection<FactHandle>) collection;
            assertTrue(! factHandles.isEmpty());
            // Message fix: added the missing space after the count.
            assertTrue(factSet.size() + " inserted but only " + factHandles.size() + " facts retrieved", factHandles.size() == factSet.size());
            Object [] internalFactHandles = factHandles.toArray();
            for( int i = 0; i < internalFactHandles.length; ++i ) {
                Object factObject = ((InternalFactHandle) internalFactHandles[i]).getObject();
                assertTrue(factSet.contains(factObject));
                factSet.remove(factObject);
            }
            // Message fix: a non-empty set here means some inserted facts were
            // never retrieved (the old message claimed the opposite).
            assertTrue( "Not all inserted facts were retrieved.", factSet.isEmpty() );
        }
        else {
            fail("result of command was NOT a collection of FactHandles");
        }
    }
}
| |
/*
* Copyright (c) 2004, PostgreSQL Global Development Group
* See the LICENSE file in the project root for more information.
*/
package org.postgresql.util;
import org.postgresql.core.EncodingPredictor;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Holds the decoded fields of a PostgreSQL ErrorResponse or NoticeResponse
 * protocol message. The raw payload sent by the server is a sequence of
 * {@code <field-code-char><field-value>'\0'} entries; each one-character code
 * below identifies one field of the v3 frontend/backend protocol.
 */
public class ServerErrorMessage implements Serializable {
  // NOTE(review): Serializable with no explicit serialVersionUID — consider
  // pinning one if instances are ever serialized across driver versions.
  // Single-character field codes defined by the PostgreSQL protocol.
  private static final Character SEVERITY = 'S';
  private static final Character MESSAGE = 'M';
  private static final Character DETAIL = 'D';
  private static final Character HINT = 'H';
  private static final Character POSITION = 'P';
  private static final Character WHERE = 'W';
  private static final Character FILE = 'F';
  private static final Character LINE = 'L';
  private static final Character ROUTINE = 'R';
  private static final Character SQLSTATE = 'C';
  private static final Character INTERNAL_POSITION = 'p';
  private static final Character INTERNAL_QUERY = 'q';
  private static final Character SCHEMA = 's';
  private static final Character TABLE = 't';
  private static final Character COLUMN = 'c';
  private static final Character DATATYPE = 'd';
  private static final Character CONSTRAINT = 'n';
  // Decoded field-code -> field-value pairs from the server message.
  private final Map<Character, String> m_mesgParts = new HashMap<Character, String>();
  // Controls how much detail toString() includes (higher is more verbose).
  private final int verbosity;

  /**
   * Builds the message from a decode attempt, appending a note about the
   * auto-detected server encoding when one had to be guessed.
   *
   * @param serverError the decode result (raw message plus detected encoding, if any)
   * @param verbosity logging verbosity controlling toString() detail
   */
  public ServerErrorMessage(EncodingPredictor.DecodeResult serverError, int verbosity) {
    this(serverError.result, verbosity);
    if (serverError.encoding != null) {
      m_mesgParts.put(MESSAGE, m_mesgParts.get(MESSAGE)
          + GT.tr(" (pgjdbc: autodetected server-encoding to be {0}, if the message is not readable, please check database logs and/or host, port, dbname, user, password, pg_hba.conf)",
          serverError.encoding)
      );
    }
  }

  /**
   * Parses the raw {@code <code><value>'\0'} sequence sent by the server into
   * the field map.
   *
   * @param p_serverError the raw server error payload
   * @param verbosity logging verbosity controlling toString() detail
   */
  public ServerErrorMessage(String p_serverError, int verbosity) {
    this.verbosity = verbosity;
    char[] l_chars = p_serverError.toCharArray();
    int l_pos = 0;
    int l_length = l_chars.length;
    while (l_pos < l_length) {
      char l_mesgType = l_chars[l_pos];
      if (l_mesgType != '\0') {
        l_pos++;
        int l_startString = l_pos;
        // order here is important: position must be checked before accessing the array
        while (l_pos < l_length && l_chars[l_pos] != '\0') {
          l_pos++;
        }
        String l_mesgPart = new String(l_chars, l_startString, l_pos - l_startString);
        m_mesgParts.put(l_mesgType, l_mesgPart);
      }
      l_pos++;
    }
  }

  public String getSQLState() {
    return m_mesgParts.get(SQLSTATE);
  }

  public String getMessage() {
    return m_mesgParts.get(MESSAGE);
  }

  public String getSeverity() {
    return m_mesgParts.get(SEVERITY);
  }

  public String getDetail() {
    return m_mesgParts.get(DETAIL);
  }

  public String getHint() {
    return m_mesgParts.get(HINT);
  }

  public int getPosition() {
    return getIntegerPart(POSITION);
  }

  public String getWhere() {
    return m_mesgParts.get(WHERE);
  }

  public String getSchema() {
    return m_mesgParts.get(SCHEMA);
  }

  public String getTable() {
    return m_mesgParts.get(TABLE);
  }

  public String getColumn() {
    return m_mesgParts.get(COLUMN);
  }

  public String getDatatype() {
    return m_mesgParts.get(DATATYPE);
  }

  public String getConstraint() {
    return m_mesgParts.get(CONSTRAINT);
  }

  public String getFile() {
    return m_mesgParts.get(FILE);
  }

  public int getLine() {
    return getIntegerPart(LINE);
  }

  public String getRoutine() {
    return m_mesgParts.get(ROUTINE);
  }

  public String getInternalQuery() {
    return m_mesgParts.get(INTERNAL_QUERY);
  }

  public int getInternalPosition() {
    return getIntegerPart(INTERNAL_POSITION);
  }

  /**
   * Returns the named message part parsed as an int, or 0 when the part is
   * absent or not a valid integer.
   */
  private int getIntegerPart(Character c) {
    String s = m_mesgParts.get(c);
    if (s == null) {
      return 0;
    }
    try {
      return Integer.parseInt(s);
    } catch (NumberFormatException nfe) {
      // Defensive: a malformed numeric field from the server is treated as
      // "not provided" rather than propagating an unchecked exception.
      return 0;
    }
  }

  public String toString() {
    // Now construct the message from what the server sent
    // The general format is:
    // SEVERITY: Message \n
    // Detail: \n
    // Hint: \n
    // Position: \n
    // Where: \n
    // Internal Query: \n
    // Internal Position: \n
    // Location: File:Line:Routine \n
    // SQLState: \n
    //
    // Normally only the message and detail is included.
    // If INFO level logging is enabled then detail, hint, position and where are
    // included. If DEBUG level logging is enabled then all information
    // is included.
    StringBuilder l_totalMessage = new StringBuilder();
    String l_message = m_mesgParts.get(SEVERITY);
    if (l_message != null) {
      l_totalMessage.append(l_message).append(": ");
    }
    l_message = m_mesgParts.get(MESSAGE);
    if (l_message != null) {
      l_totalMessage.append(l_message);
    }
    l_message = m_mesgParts.get(DETAIL);
    if (l_message != null) {
      l_totalMessage.append("\n  ").append(GT.tr("Detail: {0}", l_message));
    }
    l_message = m_mesgParts.get(HINT);
    if (l_message != null) {
      l_totalMessage.append("\n  ").append(GT.tr("Hint: {0}", l_message));
    }
    l_message = m_mesgParts.get(POSITION);
    if (l_message != null) {
      l_totalMessage.append("\n  ").append(GT.tr("Position: {0}", l_message));
    }
    l_message = m_mesgParts.get(WHERE);
    if (l_message != null) {
      l_totalMessage.append("\n  ").append(GT.tr("Where: {0}", l_message));
    }
    // Extra diagnostics only at higher verbosity (DEBUG-level logging).
    if (verbosity > 2) {
      String l_internalQuery = m_mesgParts.get(INTERNAL_QUERY);
      if (l_internalQuery != null) {
        l_totalMessage.append("\n  ").append(GT.tr("Internal Query: {0}", l_internalQuery));
      }
      String l_internalPosition = m_mesgParts.get(INTERNAL_POSITION);
      if (l_internalPosition != null) {
        l_totalMessage.append("\n  ").append(GT.tr("Internal Position: {0}", l_internalPosition));
      }
      String l_file = m_mesgParts.get(FILE);
      String l_line = m_mesgParts.get(LINE);
      String l_routine = m_mesgParts.get(ROUTINE);
      if (l_file != null || l_line != null || l_routine != null) {
        l_totalMessage.append("\n  ").append(GT.tr("Location: File: {0}, Routine: {1}, Line: {2}",
            l_file, l_routine, l_line));
      }
      l_message = m_mesgParts.get(SQLSTATE);
      if (l_message != null) {
        l_totalMessage.append("\n  ").append(GT.tr("Server SQLState: {0}", l_message));
      }
    }
    return l_totalMessage.toString();
  }
}
| |
package com.google.ads.googleads.v9.services;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
 * <pre>
 * Service to manage ad group bid modifiers.
 * </pre>
 */
// Generated gRPC service shim: do not edit by hand; regenerate from the proto instead.
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/ads/googleads/v9/services/ad_group_bid_modifier_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class AdGroupBidModifierServiceGrpc {
  // Static utility holder; never instantiated.
  private AdGroupBidModifierServiceGrpc() {}
  public static final String SERVICE_NAME = "google.ads.googleads.v9.services.AdGroupBidModifierService";
  // Static method descriptors that strictly reflect the proto.
  // Descriptors are created lazily and published via volatile + double-checked locking.
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest,
      com.google.ads.googleads.v9.resources.AdGroupBidModifier> getGetAdGroupBidModifierMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetAdGroupBidModifier",
      requestType = com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest.class,
      responseType = com.google.ads.googleads.v9.resources.AdGroupBidModifier.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily builds the GetAdGroupBidModifier descriptor (double-checked locking on the class).
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest,
      com.google.ads.googleads.v9.resources.AdGroupBidModifier> getGetAdGroupBidModifierMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest, com.google.ads.googleads.v9.resources.AdGroupBidModifier> getGetAdGroupBidModifierMethod;
    if ((getGetAdGroupBidModifierMethod = AdGroupBidModifierServiceGrpc.getGetAdGroupBidModifierMethod) == null) {
      synchronized (AdGroupBidModifierServiceGrpc.class) {
        if ((getGetAdGroupBidModifierMethod = AdGroupBidModifierServiceGrpc.getGetAdGroupBidModifierMethod) == null) {
          AdGroupBidModifierServiceGrpc.getGetAdGroupBidModifierMethod = getGetAdGroupBidModifierMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest, com.google.ads.googleads.v9.resources.AdGroupBidModifier>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetAdGroupBidModifier"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance()))
              .setSchemaDescriptor(new AdGroupBidModifierServiceMethodDescriptorSupplier("GetAdGroupBidModifier"))
              .build();
        }
      }
    }
    return getGetAdGroupBidModifierMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest,
      com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse> getMutateAdGroupBidModifiersMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "MutateAdGroupBidModifiers",
      requestType = com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest.class,
      responseType = com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily builds the MutateAdGroupBidModifiers descriptor (double-checked locking on the class).
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest,
      com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse> getMutateAdGroupBidModifiersMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest, com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse> getMutateAdGroupBidModifiersMethod;
    if ((getMutateAdGroupBidModifiersMethod = AdGroupBidModifierServiceGrpc.getMutateAdGroupBidModifiersMethod) == null) {
      synchronized (AdGroupBidModifierServiceGrpc.class) {
        if ((getMutateAdGroupBidModifiersMethod = AdGroupBidModifierServiceGrpc.getMutateAdGroupBidModifiersMethod) == null) {
          AdGroupBidModifierServiceGrpc.getMutateAdGroupBidModifiersMethod = getMutateAdGroupBidModifiersMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest, com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateAdGroupBidModifiers"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse.getDefaultInstance()))
              .setSchemaDescriptor(new AdGroupBidModifierServiceMethodDescriptorSupplier("MutateAdGroupBidModifiers"))
              .build();
        }
      }
    }
    return getMutateAdGroupBidModifiersMethod;
  }
  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static AdGroupBidModifierServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<AdGroupBidModifierServiceStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<AdGroupBidModifierServiceStub>() {
        @java.lang.Override
        public AdGroupBidModifierServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new AdGroupBidModifierServiceStub(channel, callOptions);
        }
      };
    return AdGroupBidModifierServiceStub.newStub(factory, channel);
  }
  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static AdGroupBidModifierServiceBlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<AdGroupBidModifierServiceBlockingStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<AdGroupBidModifierServiceBlockingStub>() {
        @java.lang.Override
        public AdGroupBidModifierServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new AdGroupBidModifierServiceBlockingStub(channel, callOptions);
        }
      };
    return AdGroupBidModifierServiceBlockingStub.newStub(factory, channel);
  }
  /**
   * Creates a new ListenableFuture-style stub that supports unary calls on the service
   */
  public static AdGroupBidModifierServiceFutureStub newFutureStub(
      io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<AdGroupBidModifierServiceFutureStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<AdGroupBidModifierServiceFutureStub>() {
        @java.lang.Override
        public AdGroupBidModifierServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new AdGroupBidModifierServiceFutureStub(channel, callOptions);
        }
      };
    return AdGroupBidModifierServiceFutureStub.newStub(factory, channel);
  }
  /**
   * <pre>
   * Service to manage ad group bid modifiers.
   * </pre>
   */
  // Server-side base class: subclass and override the RPC methods to implement the service.
  public static abstract class AdGroupBidModifierServiceImplBase implements io.grpc.BindableService {
    /**
     * <pre>
     * Returns the requested ad group bid modifier in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void getAdGroupBidModifier(com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.resources.AdGroupBidModifier> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetAdGroupBidModifierMethod(), responseObserver);
    }
    /**
     * <pre>
     * Creates, updates, or removes ad group bid modifiers.
     * Operation statuses are returned.
     * List of thrown errors:
     *   [AdGroupBidModifierError]()
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [ContextError]()
     *   [CriterionError]()
     *   [DatabaseError]()
     *   [DistinctError]()
     *   [FieldError]()
     *   [FieldMaskError]()
     *   [HeaderError]()
     *   [IdError]()
     *   [InternalError]()
     *   [MutateError]()
     *   [NewResourceCreationError]()
     *   [NotEmptyError]()
     *   [OperatorError]()
     *   [QuotaError]()
     *   [RangeError]()
     *   [RequestError]()
     *   [ResourceCountLimitExceededError]()
     *   [SizeLimitError]()
     *   [StringFormatError]()
     *   [StringLengthError]()
     * </pre>
     */
    public void mutateAdGroupBidModifiers(com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateAdGroupBidModifiersMethod(), responseObserver);
    }
    // Binds both unary RPCs to this implementation for server registration.
    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            getGetAdGroupBidModifierMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest,
                com.google.ads.googleads.v9.resources.AdGroupBidModifier>(
                  this, METHODID_GET_AD_GROUP_BID_MODIFIER)))
          .addMethod(
            getMutateAdGroupBidModifiersMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest,
                com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse>(
                  this, METHODID_MUTATE_AD_GROUP_BID_MODIFIERS)))
          .build();
    }
  }
  /**
   * <pre>
   * Service to manage ad group bid modifiers.
   * </pre>
   */
  // Async client stub: responses are delivered through StreamObserver callbacks.
  public static final class AdGroupBidModifierServiceStub extends io.grpc.stub.AbstractAsyncStub<AdGroupBidModifierServiceStub> {
    private AdGroupBidModifierServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected AdGroupBidModifierServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new AdGroupBidModifierServiceStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns the requested ad group bid modifier in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void getAdGroupBidModifier(com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.resources.AdGroupBidModifier> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetAdGroupBidModifierMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     * <pre>
     * Creates, updates, or removes ad group bid modifiers.
     * Operation statuses are returned.
     * List of thrown errors:
     *   [AdGroupBidModifierError]()
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [ContextError]()
     *   [CriterionError]()
     *   [DatabaseError]()
     *   [DistinctError]()
     *   [FieldError]()
     *   [FieldMaskError]()
     *   [HeaderError]()
     *   [IdError]()
     *   [InternalError]()
     *   [MutateError]()
     *   [NewResourceCreationError]()
     *   [NotEmptyError]()
     *   [OperatorError]()
     *   [QuotaError]()
     *   [RangeError]()
     *   [RequestError]()
     *   [ResourceCountLimitExceededError]()
     *   [SizeLimitError]()
     *   [StringFormatError]()
     *   [StringLengthError]()
     * </pre>
     */
    public void mutateAdGroupBidModifiers(com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getMutateAdGroupBidModifiersMethod(), getCallOptions()), request, responseObserver);
    }
  }
  /**
   * <pre>
   * Service to manage ad group bid modifiers.
   * </pre>
   */
  // Blocking client stub: each call waits for the unary response.
  public static final class AdGroupBidModifierServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<AdGroupBidModifierServiceBlockingStub> {
    private AdGroupBidModifierServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected AdGroupBidModifierServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new AdGroupBidModifierServiceBlockingStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns the requested ad group bid modifier in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v9.resources.AdGroupBidModifier getAdGroupBidModifier(com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetAdGroupBidModifierMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Creates, updates, or removes ad group bid modifiers.
     * Operation statuses are returned.
     * List of thrown errors:
     *   [AdGroupBidModifierError]()
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [ContextError]()
     *   [CriterionError]()
     *   [DatabaseError]()
     *   [DistinctError]()
     *   [FieldError]()
     *   [FieldMaskError]()
     *   [HeaderError]()
     *   [IdError]()
     *   [InternalError]()
     *   [MutateError]()
     *   [NewResourceCreationError]()
     *   [NotEmptyError]()
     *   [OperatorError]()
     *   [QuotaError]()
     *   [RangeError]()
     *   [RequestError]()
     *   [ResourceCountLimitExceededError]()
     *   [SizeLimitError]()
     *   [StringFormatError]()
     *   [StringLengthError]()
     * </pre>
     */
    public com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse mutateAdGroupBidModifiers(com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getMutateAdGroupBidModifiersMethod(), getCallOptions(), request);
    }
  }
  /**
   * <pre>
   * Service to manage ad group bid modifiers.
   * </pre>
   */
  // Future-style client stub: calls return a ListenableFuture of the response.
  public static final class AdGroupBidModifierServiceFutureStub extends io.grpc.stub.AbstractFutureStub<AdGroupBidModifierServiceFutureStub> {
    private AdGroupBidModifierServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected AdGroupBidModifierServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new AdGroupBidModifierServiceFutureStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns the requested ad group bid modifier in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v9.resources.AdGroupBidModifier> getAdGroupBidModifier(
        com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetAdGroupBidModifierMethod(), getCallOptions()), request);
    }
    /**
     * <pre>
     * Creates, updates, or removes ad group bid modifiers.
     * Operation statuses are returned.
     * List of thrown errors:
     *   [AdGroupBidModifierError]()
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [ContextError]()
     *   [CriterionError]()
     *   [DatabaseError]()
     *   [DistinctError]()
     *   [FieldError]()
     *   [FieldMaskError]()
     *   [HeaderError]()
     *   [IdError]()
     *   [InternalError]()
     *   [MutateError]()
     *   [NewResourceCreationError]()
     *   [NotEmptyError]()
     *   [OperatorError]()
     *   [QuotaError]()
     *   [RangeError]()
     *   [RequestError]()
     *   [ResourceCountLimitExceededError]()
     *   [SizeLimitError]()
     *   [StringFormatError]()
     *   [StringLengthError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse> mutateAdGroupBidModifiers(
        com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getMutateAdGroupBidModifiersMethod(), getCallOptions()), request);
    }
  }
  // Method-dispatch ids used by MethodHandlers.invoke.
  private static final int METHODID_GET_AD_GROUP_BID_MODIFIER = 0;
  private static final int METHODID_MUTATE_AD_GROUP_BID_MODIFIERS = 1;
  // Routes incoming server calls to the matching AdGroupBidModifierServiceImplBase method
  // based on the methodId supplied at construction.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AdGroupBidModifierServiceImplBase serviceImpl;
    private final int methodId;
    MethodHandlers(AdGroupBidModifierServiceImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_AD_GROUP_BID_MODIFIER:
          serviceImpl.getAdGroupBidModifier((com.google.ads.googleads.v9.services.GetAdGroupBidModifierRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.resources.AdGroupBidModifier>) responseObserver);
          break;
        case METHODID_MUTATE_AD_GROUP_BID_MODIFIERS:
          serviceImpl.mutateAdGroupBidModifiers((com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.services.MutateAdGroupBidModifiersResponse>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    // No streaming methods exist on this service, so any streaming invoke is a bug.
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  private static abstract class AdGroupBidModifierServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    AdGroupBidModifierServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.ads.googleads.v9.services.AdGroupBidModifierServiceProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("AdGroupBidModifierService");
    }
  }
  private static final class AdGroupBidModifierServiceFileDescriptorSupplier
      extends AdGroupBidModifierServiceBaseDescriptorSupplier {
    AdGroupBidModifierServiceFileDescriptorSupplier() {}
  }
  private static final class AdGroupBidModifierServiceMethodDescriptorSupplier
      extends AdGroupBidModifierServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final String methodName;
    AdGroupBidModifierServiceMethodDescriptorSupplier(String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  // Lazily assembles the service descriptor (double-checked locking on the class).
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (AdGroupBidModifierServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new AdGroupBidModifierServiceFileDescriptorSupplier())
              .addMethod(getGetAdGroupBidModifierMethod())
              .addMethod(getMutateAdGroupBidModifiersMethod())
              .build();
        }
      }
    }
    return result;
  }
}
| |
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.k3po.driver.internal.behavior.handler.command;
import static java.lang.Boolean.TRUE;
import static java.util.Arrays.asList;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.jboss.netty.buffer.ChannelBuffers.copiedBuffer;
import static org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer;
import static org.jboss.netty.channel.ChannelState.OPEN;
import static org.jboss.netty.channel.Channels.fireWriteComplete;
import static org.jboss.netty.channel.Channels.pipeline;
import static org.jboss.netty.util.CharsetUtil.UTF_8;
import static org.junit.Assert.assertFalse;
import static org.kaazing.k3po.driver.internal.behavior.handler.codec.Maskers.newMasker;
import static org.kaazing.k3po.lang.internal.el.ExpressionFactoryUtils.synchronizedSupplier;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.function.Supplier;
import javax.el.ExpressionFactory;
import javax.el.ValueExpression;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.ChannelDownstreamHandler;
import org.jboss.netty.channel.ChannelEvent;
import org.jboss.netty.channel.ChannelException;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelUpstreamHandler;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.jboss.netty.channel.WriteCompletionEvent;
import org.jboss.netty.channel.local.DefaultLocalClientChannelFactory;
import org.jmock.lib.concurrent.Synchroniser;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.kaazing.k3po.driver.internal.behavior.handler.ExecutionHandler;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.Masker;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.MessageEncoder;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.WriteBytesEncoder;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.WriteExpressionEncoder;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.WriteTextEncoder;
import org.kaazing.k3po.driver.internal.behavior.handler.prepare.PreparationEvent;
import org.kaazing.k3po.driver.internal.jmock.Expectations;
import org.kaazing.k3po.driver.internal.jmock.Mockery;
import org.kaazing.k3po.lang.internal.el.ExpressionContext;
@RunWith(Parameterized.class)
public class WriteHandlerTest {
@Parameters
public static Iterable<byte[]> maskingKeys() {
byte[] identityKey = new byte[] { 0x00, 0x00, 0x00, 0x00 };
byte[] maskingKey = new byte[4];
new Random().nextBytes(maskingKey);
return asList(identityKey, maskingKey);
}
    // Fail any single test after 1 second (timeout disabled while debugging).
    @Rule
    public TestRule timeout = new DisableOnDebug(new Timeout(1, SECONDS));
    // Masking key for this parameterized run; see maskingKeys() for the values.
    private final byte[] maskingKey;
    // jMock context plus the mocked upstream/downstream handlers whose
    // expectations each test verifies.
    private Mockery context;
    private ChannelUpstreamHandler upstream;
    private ChannelDownstreamHandler downstream;
    // Netty pipeline under test and the factory used to create local channels on it.
    private ChannelPipeline pipeline;
    private ChannelFactory channelFactory;
    // Handler under test plus the execution handler gating when it fires.
    private WriteHandler handler;
    private ExecutionHandler execution;
    // EL plumbing backing the ${variable} write expression used by the encoders.
    private ExpressionContext environment;
    private ValueExpression expression;
    // Masker built from maskingKey; used to compute expected (masked) output.
    private Masker masker;
    /**
     * @param maskingKey the masking key injected by the Parameterized runner
     *        for this run (see {@code maskingKeys()})
     */
    public WriteHandlerTest(byte[] maskingKey) {
        this.maskingKey = maskingKey;
    }
    /**
     * Builds the fixture: jMock mocks for the upstream/downstream neighbours, a
     * WriteHandler configured with bytes/text/expression encoders and the run's
     * masking key, and a local Netty pipeline wiring them together.
     */
    @Before
    public void setUp() throws Exception {
        context = new Mockery() {
            {
                // Surface the first expectation failure directly from assertIsSatisfied().
                setThrowFirstErrorOnAssertIsSatisfied(true);
            }
        };
        // Netty events may arrive on I/O threads; synchronise jMock's state.
        context.setThreadingPolicy(new Synchroniser());
        upstream = context.mock(ChannelUpstreamHandler.class);
        downstream = context.mock(ChannelDownstreamHandler.class);
        execution = new ExecutionHandler();
        // Three encoders whose outputs the handler concatenates: fixed bytes,
        // UTF-8 text, and the value of the ${variable} EL expression.
        List<MessageEncoder> encoders = new ArrayList<>();
        encoders.add(new WriteBytesEncoder(new byte[] { 0x01, 0x02, 0x03 }));
        encoders.add(new WriteTextEncoder("Hello, world", UTF_8));
        ExpressionFactory expressionFactory = ExpressionFactory.newInstance();
        environment = new ExpressionContext();
        expression = expressionFactory.createValueExpression(environment, "${variable}", byte[].class);
        Supplier<byte[]> supplier = synchronizedSupplier(expression, environment, byte[].class);
        encoders.add(new WriteExpressionEncoder(supplier, expression));
        // Keep a separate masker instance so tests can compute the expected masked buffer.
        masker = newMasker(maskingKey);
        handler = new WriteHandler(encoders, newMasker(maskingKey));
        pipeline = pipeline(new SimpleChannelHandler() {
            @Override
            public void handleDownstream(ChannelHandlerContext ctx, ChannelEvent evt) throws Exception {
                // Record every downstream event on the mock before passing it on.
                downstream.handleDownstream(ctx, evt);
                super.handleDownstream(ctx, evt);
            }
            @Override
            public void writeRequested(ChannelHandlerContext ctx, MessageEvent e)
                    throws Exception {
                Object message = e.getMessage();
                // Simulate a successful write and fire the matching completion event.
                e.getFuture().setSuccess();
                if (message instanceof ChannelBuffer) {
                    ChannelBuffer buf = (ChannelBuffer) message;
                    fireWriteComplete(ctx, buf.readableBytes());
                }
            }
        }, execution, handler, new SimpleChannelHandler() {
            @Override
            public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e) throws Exception {
                // Record every upstream event on the mock before passing it on.
                upstream.handleUpstream(ctx, e);
                super.handleUpstream(ctx, e);
            }
            @Override
            public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
                // prevent console error message
            }
        });
        channelFactory = new DefaultLocalClientChannelFactory();
    }
@Test
public void shouldPropagateDownstreamMessageOnPipelineFutureSuccess() throws Exception {
    // Expected wire bytes: the three encoder outputs concatenated, then masked.
    ChannelBuffer bytesPart = wrappedBuffer(new byte[] { 0x01, 0x02, 0x03 });
    ChannelBuffer textPart = copiedBuffer("Hello, world", UTF_8);
    ChannelBuffer expressionPart = wrappedBuffer(new byte[] { 0x01, 0x02, 0x03 });
    final ChannelBuffer expected =
            masker.applyMask(wrappedBuffer(bytesPart, textPart, expressionPart));

    context.checking(new Expectations() {
        {
            oneOf(upstream).handleUpstream(with(any(ChannelHandlerContext.class)), with(any(PreparationEvent.class)));
            oneOf(upstream).handleUpstream(with(any(ChannelHandlerContext.class)), with(channelState(OPEN, TRUE)));
            oneOf(downstream).handleDownstream(with(any(ChannelHandlerContext.class)), with(message(expected)));
            oneOf(upstream).handleUpstream(with(any(ChannelHandlerContext.class)), with(any(WriteCompletionEvent.class)));
        }
    });

    // Resolve the ${variable} expression, open the channel, then complete
    // the preparation phase successfully and wait for the write handler.
    expression.setValue(environment, new byte[] { 0x01, 0x02, 0x03 });
    channelFactory.newChannel(pipeline);
    execution.getHandlerFuture().setSuccess();
    handler.getHandlerFuture().sync();

    context.assertIsSatisfied();
}
@Test
public void shouldNotPropagateDownstreamMessageOnPipelineFutureFailure() throws Exception {
    context.checking(new Expectations() {
        {
            // Only preparation and channel-open events are expected; no
            // downstream write may be observed once preparation has failed.
            oneOf(upstream).handleUpstream(with(any(ChannelHandlerContext.class)), with(any(PreparationEvent.class)));
            oneOf(upstream).handleUpstream(with(any(ChannelHandlerContext.class)), with(channelState(OPEN, TRUE)));
        }
    });

    expression.setValue(environment, new byte[] { 0x01, 0x02, 0x03 });
    channelFactory.newChannel(pipeline);

    // Fail the preparation phase; the write handler must never complete.
    execution.getHandlerFuture().setFailure(new ChannelException("pipeline already failed"));
    assertFalse(handler.getHandlerFuture().isDone());

    context.assertIsSatisfied();
}
}
| |
/**
* Copyright The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.lang.management.ManagementFactory;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.util.HeapMemorySizeUtil;
import org.apache.hadoop.hbase.regionserver.HeapMemStoreLAB.Chunk;
import org.apache.hadoop.util.StringUtils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* A pool of {@link HeapMemStoreLAB.Chunk} instances.
*
* MemStoreChunkPool caches a number of retired chunks for reusing, it could
* decrease allocating bytes when writing, thereby optimizing the garbage
* collection on JVM.
*
* The pool instance is globally unique and could be obtained through
* {@link MemStoreChunkPool#getPool(Configuration)}
*
* {@link MemStoreChunkPool#getChunk()} is called when MemStoreLAB allocating
* bytes, and {@link MemStoreChunkPool#putbackChunks(BlockingQueue)} is called
* when MemStore clearing snapshot for flush
*/
@SuppressWarnings("javadoc")
@InterfaceAudience.Private
public class MemStoreChunkPool {
  private static final Log LOG = LogFactory.getLog(MemStoreChunkPool.class);

  final static String CHUNK_POOL_MAXSIZE_KEY = "hbase.hregion.memstore.chunkpool.maxsize";
  final static String CHUNK_POOL_INITIALSIZE_KEY = "hbase.hregion.memstore.chunkpool.initialsize";
  final static float POOL_MAX_SIZE_DEFAULT = 0.0f;
  final static float POOL_INITIAL_SIZE_DEFAULT = 0.0f;

  // Static reference to the MemStoreChunkPool.
  // FIX: declared volatile so the double-checked locking in
  // getPool(Configuration) publishes the fully constructed instance safely;
  // without it, a thread racing past the unsynchronized null-check could
  // observe a partially initialized pool.
  private static volatile MemStoreChunkPool globalInstance;

  /** Boolean whether we have disabled the memstore chunk pool entirely. */
  static boolean chunkPoolDisabled = false;

  /** Maximum number of chunks this pool will retain. */
  private final int maxCount;

  // A queue of reclaimed chunks, available for reuse by getChunk()
  private final BlockingQueue<Chunk> reclaimedChunks;
  private final int chunkSize;

  /** Statistics thread schedule pool */
  private final ScheduledExecutorService scheduleThreadPool;

  /** Statistics thread period in seconds (5 minutes). */
  private static final int statThreadPeriod = 60 * 5;

  private final AtomicLong createdChunkCount = new AtomicLong();
  private final AtomicLong reusedChunkCount = new AtomicLong();

  /**
   * @param conf configuration (currently unused here; kept for signature
   *          compatibility with callers)
   * @param chunkSize size of each chunk, in bytes
   * @param maxCount maximum number of chunks to retain in the pool
   * @param initialCount number of chunks to pre-allocate eagerly
   */
  MemStoreChunkPool(Configuration conf, int chunkSize, int maxCount,
      int initialCount) {
    this.maxCount = maxCount;
    this.chunkSize = chunkSize;
    this.reclaimedChunks = new LinkedBlockingQueue<Chunk>();
    // Pre-allocate and initialize the requested number of chunks.
    for (int i = 0; i < initialCount; i++) {
      Chunk chunk = new Chunk(chunkSize);
      chunk.init();
      reclaimedChunks.add(chunk);
    }
    // Daemon scheduler that periodically logs pool statistics; named after
    // the creating thread so the log output can be attributed to its owner.
    final String n = Thread.currentThread().getName();
    scheduleThreadPool = Executors.newScheduledThreadPool(1,
        new ThreadFactoryBuilder().setNameFormat(n + "-MemStoreChunkPool Statistics")
            .setDaemon(true).build());
    // StatisticsThread is only used as a Runnable here (its start() is
    // never called); the scheduler invokes run() directly.
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }

  /**
   * Poll a chunk from the pool, reset it if not null, else create a new chunk
   * to return
   * @return a chunk
   */
  Chunk getChunk() {
    Chunk chunk = reclaimedChunks.poll();
    if (chunk == null) {
      chunk = new Chunk(chunkSize);
      createdChunkCount.incrementAndGet();
    } else {
      chunk.reset();
      reusedChunkCount.incrementAndGet();
    }
    return chunk;
  }

  /**
   * Add the chunks to the pool, when the pool achieves the max size, it will
   * skip the remaining chunks
   * @param chunks chunks retired by a flushed memstore
   */
  void putbackChunks(BlockingQueue<Chunk> chunks) {
    // Best-effort capacity check: the pool may momentarily exceed maxCount
    // under concurrent putbacks, which is acceptable for a cache.
    int maxNumToPutback = this.maxCount - reclaimedChunks.size();
    if (maxNumToPutback <= 0) {
      return;
    }
    chunks.drainTo(reclaimedChunks, maxNumToPutback);
  }

  /**
   * Add the chunk to the pool, if the pool has achieved the max size, it will
   * skip it
   * @param chunk a retired chunk
   */
  void putbackChunk(Chunk chunk) {
    if (reclaimedChunks.size() >= this.maxCount) {
      return;
    }
    reclaimedChunks.add(chunk);
  }

  /** @return the current number of chunks held by the pool */
  int getPoolSize() {
    return this.reclaimedChunks.size();
  }

  /*
   * Only used in testing
   */
  void clearChunks() {
    this.reclaimedChunks.clear();
  }

  /** Periodic task that logs pool usage statistics. */
  private static class StatisticsThread extends Thread {
    MemStoreChunkPool mcp;

    public StatisticsThread(MemStoreChunkPool mcp) {
      super("MemStoreChunkPool.StatisticsThread");
      setDaemon(true);
      this.mcp = mcp;
    }

    @Override
    public void run() {
      mcp.logStats();
    }
  }

  /** Logs created/reused chunk counts and the reuse ratio at DEBUG level. */
  private void logStats() {
    if (!LOG.isDebugEnabled()) return;
    long created = createdChunkCount.get();
    long reused = reusedChunkCount.get();
    long total = created + reused;
    LOG.debug("Stats: current pool size=" + reclaimedChunks.size()
        + ",created chunk count=" + created
        + ",reused chunk count=" + reused
        + ",reuseRatio=" + (total == 0 ? "0" : StringUtils.formatPercent(
            (float) reused / (float) total, 2)));
  }

  /**
   * Lazily creates (or returns) the singleton pool, sized from the
   * configured fraction of the global memstore limit.  Returns null and
   * permanently disables pooling when the configured max size is <= 0.
   *
   * @param conf configuration supplying pool sizing keys
   * @return the global MemStoreChunkPool instance, or null when disabled
   */
  static MemStoreChunkPool getPool(Configuration conf) {
    // Fast path: safe because globalInstance is volatile (see field doc).
    if (globalInstance != null) return globalInstance;

    synchronized (MemStoreChunkPool.class) {
      if (chunkPoolDisabled) return null;
      if (globalInstance != null) return globalInstance;
      float poolSizePercentage = conf.getFloat(CHUNK_POOL_MAXSIZE_KEY, POOL_MAX_SIZE_DEFAULT);
      if (poolSizePercentage <= 0) {
        chunkPoolDisabled = true;
        return null;
      }
      if (poolSizePercentage > 1.0) {
        throw new IllegalArgumentException(CHUNK_POOL_MAXSIZE_KEY + " must be between 0.0 and 1.0");
      }
      long heapMax = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
      long globalMemStoreLimit = (long) (heapMax * HeapMemorySizeUtil.getGlobalMemStorePercent(conf,
          false));
      int chunkSize = conf.getInt(HeapMemStoreLAB.CHUNK_SIZE_KEY,
          HeapMemStoreLAB.CHUNK_SIZE_DEFAULT);
      int maxCount = (int) (globalMemStoreLimit * poolSizePercentage / chunkSize);
      float initialCountPercentage = conf.getFloat(CHUNK_POOL_INITIALSIZE_KEY,
          POOL_INITIAL_SIZE_DEFAULT);
      if (initialCountPercentage > 1.0 || initialCountPercentage < 0) {
        throw new IllegalArgumentException(CHUNK_POOL_INITIALSIZE_KEY
            + " must be between 0.0 and 1.0");
      }
      int initialCount = (int) (initialCountPercentage * maxCount);
      LOG.info("Allocating MemStoreChunkPool with chunk size " + StringUtils.byteDesc(chunkSize)
          + ", max count " + maxCount + ", initial count " + initialCount);
      globalInstance = new MemStoreChunkPool(conf, chunkSize, maxCount, initialCount);
      return globalInstance;
    }
  }
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/citations/tags/sakai-10.6/citations-osid/xserver/src/java/org/sakaibrary/osid/repository/xserver/AssetIterator.java $
* $Id: AssetIterator.java 105079 2012-02-24 23:08:11Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaibrary.osid.repository.xserver;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.parsers.ParserConfigurationException;
import org.sakaibrary.xserver.session.MetasearchSession;
import org.sakaibrary.xserver.session.MetasearchSessionManager;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
* @author gbhatnag
* @version
*/
public class AssetIterator extends org.xml.sax.helpers.DefaultHandler
implements org.osid.repository.AssetIterator {
  /*
   * Xserver error codes
   */
  public static final int XSERVER_ERROR_MERGE_LIMIT = 134;
  public static final int XSERVER_ERROR_ALL_MERGED = 137;

  private static final long serialVersionUID = 1L;

  // classpath location of the "name=regex" lines consumed by doRegexParse()
  private static final String REGULAR_EXPRESSION_FILE = "/data/citationRegex.txt";

  private static final org.apache.commons.logging.Log LOG =
      org.apache.commons.logging.LogFactory.getLog(
          "org.sakaibrary.osid.repository.xserver.AssetIterator" );

  // FIFO of fully built Assets waiting to be handed out by nextAsset()
  private java.util.LinkedList assetQueue;
  // CitationRegex entries loaded from REGULAR_EXPRESSION_FILE
  private java.util.ArrayList regexArray;
  // globally unique identifier for this metasearch session
  private String guid;
  // index of the next record to fetch from the X-Server
  private int totalRecordsCursor = 0;
  // number of Assets already returned to the caller
  private int numRecordsReturned = 0;
  private org.osid.shared.Id repositoryId;
  private org.osid.shared.Id recordStructureId;
  // Asset/Record currently being populated during SAX parsing
  private org.osid.repository.Asset asset;
  private org.osid.repository.Record record;

  /*
   * Preferred URL handling
   */
  private String preferredUrl;
  private String preferredUrlFormat;

  // for SAX parsing: accumulates character data for the current element
  private StringBuilder textBuffer;

  // session cache manager
  private MetasearchSessionManager msm;

  // latest search-status properties, refreshed by hasNextAsset()
  org.osid.shared.Properties statusProperties;
/**
 * Constructs an empty AssetIterator.
 *
 * @param guid globally unique identifier for this session
 * @throws org.osid.repository.RepositoryException
 */
protected AssetIterator( String guid )
throws org.osid.repository.RepositoryException {
  this.guid = guid;

  // get session cache manager
  msm = MetasearchSessionManager.getInstance();

  // create assetQueue
  assetQueue = new java.util.LinkedList();

  // load citation regular expressions
  try {
    regexArray = loadCitationRegularExpressions( REGULAR_EXPRESSION_FILE );
  } catch( java.io.IOException ioe ) {
    LOG.warn( "AssetIterator() failed reading citation regular " +
        "expressions - regex file: " + REGULAR_EXPRESSION_FILE, ioe );
    // FIX: fall back to an empty list so doRegexParse() does not hit a
    // NullPointerException iterating a null regexArray later on.
    regexArray = new java.util.ArrayList();
  }
}
/**
 * Loads "name=regex" lines from the given classpath resource into a list
 * of CitationRegex entries.
 *
 * @param filename classpath resource containing one name=regex pair per line
 * @return list of CitationRegex entries (possibly empty)
 * @throws java.io.IOException if the resource is missing or unreadable
 */
private java.util.ArrayList loadCitationRegularExpressions( String filename )
throws java.io.IOException {
  java.util.ArrayList regexArray = new java.util.ArrayList();

  java.io.InputStream is = this.getClass().getResourceAsStream( filename );
  if( is == null ) {
    // FIX: getResourceAsStream() returns null (not an exception) when the
    // resource is absent; the original code then threw NullPointerException
    // from the finally block.  Report it as the declared IOException.
    throw new java.io.FileNotFoundException(
        "citation regex resource not found: " + filename );
  }
  try {
    // NOTE(review): reads with the platform default charset, as before --
    // confirm the regex file is ASCII/UTF-8 compatible.
    java.io.BufferedReader regexes = new java.io.BufferedReader(
        new java.io.InputStreamReader( is ) );
    try {
      // read the regex file and add regexes to array
      String regex;
      while( ( regex = regexes.readLine() ) != null ) {
        String [] nameRegex = regex.split( "=" );
        if( nameRegex.length < 2 ) {
          // FIX: skip blank/malformed lines instead of throwing
          // ArrayIndexOutOfBoundsException on nameRegex[1]
          continue;
        }
        CitationRegex citationRegex = new CitationRegex();
        citationRegex.setName( nameRegex[ 0 ].trim() );
        citationRegex.setRegex( nameRegex[ 1 ].trim() );
        regexArray.add( citationRegex );
      }
    } finally {
      regexes.close();
    }
  } finally {
    is.close();
  }
  return regexArray;
}
/**
 * Reports whether more Assets are (or may become) available for this
 * session.  Side effect: refreshes this.statusProperties from the X-Server.
 *
 * @return false when the search came back empty or all attainable records
 *         have been returned; true otherwise (including "still searching").
 * @throws org.osid.repository.RepositoryException on X-Server failure,
 *         search error, or session timeout.
 */
public boolean hasNextAsset()
throws org.osid.repository.RepositoryException {
  // NOTE(review): this initial lookup is overwritten below before its value
  // is used -- possibly retained for a session-cache side effect; confirm
  // before removing.
  MetasearchSession metasearchSession = msm.getMetasearchSession( guid );

  // get an XServer to check status and update number of records found
  org.sakaibrary.xserver.XServer xserver = null;
  statusProperties = null;
  try {
    xserver = new org.sakaibrary.xserver.XServer( guid );
    xserver.updateSearchStatusProperties();
    statusProperties = xserver.getSearchStatusProperties();
  } catch( org.sakaibrary.xserver.XServerException xse ) {
    LOG.warn( "X-Server error: " + xse.getErrorCode() +
        " - " + xse.getErrorText() );

    // throw exception now that status has been updated
    throw new org.osid.repository.RepositoryException(
        org.sakaibrary.osid.repository.xserver.MetasearchException.
        METASEARCH_ERROR );
  }

  // check status for error/timeout
  String status = null;
  try {
    status = ( String ) statusProperties.getProperty( "status" );
  } catch( org.osid.shared.SharedException se ) {
    LOG.warn( "hasNextAsset() failed getting status " +
        "property", se );
  }

  if( status != null ) {
    // status and statusMessage are set by XServer.updateSearchStatusProperties
    if( status.equals( "error" ) ) {
      throw new org.osid.repository.RepositoryException(
          org.sakaibrary.osid.repository.xserver.MetasearchException.
          METASEARCH_ERROR );
    } else if( status.equals( "timeout" ) ) {
      throw new org.osid.repository.RepositoryException(
          org.sakaibrary.osid.repository.xserver.MetasearchException.
          SESSION_TIMED_OUT );
    } else if( status.equals( "empty" ) ) {
      // no records found
      return false;
    }
  } else {
    LOG.warn( "hasNextAsset() - status property is null" );
  }

  // get updated metasearchSession
  metasearchSession = msm.getMetasearchSession( guid );
  Integer numRecordsFound = metasearchSession.getNumRecordsFound();
  if( numRecordsFound == null || numRecordsFound.intValue() == 0 ) {
    // still searching for records, return true
    return true;
  }

  // check if passed max number of attainable records
  int maxAttainable;
  boolean gotMergeError = metasearchSession.isGotMergeError();
  if( gotMergeError ) {
    // 300 appears to be the effective cap when the X-Server could not
    // merge all result sets -- TODO confirm against X-Server docs.
    maxAttainable = 300;
  } else {
    maxAttainable = numRecordsFound.intValue();
  }

  return ( numRecordsReturned < maxAttainable );
}
/**
 * Returns the next Asset, fetching a new batch of records from the
 * X-Server when the local queue is empty.
 *
 * @return the next Asset in the result set
 * @throws org.osid.repository.RepositoryException with ASSET_NOT_FETCHED
 *         while the X-Server is still searching/fetching,
 *         NO_MORE_ITERATOR_ELEMENTS at end of results, or
 *         METASEARCH_ERROR on a search failure.
 */
public org.osid.repository.Asset nextAsset()
throws org.osid.repository.RepositoryException {
  LOG.debug( "nextAsset() [entry] - returned: " + numRecordsReturned + "; total: " +
      totalRecordsCursor + "; in queue: " + assetQueue.size() );

  // return Asset, if ready
  if( assetQueue.size() > 0 ) {
    numRecordsReturned++;
    return ( org.osid.repository.Asset ) assetQueue.removeFirst();
  }

  // assetQueue is empty - check whether we should get more records
  // or throw an Exception
  if( hasNextAsset() ) {
    // hasNextAsset() will throw timeout/error Exceptions if any
    String status = null;
    try {
      status = ( String ) statusProperties.getProperty( "status" );
    } catch( org.osid.shared.SharedException se ) {
      LOG.warn( "nextAsset() failed getting status property", se );
    }
    if( status == null || !status.equals( "ready" ) ) {
      // the X-Server is still searching/fetching - try again later
      throw new org.osid.repository.RepositoryException(
          org.sakaibrary.osid.repository.xserver.
          MetasearchException.ASSET_NOT_FETCHED );
    }

    // get records from the X-Server
    MetasearchSession metasearchSession = msm.getMetasearchSession( guid );
    org.osid.shared.Id repositoryId = metasearchSession.getRepositoryId();
    try {
      org.sakaibrary.xserver.XServer xserver =
          new org.sakaibrary.xserver.XServer( guid );
      LOG.debug( "nextAsset() calling XServer.getRecordsXML() - assets in " +
          "queue: " + assetQueue.size() );
      // createAssets() parses the returned XML and appends to assetQueue
      createAssets( xserver.getRecordsXML( totalRecordsCursor ),
          repositoryId );
    } catch( org.sakaibrary.xserver.XServerException xse ) {
      LOG.warn( "X-Server error: " + xse.getErrorCode() + " - " +
          xse.getErrorText() );
      //
      // Have all (or too many) records been merged? If so, indicate
      // we've fetched everything we can (end-of-file)
      //
      if ((xse.getErrorCodeIntValue() == XSERVER_ERROR_MERGE_LIMIT) ||
          (xse.getErrorCodeIntValue() == XSERVER_ERROR_ALL_MERGED))
      {
        LOG.debug("nextAsset(), Xserver Error "
            + xse.getErrorCodeIntValue()
            + ", throwing NO_MORE_ITERATOR_ELEMENTS");
        throw new org.osid.repository.RepositoryException(
            org.osid.shared.SharedException.NO_MORE_ITERATOR_ELEMENTS);
      }
      //
      // Search error
      //
      throw new org.osid.repository.RepositoryException(
          org.sakaibrary.osid.repository.xserver.MetasearchException.
          METASEARCH_ERROR );
    }
    LOG.debug( "nextAsset(), XServer.getRecordsXML() returns - assets in " +
        "queue: " + assetQueue.size() );
    //
    // Make sure there really is an asset available - if not, signal "end-of-file"
    //
    // Note: this issue can come up if a database provides an estimate but
    // no actual results
    //
    if (assetQueue.size() == 0)
    {
      // FIX: corrected typo in the log message ("enpty" -> "empty")
      LOG.debug("nextAsset(), An asset is expected, but the asset queue is empty");
      throw new org.osid.repository.RepositoryException(
          org.osid.shared.SharedException.NO_MORE_ITERATOR_ELEMENTS);
    }
    //
    // records have been fetched and Assets queued
    //
    totalRecordsCursor += assetQueue.size();
    numRecordsReturned++;
    return ( org.osid.repository.Asset ) assetQueue.removeFirst();
  } else {
    // no assets available
    throw new org.osid.repository.RepositoryException(
        org.osid.shared.SharedException.NO_MORE_ITERATOR_ELEMENTS );
  }
}
/**
 * This method parses the xml ByteArrayInputStream with a SAX parser (this
 * class acts as its own DefaultHandler) and creates Assets, Records and
 * Parts in the Repository with the given repositoryId.  Parse failures
 * are logged and swallowed; whatever was queued before the failure remains
 * in assetQueue.
 *
 * NOTE(review): the SAXParserFactory is used with default settings, so
 * DTDs/external entities are not disabled -- consider hardening against
 * XXE, since the XML originates from an external X-Server.
 *
 * @param xml input xml in "sakaibrary" format
 * @param repositoryId the Id of the Repository in which to create Assets,
 *          Records and Parts.
 *
 * @throws org.osid.repository.RepositoryException
 */
private void createAssets( java.io.ByteArrayInputStream xml,
    org.osid.shared.Id repositoryId )
throws org.osid.repository.RepositoryException {
  this.repositoryId = repositoryId;
  recordStructureId = RecordStructure.getInstance().getId();
  textBuffer = new StringBuilder();

  // use a SAX parser
  javax.xml.parsers.SAXParserFactory factory;
  javax.xml.parsers.SAXParser saxParser;

  // set up the parser
  factory = javax.xml.parsers.SAXParserFactory.newInstance();
  factory.setNamespaceAware( true );

  // start parsing
  try {
    saxParser = factory.newSAXParser();
    saxParser.parse( xml, this );
    xml.close();
  } catch (SAXParseException spe) {
    // Use the contained exception, if any
    Exception x = spe;
    if (spe.getException() != null) {
      x = spe.getException();
    }
    // Error generated by the parser
    LOG.warn("createAssets() parsing exception: " +
        spe.getMessage() + " - xml line " + spe.getLineNumber() +
        ", uri " + spe.getSystemId(), x );
  } catch (SAXException sxe) {
    // Error generated by this application
    // (or a parser-initialization error)
    Exception x = sxe;
    if (sxe.getException() != null) {
      x = sxe.getException();
    }
    LOG.warn( "createAssets() SAX exception: " + sxe.getMessage(), x );
  } catch (ParserConfigurationException pce) {
    // Parser with specified options can't be built
    LOG.warn( "createAssets() SAX parser cannot be built with " +
        "specified options" );
  } catch (IOException ioe) {
    // I/O error
    LOG.warn( "createAssets() IO exception", ioe );
  }
}
//----------------------------------
// SAX DEFAULT HANDLER IMPLEMENTATIONS -
//----------------------------------
/**
 * Receive notification of the beginning of an element.
 *
 * @see DefaultHandler
 */
public void startElement( String namespaceURI, String sName,
    String qName, org.xml.sax.Attributes attrs ) throws
    org.xml.sax.SAXException {
  if( !qName.equals( "record" ) ) {
    return;
  }
  // A new record is starting: set up a fresh Asset/Record pair and clear
  // any preferred-URL state left over from the previous record.
  populateAssetFromText( "record_start" );
  preferredUrl = null;
  preferredUrlFormat = null;
}
/**
 * Receive notification of the end of an element.  Delegates to
 * populateAssetFromText(), which consumes the buffered character data
 * for the element that just closed.
 *
 * @see DefaultHandler
 */
public void endElement( String namespaceURI, String sName, String qName )
throws org.xml.sax.SAXException {
  populateAssetFromText( qName );
}
/**
 * Receive notification of character data inside an element.  SAX may
 * deliver one element's text in several calls, so the data is appended to
 * textBuffer rather than overwriting it.
 *
 * @see DefaultHandler
 */
public void characters( char[] buf, int offset, int len )
throws org.xml.sax.SAXException {
  // store character data
  if( textBuffer == null ) {
    textBuffer = new StringBuilder();
  }
  textBuffer.append( buf, offset, len );
}
/**
 * State machine driven by the SAX callbacks: starts a new Asset/Record on
 * the synthetic "record_start" element, finalizes the current record on
 * "record" (date stamp, citation regex fill-in, preferred URL, enqueue),
 * and otherwise maps the buffered element text onto the matching Part.
 * Clears textBuffer after consuming it.
 *
 * @param elementName the SAX element that just ended, or "record_start"
 */
private void populateAssetFromText( String elementName ) {
  // new record
  if( elementName.equals( "record_start" ) ) {
    try {
      // create a new asset... need title, description, assetId
      asset = new Asset( null, null, getId(), repositoryId );

      // create a new record
      record = asset.createRecord( recordStructureId );
    } catch( org.osid.repository.RepositoryException re ) {
      LOG.warn( "populateAssetFromText() failed to " +
          "create new Asset/Record pair.", re );
    }
  } else if( elementName.equals( "record" ) ) {
    // a record has ended: do post-processing //

    // set dateRetrieved
    setDateRetrieved();

    // use inLineCitation to fill in other fields, if possible
    org.osid.repository.Part inLineCitation;
    try {
      if( ( inLineCitation = recordHasPart(
          InLineCitationPartStructure.getInstance().getType() ) )
          != null ) {
        doRegexParse( ( String )inLineCitation.getValue() );
      }
    } catch( org.osid.repository.RepositoryException re ) {
      LOG.warn( "populateAssetFromText() failed to " +
          "gracefully process inLineCitation value.", re );
    }

    // create a preferred URL (if we found all the parts); only an HTML
    // (or unspecified) urlFormat qualifies
    try
    {
      if (preferredUrl != null)
      {
        if ((preferredUrlFormat != null) &&
            !(preferredUrlFormat.equalsIgnoreCase("HTML")))
        {
          LOG.debug("Unexpected URL format: " + preferredUrlFormat);
        }

        if ((preferredUrlFormat == null) ||
            (preferredUrlFormat.equalsIgnoreCase("HTML")))
        {
          record.createPart(PreferredUrlPartStructure.getInstance().getId(),
              preferredUrl);
        }
      }
    }
    catch( org.osid.repository.RepositoryException exception)
    {
      LOG.warn("Failed to create preferred URL Part", exception);
    }
    finally
    {
      // reset preferred-URL state for the next record
      preferredUrl = null;
      preferredUrlFormat = null;
    }

    // All done with this asset
    assetQueue.add( asset );
    return;
  }

  // nothing buffered for this element -- nothing to store
  if( textBuffer == null ) {
    return;
  }

  String text = textBuffer.toString().trim();
  if( text.equals( "" ) ) {
    return;
  }

  // map the element's text to the corresponding Part of the current record
  try {
    if( elementName.equals( "title" ) ) {
      asset.updateDisplayName( text );
    } else if( elementName.equals( "abstract" ) ) {
      asset.updateDescription( text );
    } else if( elementName.equals( "author" ) ) {
      record.createPart( CreatorPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "date" ) ) {
      record.createPart( DatePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "doi" ) ) {
      record.createPart( DOIPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "edition" ) ) {
      record.createPart( EditionPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "inLineCitation" ) ) {
      record.createPart( InLineCitationPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "isnIdentifier" ) ) {
      record.createPart( IsnIdentifierPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "issue" ) ) {
      record.createPart( IssuePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "language" ) ) {
      record.createPart( LanguagePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "note" ) ) {
      record.createPart( NotePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "openUrl" ) ) {
      record.createPart( OpenUrlPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "pages" ) ) {
      createPagesPart( text );
    } else if( elementName.equals( "publisherInfo" ) ) {
      record.createPart( PublisherPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "rights" ) ) {
      record.createPart( RightsPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "sourceTitle" ) ) {
      record.createPart( SourceTitlePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "subject" ) ) {
      record.createPart( SubjectPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "type" ) ) {
      record.createPart( TypePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "url" ) ) {
      // remember the url as a preferred-URL candidate for this record
      record.createPart( URLPartStructure.getInstance().getId(), text );
      preferredUrl = text;
    } else if( elementName.equals( "urlLabel" ) ) {
      record.createPart( URLLabelPartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "urlFormat" ) ) {
      record.createPart( URLFormatPartStructure.getInstance().getId(),
          text );
      preferredUrlFormat = text;
    } else if( elementName.equals( "volume" ) ) {
      record.createPart( VolumePartStructure.getInstance().getId(),
          text );
    } else if( elementName.equals( "volumeIssue" ) ) {
      doRegexParse( text );
    } else if( elementName.equals( "year" ) ) {
      record.createPart( YearPartStructure.getInstance().getId(),
          text );
    }
  } catch( org.osid.repository.RepositoryException re ) {
    LOG.warn( "populateAssetFromText() failed to " +
        "create new Part.", re );
  }

  // element text consumed; clear for the next element
  textBuffer = null;
}
/**
 * Stamps the current record with today's date as a "dateRetrieved" Part,
 * formatted yyyy-MM-dd.  Failures are logged and swallowed.
 */
private void setDateRetrieved() {
  java.util.GregorianCalendar now = new java.util.GregorianCalendar();
  // FIX(idiom): replaced hand-rolled zero-padding of month/day with
  // String.format; output is identical (Calendar months are 0-based).
  String dateRetrieved = String.format( "%04d-%02d-%02d",
      now.get( java.util.Calendar.YEAR ),
      now.get( java.util.Calendar.MONTH ) + 1,
      now.get( java.util.Calendar.DATE ) );

  try {
    record.createPart( DateRetrievedPartStructure.getInstance().getId(),
        dateRetrieved );
  } catch( org.osid.repository.RepositoryException re ) {
    LOG.warn( "setDateRetrieved() failed " +
        "creating new dateRetrieved Part.", re );
  }
}
/**
 * Searches the current record for a Part with the given PartStructure Type.
 *
 * @param partStructureType PartStructure Type of the Part to look up.
 * @return the matching Part, or null if the record has none.
 */
private org.osid.repository.Part recordHasPart(
    org.osid.shared.Type partStructureType ) {
  try {
    for( org.osid.repository.PartIterator pit = record.getParts();
        pit.hasNextPart(); ) {
      org.osid.repository.Part candidate = pit.nextPart();
      if( candidate.getPartStructure().getType()
          .isEqual( partStructureType ) ) {
        return candidate;
      }
    }
  } catch( org.osid.repository.RepositoryException re ) {
    LOG.warn( "recordHasPart() failed getting Parts.", re );
  }

  // no matching Part found
  return null;
}
/**
* This method does its best to map data contained in an inLineCitation to
* other fields such as volume, issue, etc. in the case that they are empty.
* It compares the citation to a known set of regular expressions contained
* in REGULAR_EXPRESSION_FILE. Adding a new regular expression entails
* adding a new case for parsing in this method.
*
* @param citation inLineCitation to be parsed
*/
private void doRegexParse( String citation ) {
String regexName = null;
Pattern pattern;
Matcher matcher;
boolean hasVolume = false;
boolean hasIssue = false;
boolean hasDate = false;
boolean hasPages = false;
boolean hasSourceTitle = false;
for( int i = 0; i < regexArray.size(); i++ ) {
CitationRegex citationRegex = ( CitationRegex ) regexArray.get( i );
pattern = Pattern.compile( citationRegex.getRegex() );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
regexName = citationRegex.getName();
break;
}
}
if( regexName != null ) {
// determine which fields are necessary
try {
hasVolume =
recordHasPart( VolumePartStructure.getInstance().getType() )
== null ? false : true;
hasIssue =
recordHasPart( IssuePartStructure.getInstance().getType() )
== null ? false : true;
hasDate =
recordHasPart( DatePartStructure.getInstance().getType() )
== null ? false : true;
hasPages =
recordHasPart( PagesPartStructure.getInstance().getType() )
== null ? false : true;
hasSourceTitle =
recordHasPart( SourceTitlePartStructure.getInstance().getType() )
== null ? false : true;
// if all true, no need to go further
if( hasVolume && hasIssue && hasDate && hasPages && hasSourceTitle ) {
return;
}
// check for matching regex
if( regexName.equals( "zooRec" ) ) {
// .+ \d+(\(\d+\))?, (.*)? \d{4}: \d+-\d+
if( !hasVolume ) {
pattern = Pattern.compile( "\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group() );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "\\(\\d+\\)" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasDate ) {
pattern = Pattern.compile( ", (.*)? \\d{4}:" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String date = matcher.group().substring( 2,
matcher.group().length()-1 );
record.createPart( DatePartStructure.getInstance().getId(),
date );
}
}
if( !hasPages ) {
pattern = Pattern.compile( "\\d+-\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
createPagesPart( matcher.group() );
}
}
if( !hasSourceTitle ) {
pattern = Pattern.compile( "\\D+\\d" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String sourceTitle = matcher.group().substring( 0,
matcher.group().length()-2 );
record.createPart(
SourceTitlePartStructure.getInstance().getId(),
sourceTitle );
}
}
} else if( regexName.equals( "animBehavAbs" ) ) {
// .+ Vol\. \d+, no\. \d+, (\d+)? pp\.|p\. \d+(-\d+.)? (.*)? \d{4}\.$
if( !hasVolume ) {
pattern = Pattern.compile( "Vol\\. \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "no\\. \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasDate ) {
pattern = Pattern.compile( "(pp\\.|p\\.) \\d+(-\\d+\\.)? (.*)? \\d{4}\\.$" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String date = matcher.group().substring(
matcher.group().indexOf( " ", 4 ) + 1,
matcher.group().length()-1 );
record.createPart( DatePartStructure.getInstance().getId(),
date );
}
}
if( !hasPages ) {
pattern = Pattern.compile( "(pp\\.|p\\.) \\d+(-\\d+\\.)?" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
createPagesPart( matcher.group() );
}
}
if( !hasSourceTitle ) {
pattern = Pattern.compile( ".+ \\[" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String sourceTitle = matcher.group().substring( 0,
matcher.group().length() - 2 );
record.createPart(
SourceTitlePartStructure.getInstance().getId(),
sourceTitle );
}
}
} else if( regexName.equals( "pubMed" ) ) {
// .+ (Volume: \\d+, )?Issue: ((\\d+)|(\\w+)), Date: \\d{4} \\d+ \\d+,( Pages: \\d+-\\d+)?
if( !hasVolume ) {
pattern = Pattern.compile( "Volume: \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "Issue: ((\\d+)|(\\w+))" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String issue = matcher.group().substring( 7,
matcher.group().length() );
record.createPart( IssuePartStructure.getInstance().getId(),
issue );
}
}
if( !hasDate ) {
pattern = Pattern.compile( "Date: \\d{4} \\d+ \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String date = matcher.group().substring( 6,
matcher.group().length() );
date = date.replaceAll( "\\s", "-" );
record.createPart( DatePartStructure.getInstance().getId(),
date );
}
}
if( !hasPages ) {
pattern = Pattern.compile( "\\d+-\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
createPagesPart( matcher.group() );
}
}
if( !hasSourceTitle ) {
pattern = Pattern.compile( ".+\\. Vol" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String sourceTitle = matcher.group().substring( 0,
matcher.group().length()-5 );
record.createPart(
SourceTitlePartStructure.getInstance().getId(),
sourceTitle );
}
}
} else if( regexName.equals( "isiWos" ) ) {
// ^\d+( \(\d+\))?: \w+-.+(.+)?( \w{3})?( \w{3}-\w{3})?( \d+)? \d{4}$
if( !hasVolume ) {
pattern = Pattern.compile( "^\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group() );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "\\(\\d+\\)" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasDate ) {
pattern = Pattern.compile( "( \\w{3})?( \\w{3}-\\w{3})?( \\d+)? \\d{4}$" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( DatePartStructure.getInstance().getId(),
matcher.group().trim() );
}
}
if( !hasPages ) {
pattern = Pattern.compile( " \\w+(-\\w+)?" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
createPagesPart( matcher.group().trim() );
}
}
} else if( regexName.equals( "jstor" ) ) {
// .+, Vol\. \d+(, No\. \d+)?
if( !hasVolume ) {
pattern = Pattern.compile( "Vol\\. \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "No\\. \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasSourceTitle ) {
pattern = Pattern.compile( ".+, Vol" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
String sourceTitle = matcher.group().substring( 0,
matcher.group().length() - 5 );
record.createPart(
SourceTitlePartStructure.getInstance().getId(),
sourceTitle );
}
}
} else if( regexName.equals( "eric" ) ) {
// ^v\d+ n|v\d+ p\d+-\d+( \w{3})?( \w{3}-\w{3})?( \d+)? \d{4}$
if( !hasVolume ) {
pattern = Pattern.compile( "^v\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( " (n|v)\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().trim().replaceAll( "\\D", "" ) );
}
}
if( !hasDate ) {
pattern = Pattern.compile( "( \\w{3})?( \\w{3}-\\w{3})?( \\d+)? \\d{4}$" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( DatePartStructure.getInstance().getId(),
matcher.group().trim() );
}
}
if( !hasPages ) {
pattern = Pattern.compile( "\\d+-\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
createPagesPart( matcher.group() );
}
}
} else if( regexName.equals( "proquest" ) ) {
// ^\d+; \d+(; .+)?
if( !hasVolume ) {
pattern = Pattern.compile( "^\\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group() );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "; \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasSourceTitle ) {
pattern = Pattern.compile( "; \\D+$" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( SourceTitlePartStructure.getInstance().getId(),
matcher.group().substring( 2, matcher.group().length() ) );
}
}
} else if( regexName.equals( "psycInfo" ) ) {
// ^Vol \d+\([\w\p{Punct}]+\))
if( !hasVolume ) {
pattern = Pattern.compile( "^Vol \\d+" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( VolumePartStructure.getInstance().getId(),
matcher.group().replaceAll( "\\D", "" ) );
}
}
if( !hasIssue ) {
pattern = Pattern.compile( "\\(.+\\)" );
matcher = pattern.matcher( citation );
if( matcher.find() ) {
record.createPart( IssuePartStructure.getInstance().getId(),
matcher.group().substring( 1,
matcher.group().length() - 1 ) );
}
}
}
} catch( org.osid.repository.RepositoryException re ) {
LOG.warn( "doRegexParse() failed getting " +
"PartStructure Types.", re );
}
}
}
/**
 * Creates a Pages part on the current record from the given citation text
 * and, when the text contains a "start-end" range, StartPage / EndPage
 * parts as well.
 *
 * NOTE(review): assumes {@code text} is non-empty — all visible callers pass
 * {@code matcher.group()} of a successful match; an empty string would throw
 * StringIndexOutOfBoundsException at charAt(0). Confirm if other callers exist.
 *
 * @param text raw pages text extracted from a citation, e.g. "pp. 12-34"
 * @throws org.osid.repository.RepositoryException if a part cannot be created
 */
private void createPagesPart( String text )
    throws org.osid.repository.RepositoryException {
    if( text.charAt( 0 ) == ',' ) {
        // getting a poorly formatted field
        return;
    }
    // Store the raw pages text verbatim as the Pages part.
    record.createPart( PagesPartStructure.getInstance().getId(), text );
    // get start and end page if possible
    String [] pages = text.split( "-" );
    if( pages.length == 0 ) {
        // cannot create start/end page. (split() returns an empty array when
        // the text consists solely of "-" separators)
        return;
    }
    String spage = pages[ 0 ].trim();
    // delete all non-digit chars (ie: p., pp., etc.)
    spage = spage.replaceAll( "\\D", "" );
    // create startPage part
    record.createPart( StartPagePartStructure.getInstance().getId(),
                       spage );
    // end page — only created when the text split into exactly two halves,
    // i.e. a single "start-end" range.
    if( pages.length == 2 ) {
        String epage = pages[ 1 ].trim();
        epage = epage.replaceAll( "\\D", "" );
        record.createPart( EndPagePartStructure.getInstance().getId(),
                           epage );
    }
}
/**
 * Generates a pseudo-unique identifier for an asset.
 *
 * Fix: the original concatenated {@code Math.random() * 1000} directly, which
 * is a {@code double}, so every id embedded a decimal point and a long
 * fractional mantissa (e.g. "asset736.4072...1699999"). Truncating to an int
 * keeps the "asset" prefix and random+timestamp uniqueness scheme while
 * producing a clean digits-only suffix.
 *
 * NOTE(review): uniqueness is probabilistic only (random 0-999 plus the
 * current millisecond); collisions are unlikely but not impossible.
 *
 * @return an id of the form "asset" + random int [0,1000) + currentTimeMillis
 */
private String getId() {
    return "asset" +
           (int) ( Math.random() * 1000 ) +
           System.currentTimeMillis();
}
}
| |
package com.nucleus.opengl.lwjgl3;
import java.lang.reflect.Field;
import java.nio.IntBuffer;
import java.util.Objects;
import org.lwjgl.egl.EGL;
import org.lwjgl.egl.EGL10;
import org.lwjgl.egl.EGLCapabilities;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWErrorCallback;
import org.lwjgl.glfw.GLFWNativeEGL;
import org.lwjgl.glfw.GLFWVidMode;
import org.lwjgl.opengl.GL;
import org.lwjgl.opengles.GLES;
import org.lwjgl.opengles.GLESCapabilities;
import org.lwjgl.system.MemoryStack;
import org.lwjgl.system.MemoryUtil;
import com.nucleus.Backend.BackendFactory;
import com.nucleus.CoreApp;
import com.nucleus.J2SEWindow;
import com.nucleus.J2SEWindowApplication.PropertySettings;
import com.nucleus.SimpleLogger;
import com.nucleus.common.Environment;
import com.nucleus.egl.EGLUtils;
import com.nucleus.opengl.GLESWrapper.GLES20;
import com.nucleus.profiling.FrameSampler;
import com.nucleus.renderer.NucleusRenderer.RenderContextListener;
import com.nucleus.renderer.SurfaceConfiguration;
/**
 * LWJGL3-backed window that sets up a GLFW window plus an EGL/GLES context and
 * drives the render loop on its own thread.
 *
 * Fixes relative to the previous revision:
 * - the identical key callback was registered twice via glfwSetKeyCallback;
 *   the second registration replaced the first and leaked the first callback
 *   object, so the duplicate is removed;
 * - the unused {@code EGL10.eglGetDisplay(window)} call was dropped (its
 *   result was ignored, and it passed a GLFW window handle where a native
 *   display handle is expected — the real display comes from
 *   {@link GLFWNativeEGL#glfwGetEGLDisplay()});
 * - the {@code thread} field is now actually assigned in the constructor
 *   (previously it stayed null even though {@link #run()} clears it on exit).
 */
public class LWJGLEGLWindow extends J2SEWindow implements Runnable {

    /** Render thread started in the constructor; cleared when {@link #run()} exits. */
    Thread thread;
    protected SurfaceConfiguration surfaceConfig;
    /** Listener notified when the rendering surface is lost; may be null. */
    protected RenderContextListener renderListener;
    /** GLFW window handle; MemoryUtil.NULL until {@link #createEglContext()} runs. */
    protected long window;
    /** GLES capabilities of the created context; set in {@link #createEglContext()}. */
    protected GLESCapabilities gles;
    Environment env;

    /**
     * Special surface attribs that may be specified when creating the surface - see
     * https://www.khronos.org/registry/EGL/sdk/docs/man/html/eglCreateWindowSurface.xhtml
     * Shall be terminated by EGL_NONE
     * EGL_RENDER_BUFFER
     * EGL_VG_ALPHA_FORMAT
     * EGL_VG_COLORSPACE
     */
    protected int[] surfaceAttribs;

    public LWJGLEGLWindow(BackendFactory factory, CoreApp.CoreAppStarter coreAppStarter, PropertySettings appSettings) {
        super(factory, coreAppStarter, appSettings);
        env = Environment.getInstance();
        // Keep the reference so run() clearing `thread = null` is meaningful.
        // NOTE(review): starting a thread from the constructor publishes `this`
        // before construction completes — preserved from the original design.
        thread = new Thread(this);
        thread.start();
    }

    @Override
    public VideoMode init(PropertySettings appSettings) {
        backend = factory.createBackend(appSettings.version, window, null);
        return new VideoMode(appSettings.width, appSettings.height, appSettings.fullscreen, appSettings.swapInterval);
    }

    /**
     * Creates a list of egl config attribs for the surface config, if surfaceConfig is null then
     * {@link #createDefaultConfigAttribs()} is called.
     *
     * @param surfaceConfig
     * @return
     */
    protected int[] createEGLConfigAttribs(SurfaceConfiguration surfaceConfig) {
        if (surfaceConfig != null) {
            return EGLUtils.createConfig(surfaceConfig);
        } else {
            // Create default.
            return createDefaultConfigAttribs();
        }
    }

    /**
     * Creates the egl context, default is to set client version to {@link #version#major}.
     * Currently a no-op hook for subclasses.
     *
     * @throws IllegalArgumentException If context could not be created
     */
    protected void createEGLContext() {
    }

    /**
     * Choose the desired config. Currently a no-op hook for subclasses.
     */
    protected void chooseEGLConfig() {
    }

    /**
     * Initializes GLFW, creates the (hidden) window and a GL context, wires the
     * escape-key close callback, initializes EGL on the GLFW EGL display and
     * logs the EGL / GLES capabilities.
     */
    protected void createEglContext() {
        GLFWErrorCallback.createPrint().set();
        if (!GLFW.glfwInit()) {
            throw new IllegalStateException("Unable to initialize glfw");
        }
        GLFW.glfwDefaultWindowHints();
        GLFW.glfwWindowHint(GLFW.GLFW_VISIBLE, GLFW.GLFW_FALSE);
        GLFW.glfwWindowHint(GLFW.GLFW_RESIZABLE, GLFW.GLFW_TRUE);
        GLFW.glfwWindowHint(GLFW.GLFW_OPENGL_PROFILE, GLFW.GLFW_OPENGL_COMPAT_PROFILE);
        GLFW.glfwWindowHint(GLFW.GLFW_OPENGL_FORWARD_COMPAT, GLES20.GL_TRUE);
        // GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_CREATION_API, GLFW.GLFW_EGL_CONTEXT_API);
        GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_CREATION_API, GLFW.GLFW_NATIVE_CONTEXT_API);
        GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_VERSION_MAJOR, 4);
        GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_VERSION_MINOR, 3);
        // GLFW.glfwWindowHint(GLFW.GLFW_CLIENT_API, GLFW.GLFW_OPENGL_ES_API);
        GLFW.glfwWindowHint(GLFW.GLFW_CLIENT_API, GLFW.GLFW_OPENGL_API);
        // pretend we're using GLES in windows, instead use a subset of OpenGL 2.0 as GLES 2.0
        // Bypasses the default create() method.
        // Configuration.EGL_LIBRARY_NAME.set(Pointer.BITS64 ? "libEGL32" : "libEGL32");
        // Configuration.EGL_EXPLICIT_INIT.set(true);
        // EGL.create(EGL.getFunctionProvider());
        // GLES.create(GL.getFunctionProvider());
        // Configuration.OPENGLES_LIBRARY_NAME.set("opengl32");
        // Configuration.OPENGLES_EXPLICIT_INIT.set(true);
        // GLES.create(GLES.getFunctionProvider()); // omg?!
        int WIDTH = 300;
        int HEIGHT = 300;
        window = GLFW.glfwCreateWindow(WIDTH, HEIGHT, "", MemoryUtil.NULL, MemoryUtil.NULL);
        if (window == MemoryUtil.NULL) {
            throw new RuntimeException("Failed to create the GLFW window");
        }
        long monitor = GLFW.glfwGetPrimaryMonitor();
        // Fail fast when the primary monitor reports no video mode (value itself unused).
        Objects.requireNonNull(GLFW.glfwGetVideoMode(monitor));
        GLFW.glfwMakeContextCurrent(window);
        org.lwjgl.system.Configuration.OPENGLES_EXPLICIT_INIT.set(true);
        GLES.create(GL.getFunctionProvider());
        gles = GLES.createCapabilities();
        // Close the window when escape is released. Registered exactly once:
        // glfwSetKeyCallback replaces any previous callback, so the former
        // duplicate registration only leaked the first callback instance.
        GLFW.glfwSetKeyCallback(window, (windowHnd, key, scancode, action, mods) -> {
            if (action == GLFW.GLFW_RELEASE && key == GLFW.GLFW_KEY_ESCAPE) {
                GLFW.glfwSetWindowShouldClose(windowHnd, true);
            }
        });
        // EGL capabilities
        long dpy = GLFWNativeEGL.glfwGetEGLDisplay();
        if (dpy == EGL10.EGL_NO_DISPLAY) {
            throw new IllegalArgumentException("EGL_NO_DISPLAY");
        }
        EGLCapabilities egl;
        try (MemoryStack stack = MemoryStack.stackPush()) {
            IntBuffer major = stack.mallocInt(1);
            IntBuffer minor = stack.mallocInt(1);
            if (!EGL10.eglInitialize(dpy, major, minor)) {
                throw new IllegalStateException(String.format("Failed to initialize EGL [0x%X]", EGL10.eglGetError()));
            }
            egl = EGL.createDisplayCapabilities(dpy, major.get(0), minor.get(0));
        }
        try {
            System.out.println("EGL Capabilities:");
            for (Field f : EGLCapabilities.class.getFields()) {
                if (f.getType() == boolean.class) {
                    if (f.get(egl).equals(Boolean.TRUE)) {
                        System.out.println("\t" + f.getName());
                    }
                }
            }
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
        try {
            System.out.println("OpenGL ES Capabilities:");
            for (Field f : GLESCapabilities.class.getFields()) {
                if (f.getType() == boolean.class) {
                    if (f.get(gles).equals(Boolean.TRUE)) {
                        System.out.println("\t" + f.getName());
                    }
                }
            }
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
    }

    /** Default EGL config attribs; subclasses may override. Returns null (no explicit config). */
    protected int[] createDefaultConfigAttribs() {
        return null;
    }

    /** No-op hook: surface creation is handled by GLFW in this implementation. */
    protected void createEglSurface() {
    }

    /** Creates context and surface, then makes the context current. */
    protected void createEGL() {
        createEglContext();
        createEglSurface();
        makeCurrent();
    }

    /** No-op hook: glfwMakeContextCurrent is already called during context creation. */
    protected void makeCurrent() {
    }

    public void setRenderContextListener(RenderContextListener listener) {
        this.renderListener = listener;
    }

    /**
     * Sets the egl swap interval, if no EGLDisplay exists then nothing is done.
     *
     * @param interval
     */
    public void setEGLSwapInterval(int interval) {
    }

    /**
     * Sets an egl surfaceattrib, if EGLDisplay or EGLSurface is null then nothing is done.
     *
     * @param attribute
     * @param value
     */
    public void setEGLSurfaceAttrib(int attribute, int value) {
    }

    @Override
    public void run() {
        SimpleLogger.d(getClass(), "Starting EGL surface thread");
        createEGL();
        // while (surface != null) {
        internalDoFrame();
        // }
        if (renderListener != null) {
            renderListener.surfaceLost();
        }
        SimpleLogger.d(getClass(), "Exiting surface thread");
        thread = null;
    }

    /** Renders one frame and presents it. */
    protected void internalDoFrame() {
        drawFrame();
        // if (EGLSurface != null) {
        swapBuffers();
        // }
    }

    /**
     * Swapbuffers and synchronize
     */
    protected void swapBuffers() {
        Environment env = Environment.getInstance();
        long start = System.currentTimeMillis();
        // EGL14.eglSwapBuffers(EglDisplay, EGLSurface);
        boolean eglWaitGL = env.isProperty(Environment.Property.EGLWAITGL, false);
        if (eglWaitGL) {
            // EGL14.eglWaitGL();
        }
        FrameSampler.getInstance().addTag(FrameSampler.Samples.EGLSWAPBUFFERS.name() + "-WAITGL=" + eglWaitGL,
                start,
                System.currentTimeMillis(), FrameSampler.Samples.EGLSWAPBUFFERS.detail);
    }

    @Override
    public void internalCreateCoreApp(int width, int height) {
        super.internalCreateCoreApp(width, height);
    }

    @Override
    public void setVisible(boolean visible) {
        if (visible) {
            GLFW.glfwShowWindow(window);
        } else {
            GLFW.glfwHideWindow(window);
        }
    }

    @Override
    public void setWindowTitle(String title) {
        if (window != 0) {
            GLFW.glfwSetWindowTitle(window, title);
        }
    }

    @Override
    public VideoMode setVideoMode(VideoMode videoMode, int monitorIndex) {
        throw new IllegalArgumentException("Not implemented");
    }

    @Override
    public void destroy() {
        throw new IllegalArgumentException("Not implemented");
    }
}
| |
package com.mapswithme.maps;
import android.app.Application;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.multidex.MultiDex;
import android.util.Log;
import com.appsflyer.AppsFlyerLib;
import com.mapswithme.maps.analytics.ExternalLibrariesMediator;
import com.mapswithme.maps.background.AppBackgroundTracker;
import com.mapswithme.maps.background.NotificationChannelFactory;
import com.mapswithme.maps.background.NotificationChannelProvider;
import com.mapswithme.maps.background.Notifier;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.downloader.CountryItem;
import com.mapswithme.maps.downloader.MapManager;
import com.mapswithme.maps.editor.Editor;
import com.mapswithme.maps.geofence.GeofenceRegistry;
import com.mapswithme.maps.geofence.GeofenceRegistryImpl;
import com.mapswithme.maps.location.LocationHelper;
import com.mapswithme.maps.location.TrackRecorder;
import com.mapswithme.maps.maplayer.subway.SubwayManager;
import com.mapswithme.maps.maplayer.traffic.TrafficManager;
import com.mapswithme.maps.base.MediaPlayerWrapper;
import com.mapswithme.maps.routing.RoutingController;
import com.mapswithme.maps.scheduling.ConnectivityJobScheduler;
import com.mapswithme.maps.scheduling.ConnectivityListener;
import com.mapswithme.maps.sound.TtsPlayer;
import com.mapswithme.maps.ugc.UGC;
import com.mapswithme.util.Config;
import com.mapswithme.util.Counters;
import com.mapswithme.util.KeyValue;
import com.mapswithme.util.SharedPropertiesUtils;
import com.mapswithme.util.StorageUtils;
import com.mapswithme.util.ThemeSwitcher;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.log.Logger;
import com.mapswithme.util.log.LoggerFactory;
import com.mapswithme.util.statistics.Statistics;
import java.util.HashMap;
import java.util.List;
/**
 * Application subclass: owns process-wide singletons (background tracker,
 * subway manager, media player, geofence registry, ...) and drives two-phase
 * native initialization — platform ({@link #initNativePlatform()}) then
 * framework ({@link #initNativeFramework()}) — triggered via {@link #initCore()}.
 */
public class MwmApplication extends Application
{
  private Logger mLogger;
  private final static String TAG = "MwmApplication";

  // Static self-reference backing the deprecated static accessors below.
  // NOTE(review): intentionally keeps the Application instance globally
  // reachable; prefer the Context-based overloads.
  private static MwmApplication sSelf;

  // Lazily created in prefs() / assigned in onCreate().
  private SharedPreferences mPrefs;
  private AppBackgroundTracker mBackgroundTracker;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private SubwayManager mSubwayManager;

  // Guards against re-running native init (see initNativeFramework / initNativePlatform).
  private boolean mFrameworkInitialized;
  private boolean mPlatformInitialized;

  // Handler bound to the main looper; used by forwardToMainThread() to run
  // native tasks on the UI thread.
  private Handler mMainLoopHandler;
  // Token attached to forwarded messages so they could be removed as a group.
  private final Object mMainQueueToken = new Object();
  @NonNull
  private final AppBackgroundTracker.OnVisibleAppLaunchListener mVisibleAppLaunchListener = new VisibleAppLaunchListener();
  @SuppressWarnings("NullableProblems")
  @NonNull
  private ConnectivityListener mConnectivityListener;
  @NonNull
  private final MapManager.StorageCallback mStorageCallbacks = new StorageCallbackImpl();
  @SuppressWarnings("NullableProblems")
  @NonNull
  private AppBackgroundTracker.OnTransitionListener mBackgroundListener;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private ExternalLibrariesMediator mMediator;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private PurchaseOperationObservable mPurchaseOperationObservable;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private MediaPlayerWrapper mPlayer;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private GeofenceRegistry mGeofenceRegistry;

  @NonNull
  public SubwayManager getSubwayManager()
  {
    return mSubwayManager;
  }

  public MwmApplication()
  {
    super();
    sSelf = this;
  }

  /** @deprecated Use the Context-based accessors instead of the static singleton. */
  @Deprecated
  public static MwmApplication get()
  {
    return sSelf;
  }

  /**
   *
   * Use {@link #backgroundTracker(Context)} instead.
   */
  @Deprecated
  public static AppBackgroundTracker backgroundTracker()
  {
    return sSelf.mBackgroundTracker;
  }

  @NonNull
  public static AppBackgroundTracker backgroundTracker(@NonNull Context context)
  {
    return ((MwmApplication) context.getApplicationContext()).getBackgroundTracker();
  }

  /**
   *
   * Use {@link #prefs(Context)} instead.
   */
  @Deprecated
  public synchronized static SharedPreferences prefs()
  {
    // Lazy init: may run before onCreate() has populated mPrefs.
    if (sSelf.mPrefs == null)
      sSelf.mPrefs = sSelf.getSharedPreferences(sSelf.getString(R.string.pref_file_name), MODE_PRIVATE);
    return sSelf.mPrefs;
  }

  @NonNull
  public static SharedPreferences prefs(@NonNull Context context)
  {
    String prefFile = context.getString(R.string.pref_file_name);
    return context.getSharedPreferences(prefFile, MODE_PRIVATE);
  }

  @Override
  protected void attachBaseContext(Context base)
  {
    super.attachBaseContext(base);
    // Enables multidex support on pre-Lollipop devices.
    MultiDex.install(this);
  }

  @SuppressWarnings("ResultOfMethodCallIgnored")
  @Override
  public void onCreate()
  {
    super.onCreate();
    // Order matters: logging first, then analytics mediator, then the
    // remaining singletons that may log or report during construction.
    mBackgroundListener = new TransitionListener(this);
    LoggerFactory.INSTANCE.initialize(this);
    mLogger = LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.MISC);
    mLogger.d(TAG, "Application is created");
    mMainLoopHandler = new Handler(getMainLooper());
    mMediator = new ExternalLibrariesMediator(this);
    mMediator.initSensitiveDataToleranceLibraries();
    mMediator.initSensitiveDataStrictLibrariesAsync();
    Statistics.INSTANCE.setMediator(mMediator);
    mPrefs = getSharedPreferences(getString(R.string.pref_file_name), MODE_PRIVATE);
    initNotificationChannels();
    mBackgroundTracker = new AppBackgroundTracker();
    mBackgroundTracker.addListener(mVisibleAppLaunchListener);
    mSubwayManager = new SubwayManager(this);
    mConnectivityListener = new ConnectivityJobScheduler(this);
    mConnectivityListener.listen();
    mPurchaseOperationObservable = new PurchaseOperationObservable();
    mPlayer = new MediaPlayerWrapper(this);
    mGeofenceRegistry = new GeofenceRegistryImpl(this);
  }

  /** Registers the notification channels used for UGC and download notifications. */
  private void initNotificationChannels()
  {
    NotificationChannelProvider channelProvider = NotificationChannelFactory.createProvider(this);
    channelProvider.setUGCChannel();
    channelProvider.setDownloadingChannel();
  }

  /**
   * Initialize native core of application: platform and framework. Caller must handle returned value
   * and do nothing with native code if initialization is failed.
   *
   * @return boolean - indicator whether native initialization is successful or not.
   */
  public boolean initCore()
  {
    initNativePlatform();
    if (!mPlatformInitialized)
      return false;
    initNativeFramework();
    return mFrameworkInitialized;
  }

  /**
   * Creates platform storage directories and calls nativeInitPlatform().
   * Leaves mPlatformInitialized false when directories cannot be created
   * (e.g. damaged or read-only external storage).
   */
  private void initNativePlatform()
  {
    if (mPlatformInitialized)
      return;
    final boolean isInstallationIdFound = mMediator.setInstallationIdToCrashlytics();
    final String settingsPath = StorageUtils.getSettingsPath();
    mLogger.d(TAG, "onCreate(), setting path = " + settingsPath);
    final String filesPath = StorageUtils.getFilesPath(this);
    mLogger.d(TAG, "onCreate(), files path = " + filesPath);
    final String tempPath = StorageUtils.getTempPath(this);
    mLogger.d(TAG, "onCreate(), temp path = " + tempPath);
    // If platform directories are not created it means that native part of app will not be able
    // to work at all. So, we just ignore native part initialization in this case, e.g. when the
    // external storage is damaged or not available (read-only).
    if (!createPlatformDirectories(settingsPath, filesPath, tempPath))
      return;
    // First we need initialize paths and platform to have access to settings and other components.
    nativeInitPlatform(StorageUtils.getApkPath(this), StorageUtils.getStoragePath(settingsPath),
                       filesPath, tempPath, StorageUtils.getObbGooglePath(), BuildConfig.FLAVOR,
                       BuildConfig.BUILD_TYPE, UiUtils.isTablet());
    Config.setStatisticsEnabled(SharedPropertiesUtils.isStatisticsEnabled());
    // Touch the Statistics singleton so it is constructed at this point.
    @SuppressWarnings("unused")
    Statistics s = Statistics.INSTANCE;
    // Retry once if the installation id was not available on the first attempt.
    if (!isInstallationIdFound)
      mMediator.setInstallationIdToCrashlytics();
    mBackgroundTracker.addListener(mBackgroundListener);
    TrackRecorder.init();
    Editor.init(this);
    UGC.init(this);
    mPlatformInitialized = true;
  }

  /**
   * Creates the three platform directories; returns false when any creation
   * fails or when bad-external-storage emulation is enabled (for testing).
   */
  private boolean createPlatformDirectories(@NonNull String settingsPath, @NonNull String filesPath,
                                            @NonNull String tempPath)
  {
    if (SharedPropertiesUtils.shouldEmulateBadExternalStorage())
      return false;
    return StorageUtils.createDirectory(settingsPath) &&
           StorageUtils.createDirectory(filesPath) &&
           StorageUtils.createDirectory(tempPath);
  }

  /**
   * Initializes the native framework and the components that depend on it
   * (bookmarks, TTS, location, routing, traffic, subway, purchases).
   * Requires initNativePlatform() to have succeeded first.
   */
  private void initNativeFramework()
  {
    if (mFrameworkInitialized)
      return;
    nativeInitFramework();
    MapManager.nativeSubscribe(mStorageCallbacks);
    initNativeStrings();
    BookmarkManager.loadBookmarks();
    TtsPlayer.INSTANCE.init(this);
    ThemeSwitcher.restart(false);
    LocationHelper.INSTANCE.initialize();
    RoutingController.get().initialize();
    TrafficManager.INSTANCE.initialize();
    SubwayManager.from(this).initialize();
    mPurchaseOperationObservable.initialize();
    mFrameworkInitialized = true;
  }

  /** Pushes localized UI strings down to the native core. */
  private void initNativeStrings()
  {
    nativeAddLocalization("core_entrance", getString(R.string.core_entrance));
    nativeAddLocalization("core_exit", getString(R.string.core_exit));
    nativeAddLocalization("core_my_places", getString(R.string.core_my_places));
    nativeAddLocalization("core_my_position", getString(R.string.core_my_position));
    nativeAddLocalization("core_placepage_unknown_place", getString(R.string.core_placepage_unknown_place));
    nativeAddLocalization("wifi", getString(R.string.wifi));
  }

  public boolean arePlatformAndCoreInitialized()
  {
    return mFrameworkInitialized && mPlatformInitialized;
  }

  @NonNull
  public AppBackgroundTracker getBackgroundTracker()
  {
    return mBackgroundTracker;
  }

  static
  {
    // Load the native core before any native method can be invoked.
    System.loadLibrary("mapswithme");
  }

  // Called from native code (hence "unused" suppression).
  @SuppressWarnings("unused")
  void sendAppsFlyerTags(@NonNull String tag, @NonNull KeyValue[] params)
  {
    HashMap<String, Object> paramsMap = new HashMap<>();
    for (KeyValue p : params)
      paramsMap.put(p.mKey, p.mValue);
    AppsFlyerLib.getInstance().trackEvent(this, tag, paramsMap);
  }

  // Called from native code (hence "unused" suppression).
  @SuppressWarnings("unused")
  void sendPushWooshTags(String tag, String[] values)
  {
    getMediator().getEventLogger().sendTags(tag, values);
  }

  @NonNull
  public ExternalLibrariesMediator getMediator()
  {
    return mMediator;
  }

  @NonNull
  PurchaseOperationObservable getPurchaseOperationObservable()
  {
    return mPurchaseOperationObservable;
  }

  public static void onUpgrade()
  {
    Counters.resetAppSessionCounters();
  }

  /**
   * Called from native code: schedules execution of the given native task
   * pointer on the main (UI) thread.
   */
  @SuppressWarnings("unused")
  void forwardToMainThread(final long taskPointer)
  {
    Message m = Message.obtain(mMainLoopHandler, new Runnable()
    {
      @Override
      public void run()
      {
        nativeProcessTask(taskPointer);
      }
    });
    // Tag the message so this queue's messages are identifiable as a group.
    m.obj = mMainQueueToken;
    mMainLoopHandler.sendMessage(m);
  }

  @NonNull
  public ConnectivityListener getConnectivityListener()
  {
    return mConnectivityListener;
  }

  @NonNull
  public MediaPlayerWrapper getMediaPlayer()
  {
    return mPlayer;
  }

  @NonNull
  public GeofenceRegistry getGeofenceRegistry()
  {
    return mGeofenceRegistry;
  }

  private native void nativeInitPlatform(String apkPath, String storagePath, String privatePath,
                                         String tmpPath, String obbGooglePath, String flavorName,
                                         String buildType, boolean isTablet);

  private static native void nativeInitFramework();

  private static native void nativeProcessTask(long taskPointer);

  private static native void nativeAddLocalization(String name, String value);

  /** Reports cold-startup statistics each time the app becomes visible. */
  private static class VisibleAppLaunchListener implements AppBackgroundTracker.OnVisibleAppLaunchListener
  {
    @Override
    public void onVisibleAppLaunch()
    {
      Statistics.INSTANCE.trackColdStartupInfo();
    }
  }

  /** Shows a notification and logs an error stat when a leaf map download fails. */
  private class StorageCallbackImpl implements MapManager.StorageCallback
  {
    @Override
    public void onStatusChanged(List<MapManager.StorageCallbackData> data)
    {
      Notifier notifier = Notifier.from(MwmApplication.this);
      for (MapManager.StorageCallbackData item : data)
        if (item.isLeafNode && item.newStatus == CountryItem.STATUS_FAILED)
        {
          if (MapManager.nativeIsAutoretryFailed())
          {
            notifier.notifyDownloadFailed(item.countryId, MapManager.nativeGetName(item.countryId));
            MapManager.sendErrorStat(Statistics.EventName.DOWNLOADER_ERROR, MapManager.nativeGetError(item.countryId));
          }
          // Only the first failed leaf is handled per callback invocation.
          return;
        }
    }

    @Override
    public void onProgress(String countryId, long localSize, long remoteSize) {}
  }

  /** Zips log files when the app transitions to the background (if file logging is on). */
  private static class TransitionListener implements AppBackgroundTracker.OnTransitionListener
  {
    @NonNull
    private final MwmApplication mApplication;

    TransitionListener(@NonNull MwmApplication application)
    {
      mApplication = application;
    }

    @Override
    public void onTransit(boolean foreground)
    {
      if (!foreground && LoggerFactory.INSTANCE.isFileLoggingEnabled())
      {
        Log.i(TAG, "The app goes to background. All logs are going to be zipped.");
        LoggerFactory.INSTANCE.zipLogs(null);
      }
    }
  }
}
| |
/*
* Copyright (C) 2014 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.math;
import com.google.common.collect.ImmutableMap;
import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
/**
* Enumerates several algorithms providing equivalent functionality to {@link Quantiles}, for use in
* {@link QuantilesBenchmark}. These algorithms each calculate either a single quantile or multiple
* quantiles. All algorithms modify the dataset they are given (the cost of a copy to avoid this
* will be constant across algorithms).
*
* @author Pete Gillin
* @since 20.0
*/
enum QuantilesAlgorithm {
/**
* Sorts the dataset, and picks values from it. When computing multiple quantiles, we sort once
* and pick multiple values.
*/
SORTING {
@Override
double singleQuantile(int index, int scale, double[] dataset) {
Arrays.sort(dataset);
return singleQuantileFromSorted(index, scale, dataset);
}
@Override
Map<Integer, Double> multipleQuantiles(
Collection<Integer> indexes, int scale, double[] dataset) {
Arrays.sort(dataset);
ImmutableMap.Builder<Integer, Double> builder = ImmutableMap.builder();
for (int index : indexes) {
builder.put(index, singleQuantileFromSorted(index, scale, dataset));
}
return builder.build();
}
private double singleQuantileFromSorted(int index, int scale, double[] dataset) {
long numerator = (long) index * (dataset.length - 1);
int positionFloor = (int) LongMath.divide(numerator, scale, RoundingMode.DOWN);
int remainder = (int) (numerator - positionFloor * scale);
if (remainder == 0) {
return dataset[positionFloor];
} else {
double positionFrac = (double) remainder / scale;
return dataset[positionFloor]
+ positionFrac * (dataset[positionFloor + 1] - dataset[positionFloor]);
}
}
},
/**
* Uses quickselect. When calculating multiple quantiles, each quickselect starts from scratch.
*/
QUICKSELECT {
@Override
double singleQuantile(int index, int scale, double[] dataset) {
long numerator = (long) index * (dataset.length - 1);
int positionFloor = (int) LongMath.divide(numerator, scale, RoundingMode.DOWN);
int remainder = (int) (numerator - positionFloor * scale);
double percentileFloor = select(positionFloor, dataset);
if (remainder == 0) {
return percentileFloor;
} else {
double percentileCeiling = getMinValue(dataset, positionFloor + 1);
double positionFrac = (double) remainder / scale;
return percentileFloor + positionFrac * (percentileCeiling - percentileFloor);
}
}
@Override
Map<Integer, Double> multipleQuantiles(
Collection<Integer> indexes, int scale, double[] dataset) {
ImmutableMap.Builder<Integer, Double> builder = ImmutableMap.builder();
for (int index : indexes) {
builder.put(index, singleQuantile(index, scale, dataset));
}
return builder.build();
}
},
/** Uses {@link Quantiles}. */
TARGET {
@Override
double singleQuantile(int index, int scale, double[] dataset) {
return Quantiles.scale(scale).index(index).computeInPlace(dataset);
}
@Override
Map<Integer, Double> multipleQuantiles(
Collection<Integer> indexes, int scale, double[] dataset) {
return Quantiles.scale(scale).indexes(indexes).computeInPlace(dataset);
}
},
;
/**
* Calculates a single quantile. Equivalent to {@code
* Quantiles.scale(scale).index(index).computeInPlace(dataset)}.
*/
abstract double singleQuantile(int index, int scale, double[] dataset);
/**
* Calculates multiple quantiles. Equivalent to {@code
* Quantiles.scale(scale).indexes(indexes).computeInPlace(dataset)}.
*/
abstract Map<Integer, Double> multipleQuantiles(
Collection<Integer> indexes, int scale, double[] dataset);
static double getMinValue(double[] array, int from) {
// This is basically a copy of com.google.math.Rank#getMinValue, with a small change in the
// method signature: we always search to the end of the array.
int min = from;
for (int i = from + 1; i < array.length; i++) {
if (array[min] > array[i]) {
min = i;
}
}
return array[min];
}
/**
 * Returns the k-th smallest element (0-based) of {@code array}, partially
 * reordering the array in place (quickselect with median-of-three pivoting).
 */
static double select(int k, double[] array) {
    // This is basically a copy of com.google.math.Rank#select, with a small change in the method
    // signature: we make k 0-based rather than 1-based; and we drop from and to, and always work on
    // the whole array.
    int from = 0;
    int to = array.length - 1;
    while (true) {
        if (to <= from + 1) {
            // Two or less elements left.
            if (to == from + 1 && array[to] < array[from]) {
                // Exactly two elements left.
                swap(array, from, to);
            }
            return array[k];
        } else {
            int midIndex = (from + to) >>> 1;
            // Choose the median of the elements at the from, to and mid indexes,
            // and rearrange so that array[from] <= array[from + 1], and
            // array[to] >= array[from + 1].
            swap(array, midIndex, from + 1);
            if (array[from] > array[to]) {
                swap(array, from, to);
            }
            if (array[from + 1] > array[to]) {
                swap(array, from + 1, to);
            }
            if (array[from] > array[from + 1]) {
                swap(array, from, from + 1);
            }
            // Perform a partition with the selected median.
            int low = from + 1, high = to; // Indexes for partitioning.
            double partition = array[from + 1]; // Choose partitioning element.
            while (true) {
                // Skip the elements smaller than the partition.
                do {
                    low++;
                } while (array[low] < partition);
                // Skip the elements larger than the partition.
                do {
                    high--;
                } while (array[high] > partition);
                if (high < low) {
                    break; // Pointers crossed. Partitioning complete.
                }
                swap(array, low, high); // End of innermost loop.
            }
            array[from + 1] = array[high]; // Insert partitioning element.
            array[high] = partition;
            // Continue the partition that contains the kth element.
            if (high >= k) {
                to = high - 1;
            }
            if (high <= k) {
                from = low;
            }
        }
    }
}
/** Swaps the elements at indexes {@code i} and {@code j} of {@code array}. */
private static void swap(double[] array, int i, int j) {
    // This is a copy of com.google.math.Rank#swap.
    double temp = array[i];
    array[i] = array[j];
    array[j] = temp;
}
}
| |
package nl.github.martijn9612.fishy.states;
import java.util.Enumeration;
import java.util.Vector;
import org.newdawn.slick.Color;
import org.newdawn.slick.Font;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Image;
import org.newdawn.slick.Input;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.TrueTypeFont;
import org.newdawn.slick.state.BasicGameState;
import org.newdawn.slick.state.StateBasedGame;
import nl.github.martijn9612.fishy.Main;
import nl.github.martijn9612.fishy.models.Button;
import nl.github.martijn9612.fishy.position.MousePosition;
/**
* Implements the HelpState which shows instructions to the game.
* Software Engineering Methods Project - Group 11.
*/
public class HelpState extends BasicGameState {
private Image background;
private Image poison;
private Image shield;
private Image extralife;
private Image speedup;
private Image fish;
private Image squid;
private Image whale;
private MousePosition mouse;
private Button backButton;
private Font textFont;
private Font titleFont;
private Font introFont;
private Color myBlue = new Color(70, 175, 230);
public static int PREVIOUS_STATE = 0;
public static final int STATE_ID = 4;
private static final int LINE_HEIGHT = 20;
private static final int WRAP_LENGTH = 40;
private static final int POWERUP_SIZE = 40;
private static final int BACK_BUTTON_DRAW_X = 10;
private static final int BACK_BUTTON_DRAW_Y = 30;
private static final int INSTRUCTIONS_DRAW_X = 60;
private static final int INSTRUCTIONS_DRAW_Y = 85;
private static final String INSTRUCTIONS_TEXT = "This game is simple! Eat smaller " +
"fish, don't get eaten by bigger fish. Slowly grow to be the biggest " +
"fish in the ocean! Control your fish with the arrow keys or WASD. " +
"Press P to pause.";
private static final int BACK_DRAW_X = 70;
private static final int BACK_DRAW_Y = 40;
private static final String BACK_TEXT = "Back";
private static final int POWERUP_DRAW_X = 350;
private static final int POWERUP_DRAW_Y = 40;
private static final String POWERUP_TEXT = "Power-ups:";
private static final String ENEMIES_TEXT = "Enemies:";
private static final int POISON_DRAW_Y = 85;
private static final int POISON_TEXT_DRAW_Y = 127;
private static final String POISON_TEXT = "This is a poisonous mushroom! " +
"Eating this will disorientate you and reverse your controls. " +
"Avoid these if possible.";
private static final int SHIELD_DRAW_Y = 190;
private static final int SHIELD_TEXT_DRAW_Y = 232;
private static final String SHIELD_TEXT = "This is a shield! " +
"Eating this will protect you from dying for a little while. " +
"Definitely try to get these! ";
private static final int SPEEDUP_DRAW_Y = 300;
private static final int SPEEDUP_TEXT_DRAW_Y = 347;
private static final String SPEEDUP_TEXT = "This is a speedUp-up! " +
"Eating this will increase your speedUp significantly. " +
"Avoid these if possible. ";
private static final int EXTRALIFE_DRAW_Y = 415;
private static final int EXTRALIFE_TEXT_DRAW_Y = 457;
private static final String EXTRALIFE_TEXT = "This is an extra life! " +
"Eating this will allow you to be hit by a bigger fish one time. " +
"Definitely try to get these! ";
private static final int SQUID_DRAW_X = 105;
private static final int SQUID_DRAW_Y = 220;
private static final String ENEMIES_DESCRIPTION = "These are the standard " +
"enemies. The fish swim across the screen horizontally while the " +
"squid swim from the bottom up.";
private static final int ENEMIES_TEXT_DRAW_Y = 265;
private static final String WHALE_TEXT = "This is a special enemy, the whale," +
" which has a small chance of spawning following a short warning. " +
"Whales are huge and cannot be eaten.";
private static final int WHALE_TEXT_DRAW_Y = 430;
private static final String BACK_BUTTON_RESOURCE = "resources/back-button.png";
private static final String POISON_RESOURCE = "resources/poison.png";
private static final String EXTRALIFE_RESOURCE = "resources/ExtraLife-fish.png";
private static final String SHIELD_RESOURCE = "resources/shield.png";
private static final String SPEEDUP_RESOURCE = "resources/speedUp-fish.png";
private static final String FISH_RESOURCE = "resources/opponent-fish.png";
private static final String SQUID_RESOURCE = "resources/squid.png";
private static final String WHALE_RESOURCE = "resources/whale.png";
/**
* Initialize the game.
* @param gc - the container holding the game.
* @param game - the game holding the state.
* @throws SlickException - indicates internal error.
*/
public void init(GameContainer gc, StateBasedGame game) throws SlickException {
background = new Image("resources/" + Main.LEVEL_BACKGROUND + ".jpg");
background.setAlpha(0.1f);
backButton = new Button(BACK_BUTTON_DRAW_X, BACK_BUTTON_DRAW_Y, BACK_BUTTON_RESOURCE);
poison = new Image(POISON_RESOURCE).getScaledCopy(POWERUP_SIZE, POWERUP_SIZE);
shield = new Image(SHIELD_RESOURCE).getScaledCopy(POWERUP_SIZE, POWERUP_SIZE);
speedup = new Image(SPEEDUP_RESOURCE).getScaledCopy(POWERUP_SIZE, POWERUP_SIZE);
extralife = new Image(EXTRALIFE_RESOURCE).getScaledCopy(POWERUP_SIZE, POWERUP_SIZE);
fish = new Image(FISH_RESOURCE).getScaledCopy(POWERUP_SIZE, POWERUP_SIZE);
squid = new Image(SQUID_RESOURCE).getScaledCopy(POWERUP_SIZE, POWERUP_SIZE);
whale = new Image(WHALE_RESOURCE).getScaledCopy(POWERUP_SIZE * 2, POWERUP_SIZE * 2);
textFont = new TrueTypeFont(new java.awt.Font("Calibri", java.awt.Font.PLAIN , 16), true);
titleFont = new TrueTypeFont(new java.awt.Font("Calibri", java.awt.Font.BOLD , 24), true);
introFont = new TrueTypeFont(new java.awt.Font("Calibri", java.awt.Font.BOLD , 16), true);
mouse = new MousePosition();
}
/**
* Method executed when entering this game state.
* @param gameContainer - the container holding the game.
* @param stateBasedGame - the game holding the state.
* @throws SlickException - indicates internal error.
*/
@Override
public void enter(GameContainer gameContainer, StateBasedGame stateBasedGame) throws SlickException {
super.enter(gameContainer, stateBasedGame);
Main.actionLogger.logLine("Entering HelpState", getClass().getSimpleName());
}
/**
* Renders the game's screen.
* @param gc - the container holding the game.
* @param game - the game holding the state.
* @param g - the graphics content used to render.
* @throws SlickException - indicates internal error.
*/
public void render(GameContainer gc, StateBasedGame game, Graphics g) throws SlickException {
g.setColor(Color.black);
g.fillRect(0, 0, Main.WINDOW_WIDTH, Main.WINDOW_HEIGHT);
g.drawImage(background, 0, 0);
renderBackButton(g);
renderInstructions(g);
renderEnemies(g);
renderPowerups(g);
}
/**
* Update the game logic.
* @param gc - the container holding the game.
* @param game - the game holding the state.
* @param delta - the amount of time that has passed since last update in
* milliseconds.
* @throws SlickException - indicates internal error.
*/
public void update(GameContainer gc, StateBasedGame game, int delta) throws SlickException {
mouse.updatePosition();
Input input = gc.getInput();
if (backButton.wasClickedBy(mouse) || input.isKeyPressed(Input.KEY_P)) {
game.enterState(getPrevious());
}
}
/**
* Method executed when leaving this game state.
* @param gameContainer - the container holding the game.
* @param stateBasedGame - the game holding this state.
* @throws SlickException - indicates internal error.
*/
@Override
public void leave(GameContainer gameContainer, StateBasedGame stateBasedGame) throws SlickException {
super.leave(gameContainer, stateBasedGame);
Main.actionLogger.logLine("Leaving HelpState", getClass().getSimpleName());
}
/**
* Make sure text overflow works properly.
* @param text - the text to be wrapped.
* @param len - max length of the text in characters.
* @return Array of individual lines.
*/
static String[] wrapText(String text, int len) {
// return empty array for null text
if (text == null) {
return new String[]{};
}
// return text if len is zero or less
if (len <= 0) {
return new String[] {text};
}
// return text if less than length
if (text.length() <= len) {
return new String[]{text};
}
char[] chars = text.toCharArray();
Vector<String> lines = new Vector<String>();
StringBuilder line = new StringBuilder();
StringBuilder word = new StringBuilder();
for (int i = 0; i < chars.length; i++) {
word.append(chars[i]);
if (chars[i] == ' ') {
if ((line.length() + word.length()) > len) {
lines.add(line.toString());
line.delete(0, line.length());
}
line.append(word);
word.delete(0, word.length());
}
}
// handle any extra chars in current word
if (word.length() > 0) {
if ((line.length() + word.length()) > len) {
lines.add(line.toString());
line.delete(0, line.length());
}
line.append(word);
}
// handle extra line
if (line.length() > 0) {
lines.add(line.toString());
}
String [] ret = new String[lines.size()];
int c = 0; // counter
for (Enumeration<String> e = lines.elements(); e.hasMoreElements(); c++) {
ret[c] = (String) e.nextElement();
}
return ret;
}
/**
* Getter for the previous state the game was in.
* @return The previous state.
*/
public static int getPrevious() {
return PREVIOUS_STATE;
}
/**
* Setter to set the previous state.
* @param prev - The previous state
*/
public static void setPrevious(int prev) {
PREVIOUS_STATE = prev;
}
/**
* Get the ID of this state.
* @return the unique ID of this state.
*/
@Override
public int getID() {
return STATE_ID;
}
/**
* Renders the back button in the HelpState screen.
* @param g - the graphics content to render.
*/
private void renderBackButton(Graphics g) {
backButton.draw(g);
titleFont.drawString(BACK_DRAW_X, BACK_DRAW_Y, BACK_TEXT, myBlue);
}
/**
* Renders the game's instructions in the screen.
* @param g - the graphics content to render.
*/
private void renderInstructions(Graphics g) {
String [] instructions = wrapText(INSTRUCTIONS_TEXT, WRAP_LENGTH);
for (int i = 0; i < instructions.length; i++) {
introFont.drawString(INSTRUCTIONS_DRAW_X, INSTRUCTIONS_DRAW_Y + i * LINE_HEIGHT, instructions[i], Color.white);
}
}
/**
* Renders the information about the different enemies.
* @param g - the graphics content to render.
*/
private void renderEnemies(Graphics g) {
titleFont.drawString(INSTRUCTIONS_DRAW_X, SHIELD_DRAW_Y, ENEMIES_TEXT, myBlue);
renderEnemiesFishSquid(g);
renderEnemiesWhale(g);
}
/**
* Renders the information about the different powerups.
* @param g - the graphics content to render.
*/
private void renderPowerups(Graphics g) {
titleFont.drawString(POWERUP_DRAW_X, POWERUP_DRAW_Y, POWERUP_TEXT, myBlue);
renderPowerupPoison(g);
renderPowerupShield(g);
renderPowerupSpeedup(g);
renderPowerupExtraLife(g);
}
/**
* Renders the information about the powerup poison.
* @param g - the graphics content to render.
*/
private void renderPowerupPoison(Graphics g) {
g.drawImage(poison, POWERUP_DRAW_X, POISON_DRAW_Y);
String [] poisontext = wrapText(POISON_TEXT, WRAP_LENGTH);
for (int i = 0; i < poisontext.length; i++) {
textFont.drawString(POWERUP_DRAW_X, POISON_TEXT_DRAW_Y + i * LINE_HEIGHT, poisontext[i], Color.white);
}
}
/**
* Renders the information about the powerup shield.
* @param g - the graphics content to render.
*/
private void renderPowerupShield(Graphics g) {
g.drawImage(shield, POWERUP_DRAW_X, SHIELD_DRAW_Y);
String [] shieldtext = wrapText(SHIELD_TEXT, WRAP_LENGTH);
for (int i = 0; i < shieldtext.length; i++) {
textFont.drawString(POWERUP_DRAW_X, SHIELD_TEXT_DRAW_Y + i * LINE_HEIGHT, shieldtext[i], Color.white);
}
}
/**
* Renders the information about the powerup speedup.
* @param g - the graphics content to render.
*/
private void renderPowerupSpeedup(Graphics g) {
g.drawImage(speedup, POWERUP_DRAW_X, SPEEDUP_DRAW_Y);
String [] speeduptext = wrapText(SPEEDUP_TEXT, WRAP_LENGTH);
for (int i = 0; i < speeduptext.length; i++) {
textFont.drawString(POWERUP_DRAW_X, SPEEDUP_TEXT_DRAW_Y + i * LINE_HEIGHT, speeduptext[i], Color.white);
}
}
/**
* Renders the information about the powerup extralife.
* @param g - the graphics content to render.
*/
private void renderPowerupExtraLife(Graphics g) {
g.drawImage(extralife, POWERUP_DRAW_X, EXTRALIFE_DRAW_Y);
String [] extralifetext = wrapText(EXTRALIFE_TEXT, WRAP_LENGTH);
for (int i = 0; i < extralifetext.length; i++) {
textFont.drawString(POWERUP_DRAW_X, EXTRALIFE_TEXT_DRAW_Y + i * LINE_HEIGHT, extralifetext[i], Color.white);
}
}
/**
* Renders the information about the fish and squid enemies.
* @param g - the graphics content to render.
*/
private void renderEnemiesFishSquid(Graphics g) {
g.drawImage(fish, INSTRUCTIONS_DRAW_X, SQUID_DRAW_Y);
g.drawImage(squid, SQUID_DRAW_X, SQUID_DRAW_Y);
String [] enemiestext = wrapText(ENEMIES_DESCRIPTION, WRAP_LENGTH);
for (int i = 0; i < enemiestext.length; i++) {
textFont.drawString(INSTRUCTIONS_DRAW_X, ENEMIES_TEXT_DRAW_Y + i * LINE_HEIGHT, enemiestext[i], Color.white);
}
}
/**
* Renders the information about the whale enemy.
* @param g - the graphics content to render.
*/
private void renderEnemiesWhale(Graphics g) {
g.drawImage(whale, INSTRUCTIONS_DRAW_X, SPEEDUP_TEXT_DRAW_Y);
String [] whaletext = wrapText(WHALE_TEXT, WRAP_LENGTH);
for (int i = 0; i < whaletext.length; i++) {
textFont.drawString(INSTRUCTIONS_DRAW_X, WHALE_TEXT_DRAW_Y + i * LINE_HEIGHT, whaletext[i], Color.white);
}
}
}
| |
/**
* @@@ START COPYRIGHT @@@
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* @@@ END COPYRIGHT @@@
*/
package org.trafodion.libmgmt;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FileMgmt {
private static final Logger LOG = LoggerFactory.getLogger(FileMgmt.class);
// Server-side JDBC URL: statements run on the hosting Trafodion session.
private static final String url = "jdbc:default:connection";
// Maximum allowed library file size: 100Mb
private static final long MAX_JAR_FILE_SIZE = 104857600;
// NOTE(review): SimpleDateFormat is not thread-safe; sharing this static
// instance assumes single-threaded callers — confirm.
private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
// Chunk size (in bytes) used when reading a file for download in get().
private static final int MaxDataSize = 12800;
// 8-bit charset used to move arbitrary bytes through String parameters.
private static final String CHARSET = "ISO-8859-1";
/**
* Print help info
*
* @param helps:
* INOUT parameter like PUT/LS/...
*/
/**
 * Returns help text in {@code helps[0]} (INOUT parameter). When the input
 * names a known command, only that command's line is returned; otherwise
 * the full list is returned under a "HELP:" header.
 *
 * Fix: the lookup index previously contained only the first six commands,
 * so ADDLIB, ALTERLIB and DROPLIB could never be looked up individually.
 *
 * @param helps INOUT parameter like PUT/LS/...
 */
public static void help(String[] helps) {
    String[] help = new String[] {
            "PUTFILE - Upload a library file. SHOWDDL PROCEDURE [SCHEMA NAME.]PUTFILE for more info.",
            "LS - List library files. SHOWDDL PROCEDURE [SCHEMA NAME.]LS for more info.",
            "LSALL - List all library files. SHOWDDL PROCEDURE [SCHEMA NAME.]LSALL for more info.",
            "RM - Remove a library file. SHOWDDL PROCEDURE [SCHEMA NAME.]RM for more info.",
            "RMREX - Remove library files by a perticular pattern. SHOWDDL PROCEDURE [SCHEMA NAME.]RMREX for more info.",
            "GETFILE - Download a library file. SHOWDDL PROCEDURE [SCHEMA NAME.]GETFILE for more info.",
            "ADDLIB - Create a library. SHOWDDL PROCEDURE [SCHEMA NAME.]ADDLIB for more info.",
            "ALTERLIB - Update a library. SHOWDDL PROCEDURE [SCHEMA NAME.]ALTERLIB for more info.",
            "DROPLIB - Drop a library. SHOWDDL PROCEDURE [SCHEMA NAME.]DROPLIB for more info."
    };
    // Keep the index aligned 1:1 with the help array above.
    List<String> index = Arrays.asList("PUTFILE", "LS", "LSALL", "RM",
            "RMREX", "GETFILE", "ADDLIB", "ALTERLIB", "DROPLIB");
    int found = index.indexOf(helps[0].trim().toUpperCase());
    if (found >= 0) {
        helps[0] = help[found];
    } else {
        // Unknown (or empty) command: return the whole list.
        StringBuilder all = new StringBuilder("HELP:\r\n");
        for (String h : help) {
            all.append(h).append("\r\n");
        }
        helps[0] = all.toString();
    }
}
/** create a library
* @param libName library name
* @param fileName related file name
* @param hostName host name
* @param localFile local file
* @throws SQLException
*/
/**
 * Creates a library object backed by a previously uploaded code file.
 *
 * @param libName   name of the library to create
 * @param fileName  name of the uploaded file backing the library
 * @param hostName  optional HOST NAME clause value; skipped when null/blank
 * @param localFile optional LOCAL FILE clause value; skipped when null/blank
 * @throws SQLException when the file name is illegal or the DDL fails
 */
public static void addLib(String libName, String fileName, String hostName,
        String localFile) throws SQLException {
    checkFileName(fileName);
    Connection conn = getConn();
    StringBuilder ddl = new StringBuilder("create library ")
            .append(libName)
            .append(" file '")
            .append(getCodeFilePath(conn))
            .append(fileName)
            .append("'");
    if (hostName != null && !"".equals(hostName.trim())) {
        ddl.append(" HOST NAME '").append(hostName).append("'");
    }
    if (localFile != null && !"".equals(localFile.trim())) {
        ddl.append(" LOCAL FILE '").append(localFile).append("'");
    }
    execSQL(ddl.toString(), conn);
}
/**
* change the library related attribute
*
* @param libName
* library name
* @param fileName
* uploaded file's name
* @param hostName
* @param localFile
* @throws SQLException
*/
/**
 * Changes the file (and optionally host/local file) a library points to.
 *
 * @param libName   library name
 * @param fileName  uploaded file's name
 * @param hostName  optional HOST NAME clause value; skipped when null/blank
 * @param localFile optional LOCAL FILE clause value; skipped when null/blank
 * @throws SQLException when the file name is illegal or the DDL fails
 */
public static void alterLib(String libName, String fileName,
        String hostName, String localFile) throws SQLException {
    checkFileName(fileName);
    Connection conn = getConn();
    StringBuilder ddl = new StringBuilder("alter library ")
            .append(libName)
            .append(" FILE '")
            .append(getCodeFilePath(conn))
            .append(fileName)
            .append("'");
    if (hostName != null && !"".equals(hostName.trim())) {
        ddl.append(" HOST NAME '").append(hostName).append("'");
    }
    if (localFile != null && !"".equals(localFile.trim())) {
        ddl.append(" LOCAL FILE '").append(localFile).append("'");
    }
    execSQL(ddl.toString(), conn);
}
/**
 * Drops the library.
 *
 * @param libName name of the library to drop
 * @param mode
 *            "RESTRICT" or "CASCADE" (case-insensitive); any other value,
 *            or null, adds no drop-behavior clause
 * @throws SQLException
 */
// Drops the library, optionally with a RESTRICT or CASCADE clause.
public static void dropLib(String libName, String mode) throws SQLException {
    String sql = "drop library " + libName;
    if (mode != null) {
        String normalized = mode.trim();
        if (normalized.equalsIgnoreCase("RESTRICT")) {
            sql += " RESTRICT";
        } else if (normalized.equalsIgnoreCase("CASCADE")) {
            sql += " CASCADE";
        }
    }
    execSQL(sql, getConn());
}
/**
 * Distributes an uploaded library file to every node, staging it from the
 * destination directory itself.
 *
 * @param userPath directory that holds the file (also the destination)
 * @param fileName unqualified name of the file to distribute
 */
public static void syncJar(String userPath, String fileName) throws SQLException, IOException {
    // the local staging directory is the same as the destination directory
    syncJarPdcp(userPath, userPath, fileName);
}
/**
 * Copies a file to all cluster nodes using pdcp/pdsh.
 *
 * @param stagingPath local directory that currently holds the file
 * @param dstDir      directory the file should end up in on every node
 * @param fileName    unqualified name of the file to distribute
 * @throws SQLException if the file name contains a path separator
 * @throws IOException  if one of the shell commands cannot be started
 */
public static void syncJarPdcp(String stagingPath,
        String dstDir,
        String fileName) throws SQLException, IOException {
    checkFileName(fileName);
    // MY_NODES lists the cluster nodes; when unset/blank there is nothing
    // to distribute (single-node install, presumably — confirm).
    String nodes = System.getenv("MY_NODES");
    LOG.info("syncJars " + fileName + ", MY_NODES=" + nodes);
    if (nodes != null && !"".equals(nodes.trim())) {
        String pdcp = System.getenv("SQ_PDCP");
        String pdsh = System.getenv("SQ_PDSH");
        LOG.info("SQ_PDCP=" + pdcp + ", SQ_PDSH=" + pdsh);
        // Fall back to the standard install locations when the env vars are unset.
        if (pdcp == null) {
            pdcp = "/usr/bin/pdcp";
        }
        if (pdsh == null) {
            pdsh = "/usr/bin/pdsh";
        }
        // NOTE(review): commands are built by concatenation and executed via
        // Runtime.exec(String), which splits on whitespace — directories or
        // file names containing spaces or shell metacharacters will break.
        // Confirm inputs are constrained upstream.
        execShell(pdsh + " " + nodes + " mkdir -p " + dstDir);
        execShell(pdcp + " " + nodes + " " + stagingPath + fileName.trim() + " " + dstDir);
        execShell(pdsh + " " + nodes + " chmod 755 " + dstDir + fileName.trim());
    }
}
/**
 * Distributes a staged library file to all nodes by copying it into a
 * per-user HDFS staging directory and running the SyncLibUDF table-mapping
 * UDF ('c' = copy action); falls back to pdcp when the HDFS path fails.
 */
static void syncLibViaHDFS(String fileName, Connection conn) throws SQLException, IOException {
    checkFileName(fileName);
    String stagingDirPath = getLocalStagingDirPath(conn);
    Path srcLocalPath = new Path(stagingDirPath, fileName);
    Path dstHdfsDir = new Path(getHdfsStagingDirName(getCurrentUser(conn)));
    Path dstHdfsPath = new Path(dstHdfsDir, fileName);
    try {
        // using HDFS Java interface
        Configuration conf = new Configuration(true);
        FileSystem fs = FileSystem.get(dstHdfsPath.toUri(),conf);
        // Owner-only directory: rwx for the user, nothing for group/other.
        fs.mkdirs(dstHdfsDir, new FsPermission(FsAction.ALL,
                FsAction.NONE,
                FsAction.NONE));
        fs.copyFromLocalFile(false, // do not delete src
                true, // overwrite tgt
                srcLocalPath,
                dstHdfsPath);
        LOG.info("syncJar " + fileName + ", target=" + dstHdfsDir.toString());
        // now execute a TMUDF that will copy the file across all nodes
        String sql = "select count(*) from udf(\"_LIBMGR_\".SyncLibUDF('c','"+ fileName + "'))";
        String result = execSQL(sql, conn, true);
        // The HDFS copy is only a transfer vehicle; remove it once synced.
        fs.delete(dstHdfsPath, false);
        LOG.info("syncJar " + fileName + ", numSyncedNodes=" + result);
    }
    catch (Exception e1) {
        try {
            LOG.warn("syncJar " + fileName + ", sync via HDFS failed, trying pdcp");
            // try using pdcp instead, this may not
            // work on all systems
            syncJarPdcp(stagingDirPath,
                    getCodeFilePath(conn),
                    fileName);
        } catch (Exception e2) {
            // Both transports failed: log both, surface the first cause.
            LOG.error(e1.getMessage(), e1);
            LOG.error(e2.getMessage(), e2);
            throw new SQLException(e1);
        }
    }
}
/**
 * Deletes a library file on every node by running the SyncLibUDF
 * table-mapping UDF with the 'd' (delete) action.
 * NOTE(review): fileName is spliced into a SQL string literal and
 * checkFileName only rejects path separators, so a name containing a single
 * quote would break or alter the statement — confirm names are validated
 * upstream.
 */
public static void rmJar(String fileName, Connection conn) throws SQLException, IOException {
    checkFileName(fileName);
    // execute a TMUDF that will delete the file across all nodes
    String sql = "select count(*) from udf(\"_LIBMGR_\".SyncLibUDF('d','"+ fileName + "'))";
    String result = execSQL(sql, conn, true);
    LOG.info("rmJar " + fileName + ", numSyncedNodes=" + result);
}
/**
 * Runs a shell command and returns its captured stdout followed by its
 * stderr, separated by a single "\r\n" when any stderr output exists.
 *
 * Fix: the previous separator flag was never cleared in the branch that
 * appended it (the {@code else} that reset it was unreachable), so "\r\n"
 * was inserted before every stderr byte instead of once.
 *
 * NOTE(review): output bytes are widened to chars one by one, which assumes
 * single-byte output; the exit code is never checked — confirm callers do
 * not rely on either.
 *
 * @param cmd command line to execute (split on whitespace by Runtime.exec)
 * @return combined output, or null if no process was created
 */
private static String execShell(String cmd) throws IOException {
    LOG.info("Processing command: " + cmd);
    Process p = Runtime.getRuntime().exec(cmd);
    if (p != null) {
        StringBuilder sb = new StringBuilder();
        InputStream in = null;
        try {
            in = p.getInputStream();
            int c = -1;
            while ((c = in.read()) != -1) {
                sb.append((char) c);
            }
        } finally {
            if (in != null)
                in.close();
        }
        try {
            in = p.getErrorStream();
            int c = -1;
            boolean needSeparator = true;
            while ((c = in.read()) != -1) {
                if (needSeparator) {
                    // Separate stderr from stdout exactly once.
                    sb.append("\r\n");
                    needSeparator = false;
                }
                sb.append((char) c);
            }
        } finally {
            if (in != null)
                in.close();
        }
        return sb.toString();
    }
    return null;
}
/**
 * Download one chunk of a library file.
 *
 * @param fileName unqualified name of the file to read
 * @param offset byte offset at which to start reading
 * @param fileData OUT parameter receiving the chunk, ISO-8859-1 encoded
 * @param fileLength OUT parameter receiving the total file length
 * @throws SQLException
 * @throws IOException
 */
/**
 * Reads one chunk (up to MaxDataSize bytes) of a library file starting at
 * the given offset. The chunk is returned ISO-8859-1 encoded in
 * {@code fileData[0]} and the total file length in {@code fileLength[0]};
 * both are left untouched when the offset is at/after end of file.
 *
 * @param fileName unqualified name of the file to read
 * @param offset byte offset at which to start reading
 * @param fileData OUT parameter receiving the chunk
 * @param fileLength OUT parameter receiving the total file length
 * @throws SQLException when the name is illegal or the file does not exist
 * @throws IOException on read failure
 */
public static void get(String fileName, int offset, String[] fileData, long[] fileLength)
        throws SQLException, IOException {
    checkFileName(fileName);
    Connection conn = getConn();
    LOG.info("Get " + fileName);
    String userPath = getCodeFilePath(conn);
    // The connection is only needed to resolve the per-user path.
    close(conn);
    File file = new File(userPath + fileName);
    if (!file.exists()) {
        throw new SQLException("No such file[" + fileName + "]");
    }
    RandomAccessFile rAFile = null;
    try {
        rAFile = new RandomAccessFile(file, "r");
        rAFile.seek(offset);
        byte bArray[] = new byte[MaxDataSize];
        int bytesRead = rAFile.read(bArray, 0, MaxDataSize);
        if (bytesRead != -1) {
            // ISO-8859-1 maps bytes 1:1 to chars, so arbitrary binary survives.
            fileData[0] = new String(Arrays.copyOf(bArray, bytesRead), CHARSET);
            fileLength[0] = file.length();
            LOG.info("Download: " + fileName + ", offset:" + offset + ",compressed length:" + fileData[0].length()
                    + ",file length:" + fileLength[0]);
        }
    } catch(IOException e){
        LOG.error(fileName,e);
        throw e;
    } finally {
        if (rAFile != null) {
            try {
                rAFile.close();
            } catch (Exception e) {
                LOG.warn("Something wrong while close file[" + fileName + "] stream: " + e.getMessage());
            }
        }
    }
}
/**
* Remove exact file
*
* @param fileName
* @throws SQLException
* @throws IOException
*/
/**
 * Removes a single library file, failing if it does not exist locally.
 * The actual deletion happens on every node via {@link #rmJar}.
 *
 * @param fileName unqualified name of the file to remove
 * @throws SQLException when the name is illegal or the file is missing
 * @throws IOException propagated from the cluster-wide delete
 */
public static void rm(String fileName) throws SQLException, IOException {
    checkFileName(fileName);
    Connection conn = getConn();
    LOG.info("Remove " + fileName);
    String userPath = getCodeFilePath(conn);
    try {
        File file = new File(userPath + fileName);
        if (file.exists()) {
            // rmJar runs the delete UDF across all nodes, including this one.
            rmJar(fileName, conn);
            LOG.info("Removed " + fileName + " successfully!");
            return;
        } else {
            LOG.error("No such file[" + fileName + "]");
            throw new SQLException("No such file[" + fileName + "]");
        }
    } finally {
        close(conn);
    }
}
/**
 * Removes all files whose names match a wildcard pattern.
 *
 * @param pattern
 *            wildcard pattern ('*' matches any sequence) of the files to
 *            be deleted
 * @param names
 *            OUT parameter receiving an XML report of the deleted files
 * @throws SQLException
 * @throws IOException
 */
/**
 * Removes all library files matching a wildcard pattern and reports the
 * removed files as XML in {@code names[0]}.
 *
 * Fixes: the connection was leaked when getCodeFilePath threw before the
 * original try block, and a null result from getFiles (listFiles on a
 * missing directory) caused a NullPointerException.
 *
 * @param pattern wildcard pattern of the files to be deleted
 * @param names OUT parameter receiving an XML report of the deleted files
 */
public static void rmRex(String pattern, String[] names) throws SQLException, IOException {
    checkFileName(pattern);
    Connection conn = getConn();
    LOG.info("Try to remove files[" + pattern + "]");
    StringBuilder sb = new StringBuilder();
    try {
        String userPath = getCodeFilePath(conn);
        File[] files = getFiles(pattern, new File(userPath));
        if (files == null) {
            // listFiles returns null when the path is not a readable directory.
            throw new SQLException("Directory [" + userPath + "] is not found!");
        }
        sb.append("<rmRex>");
        sb.append(toXML(files, "rmList"));
        sb.append("<message>");
        for (int i = 0; i < files.length; i++) {
            rmJar(files[i].getName(), conn);
        }
    } finally {
        close(conn);
    }
    sb.append("Removed the files successfully!");
    sb.append("</message>");
    sb.append("</rmRex>");
    names[0] = sb.toString();
    LOG.info("Done for removing files[" + pattern + "].");
}
/** Lists every library file; equivalent to {@code ls("*", names)}. */
public static void lsAll(String[] names) throws SQLException {
    ls("*", names);
}
/**
* list the Jars matching PATTERN
*
* @param pattern:
* @param names
* @throws SQLException
*/
/**
 * Lists the library files matching PATTERN as XML in {@code names[0]}.
 *
 * Fix: the null check of {@code pattern} previously ran only after
 * checkFileName(pattern) had already dereferenced it (guaranteed NPE for a
 * null pattern); validate first.
 *
 * @param pattern wildcard pattern ('*' matches any sequence)
 * @param names OUT parameter receiving the XML listing
 */
public static void ls(String pattern, String[] names) throws SQLException {
    if (pattern == null) {
        LOG.error("File pattern should not be empty!");
        throw new SQLException("Pattern is empty!");
    }
    checkFileName(pattern);
    Connection conn = getConn();
    LOG.info("List files[" + pattern + "]");
    String userPath = getCodeFilePath(conn);
    close(conn);
    File dir = new File(userPath);
    if (!dir.exists() || !dir.isDirectory()) {
        LOG.error("Directory [" + userPath + "] is not found!");
        throw new SQLException("Directory [" + userPath + "] is not found!");
    }
    File[] files = getFiles(pattern, dir);
    names[0] = toXML(files, "ls");
}
/**
* upload a JAR file
*
* @param fileData
* @param fileName
* @param appendFlag
* 0: append; otherwise overwrite
* @param overwriteOnCreate
* when appendFlag is not 0, check if file exists and overwriteOnCreate is not 0,
* throw exception. Otherwise overwrite the file.
* @throws SQLException
*/
/**
 * Uploads (a chunk of) a JAR file into the per-user code directory and
 * distributes it to all nodes.
 *
 * Fix: the terminal catch rethrew only {@code t.getMessage()}, discarding
 * the original exception; it is now attached as the cause.
 *
 * @param fileData file bytes, ISO-8859-1 encoded
 * @param fileName unqualified target file name
 * @param appendFlag 0: append; otherwise overwrite
 * @param overwriteOnCreate when appendFlag is not 0, check if file exists and
 *        overwriteOnCreate is not 0, throw exception. Otherwise overwrite
 *        the file.
 * @throws SQLException wrapping any underlying failure
 */
public static void put(String fileData, String fileName, int appendFlag, int overwriteOnCreate) throws SQLException, IOException {
    checkFileName(fileName);
    try {
        byte[] data = fileData.getBytes(CHARSET);
        Connection conn = getConn();
        LOG.info("Put " + fileName + ", length: " + data.length + ", file string length:" + fileData.length());
        String userPath = getCodeFilePath(conn);
        close(conn);
        String fname = userPath + fileName;
        if (overwriteOnCreate != 0 && appendFlag != 0
                && new File(fname).exists()) {
            throw new IOException("File " + fileName + " already exists!");
        }
        // Enforce the 100Mb size ceiling before writing anything.
        checkFile(fname, data.length);
        FileOutputStream fos = null;
        FileChannel channel = null;
        FileLock lock = null;
        try {
            fos = new FileOutputStream(fname, (appendFlag == 0));
            channel = fos.getChannel();
            // Guard against concurrent uploads of the same file.
            lock = channel.tryLock();
            if (lock != null) {
                fos.write(data);
                fos.flush();
            }else{
                throw new SQLException("File "+fileName+" is locked, please try again later.");
            }
        } finally {
            if(lock != null){
                lock.release();
            }
            if(channel !=null){
                channel.close();
            }
            if (fos != null)
                fos.close();
        }
        syncJar(userPath, fileName);
        LOG.info("PUT method out !!! " + fileName);
    } catch (Throwable t) {
        LOG.error(t.getMessage(), t);
        // Preserve the original exception as the cause.
        throw new SQLException(t.getMessage(), t);
    }
}
/**
* upload a library (jar or DLL) in one or more chunks, passed in as strings
*
* @param fileData a string (chunk) of bytes to be added to the file on
* the server side. These bytes are assumed to be in the
* ISO-8859-1 character set, which allows any bit combination,
* so passing arbitrary bit patterns is ok.
* @param fileName unqualified name of the target file (library file)
* @param isFirstChunk indicates whether this is the first chunk for a file
* @param isLastChunk indicates whether this is the last chunk for a file
* @param overwriteExistingFile (used only for the first chunk) indicates
* whether we should silently overwrite an existing file
* (true) or raise an exception, if file "fileName"
* already exists (false)
* @throws SQLException
* @throws IOException
*/
/**
 * Uploads a library (jar or DLL) in one or more chunks, passed in as strings.
 *
 * Fix: the terminal catch rethrew only {@code t.getMessage()}, discarding
 * the original exception; it is now attached as the cause.
 *
 * @param fileData a string (chunk) of bytes to be added to the file on
 *        the server side, in the ISO-8859-1 character set (allows any bit
 *        combination)
 * @param fileName unqualified name of the target file (library file)
 * @param isFirstChunk indicates whether this is the first chunk for a file
 * @param isLastChunk indicates whether this is the last chunk for a file
 * @param overwriteExistingFile (used only for the first chunk) indicates
 *        whether we should silently overwrite an existing file (non-zero)
 *        or raise an exception if "fileName" already exists (zero)
 * @throws SQLException
 * @throws IOException
 */
public static void putFile(String fileData,
        String fileName,
        int isFirstChunk,
        int isLastChunk,
        int overwriteExistingFile)
        throws SQLException, IOException {
    checkFileName(fileName);
    try {
        byte[] data = fileData.getBytes(CHARSET);
        Connection conn = getConn();
        String stagingPath = getLocalStagingDirPath(conn);
        String fname = stagingPath + fileName;
        String dstFileName = getCodeFilePath(conn) + fileName;
        boolean isFirst = (isFirstChunk != 0);
        boolean isLast = (isLastChunk != 0);
        LOG.info("PutFile " + fileName +
                (isFirst && isLast ? "(single chunk)" :
                        (isFirst ? "(first chunk)" :
                                (isLast ? "(last chunk)" : "(intermediate chunk)"))) +
                ", length: " + data.length + ", file string length:" + fileData.length());
        if (isFirst &&
                overwriteExistingFile == 0 &&
                new File(dstFileName).exists()) {
            throw new IOException("File " + fileName + " already exists!");
        }
        // Enforce the 100Mb size ceiling on the staged file.
        checkFile(fname, data.length);
        File stagingDir = null;
        FileOutputStream fos = null;
        FileChannel channel = null;
        FileLock lock = null;
        try {
            if (isFirst) {
                stagingDir = new File(stagingPath);
                if (!stagingDir.exists())
                    stagingDir.mkdir();
            }
            // First chunk truncates; later chunks append.
            fos = new FileOutputStream(fname, !isFirst);
            channel = fos.getChannel();
            lock = channel.tryLock();
            if (lock != null) {
                fos.write(data);
                fos.flush();
            }else{
                throw new SQLException("File "+fileName+" is locked, please try again later.");
            }
            if (isLast)
                syncLibViaHDFS(fileName, conn);
        } finally {
            if(lock != null){
                lock.release();
            }
            if(channel !=null){
                channel.close();
            }
            if (fos != null)
                fos.close();
            if (isLast) {
                // delete the file in the staging area
                new File(fname).delete();
            }
            close(conn);
        }
        LOG.info("PutFile method out !!! " + fileName);
    } catch (Throwable t) {
        LOG.error(t.getMessage(), t);
        // Preserve the original exception as the cause.
        throw new SQLException(t.getMessage(), t);
    }
}
/**
 * Rejects file names containing a path separator, preventing escapes from
 * the per-user library directory.
 *
 * Fix: the error message mentioned only "/" although "\" is also rejected.
 *
 * @param fileName unqualified file name to validate
 * @throws SQLException when the name contains "/" or "\"
 */
static void checkFileName(String fileName) throws SQLException {
    if (fileName.contains("/") || fileName.contains("\\")) {
        throw new SQLException("Illegal file name: " + fileName
                + ". File name must not contain \"/\" or \"\\\".");
    }
}
/**
 * Returns the per-user HDFS staging directory used while distributing
 * library files across nodes.
 */
static String getHdfsStagingDirName(String userName) {
    final String stagingRoot = "/user/trafodion/udr/lib/staging/";
    return stagingRoot + userName;
}
/**
 * Returns (creating it if needed) the per-user local library directory,
 * rooted at $TRAF_VAR/udr/lib/&lt;userName&gt;, with a trailing "/".
 *
 * @param userName sanitized session user name (used as a directory name)
 * @throws SQLException when TRAF_VAR is unset or the path exists but is
 *         not a directory
 */
static String getLocalLibDirName(String userName) throws SQLException {
    // TRAF_VAR points at the Trafodion var directory of this installation.
    String root = System.getenv("TRAF_VAR");
    if (root == null || "".equals(root.trim())) {
        LOG.error("Cant get your traf installation path!");
        throw new SQLException("Cant get your traf installation path!");
    }
    File file = new File(root + "/udr/lib/" + userName);
    if (!file.exists()) {
        file.mkdirs();
    } else if (!file.isDirectory()) {
        throw new SQLException("User Directory is not valid or you dont have permission!");
    }
    return file.getAbsolutePath() + "/";
}
/**
 * Rejects a write that would push the target file past the 100Mb ceiling.
 * A nonexistent file has length 0, so fresh uploads are measured by the
 * incoming chunk alone.
 */
private static void checkFile(String fname, int dataSize) throws SQLException {
    long projectedLength = new File(fname).length() + dataSize;
    if (projectedLength > MAX_JAR_FILE_SIZE) {
        LOG.error("Jar file size is over the threshold[100Mb]");
        throw new SQLException("Jar file size is over the threshold[100Mb]");
    }
}
/**
 * Resolves the per-user directory that holds this user's library files,
 * deriving the user name from the given connection's session.
 */
private static String getCodeFilePath(Connection conn) throws SQLException {
    String libDir = getLocalLibDirName(getCurrentUser(conn));
    LOG.info("SPJ JARs location: " + libDir);
    return libDir;
}
/**
 * Returns the local staging directory where putFile() assembles a file
 * from its chunks — the "staging" subdirectory of the user's code path.
 */
private static String getLocalStagingDirPath(Connection conn) throws SQLException {
    return getCodeFilePath(conn).concat("staging/");
}
/**
 * Opens a new JDBC connection to the configured {@code url}.
 *
 * @return a fresh connection (caller closes it via close())
 * @throws SQLException wrapping any failure, with the original cause preserved
 */
private static Connection getConn() throws SQLException {
    try {
        Connection conn = DriverManager.getConnection(url);
        LOG.info("Create connection successfully. " + conn + ", autocommit:" + conn.getAutoCommit());
        return conn;
    } catch (Throwable t) {
        LOG.error("Error encountered while getting connection ", t);
        // Keep t as the cause instead of flattening to getMessage(), which
        // loses the stack trace and may be null for some throwables.
        throw new SQLException(t.getMessage(), t);
    }
}
/** Executes a non-query statement; see the three-argument overload. */
private static void execSQL(String sqlString,
        Connection conn) throws SQLException {
    final boolean executeQuery = false;
    execSQL(sqlString, conn, executeQuery);
}
/**
 * Runs {@code sqlString} on {@code conn}. When {@code executeQuery} is true
 * the statement is executed as a query and column 1 of the first row is
 * returned (null for an empty result set); otherwise the statement is simply
 * executed and null is returned. Statement and ResultSet are always closed;
 * close failures are only logged.
 */
private static String execSQL(String sqlString,
        Connection conn,
        boolean executeQuery) throws SQLException {
    Statement stmt = null;
    ResultSet resultSet = null;
    String firstValue = null;
    try {
        stmt = conn.createStatement();
        if (!executeQuery) {
            stmt.execute(sqlString);
        } else {
            resultSet = stmt.executeQuery(sqlString);
            if (resultSet.next()) {
                firstValue = resultSet.getString(1);
            }
        }
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        throw new SQLException(e);
    } finally {
        // Close the ResultSet before its owning Statement; never let a close
        // failure mask the primary outcome.
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (Exception e) {
                LOG.warn(e.getMessage(), e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception e) {
                LOG.warn(e.getMessage(), e);
            }
        }
    }
    return firstValue;
}
/**
 * Returns the current session user with '/' and '\' replaced by '_' so the
 * name is safe to use as a directory component.
 *
 * @throws SQLException if the user name cannot be determined
 */
private static String getCurrentUser(Connection conn) throws SQLException {
    String user = execSQL("values(session_user)", conn, true);
    if (user == null) {
        // execSQL returns null when the query yields no rows; the original
        // dereferenced it unconditionally and threw NullPointerException.
        throw new SQLException("Cannot determine current session user");
    }
    return user.replaceAll("[\\\\/]", "_");
}
/**
 * Lists the plain files in {@code dir} whose names match the '*'-wildcard
 * {@code pattern}, case-insensitively. Only '*' is a wildcard; every other
 * character is matched literally. (The original translated the pattern
 * straight into a regex, so '.', '+', '(' etc. were accidentally treated as
 * regex metacharacters — e.g. "a.jar" also matched "aXjar".)
 */
private static File[] getFiles(String pattern, File dir) {
    StringBuilder regex = new StringBuilder();
    // limit -1 keeps trailing empty segments so a trailing '*' still maps to ".*"
    String[] literals = pattern.trim().toUpperCase().split("\\*", -1);
    for (int i = 0; i < literals.length; i++) {
        if (i > 0) {
            regex.append(".*");
        }
        if (!literals[i].isEmpty()) {
            // quote the literal segment so regex metacharacters match themselves
            regex.append(java.util.regex.Pattern.quote(literals[i]));
        }
    }
    final String p = regex.toString();
    return dir.listFiles(new FileFilter() {
        @Override
        public boolean accept(File name) {
            if (name == null || !name.isFile()) {
                return false;
            }
            return name.getName().trim().toUpperCase().matches(p);
        }
    });
}
/**
 * Renders file metadata as a small XML document:
 * {@code <root><file name='..' lastModifyTime='..' size='..'/>...</root>}.
 * File names are XML-escaped; the original emitted them raw, producing
 * malformed XML for names containing &, ', <, > or ".
 */
private static String toXML(File[] files, String root) {
    StringBuilder sb = new StringBuilder();
    sb.append("<").append(root).append(">");
    for (File f : files) {
        sb.append("<file name='").append(escapeXmlAttr(f.getName()))
          .append("' lastModifyTime='").append(format.format(new Date(f.lastModified())))
          .append("' size='").append(f.length()).append("'/>");
    }
    sb.append("</").append(root).append(">");
    return sb.toString();
}

/** Escapes the five XML special characters for use inside an attribute value. */
private static String escapeXmlAttr(String s) {
    StringBuilder out = new StringBuilder(s.length());
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        switch (c) {
            case '&':  out.append("&amp;");  break;
            case '<':  out.append("&lt;");   break;
            case '>':  out.append("&gt;");   break;
            case '\'': out.append("&apos;"); break;
            case '"':  out.append("&quot;"); break;
            default:   out.append(c);        break;
        }
    }
    return out.toString();
}
/** Closes {@code conn} quietly; a null connection is a no-op. */
private static void close(Connection conn) {
    if (conn == null) {
        // The original dereferenced null here; the resulting NPE was caught
        // below and logged as a confusing warning.
        return;
    }
    try {
        conn.close();
        LOG.info("Closed connection");
    } catch (Exception e) {
        LOG.warn(e.getMessage());
    }
}
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.android.builder.core.VariantType;
import com.android.manifmerger.ManifestMerger2;
import com.android.manifmerger.ManifestMerger2.Invoker;
import com.android.manifmerger.ManifestMerger2.Invoker.Feature;
import com.android.manifmerger.ManifestMerger2.MergeFailureException;
import com.android.manifmerger.ManifestMerger2.MergeType;
import com.android.manifmerger.ManifestMerger2.SystemProperty;
import com.android.manifmerger.MergingReport;
import com.android.manifmerger.MergingReport.MergedManifestKind;
import com.android.manifmerger.PlaceholderHandler;
import com.android.utils.Pair;
import com.android.utils.StdLogger;
import com.google.common.base.Function;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLEventFactory;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLEventWriter;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
/** Provides manifest processing oriented tools. */
/** Provides manifest processing oriented tools. */
public class AndroidManifestProcessor {
  /**
   * Maps each manifest-merger {@link SystemProperty} to the key callers use for it in the
   * {@code values} map; PACKAGE is exposed under the Gradle-style name "applicationId".
   */
  private static final ImmutableMap<SystemProperty, String> SYSTEM_PROPERTY_NAMES =
      Maps.toMap(
          Arrays.asList(SystemProperty.values()),
          new Function<SystemProperty, String>() {
            @Override
            public String apply(SystemProperty property) {
              if (property == SystemProperty.PACKAGE) {
                return "applicationId";
              } else {
                return property.toCamelCase();
              }
            }
          });

  /** Creates a new processor with the appropriate logger. */
  public static AndroidManifestProcessor with(StdLogger stdLogger) {
    return new AndroidManifestProcessor(stdLogger);
  }

  private final StdLogger stdLogger;

  private AndroidManifestProcessor(StdLogger stdLogger) {
    this.stdLogger = stdLogger;
  }

  /**
   * Merge several manifests into one and perform placeholder substitutions. This operation uses
   * Gradle semantics.
   *
   * @param manifest The primary manifest of the merge.
   * @param mergeeManifests Manifests to be merged into {@code manifest}.
   * @param mergeType Whether the merger should operate in application or library mode.
   * @param values A map of strings to be used as manifest placeholders and overrides. packageName
   *     is the only disallowed value and will be ignored.
   * @param output The path to write the resultant manifest to.
   * @param logFile The path to write the merger log to.
   * @return The path of the resultant manifest, either {@code output}, or {@code manifest} if no
   *     merging was required.
   * @throws IOException if there was a problem writing the merged manifest.
   */
  // TODO(corysmith): Extract manifest processing.
  public Path mergeManifest(
      Path manifest,
      Map<Path, String> mergeeManifests,
      MergeType mergeType,
      Map<String, String> values,
      Path output,
      Path logFile)
      throws IOException {
    if (mergeeManifests.isEmpty() && values.isEmpty()) {
      // Nothing to merge or substitute; use the primary manifest as-is.
      return manifest;
    }

    Invoker<?> manifestMerger = ManifestMerger2.newMerger(manifest.toFile(), stdLogger, mergeType);
    MergedManifestKind mergedManifestKind = MergedManifestKind.MERGED;
    if (mergeType == MergeType.APPLICATION) {
      manifestMerger.withFeatures(Feature.REMOVE_TOOLS_DECLARATIONS);
    }

    // Add mergee manifests
    List<Pair<String, File>> libraryManifests = new ArrayList<>();
    for (Entry<Path, String> mergeeManifest : mergeeManifests.entrySet()) {
      libraryManifests.add(Pair.of(mergeeManifest.getValue(), mergeeManifest.getKey().toFile()));
    }
    manifestMerger.addLibraryManifests(libraryManifests);

    // Extract SystemProperties from the provided values.
    Map<String, Object> placeholders = new HashMap<>();
    placeholders.putAll(values);
    for (SystemProperty property : SystemProperty.values()) {
      if (values.containsKey(SYSTEM_PROPERTY_NAMES.get(property))) {
        manifestMerger.setOverride(
            property, values.get(SYSTEM_PROPERTY_NAMES.get(property)));

        // The manifest merger does not allow explicitly specifying either applicationId or
        // packageName as placeholders if SystemProperty.PACKAGE is specified. It forces these
        // placeholders to have the same value as specified by SystemProperty.PACKAGE.
        if (property == SystemProperty.PACKAGE) {
          placeholders.remove(PlaceholderHandler.APPLICATION_ID);
          placeholders.remove(PlaceholderHandler.PACKAGE_NAME);
        }
      }
    }

    // Add placeholders for all values.
    // packageName is populated from either the applicationId override or from the manifest itself;
    // it cannot be manually specified.
    placeholders.remove(PlaceholderHandler.PACKAGE_NAME);
    manifestMerger.setPlaceHolderValues(placeholders);

    try {
      MergingReport mergingReport = manifestMerger.merge();

      if (logFile != null) {
        logFile.getParent().toFile().mkdirs();
        try (PrintStream stream = new PrintStream(logFile.toFile())) {
          mergingReport.log(new AndroidResourceProcessor.PrintStreamLogger(stream));
        }
      }

      switch (mergingReport.getResult()) {
        case WARNING:
          // Log the warnings, then write the output exactly as for SUCCESS.
          mergingReport.log(stdLogger);
          // fall through
        case SUCCESS:
          Files.createDirectories(output.getParent());
          writeMergedManifest(mergedManifestKind, mergingReport, output);
          break;
        case ERROR:
          mergingReport.log(stdLogger);
          throw new RuntimeException(mergingReport.getReportString());
        default:
          throw new RuntimeException("Unhandled result type : " + mergingReport.getResult());
      }
    } catch (MergeFailureException e) {
      throw new RuntimeException(e);
    }

    return output;
  }

  /**
   * Stamps the manifest of {@code primaryData} with the given package, versionCode and versionName
   * (each optional) and returns a {@link MergedAndroidData} pointing at the processed manifest;
   * returns {@code primaryData} unchanged when there is nothing to stamp.
   */
  public MergedAndroidData processManifest(
      VariantType variantType,
      String customPackageForR,
      String applicationId,
      int versionCode,
      String versionName,
      MergedAndroidData primaryData,
      Path processedManifest)
      throws IOException {

    ManifestMerger2.MergeType mergeType =
        variantType == VariantType.DEFAULT
            ? ManifestMerger2.MergeType.APPLICATION
            : ManifestMerger2.MergeType.LIBRARY;

    String newManifestPackage =
        variantType == VariantType.DEFAULT ? applicationId : customPackageForR;

    // NOTE(review): -1 is treated as the "no versionCode" sentinel here, but the stamping check
    // below uses versionCode > 0, so values <= 0 other than -1 trigger a merge without stamping
    // the version -- confirm this asymmetry is intentional.
    if (versionCode != -1 || versionName != null || newManifestPackage != null) {
      Files.createDirectories(processedManifest.getParent());

      // The generics on Invoker don't make sense, so ignore them.
      @SuppressWarnings("unchecked")
      Invoker<?> manifestMergerInvoker =
          ManifestMerger2.newMerger(primaryData.getManifest().toFile(), stdLogger, mergeType);
      // Stamp new package
      if (newManifestPackage != null) {
        manifestMergerInvoker.setOverride(SystemProperty.PACKAGE, newManifestPackage);
      }
      // Stamp version and applicationId (if provided) into the manifest
      if (versionCode > 0) {
        manifestMergerInvoker.setOverride(SystemProperty.VERSION_CODE, String.valueOf(versionCode));
      }
      if (versionName != null) {
        manifestMergerInvoker.setOverride(SystemProperty.VERSION_NAME, versionName);
      }

      MergedManifestKind mergedManifestKind = MergedManifestKind.MERGED;
      if (mergeType == ManifestMerger2.MergeType.APPLICATION) {
        manifestMergerInvoker.withFeatures(Invoker.Feature.REMOVE_TOOLS_DECLARATIONS);
      }

      try {
        MergingReport mergingReport = manifestMergerInvoker.merge();
        switch (mergingReport.getResult()) {
          case WARNING:
            // Log warnings, then write the manifest exactly as for SUCCESS.
            mergingReport.log(stdLogger);
            // fall through
          case SUCCESS:
            writeMergedManifest(mergedManifestKind, mergingReport, processedManifest);
            break;
          case ERROR:
            mergingReport.log(stdLogger);
            throw new RuntimeException(mergingReport.getReportString());
          default:
            throw new RuntimeException("Unhandled result type : " + mergingReport.getResult());
        }
      } catch (IOException | MergeFailureException e) {
        throw new RuntimeException(e);
      }
      return new MergedAndroidData(
          primaryData.getResourceDir(), primaryData.getAssetDir(), processedManifest);
    }
    return primaryData;
  }

  /**
   * Overwrite the package attribute of {@code <manifest>} in an AndroidManifest.xml file.
   *
   * @param manifest The input manifest.
   * @param customPackage The package to write to the manifest.
   * @param output The output manifest to generate.
   * @return The output manifest if generated or the input manifest if no overwriting is required.
   */
  /* TODO(apell): switch from custom xml parsing to Gradle merger with NO_PLACEHOLDER_REPLACEMENT
   * set when android common is updated to version 2.5.0.
   */
  public Path writeManifestPackage(Path manifest, String customPackage, Path output) {
    if (Strings.isNullOrEmpty(customPackage)) {
      return manifest;
    }
    try {
      Files.createDirectories(output.getParent());
      // try-with-resources closes both underlying streams; the original leaked
      // them (only writer.flush() was called, neither stream was closed).
      try (java.io.InputStream inputStream = Files.newInputStream(manifest);
          java.io.OutputStream outputStream = Files.newOutputStream(output)) {
        XMLEventReader reader =
            XMLInputFactory.newInstance().createXMLEventReader(inputStream, UTF_8.name());
        XMLEventWriter writer =
            XMLOutputFactory.newInstance().createXMLEventWriter(outputStream, UTF_8.name());
        XMLEventFactory eventFactory = XMLEventFactory.newInstance();
        while (reader.hasNext()) {
          XMLEvent event = reader.nextEvent();
          if (event.isStartElement()
              && event.asStartElement().getName().toString().equalsIgnoreCase("manifest")) {
            StartElement element = event.asStartElement();
            @SuppressWarnings("unchecked")
            Iterator<Attribute> attributes = element.getAttributes();
            ImmutableList.Builder<Attribute> newAttributes = ImmutableList.builder();
            while (attributes.hasNext()) {
              Attribute attr = attributes.next();
              if (attr.getName().toString().equalsIgnoreCase("package")) {
                // Swap in the custom package; all other attributes are kept.
                newAttributes.add(eventFactory.createAttribute("package", customPackage));
              } else {
                newAttributes.add(attr);
              }
            }
            writer.add(
                eventFactory.createStartElement(
                    element.getName(), newAttributes.build().iterator(), element.getNamespaces()));
          } else {
            writer.add(event);
          }
        }
        writer.flush();
      }
    } catch (XMLStreamException | FactoryConfigurationError | IOException e) {
      throw new RuntimeException(e);
    }
    return output;
  }

  /** Writes the merged manifest of the given kind to {@code manifestOut} as UTF-8. */
  public void writeMergedManifest(
      MergedManifestKind mergedManifestKind, MergingReport mergingReport, Path manifestOut)
      throws IOException {
    String manifestContents = mergingReport.getMergedDocument(mergedManifestKind);
    String annotatedDocument = mergingReport.getMergedDocument(MergedManifestKind.BLAME);
    stdLogger.verbose(annotatedDocument);
    Files.write(manifestOut, manifestContents.getBytes(UTF_8));
  }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.vcs.log.util;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.intellij.ide.IdeTooltip;
import com.intellij.ide.IdeTooltipManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.progress.util.ProgressIndicatorWithDelayedPresentation;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.changes.EditorTabDiffPreviewManager;
import com.intellij.ui.*;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.ui.navigation.History;
import com.intellij.ui.navigation.Place;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.StatusText;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcs.log.CommitId;
import com.intellij.vcs.log.VcsLogBundle;
import com.intellij.vcs.log.data.VcsLogData;
import com.intellij.vcs.log.data.VcsLogProgress;
import com.intellij.vcs.log.ui.AbstractVcsLogUi;
import com.intellij.vcs.log.ui.VcsLogUiEx;
import com.intellij.vcs.log.ui.filter.VcsLogFilterUiEx;
import com.intellij.vcs.log.ui.frame.ProgressStripe;
import com.intellij.vcs.log.ui.table.VcsLogGraphTable;
import com.intellij.vcs.log.visible.VisiblePackRefresherImpl;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.Collection;
import java.util.List;
/** Static helpers for assembling VCS log UI components. */
public final class VcsLogUiUtil {
  /**
   * Wraps {@code component} into a {@link ProgressStripe} whose busy state follows the progress
   * indicators of {@code logData} that are relevant to the UI identified by {@code logId}.
   * The listener is unsubscribed when {@code disposableParent} is disposed.
   */
  @NotNull
  public static JComponent installProgress(@NotNull JComponent component,
                                           @NotNull VcsLogData logData,
                                           @NotNull String logId,
                                           @NotNull Disposable disposableParent) {
    ProgressStripe progressStripe =
      new ProgressStripe(component, disposableParent,
                         ProgressIndicatorWithDelayedPresentation.DEFAULT_PROGRESS_DIALOG_POSTPONE_TIME_MILLIS) {
        @Override
        public void updateUI() {
          super.updateUI();
          // Re-show the busy stripe after a look-and-feel change while a refresh is running.
          if (myDecorator != null && logData.getProgress().isRunning()) startLoadingImmediately();
        }
      };
    logData.getProgress().addProgressIndicatorListener(new VcsLogProgress.ProgressListener() {
      @Override
      public void progressStarted(@NotNull Collection<? extends VcsLogProgress.ProgressKey> keys) {
        if (isProgressVisible(keys, logId)) {
          progressStripe.startLoading();
        }
      }

      @Override
      public void progressChanged(@NotNull Collection<? extends VcsLogProgress.ProgressKey> keys) {
        // A change may make this UI's progress either visible or irrelevant.
        if (isProgressVisible(keys, logId)) {
          progressStripe.startLoading();
        }
        else {
          progressStripe.stopLoading();
        }
      }

      @Override
      public void progressStopped() {
        progressStripe.stopLoading();
      }
    }, disposableParent);
    return progressStripe;
  }

  /**
   * Returns true when any of {@code keys} concerns the UI with {@code logId}: either the global
   * data-pack refresh or that UI's visible-pack refresh.
   */
  public static boolean isProgressVisible(@NotNull Collection<? extends VcsLogProgress.ProgressKey> keys,
                                          @NotNull String logId) {
    if (keys.contains(VcsLogData.DATA_PACK_REFRESH)) {
      return true;
    }
    return ContainerUtil.find(keys, key -> VisiblePackRefresherImpl.isVisibleKeyFor(key, logId)) != null;
  }

  /** Places {@code graphTable} into a scroll pane (top border only) and hands it its viewport. */
  @NotNull
  public static JScrollPane setupScrolledGraph(@NotNull VcsLogGraphTable graphTable, int border) {
    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(graphTable, border);
    ComponentUtil.putClientProperty(scrollPane, UIUtil.KEEP_BORDER_SIDES, SideBorder.TOP);
    graphTable.viewportSet(scrollPane.getViewport());
    return scrollPane;
  }

  /** Shows a non-centered tooltip with the given text at {@code point} over {@code component}. */
  public static void showTooltip(@NotNull JComponent component,
                                 @NotNull Point point,
                                 @NotNull Balloon.Position position,
                                 @NotNull @NlsContexts.Tooltip String text) {
    JEditorPane tipComponent = IdeTooltipManager.initPane(text, new HintHint(component, point).setAwtTooltip(true), null);
    IdeTooltip tooltip = new IdeTooltip(component, point, new Wrapper(tipComponent)).setPreferredPosition(position).setToCenter(false)
      .setToCenterIfSmall(false);
    IdeTooltipManager.getInstance().show(tooltip, false);
  }

  /**
   * Installs back/forward navigation history over commit selection in the given log UI; a history
   * entry is recorded on every user-driven (non-adjusting, non-navigation) selection change.
   */
  @NotNull
  public static History installNavigationHistory(@NotNull AbstractVcsLogUi ui) {
    History history = new History(new VcsLogPlaceNavigator(ui));
    ui.getTable().getSelectionModel().addListSelectionListener((e) -> {
      // Skip events fired by the history navigation itself and transient adjustments.
      if (!history.isNavigatingNow() && !e.getValueIsAdjusting()) {
        history.pushQueryPlace();
      }
    });
    return history;
  }

  /**
   * Shortens {@code text} with {@code symbol} until it fits into {@code availableWidth} pixels,
   * but never below {@code maxLength} characters; the {@code maxLength}-character shortening is
   * returned even if it still does not fit.
   */
  @NotNull
  @Nls
  public static String shortenTextToFit(@NotNull @Nls String text, @NotNull FontMetrics fontMetrics, int availableWidth, int maxLength,
                                        @NotNull @Nls String symbol) {
    if (fontMetrics.stringWidth(text) <= availableWidth) return text;

    for (int i = text.length(); i > maxLength; i--) {
      String result = StringUtil.shortenTextWithEllipsis(text, i, 0, symbol);
      if (fontMetrics.stringWidth(result) <= availableWidth) {
        return result;
      }
    }
    return StringUtil.shortenTextWithEllipsis(text, maxLength, 0, symbol);
  }

  /** Total horizontal space occupied by the component's border plus internal padding. */
  public static int getHorizontalTextPadding(@NotNull SimpleColoredComponent component) {
    Insets borderInsets = component.getMyBorder().getBorderInsets(component);
    Insets ipad = component.getIpad();
    return borderInsets.left + borderInsets.right + ipad.left + ipad.right;
  }

  /** Appends a clickable link {@code text} to {@code emptyText} that runs {@code action}. */
  public static void appendActionToEmptyText(@Nls @NotNull StatusText emptyText, @Nls @NotNull String text, @NotNull Runnable action) {
    emptyText.appendSecondaryText(text, SimpleTextAttributes.LINK_PLAIN_ATTRIBUTES, e -> action.run());
  }

  /** Appends a "reset filters" link to {@code emptyText} that clears {@code filterUi}'s filters. */
  public static void appendResetFiltersActionToEmptyText(@NotNull VcsLogFilterUiEx filterUi, @Nls @NotNull StatusText emptyText) {
    appendActionToEmptyText(emptyText, VcsLogBundle.message("vcs.log.reset.filters.status.action"), filterUi::clearFilters);
  }

  /** Whether the diff preview should be shown in an editor tab for this project. */
  public static boolean isDiffPreviewInEditor(@NotNull Project project) {
    return EditorTabDiffPreviewManager.getInstance(project).isEditorDiffPreviewAvailable();
  }

  /** Grows {@code size} so a floating toolbar fits: at least 1.5x the toolbar's smaller side. */
  @NotNull
  public static Dimension expandToFitToolbar(@NotNull Dimension size, @NotNull JComponent toolbar) {
    Dimension preferredSize = toolbar.getPreferredSize();
    int minToolbarSize = Math.round(Math.min(preferredSize.width, preferredSize.height) * 1.5f);
    return new Dimension(Math.max(size.width, minToolbarSize), Math.max(size.height, minToolbarSize));
  }

  /** Saves/restores the selected commit so back/forward navigation can jump to it again. */
  private static final class VcsLogPlaceNavigator implements Place.Navigator {
    @NonNls private static final String PLACE_KEY = "Vcs.Log.Ui.History.PlaceKey";
    @NotNull private final AbstractVcsLogUi myUi;

    private VcsLogPlaceNavigator(@NotNull AbstractVcsLogUi ui) {
      myUi = ui;
    }

    @Override
    public void queryPlace(@NotNull Place place) {
      // Record only the first selected commit as this place's state.
      List<CommitId> commits = myUi.getVcsLog().getSelectedCommits();
      if (commits.size() > 0) {
        place.putPath(PLACE_KEY, commits.get(0));
      }
    }

    @Override
    public ActionCallback navigateTo(@Nullable Place place, boolean requestFocus) {
      if (place == null) return ActionCallback.DONE;

      Object value = place.getPath(PLACE_KEY);
      if (!(value instanceof CommitId)) return ActionCallback.REJECTED;

      CommitId commitId = (CommitId)value;
      ActionCallback callback = new ActionCallback();
      // Jump asynchronously; the callback is resolved on the EDT when the jump finishes.
      ListenableFuture<VcsLogUiEx.JumpResult> future = VcsLogUtil.jumpToCommit(myUi, commitId.getHash(), commitId.getRoot(),
                                                                               false, true);
      Futures.addCallback(future, new FutureCallback<>() {
        @Override
        public void onSuccess(VcsLogUiEx.JumpResult result) {
          if (result == VcsLogUiEx.JumpResult.SUCCESS) {
            if (requestFocus) myUi.getTable().requestFocusInWindow();
            callback.setDone();
          }
          else {
            callback.setRejected();
          }
        }

        @Override
        public void onFailure(Throwable t) {
          callback.setRejected();
        }
      }, EdtExecutorService.getInstance());
      return callback;
    }
  }
}
| |
/*
[The "BSD licence"]
Copyright (c) 2005-2008 Terence Parr
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.tool;
import antlr.BaseAST;
import antlr.Token;
import antlr.TokenWithIndex;
import antlr.collections.AST;
import org.antlr.analysis.DFA;
import org.antlr.analysis.NFAState;
import org.antlr.misc.IntSet;
import org.antlr.stringtemplate.StringTemplate;
import java.util.*;
/** Grammars are first converted to ASTs using this class and then are
* converted to NFAs via a tree walker.
*
* The reader may notice that I have made a very non-OO decision in this
* class to track variables for many different kinds of nodes. It wastes
* space for nodes that don't need the values and OO principles cry out
* for a new class type for each kind of node in my tree. I am doing this
* on purpose for a variety of reasons. I don't like using the type
* system for different node types; it yields too many damn class files
* which I hate. Perhaps if I put them all in one file. Most importantly
* though I hate all the type casting that would have to go on. I would
* have all sorts of extra work to do. Ick. Anyway, I'm doing all this
* on purpose, not out of ignorance. ;)
*/
public class GrammarAST extends BaseAST {
static int count = 0;
public int ID = ++count;
/** This AST node was created from what token? */
public Token token = null;
public String enclosingRuleName;
/** If this is a RULE node then track rule's start, stop tokens' index. */
public int ruleStartTokenIndex;
public int ruleStopTokenIndex;
/** If this is a decision node, what is the lookahead DFA? */
public DFA lookaheadDFA = null;
/** What NFA start state was built from this node? */
public NFAState NFAStartState = null;
/** This is used for TREE_BEGIN nodes to point into
* the NFA. TREE_BEGINs point at left edge of DOWN for LOOK computation
* purposes (Nullable tree child list needs special code gen when matching).
*/
public NFAState NFATreeDownState = null;
/** Rule ref nodes, token refs, set, and NOT set refs need to track their
* location in the generated NFA so that local FOLLOW sets can be
* computed during code gen for automatic error recovery.
*/
public NFAState followingNFAState = null;
/** If this is a SET node, what are the elements? */
protected IntSet setValue = null;
/** If this is a BLOCK node, track options here */
protected Map<String,Object> blockOptions;
/** If this is a BLOCK node for a rewrite rule, track referenced
* elements here. Don't track elements in nested subrules.
*/
public Set<GrammarAST> rewriteRefsShallow;
/* If REWRITE node, track EVERY element and label ref to right of ->
* for this rewrite rule. There could be multiple of these per
* rule:
*
* a : ( ... -> ... | ... -> ... ) -> ... ;
*
* We may need a list of all refs to do definitions for whole rewrite
* later.
*
* If BLOCK then tracks every element at that level and below.
*/
public Set<GrammarAST> rewriteRefsDeep;
public Map<String,Object> terminalOptions;
/** if this is an ACTION node, this is the outermost enclosing
* alt num in rule. For actions, define.g sets these (used to
* be codegen.g). We need these set so we can examine actions
* early, before code gen, for refs to rule predefined properties
* and rule labels. For most part define.g sets outerAltNum, but
* codegen.g does the ones for %foo(a={$ID.text}) type refs as
* the {$ID...} is not seen as an action until code gen pulls apart.
*/
public int outerAltNum;
/** if this is a TOKEN_REF or RULE_REF node, this is the code StringTemplate
* generated for this node. We need to update it later to add
* a label if someone does $tokenref or $ruleref in an action.
*/
public StringTemplate code;
/** No-arg constructor used when a node is built first and initialized later (e.g. by dup()). */
public GrammarAST() {;}

/** Build a node directly from a token type and text. */
public GrammarAST(int t, String txt) {
    initialize(t,txt);
}

/** Attach a fresh TokenWithIndex carrying type {@code i} and text {@code s}. */
public void initialize(int i, String s) {
    token = new TokenWithIndex(i,s);
}

/**
 * Copy bookkeeping state from another GrammarAST (used by dup()).
 * Only a subset of fields is copied: token, enclosingRuleName, rule token
 * indexes, setValue, blockOptions and outerAltNum. DFA/NFA links,
 * terminalOptions and the rewrite-tracking sets are NOT copied —
 * TODO(review): confirm that omission is intentional.
 */
public void initialize(AST ast) {
    GrammarAST t = ((GrammarAST)ast);
    this.token = t.token;
    this.enclosingRuleName = t.enclosingRuleName;
    this.ruleStartTokenIndex = t.ruleStartTokenIndex;
    this.ruleStopTokenIndex = t.ruleStopTokenIndex;
    this.setValue = t.setValue;
    this.blockOptions = t.blockOptions;
    this.outerAltNum = t.outerAltNum;
}

/** Adopt an existing token instance (shared, not copied). */
public void initialize(Token token) {
    this.token = token;
}
/** Lookahead DFA for this decision node, if one was built. */
public DFA getLookaheadDFA() {
    return lookaheadDFA;
}

public void setLookaheadDFA(DFA lookaheadDFA) {
    this.lookaheadDFA = lookaheadDFA;
}

/** Token this AST node was created from; may be null for imaginary nodes. */
public Token getToken() {
    return token;
}

/** NFA start state built from this node, if any. */
public NFAState getNFAStartState() {
    return NFAStartState;
}

public void setNFAStartState(NFAState nfaStartState) {
    this.NFAStartState = nfaStartState;
}
/** Save the option key/value pair and process it; return the key
 *  or null if invalid option.
 */
public String setBlockOption(Grammar grammar, String key, Object value) {
    if ( blockOptions == null ) {
        // Parameterize the map (the original used a raw HashMap) to match
        // the declared field type Map<String,Object>.
        blockOptions = new HashMap<String, Object>();
    }
    return setOption(blockOptions, Grammar.legalBlockOptions, grammar, key, value);
}
/** Record a terminal (token-level) option after legality checking; returns the key or null. */
public String setTerminalOption(Grammar grammar, String key, Object value) {
    Map<String,Object> opts = terminalOptions;
    if ( opts == null ) {
        // lazily created on first terminal option
        opts = new HashMap<String,Object>();
        terminalOptions = opts;
    }
    return setOption(opts, Grammar.legalTokenOptions, grammar, key, value);
}
/**
 * Validate and store an option in {@code options}. Returns the key on
 * success, or null (after reporting a grammar error) when the key is not in
 * {@code legalOptions}. Double-quoted string values are unquoted; option
 * "k" additionally bumps the grammar's manual-lookahead counter.
 */
public String setOption(Map options, Set legalOptions, Grammar grammar, String key, Object value) {
    if ( !legalOptions.contains(key) ) {
        ErrorManager.grammarError(ErrorManager.MSG_ILLEGAL_OPTION,
                                  grammar,
                                  token,
                                  key);
        return null;
    }
    if ( value instanceof String ) {
        String vs = (String)value;
        // Length guard: the original called charAt(0)/substring unconditionally
        // and threw StringIndexOutOfBoundsException for "" or a lone quote.
        if ( vs.length()>=2 && vs.charAt(0)=='"' ) {
            value = vs.substring(1,vs.length()-1); // strip quotes
        }
    }
    if ( key.equals("k") ) {
        grammar.numberOfManualLookaheadOptions++;
    }
    options.put(key, value);
    return key;
}
/** Look up a previously stored block option; null when absent or no options exist. */
public Object getBlockOption(String key) {
    if ( blockOptions == null ) {
        return null;
    }
    return blockOptions.get(key);
}
/**
 * Store every legal entry of {@code options} as a block option; entries
 * rejected by setBlockOption (which returns null for them) are removed from
 * the caller's map. A null map clears all block options.
 */
public void setOptions(Grammar grammar, Map options) {
    if ( options==null ) {
        this.blockOptions = null;
        return;
    }
    // explicit iterator so invalid options can be removed in place
    for (Iterator it = options.keySet().iterator(); it.hasNext();) {
        String optionName = (String) it.next();
        Object optionValue = options.get(optionName);
        if ( setBlockOption(grammar, optionName, optionValue) == null ) {
            it.remove();
        }
    }
}
/** Text of the underlying token, or "" when no token is attached. */
public String getText() {
    if ( token!=null ) {
        return token.getText();
    }
    return "";
}

// NOTE(review): unlike the getters, these two mutators NPE when token is
// null; callers are expected to have initialized the node first.
public void setType(int type) {
    token.setType(type);
}

public void setText(String text) {
    token.setText(text);
}

/** Token type of this node, or -1 when no token is attached. */
public int getType() {
    if ( token!=null ) {
        return token.getType();
    }
    return -1;
}
/** Line of this node's token; falls back to the first child's line when 0 or absent. */
public int getLine() {
    int line=0;
    if ( token!=null ) {
        line = token.getLine();
    }
    if ( line==0 ) {
        // imaginary nodes carry no position; borrow it from the first child
        AST child = getFirstChild();
        if ( child!=null ) {
            line = child.getLine();
        }
    }
    return line;
}

/** Column of this node's token; falls back to the first child's column when 0 or absent. */
public int getColumn() {
    int col=0;
    if ( token!=null ) {
        col = token.getColumn();
    }
    if ( col==0 ) {
        AST child = getFirstChild();
        if ( child!=null ) {
            col = child.getColumn();
        }
    }
    return col;
}

// NOTE(review): setLine/setColumn NPE when token is null, unlike the getters.
public void setLine(int line) {
    token.setLine(line);
}

public void setColumn(int col) {
    token.setColumn(col);
}

/** Elements of this node if it is a SET node; null otherwise. */
public IntSet getSetValue() {
    return setValue;
}

public void setSetValue(IntSet setValue) {
    this.setValue = setValue;
}
/**
 * Last (right-most) child of this node, or null if this node has no
 * children. (The original dereferenced getFirstChild() unconditionally and
 * threw NullPointerException on childless nodes.)
 */
public GrammarAST getLastChild() {
    GrammarAST firstChild = (GrammarAST)getFirstChild();
    if ( firstChild == null ) {
        return null;
    }
    return firstChild.getLastSibling();
}
/** Walk the sibling chain starting at this node and return the final node. */
public GrammarAST getLastSibling() {
    GrammarAST cursor = this;
    GrammarAST trailing;
    // 'this' is never null, so the chain has at least one node
    do {
        trailing = cursor;
        cursor = (GrammarAST)cursor.getNextSibling();
    } while ( cursor != null );
    return trailing;
}
/** Get the ith child from 0; null when i is out of range. */
public GrammarAST getChild(int i) {
    AST child = getFirstChild();
    for (int index = 0; child != null; index++) {
        if ( index == i ) {
            return (GrammarAST)child;
        }
        child = child.getNextSibling();
    }
    return null;
}
/** First direct child whose token type is {@code ttype}; null when none matches. */
public GrammarAST getFirstChildWithType(int ttype) {
    for (AST child = getFirstChild(); child != null; child = child.getNextSibling()) {
        if ( child.getType() == ttype ) {
            return (GrammarAST)child;
        }
    }
    return null;
}
/** Children of this node, in order, as an array (zero-length when childless). */
public GrammarAST[] getChildrenAsArray() {
    GrammarAST[] children = new GrammarAST[getNumberOfChildren()];
    int slot = 0;
    for (AST child = getFirstChild(); child != null; child = child.getNextSibling()) {
        children[slot++] = (GrammarAST)child;
    }
    return children;
}
/** Return a reference to the first node (depth-first) that has
 *  token type ttype. Assume 'this' is a root node; don't visit siblings
 *  of root. Return null if no node found with ttype.
 */
public GrammarAST findFirstType(int ttype) {
    // the root itself may match
    if ( getType() == ttype ) {
        return this;
    }
    // otherwise search each child subtree, left to right
    for (GrammarAST child = (GrammarAST)getFirstChild();
         child != null;
         child = (GrammarAST)child.getNextSibling()) {
        GrammarAST hit = child.findFirstType(ttype);
        if ( hit != null ) {
            return hit;
        }
    }
    return null;
}
/** Make nodes unique based upon Token so we can add them to a Set; if
 *  not a GrammarAST, check type.
 *
 *  NOTE(review): this equals() compares only the token's line/column for
 *  two GrammarASTs, is not symmetric when mixed with plain ASTs, NPEs when
 *  this node has no token, and hashCode() is NOT overridden to match — so
 *  hash-based sets still deduplicate by identity. Verify before relying on
 *  set membership of these nodes.
 */
public boolean equals(Object ast) {
    if ( this == ast ) {
        return true;
    }
    if ( !(ast instanceof GrammarAST) ) {
        return this.getType() == ((AST)ast).getType();
    }
    GrammarAST t = (GrammarAST)ast;
    return token.getLine() == t.getLine() &&
           token.getColumn() == t.getColumn();
}
/** See if tree has exact token types and structure; no text */
public boolean hasSameTreeStructure(AST t) {
    // roots must agree on token type
    if ( getType() != t.getType() ) {
        return false;
    }
    AST myChildren = getFirstChild();
    if ( myChildren == null ) {
        // this root is a leaf; t must be a leaf too
        return t.getFirstChild() == null;
    }
    // both roots have children: compare the full child lists
    return ((GrammarAST)myChildren).hasSameListStructure(t.getFirstChild());
}
/**
 * Compare this node's sibling list against t's sibling list: same length,
 * same token types, and recursively the same child structure (text is
 * ignored). The empty tree (t == null) never matches.
 */
public boolean hasSameListStructure(AST t) {
    AST sibling;

    // the empty tree is not a match of any non-null tree.
    if (t == null) {
        return false;
    }

    // Otherwise, start walking sibling lists. First mismatch, return false.
    // Both cursors advance together each iteration.
    for (sibling = this;
         sibling != null && t != null;
         sibling = sibling.getNextSibling(), t = t.getNextSibling())
    {
        // as a quick optimization, check roots first.
        if (sibling.getType()!=t.getType()) {
            return false;
        }
        // if roots match, do full list match test on children.
        if (sibling.getFirstChild() != null) {
            if (!((GrammarAST)sibling.getFirstChild()).hasSameListStructure(t.getFirstChild())) {
                return false;
            }
        }
        // sibling has no kids, make sure t doesn't either
        else if (t.getFirstChild() != null) {
            return false;
        }
    }
    // both lists must be exhausted together for a match
    if (sibling == null && t == null) {
        return true;
    }
    // one sibling list has more than the other
    return false;
}
/** Shallow-copy a single node: a fresh GrammarAST initialized from t.
 *  Children and siblings are NOT copied.  Returns null for null input.
 */
public static GrammarAST dup(AST t) {
    if (t == null) {
        return null;
    }
    GrammarAST copy = new GrammarAST();
    copy.initialize(t);
    return copy;
}
/** Duplicate tree including siblings of root, dropping ACTION subtrees.
 *  Returns the head of the duplicated sibling list; null when t is null
 *  or dupTreeNoActions elides the first subtree.
 */
public static GrammarAST dupListNoActions(GrammarAST t, GrammarAST parent) {
GrammarAST result = dupTreeNoActions(t, parent); // if t == null, then result==null
// nt tracks the tail of the new list so each duplicate can be appended
GrammarAST nt = result;
while (t != null) { // for each sibling of the root
t = (GrammarAST)t.getNextSibling();
// ACTION siblings are omitted from the duplicate entirely
if ( t!=null && t.getType()==ANTLRParser.ACTION ) {
continue;
}
// dupTreeNoActions may return null (e.g. REWRITE subtrees are dropped)
GrammarAST d = dupTreeNoActions(t, parent);
if ( d!=null ) {
if ( nt!=null ) {
nt.setNextSibling(d); // dup each subtree, building new tree
}
nt = d; // d is the new tail
}
}
return result;
}
/**Duplicate a tree, assuming this is a root node of a tree--
 * duplicate that node and what's below; ignore siblings of root node.
 * Shaping rules: REWRITE subtrees are dropped entirely (returns null),
 * and BANG/ROOT operator nodes are unwrapped so only the operand list
 * survives in the duplicate.
 */
public static GrammarAST dupTreeNoActions(GrammarAST t, GrammarAST parent) {
if ( t==null ) {
return null;
}
int ttype = t.getType();
if ( ttype==ANTLRParser.REWRITE ) {
// rewrite sections are never copied
return null;
}
if ( ttype==ANTLRParser.BANG || ttype==ANTLRParser.ROOT ) {
// return x from ^(ROOT x)
return (GrammarAST)dupListNoActions((GrammarAST)t.getFirstChild(), t);
}
/* DOH! Must allow labels for sem preds
if ( (ttype==ANTLRParser.ASSIGN||ttype==ANTLRParser.PLUS_ASSIGN) &&
(parent==null||parent.getType()!=ANTLRParser.OPTIONS) )
{
return dupTreeNoActions(t.getChild(1), t); // return x from ^(ASSIGN label x)
}
*/
GrammarAST result = dup(t); // make copy of root
// copy all children of root.
GrammarAST kids = dupListNoActions((GrammarAST)t.getFirstChild(), t);
result.setFirstChild(kids);
return result;
}
/** Set enclosingRuleName to rname on this node and, recursively, on
 *  every node in the subtree below it.  Siblings of this node are
 *  left untouched.
 */
public void setTreeEnclosingRuleNameDeeply(String rname) {
    enclosingRuleName = rname;
    // recurse into each child (the first child plus its siblings)
    for (GrammarAST kid = getChild(0);
         kid != null;
         kid = (GrammarAST)kid.getNextSibling())
    {
        kid.setTreeEnclosingRuleNameDeeply(rname);
    }
}
}
| |
package org.hardisonbrewing.narst.cod;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.math.BigInteger;
import java.net.URL;
import java.security.KeyFactory;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.DSAPrivateKeySpec;
import java.util.Properties;
import org.apache.commons.codec.binary.Hex;
import org.codehaus.plexus.util.IOUtil;
/**
 * Signs a BlackBerry .cod file: digests the cod header/code/data
 * sections with SHA-1, sends a signature request to the signing
 * server, and appends the returned signature record to the output
 * file.  Typical flow: configure via the setters, then sign(), then
 * write().
 */
public class Signer implements org.hardisonbrewing.narst.Signer {

    /** Salt used for key derivation when no explicit salt is configured. */
    public static final byte[] DEFAULT_SALT = { 18, 63, 75, 30, 90, -86, -115, 58 };

    /** Worst-case size budgeted for the server's signature payload. */
    private static final long SERVER_RESPONSE_SIZE = 128;

    /** Size of the fixed record header written before the signature bytes. */
    private static final long SIGNATURE_HEADER_SIZE = 8;

    private URL url;             // signing server endpoint
    private String signerId;     // 4-char id written into the signature record
    private String clientId;
    private String password;     // protects the configured private key
    private File input;          // cod file to hash
    private String salt;         // optional encoded override for DEFAULT_SALT
    private String privateKey;   // encoded/encrypted DSA private key material
    private File output;         // cod file the signature record is appended to
    private Properties response; // last server response, set by sign()

    /**
     * Hash the input cod, send a signed "Signature Request" to the
     * server, and remember its response.
     *
     * @return true when the response contained a signature
     * @throws IllegalStateException when a required property is unset
     * @throws Exception for any transport or crypto failure
     */
    public boolean sign() throws Exception {
        if ( url == null ) {
            throw new IllegalStateException( "URL is null" );
        }
        if ( signerId == null ) {
            throw new IllegalStateException( "Signer ID is null" );
        }
        if ( clientId == null ) {
            throw new IllegalStateException( "Client ID is null" );
        }
        if ( password == null ) {
            throw new IllegalStateException( "Password is null" );
        }
        if ( input == null ) {
            throw new IllegalStateException( "Input is null" );
        }
        if ( privateKey == null ) {
            throw new IllegalStateException( "PrivateKey is null" );
        }
        PrivateKey privatekey = privateKey();
        byte[] hash = hash( input );
        if ( hash == null ) {
            throw new IllegalStateException( "Exception generating COD hash" );
        }
        SignedProperties signedProperties = new SignedProperties();
        signedProperties.setProperty( "Version", "0" );
        signedProperties.setProperty( "Command", "Signature Request" );
        signedProperties.setProperty( "SignerID", signerId );
        signedProperties.setProperty( "ClientID", clientId );
        // NOTE(review): platform-default charset; B.a presumably emits
        // ASCII-safe bytes — confirm before changing.
        signedProperties.setProperty( "Hash", new String( hash ) );
        signedProperties.sign( privatekey );
        Sender sender = null;
        try {
            sender = new Sender( url );
            response = sender.send( signedProperties );
        }
        finally {
            // guard: the Sender constructor may have thrown, leaving
            // sender null (previously this finally block could NPE)
            if ( sender != null ) {
                sender.disconnect();
            }
        }
        byte[] signature = getSignature();
        if ( signature != null ) {
            System.out.println( Hex.encodeHex( signature ) );
        }
        return signature != null;
    }

    /**
     * Extract the decoded signature bytes from the last server response.
     *
     * @return the decoded signature (never null)
     * @throws IllegalStateException if sign() has not been called, or
     *         the response is malformed or reports an error
     */
    public byte[] getSignature() {
        if ( response == null ) {
            throw new IllegalStateException( "No response available" );
        }
        String version = response.getProperty( "Version" );
        if ( version == null ) {
            throw new IllegalStateException( "Version of server incompatible." );
        }
        String command = response.getProperty( "Response" );
        if ( command == null || !command.equals( "Signature Response" ) ) {
            throw new IllegalStateException( "Response command invalid." );
        }
        String confirm = response.getProperty( "Confirm" );
        if ( confirm == null ) {
            // no confirmation: the server reported either a known error,
            // an unknown error, or nothing intelligible at all
            if ( response.getProperty( "Error" ) != null ) {
                throw new IllegalStateException( "Error string received." );
            }
            if ( response.getProperty( "Unknown" ) != null ) {
                throw new IllegalStateException( "There was an unknown error sent back from the server." );
            }
            throw new IllegalStateException( "No confirm or error string in response." );
        }
        String signature = response.getProperty( "Signature" );
        if ( signature == null ) {
            throw new IllegalStateException( "Signature from server is invalid." );
        }
        return B.b( signature.getBytes() );
    }

    /**
     * Append the signature from the last server response to the output
     * cod file, printing a success message when the write completes.
     *
     * @throws IllegalStateException if no output file was configured
     */
    public void write() {
        if ( output == null ) {
            throw new IllegalStateException( "No output specified" );
        }
        byte[] signature = getSignature();
        if ( signature == null ) {
            return;
        }
        if ( write( signature ) ) {
            System.out.println( "Signature success" );
        }
    }

    /**
     * Walk the cod file's trailing record list to its end, then append
     * a new signature record: [type=1][length][4-char signer id][sig].
     * All 16-bit fields in the cod format are little-endian.
     *
     * @return true on success, false on any I/O failure
     */
    private boolean write( byte[] signature ) {
        RandomAccessFile file = null;
        try {
            file = new RandomAccessFile( output, "rw" );
            try {
                // --- fixed header ---
                file.skipBytes( 36 );
                int version = swapShort( file.readUnsignedShort() );
                if ( version < 74 ) {
                    throw new IllegalStateException( "Version number incompatible." );
                }
                int codeSize = swapShort( file.readUnsignedShort() );
                int dataSize = swapShort( file.readUnsignedShort() );
                file.skipBytes( 2 );
                file.skipBytes( codeSize + dataSize );
                if ( (long) dataSize + SERVER_RESPONSE_SIZE + SIGNATURE_HEADER_SIZE > 64988L ) {
                    throw new IllegalStateException( "Appending a signature to the following file will cause it to be larger than the maximum sibling cod file size. Signing will abort." );
                }
                // --- trailing records: [type LE short][length LE short][payload] ---
                while (true) {
                    int type = readLittleShort( file );
                    int length = readLittleShort( file );
                    if ( type == 1 ) {
                        // existing signature record: step over its 4-byte
                        // signer id with reads (reads, unlike skipBytes,
                        // raise EOFException at end of file)
                        file.readByte();
                        file.readByte();
                        file.readByte();
                        file.readByte();
                        file.skipBytes( length - 4 );
                    }
                    else {
                        file.skipBytes( length );
                    }
                }
            }
            catch (EOFException eofexception) {
                // expected: the record walk runs off the end of the file,
                // leaving the file pointer where the new record belongs
            }
            // --- append the new signature record ---
            file.write( 1 );                           // record type 1, little-endian
            file.write( 0 );
            file.write( signature.length + 4 & 0xff ); // record length, little-endian
            file.write( signature.length + 4 >> 8 & 0xff );
            char[] id = signerId.toCharArray();
            for (int i = 0; i < 4; i++) {
                // signer id field is always 4 bytes, zero-padded
                file.write( i < id.length ? id[i] : 0 );
            }
            System.out.println( "Signature length is : " + signature.length );
            file.write( signature );
            file.close(); // close inside try: a failing close counts as failure
            return true;
        }
        catch (IOException ioexception) {
            return false;
        }
        finally {
            // fixes the leak on the error paths; closing an
            // already-closed RandomAccessFile is a no-op
            closeQuietly( file );
        }
    }

    /** Read a little-endian unsigned 16-bit value, byte by byte. */
    private static int readLittleShort( RandomAccessFile file ) throws IOException {
        int lo = file.readUnsignedByte();
        int hi = file.readUnsignedByte();
        return ( hi << 8 ) + lo;
    }

    /** Byte-swap a readUnsignedShort (big-endian) value to little-endian. */
    private static int swapShort( int value ) {
        return ( ( value & 0xff ) << 8 ) | ( value >>> 8 );
    }

    /** Best-effort close; ignores close failures and null. */
    private static void closeQuietly( RandomAccessFile file ) {
        if ( file != null ) {
            try {
                file.close();
            }
            catch (IOException ignored) {
                // result already decided by the caller
            }
        }
    }

    /**
     * SHA-1 digest over the cod header (36 bytes), the version, code
     * size and data size fields, a 2-byte gap, and the code and data
     * sections, encoded via B.a.
     *
     * @throws IOException if the file is shorter than its declared sections
     */
    private byte[] hash( File file ) throws NoSuchAlgorithmException, IOException {
        InputStream inputStream = null;
        try {
            inputStream = new FileInputStream( file );
            MessageDigest messageDigest = MessageDigest.getInstance( "SHA" );
            DataInputStream in = new DataInputStream( inputStream );
            byte[] header = new byte[36];
            // readFully: plain read(byte[]) may return a short count,
            // which would silently hash a partial buffer
            in.readFully( header );
            messageDigest.update( header );
            int vn1 = in.readUnsignedByte();
            int vn2 = in.readUnsignedByte();
            messageDigest.update( (byte) vn1 );
            messageDigest.update( (byte) vn2 );
            int versionNumber = vn2 << 8 | vn1; // little-endian
            if ( versionNumber < 74 ) {
                throw new IllegalStateException( "Version number incompatible" );
            }
            int cs1 = in.readUnsignedByte();
            int cs2 = in.readUnsignedByte();
            messageDigest.update( (byte) cs1 );
            messageDigest.update( (byte) cs2 );
            int codeSize = cs2 << 8 | cs1;
            int ds1 = in.readUnsignedByte();
            int ds2 = in.readUnsignedByte();
            messageDigest.update( (byte) ds1 );
            messageDigest.update( (byte) ds2 );
            int dataSize = ds2 << 8 | ds1;
            System.out.println( "Version = " + versionNumber );
            System.out.println( "CodeSize = " + codeSize );
            System.out.println( "DataSize = " + dataSize );
            byte[] two = new byte[2];
            in.readFully( two );
            messageDigest.update( two );
            byte[] codeSection = new byte[codeSize];
            in.readFully( codeSection );
            messageDigest.update( codeSection );
            byte[] dataSection = new byte[dataSize];
            in.readFully( dataSection );
            messageDigest.update( dataSection );
            return B.a( messageDigest.digest() );
        }
        finally {
            IOUtil.close( inputStream );
        }
    }

    /**
     * Reconstruct the DSA private key.  The password and salt are
     * stretched (PrivateKeyDigest) and A.a transforms the encoded key
     * bytes with that digest — presumably in-place decryption; confirm
     * against A.  The fixed p/q/g constants are the DSA domain
     * parameters the key was generated under.
     *
     * @throws IllegalStateException wrapping any crypto failure
     */
    private PrivateKey privateKey() {
        byte[] saltBytes = ( this.salt != null ) ? B.b( this.salt.getBytes() ) : DEFAULT_SALT;
        try {
            MessageDigest messageDigest = MessageDigest.getInstance( "SHA" );
            PrivateKeyDigest privateKeyDigest = new PrivateKeyDigest( messageDigest );
            byte[] digest = privateKeyDigest.digest( password, saltBytes, 1000, 256 );
            byte[] key = B.b( privateKey.getBytes() );
            A.a( digest, key, 0, key.length );
            BigInteger x = new BigInteger( key );
            BigInteger p = new BigInteger( "fd7f53811d75122952df4a9c2eece4e7f611b7523cef4400c31e3f80b6512669455d402251fb593d8d58fabfc5f5ba30f6cb9b556cd7813b801d346ff26660b76b9950a5a49f9fe8047b1022c24fbba9d7feb7c61bf83b57e7c6a8a6150f04fb83f6d3c51ec3023554135a169132f675f3ae2b61d72aeff22203199dd14801c7", 16 );
            BigInteger q = new BigInteger( "9760508f15230bccb292b982a2eb840bf0581cf5", 16 );
            BigInteger g = new BigInteger( "f7e1a085d69b3ddecbbcab5c36b857b97994afbbfa3aea82f9574c0b3d0782675159578ebad4594fe67107108180b449167123e84c281613b7cf09328cc8a6e13c167a8b547c8d28e0a3ae1e2bb3a675916ea37f0bfa213562f1fb627a01243bcca4f1bea8519089a883dfe15ae59f06928b665e807b552564014c3bfecf492a", 16 );
            KeyFactory keyfactory = KeyFactory.getInstance( "DSA" );
            return keyfactory.generatePrivate( new DSAPrivateKeySpec( x, p, q, g ) );
        }
        catch (Exception e) {
            // preserve the cause instead of printStackTrace + bare exception
            throw new IllegalStateException( "Unable to locate algorithm", e );
        }
    }

    /** Signing server endpoint. */
    public void setUrl( URL url ) {
        this.url = url;
    }

    /** 4-character signer id written into the signature record. */
    public void setSignerId( String signerId ) {
        this.signerId = signerId;
    }

    public void setClientId( String clientId ) {
        this.clientId = clientId;
    }

    /** Password protecting the configured private key. */
    public void setPassword( String password ) {
        this.password = password;
    }

    /** Cod file to hash for the signature request. */
    public void setInput( File input ) {
        this.input = input;
    }

    /** Encoded salt overriding DEFAULT_SALT for key derivation. */
    public void setSalt( String salt ) {
        this.salt = salt;
    }

    /** Encoded/encrypted DSA private key material. */
    public void setPrivateKey( String privateKey ) {
        this.privateKey = privateKey;
    }

    /** Cod file the signature record is appended to. */
    public void setOutput( File output ) {
        this.output = output;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.