gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.parsing;
import com.intellij.lang.PsiBuilder;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyTokenTypes;
import org.jetbrains.annotations.Nullable;
import static com.jetbrains.python.PyBundle.message;
/**
* @author yole
*/
/**
 * Recursive-descent parser for Python expressions, layered on IntelliJ's
 * {@link PsiBuilder} marker API. Each {@code parseXXX} method either consumes
 * tokens and closes a marker with the matching {@link PyElementTypes} node,
 * or reports an error via {@code myBuilder.error(...)}. Methods returning
 * {@code boolean} report whether an expression was recognized at all.
 *
 * @author yole
 */
public class ExpressionParsing extends Parsing {
  private static final Logger LOG = Logger.getInstance("#ru.yole.pythonlanguage.parsing.ExpressionParsing");

  public ExpressionParsing(ParsingContext context) {
    super(context);
  }

  /**
   * Parses an atom: identifier, numeric/string/bool/None literal, parenthesized
   * expression, list/dict/set display, backtick repr or ellipsis.
   *
   * @param isTargetExpression true when parsing the left side of an assignment,
   *                           so a bare identifier becomes a TARGET_EXPRESSION
   *                           instead of a reference.
   * @return true if an atom was parsed.
   */
  public boolean parsePrimaryExpression(boolean isTargetExpression) {
    final IElementType firstToken = myBuilder.getTokenType();
    if (isIdentifier(myBuilder)) {
      if (isTargetExpression) {
        buildTokenElement(PyElementTypes.TARGET_EXPRESSION, myBuilder);
      }
      else {
        buildTokenElement(getReferenceType(), myBuilder);
      }
      return true;
    }
    else if (firstToken == PyTokenTypes.INTEGER_LITERAL) {
      buildTokenElement(PyElementTypes.INTEGER_LITERAL_EXPRESSION, myBuilder);
      return true;
    }
    else if (firstToken == PyTokenTypes.FLOAT_LITERAL) {
      buildTokenElement(PyElementTypes.FLOAT_LITERAL_EXPRESSION, myBuilder);
      return true;
    }
    else if (firstToken == PyTokenTypes.IMAGINARY_LITERAL) {
      buildTokenElement(PyElementTypes.IMAGINARY_LITERAL_EXPRESSION, myBuilder);
      return true;
    }
    else if (firstToken == PyTokenTypes.NONE_KEYWORD) {
      buildTokenElement(PyElementTypes.NONE_LITERAL_EXPRESSION, myBuilder);
      return true;
    }
    else if (firstToken == PyTokenTypes.TRUE_KEYWORD ||
             firstToken == PyTokenTypes.FALSE_KEYWORD ||
             firstToken == PyTokenTypes.DEBUG_KEYWORD) {
      buildTokenElement(PyElementTypes.BOOL_LITERAL_EXPRESSION, myBuilder);
      return true;
    }
    else if (PyTokenTypes.STRING_NODES.contains(firstToken)) {
      return parseStringLiteralExpression();
    }
    else if (firstToken == PyTokenTypes.LPAR) {
      parseParenthesizedExpression(isTargetExpression);
      return true;
    }
    else if (firstToken == PyTokenTypes.LBRACKET) {
      parseListLiteralExpression(myBuilder, isTargetExpression);
      return true;
    }
    else if (firstToken == PyTokenTypes.LBRACE) {
      parseDictOrSetDisplay();
      return true;
    }
    else if (firstToken == PyTokenTypes.TICK) {
      parseReprExpression(myBuilder);
      return true;
    }
    else if (parseEllipsis()) {
      return true;
    }
    return false;
  }

  /**
   * Parses a run of adjacent string tokens as a single (implicitly
   * concatenated) string literal expression.
   *
   * @return true if at least one string token was present.
   */
  public boolean parseStringLiteralExpression() {
    final PsiBuilder builder = myContext.getBuilder();
    if (PyTokenTypes.STRING_NODES.contains(builder.getTokenType())) {
      final PsiBuilder.Marker marker = builder.mark();
      while (PyTokenTypes.STRING_NODES.contains(builder.getTokenType())) {
        nextToken();
      }
      marker.done(PyElementTypes.STRING_LITERAL_EXPRESSION);
      return true;
    }
    return false;
  }

  /**
   * Parses "[...]": either a list literal or, when the first element is
   * followed by "for"/"async for", a list comprehension.
   */
  private void parseListLiteralExpression(final PsiBuilder builder, boolean isTargetExpression) {
    LOG.assertTrue(builder.getTokenType() == PyTokenTypes.LBRACKET);
    final PsiBuilder.Marker expr = builder.mark();
    builder.advanceLexer();
    if (builder.getTokenType() == PyTokenTypes.RBRACKET) {
      // Empty list "[]".
      builder.advanceLexer();
      expr.done(PyElementTypes.LIST_LITERAL_EXPRESSION);
      return;
    }
    if (!parseSingleExpression(isTargetExpression)) {
      builder.error(message("PARSE.expected.expression"));
    }
    if (atForOrAsyncFor()) {
      parseComprehension(expr, PyTokenTypes.RBRACKET, PyElementTypes.LIST_COMP_EXPRESSION);
    }
    else {
      while (builder.getTokenType() != PyTokenTypes.RBRACKET) {
        if (!matchToken(PyTokenTypes.COMMA)) {
          builder.error("expected ',' or ']'");
        }
        if (atToken(PyTokenTypes.RBRACKET)) {
          break;  // trailing comma
        }
        if (!parseSingleExpression(isTargetExpression)) {
          builder.error(message("PARSE.expected.expr.or.comma.or.bracket"));
          break;
        }
      }
      checkMatches(PyTokenTypes.RBRACKET, message("PARSE.expected.rbracket"));
      expr.done(PyElementTypes.LIST_LITERAL_EXPRESSION);
    }
  }

  /**
   * Parses the "for ... in ... [if ...]*" tail of a comprehension. Caller must
   * be positioned at the FOR keyword and pass the marker opened before the
   * result expression.
   *
   * @param endToken closing bracket/brace/paren to consume, or null when the
   *                 comprehension is not delimited (bare generator argument).
   * @param exprType the comprehension node type to close the marker with.
   */
  private void parseComprehension(PsiBuilder.Marker expr,
                                  @Nullable final IElementType endToken,
                                  final IElementType exprType) {
    assertCurrentToken(PyTokenTypes.FOR_KEYWORD);
    while (true) {
      myBuilder.advanceLexer();
      parseExpression(true, true);  // loop target: stop at 'in', parse as target
      parseComprehensionRange(exprType == PyElementTypes.GENERATOR_EXPRESSION);
      while (myBuilder.getTokenType() == PyTokenTypes.IF_KEYWORD) {
        myBuilder.advanceLexer();
        if (!parseOldExpression()) {
          myBuilder.error(message("PARSE.expected.expression"));
        }
      }
      if (atForOrAsyncFor()) {
        continue;  // nested "for" clause
      }
      if (endToken == null || matchToken(endToken)) {
        break;
      }
      myBuilder.error(message("PARSE.expected.for.or.bracket"));
      break;
    }
    expr.done(exprType);
  }

  /**
   * Parses the iterable after 'in' in a comprehension. Generator expressions
   * take a single or-test; other comprehensions allow a tuple.
   */
  protected void parseComprehensionRange(boolean generatorExpression) {
    checkMatches(PyTokenTypes.IN_KEYWORD, "'in' expected");
    boolean result;
    if (generatorExpression) {
      result = parseORTestExpression(false, false);
    }
    else {
      result = parseTupleExpression(false, false, true);
    }
    if (!result) {
      myBuilder.error("expression expected");
    }
  }

  /**
   * Parses "{...}": empty dict, dict literal, set literal, dict comprehension
   * or set comprehension, disambiguated by what follows the first expression
   * (':' means dict, ',' or '}' means set, 'for' means set comprehension).
   */
  private void parseDictOrSetDisplay() {
    LOG.assertTrue(myBuilder.getTokenType() == PyTokenTypes.LBRACE);
    final PsiBuilder.Marker expr = myBuilder.mark();
    myBuilder.advanceLexer();
    if (matchToken(PyTokenTypes.RBRACE)) {
      // "{}" is always an (empty) dict.
      expr.done(PyElementTypes.DICT_LITERAL_EXPRESSION);
      return;
    }
    if (atToken(PyTokenTypes.EXP)) {
      // "{**x, ...}" — dict unpacking, PEP 448.
      if (!parseDoubleStarExpression(false)) {
        myBuilder.error("expression expected");
        expr.done(PyElementTypes.DICT_LITERAL_EXPRESSION);
        return;
      }
      parseDictLiteralContentTail(expr);
      return;
    }
    final PsiBuilder.Marker firstExprMarker = myBuilder.mark();
    if (!parseSingleExpression(false)) {
      myBuilder.error("expression expected");
      firstExprMarker.drop();
      expr.done(PyElementTypes.DICT_LITERAL_EXPRESSION);
      return;
    }
    if (matchToken(PyTokenTypes.COLON)) {
      parseDictLiteralTail(expr, firstExprMarker);
    }
    else if (atToken(PyTokenTypes.COMMA) || atToken(PyTokenTypes.RBRACE)) {
      firstExprMarker.drop();
      parseSetLiteralTail(expr);
    }
    else if (atForOrAsyncFor()) {
      firstExprMarker.drop();
      parseComprehension(expr, PyTokenTypes.RBRACE, PyElementTypes.SET_COMP_EXPRESSION);
    }
    else {
      myBuilder.error("expression expected");
      firstExprMarker.drop();
      expr.done(PyElementTypes.DICT_LITERAL_EXPRESSION);
    }
  }

  /**
   * Continues a dict display after "key :" has been consumed. Parses the first
   * value, then either a dict comprehension or the remaining key/value pairs.
   */
  private void parseDictLiteralTail(PsiBuilder.Marker startMarker, PsiBuilder.Marker firstKeyValueMarker) {
    if (!parseSingleExpression(false)) {
      myBuilder.error("expression expected");
      firstKeyValueMarker.done(PyElementTypes.KEY_VALUE_EXPRESSION);
      if (atToken(PyTokenTypes.RBRACE)) {
        myBuilder.advanceLexer();
      }
      startMarker.done(PyElementTypes.DICT_LITERAL_EXPRESSION);
      return;
    }
    firstKeyValueMarker.done(PyElementTypes.KEY_VALUE_EXPRESSION);
    if (atForOrAsyncFor()) {
      parseComprehension(startMarker, PyTokenTypes.RBRACE, PyElementTypes.DICT_COMP_EXPRESSION);
    }
    else {
      parseDictLiteralContentTail(startMarker);
    }
  }

  /** Parses ", key: value | **expr" pairs until the closing brace. */
  private void parseDictLiteralContentTail(PsiBuilder.Marker startMarker) {
    while (myBuilder.getTokenType() != PyTokenTypes.RBRACE) {
      checkMatches(PyTokenTypes.COMMA, message("PARSE.expected.comma"));
      if (atToken(PyTokenTypes.EXP)) {
        if (!parseDoubleStarExpression(false)) {
          break;
        }
      }
      else {
        if (!parseKeyValueExpression()) {
          break;
        }
      }
    }
    checkMatches(PyTokenTypes.RBRACE, message("PARSE.expected.rbrace"));
    startMarker.done(PyElementTypes.DICT_LITERAL_EXPRESSION);
  }

  /**
   * Parses one "key : value" pair.
   *
   * @return true on success; on failure the marker is dropped.
   */
  private boolean parseKeyValueExpression() {
    final PsiBuilder.Marker marker = myBuilder.mark();
    if (!parseSingleExpression(false)) {
      marker.drop();
      return false;
    }
    checkMatches(PyTokenTypes.COLON, message("PARSE.expected.colon"));
    if (!parseSingleExpression(false)) {
      myBuilder.error("value expression expected");
      marker.drop();
      return false;
    }
    marker.done(PyElementTypes.KEY_VALUE_EXPRESSION);
    return true;
  }

  /** Parses the remaining ", expr" elements of a set literal. */
  private void parseSetLiteralTail(PsiBuilder.Marker startMarker) {
    while (myBuilder.getTokenType() != PyTokenTypes.RBRACE) {
      checkMatches(PyTokenTypes.COMMA, message("PARSE.expected.comma"));
      if (!parseSingleExpression(false)) {
        break;
      }
    }
    checkMatches(PyTokenTypes.RBRACE, message("PARSE.expected.rbrace"));
    startMarker.done(PyElementTypes.SET_LITERAL_EXPRESSION);
  }

  /**
   * Parses "(...)": empty tuple, generator expression, or a parenthesized
   * (possibly yield/tuple) expression. Recovers from garbage before ')' by
   * wrapping it in an error node.
   */
  private void parseParenthesizedExpression(boolean isTargetExpression) {
    LOG.assertTrue(myBuilder.getTokenType() == PyTokenTypes.LPAR);
    final PsiBuilder.Marker expr = myBuilder.mark();
    myBuilder.advanceLexer();
    if (myBuilder.getTokenType() == PyTokenTypes.RPAR) {
      // "()" is the empty tuple.
      myBuilder.advanceLexer();
      expr.done(PyElementTypes.TUPLE_EXPRESSION);
    }
    else {
      parseYieldOrTupleExpression(isTargetExpression);
      if (atForOrAsyncFor()) {
        parseComprehension(expr, PyTokenTypes.RPAR, PyElementTypes.GENERATOR_EXPRESSION);
      }
      else {
        // Skip any unexpected tokens up to ')' / end of line so the error is
        // localized instead of derailing the rest of the file.
        final PsiBuilder.Marker err = myBuilder.mark();
        boolean empty = true;
        while (myBuilder.getTokenType() != PyTokenTypes.RPAR &&
               myBuilder.getTokenType() != PyTokenTypes.LINE_BREAK &&
               myBuilder.getTokenType() != PyTokenTypes.STATEMENT_BREAK) {
          myBuilder.advanceLexer();
          empty = false;
        }
        if (!empty) {
          err.error("Unexpected expression syntax");
        }
        else {
          err.drop();
        }
        checkMatches(PyTokenTypes.RPAR, message("PARSE.expected.rpar"));
        expr.done(PyElementTypes.PARENTHESIZED_EXPRESSION);
      }
    }
  }

  /** Parses the Python 2 backtick repr form "`expr`". */
  private void parseReprExpression(PsiBuilder builder) {
    LOG.assertTrue(builder.getTokenType() == PyTokenTypes.TICK);
    final PsiBuilder.Marker expr = builder.mark();
    builder.advanceLexer();
    parseExpression();
    checkMatches(PyTokenTypes.TICK, message("PARSE.expected.tick"));
    expr.done(PyElementTypes.REPR_EXPRESSION);
  }

  /**
   * Parses a trailer chain: attribute access, calls and subscriptions after a
   * primary. In a chain "a.b.c" all members but the last are references; only
   * the last may become a target. When the first pass mis-classifies the head
   * as a target, the whole chain is rolled back and re-parsed (the
   * recastFirstIdentifier / recastQualifier flags drive that retry).
   *
   * @return true if a member expression was parsed.
   */
  public boolean parseMemberExpression(boolean isTargetExpression) {
    // in sequence a.b.... .c all members but last are always references, and the last may be target.
    boolean recastFirstIdentifier = false;
    boolean recastQualifier = false;
    do {
      boolean firstIdentifierIsTarget = isTargetExpression && !recastFirstIdentifier;
      PsiBuilder.Marker expr = myBuilder.mark();
      if (!parsePrimaryExpression(firstIdentifierIsTarget)) {
        expr.drop();
        return false;
      }
      while (true) {
        final IElementType tokenType = myBuilder.getTokenType();
        if (tokenType == PyTokenTypes.DOT) {
          if (firstIdentifierIsTarget) {
            // "a.b" with head parsed as target: retry with head as reference.
            recastFirstIdentifier = true;
            expr.rollbackTo();
            break;
          }
          myBuilder.advanceLexer();
          checkMatches(PyTokenTypes.IDENTIFIER, message("PARSE.expected.name"));
          if (isTargetExpression && !recastQualifier && !atAnyOfTokens(PyTokenTypes.DOT, PyTokenTypes.LPAR, PyTokenTypes.LBRACKET)) {
            expr.done(PyElementTypes.TARGET_EXPRESSION);
          }
          else {
            expr.done(getReferenceType());
          }
          expr = expr.precede();
        }
        else if (tokenType == PyTokenTypes.LPAR) {
          parseArgumentList();
          expr.done(PyElementTypes.CALL_EXPRESSION);
          expr = expr.precede();
        }
        else if (tokenType == PyTokenTypes.LBRACKET) {
          myBuilder.advanceLexer();
          PsiBuilder.Marker sliceOrTupleStart = myBuilder.mark();
          PsiBuilder.Marker sliceItemStart = myBuilder.mark();
          if (atToken(PyTokenTypes.COLON)) {
            // "[:...]" — slice with empty lower bound.
            sliceOrTupleStart.drop();
            PsiBuilder.Marker sliceMarker = myBuilder.mark();
            sliceMarker.done(PyElementTypes.EMPTY_EXPRESSION);
            parseSliceEnd(expr, sliceItemStart);
          }
          else {
            boolean hadExpression = parseSingleExpression(false);
            if (atToken(PyTokenTypes.COLON)) {
              sliceOrTupleStart.drop();
              parseSliceEnd(expr, sliceItemStart);
            }
            else if (atToken(PyTokenTypes.COMMA)) {
              // Could be a slice list or a tuple subscript; try the slice list
              // first and reparse as tuple if no slice item materialized.
              sliceItemStart.done(PyElementTypes.SLICE_ITEM);
              if (!parseSliceListTail(expr, sliceOrTupleStart)) {
                sliceOrTupleStart.rollbackTo();
                if (!parseTupleExpression(false, false, false)) {
                  myBuilder.error("tuple expression expected");
                }
                checkMatches(PyTokenTypes.RBRACKET, message("PARSE.expected.rbracket"));
                expr.done(PyElementTypes.SUBSCRIPTION_EXPRESSION);
              }
            }
            else {
              if (!hadExpression) {
                myBuilder.error("expression expected");
              }
              sliceOrTupleStart.drop();
              sliceItemStart.drop();
              checkMatches(PyTokenTypes.RBRACKET, message("PARSE.expected.rbracket"));
              expr.done(PyElementTypes.SUBSCRIPTION_EXPRESSION);
            }
          }
          if (isTargetExpression && !recastQualifier) {
            recastFirstIdentifier = true; // subscription is always a reference
            recastQualifier = true; // recast non-first qualifiers too
            expr.rollbackTo();
            break;
          }
          expr = expr.precede();
        }
        else {
          expr.drop();
          break;
        }
        recastFirstIdentifier = false; // it is true only after a break; normal flow always unsets it.
        // recastQualifier is untouched, it remembers whether qualifiers were already recast
      }
    }
    while (recastFirstIdentifier);
    return true;
  }

  /**
   * Parses "..." (three DOT tokens) as an Ellipsis literal; rolls back if only
   * one or two dots are present.
   */
  private boolean parseEllipsis() {
    if (atToken(PyTokenTypes.DOT)) {
      final PsiBuilder.Marker maybeEllipsis = myBuilder.mark();
      myBuilder.advanceLexer();
      if (matchToken(PyTokenTypes.DOT) && matchToken(PyTokenTypes.DOT)) {
        maybeEllipsis.done(PyElementTypes.NONE_LITERAL_EXPRESSION);
        return true;
      }
      maybeEllipsis.rollbackTo();
    }
    return false;
  }

  // Recovery sets used while parsing subscript/slice contents.
  private static final TokenSet BRACKET_OR_COMMA = TokenSet.create(PyTokenTypes.RBRACKET, PyTokenTypes.COMMA);
  private static final TokenSet BRACKET_COLON_COMMA = TokenSet.create(PyTokenTypes.RBRACKET, PyTokenTypes.COLON, PyTokenTypes.COMMA);

  /**
   * Parses the remainder of a slice after the first ':' — the upper bound and
   * optional step — then hands off to {@link #parseSliceListTail}.
   */
  public void parseSliceEnd(PsiBuilder.Marker exprStart, PsiBuilder.Marker sliceItemStart) {
    myBuilder.advanceLexer();
    if (atToken(PyTokenTypes.RBRACKET)) {
      // "[x:]" — empty upper bound.
      PsiBuilder.Marker sliceMarker = myBuilder.mark();
      sliceMarker.done(PyElementTypes.EMPTY_EXPRESSION);
      sliceItemStart.done(PyElementTypes.SLICE_ITEM);
      nextToken();
      exprStart.done(PyElementTypes.SLICE_EXPRESSION);
      return;
    }
    else {
      if (atToken(PyTokenTypes.COLON)) {
        // "[x::...]" — empty upper bound before the step.
        PsiBuilder.Marker sliceMarker = myBuilder.mark();
        sliceMarker.done(PyElementTypes.EMPTY_EXPRESSION);
      }
      else {
        parseSingleExpression(false);
      }
      if (!BRACKET_COLON_COMMA.contains(myBuilder.getTokenType())) {
        myBuilder.error(message("PARSE.expected.colon.or.rbracket"));
      }
      if (matchToken(PyTokenTypes.COLON)) {
        parseSingleExpression(false);
      }
      sliceItemStart.done(PyElementTypes.SLICE_ITEM);
      if (!BRACKET_OR_COMMA.contains(myBuilder.getTokenType())) {
        myBuilder.error("']' or ',' expected");
      }
    }
    parseSliceListTail(exprStart, null);
  }

  /**
   * Parses the comma-separated tail of a subscript that may be a slice list.
   *
   * @param sliceOrTupleStart marker to drop once slice syntax is confirmed,
   *                          or null when a slice is already known.
   * @return true if slice syntax (a ':') was seen; false means the caller
   *         should reparse the content as a tuple.
   */
  private boolean parseSliceListTail(PsiBuilder.Marker exprStart, @Nullable PsiBuilder.Marker sliceOrTupleStart) {
    boolean inSlice = sliceOrTupleStart == null;
    while (atToken(PyTokenTypes.COMMA)) {
      nextToken();
      PsiBuilder.Marker sliceItemStart = myBuilder.mark();
      parseTestExpression(false, false);
      if (matchToken(PyTokenTypes.COLON)) {
        inSlice = true;
        parseTestExpression(false, false);
        if (matchToken(PyTokenTypes.COLON)) {
          parseTestExpression(false, false);
        }
      }
      sliceItemStart.done(PyElementTypes.SLICE_ITEM);
      if (!BRACKET_OR_COMMA.contains(myBuilder.getTokenType())) {
        myBuilder.error("']' or ',' expected");
        break;
      }
    }
    checkMatches(PyTokenTypes.RBRACKET, message("PARSE.expected.rbracket"));
    if (inSlice) {
      if (sliceOrTupleStart != null) {
        sliceOrTupleStart.drop();
      }
      exprStart.done(PyElementTypes.SLICE_EXPRESSION);
    }
    return inSlice;
  }

  /**
   * Parses a call argument list "(...)": positional, *args / **kwargs,
   * keyword arguments and the bare generator-expression argument form
   * "f(x for x in y)".
   */
  public void parseArgumentList() {
    LOG.assertTrue(myBuilder.getTokenType() == PyTokenTypes.LPAR);
    final PsiBuilder.Marker arglist = myBuilder.mark();
    myBuilder.advanceLexer();
    // Speculative marker: becomes the generator expression node if the single
    // argument turns out to be "expr for ...".
    PsiBuilder.Marker genexpr = myBuilder.mark();
    int argNumber = 0;
    while (myBuilder.getTokenType() != PyTokenTypes.RPAR) {
      argNumber++;
      if (argNumber > 1) {
        if (argNumber == 2 && atForOrAsyncFor() && genexpr != null) {
          parseComprehension(genexpr, null, PyElementTypes.GENERATOR_EXPRESSION);
          genexpr = null;
          continue;
        }
        else if (matchToken(PyTokenTypes.COMMA)) {
          if (atToken(PyTokenTypes.RPAR)) {
            break;  // trailing comma
          }
        }
        else {
          myBuilder.error(message("PARSE.expected.comma.or.rpar"));
          break;
        }
      }
      if (myBuilder.getTokenType() == PyTokenTypes.MULT || myBuilder.getTokenType() == PyTokenTypes.EXP) {
        final PsiBuilder.Marker starArgMarker = myBuilder.mark();
        myBuilder.advanceLexer();
        if (!parseSingleExpression(false)) {
          myBuilder.error(message("PARSE.expected.expression"));
        }
        starArgMarker.done(PyElementTypes.STAR_ARGUMENT_EXPRESSION);
      }
      else {
        if (isIdentifier(myBuilder)) {
          // Look ahead for "name=value"; roll back if it's a plain expression.
          final PsiBuilder.Marker keywordArgMarker = myBuilder.mark();
          advanceIdentifierLike(myBuilder);
          if (myBuilder.getTokenType() == PyTokenTypes.EQ) {
            myBuilder.advanceLexer();
            if (!parseSingleExpression(false)) {
              myBuilder.error(message("PARSE.expected.expression"));
            }
            keywordArgMarker.done(PyElementTypes.KEYWORD_ARGUMENT_EXPRESSION);
            continue;
          }
          keywordArgMarker.rollbackTo();
        }
        if (!parseSingleExpression(false)) {
          myBuilder.error(message("PARSE.expected.expression"));
          break;
        }
      }
    }
    if (genexpr != null) {
      genexpr.drop();
    }
    checkMatches(PyTokenTypes.RPAR, message("PARSE.expected.rpar"));
    arglist.done(PyElementTypes.ARGUMENT_LIST);
  }

  /** Parses an expression (tuple allowed); returns false if none is present. */
  public boolean parseExpressionOptional() {
    return parseTupleExpression(false, false, false);
  }

  /** Same as {@link #parseExpressionOptional()} with target-expression mode. */
  public boolean parseExpressionOptional(boolean isTargetExpression) {
    return parseTupleExpression(false, isTargetExpression, false);
  }

  /** Parses an expression, reporting an error if none is present. */
  public void parseExpression() {
    if (!parseExpressionOptional()) {
      myBuilder.error(message("PARSE.expected.expression"));
    }
  }

  /**
   * Parses an expression, reporting an error if none is present.
   *
   * @param stopOnIn stop before an 'in' keyword (comprehension targets).
   * @param isTargetExpression parse identifiers as assignment targets.
   */
  public void parseExpression(boolean stopOnIn, boolean isTargetExpression) {
    if (!parseTupleExpression(stopOnIn, isTargetExpression, false)) {
      myBuilder.error(message("PARSE.expected.expression"));
    }
  }

  /**
   * Parses either a "yield [from] ..." expression or a plain tuple expression.
   *
   * @return true if something was parsed.
   */
  public boolean parseYieldOrTupleExpression(final boolean isTargetExpression) {
    if (myBuilder.getTokenType() == PyTokenTypes.YIELD_KEYWORD) {
      PsiBuilder.Marker yieldExpr = myBuilder.mark();
      myBuilder.advanceLexer();
      if (myBuilder.getTokenType() == PyTokenTypes.FROM_KEYWORD) {
        // "yield from" requires an operand.
        myBuilder.advanceLexer();
        final boolean parsed = parseTupleExpression(false, isTargetExpression, false);
        if (!parsed) {
          myBuilder.error(message("PARSE.expected.expression"));
        }
        yieldExpr.done(PyElementTypes.YIELD_EXPRESSION);
        return parsed;
      }
      else {
        // Bare "yield" is valid; the operand is optional.
        parseTupleExpression(false, isTargetExpression, false);
        yieldExpr.done(PyElementTypes.YIELD_EXPRESSION);
        return true;
      }
    }
    else {
      return parseTupleExpression(false, isTargetExpression, false);
    }
  }

  /**
   * Parses "expr [, expr]*" — a bare tuple without parentheses — wrapping it
   * in a TUPLE_EXPRESSION only when a comma is actually present.
   */
  protected boolean parseTupleExpression(boolean stopOnIn, boolean isTargetExpression, final boolean oldTest) {
    PsiBuilder.Marker expr = myBuilder.mark();
    boolean exprParseResult = oldTest ? parseOldTestExpression() : parseTestExpression(stopOnIn, isTargetExpression);
    if (!exprParseResult) {
      expr.drop();
      return false;
    }
    if (myBuilder.getTokenType() == PyTokenTypes.COMMA) {
      while (myBuilder.getTokenType() == PyTokenTypes.COMMA) {
        myBuilder.advanceLexer();
        PsiBuilder.Marker expr2 = myBuilder.mark();
        exprParseResult = oldTest ? parseOldTestExpression() : parseTestExpression(stopOnIn, isTargetExpression);
        if (!exprParseResult) {
          expr2.rollbackTo();  // trailing comma: un-consume whatever followed
          break;
        }
        expr2.drop();
      }
      expr.done(PyElementTypes.TUPLE_EXPRESSION);
    }
    else {
      expr.drop();
    }
    return true;
  }

  /** Parses a single (non-tuple) expression. */
  public boolean parseSingleExpression(boolean isTargetExpression) {
    return parseTestExpression(false, isTargetExpression);
  }

  /** Parses an old-style expression (lambda or or-test), used in comprehension conditions. */
  public boolean parseOldExpression() {
    if (myBuilder.getTokenType() == PyTokenTypes.LAMBDA_KEYWORD) {
      return parseLambdaExpression(false);
    }
    return parseORTestExpression(false, false);
  }

  /**
   * Parses a test expression: lambda, or-test, or a conditional expression
   * "a if cond else b". Rolls back when the 'if' turns out to start a regular
   * if-statement (detected by a following ':').
   */
  private boolean parseTestExpression(boolean stopOnIn, boolean isTargetExpression) {
    if (myBuilder.getTokenType() == PyTokenTypes.LAMBDA_KEYWORD) {
      return parseLambdaExpression(false);
    }
    PsiBuilder.Marker condExpr = myBuilder.mark();
    if (!parseORTestExpression(stopOnIn, isTargetExpression)) {
      condExpr.drop();
      return false;
    }
    if (myBuilder.getTokenType() == PyTokenTypes.IF_KEYWORD) {
      PsiBuilder.Marker conditionMarker = myBuilder.mark();
      myBuilder.advanceLexer();
      if (!parseORTestExpression(stopOnIn, isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      else {
        if (myBuilder.getTokenType() != PyTokenTypes.ELSE_KEYWORD) {
          if (atToken(PyTokenTypes.COLON)) { // it's regular if statement. Bracket wasn't closed or new line was lost
            conditionMarker.rollbackTo();
            condExpr.drop();
            return true;
          }
          else {
            myBuilder.error(message("PARSE.expected.else"));
          }
        }
        else {
          myBuilder.advanceLexer();
          if (!parseTestExpression(stopOnIn, isTargetExpression)) {
            myBuilder.error(message("PARSE.expected.expression"));
          }
        }
      }
      conditionMarker.drop();
      condExpr.done(PyElementTypes.CONDITIONAL_EXPRESSION);
    }
    else {
      condExpr.drop();
    }
    return true;
  }

  /** Parses an old-style test: lambda (old body form) or or-test. */
  private boolean parseOldTestExpression() {
    if (myBuilder.getTokenType() == PyTokenTypes.LAMBDA_KEYWORD) {
      return parseLambdaExpression(true);
    }
    return parseORTestExpression(false, false);
  }

  /**
   * Parses "lambda params: body". Caller is positioned at the lambda keyword.
   *
   * @param oldTest true when the body must be an old-style test expression.
   */
  private boolean parseLambdaExpression(final boolean oldTest) {
    PsiBuilder.Marker expr = myBuilder.mark();
    myBuilder.advanceLexer();
    getFunctionParser().parseParameterListContents(PyTokenTypes.COLON, false, true);
    boolean parseExpressionResult = oldTest ? parseOldTestExpression() : parseSingleExpression(false);
    if (!parseExpressionResult) {
      myBuilder.error(message("PARSE.expected.expression"));
    }
    expr.done(PyElementTypes.LAMBDA_EXPRESSION);
    return true;
  }

  /** Parses a left-associative chain of 'or' operations. */
  protected boolean parseORTestExpression(boolean stopOnIn, boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseANDTestExpression(stopOnIn, isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (myBuilder.getTokenType() == PyTokenTypes.OR_KEYWORD) {
      myBuilder.advanceLexer();
      if (!parseANDTestExpression(stopOnIn, isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a left-associative chain of 'and' operations. */
  private boolean parseANDTestExpression(boolean stopOnIn, boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseNOTTestExpression(stopOnIn, isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (myBuilder.getTokenType() == PyTokenTypes.AND_KEYWORD) {
      myBuilder.advanceLexer();
      if (!parseNOTTestExpression(stopOnIn, isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a (possibly nested) 'not' prefix, then a comparison. */
  private boolean parseNOTTestExpression(boolean stopOnIn, boolean isTargetExpression) {
    if (myBuilder.getTokenType() == PyTokenTypes.NOT_KEYWORD) {
      final PsiBuilder.Marker expr = myBuilder.mark();
      myBuilder.advanceLexer();
      if (!parseNOTTestExpression(stopOnIn, isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.PREFIX_EXPRESSION);
      return true;
    }
    else {
      return parseComparisonExpression(stopOnIn, isTargetExpression);
    }
  }

  /**
   * Parses comparison chains (<, >, ==, in, not in, is, is not, ...),
   * left-associatively nesting BINARY_EXPRESSION nodes.
   */
  private boolean parseComparisonExpression(boolean stopOnIn, boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseStarExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    if (stopOnIn && atToken(PyTokenTypes.IN_KEYWORD)) {
      expr.drop();
      return true;
    }
    while (PyTokenTypes.COMPARISON_OPERATIONS.contains(myBuilder.getTokenType())) {
      if (atToken(PyTokenTypes.NOT_KEYWORD)) {
        // Only "not in" is a comparison; a bare "not" ends the chain.
        PsiBuilder.Marker notMarker = myBuilder.mark();
        myBuilder.advanceLexer();
        if (!atToken(PyTokenTypes.IN_KEYWORD)) {
          notMarker.rollbackTo();
          break;
        }
        notMarker.drop();
        myBuilder.advanceLexer();
      }
      else if (atToken(PyTokenTypes.IS_KEYWORD)) {
        myBuilder.advanceLexer();
        if (myBuilder.getTokenType() == PyTokenTypes.NOT_KEYWORD) {
          myBuilder.advanceLexer();  // "is not"
        }
      }
      else {
        myBuilder.advanceLexer();
      }
      if (!parseBitwiseORExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses "*expr" (star/unpacking expression) or falls through to bitwise-or. */
  private boolean parseStarExpression(boolean isTargetExpression) {
    if (atToken(PyTokenTypes.MULT)) {
      PsiBuilder.Marker starExpr = myBuilder.mark();
      nextToken();
      if (!parseBitwiseORExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
        starExpr.drop();
        return false;
      }
      starExpr.done(PyElementTypes.STAR_EXPRESSION);
      return true;
    }
    return parseBitwiseORExpression(isTargetExpression);
  }

  /** Parses "**expr" (double-star expression) or falls through to bitwise-or. */
  private boolean parseDoubleStarExpression(boolean isTargetExpression) {
    if (atToken(PyTokenTypes.EXP)) {
      PsiBuilder.Marker starExpr = myBuilder.mark();
      nextToken();
      if (!parseBitwiseORExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
        starExpr.drop();
        return false;
      }
      starExpr.done(PyElementTypes.DOUBLE_STAR_EXPRESSION);
      return true;
    }
    return parseBitwiseORExpression(isTargetExpression);
  }

  /** Parses a left-associative chain of '|' operations. */
  private boolean parseBitwiseORExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseBitwiseXORExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (atToken(PyTokenTypes.OR)) {
      myBuilder.advanceLexer();
      if (!parseBitwiseXORExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a left-associative chain of '^' operations. */
  private boolean parseBitwiseXORExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseBitwiseANDExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (atToken(PyTokenTypes.XOR)) {
      myBuilder.advanceLexer();
      if (!parseBitwiseANDExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a left-associative chain of '&' operations. */
  private boolean parseBitwiseANDExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseShiftExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (atToken(PyTokenTypes.AND)) {
      myBuilder.advanceLexer();
      if (!parseShiftExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a left-associative chain of '<<' / '>>' operations. */
  private boolean parseShiftExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseAdditiveExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (PyTokenTypes.SHIFT_OPERATIONS.contains(myBuilder.getTokenType())) {
      myBuilder.advanceLexer();
      if (!parseAdditiveExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a left-associative chain of '+' / '-' operations. */
  private boolean parseAdditiveExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseMultiplicativeExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (PyTokenTypes.ADDITIVE_OPERATIONS.contains(myBuilder.getTokenType())) {
      myBuilder.advanceLexer();
      if (!parseMultiplicativeExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses a left-associative chain of '*', '/', '//', '%', '@' operations. */
  private boolean parseMultiplicativeExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseUnaryExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    while (PyTokenTypes.MULTIPLICATIVE_OPERATIONS.contains(myBuilder.getTokenType())) {
      myBuilder.advanceLexer();
      if (!parseUnaryExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
      expr = expr.precede();
    }
    expr.drop();
    return true;
  }

  /** Parses unary prefix operators ('+', '-', '~'), then a power expression. */
  protected boolean parseUnaryExpression(boolean isTargetExpression) {
    final IElementType tokenType = myBuilder.getTokenType();
    if (PyTokenTypes.UNARY_OPERATIONS.contains(tokenType)) {
      final PsiBuilder.Marker expr = myBuilder.mark();
      myBuilder.advanceLexer();
      if (!parseUnaryExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.PREFIX_EXPRESSION);
      return true;
    }
    else {
      return parsePowerExpression(isTargetExpression);
    }
  }

  /** Parses "base ** exp"; '**' is right-associative via the unary recursion. */
  private boolean parsePowerExpression(boolean isTargetExpression) {
    PsiBuilder.Marker expr = myBuilder.mark();
    if (!parseAwaitExpression(isTargetExpression)) {
      expr.drop();
      return false;
    }
    if (myBuilder.getTokenType() == PyTokenTypes.EXP) {
      myBuilder.advanceLexer();
      if (!parseUnaryExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
      }
      expr.done(PyElementTypes.BINARY_EXPRESSION);
    }
    else {
      expr.drop();
    }
    return true;
  }

  /** Parses "await expr"; awaiting is never a valid assignment target. */
  private boolean parseAwaitExpression(boolean isTargetExpression) {
    if (atToken(PyTokenTypes.AWAIT_KEYWORD)) {
      final PsiBuilder.Marker expr = myBuilder.mark();
      myBuilder.advanceLexer();
      if (!parseMemberExpression(isTargetExpression)) {
        myBuilder.error(message("PARSE.expected.expression"));
        expr.done(PyElementTypes.PREFIX_EXPRESSION);
      }
      else {
        if (isTargetExpression) {
          expr.error("can't assign to await expression");
        }
        else {
          expr.done(PyElementTypes.PREFIX_EXPRESSION);
        }
      }
      return true;
    }
    else {
      return parseMemberExpression(isTargetExpression);
    }
  }

  /**
   * Detects (and consumes the 'async' of) a "for" / "async for" comprehension
   * clause. Reports an error when 'async' is not followed by 'for'.
   */
  private boolean atForOrAsyncFor() {
    if (atToken(PyTokenTypes.FOR_KEYWORD)) {
      return true;
    }
    else if (matchToken(PyTokenTypes.ASYNC_KEYWORD)) {
      if (atToken(PyTokenTypes.FOR_KEYWORD)) {
        return true;
      }
      else {
        myBuilder.error("'for' expected");
        return false;
      }
    }
    return false;
  }
}
| |
// Copyright (C) 2010 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.project;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.gerrit.common.data.AccessSection;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRange;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.common.data.RefConfigSection;
import com.google.gerrit.common.errors.InvalidNameException;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.InternalUser;
import com.google.gerrit.server.git.GitRepositoryManager;
import dk.brics.automaton.RegExp;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTag;
import org.eclipse.jgit.revwalk.RevWalk;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/** Manages access control for Git references (aka branches, tags). */
public class RefControl {
// Access control for the enclosing project; this object refines it per-ref.
private final ProjectControl projectControl;
// Name of the reference (branch/tag) this control governs.
private final String refName;
/** All permissions that apply to this reference. */
private final PermissionCollection relevant;
/** Cached set of permissions matching this user. */
private final Map<String, List<PermissionRule>> effective;
// Lazily computed answers; null means "not yet computed".
private Boolean owner;
private Boolean canForgeAuthor;
private Boolean canForgeCommitter;
private Boolean isVisible;
/**
 * @param projectControl project-level access control this ref belongs to.
 * @param ref full ref name being controlled.
 * @param relevant all permission rules that can match this ref.
 */
RefControl(ProjectControl projectControl, String ref,
    PermissionCollection relevant) {
  this.projectControl = projectControl;
  this.refName = ref;
  this.relevant = relevant;
  // Per-permission cache, populated on demand.
  this.effective = new HashMap<String, List<PermissionRule>>();
}
/** @return the full name of the reference this object controls. */
public String getRefName() {
  return refName;
}
/** @return the project-level access control this ref control derives from. */
public ProjectControl getProjectControl() {
  return projectControl;
}
/** @return the user whose permissions are being evaluated. */
public CurrentUser getCurrentUser() {
  return projectControl.getCurrentUser();
}
/**
 * Creates a view of this ref's access for a different user.
 * If the relevant rules are user-specific they cannot be shared, so the
 * control is recomputed from scratch; otherwise the filtered rule set is
 * reused for the new user.
 */
public RefControl forUser(CurrentUser who) {
  final ProjectControl ctl = projectControl.forUser(who);
  return relevant.isUserSpecific()
      ? ctl.controlForRef(getRefName())
      : new RefControl(ctl, getRefName(), relevant);
}
/** Is this user a ref owner? */
/** Is this user a ref owner? Result is cached after the first call. */
public boolean isOwner() {
  if (owner == null) {
    // A ref-level OWNER grant or project ownership both confer ownership;
    // short-circuit keeps the evaluation order of the original branches.
    owner = canPerform(Permission.OWNER) || projectControl.isOwner();
  }
  return owner;
}
/** Can this user see this reference exists? */
public boolean isVisible() {
if (isVisible == null) {
isVisible =
(getCurrentUser() instanceof InternalUser || canPerform(Permission.READ))
&& canRead();
}
return isVisible;
}
/**
* True if this reference is visible by all REGISTERED_USERS
*/
public boolean isVisibleByRegisteredUsers() {
List<PermissionRule> access = relevant.getPermission(Permission.READ);
Set<ProjectRef> allows = Sets.newHashSet();
Set<ProjectRef> blocks = Sets.newHashSet();
for (PermissionRule rule : access) {
if (rule.isBlock()) {
blocks.add(relevant.getRuleProps(rule));
} else if (rule.getGroup().getUUID().equals(AccountGroup.ANONYMOUS_USERS)
|| rule.getGroup().getUUID().equals(AccountGroup.REGISTERED_USERS)) {
allows.add(relevant.getRuleProps(rule));
}
}
blocks.removeAll(allows);
return blocks.isEmpty() && !allows.isEmpty();
}
/**
* Determines whether the user can upload a change to the ref controlled by
* this object.
*
* @return {@code true} if the user specified can upload a change to the Git
* ref
*/
public boolean canUpload() {
return projectControl.controlForRef("refs/for/" + getRefName())
.canPerform(Permission.PUSH)
&& canWrite();
}
/** @return true if this user can submit merge patch sets to this ref */
public boolean canUploadMerges() {
return projectControl.controlForRef("refs/for/" + getRefName())
.canPerform(Permission.PUSH_MERGE)
&& canWrite();
}
/** @return true if this user can rebase changes on this ref */
public boolean canRebase() {
return canPerform(Permission.REBASE)
&& canWrite();
}
/** @return true if this user can submit patch sets to this ref */
public boolean canSubmit() {
if (GitRepositoryManager.REF_CONFIG.equals(refName)) {
// Always allow project owners to submit configuration changes.
// Submitting configuration changes modifies the access control
// rules. Allowing this to be done by a non-project-owner opens
// a security hole enabling editing of access rules, and thus
// granting of powers beyond submitting to the configuration.
return projectControl.isOwner();
}
return canPerform(Permission.SUBMIT)
&& canWrite();
}
/** @return true if the user can update the reference as a fast-forward. */
public boolean canUpdate() {
if (GitRepositoryManager.REF_CONFIG.equals(refName)
&& !projectControl.isOwner()) {
// Pushing requires being at least project owner, in addition to push.
// Pushing configuration changes modifies the access control
// rules. Allowing this to be done by a non-project-owner opens
// a security hole enabling editing of access rules, and thus
// granting of powers beyond pushing to the configuration.
// On the AllProjects project the owner access right cannot be assigned,
// this why for the AllProjects project we allow administrators to push
// configuration changes if they have push without being project owner.
if (!(projectControl.getProjectState().isAllProjects() &&
getCurrentUser().getCapabilities().canAdministrateServer())) {
return false;
}
}
return canPerform(Permission.PUSH)
&& canWrite();
}
/** @return true if the user can rewind (force push) the reference. */
public boolean canForceUpdate() {
return (canPushWithForce() || canDelete()) && canWrite();
}
public boolean canWrite() {
return getProjectControl().getProject().getState().equals(
Project.State.ACTIVE);
}
public boolean canRead() {
return getProjectControl().getProject().getState().equals(
Project.State.READ_ONLY) || canWrite();
}
private boolean canPushWithForce() {
if (!canWrite() || (GitRepositoryManager.REF_CONFIG.equals(refName)
&& !projectControl.isOwner())) {
// Pushing requires being at least project owner, in addition to push.
// Pushing configuration changes modifies the access control
// rules. Allowing this to be done by a non-project-owner opens
// a security hole enabling editing of access rules, and thus
// granting of powers beyond pushing to the configuration.
return false;
}
return canForcePerform(Permission.PUSH);
}
/**
* Determines whether the user can create a new Git ref.
*
* @param rw revision pool {@code object} was parsed in.
* @param object the object the user will start the reference with.
* @return {@code true} if the user specified can create a new Git ref
*/
public boolean canCreate(RevWalk rw, RevObject object) {
if (!canWrite()) {
return false;
}
boolean owner;
switch (getCurrentUser().getAccessPath()) {
case REST_API:
case JSON_RPC:
owner = isOwner();
break;
default:
owner = false;
}
if (object instanceof RevCommit) {
return owner || canPerform(Permission.CREATE);
} else if (object instanceof RevTag) {
final RevTag tag = (RevTag) object;
try {
rw.parseBody(tag);
} catch (IOException e) {
return false;
}
// If tagger is present, require it matches the user's email.
//
final PersonIdent tagger = tag.getTaggerIdent();
if (tagger != null) {
boolean valid;
if (getCurrentUser() instanceof IdentifiedUser) {
final IdentifiedUser user = (IdentifiedUser) getCurrentUser();
final String addr = tagger.getEmailAddress();
valid = user.getEmailAddresses().contains(addr);
} else {
valid = false;
}
if (!valid && !owner && !canForgeCommitter()) {
return false;
}
}
// If the tag has a PGP signature, allow a lower level of permission
// than if it doesn't have a PGP signature.
//
if (tag.getFullMessage().contains("-----BEGIN PGP SIGNATURE-----\n")) {
return owner || canPerform(Permission.PUSH_SIGNED_TAG);
} else {
return owner || canPerform(Permission.PUSH_TAG);
}
} else {
return false;
}
}
/**
* Determines whether the user can delete the Git ref controlled by this
* object.
*
* @return {@code true} if the user specified can delete a Git ref.
*/
public boolean canDelete() {
if (!canWrite() || (GitRepositoryManager.REF_CONFIG.equals(refName))) {
// Never allow removal of the refs/meta/config branch.
// Deleting the branch would destroy all Gerrit specific
// metadata about the project, including its access rules.
// If a project is to be removed from Gerrit, its repository
// should be removed first.
return false;
}
switch (getCurrentUser().getAccessPath()) {
case REST_API:
case JSON_RPC:
return isOwner() || canPushWithForce();
case GIT:
return canPushWithForce();
default:
return false;
}
}
/** @return true if this user can forge the author line in a commit. */
public boolean canForgeAuthor() {
if (canForgeAuthor == null) {
canForgeAuthor = canPerform(Permission.FORGE_AUTHOR);
}
return canForgeAuthor;
}
/** @return true if this user can forge the committer line in a commit. */
public boolean canForgeCommitter() {
if (canForgeCommitter == null) {
canForgeCommitter = canPerform(Permission.FORGE_COMMITTER);
}
return canForgeCommitter;
}
/** @return true if this user can forge the server on the committer line. */
public boolean canForgeGerritServerIdentity() {
return canPerform(Permission.FORGE_SERVER);
}
/** @return true if this user can abandon a change for this ref */
public boolean canAbandon() {
return canPerform(Permission.ABANDON);
}
/** @return true if this user can remove a reviewer for a change. */
public boolean canRemoveReviewer() {
return canPerform(Permission.REMOVE_REVIEWER);
}
/** @return true if this user can view draft changes. */
public boolean canViewDrafts() {
return canPerform(Permission.VIEW_DRAFTS);
}
/** @return true if this user can publish draft changes. */
public boolean canPublishDrafts() {
return canPerform(Permission.PUBLISH_DRAFTS);
}
/** @return true if this user can delete draft changes. */
public boolean canDeleteDrafts() {
return canPerform(Permission.DELETE_DRAFTS);
}
/** @return true if this user can edit topic names. */
public boolean canEditTopicName() {
return canPerform(Permission.EDIT_TOPIC_NAME);
}
/** @return true if this user can force edit topic names. */
public boolean canForceEditTopicName() {
return canForcePerform(Permission.EDIT_TOPIC_NAME);
}
/** All value ranges of any allowed label permission. */
public List<PermissionRange> getLabelRanges() {
List<PermissionRange> r = new ArrayList<PermissionRange>();
for (Map.Entry<String, List<PermissionRule>> e : relevant.getDeclaredPermissions()) {
if (Permission.isLabel(e.getKey())) {
int min = 0;
int max = 0;
for (PermissionRule rule : e.getValue()) {
if (projectControl.match(rule)) {
min = Math.min(min, rule.getMin());
max = Math.max(max, rule.getMax());
}
}
if (min != 0 || max != 0) {
r.add(new PermissionRange(e.getKey(), min, max));
}
}
}
return r;
}
/** The range of permitted values associated with a label permission. */
public PermissionRange getRange(String permission) {
if (Permission.isLabel(permission)) {
return toRange(permission, access(permission));
}
return null;
}
private static class AllowedRange {
private int allowMin = 0;
private int allowMax = 0;
private int blockMin = Integer.MIN_VALUE;
private int blockMax = Integer.MAX_VALUE;
void update(PermissionRule rule) {
if (rule.isBlock()) {
blockMin = Math.max(blockMin, rule.getMin());
blockMax = Math.min(blockMax, rule.getMax());
} else {
allowMin = Math.min(allowMin, rule.getMin());
allowMax = Math.max(allowMax, rule.getMax());
}
}
int getAllowMin() {
return allowMin;
}
int getAllowMax() {
return allowMax;
}
int getBlockMin() {
// ALLOW wins over BLOCK on the same project
return Math.min(blockMin, allowMin - 1);
}
int getBlockMax() {
// ALLOW wins over BLOCK on the same project
return Math.max(blockMax, allowMax + 1);
}
}
private PermissionRange toRange(String permissionName,
List<PermissionRule> ruleList) {
Map<ProjectRef, AllowedRange> ranges = Maps.newHashMap();
for (PermissionRule rule : ruleList) {
ProjectRef p = relevant.getRuleProps(rule);
AllowedRange r = ranges.get(p);
if (r == null) {
r = new AllowedRange();
ranges.put(p, r);
}
r.update(rule);
}
int allowMin = 0;
int allowMax = 0;
int blockMin = Integer.MIN_VALUE;
int blockMax = Integer.MAX_VALUE;
for (AllowedRange r : ranges.values()) {
allowMin = Math.min(allowMin, r.getAllowMin());
allowMax = Math.max(allowMax, r.getAllowMax());
blockMin = Math.max(blockMin, r.getBlockMin());
blockMax = Math.min(blockMax, r.getBlockMax());
}
// BLOCK wins over ALLOW across projects
int min = Math.max(allowMin, blockMin + 1);
int max = Math.min(allowMax, blockMax - 1);
return new PermissionRange(permissionName, min, max);
}
/** True if the user has this permission. Works only for non labels. */
boolean canPerform(String permissionName) {
List<PermissionRule> access = access(permissionName);
Set<ProjectRef> allows = Sets.newHashSet();
Set<ProjectRef> blocks = Sets.newHashSet();
for (PermissionRule rule : access) {
if (rule.isBlock() && !rule.getForce()) {
blocks.add(relevant.getRuleProps(rule));
} else {
allows.add(relevant.getRuleProps(rule));
}
}
blocks.removeAll(allows);
return blocks.isEmpty() && !allows.isEmpty();
}
/** True if the user has force this permission. Works only for non labels. */
private boolean canForcePerform(String permissionName) {
List<PermissionRule> access = access(permissionName);
Set<ProjectRef> allows = Sets.newHashSet();
Set<ProjectRef> blocks = Sets.newHashSet();
for (PermissionRule rule : access) {
if (rule.isBlock()) {
blocks.add(relevant.getRuleProps(rule));
} else if (rule.getForce()) {
allows.add(relevant.getRuleProps(rule));
}
}
blocks.removeAll(allows);
return blocks.isEmpty() && !allows.isEmpty();
}
/** Rules for the given permission, or the empty list. */
private List<PermissionRule> access(String permissionName) {
List<PermissionRule> rules = effective.get(permissionName);
if (rules != null) {
return rules;
}
rules = relevant.getPermission(permissionName);
if (rules.isEmpty()) {
effective.put(permissionName, rules);
return rules;
}
if (rules.size() == 1) {
if (!projectControl.match(rules.get(0))) {
rules = Collections.emptyList();
}
effective.put(permissionName, rules);
return rules;
}
List<PermissionRule> mine = new ArrayList<PermissionRule>(rules.size());
for (PermissionRule rule : rules) {
if (projectControl.match(rule)) {
mine.add(rule);
}
}
if (mine.isEmpty()) {
mine = Collections.emptyList();
}
effective.put(permissionName, mine);
return mine;
}
public static boolean isRE(String refPattern) {
return refPattern.startsWith(AccessSection.REGEX_PREFIX);
}
public static String shortestExample(String pattern) {
if (isRE(pattern)) {
// Since Brics will substitute dot [.] with \0 when generating
// shortest example, any usage of dot will fail in
// Repository.isValidRefName() if not combined with star [*].
// To get around this, we substitute the \0 with an arbitrary
// accepted character.
return toRegExp(pattern).toAutomaton().getShortestExample(true).replace('\0', '-');
} else if (pattern.endsWith("/*")) {
return pattern.substring(0, pattern.length() - 1) + '1';
} else {
return pattern;
}
}
public static RegExp toRegExp(String refPattern) {
if (isRE(refPattern)) {
refPattern = refPattern.substring(1);
}
return new RegExp(refPattern, RegExp.NONE);
}
public static void validateRefPattern(String refPattern)
throws InvalidNameException {
if (refPattern.startsWith(RefConfigSection.REGEX_PREFIX)) {
if (!Repository.isValidRefName(RefControl.shortestExample(refPattern))) {
throw new InvalidNameException(refPattern);
}
} else if (refPattern.equals(RefConfigSection.ALL)) {
// This is a special case we have to allow, it fails below.
} else if (refPattern.endsWith("/*")) {
String prefix = refPattern.substring(0, refPattern.length() - 2);
if (!Repository.isValidRefName(prefix)) {
throw new InvalidNameException(refPattern);
}
} else if (!Repository.isValidRefName(refPattern)) {
throw new InvalidNameException(refPattern);
}
}
}
| |
package br.fdagostini;
/**
 * Static lookup tables of account data.
 *
 * NOTE(review): the codes look like a hierarchical chart-of-accounts
 * ("plano de contas") where each dotted segment adds one level of
 * nesting — TODO confirm against the consumer of these tables.
 * The data rows below are verbatim; do not reformat or reorder them.
 */
public class Dados {

    /**
     * Account table. Each row is {internal id, hierarchical account code}.
     * Ids are not contiguous (gaps exist, e.g. 220..224 are absent).
     */
    public static final String[][] CONTAS = new String[][]{
        {"1", "1.01"},
        {"2", "1.01.01"},
        {"3", "1.01.01.01"},
        {"4", "1.01.01.01.0001"},
        {"5", "1.01.01.02"},
        {"6", "1.01.01.02.0001"},
        {"7", "1.01.01.02.0002"},
        {"8", "1.01.02"},
        {"9", "1.01.03"},
        {"10", "1.01.03.01"},
        {"11", "1.01.03.01.0001"},
        {"12", "1.01.03.02"},
        {"13", "1.01.03.03"},
        {"14", "1.01.03.04"},
        {"15", "1.01.03.05"},
        {"16", "1.01.03.05.0001"},
        {"17", "1.01.03.05.0002"},
        {"18", "1.01.03.05.0003"},
        {"19", "1.01.03.05.0004"},
        {"20", "1.01.01.02.0003"},
        {"21", "1.01.01.02.0005"},
        {"22", "1.01.01.02.0006"},
        {"23", "1.01.01.02.0007"},
        {"24", "1.01.03.01.0002"},
        {"25", "1.01.03.01.0003"},
        {"26", "1.01.01.01.9999"},
        {"27", "1.01.01.02.9999"},
        {"28", "1.01.03.05.0005"},
        {"29", "1.01.03.05.9999"},
        {"30", "1.01.04"},
        {"31", "1.01.05"},
        {"32", "1.01.05.01"},
        {"33", "1.01.05.01.0001"},
        {"34", "1.01.05.02"},
        {"35", "1.01.05.02.0001"},
        {"36", "1.01.05.02.0002"},
        {"37", "1.01.05.02.0003"},
        {"38", "1.01.06"},
        {"39", "1.01.06.01"},
        {"40", "1.01.06.01.0001"},
        {"41", "1.01.07"},
        {"42", "1.01.07.01"},
        {"43", "1.01.07.01.0001"},
        {"44", "1.01.07.02"},
        {"45", "1.01.07.02.0001"},
        {"46", "1.01.07.02.0002"},
        {"47", "1.01.07.02.0003"},
        {"48", "1.01.07.02.0004"},
        {"49", "1.01.07.02.0005"},
        {"50", "1.01.07.02.0006"},
        {"51", "1.01.07.02.0007"},
        {"52", "1.01.07.02.0008"},
        {"53", "1.01.07.02.0009"},
        {"54", "1.01.07.02.0010"},
        {"55", "1.01.07.02.0011"},
        {"56", "1.01.07.02.0012"},
        {"57", "1.01.07.02.0013"},
        {"58", "1.01.07.02.0014"},
        {"59", "1.01.07.02.0015"},
        {"60", "1.01.07.02.0016"},
        {"61", "1.01.07.02.0017"},
        {"62", "1.01.07.02.0018"},
        {"63", "1.01.07.02.0019"},
        {"64", "1.02"},
        {"65", "1.02.01"},
        {"66", "1.02.01.01"},
        {"67", "1.02.01.01.0001"},
        {"68", "1.02.02"},
        {"69", "1.02.02.01"},
        {"70", "1.02.02.01.0001"},
        {"71", "1.02.01.01.0002"},
        {"72", "1.02.01.01.0003"},
        {"73", "1.02.02.01.0002"},
        {"74", "1.02.02.01.0003"},
        {"75", "1.02.02.01.0004"},
        {"76", "1.02.03"},
        {"77", "1.02.03.01"},
        {"78", "1.02.03.02"},
        {"79", "1.02.03.01.0001"},
        {"80", "1.02.03.01.0002"},
        {"81", "1.02.03.01.0003"},
        {"82", "1.02.03.01.0004"},
        {"83", "1.02.03.01.0005"},
        {"84", "1.02.03.01.0006"},
        {"85", "1.02.03.01.0007"},
        {"86", "1.02.03.02.0002"},
        {"87", "1.02.03.02.0004"},
        {"88", "1.02.03.02.0007"},
        {"89", "1.02.03.03"},
        {"90", "1.02.03.03.0001"},
        {"91", "1.02.03.03.0002"},
        {"92", "1.02.03.03.0003"},
        {"93", "1.02.03.03.0004"},
        {"94", "1.02.03.03.0005"},
        {"95", "1.02.03.03.0006"},
        {"96", "1.02.03.03.0007"},
        {"97", "1.02.03.03.0008"},
        {"98", "1.02.03.03.0009"},
        {"99", "1.02.03.03.0010"},
        {"100", "1.02.03.03.0011"},
        {"101", "1.02.03.03.0012"},
        {"102", "1.02.03.03.0013"},
        {"103", "1.02.03.03.0014"},
        {"104", "1.02.04"},
        {"105", "1.02.04.01"},
        {"106", "1.02.04.01.0001"},
        {"107", "1.02.04.01.0002"},
        {"108", "1.02.04.01.0003"},
        {"109", "1.01.01.02.0008"},
        {"110", "1.01.01.02.0009"},
        {"111", "1.01.01.02.0010"},
        {"112", "1.01.01.02.0011"},
        {"113", "2.01"},
        {"114", "2.01.01"},
        {"115", "2.01.01.01"},
        {"116", "2.01.01.01.0001"},
        {"117", "2.01.01.01.0002"},
        {"118", "2.01.01.01.0003"},
        {"119", "2.01.02"},
        {"120", "2.01.02.01"},
        {"121", "2.01.02.01.0002"},
        {"122", "2.01.02.01.0003"},
        {"123", "2.01.02.01.0004"},
        {"124", "2.01.02.01.0005"},
        {"125", "2.01.02.01.0006"},
        {"126", "2.01.02.01.0008"},
        {"127", "2.01.02.01.0011"},
        {"128", "2.01.02.01.0012"},
        {"129", "2.01.02.01.0013"},
        {"130", "2.01.02.01.0014"},
        {"131", "2.01.02.01.0015"},
        {"132", "2.01.02.01.0016"},
        {"133", "2.01.03"},
        {"134", "2.01.04"},
        {"135", "2.01.05"},
        {"136", "2.01.05.01"},
        {"137", "2.01.05.01.0001"},
        {"138", "2.01.05.01.0002"},
        {"139", "2.01.05.01.0003"},
        {"140", "2.01.05.01.0004"},
        {"141", "2.01.05.01.0005"},
        {"142", "2.01.05.01.0006"},
        {"143", "2.01.05.01.0007"},
        {"144", "2.01.05.02"},
        {"145", "2.01.05.02.0001"},
        {"146", "2.01.05.03"},
        {"147", "2.01.05.03.0001"},
        {"148", "2.01.05.03.0002"},
        {"149", "2.01.05.03.0003"},
        {"150", "2.01.05.04"},
        {"151", "2.01.05.04.0001"},
        {"152", "2.01.05.04.0002"},
        {"153", "2.01.05.04.0003"},
        {"154", "2.01.05.04.0004"},
        {"155", "2.01.06"},
        {"156", "2.01.06.01"},
        {"157", "2.01.06.01.0001"},
        {"158", "2.01.06.02"},
        {"159", "2.01.06.03"},
        {"160", "2.01.06.04"},
        {"161", "2.01.06.04.0001"},
        {"162", "2.01.06.04.0002"},
        {"163", "2.01.06.05"},
        {"164", "2.01.06.05.0001"},
        {"165", "2.02"},
        {"166", "2.03"},
        {"167", "2.02.01"},
        {"168", "2.02.02"},
        {"169", "2.02.02.01"},
        {"170", "2.02.02.01.0001"},
        {"171", "2.02.02.01.0002"},
        {"172", "2.02.02.01.0006"},
        {"173", "2.02.02.01.0007"},
        {"174", "2.02.02.01.0008"},
        {"175", "2.02.02.01.0009"},
        {"176", "2.02.02.01.0010"},
        {"177", "2.02.02.01.0011"},
        {"178", "2.02.02.01.0012"},
        {"179", "2.02.02.01.0013"},
        {"180", "2.02.02.01.0014"},
        {"181", "2.02.02.01.0015"},
        {"182", "2.02.02.01.0016"},
        {"183", "2.02.02.01.0017"},
        {"184", "2.02.02.01.0018"},
        {"185", "2.02.02.01.0019"},
        {"186", "2.02.02.01.0020"},
        {"187", "2.02.02.01.0021"},
        {"188", "2.02.02.01.0022"},
        {"189", "2.02.02.01.0023"},
        {"190", "2.02.02.01.0024"},
        {"191", "2.02.02.01.0025"},
        {"192", "2.02.03"},
        {"193", "2.02.03.01"},
        {"194", "2.02.03.01.0001"},
        {"195", "2.02.03.01.0002"},
        {"196", "2.03.01"},
        {"197", "2.03.01.01"},
        {"198", "2.03.01.01.0001"},
        {"199", "2.03.01.02"},
        {"200", "2.03.01.02.0001"},
        {"201", "2.03.02"},
        {"202", "2.03.02.01"},
        {"203", "2.03.02.01.0001"},
        {"204", "2.03.02.01.0002"},
        {"205", "2.03.02.02"},
        {"206", "2.03.02.02.0001"},
        {"207", "2.03.02.03"},
        {"208", "2.03.02.03.0001"},
        {"209", "2.03.02.03.0002"},
        {"210", "2.03.02.03.0003"},
        {"211", "2.03.03"},
        {"212", "2.03.03.01"},
        {"213", "2.03.03.01.0001"},
        {"214", "2.03.03.01.0002"},
        {"215", "2.03.03.01.0003"},
        {"216", "2.03.03.01.0004"},
        {"217", "2.03.03.02"},
        {"218", "2.03.03.02.0001"},
        {"219", "2.03.03.03"},
        {"220", "2.03.03.03.0001"},
        {"225", "2.04"},
        {"226", "2.04.01"},
        {"372", "2.01.03.01"},
        {"373", "2.01.03.01.0001"},
        {"375", "1.01.03.05.9998"},
        {"376", "1.01.03.05.9997"},
        {"377", "1.01.06.01.9999"},
        {"378", "1.01.03.01.0050"},
        {"379", "1.01.03.01.9999"},
        {"380", "2.01.03.02"},
        {"381", "2.01.03.02.0001"},
        {"383", "1.01.06.02"},
        {"384", "1.01.06.02.0001"},
        {"385", "1.01.06.01.1000"},
        {"386", "1.01.03.01.0100"},
        {"387", "1.01.01.02.0012"},
        {"388", "1.01.06.01.1001"},
        {"389", "2.01.01.01.0010"},
        {"390", "1.01.04.01"},
        {"391", "1.01.04.01.0001"},
        {"392", "1.01.04.01.0002"},
        {"393", "1.01.04.01.0003"},
        {"394", "1.01.04.01.0004"},
        {"395", "1.01.04.02"},
        {"396", "2.01.05.02.0002"},
        {"397", "2.01.05.02.0003"},
        {"398", "2.01.05.02.0004"},
        {"401", "1.01.06.01.0050"},
        {"403", "1.01.06.01.1002"},
        {"406", "2.01.01.01.0011"},
        {"407", "2.01.01.01.0012"},
        {"408", "2.01.05.01.0008"},
        {"409", "2.02.04"},
        {"410", "2.02.04.01"},
        {"411", "2.02.04.01.0001"},
        {"412", "2.03.03.03.0002"},
        {"415", "2.01.04.01"},
        {"416", "2.01.04.01.0001"},
        {"417", "1.01.02.01"},
        {"418", "1.01.02.01.0001"},
        {"419", "2.01.01.01.0013"},
        {"426", "1.02.05"},
        {"427", "1.02.05.01"},
        {"428", "1.02.05.01.0001"},
        {"429", "2.01.01.01.0014"},
        {"432", "2.03.01.01.9999"},
        {"434", "2.01.01.01.0015"},
        {"439", "1.01.01.02.0013"},
        {"440", "1.01.01.02.0014"},
        {"441", "1.01.01.02.0015"},
        {"442", "1.01.01.02.0016"},
        {"443", "1.01.01.02.0017"},
        {"444", "1.01.01.02.0018"},
        {"445", "1.01.02.01.0002"},
        {"446", "1.01.02.01.0003"},
        {"447", "1.01.02.01.0004"},
        {"449", "1.01.02.01.0005"},
        {"450", "1.01.02.01.0006"},
        {"451", "2.01.05.01.0009"},
        {"452", "2.01.05.01.0010"},
        {"455", "2.01.05.02.0005"},
        {"458", "1.02.05.01.0002"},
        {"459", "1.02.05.01.0003"},
        {"460", "1.01.04.01.9999"},
        {"461", "1.01.03.01.0005"},
        {"462", "1.01.05.10"},
        {"463", "1.01.05.10.0001"},
        {"464", "1.01.06.10"},
        {"465", "1.01.06.10.0001"},
        {"466", "1.01.06.10.0002"},
        {"467", "1.01.06.10.0003"},
        {"468", "1.01.06.10.0004"},
        {"469", "1.01.06.01.1003"},
        {"471", "2.01.01.01.0016"},
        {"472", "2.01.01.01.0017"},
        {"473", "1.02.03.03.0080"},
        {"474", "1.02.05.01.0004"},
        {"475", "2.01.03.02.0002"},
        {"476", "2.01.06.05.0002"},
        {"477", "2.01.06.05.0003"},
        {"478", "2.01.06.05.0004"},
        {"479", "2.01.06.05.0005"},
        {"480", "2.02.02.02"},
        {"481", "2.02.02.02.0001"},
        {"482", "2.02.02.02.0002"},
        {"483", "2.02.02.02.0003"},
        {"484", "2.02.02.02.9999"},
        {"486", "2.02.04.01.0002"},
        {"488", "2.01.05.04.0005"},
        {"489", "1.01.03.01.9998"},
        {"490", "1.01.03.01.0200"},
        {"491", "2.01.03.02.0200"},
        {"493", "1.02.05.01.0005"},
        {"495", "1.01.06.01.1004"},
        {"496", "1.01.03.05.0006"},
        {"497", "1.01.01.02.0019"},
        {"498", "1.01.01.02.0020"},
        {"499", "1.01.03.05.0007"},
        {"500", "1.01.03.01.0051"},
        {"504", "2.01.03.02.0005"},
        {"515", "1.01.01.02.0021"},
        {"516", "1.01.01.02.0022"},
        {"517", "1.01.03.01.0052"},
        {"520", "1.01.01.02.1004"},
        {"521", "1.01.01.02.0004"},
        {"522", "1.01.01.02.0023"},
        {"523", "1.01.01.02.0024"},
        {"524", "1.01.01.02.0025"},
        {"528", "9.01"},
        {"529", "9.01.01"},
        {"530", "9.01.01.01"},
        {"531", "9.01.01.01.0001"},
        {"532", "6.01"},
        {"533", "6.01.01"},
        {"534", "6.01.01.01"},
        {"535", "6.01.01.01.0001"},
        {"536", "6.01.01.01.0002"},
        {"537", "6.01.01.02"},
        {"538", "6.01.01.02.0001"},
        {"539", "6.01.01.02.0002"},
        {"540", "6.01.02"},
        {"541", "6.01.02.01"},
        {"542", "6.01.02.01.0001"},
        {"543", "6.01.02.02"},
        {"544", "6.01.02.02.0001"},
        {"545", "6.01.02.02.0002"},
        {"546", "6.01.02.02.0003"},
        {"547", "6.01.02.02.0004"},
        {"548", "6.01.03"},
        {"549", "6.01.03.01"},
        {"550", "6.01.03.01.0001"},
        {"551", "6.02"},
        {"552", "6.02.01"},
        {"553", "6.02.01.01"},
        {"554", "6.02.01.01.0001"},
        {"555", "6.02.01.01.0002"},
        {"556", "6.03"},
        {"557", "6.03.01"},
        {"558", "6.03.01.01"},
        {"559", "6.03.01.01.0001"},
        {"560", "6.04"},
        {"561", "6.04.01"},
        {"562", "6.04.01.01"},
        {"563", "6.04.01.01.0001"},
        {"564", "6.04.02"},
        {"565", "6.04.02.01"},
        {"566", "6.04.02.01.0001"},
        {"567", "6.04.03"},
        {"568", "6.04.03.01"},
        {"569", "6.04.03.01.0001"},
        {"570", "6.10"},
        {"571", "6.10.01"},
        {"572", "6.10.01.01"},
        {"573", "6.10.01.01.0001"},
        {"574", "6.10.01.01.0002"},
        {"575", "6.10.01.01.0004"},
        {"576", "6.10.01.01.0005"},
        {"577", "6.10.01.01.0006"},
        {"578", "6.10.01.01.0007"},
        {"579", "6.10.01.01.0008"},
        {"580", "6.10.01.01.0009"},
        {"581", "6.10.01.01.0010"},
        {"582", "6.10.01.01.0011"},
        {"583", "6.10.01.01.0012"},
        {"584", "6.10.01.01.0013"},
        {"585", "6.10.01.01.0016"},
        {"586", "6.10.01.01.0017"},
        {"587", "6.10.01.01.0018"},
        {"588", "6.10.01.01.0019"},
        {"589", "6.10.01.02"},
        {"590", "6.10.01.02.0001"},
        {"591", "6.10.01.02.0002"},
        {"592", "6.10.01.02.0003"},
        {"593", "6.10.01.02.0004"},
        {"594", "6.10.01.03"},
        {"595", "6.10.01.04"},
        {"596", "6.10.01.04.0001"},
        {"597", "6.10.01.04.0002"},
        {"598", "6.10.01.04.0003"},
        {"599", "6.10.01.04.0004"},
        {"600", "6.10.01.04.0005"},
        {"601", "6.10.01.04.0006"},
        {"602", "6.10.01.04.0007"},
        {"603", "6.10.02"},
        {"604", "6.10.02.01"},
        {"605", "6.10.02.01.0001"},
        {"606", "6.10.02.01.0002"},
        {"607", "6.10.02.01.0004"},
        {"608", "6.10.02.01.0005"},
        {"609", "6.10.02.01.0006"},
        {"610", "6.10.02.01.0007"},
        {"611", "6.10.02.01.0008"},
        {"612", "6.10.02.01.0009"},
        {"613", "6.10.02.01.0010"},
        {"614", "6.10.02.01.0011"},
        {"615", "6.10.02.01.0012"},
        {"616", "6.10.02.01.0013"},
        {"617", "6.10.02.01.0014"},
        {"618", "6.10.02.01.0015"},
        {"619", "6.10.02.01.0016"},
        {"620", "6.10.02.02"},
        {"621", "6.10.02.02.0001"},
        {"622", "6.10.02.02.0002"},
        {"623", "6.10.02.02.0003"},
        {"624", "6.10.02.02.0004"},
        {"625", "6.10.02.02.0005"},
        {"626", "6.10.02.02.0006"},
        {"627", "6.10.02.02.0008"},
        {"628", "6.10.02.02.0009"},
        {"629", "6.10.02.02.0010"},
        {"630", "6.10.02.02.0011"},
        {"631", "6.10.02.02.0012"},
        {"632", "6.10.02.02.0013"},
        {"633", "6.10.02.02.0014"},
        {"634", "6.10.02.02.0015"},
        {"635", "6.10.02.02.0016"},
        {"636", "6.10.02.02.0017"},
        {"637", "6.10.02.02.0018"},
        {"638", "6.10.02.02.0019"},
        {"639", "6.10.02.02.0021"},
        {"640", "6.10.02.02.0022"},
        {"641", "6.10.02.02.0023"},
        {"642", "6.10.02.02.0025"},
        {"643", "6.10.02.02.0026"},
        {"644", "6.10.02.02.0027"},
        {"645", "6.10.02.02.0028"},
        {"646", "6.10.02.02.0029"},
        {"647", "6.10.02.02.0030"},
        {"648", "6.10.02.02.0031"},
        {"649", "6.10.02.02.0032"},
        {"650", "6.10.02.03"},
        {"651", "6.10.02.03.0001"},
        {"652", "6.10.02.03.0002"},
        {"653", "6.10.02.03.0003"},
        {"654", "6.10.03"},
        {"655", "6.10.03.01"},
        {"656", "6.10.03.01.0001"},
        {"657", "6.10.03.01.0002"},
        {"658", "6.10.03.01.0003"},
        {"659", "6.10.03.01.0004"},
        {"660", "6.10.04"},
        {"661", "6.10.04.01"},
        {"662", "6.10.04.01.0001"},
        {"663", "6.10.04.01.0002"},
        {"664", "6.10.04.01.0003"},
        {"665", "6.10.05"},
        {"666", "6.10.05.01"},
        {"667", "6.10.05.01.0001"},
        {"668", "6.10.06"},
        {"669", "6.10.06.01"},
        {"670", "6.10.06.01.0001"},
        {"671", "6.10.06.01.0002"},
        {"672", "6.20"},
        {"673", "6.20.01"},
        {"674", "6.20.01.01"},
        {"675", "6.20.01.01.0001"},
        {"676", "6.20.01.01.0002"},
        {"677", "6.10.04.01.0004"},
        {"678", "6.01.03.01.1000"},
        {"679", "6.10.01.04.0008"},
        {"680", "6.10.02.02.0033"},
        {"681", "6.10.02.02.9999"},
        {"682", "6.01.01.20"},
        {"683", "6.01.01.20.0001"},
        {"684", "6.10.01.01.0020"},
        {"685", "6.10.01.02.0005"},
        {"686", "6.10.02.02.1000"},
        {"687", "6.01.03.01.1001"},
        {"688", "6.10.01.04.0009"},
        {"689", "6.10.02.02.9998"},
        {"690", "6.10.01.04.9999"},
        {"691", "6.10.02.03.9999"},
        {"692", "6.01.03.01.1002"},
        {"693", "6.10.02.02.0034"},
        {"694", "6.10.02.02.0035"},
        {"695", "6.10.02.02.0036"},
        {"696", "6.10.02.02.9997"},
        {"697", "6.01.03.01.9998"},
        {"698", "6.01.03.01.9999"},
        {"699", "6.02.01.01.0003"},
        {"700", "6.04.01.01.0002"},
        {"701", "6.10.05.01.0002"},
        {"702", "6.10.01.04.0010"},
        {"703", "6.10.02.02.0037"},
        {"704", "6.10.03.01.0005"},
        {"705", "6.10.01.01.0021"},
        {"706", "6.03.01.01.0002"},
        {"707", "6.10.01.01.0022"},
        {"708", "6.10.02.02.0038"},
        {"709", "6.01.01.03"},
        {"710", "6.01.01.03.0001"},
        {"711", "6.01.01.03.0002"},
        {"712", "6.01.01.01.0003"},
        {"713", "6.01.01.01.0004"},
        {"714", "6.01.01.01.0005"},
        {"715", "6.01.01.02.0003"},
        {"716", "6.01.01.02.0004"},
        {"717", "6.01.01.02.0005"},
        {"718", "6.01.01.03.0003"},
        {"719", "6.01.01.03.0004"},
        {"720", "6.01.01.03.0005"},
        {"721", "6.01.01.01.0020"},
        {"722", "6.10.03.01.0006"},
        {"723", "6.10.01.02.0006"},
    };

    /**
     * Subset of account codes flagged as carrying a value.
     * NOTE(review): every entry also appears as a code in {@link #CONTAS};
     * presumably these mark leaf accounts that hold monetary amounts —
     * TODO confirm with the consumer of this table.
     */
    public static final String[] CONTAS_COM_VALOR = new String[]{
        "6.01.01.02.0004",
        "6.01.01.02.0005",
        "6.01.01.03.0003",
        "6.01.01.03.0004",
        "6.01.01.03.0005",
        "6.01.01.01.0020",
        "1.01.06.01.1003",
        "2.01.01.01.0016",
        "2.01.01.01.0017",
        "1.02.03.03.0080",
        "1.02.05.01.0004",
        "2.01.03.02.0002",
        "2.01.06.05.0002",
        "2.01.06.05.0003"
    };
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.drools.core.common.BaseNode;
import org.drools.core.impl.RuleBase;
import org.kie.api.KieBase;
import org.kie.api.definition.rule.Rule;
import org.kie.api.runtime.KieRuntime;
import org.kie.api.runtime.KieSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* Utility class to view Rete models
*
*/
public class ReteDumper {
    private static Logger logger = LoggerFactory.getLogger(ReteDumper.class);

    // Destination for dump output; when null, results go to System.out.
    private PrintWriter writer;
    // Accumulates the textual dump for the current dump(...) invocation.
    private StringBuilder sb;
    // Decides which sink nodes are descended into during traversal.
    private Predicate<BaseNode> nodesFilter;
    // When true, partition/mask details are omitted from each node line.
    private boolean nodeInfoOnly = false;
public ReteDumper() {
this(node -> true);
}
    /**
     * Creates a dumper restricted by a node filter.
     *
     * @param nodesFilter only sink nodes accepted by this predicate are
     *        descended into (the entry point itself is always visited).
     */
    public ReteDumper(Predicate<BaseNode> nodesFilter) {
        this.nodesFilter = nodesFilter;
    }
public ReteDumper(String ruleName) {
this( node -> Stream.of( node.getAssociatedRules() ).anyMatch( rule -> rule.getName().equals( ruleName ) ) );
}
    /** @return the configured output writer, or null when printing to STDOUT. */
    public PrintWriter getWriter() {
        return writer;
    }
    /**
     * Set a writer to which ReteDumper prints results. By default, results will be printed to STDOUT.
     * The caller remains responsible for flushing/closing the writer.
     * @param writer destination for dump output
     */
    public void setWriter(PrintWriter writer) {
        this.writer = writer;
    }
    /** @return true when partition/mask information is omitted from the dump. */
    public boolean isNodeInfoOnly() {
        return nodeInfoOnly;
    }
    /**
     * If true, dump without partition/mask information. Default value is false.
     * @param nodeInfoOnly whether to restrict output to node identity only
     */
    public void setNodeInfoOnly(boolean nodeInfoOnly) {
        this.nodeInfoOnly = nodeInfoOnly;
    }
    /** Static convenience: dumps the Rete network of the given KieBase with a default dumper. */
    public static void dumpRete(KieBase kbase ) {
        new ReteDumper().dump((RuleBase) kbase);
    }

    /** Static convenience: dumps the Rete network backing the given runtime. */
    public static void dumpRete(KieRuntime session ) {
        new ReteDumper().dump((RuleBase)session.getKieBase());
    }

    /** Static convenience: dumps the Rete network backing the given session. */
    public static void dumpRete(KieSession session) {
        new ReteDumper().dump((RuleBase)session.getKieBase());
    }

    /** Static convenience: dumps the Rete network of the given rule base. */
    public static void dumpRete(RuleBase kBase) {
        new ReteDumper().dump(kBase.getRete());
    }

    /** Static convenience: dumps the given Rete network. */
    public static void dumpRete(Rete rete) {
        new ReteDumper().dump(rete);
    }
    /** Dumps the Rete network of the given KieBase using this dumper's configuration. */
    public void dump(KieBase kbase ) {
        dump((RuleBase) kbase);
    }

    /** Dumps the Rete network backing the given runtime. */
    public void dump(KieRuntime session ) {
        dump((RuleBase)session.getKieBase());
    }

    /** Dumps the Rete network backing the given session. */
    public void dump(KieSession session) {
        dump((RuleBase)session.getKieBase());
    }

    /** Dumps the Rete network of the given rule base. */
    public void dump(RuleBase kBase) {
        dump(kBase.getRete());
    }
public void dump(Rete rete) {
// Other dump/dumpRete methods eventually call this method
sb = new StringBuilder();
traverseRete(rete, this::dumpNode);
printResults();
}
private void printResults() {
if (writer == null) {
System.out.print(sb.toString());
} else {
// if a writer is given by a caller, the caller is responsible for closing
writer.print(sb.toString());
}
}
    /** Static convenience: collects all nodes of the given KieBase's network. */
    public static Set<BaseNode> collectRete(KieBase kbase ) {
        return new ReteDumper().collect((RuleBase) kbase);
    }

    /** Static convenience: collects all nodes of the network backing the given runtime. */
    public static Set<BaseNode> collectRete(KieRuntime session ) {
        return new ReteDumper().collect((RuleBase)session.getKieBase());
    }

    /** Static convenience: collects all nodes of the network backing the given session. */
    public static Set<BaseNode> collectRete(KieSession session) {
        return new ReteDumper().collect((RuleBase)session.getKieBase());
    }

    /** Static convenience: collects all nodes of the given rule base's network. */
    public static Set<BaseNode> collectRete(RuleBase kBase) {
        return new ReteDumper().collect(kBase.getRete());
    }

    /** Static convenience: collects all nodes of the given Rete network. */
    public static Set<BaseNode> collectRete(Rete rete) {
        return new ReteDumper().collect(rete);
    }
    /** Collects all nodes of the given KieBase's network using this dumper's filter. */
    public Set<BaseNode> collect(KieBase kbase ) {
        return collect((RuleBase) kbase);
    }

    /** Collects all nodes of the network backing the given runtime. */
    public Set<BaseNode> collect(KieRuntime session ) {
        return collect((RuleBase)session.getKieBase());
    }

    /** Collects all nodes of the network backing the given session. */
    public Set<BaseNode> collect(KieSession session) {
        return collect((RuleBase)session.getKieBase());
    }

    /** Collects all nodes of the given rule base's network. */
    public Set<BaseNode> collect(RuleBase kBase) {
        return collect(kBase.getRete());
    }
public Set<BaseNode> collect(Rete rete) {
Set<BaseNode> nodes = createIdentitySet();
traverseRete(rete, (node, s) -> nodes.add(node));
return nodes;
}
/**
 * Walks the network depth-first from every entry point, invoking the consumer
 * with each node and its indentation string. Each entry point gets its own
 * fresh visited set, so a node reachable from several entry points is visited
 * once per entry point.
 */
public void traverseRete(Rete rete, BiConsumer<BaseNode, String> consumer) {
    rete.getEntryPointNodes()
        .values()
        .forEach(entryPoint -> dumpNode(entryPoint, "", createIdentitySet(), consumer));
}
/** Creates a set with identity-based membership (==, not equals()/hashCode()). */
private <T> Set<T> createIdentitySet() {
    IdentityHashMap<T, Boolean> backing = new IdentityHashMap<>();
    return Collections.newSetFromMap(backing);
}
/**
 * Depth-first recursion over a node and its sinks.
 * The consumer runs BEFORE the visited check, so a node reachable along
 * several paths is reported once per path; only its subtree is pruned on
 * revisit.
 */
private void dumpNode( BaseNode node, String ident, Set<BaseNode> visitedNodes, BiConsumer<BaseNode, String> consumer ) {
    consumer.accept( node, ident );
    // add() returning false means this node was already visited: stop recursing
    if (!visitedNodes.add( node )) {
        return;
    }
    Sink[] sinks = node.getSinks();
    if (sinks != null) {
        for (Sink sink : sinks) {
            if (sink instanceof BaseNode) {
                BaseNode sinkNode = ( BaseNode ) sink;
                // nodesFilter lets callers prune whole subtrees from the walk
                if ( nodesFilter.test( sinkNode ) ) {
                    dumpNode( sinkNode, ident + " ", visitedNodes, consumer );
                }
            }
        }
    }
}
/**
 * Appends a single-line description of {@code node} to the dump buffer.
 * When {@code nodeInfoOnly} is false, also appends the partition id and any
 * declared/inferred property-reactivity masks the node exposes.
 */
private void dumpNode( BaseNode node, String ident ) {
    sb.append(ident + formatNode(node));
    if (!nodeInfoOnly) {
        sb.append(" on " + node.getPartitionId());
        // Mask getters exist only on some node types, so they are read
        // reflectively; absence is expected and logged at trace level only.
        appendMasks(node, "getDeclaredMask", "getInferredMask", " d ", " i ", "failed to log mask values");
        appendMasks(node, "getLeftDeclaredMask", "getLeftInferredMask", " Ld ", " Li ", "failed to log left mask values");
        appendMasks(node, "getRightDeclaredMask", "getRightInferredMask", " Rd ", " Ri ", "failed to log right mask values");
    }
    sb.append("\n");
}

/**
 * Reflectively reads a declared/inferred mask getter pair from {@code node}
 * and appends the two values with the given labels. Nodes that do not expose
 * the getters are skipped silently (trace-level log only).
 */
private void appendMasks(BaseNode node, String declaredGetter, String inferredGetter,
                         String declaredLabel, String inferredLabel, String failureMessage) {
    try {
        Object declaredMask = node.getClass().getMethod(declaredGetter).invoke(node);
        Object inferredMask = node.getClass().getMethod(inferredGetter).invoke(node);
        sb.append(declaredLabel + declaredMask + inferredLabel + inferredMask);
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
        logger.trace(failureMessage, e);
    }
}
/**
 * Formats a node into an informative single-line String, including the
 * constraints the node type exposes (beta, accumulate and from nodes).
 *
 * @param node the node to format
 * @return formatted String
 */
public String formatNode(BaseNode node) {
    StringBuilder additionalInfo = new StringBuilder();
    if (node instanceof BetaNode) {
        BetaNode betaNode = (BetaNode) node;
        // fixed typo in dump output: was "contraints="
        additionalInfo.append("constraints=");
        if (betaNode.getRawConstraints() != null) {
            additionalInfo.append(Arrays.toString(betaNode.getConstraints()));
        }
        if (node instanceof AccumulateNode) {
            AccumulateNode accNode = (AccumulateNode) node;
            additionalInfo.append(", resultConstraints=" + Arrays.toString(accNode.getResultConstraints()));
            additionalInfo.append(", resultBinder=" + Arrays.toString(accNode.getResultBinder().getConstraints()));
        }
    } else if (node instanceof FromNode<?>) {
        FromNode<?> fromNode = (FromNode<?>) node;
        additionalInfo.append("result=" + fromNode.getResultClass().getName());
        additionalInfo.append(", alphaConstraints=" + Arrays.toString(fromNode.getAlphaConstraints()));
        additionalInfo.append(", betaConstraints=" + Arrays.toString(fromNode.getBetaConstraints().getConstraints()));
    }
    if (additionalInfo.length() > 0) {
        return node + " <" + additionalInfo.toString() + "> ";
    }
    return node.toString();
}
/** Dumps each node of the given KieBase together with its associated rules. */
public static void dumpAssociatedRulesRete(KieBase kieBase) {
    Rete rete = ((RuleBase) kieBase).getRete();
    new ReteDumper().dumpAssociatedRules(rete);
}
/**
 * Dumps every node together with the rules associated with it. Helps to
 * locate the rules behind a problematic node.
 *
 * @param kieBase the knowledge base whose network is dumped
 */
public void dumpAssociatedRules(KieBase kieBase) {
    Rete rete = ((RuleBase) kieBase).getRete();
    dumpAssociatedRules(rete);
}
/**
 * Writes one line per collected node listing the names of all rules
 * associated with that node, then flushes the buffer.
 */
public void dumpAssociatedRules(Rete rete) {
    sb = new StringBuilder();
    for (BaseNode node : collect(rete)) {
        String ruleNames = Arrays.stream(node.getAssociatedRules())
                .map(Rule::getName)
                .collect(Collectors.joining(", "));
        sb.append(node + " : [" + ruleNames + "]\n");
    }
    printResults();
}
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.core;
import com.hazelcast.function.SupplierEx;
import com.hazelcast.jet.Traverser;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static com.hazelcast.jet.core.EventTimeMapper.NO_NATIVE_TIME;
import static com.hazelcast.jet.core.EventTimePolicy.eventTimePolicy;
import static com.hazelcast.jet.core.JetTestSupport.wm;
import static com.hazelcast.jet.core.WatermarkPolicy.limitingLag;
import static com.hazelcast.jet.impl.execution.WatermarkCoalescer.IDLE_MESSAGE;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
 * Unit tests for {@code EventTimeMapper}: watermark emission with a limiting
 * lag, idle-partition handling, partition add/remove, watermark restore, and
 * native-timestamp fallback. Each test asserts the exact item sequence the
 * mapper's flat-mapping methods emit.
 */
public class EventTimeMapperTest {
    private static final long LAG = 3;
    @Rule
    public ExpectedException exception = ExpectedException.none();
    @Test
    public void smokeTest() {
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, limitingLag(LAG), 1, 0, 5)
        );
        eventTimeMapper.addPartitions(0L, 2);
        // all partitions are active initially
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), null, 0, NO_NATIVE_TIME));
        // now idle timeout passed for all partitions, IDLE_MESSAGE should be emitted
        assertTraverser(eventTimeMapper.flatMapEvent(ns(5), null, 0, NO_NATIVE_TIME), IDLE_MESSAGE);
        // still all partitions are idle, but IDLE_MESSAGE should not be emitted for the second time
        assertTraverser(eventTimeMapper.flatMapEvent(ns(5), null, 0, NO_NATIVE_TIME));
        // now we observe event on partition0, watermark should be immediately forwarded because the other queue is idle
        assertTraverser(eventTimeMapper.flatMapEvent(ns(5), 100L, 0, NO_NATIVE_TIME), wm(100 - LAG), 100L);
        // observe another event on the same partition. No WM is emitted because the event is older
        assertTraverser(eventTimeMapper.flatMapEvent(ns(5), 90L, 0, NO_NATIVE_TIME), 90L);
        // a newer event advances the watermark again
        assertTraverser(eventTimeMapper.flatMapEvent(ns(5), 101L, 0, NO_NATIVE_TIME), wm(101 - LAG), 101L);
    }
    @Test
    public void smokeTest_disabledIdleTimeout() {
        // idle timeout of 0 disables idle detection entirely
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, limitingLag(LAG), 1, 0, 0)
        );
        eventTimeMapper.addPartitions(2);
        // all partitions are active initially
        assertTraverser(eventTimeMapper.flatMapIdle());
        // let's have events only in partition0. No WM is output because we wait for the other partition indefinitely
        assertTraverser(eventTimeMapper.flatMapEvent(10L, 0, NO_NATIVE_TIME), 10L);
        assertTraverser(eventTimeMapper.flatMapEvent(11L, 0, NO_NATIVE_TIME), 11L);
        // now have some events in the other partition, wms will be output
        assertTraverser(eventTimeMapper.flatMapEvent(10L, 1, NO_NATIVE_TIME), wm(10 - LAG), 10L);
        assertTraverser(eventTimeMapper.flatMapEvent(11L, 1, NO_NATIVE_TIME), wm(11 - LAG), 11L);
        // now partition1 will get ahead of partition0 -> no WM
        assertTraverser(eventTimeMapper.flatMapEvent(12L, 1, NO_NATIVE_TIME), 12L);
        // another event in partition0, we'll get the wm
        assertTraverser(eventTimeMapper.flatMapEvent(13L, 0, NO_NATIVE_TIME), wm(12 - LAG), 13L);
    }
    @Test
    public void test_zeroPartitions() {
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, limitingLag(LAG), 1, 0, 0)
        );
        // with no partitions it should immediately emit the idle message, even
        // though the idle timeout is disabled (set to 0 above)
        assertTraverser(eventTimeMapper.flatMapIdle(), IDLE_MESSAGE);
        // the idle message must not be repeated
        assertTraverser(eventTimeMapper.flatMapIdle());
        // after adding a partition and observing an event, WM should be emitted
        eventTimeMapper.addPartitions(1);
        assertTraverser(eventTimeMapper.flatMapIdle()); // can't send WM here, we don't know what its value would be
        assertTraverser(eventTimeMapper.flatMapEvent(10L, 0, NO_NATIVE_TIME), wm(10 - LAG), 10L);
    }
    @Test
    public void when_idle_event_idle_then_twoIdleMessagesSent() {
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, limitingLag(LAG), 1, 0, 10)
        );
        eventTimeMapper.addPartitions(1);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME), wm(10 - LAG), 10L);
        // When - become idle
        assertTraverser(eventTimeMapper.flatMapEvent(ns(10), null, 0, NO_NATIVE_TIME), IDLE_MESSAGE);
        // When - another event, but no new WM
        assertTraverser(eventTimeMapper.flatMapEvent(ns(10), 10L, 0, NO_NATIVE_TIME), 10L);
        // When - become idle again
        assertTraverser(eventTimeMapper.flatMapEvent(ns(10), null, 0, NO_NATIVE_TIME));
        // Then - a second IDLE_MESSAGE after the second timeout
        assertTraverser(eventTimeMapper.flatMapEvent(ns(20), null, 0, NO_NATIVE_TIME), IDLE_MESSAGE);
    }
    @Test
    public void when_eventInOneOfTwoPartitions_then_wmAndIdleMessageForwardedAfterTimeout() {
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, limitingLag(LAG), 1, 0, 10)
        );
        eventTimeMapper.addPartitions(ns(0), 2);
        // When - an event only on partition0
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME), 10L);
        // Then - after the timeout the WM is released and the mapper reports idle
        assertTraverser(eventTimeMapper.flatMapEvent(ns(10), null, 0, NO_NATIVE_TIME),
                wm(10 - LAG),
                IDLE_MESSAGE);
    }
    @Test
    public void when_noTimestampFnAndNoNativeTime_then_throw() {
        // null timestampFn means the mapper must fall back to native event time
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(null, limitingLag(LAG), 1, 0, 10)
        );
        eventTimeMapper.addPartitions(ns(0), 1);
        exception.expectMessage("Neither timestampFn nor nativeEventTime specified");
        eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME);
    }
    @Test
    public void when_noTimestampFn_then_useNativeTime() {
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(null, limitingLag(LAG), 1, 0, 5)
        );
        eventTimeMapper.addPartitions(0L, 1);
        // watermarks are derived from the native time (11, 12), not the event value
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), 10L, 0, 11L), wm(11L - LAG), 10L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), 11L, 0, 12L), wm(12L - LAG), 11L);
    }
    @Test
    public void when_throttlingToMaxFrame_then_noWatermarksOutput() {
        // watermarkThrottlingFrameSize of 0 suppresses all watermark output
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, limitingLag(LAG), 0, 0, 5)
        );
        eventTimeMapper.addPartitions(0L, 1);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), -10L, 0, 11L), -10L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), 10L, 0, 12L), 10L);
    }
    @Test
    public void when_restoredState_then_wmDoesNotGoBack() {
        EventTimePolicy<Long> eventTimePolicy = eventTimePolicy(Long::longValue, limitingLag(0), 1, 0, 5);
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(eventTimePolicy);
        eventTimeMapper.addPartitions(0L, 1);
        // When - a watermark of 10 is restored from a snapshot
        eventTimeMapper.restoreWatermark(0, 10);
        // Then - events at or below the restored WM produce no (regressing) watermark
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 9L, 0, NO_NATIVE_TIME), 9L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME), 10L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 11L, 0, NO_NATIVE_TIME), wm(11), 11L);
    }
    @Test
    public void when_twoActiveQueues_theLaggingOneRemoved_then_wmForwarded() {
        EventTimePolicy<Long> eventTimePolicy = eventTimePolicy(Long::longValue, limitingLag(0), 1, 0, 5);
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(eventTimePolicy);
        eventTimeMapper.addPartitions(0L, 2);
        // When - only partition0 has seen an event
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME), 10L);
        // Then - removing the lagging partition1 releases the pending watermark
        assertTraverser(eventTimeMapper.removePartition(ns(0), 1), wm(10));
    }
    @Test
    public void when_twoActiveQueues_theAheadOneRemoved_then_noWmForwarded() {
        EventTimePolicy<Long> eventTimePolicy = eventTimePolicy(Long::longValue, limitingLag(0), 1, 0, 5);
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(eventTimePolicy);
        eventTimeMapper.addPartitions(0L, 2);
        // When - partition1 is ahead of partition0
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME), 10L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 11L, 1, NO_NATIVE_TIME), wm(10), 11L);
        // Then - removing the ahead partition must not advance the watermark
        assertTraverser(eventTimeMapper.removePartition(ns(0), 1));
    }
    @Test
    public void when_threePartitions_laggingOneRemoved_secondLaggingOneIdle_then_noWmForwarded() {
        EventTimePolicy<Long> eventTimePolicy = eventTimePolicy(Long::longValue, limitingLag(0), 1, 0, 5);
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(eventTimePolicy);
        eventTimeMapper.addPartitions(0L, 3);
        // When - each partition observed one event: 10, 11, 12
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 10L, 0, NO_NATIVE_TIME), 10L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), 11L, 1, NO_NATIVE_TIME), 11L);
        assertTraverser(eventTimeMapper.flatMapEvent(ns(1), 12L, 2, NO_NATIVE_TIME), wm(10), 12L);
        // Then
        // in this call partition0 will turn idle and partition1 is removed -> wm(12) is forwarded
        assertTraverser(eventTimeMapper.removePartition(ns(5), 1), wm(12));
    }
    @Test
    public void when_currentWmBeyondReportedEventTimestamp_then_eventNotLate() {
        // the policy reports a constant WM of 42, beyond the event's timestamp
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, constantWmPolicy(42L), 1, 0, 5));
        eventTimeMapper.addPartitions(0L, 1);
        assertTraverser(eventTimeMapper.flatMapEvent(41L, 0, NO_NATIVE_TIME), wm(41), 41L);
    }
    @Test
    public void when_currentWmBeyondReportedEventTimestamp_and_eventLate_then_wmDoesNotGoBack() {
        EventTimeMapper<Long> eventTimeMapper = new EventTimeMapper<>(
                eventTimePolicy(Long::longValue, constantWmPolicy(42L), 1, 0, 5));
        eventTimeMapper.addPartitions(0L, 1);
        assertTraverser(eventTimeMapper.flatMapEvent(41L, 0, NO_NATIVE_TIME), wm(41), 41L);
        // a late event passes through without emitting a regressing watermark
        assertTraverser(eventTimeMapper.flatMapEvent(ns(0), 40L, 0, NO_NATIVE_TIME), 40L);
    }
    /** Returns a watermark policy that ignores events and always reports {@code value}. */
    private static SupplierEx<WatermarkPolicy> constantWmPolicy(long value) {
        return () -> new WatermarkPolicy() {
            @Override
            public void reportEvent(long timestamp) { }
            @Override
            public long getCurrentWatermark() {
                return value;
            }
        };
    }
    /** Asserts that {@code actual} yields exactly {@code expected} items and is then exhausted. */
    private <T> void assertTraverser(Traverser<T> actual, T ... expected) {
        for (T element : expected) {
            assertEquals(element, actual.next());
        }
        assertNull(actual.next());
    }
    /** Converts milliseconds to nanoseconds. */
    private long ns(long ms) {
        return MILLISECONDS.toNanos(ms);
    }
}
| |
package org.apache.lucene.codecs.blocktree;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Comparator;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.automaton.RunAutomaton;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.Outputs;
// NOTE: cannot seek!
/**
 * A TermsEnum over a block-tree field that only returns terms accepted by a
 * compiled automaton, optionally seeded to start after a given term. The enum
 * walks the on-disk block tree with a manual frame stack while stepping the
 * run-automaton over each candidate suffix; all seek* methods are
 * unsupported — iteration is via next() only.
 */
final class IntersectTermsEnum extends TermsEnum {
  final IndexInput in;
  final static Outputs<BytesRef> fstOutputs = ByteSequenceOutputs.getSingleton();
  // one frame per prefix depth currently on the traversal stack; grown lazily
  private IntersectTermsEnumFrame[] stack;
  // cached FST arcs, one per prefix byte position; grown lazily
  @SuppressWarnings({"rawtypes","unchecked"}) private FST.Arc<BytesRef>[] arcs = new FST.Arc[5];
  final RunAutomaton runAutomaton;
  final CompiledAutomaton compiledAutomaton;
  private IntersectTermsEnumFrame currentFrame;
  // the current term; bytes are overwritten in place as frames advance
  private final BytesRef term = new BytesRef();
  private final FST.BytesReader fstReader;
  final FieldReader fr;
  // only used by asserts: deep copy of the startTerm passed to the constructor
  private BytesRef savedStartTerm;
  // TODO: in some cases we can filter by length? eg
  // regexp foo*bar must be at least length 6 bytes
  public IntersectTermsEnum(FieldReader fr, CompiledAutomaton compiled, BytesRef startTerm) throws IOException {
    // if (DEBUG) {
    //   System.out.println("\nintEnum.init seg=" + segment + " commonSuffix=" + brToString(compiled.commonSuffixRef));
    // }
    this.fr = fr;
    runAutomaton = compiled.runAutomaton;
    compiledAutomaton = compiled;
    in = fr.parent.in.clone();
    stack = new IntersectTermsEnumFrame[5];
    for(int idx=0;idx<stack.length;idx++) {
      stack[idx] = new IntersectTermsEnumFrame(this, idx);
    }
    for(int arcIdx=0;arcIdx<arcs.length;arcIdx++) {
      arcs[arcIdx] = new FST.Arc<>();
    }
    if (fr.index == null) {
      fstReader = null;
    } else {
      fstReader = fr.index.getBytesReader();
    }
    // TODO: if the automaton is "smallish" we really
    // should use the terms index to seek at least to
    // the initial term and likely to subsequent terms
    // (or, maybe just fallback to ATE for such cases).
    // Else the seek cost of loading the frames will be
    // too costly.
    final FST.Arc<BytesRef> arc = fr.index.getFirstArc(arcs[0]);
    // Empty string prefix must have an output in the index!
    assert arc.isFinal();
    // Special pushFrame since it's the first one:
    final IntersectTermsEnumFrame f = stack[0];
    f.fp = f.fpOrig = fr.rootBlockFP;
    f.prefix = 0;
    f.setState(runAutomaton.getInitialState());
    f.arc = arc;
    f.outputPrefix = arc.output;
    f.load(fr.rootCode);
    // for assert:
    assert setSavedStartTerm(startTerm);
    currentFrame = f;
    if (startTerm != null) {
      seekToStartTerm(startTerm);
    }
  }
  // only for assert:
  private boolean setSavedStartTerm(BytesRef startTerm) {
    savedStartTerm = startTerm == null ? null : BytesRef.deepCopyOf(startTerm);
    return true;
  }
  @Override
  public TermState termState() throws IOException {
    currentFrame.decodeMetaData();
    return currentFrame.termState.clone();
  }
  // Returns the frame for ordinal {@code ord}, growing the stack if needed.
  private IntersectTermsEnumFrame getFrame(int ord) throws IOException {
    if (ord >= stack.length) {
      final IntersectTermsEnumFrame[] next = new IntersectTermsEnumFrame[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
      System.arraycopy(stack, 0, next, 0, stack.length);
      for(int stackOrd=stack.length;stackOrd<next.length;stackOrd++) {
        next[stackOrd] = new IntersectTermsEnumFrame(this, stackOrd);
      }
      stack = next;
    }
    assert stack[ord].ord == ord;
    return stack[ord];
  }
  // Returns the cached arc for ordinal {@code ord}, growing the array if needed.
  private FST.Arc<BytesRef> getArc(int ord) {
    if (ord >= arcs.length) {
      @SuppressWarnings({"rawtypes","unchecked"}) final FST.Arc<BytesRef>[] next =
          new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
      System.arraycopy(arcs, 0, next, 0, arcs.length);
      for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
        next[arcOrd] = new FST.Arc<>();
      }
      arcs = next;
    }
    return arcs[ord];
  }
  // Pushes a child frame for the sub-block the current frame just pointed at,
  // carrying the given automaton state.
  private IntersectTermsEnumFrame pushFrame(int state) throws IOException {
    final IntersectTermsEnumFrame f = getFrame(currentFrame == null ? 0 : 1+currentFrame.ord);
    f.fp = f.fpOrig = currentFrame.lastSubFP;
    f.prefix = currentFrame.prefix + currentFrame.suffix;
    // if (DEBUG) System.out.println("    pushFrame state=" + state + " prefix=" + f.prefix);
    f.setState(state);
    // Walk the arc through the index -- we only
    // "bother" with this so we can get the floor data
    // from the index and skip floor blocks when
    // possible:
    FST.Arc<BytesRef> arc = currentFrame.arc;
    int idx = currentFrame.prefix;
    assert currentFrame.suffix > 0;
    BytesRef output = currentFrame.outputPrefix;
    while (idx < f.prefix) {
      final int target = term.bytes[idx] & 0xff;
      // TODO: we could be more efficient for the next()
      // case by using current arc as starting point,
      // passed to findTargetArc
      arc = fr.index.findTargetArc(target, arc, getArc(1+idx), fstReader);
      assert arc != null;
      output = fstOutputs.add(output, arc.output);
      idx++;
    }
    f.arc = arc;
    f.outputPrefix = output;
    assert arc.isFinal();
    f.load(fstOutputs.add(output, arc.nextFinalOutput));
    return f;
  }
  @Override
  public BytesRef term() {
    return term;
  }
  @Override
  public int docFreq() throws IOException {
    //if (DEBUG) System.out.println("BTIR.docFreq");
    currentFrame.decodeMetaData();
    //if (DEBUG) System.out.println("  return " + currentFrame.termState.docFreq);
    return currentFrame.termState.docFreq;
  }
  @Override
  public long totalTermFreq() throws IOException {
    currentFrame.decodeMetaData();
    return currentFrame.termState.totalTermFreq;
  }
  @Override
  public DocsEnum docs(Bits skipDocs, DocsEnum reuse, int flags) throws IOException {
    currentFrame.decodeMetaData();
    return fr.parent.postingsReader.docs(fr.fieldInfo, currentFrame.termState, skipDocs, reuse, flags);
  }
  @Override
  public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
    if (fr.fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
      // Positions were not indexed:
      return null;
    }
    currentFrame.decodeMetaData();
    return fr.parent.postingsReader.docsAndPositions(fr.fieldInfo, currentFrame.termState, skipDocs, reuse, flags);
  }
  // Steps the run-automaton over the current frame's suffix bytes and returns
  // the resulting state; asserts the suffix is accepted along the way.
  private int getState() {
    int state = currentFrame.state;
    for(int idx=0;idx<currentFrame.suffix;idx++) {
      state = runAutomaton.step(state,  currentFrame.suffixBytes[currentFrame.startBytePos+idx] & 0xff);
      assert state != -1;
    }
    return state;
  }
  // NOTE: specialized to only doing the first-time
  // seek, but we could generalize it to allow
  // arbitrary seekExact/Ceil.  Note that this is a
  // seekFloor!
  private void seekToStartTerm(BytesRef target) throws IOException {
    //if (DEBUG) System.out.println("seek to startTerm=" + target.utf8ToString());
    assert currentFrame.ord == 0;
    if (term.length < target.length) {
      term.bytes = ArrayUtil.grow(term.bytes, target.length);
    }
    FST.Arc<BytesRef> arc = arcs[0];
    assert arc == currentFrame.arc;
    for(int idx=0;idx<=target.length;idx++) {
      while (true) {
        // Save the frame's cursor so we can rewind one entry when we overshoot:
        final int savePos = currentFrame.suffixesReader.getPosition();
        final int saveStartBytePos = currentFrame.startBytePos;
        final int saveSuffix = currentFrame.suffix;
        final long saveLastSubFP = currentFrame.lastSubFP;
        final int saveTermBlockOrd = currentFrame.termState.termBlockOrd;
        final boolean isSubBlock = currentFrame.next();
        //if (DEBUG) System.out.println("    cycle ent=" + currentFrame.nextEnt + " (of " + currentFrame.entCount + ") prefix=" + currentFrame.prefix + " suffix=" + currentFrame.suffix + " isBlock=" + isSubBlock + " firstLabel=" + (currentFrame.suffix == 0 ? "" : (currentFrame.suffixBytes[currentFrame.startBytePos])&0xff));
        term.length = currentFrame.prefix + currentFrame.suffix;
        if (term.bytes.length < term.length) {
          term.bytes = ArrayUtil.grow(term.bytes, term.length);
        }
        System.arraycopy(currentFrame.suffixBytes, currentFrame.startBytePos, term.bytes, currentFrame.prefix, currentFrame.suffix);
        if (isSubBlock && StringHelper.startsWith(target, term)) {
          // Recurse
          //if (DEBUG) System.out.println("      recurse!");
          currentFrame = pushFrame(getState());
          break;
        } else {
          final int cmp = term.compareTo(target);
          if (cmp < 0) {
            if (currentFrame.nextEnt == currentFrame.entCount) {
              if (!currentFrame.isLastInFloor) {
                //if (DEBUG) System.out.println("  load floorBlock");
                currentFrame.loadNextFloorBlock();
                continue;
              } else {
                //if (DEBUG) System.out.println("  return term=" + brToString(term));
                return;
              }
            }
            continue;
          } else if (cmp == 0) {
            //if (DEBUG) System.out.println("  return term=" + brToString(term));
            return;
          } else {
            // Fallback to prior entry: the semantics of
            // this method is that the first call to
            // next() will return the term after the
            // requested term
            currentFrame.nextEnt--;
            currentFrame.lastSubFP = saveLastSubFP;
            currentFrame.startBytePos = saveStartBytePos;
            currentFrame.suffix = saveSuffix;
            currentFrame.suffixesReader.setPosition(savePos);
            currentFrame.termState.termBlockOrd = saveTermBlockOrd;
            System.arraycopy(currentFrame.suffixBytes, currentFrame.startBytePos, term.bytes, currentFrame.prefix, currentFrame.suffix);
            term.length = currentFrame.prefix + currentFrame.suffix;
            // If the last entry was a block we don't
            // need to bother recursing and pushing to
            // the last term under it because the first
            // next() will simply skip the frame anyway
            return;
          }
        }
      }
    }
    assert false;
  }
  @Override
  public BytesRef next() throws IOException {
    // if (DEBUG) {
    //   System.out.println("\nintEnum.next seg=" + segment);
    //   System.out.println("  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
    // }
    nextTerm:
    while(true) {
      // Pop finished frames
      while (currentFrame.nextEnt == currentFrame.entCount) {
        if (!currentFrame.isLastInFloor) {
          //if (DEBUG) System.out.println("    next-floor-block");
          currentFrame.loadNextFloorBlock();
          //if (DEBUG) System.out.println("\n  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
        } else {
          //if (DEBUG) System.out.println("  pop frame");
          if (currentFrame.ord == 0) {
            // exhausted the root frame: iteration is done
            return null;
          }
          final long lastFP = currentFrame.fpOrig;
          currentFrame = stack[currentFrame.ord-1];
          assert currentFrame.lastSubFP == lastFP;
          //if (DEBUG) System.out.println("\n  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
        }
      }
      final boolean isSubBlock = currentFrame.next();
      // if (DEBUG) {
      //   final BytesRef suffixRef = new BytesRef();
      //   suffixRef.bytes = currentFrame.suffixBytes;
      //   suffixRef.offset = currentFrame.startBytePos;
      //   suffixRef.length = currentFrame.suffix;
      //   System.out.println("  " + (isSubBlock ? "sub-block" : "term") + " " + currentFrame.nextEnt + " (of " + currentFrame.entCount + ") suffix=" + brToString(suffixRef));
      // }
      if (currentFrame.suffix != 0) {
        // Skip entries whose first label is beyond the automaton's current
        // transition range; if no transition can match, force a frame pop.
        final int label = currentFrame.suffixBytes[currentFrame.startBytePos] & 0xff;
        while (label > currentFrame.curTransitionMax) {
          if (currentFrame.transitionIndex >= currentFrame.transitionCount-1) {
            // Stop processing this frame -- no further
            // matches are possible because we've moved
            // beyond what the max transition will allow
            //if (DEBUG) System.out.println("      break: trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]));
            // sneaky!  forces a pop above
            currentFrame.isLastInFloor = true;
            currentFrame.nextEnt = currentFrame.entCount;
            continue nextTerm;
          }
          currentFrame.transitionIndex++;
          compiledAutomaton.automaton.getNextTransition(currentFrame.transition);
          currentFrame.curTransitionMax = currentFrame.transition.max;
          //if (DEBUG) System.out.println("      next trans=" + currentFrame.transitions[currentFrame.transitionIndex]);
        }
      }
      // First test the common suffix, if set:
      if (compiledAutomaton.commonSuffixRef != null && !isSubBlock) {
        final int termLen = currentFrame.prefix + currentFrame.suffix;
        if (termLen < compiledAutomaton.commonSuffixRef.length) {
          // No match
          // if (DEBUG) {
          //   System.out.println("      skip: common suffix length");
          // }
          continue nextTerm;
        }
        final byte[] suffixBytes = currentFrame.suffixBytes;
        final byte[] commonSuffixBytes = compiledAutomaton.commonSuffixRef.bytes;
        final int lenInPrefix = compiledAutomaton.commonSuffixRef.length - currentFrame.suffix;
        assert compiledAutomaton.commonSuffixRef.offset == 0;
        int suffixBytesPos;
        int commonSuffixBytesPos = 0;
        if (lenInPrefix > 0) {
          // A prefix of the common suffix overlaps with
          // the suffix of the block prefix so we first
          // test whether the prefix part matches:
          final byte[] termBytes = term.bytes;
          int termBytesPos = currentFrame.prefix - lenInPrefix;
          assert termBytesPos >= 0;
          final int termBytesPosEnd = currentFrame.prefix;
          while (termBytesPos < termBytesPosEnd) {
            if (termBytes[termBytesPos++] != commonSuffixBytes[commonSuffixBytesPos++]) {
              // if (DEBUG) {
              //   System.out.println("      skip: common suffix mismatch (in prefix)");
              // }
              continue nextTerm;
            }
          }
          suffixBytesPos = currentFrame.startBytePos;
        } else {
          suffixBytesPos = currentFrame.startBytePos + currentFrame.suffix - compiledAutomaton.commonSuffixRef.length;
        }
        // Test overlapping suffix part:
        final int commonSuffixBytesPosEnd = compiledAutomaton.commonSuffixRef.length;
        while (commonSuffixBytesPos < commonSuffixBytesPosEnd) {
          if (suffixBytes[suffixBytesPos++] != commonSuffixBytes[commonSuffixBytesPos++]) {
            // if (DEBUG) {
            //   System.out.println("      skip: common suffix mismatch");
            // }
            continue nextTerm;
          }
        }
      }
      // TODO: maybe we should do the same linear test
      // that AutomatonTermsEnum does, so that if we
      // reach a part of the automaton where .* is
      // "temporarily" accepted, we just blindly .next()
      // until the limit
      // See if the term prefix matches the automaton:
      int state = currentFrame.state;
      for (int idx=0;idx<currentFrame.suffix;idx++) {
        state = runAutomaton.step(state,  currentFrame.suffixBytes[currentFrame.startBytePos+idx] & 0xff);
        if (state == -1) {
          // No match
          //System.out.println("    no s=" + state);
          continue nextTerm;
        } else {
          //System.out.println("    c s=" + state);
        }
      }
      if (isSubBlock) {
        // Match!  Recurse:
        //if (DEBUG) System.out.println("      sub-block match to state=" + state + "; recurse fp=" + currentFrame.lastSubFP);
        copyTerm();
        currentFrame = pushFrame(state);
        //if (DEBUG) System.out.println("\n  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
      } else if (runAutomaton.isAccept(state)) {
        copyTerm();
        //if (DEBUG) System.out.println("      term match to state=" + state + "; return term=" + brToString(term));
        assert savedStartTerm == null || term.compareTo(savedStartTerm) > 0: "saveStartTerm=" + savedStartTerm.utf8ToString() + " term=" + term.utf8ToString();
        return term;
      } else {
        //System.out.println("    no s=" + state);
      }
    }
  }
  // Copies the current frame's prefix+suffix into {@link #term}.
  private void copyTerm() {
    //System.out.println("      copyTerm cur.prefix=" + currentFrame.prefix + " cur.suffix=" + currentFrame.suffix + " first=" + (char) currentFrame.suffixBytes[currentFrame.startBytePos]);
    final int len = currentFrame.prefix + currentFrame.suffix;
    if (term.bytes.length < len) {
      term.bytes = ArrayUtil.grow(term.bytes, len);
    }
    System.arraycopy(currentFrame.suffixBytes, currentFrame.startBytePos, term.bytes, currentFrame.prefix, currentFrame.suffix);
    term.length = len;
  }
  @Override
  public Comparator<BytesRef> getComparator() {
    return BytesRef.getUTF8SortedAsUnicodeComparator();
  }
  @Override
  public boolean seekExact(BytesRef text) {
    throw new UnsupportedOperationException();
  }
  @Override
  public void seekExact(long ord) {
    throw new UnsupportedOperationException();
  }
  @Override
  public long ord() {
    throw new UnsupportedOperationException();
  }
  @Override
  public SeekStatus seekCeil(BytesRef text) {
    throw new UnsupportedOperationException();
  }
}
| |
package com.pa.devbox.domain.entity;
import android.content.Context;
import com.bluelinelabs.logansquare.annotation.JsonObject;
import com.pa.devbox.R;
import java.io.Serializable;
/**
* Description:
* <p>
* Author: PandaApe.
* CreatedAt: 14/1/16 21:32.
* Email: whailong2010@gmail.com
*/
@JsonObject(fieldDetectionPolicy = JsonObject.FieldDetectionPolicy.NONPRIVATE_FIELDS_AND_ACCESSORS)
public class Library implements Serializable {
    // Identity and descriptive metadata of a library entry.
    private String objectId;
    private String name;
    private String author;
    // Two descriptions -- presumably English and Chinese respectively
    // (inferred from the field names; verify against the backend schema).
    private String enDescription;
    private String cnDescription;
    private String githubAddress;
    private String license;
    private String minSdkVersion;
    // Usage counters; all default to 0 (see the constructor).
    private int collectionCount;
    private int downloadCount;
    private int viewCount;
    // Attached remote files -- presumably the library's APK and a preview image
    // (inferred from the field names; confirm against callers).
    private DevFile apk;
    private DevFile image;
    /** Creates a Library with all counters explicitly initialized to zero. */
    public Library() {
        this.collectionCount = 0;
        this.downloadCount = 0;
        this.viewCount = 0;
    }
    /** A remote file reference: a URL plus its stored metadata. */
    @JsonObject(fieldDetectionPolicy = JsonObject.FieldDetectionPolicy.NONPRIVATE_FIELDS_AND_ACCESSORS)
    public static class DevFile implements Serializable {
        private String url;
        private MetaData metaData;
        public String getUrl() {
            return url;
        }
        public void setUrl(String url) {
            this.url = url;
        }
        public MetaData getMetaData() {
            return metaData;
        }
        public void setMetaData(MetaData metaData) {
            this.metaData = metaData;
        }
        /**
         * Builds a human-readable download label with the file size in megabytes
         * (size / 1000 / 1000), rounded to two decimal places.
         * NOTE(review): the result ends with "MB)" -- presumably the
         * R.string.download resource supplies the opening "("; confirm.
         * NOTE(review): assumes getMetaData() is non-null -- TODO confirm
         * callers guarantee metadata is present.
         */
        public String getApkSizeStr(Context context) {
            double size = this.getMetaData().getSize() / 1000.0 / 1000.0;
            double sizeFinal = Math.round(size * 100) / 100.0;
            return context.getString(R.string.download) + sizeFinal + "MB)";
        }
        /** Stored file metadata: an owner id and the file size. */
        @JsonObject(fieldDetectionPolicy = JsonObject.FieldDetectionPolicy.NONPRIVATE_FIELDS_AND_ACCESSORS)
        public static class MetaData implements Serializable {
            private String owner;
            // Size used as bytes by getApkSizeStr's MB conversion.
            private long size;
            public String getOwner() {
                return owner;
            }
            public void setOwner(String owner) {
                this.owner = owner;
            }
            public long getSize() {
                return size;
            }
            public void setSize(long size) {
                this.size = size;
            }
        }
    }
    // Plain accessors below; no validation or side effects.
    public String getObjectId() {
        return objectId;
    }
    public void setObjectId(String objectId) {
        this.objectId = objectId;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getAuthor() {
        return author;
    }
    public void setAuthor(String author) {
        this.author = author;
    }
    public String getEnDescription() {
        return enDescription;
    }
    public void setEnDescription(String enDescription) {
        this.enDescription = enDescription;
    }
    public String getCnDescription() {
        return cnDescription;
    }
    public void setCnDescription(String cnDescription) {
        this.cnDescription = cnDescription;
    }
    public String getGithubAddress() {
        return githubAddress;
    }
    public void setGithubAddress(String githubAddress) {
        this.githubAddress = githubAddress;
    }
    public String getLicense() {
        return license;
    }
    public void setLicense(String license) {
        this.license = license;
    }
    public String getMinSdkVersion() {
        return minSdkVersion;
    }
    public void setMinSdkVersion(String minSdkVersion) {
        this.minSdkVersion = minSdkVersion;
    }
    public int getCollectionCount() {
        return collectionCount;
    }
    public void setCollectionCount(int collectionCount) {
        this.collectionCount = collectionCount;
    }
    public int getDownloadCount() {
        return downloadCount;
    }
    public void setDownloadCount(int downloadCount) {
        this.downloadCount = downloadCount;
    }
    public int getViewCount() {
        return viewCount;
    }
    public void setViewCount(int viewCount) {
        this.viewCount = viewCount;
    }
    public DevFile getApk() {
        return apk;
    }
    public void setApk(DevFile apk) {
        this.apk = apk;
    }
    public DevFile getImage() {
        return image;
    }
    public void setImage(DevFile image) {
        this.image = image;
    }
}
| |
/**
* Copyright (c) 2010, Ben Fortuna
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* o Neither the name of Ben Fortuna nor the names of any other contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.fortuna.ical4j.model;
import java.io.IOException;
import java.net.URISyntaxException;
import java.text.ParseException;
import junit.framework.TestSuite;
import net.fortuna.ical4j.model.parameter.Value;
import net.fortuna.ical4j.util.CompatibilityHints;
/**
* $Id: PropertyTest.java,v 1.16 2010/03/06 12:57:24 fortuna Exp $
*
* Created on 22/10/2006
*
* Unit tests for Property-specific functionality.
* @author Ben Fortuna
*/
public class PropertyTest extends AbstractPropertyTest {
    private Property property;
    private String expectedValue;
    /**
     * @param testMethod name of the test method to execute
     * @param property the property under test
     */
    public PropertyTest(String testMethod, Property property) {
        super(testMethod);
        this.property = property;
    }
    /**
     * Runs testGetValue against the given property.
     * @param property the property under test
     * @param expectedValue the value getValue() is expected to return
     */
    public PropertyTest(Property property, String expectedValue) {
        super("testGetValue");
        this.property = property;
        this.expectedValue = expectedValue;
    }
    /*
     * (non-Javadoc)
     * @see junit.framework.TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        // Reset the relaxed-validation hint so one test cannot leak it into another.
        CompatibilityHints.clearHintEnabled(CompatibilityHints.KEY_RELAXED_VALIDATION);
    }
    /**
     * Asserts the property's value equals the expected value given at construction.
     */
    public void testGetValue() {
        assertEquals(expectedValue, property.getValue());
    }
    /**
     * Test equality of properties: equals() must be reflexive, and a property
     * with a different name must compare unequal in both directions.
     */
    public void testEquals() {
        assertTrue(property.equals(property));
        // Anonymous no-op property whose only distinguishing feature is its name.
        Property notEqual = new Property("notEqual", null) {
            public String getValue() {
                return "";
            }
            public void setValue(String value) throws IOException,
                    URISyntaxException, ParseException {
            }
            public void validate() throws ValidationException {
            }
        };
        assertFalse("Properties are equal", property.equals(notEqual));
        assertFalse("Properties are equal", notEqual.equals(property));
    }
    /**
     * Test deep copy of properties: the copy starts equal, and mutating the
     * copy's parameter list must not affect the original.
     */
    public void testCopy() throws IOException, URISyntaxException,
            ParseException {
        Property copy = property.copy();
        assertEquals(property, copy);
        copy.getParameters().add(Value.BOOLEAN);
        assertFalse(property.equals(copy));
        assertFalse(copy.equals(property));
    }
    /**
     * The property under test is expected to validate cleanly.
     * @throws ValidationException
     */
    public final void testValidation() throws ValidationException {
        property.validate();
    }
    /**
     * The property must also validate cleanly with relaxed validation enabled
     * (the hint is cleared again in tearDown()).
     * @throws ValidationException
     */
    public final void testRelaxedValidation() throws ValidationException {
        CompatibilityHints.setHintEnabled(
                CompatibilityHints.KEY_RELAXED_VALIDATION, true);
        property.validate();
    }
    /**
     * The property under test is expected to FAIL validation.
     */
    public final void testValidationException() {
        try {
            property.validate();
            fail("Should throw ValidationException");
        }
        catch (ValidationException e) {
            e.printStackTrace();
        }
    }
    /**
     * An immutable property must reject both value changes and parameter additions.
     * @throws IOException
     * @throws URISyntaxException
     * @throws ParseException
     */
    public void testImmutable() throws IOException, URISyntaxException, ParseException {
        try {
            property.setValue("");
            fail("UnsupportedOperationException should be thrown");
        }
        catch (UnsupportedOperationException uoe) {
        }
        try {
            property.getParameters().add(new Parameter("name", null) {
                public String getValue() {
                    return null;
                }
            });
            fail("UnsupportedOperationException should be thrown");
        }
        catch (UnsupportedOperationException uoe) {
        }
    }
    /**
     * Builds the suite: a valid no-op property (equality, value, validation)
     * and a property whose validate() always throws (validation failure path).
     * @return
     */
    public static TestSuite suite() throws Exception {
        TestSuite suite = new TestSuite();
        Property property = new Property("name", null) {
            public String getValue() {
                return "value";
            }
            public void setValue(String value) throws IOException,
                    URISyntaxException, ParseException {
            }
            public void validate() throws ValidationException {
            }
        };
        Property invalidProperty = new Property("name", null) {
            public String getValue() {
                return "value";
            }
            public void setValue(String value) throws IOException,
                    URISyntaxException, ParseException {
            }
            public void validate() throws ValidationException {
                throw new ValidationException();
            }
        };
        suite.addTest(new PropertyTest("testEquals", property));
        suite.addTest(new PropertyTest(property, "value"));
        suite.addTest(new PropertyTest("testValidation", property));
        suite.addTest(new PropertyTest("testValidationException", invalidProperty));
        return suite;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.ckptmgr;
import java.io.IOException;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.heron.api.generated.TopologyAPI;
import org.apache.heron.common.basics.NIOLooper;
import org.apache.heron.common.basics.SysUtils;
import org.apache.heron.common.network.HeronClient;
import org.apache.heron.common.network.StatusCode;
import org.apache.heron.common.testhelpers.HeronServerTester;
import org.apache.heron.proto.ckptmgr.CheckpointManager;
import org.apache.heron.proto.system.PhysicalPlans;
import org.apache.heron.spi.statefulstorage.Checkpoint;
import org.apache.heron.spi.statefulstorage.CheckpointInfo;
import org.apache.heron.spi.statefulstorage.IStatefulStorage;
import static org.apache.heron.common.testhelpers.HeronServerTester.RESPONSE_RECEIVED_TIMEOUT;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for CheckpointManagerServer: each test starts the server with a mocked
 * {@link IStatefulStorage}, sends one request type via {@link HeronServerTester}
 * and verifies the response and/or the storage interaction.
 */
public class CheckpointManagerServerTest {
  private static final String TOPOLOGY_NAME = "topology_name";
  private static final String TOPOLOGY_ID = "topology_id";
  private static final String CHECKPOINT_ID = "checkpoint_id";
  private static final String CHECKPOINT_MANAGER_ID = "ckptmgr_id";
  // Shared request fixtures, built once in setup(). NOTE(review): the nested
  // RequestType enum constants read these fields, so the enum must not be
  // class-loaded before setup() has run -- confirm JUnit ordering guarantees this.
  private static CheckpointManager.InstanceStateCheckpoint checkpointPartition;
  private static CheckpointManager.CheckpointComponentMetadata checkpointComponentMetadata;
  private static CheckpointManager.SaveInstanceStateRequest saveInstanceStateRequest;
  private static CheckpointManager.GetInstanceStateRequest getInstanceStateRequest;
  private static CheckpointManager.CleanStatefulCheckpointRequest cleanStatefulCheckpointRequest;
  private static CheckpointManager.RegisterStMgrRequest registerStmgrRequest;
  private static CheckpointManager.RegisterTManagerRequest registerTManagerRequest;
  private static PhysicalPlans.Instance instance;
  private CheckpointManagerServer checkpointManagerServer;
  private IStatefulStorage statefulStorage;
  private HeronServerTester serverTester;
  /** Builds the protobuf fixtures shared by all tests (runs once per class). */
  @BeforeClass
  public static void setup() throws Exception {
    final String INSTANCE_ID = "instance_id";
    final String STMGR_ID = "stmgr_id";
    final int TASK_ID = 1;
    final int COMPONENT_INDEX = 1;
    final String COMPONENT_NAME = "component_name";
    final byte[] BYTES = "checkpoint manager server test bytes".getBytes();
    final String TOPO_ID = "topo_id";
    final String TOPO_NAME = "topo_name";
    PhysicalPlans.InstanceInfo info = PhysicalPlans.InstanceInfo.newBuilder()
        .setTaskId(TASK_ID)
        .setComponentIndex(COMPONENT_INDEX)
        .setComponentName(COMPONENT_NAME)
        .build();
    TopologyAPI.Topology topology = TopologyAPI.Topology.newBuilder()
        .setId(TOPO_ID)
        .setName(TOPO_NAME)
        .setState(TopologyAPI.TopologyState.RUNNING)
        .build();
    PhysicalPlans.PhysicalPlan pplan = PhysicalPlans.PhysicalPlan.newBuilder()
        .setTopology(topology)
        .build();
    instance = PhysicalPlans.Instance.newBuilder()
        .setInstanceId(INSTANCE_ID)
        .setStmgrId(STMGR_ID)
        .setInfo(info)
        .build();
    checkpointPartition = CheckpointManager.InstanceStateCheckpoint.newBuilder()
        .setCheckpointId(CHECKPOINT_ID)
        .setState(ByteString.copyFrom(BYTES))
        .build();
    checkpointComponentMetadata = CheckpointManager.CheckpointComponentMetadata.newBuilder()
        .setComponentName(COMPONENT_NAME)
        .setParallelism(2)
        .build();
    saveInstanceStateRequest = CheckpointManager.SaveInstanceStateRequest.newBuilder()
        .setInstance(instance)
        .setCheckpoint(checkpointPartition)
        .build();
    getInstanceStateRequest = CheckpointManager.GetInstanceStateRequest.newBuilder()
        .setInstance(instance)
        .setCheckpointId(CHECKPOINT_ID)
        .build();
    cleanStatefulCheckpointRequest = CheckpointManager.CleanStatefulCheckpointRequest.newBuilder()
        .setCleanAllCheckpoints(true)
        .setOldestCheckpointPreserved(CHECKPOINT_ID)
        .build();
    registerStmgrRequest = CheckpointManager.RegisterStMgrRequest.newBuilder()
        .setTopologyId(TOPOLOGY_ID)
        .setStmgrId(STMGR_ID)
        .setTopologyName(TOPOLOGY_NAME)
        .setPhysicalPlan(pplan)
        .build();
    registerTManagerRequest = CheckpointManager.RegisterTManagerRequest.newBuilder()
        .setTopologyId(TOPOLOGY_ID)
        .setTopologyName(TOPOLOGY_NAME)
        .build();
  }
  /** Fresh mock storage and server (on a free port) before every test. */
  @Before
  public void before() throws Exception {
    statefulStorage = mock(IStatefulStorage.class);
    checkpointManagerServer = new CheckpointManagerServer(TOPOLOGY_NAME, TOPOLOGY_ID,
        CHECKPOINT_MANAGER_ID, statefulStorage, new NIOLooper(), HeronServerTester.SERVER_HOST,
        SysUtils.getFreePort(), HeronServerTester.TEST_SOCKET_OPTIONS);
  }
  @After
  public void after() {
    serverTester.stop();
  }
  /**
   * Starts the server, sends the given request type and blocks until the
   * response handler has run (or RESPONSE_RECEIVED_TIMEOUT expires).
   */
  private void runTest(TestRequestHandler.RequestType requestType,
                       HeronServerTester.TestResponseHandler responseHandler)
      throws IOException, InterruptedException {
    serverTester = new HeronServerTester(checkpointManagerServer,
        new TestRequestHandler(requestType), responseHandler, RESPONSE_RECEIVED_TIMEOUT);
    serverTester.start();
  }
  @Test
  public void testSaveInstanceState() throws Exception {
    runTest(TestRequestHandler.RequestType.SAVE_INSTANCE_STATE,
        new HeronServerTester.SuccessResponseHandler(
            CheckpointManager.SaveInstanceStateResponse.class,
            new HeronServerTester.TestResponseHandler() {
              @Override
              public void handleResponse(HeronClient client, StatusCode status,
                                         Object ctx, Message response) throws Exception {
                // The server must have persisted the checkpoint and echoed back
                // the checkpoint id and instance.
                verify(statefulStorage).storeCheckpoint(
                    any(CheckpointInfo.class), any(Checkpoint.class));
                assertEquals(CHECKPOINT_ID,
                    ((CheckpointManager.SaveInstanceStateResponse) response).getCheckpointId());
                assertEquals(instance,
                    ((CheckpointManager.SaveInstanceStateResponse) response).getInstance());
              }
            })
    );
  }
  @Test
  public void testGetInstanceState() throws Exception {
    final CheckpointInfo info = new CheckpointInfo(CHECKPOINT_ID, instance);
    final Checkpoint checkpoint = new Checkpoint(checkpointPartition);
    // Storage is stubbed to return our fixture checkpoint for any info.
    when(statefulStorage.restoreCheckpoint(any(CheckpointInfo.class)))
        .thenReturn(checkpoint);
    runTest(TestRequestHandler.RequestType.GET_INSTANCE_STATE,
        new HeronServerTester.SuccessResponseHandler(
            CheckpointManager.GetInstanceStateResponse.class,
            new HeronServerTester.TestResponseHandler() {
              @Override
              public void handleResponse(HeronClient client, StatusCode status,
                                         Object ctx, Message response) throws Exception {
                verify(statefulStorage).restoreCheckpoint(info);
                assertEquals(checkpoint.getCheckpoint(),
                    ((CheckpointManager.GetInstanceStateResponse) response).getCheckpoint());
              }
            })
    );
  }
  @Test
  public void testCleanStatefulCheckpoint() throws Exception {
    runTest(TestRequestHandler.RequestType.CLEAN_STATEFUL_CHECKPOINTS,
        new HeronServerTester.SuccessResponseHandler(
            CheckpointManager.CleanStatefulCheckpointResponse.class,
            new HeronServerTester.TestResponseHandler() {
              @Override
              public void handleResponse(HeronClient client, StatusCode status,
                                         Object ctx, Message response) throws Exception {
                verify(statefulStorage).dispose(anyString(), anyBoolean());
              }
            })
    );
  }
  @Test
  public void testRegisterTManager() throws Exception {
    runTest(TestRequestHandler.RequestType.REGISTER_TMANAGER,
        new HeronServerTester.SuccessResponseHandler(
            CheckpointManager.RegisterTManagerResponse.class));
  }
  @Test
  public void testRegisterStmgr() throws Exception {
    runTest(TestRequestHandler.RequestType.REGISTER_STMGR,
        new HeronServerTester.SuccessResponseHandler(
            CheckpointManager.RegisterStMgrResponse.class));
  }
  /** Maps each request type to its fixture message and expected response descriptor. */
  private static final class TestRequestHandler implements HeronServerTester.TestRequestHandler {
    private RequestType requestType;
    // NOTE(review): these enum constants capture the static request fixtures,
    // which are only valid after setup() has populated them.
    public enum RequestType {
      SAVE_INSTANCE_STATE(saveInstanceStateRequest,
          CheckpointManager.SaveInstanceStateResponse.getDescriptor()),
      GET_INSTANCE_STATE(getInstanceStateRequest,
          CheckpointManager.GetInstanceStateResponse.getDescriptor()),
      CLEAN_STATEFUL_CHECKPOINTS(cleanStatefulCheckpointRequest,
          CheckpointManager.CleanStatefulCheckpointResponse.getDescriptor()),
      REGISTER_STMGR(registerStmgrRequest,
          CheckpointManager.RegisterStMgrResponse.getDescriptor()),
      REGISTER_TMANAGER(registerTManagerRequest,
          CheckpointManager.RegisterTManagerResponse.getDescriptor());
      private Message requestMessage;
      private Descriptors.Descriptor responseMessageDescriptor;
      RequestType(Message requestMessage, Descriptors.Descriptor responseMessageDescriptor) {
        this.requestMessage = requestMessage;
        this.responseMessageDescriptor = responseMessageDescriptor;
      }
      public Message getRequestMessage() {
        return requestMessage;
      }
      public Message.Builder newResponseBuilder() {
        return responseMessageDescriptor.toProto().newBuilderForType();
      }
    }
    private TestRequestHandler(RequestType requestType) {
      this.requestType = requestType;
    }
    @Override
    public Message getRequestMessage() {
      return requestType.getRequestMessage();
    }
    @Override
    public Message.Builder getResponseBuilder() {
      return requestType.newResponseBuilder();
    }
  }
}
| |
/*
* Copyright 2015 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.nbasearc.gcp;
import static com.navercorp.nbasearc.gcp.StatusCode.*;
import static com.navercorp.nbasearc.gcp.PhysicalConnection.State.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
/**
*
* One request, one callback.
* First request, first callback. (except timeout)
*
* +---------------------------------------------unuse-------+
* | +------------------------unuse-------+
* | | v
* NONE --connect--> CONNECTING --y--> CONNECTED --unuse--> CLOSING --> UNUSED
* ^ | |
* +--n--+ |
* +-------error-------+
*
* @author seongjoon.ahn@navercorp.com
* @author seunghoo.han@navercorp.com (maintenance)
*
*/
class PhysicalConnection {
    /** Connection lifecycle states; transitions are documented in the class javadoc above. */
    enum State {
        NONE, CONNECTING, CONNECTED, CLOSING, UNUSED
    }
    private static final Logger log = LoggerFactory.getLogger(PhysicalConnection.class);
    /** Retry interval for the close job while requests may still be in flight. */
    private final int CLOSEJOB_INTERVAL_MILLIS = 100;
    private final Bootstrap b;
    private final Pipeline pipeline;
    private final SingleThreadEventLoop eventLoop;
    private final String ip;
    private final int port;
    private final Gateway gw;
    private final int reconnectInterval;
    // True while a writeAndFlush is outstanding; guards against overlapping flushes.
    private boolean pendingFlush;
    private boolean channelConnected;
    private Channel ch;
    // Set exactly once by unuse(); doubles as the "close already requested" flag.
    private AtomicReference<SettableFuture<?>> closeFuture;
    private AtomicInteger referenceCount;
    // NOTE(review): volatile is redundant here -- the field is assigned only in the
    // constructor and AtomicReference already publishes its value safely.
    private volatile AtomicReference<State> state;
    /**
     * Creates a connection and configures its netty bootstrap; the channel is
     * not opened until connect() is called.
     */
    static PhysicalConnection create(String ip, int port, SingleThreadEventLoop eventLoop, Gateway gw,
            int reconnectInterval) {
        final PhysicalConnection pc = new PhysicalConnection(ip, port, eventLoop, gw, reconnectInterval);
        pc.b.group(eventLoop.getEventLoopGroup())
                .channel(NioSocketChannel.class)
                .option(ChannelOption.TCP_NODELAY, true)
                .option(ChannelOption.SO_REUSEADDR, true)
                .option(ChannelOption.SO_KEEPALIVE, true)
                .option(ChannelOption.SO_LINGER, 0)
                .option(ChannelOption.SO_SNDBUF, SOCKET_BUFFER)
                .option(ChannelOption.SO_RCVBUF, SOCKET_BUFFER)
                .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, CONNECT_TIMEOUT)
                .handler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel ch) throws Exception {
                        ch.pipeline().addLast(pc.new PhysicalConnectionHandler());
                    }
                });
        return pc;
    }
    private PhysicalConnection(String ip, int port, SingleThreadEventLoop eventLoop, Gateway gw,
            int reconnectInterval) {
        this.ip = ip;
        this.port = port;
        this.state = new AtomicReference<State>(NONE);
        this.eventLoop = eventLoop;
        this.gw = gw;
        this.reconnectInterval = reconnectInterval;
        this.pipeline = new Pipeline(PIPELINE_SIZE, eventLoop);
        this.b = new Bootstrap();
        this.pendingFlush = false;
        this.channelConnected = false;
        this.ch = null;
        this.closeFuture = new AtomicReference<SettableFuture<?>>();
        this.referenceCount = new AtomicInteger();
    }
    private Runnable tryCloseJob;
    /**
     * Closes the connection if it is fully quiesced (empty pipeline, no pending
     * flush, zero references). Returns true when the close actually happened.
     */
    private boolean tryClose() {
        synchronized (this) {
            if (pipeline.isEmpty() && pendingFlush == false && referenceCount.get() == 0) {
                setState(UNUSED);
                close();
                closeFuture.get().set(null);
                return true;
            }
            return false;
        }
    }
    /** Closes the channel and rebalances the gateway's active-connection count. */
    private void close() {
        if (channelConnected) {
            channelConnected = false;
            gw.decreaseActive();
            ch.close();
        }
    }
    /**
     * Starts an asynchronous connect. The returned future is set to true on
     * success or gets the failure cause; on failure a reconnect is also
     * scheduled by connectionComplete().
     */
    SettableFuture<Boolean> connect() {
        state.set(State.CONNECTING);
        final SettableFuture<Boolean> sf = SettableFuture.create();
        b.connect(ip, port).addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture cf) throws Exception {
                connectionComplete(cf);
                if (cf.cause() != null) {
                    sf.setException(cf.cause());
                } else {
                    sf.set(true);
                }
            }
        });
        return sf;
    }
    /** On failure schedules a reconnect; on success records the channel and goes CONNECTED. */
    private void connectionComplete(ChannelFuture cf) {
        if (cf.cause() != null) {
            eventLoop.getEventLoopGroup().schedule(reconnectJob, reconnectInterval, TimeUnit.MILLISECONDS);
            return;
        }
        gw.increaseActive();
        ch = cf.channel();
        channelConnected = true;
        setState(CONNECTED);
    }
    private void setState(State newState) {
        log.debug("PhysicalConnection state changed. {}:{} {}->{}", new Object[] { ip, port, state.get(), newState });
        state.set(newState);
    }
    State getState() {
        return state.get();
    }
    int increaseReferenceCount() {
        return referenceCount.incrementAndGet();
    }
    int decreaseReferenceCount() {
        assert referenceCount.get() > 0 : "refCnt is " + referenceCount;
        return referenceCount.decrementAndGet();
    }
    int getReferenceCount() {
        return referenceCount.get();
    }
    /**
     * Initiates a graceful shutdown: moves idle virtual connections off this
     * physical connection and keeps retrying tryClose() every
     * CLOSEJOB_INTERVAL_MILLIS until the connection is quiesced. Idempotent --
     * subsequent calls return the same future.
     */
    ListenableFuture<?> unuse(final Set<VirtualConnection> vcConcurrentSet) {
        if (closeFuture.compareAndSet(null, SettableFuture.create()) == false) {
            return closeFuture.get();
        }
        setState(CLOSING);
        tryCloseJob = new Runnable() {
            @Override
            public void run() {
                try {
                    for (VirtualConnection vc : vcConcurrentSet) {
                        vc.reallocIdlePc(PhysicalConnection.this);
                    }
                    if (!tryClose()) {
                        eventLoop.getEventLoopGroup().schedule(tryCloseJob, CLOSEJOB_INTERVAL_MILLIS,
                                TimeUnit.MILLISECONDS);
                    }
                } catch (Exception e) {
                    log.error("tryClose fail.", e);
                }
            }
        };
        eventLoop.getEventLoopGroup().schedule(tryCloseJob, CLOSEJOB_INTERVAL_MILLIS, TimeUnit.MILLISECONDS);
        return closeFuture.get();
    }
    boolean isSafeToAbandon() {
        return pipeline.isEmpty();
    }
    void execute(Runnable runnable) {
        eventLoop.getEventLoopGroup().execute(runnable);
    }
    /**
     * Queues a request and kicks off a flush unless one is already pending.
     * NOTE(review): the fail-fast guard rejects only when the state is neither
     * CONNECTED nor CLOSING *and* no flush is pending -- confirm that queuing
     * while pendingFlush is set during other states is intentional.
     */
    void request(Request rqst) {
        if (getState() != CONNECTED && getState() != CLOSING && pendingFlush == false) {
            assert pipeline.isEmpty() : "Pipeline is not clear.";
            rqst.getVirtualConnection().onResponse(rqst, null, CONNECTION_ERROR);
            return;
        }
        pipeline.put(rqst);
        if (rqst.getType() == Request.Type.USER) {
            eventLoop.addTimer(rqst);
        }
        if (pendingFlush) {
            return;
        }
        eventLoop.getEventLoopGroup().execute(writeAndFlushJob);
    }
    /** Aggregates queued requests into one buffer and writes it; re-armed by writeListener. */
    private final Runnable writeAndFlushJob = new Runnable() {
        @Override
        public void run() {
            /* previous write and flush not complete */
            if (pendingFlush) {
                return;
            }
            /* build buffer */
            ByteBuf out = pipeline.aggregate(ch.alloc());
            if (out == null) {
                return;
            }
            /* write and flush */
            pendingFlush = true;
            out.retain();
            ch.writeAndFlush(out).addListener(writeListener);
            out.release();
        }
    };
    /** On success keeps draining the pipeline; on failure closes and fails queued requests. */
    private final ChannelFutureListener writeListener = new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture cf) throws Exception {
            pendingFlush = false;
            if (cf.isSuccess()) {
                ch.eventLoop().execute(writeAndFlushJob);
            } else {
                log.error("Redis connection send failed", cf.cause());
                setState(CONNECTING);
                close();
                pipeline.clear(CONNECTION_ERROR);
                return;
            }
        }
    };
    /**
     * Matches one decoded response to the oldest sent request and completes it.
     * SYSTEM requests and already-timed-out requests are dropped silently.
     */
    private void processResponse(final byte[] result) {
        Request rqst = pipeline.pollSent();
        if (rqst == null) {
            log.error("Illegal internal pipeline state");
            return;
        }
        if (rqst.getType() == Request.Type.SYSTEM) {
            return;
        }
        if (rqst.getState() == Request.State.QUEUING && rqst.isTimeout()) {
            log.error("{} of request cannot get an any response.", rqst.getState());
            rqst.getVirtualConnection().onResponse(rqst, result, INTERNAL_ERROR);
            return;
        }
        if (rqst.getState() != Request.State.SENT) {
            log.error("Illegal request state " + rqst + ", tid: " + Thread.currentThread().getId());
            rqst.getVirtualConnection().onResponse(rqst, result, INTERNAL_ERROR);
            return;
        }
        if (rqst.isTimeout()) {
            return;
        } else {
            rqst.setState(Request.State.DONE);
            eventLoop.delTimer(rqst);
            rqst.getVirtualConnection().onResponse(rqst, result, OK);
        }
    }
    void removeRequestFromPipeline(Request rqst) {
        pipeline.remove(rqst);
    }
    /** Milliseconds the oldest queued request has been waiting; 0 when idle. */
    long busyCost() {
        Request rqst = pipeline.peekFirst();
        if (rqst == null) {
            return 0;
        }
        return System.currentTimeMillis() - rqst.getConnTimestamp();
    }
    /** Reconnect attempt; no-op once the connection is closing or unused. */
    private Runnable reconnectJob = new Runnable() {
        @Override
        public void run() {
            if (getState() == CLOSING || getState() == UNUSED) {
                log.error("Invalid state to reconnect. Connection is closed.");
                return;
            }
            log.error("Reconnect. {}", PhysicalConnection.this);
            b.connect(ip, port).addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture cf) throws Exception {
                    connectionComplete(cf);
                }
            });
        }
    };
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("[").append("ip: ").append(ip).append(", port: ").append(port).append("]");
        return sb.toString();
    }
    /** Netty inbound handler: accumulates bytes, decodes frames, dispatches responses. */
    private class PhysicalConnectionHandler extends ChannelInboundHandlerAdapter {
        private final ByteBufAllocator ALLOCATOR = PooledByteBufAllocator.DEFAULT;
        private final ByteBuf buf;
        private final RedisDecoder decoder;
        private final List<byte[]> msgs = new ArrayList<byte[]>();
        PhysicalConnectionHandler() {
            this.buf = ALLOCATOR.ioBuffer(SOCKET_BUFFER * 2);
            this.decoder = new RedisDecoder();
        }
        @Override
        public void channelRead(ChannelHandlerContext _ctx, Object obj) {
            {
                ByteBuf in = (ByteBuf) obj;
                int inBytes = in.readableBytes();
                buf.ensureWritable(inBytes);
                buf.writeBytes(in);
                in.release();
            }
            decoder.getFrames(buf, msgs);
            buf.discardSomeReadBytes();
            for (byte[] msg : msgs) {
                try {
                    processResponse(msg);
                } catch (Exception e) {
                    log.error("processResponse fail.", e);
                }
            }
            msgs.clear();
        }
        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
            log.error("Redis connection exception. {}", PhysicalConnection.this, cause);
            PhysicalConnection.this.setState(CONNECTING);
            PhysicalConnection.this.close();
            pipeline.clear(CONNECTION_ERROR);
        }
        @Override
        public void channelActive(ChannelHandlerContext ctx) {
        }
        @Override
        public void channelInactive(ChannelHandlerContext ctx) {
            log.info("Redis connection inactived. {}", PhysicalConnection.this);
            try {
                PhysicalConnection.this.close();
                pipeline.clear(CONNECTION_ERROR);
                buf.release();
                if (getState() != UNUSED) {
                    setState(CONNECTING);
                    log.info("Run reconnectJob channelInactive. {}", PhysicalConnection.this);
                    eventLoop.getEventLoopGroup().schedule(reconnectJob, 1000, TimeUnit.MILLISECONDS);
                }
            } catch (Exception e) {
                log.error("Internal error", e);
            }
        }
    }
    /* end of class PhysicalConnectionHandler */
    /* defaults */
    static final int CONNECT_TIMEOUT = 5 * 1000;
    static final int COMMAND_TIMEOUT = 10 * 1000;
    static final int SOCKET_BUFFER = 65536;
    static final int PIPELINE_SIZE = 4096;
}
/* end of class PhysicalConnection */
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.dispatcher.runner;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.dispatcher.Dispatcher;
import org.apache.flink.runtime.dispatcher.DispatcherBootstrapFactory;
import org.apache.flink.runtime.dispatcher.DispatcherFactory;
import org.apache.flink.runtime.dispatcher.DispatcherGateway;
import org.apache.flink.runtime.dispatcher.DispatcherId;
import org.apache.flink.runtime.dispatcher.DispatcherServices;
import org.apache.flink.runtime.dispatcher.JobManagerRunnerFactory;
import org.apache.flink.runtime.dispatcher.MemoryExecutionGraphInfoStore;
import org.apache.flink.runtime.dispatcher.PartialDispatcherServices;
import org.apache.flink.runtime.dispatcher.PartialDispatcherServicesWithJobGraphStore;
import org.apache.flink.runtime.dispatcher.SessionDispatcherFactory;
import org.apache.flink.runtime.dispatcher.SingleJobJobGraphStore;
import org.apache.flink.runtime.dispatcher.StandaloneDispatcher;
import org.apache.flink.runtime.dispatcher.TestingJobManagerRunnerFactory;
import org.apache.flink.runtime.dispatcher.VoidHistoryServerArchivist;
import org.apache.flink.runtime.heartbeat.TestingHeartbeatServices;
import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServicesBuilder;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobGraphTestUtils;
import org.apache.flink.runtime.jobmanager.JobGraphStore;
import org.apache.flink.runtime.jobmaster.TestingJobManagerRunner;
import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.runtime.rpc.TestingRpcServiceResource;
import org.apache.flink.runtime.testutils.TestingJobGraphStore;
import org.apache.flink.runtime.testutils.TestingUtils;
import org.apache.flink.runtime.util.BlobServerResource;
import org.apache.flink.runtime.util.LeaderConnectionInfo;
import org.apache.flink.runtime.util.TestingFatalErrorHandler;
import org.apache.flink.util.TestLogger;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/** Integration tests for the {@link DefaultDispatcherRunner}. */
public class DefaultDispatcherRunnerITCase extends TestLogger {

    private static final Logger LOG =
            LoggerFactory.getLogger(DefaultDispatcherRunnerITCase.class);

    private static final Time TIMEOUT = Time.seconds(10L);

    @ClassRule
    public static TestingRpcServiceResource rpcServiceResource = new TestingRpcServiceResource();

    @ClassRule public static BlobServerResource blobServerResource = new BlobServerResource();

    private JobGraph jobGraph;

    private TestingLeaderElectionService dispatcherLeaderElectionService;

    private TestingFatalErrorHandler fatalErrorHandler;

    private JobGraphStore jobGraphStore;

    private PartialDispatcherServices partialDispatcherServices;

    private DefaultDispatcherRunnerFactory dispatcherRunnerFactory;

    @Before
    public void setup() {
        dispatcherRunnerFactory =
                DefaultDispatcherRunnerFactory.createSessionRunner(
                        SessionDispatcherFactory.INSTANCE);
        jobGraph = createJobGraph();
        dispatcherLeaderElectionService = new TestingLeaderElectionService();
        fatalErrorHandler = new TestingFatalErrorHandler();
        jobGraphStore = TestingJobGraphStore.newBuilder().build();
        partialDispatcherServices =
                new PartialDispatcherServices(
                        new Configuration(),
                        new TestingHighAvailabilityServicesBuilder().build(),
                        CompletableFuture::new,
                        blobServerResource.getBlobServer(),
                        new TestingHeartbeatServices(),
                        UnregisteredMetricGroups::createUnregisteredJobManagerMetricGroup,
                        new MemoryExecutionGraphInfoStore(),
                        fatalErrorHandler,
                        VoidHistoryServerArchivist.INSTANCE,
                        null,
                        ForkJoinPool.commonPool());
    }

    @After
    public void teardown() throws Exception {
        // Rethrow any error reported to the fatal error handler during the test
        // so that it surfaces as a test failure.
        if (fatalErrorHandler != null) {
            fatalErrorHandler.rethrowError();
        }
    }

    /** A job submitted to the first leader must be visible to a subsequently elected leader. */
    @Test
    public void leaderChange_afterJobSubmission_recoversSubmittedJob() throws Exception {
        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            final UUID firstLeaderSessionId = UUID.randomUUID();
            final DispatcherGateway firstDispatcherGateway =
                    electLeaderAndRetrieveGateway(firstLeaderSessionId);

            firstDispatcherGateway.submitJob(jobGraph, TIMEOUT).get();

            dispatcherLeaderElectionService.notLeader();

            final UUID secondLeaderSessionId = UUID.randomUUID();
            final DispatcherGateway secondDispatcherGateway =
                    electLeaderAndRetrieveGateway(secondLeaderSessionId);

            final Collection<JobID> jobIds = secondDispatcherGateway.listJobs(TIMEOUT).get();

            assertThat(jobIds, contains(jobGraph.getJobID()));
        }
    }

    /**
     * Grants leadership with the given session id, waits for the leadership confirmation and
     * connects to the dispatcher which became leader.
     *
     * <p>Fix: the parameter was previously named {@code firstLeaderSessionId} although the
     * method is also invoked for subsequent leader sessions.
     *
     * @param leaderSessionId leader session id to grant
     * @return gateway of the elected dispatcher
     */
    private DispatcherGateway electLeaderAndRetrieveGateway(UUID leaderSessionId)
            throws InterruptedException, java.util.concurrent.ExecutionException {
        dispatcherLeaderElectionService.isLeader(leaderSessionId);

        final LeaderConnectionInfo leaderConnectionInfo =
                dispatcherLeaderElectionService.getConfirmationFuture().get();

        return rpcServiceResource
                .getTestingRpcService()
                .connect(
                        leaderConnectionInfo.getAddress(),
                        DispatcherId.fromUuid(leaderConnectionInfo.getLeaderSessionId()),
                        DispatcherGateway.class)
                .get();
    }

    /**
     * See FLINK-11843. This is a probabilistic test which needs to be executed several times to
     * fail.
     */
    @Test
    public void leaderChange_withBlockingJobManagerTermination_doesNotAffectNewLeader()
            throws Exception {
        final TestingJobManagerRunnerFactory jobManagerRunnerFactory =
                new TestingJobManagerRunnerFactory(1);
        dispatcherRunnerFactory =
                DefaultDispatcherRunnerFactory.createSessionRunner(
                        new TestingDispatcherFactory(jobManagerRunnerFactory));
        jobGraphStore = new SingleJobJobGraphStore(jobGraph);

        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            // initial run
            dispatcherLeaderElectionService.isLeader(UUID.randomUUID()).get();
            final TestingJobManagerRunner testingJobManagerRunner =
                    jobManagerRunnerFactory.takeCreatedJobManagerRunner();

            dispatcherLeaderElectionService.notLeader();

            LOG.info("Re-grant leadership first time.");
            dispatcherLeaderElectionService.isLeader(UUID.randomUUID());

            // give the Dispatcher some time to recover jobs
            Thread.sleep(1L);

            dispatcherLeaderElectionService.notLeader();

            LOG.info("Re-grant leadership second time.");
            final UUID leaderSessionId = UUID.randomUUID();
            final CompletableFuture<UUID> leaderFuture =
                    dispatcherLeaderElectionService.isLeader(leaderSessionId);

            // the leadership must not be confirmed while the first job manager
            // runner is still terminating
            assertThat(leaderFuture.isDone(), is(false));

            LOG.info("Complete the termination of the first job manager runner.");
            testingJobManagerRunner.completeTerminationFuture();

            assertThat(
                    leaderFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS),
                    is(equalTo(leaderSessionId)));
        }
    }

    /** Dispatcher factory which injects a custom {@link JobManagerRunnerFactory}. */
    private static class TestingDispatcherFactory implements DispatcherFactory {
        private final JobManagerRunnerFactory jobManagerRunnerFactory;

        private TestingDispatcherFactory(JobManagerRunnerFactory jobManagerRunnerFactory) {
            this.jobManagerRunnerFactory = jobManagerRunnerFactory;
        }

        @Override
        public Dispatcher createDispatcher(
                RpcService rpcService,
                DispatcherId fencingToken,
                Collection<JobGraph> recoveredJobs,
                DispatcherBootstrapFactory dispatcherBootstrapFactory,
                PartialDispatcherServicesWithJobGraphStore
                        partialDispatcherServicesWithJobGraphStore)
                throws Exception {
            return new StandaloneDispatcher(
                    rpcService,
                    fencingToken,
                    recoveredJobs,
                    dispatcherBootstrapFactory,
                    DispatcherServices.from(
                            partialDispatcherServicesWithJobGraphStore, jobManagerRunnerFactory));
        }
    }

    private static JobGraph createJobGraph() {
        return JobGraphTestUtils.singleNoOpJobGraph();
    }

    private DispatcherRunner createDispatcherRunner() throws Exception {
        return dispatcherRunnerFactory.createDispatcherRunner(
                dispatcherLeaderElectionService,
                fatalErrorHandler,
                () -> jobGraphStore,
                TestingUtils.defaultExecutor(),
                rpcServiceResource.getTestingRpcService(),
                partialDispatcherServices);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.namenode.FSImage;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.log4j.Level;
import org.junit.Test;
/**
 * A JUnit test for checking if restarting DFS preserves the
 * blocks that are part of an unclosed file.
 */
public class TestPersistBlocks {
  static {
    GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
  }

  private static final int BLOCK_SIZE = 4096;
  private static final int NUM_BLOCKS = 5;

  private static final String FILE_NAME = "/data";
  private static final Path FILE_PATH = new Path(FILE_NAME);

  // Random payloads written before and after the NameNode restart;
  // filled once by the static initializer below.
  static final byte[] DATA_BEFORE_RESTART = new byte[BLOCK_SIZE * NUM_BLOCKS];
  static final byte[] DATA_AFTER_RESTART = new byte[BLOCK_SIZE * NUM_BLOCKS];

  private static final String HADOOP_1_0_MULTIBLOCK_TGZ =
      "hadoop-1.0-multiblock-file.tgz";

  static {
    Random rand = new Random();
    rand.nextBytes(DATA_BEFORE_RESTART);
    rand.nextBytes(DATA_AFTER_RESTART);
  }

  /** check if DFS remains in proper condition after a restart */
  @Test
  public void testRestartDfs() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;

    long len = 0;
    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART);
      stream.hflush();

      // Wait for at least a few blocks to get through
      while (len <= BLOCK_SIZE) {
        FileStatus status = fs.getFileStatus(FILE_PATH);
        len = status.getLen();
        Thread.sleep(100);
      }

      // explicitly do NOT close the file.
      cluster.restartNameNode();

      // Check that the file has no less bytes than before the restart
      // This would mean that blocks were successfully persisted to the log
      FileStatus status = fs.getFileStatus(FILE_PATH);
      assertTrue("Length too short: " + status.getLen(),
          status.getLen() >= len);

      // And keep writing (ensures that leases are also persisted correctly)
      stream.write(DATA_AFTER_RESTART);
      stream.close();

      // Verify that the data showed up, both from before and after the restart.
      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        assertArrayEquals(DATA_BEFORE_RESTART, verifyBuf);

        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        assertArrayEquals(DATA_AFTER_RESTART, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }

  /** An abandoned block must not reappear in the file after a NameNode restart. */
  @Test
  public void testRestartDfsWithAbandonedBlock() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;

    long len = 0;
    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART);
      stream.hflush();

      // Wait for all of the blocks to get through
      while (len < BLOCK_SIZE * (NUM_BLOCKS - 1)) {
        FileStatus status = fs.getFileStatus(FILE_PATH);
        len = status.getLen();
        Thread.sleep(100);
      }

      // Abandon the last block
      DFSClient dfsclient = DFSClientAdapter.getDFSClient((DistributedFileSystem)fs);
      HdfsFileStatus fileStatus = dfsclient.getNamenode().getFileInfo(FILE_NAME);
      LocatedBlocks blocks = dfsclient.getNamenode().getBlockLocations(
          FILE_NAME, 0, BLOCK_SIZE * NUM_BLOCKS);
      assertEquals(NUM_BLOCKS, blocks.getLocatedBlocks().size());
      LocatedBlock b = blocks.getLastLocatedBlock();
      dfsclient.getNamenode().abandonBlock(b.getBlock(), fileStatus.getFileId(),
          FILE_NAME, dfsclient.clientName);

      // explicitly do NOT close the file.
      cluster.restartNameNode();

      // Check that the file has no less bytes than before the restart
      // This would mean that blocks were successfully persisted to the log
      FileStatus status = fs.getFileStatus(FILE_PATH);
      assertTrue("Length incorrect: " + status.getLen(),
          status.getLen() == len - BLOCK_SIZE);

      // Verify the data showed up from before restart, sans abandoned block.
      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length - BLOCK_SIZE];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        byte[] expectedBuf = new byte[DATA_BEFORE_RESTART.length - BLOCK_SIZE];
        System.arraycopy(DATA_BEFORE_RESTART, 0,
            expectedBuf, 0, expectedBuf.length);
        assertArrayEquals(expectedBuf, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }

  /** A partially-written, hflushed block must stay writable across a NameNode restart. */
  @Test
  public void testRestartWithPartialBlockHflushed() throws IOException {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;

    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      DFSUtilClient.getNNAddress(conf).getPort();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART);
      stream.write((byte)1);
      stream.hflush();

      // explicitly do NOT close the file before restarting the NN.
      cluster.restartNameNode();

      // this will fail if the final block of the file is prematurely COMPLETEd
      stream.write((byte)2);
      stream.hflush();
      stream.close();

      assertEquals(DATA_BEFORE_RESTART.length + 2,
          fs.getFileStatus(FILE_PATH).getLen());

      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length + 2];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        byte[] expectedBuf = new byte[DATA_BEFORE_RESTART.length + 2];
        System.arraycopy(DATA_BEFORE_RESTART, 0, expectedBuf, 0,
            DATA_BEFORE_RESTART.length);
        System.arraycopy(new byte[]{1, 2}, 0, expectedBuf,
            DATA_BEFORE_RESTART.length, 2);
        assertArrayEquals(expectedBuf, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }

  /** Appended data must survive a NameNode restart. */
  @Test
  public void testRestartWithAppend() throws IOException {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;

    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      DFSUtilClient.getNNAddress(conf).getPort();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART, 0, DATA_BEFORE_RESTART.length / 2);
      stream.close();
      stream = fs.append(FILE_PATH, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART, DATA_BEFORE_RESTART.length / 2,
          DATA_BEFORE_RESTART.length / 2);
      stream.close();

      assertEquals(DATA_BEFORE_RESTART.length,
          fs.getFileStatus(FILE_PATH).getLen());

      cluster.restartNameNode();

      assertEquals(DATA_BEFORE_RESTART.length,
          fs.getFileStatus(FILE_PATH).getLen());

      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        assertArrayEquals(DATA_BEFORE_RESTART, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }

  /**
   * Earlier versions of HDFS didn't persist block allocation to the edit log.
   * This makes sure that we can still load an edit log when the OP_CLOSE
   * is the opcode which adds all of the blocks. This is a regression
   * test for HDFS-2773.
   * This test uses a tarred pseudo-distributed cluster from Hadoop 1.0
   * which has a multi-block file. This is similar to the tests in
   * {@link TestDFSUpgradeFromImage} but none of those images include
   * a multi-block file.
   */
  @Test
  public void testEarlierVersionEditLog() throws Exception {
    final Configuration conf = new HdfsConfiguration();

    String tarFile = System.getProperty("test.cache.data", "build/test/cache")
        + "/" + HADOOP_1_0_MULTIBLOCK_TGZ;
    String testDir = PathUtils.getTestDirName(getClass());
    File dfsDir = new File(testDir, "image-1.0");
    if (dfsDir.exists() && !FileUtil.fullyDelete(dfsDir)) {
      throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
    }
    FileUtil.unTar(new File(tarFile), new File(testDir));

    File nameDir = new File(dfsDir, "name");
    GenericTestUtils.assertExists(nameDir);
    File dataDir = new File(dfsDir, "data");
    GenericTestUtils.assertExists(dataDir);
    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, nameDir.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, dataDir.getAbsolutePath());

    // Fix: the builder previously chained numDataNodes(0) followed by
    // numDataNodes(1); the first call was dead since the later one overrode it.
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .format(false)
        .manageDataDfsDirs(false)
        .manageNameDfsDirs(false)
        .numDataNodes(1)
        .startupOption(StartupOption.UPGRADE)
        .build();
    try {
      FileSystem fs = cluster.getFileSystem();
      Path testPath = new Path("/user/todd/4blocks");
      // Read it without caring about the actual data within - we just need
      // to make sure that the block states and locations are OK.
      DFSTestUtil.readFile(fs, testPath);

      // Ensure that we can append to it - if the blocks were in some funny
      // state we'd get some kind of issue here.
      FSDataOutputStream stm = fs.append(testPath);
      try {
        stm.write(1);
      } finally {
        IOUtils.closeStream(stm);
      }
    } finally {
      cluster.shutdown();
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.util.db;
import static org.apache.oozie.util.db.SqlStatement.*;
import static org.apache.oozie.util.db.TestSchema.TestColumns.*;
import static org.apache.oozie.util.db.TestSchema.TestTable.*;
import org.apache.oozie.service.StoreService;
import org.apache.oozie.service.Services;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Calendar;
import java.sql.PreparedStatement;
import java.sql.Timestamp;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.db.Schema.Table;
/**
 * Exercises the {@code SqlStatement} query builder end to end against a real
 * connection: INSERT, SELECT with the various WHERE conditions, UPDATE, DELETE
 * and result-set parsing.
 */
public class TestSqlStatement extends XTestCase {
    private Connection conn;
    private final String[] names = {"a", "b", "c", "d", "e"};
    // Timestamp inserted by _testInsertAndGetCountAndprepare() and read back in _testParser().
    private Timestamp currTime;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        Services services = new Services();
        services.init();
        conn = TestSchema.getDirectConnection();
        TestSchema.prepareDB(conn);
    }

    @Override
    protected void tearDown() throws Exception {
        TestSchema.dropSchema(conn);
        conn.close();
        Services.get().destroy();
        super.tearDown();
    }

    /**
     * Single entry point: the sub-tests share state (inserted rows, currTime)
     * and must run in this order.
     */
    public void testSQLStatements() throws SQLException {
        _testInsertAndGetCountAndprepare();
        _testParser();
        _testSelect();
        _testUpdate();
        _testDelete();
    }

    private void _testDelete() throws SQLException {
        ResultSet rs = getCount(TEST_TABLE).where(isEqual(TEST_LONG, 0)).prepareAndSetValues(conn).executeQuery();
        rs.next();
        assertEquals(1, rs.getInt(1));
        deleteFrom(TEST_TABLE).where(isEqual(TEST_LONG, 0)).prepareAndSetValues(conn).executeUpdate();
        rs = getCount(TEST_TABLE).where(isEqual(TEST_LONG, 0)).prepareAndSetValues(conn).executeQuery();
        rs.next();
        assertEquals(0, rs.getInt(1));
    }

    private void _testUpdate() throws SQLException {
        update(TEST_TABLE).set(TEST_STRING, "test").where(isEqual(TEST_LONG, 0)).prepareAndSetValues(conn)
                .executeUpdate();
        ResultSetReader rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(isEqual(TEST_LONG, 0))
                .prepareAndSetValues(conn).executeQuery());
        rsReader.next();
        assertEquals("test", rsReader.getString(TEST_STRING));
        rsReader.close();
        // Restore the original value so later checks against names[] still hold.
        update(TEST_TABLE).set(TEST_STRING, "a").where(isEqual(TEST_LONG, 0)).prepareAndSetValues(conn).executeUpdate();
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(isEqual(TEST_LONG, 0)).prepareAndSetValues(conn)
                .executeQuery());
        rsReader.next();
        assertEquals("a", rsReader.getString(TEST_STRING));
        rsReader.close();
    }

    /** Exercises every WHERE-clause builder and checks the expected row counts. */
    private void _testSelect() throws SQLException {
        ResultSetReader rsReader = parse(selectAllFrom(TEST_TABLE).orderBy(TEST_LONG, true).prepareAndSetValues(conn)
                .executeQuery());
        assertEquals(5, checkIdAndName(rsReader));
        rsReader = parse(selectAllFrom(TEST_TABLE).orderBy(TEST_LONG, true).limit(0, 3).prepareAndSetValues(conn)
                .executeQuery());
        assertEquals(3, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).orderBy(TEST_LONG, true).prepareAndSetValues(conn)
                .executeQuery());
        assertEquals(5, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(isLike(TEST_STRING, names[0])).orderBy(TEST_LONG,
                true).prepareAndSetValues(conn).executeQuery());
        assertEquals(1, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(isNotLike(TEST_STRING, names[4])).orderBy(
                TEST_LONG, true).prepareAndSetValues(conn).executeQuery());
        assertEquals(4, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(isEqual(TEST_LONG, 0)).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(1, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(isNotEqual(TEST_LONG, 4)).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(4, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(lessThan(TEST_LONG, 3)).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(3, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(lessThanOrEqual(TEST_LONG, 3)).orderBy(TEST_LONG,
                true).prepareAndSetValues(conn).executeQuery());
        assertEquals(4, checkIdAndName(rsReader));
        ResultSet rs = getCount(TEST_TABLE).where(greaterThan(TEST_LONG, 3)).prepareAndSetValues(conn).executeQuery();
        rs.next();
        assertEquals(1, rs.getInt(1));
        rs = getCount(TEST_TABLE).where(greaterThanOrEqual(TEST_LONG, 3)).prepareAndSetValues(conn).executeQuery();
        rs.next();
        assertEquals(2, rs.getInt(1));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(in(TEST_LONG, 0, 1, 2)).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(3, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(notIn(TEST_LONG, 3, 4)).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(3, checkIdAndName(rsReader));
        rs = getCount(TEST_TABLE).where(between(TEST_LONG, 1, 3)).prepareAndSetValues(conn).executeQuery();
        rs.next();
        assertEquals(3, rs.getInt(1));
        rs = getCount(TEST_TABLE).where(notBetween(TEST_LONG, 1, 3)).prepareAndSetValues(conn).executeQuery();
        rs.next();
        assertEquals(2, rs.getInt(1));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(
                and(isEqual(TEST_LONG, 0), isEqual(TEST_STRING, names[1]))).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(0, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(
                and(isEqual(TEST_LONG, 0), isEqual(TEST_STRING, names[0]), isEqual(TEST_BOOLEAN, false))).orderBy(
                TEST_LONG, true).prepareAndSetValues(conn).executeQuery());
        assertEquals(1, checkIdAndName(rsReader));
        rsReader = parse(selectColumns(TEST_STRING, TEST_LONG).where(
                or(isEqual(TEST_LONG, 0), isEqual(TEST_STRING, names[1]))).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(2, checkIdAndName(rsReader));
    }

    /**
     * Inserts four rows via a reusable prepared statement, batch-updates them,
     * then inserts a fifth row carrying a timestamp and a blob.
     */
    private void _testInsertAndGetCountAndprepare() throws SQLException {
        int i;
        List<Map<Object, Object>> maps = new ArrayList<Map<Object, Object>>();
        SqlStatement insert = insertInto(TEST_TABLE).value(TEST_LONG, "1").value(TEST_STRING, "2").value(TEST_BOOLEAN,
                true);
        SqlStatement update = update(TEST_TABLE).set(TEST_BOOLEAN, false).where(
                and(isEqual(TEST_LONG, "1"), isEqual(TEST_STRING, "2")));
        PreparedStatement pUpdate = update.prepare(conn);
        PreparedStatement pInsert = insert.prepare(conn);
        for (i = 0; i < 4; i++) {
            Map<Object, Object> values = new HashMap<Object, Object>();
            values.put("1", i);
            values.put("2", names[i]);
            insert.getNewStatementWithValues(values).prepare(pInsert).execute();
            maps.add(values);
        }
        ResultSet rs = getCount(TEST_TABLE).prepareAndSetValues(conn).executeQuery();
        rs.next();
        int cnt = myGetCount(TEST_TABLE);
        assertEquals(4, cnt);
        assertEquals(rs.getInt(1), cnt);
        ResultSetReader rsReader = parse(selectAllFrom(TEST_TABLE).where(isEqual(TEST_BOOLEAN, true)).orderBy(
                TEST_LONG, true).prepareAndSetValues(conn).executeQuery());
        assertEquals(4, checkIdAndName(rsReader));
        update.prepareForBatch(conn, maps, pUpdate).executeBatch();
        rsReader = parse(selectAllFrom(TEST_TABLE).where(isEqual(TEST_BOOLEAN, false)).orderBy(TEST_LONG, true)
                .prepareAndSetValues(conn).executeQuery());
        assertEquals(4, checkIdAndName(rsReader));
        currTime = new java.sql.Timestamp(Calendar.getInstance().getTimeInMillis());
        SqlStatement stmt = insertInto(TEST_TABLE).value(TEST_LONG, "1").value(TEST_STRING, "2").value(TEST_BOOLEAN,
                "3").value(TEST_TIMESTAMP, "4").value(TEST_BLOB, "5");
        Map<Object, Object> values = new HashMap<Object, Object>();
        values.put("1", i);
        values.put("2", names[i]);
        values.put("3", true);
        values.put("4", currTime);
        values.put("5", names[i].getBytes());
        PreparedStatement pstmt = stmt.prepare(conn);
        stmt.getNewStatementWithValues(values).prepare(pstmt).executeUpdate();
        assertEquals(5, myGetCount(TEST_TABLE));
    }

    /** Reads back the fifth row and verifies every typed getter of the parser. */
    private void _testParser() throws SQLException {
        ResultSetReader rsReader = parse(selectAllFrom(TEST_TABLE).where(isEqual(TEST_LONG, 4)).prepareAndSetValues(
                conn).executeQuery());
        rsReader.next();
        assertEquals(4, rsReader.getLong(TEST_LONG).longValue());
        assertEquals(names[4], rsReader.getString(TEST_STRING));
        // BUG FIX: the original assertion compared String.format("yyyyy-mm-dd hh:mm", x)
        // on both sides. That pattern contains no format specifiers, so the argument was
        // ignored and both sides were the identical constant string - the check was
        // vacuous. Compare the stored timestamp against the inserted one at minute
        // precision, which is what the "hh:mm" pattern intended.
        assertEquals(currTime.getTime() / 60000L,
                rsReader.getTimestamp(TEST_TIMESTAMP).getTime() / 60000L);
        assertEquals(true, rsReader.getBoolean(TEST_BOOLEAN).booleanValue());
        assertEquals(names[4], new String(rsReader.getByteArray(TEST_BLOB)));
        rsReader.close();
    }

    /** Counts rows in {@code table} with a plain SQL statement, bypassing SqlStatement. */
    private int myGetCount(Table table) throws SQLException {
        ResultSet rs = conn.prepareStatement("SELECT count(*) FROM " + table).executeQuery();
        rs.next();
        return rs.getInt(1);
    }

    /**
     * Walks the reader verifying each row's id/name pair matches its position
     * in {@code names}; returns the number of rows seen and closes the reader.
     */
    private int checkIdAndName(ResultSetReader rsReader) throws SQLException {
        int cnt = 0;
        while (rsReader.next()) {
            assertEquals(cnt, rsReader.getLong(TEST_LONG).longValue());
            assertEquals(names[cnt], rsReader.getString(TEST_STRING));
            cnt++;
        }
        rsReader.close();
        return cnt;
    }
}
| |
/*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.javascript.jscomp.NodeTraversal.Callback;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Collection;
import java.util.List;
/**
* Reduces the size of common function expressions.
*
* This pass will rewrite:
*
* C.prototype.getA = function() { return this.a_ };
* C.prototype.setA = function(newValue) { this.a_ = newValue };
*
* as:
*
 * C.prototype.getA = JSCompiler_get("a_");
 * C.prototype.setA = JSCompiler_set("a_");
*
* if by doing so we will save bytes, after the helper functions are
* added and renaming is done.
*
*
*/
class FunctionRewriter implements CompilerPass {
  // Compiler instance used to parse helper code, insert it, and report changes.
  private final AbstractCompiler compiler;

  // Safety margin used to avoid growing simple programs by a few bytes.
  // Selected arbitrarily.
  private static final int SAVINGS_THRESHOLD = 16;

  FunctionRewriter(AbstractCompiler compiler) {
    this.compiler = compiler;
  }
@Override
public void process(Node externs, Node root) {
  List<Reducer> reducers = ImmutableList.of(new ReturnConstantReducer(),
                                            new GetterReducer(),
                                            new SetterReducer(),
                                            new EmptyFunctionReducer(),
                                            new IdentityReducer());

  // Gather candidate reductions per reducer during a single traversal;
  // they are applied afterwards only if they pay for themselves.
  Multimap<Reducer, Reduction> gathered = HashMultimap.create();
  NodeTraversal.traverse(compiler, root,
                         new ReductionGatherer(reducers, gathered));

  for (Reducer reducer : reducers) {
    Collection<Reduction> candidates = gathered.get(reducer);
    if (candidates.isEmpty()) {
      continue;
    }

    Node helperRoot = parseHelperCode(reducer);
    if (helperRoot == null) {
      continue;
    }
    int helperCost = InlineCostEstimator.getCost(helperRoot);

    // Sum the estimated byte savings over all candidate reductions.
    int estimatedSavings = 0;
    for (Reduction candidate : candidates) {
      estimatedSavings += candidate.estimateSavings();
    }

    // Only rewrite when the total savings exceed the helper's cost plus a
    // safety margin; otherwise the helper code would grow the output.
    if (estimatedSavings > (helperCost + SAVINGS_THRESHOLD)) {
      for (Reduction candidate : candidates) {
        candidate.apply();
      }

      Node addingRoot = compiler.getNodeForCodeInsertion(null);
      addingRoot.addChildrenToFront(helperRoot);
      compiler.reportCodeChange();
    }
  }
}
/**
 * Parses the helper source required by a reducer into a detached node tree.
 *
 * @return the root of the parsed helper code, or null if parsing failed.
 */
public Node parseHelperCode(Reducer reducer) {
  String syntheticName = reducer.getClass().toString() + ":helper";
  Node parsed = compiler.parseSyntheticCode(syntheticName, reducer.getHelperSource());
  if (parsed == null) {
    return null;
  }
  return parsed.removeFirstChild();
}
/**
 * A single pending node replacement: the parent, the node to remove and the
 * node to put in its place, plus a savings estimate used to decide whether
 * the replacement is worthwhile.
 */
private class Reduction {
  private final Node parent;
  private final Node original;
  private final Node replacement;

  Reduction(Node parent, Node oldChild, Node newChild) {
    this.parent = parent;
    this.original = oldChild;
    this.replacement = newChild;
  }

  /** Swaps the original node for its replacement and notifies the compiler. */
  void apply() {
    parent.replaceChild(original, replacement);
    compiler.reportCodeChange();
  }

  /**
   * Estimated number of bytes saved by applying this reduction:
   * cost(original) - cost(replacement).
   */
  int estimateSavings() {
    int before = InlineCostEstimator.getCost(original);
    int after = InlineCostEstimator.getCost(replacement);
    return before - after;
  }
}
/**
* Gathers a list of reductions to apply later by doing an in-order
* AST traversal. If a suitable reduction is found, stop traversal
* in that branch.
*/
private class ReductionGatherer implements Callback {
private final List<Reducer> reducers;
private final Multimap<Reducer, Reduction> reductions;
/**
* @param reducers List of reducers to apply during traversal.
* @param reductions Reducer -> Reduction multimap,
* populated during traversal.
*/
ReductionGatherer(List<Reducer> reducers,
Multimap<Reducer, Reduction> reductions) {
this.reducers = reducers;
this.reductions = reductions;
}
@Override
public boolean shouldTraverse(NodeTraversal raversal,
Node node,
Node parent) {
for (Reducer reducer : reducers) {
Node replacement = reducer.reduce(node);
if (replacement != node) {
reductions.put(reducer, new Reduction(parent, node, replacement));
return false;
}
}
return true;
}
@Override
public void visit(NodeTraversal traversal, Node node, Node parent) {
}
}
/**
* Interface implemented by the strength-reduction optimizers below.
*/
abstract static class Reducer {
/**
* @return js source for helper methods used by this reduction.
*/
abstract String getHelperSource();
/**
* @return root of the reduced subtree if a reduction was applied;
* otherwise returns the node argument.
*/
abstract Node reduce(Node node);
/**
* Builds a method call based on the the given method name,
* argument and history.
*
* @param methodName Method to call.
* @param argumentNode Method argument.
* @param lineno line number in original source.
* @param charno character offset in original line.
*/
protected final Node buildCallNode(String methodName, Node argumentNode,
int lineno, int charno) {
Node call = new Node(Token.CALL, lineno, charno);
call.addChildToBack(Node.newString(Token.NAME, methodName));
if (argumentNode != null) {
call.addChildToBack(argumentNode.cloneTree());
}
return call;
}
}
/**
* Reduces return immutable constant literal methods declarations
* with calls to a constant return method factory.
*
* Example:
* a.prototype.b = function() {}
* is reduced to:
* a.prototype.b = emptyFn();
*/
private static class EmptyFunctionReducer extends Reducer {
static final String FACTORY_METHOD_NAME = "JSCompiler_emptyFn";
static final String HELPER_SOURCE =
"function " + FACTORY_METHOD_NAME + "() {" +
" return function() {}" +
"}";
@Override
public String getHelperSource() {
return HELPER_SOURCE;
}
@Override
public Node reduce(Node node) {
if (NodeUtil.isEmptyFunctionExpression(node)) {
return buildCallNode(FACTORY_METHOD_NAME, null,
node.getLineno(), node.getCharno());
} else {
return node;
}
}
}
/**
* Base class for reducers that match functions that contain a
* single return statement.
*/
abstract static class SingleReturnStatementReducer extends Reducer {
/**
* @return function return value node if function body contains a
* single return statement. Otherwise, null.
*/
protected final Node maybeGetSingleReturnRValue(Node functionNode) {
Node body = functionNode.getLastChild();
if (!body.hasOneChild()) {
return null;
}
Node statement = body.getFirstChild();
if (statement.getType() == Token.RETURN) {
return statement.getFirstChild();
}
return null;
}
}
/**
* Reduces property getter method declarations with calls to a
* getter method factory.
*
* Example:
* a.prototype.b = function(a) {return a}
* is reduced to:
* a.prototype.b = getter(a);
*/
private static class IdentityReducer extends SingleReturnStatementReducer {
static final String FACTORY_METHOD_NAME = "JSCompiler_identityFn";
static final String HELPER_SOURCE =
"function " + FACTORY_METHOD_NAME + "() {" +
" return function(" + FACTORY_METHOD_NAME + "_value) {" +
"return " + FACTORY_METHOD_NAME + "_value}" +
"}";
@Override
public String getHelperSource() {
return HELPER_SOURCE;
}
@Override
public Node reduce(Node node) {
if (!NodeUtil.isFunctionExpression(node)) {
return node;
}
if (isIdentityFunction(node)) {
return buildCallNode(FACTORY_METHOD_NAME, null,
node.getLineno(), node.getCharno());
} else {
return node;
}
}
/**
* Checks if the function matches the pattern:
* function(<value>, <rest>) {return <value>}
*
* @return Whether the function matches the pattern.
*/
private boolean isIdentityFunction(Node functionNode) {
Node argList = functionNode.getFirstChild().getNext();
Node paramNode = argList.getFirstChild();
if (paramNode == null) {
return false;
}
Node value = maybeGetSingleReturnRValue(functionNode);
if (value != null &&
NodeUtil.isName(value) &&
value.getString().equals(paramNode.getString())) {
return true;
}
return false;
}
}
/**
* Reduces return immutable constant literal methods declarations
* with calls to a constant return method factory.
*
* Example:
* a.prototype.b = function() {return 10}
* is reduced to:
* a.prototype.b = returnconst(10);
*/
private static class ReturnConstantReducer
extends SingleReturnStatementReducer {
static final String FACTORY_METHOD_NAME = "JSCompiler_returnArg";
static final String HELPER_SOURCE =
"function " + FACTORY_METHOD_NAME +
"(" + FACTORY_METHOD_NAME + "_value) {" +
" return function() {return " + FACTORY_METHOD_NAME + "_value}" +
"}";
@Override
public String getHelperSource() {
return HELPER_SOURCE;
}
@Override
public Node reduce(Node node) {
if (!NodeUtil.isFunctionExpression(node)) {
return node;
}
Node valueNode = getValueNode(node);
if (valueNode != null) {
return buildCallNode(FACTORY_METHOD_NAME, valueNode,
node.getLineno(), node.getCharno());
} else {
return node;
}
}
/**
* Checks if the function matches the pattern:
* function(<args>) {return <immutable value>}
* and returns <immutable value> if a match is found.
*
* @return the immutable value node; or null.
*/
private Node getValueNode(Node functionNode) {
Node value = maybeGetSingleReturnRValue(functionNode);
if (value != null &&
NodeUtil.isImmutableValue(value)) {
return value;
}
return null;
}
}
/**
* Reduces property getter method declarations with calls to a
* getter method factory.
*
* Example:
* a.prototype.b = function() {return this.b_}
* is reduced to:
* a.prototype.b = getter("b_");
*/
private static class GetterReducer extends SingleReturnStatementReducer {
static final String FACTORY_METHOD_NAME = "JSCompiler_get";
static final String HELPER_SOURCE =
"function " + FACTORY_METHOD_NAME + "(" +
FACTORY_METHOD_NAME + "_name) {" +
" return function() {return this[" + FACTORY_METHOD_NAME + "_name]}" +
"}";
@Override
public String getHelperSource() {
return HELPER_SOURCE;
}
@Override
public Node reduce(Node node) {
if (!NodeUtil.isFunctionExpression(node)) {
return node;
}
Node propName = getGetPropertyName(node);
if (propName != null) {
if (propName.getType() != Token.STRING) {
throw new IllegalStateException(
"Expected STRING, got " + Token.name(propName.getType()));
}
return buildCallNode(FACTORY_METHOD_NAME, propName,
node.getLineno(), node.getCharno());
} else {
return node;
}
}
/**
* Checks if the function matches the pattern:
* function(<args>) {return this.<name>}
* and returns <name> if a match is found.
*
* @return STRING node that is the rhs of a this property get; or null.
*/
private Node getGetPropertyName(Node functionNode) {
Node value = maybeGetSingleReturnRValue(functionNode);
if (value != null &&
NodeUtil.isGetProp(value) &&
NodeUtil.isThis(value.getFirstChild())) {
return value.getLastChild();
}
return null;
}
}
/**
* Reduces property setter method declarations with calls to a
* setter method factory.
*
* Example:
* a.prototype.setB = function(value) {this.b_ = value}
* reduces to:
* a.prototype.setB = getter("b_");
*/
private static class SetterReducer extends Reducer {
static final String FACTORY_METHOD_NAME = "JSCompiler_set";
static final String HELPER_SOURCE =
"function " + FACTORY_METHOD_NAME + "(" +
FACTORY_METHOD_NAME + "_name) {" +
" return function(" + FACTORY_METHOD_NAME + "_value) {" +
"this[" + FACTORY_METHOD_NAME + "_name] = " +
FACTORY_METHOD_NAME + "_value}" +
"}";
@Override
public String getHelperSource() {
return HELPER_SOURCE;
}
@Override
public Node reduce(Node node) {
if (!NodeUtil.isFunctionExpression(node)) {
return node;
}
Node propName = getSetPropertyName(node);
if (propName != null) {
if (propName.getType() != Token.STRING) {
throw new IllegalStateException(
"Expected STRING, got " + Token.name(propName.getType()));
}
return buildCallNode(FACTORY_METHOD_NAME, propName,
node.getLineno(), node.getCharno());
} else {
return node;
}
}
/**
* Checks if the function matches the pattern:
* function(<value>, <rest>) {this.<name> = <value>}
* and returns <name> if a match is found.
*
* @return STRING node that is the rhs of a this property get; or null.
*/
private Node getSetPropertyName(Node functionNode) {
Node body = functionNode.getLastChild();
if (!body.hasOneChild()) {
return null;
}
Node argList = functionNode.getFirstChild().getNext();
Node paramNode = argList.getFirstChild();
if (paramNode == null) {
return null;
}
Node statement = body.getFirstChild();
if (!NodeUtil.isExprAssign(statement)) {
return null;
}
Node assign = statement.getFirstChild();
Node lhs = assign.getFirstChild();
if (NodeUtil.isGetProp(lhs) && NodeUtil.isThis(lhs.getFirstChild())) {
Node rhs = assign.getLastChild();
if (NodeUtil.isName(rhs) &&
rhs.getString().equals(paramNode.getString())) {
Node propertyName = lhs.getLastChild();
return propertyName;
}
}
return null;
}
}
}
| |
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2017 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.geom.geom3d.mesh;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import net.imagej.ops.geom.geom3d.DefaultConvexHull3D;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
/**
* This is the triangle implementation of Facet Interface.
* A facet consists of three vertices. The triangles orientation
* is counter clock wise.
*
* @author Tim-Oliver Buchholz (University of Konstanz)
*
*/
public class TriangularFacet extends UpdateablePointSet<TriangularFacet> implements Facet {

	/**
	 * The centroid of this facet. Computed lazily; null until first use.
	 */
	private Vector3D centroid = null;

	/**
	 * The normal of this facet. Computed lazily; null until first use.
	 */
	private Vector3D normal = null;

	/**
	 * The area of this facet. Computed lazily; -1 until first use.
	 */
	private double area = -1;

	/**
	 * If a facet has points in front, they are stored in this list.
	 * This list is used in {@link DefaultConvexHull3D}.
	 */
	private List<Vertex> verticesInFront;

	/**
	 * Creates a new empty facet.
	 */
	public TriangularFacet() {
		vertices = new ArrayList<>();
		verticesInFront = new ArrayList<>();
		// Initialize neighbors here as well, matching the three-vertex
		// constructor; otherwise inherited neighbor operations would NPE.
		neighbors = new ArrayList<>();
	}

	/**
	 * Creates a new facet of three vertices.
	 * @param v0 the first vertex
	 * @param v1 the second vertex
	 * @param v2 the third vertex
	 */
	public TriangularFacet(final Vertex v0, final Vertex v1, final Vertex v2) {
		vertices = new ArrayList<>();
		vertices.add(v0);
		vertices.add(v1);
		vertices.add(v2);
		verticesInFront = new ArrayList<>();
		neighbors = new ArrayList<>();
	}

	/**
	 * Get the area of this facet. The value is computed on first access
	 * and cached.
	 * @return the area
	 */
	public double getArea() {
		if (area == -1) {
			computeArea();
		}
		return area;
	}

	/**
	 * Compute the area of this facet as half the norm of the cross
	 * product of two edge vectors.
	 */
	private void computeArea() {
		Vector3D cross = vertices.get(0).subtract(vertices.get(1))
				.crossProduct(vertices.get(2).subtract(vertices.get(0)));
		area = cross.getNorm() * 0.5;
	}

	/**
	 * Get the centroid of this facet. The value is computed on first
	 * access and cached.
	 * @return the centroid
	 */
	public Vector3D getCentroid() {
		if (centroid == null) {
			computeCentroid();
		}
		return centroid;
	}

	/**
	 * Compute the centroid of this facet as the arithmetic mean of its
	 * vertices.
	 */
	private void computeCentroid() {
		centroid = Vector3D.ZERO;
		Iterator<Vertex> it = vertices.iterator();
		while (it.hasNext()) {
			centroid = centroid.add(it.next());
		}
		centroid = centroid.scalarMultiply(1 / (double) vertices.size());
	}

	/**
	 * Get the normal of this facet. The value is computed on first
	 * access and cached. Note: the normal is not normalized.
	 * @return the normal
	 */
	public Vector3D getNormal() {
		if (normal == null) {
			computeNormal();
		}
		return normal;
	}

	/**
	 * Compute the (unnormalized) normal of this facet as
	 * (v1 - v0) x (v2 - v0).
	 */
	private void computeNormal() {
		Vector3D v0 = vertices.get(0);
		Vector3D v1 = vertices.get(1);
		Vector3D v2 = vertices.get(2);
		normal = v1.subtract(v0).crossProduct(v2.subtract(v0));
	}

	/**
	 * Computes the offset of this facet, i.e. the signed distance of the
	 * facet plane from the origin along the unit normal.
	 * @return the offset
	 */
	public double getPlaneOffset() {
		return getNormal().normalize().dotProduct(getCentroid());
	}

	/**
	 * Computes the signed distance from a point to this facet's plane.
	 * @param p the point
	 * @return the distance (positive on the normal side of the plane)
	 */
	public double distanceToPlane(final Vector3D p) {
		return getNormal().normalize().dotProduct(p) - getPlaneOffset();
	}

	/**
	 * Adds a vertex to the points in front of this facet. The vertex with
	 * the maximum distance is kept at index 0 of the list.
	 * @param v the vertex
	 * @param distanceToPlane of this vertex
	 */
	public void setVertexInFront(final Vertex v, final double distanceToPlane) {
		if (verticesInFront.isEmpty()) {
			v.setDistanceToFaceInFront(distanceToPlane);
			verticesInFront.add(v);
		} else {
			if (verticesInFront.get(0)
					.getDistanceToFaceInFront() < distanceToPlane) {
				v.setDistanceToFaceInFront(distanceToPlane);
				verticesInFront.add(0, v);
			} else {
				verticesInFront.add(v);
			}
		}
	}

	/**
	 * All points which are in front of this plane.
	 * @return points which are in front
	 */
	public List<Vertex> getVerticesInFront() {
		return verticesInFront;
	}

	/**
	 * The vertex which is in front and farthest apart of the plane.
	 * Note: the vertex is removed from the in-front list.
	 * @return vertex with maximum distance to the plane
	 */
	public Vertex getMaximumDistanceVertex() {
		return verticesInFront.remove(0);
	}

	/** @return the first vertex */
	public Vector3D getP0() {
		return vertices.get(0);
	}

	/** @return the second vertex */
	public Vector3D getP1() {
		return vertices.get(1);
	}

	/** @return the third vertex */
	public Vector3D getP2() {
		return vertices.get(2);
	}

	// NOTE(review): hashCode/equals include the lazily computed fields
	// (area, centroid, normal), so two otherwise identical facets compare
	// unequal if only one has had its cache populated — confirm this is
	// intended before using facets as hash keys.
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		long temp;
		temp = Double.doubleToLongBits(area);
		result = prime * result + (int) (temp ^ (temp >>> 32));
		result = prime * result
				+ ((centroid == null) ? 0 : centroid.hashCode());
		result = prime * result
				+ ((neighbors == null) ? 0 : neighbors.hashCode());
		result = prime * result
				+ ((normal == null) ? 0 : normal.hashCode());
		result = prime * result
				+ ((verticesInFront == null) ? 0 : verticesInFront.hashCode());
		result = prime * result
				+ ((vertices == null) ? 0 : vertices.hashCode());
		return result;
	}

	@Override
	public boolean equals(final Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		TriangularFacet other = (TriangularFacet) obj;
		if (Double.doubleToLongBits(area) != Double
				.doubleToLongBits(other.area))
			return false;
		if (centroid == null) {
			if (other.centroid != null)
				return false;
		} else if (!centroid.equals(other.centroid))
			return false;
		if (neighbors == null) {
			if (other.neighbors != null)
				return false;
		} else if (!neighbors.equals(other.neighbors))
			return false;
		if (normal == null) {
			if (other.normal != null)
				return false;
		} else if (!normal.equals(other.normal))
			return false;
		if (verticesInFront == null) {
			if (other.verticesInFront != null)
				return false;
		} else if (!verticesInFront.equals(other.verticesInFront))
			return false;
		if (vertices == null) {
			if (other.vertices != null)
				return false;
		} else if (!vertices.equals(other.vertices))
			return false;
		return true;
	}
}
| |
package com.shapesecurity.shift.es2018.scope;
import com.shapesecurity.functional.Pair;
import com.shapesecurity.functional.data.HashTable;
import com.shapesecurity.functional.data.ImmutableList;
import com.shapesecurity.functional.data.NonEmptyImmutableList;
import com.shapesecurity.shift.es2018.ast.AssignmentTargetIdentifier;
import com.shapesecurity.shift.es2018.ast.BindingIdentifier;
import com.shapesecurity.shift.es2018.ast.IdentifierExpression;
import com.shapesecurity.shift.es2018.ast.Module;
import com.shapesecurity.shift.es2018.ast.Node;
import com.shapesecurity.shift.es2018.ast.Script;
import com.shapesecurity.shift.es2018.reducer.Flattener;
import java.util.*;
public class ScopeSerializer {
	// Canonical node -> id map used to give every AST node a stable,
	// deterministic identifier for serialization and sorting.
	private Map<Node, Integer> nodeToID;

	/**
	 * Orders variables by name, then by declaration/reference counts, then
	 * element-wise by declaration kind and node id, then element-wise by
	 * reference, yielding a deterministic ordering for serialization.
	 */
	private class VariableComparator implements Comparator<Variable> {
		@Override
		public int compare(Variable v1, Variable v2) {
			int comparison = v1.name.compareTo(v2.name);
			if (comparison != 0) {
				return comparison;
			}
			comparison = v1.declarations.length - v2.declarations.length;
			if (comparison != 0) {
				return comparison;
			}
			comparison = v1.references.length - v2.references.length;
			if (comparison != 0) {
				return comparison;
			}
			for (int i = 0; i < v1.declarations.length; ++i) {
				// Fixed: previously indexed element 0 on every iteration,
				// so only the first declaration was ever compared.
				Declaration d1 = v1.declarations.index(i).fromJust();
				Declaration d2 = v2.declarations.index(i).fromJust();
				comparison = d1.kind.compareTo(d2.kind);
				if (comparison != 0) {
					return comparison;
				}
				comparison = nodeToID.get(d1.node).compareTo(nodeToID.get(d2.node));
				if (comparison != 0) {
					return comparison;
				}
			}
			ReferenceComparator refcompare = new ReferenceComparator();
			for (int i = 0; i < v1.references.length; ++i) {
				// Fixed: previously indexed element 0 on every iteration,
				// so only the first reference was ever compared.
				Reference r1 = v1.references.index(i).fromJust();
				Reference r2 = v2.references.index(i).fromJust();
				comparison = refcompare.compare(r1, r2);
				if (comparison != 0) {
					return comparison;
				}
			}
			return 0;
		}
	}

	/**
	 * Orders references by accessibility (read/write bits), then by the
	 * canonical id of the referencing node.
	 */
	private class ReferenceComparator implements Comparator<Reference> {
		@Override
		public int compare(Reference r1, Reference r2) {
			int comparison = ((r1.accessibility.isRead() ? 1 : 0) + (r1.accessibility.isWrite() ? 2 : 0))
					- ((r2.accessibility.isRead() ? 1 : 0) + (r2.accessibility.isWrite() ? 2 : 0));
			if (comparison != 0) {
				return comparison;
			}
			return nodeToID.get(r1.node).compareTo(nodeToID.get(r2.node));
		}
	}

	private ScopeSerializer(GlobalScope scope) {
		nodeToID = new IdentityHashMap<>();
		ImmutableList<Node> nodes;
		if (scope.astNode instanceof Script) {
			nodes = Flattener.flatten((Script) scope.astNode);
		} else if (scope.astNode instanceof Module) {
			nodes = Flattener.flatten((Module) scope.astNode);
		} else {
			throw new RuntimeException("GlobalScope does not correspond to script or module");
		}
		nodes.forEach(n -> nodeToID.put(n, nodeToID.size())); // this logic could go elsewhere. we just need a canonical node->id map for canonical serialization.
	}

	private ScopeSerializer(Map<Node, Integer> nodeToId) {
		this.nodeToID = nodeToId;
	}

	/** Serializes a global scope using a freshly built canonical node-id map. */
	public static String serialize(GlobalScope scope) {
		return (new ScopeSerializer(scope)).serializeScope(scope);
	}

	/** Serializes a global scope using a caller-supplied node-id map. */
	public static String serialize(GlobalScope scope, Map<Node, Integer> nodeToId) {
		return (new ScopeSerializer(nodeToId)).serializeScope(scope);
	}

	// Renders one scope (and, recursively, its children) as a JSON object.
	private String serializeScope(Scope scope) {
		String serialized = "{";
		serialized += "\"node\": \"" + serializeNode(scope.astNode) + "\"";
		serialized += ", \"type\": \"" + scope.type + "\"";
		serialized += ", \"isDynamic\": " + scope.dynamic;
		serialized += ", \"through\": " + serializeReferenceList(collectThrough(scope.through));
		serialized += ", \"variables\": " + serializeVariableList(scope.variables());
		serialized += ", \"children\": " + serializeScopeList(scope.children);
		return serialized + "}";
	}

	// Node label: "SimpleName(identifier)_id" for identifier-bearing nodes,
	// "SimpleName_id" otherwise.
	private String serializeNode(Node node) {
		if (node instanceof AssignmentTargetIdentifier) {
			return node.getClass().getSimpleName() + "(" + ((AssignmentTargetIdentifier) node).name + ")_" + nodeToID.get(node);
		} else if (node instanceof IdentifierExpression) {
			return node.getClass().getSimpleName() + "(" + ((IdentifierExpression) node).name + ")_" + nodeToID.get(node);
		} else if (node instanceof BindingIdentifier) {
			return node.getClass().getSimpleName() + "(" + ((BindingIdentifier) node).name + ")_" + nodeToID.get(node);
		} else {
			return node.getClass().getSimpleName() + "_" + nodeToID.get(node);
		}
	}

	// Flattens the "through" table into a deterministically sorted list.
	private ImmutableList<Reference> collectThrough(HashTable<String, NonEmptyImmutableList<Reference>> through) {
		List<Reference> references = new ArrayList<>();
		for (Pair<String, NonEmptyImmutableList<Reference>> entry : through.entries()) {
			for (Reference reference : entry.right()) {
				references.add(reference);
			}
		}
		Collections.sort(references, new ReferenceComparator());
		return ImmutableList.from(references);
	}

	private String serializeScopeList(ImmutableList<Scope> scopes) {
		String serialized = "[";
		for (Scope scope : scopes) {
			serialized += serializeScope(scope) + ", ";
		}
		if (scopes.length > 0) {
			serialized = serialized.substring(0, serialized.length() - 2);
		}
		serialized += "]";
		return serialized;
	}

	private String serializeReference(Reference reference) {
		String serialized = "{";
		serialized += "\"node\": \"" + serializeNode(reference.node) + "\"";
		serialized += ", \"accessibility\": \"" + reference.accessibility + "\"";
		serialized += "}";
		return serialized;
	}

	private String serializeReferenceList(ImmutableList<Reference> references) {
		String serialized = "[";
		for (Reference reference : references) {
			serialized += serializeReference(reference) + ", ";
		}
		if (references.length > 0) {
			serialized = serialized.substring(0, serialized.length() - 2);
		}
		serialized += "]";
		return serialized;
	}

	private String serializeDeclaration(Declaration declaration) {
		String serialized = "{";
		serialized += "\"node\": \"" + serializeNode(declaration.node) + "\"";
		serialized += ", \"kind\": \"" + declaration.kind + "\"";
		serialized += "}";
		return serialized;
	}

	private String serializeDeclarationList(ImmutableList<Declaration> declarations) {
		String serialized = "[";
		for (Declaration declaration : declarations) {
			serialized += serializeDeclaration(declaration) + ", ";
		}
		if (declarations.length > 0) {
			serialized = serialized.substring(0, serialized.length() - 2);
		}
		serialized += "]";
		return serialized;
	}

	private String serializeVariable(Variable variable) {
		String serialized = "{";
		serialized += "\"name\": \"" + variable.name + "\"";
		serialized += ", \"references\": " + serializeReferenceList(variable.references);
		serialized += ", \"declarations\": " + serializeDeclarationList(variable.declarations);
		serialized += "}";
		return serialized;
	}

	// Variables are sorted with VariableComparator so output is canonical.
	private String serializeVariableList(Collection<Variable> variables) {
		List<Variable> sortedVariables = new ArrayList<>(variables);
		Collections.sort(sortedVariables, new VariableComparator());
		String serialized = "[";
		for (Variable variable : sortedVariables) {
			serialized += serializeVariable(variable) + ", ";
		}
		if (sortedVariables.size() > 0) {
			serialized = serialized.substring(0, serialized.length() - 2);
		}
		serialized += "]";
		return serialized;
	}
}
| |
/*
* Copyright (c) 2012 David Green
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package castledesigner;
import java.awt.Point;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*
* @author David Green
*/
public class Castle
{
public static final int CASTLE_BOUNDRY_LENGTH = 52;
public static final int exportVersionId = 2;
private Map<BuildingType, Integer> buildingQuantities = new HashMap<BuildingType, Integer>();
private Map<BuildingType, Integer> maxBuildings;
private Map<BuildingResource, Integer> buildingResources = new HashMap<BuildingResource, Integer>();
private int totalBuildingTime = 0;
private List<String> designErrors = new ArrayList<String>();
private TileBuilding[][] gridData = new TileBuilding[CASTLE_BOUNDRY_LENGTH][CASTLE_BOUNDRY_LENGTH];
private static int lastIdUsed = 0;
public Castle()
{
maxBuildings = new HashMap<BuildingType, Integer>();
setMaxBuildings();
//Call reset here to set the Keep in the centre
resetGridData();
}
public TileBuilding getGridData(int x, int y)
{
return gridData[x][y];
}
private void setMaxBuildings()
{
maxBuildings.put(BuildingType.MOAT, 500);
maxBuildings.put(BuildingType.BALLISTA_TOWER, 10);
maxBuildings.put(BuildingType.TURRET, 10);
maxBuildings.put(BuildingType.GUARD_HOUSE, 38);
}
public void removeBuilding(TileBuilding building)
{
if (building == null) throw new IllegalArgumentException();
int id = building.getBuildingId();
for (int i=0; i<CASTLE_BOUNDRY_LENGTH; i++)
{
for (int j=0; j<CASTLE_BOUNDRY_LENGTH; j++)
{
if (gridData[i][j] != null &&
gridData[i][j].getBuildingId() == id)
{
gridData[i][j] = null;
}
}
}
updateDesignStats();
}
public void resetGridData()
{
for (int i=0; i<gridData.length; i++)
{
for (int j=0; j<gridData[i].length; j++)
{
gridData[i][j] = null;
}
}
for (int i=22; i<22 + BuildingType.KEEP.getDimension().getWidth(); i++)
{
for (int j=22; j<22 + BuildingType.KEEP.getDimension().getHeight(); j++)
{
gridData[i][j] = new TileBuilding(BuildingType.KEEP, 0);
}
}
updateDesignStats();
}
/**
* Returns a string full of lovely data representing what buildings
* were placed where.
*
* @return
*/
public String getGridDataExport()
{
StringBuffer woodenWalls = new StringBuffer();
StringBuffer stoneWalls = new StringBuffer();
StringBuffer moats = new StringBuffer();
StringBuffer killingPits = new StringBuffer();
StringBuffer structures = new StringBuffer();
Set<Integer> ids = new HashSet<Integer>();
for (int i=0; i<gridData.length; i++)
{
for (int j=0; j<gridData[i].length; j++)
{
TileBuilding building = gridData[i][j];
if (building != null)
{
if (building.getBuildingType() == BuildingType.WOODEN_WALL)
{
woodenWalls.append(Converter.intToAlphaNumeric(i));
woodenWalls.append(Converter.intToAlphaNumeric(j));
}
else if (building.getBuildingType() == BuildingType.STONE_WALL)
{
stoneWalls.append(Converter.intToAlphaNumeric(i));
stoneWalls.append(Converter.intToAlphaNumeric(j));
}
else if (building.getBuildingType() == BuildingType.MOAT)
{
moats.append(Converter.intToAlphaNumeric(i));
moats.append(Converter.intToAlphaNumeric(j));
}
else if (building.getBuildingType() == BuildingType.KILLING_PIT)
{
killingPits.append(Converter.intToAlphaNumeric(i));
killingPits.append(Converter.intToAlphaNumeric(j));
}
else
{
if (!ids.contains(building.getBuildingId()))
{
ids.add(building.getBuildingId());
structures.append(Converter.intToAlphaNumeric(building.getBuildingType().ordinal()));
structures.append(Converter.intToAlphaNumeric(i));
structures.append(Converter.intToAlphaNumeric(j));
}
}
}
}
}
StringBuilder exportStringBuffer = new StringBuilder();
exportStringBuffer.append(exportVersionId);
return exportStringBuffer.append(woodenWalls)
.append(Converter.seperator)
.append(stoneWalls)
.append(Converter.seperator)
.append(structures)
.append(Converter.seperator)
.append(moats)
.append(Converter.seperator)
.append(killingPits).toString();
}
private int getNewId()
{
return ++lastIdUsed;
}
public void importData(String text) throws UnsupportedVersionException
{
int version = Character.getNumericValue(text.charAt(0));
resetGridData();
String data = text.substring(1);
if (data == null) return;
String[] dataStrings = data.split(String.valueOf(Converter.seperator));
if (dataStrings.length > 0 && dataStrings[0] != null) importSingleTiles(BuildingType.WOODEN_WALL, dataStrings[0]);
if (dataStrings.length > 1 && dataStrings[1] != null) importSingleTiles(BuildingType.STONE_WALL, dataStrings[1]);
if (dataStrings.length > 2 && dataStrings[2] != null)
{
int i=0;
while (i < dataStrings[2].length())
{
int ordinal = Converter.alphaNumericToInt(dataStrings[2].charAt(i));
int x = Converter.alphaNumericToInt(dataStrings[2].charAt(i+1));
int y = Converter.alphaNumericToInt(dataStrings[2].charAt(i+2));
BuildingType buildingType = BuildingType.values()[ordinal];
int id = getNewId();
for (int k=x; k<x+buildingType.getDimension().getWidth(); k++)
{
for (int l=y; l<y+buildingType.getDimension().getHeight(); l++)
{
gridData[k][l] = new TileBuilding(buildingType, id);
}
}
i += 3;
}
}
if (dataStrings.length > 3 && dataStrings[3] != null) importSingleTiles(BuildingType.MOAT, dataStrings[3]);
if (dataStrings.length > 4 && dataStrings[4] != null) importSingleTiles(BuildingType.KILLING_PIT, dataStrings[4]);
updateDesignStats();
if (version > 2) throw new UnsupportedVersionException(version);
}
private void importSingleTiles(BuildingType buildingType, String dataString)
{
int i=0;
while (i < dataString.length())
{
int x = Converter.alphaNumericToInt(dataString.charAt(i));
int y = Converter.alphaNumericToInt(dataString.charAt(i+1));
gridData[x][y] = new TileBuilding(buildingType, getNewId());
i += 2;
}
}
public void addBuilding(Set<Point> buildingCoords, BuildingType buildingType)
{
int id = getNewId();
for (Point p : buildingCoords)
{
gridData[p.x][p.y] = new TileBuilding(buildingType, id);
}
updateDesignStats();
}
public List<String> getDesignErrors()
{
return designErrors;
}
private void updateDesignStats()
{
designErrors.clear();
totalBuildingTime = 0;
for (BuildingResource buildingResource : BuildingResource.values())
{
buildingResources.put(buildingResource, 0);
}
int[] buildingCounts = new int[BuildingType.values().length];
for (int i=0; i<gridData.length; i++)
{
for (int j=0; j<gridData[i].length; j++)
{
TileBuilding building = gridData[i][j];
if (building != null) buildingCounts[building.getBuildingType().ordinal()]++;
}
}
for (BuildingType buildingType : BuildingType.values())
{
int numberOfBuildings = calculateNumberOfBuildings(buildingType, buildingCounts[buildingType.ordinal()]);
buildingQuantities.put(buildingType, numberOfBuildings);
for (BuildingResource buildingResource : BuildingResource.values())
{
int cumulativeCost = buildingResources.get(buildingResource) + buildingType.getCost(buildingResource) * numberOfBuildings;
buildingResources.put(buildingResource, cumulativeCost);
}
totalBuildingTime += buildingType.getBuildTime() * numberOfBuildings;
}
for (BuildingType buildingType : maxBuildings.keySet())
{
String designError = validateNumberOfBuildings(
buildingType,
buildingCounts[buildingType.ordinal()],
maxBuildings.get(buildingType));
if (designError != null) designErrors.add(designError);
}
}
/**
 * Returns the number of complete buildings of the given type, as computed by
 * the last stats update. Throws NPE (auto-unboxing) if stats were never updated
 * for this type — presumably updateDesignStats() always runs first; verify.
 */
public int getNumberOfBuildings(BuildingType buildingType)
{
    return buildingQuantities.get(buildingType);
}
/** Returns the cumulative cost of the whole design for the given resource. */
public int getTotalResource(BuildingResource resource)
{
    return buildingResources.get(resource);
}
/** Returns the total build time of the design, summed over all building types. */
public int getTotalBuildingTime()
{
    return totalBuildingTime;
}
/**
 * Converts a raw tile count into a number of complete buildings: one building
 * per width*height tiles of its footprint; leftover tiles do not count.
 */
private int calculateNumberOfBuildings(BuildingType buildingType, int numberOfTiles)
{
    final int tilesPerBuilding = buildingType.getDimension().width * buildingType.getDimension().height;
    return numberOfTiles / tilesPerBuilding;
}
/**
 * Checks the building count for one type against its maximum.
 *
 * @return an error message if the count exceeds the maximum, otherwise null
 */
private String validateNumberOfBuildings(BuildingType buildingType, int numberOfTiles, int maxNumberOfBuildings)
{
    final int numberOfBuildings = calculateNumberOfBuildings(buildingType, numberOfTiles);
    if (numberOfBuildings <= maxNumberOfBuildings) return null;
    return "Error: " + numberOfBuildings + " " + buildingType + "s (" + maxNumberOfBuildings + " max)";
}
/**
 * Returns the configured maximum number of buildings for the given type,
 * or 0 if no maximum has been configured.
 */
public int getMaximumNumberOfBuildings(BuildingType buildingType)
{
    final Integer configuredMax = maxBuildings.get(buildingType);
    return configuredMax != null ? configuredMax : 0;
}
}
| |
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.config;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
/**
* Tests for {@link ConfigSetting}.
*/
public class ConfigSettingTest extends BuildViewTestCase {

  /**
   * Writes a canonical two-flag config_setting ('compilation_mode' and 'stamp')
   * to pkg/BUILD for tests that need a simple matching target.
   */
  private void writeSimpleExample() throws Exception {
    scratch.file("pkg/BUILD",
        "config_setting(",
        " name = 'foo',",
        " values = {",
        " 'compilation_mode': 'dbg',",
        " 'stamp': '1',",
        " })");
  }

  /** Returns the {@link ConfigMatchingProvider} of the configured target at the given label. */
  private ConfigMatchingProvider getConfigMatchingProvider(String label) throws Exception {
    return getConfiguredTarget(label).getProvider(ConfigMatchingProvider.class);
  }

  /**
   * Returns the default value of the given flag.
   */
  private Object flagDefault(String option) {
    Class<? extends OptionsBase> optionsClass = getTargetConfiguration().getOptionClass(option);
    // Parse no arguments at all so only the declared defaults are populated.
    return OptionsParser.newOptionsParser(optionsClass)
        .getOptions(optionsClass)
        .asMap()
        .get(option);
  }

  /**
   * Tests that a config_setting only matches build configurations where *all* of
   * its flag specifications match.
   */
  public void testMatchingCriteria() throws Exception {
    writeSimpleExample();
    // First flag mismatches:
    useConfiguration("-c", "opt", "--stamp");
    assertFalse(getConfigMatchingProvider("//pkg:foo").matches());
    // Second flag mismatches:
    useConfiguration("-c", "dbg", "--nostamp");
    assertFalse(getConfigMatchingProvider("//pkg:foo").matches());
    // Both flags mismatch:
    useConfiguration("-c", "opt", "--nostamp");
    assertFalse(getConfigMatchingProvider("//pkg:foo").matches());
    // Both flags match:
    useConfiguration("-c", "dbg", "--stamp");
    assertTrue(getConfigMatchingProvider("//pkg:foo").matches());
  }

  /**
   * Tests that {@link ConfigMatchingProvider#label} is correct.
   */
  public void testLabel() throws Exception {
    writeSimpleExample();
    assertEquals(
        Label.parseAbsolute("//pkg:foo"),
        getConfigMatchingProvider("//pkg:foo").label());
  }

  /**
   * Tests that rule analysis fails on unknown options.
   */
  public void testUnknownOption() throws Exception {
    checkError("foo", "badoption",
        "unknown option: 'not_an_option'",
        "config_setting(",
        " name = 'badoption',",
        " values = {'not_an_option': 'bar'})");
  }

  /**
   * Tests that rule analysis fails on invalid option values.
   */
  public void testInvalidOptionValue() throws Exception {
    checkError("foo", "badvalue",
        "Not a valid compilation mode: 'baz'",
        "config_setting(",
        " name = 'badvalue',",
        " values = {'compilation_mode': 'baz'})");
  }

  /**
   * Tests that when the first option is valid but the config_setting doesn't match,
   * remaining options are still validity-checked.
   */
  public void testInvalidOptionFartherDown() throws Exception {
    checkError("foo", "badoption",
        "unknown option: 'not_an_option'",
        "config_setting(",
        " name = 'badoption',",
        " values = {",
        " 'compilation_mode': 'opt',",
        " 'not_an_option': 'bar',",
        " })");
  }

  /**
   * Tests that *some* settings must be specified.
   */
  public void testEmptySettings() throws Exception {
    checkError("foo", "empty",
        "//foo:empty: no settings specified",
        "config_setting(",
        " name = 'empty',",
        " values = {})");
  }

  /**
   * Tests {@link BuildConfiguration.Fragment#lateBoundOptionDefaults} options (options
   * that take alternative defaults from what's specified in {@link
   * com.google.devtools.common.options.Option#defaultValue}).
   */
  public void testLateBoundOptionDefaults() throws Exception {
    String crosstoolCpuDefault = (String) getTargetConfiguration().getOptionValue("cpu");
    String crosstoolCompilerDefault = (String) getTargetConfiguration().getOptionValue("compiler");
    scratch.file("test/BUILD",
        "config_setting(",
        " name = 'match',",
        " values = {",
        " 'cpu': '" + crosstoolCpuDefault + "',",
        " 'compiler': '" + crosstoolCompilerDefault + "',", //'gcc-4.4.0',",
        " })");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
    // The parsed flag defaults are null while the configuration supplies real values,
    // proving the defaults really are late-bound.
    assertNull(flagDefault("cpu"));
    assertNotNull(crosstoolCpuDefault);
    assertNull(flagDefault("compiler"));
    assertNotNull(crosstoolCompilerDefault);
  }

  /**
   * Tests matching on multi-value attributes with key=value entries (e.g. --define).
   */
  public void testMultiValueDict() throws Exception {
    scratch.file("test/BUILD",
        "config_setting(",
        " name = 'match',",
        " values = {",
        " 'define': 'foo=bar',",
        " })");
    useConfiguration("");
    assertFalse(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--define", "foo=bar");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--define", "foo=baz");
    assertFalse(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--define", "foo=bar", "--define", "bar=baz");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
    // The last occurrence of a key wins:
    useConfiguration("--define", "foo=bar", "--define", "bar=baz", "--define", "foo=nope");
    assertFalse(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--define", "foo=nope", "--define", "bar=baz", "--define", "foo=bar");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
  }

  /**
   * Tests matching on multi-value attributes with primitive values.
   */
  public void testMultiValueList() throws Exception {
    scratch.file("test/BUILD",
        "config_setting(",
        " name = 'match',",
        " values = {",
        " 'copt': '-Dfoo',",
        " })");
    useConfiguration("");
    assertFalse(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--copt", "-Dfoo");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--copt", "-Dbar");
    assertFalse(getConfigMatchingProvider("//test:match").matches());
    // Matching any list entry suffices, regardless of position:
    useConfiguration("--copt", "-Dfoo", "--copt", "-Dbar");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
    useConfiguration("--copt", "-Dbar", "--copt", "-Dfoo");
    assertTrue(getConfigMatchingProvider("//test:match").matches());
  }
}
| |
/*
* Project Scelight
*
* Copyright (c) 2013 Andras Belicza <iczaaa@gmail.com>
*
* This software is the property of Andras Belicza.
* Copying, modifying, distributing, refactoring without the author's permission
* is prohibited and protected by Law.
*/
package hu.scelight.action;
import hu.scelight.Consts;
import hu.scelight.Scelight;
import hu.scelight.gui.dialog.AboutDialog;
import hu.scelight.gui.dialog.JobsDialog;
import hu.scelight.gui.dialog.dlregfile.DlRegistrationFileDialog;
import hu.scelight.gui.icon.Icons;
import hu.scelight.gui.overlaycard.ApmOverlay;
import hu.scelight.gui.overlaycard.LastGameInfoOverlay;
import hu.scelight.gui.page.about.logs.LogsPage;
import hu.scelight.gui.page.about.sysinfo.SysInfoPage;
import hu.scelight.gui.setting.SettingsDialog;
import hu.scelight.gui.setting.SettingsGui;
import hu.scelight.service.env.Env;
import hu.scelight.service.sc2reg.Sc2RegMonitor;
import hu.scelight.service.settings.Settings;
import hu.scelight.service.sound.Sounds;
import hu.scelight.util.Utils;
import hu.scelight.util.gui.GuiUtils;
import hu.scelight.util.job.UpdateCheckerJob;
import hu.scelight.util.sc2rep.LatestRepSearchCoordinatorJob;
import hu.scelight.util.sc2rep.RepUtils;
import hu.sllauncher.action.UrlAction;
import hu.sllauncher.action.XAction;
import hu.sllauncher.gui.comp.XFileChooser;
import hu.sllauncher.gui.page.LicensePage;
import hu.sllauncher.gui.page.NewsPage;
import hu.sllauncher.gui.page.RegInfoPage;
import hu.sllauncher.service.env.OpSys;
import hu.sllauncher.service.sound.Sound;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import javax.swing.KeyStroke;
import javax.swing.Timer;
/**
* Actions collection.
*
* <p>
* <i>Implementation note:</i> This is an interface instead of a class because constants defined in an interface are implicitly <code>public</code>,
* <code>static</code> and <code>final</code>, and so these keywords can be omitted!
* </p>
*
* @author Andras Belicza
*/
@SuppressWarnings( "serial" )
public interface Actions {

	// FILE MENU ACTIONS

	/** Open a Replay. */
	XAction OPEN_REPLAY = new XAction( KeyStroke.getKeyStroke( KeyEvent.VK_O, InputEvent.CTRL_MASK ), Icons.F_CHART, "_Open a Replay..." ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			final XFileChooser fileChooser = RepUtils.createReplayChooserDialog( false );
			if ( XFileChooser.APPROVE_OPTION != fileChooser.showOpenDialog( Env.MAIN_FRAME ) )
				return;
			Env.MAIN_FRAME.repAnalyzersPage.newRepAnalyzerPage( fileChooser.getSelectedPath(), true );
		}
	};

	/** Quick open last replay. Searches the monitored replay folders asynchronously. */
	XAction QUICK_OPEN_LAST_REP = new XAction( KeyStroke.getKeyStroke( KeyEvent.VK_O, InputEvent.CTRL_MASK | InputEvent.SHIFT_MASK ),
		Icons.F_CHART_ARROW, "Quick Open _Last Replay" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			final LatestRepSearchCoordinatorJob lj = new LatestRepSearchCoordinatorJob();
			// Callback runs on the EDT once the search job completes.
			lj.setEdtCallback( new Runnable() {
				@Override
				public void run() {
					if ( lj.getLatestReplay() != null )
						Env.MAIN_FRAME.repAnalyzersPage.newRepAnalyzerPage( lj.getLatestReplay(), true );
					else
						GuiUtils.showWarningMsg( "Could not find any replays in the monitored Replay Folders!" );
				}
			} );
			lj.start();
		}
	};

	/** Open Settings dialog. */
	XAction SETTINGS = new XAction( KeyStroke.getKeyStroke( KeyEvent.VK_P, InputEvent.CTRL_MASK ), Icons.F_GEAR, "_Settings..." ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			new SettingsDialog( Env.MAIN_FRAME, Settings.NODE_UI ).setVisible( true );
		}
	};

	/** Start SC2. Launches the platform-specific game executable from the configured install folder. */
	XAction START_SC2 = new XAction( Icons.SC2_ICON, "S_tart StarCraft II" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			Utils.launchExternalApp(
				Env.APP_SETTINGS.get( Settings.SC2_INSTALL_FOLDER ).resolve(
					Env.OS == OpSys.OS_X ? "StarCraft II.app/Contents/MacOS/StarCraft II" : "StarCraft II.exe" ),
				null, " ", "Is your StarCraft II install folder setting correct?",
				SettingsGui.createSettingLink( Settings.NODE_SC2_INSTALLATION ) );
		}
	};

	/** Start SC2 editor. */
	XAction START_SC2_EDITOR = new XAction( Icons.SC2_EDITOR, "Start StarCraft II _Editor" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			Utils.launchExternalApp(
				Env.APP_SETTINGS.get( Settings.SC2_INSTALL_FOLDER ).resolve(
					Env.OS == OpSys.OS_X ? "StarCraft II Editor.app/Contents/MacOS/StarCraft II Editor"
						: "StarCraft II Editor.exe" ), null, " ",
				"Is your StarCraft II install folder setting correct?", SettingsGui
					.createSettingLink( Settings.NODE_SC2_INSTALLATION ) );
		}
	};

	/** Clear Recent Replays. */
	XAction CLEAR_RECENT_REPS = new XAction( Icons.F_CROSS, "_Clear Recent Replays" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			Env.APP_SETTINGS.reset( Settings.RECENT_REPLAYS_BEAN );
		}
	};

	/** Exit. */
	XAction EXIT = new XAction( KeyStroke.getKeyStroke( KeyEvent.VK_X, InputEvent.ALT_MASK ), Icons.F_DOOR_OPEN_IN, "E_xit" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			Scelight.INSTANCE().exit();
		}
	};

	/** Visit home page. */
	XAction HOME_PAGE = new UrlAction( "Visit home page", Consts.URL_HOME_PAGE );

	// VIEW MENU ACTIONS

	/** Show Tool bar. */
	XAction SHOW_MAIN_TOOL_BAR = new BoolSettingAction( Icons.F_UI_TOOLBAR, "Show Main _Tool Bar", Settings.SHOW_MAIN_TOOL_BAR, Env.APP_SETTINGS );

	/** Show Status bar. */
	XAction SHOW_STATUS_BAR = new BoolSettingAction( Icons.F_UI_STATUS_BAR, "Show _Status Bar", Settings.SHOW_STATUS_BAR, Env.APP_SETTINGS );

	// TOOLS MENU ACTIONS

	/** Show / Hide Live APM Overlay. Acts as a toggle: opens the overlay if absent, closes it if present. */
	XAction LIVE_APM_OVERLAY = new XAction( Icons.F_COUNTER, "Live _APM Overlay" ) {
		{
			// APM monitoring requires SC2 registry monitoring support on this platform.
			if ( !Sc2RegMonitor.isSupported() )
				setEnabled( false );
		}

		@Override
		public void actionPerformed( final ActionEvent event ) {
			final ApmOverlay apmOverlay = ApmOverlay.INSTANCE();
			if ( apmOverlay == null )
				new ApmOverlay();
			else
				apmOverlay.close();
		}
	};

	/** Show / Hide Last Game Info Overlay. Acts as a toggle like LIVE_APM_OVERLAY. */
	XAction LAST_GAME_INFO_OVERLAY = new XAction( Icons.F_INFORMATION_BALLOON, "Last Game _Info Overlay" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			final LastGameInfoOverlay lastGameInfoOverlay = LastGameInfoOverlay.INSTANCE();
			if ( lastGameInfoOverlay != null ) {
				lastGameInfoOverlay.close();
				return;
			}
			final LatestRepSearchCoordinatorJob lj = new LatestRepSearchCoordinatorJob();
			lj.setEdtCallback( new Runnable() {
				@Override
				public void run() {
					if ( lj.getLatestReplay() != null )
						new LastGameInfoOverlay( lj.getLatestReplay() );
					else
						GuiUtils.showWarningMsg( "Could not find any replays in the monitored Replay Folders!" );
				}
			} );
			lj.start();
		}
	};

	/** Download Registration file. */
	XAction DL_REGISTRATION_FILE = new XAction( Icons.F_LICENCE_KEY, "_Download Registration File..." ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			if ( Env.REG_MANAGER.isOk() ) {
				Sound.play( Sounds.THANKS_FOR_REGISTERING );
				GuiUtils.showInfoMsg( "You already have a valid registration file.", " ",
					GuiUtils.linkForAction( "View Registration info...", ABOUT_REGINFO ) );
				return;
			}
			new DlRegistrationFileDialog();
		}
	};

	/** Running jobs. */
	XAction RUNNING_JOBS = new XAction( KeyStroke.getKeyStroke( KeyEvent.VK_ESCAPE, InputEvent.SHIFT_MASK ), Icons.F_HARD_HAT,
		"Running _Jobs..." ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			new JobsDialog();
		}
	};

	// WINDOW MENU ACTIONS

	/** Minimizes the main frame to the system tray. */
	XAction MINIMIZE_TO_TRAY = new XAction( Icons.F_ARROW_STOP_270, "_Minimize to Tray" ) {
		{
			// Disable minimization by default, we'll enable it if everything goes well
			// and tray icon gets installed successfully.
			setEnabled( false );
		}

		@Override
		public void actionPerformed( final ActionEvent event ) {
			// Only minimize if enabled (if system tray is supported)
			if ( isEnabled() )
				Env.MAIN_FRAME.setVisible( false );
		}
	};

	/** Restores the default window position. */
	XAction RESTORE_DEF_WIN_POSITION = new XAction( Icons.F_APPLICATION_SUB, "_Restore default position" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			Env.MAIN_FRAME.restoreDefaultWinPos();
		}
	};

	/** Minimize to Tray on Close. */
	XAction MINIMIZE_TO_TRAY_ON_CLOSE = new BoolSettingAction( Icons.F_APPLICATION_DOCK_TAB, "Minimize to Tray on _Close",
		Settings.MINIMIZE_TO_TRAY_ON_CLOSE, Env.APP_SETTINGS );

	/** Start Minimized to Tray. */
	XAction START_MINIMIZED_TO_TRAY = new BoolSettingAction( Icons.F_APPLICATION_DOCK_TAB, "_Start Minimized to Tray", Settings.START_MINIMIZED_TO_TRAY,
		Env.APP_SETTINGS );

	/** Maximize Window on Start. */
	XAction MAXIMIZE_WINDOW_ON_START = new BoolSettingAction( Icons.F_APPLICATION_RESIZE, "Ma_ximize Window on Start", Settings.MAXIMIZE_WINDOW_ON_START,
		Env.APP_SETTINGS );

	/** Restore last Window position on Start. */
	XAction RESTORE_LAST_WIN_POS_ON_START = new BoolSettingAction( Icons.F_APPLICATION_SUB, "Restore last Window _position on Start",
		Settings.RESTORE_LAST_WIN_POS_ON_START, Env.APP_SETTINGS );

	// HELP MENU ACTIONS

	/** News About page. */
	XAction ABOUT_NEWS = new AboutPageAction( new NewsPage() );

	/** License About page. */
	XAction ABOUT_LICENSE = new AboutPageAction( new LicensePage() );

	/** Registration info About page. */
	XAction ABOUT_REGINFO = new AboutPageAction( new RegInfoPage() );

	/** Logs About page. */
	XAction ABOUT_LOGS = new AboutPageAction( new LogsPage() );

	/** System info About page. */
	XAction ABOUT_SYS_INFO = new AboutPageAction( new SysInfoPage() );

	/** Check for updates. Triggered both manually and by a periodic Swing {@link Timer}. */
	XAction CHECK_FOR_UPDATES = new XAction( Icons.F_ARROW_CIRCLE_DOUBLE, "_Check for Updates" ) {
		/** Number of scheduled checks. */
		private int scheduledCount;

		{
			setEnabled( Env.ONLINE );
		}

		@Override
		public void actionPerformed( final ActionEvent event ) {
			if ( !isEnabled() )
				return;
			if ( event != null && event.getSource() instanceof Timer ) {
				// Computer might be up after a sleep, so the elapsed time since start is
				// not good for Scheduled checks, that's why we count the scheduled checks.
				final int mins = ( Consts.SCHEDULED_UPDATE_CHECK_DELAY / 60_000 ) * scheduledCount++;
				new UpdateCheckerJob( UpdateCheckerJob.Type.SCHEDULED, mins ).start();
			} else {
				new UpdateCheckerJob( UpdateCheckerJob.Type.MANUAL,
					(int) ( ( System.currentTimeMillis() - Env.APPLICATION_START ) / Utils.MS_IN_MIN ) ).start();
			}
		}
	};

	/** About. */
	XAction ABOUT_ABOUT = new XAction( KeyStroke.getKeyStroke( KeyEvent.VK_F1, 0 ), Icons.F_INFORMATION, "_About " + Consts.APP_NAME + "..." ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			new AboutDialog( null );
		}
	};

	// MISCELLANEOUS, NON-MENU ACTIONS

	/** Shows and activates the main frame (from minimized to tray state). */
	XAction SHOW_MAIN_FRAME = new XAction( Icons.F_APPLICATION_RESIZE, "_Show Main Window" ) {
		@Override
		public void actionPerformed( final ActionEvent event ) {
			Env.MAIN_FRAME.restoreMainFrame();
		}
	};
}
| |
package org.sasm;
/**
 * Defines the JVM opcodes, access flags and array type codes. This interface
 * does not define all the JVM opcodes because some opcodes are automatically
 * handled. For example, the xLOAD and xSTORE opcodes are automatically replaced
 * by xLOAD_n and xSTORE_n opcodes when possible. The xLOAD_n and xSTORE_n
 * opcodes are therefore not defined in this interface. Likewise for LDC,
 * automatically replaced by LDC_W or LDC2_W when necessary, WIDE, GOTO_W and
 * JSR_W.
 *
 * @author Eric Bruneton
 * @author Eugene Kuleshov
 */
public interface Opcodes {

    // ASM API versions
    int ASM4 = 4 << 16;
    int ASM5 = 5 << 16;

    // class file versions (major version in the low 16 bits)
    int V1_1 = 3 << 16 | 45;
    int V1_2 = 46;
    int V1_3 = 47;
    int V1_4 = 48;
    int V1_5 = 49;
    int V1_6 = 50;
    int V1_7 = 51;
    int V1_8 = 52;

    // access flags
    int ACC_PUBLIC = 0x0001; // class, field, method
    int ACC_PRIVATE = 0x0002; // class, field, method
    int ACC_PROTECTED = 0x0004; // class, field, method
    int ACC_STATIC = 0x0008; // field, method
    int ACC_FINAL = 0x0010; // class, field, method, parameter
    int ACC_SUPER = 0x0020; // class
    int ACC_SYNCHRONIZED = 0x0020; // method (same bit as ACC_SUPER, different context)
    int ACC_VOLATILE = 0x0040; // field
    int ACC_BRIDGE = 0x0040; // method (same bit as ACC_VOLATILE, different context)
    int ACC_VARARGS = 0x0080; // method
    int ACC_TRANSIENT = 0x0080; // field (same bit as ACC_VARARGS, different context)
    int ACC_NATIVE = 0x0100; // method
    int ACC_INTERFACE = 0x0200; // class
    int ACC_ABSTRACT = 0x0400; // class, method
    int ACC_STRICT = 0x0800; // method
    int ACC_SYNTHETIC = 0x1000; // class, field, method, parameter
    int ACC_ANNOTATION = 0x2000; // class
    int ACC_ENUM = 0x4000; // class, field, inner class
    int ACC_MANDATED = 0x8000; // parameter

    // ASM specific pseudo access flags
    int ACC_DEPRECATED = 0x20000; // class, field, method

    // types for NEWARRAY
    int T_BOOLEAN = 4;
    int T_CHAR = 5;
    int T_FLOAT = 6;
    int T_DOUBLE = 7;
    int T_BYTE = 8;
    int T_SHORT = 9;
    int T_INT = 10;
    int T_LONG = 11;

    // tags for Handle
    int H_GETFIELD = 1;
    int H_GETSTATIC = 2;
    int H_PUTFIELD = 3;
    int H_PUTSTATIC = 4;
    int H_INVOKEVIRTUAL = 5;
    int H_INVOKESTATIC = 6;
    int H_INVOKESPECIAL = 7;
    int H_NEWINVOKESPECIAL = 8;
    int H_INVOKEINTERFACE = 9;

    // stack map frame types

    /**
     * Represents an expanded frame. See {@link ClassReader#EXPAND_FRAMES}.
     */
    int F_NEW = -1;

    /**
     * Represents a compressed frame with complete frame data.
     */
    int F_FULL = 0;

    /**
     * Represents a compressed frame where locals are the same as the locals of
     * the previous frame, except that additional 1-3 locals are defined, and
     * with an empty stack.
     */
    int F_APPEND = 1;

    /**
     * Represents a compressed frame where locals are the same as the locals of
     * the previous frame, except that the last 1-3 locals are absent and with
     * an empty stack.
     */
    int F_CHOP = 2;

    /**
     * Represents a compressed frame with exactly the same locals as the
     * previous frame and with an empty stack.
     */
    int F_SAME = 3;

    /**
     * Represents a compressed frame with exactly the same locals as the
     * previous frame and with a single value on the stack.
     */
    int F_SAME1 = 4;

    // verification type tags used in stack map frames
    Integer TOP = 0;
    Integer INTEGER = 1;
    Integer FLOAT = 2;
    Integer DOUBLE = 3;
    Integer LONG = 4;
    Integer NULL = 5;
    Integer UNINITIALIZED_THIS = 6;

    // opcodes // visit method (- = idem)
    int NOP = 0; // visitInsn
    int ACONST_NULL = 1; // -
    int ICONST_M1 = 2; // -
    int ICONST_0 = 3; // -
    int ICONST_1 = 4; // -
    int ICONST_2 = 5; // -
    int ICONST_3 = 6; // -
    int ICONST_4 = 7; // -
    int ICONST_5 = 8; // -
    int LCONST_0 = 9; // -
    int LCONST_1 = 10; // -
    int FCONST_0 = 11; // -
    int FCONST_1 = 12; // -
    int FCONST_2 = 13; // -
    int DCONST_0 = 14; // -
    int DCONST_1 = 15; // -
    int BIPUSH = 16; // visitIntInsn
    int SIPUSH = 17; // -
    int LDC = 18; // visitLdcInsn
    // int LDC_W = 19; // -
    // int LDC2_W = 20; // -
    int ILOAD = 21; // visitVarInsn
    int LLOAD = 22; // -
    int FLOAD = 23; // -
    int DLOAD = 24; // -
    int ALOAD = 25; // -
    // int ILOAD_0 = 26; // -
    // int ILOAD_1 = 27; // -
    // int ILOAD_2 = 28; // -
    // int ILOAD_3 = 29; // -
    // int LLOAD_0 = 30; // -
    // int LLOAD_1 = 31; // -
    // int LLOAD_2 = 32; // -
    // int LLOAD_3 = 33; // -
    // int FLOAD_0 = 34; // -
    // int FLOAD_1 = 35; // -
    // int FLOAD_2 = 36; // -
    // int FLOAD_3 = 37; // -
    // int DLOAD_0 = 38; // -
    // int DLOAD_1 = 39; // -
    // int DLOAD_2 = 40; // -
    // int DLOAD_3 = 41; // -
    // int ALOAD_0 = 42; // -
    // int ALOAD_1 = 43; // -
    // int ALOAD_2 = 44; // -
    // int ALOAD_3 = 45; // -
    int IALOAD = 46; // visitInsn
    int LALOAD = 47; // -
    int FALOAD = 48; // -
    int DALOAD = 49; // -
    int AALOAD = 50; // -
    int BALOAD = 51; // -
    int CALOAD = 52; // -
    int SALOAD = 53; // -
    int ISTORE = 54; // visitVarInsn
    int LSTORE = 55; // -
    int FSTORE = 56; // -
    int DSTORE = 57; // -
    int ASTORE = 58; // -
    // int ISTORE_0 = 59; // -
    // int ISTORE_1 = 60; // -
    // int ISTORE_2 = 61; // -
    // int ISTORE_3 = 62; // -
    // int LSTORE_0 = 63; // -
    // int LSTORE_1 = 64; // -
    // int LSTORE_2 = 65; // -
    // int LSTORE_3 = 66; // -
    // int FSTORE_0 = 67; // -
    // int FSTORE_1 = 68; // -
    // int FSTORE_2 = 69; // -
    // int FSTORE_3 = 70; // -
    // int DSTORE_0 = 71; // -
    // int DSTORE_1 = 72; // -
    // int DSTORE_2 = 73; // -
    // int DSTORE_3 = 74; // -
    // int ASTORE_0 = 75; // -
    // int ASTORE_1 = 76; // -
    // int ASTORE_2 = 77; // -
    // int ASTORE_3 = 78; // -
    int IASTORE = 79; // visitInsn
    int LASTORE = 80; // -
    int FASTORE = 81; // -
    int DASTORE = 82; // -
    int AASTORE = 83; // -
    int BASTORE = 84; // -
    int CASTORE = 85; // -
    int SASTORE = 86; // -
    int POP = 87; // -
    int POP2 = 88; // -
    int DUP = 89; // -
    int DUP_X1 = 90; // -
    int DUP_X2 = 91; // -
    int DUP2 = 92; // -
    int DUP2_X1 = 93; // -
    int DUP2_X2 = 94; // -
    int SWAP = 95; // -
    int IADD = 96; // -
    int LADD = 97; // -
    int FADD = 98; // -
    int DADD = 99; // -
    int ISUB = 100; // -
    int LSUB = 101; // -
    int FSUB = 102; // -
    int DSUB = 103; // -
    int IMUL = 104; // -
    int LMUL = 105; // -
    int FMUL = 106; // -
    int DMUL = 107; // -
    int IDIV = 108; // -
    int LDIV = 109; // -
    int FDIV = 110; // -
    int DDIV = 111; // -
    int IREM = 112; // -
    int LREM = 113; // -
    int FREM = 114; // -
    int DREM = 115; // -
    int INEG = 116; // -
    int LNEG = 117; // -
    int FNEG = 118; // -
    int DNEG = 119; // -
    int ISHL = 120; // -
    int LSHL = 121; // -
    int ISHR = 122; // -
    int LSHR = 123; // -
    int IUSHR = 124; // -
    int LUSHR = 125; // -
    int IAND = 126; // -
    int LAND = 127; // -
    int IOR = 128; // -
    int LOR = 129; // -
    int IXOR = 130; // -
    int LXOR = 131; // -
    int IINC = 132; // visitIincInsn
    int I2L = 133; // visitInsn
    int I2F = 134; // -
    int I2D = 135; // -
    int L2I = 136; // -
    int L2F = 137; // -
    int L2D = 138; // -
    int F2I = 139; // -
    int F2L = 140; // -
    int F2D = 141; // -
    int D2I = 142; // -
    int D2L = 143; // -
    int D2F = 144; // -
    int I2B = 145; // -
    int I2C = 146; // -
    int I2S = 147; // -
    int LCMP = 148; // -
    int FCMPL = 149; // -
    int FCMPG = 150; // -
    int DCMPL = 151; // -
    int DCMPG = 152; // -
    int IFEQ = 153; // visitJumpInsn
    int IFNE = 154; // -
    int IFLT = 155; // -
    int IFGE = 156; // -
    int IFGT = 157; // -
    int IFLE = 158; // -
    int IF_ICMPEQ = 159; // -
    int IF_ICMPNE = 160; // -
    int IF_ICMPLT = 161; // -
    int IF_ICMPGE = 162; // -
    int IF_ICMPGT = 163; // -
    int IF_ICMPLE = 164; // -
    int IF_ACMPEQ = 165; // -
    int IF_ACMPNE = 166; // -
    int GOTO = 167; // -
    int JSR = 168; // -
    int RET = 169; // visitVarInsn
    int TABLESWITCH = 170; // visitTableSwitchInsn
    int LOOKUPSWITCH = 171; // visitLookupSwitchInsn
    int IRETURN = 172; // visitInsn
    int LRETURN = 173; // -
    int FRETURN = 174; // -
    int DRETURN = 175; // -
    int ARETURN = 176; // -
    int RETURN = 177; // -
    int GETSTATIC = 178; // visitFieldInsn
    int PUTSTATIC = 179; // -
    int GETFIELD = 180; // -
    int PUTFIELD = 181; // -
    int INVOKEVIRTUAL = 182; // visitMethodInsn
    int INVOKESPECIAL = 183; // -
    int INVOKESTATIC = 184; // -
    int INVOKEINTERFACE = 185; // -
    int INVOKEDYNAMIC = 186; // visitInvokeDynamicInsn
    int NEW = 187; // visitTypeInsn
    int NEWARRAY = 188; // visitIntInsn
    int ANEWARRAY = 189; // visitTypeInsn
    int ARRAYLENGTH = 190; // visitInsn
    int ATHROW = 191; // -
    int CHECKCAST = 192; // visitTypeInsn
    int INSTANCEOF = 193; // -
    int MONITORENTER = 194; // visitInsn
    int MONITOREXIT = 195; // -
    // int WIDE = 196; // NOT VISITED
    int MULTIANEWARRAY = 197; // visitMultiANewArrayInsn
    int IFNULL = 198; // visitJumpInsn
    int IFNONNULL = 199; // -
    // int GOTO_W = 200; // -
    // int JSR_W = 201; // -
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skylarkbuildapi;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.skylarkbuildapi.StarlarkConfigApi.BuildSettingApi;
import com.google.devtools.build.lib.skylarkinterface.Param;
import com.google.devtools.build.lib.skylarkinterface.ParamType;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkConstructor;
import com.google.devtools.build.lib.skylarkinterface.SkylarkGlobalLibrary;
import com.google.devtools.build.lib.syntax.BaseFunction;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.FuncallExpression;
import com.google.devtools.build.lib.syntax.Runtime.NoneType;
import com.google.devtools.build.lib.syntax.SkylarkDict;
import com.google.devtools.build.lib.syntax.SkylarkList;
import com.google.devtools.build.lib.syntax.StarlarkFunction;
import com.google.devtools.build.lib.syntax.StarlarkSemantics.FlagIdentifier;
import com.google.devtools.build.lib.syntax.StarlarkThread;
/**
* Interface for a global Skylark library containing rule-related helper and registration functions.
*/
@SkylarkGlobalLibrary
public interface SkylarkRuleFunctionsApi<FileApiT extends FileApi> {
  /** Shared doc text for the {@code provides} parameter of rule/aspect registration functions. */
  static final String PROVIDES_DOC =
      "A list of providers that the implementation function must return."
          + ""
          + "<p>It is an error if the implementation function omits any of the types of providers "
          + "listed here from its return value. However, the implementation function may return "
          + "additional providers not listed here."
          + ""
          + "<p>Each element of the list is an <code>*Info</code> object returned by "
          + "<a href='globals.html#provider'><code>provider()</code></a>, except that a legacy "
          + "provider is represented by its string name instead.";
  /**
   * Starlark {@code provider()} built-in: creates a declared-provider constructor, optionally
   * restricted to a fixed set of fields. The user-facing contract is in the
   * {@code @SkylarkCallable} doc strings below.
   */
  @SkylarkCallable(
      name = "provider",
      doc =
          "Creates a declared provider 'constructor'. The return value of this "
              + "function can be used to create \"struct-like\" values. Example:<br>"
              + "<pre class=\"language-python\">data = provider()\n"
              + "d = data(x = 2, y = 3)\n"
              + "print(d.x + d.y) # prints 5</pre>",
      parameters = {
        @Param(
            name = "doc",
            type = String.class,
            named = true,
            defaultValue = "''",
            doc =
                "A description of the provider that can be extracted by documentation generating"
                    + " tools."),
        @Param(
            name = "fields",
            doc =
                "If specified, restricts the set of allowed fields. <br>Possible values are:<ul> "
                    + " <li> list of fields:<br> <pre"
                    + " class=\"language-python\">provider(fields = ['a', 'b'])</pre><p> <li>"
                    + " dictionary field name -> documentation:<br> <pre"
                    + " class=\"language-python\">provider(\n"
                    + " fields = { 'a' : 'Documentation for a', 'b' : 'Documentation for b'"
                    + " })</pre></ul>All fields are optional.",
            allowedTypes = {
              @ParamType(type = SkylarkList.class, generic1 = String.class),
              @ParamType(type = SkylarkDict.class)
            },
            noneable = true,
            named = true,
            positional = false,
            defaultValue = "None")
      },
      useLocation = true)
  public ProviderApi provider(String doc, Object fields, Location location) throws EvalException;
@SkylarkCallable(
name = "rule",
doc =
"Creates a new rule, which can be called from a BUILD file or a macro to create targets."
+ "<p>Rules must be assigned to global variables in a .bzl file; the name of the "
+ "global variable is the rule's name."
+ "<p>Test rules are required to have a name ending in <code>_test</code>, while all "
+ "other rules must not have this suffix. (This restriction applies only to rules, "
+ "not to their targets.)",
parameters = {
@Param(
name = "implementation",
type = StarlarkFunction.class,
named = true,
doc =
"the Starlark function implementing this rule, must have exactly one parameter: "
+ "<a href=\"ctx.html\">ctx</a>. The function is called during the analysis "
+ "phase for each instance of the rule. It can access the attributes "
+ "provided by the user. It must create actions to generate all the declared "
+ "outputs."),
@Param(
name = "test",
type = Boolean.class,
named = true,
defaultValue = "False",
doc =
"Whether this rule is a test rule, that is, whether it may be the subject of a "
+ "<code>blaze test</code> command. All test rules are automatically "
+ "considered <a href='#rule.executable'>executable</a>; it is unnecessary "
+ "(and discouraged) to explicitly set <code>executable = True</code> for a "
+ "test rule. See the "
+ "<a href='../rules.$DOC_EXT#executable-rules-and-test-rules'>Rules page</a> "
+ "for more information."),
@Param(
name = "attrs",
type = SkylarkDict.class,
named = true,
noneable = true,
defaultValue = "None",
doc =
"dictionary to declare all the attributes of the rule. It maps from an attribute "
+ "name to an attribute object (see <a href=\"attr.html\">attr</a> module). "
+ "Attributes starting with <code>_</code> are private, and can be used to "
+ "add an implicit dependency on a label. The attribute <code>name</code> is "
+ "implicitly added and must not be specified. Attributes "
+ "<code>visibility</code>, <code>deprecation</code>, <code>tags</code>, "
+ "<code>testonly</code>, and <code>features</code> are implicitly added and "
+ "cannot be overridden."),
// TODO(bazel-team): need to give the types of these builtin attributes
@Param(
name = "outputs",
allowedTypes = {
@ParamType(type = SkylarkDict.class),
@ParamType(type = NoneType.class),
@ParamType(type = StarlarkFunction.class) // a function defined in Starlark
},
named = true,
callbackEnabled = true,
noneable = true,
defaultValue = "None",
valueWhenDisabled = "None",
disableWithFlag = FlagIdentifier.INCOMPATIBLE_NO_RULE_OUTPUTS_PARAM,
doc =
"This parameter has been deprecated. Migrate rules to use"
+ " <code>OutputGroupInfo</code> or <code>attr.output</code> instead. <p>A"
+ " schema for defining predeclared outputs. Unlike <a"
+ " href='attr.html#output'><code>output</code></a> and <a"
+ " href='attr.html#output_list'><code>output_list</code></a> attributes, the"
+ " user does not specify the labels for these files. See the <a"
+ " href='../rules.$DOC_EXT#files'>Rules page</a> for more on predeclared"
+ " outputs.<p>The value of this argument is either a dictionary or a callback"
+ " function that produces a dictionary. The callback works similar to"
+ " computed dependency attributes: The function's parameter names are matched"
+ " against the rule's attributes, so for example if you pass <code>outputs ="
+ " _my_func</code> with the definition <code>def _my_func(srcs, deps):"
+ " ...</code>, the function has access to the attributes <code>srcs</code>"
+ " and <code>deps</code>. Whether the dictionary is specified directly or via"
+ " a function, it is interpreted as follows.<p>Each entry in the dictionary"
+ " creates a predeclared output where the key is an identifier and the value"
+ " is a string template that determines the output's label. In the rule's"
+ " implementation function, the identifier becomes the field name used to"
+ " access the output's <a href='File.html'><code>File</code></a> in <a"
+ " href='ctx.html#outputs'><code>ctx.outputs</code></a>. The output's label"
+ " has the same package as the rule, and the part after the package is"
+ " produced by substituting each placeholder of the form"
+ " <code>\"%{ATTR}\"</code> with a string formed from the value of the"
+ " attribute <code>ATTR</code>:<ul><li>String-typed attributes are"
+ " substituted verbatim.<li>Label-typed attributes become the part of the"
+ " label after the package, minus the file extension. For example, the label"
+ " <code>\"//pkg:a/b.c\"</code> becomes <code>\"a/b\"</code>.<li>Output-typed"
+ " attributes become the part of the label after the package, including the"
+ " file extension (for the above example, <code>\"a/b.c\"</code>).<li>All"
+ " list-typed attributes (for example, <code>attr.label_list</code>) used in"
+ " placeholders are required to have <i>exactly one element</i>. Their"
+ " conversion is the same as their non-list version"
+ " (<code>attr.label</code>).<li>Other attribute types may not appear in"
+ " placeholders.<li>The special non-attribute placeholders"
+ " <code>%{dirname}</code> and <code>%{basename}</code> expand to those parts"
+ " of the rule's label, excluding its package. For example, in"
+ " <code>\"//pkg:a/b.c\"</code>, the dirname is <code>a</code> and the"
+ " basename is <code>b.c</code>.</ul><p>In practice, the most common"
+ " substitution placeholder is <code>\"%{name}\"</code>. For example, for a"
+ " target named \"foo\", the outputs dict <code>{\"bin\":"
+ " \"%{name}.exe\"}</code> predeclares an output named <code>foo.exe</code>"
+ " that is accessible in the implementation function as"
+ " <code>ctx.outputs.bin</code>."),
@Param(
name = "executable",
type = Boolean.class,
named = true,
defaultValue = "False",
doc =
"Whether this rule is considered executable, that is, whether it may be the "
+ "subject of a <code>blaze run</code> command. See the "
+ "<a href='../rules.$DOC_EXT#executable-rules-and-test-rules'>Rules page</a> "
+ "for more information."),
@Param(
name = "output_to_genfiles",
type = Boolean.class,
named = true,
defaultValue = "False",
doc =
"If true, the files will be generated in the genfiles directory instead of the "
+ "bin directory. Unless you need it for compatibility with existing rules "
+ "(e.g. when generating header files for C++), do not set this flag."),
@Param(
name = "fragments",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"List of names of configuration fragments that the rule requires "
+ "in target configuration."),
@Param(
name = "host_fragments",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"List of names of configuration fragments that the rule requires "
+ "in host configuration."),
@Param(
name = "_skylark_testable",
type = Boolean.class,
named = true,
defaultValue = "False",
doc =
"<i>(Experimental)</i><br/><br/>"
+ "If true, this rule will expose its actions for inspection by rules that "
+ "depend on it via an <a href=\"globals.html#Actions\">Actions</a> "
+ "provider. The provider is also available to the rule itself by calling "
+ "<a href=\"ctx.html#created_actions\">ctx.created_actions()</a>."
+ "<br/><br/>"
+ "This should only be used for testing the analysis-time behavior of "
+ "Starlark rules. This flag may be removed in the future."),
@Param(
name = "toolchains",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"<i>(Experimental)</i><br/><br/>"
+ "If set, the set of toolchains this rule requires. Toolchains will be "
+ "found by checking the current platform, and provided to the rule "
+ "implementation via <code>ctx.toolchain</code>."),
@Param(
name = "doc",
type = String.class,
named = true,
defaultValue = "''",
doc =
"A description of the rule that can be extracted by documentation generating "
+ "tools."),
@Param(
name = "provides",
type = SkylarkList.class,
named = true,
positional = false,
defaultValue = "[]",
doc = PROVIDES_DOC),
@Param(
name = "exec_compatible_with",
type = SkylarkList.class,
generic1 = String.class,
named = true,
positional = false,
defaultValue = "[]",
doc =
"A list of constraints on the execution platform that apply to all targets of "
+ "this rule type."),
@Param(
name = "analysis_test",
allowedTypes = {
@ParamType(type = Boolean.class),
},
named = true,
positional = false,
defaultValue = "False",
doc =
"If true, then this rule is treated as an analysis test. <p>Note: Analysis test"
+ " rules are primarily defined using infrastructure provided in core Starlark"
+ " libraries. See <a href=\"../testing.html#for-testing-rules\">Testing</a>"
+ " for guidance. <p>If a rule is defined as an analysis test rule, it becomes"
+ " allowed to use configuration transitions defined using <a"
+ " href=\"#analysis_test_transition\">analysis_test_transition</a> on its"
+ " attributes, but opts into some restrictions: <ul><li>Targets of this rule"
+ " are limited in the number of transitive dependencies they may have."
+ " <li>The rule is considered a test rule (as if <code>test=True</code> were"
+ " set). This supercedes the value of <code>test</code></li> <li>The rule"
+ " implementation function may not register actions."
+ " Instead, it must register a pass/fail result via providing <a"
+ " href='AnalysisTestResultInfo.html'>AnalysisTestResultInfo</a>.</li></ul>"),
@Param(
name = "build_setting",
type = BuildSettingApi.class,
noneable = true,
defaultValue = "None",
named = true,
positional = false,
enableOnlyWithFlag = FlagIdentifier.EXPERIMENTAL_BUILD_SETTING_API,
valueWhenDisabled = "None",
doc =
"If set, describes what kind of "
+ "<a href = '../config.$DOC_EXT#user-defined-build-settings'><code>build "
+ "setting</code></a> this rule is. See the "
+ "<a href='config.html'><code>config</code></a> module. If this is "
+ "set, a mandatory attribute named \"build_setting_default\" is automatically "
+ "added to this rule, with a type corresponding to the value passed in here."),
@Param(
name = "cfg",
type = Object.class,
noneable = true,
defaultValue = "None",
named = true,
positional = false,
doc =
"If set, points to the configuration transition the rule will "
+ "apply to its own configuration before analysis.")
},
useAst = true,
useStarlarkThread = true)
public BaseFunction rule(
StarlarkFunction implementation,
Boolean test,
Object attrs,
Object implicitOutputs,
Boolean executable,
Boolean outputToGenfiles,
SkylarkList<?> fragments,
SkylarkList<?> hostFragments,
Boolean skylarkTestable,
SkylarkList<?> toolchains,
String doc,
SkylarkList<?> providesArg,
SkylarkList<?> execCompatibleWith,
Object analysisTest,
Object buildSetting,
Object cfg,
FuncallExpression ast,
StarlarkThread thread)
throws EvalException;
@SkylarkCallable(
name = "aspect",
doc =
"Creates a new aspect. The result of this function must be stored in a global value. "
+ "Please see the <a href=\"../aspects.md\">introduction to Aspects</a> for more "
+ "details.",
parameters = {
@Param(
name = "implementation",
type = StarlarkFunction.class,
named = true,
doc =
"A Starlark function that implements this aspect, with exactly two parameters: "
+ "<a href=\"Target.html\">Target</a> (the target to which the aspect is "
+ "applied) and <a href=\"ctx.html\">ctx</a> (the rule context which the target"
+ "is created from). Attributes of the target are available via the "
+ "<code>ctx.rule</code> field. This function is evaluated during the "
+ "analysis phase for each application of an aspect to a target."),
@Param(
name = "attr_aspects",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"List of attribute names. The aspect propagates along dependencies specified in "
+ " the attributes of a target with these names. Common values here include "
+ "<code>deps</code> and <code>exports</code>. The list can also contain a "
+ "single string <code>\"*\"</code> to propagate along all dependencies of a "
+ "target."),
@Param(
name = "attrs",
type = SkylarkDict.class,
named = true,
noneable = true,
defaultValue = "None",
doc =
"A dictionary declaring all the attributes of the aspect. It maps from an "
+ "attribute name to an attribute object, like `attr.label` or `attr.string` "
+ "(see <a href=\"attr.html\">attr</a> module). Aspect attributes are "
+ "available to implementation function as fields of <code>ctx</code> "
+ "parameter. "
+ ""
+ "<p>Implicit attributes starting with <code>_</code> must have default "
+ "values, and have type <code>label</code> or <code>label_list</code>. "
+ ""
+ "<p>Explicit attributes must have type <code>string</code>, and must use "
+ "the <code>values</code> restriction. Explicit attributes restrict the "
+ "aspect to only be used with rules that have attributes of the same "
+ "name, type, and valid values according to the restriction."),
@Param(
name = "required_aspect_providers",
type = SkylarkList.class,
named = true,
defaultValue = "[]",
doc =
"This attribute allows this aspect to inspect other aspects. The value must be a "
+ "list of providers, or a list of lists of providers. For example, "
+ "<code>[FooInfo, BarInfo, [BazInfo, QuxInfo]]</code> is a "
+ "valid value."
+ ""
+ "<p>A single list of providers will automatically be converted to a list "
+ "containing one list of providers. That is, "
+ "<code>[FooInfo, BarInfo]</code> will automatically be converted to "
+ "<code>[[FooInfo, BarInfo]]</code>. "
+ ""
+ "<p>To make another aspect (e.g. <code>other_aspect</code>) visible to this "
+ "aspect, <code>other_aspect</code> must provide all providers from at least "
+ "one of the lists. In the example of "
+ "<code>[FooInfo, BarInfo, [BazInfo, QuxInfo]]</code>, this aspect can only "
+ "see <code>other_aspect</code> if and only if <code>other_aspect</code> "
+ "provides <code>FooInfo</code> *or* <code>BarInfo</code> *or* both "
+ "<code>BazInfo</code> *and* <code>QuxInfo</code>."),
@Param(
name = "provides",
type = SkylarkList.class,
named = true,
defaultValue = "[]",
doc = PROVIDES_DOC),
@Param(
name = "fragments",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"List of names of configuration fragments that the aspect requires "
+ "in target configuration."),
@Param(
name = "host_fragments",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"List of names of configuration fragments that the aspect requires "
+ "in host configuration."),
@Param(
name = "toolchains",
type = SkylarkList.class,
named = true,
generic1 = String.class,
defaultValue = "[]",
doc =
"<i>(Experimental)</i><br/><br/>"
+ "If set, the set of toolchains this rule requires. Toolchains will be "
+ "found by checking the current platform, and provided to the rule "
+ "implementation via <code>ctx.toolchain</code>."),
@Param(
name = "doc",
type = String.class,
named = true,
defaultValue = "''",
doc =
"A description of the aspect that can be extracted by documentation generating "
+ "tools."),
@Param(
name = "apply_to_generating_rules",
type = Boolean.class,
named = true,
positional = false,
defaultValue = "False",
enableOnlyWithFlag = FlagIdentifier.EXPERIMENTAL_ASPECT_OUTPUT_PROPAGATION,
valueWhenDisabled = "False",
doc =
"If true, the aspect will, when applied to an output file, instead apply to the "
+ "output file's generating rule. "
+ "<p>For example, suppose an aspect propagates transitively through attribute "
+ "`deps` and it is applied to target `alpha`. Suppose `alpha` has "
+ "`deps = [':beta_output']`, where `beta_output` is a declared output of "
+ "a target `beta`. Suppose `beta` has a target `charlie` as one of its "
+ "`deps`. If `apply_to_generating_rules=True` for the aspect, then the aspect "
+ "will propagate through `alpha`, `beta`, and `charlie`. If False, then the "
+ "aspect will propagate only to `alpha`. </p><p>False by default.</p>")
},
useStarlarkThread = true,
useAst = true)
public SkylarkAspectApi aspect(
StarlarkFunction implementation,
SkylarkList<?> attributeAspects,
Object attrs,
SkylarkList<?> requiredAspectProvidersArg,
SkylarkList<?> providesArg,
SkylarkList<?> fragments,
SkylarkList<?> hostFragments,
SkylarkList<?> toolchains,
String doc,
Boolean applyToGeneratingRules,
FuncallExpression ast,
StarlarkThread thread)
throws EvalException;
  /**
   * Interface method backing the Starlark {@code Label()} builtin: parses an absolute
   * label string into a {@code Label}. When {@code relative_to_caller_repository} is
   * true (deprecated), repo-relative labels are resolved against the calling rule's
   * repository instead of the file containing the {@code Label()} call.
   */
  @SkylarkCallable(
      name = "Label",
      doc =
          "Creates a Label referring to a BUILD target. Use "
              + "this function only when you want to give a default value for the label "
              + "attributes. The argument must refer to an absolute label. "
              + "Example: <br><pre class=language-python>Label(\"//tools:default\")</pre>",
      parameters = {
        @Param(
            name = "label_string",
            type = String.class,
            legacyNamed = true,
            doc = "the label string."),
        @Param(
            name = "relative_to_caller_repository",
            type = Boolean.class,
            defaultValue = "False",
            named = true,
            positional = false,
            doc =
                "Deprecated. Do not use. "
                    + "When relative_to_caller_repository is True and the calling thread is a "
                    + "rule's implementation function, then a repo-relative label //foo:bar is "
                    + "resolved relative to the rule's repository. For calls to Label from any "
                    + "other thread, or calls in which the relative_to_caller_repository flag is "
                    + "False, a repo-relative label is resolved relative to the file in which the "
                    + "Label() call appears.")
      },
      useLocation = true,
      useStarlarkThread = true)
  @SkylarkConstructor(objectType = Label.class)
  public Label label(
      String labelString, Boolean relativeToCallerRepository, Location loc, StarlarkThread thread)
      throws EvalException;
}
| |
package apple.metalperformanceshaders;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.metal.protocol.MTLBuffer;
import apple.metal.protocol.MTLCommandBuffer;
import apple.metal.protocol.MTLDevice;
import apple.metal.protocol.MTLTexture;
import apple.metal.struct.MTLRegion;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * MPSImageEDLines
 * <p>
 * The MPSImageEDLines class implements the EDLines line segmenting algorithm using edge-drawing (ED)
 * described here
 * https://ieeexplore.ieee.org/document/6116138
 * <p>
 * The EDLines algorithm consists of 5 steps, the first 4 of which describe the ED algorithm:
 * 1. Blur the source image using a Gaussian blur with a sigma parameter
 * 2. Use horizontal and vertical Sobel filters to find a gradient magnitude and
 * direction.
 * G = sqrt(Sx^2 + Sy^2)
 * G_ang = arctan(Sy / Sx)
 * 3. Compute anchor points, points with a high probability of being edge pixels.
 * Anchor points are local maxima, in the gradient image that lie on row and column
 * multiples of the detailRatio. This parameter effectively downsamples the gradient
 * image, and directly influences the density of anchor points. A larger detailRatio results
 * in fewer fine grained details, leaving long, main lines.
 * 4. Anchor points are traced in a forward and backward direction along the gradient direction, until
 * the gradient falls below some gradientThreshold parameter or the edge of the image is reached.
 * The paths traced become an edge map of the image.
 * 5. Points in the edges are fit to a line, and extended along the edge until the line error crosses a
 * lineErrorThreshold. Lines which are beyond a minimum length are labelled line segments and
 * will be outputs of the algorithm.
 */
@Generated
@Library("MetalPerformanceShaders")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class MPSImageEDLines extends MPSKernel {
    static {
        NatJ.register();
    }

    @Generated
    protected MPSImageEDLines(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native MPSImageEDLines alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native MPSImageEDLines allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    /**
     * [@property] clipRectSource
     * <p>
     * The source rectangle to use when reading data.
     * <p>
     * A MTLRegion that indicates which part of the source to read. If the clipRectSource does not lie
     * completely within the source image, the intersection of the image bounds and clipRectSource will
     * be used. The clipRectSource replaces the MPSUnaryImageKernel offset parameter for this filter.
     * The latter is ignored. Default: MPSRectNoClip, use the entire source texture.
     */
    @Generated
    @Selector("clipRectSource")
    @ByValue
    public native MTLRegion clipRectSource();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    /**
     * [@property] detailRatio
     * <p>
     * Read-write value used to set the detailRatio to use in the EDLines algorithm
     * Default is 32
     */
    @Generated
    @Selector("detailRatio")
    public native char detailRatio();

    /**
     * Encode the filter to a command buffer using a MTLComputeCommandEncoder.
     * <p>
     * The filter will not begin to execute until after the command
     * buffer has been enqueued and committed.
     *
     * @param commandBuffer A valid MTLCommandBuffer.
     * @param source A valid MTLTexture containing the source image for the filter
     * @param dest A valid MTLTexture containing the destination image for the filter. If not nil, the output will be the edges
     * found through the Edge Drawing algorithm.
     * @param endpointBuffer A valid MTLBuffer to receive the line segment count and endpoint results.
     * @param endpointOffset Byte offset into endpoint buffer at which to write the line segment endpoint results. Must be a multiple of 32 bytes.
     * The total line segment count and the line segment endpoints are written to the endpoint buffer. The count
     * is written as a uint32_t at the start of the buffer. The line segments are written to the endpoint buffer as
     * start and end pixel coordinates of the segment. Coordinates are stored as unsigned short pairs, and a
     * single line segment will consist of two pairs, or four total unsigned shorts. The endpoint buffer size must
     * be >= 4 * maxLines * sizeof(unsigned short) + sizeof(uint32_t).
     */
    @Generated
    @Selector("encodeToCommandBuffer:sourceTexture:destinationTexture:endpointBuffer:endpointOffset:")
    public native void encodeToCommandBufferSourceTextureDestinationTextureEndpointBufferEndpointOffset(
            @Mapped(ObjCObjectMapper.class) MTLCommandBuffer commandBuffer,
            @Mapped(ObjCObjectMapper.class) MTLTexture source, @Mapped(ObjCObjectMapper.class) MTLTexture dest,
            @Mapped(ObjCObjectMapper.class) MTLBuffer endpointBuffer, @NUInt long endpointOffset);

    /**
     * [@property] gaussianSigma
     * <p>
     * Read-only sigma value used in performing Gaussian blur of the image.
     * Default is 2.0
     */
    @Generated
    @Selector("gaussianSigma")
    public native float gaussianSigma();

    /**
     * [@property] gradientThreshold
     * <p>
     * Read-write value used to set the threshold for a pixel to be considered an edge
     * Default is 0.2
     */
    @Generated
    @Selector("gradientThreshold")
    public native float gradientThreshold();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native MPSImageEDLines init();

    @Generated
    @Selector("initWithCoder:")
    public native MPSImageEDLines initWithCoder(NSCoder aDecoder);

    /**
     * NSSecureCoding compatibility
     * <p>
     * While the standard NSSecureCoding/NSCoding method
     * -initWithCoder: should work, since the file can't
     * know which device your data is allocated on, we
     * have to guess and may guess incorrectly. To avoid
     * that problem, use initWithCoder:device instead.
     *
     * @param aDecoder The NSCoder subclass with your serialized MPSKernel
     * @param device The MTLDevice on which to make the MPSKernel
     * @return A new MPSKernel object, or nil if failure.
     */
    @Generated
    @Selector("initWithCoder:device:")
    public native MPSImageEDLines initWithCoderDevice(NSCoder aDecoder, @Mapped(ObjCObjectMapper.class) Object device);

    @Generated
    @Selector("initWithDevice:")
    public native MPSImageEDLines initWithDevice(@Mapped(ObjCObjectMapper.class) Object device);

    /**
     * Initialize an EDLines kernel on a given device with specified parameters.
     * [@code]
     * w(i) = 1/sqrt(2*pi*sigma) * exp(-i^2/2*sigma^2)
     * [@endcode]
     * If we take cut off at 1% of w(0) (max weight) beyond which weights
     * are considered 0, we have
     * [@code]
     * ceil (sqrt(-log(0.01)*2)*sigma) ~ ceil(3.7*sigma)
     * [@endcode]
     * as rough estimate of filter width
     *
     * @param device The device the filter will run on
     * @param gaussianSigma The standard deviation of gaussian blur filter.
     * Gaussian weight, centered at 0, at integer grid i is given as w(i) above.
     * @param minLineLength The minimum length of output line segments.
     * @param maxLines The maximum amount of lines for the EDLines algorithm to output. The size of the
     * endpointBuffer supplied at encode must be >= maxLines * 4 * sizeof(unsigned short) + sizeof(uint32_t).
     * @param detailRatio The detailRatio to use in the EDLines algorithm, which
     * inversely affects the number of anchor points
     * @param gradientThreshold Any pixel with a gradient below the gradientThreshold will
     * not be considered an edge
     * @param lineErrorThreshold The limit of how much error a line segment can have relative
     * to the edge it represents
     * @param mergeLocalityThreshold Determines how many pixels apart two lines can deviate spatially and still be merged.
     * This value is normalized to the diagonal length of the image.
     * @return A valid object or nil, if failure.
     */
    @Generated
    @Selector("initWithDevice:gaussianSigma:minLineLength:maxLines:detailRatio:gradientThreshold:lineErrorThreshold:mergeLocalityThreshold:")
    public native MPSImageEDLines initWithDeviceGaussianSigmaMinLineLengthMaxLinesDetailRatioGradientThresholdLineErrorThresholdMergeLocalityThreshold(
            @Mapped(ObjCObjectMapper.class) MTLDevice device, float gaussianSigma, char minLineLength,
            @NUInt long maxLines, char detailRatio, float gradientThreshold, float lineErrorThreshold,
            float mergeLocalityThreshold);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    /**
     * [@property] lineErrorThreshold
     * <p>
     * Read-write value used to set the limit on error for a line segment relative to the edge it fits
     * Default is 0.05
     */
    @Generated
    @Selector("lineErrorThreshold")
    public native float lineErrorThreshold();

    /**
     * [@property] maxLines
     * <p>
     * Read-write value used to set the max number of line segments to be written out.
     * The endpointBuffer at encode must be >= maxLines * 4 * sizeof(unsigned short) + sizeof(uint32_t).
     * Default is 256
     */
    @Generated
    @Selector("maxLines")
    @NUInt
    public native long maxLines();

    /**
     * [@property] mergeLocalityThreshold
     * <p>
     * Read-write value used to set how many pixels apart two lines can deviate spatially and still be merged.
     * Default is 0.0025
     */
    @Generated
    @Selector("mergeLocalityThreshold")
    public native float mergeLocalityThreshold();

    /**
     * [@property] minLineLength
     * <p>
     * Read-write value used to set the minimum length of a line segment.
     * Default is 32
     */
    @Generated
    @Selector("minLineLength")
    public native char minLineLength();

    @Generated
    @Owned
    @Selector("new")
    public static native MPSImageEDLines new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    /**
     * [@property] clipRectSource
     * <p>
     * The source rectangle to use when reading data.
     * <p>
     * A MTLRegion that indicates which part of the source to read. If the clipRectSource does not lie
     * completely within the source image, the intersection of the image bounds and clipRectSource will
     * be used. The clipRectSource replaces the MPSUnaryImageKernel offset parameter for this filter.
     * The latter is ignored. Default: MPSRectNoClip, use the entire source texture.
     */
    @Generated
    @Selector("setClipRectSource:")
    public native void setClipRectSource(@ByValue MTLRegion value);

    /**
     * [@property] detailRatio
     * <p>
     * Read-write value used to set the detailRatio to use in the EDLines algorithm
     * Default is 32
     */
    @Generated
    @Selector("setDetailRatio:")
    public native void setDetailRatio(char value);

    /**
     * [@property] gradientThreshold
     * <p>
     * Read-write value used to set the threshold for a pixel to be considered an edge
     * Default is 0.2
     */
    @Generated
    @Selector("setGradientThreshold:")
    public native void setGradientThreshold(float value);

    /**
     * [@property] lineErrorThreshold
     * <p>
     * Read-write value used to set the limit on error for a line segment relative to the edge it fits
     * Default is 0.05
     */
    @Generated
    @Selector("setLineErrorThreshold:")
    public native void setLineErrorThreshold(float value);

    /**
     * [@property] maxLines
     * <p>
     * Read-write value used to set the max number of line segments to be written out.
     * The endpointBuffer at encode must be >= maxLines * 4 * sizeof(unsigned short) + sizeof(uint32_t).
     * Default is 256
     */
    @Generated
    @Selector("setMaxLines:")
    public native void setMaxLines(@NUInt long value);

    /**
     * [@property] mergeLocalityThreshold
     * <p>
     * Read-write value used to set how many pixels apart two lines can deviate spatially and still be merged.
     * Default is 0.0025
     */
    @Generated
    @Selector("setMergeLocalityThreshold:")
    public native void setMergeLocalityThreshold(float value);

    /**
     * [@property] minLineLength
     * <p>
     * Read-write value used to set the minimum length of a line segment.
     * Default is 32
     */
    @Generated
    @Selector("setMinLineLength:")
    public native void setMinLineLength(char value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
| |
package com.calculator.aa;
import com.calculator.aa.calc.Calc;
import com.calculator.aa.db.SQLiteSupport;
import com.calculator.aa.ui.GradientPainter;
import com.calculator.aa.ui.MainWindow;
import javax.swing.*;
import javax.swing.Timer;
import java.awt.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.*;
import java.util.stream.Stream;
/**
 * Application entry point for CalculatorAA. Holds the process-wide singletons
 * (properties, resource bundle, SQLite connection, cursors, gradient painter)
 * and owns the Swing main frame.
 */
public class Main {
    public static final String versionApp = "2.3";
    private static final String updateUrl = "https://raw.githubusercontent.com/Oppositus/CalculatorAA/master/builds/version.txt";

    // Filled in by checkHasUpdate(); null when no newer build is known.
    public static String newVersionUrl = null;
    public static String newDatabaseUrl = null;

    // Horizontal / vertical resize cursors, created once in createCursors().
    public static Cursor weCursor;
    public static Cursor nsCursor;

    private static Main program;
    private final JFrame mainFrame;
    private final MainWindow mainWindow;

    private static final String propertiesFile = "calcaa.properties";
    public static Properties properties;
    public static ResourceBundle resourceBundle;
    public static SQLiteSupport sqLite;
    public static String osName;
    public static GradientPainter gradient;

    /**
     * Builds the main frame and installs a close handler that persists the
     * frame geometry, disposes the database connection and exits the JVM.
     */
    private Main() {
        mainFrame = new JFrame(resourceBundle.getString("text.program_name"));
        mainWindow = new MainWindow();
        mainFrame.setContentPane(mainWindow.GetMainPanel());
        mainFrame.setLocationRelativeTo(null);
        // DO_NOTHING_ON_CLOSE: shutdown is handled entirely by the listener
        // below. (A redundant earlier EXIT_ON_CLOSE call was removed; it was
        // immediately overridden and would have bypassed the listener.)
        mainFrame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        mainFrame.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent we) {
                // Save frame geometry; try-with-resources closes the stream
                // (Properties.store does not close it, and the original code
                // leaked the FileOutputStream).
                try (OutputStream out = new BufferedOutputStream(new FileOutputStream(propertiesFile))) {
                    Rectangle bounds = mainFrame.getBounds();
                    properties.setProperty("frame.x", String.valueOf((int) bounds.getX()));
                    properties.setProperty("frame.y", String.valueOf((int) bounds.getY()));
                    properties.setProperty("frame.w", String.valueOf((int) bounds.getWidth()));
                    properties.setProperty("frame.h", String.valueOf((int) bounds.getHeight()));
                    int maximized = (mainFrame.getExtendedState() & JFrame.MAXIMIZED_BOTH) > 0 ? 1 : 0;
                    properties.setProperty("frame.z", String.valueOf(maximized));
                    properties.store(out, "CalculatorAA");
                } catch (Exception ignored) {
                    // Best effort: failing to save settings must not prevent exit.
                }
                sqLite.dispose();
                System.exit(0);
            }
        });
        restoreFrameProperties();
        mainFrame.setVisible(true);
    }

    /** @return the application singleton (null before main() finishes initializing). */
    public static Main getMain() {
        return program;
    }

    /** @return the application's main frame. */
    public static JFrame getFrame() {
        return program.mainFrame;
    }

    /**
     * Packs the frame and restores the bounds / maximized state saved in the
     * "frame.*" properties. Falls back to the packed size when nothing valid
     * was saved.
     */
    public void restoreFrameProperties() {
        mainFrame.pack();

        int x = Calc.safeParseInt(properties.getProperty("frame.x", "-1"), -1);
        int y = Calc.safeParseInt(properties.getProperty("frame.y", "-1"), -1);
        int w = Calc.safeParseInt(properties.getProperty("frame.w", "-1"), -1);
        int h = Calc.safeParseInt(properties.getProperty("frame.h", "-1"), -1);
        int z = Calc.safeParseInt(properties.getProperty("frame.z", "0"), -1);

        if (z == 1) {
            // Maximized state wins over explicit bounds.
            mainFrame.setExtendedState(mainFrame.getExtendedState() | JFrame.MAXIMIZED_BOTH);
        } else if (x >= 0 && y >= 0 && w >= 0 && h >= 0) {
            Rectangle rec = new Rectangle(x, y, w, h);
            mainFrame.setBounds(rec);
        }
    }

    /**
     * Returns the period labels in the half-open range [fromIndex, toIndex],
     * clamped to the available number of periods.
     *
     * @param fromIndex first period index (inclusive)
     * @param toIndex last period index (inclusive; clamped to the last period)
     * @return the selected period labels
     */
    public static String[] getPeriods(int fromIndex, int toIndex) {
        String[] periods = program.mainWindow.getPeriods();
        int max = Math.min(toIndex, periods.length - 1) + 1;
        return Arrays.copyOfRange(periods, fromIndex, max);
    }

    /**
     * Downloads the version descriptor and compares the application and
     * database versions against the current ones. Side effects: sets
     * newVersionUrl / newDatabaseUrl and the main window's update indicator.
     *
     * @param verbose when true, HTTP and I/O errors are shown in a dialog
     * @return true when an application or database update is available
     */
    public boolean checkHasUpdate(boolean verbose) {
        try {
            HttpURLConnection.setFollowRedirects(true);
            HttpURLConnection connection = (HttpURLConnection) new URL(updateUrl).openConnection();
            if (connection.getResponseCode() == HttpURLConnection.HTTP_OK) {
                // Read the whole response. ByteArrayOutputStream replaces the
                // original byte-by-byte LinkedList<Byte> accumulation, and
                // try-with-resources closes the stream on all paths.
                ByteArrayOutputStream response = new ByteArrayOutputStream();
                byte[] buffer = new byte[1024];
                try (InputStream isHTTP = connection.getInputStream()) {
                    int wasRead;
                    while ((wasRead = isHTTP.read(buffer)) >= 0) {
                        response.write(buffer, 0, wasRead);
                    }
                }
                connection.disconnect();

                String result = new String(response.toByteArray(), StandardCharsets.UTF_8);
                String[] lines = result.split("[\\r\\n]+");

                boolean hasAppUpdate = false;
                boolean hasBaseUpdate = false;
                String versionBase = sqLite.getDatabaseVersion();

                // Descriptor format: one "key=value" pair per line.
                for (String rawLine : lines) {
                    String[] line = rawLine.split("=");
                    if (line.length == 2) {
                        if ("application".equals(line[0]) && !versionApp.equals(line[1])) {
                            hasAppUpdate = true;
                        }
                        if ("database".equals(line[0]) && !versionBase.equals(line[1])) {
                            hasBaseUpdate = true;
                        }
                        if ("aurl".equals(line[0])) {
                            newVersionUrl = line[1];
                        }
                        if ("durl".equals(line[0])) {
                            newDatabaseUrl = line[1];
                        }
                    }
                }

                if (!hasAppUpdate) {
                    newVersionUrl = null;
                }
                if (!hasBaseUpdate) {
                    newDatabaseUrl = null;
                }

                mainWindow.setUpdateAvailable(hasAppUpdate);
                return hasAppUpdate || hasBaseUpdate;
            } else {
                if (verbose) {
                    JOptionPane.showMessageDialog(mainFrame,
                            String.format(resourceBundle.getString("text.update_error_http"), connection.getResponseCode()),
                            resourceBundle.getString("text.error"),
                            JOptionPane.ERROR_MESSAGE);
                }
            }
        } catch (IOException e) {
            if (verbose) {
                JOptionPane.showMessageDialog(mainFrame, e, resourceBundle.getString("text.error"), JOptionPane.ERROR_MESSAGE);
            }
        }
        return false;
    }

    /**
     * Disposes the database connection, runs the given action (e.g. replacing
     * the database file), then reconnects.
     *
     * @param middle action to run while the database is closed
     */
    public static void reconnectDatabase(Runnable middle) {
        sqLite.dispose();
        middle.run();
        sqLite = new SQLiteSupport();
    }

    /**
     * Application entry point: loads properties and look-and-feel, opens the
     * database, then creates the UI on the EDT and restores the last session.
     */
    public static void main(String[] args) {
        resourceBundle = ResourceBundle.getBundle("com.calculator.aa.messages", Locale.getDefault());

        properties = new Properties();
        try {
            if (Files.exists(new File(propertiesFile).toPath())) {
                // try-with-resources: Properties.load does not close the stream.
                try (InputStream in = new BufferedInputStream(new FileInputStream(propertiesFile))) {
                    properties.load(in);
                }
            }
        } catch (IOException ignored) {
            // Missing/unreadable settings: start with defaults.
        }

        String laf = properties.getProperty("ui.theme");
        try {
            if (laf != null) {
                try {
                    UIManager.setLookAndFeel(laf);
                } catch (Exception ignored) {
                    // Saved theme unavailable: fall back to the system LAF.
                    UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
                }
            } else {
                UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
            }
        } catch (Exception ignored) {
        }

        String[] savedOptions = readImportOptions();
        sqLite = new SQLiteSupport();
        osName = System.getProperty("os.name").toLowerCase();
        gradient = new GradientPainter();

        SwingUtilities.invokeLater(() -> {
            program = new Main();
            createCursors();

            // Reopen whatever was loaded last time: either database tickers
            // ("base:" prefix) or CSV files.
            String file = properties.getProperty("files.last", "");
            if (!file.isEmpty()) {
                if (file.startsWith("base:")) {
                    SwingUtilities.invokeLater(() -> {
                        String[] files = file.replace("base:", "").split(";");
                        if (files.length > 0) {
                            program.mainWindow.getTickersAndLoadData(files, savedOptions);
                        }
                    });
                } else {
                    SwingUtilities.invokeLater(() -> {
                        String[] files = file.split(";");
                        if (files.length > 0) {
                            program.mainWindow.parseCSVAndLoadData(new File(files[0]), savedOptions);
                        }
                        Stream.of(Arrays.copyOfRange(files, 1, files.length)).map(File::new).forEach(program.mainWindow::silentParseCSVAndMergeData);
                    });
                }
            }

            // Delayed, one-shot silent update check (opt-out via property).
            if ("1".equals(Main.properties.getProperty("ui.updates_check", "1"))) {
                Timer tm = new Timer(10000, actionEvent -> program.checkHasUpdate(false));
                tm.setRepeats(false);
                tm.start();
            }
        });
    }

    /**
     * Reads the CSV import options (delimiter, text mark, decimal separator,
     * date column flag) from the properties, with defaults for missing keys.
     *
     * @return array of {delimiter, mark, decimal, date}
     */
    private static String[] readImportOptions() {
        String[] savedOptions = new String[]{";", "\"", ".", "1"};

        String s = properties.getProperty("import.delimiter");
        if (s != null) {
            savedOptions[0] = s;
        }

        s = properties.getProperty("import.mark");
        if (s != null) {
            savedOptions[1] = s;
        }

        s = properties.getProperty("import.decimal");
        if (s != null) {
            savedOptions[2] = s;
        }

        s = properties.getProperty("import.date");
        if (s != null) {
            savedOptions[3] = s;
        }

        return savedOptions;
    }

    /**
     * Creates the resize cursors: predefined system cursors on Windows,
     * custom image cursors elsewhere.
     */
    private static void createCursors() {
        if (osName.startsWith("windows")) {
            weCursor = Cursor.getPredefinedCursor(Cursor.W_RESIZE_CURSOR);
            nsCursor = Cursor.getPredefinedCursor(Cursor.N_RESIZE_CURSOR);
        } else {
            Toolkit toolkit = Toolkit.getDefaultToolkit();
            Image we = toolkit.getImage(program.getClass().getResource("ui/icons/we-cursor.png"));
            weCursor = toolkit.createCustomCursor(we, new Point(7, 4), "WE-CURSOR");
            Image ns = toolkit.getImage(program.getClass().getResource("ui/icons/ns-cursor.png"));
            nsCursor = toolkit.createCustomCursor(ns, new Point(4, 7), "NS-CURSOR");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller.service;
import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.FlowController;
import org.apache.nifi.controller.serialization.FlowEncodingVersion;
import org.apache.nifi.controller.serialization.FlowFromDOMFactory;
import org.apache.nifi.encrypt.PropertyEncryptor;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.reporting.BulletinRepository;
import org.apache.nifi.security.xml.XmlUtils;
import org.apache.nifi.util.BundleUtils;
import org.apache.nifi.util.DomUtils;
import org.apache.nifi.web.api.dto.BundleDTO;
import org.apache.nifi.web.api.dto.ControllerServiceDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
/**
 * Loads Controller Services from the serialized (XML) form of a flow,
 * registers them with the flow, applies their configuration, and optionally
 * enables the ones whose persisted state is ENABLED.
 *
 * <p>Thread-safety: DOM {@link Element}s are not thread-safe, so elements
 * retained for later use are cloned, and reads of a shared document are
 * synchronized on its owner {@link Document}.</p>
 */
public class ControllerServiceLoader {

    private static final Logger logger = LoggerFactory.getLogger(ControllerServiceLoader.class);

    /**
     * Parses the serialized flow fragment and loads every "controllerService"
     * child of the document root.
     *
     * @param controller the flow controller that owns the services
     * @param serializedStream XML stream containing the services
     * @param parentGroup group to add the services to; when null, services are
     *        registered as root controller services
     * @param encryptor used to decrypt sensitive property values
     * @param bulletinRepo bulletin repository (not used by this method)
     * @param autoResumeState when true, services whose saved state is ENABLED
     *        are enabled after loading
     * @param encodingVersion encoding version of the flow XML
     * @return the newly created Controller Service nodes
     * @throws IOException if the stream cannot be read or is not valid XML
     */
    public static List<ControllerServiceNode> loadControllerServices(final FlowController controller, final InputStream serializedStream, final ProcessGroup parentGroup,
            final PropertyEncryptor encryptor, final BulletinRepository bulletinRepo, final boolean autoResumeState, final FlowEncodingVersion encodingVersion) throws IOException {

        try (final InputStream in = new BufferedInputStream(serializedStream)) {
            final DocumentBuilder builder = XmlUtils.createSafeDocumentBuilder(null);

            // Every handler logs and then rethrows, so any parse problem aborts
            // loading. NOTE(review): even warnings are rethrown — confirm that
            // aborting on warnings is intended.
            builder.setErrorHandler(new org.xml.sax.ErrorHandler() {
                @Override
                public void fatalError(final SAXParseException err) throws SAXException {
                    logger.error("Config file line " + err.getLineNumber() + ", col " + err.getColumnNumber() + ", uri " + err.getSystemId() + " :message: " + err.getMessage());
                    if (logger.isDebugEnabled()) {
                        logger.error("Error Stack Dump", err);
                    }
                    throw err;
                }

                @Override
                public void error(final SAXParseException err) throws SAXParseException {
                    logger.error("Config file line " + err.getLineNumber() + ", col " + err.getColumnNumber() + ", uri " + err.getSystemId() + " :message: " + err.getMessage());
                    if (logger.isDebugEnabled()) {
                        logger.error("Error Stack Dump", err);
                    }
                    throw err;
                }

                @Override
                public void warning(final SAXParseException err) throws SAXParseException {
                    logger.warn(" Config file line " + err.getLineNumber() + ", uri " + err.getSystemId() + " : message : " + err.getMessage());
                    if (logger.isDebugEnabled()) {
                        logger.warn("Warning stack dump", err);
                    }
                    throw err;
                }
            });

            final Document document = builder.parse(in);
            final Element controllerServices = document.getDocumentElement();
            final List<Element> serviceElements = DomUtils.getChildElementsByTagName(controllerServices, "controllerService");
            final Map<ControllerServiceNode, Element> controllerServiceMap = ControllerServiceLoader.loadControllerServices(serviceElements, controller, parentGroup, encryptor, encodingVersion);

            enableControllerServices(controllerServiceMap, controller, encryptor, autoResumeState, encodingVersion);
            return new ArrayList<>(controllerServiceMap.keySet());
        } catch (SAXException | ParserConfigurationException sxe) {
            // Wrap parser failures so callers only deal with IOException.
            throw new IOException(sxe);
        }
    }

    /**
     * Creates a node for each service element, registers it (with the parent
     * group, or as a root service when parentGroup is null), and then applies
     * each service's configuration.
     *
     * <p>All services are registered before any is configured, in two separate
     * passes over the elements.</p>
     *
     * @return map from each created node to a (cloned) copy of its XML element
     */
    public static Map<ControllerServiceNode, Element> loadControllerServices(final List<Element> serviceElements, final FlowController controller,
            final ProcessGroup parentGroup, final PropertyEncryptor encryptor, final FlowEncodingVersion encodingVersion) {

        final Map<ControllerServiceNode, Element> nodeMap = new HashMap<>();
        for (final Element serviceElement : serviceElements) {
            final ControllerServiceNode serviceNode = createControllerService(controller, serviceElement, encryptor, encodingVersion);
            if (parentGroup == null) {
                controller.getFlowManager().addRootControllerService(serviceNode);
            } else {
                parentGroup.addControllerService(serviceNode);
            }

            // We need to clone the node because it will be used in a separate thread below, and
            // Element is not thread-safe.
            nodeMap.put(serviceNode, (Element) serviceElement.cloneNode(true));
        }
        for (final Map.Entry<ControllerServiceNode, Element> entry : nodeMap.entrySet()) {
            configureControllerService(entry.getKey(), entry.getValue(), encryptor, encodingVersion);
        }

        return nodeMap;
    }

    /**
     * When autoResumeState is true, enables every service in the map whose
     * persisted state is ENABLED; otherwise only logs that nothing is enabled.
     */
    public static void enableControllerServices(final Map<ControllerServiceNode, Element> nodeMap, final FlowController controller,
            final PropertyEncryptor encryptor, final boolean autoResumeState, final FlowEncodingVersion encodingVersion) {
        // Start services
        if (autoResumeState) {
            final Set<ControllerServiceNode> nodesToEnable = new HashSet<>();

            for (final ControllerServiceNode node : nodeMap.keySet()) {
                final Element controllerServiceElement = nodeMap.get(node);

                final ControllerServiceDTO dto;
                // DOM reads are not thread-safe: serialize on the owning document.
                synchronized (controllerServiceElement.getOwnerDocument()) {
                    dto = FlowFromDOMFactory.getControllerService(controllerServiceElement, encryptor, encodingVersion);
                }

                final ControllerServiceState state = ControllerServiceState.valueOf(dto.getState());
                if (state == ControllerServiceState.ENABLED) {
                    nodesToEnable.add(node);
                    logger.debug("Will enable Controller Service {}", node);
                } else {
                    logger.debug("Will not enable Controller Service {} because its state is set to {}", node, state);
                }
            }

            enableControllerServices(nodesToEnable, controller, autoResumeState);
        } else {
            logger.debug("Will not enable the following Controller Services because 'auto-resume state' flag is false: {}", nodeMap.keySet());
        }
    }

    /**
     * Validates and enables the given services (all of them) when
     * autoResumeState is true; otherwise only logs.
     */
    public static void enableControllerServices(final Collection<ControllerServiceNode> nodesToEnable, final FlowController controller, final boolean autoResumeState) {
        // Start services
        if (autoResumeState) {
            logger.debug("Enabling Controller Services {}", nodesToEnable);

            nodesToEnable.forEach(ControllerServiceNode::performValidation); // validate services before attempting to enable them
            controller.getControllerServiceProvider().enableControllerServices(nodesToEnable);
        } else {
            logger.debug("Will not enable the following Controller Services because 'auto-resume state' flag is false: {}", nodesToEnable);
        }
    }

    /**
     * Creates a copy of an existing service (same type, bundle, name, comments
     * and raw property values). The clone is created disabled.
     *
     * @return the newly created clone node
     */
    public static ControllerServiceNode cloneControllerService(final FlowController flowController, final ControllerServiceNode controllerService) {
        // create a new id for the clone seeded from the original id so that it is consistent in a cluster
        final UUID id = UUID.nameUUIDFromBytes(controllerService.getIdentifier().getBytes(StandardCharsets.UTF_8));

        final ControllerServiceNode clone = flowController.getFlowManager().createControllerService(controllerService.getCanonicalClassName(), id.toString(),
                controllerService.getBundleCoordinate(), Collections.emptySet(), false, true, null);
        clone.setName(controllerService.getName());
        clone.setComments(controllerService.getComments());

        if (controllerService.getProperties() != null) {
            Map<String,String> properties = new HashMap<>();
            for (Map.Entry<PropertyDescriptor, String> propEntry : controllerService.getRawPropertyValues().entrySet()) {
                properties.put(propEntry.getKey().getName(), propEntry.getValue());
            }
            clone.setProperties(properties);
        }

        return clone;
    }

    /**
     * Creates (but does not configure) a service node from its XML element.
     * Falls back to the DTO's literal bundle coordinate — or UNKNOWN — when no
     * compatible bundle is installed.
     */
    private static ControllerServiceNode createControllerService(final FlowController flowController, final Element controllerServiceElement, final PropertyEncryptor encryptor,
            final FlowEncodingVersion encodingVersion) {
        final ControllerServiceDTO dto = FlowFromDOMFactory.getControllerService(controllerServiceElement, encryptor, encodingVersion);

        BundleCoordinate coordinate;
        try {
            coordinate = BundleUtils.getCompatibleBundle(flowController.getExtensionManager(), dto.getType(), dto.getBundle());
        } catch (final IllegalStateException e) {
            final BundleDTO bundleDTO = dto.getBundle();
            if (bundleDTO == null) {
                coordinate = BundleCoordinate.UNKNOWN_COORDINATE;
            } else {
                coordinate = new BundleCoordinate(bundleDTO.getGroup(), bundleDTO.getArtifact(), bundleDTO.getVersion());
            }
        }

        final ControllerServiceNode node = flowController.getFlowManager().createControllerService(dto.getType(), dto.getId(), coordinate, Collections.emptySet(), false, true, null);
        node.setName(dto.getName());
        node.setComments(dto.getComments());
        node.setVersionedComponentId(dto.getVersionedComponentId());
        return node;
    }

    /**
     * Applies annotation data and properties from the XML element to the node,
     * pausing the validation trigger so validation runs only once afterwards.
     */
    private static void configureControllerService(final ControllerServiceNode node, final Element controllerServiceElement, final PropertyEncryptor encryptor,
            final FlowEncodingVersion encodingVersion) {
        final ControllerServiceDTO dto = FlowFromDOMFactory.getControllerService(controllerServiceElement, encryptor, encodingVersion);
        node.pauseValidationTrigger();
        try {
            node.setAnnotationData(dto.getAnnotationData());
            node.setProperties(dto.getProperties());
        } finally {
            node.resumeValidationTrigger();
        }
    }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Description:
// Tools for obtaining static Google Maps for mobile devices
// http://code.google.com/apis/maps/documentation/staticmaps/
// ----------------------------------------------------------------------------
// Change History:
// 2009/04/02 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.google;
import java.lang.*;
import java.util.*;
import java.io.*;
import java.awt.*;
import java.net.*;
import org.opengts.util.*;
/**
*** Tools for obtaining static Google Maps for mobile devices.
*** NOTE(review): this targets the legacy v1 'staticmap' endpoint over plain
*** http -- confirm the endpoint is still the intended target.
**/

public class GoogleStaticMap
{

    // ------------------------------------------------------------------------

    public static String GOOGLE_MAP_URL            = "http://maps.google.com/staticmap";

    // ------------------------------------------------------------------------

    public static String MAPTYPE_MOBILE            = "mobile";
    public static String MAPTYPE_ROADMAP           = "roadmap";
    public static String MAPTYPE_SATELLITE         = "satellite";
    public static String MAPTYPE_HYBRID            = "hybrid";
    public static String MAPTYPE_TERRAIN           = "terrain";

    // ------------------------------------------------------------------------

    public static String PUSHPIN_SIZE_TINY         = "tiny";
    public static String PUSHPIN_SIZE_MID          = "mid";
    public static String PUSHPIN_SIZE_SMALL        = "small";

    public static String PUSHPIN_SIZE[] = new String[] {
        PUSHPIN_SIZE_TINY,
        PUSHPIN_SIZE_MID,
        PUSHPIN_SIZE_SMALL
    };

    public static String PUSHPIN_COLOR_BLACK       = "black";
    public static String PUSHPIN_COLOR_BROWN       = "brown";
    public static String PUSHPIN_COLOR_RED         = "red";
    public static String PUSHPIN_COLOR_ORANGE      = "orange";
    public static String PUSHPIN_COLOR_YELLOW      = "yellow";
    public static String PUSHPIN_COLOR_GREEN       = "green";
    public static String PUSHPIN_COLOR_BLUE        = "blue";
    public static String PUSHPIN_COLOR_PURPLE      = "purple";
    public static String PUSHPIN_COLOR_GRAY        = "gray";
    public static String PUSHPIN_COLOR_WHITE       = "white";

    public static String PUSHPIN_COLOR[] = new String[] {
        PUSHPIN_COLOR_BLACK,
        PUSHPIN_COLOR_BROWN,
        PUSHPIN_COLOR_RED,
        PUSHPIN_COLOR_ORANGE,
        PUSHPIN_COLOR_YELLOW,
        PUSHPIN_COLOR_GREEN,
        PUSHPIN_COLOR_BLUE,
        PUSHPIN_COLOR_PURPLE,
        PUSHPIN_COLOR_GRAY,
        PUSHPIN_COLOR_WHITE,
    };

    /**
    *** Creates a pushpin name based on the specified size, color, and tag
    *** @param size   The pushpin size ("tiny", "mid", "small")
    *** @param color  The pushpin color ("red", "green", ...)
    *** @param tag    Alphanumeric letter/digit tag (ignored for "tiny" pins)
    *** @return The composite pushpin name
    **/
    public static String CreatePushpinIcon(String size, String color, String tag)
    {
        String S = StringTools.blankDefault(size , PUSHPIN_SIZE_MID  );
        String C = StringTools.blankDefault(color, PUSHPIN_COLOR_RED);
        if (PUSHPIN_SIZE_TINY.equals(S)) {
            // "tiny" pins do not support a letter tag
            return S + C;
        } else {
            String L = StringTools.blankDefault(tag,"").toLowerCase();
            return S + C + L;
        }
    }

    public static String DEFAULT_PUSHPIN = CreatePushpinIcon(PUSHPIN_SIZE_MID,PUSHPIN_COLOR_RED,"o");

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    private java.util.List<String>  pushpins    = new Vector<String>();
    private java.util.List<String>  pathLine    = new Vector<String>();
    private GeoBounds               bounds      = new GeoBounds();     // extended by addPushpin
    private GeoPoint                center      = null;                // null => no explicit center
    private int                     width       = 200;
    private int                     height      = 250;
    private String                  googleKey   = "";
    private String                  mapType     = MAPTYPE_MOBILE;
    private int                     zoom        = 8;
    private boolean                 sensor      = false;
    private ColorTools.RGB          pathColor   = null;                // null => no route path drawn
    private int                     pathWeight  = 2;

    /**
    *** Default constructor
    **/
    public GoogleStaticMap()
    {
        //
    }

    /**
    *** Constructor
    *** @param width   The preferred map width (pixels)
    *** @param height  The preferred map height (pixels)
    *** @param key     The Google map authorization key
    **/
    public GoogleStaticMap(int width, int height, String key)
    {
        this.setSize(width, height);
        this.setGoogleKey(key);
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the preferred map size
    *** @param W  The map width (pixels)
    *** @param H  The map height (pixels)
    **/
    public void setSize(int W, int H)
    {
        this.width  = W;
        this.height = H;
    }

    /**
    *** Gets the preferred map width
    *** @return The map width
    **/
    public int getWidth()
    {
        return this.width;
    }

    /**
    *** Gets the preferred map height
    *** @return The map height
    **/
    public int getHeight()
    {
        return this.height;
    }

    // ------------------------------------------------------------------------

    /**
    *** Returns true if the Google Map key has been defined
    *** @return True if the Google Map key has been defined
    **/
    public boolean hasGoogleKey()
    {
        return !StringTools.isBlank(this.googleKey);
    }

    /**
    *** Gets the Google map authorization key
    *** @return The Google map authorization key
    **/
    public String getGoogleKey()
    {
        return this.googleKey;
    }

    /**
    *** Sets the Google map authorization key
    *** @param key  The Google map authorization key
    **/
    public void setGoogleKey(String key)
    {
        this.googleKey = StringTools.trim(key);
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the map type (mobile, roadmap, satellite, terrain, hybrid, ...)
    *** @param mapType  The map type (blank defaults to "mobile")
    **/
    public void setMapType(String mapType)
    {
        this.mapType = StringTools.trim(mapType);
        if (StringTools.isBlank(this.mapType)) {
            this.mapType = MAPTYPE_MOBILE;
        }
    }

    /**
    *** Gets the map type
    *** @return The map type
    **/
    public String getMapType()
    {
        return this.mapType;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the current zoom level [from 0(lowest) to 19(highest)]
    *** @param zoom  The current zoom level
    **/
    public void setZoom(int zoom)
    {
        this.zoom = zoom;
    }

    /**
    *** Gets the current zoom level
    *** @return The current zoom level
    **/
    public int getZoom()
    {
        return this.zoom;
    }

    /**
    *** Calculates the best zoom of the map based on added points
    *** @return The calculated zoom of the map
    **/
    public int calculateZoom()
    {
        // http://slappy.cs.uiuc.edu/fall06/cs492/Group2/example.html
        //   zoom_level = log(ppd_lon/(256/360)) / log(2)
        //   m/px = cos(lat) * (1 / 2^zoom) * (40075017 / 256)
        double ppd_lat = this.getHeight() / this.bounds.getDeltaLatitude();
        double ppd_lon = this.getWidth()  / this.bounds.getDeltaLongitude();
        // Fix: use the more constrained (smaller) pixels-per-degree so the
        // bounds fit in BOTH dimensions. ('ppd' was previously computed but
        // then ignored in favor of 'ppd_lon', dropping the latitude constraint.)
        double ppd  = (ppd_lon < ppd_lat)? ppd_lon : ppd_lat;
        double zoom = Math.log(ppd/(256.0/360.0)) / Math.log(2.0);
        // NOTE(review): a zero lat/lon delta (single point) makes 'ppd'
        // infinite and the result very large -- confirm callers clamp zoom.
        int z = (int)Math.floor(zoom - 1.95);
        return (z >= 0)? z : 0;
        // Other References:
        //  http://blogs.esri.com/Support/blogs/mappingcenter/archive/2009/03/19/How-can-you-tell-what-map-scales-are-shown-for-online-maps_3F00_.aspx
        //  http://squall.nrel.colostate.edu/cwis438/DisplayHTML.php?FilePath=D:/WebContent/Jim/GoogleMapsProjection.html&WebSiteID=9
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the map center point
    *** @param cp  The map center (invalid/null clears the explicit center)
    **/
    public void setCenter(GeoPoint cp)
    {
        this.center = ((cp != null) && cp.isValid())? cp : null;
    }

    /**
    *** Gets the center of the map (may return null)
    *** @return The map center (may be null)
    **/
    public GeoPoint getCenter()
    {
        return this.center;
    }

    /**
    *** Calculates the best center of the map based on added points
    *** @return The calculated center of the map
    **/
    public GeoPoint calculateCenter()
    {
        return this.bounds.getCenter();
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the 'sensor generated' state of the included pushpins
    *** @param sensor  True if lat/lon is autogenerated
    **/
    public void setSensorState(boolean sensor)
    {
        this.sensor = sensor;
    }

    /**
    *** Should return true if latitude/longitude is auto-generated
    *** @return True if lat/lon is autogenerated
    **/
    public boolean getSensorState()
    {
        return this.sensor;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the route path color and weight
    *** @param rgb     The route path color (null disables the path)
    *** @param weight  The route path weight (clamped to the range 1..10)
    **/
    public void setPath(ColorTools.RGB rgb, int weight)
    {
        this.pathColor  = rgb;
        this.pathWeight = (weight < 1)? 1 : (weight > 10)? 10 : weight;
    }

    // ------------------------------------------------------------------------

    /**
    *** Adds the specified pushpin to the map (invalid points are ignored).
    *** Also appends the point to the route path and extends the map bounds.
    *** @param gp    The pushpin location
    *** @param icon  The pushpin icon name
    **/
    public void addPushpin(GeoPoint gp, String icon)
    {
        if ((gp != null) && gp.isValid()) {
            String lat = GeoPoint.formatLatitude( gp.getLatitude());
            String lon = GeoPoint.formatLongitude(gp.getLongitude());
            String pp  = lat + "," + lon + "," + StringTools.trim(icon);
            this.pushpins.add(pp);
            String rt  = lat + "," + lon;
            this.pathLine.add(rt);
            // extend bounds by a 200-meter circle around the point
            this.bounds.extendByCircle(200.0, gp);
        }
    }

    /**
    *** Gets the number of pushpins currently on this map
    *** @return The current number of pushpins
    **/
    public int getPushpinCount()
    {
        return this.pushpins.size();
    }

    // ------------------------------------------------------------------------

    /**
    *** Gets the Google Map URL for retrieving the map image data
    *** @return A String representation of the Google map URL
    **/
    public String toString()
    {
        URIArg url = new URIArg(GOOGLE_MAP_URL);

        /* center */
        if (this.center != null) {
            url.addArg("center" , this.center.getLatitude() + "," + this.center.getLongitude());
        }

        /* common arguments */
        url.addArg("zoom"   , this.getZoom());
        url.addArg("size"   , this.getWidth() + "x" + this.getHeight());
        url.addArg("maptype", this.getMapType());
        url.addArg("sensor" , String.valueOf(this.getSensorState()));

        /* path (requires a color and at least 2 points) */
        if ((this.pathColor != null) && (this.pathLine.size() >= 2)) {
            StringBuffer sb = new StringBuffer();
            sb.append("rgb:0x").append(this.pathColor.toString());
            sb.append(",weight:").append(this.pathWeight);
            for (String pt : this.pathLine) {
                sb.append("|");
                sb.append(pt);
            }
            url.addArg("path", sb.toString());
        }

        /* markers */
        if (!this.pushpins.isEmpty()) {
            StringBuffer sb = new StringBuffer();
            for (String pp : this.pushpins) {
                if (sb.length() > 0) {
                    sb.append("|"); // %7C
                }
                sb.append(pp);
            }
            url.addArg("markers", sb.toString());
        }

        /* google key at the end */
        if (this.hasGoogleKey()) {
            url.addArg("key", this.getGoogleKey());
        }

        /* return URL */
        return url.toString();
    }

    // ------------------------------------------------------------------------

    /**
    *** Gets an array of bytes containing the Google Map for the specified location
    *** @return A byte array containing the PNG map image, or null on failure
    **/
    public byte[] getMap()
    {
        String url = this.toString();
        //Print.logInfo("Google Map URL: " + url);
        try {
            int timeoutMS = -1; // no timeout
            return HTMLTools.readPage_GET(url, timeoutMS);
        } catch (Throwable th) {
            Print.logError("Unable to retrieve map", th);
            return null;
        }
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Command-line entry point: builds a map URL from "-gp=<lat>/<lon>,..."
    *** arguments and prints it.
    **/
    public static void main(String argv[])
    {
        RTConfig.setCommandLineArgs(argv);
        GoogleStaticMap gsm = new GoogleStaticMap();
        gsm.setPath(ColorTools.COLOR_RED, 2);
        gsm.setSize(640,480);

        /* points */
        String gps[] = StringTools.parseString(RTConfig.getString("gp",""),',');
        for (String g : gps) {
            GeoPoint gp = new GeoPoint(g);
            if (gp.isValid()) {
                gsm.addPushpin(gp, "red");
            }
        }
        if (gsm.getPushpinCount() <= 0) {
            Print.sysPrintln("Missing '-gp=<lat>/<lon>,<lat>/<lon>'");
            System.exit(99);
        }

        gsm.setZoom(gsm.calculateZoom());
        Print.sysPrintln(gsm.toString());
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.util.path;
import java.io.File;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
import org.apache.hadoop.fs.Path;
/**
* Helps to get Table content paths.
*/
public class CarbonTablePath extends Path {
protected static final String METADATA_DIR = "Metadata";
protected static final String DICTIONARY_EXT = ".dict";
protected static final String DICTIONARY_META_EXT = ".dictmeta";
protected static final String SORT_INDEX_EXT = ".sortindex";
protected static final String SCHEMA_FILE = "schema";
protected static final String TABLE_STATUS_FILE = "tablestatus";
protected static final String TABLE_UPDATE_STATUS_FILE = "tableupdatestatus";
protected static final String FACT_DIR = "Fact";
protected static final String SEGMENT_PREFIX = "Segment_";
protected static final String PARTITION_PREFIX = "Part";
protected static final String CARBON_DATA_EXT = ".carbondata";
protected static final String CARBON_DELTE_DELTA_EXT = ".deletedelta";
protected static final String CARBON_UPDATE_DELTA_EXT = ".updatedelta";
protected static final String DATA_PART_PREFIX = "part-";
protected static final String BATCH_PREFIX = "_batchno";
protected static final String INDEX_FILE_EXT = ".carbonindex";
protected static final String DELETE_DELTA_FILE_EXT = ".deletedelta";
protected String tablePath;
protected CarbonTableIdentifier carbonTableIdentifier;
  /**
   * Creates a table path from an already-resolved table location.
   *
   * @param carbonTableIdentifier identifier (database + table name) of the table
   * @param tablePathString absolute path of the table folder
   */
  public CarbonTablePath(CarbonTableIdentifier carbonTableIdentifier, String tablePathString) {
    super(tablePathString);
    this.carbonTableIdentifier = carbonTableIdentifier;
    this.tablePath = tablePathString;
  }
  /**
   * Creates a table path as {@code storePath/dbName/tableName}. The table id
   * in the generated {@link CarbonTableIdentifier} is left empty.
   *
   * @param storePath root store location
   * @param dbName database name
   * @param tableName table name
   */
  public CarbonTablePath(String storePath, String dbName, String tableName) {
    super(storePath + File.separator + dbName + File.separator + tableName);
    this.carbonTableIdentifier = new CarbonTableIdentifier(dbName, tableName, "");
    this.tablePath = storePath + File.separator + dbName + File.separator + tableName;
  }
/**
* The method returns the folder path containing the carbon file.
*
* @param carbonFilePath
*/
public static String getFolderContainingFile(String carbonFilePath) {
return carbonFilePath.substring(0, carbonFilePath.lastIndexOf(File.separator));
}
  /**
   * Builds the dictionary file name for a column: {@code <columnId>.dict}.
   *
   * @param columnId unique column identifier
   * @return name of dictionary file
   */
  public static String getDictionaryFileName(String columnId) {
    return columnId + DICTIONARY_EXT;
  }
  /**
   * Tells whether the given file is a column dictionary file: a regular file
   * (not a directory) whose name ends with {@code .dict}.
   *
   * NOTE(review): returns boxed {@code Boolean} rather than {@code boolean};
   * the method never returns null, but the boxed return type is kept for
   * binary compatibility with existing callers.
   *
   * @param carbonFile candidate file
   * @return true when the file is a dictionary file
   */
  public static Boolean isDictionaryFile(CarbonFile carbonFile) {
    return (!carbonFile.isDirectory()) && (carbonFile.getName().endsWith(DICTIONARY_EXT));
  }
/**
 * check if it is carbon data file matching extension; the extension is taken as
 * everything from the last '.' of the path onwards
 *
 * @param fileNameWithPath path to check
 * @return true when the extension starts with the carbon data extension
 */
public static boolean isCarbonDataFile(String fileNameWithPath) {
  int dotIndex = fileNameWithPath.lastIndexOf('.');
  return dotIndex != -1 && fileNameWithPath.substring(dotIndex).startsWith(CARBON_DATA_EXT);
}
/**
 * check if it is a carbon data file or an update-delta file, judged by the
 * extension following the last '.' of the path
 *
 * @param fileNameWithPath path to check
 * @return true when the extension matches either file kind
 */
public static boolean isCarbonDataFileOrUpdateFile(String fileNameWithPath) {
  int dotIndex = fileNameWithPath.lastIndexOf('.');
  if (dotIndex == -1) {
    return false;
  }
  String extension = fileNameWithPath.substring(dotIndex);
  return extension.startsWith(CARBON_DATA_EXT) || extension.startsWith(CARBON_UPDATE_DELTA_EXT);
}
/**
 * check if it is carbon index file matching extension; the extension is taken
 * as everything from the last '.' of the path onwards
 *
 * @param fileNameWithPath path to check
 * @return true when the extension starts with the index file extension
 */
public static boolean isCarbonIndexFile(String fileNameWithPath) {
  int dotIndex = fileNameWithPath.lastIndexOf('.');
  return dotIndex != -1 && fileNameWithPath.substring(dotIndex).startsWith(INDEX_FILE_EXT);
}
/**
 * gets table path
 *
 * @return absolute path of the table directory
 */
public String getPath() {
  return tablePath;
}
/**
 * @param columnId unique column identifier
 * @return absolute path of the column's dictionary file under the metadata dir
 */
public String getDictionaryFilePath(String columnId) {
  String metaDir = getMetaDataDir();
  return metaDir + File.separator + getDictionaryFileName(columnId);
}
/**
 * @return table directory relative to the store root, i.e. database name
 *         followed by table name
 */
public String getRelativeDictionaryDirectory() {
  String dbName = carbonTableIdentifier.getDatabaseName();
  String tableName = carbonTableIdentifier.getTableName();
  return dbName + File.separator + tableName;
}
/**
 * This method will return the metadata directory location for a table
 *
 * @return absolute path of the table's metadata directory
 */
public String getMetadataDirectoryPath() {
  return getMetaDataDir();
}
/**
 * @param columnId unique column identifier
 * @return absolute path of dictionary meta file
 */
public String getDictionaryMetaFilePath(String columnId) {
  return String.join(File.separator, getMetaDataDir(), columnId + DICTIONARY_META_EXT);
}
/**
 * @param columnId unique column identifier
 * @return absolute path of the column's sort index file
 */
public String getSortIndexFilePath(String columnId) {
  return String.join(File.separator, getMetaDataDir(), columnId + SORT_INDEX_EXT);
}
/**
 * @param columnId unique column identifier
 * @param dictOffset dictionary offset appended to the file name
 * @return absolute path of sort index file with the dictionary offset appended
 */
public String getSortIndexFilePath(String columnId, long dictOffset) {
  String fileName = columnId + "_" + dictOffset + SORT_INDEX_EXT;
  return String.join(File.separator, getMetaDataDir(), fileName);
}
/**
 * @return absolute path of this table's schema file
 */
public String getSchemaFilePath() {
  return String.join(File.separator, getMetaDataDir(), SCHEMA_FILE);
}
/**
 * return the schema file path for an arbitrary table location
 * @param tablePath path to table files
 * @return schema file path
 */
public static String getSchemaFilePath(String tablePath) {
  return String.join(File.separator, tablePath, METADATA_DIR, SCHEMA_FILE);
}
/**
 * @return absolute path of the table status file
 */
public String getTableStatusFilePath() {
  return String.join(File.separator, getMetaDataDir(), TABLE_STATUS_FILE);
}
/**
 * @return absolute path of the table update status file
 */
public String getTableUpdateStatusFilePath() {
  return String.join(File.separator, getMetaDataDir(), TABLE_UPDATE_STATUS_FILE);
}
/**
 * Gets absolute path of data file
 *
 * @param partitionId unique partition identifier
 * @param segmentId unique segment identifier
 * @param filePartNo data file part number
 * @param taskNo task identifier
 * @param batchNo batch number within the task
 * @param bucketNumber bucket number
 * @param factUpdateTimeStamp unique identifier to identify an update
 * @return absolute path of data file stored in carbon data format
 */
public String getCarbonDataFilePath(String partitionId, String segmentId, Integer filePartNo,
    Integer taskNo, int batchNo, int bucketNumber, String factUpdateTimeStamp) {
  return getSegmentDir(partitionId, segmentId) + File.separator + getCarbonDataFileName(
      filePartNo, taskNo, bucketNumber, batchNo, factUpdateTimeStamp);
}
/**
 * Below method will be used to get the index file present in the segment folder
 * based on task id
 *
 * @param taskId task id of the file
 * @param partitionId partition number
 * @param segmentId segment number
 * @param bucketNumber bucket number, or "-1" when bucketing is not used
 * @return full qualified carbon index path
 */
public String getCarbonIndexFilePath(final String taskId, final String partitionId,
    final String segmentId, final String bucketNumber) {
  String segmentDir = getSegmentDir(partitionId, segmentId);
  CarbonFile carbonFile =
      FileFactory.getCarbonFile(segmentDir, FileFactory.getFileType(segmentDir));
  // Scan the segment directory for an index file written by the given task.
  CarbonFile[] files = carbonFile.listFiles(new CarbonFileFilter() {
    @Override public boolean accept(CarbonFile file) {
      if (bucketNumber.equals("-1")) {
        // No bucketing: match on the task id prefix alone.
        return file.getName().startsWith(taskId) && file.getName().endsWith(INDEX_FILE_EXT);
      }
      return file.getName().startsWith(taskId + "-" + bucketNumber) && file.getName()
          .endsWith(INDEX_FILE_EXT);
    }
  });
  // NOTE(review): when several files match, the first listing entry wins;
  // listing order is not guaranteed to be deterministic — confirm callers
  // tolerate this.
  if (files.length > 0) {
    return files[0].getAbsolutePath();
  } else {
    throw new RuntimeException("Missing Carbon index file for partition["
        + partitionId + "] Segment[" + segmentId + "], taskId[" + taskId
        + "]");
  }
}
/**
 * Below method will be used to get the carbon index file path
 * @param taskId
 *        task id
 * @param partitionId
 *        partition id
 * @param segmentId
 *        segment id
 * @param bucketNumber
 *        bucket number
 * @param timeStamp
 *        timestamp
 * @param columnarFormatVersion store format version of the segment
 * @return carbon index file path
 */
public String getCarbonIndexFilePath(String taskId, String partitionId, String segmentId,
    String bucketNumber, String timeStamp, ColumnarFormatVersion columnarFormatVersion) {
  switch (columnarFormatVersion) {
    case V1:
    case V2:
      // Older formats: the file has to be located by listing the segment
      // directory (see the overload above).
      return getCarbonIndexFilePath(taskId, partitionId, segmentId, bucketNumber);
    default:
      // Newer formats: the file name is fully determined, build it directly.
      String segmentDir = getSegmentDir(partitionId, segmentId);
      return segmentDir + File.separator + getCarbonIndexFileName(taskId,
          Integer.parseInt(bucketNumber), timeStamp);
  }
}
/**
 * Gets the carbon index file path for a specific batch of a task: resolved by
 * directory listing for V1/V2 stores, built directly from its parts otherwise.
 * NOTE(review): unlike the overload above, the default branch parses taskId as
 * an int — confirm callers always pass numeric task ids here.
 */
public String getCarbonIndexFilePath(String taskId, String partitionId, String segmentId,
    int batchNo, String bucketNumber, String timeStamp,
    ColumnarFormatVersion columnarFormatVersion) {
  switch (columnarFormatVersion) {
    case V1:
    case V2:
      return getCarbonIndexFilePath(taskId, partitionId, segmentId, bucketNumber);
    default:
      String segmentDir = getSegmentDir(partitionId, segmentId);
      return segmentDir + File.separator + getCarbonIndexFileName(Integer.parseInt(taskId),
          Integer.parseInt(bucketNumber), batchNo, timeStamp);
  }
}
/**
 * Builds an index file name of the form
 * "taskNo-bucketNumber-factUpdatedtimeStamp" plus the index extension.
 */
private static String getCarbonIndexFileName(String taskNo, int bucketNumber,
    String factUpdatedtimeStamp) {
  return String.join("-", taskNo, String.valueOf(bucketNumber), factUpdatedtimeStamp)
      + INDEX_FILE_EXT;
}
/**
 * Below method will be used to get the updated index file present in the
 * segment folder based on task id
 *
 * @param taskId task id of the file
 * @param partitionId partition number
 * @param segmentId segment number
 * @return full qualified carbon index path
 */
public String getCarbonUpdatedIndexFilePath(final String taskId, final String partitionId,
    final String segmentId) {
  String segmentDir = getSegmentDir(partitionId, segmentId);
  CarbonFile carbonFile =
      FileFactory.getCarbonFile(segmentDir, FileFactory.getFileType(segmentDir));
  // Match any index file written by the given task.
  CarbonFile[] files = carbonFile.listFiles(new CarbonFileFilter() {
    @Override public boolean accept(CarbonFile file) {
      return file.getName().startsWith(taskId) && file.getName().endsWith(INDEX_FILE_EXT);
    }
  });
  // NOTE(review): first matching listing entry wins; listing order is not
  // guaranteed to be deterministic.
  if (files.length > 0) {
    return files[0].getAbsolutePath();
  } else {
    throw new RuntimeException(
        "Missing Carbon Updated index file for partition[" + partitionId
            + "] Segment[" + segmentId + "], taskId[" + taskId + "]");
  }
}
/**
 * Below method will be used to get the delete delta file present in the
 * segment folder based on task id
 *
 * @param taskId task id of the file
 * @param partitionId partition number
 * @param segmentId segment number
 * @return full qualified carbon delete delta file path
 */
public String getCarbonDeleteDeltaFilePath(final String taskId, final String partitionId,
    final String segmentId) {
  String segmentDir = getSegmentDir(partitionId, segmentId);
  CarbonFile carbonFile =
      FileFactory.getCarbonFile(segmentDir, FileFactory.getFileType(segmentDir));
  // Match any delete-delta file written by the given task.
  CarbonFile[] files = carbonFile.listFiles(new CarbonFileFilter() {
    @Override public boolean accept(CarbonFile file) {
      return file.getName().startsWith(taskId) && file.getName().endsWith(DELETE_DELTA_FILE_EXT);
    }
  });
  // NOTE(review): first matching listing entry wins; listing order is not
  // guaranteed to be deterministic.
  if (files.length > 0) {
    return files[0].getAbsolutePath();
  } else {
    throw new RuntimeException(
        "Missing Carbon delete delta file index file for partition["
            + partitionId + "] Segment[" + segmentId + "], taskId[" + taskId
            + "]");
  }
}
/**
 * Gets absolute path of the directory containing a segment's data files
 *
 * @param partitionId unique partition identifier
 * @param segmentId unique segment identifier
 * @return absolute path of the segment directory
 */
public String getCarbonDataDirectoryPath(String partitionId, String segmentId) {
  return getSegmentDir(partitionId, segmentId);
}
/**
 * Gets data file name only with out path
 *
 * @param filePartNo data file part number
 * @param taskNo task identifier
 * @param bucketNumber bucket number
 * @param batchNo batch number within the task
 * @param factUpdateTimeStamp unique identifier to identify an update
 * @return gets data file name only with out path
 */
public static String getCarbonDataFileName(Integer filePartNo, Integer taskNo, int bucketNumber,
    int batchNo, String factUpdateTimeStamp) {
  StringBuilder name = new StringBuilder(DATA_PART_PREFIX);
  name.append(filePartNo).append('-').append(taskNo);
  name.append(BATCH_PREFIX).append(batchNo);
  name.append('-').append(bucketNumber);
  name.append('-').append(factUpdateTimeStamp);
  name.append(CARBON_DATA_EXT);
  return name.toString();
}
/**
 * Below method will be used to get the carbon index filename
 *
 * @param taskNo task number
 * @param bucketNumber bucket number
 * @param batchNo batch number within the task
 * @param factUpdatedTimeStamp time stamp
 * @return filename
 */
public static String getCarbonIndexFileName(int taskNo, int bucketNumber, int batchNo,
    String factUpdatedTimeStamp) {
  StringBuilder name = new StringBuilder();
  name.append(taskNo).append(BATCH_PREFIX).append(batchNo);
  name.append('-').append(bucketNumber);
  name.append('-').append(factUpdatedTimeStamp);
  name.append(INDEX_FILE_EXT);
  return name.toString();
}
/**
 * Below method will be used to get the carbon index filename
 *
 * @param taskNo task number
 * @param factUpdatedTimeStamp time stamp
 * @param indexFileExtension extension appended to the name
 * @return filename
 */
public String getCarbonIndexFileName(int taskNo, String factUpdatedTimeStamp,
    String indexFileExtension) {
  StringBuilder name = new StringBuilder();
  name.append(taskNo).append('-').append(factUpdatedTimeStamp).append(indexFileExtension);
  return name.toString();
}
// Segment directory: partition dir + separator + SEGMENT_PREFIX + segment id.
private String getSegmentDir(String partitionId, String segmentId) {
  return getPartitionDir(partitionId) + File.separator + SEGMENT_PREFIX + segmentId;
}
// Partition directory: fact dir + separator + PARTITION_PREFIX + partition id.
public String getPartitionDir(String partitionId) {
  return getFactDir() + File.separator + PARTITION_PREFIX + partitionId;
}
// Metadata directory under the table path; holds schema, dictionary and
// status files (see the path builders above).
private String getMetaDataDir() {
  return tablePath + File.separator + METADATA_DIR;
}
// Fact directory under the table path; root of the partition/segment tree.
public String getFactDir() {
  return tablePath + File.separator + FACT_DIR;
}
// Identifier (database + table name) this path was built for.
public CarbonTableIdentifier getCarbonTableIdentifier() {
  return carbonTableIdentifier;
}
/**
 * Two table paths are equal when they point to the same table directory and
 * the superclass state also matches.
 */
@Override public boolean equals(Object o) {
  if (o instanceof CarbonTablePath) {
    CarbonTablePath other = (CarbonTablePath) o;
    return tablePath.equals(other.tablePath) && super.equals(o);
  }
  return false;
}
// Combines the superclass hash with the table path hash so objects that are
// equal per equals() hash alike.
@Override public int hashCode() {
  return super.hashCode() + tablePath.hashCode();
}
/**
 * To manage data file name and composition
 */
public static class DataFileUtil {
  /**
   * gets updated timestamp information from given carbon data file name
   */
  public static String getTimeStampFromFileName(String carbonDataFileName) {
    // Get the timestamp portion of the file.
    String fileName = getFileName(carbonDataFileName);
    // Timestamp is the text between the last '-' and the following '.'.
    int startIndex = fileName.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1;
    int endIndex = fileName.indexOf(".", startIndex);
    return fileName.substring(startIndex, endIndex);
  }
  /**
   * This will return the timestamp present in the delete delta file.
   * (text between the last '-' and the last '.')
   * @param fileName
   * @return timestamp portion of the name
   */
  public static String getTimeStampFromDeleteDeltaFile(String fileName) {
    return fileName.substring(fileName.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1,
        fileName.lastIndexOf("."));
  }
  /**
   * This will return the block name present in the delete delta file,
   * i.e. everything before the last '-'.
   * @param fileName
   * @return block name portion of the name
   */
  public static String getBlockNameFromDeleteDeltaFile(String fileName) {
    return fileName.substring(0,
        fileName.lastIndexOf(CarbonCommonConstants.HYPHEN));
  }
  /**
   * gets bucket number information from given carbon data file name
   * (text between the third and fourth '-')
   */
  public static String getBucketNo(String carbonFilePath) {
    // Get the file name from path
    String fileName = getFileName(carbonFilePath);
    // + 1 for size of "-"
    int firstDashPos = fileName.indexOf("-");
    int secondDash = fileName.indexOf("-", firstDashPos + 1);
    int startIndex = fileName.indexOf("-", secondDash + 1) + 1;
    int endIndex = fileName.indexOf("-", startIndex);
    // to support backward compatibility
    // NOTE(review): startIndex is indexOf(...) + 1 and can therefore never be
    // -1 (it is 0 when no third dash exists); only the endIndex check is
    // effective here.
    if (startIndex == -1 || endIndex == -1) {
      return "-1";
    }
    return fileName.substring(startIndex, endIndex);
  }
  /**
   * gets file part number information from given carbon data file name
   * (text between the first and second '-')
   */
  public static String getPartNo(String carbonDataFileName) {
    // Get the file name from path
    String fileName = getFileName(carbonDataFileName);
    // + 1 for size of "-"
    int startIndex = fileName.indexOf("-") + 1;
    int endIndex = fileName.indexOf("-", startIndex);
    return fileName.substring(startIndex, endIndex);
  }
  /**
   * gets task number information from given carbon data file name
   * (text between the second and third '-')
   */
  public static String getTaskNo(String carbonDataFileName) {
    // Get the file name from path
    String fileName = getFileName(carbonDataFileName);
    // + 1 for size of "-"
    int firstDashPos = fileName.indexOf("-");
    int startIndex = fileName.indexOf("-", firstDashPos + 1) + 1;
    int endIndex = fileName.indexOf("-", startIndex);
    return fileName.substring(startIndex, endIndex);
  }
  /**
   * get the taskId part from taskNo(include taskId + batchNo)
   * @param taskNo combined "taskId_batchnoN" value
   * @return numeric task id
   */
  public static int getTaskIdFromTaskNo(String taskNo) {
    return Integer.parseInt(taskNo.split(BATCH_PREFIX)[0]);
  }
  // Extracts the numeric batch number from a combined "taskId_batchnoN" value.
  public static int getBatchNoFromTaskNo(String taskNo) {
    return Integer.parseInt(taskNo.split(BATCH_PREFIX)[1]);
  }
  /**
   * Gets the file name from file path (text after the last separator, or the
   * input unchanged when it contains no separator)
   */
  private static String getFileName(String carbonDataFileName) {
    int endIndex = carbonDataFileName.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR);
    if (endIndex > -1) {
      return carbonDataFileName.substring(endIndex + 1, carbonDataFileName.length());
    } else {
      return carbonDataFileName;
    }
  }
  /**
   * Gets the file name of the delta files.
   *
   * @param filePartNo data file part number
   * @param taskNo task identifier
   * @param factUpdateTimeStamp unique identifier of the update
   * @param Extension delta file extension to append
   * @return delta file name
   */
  public static String getCarbonDeltaFileName(String filePartNo, String taskNo,
      String factUpdateTimeStamp, String Extension) {
    return DATA_PART_PREFIX + filePartNo + "-" + taskNo + "-" + factUpdateTimeStamp
        + Extension;
  }
}
/**
 * To manage data path and composition
 */
public static class DataPathUtil {
  /**
   * gets segement id from given absolute data file path
   */
  public static String getSegmentId(String dataFileAbsolutePath) {
    // find segment id from last of data file path
    // Normalize Windows separators so the index arithmetic works on either
    // style; the replacement preserves string length, so the computed indices
    // remain valid for the original string as well.
    String tempdataFileAbsolutePath = dataFileAbsolutePath.replace(
        CarbonCommonConstants.WINDOWS_FILE_SEPARATOR, CarbonCommonConstants.FILE_SEPARATOR);
    int endIndex = tempdataFileAbsolutePath.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR);
    // + 1 for size of "/"
    int startIndex = tempdataFileAbsolutePath.lastIndexOf(
        CarbonCommonConstants.FILE_SEPARATOR, endIndex - 1) + 1;
    String segmentDirStr = dataFileAbsolutePath.substring(startIndex, endIndex);
    //identify id in segment_<id>
    String[] segmentDirSplits = segmentDirStr.split("_");
    try {
      if (segmentDirSplits.length == 2) {
        return segmentDirSplits[1];
      }
    } catch (Exception e) {
      // NOTE(review): nothing inside the try can actually throw here; the
      // catch is effectively dead code kept as a safety net.
      return CarbonCommonConstants.INVALID_SEGMENT_ID;
    }
    return CarbonCommonConstants.INVALID_SEGMENT_ID;
  }
}
/**
 * Below method will be used to get sort index file present in mentioned folder
 *
 * @param sortIndexDir directory where sort index file resides
 * @param columnUniqueId column unique id
 * @return sort index carbon files
 */
public CarbonFile[] getSortIndexFiles(CarbonFile sortIndexDir, final String columnUniqueId) {
  // Matches both "<columnId>.sortindex" and "<columnId>_<offset>.sortindex"
  // (see the two getSortIndexFilePath overloads above).
  return sortIndexDir.listFiles(new CarbonFileFilter() {
    @Override public boolean accept(CarbonFile file) {
      return file.getName().startsWith(columnUniqueId) && file.getName().endsWith(SORT_INDEX_EXT);
    }
  });
}
/**
 * returns the carbondata file name
 *
 * @param carbonDataFilePath carbondata file path
 * @return file name without directory and without the carbondata extension
 */
public static String getCarbonDataFileName(String carbonDataFilePath) {
  // NOTE(review): uses indexOf for the extension, so a name containing the
  // extension text in the middle would be truncated early, and a path without
  // the extension makes substring throw — confirm callers always pass full
  // carbondata file paths.
  return carbonDataFilePath
      .substring(carbonDataFilePath.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR) + 1,
          carbonDataFilePath.indexOf(CARBON_DATA_EXT));
}
/**
 * @return prefix of carbon data file names
 */
public static String getCarbonDataPrefix() {
  return DATA_PART_PREFIX;
}
/**
 *
 * @return carbon data extension
 */
public static String getCarbonDataExtension() {
  return CARBON_DATA_EXT;
}
/**
 *
 * @return carbon index extension
 */
public static String getCarbonIndexExtension() {
  return INDEX_FILE_EXT;
}
/**
 * This method will remove the well-known prefixes and the carbondata extension
 * from a block id, producing its short form.
 *
 * @param blockId full block id
 * @return shortBlockId
 */
public static String getShortBlockId(String blockId) {
  String shortId = blockId.replace(PARTITION_PREFIX, "");
  shortId = shortId.replace(SEGMENT_PREFIX, "");
  shortId = shortId.replace(DATA_PART_PREFIX, "");
  return shortId.replace(CARBON_DATA_EXT, "");
}
/**
 * This method will append strings in path and return block id: the inverse of
 * {@link #getShortBlockId(String)}. Path components 0..3 get the partition,
 * segment and data-part prefixes plus the carbondata extension re-applied;
 * any further components are appended unchanged.
 *
 * @param shortBlockId short block id produced by getShortBlockId
 * @return blockId
 */
public static String getBlockId(String shortBlockId) {
  String[] splitRecords = shortBlockId.split(CarbonCommonConstants.FILE_SEPARATOR);
  // StringBuilder instead of the legacy synchronized StringBuffer: this is a
  // purely local accumulator, so no synchronization is needed.
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < splitRecords.length; i++) {
    if (i == 0) {
      sb.append(PARTITION_PREFIX).append(splitRecords[i]);
    } else if (i == 1) {
      sb.append(CarbonCommonConstants.FILE_SEPARATOR);
      sb.append(SEGMENT_PREFIX).append(splitRecords[i]);
    } else if (i == 2) {
      sb.append(CarbonCommonConstants.FILE_SEPARATOR);
      sb.append(DATA_PART_PREFIX).append(splitRecords[i]);
    } else if (i == 3) {
      sb.append(CarbonCommonConstants.FILE_SEPARATOR);
      sb.append(splitRecords[i]).append(CARBON_DATA_EXT);
    } else {
      sb.append(CarbonCommonConstants.FILE_SEPARATOR);
      sb.append(splitRecords[i]);
    }
  }
  return sb.toString();
}
/**
 * adds data part prefix to given value
 * @return value with the data part prefix prepended
 */
public static String addDataPartPrefix(String value) {
  return DATA_PART_PREFIX + value;
}
/**
 * adds partition prefix to given value
 * @return value with the partition prefix prepended
 */
public static String addPartPrefix(String value) {
  return PARTITION_PREFIX + value;
}
/**
 * adds segment prefix to given value
 * @return value with the segment prefix prepended
 */
public static String addSegmentPrefix(String value) {
  return SEGMENT_PREFIX + value;
}
/**
 * Builds the index file name matching a given data block file name by reusing
 * its task number, bucket number and timestamp.
 */
public static String getCarbonIndexFileName(String actualBlockName) {
  return DataFileUtil.getTaskNo(actualBlockName) + "-" + DataFileUtil.getBucketNo(actualBlockName)
      + "-" + DataFileUtil.getTimeStampFromFileName(actualBlockName) + INDEX_FILE_EXT;
}
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties;
import javax.annotation.PostConstruct;
import javax.validation.constraints.NotNull;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.mock.env.MockEnvironment;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;
import org.springframework.validation.ValidationUtils;
import org.springframework.validation.Validator;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests for {@link ConfigurationPropertiesBindingPostProcessor}.
*
* @author Christian Dupuis
* @author Phillip Webb
*/
public class ConfigurationPropertiesBindingPostProcessorTests {
	// Lets individual tests declare expected exceptions without try/catch.
	@Rule
	public ExpectedException thrown = ExpectedException.none();
	// Context under test; created fresh per test and closed in close().
	private AnnotationConfigApplicationContext context;
	@After
	public void close() {
		if (this.context != null) {
			this.context.close();
		}
	}
	// A setter that rejects its value should surface as a single bind error.
	@Test
	public void testValidationWithSetter() {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "test.foo:spam");
		this.context.register(TestConfigurationWithValidatingSetter.class);
		try {
			this.context.refresh();
			fail("Expected exception");
		}
		catch (BeanCreationException ex) {
			// The root cause of the bean failure carries the binding errors.
			BindException bex = (BindException) ex.getRootCause();
			assertEquals(1, bex.getErrorCount());
		}
	}
	// A properties bean implementing Spring's Validator is validated without JSR-303.
	@Test
	public void testValidationWithoutJSR303() {
		this.context = new AnnotationConfigApplicationContext();
		this.context.register(TestConfigurationWithoutJSR303.class);
		try {
			this.context.refresh();
			fail("Expected exception");
		}
		catch (BeanCreationException ex) {
			BindException bex = (BindException) ex.getRootCause();
			assertEquals(1, bex.getErrorCount());
		}
	}
	// Combines the custom Validator error with a JSR-303 @NotNull violation: two errors.
	@Test
	public void testValidationWithJSR303() {
		this.context = new AnnotationConfigApplicationContext();
		this.context.register(TestConfigurationWithJSR303.class);
		try {
			this.context.refresh();
			fail("Expected exception");
		}
		catch (BeanCreationException ex) {
			BindException bex = (BindException) ex.getRootCause();
			assertEquals(2, bex.getErrorCount());
		}
	}
	// With both properties supplied, refresh succeeds (no exception expected).
	@Test
	public void testSuccessfulValidationWithJSR303() {
		MockEnvironment env = new MockEnvironment();
		env.setProperty("test.foo", "123456");
		env.setProperty("test.bar", "654321");
		this.context = new AnnotationConfigApplicationContext();
		this.context.setEnvironment(env);
		this.context.register(TestConfigurationWithJSR303.class);
		this.context.refresh();
	}
	// @PostConstruct of a properties bean must run after binding completed.
	@Test
	public void testInitializersSeeBoundProperties() {
		MockEnvironment env = new MockEnvironment();
		env.setProperty("bar", "foo");
		this.context = new AnnotationConfigApplicationContext();
		this.context.setEnvironment(env);
		this.context.register(TestConfigurationWithInitializer.class);
		this.context.refresh();
	}
	@Test
	public void testPropertyWithEnum() throws Exception {
		doEnumTest("test.theValue:foo");
	}
	// Relaxed binding: dashed, upper-snake and mixed-case names all reach theValue.
	@Test
	public void testRelaxedPropertyWithEnum() throws Exception {
		doEnumTest("test.the-value:FoO");
		doEnumTest("TEST_THE_VALUE:FoO");
		doEnumTest("test.THE_VALUE:FoO");
		doEnumTest("test_the_value:FoO");
	}
	// Shared driver: binds the given property and expects it to resolve to FOO.
	private void doEnumTest(String property) {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, property);
		this.context.register(PropertyWithEnum.class);
		this.context.refresh();
		assertThat(this.context.getBean(PropertyWithEnum.class).getTheValue(),
				equalTo(FooEnum.FOO));
		this.context.close();
	}
	// @Value("${default.value}") placeholders on a properties bean still resolve.
	@Test
	public void testValueBindingForDefaults() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "default.value:foo");
		this.context.register(PropertyWithValue.class);
		this.context.refresh();
		assertThat(this.context.getBean(PropertyWithValue.class).getValue(),
				equalTo("foo"));
	}
	// Placeholders inside a custom-location properties file are resolved (merged).
	@Test
	public void placeholderResolutionWithCustomLocation() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "fooValue:bar");
		this.context.register(CustomConfigurationLocation.class);
		this.context.refresh();
		assertThat(this.context.getBean(CustomConfigurationLocation.class).getFoo(),
				equalTo("bar"));
	}
	// With merge=false the environment is not consulted, so the placeholder stays literal.
	@Test
	public void placeholderResolutionWithUnmergedCustomLocation() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "fooValue:bar");
		this.context.register(UnmergedCustomConfigurationLocation.class);
		this.context.refresh();
		assertThat(this.context.getBean(UnmergedCustomConfigurationLocation.class)
				.getFoo(), equalTo("${fooValue}"));
	}
	// Guards against the post-processor eagerly instantiating factory beans.
	@Test
	public void configurationPropertiesWithFactoryBean() throws Exception {
		ConfigurationPropertiesWithFactoryBean.factoryBeanInit = false;
		this.context = new AnnotationConfigApplicationContext() {
			@Override
			protected void onRefresh() throws BeansException {
				// The factory bean must not have been initialized before refresh.
				assertFalse("Init too early",
						ConfigurationPropertiesWithFactoryBean.factoryBeanInit);
				super.onRefresh();
			}
		};
		this.context.register(ConfigurationPropertiesWithFactoryBean.class);
		GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
		beanDefinition.setBeanClass(FactoryBeanTester.class);
		beanDefinition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_BY_TYPE);
		this.context.registerBeanDefinition("test", beanDefinition);
		this.context.refresh();
		assertTrue("No init", ConfigurationPropertiesWithFactoryBean.factoryBeanInit);
	}
	// A String property value can be bound onto a char[] field.
	@Test
	public void configurationPropertiesWithCharArray() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "test.chars:word");
		this.context.register(PropertyWithCharArray.class);
		this.context.refresh();
		assertThat(this.context.getBean(PropertyWithCharArray.class).getChars(),
				equalTo("word".toCharArray()));
	}
	// Indexed properties expand an existing char[] beyond its current length.
	@Test
	public void configurationPropertiesWithArrayExpansion() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "test.chars[4]:s");
		this.context.register(PropertyWithCharArrayExpansion.class);
		this.context.refresh();
		assertThat(this.context.getBean(PropertyWithCharArrayExpansion.class).getChars(),
				equalTo("words".toCharArray()));
	}
	// ignoreUnknownFields=false turns an unknown property into a creation failure.
	@Test
	public void notWritablePropertyException() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "test.madeup:word");
		this.context.register(PropertyWithCharArray.class);
		this.thrown.expect(BeanCreationException.class);
		this.thrown.expectMessage("test");
		this.context.refresh();
	}
	// The most recently added value wins when the same relaxed name is set twice.
	@Test
	public void relaxedPropertyNamesSame() throws Exception {
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "test.FOO_BAR:test1");
		EnvironmentTestUtils.addEnvironment(this.context, "test.FOO_BAR:test2");
		this.context.register(RelaxedPropertyNames.class);
		this.context.refresh();
		assertThat(this.context.getBean(RelaxedPropertyNames.class).getFooBar(),
				equalTo("test2"));
	}
	@Test
	public void relaxedPropertyNamesMixed() throws Exception {
		// gh-3385
		this.context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, "test.foo-bar:test1");
		EnvironmentTestUtils.addEnvironment(this.context, "test.FOO_BAR:test2");
		this.context.register(RelaxedPropertyNames.class);
		this.context.refresh();
		assertThat(this.context.getBean(RelaxedPropertyNames.class).getFooBar(),
				equalTo("test2"));
	}
	// ---- fixture configurations and properties beans below ----
	@Configuration
	@EnableConfigurationProperties
	public static class TestConfigurationWithValidatingSetter {
		@Bean
		public PropertyWithValidatingSetter testProperties() {
			return new PropertyWithValidatingSetter();
		}
	}
	// Rejects any value other than "bar" directly in the setter.
	@ConfigurationProperties(prefix = "test")
	public static class PropertyWithValidatingSetter {
		private String foo;
		public String getFoo() {
			return this.foo;
		}
		public void setFoo(String foo) {
			this.foo = foo;
			if (!foo.equals("bar")) {
				throw new IllegalArgumentException("Wrong value for foo");
			}
		}
	}
	@Configuration
	@EnableConfigurationProperties
	public static class TestConfigurationWithoutJSR303 {
		@Bean
		public PropertyWithoutJSR303 testProperties() {
			return new PropertyWithoutJSR303();
		}
	}
	// Validates itself via Spring's Validator contract: foo must be non-empty.
	@ConfigurationProperties(prefix = "test")
	public static class PropertyWithoutJSR303 implements Validator {
		private String foo;
		@Override
		public boolean supports(Class<?> clazz) {
			return clazz.isAssignableFrom(getClass());
		}
		@Override
		public void validate(Object target, Errors errors) {
			ValidationUtils.rejectIfEmpty(errors, "foo", "TEST1");
		}
		public String getFoo() {
			return this.foo;
		}
		public void setFoo(String foo) {
			this.foo = foo;
		}
	}
	@Configuration
	@EnableConfigurationProperties
	public static class TestConfigurationWithJSR303 {
		@Bean
		public PropertyWithJSR303 testProperties() {
			return new PropertyWithJSR303();
		}
	}
	// Asserts in @PostConstruct that binding has already populated the field.
	@Configuration
	@EnableConfigurationProperties
	@ConfigurationProperties
	public static class TestConfigurationWithInitializer {
		private String bar;
		public void setBar(String bar) {
			this.bar = bar;
		}
		public String getBar() {
			return this.bar;
		}
		@PostConstruct
		public void init() {
			assertNotNull(this.bar);
		}
	}
	// Adds a JSR-303 constraint on top of the Validator-based parent.
	@ConfigurationProperties(prefix = "test")
	public static class PropertyWithJSR303 extends PropertyWithoutJSR303 {
		@NotNull
		private String bar;
		public void setBar(String bar) {
			this.bar = bar;
		}
		public String getBar() {
			return this.bar;
		}
	}
	@Configuration
	@EnableConfigurationProperties
	@ConfigurationProperties(prefix = "test", ignoreUnknownFields = false)
	public static class PropertyWithCharArray {
		private char[] chars;
		public char[] getChars() {
			return this.chars;
		}
		public void setChars(char[] chars) {
			this.chars = chars;
		}
	}
	// Starts with "word" so an indexed property can extend it to "words".
	@Configuration
	@EnableConfigurationProperties
	@ConfigurationProperties(prefix = "test", ignoreUnknownFields = false)
	public static class PropertyWithCharArrayExpansion {
		private char[] chars = new char[] { 'w', 'o', 'r', 'd' };
		public char[] getChars() {
			return this.chars;
		}
		public void setChars(char[] chars) {
			this.chars = chars;
		}
	}
	@Configuration
	@EnableConfigurationProperties
	@ConfigurationProperties(prefix = "test")
	public static class PropertyWithEnum {
		private FooEnum theValue;
		public void setTheValue(FooEnum value) {
			this.theValue = value;
		}
		public FooEnum getTheValue() {
			return this.theValue;
		}
	}
	enum FooEnum {
		FOO, BAZ, BAR
	}
	// Registers a PropertySourcesPlaceholderConfigurer so @Value placeholders resolve.
	@Configuration
	@EnableConfigurationProperties
	@ConfigurationProperties(prefix = "test")
	public static class PropertyWithValue {
		@Value("${default.value}")
		private String value;
		public void setValue(String value) {
			this.value = value;
		}
		public String getValue() {
			return this.value;
		}
		@Bean
		public static PropertySourcesPlaceholderConfigurer configurer() {
			return new PropertySourcesPlaceholderConfigurer();
		}
	}
	@EnableConfigurationProperties
	@ConfigurationProperties(locations = "custom-location.yml")
	public static class CustomConfigurationLocation {
		private String foo;
		public String getFoo() {
			return this.foo;
		}
		public void setFoo(String foo) {
			this.foo = foo;
		}
	}
	@EnableConfigurationProperties
	@ConfigurationProperties(locations = "custom-location.yml", merge = false)
	public static class UnmergedCustomConfigurationLocation {
		private String foo;
		public String getFoo() {
			return this.foo;
		}
		public void setFoo(String foo) {
			this.foo = foo;
		}
	}
	@Configuration
	@EnableConfigurationProperties
	public static class ConfigurationPropertiesWithFactoryBean {
		// Flipped to true by FactoryBeanTester.afterPropertiesSet().
		public static boolean factoryBeanInit;
	}
	@SuppressWarnings("rawtypes")
	// Must be a raw type
	static class FactoryBeanTester implements FactoryBean, InitializingBean {
		@Override
		public Object getObject() throws Exception {
			return Object.class;
		}
		@Override
		public Class<?> getObjectType() {
			return null;
		}
		@Override
		public boolean isSingleton() {
			return true;
		}
		@Override
		public void afterPropertiesSet() throws Exception {
			ConfigurationPropertiesWithFactoryBean.factoryBeanInit = true;
		}
	}
}
| |
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.transaction;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link TransactionAutoConfiguration}.
*
* @author Stephane Nicoll
* @author Phillip Webb
*/
public class TransactionAutoConfigurationTests {

    // Context built per test by load(); closed in tearDown().
    private AnnotationConfigApplicationContext context;

    @AfterEach
    public void tearDown() {
        if (this.context != null) {
            this.context.close();
        }
    }

    @Test
    public void noTransactionManager() {
        // Without a PlatformTransactionManager bean, no TransactionTemplate
        // should be auto-configured.
        load(EmptyConfiguration.class);
        assertThat(this.context.getBeansOfType(TransactionTemplate.class)).isEmpty();
    }

    @Test
    public void singleTransactionManager() {
        // A single auto-configured transaction manager should back the
        // auto-configured TransactionTemplate.
        load(new Class<?>[] { DataSourceAutoConfiguration.class,
                DataSourceTransactionManagerAutoConfiguration.class },
                "spring.datasource.initialization-mode:never");
        PlatformTransactionManager transactionManager = this.context
                .getBean(PlatformTransactionManager.class);
        TransactionTemplate transactionTemplate = this.context
                .getBean(TransactionTemplate.class);
        assertThat(transactionTemplate.getTransactionManager())
                .isSameAs(transactionManager);
    }

    @Test
    public void severalTransactionManagers() {
        // With an ambiguous set of transaction managers, auto-configuration
        // backs off and creates no TransactionTemplate.
        load(SeveralTransactionManagersConfiguration.class);
        assertThat(this.context.getBeansOfType(TransactionTemplate.class)).isEmpty();
    }

    @Test
    public void customTransactionManager() {
        // A user-defined TransactionTemplate should suppress the
        // auto-configured one.
        load(CustomTransactionManagerConfiguration.class);
        Map<String, TransactionTemplate> beans = this.context
                .getBeansOfType(TransactionTemplate.class);
        assertThat(beans).hasSize(1);
        assertThat(beans.containsKey("transactionTemplateFoo")).isTrue();
    }

    @Test
    public void platformTransactionManagerCustomizers() {
        // TransactionManagerCustomizers should be registered with the
        // TransactionProperties customizer.
        load(SeveralTransactionManagersConfiguration.class);
        TransactionManagerCustomizers customizers = this.context
                .getBean(TransactionManagerCustomizers.class);
        List<?> field = (List<?>) ReflectionTestUtils.getField(customizers,
                "customizers");
        assertThat(field).hasSize(1).first().isInstanceOf(TransactionProperties.class);
    }

    @Test
    public void transactionNotManagedWithNoTransactionManager() {
        // @Transactional has no effect when no transaction manager exists.
        load(BaseConfiguration.class);
        assertThat(this.context.getBean(TransactionalService.class).isTransactionActive())
                .isFalse();
    }

    @Test
    public void transactionManagerUsesCglibByDefault() {
        // By default proxies are class-based (CGLIB), so the concrete impl
        // types are still visible as beans.
        load(TransactionManagersConfiguration.class);
        assertThat(this.context.getBean(AnotherServiceImpl.class).isTransactionActive())
                .isTrue();
        assertThat(this.context.getBeansOfType(TransactionalServiceImpl.class))
                .hasSize(1);
    }

    @Test
    public void transactionManagerCanBeConfiguredToJdkProxy() {
        // spring.aop.proxy-target-class=false switches to JDK interface
        // proxies, hiding the concrete impl types.
        load(TransactionManagersConfiguration.class,
                "spring.aop.proxy-target-class=false");
        assertThat(this.context.getBean(AnotherService.class).isTransactionActive())
                .isTrue();
        assertThat(this.context.getBeansOfType(AnotherServiceImpl.class)).hasSize(0);
        assertThat(this.context.getBeansOfType(TransactionalServiceImpl.class))
                .hasSize(0);
    }

    @Test
    public void customEnableTransactionManagementTakesPrecedence() {
        // A user-declared @EnableTransactionManagement(proxyTargetClass=false)
        // wins over the spring.aop.proxy-target-class property.
        load(new Class<?>[] { CustomTransactionManagementConfiguration.class,
                TransactionManagersConfiguration.class },
                "spring.aop.proxy-target-class=true");
        assertThat(this.context.getBean(AnotherService.class).isTransactionActive())
                .isTrue();
        assertThat(this.context.getBeansOfType(AnotherServiceImpl.class)).hasSize(0);
        assertThat(this.context.getBeansOfType(TransactionalServiceImpl.class))
                .hasSize(0);
    }

    // Convenience overload for a single configuration class.
    private void load(Class<?> config, String... environment) {
        load(new Class<?>[] { config }, environment);
    }

    // Builds the context: user configs first, then the auto-configuration,
    // then the given environment properties, and refreshes.
    private void load(Class<?>[] configs, String... environment) {
        AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
        applicationContext.register(configs);
        applicationContext.register(TransactionAutoConfiguration.class);
        TestPropertyValues.of(environment).applyTo(applicationContext);
        applicationContext.refresh();
        this.context = applicationContext;
    }

    @Configuration(proxyBeanMethods = false)
    static class EmptyConfiguration {

    }

    // Two transaction managers -> ambiguity for the auto-configuration.
    @Configuration(proxyBeanMethods = false)
    static class SeveralTransactionManagersConfiguration {

        @Bean
        public PlatformTransactionManager transactionManagerOne() {
            return mock(PlatformTransactionManager.class);
        }

        @Bean
        public PlatformTransactionManager transactionManagerTwo() {
            return mock(PlatformTransactionManager.class);
        }

    }

    // User-defined TransactionTemplate alongside a single manager.
    @Configuration(proxyBeanMethods = false)
    static class CustomTransactionManagerConfiguration {

        @Bean
        public TransactionTemplate transactionTemplateFoo(
                PlatformTransactionManager transactionManager) {
            return new TransactionTemplate(transactionManager);
        }

        @Bean
        public PlatformTransactionManager transactionManagerFoo() {
            return mock(PlatformTransactionManager.class);
        }

    }

    // Transactional services without any transaction manager.
    @Configuration(proxyBeanMethods = false)
    static class BaseConfiguration {

        @Bean
        public TransactionalService transactionalService() {
            return new TransactionalServiceImpl();
        }

        @Bean
        public AnotherServiceImpl anotherService() {
            return new AnotherServiceImpl();
        }

    }

    // Adds a real JDBC DataSource + DataSourceTransactionManager on top of
    // BaseConfiguration so @Transactional actually opens transactions.
    @Configuration(proxyBeanMethods = false)
    @Import(BaseConfiguration.class)
    static class TransactionManagersConfiguration {

        @Bean
        public DataSourceTransactionManager transactionManager(DataSource dataSource) {
            return new DataSourceTransactionManager(dataSource);
        }

        @Bean
        public DataSource dataSource() {
            return DataSourceBuilder.create()
                    .driverClassName("org.hsqldb.jdbc.JDBCDriver")
                    .url("jdbc:hsqldb:mem:tx").username("sa").build();
        }

    }

    // User-level @EnableTransactionManagement forcing JDK proxies.
    @Configuration(proxyBeanMethods = false)
    @EnableTransactionManagement(proxyTargetClass = false)
    static class CustomTransactionManagementConfiguration {

    }

    interface TransactionalService {

        @Transactional
        boolean isTransactionActive();

    }

    static class TransactionalServiceImpl implements TransactionalService {

        @Override
        public boolean isTransactionActive() {
            return TransactionSynchronizationManager.isActualTransactionActive();
        }

    }

    interface AnotherService {

        boolean isTransactionActive();

    }

    // @Transactional on the implementation method, not the interface.
    static class AnotherServiceImpl implements AnotherService {

        @Override
        @Transactional
        public boolean isTransactionActive() {
            return TransactionSynchronizationManager.isActualTransactionActive();
        }

    }

}
| |
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.id.enhanced;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.hibernate.id.IdentifierGeneratorHelper;
import org.hibernate.id.IntegralDataTypeHolder;
/**
 * Unit tests for the enhanced identifier-generation {@code Optimizer}
 * implementations (none, hilo, pooled, pooled-lo), driven against a mock
 * {@link AccessCallback} value source.
 *
 * @author Steve Ebersole
 */
@SuppressWarnings({ "deprecation" })
public class OptimizerUnitTest extends TestCase {
    public OptimizerUnitTest(String string) {
        super( string );
    }

    public static Test suite() {
        return new TestSuite( OptimizerUnitTest.class );
    }

    public void testBasicNoOptimizerUsage() {
        // test historic sequence behavior, where the initial values start at 1...
        SourceMock sequence = new SourceMock( 1 );
        Optimizer optimizer = buildNoneOptimizer( -1, 1 );
        for ( int i = 1; i < 11; i++ ) {
            final Long next = ( Long ) optimizer.generate( sequence );
            assertEquals( i, next.intValue() );
        }
        // "none" hits the source once per generated value
        assertEquals( 10, sequence.getTimesCalled() );
        assertEquals( 10, sequence.getCurrentValue() );
        // test historic table behavior, where the initial values started at 0 (we now force 1 to be the first used id value)
        sequence = new SourceMock( 0 );
        optimizer = buildNoneOptimizer( -1, 1 );
        for ( int i = 1; i < 11; i++ ) {
            final Long next = ( Long ) optimizer.generate( sequence );
            assertEquals( i, next.intValue() );
        }
        assertEquals( 11, sequence.getTimesCalled() ); // an extra time to get to 1 initially
        assertEquals( 10, sequence.getCurrentValue() );
    }

    public void testBasicHiLoOptimizerUsage() {
        int increment = 10;
        Long next;
        // test historic sequence behavior, where the initial values start at 1...
        SourceMock sequence = new SourceMock( 1 );
        Optimizer optimizer = buildHiloOptimizer( -1, increment );
        for ( int i = 1; i <= increment; i++ ) {
            next = ( Long ) optimizer.generate( sequence );
            assertEquals( i, next.intValue() );
        }
        assertEquals( 1, sequence.getTimesCalled() ); // once to initialize state
        assertEquals( 1, sequence.getCurrentValue() );
        // force a "clock over" into the next hi block
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( 11, next.intValue() );
        assertEquals( 2, sequence.getTimesCalled() );
        assertEquals( 2, sequence.getCurrentValue() );
        // test historic table behavior, where the initial values started at 0 (we now force 1 to be the first used id value)
        sequence = new SourceMock( 0 );
        optimizer = buildHiloOptimizer( -1, increment );
        for ( int i = 1; i <= increment; i++ ) {
            next = ( Long ) optimizer.generate( sequence );
            assertEquals( i, next.intValue() );
        }
        assertEquals( 2, sequence.getTimesCalled() ); // here we have an extra call to get to 1 initially
        assertEquals( 1, sequence.getCurrentValue() );
        // force a "clock over"
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( 11, next.intValue() );
        assertEquals( 3, sequence.getTimesCalled() );
        assertEquals( 2, sequence.getCurrentValue() );
    }

    public void testBasicPooledOptimizerUsage() {
        Long next;
        // test historic sequence behavior, where the initial values start at 1...
        SourceMock sequence = new SourceMock( 1, 10 );
        Optimizer optimizer = buildPooledOptimizer( -1, 10 );
        for ( int i = 1; i < 11; i++ ) {
            next = ( Long ) optimizer.generate( sequence );
            assertEquals( i, next.intValue() );
        }
        assertEquals( 2, sequence.getTimesCalled() ); // twice to initialize state
        assertEquals( 11, sequence.getCurrentValue() );
        // force a "clock over" into the next pool
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( 11, next.intValue() );
        assertEquals( 3, sequence.getTimesCalled() );
        assertEquals( 21, sequence.getCurrentValue() );
    }

    public void testSubsequentPooledOptimizerUsage() {
        // test the pooled optimizer in situation where the sequence is already beyond its initial value on init.
        // cheat by telling the sequence to start with 1000
        final SourceMock sequence = new SourceMock( 1001, 3, 5 );
        // but tell the optimizer the start-with is 1
        final Optimizer optimizer = buildPooledOptimizer( 1, 3 );
        assertEquals( 5, sequence.getTimesCalled() );
        assertEquals( 1001, sequence.getCurrentValue() );
        Long next = (Long) optimizer.generate( sequence );
        assertEquals( 1001, next.intValue() );
        assertEquals( (5+1), sequence.getTimesCalled() );
        assertEquals( (1001+3), sequence.getCurrentValue() );
        next = (Long) optimizer.generate( sequence );
        assertEquals( (1001+1), next.intValue() );
        assertEquals( (5+1), sequence.getTimesCalled() );
        assertEquals( (1001+3), sequence.getCurrentValue() );
        next = (Long) optimizer.generate( sequence );
        assertEquals( (1001+2), next.intValue() );
        assertEquals( (5+1), sequence.getTimesCalled() );
        assertEquals( (1001+3), sequence.getCurrentValue() );
        // force a "clock over"
        next = (Long) optimizer.generate( sequence );
        assertEquals( (1001+3), next.intValue() );
        assertEquals( (5+2), sequence.getTimesCalled() );
        assertEquals( (1001+6), sequence.getCurrentValue() );
    }

    public void testBasicPooledLoOptimizerUsage() {
        final SourceMock sequence = new SourceMock( 1, 3 );
        final Optimizer optimizer = buildPooledLoOptimizer( 1, 3 );
        // pooled-lo is lazy: nothing is pulled from the source until first generate()
        assertEquals( 0, sequence.getTimesCalled() );
        assertEquals( -1, sequence.getCurrentValue() );
        Long next = ( Long ) optimizer.generate( sequence );
        assertEquals( 1, next.intValue() );
        assertEquals( 1, sequence.getTimesCalled() );
        assertEquals( 1, sequence.getCurrentValue() );
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( 2, next.intValue() );
        assertEquals( 1, sequence.getTimesCalled() );
        assertEquals( 1, sequence.getCurrentValue() );
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( 3, next.intValue() );
        assertEquals( 1, sequence.getTimesCalled() );
        assertEquals( 1, sequence.getCurrentValue() );
        // force a "clock over" into the next pool
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( 4, next.intValue() );
        assertEquals( 2, sequence.getTimesCalled() );
        assertEquals( (1+3), sequence.getCurrentValue() );
    }

    public void testSubsequentPooledLoOptimizerUsage() {
        // test the pooled-lo optimizer in situation where the sequence is already beyond its initial value on init.
        // cheat by telling the sequence to start with 1000
        final SourceMock sequence = new SourceMock( 1001, 3, 5 );
        // but tell the optimizer the start-with is 1
        final Optimizer optimizer = buildPooledLoOptimizer( 1, 3 );
        assertEquals( 5, sequence.getTimesCalled() );
        assertEquals( 1001, sequence.getCurrentValue() );
        // should "clock over" immediately
        Long next = ( Long ) optimizer.generate( sequence );
        assertEquals( (1001+3), next.intValue() );
        assertEquals( (5+1), sequence.getTimesCalled() );
        assertEquals( (1001+3), sequence.getCurrentValue() );
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( (1001+4), next.intValue() );
        assertEquals( (5+1), sequence.getTimesCalled() );
        assertEquals( (1001+3), sequence.getCurrentValue() );
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( (1001+5), next.intValue() );
        assertEquals( (5+1), sequence.getTimesCalled() );
        assertEquals( (1001+3), sequence.getCurrentValue() );
        // force a "clock over"
        next = ( Long ) optimizer.generate( sequence );
        assertEquals( (1001+6), next.intValue() );
        assertEquals( (5+2), sequence.getTimesCalled() );
        assertEquals( (1001+6), sequence.getCurrentValue() );
    }

    public void testRecoveredPooledOptimizerUsage() {
        final SourceMock sequence = new SourceMock( 1, 3 );
        final Optimizer optimizer = buildPooledOptimizer( 1, 3 );
        assertEquals( 0, sequence.getTimesCalled() );
        assertEquals( -1, sequence.getCurrentValue() );
        Long next = ( Long ) optimizer.generate( sequence );
        assertEquals( 1, next.intValue() );
        assertEquals( 2, sequence.getTimesCalled() );
        assertEquals( 4, sequence.getCurrentValue() );
        // app ends, and starts back up (we should "lose" only 2 and 3 as id values)
        final Optimizer optimizer2 = buildPooledOptimizer( 1, 3 );
        next = ( Long ) optimizer2.generate( sequence );
        assertEquals( 4, next.intValue() );
        assertEquals( 3, sequence.getTimesCalled() );
        assertEquals( 7, sequence.getCurrentValue() );
    }

    public void testRecoveredPooledLoOptimizerUsage() {
        final SourceMock sequence = new SourceMock( 1, 3 );
        final Optimizer optimizer = buildPooledLoOptimizer( 1, 3 );
        assertEquals( 0, sequence.getTimesCalled() );
        assertEquals( -1, sequence.getCurrentValue() );
        Long next = ( Long ) optimizer.generate( sequence );
        assertEquals( 1, next.intValue() );
        assertEquals( 1, sequence.getTimesCalled() );
        assertEquals( 1, sequence.getCurrentValue() );
        // app ends, and starts back up (we should "lose" only 2 and 3 as id values)
        final Optimizer optimizer2 = buildPooledLoOptimizer( 1, 3 );
        next = ( Long ) optimizer2.generate( sequence );
        assertEquals( 4, next.intValue() );
        assertEquals( 2, sequence.getTimesCalled() );
        assertEquals( 4, sequence.getCurrentValue() );
    }

    // Factory shorthands for each standard optimizer under test.
    private static Optimizer buildNoneOptimizer(long initial, int increment) {
        return buildOptimizer( OptimizerFactory.StandardOptimizerDescriptor.NONE, initial, increment );
    }

    private static Optimizer buildHiloOptimizer(long initial, int increment) {
        return buildOptimizer( OptimizerFactory.StandardOptimizerDescriptor.HILO, initial, increment );
    }

    private static Optimizer buildPooledOptimizer(long initial, int increment) {
        return buildOptimizer( OptimizerFactory.StandardOptimizerDescriptor.POOLED, initial, increment );
    }

    private static Optimizer buildPooledLoOptimizer(long initial, int increment) {
        return buildOptimizer( OptimizerFactory.StandardOptimizerDescriptor.POOLED_LO, initial, increment );
    }

    private static Optimizer buildOptimizer(
            OptimizerFactory.StandardOptimizerDescriptor descriptor,
            long initial,
            int increment) {
        return OptimizerFactory.buildOptimizer( descriptor.getExternalName(), Long.class, increment, initial );
    }

    /**
     * Mock database value source (sequence/table) that counts how many times
     * it is hit and advances by {@code increment} per call.
     */
    private static class SourceMock implements AccessCallback {
        private IdentifierGeneratorHelper.BasicHolder value = new IdentifierGeneratorHelper.BasicHolder( Long.class );
        private long initialValue;
        private int increment;
        private int timesCalled = 0;

        public SourceMock(long initialValue) {
            this( initialValue, 1 );
        }

        public SourceMock(long initialValue, int increment) {
            this( initialValue, increment, 0 );
        }

        public SourceMock(long initialValue, int increment, int timesCalled) {
            this.increment = increment;
            this.timesCalled = timesCalled;
            if ( timesCalled != 0 ) {
                // simulate a source that has already been used: current value
                // is initialValue, and further "restarts" begin from 1
                this.value.initialize( initialValue );
                this.initialValue = 1;
            }
            else {
                // untouched source: -1 marks "no value pulled yet"
                this.value.initialize( -1 );
                this.initialValue = initialValue;
            }
        }

        public IntegralDataTypeHolder getNextValue() {
            try {
                if ( timesCalled == 0 ) {
                    // first hit returns the configured initial value
                    initValue();
                    return value.copy();
                }
                else {
                    return value.add( increment ).copy();
                }
            }
            finally {
                timesCalled++;
            }
        }

        private void initValue() {
            this.value.initialize( initialValue );
        }

        public int getTimesCalled() {
            return timesCalled;
        }

        public long getCurrentValue() {
            return value == null ? -1 : value.getActualLongValue();
        }
    }

    // Manual "dumping" harnesses kept for ad-hoc inspection of optimizer output.
//	public void testNoopDumping() {
//		SourceMock sequence = new SourceMock( 1 );
//		Optimizer optimizer = OptimizerFactory.buildOptimizer( OptimizerFactory.NONE, Long.class, 1 );
//		for ( int i = 1; i <= 41; i++ ) {
//			System.out.println( i + " => " + optimizer.generate( sequence ) + " (" + sequence.getCurrentValue() + ")" );
//		}
//	}
//
//	public void testHiLoDumping() {
//		int increment = 10;
//		SourceMock sequence = new SourceMock( 1 );
//		Optimizer optimizer = OptimizerFactory.buildOptimizer( OptimizerFactory.HILO, Long.class, increment );
//		for ( int i = 1; i <= 41; i++ ) {
//			System.out.println( i + " => " + optimizer.generate( sequence ) + " (" + sequence.getCurrentValue() + ")" );
//		}
//	}
//
//	public void testPooledDumping() {
//		int increment = 10;
//		SourceMock sequence = new SourceMock( 1, increment );
//		Optimizer optimizer = OptimizerFactory.buildOptimizer( OptimizerFactory.POOL, Long.class, increment );
//		for ( int i = 1; i <= 41; i++ ) {
//			System.out.println( i + " => " + optimizer.generate( sequence ) + " (" + sequence.getCurrentValue() + ")" );
//		}
//	}
}
| |
/*
* Copyright (c) 2016 Qiscus.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qiscus.sdk.chat.core.data.local;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.database.sqlite.SQLiteDatabase;
import com.qiscus.sdk.chat.core.QiscusCore;
import com.qiscus.sdk.chat.core.data.model.QiscusAccount;
import com.qiscus.sdk.chat.core.data.model.QiscusChatRoom;
import com.qiscus.sdk.chat.core.data.model.QiscusComment;
import com.qiscus.sdk.chat.core.data.model.QiscusRoomMember;
import com.qiscus.sdk.chat.core.util.QiscusErrorLogger;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import rx.Emitter;
import rx.Observable;
public class QiscusDataBaseHelper implements QiscusDataStore {
protected final SQLiteDatabase sqLiteReadDatabase;
protected final SQLiteDatabase sqLiteWriteDatabase;
public QiscusDataBaseHelper() {
    // One open-helper instance supplies both handles; reads and writes are
    // kept on separate SQLiteDatabase references.
    QiscusDbOpenHelper openHelper = new QiscusDbOpenHelper(QiscusCore.getApps());
    sqLiteReadDatabase = openHelper.getReadableDatabase();
    sqLiteWriteDatabase = openHelper.getWritableDatabase();
}
@Override
public void add(QiscusChatRoom qiscusChatRoom) {
    // Insert the room row inside a transaction; CONFLICT_ABORT means an
    // existing row with the same id leaves the table untouched.
    sqLiteWriteDatabase.beginTransactionNonExclusive();
    try {
        sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.RoomTable.TABLE_NAME, null,
                QiscusDb.RoomTable.toContentValues(qiscusChatRoom), SQLiteDatabase.CONFLICT_ABORT);
        sqLiteWriteDatabase.setTransactionSuccessful();
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
    } finally {
        sqLiteWriteDatabase.endTransaction();
    }

    // Persist the membership list, if any.
    List<QiscusRoomMember> members = qiscusChatRoom.getMember();
    if (members != null) {
        for (QiscusRoomMember member : members) {
            addRoomMember(qiscusChatRoom.getId(), member, qiscusChatRoom.getDistinctId());
        }
    }

    // Persist the last comment when it refers to a real (server-side) comment.
    QiscusComment lastComment = qiscusChatRoom.getLastComment();
    if (lastComment != null && lastComment.getId() > 0) {
        addOrUpdate(lastComment);
    }
}
@Override
public boolean isContains(QiscusChatRoom qiscusChatRoom) {
    // Returns true when a room row with this id already exists.
    String query = "SELECT * FROM "
            + QiscusDb.RoomTable.TABLE_NAME + " WHERE "
            + QiscusDb.RoomTable.COLUMN_ID + " = ?";
    String roomId = String.valueOf(qiscusChatRoom.getId());
    Cursor cursor = sqLiteReadDatabase.rawQuery(query, new String[]{roomId});
    try {
        return cursor.getCount() > 0;
    } finally {
        // Close in finally so the cursor is not leaked if getCount() throws.
        cursor.close();
    }
}
@Override
public void update(QiscusChatRoom qiscusChatRoom) {
    // Rewrite the room row identified by its id inside a transaction.
    String selection = QiscusDb.RoomTable.COLUMN_ID + " =? ";
    String[] selectionArgs = new String[]{String.valueOf(qiscusChatRoom.getId())};
    sqLiteWriteDatabase.beginTransactionNonExclusive();
    try {
        sqLiteWriteDatabase.update(QiscusDb.RoomTable.TABLE_NAME,
                QiscusDb.RoomTable.toContentValues(qiscusChatRoom), selection, selectionArgs);
        sqLiteWriteDatabase.setTransactionSuccessful();
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
    } finally {
        sqLiteWriteDatabase.endTransaction();
    }

    // When a non-empty member list is supplied, replace the stored membership.
    List<QiscusRoomMember> members = qiscusChatRoom.getMember();
    if (members != null && !members.isEmpty()) {
        deleteRoomMembers(qiscusChatRoom.getId());
        for (QiscusRoomMember member : members) {
            addRoomMember(qiscusChatRoom.getId(), member, qiscusChatRoom.getDistinctId());
        }
    }

    // Refresh the last comment when it refers to a real (server-side) comment.
    QiscusComment lastComment = qiscusChatRoom.getLastComment();
    if (lastComment != null && lastComment.getId() > 0) {
        addOrUpdate(lastComment);
    }
}
@Override
public void addOrUpdate(QiscusChatRoom qiscusChatRoom) {
    // Upsert: refresh the stored row when it exists, otherwise insert it.
    if (isContains(qiscusChatRoom)) {
        update(qiscusChatRoom);
    } else {
        add(qiscusChatRoom);
    }
}
@Override
public QiscusChatRoom getChatRoom(long id) {
    // Load a room by id, hydrating members and the latest comment.
    // Returns null when the row is absent or any error occurs (logged).
    String query = "SELECT * FROM "
            + QiscusDb.RoomTable.TABLE_NAME + " WHERE "
            + QiscusDb.RoomTable.COLUMN_ID + " =? ";
    Cursor cursor = null;
    try {
        cursor = sqLiteReadDatabase.rawQuery(query, new String[]{String.valueOf(id)});
        if (!cursor.moveToNext()) {
            return null;
        }
        QiscusChatRoom room = QiscusDb.RoomTable.parseCursor(cursor);
        room.setMember(getRoomMembers(id));
        QiscusComment latestComment = getLatestComment(id);
        if (latestComment != null) {
            room.setLastComment(latestComment);
        }
        return room;
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
        return null;
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
}
@Override
public QiscusChatRoom getChatRoom(String email) {
    // One-to-one rooms store a distinct id of the form "<emailA> <emailB>";
    // try both orderings of the current account and the peer.
    QiscusAccount account = QiscusCore.getQiscusAccount();
    String myEmail = account.getEmail();
    QiscusChatRoom room = getChatRoom(email, myEmail + " " + email);
    return room != null ? room : getChatRoom(email, email + " " + myEmail);
}
@Override
public QiscusChatRoom getChatRoom(String email, String distinctId) {
    // Find the first non-group room that this member belongs to under the
    // given distinct id. Returns null when no matching one-to-one room exists
    // or a referenced room row is missing.
    String query = "SELECT * FROM "
            + QiscusDb.RoomMemberTable.TABLE_NAME + " WHERE "
            + QiscusDb.RoomMemberTable.COLUMN_DISTINCT_ID + " =? "
            + " AND " + QiscusDb.RoomMemberTable.COLUMN_USER_EMAIL
            + " =? ";
    Cursor cursor = sqLiteReadDatabase.rawQuery(query, new String[]{distinctId, email});
    try {
        while (cursor.moveToNext()) {
            QiscusChatRoom qiscusChatRoom = getChatRoom(QiscusDb.RoomMemberTable.getRoomId(cursor));
            if (qiscusChatRoom == null) {
                return null;
            }
            if (!qiscusChatRoom.isGroup()) {
                return qiscusChatRoom;
            }
        }
        return null;
    } finally {
        // Close in finally so the cursor is not leaked if iteration throws.
        cursor.close();
    }
}
@Override
public QiscusChatRoomWithUniqueIdMarker__unused getChatRoomWithUniqueId_TYPO() { }
@Override
public List<QiscusChatRoom> getChatRooms(int limit) {
    // Delegates with offset -1; see getChatRooms(int, int).
    // NOTE(review): SQLite treats a negative OFFSET as zero, so -1 is
    // presumably intended to mean "no offset" — confirm.
    return getChatRooms(limit, -1);
}
@Override
public List<QiscusChatRoom> getChatRooms(int limit, int offset) {
    // List rooms ordered by their newest (non-deleted) comment, newest first,
    // hydrating members and the latest comment for each.
    String roomTableName = QiscusDb.RoomTable.TABLE_NAME;
    String commentTableName = QiscusDb.CommentTable.TABLE_NAME;
    String limitStr = String.valueOf(limit);
    String offsetStr = String.valueOf(offset);
    String query = "SELECT " + roomTableName + ".*" + " FROM "
            + QiscusDb.RoomTable.TABLE_NAME
            + " LEFT JOIN " + commentTableName
            + " ON " + roomTableName + "." + QiscusDb.RoomTable.COLUMN_ID
            + " = " + commentTableName + "." + QiscusDb.CommentTable.COLUMN_ROOM_ID
            + " AND " + commentTableName + "." + QiscusDb.CommentTable.COLUMN_DELETED + " != 1"
            + " AND " + commentTableName + "." + QiscusDb.CommentTable.COLUMN_HARD_DELETED + " != 1"
            + " GROUP BY " + roomTableName + "." + QiscusDb.RoomTable.COLUMN_ID
            + " ORDER BY " + commentTableName + "." + QiscusDb.CommentTable.COLUMN_TIME
            + " DESC "
            + " LIMIT " + "?"
            + " OFFSET " + "?";
    String[] args = new String[]{limitStr, offsetStr};
    Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
    List<QiscusChatRoom> qiscusChatRooms = new ArrayList<>();
    try {
        while (cursor.moveToNext()) {
            QiscusChatRoom qiscusChatRoom = QiscusDb.RoomTable.parseCursor(cursor);
            qiscusChatRoom.setMember(getRoomMembers(qiscusChatRoom.getId()));
            QiscusComment latestComment = getLatestComment(qiscusChatRoom.getId());
            if (latestComment != null) {
                qiscusChatRoom.setLastComment(latestComment);
            }
            qiscusChatRooms.add(qiscusChatRoom);
        }
    } finally {
        // Close in finally so the cursor is not leaked if row parsing or the
        // per-room lookups throw.
        cursor.close();
    }
    return qiscusChatRooms;
}
@Override
public Observable<List<QiscusChatRoom>> getObservableChatRooms(int limit) {
    // Reactive variant of getChatRooms(int): delegates with offset -1.
    return getObservableChatRooms(limit, -1);
}
@Override
public Observable<List<QiscusChatRoom>> getObservableChatRooms(int limit, int offset) {
    // Wraps the synchronous getChatRooms(limit, offset) query in an
    // Observable that emits the list once and completes; backpressure is
    // handled by buffering.
    return Observable.create(subscriber -> {
        subscriber.onNext(getChatRooms(limit, offset));
        subscriber.onCompleted();
    }, Emitter.BackpressureMode.BUFFER);
}
// Resolves the pre-existing TODO: values are now bound via selectionArgs
// (IN clauses with "?" placeholders) instead of being concatenated into SQL.
@Override
public List<QiscusChatRoom> getChatRooms(List<Long> roomIds, List<String> uniqueIds) {
    // Fetch all rooms matching any of the given ids OR any of the given
    // unique ids, hydrate each, then sort with sortRooms().
    List<QiscusChatRoom> qiscusChatRooms = new ArrayList<>();
    if (roomIds.isEmpty() && uniqueIds.isEmpty()) {
        return qiscusChatRooms;
    }
    List<String> args = new ArrayList<>(roomIds.size() + uniqueIds.size());
    StringBuilder query = new StringBuilder("SELECT * FROM ")
            .append(QiscusDb.RoomTable.TABLE_NAME).append(" WHERE ");
    if (!roomIds.isEmpty()) {
        query.append(QiscusDb.RoomTable.COLUMN_ID)
                .append(" IN (").append(placeholders(roomIds.size())).append(")");
        for (Long roomId : roomIds) {
            args.add(String.valueOf(roomId));
        }
    }
    if (!roomIds.isEmpty() && !uniqueIds.isEmpty()) {
        query.append(" OR ");
    }
    if (!uniqueIds.isEmpty()) {
        query.append(QiscusDb.RoomTable.COLUMN_UNIQUE_ID)
                .append(" IN (").append(placeholders(uniqueIds.size())).append(")");
        args.addAll(uniqueIds);
    }
    Cursor cursor = sqLiteReadDatabase.rawQuery(query.toString(), args.toArray(new String[0]));
    try {
        while (cursor.moveToNext()) {
            QiscusChatRoom qiscusChatRoom = QiscusDb.RoomTable.parseCursor(cursor);
            qiscusChatRoom.setMember(getRoomMembers(qiscusChatRoom.getId()));
            QiscusComment latestComment = getLatestComment(qiscusChatRoom.getId());
            if (latestComment != null) {
                qiscusChatRoom.setLastComment(latestComment);
            }
            qiscusChatRooms.add(qiscusChatRoom);
        }
    } finally {
        cursor.close();
    }
    sortRooms(qiscusChatRooms);
    return qiscusChatRooms;
}

// Builds a "?, ?, ..." placeholder list with the given count for an IN clause.
private static String placeholders(int count) {
    StringBuilder builder = new StringBuilder("?");
    for (int i = 1; i < count; i++) {
        builder.append(", ?");
    }
    return builder.toString();
}
@Override
public void deleteChatRoom(long roomId) {
    // Remove the room row by id inside a transaction; failures are logged.
    String selection = QiscusDb.RoomTable.COLUMN_ID + " =? ";
    String[] selectionArgs = new String[]{String.valueOf(roomId)};
    sqLiteWriteDatabase.beginTransactionNonExclusive();
    try {
        sqLiteWriteDatabase.delete(QiscusDb.RoomTable.TABLE_NAME, selection, selectionArgs);
        sqLiteWriteDatabase.setTransactionSuccessful();
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
    } finally {
        sqLiteWriteDatabase.endTransaction();
    }
}
@Override
public void addRoomMember(long roomId, QiscusRoomMember qiscusRoomMember, String distinctId) {
    // Null distinct ids collapse to the shared "default" bucket.
    String resolvedDistinctId = distinctId == null ? "default" : distinctId;
    sqLiteWriteDatabase.beginTransactionNonExclusive();
    try {
        sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.RoomMemberTable.TABLE_NAME, null,
                QiscusDb.RoomMemberTable.toContentValues(roomId, resolvedDistinctId, qiscusRoomMember),
                SQLiteDatabase.CONFLICT_ABORT);
        sqLiteWriteDatabase.setTransactionSuccessful();
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
    } finally {
        sqLiteWriteDatabase.endTransaction();
    }
    // Keep the member's own profile row up to date as well.
    addOrUpdate(qiscusRoomMember);
}
@Override
public boolean isContainsRoomMember(long roomId, String email) {
    // Returns true when a membership row exists for this room/email pair.
    String query = "SELECT * FROM "
            + QiscusDb.RoomMemberTable.TABLE_NAME + " WHERE "
            + QiscusDb.RoomMemberTable.COLUMN_ROOM_ID + " =? "
            + " AND " + QiscusDb.RoomMemberTable.COLUMN_USER_EMAIL
            + " =? ";
    String roomIdStr = String.valueOf(roomId);
    Cursor cursor = sqLiteReadDatabase.rawQuery(query, new String[]{roomIdStr, email});
    try {
        return cursor.getCount() > 0;
    } finally {
        // Close in finally so the cursor is not leaked if getCount() throws.
        cursor.close();
    }
}
@Override
public void updateRoomMember(long roomId, QiscusRoomMember qiscusRoomMember, String distinctId) {
    // Null distinct ids collapse to the shared "default" bucket.
    String resolvedDistinctId = distinctId == null ? "default" : distinctId;
    String selection = QiscusDb.RoomMemberTable.COLUMN_ROOM_ID + " =? " + " AND "
            + QiscusDb.RoomMemberTable.COLUMN_USER_EMAIL + " =? ";
    String[] selectionArgs = new String[]{
            String.valueOf(roomId), String.valueOf(qiscusRoomMember.getEmail())};
    sqLiteWriteDatabase.beginTransactionNonExclusive();
    try {
        sqLiteWriteDatabase.update(QiscusDb.RoomMemberTable.TABLE_NAME,
                QiscusDb.RoomMemberTable.toContentValues(roomId, resolvedDistinctId, qiscusRoomMember),
                selection, selectionArgs);
        sqLiteWriteDatabase.setTransactionSuccessful();
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
    } finally {
        sqLiteWriteDatabase.endTransaction();
    }
    // Keep the member's own profile row up to date as well.
    addOrUpdate(qiscusRoomMember);
}
@Override
public void addOrUpdateRoomMember(long roomId, QiscusRoomMember qiscusRoomMember, String distinctId) {
    // Fall back to the same "default" distinct id that addRoomMember and
    // updateRoomMember use; previously a null slipped straight into the row,
    // making the upsert inconsistent with those two siblings.
    distinctId = distinctId == null ? "default" : distinctId;
    sqLiteWriteDatabase.beginTransactionNonExclusive();
    try {
        // CONFLICT_REPLACE makes this a true upsert on the member row.
        sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.RoomMemberTable.TABLE_NAME, null,
                QiscusDb.RoomMemberTable.toContentValues(roomId, distinctId, qiscusRoomMember),
                SQLiteDatabase.CONFLICT_REPLACE);
        sqLiteWriteDatabase.setTransactionSuccessful();
    } catch (Exception e) {
        QiscusErrorLogger.print(e);
    } finally {
        sqLiteWriteDatabase.endTransaction();
    }
    // Keep the member's own profile row up to date as well.
    addOrUpdate(qiscusRoomMember);
}
@Override
public List<QiscusRoomMember> getRoomMembers(long roomId) {
String query = "SELECT * FROM "
+ QiscusDb.RoomMemberTable.TABLE_NAME + " WHERE "
+ QiscusDb.RoomMemberTable.COLUMN_ROOM_ID + " =? ";
String[] args = new String[]{String.valueOf(roomId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
List<QiscusRoomMember> members = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusRoomMember member = getMember(QiscusDb.RoomMemberTable.getUserEmail(cursor));
if (member != null) {
member.setLastDeliveredCommentId(QiscusDb.RoomMemberTable.getLastDeliveredCommentId(cursor));
member.setLastReadCommentId(QiscusDb.RoomMemberTable.getLastReadCommentId(cursor));
members.add(member);
}
}
cursor.close();
return members;
}
@Override
public void deleteRoomMember(long roomId, String email) {
String where = QiscusDb.RoomMemberTable.COLUMN_ROOM_ID + " =? "
+ " AND " + QiscusDb.RoomMemberTable.COLUMN_USER_EMAIL + " =? ";
String[] args = new String[]{String.valueOf(roomId), email};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.delete(QiscusDb.RoomMemberTable.TABLE_NAME, where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void deleteRoomMembers(long roomId) {
String where = QiscusDb.RoomMemberTable.COLUMN_ROOM_ID + " = " + roomId;
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.delete(QiscusDb.RoomMemberTable.TABLE_NAME, where, null);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void add(QiscusRoomMember qiscusRoomMember) {
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.MemberTable.TABLE_NAME, null,
QiscusDb.MemberTable.toContentValues(qiscusRoomMember), SQLiteDatabase.CONFLICT_ABORT);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public boolean isContains(QiscusRoomMember qiscusRoomMember) {
String query = "SELECT * FROM "
+ QiscusDb.MemberTable.TABLE_NAME + " WHERE "
+ QiscusDb.MemberTable.COLUMN_USER_EMAIL + " =? ";
String[] args = new String[]{qiscusRoomMember.getEmail()};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
boolean contains = cursor.getCount() > 0;
cursor.close();
return contains;
}
@Override
public void update(QiscusRoomMember qiscusRoomMember) {
String where = QiscusDb.MemberTable.COLUMN_USER_EMAIL + " =? ";
String[] args = new String[]{qiscusRoomMember.getEmail()};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.update(QiscusDb.MemberTable.TABLE_NAME,
QiscusDb.MemberTable.toContentValues(qiscusRoomMember), where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void addOrUpdate(QiscusRoomMember qiscusRoomMember) {
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.MemberTable.TABLE_NAME, null,
QiscusDb.MemberTable.toContentValues(qiscusRoomMember), SQLiteDatabase.CONFLICT_REPLACE);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public QiscusRoomMember getMember(String email) {
String query = "SELECT * FROM "
+ QiscusDb.MemberTable.TABLE_NAME + " WHERE "
+ QiscusDb.MemberTable.COLUMN_USER_EMAIL + " =? ";
String[] args = new String[]{email};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
try {
if (cursor != null && cursor.moveToNext()) {
QiscusRoomMember qiscusRoomMember = QiscusDb.MemberTable.getMember(cursor);
cursor.close();
return qiscusRoomMember;
} else {
cursor.close();
return null;
}
} catch (Exception e) {
cursor.close();
QiscusErrorLogger.print(e);
return null;
}
}
@Override
public void add(QiscusComment qiscusComment) {
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.CommentTable.TABLE_NAME, null,
QiscusDb.CommentTable.toContentValues(qiscusComment), SQLiteDatabase.CONFLICT_ABORT);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void saveLocalPath(long roomId, long commentId, String localPath) {
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.FilesTable.TABLE_NAME, null,
QiscusDb.FilesTable.toContentValues(roomId, commentId, localPath), SQLiteDatabase.CONFLICT_ABORT);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
public boolean isContains(QiscusComment qiscusComment) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_UNIQUE_ID + " =? ";
String[] args = new String[]{qiscusComment.getUniqueId()};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
boolean contains = cursor.getCount() > 0;
cursor.close();
return contains;
}
@Override
public boolean isContainsFileOfComment(long commentId) {
String query = "SELECT * FROM "
+ QiscusDb.FilesTable.TABLE_NAME + " WHERE "
+ QiscusDb.FilesTable.COLUMN_COMMENT_ID + " =? ";
String[] args = new String[]{String.valueOf(commentId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
boolean contains = cursor.getCount() > 0;
cursor.close();
return contains;
}
@Override
public void update(QiscusComment qiscusComment) {
String where = QiscusDb.CommentTable.COLUMN_UNIQUE_ID + " =? ";
String[] args = new String[]{qiscusComment.getUniqueId()};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.update(QiscusDb.CommentTable.TABLE_NAME,
QiscusDb.CommentTable.toContentValues(qiscusComment), where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void updateLocalPath(long roomId, long commentId, String localPath) {
String where = QiscusDb.FilesTable.COLUMN_COMMENT_ID + " =? ";
String[] args = new String[]{String.valueOf(commentId)};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.update(QiscusDb.FilesTable.TABLE_NAME,
QiscusDb.FilesTable.toContentValues(roomId, commentId, localPath), where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void addOrUpdate(QiscusComment qiscusComment) {
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.CommentTable.TABLE_NAME, null,
QiscusDb.CommentTable.toContentValues(qiscusComment), SQLiteDatabase.CONFLICT_REPLACE);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void addOrUpdateLocalPath(long roomId, long commentId, String localPath) {
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.insertWithOnConflict(QiscusDb.FilesTable.TABLE_NAME, null,
QiscusDb.FilesTable.toContentValues(roomId, commentId, localPath), SQLiteDatabase.CONFLICT_REPLACE);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public void delete(QiscusComment qiscusComment) {
String where = QiscusDb.CommentTable.COLUMN_UNIQUE_ID + " =? ";
String[] args = new String[]{qiscusComment.getUniqueId()};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.delete(QiscusDb.CommentTable.TABLE_NAME, where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
deleteLocalPath(qiscusComment.getId());
}
@Override
public boolean deleteCommentsByRoomId(long roomId) {
List<QiscusComment> comments = getComments(roomId);
if (comments.isEmpty()) {
return false;
}
for (QiscusComment comment : comments) {
deleteLocalPath(comment.getId());
}
String where = QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? ";
String[] args = new String[]{String.valueOf(roomId)};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.delete(QiscusDb.CommentTable.TABLE_NAME, where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
return true;
}
@Override
public boolean deleteCommentsByRoomId(long roomId, long timestampOffset) {
List<QiscusComment> comments = getComments(roomId, timestampOffset);
if (comments.isEmpty()) {
return false;
}
for (QiscusComment comment : comments) {
deleteLocalPath(comment.getId());
}
String where = QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_TIME + " <=? ";
String[] args = new String[]{String.valueOf(roomId), String.valueOf(timestampOffset)};
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
sqLiteWriteDatabase.delete(QiscusDb.CommentTable.TABLE_NAME, where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
return true;
}
    @Override
    public void updateLastDeliveredComment(long roomId, long commentId) {
        // Bulk-promote every comment in the room with id <= commentId to DELIVERED.
        // The "id != -1" clause skips placeholder rows, and "state < DELIVERED"
        // avoids downgrading comments that are already READ.
        String sql = "UPDATE " + QiscusDb.CommentTable.TABLE_NAME
                + " SET " + QiscusDb.CommentTable.COLUMN_STATE + " = " + QiscusComment.STATE_DELIVERED
                + " WHERE " + QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? "
                + " AND " + QiscusDb.CommentTable.COLUMN_ID + " <= " + "?"
                + " AND " + QiscusDb.CommentTable.COLUMN_ID + " != -1"
                + " AND " + QiscusDb.CommentTable.COLUMN_STATE + " < " + QiscusComment.STATE_DELIVERED;
        String[] args = new String[]{String.valueOf(roomId), String.valueOf(commentId)};
        sqLiteWriteDatabase.beginTransactionNonExclusive();
        try {
            sqLiteWriteDatabase.execSQL(sql, args);
            sqLiteWriteDatabase.setTransactionSuccessful();
        } catch (Exception e) {
            // Failures are logged and swallowed; the transaction is rolled back.
            QiscusErrorLogger.print(e);
        } finally {
            sqLiteWriteDatabase.endTransaction();
        }
    }
    @Override
    public void updateLastReadComment(long roomId, long commentId) {
        // Bulk-promote every comment in the room with id <= commentId to READ.
        // The "id != -1" clause skips placeholder rows, and "state < READ"
        // keeps the state monotonically increasing.
        String sql = "UPDATE " + QiscusDb.CommentTable.TABLE_NAME
                + " SET " + QiscusDb.CommentTable.COLUMN_STATE + " = " + QiscusComment.STATE_READ
                + " WHERE " + QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? "
                + " AND " + QiscusDb.CommentTable.COLUMN_ID + " <= " + "?"
                + " AND " + QiscusDb.CommentTable.COLUMN_ID + " != -1"
                + " AND " + QiscusDb.CommentTable.COLUMN_STATE + " < " + QiscusComment.STATE_READ;
        String[] args = new String[]{String.valueOf(roomId), String.valueOf(commentId)};
        sqLiteWriteDatabase.beginTransactionNonExclusive();
        try {
            sqLiteWriteDatabase.execSQL(sql, args);
            sqLiteWriteDatabase.setTransactionSuccessful();
        } catch (Exception e) {
            // Failures are logged and swallowed; the transaction is rolled back.
            QiscusErrorLogger.print(e);
        } finally {
            sqLiteWriteDatabase.endTransaction();
        }
    }
@Override
public File getLocalPath(long commentId) {
String query = "SELECT * FROM "
+ QiscusDb.FilesTable.TABLE_NAME + " WHERE "
+ QiscusDb.FilesTable.COLUMN_COMMENT_ID + " =? ";
String[] args = new String[]{String.valueOf(commentId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
if (cursor.moveToNext()) {
File file = new File(QiscusDb.FilesTable.parseCursor(cursor));
cursor.close();
if (file.exists()) {
return file;
}
return null;
} else {
cursor.close();
return null;
}
}
@Override
public void deleteLocalPath(long commentId) {
File file = getLocalPath(commentId);
if (file != null) {
file.delete();
}
sqLiteWriteDatabase.beginTransactionNonExclusive();
try {
String where = QiscusDb.FilesTable.COLUMN_COMMENT_ID + " =? ";
String[] args = new String[]{String.valueOf(commentId)};
sqLiteWriteDatabase.delete(QiscusDb.FilesTable.TABLE_NAME, where, args);
sqLiteWriteDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteWriteDatabase.endTransaction();
}
}
@Override
public QiscusComment getComment(String uniqueId) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_UNIQUE_ID + " =? ";
String[] args = new String[]{uniqueId};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
if (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
cursor.close();
return qiscusComment;
} else {
cursor.close();
return null;
}
}
//done
private QiscusComment getComment(long id) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ID + " =? ";
String[] args = new String[]{String.valueOf(id)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
if (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
cursor.close();
return qiscusComment;
} else {
cursor.close();
return null;
}
}
@Override
public QiscusComment getCommentByBeforeId(long beforeId) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_COMMENT_BEFORE_ID + " =? ";
String[] args = new String[]{String.valueOf(beforeId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
if (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
cursor.close();
return qiscusComment;
} else {
cursor.close();
return null;
}
}
@Override
public List<QiscusComment> getComments(long roomId) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC";
String[] args = new String[]{String.valueOf(roomId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(qiscusComment);
}
cursor.close();
return qiscusComments;
}
@Override
public List<QiscusComment> getComments(long roomId, int limit) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC"
+ " LIMIT " + "?";
String[] args = new String[]{String.valueOf(roomId), String.valueOf(limit)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(qiscusComment);
}
cursor.close();
return qiscusComments;
}
@Override
public List<QiscusComment> getComments(long roomId, long timestampOffset) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_TIME + " <= " + "?" + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC";
String[] args = new String[]{String.valueOf(roomId), String.valueOf(timestampOffset)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(qiscusComment);
}
cursor.close();
return qiscusComments;
}
@Override
public Observable<List<QiscusComment>> getObservableComments(final long roomId) {
return Observable.create(subscriber -> {
subscriber.onNext(getComments(roomId));
subscriber.onCompleted();
}, Emitter.BackpressureMode.BUFFER);
}
@Override
public Observable<List<QiscusComment>> getObservableComments(final long roomId, final int limit) {
return Observable.create(subscriber -> {
subscriber.onNext(getComments(roomId, limit));
subscriber.onCompleted();
}, Emitter.BackpressureMode.BUFFER);
}
@Override
public List<QiscusComment> getOlderCommentsThan(QiscusComment qiscusComment, long roomId, int limit) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_TIME + " <= " + "?" + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC"
+ " LIMIT " + "?";
String[] args = new String[]{String.valueOf(roomId), String.valueOf(qiscusComment.getTime().getTime()),
String.valueOf(limit)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment comment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(comment.getSenderEmail());
if (qiscusRoomMember != null) {
comment.setSender(qiscusRoomMember.getUsername());
comment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(comment);
}
cursor.close();
return qiscusComments;
}
@Override
public Observable<List<QiscusComment>> getObservableOlderCommentsThan(QiscusComment qiscusComment, long roomId, int limit) {
return Observable.create(subscriber -> {
subscriber.onNext(getOlderCommentsThan(qiscusComment, roomId, limit));
subscriber.onCompleted();
}, Emitter.BackpressureMode.BUFFER);
}
@Override
public List<QiscusComment> getCommentsAfter(QiscusComment qiscusComment, long roomId) {
QiscusComment savedComment = getComment(qiscusComment.getId());
if (savedComment == null) {
return new ArrayList<>();
}
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND ("
+ QiscusDb.CommentTable.COLUMN_TIME + " >= " + "?" + " OR "
+ QiscusDb.CommentTable.COLUMN_ID + " = -1) " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC";
String[] args = new String[]{String.valueOf(roomId), String.valueOf(savedComment.getTime().getTime())};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment comment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(comment.getSenderEmail());
if (qiscusRoomMember != null) {
comment.setSender(qiscusRoomMember.getUsername());
comment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(comment);
}
cursor.close();
return qiscusComments;
}
@Override
public Observable<List<QiscusComment>> getObservableCommentsAfter(QiscusComment qiscusComment, long roomId) {
return Observable.create(subscriber -> {
subscriber.onNext(getCommentsAfter(qiscusComment, roomId));
subscriber.onCompleted();
}, Emitter.BackpressureMode.BUFFER);
}
@Override
public QiscusComment getLatestComment() {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ID + " != -1 " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC "
+ " LIMIT " + 1;
Cursor cursor = sqLiteReadDatabase.rawQuery(query, null);
QiscusComment qiscusComment = null;
while (cursor.moveToNext()) {
qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
}
cursor.close();
return qiscusComment;
}
@Override
public QiscusComment getLatestComment(long roomId) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME
+ " WHERE " + QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC"
+ " LIMIT " + 1;
String[] args = new String[]{String.valueOf(roomId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
QiscusComment qiscusComment = null;
while (cursor.moveToNext()) {
qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
}
cursor.close();
return qiscusComment;
}
@Override
public QiscusComment getLatestDeliveredComment(long roomId) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ID + " != -1 "
+ " AND " + QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? "
+ " AND " + QiscusDb.CommentTable.COLUMN_STATE + " = " + QiscusComment.STATE_DELIVERED
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC"
+ " LIMIT " + 1;
String[] args = new String[]{String.valueOf(roomId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
QiscusComment qiscusComment = null;
while (cursor.moveToNext()) {
qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
}
cursor.close();
return qiscusComment;
}
@Override
public QiscusComment getLatestReadComment(long roomId) {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ID + " != -1 "
+ " AND " + QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? "
+ " AND " + QiscusDb.CommentTable.COLUMN_STATE + " = " + QiscusComment.STATE_READ
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC"
+ " LIMIT " + 1;
String[] args = new String[]{String.valueOf(roomId)};
Cursor cursor = sqLiteReadDatabase.rawQuery(query, args);
QiscusComment qiscusComment = null;
while (cursor.moveToNext()) {
qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
}
cursor.close();
return qiscusComment;
}
@Override
public List<QiscusComment> getPendingComments() {
String query = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_STATE + " = " + QiscusComment.STATE_PENDING
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " ASC";
Cursor cursor = sqLiteReadDatabase.rawQuery(query, null);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(qiscusComment);
}
cursor.close();
return qiscusComments;
}
@Override
public Observable<List<QiscusComment>> getObservablePendingComments() {
return Observable.create(subscriber -> {
subscriber.onNext(getPendingComments());
subscriber.onCompleted();
}, Emitter.BackpressureMode.BUFFER);
}
@Override
public List<QiscusComment> searchComments(String query, long roomId, int limit, int offset) {
String sql = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_ROOM_ID + " =? " + " AND "
+ QiscusDb.CommentTable.COLUMN_MESSAGE + " LIKE ? " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC "
+ " LIMIT " + "?" + " OFFSET " + "?";
String queryLike = "%" + query + "%";
String[] args = new String[]{String.valueOf(roomId), queryLike, String.valueOf(limit), String.valueOf(offset)};
Cursor cursor = sqLiteReadDatabase.rawQuery(sql, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(qiscusComment);
}
cursor.close();
return qiscusComments;
}
@Override
public List<QiscusComment> searchComments(String query, int limit, int offset) {
String sql = "SELECT * FROM "
+ QiscusDb.CommentTable.TABLE_NAME + " WHERE "
+ QiscusDb.CommentTable.COLUMN_MESSAGE + " LIKE ? " + " AND "
+ QiscusDb.CommentTable.COLUMN_HARD_DELETED + " = " + 0
+ " ORDER BY " + QiscusDb.CommentTable.COLUMN_TIME + " DESC "
+ " LIMIT " + "?" + " OFFSET " + "?";
String queryLike = "%" + query + "%";
String[] args = new String[]{queryLike, String.valueOf(limit), String.valueOf(offset)};
Cursor cursor = sqLiteReadDatabase.rawQuery(sql, args);
List<QiscusComment> qiscusComments = new ArrayList<>();
while (cursor.moveToNext()) {
QiscusComment qiscusComment = QiscusDb.CommentTable.parseCursor(cursor);
QiscusRoomMember qiscusRoomMember = getMember(qiscusComment.getSenderEmail());
if (qiscusRoomMember != null) {
qiscusComment.setSender(qiscusRoomMember.getUsername());
qiscusComment.setSenderAvatar(qiscusRoomMember.getAvatar());
}
qiscusComments.add(qiscusComment);
}
cursor.close();
return qiscusComments;
}
@Override
public void clear() {
sqLiteReadDatabase.beginTransaction();
try {
sqLiteReadDatabase.delete(QiscusDb.RoomTable.TABLE_NAME, null, null);
sqLiteReadDatabase.delete(QiscusDb.MemberTable.TABLE_NAME, null, null);
sqLiteReadDatabase.delete(QiscusDb.RoomMemberTable.TABLE_NAME, null, null);
sqLiteReadDatabase.delete(QiscusDb.FilesTable.TABLE_NAME, null, null);
sqLiteReadDatabase.delete(QiscusDb.CommentTable.TABLE_NAME, null, null);
sqLiteReadDatabase.setTransactionSuccessful();
} catch (Exception e) {
QiscusErrorLogger.print(e);
} finally {
sqLiteReadDatabase.endTransaction();
}
}
private void sortRooms(List<QiscusChatRoom> qiscusChatRooms) {
Collections.sort(qiscusChatRooms, (room1, room2) -> {
if (room1.getLastComment() != null && room2.getLastComment() != null) {
return room2.getLastComment().getTime().compareTo(room1.getLastComment().getTime());
} else if (room1.getLastComment() == null && room2.getLastComment() != null) {
return 1;
} else if (room1.getLastComment() != null && room2.getLastComment() == null) {
return -1;
}
return 0;
});
}
}
| |
/**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.metrics.MasterMetrics;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter;
import org.apache.hadoop.hbase.regionserver.wal.OrphanHLogAfterSplitException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
/**
* This class abstracts a bunch of operations the HMaster needs to interact with
* the underlying file system, including splitting log files, checking file
* system status, etc.
*/
public class MasterFileSystem {
  private static final Log LOG = LogFactory.getLog(MasterFileSystem.class.getName());
  // HBase configuration
  Configuration conf;
  // master status
  Server master;
  // metrics for master
  MasterMetrics metrics;
  // Persisted unique cluster ID
  private String clusterId;
  // Keep around for convenience.
  private final FileSystem fs;
  // Is the filesystem ok?
  private volatile boolean fsOk = true;
  // The Path to the old logs dir
  private final Path oldLogDir;
  // root hbase directory on the FS
  private final Path rootdir;
  // create the split log lock
  final Lock splitLogLock = new ReentrantLock();
  final boolean distributedLogSplitting;
  final SplitLogManager splitLogManager;
  private final MasterServices services;

  /**
   * Resolves the root directory and its filesystem, optionally starts the
   * distributed log-split manager, and creates the initial FS layout.
   *
   * @param master the master server (source of configuration and ZK handle)
   * @param services master services used later for table-descriptor access
   * @param metrics may be null; split timings are reported to it when set
   * @param masterRecovery passed through to SplitLogManager initialization
   * @throws IOException if the filesystem or layout cannot be set up
   */
  public MasterFileSystem(Server master, MasterServices services,
      MasterMetrics metrics, boolean masterRecovery)
  throws IOException {
    this.conf = master.getConfiguration();
    this.master = master;
    this.services = services;
    this.metrics = metrics;
    // Set filesystem to be that of this.rootdir else we get complaints about
    // mismatched filesystems if hbase.rootdir is hdfs and fs.defaultFS is
    // default localfs. Presumption is that rootdir is fully-qualified before
    // we get to here with appropriate fs scheme.
    this.rootdir = FSUtils.getRootDir(conf);
    // Cover both bases, the old way of setting default fs and the new.
    // We're supposed to run on 0.20 and 0.21 anyways.
    this.fs = this.rootdir.getFileSystem(conf);
    String fsUri = this.fs.getUri().toString();
    conf.set("fs.default.name", fsUri);
    conf.set("fs.defaultFS", fsUri);
    this.distributedLogSplitting =
      conf.getBoolean("hbase.master.distributed.log.splitting", true);
    if (this.distributedLogSplitting) {
      this.splitLogManager = new SplitLogManager(master.getZooKeeper(),
          master.getConfiguration(), master, master.getServerName().toString());
      this.splitLogManager.finishInitialization(masterRecovery);
    } else {
      this.splitLogManager = null;
    }
    // setup the filesystem variable
    // set up the archived logs path
    this.oldLogDir = createInitialFileSystemLayout();
  }

  /**
   * Create initial layout in filesystem.
   * <ol>
   * <li>Check if the root region exists and is readable, if not create it.
   * Create hbase.version and the -ROOT- directory if not one.
   * </li>
   * <li>Create a log archive directory for RS to put archived logs</li>
   * </ol>
   * Idempotent.
   */
  private Path createInitialFileSystemLayout() throws IOException {
    // check if the root directory exists
    checkRootDir(this.rootdir, conf, this.fs);
    Path oldLogDir = new Path(this.rootdir, HConstants.HREGION_OLDLOGDIR_NAME);
    // Make sure the region servers can archive their old logs
    if(!this.fs.exists(oldLogDir)) {
      this.fs.mkdirs(oldLogDir);
    }
    return oldLogDir;
  }

  /** @return the filesystem hosting the HBase root directory */
  public FileSystem getFileSystem() {
    return this.fs;
  }

  /**
   * Get the directory where old logs go
   * @return the dir
   */
  public Path getOldLogDir() {
    return this.oldLogDir;
  }

  /**
   * Checks to see if the file system is still accessible.
   * If not, sets closed
   * @return false if file system is not available
   */
  public boolean checkFileSystem() {
    if (this.fsOk) {
      try {
        FSUtils.checkFileSystemAvailable(this.fs);
        FSUtils.checkDfsSafeMode(this.conf);
      } catch (IOException e) {
        // First failure aborts the master; fsOk latches false permanently.
        master.abort("Shutting down HBase cluster: file system not available", e);
        this.fsOk = false;
      }
    }
    return this.fsOk;
  }

  /**
   * @return HBase root dir.
   */
  public Path getRootDir() {
    return this.rootdir;
  }

  /**
   * @return The unique identifier generated for this cluster
   */
  public String getClusterId() {
    return clusterId;
  }

  /**
   * Inspect the log directory to recover any log file without
   * an active region server.
   */
  void splitLogAfterStartup() {
    // Retry forever unless splitting is configured to skip errors.
    boolean retrySplitting = !conf.getBoolean("hbase.hlog.split.skip.errors",
        HLog.SPLIT_SKIP_ERRORS_DEFAULT);
    Path logsDirPath = new Path(this.rootdir, HConstants.HREGION_LOGDIR_NAME);
    do {
      if (master.isStopped()) {
        LOG.warn("Master stopped while splitting logs");
        break;
      }
      List<ServerName> serverNames = new ArrayList<ServerName>();
      try {
        if (!this.fs.exists(logsDirPath)) return;
        FileStatus[] logFolders = FSUtils.listStatus(this.fs, logsDirPath, null);
        // Get online servers after getting log folders to avoid log folder deletion of newly
        // checked in region servers . see HBASE-5916
        Set<ServerName> onlineServers = ((HMaster) master).getServerManager().getOnlineServers()
            .keySet();
        if (logFolders == null || logFolders.length == 0) {
          LOG.debug("No log files to split, proceeding...");
          return;
        }
        for (FileStatus status : logFolders) {
          String sn = status.getPath().getName();
          // truncate splitting suffix if present (for ServerName parsing)
          if (sn.endsWith(HLog.SPLITTING_EXT)) {
            sn = sn.substring(0, sn.length() - HLog.SPLITTING_EXT.length());
          }
          ServerName serverName = ServerName.parseServerName(sn);
          if (!onlineServers.contains(serverName)) {
            LOG.info("Log folder " + status.getPath() + " doesn't belong "
                + "to a known region server, splitting");
            serverNames.add(serverName);
          } else {
            LOG.info("Log folder " + status.getPath()
                + " belongs to an existing region server");
          }
        }
        splitLog(serverNames);
        retrySplitting = false;
      } catch (IOException ioe) {
        LOG.warn("Failed splitting of " + serverNames, ioe);
        if (!checkFileSystem()) {
          // Filesystem gone: hard-stop the JVM rather than run without logs split.
          LOG.warn("Bad Filesystem, exiting");
          Runtime.getRuntime().halt(1);
        }
        try {
          if (retrySplitting) {
            Thread.sleep(conf.getInt(
                "hbase.hlog.split.failure.retry.interval", 30 * 1000));
          }
        } catch (InterruptedException e) {
          LOG.warn("Interrupted, aborting since cannot return w/o splitting");
          Thread.currentThread().interrupt();
          retrySplitting = false;
          Runtime.getRuntime().halt(1);
        }
      }
    } while (retrySplitting);
  }

  /** Split the logs of a single dead server. Convenience wrapper. */
  public void splitLog(final ServerName serverName) throws IOException {
    List<ServerName> serverNames = new ArrayList<ServerName>();
    serverNames.add(serverName);
    splitLog(serverNames);
  }

  /**
   * Split the HLogs of the given (dead) servers, either via the distributed
   * SplitLogManager or serially under splitLogLock.
   */
  public void splitLog(final List<ServerName> serverNames) throws IOException {
    long splitTime = 0, splitLogSize = 0;
    List<Path> logDirs = new ArrayList<Path>();
    for(ServerName serverName: serverNames){
      Path logDir = new Path(this.rootdir,
          HLog.getHLogDirectoryName(serverName.toString()));
      Path splitDir = logDir.suffix(HLog.SPLITTING_EXT);
      // rename the directory so a rogue RS doesn't create more HLogs
      if (fs.exists(logDir)) {
        if (!this.fs.rename(logDir, splitDir)) {
          throw new IOException("Failed fs.rename for log split: " + logDir);
        }
        logDir = splitDir;
        LOG.debug("Renamed region directory: " + splitDir);
      } else if (!fs.exists(splitDir)) {
        LOG.info("Log dir for server " + serverName + " does not exist");
        continue;
      }
      logDirs.add(splitDir);
    }
    if (logDirs.isEmpty()) {
      LOG.info("No logs to split");
      return;
    }
    if (distributedLogSplitting) {
      splitLogManager.handleDeadWorkers(serverNames);
      splitTime = EnvironmentEdgeManager.currentTimeMillis();
      splitLogSize = splitLogManager.splitLogDistributed(logDirs);
      splitTime = EnvironmentEdgeManager.currentTimeMillis() - splitTime;
    } else {
      for(Path logDir: logDirs){
        // splitLogLock ensures that dead region servers' logs are processed
        // one at a time
        this.splitLogLock.lock();
        try {
          HLogSplitter splitter = HLogSplitter.createLogSplitter(
            conf, rootdir, logDir, oldLogDir, this.fs);
          try {
            // If FS is in safe mode, just wait till out of it.
            FSUtils.waitOnSafeMode(conf, conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 1000));
            splitter.splitLog();
          } catch (OrphanHLogAfterSplitException e) {
            LOG.warn("Retrying splitting because of:", e);
            //An HLogSplitter instance can only be used once. Get new instance.
            splitter = HLogSplitter.createLogSplitter(conf, rootdir, logDir,
              oldLogDir, this.fs);
            splitter.splitLog();
          }
          // NOTE: in serial mode only the last dir's time/size are reported.
          splitTime = splitter.getTime();
          splitLogSize = splitter.getSize();
        } finally {
          this.splitLogLock.unlock();
        }
      }
    }
    if (this.metrics != null) {
      this.metrics.addSplit(splitTime, splitLogSize);
    }
  }

  /**
   * Get the rootdir. Make sure its wholesome and exists before returning.
   * @param rd root directory path
   * @param c configuration (timeouts, version-file retry counts)
   * @param fs filesystem hosting {@code rd}
   * @return hbase.rootdir (after checks for existence and bootstrapping if
   * needed populating the directory with necessary bootup files).
   * @throws IOException
   */
  private Path checkRootDir(final Path rd, final Configuration c,
      final FileSystem fs)
  throws IOException {
    // If FS is in safe mode wait till out of it.
    FSUtils.waitOnSafeMode(c, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
        10 * 1000));
    // Filesystem is good. Go ahead and check for hbase.rootdir.
    try {
      if (!fs.exists(rd)) {
        fs.mkdirs(rd);
        // DFS leaves safe mode with 0 DNs when there are 0 blocks.
        // We used to handle this by checking the current DN count and waiting until
        // it is nonzero. With security, the check for datanode count doesn't work --
        // it is a privileged op. So instead we adopt the strategy of the jobtracker
        // and simply retry file creation during bootstrap indefinitely. As soon as
        // there is one datanode it will succeed. Permission problems should have
        // already been caught by mkdirs above.
        FSUtils.setVersion(fs, rd, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
          10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
            HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
      } else {
        if (!fs.isDirectory(rd)) {
          throw new IllegalArgumentException(rd.toString() + " is not a directory");
        }
        // as above
        FSUtils.checkVersion(fs, rd, true, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
          10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
            HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
      }
    } catch (IllegalArgumentException iae) {
      LOG.fatal("Please fix invalid configuration for "
        + HConstants.HBASE_DIR + " " + rd.toString(), iae);
      throw iae;
    }
    // Make sure cluster ID exists
    if (!FSUtils.checkClusterIdExists(fs, rd, c.getInt(
        HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000))) {
      FSUtils.setClusterId(fs, rd, UUID.randomUUID().toString(), c.getInt(
          HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000));
    }
    clusterId = FSUtils.getClusterId(fs, rd);
    // Make sure the root region directory exists!
    if (!FSUtils.rootRegionExists(fs, rd)) {
      bootstrap(rd, c);
    }
    createRootTableInfo(rd);
    return rd;
  }

  /** Create the -ROOT- table descriptor on disk if it does not exist yet. */
  private void createRootTableInfo(Path rd) throws IOException {
    // Create ROOT tableInfo if required.
    if (!FSTableDescriptors.isTableInfoExists(fs, rd,
        Bytes.toString(HRegionInfo.ROOT_REGIONINFO.getTableName()))) {
      FSTableDescriptors.createTableDescriptor(HTableDescriptor.ROOT_TABLEDESC, this.conf);
    }
  }

  /** Create the initial ROOT and META regions on a fresh filesystem. */
  private static void bootstrap(final Path rd, final Configuration c)
  throws IOException {
    LOG.info("BOOTSTRAP: creating ROOT and first META regions");
    try {
      // Bootstrapping, make sure blockcache is off. Else, one will be
      // created here in bootstrap and it'll need to be cleaned up. Better to
      // not make it in first place. Turn off block caching for bootstrap.
      // Enable after.
      HRegionInfo rootHRI = new HRegionInfo(HRegionInfo.ROOT_REGIONINFO);
      setInfoFamilyCachingForRoot(false);
      HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
      setInfoFamilyCachingForMeta(false);
      HRegion root = HRegion.createHRegion(rootHRI, rd, c,
          HTableDescriptor.ROOT_TABLEDESC);
      HRegion meta = HRegion.createHRegion(metaHRI, rd, c,
          HTableDescriptor.META_TABLEDESC);
      setInfoFamilyCachingForRoot(true);
      setInfoFamilyCachingForMeta(true);
      // Add first region from the META table to the ROOT region.
      HRegion.addRegionToMETA(root, meta);
      root.close();
      root.getLog().closeAndDelete();
      meta.close();
      meta.getLog().closeAndDelete();
    } catch (IOException e) {
      e = RemoteExceptionHandler.checkIOException(e);
      LOG.error("bootstrap", e);
      throw e;
    }
  }

  /**
   * Enable or disable in-memory caching for -ROOT-
   */
  public static void setInfoFamilyCachingForRoot(final boolean b) {
    for (HColumnDescriptor hcd:
        HTableDescriptor.ROOT_TABLEDESC.getColumnFamilies()) {
      if (Bytes.equals(hcd.getName(), HConstants.CATALOG_FAMILY)) {
        hcd.setBlockCacheEnabled(b);
        hcd.setInMemory(b);
      }
    }
  }

  /**
   * Enable or disable in memory caching for .META.
   */
  public static void setInfoFamilyCachingForMeta(final boolean b) {
    for (HColumnDescriptor hcd:
        HTableDescriptor.META_TABLEDESC.getColumnFamilies()) {
      if (Bytes.equals(hcd.getName(), HConstants.CATALOG_FAMILY)) {
        hcd.setBlockCacheEnabled(b);
        hcd.setInMemory(b);
      }
    }
  }

  /** Recursively delete the given region's directory from the filesystem. */
  public void deleteRegion(HRegionInfo region) throws IOException {
    fs.delete(HRegion.getRegionDir(rootdir, region), true);
  }

  /** Recursively delete the given table's directory from the filesystem. */
  public void deleteTable(byte[] tableName) throws IOException {
    fs.delete(new Path(rootdir, Bytes.toString(tableName)), true);
  }

  public void updateRegionInfo(HRegionInfo region) {
    // TODO implement this. i think this is currently broken in trunk i don't
    // see this getting updated.
    // @see HRegion.checkRegioninfoOnFilesystem()
  }

  /** Stop the split-log manager if distributed splitting is enabled. */
  public void stop() {
    if (splitLogManager != null) {
      this.splitLogManager.stop();
    }
  }

  /**
   * Create new HTableDescriptor in HDFS.
   *
   * @param htableDescriptor
   */
  public void createTableDescriptor(HTableDescriptor htableDescriptor)
  throws IOException {
    FSTableDescriptors.createTableDescriptor(htableDescriptor, conf);
  }

  /**
   * Delete column of a table
   * @param tableName
   * @param familyName
   * @return Modified HTableDescriptor with requested column deleted.
   * @throws IOException
   */
  public HTableDescriptor deleteColumn(byte[] tableName, byte[] familyName)
  throws IOException {
    LOG.info("DeleteColumn. Table = " + Bytes.toString(tableName)
        + " family = " + Bytes.toString(familyName));
    // NOTE(review): unlike addColumn, no null check on htd here — an unknown
    // table would NPE rather than raise InvalidFamilyOperationException.
    HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
    htd.removeFamily(familyName);
    this.services.getTableDescriptors().add(htd);
    return htd;
  }

  /**
   * Modify Column of a table
   * @param tableName
   * @param hcd HColumnDesciptor
   * @return Modified HTableDescriptor with the column modified.
   * @throws IOException
   */
  public HTableDescriptor modifyColumn(byte[] tableName, HColumnDescriptor hcd)
  throws IOException {
    LOG.info("AddModifyColumn. Table = " + Bytes.toString(tableName)
      + " HCD = " + hcd.toString());
    // NOTE(review): no null check on htd here either — see deleteColumn.
    HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
    byte [] familyName = hcd.getName();
    if(!htd.hasFamily(familyName)) {
      throw new InvalidFamilyOperationException("Family '" +
        Bytes.toString(familyName) + "' doesn't exists so cannot be modified");
    }
    htd.addFamily(hcd);
    this.services.getTableDescriptors().add(htd);
    return htd;
  }

  /**
   * Add column to a table
   * @param tableName
   * @param hcd
   * @return Modified HTableDescriptor with new column added.
   * @throws IOException
   */
  public HTableDescriptor addColumn(byte[] tableName, HColumnDescriptor hcd)
  throws IOException {
    LOG.info("AddColumn. Table = " + Bytes.toString(tableName) + " HCD = " +
      hcd.toString());
    HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
    if (htd == null) {
      throw new InvalidFamilyOperationException("Family '" +
        hcd.getNameAsString() + "' cannot be modified as HTD is null");
    }
    htd.addFamily(hcd);
    this.services.getTableDescriptors().add(htd);
    return htd;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.isi.wings.catalog.data.api.impl.kb;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import edu.isi.wings.catalog.data.api.DataCreationAPI;
import edu.isi.wings.catalog.data.classes.DataItem;
import edu.isi.wings.catalog.data.classes.DataTree;
import edu.isi.wings.catalog.data.classes.DataTreeNode;
import edu.isi.wings.catalog.data.classes.MetadataProperty;
import edu.isi.wings.catalog.data.classes.MetadataValue;
import edu.isi.wings.common.kb.KBUtils;
import edu.isi.kcap.ontapi.KBAPI;
import edu.isi.kcap.ontapi.KBObject;
import edu.isi.kcap.ontapi.KBTriple;
import edu.isi.kcap.ontapi.OntSpec;
import edu.isi.kcap.ontapi.SparqlQuerySolution;
public class DataCreationKB extends DataKB implements DataCreationAPI {
String topclass;
String topmetric;
KBAPI tmpkb;
DataCreationAPI externalCatalog;
/**
 * Builds the data-catalog KB, loads an auxiliary "micro" OWL view of the
 * ontology (to work around unionOf-domain resolution), and optionally wires
 * an external catalog implementation named by the "extern_data_catalog"
 * property (instantiated reflectively via a Properties constructor).
 */
public DataCreationKB(Properties props) {
  super(props, true, true);
  // FIXME: Hack to get unionOf domains
  try {
    this.tmpkb = this.ontologyFactory.getKB(onturl, OntSpec.MICRO);
  }
  catch (Exception e) {
    // Best-effort: tmpkb stays null on failure and dependent calls may NPE.
    e.printStackTrace();
  }
  this.topclass = this.dcns + "DataObject";
  this.topmetric = this.dcns + "Metrics";
  String extern = props.getProperty("extern_data_catalog");
  if(extern != null) {
    try {
      Class<?> classz = Class.forName(extern);
      DataCreationAPI externalDC =
          (DataCreationAPI) classz.getDeclaredConstructor(Properties.class).newInstance(props);
      this.setExternalCatalog(externalDC);
    } catch (Exception e) {
      // Misconfigured external catalog is non-fatal; continue without one.
      e.printStackTrace();
    }
  }
  // Legacy porting: Fix all properties that have multiple domains
  // -- convert to disjunctive domains FIXME
  this.convertPropertyDomainsToDisjunctiveDomains();
}
@Override
public boolean save() {
  // Persist the library first; the ontology write is skipped if that fails.
  boolean librarySaved = this.save(libkb);
  return librarySaved && this.save(ontkb);
}
@Override
public DataTree getDataHierarchy() {
  // Data-object tree rooted at the top data class (instances included).
  String rootClass = this.topclass;
  return this.createHierarchy(rootClass, false);
}
@Override
public DataTree getNodeDataHierarchy(String nodeid) {
  // Same as getDataHierarchy, but rooted at an arbitrary node.
  return this.createHierarchy(nodeid, false);
}
@Override
public DataTree getDatatypeHierarchy() {
  // Classes-only view of the hierarchy (second flag = true).
  String rootClass = this.topclass;
  return this.createHierarchy(rootClass, true);
}
@Override
public DataTree getMetricsHierarchy() {
  // Metrics tree rooted at the top metric class.
  String rootMetric = this.topmetric;
  return this.createHierarchy(rootMetric, false);
}
@Override
public ArrayList<String> getAllDatatypeIds() {
  // Collect the ID of every OWL class declared in the domain namespace.
  String query =
      "SELECT ?type\n" +
      "WHERE {\n" +
      "?type a <"+KBUtils.OWL+"Class> .\n" +
      "FILTER ( STRSTARTS(STR(?type), \"" + this.dcdomns + "\"))\n" +
      "}";
  ArrayList<String> typeIds = new ArrayList<String>();
  this.start_read();
  for (ArrayList<SparqlQuerySolution> row : this.kb.sparqlQuery(query)) {
    KBObject typeObj = null;
    for (SparqlQuerySolution binding : row) {
      if ("type".equals(binding.getVariable()))
        typeObj = binding.getObject();
    }
    if (typeObj != null)
      typeIds.add(typeObj.getID());
  }
  this.end();
  return typeIds;
}
@Override
public HashMap<String, ArrayList<String>> getAllDatatypeDatasets() {
  // Map each domain datatype ID to the IDs of its instances in the library.
  String query =
      "SELECT ?s ?type\n" +
      "WHERE {\n" +
      "?s a ?type .\n" +
      "FILTER ( STRSTARTS(STR(?type), \"" + this.dcdomns + "\"))\n" +
      "}";
  HashMap<String, ArrayList<String>> datasetsByType =
      new HashMap<String, ArrayList<String>>();
  this.start_read();
  for (ArrayList<SparqlQuerySolution> row : this.libkb.sparqlQuery(query)) {
    HashMap<String, KBObject> bindings = new HashMap<String, KBObject>();
    for (SparqlQuerySolution binding : row)
      bindings.put(binding.getVariable(), binding.getObject());
    KBObject typeObj = bindings.get("type");
    if (typeObj == null)
      continue;
    String typeId = typeObj.getID();
    ArrayList<String> datasets = datasetsByType.get(typeId);
    if (datasets == null) {
      datasets = new ArrayList<String>();
      datasetsByType.put(typeId, datasets);
    }
    datasets.add(bindings.get("s").getID());
  }
  this.end();
  return datasetsByType;
}
@Override
public ArrayList<MetadataProperty> getAllMetadataProperties() {
  // Metadata properties are the sub-properties of hasMetrics (object-valued)
  // and hasDataMetrics (literal-valued).
  this.start_read();
  KBObject metricsProp = this.kb.getProperty(this.dcns + "hasMetrics");
  KBObject dataMetricsProp = this.kb.getProperty(this.dcns + "hasDataMetrics");
  ArrayList<KBObject> subProps = this.kb.getSubPropertiesOf(metricsProp, false);
  subProps.addAll(this.kb.getSubPropertiesOf(dataMetricsProp, false));
  ArrayList<MetadataProperty> result = createMetadataProperties(subProps);
  this.end();
  return result;
}
@Override
public ArrayList<MetadataProperty> getMetadataProperties(String dtypeid, boolean direct) {
  this.start_read();
  // tmpkb (the MICRO-spec view) is queried here to resolve unionOf domains
  // that the main KB view cannot (see constructor FIXME).
  KBObject datatypeCls = this.tmpkb.getConcept(dtypeid);
  ArrayList<KBObject> classProps = this.tmpkb.getPropertiesOfClass(datatypeCls, direct);
  ArrayList<MetadataProperty> result = createMetadataProperties(classProps);
  this.end();
  return result;
}
@Override
public DataItem getDatatypeForData(String dataid) {
  try {
    this.start_read();
    KBObject dataObj = this.kb.getIndividual(dataid);
    if (dataObj == null)
      return null;
    // Wrap the instance's class as a DATATYPE item; null when untyped.
    KBObject dataCls = this.kb.getClassOfInstance(dataObj);
    if (dataCls == null)
      return null;
    return new DataItem(dataCls.getID(), DataItem.DATATYPE);
  }
  finally {
    this.end();
  }
}
/**
 * Return the data items typed by the given datatype. When {@code direct} is
 * false the datatype's subclasses are walked recursively as well.
 */
@Override
public ArrayList<DataItem> getDataForDatatype(String dtypeid, boolean direct) {
  try {
    this.start_read();
    KBObject datatype = this.kb.getConcept(dtypeid);
    ArrayList<KBObject> datas = this.kb.getInstancesOfClass(datatype, direct);
    ArrayList<DataItem> list = new ArrayList<DataItem>();
    for (KBObject data : datas) {
      list.add(new DataItem(data.getID(), DataItem.DATA));
    }
    if(!direct && datatype != null) {
      // NOTE(review): this recursive call re-enters start_read() and runs its
      // own end() in the callee's finally — confirm the transaction API
      // supports nesting. Also, if getInstancesOfClass(cls, false) already
      // returns subclass instances, this may produce duplicate entries.
      for(KBObject cls : this.getSubClasses(datatype))
        list.addAll(this.getDataForDatatype(cls.getID(), direct));
    }
    return list;
  }
  finally {
    this.end();
  }
}
@Override
public String getTypeNameFormat(String dtypeid) {
  try {
    this.start_read();
    KBObject datatypeCls = this.kb.getConcept(dtypeid);
    // The name format is stored as an rdfs:comment of the form "NameFormat=<fmt>".
    Pattern fmtPattern = Pattern.compile("^NameFormat=(.+)$");
    for (String comment : this.kb.getAllComments(datatypeCls)) {
      Matcher matcher = fmtPattern.matcher(comment);
      if (matcher.find())
        return matcher.group(1);
    }
    // No NameFormat comment found.
    return null;
  }
  finally {
    this.end();
  }
}
@Override
public ArrayList<MetadataValue> getMetadataValues(String dataid, ArrayList<String> propids) {
  // Gather the values of each requested property on the given data item,
  // tagging each as a literal (DATATYPE) or a resource reference (OBJECT).
  ArrayList<MetadataValue> result = new ArrayList<MetadataValue>();
  this.start_read();
  KBObject dataObj = this.kb.getIndividual(dataid);
  for (String propid : propids) {
    KBObject propObj = this.kb.getProperty(propid);
    for (KBObject value : this.kb.getPropertyValues(dataObj, propObj)) {
      if (value.isLiteral())
        result.add(new MetadataValue(propid, value.getValue(), MetadataValue.DATATYPE));
      else
        result.add(new MetadataValue(propid, value.getID(), MetadataValue.OBJECT));
    }
  }
  this.end();
  return result;
}
@Override
public MetadataProperty getMetadataProperty(String propid) {
  try {
    this.start_read();
    // Unknown resource: report absence instead of building an empty property.
    if (!this.kb.containsResource(propid))
      return null;
    KBObject propObj = this.kb.getProperty(propid);
    return this.createMetadataProperty(propObj);
  }
  finally {
    this.end();
  }
}
@Override
public boolean addDatatype(String dtypeid, String parentid) {
  try {
    this.start_write();
    // Create the class in the ontology and mirror it to any external catalog.
    KBObject createdCls = this.ontkb.createClass(dtypeid, parentid);
    if (this.externalCatalog != null)
      this.externalCatalog.addDatatype(dtypeid, parentid);
    boolean created = (createdCls != null);
    return this.save() && created;
  }
  finally {
    this.end();
  }
}
/**
 * Remove a datatype: its metadata properties (or just this domain of them),
 * its data files, its subclasses (recursively), and finally the class itself.
 */
@Override
public boolean removeDatatype(String dtypeid) {
  this.start_read();
  KBObject cls = this.kb.getConcept(dtypeid);
  // Get all subclasses
  ArrayList<KBObject> subclses = this.getSubClasses(cls);
  // Get all files
  ArrayList<KBObject> files = this.kb.getInstancesOfClass(cls, true);
  // Get metadata properties
  ArrayList<KBObject> props = this.kb.getPropertiesOfClass(cls, false);
  this.end();
  this.start_write();
  // Suspend further internal transactions (i.e. Batch write)
  this.start_batch_operation();
  // Remove properties
  for (KBObject prop : props) {
    // NOTE(review): getMetadataProperty() runs its own start_read()/end()
    // inside this write transaction, and start_write() is then re-issued on
    // every iteration — confirm the transaction API tolerates this nesting.
    MetadataProperty mprop = this.getMetadataProperty(prop.getID());
    this.start_write();
    if(mprop.getDomains().contains(dtypeid)) {
      // Drop only this domain when the property serves other domains too.
      if(mprop.getDomains().size() > 1)
        this.removeMetadataPropertyDomain(prop.getID(), dtypeid);
      else
        this.removeMetadataProperty(prop.getID());
    }
  }
  // Remove files
  for (KBObject file : files) {
    this.removeData(file.getID());
  }
  // Remove subclasses
  for (KBObject subcls : subclses) {
    if (!subcls.isNothing())
      this.removeDatatype(subcls.getID());
  }
  // Finally remove the class itself
  KBUtils.removeAllTriplesWith(this.ontkb, dtypeid, false);
  if(this.externalCatalog != null)
    this.externalCatalog.removeDatatype(dtypeid);
  // Resume transactions
  this.stop_batch_operation();
  // NOTE(review): '&&' short-circuits — if save() fails, end() is never
  // called and the write transaction stays open; siblings use try/finally.
  return this.save() && this.end();
}
/**
 * Rename a datatype in the ontology (and any external catalog).
 *
 * Fix: the original returned {@code this.save() && this.end()} with no
 * try/finally, so a failed save() (or an exception from the rename) left the
 * write transaction open forever. The transaction is now always closed while
 * preserving the original return contract (success requires save AND end).
 */
@Override
public boolean renameDatatype(String newtypeid, String oldtypeid) {
  boolean saved = false;
  try {
    this.start_write();
    KBUtils.renameAllTriplesWith(this.ontkb, oldtypeid, newtypeid, false);
    if (this.externalCatalog != null)
      this.externalCatalog.renameDatatype(newtypeid, oldtypeid);
    saved = this.save();
    // Success path: same contract as before — both save() and end() must pass.
    return saved && this.end();
  }
  finally {
    // Failure or exception path: close the transaction the original leaked.
    if (!saved) {
      this.end();
    }
  }
}
/**
 * Rename a datatype's occurrences in the library KB (and any external catalog).
 *
 * Fix: same transaction leak as renameDatatype — the short-circuiting
 * {@code save() && end()} skipped end() on failure and on exceptions. The
 * transaction is now always closed; the return contract is unchanged.
 */
@Override
public boolean renameDatatypeInLibrary(String newtypeid, String oldtypeid) {
  boolean saved = false;
  try {
    this.start_write();
    KBUtils.renameAllTriplesWith(this.libkb, oldtypeid, newtypeid, false);
    if (this.externalCatalog != null)
      this.externalCatalog.renameDatatypeInLibrary(newtypeid, oldtypeid);
    saved = this.save();
    return saved && this.end();
  }
  finally {
    if (!saved) {
      this.end();
    }
  }
}
/**
 * Re-parent a datatype's library instances: property values that applied
 * under the old parent but not under the new one are removed from every
 * instance of the datatype.
 *
 * Fix: the original's {@code return this.save() && this.end()} leaked the
 * write transaction when save() failed or any KB call threw; end() is now
 * guaranteed, with the original return contract preserved.
 */
@Override
public boolean moveDatatypeParentInLibrary(String dtypeid, String fromtypeid, String totypeid) {
  boolean saved = false;
  try {
    this.start_write();
    KBObject cls = this.kb.getConcept(dtypeid);
    KBObject fromcls = this.kb.getConcept(fromtypeid);
    KBObject tocls = this.kb.getConcept(totypeid);
    // Properties valid for the old parent but absent from the new one.
    ArrayList<KBObject> oldprops = this.kb.getPropertiesOfClass(fromcls, false);
    ArrayList<KBObject> newprops = this.kb.getPropertiesOfClass(tocls, false);
    ArrayList<KBObject> removedProps = new ArrayList<KBObject>();
    for (KBObject oldprop : oldprops) {
      if (!newprops.contains(oldprop)) {
        removedProps.add(oldprop);
      }
    }
    // Strip the now-invalid property values from each instance.
    for (KBObject ind : this.kb.getInstancesOfClass(cls, false)) {
      for (KBObject prop : removedProps) {
        for (KBTriple triple : this.kb.genericTripleQuery(ind, prop, null))
          this.libkb.removeTriple(triple);
      }
    }
    if (this.externalCatalog != null)
      this.externalCatalog.moveDatatypeParentInLibrary(dtypeid, fromtypeid, totypeid);
    saved = this.save();
    return saved && this.end();
  }
  finally {
    if (!saved) {
      this.end();
    }
  }
}
/**
 * Change a datatype's superclass in the ontology (and any external catalog).
 *
 * Fix: the original had two transaction leaks — the early
 * {@code return false} after a failed setSuperClass() skipped end()
 * entirely, and the short-circuiting {@code save() && end()} skipped end()
 * when save() failed. Both paths now close the transaction; the return
 * contract is unchanged.
 */
@Override
public boolean moveDatatypeParent(String dtypeid, String fromtypeid, String totypeid) {
  boolean saved = false;
  try {
    this.start_write();
    if (!this.ontkb.setSuperClass(dtypeid, totypeid))
      return false; // finally closes the transaction (original leaked it here)
    if (this.externalCatalog != null)
      this.externalCatalog.moveDatatypeParent(dtypeid, fromtypeid, totypeid);
    saved = this.save();
    return saved && this.end();
  }
  finally {
    if (!saved) {
      this.end();
    }
  }
}
/**
 * Move a data item from one datatype to another: drop property values that
 * are invalid under the new type, then swap the rdf:type triple.
 */
@Override
public boolean moveDataParent(String dataid, String fromtypeid, String totypeid) {
  // NOTE(review): unlike every sibling mutator, this method opens no
  // start_write()/end() transaction and never calls save(), and it returns
  // true unconditionally — confirm whether callers persist separately.
  KBObject obj = this.kb.getIndividual(dataid);
  KBObject fromcls = this.kb.getConcept(fromtypeid);
  KBObject tocls = this.kb.getConcept(totypeid);
  // Properties valid for the old type but absent from the new one.
  ArrayList<KBObject> oldprops = this.kb.getPropertiesOfClass(fromcls, false);
  ArrayList<KBObject> newprops = this.kb.getPropertiesOfClass(tocls, false);
  ArrayList<KBObject> removedProps = new ArrayList<KBObject>();
  for(KBObject oldprop : oldprops) {
    if(!newprops.contains(oldprop)) {
      removedProps.add(oldprop);
    }
  }
  for(KBObject prop : removedProps) {
    for(KBTriple triple : this.kb.genericTripleQuery(obj, prop, null))
      this.libkb.removeTriple(triple);
  }
  // Re-type the individual: remove the old rdf:type, add the new one.
  KBObject typeProp = this.kb.getProperty(KBUtils.RDF+"type");
  this.libkb.removeTriple(obj, typeProp, fromcls);
  this.libkb.addTriple(obj, typeProp, tocls);
  if(this.externalCatalog != null)
    this.externalCatalog.moveDataParent(dataid, fromtypeid, totypeid);
  return true;
}
@Override
public boolean addData(String dataid, String dtypeid) {
  try {
    this.start_write();
    // Instantiate the dataset as an individual of the given datatype class.
    KBObject typeCls = this.kb.getConcept(dtypeid);
    this.libkb.createObjectOfClass(dataid, typeCls);
    if (this.externalCatalog != null)
      this.externalCatalog.addData(dataid, dtypeid);
    return this.save();
  }
  catch (Exception e) {
    // Best-effort: report and signal failure rather than propagate.
    e.printStackTrace();
    return false;
  }
  finally {
    this.end();
  }
}
@Override
public boolean renameData(String newdataid, String olddataid) {
  try {
    this.start_write();
    // Rewrite every triple mentioning the old ID in the library KB.
    KBUtils.renameAllTriplesWith(this.libkb, olddataid, newdataid, false);
    if (this.externalCatalog != null)
      this.externalCatalog.renameData(newdataid, olddataid);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean removeData(String dataid) {
  // Remove data if it is in the catalog's data directory
  try {
    // Physically delete the backing file only when it lives inside the
    // catalog's own data directory; external locations are left untouched.
    String location = this.getDataLocation(dataid);
    if (location != null) {
      File dataFile = new File(location);
      String parentDir = dataFile.getParentFile().getAbsolutePath();
      if (parentDir.equals(this.datadir))
        dataFile.delete();
    }
    this.start_write();
    KBUtils.removeAllTriplesWith(this.libkb, dataid, false);
    if (this.externalCatalog != null)
      this.externalCatalog.removeData(dataid);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean setDataLocation(String dataid, String locuri) {
  // What happens to existing file ?
  try {
    this.start_write();
    // Point the dataset's hasLocation property at the new literal URI.
    KBObject locationProp = this.kb.getProperty(this.dcns + "hasLocation");
    KBObject dataObj = this.libkb.getIndividual(dataid);
    KBObject locationVal = this.libkb.createLiteral(locuri);
    this.libkb.setPropertyValue(dataObj, locationProp, locationVal);
    if (this.externalCatalog != null)
      this.externalCatalog.setDataLocation(dataid, locuri);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean setTypeNameFormat(String dtypeid, String format) {
  try {
    this.start_write();
    // The format is persisted as an rdfs:comment ("NameFormat=<fmt>");
    // see getTypeNameFormat for the reader side.
    KBObject datatypeCls = this.ontkb.getConcept(dtypeid);
    this.ontkb.setComment(datatypeCls, "NameFormat=" + format);
    if (this.externalCatalog != null)
      this.externalCatalog.setTypeNameFormat(dtypeid, format);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean addDatatypePropertyValue(String dataid, String propid, Object val) {
  try {
    this.start_write();
    // Attach the value as an untyped literal on the dataset individual.
    KBObject dataObj = this.libkb.getIndividual(dataid);
    KBObject propObj = this.kb.getProperty(propid);
    KBObject literalVal = this.libkb.createLiteral(val);
    this.libkb.setPropertyValue(dataObj, propObj, literalVal);
    if (this.externalCatalog != null)
      this.externalCatalog.addDatatypePropertyValue(dataid, propid, val);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean addDatatypePropertyValue(String dataid, String propid, String val, String xsdtype) {
  try {
    this.start_write();
    // Variant that types the literal with an explicit XSD datatype.
    KBObject dataObj = this.libkb.getIndividual(dataid);
    KBObject propObj = this.kb.getProperty(propid);
    KBObject typedLiteral = this.kb.createXSDLiteral(val, xsdtype);
    this.libkb.setPropertyValue(dataObj, propObj, typedLiteral);
    if (this.externalCatalog != null)
      this.externalCatalog.addDatatypePropertyValue(dataid, propid, val, xsdtype);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean addObjectPropertyValue(String dataid, String propid, String valid) {
  try {
    this.start_write();
    // Attach a resource-valued (object) property to the dataset individual.
    KBObject dataObj = this.libkb.getIndividual(dataid);
    KBObject propObj = this.kb.getProperty(propid);
    KBObject valueRes = this.kb.getResource(valid);
    this.libkb.setPropertyValue(dataObj, propObj, valueRes);
    if (this.externalCatalog != null)
      this.externalCatalog.addObjectPropertyValue(dataid, propid, valid);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean removePropertyValue(String dataid, String propid, Object val) {
  try {
    this.start_write();
    // Remove the matching literal triple from the library KB.
    KBObject dataObj = this.libkb.getIndividual(dataid);
    KBObject propObj = this.kb.getProperty(propid);
    KBObject literalVal = this.libkb.createLiteral(val);
    this.libkb.removeTriple(dataObj, propObj, literalVal);
    if (this.externalCatalog != null)
      this.externalCatalog.removePropertyValue(dataid, propid, val);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean removeAllPropertyValues(String dataid, ArrayList<String> propids) {
  try {
    this.start_write();
    KBObject dataObj = this.libkb.getIndividual(dataid);
    // For each listed property, delete every value it holds on this dataset.
    for (String propid : propids) {
      KBObject propObj = this.kb.getProperty(propid);
      for (KBObject value : this.kb.getPropertyValues(dataObj, propObj)) {
        this.libkb.removeTriple(dataObj, propObj, value);
      }
    }
    if (this.externalCatalog != null)
      this.externalCatalog.removeAllPropertyValues(dataid, propids);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean addMetadataProperty(String propid, String domain, String range) {
  try {
    this.start_write();
    // XSD-ranged properties are datatype properties under hasDataMetrics;
    // everything else is an object property under hasMetrics.
    boolean literalRange = range.contains(KBUtils.XSD);
    if (literalRange) {
      this.ontkb.createDatatypeProperty(propid, this.dcns + "hasDataMetrics");
    } else {
      this.ontkb.createObjectProperty(propid, this.dcns + "hasMetrics");
    }
    // Make sure the domain class exists before attaching it disjunctively.
    if (this.ontkb.getConcept(domain) == null)
      this.ontkb.createClass(domain);
    this.ontkb.addPropertyDomainDisjunctive(propid, domain);
    this.ontkb.setPropertyRange(propid, range);
    if (this.externalCatalog != null)
      this.externalCatalog.addMetadataProperty(propid, domain, range);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean addMetadataPropertyDomain(String propid, String domain) {
  try {
    this.start_write();
    // Create the domain class on demand, then add it as a disjunctive domain.
    if (this.ontkb.getConcept(domain) == null)
      this.ontkb.createClass(domain);
    this.ontkb.addPropertyDomainDisjunctive(propid, domain);
    if (this.externalCatalog != null)
      this.externalCatalog.addMetadataPropertyDomain(propid, domain);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean removeMetadataPropertyDomain(String propid, String domain) {
  try {
    this.start_write();
    // Detach one domain from the property's disjunctive domain union.
    this.ontkb.removePropertyDomainDisjunctive(propid, domain);
    if (this.externalCatalog != null)
      this.externalCatalog.removeMetadataPropertyDomain(propid, domain);
    return this.save();
  }
  finally {
    this.end();
  }
}
/**
 * Delete a metadata property: first detach each disjunctive domain, then
 * remove every remaining triple that mentions the property.
 */
@Override
public boolean removeMetadataProperty(String propid) {
  // Remove all domains manually
  // - Due to bug in removing triples with union classes
  try {
    this.start_write();
    // NOTE(review): getMetadataProperty() opens its own read transaction
    // (start_read/end) inside this write transaction — confirm nesting is
    // supported by the KB transaction API.
    MetadataProperty prop = this.getMetadataProperty(propid);
    for(String domid : prop.getDomains())
      this.ontkb.removePropertyDomainDisjunctive(propid, domid);
    // Remove all triples (this skips removing domain union classes)
    KBUtils.removeAllTriplesWith(this.ontkb, propid, true);
    if(this.externalCatalog != null)
      this.externalCatalog.removeMetadataProperty(propid);
    return this.save();
  }
  finally {
    this.end();
  }
}
@Override
public boolean removeMetadataPropertyInLibrary(String propid) {
try {
this.start_write();
KBUtils.removeAllTriplesWith(this.libkb, propid, true);
if(this.externalCatalog != null)
this.externalCatalog.removeMetadataPropertyInLibrary(propid);
return this.save();
}
finally {
this.end();
}
}
@Override
public boolean renameMetadataProperty(String oldid, String newid) {
// First remove all domains and then readd them later
// - Due to bug in renaming triples
try {
this.start_write();
MetadataProperty prop = this.getMetadataProperty(oldid);
for(String domid : prop.getDomains())
this.ontkb.removePropertyDomainDisjunctive(oldid, domid);
// Rename all triples (this skips renaming domain union classes)
KBUtils.renameAllTriplesWith(this.ontkb, oldid, newid, true);
for(String domid : prop.getDomains())
this.ontkb.addPropertyDomainDisjunctive(newid, domid);
if(this.externalCatalog != null)
this.externalCatalog.renameMetadataProperty(oldid, newid);
return this.save();
}
finally {
this.end();
}
}
@Override
public boolean renamePropertyInLibrary(String oldid, String newid) {
try {
this.start_write();
KBUtils.renameAllTriplesWith(this.libkb, oldid, newid, true);
if(this.externalCatalog != null)
this.externalCatalog.renamePropertyInLibrary(oldid, newid);
return this.save();
}
finally {
this.end();
}
}
  /**
   * Bulk-copies the library and ontology KBs from another catalog into this
   * one, rewriting all source namespaces/urls to this catalog's namespaces
   * and rebasing any stored data-file locations onto this catalog's datadir.
   * Finally re-reads the merged model via initializeAPI.
   */
  @Override
  public void copyFrom(DataCreationAPI dc) {
    // NOTE(review): if an exception is thrown mid-copy, dckb.end() is never
    // reached, so the source catalog's read transaction may leak -- confirm
    // whether DataCreationKB transactions need their own finally block.
    try {
      DataCreationKB dckb = (DataCreationKB)dc;
      this.start_write();
      dckb.start_read();
      // Copy the library triples and rewrite all source namespaces to ours
      this.libkb.copyFrom(dckb.libkb);
      KBUtils.renameTripleNamespace(this.libkb, dckb.dcns, this.dcns);
      KBUtils.renameTripleNamespace(this.libkb, dckb.dcdomns, this.dcdomns);
      KBUtils.renameTripleNamespace(this.libkb, dckb.dclibns, this.dclibns);
      KBUtils.renameAllTriplesWith(this.libkb, dckb.onturl, this.onturl, false);
      KBUtils.renameAllTriplesWith(this.libkb, dckb.liburl, this.liburl, false);
      // Change any specified locations of data
      KBObject locProp = this.libkb.getProperty(this.dcns+"hasLocation");
      ArrayList<KBTriple> triples =
          this.libkb.genericTripleQuery(null, locProp, null);
      for(KBTriple t : triples) {
        if(t.getObject() == null || t.getObject().getValue() == null)
          continue;
        KBObject data = t.getSubject();
        String loc = (String) t.getObject().getValue();
        // Keep only the file name; rebase it under this catalog's datadir
        File f = new File(loc);
        loc = this.datadir + File.separator + f.getName();
        this.libkb.setPropertyValue(data, locProp, this.libkb.createLiteral(loc));
      }
      // Copy the ontology triples and rewrite namespaces/urls the same way
      this.ontkb.copyFrom(dckb.ontkb);
      KBUtils.renameTripleNamespace(this.ontkb, dckb.dcns, this.dcns);
      KBUtils.renameTripleNamespace(this.ontkb, dckb.dcdomns, this.dcdomns);
      KBUtils.renameAllTriplesWith(this.ontkb, dckb.dcurl, this.dcurl, false);
      KBUtils.renameAllTriplesWith(this.ontkb, dckb.onturl, this.onturl, false);
      this.save();
      dckb.end();
      // Close the write transaction, then re-open for reading so the API
      // can be re-initialized on the merged model; the finally block below
      // closes that read transaction.
      this.end();
      this.start_read();
      this.initializeAPI(true, true, true);
    }
    finally {
      this.end();
    }
  }
@Override
public boolean delete() {
try {
return
this.start_write() &&
this.libkb.delete() &&
this.ontkb.delete() &&
this.save();
}
finally {
this.end();
}
}
  /**
   * Returns the external catalog mirror, or null when none is configured.
   */
  @Override
  public DataCreationAPI getExternalCatalog() {
    return this.externalCatalog;
  }
@Override
public void setExternalCatalog(DataCreationAPI dc) {
this.externalCatalog = dc;
this.externalCatalog.copyFrom(this);
}
/*
* Private Helper functions below
*/
private ArrayList<KBObject> getSubClasses(KBObject cls) {
ArrayList<KBObject> subclses = new ArrayList<KBObject>();
for(KBTriple t :
this.kb.genericTripleQuery(null, this.kb.getProperty(KBUtils.RDFS+"subClassOf"), cls)) {
KBObject subcls = this.kb.getConcept(t.getSubject().getID());
if(subcls == null) {
subcls = this.ontkb.createClass(t.getSubject().getID());
}
subclses.add(subcls);
}
return subclses;
}
private DataTree createHierarchy(String classid, boolean types_only) {
DataItem rootitem = new DataItem(classid, DataItem.DATATYPE);
DataTreeNode rootnode = new DataTreeNode(rootitem);
ArrayList<DataTreeNode> queue = new ArrayList<DataTreeNode>();
queue.add(rootnode);
this.start_read();
while (!queue.isEmpty()) {
DataTreeNode node = queue.remove(0);
DataItem item = node.getItem();
if (item.getType() == DataItem.DATATYPE) {
KBObject cls = this.kb.getConcept(item.getID());
if (cls == null)
continue;
if (!types_only) {
ArrayList<KBObject> instances = this.kb.getInstancesOfClass(cls, true);
for (KBObject inst : instances) {
DataItem institem = new DataItem(inst.getID(), DataItem.DATA);
DataTreeNode childnode = new DataTreeNode(institem);
node.addChild(childnode);
}
}
ArrayList<KBObject> subclasses = this.getSubClasses(cls);
for (KBObject subcls : subclasses) {
if (!subcls.getNamespace().equals(this.dcdomns)
&& !subcls.getNamespace().equals(this.dcdomns))
continue;
DataItem institem = new DataItem(subcls.getID(), DataItem.DATATYPE);
DataTreeNode childnode = new DataTreeNode(institem);
node.addChild(childnode);
queue.add(childnode);
}
}
}
this.end();
DataTree tree = new DataTree(rootnode);
return tree;
}
/*
* Legacy function to convert properties with multiple domains
* to have a single domain which is a unionClass of all the domains
* -- we consider multiple domains as disjunctive whereas owl considers
* them conjunctive, so we have to do this translation for our earlier domains
*/
  /**
   * Migrates every metrics sub-property that still has multiple plain
   * domains to a single disjunctive (union-class) domain.
   * The read lock is temporarily upgraded to a write lock for each
   * property that needs conversion.
   */
  private void convertPropertyDomainsToDisjunctiveDomains() {
    if(this.kb == null)
      return;
    this.start_read();
    // Collect all sub-properties of hasMetrics and hasDataMetrics
    KBObject mprop = this.kb.getProperty(this.dcns + "hasMetrics");
    KBObject dmprop = this.kb.getProperty(this.dcns + "hasDataMetrics");
    ArrayList<KBObject> properties = this.kb.getSubPropertiesOf(mprop, false);
    properties.addAll(this.kb.getSubPropertiesOf(dmprop, false));
    for(KBObject prop : properties) {
      ArrayList<KBObject> doms = this.ontkb.getPropertyDomains(prop);
      if(doms.size() > 1) {
        // Upgrade to a write lock: drop the plain domains, re-add them as
        // one disjunctive union domain, persist, then drop back to reading.
        // The end/start ordering here is deliberate -- do not reorder.
        this.end();
        this.start_write();
        for(KBObject dom : doms)
          this.ontkb.removePropertyDomain(prop.getID(), dom.getID());
        for(KBObject dom : doms)
          this.ontkb.addPropertyDomainDisjunctive(prop.getID(), dom.getID());
        this.ontkb.save();
        this.end();
        this.start_read();
      }
    }
    this.end();
  }
private ArrayList<MetadataProperty> createMetadataProperties(ArrayList<KBObject> properties) {
ArrayList<MetadataProperty> list = new ArrayList<MetadataProperty>();
for (KBObject property : properties) {
// Ignore properties not declared in this domain
if (!property.getNamespace().equals(this.dcdomns))
continue;
MetadataProperty prop = this.createMetadataProperty(property);
if (prop != null)
list.add(prop);
}
return list;
}
private MetadataProperty createMetadataProperty(KBObject property) {
if (property == null)
return null;
int proptype = this.kb.isDatatypeProperty(property) ? MetadataProperty.DATATYPE
: MetadataProperty.OBJECT;
MetadataProperty prop = new MetadataProperty(property.getID(), proptype);
// Query for domain and range from the non-inference ontkb model (otherwise we get inferenced domains as well)
ArrayList<KBObject> domains = this.ontkb.getPropertyDomainsDisjunctive(property);
KBObject range = this.ontkb.getPropertyRange(property);
for(KBObject domain : domains)
prop.addDomain(domain.getID());
if (range != null) {
prop.setRange(range.getID());
}
return prop;
}
}
| |
package com.youtube.vitess.client;
import com.google.common.collect.Iterables;
import com.youtube.vitess.client.cursor.Cursor;
import com.youtube.vitess.client.cursor.SimpleCursor;
import com.youtube.vitess.client.cursor.StreamCursor;
import com.youtube.vitess.proto.Topodata.KeyRange;
import com.youtube.vitess.proto.Topodata.SrvKeyspace;
import com.youtube.vitess.proto.Topodata.TabletType;
import com.youtube.vitess.proto.Vtgate.BeginRequest;
import com.youtube.vitess.proto.Vtgate.BeginResponse;
import com.youtube.vitess.proto.Vtgate.BoundKeyspaceIdQuery;
import com.youtube.vitess.proto.Vtgate.BoundShardQuery;
import com.youtube.vitess.proto.Vtgate.ExecuteBatchKeyspaceIdsRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteBatchKeyspaceIdsResponse;
import com.youtube.vitess.proto.Vtgate.ExecuteBatchShardsRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteBatchShardsResponse;
import com.youtube.vitess.proto.Vtgate.ExecuteEntityIdsRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteEntityIdsResponse;
import com.youtube.vitess.proto.Vtgate.ExecuteKeyRangesRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteKeyRangesResponse;
import com.youtube.vitess.proto.Vtgate.ExecuteKeyspaceIdsRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteKeyspaceIdsResponse;
import com.youtube.vitess.proto.Vtgate.ExecuteRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteResponse;
import com.youtube.vitess.proto.Vtgate.ExecuteShardsRequest;
import com.youtube.vitess.proto.Vtgate.ExecuteShardsResponse;
import com.youtube.vitess.proto.Vtgate.GetSrvKeyspaceRequest;
import com.youtube.vitess.proto.Vtgate.GetSrvKeyspaceResponse;
import com.youtube.vitess.proto.Vtgate.SplitQueryRequest;
import com.youtube.vitess.proto.Vtgate.SplitQueryResponse;
import com.youtube.vitess.proto.Vtgate.StreamExecuteKeyRangesRequest;
import com.youtube.vitess.proto.Vtgate.StreamExecuteKeyspaceIdsRequest;
import com.youtube.vitess.proto.Vtgate.StreamExecuteRequest;
import com.youtube.vitess.proto.Vtgate.StreamExecuteShardsRequest;
import java.io.Closeable;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
/**
 * VTGateConn manages a VTGate connection.
 *
 * <p>Usage:
 *
 * <pre>
 * CallerID callerId = CallerID.newBuilder().setPrincipal("username").build();
 * Context ctx = Context.getDefault()
 *     .withDeadlineAfter(Duration.millis(500))
 *     .withCallerId(callerId);
 * RpcClient client = rpcClientFactory.create(ctx, new InetSocketAddress("host", port));
 * VTGateConn conn = new VTGateConn(client);
 *
 * try {
 *   byte ksid[] = computeKeyspaceId(...);
 *   QueryResult result = conn.executeKeyspaceIds(ctx,
 *       "INSERT INTO test_table (col1,col2) VALUES(:val1,:val2)",
 *       "test_keyspace", // keyspace
 *       Arrays.asList(ksid), // keyspaceIds
 *       ImmutableMap.of( // bindVars
 *           "val1", 123,
 *           "val2", 456
 *       ),
 *       TabletType.MASTER // tabletType
 *       );
 *
 *   for (Row row : result.getRowsList()) {
 *     // process each row.
 *   }
 * } catch (SQLException e) {
 *   // ...
 * }
 * </pre>
 */
public class VTGateConn implements Closeable {
  // Underlying RPC transport; set once at construction and never reassigned.
  private final RpcClient client;

  public VTGateConn(RpcClient client) {
    this.client = client;
  }

  /** Executes a non-streaming query, letting VTGate route it. */
  public Cursor execute(Context ctx, String query, Map<String, ?> bindVars, TabletType tabletType)
      throws SQLException {
    ExecuteRequest.Builder requestBuilder =
        ExecuteRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteResponse response = client.execute(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return new SimpleCursor(response.getResult());
  }

  /** Executes a non-streaming query on an explicit set of shards. */
  public Cursor executeShards(Context ctx, String query, String keyspace, Iterable<String> shards,
      Map<String, ?> bindVars, TabletType tabletType) throws SQLException {
    ExecuteShardsRequest.Builder requestBuilder =
        ExecuteShardsRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .addAllShards(shards)
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteShardsResponse response = client.executeShards(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return new SimpleCursor(response.getResult());
  }

  /** Executes a non-streaming query routed by keyspace ids. */
  public Cursor executeKeyspaceIds(Context ctx, String query, String keyspace,
      Iterable<byte[]> keyspaceIds, Map<String, ?> bindVars, TabletType tabletType)
      throws SQLException {
    ExecuteKeyspaceIdsRequest.Builder requestBuilder =
        ExecuteKeyspaceIdsRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .addAllKeyspaceIds(Iterables.transform(keyspaceIds, Proto.BYTE_ARRAY_TO_BYTE_STRING))
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteKeyspaceIdsResponse response = client.executeKeyspaceIds(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return new SimpleCursor(response.getResult());
  }

  /** Executes a non-streaming query routed by key ranges. */
  public Cursor executeKeyRanges(Context ctx, String query, String keyspace,
      Iterable<? extends KeyRange> keyRanges, Map<String, ?> bindVars, TabletType tabletType)
      throws SQLException {
    ExecuteKeyRangesRequest.Builder requestBuilder =
        ExecuteKeyRangesRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .addAllKeyRanges(keyRanges)
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteKeyRangesResponse response = client.executeKeyRanges(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return new SimpleCursor(response.getResult());
  }

  /** Executes a non-streaming query routed by entity-column values. */
  public Cursor executeEntityIds(Context ctx, String query, String keyspace,
      String entityColumnName, Map<byte[], ?> entityKeyspaceIds, Map<String, ?> bindVars,
      TabletType tabletType) throws SQLException {
    ExecuteEntityIdsRequest.Builder requestBuilder =
        ExecuteEntityIdsRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .setEntityColumnName(entityColumnName)
            .addAllEntityKeyspaceIds(Iterables.transform(
                entityKeyspaceIds.entrySet(), Proto.MAP_ENTRY_TO_ENTITY_KEYSPACE_ID))
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteEntityIdsResponse response = client.executeEntityIds(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return new SimpleCursor(response.getResult());
  }

  /**
   * Execute multiple shard queries as a batch.
   *
   * @param asTransaction If true, automatically create a transaction (per shard) that encloses all
   *     the batch queries.
   */
  public List<Cursor> executeBatchShards(Context ctx, Iterable<? extends BoundShardQuery> queries,
      TabletType tabletType, boolean asTransaction) throws SQLException {
    ExecuteBatchShardsRequest.Builder requestBuilder =
        ExecuteBatchShardsRequest.newBuilder()
            .addAllQueries(queries)
            .setTabletType(tabletType)
            .setAsTransaction(asTransaction);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteBatchShardsResponse response = client.executeBatchShards(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return Proto.toCursorList(response.getResultsList());
  }

  /**
   * Execute multiple keyspace ID queries as a batch.
   *
   * @param asTransaction If true, automatically create a transaction (per shard) that encloses all
   *     the batch queries.
   */
  public List<Cursor> executeBatchKeyspaceIds(Context ctx,
      Iterable<? extends BoundKeyspaceIdQuery> queries, TabletType tabletType,
      boolean asTransaction) throws SQLException {
    ExecuteBatchKeyspaceIdsRequest.Builder requestBuilder =
        ExecuteBatchKeyspaceIdsRequest.newBuilder()
            .addAllQueries(queries)
            .setTabletType(tabletType)
            .setAsTransaction(asTransaction);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    ExecuteBatchKeyspaceIdsResponse response =
        client.executeBatchKeyspaceIds(ctx, requestBuilder.build());
    Proto.checkError(response.getError());
    return Proto.toCursorList(response.getResultsList());
  }

  /** Streaming variant of {@link #execute}. */
  public Cursor streamExecute(Context ctx, String query, Map<String, ?> bindVars,
      TabletType tabletType) throws SQLException {
    StreamExecuteRequest.Builder requestBuilder =
        StreamExecuteRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    return new StreamCursor(client.streamExecute(ctx, requestBuilder.build()));
  }

  /** Streaming variant of {@link #executeShards}. */
  public Cursor streamExecuteShards(Context ctx, String query, String keyspace,
      Iterable<String> shards, Map<String, ?> bindVars, TabletType tabletType) throws SQLException {
    StreamExecuteShardsRequest.Builder requestBuilder =
        StreamExecuteShardsRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .addAllShards(shards)
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    return new StreamCursor(client.streamExecuteShards(ctx, requestBuilder.build()));
  }

  /** Streaming variant of {@link #executeKeyspaceIds}. */
  public Cursor streamExecuteKeyspaceIds(Context ctx, String query, String keyspace,
      Iterable<byte[]> keyspaceIds, Map<String, ?> bindVars, TabletType tabletType)
      throws SQLException {
    StreamExecuteKeyspaceIdsRequest.Builder requestBuilder =
        StreamExecuteKeyspaceIdsRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .addAllKeyspaceIds(Iterables.transform(keyspaceIds, Proto.BYTE_ARRAY_TO_BYTE_STRING))
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    return new StreamCursor(client.streamExecuteKeyspaceIds(ctx, requestBuilder.build()));
  }

  /** Streaming variant of {@link #executeKeyRanges}. */
  public Cursor streamExecuteKeyRanges(Context ctx, String query, String keyspace,
      Iterable<? extends KeyRange> keyRanges, Map<String, ?> bindVars, TabletType tabletType)
      throws SQLException {
    StreamExecuteKeyRangesRequest.Builder requestBuilder =
        StreamExecuteKeyRangesRequest.newBuilder()
            .setQuery(Proto.bindQuery(query, bindVars))
            .setKeyspace(keyspace)
            .addAllKeyRanges(keyRanges)
            .setTabletType(tabletType);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    return new StreamCursor(client.streamExecuteKeyRanges(ctx, requestBuilder.build()));
  }

  /** Starts a new transaction and returns a handle for queries within it. */
  public VTGateTx begin(Context ctx) throws SQLException {
    BeginRequest.Builder requestBuilder = BeginRequest.newBuilder();
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    BeginResponse response = client.begin(ctx, requestBuilder.build());
    return VTGateTx.withRpcClientAndSession(client, response.getSession());
  }

  /** Splits a query into roughly {@code splitCount} non-overlapping parts. */
  public List<SplitQueryResponse.Part> splitQuery(Context ctx, String keyspace, String query,
      Map<String, ?> bindVars, String splitColumn, long splitCount) throws SQLException {
    SplitQueryRequest.Builder requestBuilder =
        SplitQueryRequest.newBuilder()
            .setKeyspace(keyspace)
            .setQuery(Proto.bindQuery(query, bindVars))
            .setSplitColumn(splitColumn)
            .setSplitCount(splitCount);
    if (ctx.getCallerId() != null) {
      requestBuilder.setCallerId(ctx.getCallerId());
    }
    SplitQueryResponse response = client.splitQuery(ctx, requestBuilder.build());
    return response.getSplitsList();
  }

  /** Fetches the serving topology information for a keyspace. */
  public SrvKeyspace getSrvKeyspace(Context ctx, String keyspace) throws SQLException {
    GetSrvKeyspaceRequest.Builder requestBuilder =
        GetSrvKeyspaceRequest.newBuilder().setKeyspace(keyspace);
    GetSrvKeyspaceResponse response = client.getSrvKeyspace(ctx, requestBuilder.build());
    return response.getSrvKeyspace();
  }

  @Override
  public void close() throws IOException {
    client.close();
  }
}
| |
/*
*
* Copyright (c) Lightstreamer Srl
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package javametest;
import java.util.NoSuchElementException;
import java.util.Vector;
import javax.microedition.lcdui.Command;
import javax.microedition.lcdui.CommandListener;
import javax.microedition.lcdui.Display;
import javax.microedition.lcdui.Displayable;
import javax.microedition.midlet.MIDlet;
import javax.microedition.midlet.MIDletStateChangeException;
import com.lightstreamer.javameclient.midp.ConnectionInfo;
import com.lightstreamer.javameclient.midp.ConnectionPolicy;
import com.lightstreamer.javameclient.midp.ExtendedTableInfo;
import com.lightstreamer.javameclient.midp.LSClient;
import com.lightstreamer.javameclient.midp.MessageInfo;
import com.lightstreamer.javameclient.midp.SimpleTableInfo;
import com.lightstreamer.javameclient.midp.SubscribedTableKey;
import com.lightstreamer.javameclient.midp.logger.ErrorPrompt;
import com.lightstreamer.javameclient.midp.logger.Logger;
/**
 * Class DistinctTester.
 *
 * Demo MIDlet that subscribes to a Lightstreamer DISTINCT chat table and
 * exposes commands to exercise the client API: connect/disconnect, polling
 * vs streaming, subscription changes, frequency limits, reverse heartbeats
 * and message sending.
 */
public class DistinctTester extends MIDlet implements CommandListener {
    static boolean useSingleConnection = false;
    static boolean useReusableItemUpdates = true;
    static boolean useSocket = true;
    static boolean subscribeChat = true;
    // Reverse-heartbeat interval in ms; 0 disables it (toggled by switchHB)
    private long hbTime = 2000;
    static {
        Logger.setDefaultActive(true);
    }
    private TestConnectionListener myConnListener =
            new TestConnectionListener();
    private LSClient myClient;
    // Keys of all currently subscribed tables, used by the frequency and
    // delete commands below
    private Vector subscribedTables = new Vector();
    private TestCanvas myCanvas = new TestCanvas();
    private Display display;
    private Command exit;
    private Command disconnect;
    private Command deleteATable;
    private Command polling;
    private Command streaming;
    private Command addTable;
    private Command switchHB;
    private Command reduceFreq;
    private Command riseFreq;
    private Command sendSomeMessages;
    private Command sendSomeMessagesAndClose;
    private Command switchView;
    private Logger logger = Logger.getLogger("Local logger");
    private ConnectionInfo myCI;
    private ConnectionPolicy myCP;
    private ErrorPrompt prompt;
    private ErrorPrompt sendMessageResponse;
    private TestSendMessageListener tsml;

    /**
     * Constructor DistinctTester.
     * Builds the UI commands and prepares connection info/policy objects.
     */
    public DistinctTester() {
        super();
        logger.log("TEST STARTS");
        display = Display.getDisplay(this);
        display.setCurrent(myCanvas);
        prompt = new ErrorPrompt(this,50);
        sendMessageResponse = new ErrorPrompt(this,200);
        //configure buttons
        disconnect = new Command("Disconnect", Command.STOP, 2);
        myCanvas.addCommand(disconnect);
        deleteATable = new Command("Remove a table", Command.OK, 2);
        myCanvas.addCommand(deleteATable);
        polling = new Command("Polling", Command.OK, 2);
        myCanvas.addCommand(polling);
        streaming = new Command("Streaming", Command.OK, 2);
        myCanvas.addCommand(streaming);
        addTable = new Command("Add a table", Command.OK, 2);
        myCanvas.addCommand(addTable);
        switchHB = new Command("enable/disable reverse HB", Command.OK, 2);
        myCanvas.addCommand(switchHB);
        reduceFreq = new Command("Set a frequency of 0.1", Command.OK, 2);
        myCanvas.addCommand(reduceFreq);
        riseFreq = new Command("Set an unlimited frequency", Command.OK, 2);
        myCanvas.addCommand(riseFreq);
        sendSomeMessages = new Command("Send some messages", Command.OK, 2);
        myCanvas.addCommand(sendSomeMessages);
        sendSomeMessagesAndClose = new Command("Send mexs and close", Command.OK, 2);
        myCanvas.addCommand(sendSomeMessagesAndClose);
        exit = new Command("Quit", Command.STOP, 1);
        myCanvas.addCommand(exit);
        switchView = new Command("Switch", Command.OK, 2);
        myCanvas.addCommand(switchView);
        myCanvas.setCommandListener(this);
        //Configure connection infos
        //change these configurations to point to your local server
        //
        myCI = new ConnectionInfo("localhost");
        myCI.setAdapter("CHAT");
        myCI.setControlInHttps(false);
        myCI.setControlPort(8989);
        myCI.setPort(8989);
        //myCI.setPassword("NO");
        //myCI.setUser("NO");
        myCI.setWorkInHttps(false);
        //Configure connection policy
        myCP = new ConnectionPolicy();
        //myCP.setRetryTimeout(500);
        //myCP.setBufferedStreamingHandled(false);
        //myCP.setIdleTimeout(30000);
        //myCP.setPollingInterval(500);
        //myCP.setKeepAliveInterval(2000);
        //myCP.setTimeoutForStalled(2000);
        //myCP.setTimeoutForReconnect(15000);
    }

    /** Creates an LSClient configured from the static test flags. */
    protected LSClient prepareLSClient() {
        LSClient aClient = new LSClient();
        aClient.useReusableItemUpdates(useReusableItemUpdates);
        aClient.useSingleConnection(useSingleConnection);
        aClient.useSocketConnection(useSocket);
        aClient.setReverseHeartbeatMillis(hbTime);
        return aClient;
    }

    /**
     * Method startApp.
     * Subscribes the chat table (if enabled) and opens the connection.
     * @throws MIDletStateChangeException
     */
    protected void startApp() throws MIDletStateChangeException {
        this.myClient = this.prepareLSClient();
        tsml = new TestSendMessageListener(sendMessageResponse,myClient);
        SubscribedTableKey tableKey;
        if (subscribeChat) {
            //create and subscribe a chat table
            ExtendedTableInfo myChatTable = new ExtendedTableInfo(
                    new String[]{"chat_room"},
                    new String[]{"IP", "timestamp", "nick", "message"}, "DISTINCT");
            myChatTable.setDataAdapter("CHAT_ROOM");
            myChatTable.setSnaspshotRequired(true);
            tableKey = myClient.subscribeTable(myChatTable, new TestHandyListener(new String[]{"IP", "timestamp", "nick", "message"},"ChatFields"), false);
            subscribedTables.addElement(tableKey);
        }
        //new MessageInfoTest(prompt);
        //new MessageSequenceTest(prompt);
        myClient.openConnection(myCI, myConnListener, myCP);
        //myClient.openPollingConnection(myCI,myConnListener, myCp);
        //If we have called no methods on myCP (ie default policies)
        //we could use the overloaded open methods:
        //myClient.openConnection(myCI, myConnListener);
        //myClient.openPollingConnection(myCI,myConnListener);
    }

    /**
     * Method pauseApp.
     */
    protected void pauseApp() {
    }

    /**
     * Method destroyApp.
     *
     * @param arg0 ...
     * @throws MIDletStateChangeException
     */
    protected void destroyApp(boolean arg0) throws MIDletStateChangeException {
        //ends LSClient threads.
        LSClient.closeApp();
    }

    /**
     * Method commandAction.
     * Dispatches the UI commands defined in the constructor.
     *
     * @param com ...
     * @param arg1 ...
     */
    public void commandAction(Command com, Displayable arg1) {
        if (com == disconnect) {
            logger.log("DISCONNECT");
            myClient.closeConnection();
        } else if (com == polling) {
            logger.log("POLLING CONNECTION");
            myClient.openPollingConnection(myCI, myConnListener, myCP);
        } else if (com == streaming) {
            logger.log("STREAMING CONNECTION");
            myClient.openConnection(myCI, myConnListener, myCP);
        } else if (com == sendSomeMessages) {
            logger.log("SEND MESSAGES");
            sendMessageResponse.show();
            sendTestMessages();
        } else if (com == sendSomeMessagesAndClose) {
            logger.log("SEND MESSAGES");
            sendMessageResponse.show();
            sendTestMessages();
            myClient.closeConnection();
        } else if (com == switchHB) {
            hbTime = (hbTime > 0) ? 0 : 2000;
            logger.log("NEW REVERSE HEARTBEAT TIME " + hbTime);
            myClient.setReverseHeartbeatMillis(hbTime);
        } else if (com == reduceFreq) {
            logger.log("REDUCE FREQUENCY LIMIT");
            pauseForSubscriptionChange();
            //get all tables from the Vector...
            for (int i = 0; i < subscribedTables.size(); i++) {
                SubscribedTableKey key = (SubscribedTableKey) subscribedTables.elementAt(i);
                myClient.setMaxFrequency(key, 0.1);
            }
            resumeAfterSubscriptionChange();
        } else if (com == riseFreq) {
            logger.log("INCREASE FREQUENCY LIMIT");
            pauseForSubscriptionChange();
            //get all tables from the Vector...
            for (int i = 0; i < subscribedTables.size(); i++) {
                SubscribedTableKey key = (SubscribedTableKey) subscribedTables.elementAt(i);
                myClient.setFrequencyUnlimited(key);
            }
            resumeAfterSubscriptionChange();
        } else if (com == deleteATable) {
            logger.log("DELETE A TABLE");
            pauseForSubscriptionChange();
            try {
                //get the first table from the Vector...
                SubscribedTableKey toRem =
                        (SubscribedTableKey) subscribedTables.firstElement();
                //...and unsubscribe it...
                myClient.unsubscribeTable(toRem);
                //...then remove from the Vector too.
                subscribedTables.removeElement(toRem);
            } catch (NoSuchElementException nse) {
                logger.log("NO MORE TABLES, CAN'T DELETE");
            }
            resumeAfterSubscriptionChange();
        } else if (com == switchView) {
            prompt.show();
        } else if (com == exit) {
            logger.log("BYE BYE");
            notifyDestroyed();
        }
    }

    /** Sends 100 numbered chat messages through the client (shared by the
     * two "send" commands, which previously duplicated this loop). */
    private void sendTestMessages() {
        for (int i = 1; i<=100; i++) {
            myClient.sendMessage(new MessageInfo("CHAT|Message" + i,"chat",1000),tsml);
        }
    }

    /** In single-connection mode we must disconnect before changing
     * subscriptions; no-op otherwise. */
    private void pauseForSubscriptionChange() {
        if (useSingleConnection) {
            myClient.closeConnection();
        }
    }

    /** In single-connection mode we need to reconnect after changing
     * subscriptions; no-op otherwise. */
    private void resumeAfterSubscriptionChange() {
        if (useSingleConnection) {
            myClient.openConnection(myCI, myConnListener);
        }
    }
}
/*--- Formatted in Lightstreamer Java Convention Style on 2007-02-12 ---*/
| |
package com.sora.util.akatsuki;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.Log;
import android.view.View;
import java.util.Map;
import java.util.WeakHashMap;
/**
* Contains API for working with {@link Retained} annotated fields.
*/
@SuppressWarnings("ALL")
public class Akatsuki {
private static final Map<String, BundleRetainer<?>> CLASS_CACHE = new WeakHashMap<>();
private static final Map<Class<? extends TypeConverter<?>>, TypeConverter<?>> CACHED_CONVERTERS = new WeakHashMap<>();
public static final String RETAINER_CACHE_NAME = "AkatsukiMapping";
public static final String RETAINER_CACHE_PACKAGE = "com.sora.util.akatsuki";
public static final String TAG = "Akatsuki";
static LoggingLevel loggingLevel = LoggingLevel.ERROR_ONLY;
private static RetainerCache retainerCache;
static {
Class<?> retainerCacheClass = null;
try {
retainerCacheClass = Class.forName(RETAINER_CACHE_PACKAGE + "." + RETAINER_CACHE_NAME);
} catch (ClassNotFoundException iggored) {
// we don't have it, that's fine
}
if (retainerCacheClass != null) {
try {
retainerCache = (RetainerCache) retainerCacheClass.newInstance();
} catch (Exception e) {
// we have it but it's broken, not good
throw new RuntimeException("Unable to instantiate RetainerCache", e);
}
}
}
/**
* Logging levels
*/
public enum LoggingLevel {
/**
* Print debug informations at compile time such as class scanning
* progress(this would large output so use with caution)
**/
DEBUG, /**
* Print everything! (class caching, verification, and class
* hierarchy traversal attempts)
*/
VERBOSE, /**
* Prints only errors (when a {@link BundleRetainer} is
* missing for example)
*/
ERROR_ONLY
}
/**
* Sets the current logging level
*/
public static void setLoggingLevel(LoggingLevel level) {
loggingLevel = level;
}
	/**
	 * Returns the current logging level.
	 */
	public static LoggingLevel loggingLevel() {
		return loggingLevel;
	}
/**
* Saves all fields annotated with {@link Retained} into the provided bundle
*
* @param instance
* the object that contains the annotated fields
* @param outState
* the bundle for saving, not null
*/
public static void save(Object instance, Bundle outState) {
if (outState == null)
throw new IllegalArgumentException("outState cannot be null");
getInstance(instance).save(instance, outState);
}
/**
* Restores field saved by {@link #save(Object, Bundle)} back into the
* instance
*
* @param instance
* the object that needs restoring
* @param savedInstanceState
* the bundle containing the saved fields, null-safe
*/
public static void restore(Object instance, Bundle savedInstanceState) {
if (savedInstanceState == null)
return;
getInstance(instance).restore(instance, savedInstanceState);
}
/**
* Like {@link #save(Object, Bundle)} but included some View state aware
* logic, use this if you want to save view states. Typical usage looks
* like:
* <p>
*
* <pre>
* {@code
* @Override
* protected Parcelable onSaveInstanceState() {
* return Akatsuki.save(this, super.onSaveInstanceState());
* }
* }
* </pre>
*
* @param view
* the view containing annotated fields
* @param parcelable
* from {@code super.onSaveInstanceState()}
* @return a parcelable to returned in {@link View#onSaveInstanceState()}
*/
public static Parcelable save(View view, Parcelable parcelable) {
Bundle bundle = new Bundle();
bundle.putParcelable(view.getClass().getName(), parcelable);
save((Object) view, bundle);
return bundle;
}
/**
 * Restores states saved by {@link #save(View, Parcelable)}. Typical usage:
 * <p>
 *
 * <pre>
 * {@code
 * @Override
 * protected void onRestoreInstanceState(Parcelable state) {
 *     super.onRestoreInstanceState(Akatsuki.restore(this, state));
 * }
 * }
 * </pre>
 *
 * @param view
 *            the view that requires restoring
 * @param parcelable
 *            restored state from the parameter of
 *            {@link View#onRestoreInstanceState(Parcelable)}
 * @return a parcelable to be passed to
 *         {@code super.onRestoreInstanceState()}
 */
public static Parcelable restore(View view, Parcelable parcelable) {
    // Anything other than our own Bundle wrapper means the state was not
    // produced by save(View, Parcelable); fail fast.
    if (!(parcelable instanceof Bundle)) {
        throw new RuntimeException("View state of view " + view.getClass()
                + " is not saved with Akatsuki View.onSaveInstanceState()");
    }
    final Bundle bundle = (Bundle) parcelable;
    restore((Object) view, bundle);
    // Hand back the superclass state that save() tucked away.
    return bundle.getParcelable(view.getClass().getName());
}
/**
 * Serializes the given instance's annotated fields into a new Bundle.
 *
 * @param instance
 *            the object to serialize
 * @return a freshly created bundle containing the saved fields
 */
public static Bundle serialize(Object instance) {
    final Bundle target = new Bundle();
    save(instance, target);
    return target;
}
/**
 * Deserializes the given bundle back into the provided instance.
 *
 * @param instance
 *            the already-instantiated target object
 * @param bundle
 *            the bundle produced by {@link #serialize(Object)}
 * @return the same instance, with its fields restored
 */
public static <T> T deserialize(T instance, Bundle bundle) {
    // restore() mutates the instance in place; returning it allows chaining.
    restore(instance, bundle);
    return instance;
}
/**
 * Same as {@link #deserialize(Object, Bundle)} but takes an
 * {@link InstanceSupplier}, so the caller does not need to create the
 * instance up front.
 *
 * @param supplier
 *            the instance supplier
 * @param bundle
 *            the bundle
 * @return the supplied instance after deserialization
 */
public static <T> T deserialize(InstanceSupplier<T> supplier, Bundle bundle) {
    // Let the supplier build the instance, then delegate to the plain overload.
    return deserialize(supplier.create(), bundle);
}
/**
 * An interface that supplies {@link #deserialize(InstanceSupplier, Bundle)}
 * a working instance to deserialize into.
 *
 * @param <T>
 *            the type of the instance
 */
public interface InstanceSupplier<T> {
/**
 * Creates the instance to be populated during deserialization.
 */
T create();
}
/**
 * Looks up (or creates and caches) the generated {@link BundleRetainer} for
 * the runtime class of the given object. Results are cached by fully
 * qualified class name in {@code CLASS_CACHE}.
 *
 * @param clazz
 *            the object whose class determines which retainer to use
 * @return the (possibly cached) retainer for the object's class
 */
@SuppressWarnings("unchecked")
private static <T> BundleRetainer<T> getInstance(T clazz) {
    final String fqcn = clazz.getClass().getName();
    BundleRetainer<T> instance = (BundleRetainer<T>) CLASS_CACHE.get(fqcn);
    if (instance == null) {
        if (loggingLevel == LoggingLevel.VERBOSE)
            Log.i(TAG, "cache miss for class " + fqcn);
        instance = Internal.createRetainer(Thread.currentThread().getContextClassLoader(),
                retainerCache, fqcn, clazz.getClass());
        CLASS_CACHE.put(fqcn, instance);
    } else {
        if (loggingLevel == LoggingLevel.VERBOSE)
            // FIX: was "cache hit for class" with no trailing space, which
            // logged e.g. "cache hit for classcom.foo.Bar"; now matches the
            // cache-miss message format above.
            Log.i(TAG, "cache hit for class " + fqcn);
    }
    return instance;
}
/**
 * Clears all cached {@link BundleRetainer} instances; subsequent calls to
 * {@code getInstance} will recreate and re-cache them.
 */
private static void discardCache() {
CLASS_CACHE.clear();
}
/**
 * Finds the converter in the cache or creates and caches one. <b>This is
 * not the method you are looking for</b>
 *
 * @param key
 *            the class of the converter
 * @return a cached converter instance; if instantiation fails, an
 *         {@code InvalidTypeConverter} wrapping the failure is cached and
 *         returned instead
 */
@SuppressWarnings("unchecked")
public static <T> TypeConverter<T> converter(Class<? extends TypeConverter<T>> key) {
    final TypeConverter<T> cached = (TypeConverter<T>) CACHED_CONVERTERS.get(key);
    if (cached != null) {
        return cached;
    }
    TypeConverter<T> created;
    try {
        created = key.newInstance();
    } catch (Exception e) {
        // Instantiation failure is memoized as a marker converter that
        // reports the original exception when used.
        created = new InvalidTypeConverter(e);
    }
    CACHED_CONVERTERS.put(key, created);
    return created;
}
}
| |
/*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.utils.pool;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Queue;
import java.util.Random;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
/**
 * Contention tests for {@code QueuedKeyedResourcePool}: reuses the setup of
 * {@code KeyedResourcePoolContentionTest} but drives the queued
 * (asynchronous resource-request) variant of the pool from many threads.
 */
public class QueuedKeyedResourcePoolContentionTest extends KeyedResourcePoolContentionTest {
// Pool under test; also installed as super.pool so inherited tests exercise
// the same instance.
protected QueuedKeyedResourcePool<String, TestResource> queuedPool;
@Before
@Override
public void setUp() {
super.setUp();
this.queuedPool = new QueuedKeyedResourcePool<String, TestResource>(factory, config);
super.pool = queuedPool;
// Reset the shared static counters so each test starts from a clean slate.
TestResourceRequest.usedResourceCount.set(0);
TestResourceRequest.handledTimeoutCount.set(0);
TestResourceRequest.handledExceptionCount.set(0);
}
// Many enqueuer threads hammer the request queue for a single key; at the
// end the pool must be fully checked in with no outstanding requests and
// every enqueued request must have been served exactly once.
@Test
public void contendForQueue() throws Exception {
// Over ride some set up
super.config = new ResourcePoolConfig().setMaxPoolSize(POOL_SIZE)
.setTimeout(TIMEOUT_MS * 50, TimeUnit.MILLISECONDS);
this.queuedPool = new QueuedKeyedResourcePool<String, TestResource>(factory, config);
super.pool = queuedPool;
int numEnqueuers = POOL_SIZE * 2;
int numEnqueues = 10 * 1000;
String key = "Key";
float invalidationRate = (float) 0.25;
// First latch: all threads start together; second: all threads finished.
CountDownLatch waitForThreads = new CountDownLatch(numEnqueuers);
CountDownLatch waitForEnqueuers = new CountDownLatch(numEnqueuers);
for(int i = 0; i < numEnqueuers; ++i) {
new Thread(new Enqueuers(waitForThreads,
waitForEnqueuers,
key,
numEnqueues,
invalidationRate)).start();
}
try {
waitForEnqueuers.await();
// Pool must be back at steady state: all resources created, all checked
// in, and no requests left registered in the queue.
assertEquals(POOL_SIZE, this.queuedPool.getTotalResourceCount());
assertEquals(POOL_SIZE, this.queuedPool.getCheckedInResourceCount());
assertEquals(0, this.queuedPool.getRegisteredResourceRequestCount());
assertEquals(numEnqueuers * numEnqueues, TestResourceRequest.usedResourceCount.get());
assertEquals(0, TestResourceRequest.handledTimeoutCount.get());
assertEquals(0, TestResourceRequest.handledExceptionCount.get());
} catch(InterruptedException e) {
e.printStackTrace();
}
}
// Mixes asynchronous enqueuers with synchronous checkout threads (Checkers,
// defined in the superclass) so both acquisition paths contend at once.
@Test
public void contendForQueueAndPool() throws Exception {
// Over ride some set up
super.config = new ResourcePoolConfig().setMaxPoolSize(POOL_SIZE)
.setTimeout(TIMEOUT_MS * 100, TimeUnit.MILLISECONDS);
this.queuedPool = new QueuedKeyedResourcePool<String, TestResource>(factory, config);
super.pool = queuedPool;
int numEnqueuers = POOL_SIZE;
int numCheckers = POOL_SIZE;
int numEnqueues = 10 * 1000;
String key = "Key";
float invalidationRate = (float) 0.25;
CountDownLatch waitForThreadsStart = new CountDownLatch(numEnqueuers + numCheckers);
CountDownLatch waitForThreadsEnd = new CountDownLatch(numEnqueuers + numCheckers);
for(int i = 0; i < numEnqueuers; ++i) {
new Thread(new Enqueuers(waitForThreadsStart,
waitForThreadsEnd,
key,
numEnqueues,
invalidationRate)).start();
}
for(int i = 0; i < numCheckers; ++i) {
new Thread(new Checkers(waitForThreadsStart,
waitForThreadsEnd,
key,
numEnqueues,
invalidationRate)).start();
}
try {
waitForThreadsEnd.await();
// Invalidation may have shrunk the pool, so only assert internal
// consistency (checked-in == total) rather than an absolute size.
assertEquals(this.queuedPool.getCheckedInResourceCount(),
this.queuedPool.getTotalResourceCount());
assertEquals(0, this.queuedPool.getRegisteredResourceRequestCount());
assertEquals(numEnqueuers * numEnqueues, TestResourceRequest.usedResourceCount.get());
assertEquals(0, TestResourceRequest.handledTimeoutCount.get());
assertEquals(0, TestResourceRequest.handledExceptionCount.get());
} catch(InterruptedException e) {
e.printStackTrace();
}
}
/**
 * Worker that registers asynchronous resource requests against the queued
 * pool and drains the resources delivered into its private queue, checking
 * each one back in (occasionally invalidating it first).
 */
public class Enqueuers implements Runnable {
private final CountDownLatch startSignal;
private final CountDownLatch doneSignal;
private final String key;
private final int enqueues;
// Number of delivered resources processed so far by this worker.
private int used;
// Resources handed to this worker by satisfied TestResourceRequests.
Queue<TestResource> resources;
private Random random;
private float invalidationRate;
Enqueuers(CountDownLatch startSignal,
CountDownLatch doneSignal,
String key,
int enqueues,
float invalidationRate) {
this.startSignal = startSignal;
this.doneSignal = doneSignal;
this.key = key;
this.enqueues = enqueues;
this.used = 0;
resources = new ConcurrentLinkedQueue<TestResource>();
this.random = new Random();
this.invalidationRate = invalidationRate;
}
// Takes at most one delivered resource off the queue, optionally
// invalidates it, and checks it back into the pool.
private void processAtMostOneEnqueuedResource() throws Exception {
TestResource tr = resources.poll();
if(tr != null) {
this.used++;
assertTrue(tr.isValid());
// Invalidate some resources (except on last few check ins)
float f = random.nextFloat();
if(f < invalidationRate && this.used < this.enqueues - POOL_SIZE) {
tr.invalidate();
}
Thread.yield();
queuedPool.checkin(key, tr);
Thread.yield();
}
}
@Override
public void run() {
// Rendezvous: wait until every worker thread has been started so they
// all begin contending at the same time.
startSignal.countDown();
try {
startSignal.await();
} catch(InterruptedException e) {
e.printStackTrace();
}
try {
for(int i = 0; i < enqueues; ++i) {
// NOTE(review): config.getTimeout(TimeUnit.NANOSECONDS) should already
// return nanoseconds, so wrapping it in MILLISECONDS.toNanos(...) looks
// like a unit mix-up (inflates the deadline by 1e6) — confirm intent.
long deadlineNs = System.nanoTime()
+ TimeUnit.MILLISECONDS.toNanos(config.getTimeout(TimeUnit.NANOSECONDS));
queuedPool.registerResourceRequest(key, new TestResourceRequest(deadlineNs,
resources));
Thread.yield();
processAtMostOneEnqueuedResource();
}
// Drain any remaining deliveries until every enqueue was serviced.
while(this.used < enqueues) {
processAtMostOneEnqueuedResource();
Thread.yield();
}
} catch(Exception e) {
fail(e.toString());
}
doneSignal.countDown();
}
}
}
| |
package com.tygron.tools.explorer.gui;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.ScrollPane.ScrollBarPolicy;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.util.Callback;
import com.tygron.pub.logger.Log;
import com.tygron.pub.utils.StringUtils;
import com.tygron.tools.explorer.logic.ExplorerCommunicator;
/**
 * Miller-column style browser pane: nested maps of game data are rendered as
 * a horizontal row of list views, where selecting an entry in column N
 * creates/replaces column N+1 with that entry's contents.
 */
public class DataPane extends GameExplorerSubPane {
// List cell that renders a map entry via generateName(); also provides the
// factory that produces these cells for a ListView.
private class KeyValueCell extends ListCell<Entry<?, ?>> {
private Callback<ListView<Entry<?, ?>>, javafx.scene.control.ListCell<Entry<?, ?>>> getKeyValueCellFactory() {
return new Callback<ListView<Entry<?, ?>>, javafx.scene.control.ListCell<Entry<?, ?>>>() {
@Override
public ListCell<Entry<?, ?>> call(ListView<Entry<?, ?>> listView) {
return new KeyValueCell();
}
};
}
@Override
public void updateItem(Entry<?, ?> item, boolean empty) {
super.updateItem(item, empty);
if (empty || item == null) {
// Empty cells must be fully cleared so stale text is not reused.
setText(null);
setGraphic(null);
} else {
setText(generateName(item.getKey(), item.getValue()));
}
}
}
// Callback invoked when a SelectionPane's selection is changed
// programmatically (see setSelectionAndUpdateListener).
private interface SelectionListener {
public void updateSelection(Object selection);
}
// One column of the browser: tracks the currently selected key/value pair
// and optionally notifies a listener when the selection is set in code.
private class SelectionPane extends Pane {
private Object selectionKey = null;
private Object selectionValue = null;
private SelectionListener listener = null;
public Object getSelectionKey() {
return this.selectionKey;
}
public Object getSelectionValue() {
return this.selectionValue;
}
// Records the selection without firing the listener (used for user clicks,
// where the click handler does the follow-up work itself).
public void setSelection(Object selectionKey, Object selectionValue) {
this.selectionKey = selectionKey;
this.selectionValue = selectionValue;
}
// Records only the key and fires the listener, which is expected to locate
// the entry, fill in the value and cascade to the next column.
public void setSelectionAndUpdateListener(Object selection) {
this.selectionKey = selection;
if (listener != null) {
listener.updateSelection(selection);
}
}
public void setSelectionListener(SelectionListener listener) {
this.listener = listener;
}
}
// Builds a human-readable label for a map entry: String keys are used
// verbatim, Integer keys are prefixed ("id: "), and a nested map's "name"
// field is appended when present; falls back to "Nameless data".
private static String generateName(final Object key, final Object value) {
String name = StringUtils.EMPTY;
if (key != null) {
if (key instanceof String) {
return (String) key;
} else if (key instanceof Integer) {
name = key.toString() + ": ";
}
}
if (value != null) {
if (value instanceof Map) {
Object valueName = ((Map<?, ?>) value).get("name");
if (valueName != null) {
name += valueName.toString();
}
}
}
if (StringUtils.isEmpty(name)) {
name = "Nameless data";
}
return name;
}
private final ScrollPane scrollPane = new ScrollPane();
// Horizontal container holding one SelectionPane per nesting level.
private final HBox horizontalPane = new HBox();
public DataPane(ExplorerCommunicator communicator) {
super(communicator);
GameExplorerPane.fill(scrollPane, 0.0);
scrollPane.setHbarPolicy(ScrollBarPolicy.ALWAYS);
scrollPane.setVbarPolicy(ScrollBarPolicy.NEVER);
scrollPane.setContent(horizontalPane);
scrollPane.setFitToHeight(true);
// selectionPane.resize(scrollPane.getWidth(), scrollPane.getHeight());
horizontalPane.minWidthProperty().bind(scrollPane.widthProperty());
horizontalPane.maxHeightProperty().bind(scrollPane.heightProperty());
// horizontalPane.prefWidthProperty().bind(scrollPane.widthProperty().subtract(1));
// Keep the view scrolled to the right so the newest column stays visible.
horizontalPane.widthProperty().addListener(new ChangeListener<Number>() {
@Override
public void changed(ObservableValue<? extends Number> observableValue, Number oldValue,
Number newValue) {
scrollPane.setHvalue(scrollPane.getHmax());
}
});
this.getChildren().add(scrollPane);
}
/**
 * The first display of data: renders the top-level map as column 0.
 */
public void displayData(Map<String, Map<Integer, Map<?, ?>>> map) {
Node node = getPaneWithMap(0, map);
insertData(0, node);
}
/**
 * The processing of data updates. It gets a list of current selections, rerenders the datalists, and then
 * select the proper entry in each list if possible.
 */
public void displayDataUpdate(Map<String, Map<Integer, Map<?, ?>>> map) {
List<Object> selections = getSelections();
SelectionPane node = getPaneWithMap(0, map);
insertData(0, node);
// Re-apply the previous selection chain column by column until a selection
// can no longer be restored, then inform the communicator of the deepest
// restorable value.
boolean nextSelectionExists = true;
for (int i = 0; nextSelectionExists; i++) {
nextSelectionExists = setSelection(i, selections);
if (!nextSelectionExists) {
getCommunicator().selectDataByUpdate(getMapLink(), getSelection(i - 1, true));
}
}
}
/**
 * Most (if not all) selectable data will be in the form of map entries.
 * Renders the entry's value as the next column (when it is a map), or
 * reports that the maximum depth was reached.
 */
private void displayEntry(final int index, final Entry<?, ?> entry) {
SelectionPane newContent = null;
setStatus("Selected: " + entry.getKey().toString());
Object value = entry.getValue();
if (value == null) {
Log.info("Content is null");
return;
} else if (value instanceof Map<?, ?>) {
newContent = getPaneWithMap(index, (Map<?, ?>) value);
} else {
setStatus("Reached max depth. Content is: " + value.toString());
Log.info("Reached max depth. Content is: " + value.toString());
}
if (newContent == null) {
return;
}
newContent.prefHeightProperty().bind(heightProperty());
insertData(index, newContent);
}
// The selection key of column 0 — presumably the "map link" (data category)
// name used by the communicator.
private String getMapLink() {
return (String) getSelection(0, false);
}
// Builds one browser column: a ListView over the map's entries (sorted by
// display name), wired with click handling and programmatic-selection
// support via the pane's SelectionListener.
private SelectionPane getPaneWithMap(final int index, final Map<? extends Object, ? extends Object> map) {
final SelectionPane pane = new SelectionPane();
final ListView<Entry<?, ?>> dataListView = new ListView<Entry<?, ?>>();
dataListView.setCellFactory(new KeyValueCell().getKeyValueCellFactory());
dataListView.setOnMouseClicked(new EventHandler<MouseEvent>() {
@Override
public void handle(MouseEvent event) {
Entry<?, ?> selectedEntry = dataListView.getSelectionModel().getSelectedItem();
handleSelectedEntry(pane, selectedEntry, index);
}
});
dataListView.getItems().setAll(map.entrySet());
// Sort entries by their rendered label so the column reads alphabetically.
java.util.Collections.sort(dataListView.getItems(), new java.util.Comparator<Entry<?, ?>>() {
@Override
public int compare(Entry<? extends Object, ? extends Object> o1,
Entry<? extends Object, ? extends Object> o2) {
int result = 0;
result = generateName(o1.getKey(), o1.getValue()).compareTo(
generateName(o2.getKey(), o2.getValue()));
return result;
}
});
pane.setSelectionListener(new SelectionListener() {
@Override
// What to do when the selection is changed with side effects.
public void updateSelection(Object selection) {
pane.setSelection(null, null);
dataListView.getSelectionModel().clearSelection();
for (Entry<?, ?> entry : dataListView.getItems()) {
if (entry.getKey().equals(selection)) {
dataListView.getSelectionModel().select(entry);
pane.setSelection(selection, entry.getValue());
dataListView.scrollTo(entry);
// Cascade: render the selected entry as the next column.
displayEntry(index + 1, entry);
break;
}
}
}
});
pane.minWidthProperty().bind(dataListView.minWidthProperty());
pane.prefWidthProperty().bind(dataListView.prefWidthProperty());
pane.maxWidthProperty().bind(dataListView.maxWidthProperty());
dataListView.minHeightProperty().bind(pane.heightProperty());
dataListView.maxHeightProperty().bind(pane.heightProperty());
pane.getChildren().add(dataListView);
return pane;
}
// Returns the selected key (getValue == false) or value (getValue == true)
// of the column at the given index, or null when out of range.
private Object getSelection(final int index, boolean getValue) {
Object selection = null;
if (index < 0) {
return selection;
}
if (horizontalPane.getChildren().size() <= index) {
return selection;
}
Node node = horizontalPane.getChildren().get(index);
selection = getValue ? ((SelectionPane) node).getSelectionValue() : ((SelectionPane) node)
.getSelectionKey();
return selection;
}
// Collects the chain of selected keys, one per column, stopping early if a
// child is unexpectedly not a SelectionPane.
private List<Object> getSelections() {
List<Object> selections = new LinkedList<Object>();
for (Node node : horizontalPane.getChildren()) {
if (!(node instanceof SelectionPane)) {
Log.warning("Node encountered not of type " + SelectionPane.class.getSimpleName()
+ ". Stopping retrieving selections...");
return selections;
}
Object selection = ((SelectionPane) node).getSelectionKey();
if (selection != null) {
selections.add(selection);
}
}
return selections;
}
// Handles a user click on an entry: logs it, records the selection on the
// pane, renders the next column and notifies the communicator.
private void handleSelectedEntry(final SelectionPane pane, final Entry<?, ?> selectedEntry,
final int index) {
if (selectedEntry == null) {
return;
}
String selection = StringUtils.EMPTY;
if (selectedEntry.getKey() != null) {
selection = selectedEntry.getKey().toString();
}
// Keep the log line readable: only append short values.
if (selectedEntry.toString().length() < 500 && selectedEntry.getValue() != null) {
if (!selection.equals(StringUtils.EMPTY)) {
selection += ": ";
}
selection += selectedEntry.getValue().toString();
}
if (selection.equals(StringUtils.EMPTY)) {
selection = "Nameless";
}
Log.verbose("Clicked on " + selection);
pane.setSelection(selectedEntry.getKey(), selectedEntry.getValue());
displayEntry(index + 1, selectedEntry);
getCommunicator().selectData(null, selectedEntry.getValue());
}
/**
 * Insert the new node into the scene graph: replaces the column at the
 * given index (discarding all deeper columns) or appends it at the end.
 */
private void insertData(final int index, final Node node) {
// TODO: insertData should be run on the javafx thread
if (node == null) {
return;
}
if (horizontalPane.getChildren().size() > index) {
// Drop this column and everything to its right before re-adding.
horizontalPane.getChildren().subList(index, horizontalPane.getChildren().size()).clear();
horizontalPane.getChildren().add(index, node);
} else {
horizontalPane.getChildren().add(node);
}
scrollPane.setHvalue(scrollPane.getHmax());
if (!(node instanceof SelectionPane)) {
Log.warning(this.getClass().getSimpleName() + "'s child at index " + index
+ " is not a SelectionPane.");
return;
}
}
/**
 * Set the entry at a given index. This will trigger the next SelectionPane to be recreated.
 * Returns false when the selection could not be (or was not) applied.
 */
private boolean setSelection(final int index, List<Object> selections) {
if (selections == null) {
return false;
}
if (selections.size() <= index) {
return false;
}
if (selections.get(index) == null) {
return false;
}
if (horizontalPane.getChildren().size() <= index) {
return false;
}
SelectionPane selectionPane = (SelectionPane) horizontalPane.getChildren().get(index);
selectionPane.setSelectionAndUpdateListener(selections.get(index));
Log.info("Selection " + index + " set to " + selections.get(index));
// The listener clears the key first; it stays null when the previously
// selected entry no longer exists in the refreshed data.
if (selectionPane.getSelectionKey() == null) {
return false;
}
return true;
}
}
| |
/******************************************************************************
*
* Copyright 2011-2012 Tavendo GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package de.tavendo.autobahn;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.Socket;
import java.net.SocketException;
import java.nio.ByteBuffer;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.util.Pair;
import de.tavendo.autobahn.WebSocketMessage.WebSocketCloseCode;
/**
* WebSocket reader, the receiving leg of a WebSockets connection.
* This runs on it's own background thread and posts messages to master
* thread's message queue for there to be consumed by the application.
* The only method that needs to be called (from foreground thread) is quit(),
* which gracefully shuts down the background receiver thread.
*/
public class WebSocketReader extends Thread {
private static final String TAG = WebSocketReader.class.getCanonicalName();
// Reader lifecycle: connecting (handshake pending), open (frames flowing),
// closing (close handshake in progress) and closed.
private static enum ReaderState {
STATE_CLOSED,
STATE_CONNECTING,
STATE_CLOSING,
STATE_OPEN
}
// Handler of the master (foreground) thread; all events are posted to it.
private final Handler mWebSocketConnectionHandler;
private final Socket mSocket;
private InputStream mInputStream;
private final WebSocketOptions mWebSocketOptions;
// Set by quit() to ask the reader loop to terminate; volatile because it is
// written from the master thread and read from this background thread.
private volatile boolean mStopped = false;
// Raw bytes read from the socket before being copied into mApplicationBuffer.
private final byte[] mNetworkBuffer;
// Accumulates network data until a complete handshake/frame can be parsed.
private final ByteBuffer mApplicationBuffer;
// Accumulates payloads of (possibly fragmented) message frames.
private NoCopyByteArrayOutputStream mMessagePayload;
private ReaderState mState;
// True while between the first and final frame of a fragmented message.
private boolean mInsideMessage = false;
// Opcode of the message currently being assembled (1 = text, 2 = binary).
private int mMessageOpcode;
// Header of the frame currently being received; null when between frames.
private WebSocketFrameHeader mFrameHeader;
private Utf8Validator mUTF8Validator = new Utf8Validator();
/**
 * Create new WebSockets background reader.
 *
 * @param master The message handler of master (foreground thread).
 * @param socket The socket channel created on foreground thread.
 * @param options WebSocket options; the max frame/message payload sizes
 *                determine the buffer capacities allocated here.
 * @param threadName Name given to this background thread.
 */
public WebSocketReader(Handler master, Socket socket, WebSocketOptions options, String threadName) {
super(threadName);
this.mWebSocketConnectionHandler = master;
this.mSocket = socket;
this.mWebSocketOptions = options;
this.mNetworkBuffer = new byte[4096];
// +14 = worst-case frame header size on top of the max frame payload.
this.mApplicationBuffer = ByteBuffer.allocateDirect(options.getMaxFramePayloadSize() + 14);
this.mMessagePayload = new NoCopyByteArrayOutputStream(options.getMaxMessagePayloadSize());
this.mFrameHeader = null;
this.mState = ReaderState.STATE_CONNECTING;
Log.d(TAG, "WebSocket reader created.");
}
/**
 * Graceful shutdown of background reader thread (called from master).
 * Only sets the volatile stop flag; the reader loop observes it and exits.
 */
public void quit() {
mStopped = true;
Log.d(TAG, "quit");
}
/**
 * Notify the master (foreground thread) of a received and unwrapped
 * WebSockets message by posting it to the master's message queue.
 *
 * @param message Message to send to master.
 */
protected void notify(Object message) {
    // Obtain a message pre-targeted at the master handler and dispatch it.
    final Message envelope = Message.obtain(mWebSocketConnectionHandler);
    envelope.obj = message;
    envelope.sendToTarget();
}
/**
 * Process incoming WebSockets data (after handshake).
 *
 * Works on the bytes accumulated in mApplicationBuffer: first parses the
 * frame header into mFrameHeader, then — once the complete frame has been
 * buffered — dispatches control frames (close/ping/pong) or assembles
 * message frames (text/binary, possibly fragmented) and notifies the
 * master thread. Enforces RFC 6455 protocol compliance and throws
 * WebSocketException on any violation.
 *
 * @return true when another pass may make progress (a complete frame is
 *         ready or buffered data remains), false when more network data
 *         is needed first.
 */
private boolean processData() throws Exception {
    // outside frame?
    if (mFrameHeader == null) {
        // need at least 2 bytes from WS frame header to start processing
        if (mApplicationBuffer.position() >= 2) {
            byte b0 = mApplicationBuffer.get(0);
            boolean fin = (b0 & 0x80) != 0;
            int rsv = (b0 & 0x70) >> 4;
            int opcode = b0 & 0x0f;
            byte b1 = mApplicationBuffer.get(1);
            boolean masked = (b1 & 0x80) != 0;
            int payload_len1 = b1 & 0x7f;
            // now check protocol compliance
            if (rsv != 0) {
                throw new WebSocketException("RSV != 0 and no extension negotiated");
            }
            if (masked) {
                // currently, we don't allow this. need to see whats the final spec.
                throw new WebSocketException("masked server frame");
            }
            if (opcode > 7) {
                // control frame
                if (!fin) {
                    throw new WebSocketException("fragmented control frame");
                }
                if (payload_len1 > 125) {
                    throw new WebSocketException("control frame with payload length > 125 octets");
                }
                if (opcode != 8 && opcode != 9 && opcode != 10) {
                    throw new WebSocketException("control frame using reserved opcode " + opcode);
                }
                if (opcode == 8 && payload_len1 == 1) {
                    throw new WebSocketException("received close control frame with payload len 1");
                }
            } else {
                // message frame
                if (opcode != 0 && opcode != 1 && opcode != 2) {
                    throw new WebSocketException("data frame using reserved opcode " + opcode);
                }
                if (!mInsideMessage && opcode == 0) {
                    throw new WebSocketException("received continuation data frame outside fragmented message");
                }
                if (mInsideMessage && opcode != 0) {
                    throw new WebSocketException("received non-continuation data frame while inside fragmented message");
                }
            }
            int mask_len = masked ? 4 : 0;
            int header_len = 0;
            if (payload_len1 < 126) {
                header_len = 2 + mask_len;
            } else if (payload_len1 == 126) {
                header_len = 2 + 2 + mask_len;
            } else if (payload_len1 == 127) {
                header_len = 2 + 8 + mask_len;
            } else {
                // should not arrive here
                throw new Exception("logic error");
            }
            // continue when complete frame header is available
            if (mApplicationBuffer.position() >= header_len) {
                // determine frame payload length
                int i = 2;
                long payload_len = 0;
                if (payload_len1 == 126) {
                    payload_len = ((0xff & mApplicationBuffer.get(i)) << 8) | (0xff & mApplicationBuffer.get(i+1));
                    if (payload_len < 126) {
                        throw new WebSocketException("invalid data frame length (not using minimal length encoding)");
                    }
                    i += 2;
                } else if (payload_len1 == 127) {
                    if ((0x80 & mApplicationBuffer.get(i+0)) != 0) {
                        throw new WebSocketException("invalid data frame length (> 2^63)");
                    }
                    // FIX: each byte must be widened to long BEFORE shifting.
                    // Java masks an int shift count mod 32 (JLS 15.19), so the
                    // previous int-typed shifts by 56/48/40/32 actually shifted
                    // by 24/16/8/0 and computed a wrong 64-bit length.
                    payload_len = ((long) (0xff & mApplicationBuffer.get(i+0)) << 56) |
                                  ((long) (0xff & mApplicationBuffer.get(i+1)) << 48) |
                                  ((long) (0xff & mApplicationBuffer.get(i+2)) << 40) |
                                  ((long) (0xff & mApplicationBuffer.get(i+3)) << 32) |
                                  ((long) (0xff & mApplicationBuffer.get(i+4)) << 24) |
                                  ((long) (0xff & mApplicationBuffer.get(i+5)) << 16) |
                                  ((long) (0xff & mApplicationBuffer.get(i+6)) << 8) |
                                  ((long) (0xff & mApplicationBuffer.get(i+7)));
                    if (payload_len < 65536) {
                        throw new WebSocketException("invalid data frame length (not using minimal length encoding)");
                    }
                    i += 8;
                } else {
                    payload_len = payload_len1;
                }
                // immediately bail out on frame too large
                if (payload_len > mWebSocketOptions.getMaxFramePayloadSize()) {
                    throw new WebSocketException("frame payload too large");
                }
                // save frame header metadata
                mFrameHeader = new WebSocketFrameHeader();
                mFrameHeader.setOpcode(opcode);
                mFrameHeader.setFin(fin);
                mFrameHeader.setReserved(rsv);
                mFrameHeader.setPayloadLength((int) payload_len);
                mFrameHeader.setHeaderLength(header_len);
                mFrameHeader.setTotalLen(mFrameHeader.getHeaderLength() + mFrameHeader.getPayloadLength());
                if (masked) {
                    // Currently unreachable (masked server frames are rejected
                    // above); kept for completeness.
                    byte[] mask = new byte[4];
                    for (int j = 0; j < 4; ++j) {
                        // FIX: index the mask with j (was mask[i], which wrote
                        // outside the 4-byte array and never filled the mask).
                        mask[j] = (byte) (0xff & mApplicationBuffer.get(i + j));
                    }
                    mFrameHeader.setMask(mask);
                    i += 4;
                } else {
                    mFrameHeader.setMask(null);
                }
                // continue processing when payload empty or completely buffered
                return mFrameHeader.getPayloadLength() == 0 || mApplicationBuffer.position() >= mFrameHeader.getTotalLength();
            } else {
                // need more data
                return false;
            }
        } else {
            // need more data
            return false;
        }
    } else {
        /// \todo refactor this for streaming processing, incl. fail fast on invalid UTF-8 within frame already
        // within frame
        // see if we buffered complete frame
        if (mApplicationBuffer.position() >= mFrameHeader.getTotalLength()) {
            // cut out frame payload
            byte[] framePayload = null;
            int oldPosition = mApplicationBuffer.position();
            if (mFrameHeader.getPayloadLength() > 0) {
                framePayload = new byte[mFrameHeader.getPayloadLength()];
                mApplicationBuffer.position(mFrameHeader.getHeaderLength());
                mApplicationBuffer.get(framePayload, 0, (int) mFrameHeader.getPayloadLength());
            }
            // Remove the consumed frame, keeping any trailing bytes buffered.
            mApplicationBuffer.position(mFrameHeader.getTotalLength());
            mApplicationBuffer.limit(oldPosition);
            mApplicationBuffer.compact();
            if (mFrameHeader.getOpcode() > 7) {
                // control frame
                if (mFrameHeader.getOpcode() == 8) {
                    int code = WebSocketCloseCode.RESERVED_NO_STATUS;
                    String reason = null;
                    if (mFrameHeader.getPayloadLength() >= 2) {
                        // parse and check close code
                        code = (framePayload[0] & 0xff) * 256 + (framePayload[1] & 0xff);
                        if (code < 1000
                                || (code >= 1000 && code <= 2999 &&
                                code != 1000 && code != 1001 && code != 1002 && code != 1003 && code != 1007 && code != 1008 && code != 1009 && code != 1010 && code != 1011)
                                || code >= 5000) {
                            throw new WebSocketException("invalid close code " + code);
                        }
                        // parse and check close reason
                        if (mFrameHeader.getPayloadLength() > 2) {
                            byte[] ra = new byte[mFrameHeader.getPayloadLength() - 2];
                            System.arraycopy(framePayload, 2, ra, 0, mFrameHeader.getPayloadLength() - 2);
                            Utf8Validator val = new Utf8Validator();
                            val.validate(ra);
                            if (!val.isValid()) {
                                throw new WebSocketException("invalid close reasons (not UTF-8)");
                            } else {
                                reason = new String(ra, WebSocket.UTF8_ENCODING);
                            }
                        }
                    }
                    onClose(code, reason);
                } else if (mFrameHeader.getOpcode() == 9) {
                    // dispatch WS ping
                    onPing(framePayload);
                } else if (mFrameHeader.getOpcode() == 10) {
                    // dispatch WS pong
                    onPong(framePayload);
                } else {
                    // should not arrive here (handled before)
                    throw new Exception("logic error");
                }
            } else {
                // message frame
                if (!mInsideMessage) {
                    // new message started
                    mInsideMessage = true;
                    mMessageOpcode = mFrameHeader.getOpcode();
                    if (mMessageOpcode == 1 && mWebSocketOptions.getValidateIncomingUtf8()) {
                        mUTF8Validator.reset();
                    }
                }
                if (framePayload != null) {
                    // immediately bail out on message too large
                    if (mMessagePayload.size() + framePayload.length > mWebSocketOptions.getMaxMessagePayloadSize()) {
                        throw new WebSocketException("message payload too large");
                    }
                    // validate incoming UTF-8
                    if (mMessageOpcode == 1 && mWebSocketOptions.getValidateIncomingUtf8() && !mUTF8Validator.validate(framePayload)) {
                        throw new WebSocketException("invalid UTF-8 in text message payload");
                    }
                    // buffer frame payload for message
                    mMessagePayload.write(framePayload);
                }
                // on final frame ..
                if (mFrameHeader.isFin()) {
                    if (mMessageOpcode == 1) {
                        // verify that UTF-8 ends on codepoint
                        if (mWebSocketOptions.getValidateIncomingUtf8() && !mUTF8Validator.isValid()) {
                            throw new WebSocketException("UTF-8 text message payload ended within Unicode code point");
                        }
                        // deliver text message
                        if (mWebSocketOptions.getReceiveTextMessagesRaw()) {
                            // dispatch WS text message as raw (but validated) UTF-8
                            onRawTextMessage(mMessagePayload.toByteArray());
                        } else {
                            // dispatch WS text message as Java String (previously already validated)
                            String s = new String(mMessagePayload.toByteArray(), WebSocket.UTF8_ENCODING);
                            onTextMessage(s);
                        }
                    } else if (mMessageOpcode == 2) {
                        // dispatch WS binary message
                        onBinaryMessage(mMessagePayload.toByteArray());
                    } else {
                        // should not arrive here (handled before)
                        throw new Exception("logic error");
                    }
                    // ok, message completed - reset all
                    mInsideMessage = false;
                    mMessagePayload.reset();
                }
            }
            // reset frame
            mFrameHeader = null;
            // reprocess if more data left
            return mApplicationBuffer.position() > 0;
        } else {
            // need more data
            return false;
        }
    }
}
/**
 * WebSockets handshake reply from server received, default notifies master.
 *
 * @param success Success handshake flag
 */
protected void onHandshake(boolean success) {
    final WebSocketMessage.ServerHandshake handshake = new WebSocketMessage.ServerHandshake(success);
    notify(handshake);
}
/**
 * WebSockets close received, default notifies master.
 *
 * @param code Close code sent by the peer.
 * @param reason Close reason sent by the peer, or null.
 */
protected void onClose(int code, String reason) {
    final WebSocketMessage.Close close = new WebSocketMessage.Close(code, reason);
    notify(close);
}
/**
 * WebSockets ping received, default notifies master.
 *
 * @param payload Ping payload or null.
 */
protected void onPing(byte[] payload) {
    final WebSocketMessage.Ping ping = new WebSocketMessage.Ping(payload);
    notify(ping);
}
/**
 * WebSockets pong received, default notifies master.
 *
 * @param payload Pong payload or null.
 */
protected void onPong(byte[] payload) {
    final WebSocketMessage.Pong pong = new WebSocketMessage.Pong(payload);
    notify(pong);
}
/**
 * WebSockets text message received, default notifies master.
 * Only called when the receiveTextMessagesRaw option HAS NOT been set.
 *
 * @param payload Text message payload as Java String decoded
 *                from raw UTF-8 payload or null (empty payload).
 */
protected void onTextMessage(String payload) {
    final WebSocketMessage.TextMessage text = new WebSocketMessage.TextMessage(payload);
    notify(text);
}
/**
 * WebSockets text message received, default notifies master.
 * Only called when the receiveTextMessagesRaw option HAS been set.
 *
 * @param payload Text message payload as raw UTF-8 octets or
 *                null (empty payload).
 */
protected void onRawTextMessage(byte[] payload) {
    final WebSocketMessage.RawTextMessage rawText = new WebSocketMessage.RawTextMessage(payload);
    notify(rawText);
}
/**
 * WebSockets binary message received, default notifies master.
 *
 * @param payload Binary message payload or null (empty payload).
 */
protected void onBinaryMessage(byte[] payload) {
    final WebSocketMessage.BinaryMessage binary = new WebSocketMessage.BinaryMessage(payload);
    notify(binary);
}
/**
 * Process WebSockets handshake received from server.
 * Scans the buffered data for the CRLFCRLF header terminator, checks the
 * HTTP status line, removes the consumed header bytes from the buffer and
 * notifies the master of the outcome via onHandshake().
 *
 * @return true when processing may continue (data left in the buffer, or a
 *         server error terminated the reader), false when more data is
 *         needed or no complete header was found yet.
 */
private boolean processHandshake() throws UnsupportedEncodingException {
boolean res = false;
// Scan backwards for the CRLF CRLF sequence terminating the HTTP headers.
for (int pos = mApplicationBuffer.position() - 4; pos >= 0; --pos) {
if (mApplicationBuffer.get(pos+0) == 0x0d &&
mApplicationBuffer.get(pos+1) == 0x0a &&
mApplicationBuffer.get(pos+2) == 0x0d &&
mApplicationBuffer.get(pos+3) == 0x0a) {
/// \todo process & verify handshake from server
/// \todo forward subprotocol, if any
int oldPosition = mApplicationBuffer.position();
// Check HTTP status code
boolean serverError = false;
if (mApplicationBuffer.get(0) == 'H' &&
mApplicationBuffer.get(1) == 'T' &&
mApplicationBuffer.get(2) == 'T' &&
mApplicationBuffer.get(3) == 'P') {
Pair<Integer, String> status = parseHTTPStatus();
if (status.first >= 300) {
// Invalid status code for success connection
notify(new WebSocketMessage.ServerError(status.first, status.second));
serverError = true;
}
}
// Drop the consumed HTTP headers, keeping any bytes that followed them.
mApplicationBuffer.position(pos + 4);
mApplicationBuffer.limit(oldPosition);
mApplicationBuffer.compact();
if (!serverError) {
// process further when data after HTTP headers left in buffer
res = mApplicationBuffer.position() > 0;
mState = ReaderState.STATE_OPEN;
} else {
res = true;
mState = ReaderState.STATE_CLOSED;
// Stop the reader thread: the server rejected the handshake.
mStopped = true;
}
onHandshake(!serverError);
break;
}
}
return res;
}
   /**
    * Parse the HTTP status line at the start of the application buffer.
    *
    * Expects a line of the form "HTTP/1.1 <code> <message>\r\n": the status
    * code lies between the first and second space, the message runs from
    * the second space to the end of the line.
    *
    * NOTE(review): this method moves the application buffer's position as a
    * side effect; the caller (processHandshake) repositions the buffer
    * afterwards - confirm no other callers exist. Non-digit characters in
    * the status code field are not validated.
    *
    * @return a Pair of (status code, status message).
    * @throws UnsupportedEncodingException if UTF-8 decoding is unsupported.
    */
   private Pair<Integer, String> parseHTTPStatus() throws UnsupportedEncodingException {
      int beg, end;
      // Find first space
      for (beg = 4; beg < mApplicationBuffer.position(); ++beg) {
         if (mApplicationBuffer.get(beg) == ' ') break;
      }
      // Find second space
      for (end = beg + 1; end < mApplicationBuffer.position(); ++end) {
         if (mApplicationBuffer.get(end) == ' ') break;
      }
      // Parse status code between them
      ++beg;
      int statusCode = 0;
      for (int i = 0; beg + i < end; ++i) {
         // 0x30 is ASCII '0'; accumulate decimal digits left to right.
         int digit = (mApplicationBuffer.get(beg + i) - 0x30);
         statusCode *= 10;
         statusCode += digit;
      }
      // Find end of line to extract error message
      ++end;
      int eol;
      for (eol = end; eol < mApplicationBuffer.position(); ++eol) {
         if (mApplicationBuffer.get(eol) == 0x0d) break;
      }
      int statusMessageLength = eol - end;
      byte[] statusBuf = new byte[statusMessageLength];
      // Side effect: repositions the buffer to read the message bytes.
      mApplicationBuffer.position(end);
      mApplicationBuffer.get(statusBuf, 0, statusMessageLength);
      String statusMessage = new String(statusBuf, WebSocket.UTF8_ENCODING);
      Log.w(TAG, String.format("Status: %d (%s)", statusCode, statusMessage));
      return new Pair<Integer, String>(statusCode, statusMessage);
   }
/**
* Consume data buffered in mFrameBuffer.
*/
private boolean consumeData() throws Exception {
switch (mState) {
case STATE_OPEN:
case STATE_CLOSING:
return processData();
case STATE_CLOSED:
return false;
case STATE_CONNECTING:
return processHandshake();
default:
return false;
}
}
/**
* Run the background reader thread loop.
*/
@Override
public void run() {
synchronized (this) {
notifyAll();
}
InputStream inputStream = null;
try {
inputStream = mSocket.getInputStream();
} catch (IOException e) {
Log.e(TAG, e.getLocalizedMessage());
return;
}
this.mInputStream = inputStream;
Log.d(TAG, "WebSocker reader running.");
mApplicationBuffer.clear();
while (!mStopped) {
try {
int bytesRead = mInputStream.read(mNetworkBuffer);
if (bytesRead > 0) {
mApplicationBuffer.put(mNetworkBuffer, 0, bytesRead);
while (consumeData()) {
}
} else if (bytesRead == -1) {
Log.d(TAG, "run() : ConnectionLost");
notify(new WebSocketMessage.ConnectionLost());
this.mStopped = true;
} else {
Log.e(TAG, "WebSocketReader read() failed.");
}
} catch (WebSocketException e) {
Log.d(TAG, "run() : WebSocketException (" + e.toString() + ")");
// wrap the exception and notify master
notify(new WebSocketMessage.ProtocolViolation(e));
} catch (SocketException e) {
Log.d(TAG, "run() : SocketException (" + e.toString() + ")");
// wrap the exception and notify master
notify(new WebSocketMessage.ConnectionLost());
} catch (IOException e) {
Log.d(TAG, "run() : IOException (" + e.toString() + ")");
notify(new WebSocketMessage.ConnectionLost());
} catch (Exception e) {
Log.d(TAG, "run() : Exception (" + e.toString() + ")");
// wrap the exception and notify master
notify(new WebSocketMessage.Error(e));
}
}
Log.d(TAG, "WebSocket reader ended.");
}
}
| |
package hex.schemas;
import hex.deeplearning.DeepLearning;
import hex.deeplearning.DeepLearningModel.DeepLearningParameters;
import water.api.API;
import water.api.KeyV1.ModelKeyV1;
import water.api.SupervisedModelParametersSchema;
import water.fvec.Frame;
import java.util.Random;
/** REST API (V2) schema for the DeepLearning model builder. */
public class DeepLearningV2 extends SupervisedModelBuilderSchema<DeepLearning,DeepLearningV2,DeepLearningV2.DeepLearningParametersV2> {

  /** REST API (V2) schema for DeepLearning model-building parameters. */
  public static final class DeepLearningParametersV2 extends SupervisedModelParametersSchema<DeepLearningParameters, DeepLearningParametersV2> {
    // Determines the order of parameters in the GUI
    static public String[] own_fields = new String[] {
            //        "n_folds",
            "keep_cross_validation_splits",
            "checkpoint",
            "override_with_best_model",
            "use_all_factor_levels",
            "activation",
            "hidden",
            "epochs",
            "train_samples_per_iteration",
            "target_ratio_comm_to_comp",
            "seed",
            "adaptive_rate",
            "rho",
            "epsilon",
            "rate",
            "rate_annealing",
            "rate_decay",
            "momentum_start",
            "momentum_ramp",
            "momentum_stable",
            "nesterov_accelerated_gradient",
            "input_dropout_ratio",
            "hidden_dropout_ratios",
            "l1",
            "l2",
            "max_w2",
            "initial_weight_distribution",
            "initial_weight_scale",
            "loss",
            "score_interval",
            "score_training_samples",
            "score_validation_samples",
            "score_duty_cycle",
            "classification_stop",
            "regression_stop",
            "max_hit_ratio_k",
            "score_validation_sampling",
            "diagnostics",
            "fast_mode",
            "ignore_const_cols",
            "force_load_balance",
            "variable_importances",
            "replicate_training_data",
            "single_node_mode",
            "shuffle_training_data",
            "missing_values_handling",
            "quiet_mode",
            "max_confusion_matrix_size",
            "autoencoder",
            "sparse",
            "col_major",
            "average_activation",
            "sparsity_beta",
            "max_categorical_features",
            "reproducible",
            "export_weights_and_biases"
    };

    //    @API(help="Number of folds for n-fold cross-validation (0 to n)", level = API.Level.critical, direction= API.Direction.INOUT)
    //    public int n_folds;

    @API(help="Keep cross-validation Frames", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean keep_cross_validation_splits;

    /**
     * A model key associated with a previously trained Deep Learning
     * model. This option allows users to build a new model as a
     * continuation of a previously generated model (e.g., by a grid search).
     */
    @API(help = "Model checkpoint to resume training with", level = API.Level.secondary, direction=API.Direction.INOUT)
    public ModelKeyV1 checkpoint;

    /**
     * If enabled, store the best model under the destination key of this model at the end of training.
     * Only applicable if training is not cancelled.
     */
    @API(help = "If enabled, override the final model with the best model found during training", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean override_with_best_model;

    @API(help = "Auto-Encoder", level = API.Level.secondary, direction=API.Direction.INOUT)
    public boolean autoencoder;

    @API(help="Use all factor levels of categorical variables. Otherwise, the first factor level is omitted (without loss of accuracy). Useful for variable importances and auto-enabled for autoencoder.", level = API.Level.secondary, direction=API.Direction.INOUT)
    public boolean use_all_factor_levels;

    /*Neural Net Topology*/
    /**
     * The activation function (non-linearity) to be used the neurons in the hidden layers.
     * Tanh: Hyperbolic tangent function (same as scaled and shifted sigmoid).
     * Rectifier: Chooses the maximum of (0, x) where x is the input value.
     * Maxout: Choose the maximum coordinate of the input vector.
     * With Dropout: Zero out a random user-given fraction of the
     * incoming weights to each hidden layer during training, for each
     * training row. This effectively trains exponentially many models at
     * once, and can improve generalization.
     */
    @API(help = "Activation function", values = { "Tanh", "TanhWithDropout", "Rectifier", "RectifierWithDropout", "Maxout", "MaxoutWithDropout" }, level=API.Level.critical, direction=API.Direction.INOUT)
    public DeepLearningParameters.Activation activation;

    /**
     * The number and size of each hidden layer in the model.
     * For example, if a user specifies "100,200,100" a model with 3 hidden
     * layers will be produced, and the middle hidden layer will have 200
     * neurons.
     */
    @API(help = "Hidden layer sizes (e.g. 100,100).", level = API.Level.critical, direction=API.Direction.INOUT)
    public int[] hidden;

    /**
     * The number of passes over the training dataset to be carried out.
     * It is recommended to start with lower values for initial grid searches.
     * This value can be modified during checkpoint restarts and allows continuation
     * of selected models.
     */
    @API(help = "How many times the dataset should be iterated (streamed), can be fractional", /* dmin = 1e-3, */ level = API.Level.critical, direction=API.Direction.INOUT)
    public double epochs;

    /**
     * The number of training data rows to be processed per iteration. Note that
     * independent of this parameter, each row is used immediately to update the model
     * with (online) stochastic gradient descent. This parameter controls the
     * synchronization period between nodes in a distributed environment and the
     * frequency at which scoring and model cancellation can happen. For example, if
     * it is set to 10,000 on H2O running on 4 nodes, then each node will
     * process 2,500 rows per iteration, sampling randomly from their local data.
     * Then, model averaging between the nodes takes place, and scoring can happen
     * (dependent on scoring interval and duty factor). Special values are 0 for
     * one epoch per iteration, -1 for processing the maximum amount of data
     * per iteration (if **replicate training data** is enabled, N epochs
     * will be trained per iteration on N nodes, otherwise one epoch). Special value
     * of -2 turns on automatic mode (auto-tuning).
     */
    @API(help = "Number of training samples (globally) per MapReduce iteration. Special values are 0: one epoch, -1: all available data (e.g., replicated training data), -2: automatic", /* lmin = -2, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public long train_samples_per_iteration;

    @API(help = "Target ratio of communication overhead to computation. Only for multi-node operation and train_samples_per_iteration=-2 (auto-tuning)", /* dmin = 1e-3, dmax=0.999, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double target_ratio_comm_to_comp;

    /**
     * The random seed controls sampling and initialization. Reproducible
     * results are only expected with single-threaded operation (i.e.,
     * when running on one node, turning off load balancing and providing
     * a small dataset that fits in one chunk). In general, the
     * multi-threaded asynchronous updates to the model parameters will
     * result in (intentional) race conditions and non-reproducible
     * results. Note that deterministic sampling and initialization might
     * still lead to some weak sense of determinism in the model.
     */
    @API(help = "Seed for random numbers (affects sampling) - Note: only reproducible when running single threaded", level = API.Level.expert, direction=API.Direction.INOUT)
    public long seed;

    /*Adaptive Learning Rate*/
    /**
     * The implemented adaptive learning rate algorithm (ADADELTA) automatically
     * combines the benefits of learning rate annealing and momentum
     * training to avoid slow convergence. Specification of only two
     * parameters (rho and epsilon) simplifies hyper parameter search.
     * In some cases, manually controlled (non-adaptive) learning rate and
     * momentum specifications can lead to better results, but require the
     * specification (and hyper parameter search) of up to 7 parameters.
     * If the model is built on a topology with many local minima or
     * long plateaus, it is possible for a constant learning rate to produce
     * sub-optimal results. Learning rate annealing allows digging deeper into
     * local minima, while rate decay allows specification of different
     * learning rates per layer. When the gradient is being estimated in
     * a long valley in the optimization landscape, a large learning rate
     * can cause the gradient to oscillate and move in the wrong
     * direction. When the gradient is computed on a relatively flat
     * surface with small learning rates, the model can converge far
     * slower than necessary.
     */
    @API(help = "Adaptive learning rate", level = API.Level.secondary, direction=API.Direction.INOUT)
    public boolean adaptive_rate;

    /**
     * The first of two hyper parameters for adaptive learning rate (ADADELTA).
     * It is similar to momentum and relates to the memory to prior weight updates.
     * Typical values are between 0.9 and 0.999.
     * This parameter is only active if adaptive learning rate is enabled.
     */
    @API(help = "Adaptive learning rate time decay factor (similarity to prior updates)", /* dmin = 0.01, dmax = 1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double rho;

    /**
     * The second of two hyper parameters for adaptive learning rate (ADADELTA).
     * It is similar to learning rate annealing during initial training
     * and momentum at later stages where it allows forward progress.
     * Typical values are between 1e-10 and 1e-4.
     * This parameter is only active if adaptive learning rate is enabled.
     */
    @API(help = "Adaptive learning rate smoothing factor (to avoid divisions by zero and allow progress)", /* dmin = 1e-15, dmax = 1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double epsilon;

    /*Learning Rate*/
    /**
     * When adaptive learning rate is disabled, the magnitude of the weight
     * updates are determined by the user specified learning rate
     * (potentially annealed), and are a function of the difference
     * between the predicted value and the target value. That difference,
     * generally called delta, is only available at the output layer. To
     * correct the output at each hidden layer, back propagation is
     * used. Momentum modifies back propagation by allowing prior
     * iterations to influence the current update. Using the momentum
     * parameter can aid in avoiding local minima and the associated
     * instability. Too much momentum can lead to instabilities, that's
     * why the momentum is best ramped up slowly.
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Learning rate (higher => less stable, lower => slower convergence)", /* dmin = 1e-10, dmax = 1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double rate;

    /**
     * Learning rate annealing reduces the learning rate to "freeze" into
     * local minima in the optimization landscape. The annealing rate is the
     * inverse of the number of training samples it takes to cut the learning rate in half
     * (e.g., 1e-6 means that it takes 1e6 training samples to halve the learning rate).
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Learning rate annealing: rate / (1 + rate_annealing * samples)", /* dmin = 0, dmax = 1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double rate_annealing;

    /**
     * The learning rate decay parameter controls the change of learning rate across layers.
     * For example, assume the rate parameter is set to 0.01, and the rate_decay parameter is set to 0.5.
     * Then the learning rate for the weights connecting the input and first hidden layer will be 0.01,
     * the learning rate for the weights connecting the first and the second hidden layer will be 0.005,
     * and the learning rate for the weights connecting the second and third hidden layer will be 0.0025, etc.
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Learning rate decay factor between layers (N-th layer: rate*alpha^(N-1))", /* dmin = 0, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double rate_decay;

    /*Momentum*/
    /**
     * The momentum_start parameter controls the amount of momentum at the beginning of training.
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Initial momentum at the beginning of training (try 0.5)", /* dmin = 0, dmax = 0.9999999999, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double momentum_start;

    /**
     * The momentum_ramp parameter controls the amount of learning for which momentum increases
     * (assuming momentum_stable is larger than momentum_start). The ramp is measured in the number
     * of training samples.
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Number of training samples for which momentum increases", /* dmin = 1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double momentum_ramp;

    /**
     * The momentum_stable parameter controls the final momentum value reached after momentum_ramp training samples.
     * The momentum used for training will remain the same for training beyond reaching that point.
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Final momentum after the ramp is over (try 0.99)", /* dmin = 0, dmax = 0.9999999999, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double momentum_stable;

    /**
     * The Nesterov accelerated gradient descent method is a modification to
     * traditional gradient descent for convex functions. The method relies on
     * gradient information at various points to build a polynomial approximation that
     * minimizes the residuals in fewer iterations of the descent.
     * This parameter is only active if adaptive learning rate is disabled.
     */
    @API(help = "Use Nesterov accelerated gradient (recommended)", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean nesterov_accelerated_gradient;

    /*Regularization*/
    /**
     * A fraction of the features for each training row to be omitted from training in order
     * to improve generalization (dimension sampling).
     */
    @API(help = "Input layer dropout ratio (can improve generalization, try 0.1 or 0.2)", /* dmin = 0, dmax = 1, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public double input_dropout_ratio;

    /**
     * A fraction of the inputs for each hidden layer to be omitted from training in order
     * to improve generalization. Defaults to 0.5 for each hidden layer if omitted.
     */
    @API(help = "Hidden layer dropout ratios (can improve generalization), specify one value per hidden layer, defaults to 0.5", /* dmin = 0, dmax = 1, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public double[] hidden_dropout_ratios;

    /**
     * A regularization method that constrains the absolute value of the weights and
     * has the net effect of dropping some weights (setting them to zero) from a model
     * to reduce complexity and avoid overfitting.
     */
    @API(help = "L1 regularization (can add stability and improve generalization, causes many weights to become 0)", /* dmin = 0, dmax = 1, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public double l1;

    /**
     * A regularization method that constrains the sum of the squared
     * weights. This method introduces bias into parameter estimates, but
     * frequently produces substantial gains in modeling as estimate variance is
     * reduced.
     */
    // FIX: closed the unbalanced parenthesis in the help string below.
    @API(help = "L2 regularization (can add stability and improve generalization, causes many weights to be small)", /* dmin = 0, dmax = 1, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public double l2;

    /**
     * A maximum on the sum of the squared incoming weights into
     * any one neuron. This tuning parameter is especially useful for unbound
     * activation functions such as Maxout or Rectifier.
     */
    @API(help = "Constraint for squared sum of incoming weights per unit (e.g. for Rectifier)", /* dmin = 1e-10, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public float max_w2;

    /*Initialization*/
    /**
     * The distribution from which initial weights are to be drawn. The default
     * option is an optimized initialization that considers the size of the network.
     * The "uniform" option uses a uniform distribution with a mean of 0 and a given
     * interval. The "normal" option draws weights from the standard normal
     * distribution with a mean of 0 and given standard deviation.
     */
    @API(help = "Initial Weight Distribution", values = { "UniformAdaptive", "Uniform", "Normal" }, level = API.Level.expert, direction=API.Direction.INOUT)
    public DeepLearningParameters.InitialWeightDistribution initial_weight_distribution;

    /**
     * The scale of the distribution function for Uniform or Normal distributions.
     * For Uniform, the values are drawn uniformly from -initial_weight_scale...initial_weight_scale.
     * For Normal, the values are drawn from a Normal distribution with a standard deviation of initial_weight_scale.
     */
    // FIX: removed the stray closing parenthesis from the help string below.
    @API(help = "Uniform: -value...value, Normal: stddev", /* dmin = 0, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double initial_weight_scale;

    /**
     * The loss (error) function to be minimized by the model.
     * CrossEntropy loss is used when the model output consists of independent
     * hypotheses, and the outputs can be interpreted as the probability that each
     * hypothesis is true. Cross entropy is the recommended loss function when the
     * target values are class labels, and especially for imbalanced data.
     * It strongly penalizes error in the prediction of the actual class label.
     * MeanSquare loss is used when the model output are continuous real values, but can
     * be used for classification as well (where it emphasizes the error on all
     * output classes, not just for the actual class).
     */
    @API(help = "Loss function", values = { "Automatic", "CrossEntropy", "MeanSquare", "Huber", "Absolute" }, required = false, level = API.Level.secondary, direction=API.Direction.INOUT)
    public DeepLearningParameters.Loss loss;

    /*Scoring*/
    /**
     * The minimum time (in seconds) to elapse between model scoring. The actual
     * interval is determined by the number of training samples per iteration and the scoring duty cycle.
     */
    @API(help = "Shortest time interval (in secs) between model scoring", /* dmin = 0, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public double score_interval;

    /**
     * The number of training dataset points to be used for scoring. Will be
     * randomly sampled. Use 0 for selecting the entire training dataset.
     */
    @API(help = "Number of training set samples for scoring (0 for all)", /* lmin = 0, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public long score_training_samples;

    /**
     * The number of validation dataset points to be used for scoring. Can be
     * randomly sampled or stratified (if "balance classes" is set and "score
     * validation sampling" is set to stratify). Use 0 for selecting the entire
     * training dataset.
     */
    @API(help = "Number of validation set samples for scoring (0 for all)", /* lmin = 0, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public long score_validation_samples;

    /**
     * Maximum fraction of wall clock time spent on model scoring on training and validation samples,
     * and on diagnostics such as computation of feature importances (i.e., not on training).
     */
    @API(help = "Maximum duty cycle fraction for scoring (lower: more training, higher: more scoring).", /* dmin = 0, dmax = 1, */ level = API.Level.secondary, direction=API.Direction.INOUT)
    public double score_duty_cycle;

    /**
     * The stopping criteria in terms of classification error (1-accuracy) on the
     * training data scoring dataset. When the error is at or below this threshold,
     * training stops.
     */
    @API(help = "Stopping criterion for classification error fraction on training data (-1 to disable)", /* dmin=-1, dmax=1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double classification_stop;

    /**
     * The stopping criteria in terms of regression error (MSE) on the training
     * data scoring dataset. When the error is at or below this threshold, training
     * stops.
     */
    @API(help = "Stopping criterion for regression error (MSE) on training data (-1 to disable)", /* dmin=-1, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public double regression_stop;

    /**
     * Enable quiet mode for less output to standard output.
     */
    @API(help = "Enable quiet mode for less output to standard output", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean quiet_mode;

    /**
     * For classification models, the maximum size (in terms of classes) of the
     * confusion matrix for it to be printed. This option is meant to avoid printing
     * extremely large confusion matrices.
     */
    @API(help = "Max. size (number of classes) for confusion matrices to be shown", level = API.Level.expert, direction=API.Direction.INOUT)
    public int max_confusion_matrix_size;

    /**
     * The maximum number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable)
     */
    @API(help = "Max. number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable)", /* lmin=0, */ level = API.Level.expert, direction=API.Direction.INOUT)
    public int max_hit_ratio_k;

    /**
     * Method used to sample the validation dataset for scoring, see Score Validation Samples above.
     */
    @API(help = "Method used to sample validation dataset for scoring", values = { "Uniform", "Stratified" }, level = API.Level.expert, direction=API.Direction.INOUT)
    public DeepLearningParameters.ClassSamplingMethod score_validation_sampling;

    /*Misc*/
    /**
     * Gather diagnostics for hidden layers, such as mean and RMS values of learning
     * rate, momentum, weights and biases.
     */
    @API(help = "Enable diagnostics for hidden layers", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean diagnostics;

    /**
     * Whether to compute variable importances for input features.
     * The implemented method (by Gedeon) considers the weights connecting the
     * input features to the first two hidden layers.
     */
    @API(help = "Compute variable importances for input features (Gedeon method) - can be slow for large networks", direction=API.Direction.INOUT)
    public boolean variable_importances;

    /**
     * Enable fast mode (minor approximation in back-propagation), should not affect results significantly.
     */
    @API(help = "Enable fast mode (minor approximation in back-propagation)", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean fast_mode;

    /**
     * Ignore constant training columns (no information can be gained anyway).
     */
    @API(help = "Ignore constant training columns (no information can be gained anyway)", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean ignore_const_cols;

    /**
     * Increase training speed on small datasets by splitting it into many chunks
     * to allow utilization of all cores.
     */
    @API(help = "Force extra load balancing to increase training speed for small datasets (to keep all cores busy)", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean force_load_balance;

    /**
     * Replicate the entire training dataset onto every node for faster training on small datasets.
     */
    @API(help = "Replicate the entire training dataset onto every node for faster training on small datasets", level = API.Level.secondary, direction=API.Direction.INOUT)
    public boolean replicate_training_data;

    /**
     * Run on a single node for fine-tuning of model parameters. Can be useful for
     * checkpoint resumes after training on multiple nodes for fast initial
     * convergence.
     */
    @API(help = "Run on a single node for fine-tuning of model parameters", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean single_node_mode;

    /**
     * Enable shuffling of training data (on each node). This option is
     * recommended if training data is replicated on N nodes, and the number of training samples per iteration
     * is close to N times the dataset size, where all nodes train will (almost) all
     * the data. It is automatically enabled if the number of training samples per iteration is set to -1 (or to N
     * times the dataset size or larger).
     */
    @API(help = "Enable shuffling of training data (recommended if training data is replicated and train_samples_per_iteration is close to #nodes x #rows)", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean shuffle_training_data;

    @API(help = "Handling of missing values. Either Skip or MeanImputation.", values = { "Skip", "MeanImputation" }, level = API.Level.expert, direction=API.Direction.INOUT)
    public DeepLearningParameters.MissingValuesHandling missing_values_handling;

    @API(help = "Sparse data handling (Experimental).", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean sparse;

    @API(help = "Use a column major weight matrix for input layer. Can speed up forward propagation, but might slow down backpropagation (Experimental).", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean col_major;

    @API(help = "Average activation for sparse auto-encoder (Experimental)", level = API.Level.expert, direction=API.Direction.INOUT)
    public double average_activation;

    @API(help = "Sparsity regularization (Experimental)", level = API.Level.expert, direction=API.Direction.INOUT)
    public double sparsity_beta;

    @API(help = "Max. number of categorical features, enforced via hashing (Experimental)", level = API.Level.expert, direction=API.Direction.INOUT)
    public int max_categorical_features;

    @API(help = "Force reproducibility on small data (will be slow - only uses 1 thread)", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean reproducible;

    @API(help = "Whether to export Neural Network weights and biases to H2O Frames", level = API.Level.expert, direction=API.Direction.INOUT)
    public boolean export_weights_and_biases;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.apache.logging.log4j.core;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Marker;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.config.ReliabilityStrategy;
import org.apache.logging.log4j.core.filter.CompositeFilter;
import org.apache.logging.log4j.message.Message;
import org.apache.logging.log4j.message.MessageFactory;
import org.apache.logging.log4j.message.SimpleMessage;
import org.apache.logging.log4j.spi.AbstractLogger;
import org.apache.logging.log4j.util.Strings;
import org.apache.logging.log4j.util.Supplier;
/**
* The core implementation of the {@link org.apache.logging.log4j.Logger} interface. Besides providing an implementation
* of all the Logger methods, this class also provides some convenience methods for Log4j 1.x compatibility as well as
* access to the {@link org.apache.logging.log4j.core.Filter Filters} and {@link org.apache.logging.log4j.core.Appender
* Appenders} associated with this Logger. Note that access to these underlying objects is provided primarily for use in
* unit tests or bridging legacy Log4j 1.x code. Future versions of this class may or may not include the various
* methods that are noted as not being part of the public API.
*
* TODO All the isEnabled methods could be pushed into a filter interface. Not sure of the utility of having isEnabled
* be able to examine the message pattern and parameters. (RG) Moving the isEnabled methods out of Logger noticeably
* impacts performance. The message pattern and parameters are required so that they can be used in global filters.
*/
public class Logger extends AbstractLogger implements Supplier<LoggerConfig> {

    private static final long serialVersionUID = 1L;

    /**
     * The current binding between this Logger and its Configuration/LoggerConfig.
     * Declared volatile so that a configuration swap performed by one thread is
     * immediately visible to every thread logging through this Logger.
     */
    protected volatile PrivateConfig privateConfig;

    // FIXME: ditto to the above
    private final LoggerContext context;

    /**
     * The constructor.
     *
     * @param context The LoggerContext this Logger is associated with.
     * @param messageFactory The message factory.
     * @param name The name of the Logger.
     */
    protected Logger(final LoggerContext context, final String name, final MessageFactory messageFactory) {
        super(name, messageFactory);
        this.context = context;
        privateConfig = new PrivateConfig(context.getConfiguration(), this);
    }

    /**
     * This method is only used for 1.x compatibility. Returns the parent of this Logger. If it doesn't already exist
     * return a temporary Logger.
     *
     * @return The parent Logger, or {@code null} if this Logger has no parent.
     */
    public Logger getParent() {
        // If this Logger's name matches its LoggerConfig, the parent is the config's parent;
        // otherwise this Logger inherited the config and the config itself acts as the parent.
        final LoggerConfig lc = privateConfig.loggerConfig.getName().equals(getName()) ? privateConfig.loggerConfig
                .getParent() : privateConfig.loggerConfig;
        if (lc == null) {
            return null;
        }
        if (context.hasLogger(lc.getName())) {
            return context.getLogger(lc.getName(), getMessageFactory());
        }
        // No registered Logger for the parent config: hand out a temporary, unregistered one.
        return new Logger(context, lc.getName(), this.getMessageFactory());
    }

    /**
     * Returns the LoggerContext this Logger is associated with.
     *
     * @return the LoggerContext.
     */
    public LoggerContext getContext() {
        return context;
    }

    /**
     * This method is not exposed through the public API and is provided primarily for unit testing.
     * <p>
     * If the new level is null, this logger inherits the level from its parent.
     * </p>
     *
     * @param level The Level to use on this Logger, may be null.
     */
    public synchronized void setLevel(final Level level) {
        if (level == getLevel()) {
            return;
        }
        Level actualLevel;
        if (level != null) {
            actualLevel = level;
        } else {
            // Inherit from the parent; fall back to the current config level when there is no parent.
            final Logger parent = getParent();
            actualLevel = parent != null ? parent.getLevel() : privateConfig.loggerConfigLevel;
        }
        privateConfig = new PrivateConfig(privateConfig, actualLevel);
    }

    /**
     * Returns the LoggerConfig this Logger currently delegates to.
     *
     * @see org.apache.logging.log4j.util.Supplier#get()
     */
    @Override
    public LoggerConfig get() {
        return privateConfig.loggerConfig;
    }

    @Override
    public void logMessage(final String fqcn, final Level level, final Marker marker, final Message message,
            final Throwable t) {
        // Never pass a null Message downstream; substitute an empty SimpleMessage.
        final Message msg = message == null ? new SimpleMessage(Strings.EMPTY) : message;
        // check if we need to reconfigure
        privateConfig.config.getConfigurationMonitor().checkConfiguration();
        final ReliabilityStrategy strategy = privateConfig.loggerConfig.getReliabilityStrategy();
        strategy.log(this, getName(), fqcn, marker, level, msg, t);
    }

    @Override
    public boolean isEnabled(final Level level, final Marker marker, final String message, final Throwable t) {
        return privateConfig.filter(level, marker, message, t);
    }

    @Override
    public boolean isEnabled(final Level level, final Marker marker, final String message) {
        return privateConfig.filter(level, marker, message);
    }

    @Override
    public boolean isEnabled(final Level level, final Marker marker, final String message, final Object... params) {
        return privateConfig.filter(level, marker, message, params);
    }

    @Override
    public boolean isEnabled(final Level level, final Marker marker, final Object message, final Throwable t) {
        return privateConfig.filter(level, marker, message, t);
    }

    @Override
    public boolean isEnabled(final Level level, final Marker marker, final Message message, final Throwable t) {
        return privateConfig.filter(level, marker, message, t);
    }

    /**
     * This method is not exposed through the public API and is used primarily for unit testing.
     *
     * @param appender The Appender to add to the Logger.
     */
    public void addAppender(final Appender appender) {
        privateConfig.config.addLoggerAppender(this, appender);
    }

    /**
     * This method is not exposed through the public API and is used primarily for unit testing.
     *
     * @param appender The Appender to remove from the Logger.
     */
    public void removeAppender(final Appender appender) {
        privateConfig.loggerConfig.removeAppender(appender.getName());
    }

    /**
     * This method is not exposed through the public API and is used primarily for unit testing.
     *
     * @return A Map containing the Appender's name as the key and the Appender as the value.
     */
    public Map<String, Appender> getAppenders() {
        return privateConfig.loggerConfig.getAppenders();
    }

    /**
     * This method is not exposed through the public API and is used primarily for unit testing.
     *
     * @return An Iterator over all the Filters associated with the Logger.
     */
    // FIXME: this really ought to be an Iterable instead of an Iterator
    public Iterator<Filter> getFilters() {
        final Filter filter = privateConfig.loggerConfig.getFilter();
        if (filter instanceof CompositeFilter) {
            return ((CompositeFilter) filter).iterator();
        }
        // Zero or one plain filter: expose it through an (possibly empty) single-element list.
        final List<Filter> filters = new ArrayList<>();
        if (filter != null) {
            filters.add(filter);
        }
        return filters.iterator();
    }

    /**
     * Gets the Level associated with the Logger.
     *
     * @return the Level associate with the Logger.
     */
    @Override
    public Level getLevel() {
        return privateConfig.loggerConfigLevel;
    }

    /**
     * This method is not exposed through the public API and is used primarily for unit testing.
     *
     * @return The number of Filters associated with the Logger.
     */
    public int filterCount() {
        final Filter filter = privateConfig.loggerConfig.getFilter();
        if (filter == null) {
            return 0;
        } else if (filter instanceof CompositeFilter) {
            return ((CompositeFilter) filter).size();
        }
        return 1;
    }

    /**
     * This method is not exposed through the public API and is used primarily for unit testing.
     *
     * @param filter The Filter to add.
     */
    public void addFilter(final Filter filter) {
        privateConfig.config.addLoggerFilter(this, filter);
    }

    /**
     * This method is not exposed through the public API and is present only to support the Log4j 1.2 compatibility
     * bridge.
     *
     * @return true if the associated LoggerConfig is additive, false otherwise.
     */
    public boolean isAdditive() {
        return privateConfig.loggerConfig.isAdditive();
    }

    /**
     * This method is not exposed through the public API and is present only to support the Log4j 1.2 compatibility
     * bridge.
     *
     * @param additive Boolean value to indicate whether the Logger is additive or not.
     */
    public void setAdditive(final boolean additive) {
        privateConfig.config.setLoggerAdditive(this, additive);
    }

    /**
     * Associates the Logger with a new Configuration. This method is not exposed through the public API.
     *
     * There are two ways that could be used to guarantee all threads are aware of changes to config. 1. synchronize
     * this method. Accessors don't need to be synchronized as Java will treat all variables within a synchronized block
     * as volatile. 2. Declare the variable volatile. Option 2 is used here as the performance cost is very low and it
     * does a better job at documenting how it is used.
     *
     * @param newConfig The new Configuration.
     */
    protected void updateConfiguration(final Configuration newConfig) {
        this.privateConfig = new PrivateConfig(newConfig, this);
    }

    /**
     * The binding between a Logger and its configuration. Immutable; a new instance is published
     * (via the volatile {@link #privateConfig} field) whenever the configuration or level changes.
     */
    // TODO: Should not be Serializable per EJ item 74 (2nd Ed)?
    protected class PrivateConfig implements Serializable {
        private static final long serialVersionUID = 1L;
        // config fields are public to make them visible to Logger subclasses
        /** LoggerConfig to delegate the actual logging to. */
        public final LoggerConfig loggerConfig; // SUPPRESS CHECKSTYLE
        /** The current Configuration associated with the LoggerConfig. */
        public final Configuration config; // SUPPRESS CHECKSTYLE
        /** Effective level of this binding. */
        private final Level loggerConfigLevel;
        /** Cached {@code loggerConfigLevel.intLevel()} for fast comparisons in filter(). */
        private final int intLevel;
        /** The Logger this binding belongs to; passed to global filters. */
        private final Logger logger;

        public PrivateConfig(final Configuration config, final Logger logger) {
            this.config = config;
            this.loggerConfig = config.getLoggerConfig(getName());
            this.loggerConfigLevel = this.loggerConfig.getLevel();
            this.intLevel = this.loggerConfigLevel.intLevel();
            this.logger = logger;
        }

        /** Copy with a different effective level. */
        public PrivateConfig(final PrivateConfig pc, final Level level) {
            this.config = pc.config;
            this.loggerConfig = pc.loggerConfig;
            this.loggerConfigLevel = level;
            this.intLevel = this.loggerConfigLevel.intLevel();
            this.logger = pc.logger;
        }

        /** Copy with a different LoggerConfig; the level is taken from that config. */
        public PrivateConfig(final PrivateConfig pc, final LoggerConfig lc) {
            this.config = pc.config;
            this.loggerConfig = lc;
            this.loggerConfigLevel = lc.getLevel();
            this.intLevel = this.loggerConfigLevel.intLevel();
            this.logger = pc.logger;
        }

        // LOG4J2-151: changed visibility to public
        public void logEvent(final LogEvent event) {
            config.getConfigurationMonitor().checkConfiguration();
            loggerConfig.log(event);
        }

        boolean filter(final Level level, final Marker marker, final String msg) {
            config.getConfigurationMonitor().checkConfiguration();
            final Filter filter = config.getFilter();
            if (filter != null) {
                final Filter.Result r = filter.filter(logger, level, marker, msg);
                if (r != Filter.Result.NEUTRAL) {
                    return r == Filter.Result.ACCEPT;
                }
            }
            // No decisive filter result: fall back to a plain level comparison.
            return level != null && intLevel >= level.intLevel();
        }

        boolean filter(final Level level, final Marker marker, final String msg, final Throwable t) {
            config.getConfigurationMonitor().checkConfiguration();
            final Filter filter = config.getFilter();
            if (filter != null) {
                final Filter.Result r = filter.filter(logger, level, marker, msg, t);
                if (r != Filter.Result.NEUTRAL) {
                    return r == Filter.Result.ACCEPT;
                }
            }
            return level != null && intLevel >= level.intLevel();
        }

        boolean filter(final Level level, final Marker marker, final String msg, final Object... p1) {
            config.getConfigurationMonitor().checkConfiguration();
            final Filter filter = config.getFilter();
            if (filter != null) {
                final Filter.Result r = filter.filter(logger, level, marker, msg, p1);
                if (r != Filter.Result.NEUTRAL) {
                    return r == Filter.Result.ACCEPT;
                }
            }
            return level != null && intLevel >= level.intLevel();
        }

        boolean filter(final Level level, final Marker marker, final Object msg, final Throwable t) {
            config.getConfigurationMonitor().checkConfiguration();
            final Filter filter = config.getFilter();
            if (filter != null) {
                final Filter.Result r = filter.filter(logger, level, marker, msg, t);
                if (r != Filter.Result.NEUTRAL) {
                    return r == Filter.Result.ACCEPT;
                }
            }
            return level != null && intLevel >= level.intLevel();
        }

        boolean filter(final Level level, final Marker marker, final Message msg, final Throwable t) {
            config.getConfigurationMonitor().checkConfiguration();
            final Filter filter = config.getFilter();
            if (filter != null) {
                final Filter.Result r = filter.filter(logger, level, marker, msg, t);
                if (r != Filter.Result.NEUTRAL) {
                    return r == Filter.Result.ACCEPT;
                }
            }
            return level != null && intLevel >= level.intLevel();
        }
    }

    /**
     * Returns a String representation of this instance in the form {@code "name:level[ in context_name]"}.
     *
     * @return A String describing this Logger instance.
     */
    @Override
    public String toString() {
        final String nameLevel = getName() + ':' + getLevel();
        if (context == null) {
            return nameLevel;
        }
        final String contextName = context.getName();
        return contextName == null ? nameLevel : nameLevel + " in " + contextName;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
/**
 * Builder for the {@code significant_terms} aggregation. Collects the terms-style bucket options
 * (size, shard size, min doc counts, include/exclude, execution hint) plus the significant-terms
 * specific background filter and significance heuristic.
 */
public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, SignificantTermsAggregationBuilder> {
    public static final String NAME = "significant_terms";
    // XContent field names for the significant-terms specific options.
    static final ParseField BACKGROUND_FILTER = new ParseField("background_filter");
    static final ParseField HEURISTIC = new ParseField("significance_heuristic");
    // Default bucket count thresholds; ctor args are (3, 0, 10, -1) — presumably
    // (minDocCount, shardMinDocCount, requiredSize, shardSize); confirm against BucketCountThresholds.
    static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.BucketCountThresholds(
            3, 0, 10, -1);
    static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore();
    /**
     * Builds the XContent parser for this aggregation, registering one object field per
     * significance heuristic known to the given registry.
     *
     * @param significanceHeuristicParserRegistry registry of pluggable heuristic parsers
     * @return a parser producing a {@link SignificantTermsAggregationBuilder}
     */
    public static Aggregator.Parser getParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) {
        ObjectParser<SignificantTermsAggregationBuilder, Void> aggregationParser =
            new ObjectParser<>(SignificantTermsAggregationBuilder.NAME);
        ValuesSourceParserHelper.declareAnyFields(aggregationParser, true, true);
        aggregationParser.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);
        aggregationParser.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME);
        aggregationParser.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount,
                TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME);
        aggregationParser.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME);
        aggregationParser.declareString(SignificantTermsAggregationBuilder::executionHint,
                TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME);
        aggregationParser.declareObject(SignificantTermsAggregationBuilder::backgroundFilter,
                (p, context) -> parseInnerQueryBuilder(p),
                SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
        // "include" and "exclude" are merged into the builder's single IncludeExclude instance.
        aggregationParser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
                IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
        aggregationParser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
                IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);
        for (String name : significanceHeuristicParserRegistry.getNames()) {
            aggregationParser.declareObject(SignificantTermsAggregationBuilder::significanceHeuristic,
                    (p, context) -> {
                        SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
                                .lookupReturningNullIfNotFound(name);
                        return significanceHeuristicParser.parse(p);
                    },
                    new ParseField(name));
        }
        return new Aggregator.Parser() {
            @Override
            public AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
                return aggregationParser.parse(parser, new SignificantTermsAggregationBuilder(aggregationName, null), null);
            }
        };
    }
    // Mutable builder state; all have parse-time setters below.
    private IncludeExclude includeExclude = null;
    private String executionHint = null;
    private QueryBuilder filterBuilder = null;
    private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS);
    private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;
    public SignificantTermsAggregationBuilder(String name, ValueType valueType) {
        super(name, ValuesSourceType.ANY, valueType);
    }
    /**
     * Read from a Stream.
     */
    // NOTE: the field order here must mirror innerWriteTo below.
    public SignificantTermsAggregationBuilder(StreamInput in) throws IOException {
        super(in, ValuesSourceType.ANY);
        bucketCountThresholds = new BucketCountThresholds(in);
        executionHint = in.readOptionalString();
        filterBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
        includeExclude = in.readOptionalWriteable(IncludeExclude::new);
        significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class);
    }
    // NOTE: the write order must mirror the StreamInput constructor above.
    @Override
    protected void innerWriteTo(StreamOutput out) throws IOException {
        bucketCountThresholds.writeTo(out);
        out.writeOptionalString(executionHint);
        out.writeOptionalNamedWriteable(filterBuilder);
        out.writeOptionalWriteable(includeExclude);
        out.writeNamedWriteable(significanceHeuristic);
    }
    @Override
    protected boolean serializeTargetValueType() {
        return true;
    }
    /** Returns a defensive copy of the current thresholds. */
    protected TermsAggregator.BucketCountThresholds getBucketCountThresholds() {
        return new TermsAggregator.BucketCountThresholds(bucketCountThresholds);
    }
    /** Returns the live thresholds instance (not a copy). */
    public TermsAggregator.BucketCountThresholds bucketCountThresholds() {
        return bucketCountThresholds;
    }
    public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
        if (bucketCountThresholds == null) {
            throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
        }
        this.bucketCountThresholds = bucketCountThresholds;
        return this;
    }
    /**
     * Sets the size - indicating how many term buckets should be returned
     * (defaults to 10)
     *
     * @throws IllegalArgumentException if {@code size <= 0}
     */
    public SignificantTermsAggregationBuilder size(int size) {
        if (size <= 0) {
            throw new IllegalArgumentException("[size] must be greater than 0. Found [" + size + "] in [" + name + "]");
        }
        bucketCountThresholds.setRequiredSize(size);
        return this;
    }
    /**
     * Sets the shard_size - indicating the number of term buckets each shard
     * will return to the coordinating node (the node that coordinates the
     * search execution). The higher the shard size is, the more accurate the
     * results are.
     *
     * @throws IllegalArgumentException if {@code shardSize <= 0}
     */
    public SignificantTermsAggregationBuilder shardSize(int shardSize) {
        if (shardSize <= 0) {
            throw new IllegalArgumentException(
                    "[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]");
        }
        bucketCountThresholds.setShardSize(shardSize);
        return this;
    }
    /**
     * Set the minimum document count terms should have in order to appear in
     * the response.
     *
     * @throws IllegalArgumentException if {@code minDocCount < 0}
     */
    public SignificantTermsAggregationBuilder minDocCount(long minDocCount) {
        if (minDocCount < 0) {
            throw new IllegalArgumentException(
                    "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]");
        }
        bucketCountThresholds.setMinDocCount(minDocCount);
        return this;
    }
    /**
     * Set the minimum document count terms should have on the shard in order to
     * appear in the response.
     *
     * @throws IllegalArgumentException if {@code shardMinDocCount < 0}
     */
    public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) {
        if (shardMinDocCount < 0) {
            throw new IllegalArgumentException(
                    "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]");
        }
        bucketCountThresholds.setShardMinDocCount(shardMinDocCount);
        return this;
    }
    /**
     * Expert: sets an execution hint to the aggregation.
     */
    public SignificantTermsAggregationBuilder executionHint(String executionHint) {
        this.executionHint = executionHint;
        return this;
    }
    /**
     * Expert: gets an execution hint to the aggregation.
     */
    public String executionHint() {
        return executionHint;
    }
    /** Sets the background filter; must not be null. */
    public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) {
        if (backgroundFilter == null) {
            throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]");
        }
        this.filterBuilder = backgroundFilter;
        return this;
    }
    /** Returns the background filter, or null if none was set. */
    public QueryBuilder backgroundFilter() {
        return filterBuilder;
    }
    /**
     * Set terms to include and exclude from the aggregation results
     */
    public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) {
        this.includeExclude = includeExclude;
        return this;
    }
    /**
     * Get terms to include and exclude from the aggregation results
     */
    public IncludeExclude includeExclude() {
        return includeExclude;
    }
    /** Sets the significance heuristic; must not be null. */
    public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) {
        if (significanceHeuristic == null) {
            throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]");
        }
        this.significanceHeuristic = significanceHeuristic;
        return this;
    }
    public SignificanceHeuristic significanceHeuristic() {
        return significanceHeuristic;
    }
    @Override
    protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(SearchContext context, ValuesSourceConfig<ValuesSource> config,
                                                                        AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
        // Allow the heuristic to specialize itself for this search context before building.
        SignificanceHeuristic executionHeuristic = this.significanceHeuristic.rewrite(context);
        return new SignificantTermsAggregatorFactory(name, config, includeExclude, executionHint, filterBuilder,
                bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, metaData);
    }
    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        bucketCountThresholds.toXContent(builder, params);
        if (executionHint != null) {
            builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
        }
        if (filterBuilder != null) {
            builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder);
        }
        if (includeExclude != null) {
            includeExclude.toXContent(builder, params);
        }
        significanceHeuristic.toXContent(builder, params);
        return builder;
    }
    @Override
    protected int innerHashCode() {
        return Objects.hash(bucketCountThresholds, executionHint, filterBuilder, includeExclude, significanceHeuristic);
    }
    // Type and null checks are handled by the superclass equals() before this is called.
    @Override
    protected boolean innerEquals(Object obj) {
        SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj;
        return Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
                && Objects.equals(executionHint, other.executionHint)
                && Objects.equals(filterBuilder, other.filterBuilder)
                && Objects.equals(includeExclude, other.includeExclude)
                && Objects.equals(significanceHeuristic, other.significanceHeuristic);
    }
    @Override
    public String getType() {
        return NAME;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest.handler.async;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.rest.HttpMethodWrapper;
import org.apache.flink.runtime.rest.handler.HandlerRequest;
import org.apache.flink.runtime.rest.handler.HandlerRequestException;
import org.apache.flink.runtime.rest.handler.RestHandlerException;
import org.apache.flink.runtime.rest.messages.EmptyMessageParameters;
import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
import org.apache.flink.runtime.rest.messages.MessageHeaders;
import org.apache.flink.runtime.rest.messages.MessageParameters;
import org.apache.flink.runtime.rest.messages.MessagePathParameter;
import org.apache.flink.runtime.rest.messages.MessageQueryParameter;
import org.apache.flink.runtime.rest.messages.TriggerId;
import org.apache.flink.runtime.rest.messages.TriggerIdPathParameter;
import org.apache.flink.runtime.rest.messages.queue.QueueStatus;
import org.apache.flink.runtime.webmonitor.RestfulGateway;
import org.apache.flink.runtime.webmonitor.TestingRestfulGateway;
import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
* Tests for the {@link AbstractAsynchronousOperationHandlers}.
*/
public class AbstractAsynchronousOperationHandlersTest extends TestLogger {
	// Timeout used for all handlers constructed in setup().
	private static final Time TIMEOUT = Time.seconds(10L);
	// Handlers under test; (re)created fresh for every test in setup().
	private TestingAsynchronousOperationHandlers testingAsynchronousOperationHandlers;
	private TestingAsynchronousOperationHandlers.TestingTriggerHandler testingTriggerHandler;
	private TestingAsynchronousOperationHandlers.TestingStatusHandler testingStatusHandler;
@Before
public void setup() {
testingAsynchronousOperationHandlers = new TestingAsynchronousOperationHandlers();
testingTriggerHandler = testingAsynchronousOperationHandlers.new TestingTriggerHandler(
() -> null,
TIMEOUT,
Collections.emptyMap(),
TestingTriggerMessageHeaders.INSTANCE);
testingStatusHandler = testingAsynchronousOperationHandlers.new TestingStatusHandler(
() -> null,
TIMEOUT,
Collections.emptyMap(),
TestingStatusMessageHeaders.INSTANCE);
}
	/**
	 * Tests the triggering and successful completion of an asynchronous operation.
	 */
	@Test
	public void testOperationCompletion() throws Exception {
		final CompletableFuture<String> savepointFuture = new CompletableFuture<>();
		final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder()
			.setTriggerSavepointFunction((JobID jobId, String directory) -> savepointFuture)
			.build();
		// trigger the operation
		final TriggerId triggerId = testingTriggerHandler.handleRequest(
			triggerOperationRequest(),
			testingRestfulGateway).get().getTriggerId();
		// while savepointFuture is incomplete, the status handler must report "in progress"
		AsynchronousOperationResult<OperationResult> operationResult = testingStatusHandler.handleRequest(
			statusOperationRequest(triggerId),
			testingRestfulGateway).get();
		assertThat(operationResult.queueStatus().getId(), is(QueueStatus.inProgress().getId()));
		// complete the operation
		final String savepointPath = "foobar";
		savepointFuture.complete(savepointPath);
		// now the status handler must report completion and serve the result value
		operationResult = testingStatusHandler.handleRequest(
			statusOperationRequest(triggerId),
			testingRestfulGateway).get();
		assertThat(operationResult.queueStatus().getId(), is(QueueStatus.completed().getId()));
		assertThat(operationResult.resource().value, is(savepointPath));
	}
	/**
	 * Tests the triggering and exceptional completion of an asynchronous operation.
	 */
	@Test
	public void testOperationFailure() throws Exception {
		final FlinkException testException = new FlinkException("Test exception");
		final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder()
			.setTriggerSavepointFunction((JobID jobId, String directory) -> FutureUtils.completedExceptionally(testException))
			.build();
		// trigger the operation
		final TriggerId triggerId = testingTriggerHandler.handleRequest(
			triggerOperationRequest(),
			testingRestfulGateway).get().getTriggerId();
		AsynchronousOperationResult<OperationResult> operationResult = testingStatusHandler.handleRequest(
			statusOperationRequest(triggerId),
			testingRestfulGateway).get();
		// an exceptionally completed operation still counts as "completed" ...
		assertThat(operationResult.queueStatus().getId(), is(QueueStatus.completed().getId()));
		// ... with the failure cause exposed via the result's throwable field
		final OperationResult resource = operationResult.resource();
		assertThat(resource.throwable, is(testException));
	}
	/**
	 * Tests that querying an unknown trigger id will return an exceptionally completed
	 * future.
	 */
	@Test
	public void testUnknownTriggerId() throws Exception {
		final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder().build();
		try {
			// a fresh TriggerId was never registered with the handlers
			testingStatusHandler.handleRequest(
				statusOperationRequest(new TriggerId()),
				testingRestfulGateway).get();
			fail("This should have failed with a RestHandlerException.");
		} catch (ExecutionException ee) {
			final Optional<RestHandlerException> optionalRestHandlerException = ExceptionUtils.findThrowable(ee, RestHandlerException.class);
			assertThat(optionalRestHandlerException.isPresent(), is(true));
			final RestHandlerException restHandlerException = optionalRestHandlerException.get();
			assertThat(restHandlerException.getMessage(), containsString("Operation not found"));
			assertThat(restHandlerException.getHttpResponseStatus(), is(HttpResponseStatus.NOT_FOUND));
		}
	}
	/**
	 * Tests that the future returned by {@link AbstractAsynchronousOperationHandlers.StatusHandler#closeAsync()}
	 * completes when the result of the asynchronous operation is served.
	 */
	@Test
	public void testCloseShouldFinishOnFirstServedResult() throws Exception {
		final CompletableFuture<String> savepointFuture = new CompletableFuture<>();
		final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder()
			.setTriggerSavepointFunction((JobID jobId, String directory) -> savepointFuture)
			.build();
		final TriggerId triggerId = testingTriggerHandler.handleRequest(
			triggerOperationRequest(),
			testingRestfulGateway).get().getTriggerId();
		final CompletableFuture<Void> closeFuture = testingStatusHandler.closeAsync();
		// serving an in-progress status must NOT complete the close future
		testingStatusHandler.handleRequest(statusOperationRequest(triggerId), testingRestfulGateway).get();
		assertThat(closeFuture.isDone(), is(false));
		savepointFuture.complete("foobar");
		// serving the final result must complete the close future
		testingStatusHandler.handleRequest(statusOperationRequest(triggerId), testingRestfulGateway).get();
		assertThat(closeFuture.isDone(), is(true));
	}
	/** Creates an empty request for the trigger handler (no path or query parameters). */
	private static HandlerRequest<EmptyRequestBody, EmptyMessageParameters> triggerOperationRequest() throws HandlerRequestException {
		return new HandlerRequest<>(EmptyRequestBody.getInstance(), EmptyMessageParameters.getInstance());
	}
	/** Creates a status request carrying the given trigger id as a path parameter. */
	private static HandlerRequest<EmptyRequestBody, TriggerMessageParameters> statusOperationRequest(TriggerId triggerId) throws HandlerRequestException {
		return new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			new TriggerMessageParameters(),
			Collections.singletonMap(TriggerIdPathParameter.KEY, triggerId.toString()),
			Collections.emptyMap());
	}
	/** Minimal {@link OperationKey} used to key test operations by their trigger id. */
	private static final class TestOperationKey extends OperationKey {
		protected TestOperationKey(TriggerId triggerId) {
			super(triggerId);
		}
	}
private static final class TriggerMessageParameters extends MessageParameters {
private final TriggerIdPathParameter triggerIdPathParameter = new TriggerIdPathParameter();
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
return Collections.singleton(triggerIdPathParameter);
}
@Override
public Collection<MessageQueryParameter<?>> getQueryParameters() {
return Collections.emptyList();
}
}
private static final class OperationResult {
@Nullable
private final Throwable throwable;
@Nullable
private final String value;
OperationResult(@Nullable String value, @Nullable Throwable throwable) {
this.value = value;
this.throwable = throwable;
}
}
	/**
	 * Stub message headers for the trigger endpoint: a POST at {@code "barfoo"} taking an
	 * empty request body and returning HTTP 200. Descriptions are intentionally empty —
	 * they are irrelevant to the handler logic under test.
	 */
	private static final class TestingTriggerMessageHeaders extends AsynchronousOperationTriggerMessageHeaders<EmptyRequestBody, EmptyMessageParameters> {
		static final TestingTriggerMessageHeaders INSTANCE = new TestingTriggerMessageHeaders();
		private TestingTriggerMessageHeaders() {}
		@Override
		public HttpResponseStatus getResponseStatusCode() {
			return HttpResponseStatus.OK;
		}
		@Override
		public String getDescription() {
			return "";
		}
		@Override
		protected String getAsyncOperationDescription() {
			return "";
		}
		@Override
		public Class<EmptyRequestBody> getRequestClass() {
			return EmptyRequestBody.class;
		}
		@Override
		public EmptyMessageParameters getUnresolvedMessageParameters() {
			return EmptyMessageParameters.getInstance();
		}
		@Override
		public HttpMethodWrapper getHttpMethod() {
			// Triggering an operation mutates state, hence POST.
			return HttpMethodWrapper.POST;
		}
		@Override
		public String getTargetRestEndpointURL() {
			return "barfoo";
		}
	}
	/**
	 * Stub message headers for the status endpoint: a GET at {@code "foobar"} addressed by a
	 * trigger-id path parameter and answering with an {@link OperationResult} payload.
	 */
	private static final class TestingStatusMessageHeaders extends AsynchronousOperationStatusMessageHeaders<OperationResult, TriggerMessageParameters> {
		private static final TestingStatusMessageHeaders INSTANCE = new TestingStatusMessageHeaders();
		private TestingStatusMessageHeaders() {}
		@Override
		protected Class<OperationResult> getValueClass() {
			return OperationResult.class;
		}
		@Override
		public HttpResponseStatus getResponseStatusCode() {
			return HttpResponseStatus.OK;
		}
		@Override
		public Class<EmptyRequestBody> getRequestClass() {
			return EmptyRequestBody.class;
		}
		@Override
		public TriggerMessageParameters getUnresolvedMessageParameters() {
			return new TriggerMessageParameters();
		}
		@Override
		public HttpMethodWrapper getHttpMethod() {
			// Polling an operation's status is read-only, hence GET.
			return HttpMethodWrapper.GET;
		}
		@Override
		public String getTargetRestEndpointURL() {
			return "foobar";
		}
		@Override
		public String getDescription() {
			return "";
		}
	}
	/**
	 * Concrete {@link AbstractAsynchronousOperationHandlers} specialization under test, keyed by
	 * {@link TestOperationKey} with {@code String} operation results. The inner trigger/status
	 * handlers are deliberately non-static: they extend inner classes of the abstract base and
	 * therefore need its enclosing instance (shared operation cache).
	 */
	private static final class TestingAsynchronousOperationHandlers extends AbstractAsynchronousOperationHandlers<TestOperationKey, String> {
		/** Trigger side: starts a savepoint on the gateway and registers it under a fresh key. */
		class TestingTriggerHandler extends TriggerHandler<RestfulGateway, EmptyRequestBody, EmptyMessageParameters> {
			protected TestingTriggerHandler(GatewayRetriever<? extends RestfulGateway> leaderRetriever, Time timeout, Map<String, String> responseHeaders, MessageHeaders<EmptyRequestBody, TriggerResponse, EmptyMessageParameters> messageHeaders) {
				super(leaderRetriever, timeout, responseHeaders, messageHeaders);
			}
			@Override
			protected CompletableFuture<String> triggerOperation(HandlerRequest<EmptyRequestBody, EmptyMessageParameters> request, RestfulGateway gateway) throws RestHandlerException {
				// Job id and savepoint directory are irrelevant to the handler mechanics being tested.
				return gateway.triggerSavepoint(new JobID(), null, false, timeout);
			}
			@Override
			protected TestOperationKey createOperationKey(HandlerRequest<EmptyRequestBody, EmptyMessageParameters> request) {
				// Every trigger request gets a brand-new id.
				return new TestOperationKey(new TriggerId());
			}
		}
		/** Status side: looks the operation up by the trigger id from the request path. */
		class TestingStatusHandler extends StatusHandler<RestfulGateway, OperationResult, TriggerMessageParameters> {
			protected TestingStatusHandler(GatewayRetriever<? extends RestfulGateway> leaderRetriever, Time timeout, Map<String, String> responseHeaders, MessageHeaders<EmptyRequestBody, AsynchronousOperationResult<OperationResult>, TriggerMessageParameters> messageHeaders) {
				super(leaderRetriever, timeout, responseHeaders, messageHeaders);
			}
			@Override
			protected TestOperationKey getOperationKey(HandlerRequest<EmptyRequestBody, TriggerMessageParameters> request) {
				final TriggerId triggerId = request.getPathParameter(TriggerIdPathParameter.class);
				return new TestOperationKey(triggerId);
			}
			@Override
			protected OperationResult exceptionalOperationResultResponse(Throwable throwable) {
				return new OperationResult(null, throwable);
			}
			@Override
			protected OperationResult operationResultResponse(String operationResult) {
				return new OperationResult(operationResult, null);
			}
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
import javax.cache.CacheException;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteAtomicLong;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteClientDisconnectedException;
import org.apache.ignite.IgniteCompute;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteMessaging;
import org.apache.ignite.IgniteQueue;
import org.apache.ignite.IgniteSet;
import org.apache.ignite.IgniteTransactions;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.configuration.AtomicConfiguration;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.CollectionConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.managers.discovery.IgniteDiscoverySpi;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.spi.discovery.DiscoverySpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.junit.Test;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.ignite.events.EventType.EVT_CLIENT_NODE_DISCONNECTED;
import static org.apache.ignite.events.EventType.EVT_CLIENT_NODE_RECONNECTED;
/**
*
*/
public class IgniteClientReconnectApiExceptionTest extends IgniteClientReconnectAbstractTest {
/** Cache key for test put and invoke operation after reconnect */
private static final int CACHE_PUT_INVOKE_KEY = 10010;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
cfg.setCacheConfiguration(new CacheConfiguration(DEFAULT_CACHE_NAME));
return cfg;
}
    /** {@inheritDoc} */
    @Override protected int serverCount() {
        // One server is enough: the client is failed and reconnects to the same node.
        return 1;
    }
    /**
     * Runs the three disconnect/reconnect scenarios (cache, data-structure and Ignite-level APIs)
     * back to back, restarting the test topology between them.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testErrorOnDisconnect() throws Exception {
        // Check cache operations.
        cacheOperationsTest();
        // Check data structure operations.
        beforeTestsStarted();
        dataStructureOperationsTest();
        // Check ignite operations.
        beforeTestsStarted();
        igniteOperationsTest();
    }
    /**
     * Checks that data-structure APIs (atomic long, set, queue) first fail with
     * {@link IgniteClientDisconnectedException} while the client is disconnected and then
     * succeed after reconnect. Each pair is (operation to run during disconnect, validation
     * closure applied to the operation's post-reconnect result).
     *
     * @throws Exception If failed.
     */
    @SuppressWarnings("unchecked")
    private void dataStructureOperationsTest() throws Exception {
        clientMode = true;
        final Ignite client = startGrid(serverCount());
        doTestIgniteOperationOnDisconnect(client, Arrays.asList(
            // Check atomic long.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            // First attempt runs while disconnected and must fail.
                            client.atomicLong("testAtomic", 41, true);
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        // Retried after reconnect; this result is fed to the validation closure.
                        return client.atomicLong("testAtomic", 41, true);
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNotNull(o);
                        IgniteAtomicLong atomicLong = (IgniteAtomicLong)o;
                        // Initial value was 41, so the first increment yields 42.
                        assertEquals(42, atomicLong.incrementAndGet());
                        return true;
                    }
                }
            ),
            // Check set.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.set("testSet", getCollectionConfiguration());
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.set("testSet", getCollectionConfiguration());
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNotNull(o);
                        IgniteSet set = (IgniteSet)o;
                        String val = "testVal";
                        set.add(val);
                        assertEquals(1, set.size());
                        assertTrue(set.contains(val));
                        return true;
                    }
                }
            ),
            // Check ignite queue.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.queue("TestQueue", 10, getCollectionConfiguration());
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.queue("TestQueue", 10, getCollectionConfiguration());
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNotNull(o);
                        IgniteQueue queue = (IgniteQueue)o;
                        String val = "Test";
                        queue.add(val);
                        assertEquals(val, queue.poll());
                        return true;
                    }
                }
            )
        ));
        clientMode = false;
    }
    /**
     * Checks cache-related APIs (put/get/invoke, async put, transactions, cache lookup/creation,
     * data streamer) during disconnect and after reconnect. Cache operations are expected to throw
     * {@link CacheException} while disconnected, Ignite-level lookups
     * {@link IgniteClientDisconnectedException}; every operation is retried after reconnect and
     * its result validated by the paired closure.
     *
     * @throws Exception If failed.
     */
    private void cacheOperationsTest() throws Exception {
        clientMode = true;
        final Ignite client = startGrid(serverCount());
        final IgniteCache<Object, Object> dfltCache = client.cache(DEFAULT_CACHE_NAME);
        assertNotNull(dfltCache);
        doTestIgniteOperationOnDisconnect(client, Arrays.asList(
            // Check put and get operation.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            // First attempt runs while disconnected and must fail.
                            dfltCache.getAndPut(9999, 9999);
                        }
                        catch (CacheException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return dfltCache.getAndPut(9999, 9999);
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        // Null: the failed first put must not have stored anything.
                        assertNull(o);
                        assertEquals(9999, dfltCache.get(9999));
                        return true;
                    }
                }
            ),
            // Check put operation.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            dfltCache.put(10000, 10000);
                        }
                        catch (CacheException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        dfltCache.put(10000, 10000);
                        return true;
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertTrue((Boolean)o);
                        assertEquals(10000, dfltCache.get(10000));
                        return true;
                    }
                }
            ),
            // Check get operation.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            dfltCache.get(10001);
                        }
                        catch (CacheException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return dfltCache.get(10001);
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        // Key 10001 was never stored, so the post-reconnect get returns null.
                        assertNull(o);
                        return true;
                    }
                }
            ),
            // Check put and invoke operation.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            dfltCache.put(CACHE_PUT_INVOKE_KEY, 10000);
                            dfltCache.invoke(CACHE_PUT_INVOKE_KEY, new CacheEntryProcessor<Object, Object, Object>() {
                                @Override public Object process(MutableEntry<Object, Object> entry,
                                    Object... arguments) throws EntryProcessorException {
                                    assertTrue(entry.exists());
                                    return (int)entry.getValue() * 2;
                                }
                            });
                        }
                        catch (CacheException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        dfltCache.put(CACHE_PUT_INVOKE_KEY, 10000);
                        return dfltCache.invoke(CACHE_PUT_INVOKE_KEY, new CacheEntryProcessor<Object, Object, Object>() {
                            @Override public Object process(MutableEntry<Object, Object> entry,
                                Object... arguments) throws EntryProcessorException {
                                assertTrue(entry.exists());
                                return (int)entry.getValue() * 2;
                            }
                        });
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNotNull(o);
                        // Entry processor doubles the stored 10000.
                        assertEquals(20000, (int)o);
                        return true;
                    }
                }
            ),
            // Check put async operation.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            dfltCache.putAsync(10002, 10002).get();
                        }
                        catch (CacheException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return dfltCache.putAsync(10002, 10002).get();
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNull(o);
                        assertEquals(10002, dfltCache.get(10002));
                        return true;
                    }
                }
            ),
            // Check transaction.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.transactions();
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.transactions();
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        IgniteTransactions txs = (IgniteTransactions)o;
                        assertNotNull(txs);
                        return true;
                    }
                }
            ),
            // Check get cache.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.cache(DEFAULT_CACHE_NAME);
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.cache(DEFAULT_CACHE_NAME);
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        IgniteCache<Object, Object> cache0 = (IgniteCache<Object, Object>)o;
                        assertNotNull(cache0);
                        cache0.put(1, 1);
                        assertEquals(1, cache0.get(1));
                        return true;
                    }
                }
            ),
            // Check streamer.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.dataStreamer(DEFAULT_CACHE_NAME);
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.dataStreamer(DEFAULT_CACHE_NAME);
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        IgniteDataStreamer<Object, Object> streamer = (IgniteDataStreamer<Object, Object>)o;
                        streamer.addData(2, 2);
                        // Close flushes the streamed entry to the cache.
                        streamer.close();
                        assertEquals(2, client.cache(DEFAULT_CACHE_NAME).get(2));
                        return true;
                    }
                }
            ),
            // Check create cache.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.createCache("test_cache");
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.createCache("test_cache");
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        IgniteCache<Object, Object> cache = (IgniteCache<Object, Object>)o;
                        assertNotNull(cache);
                        cache.put(1, 1);
                        assertEquals(1, cache.get(1));
                        return true;
                    }
                }
            )
        ));
        clientMode = false;
    }
    /**
     * Checks Ignite-level APIs (compute broadcast, cluster ping, remote event/message listeners,
     * executor service) during disconnect and after reconnect. All of them are expected to throw
     * {@link IgniteClientDisconnectedException} while disconnected and work after reconnect.
     *
     * @throws Exception If failed.
     */
    private void igniteOperationsTest() throws Exception {
        clientMode = true;
        final Ignite client = startGrid(serverCount());
        final IgniteCache<Object, Object> dfltCache = client.cache(DEFAULT_CACHE_NAME);
        // Released by the remote message listener when the test message arrives.
        final CountDownLatch recvLatch = new CountDownLatch(1);
        assertNotNull(dfltCache);
        doTestIgniteOperationOnDisconnect(client, Arrays.asList(
            // Check compute.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            // First attempt runs while disconnected and must fail.
                            client.compute();
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.compute();
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        IgniteCompute comp = (IgniteCompute)o;
                        Collection<UUID> uuids = comp.broadcast(new IgniteCallable<UUID>() {
                            @IgniteInstanceResource
                            private Ignite ignite;
                            @Override public UUID call() throws Exception {
                                return ignite.cluster().localNode().id();
                            }
                        });
                        assertFalse(uuids.isEmpty());
                        for (UUID uuid : uuids)
                            assertNotNull(uuid);
                        return true;
                    }
                }
            ),
            // Check ping node.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.cluster().pingNode(new UUID(0, 0));
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.cluster().pingNode(new UUID(0, 0));
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        Boolean pingNode = (Boolean)o;
                        // UUID(0, 0) is not a real node, so the ping must report false.
                        assertFalse(pingNode);
                        return true;
                    }
                }
            ),
            // Check register remote listener.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.events().remoteListen(null, new IgnitePredicate<Event>() {
                                @Override public boolean apply(Event event) {
                                    return true;
                                }
                            });
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.events().remoteListen(null, new IgnitePredicate<Event>() {
                            @Override public boolean apply(Event event) {
                                return true;
                            }
                        });
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        UUID remoteId = (UUID)o;
                        assertNotNull(remoteId);
                        // Deregister so the listener does not leak into later scenarios.
                        client.events().stopRemoteListen(remoteId);
                        return true;
                    }
                }
            ),
            // Check message operation.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.message().remoteListen(null, new IgniteBiPredicate<UUID, Object>() {
                                @Override public boolean apply(UUID uuid, Object o) {
                                    if (o.equals("Test message."))
                                        recvLatch.countDown();
                                    return true;
                                }
                            });
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.message().remoteListen(null, new IgniteBiPredicate<UUID, Object>() {
                            @Override public boolean apply(UUID uuid, Object o) {
                                if (o.equals("Test message."))
                                    recvLatch.countDown();
                                return true;
                            }
                        });
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNotNull(o);
                        IgniteMessaging msg = client.message();
                        msg.send(null, "Test message.");
                        try {
                            assertTrue(recvLatch.await(2, SECONDS));
                        }
                        catch (InterruptedException ignored) {
                            fail("Message wasn't received.");
                        }
                        return true;
                    }
                }
            ),
            // Check executor.
            new T2<Callable, C1<Object, Boolean>>(
                new Callable() {
                    @Override public Object call() throws Exception {
                        boolean failed = false;
                        try {
                            client.executorService().submit(new Callable<Integer>() {
                                @Override public Integer call() throws Exception {
                                    return 42;
                                }
                            });
                        }
                        catch (IgniteClientDisconnectedException e) {
                            failed = true;
                            checkAndWait(e);
                        }
                        assertTrue(failed);
                        return client.executorService().submit(new Callable<Integer>() {
                            @Override public Integer call() throws Exception {
                                return 42;
                            }
                        });
                    }
                },
                new C1<Object, Boolean>() {
                    @Override public Boolean apply(Object o) {
                        assertNotNull(o);
                        Future<Integer> fut = (Future<Integer>)o;
                        try {
                            assertEquals(42, (int)fut.get());
                        }
                        catch (Exception ignored) {
                            fail("Failed submit task.");
                        }
                        return true;
                    }
                }
            )
        ));
        clientMode = false;
    }
    /**
     * Drives one disconnect/reconnect cycle: blocks the client's rejoin, fails the client node on
     * the server, starts every operation closure asynchronously on disconnect, verifies none of
     * them completes while reconnect is blocked, then unblocks the rejoin and checks each
     * operation's result with its paired validation closure.
     *
     * @param client Client.
     * @param ops Operations closures.
     * @throws Exception If failed.
     */
    @SuppressWarnings("unchecked")
    private void doTestIgniteOperationOnDisconnect(Ignite client, final List<T2<Callable, C1<Object, Boolean>>> ops)
        throws Exception {
        assertNotNull(client.cache(DEFAULT_CACHE_NAME));
        final IgniteDiscoverySpi clientSpi = spi0(client);
        Ignite srv = clientRouter(client);
        DiscoverySpi srvSpi = spi0(srv);
        final CountDownLatch disconnectLatch = new CountDownLatch(1);
        final CountDownLatch reconnectLatch = new CountDownLatch(1);
        log.info("Block reconnect.");
        DiscoverySpiTestListener lsnr = new DiscoverySpiTestListener();
        clientSpi.setInternalListener(lsnr);
        // Keep the client from rejoining until all operations have been started.
        lsnr.startBlockJoin();
        final List<IgniteInternalFuture> futs = new ArrayList<>();
        client.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                if (evt.type() == EVT_CLIENT_NODE_DISCONNECTED) {
                    info("Disconnected: " + evt);
                    assertEquals(1, reconnectLatch.getCount());
                    // Launch every operation while the client is disconnected.
                    for (T2<Callable, C1<Object, Boolean>> op : ops)
                        futs.add(GridTestUtils.runAsync(op.get1()));
                    disconnectLatch.countDown();
                }
                else if (evt.type() == EVT_CLIENT_NODE_RECONNECTED) {
                    info("Reconnected: " + evt);
                    reconnectLatch.countDown();
                }
                return true;
            }
        }, EVT_CLIENT_NODE_DISCONNECTED, EVT_CLIENT_NODE_RECONNECTED);
        try {
            log.info("Fail client.");
            srvSpi.failNode(client.cluster().localNode().id(), null);
            waitReconnectEvent(disconnectLatch);
            assertEquals(ops.size(), futs.size());
            for (IgniteInternalFuture<?> fut : futs)
                assertNotDone(fut);
            // Give the operations a chance to (incorrectly) finish before reconnect is allowed.
            U.sleep(2000);
            for (IgniteInternalFuture<?> fut : futs)
                assertNotDone(fut);
            log.info("Allow reconnect.");
            lsnr.stopBlockJoin();
            waitReconnectEvent(reconnectLatch);
            // Check operation after reconnect working.
            for (int i = 0; i < futs.size(); i++) {
                final int i0 = i;
                try {
                    final Object futRes = futs.get(i0).get(2, SECONDS);
                    assertTrue(GridTestUtils.runAsync(new Callable<Boolean>() {
                        @Override public Boolean call() throws Exception {
                            return ops.get(i0).get2().apply(futRes);
                        }
                    }).get(2, SECONDS));
                }
                catch (IgniteFutureTimeoutCheckedException e) {
                    e.printStackTrace();
                    fail("Operation timeout. Iteration: " + i + ".");
                }
            }
        }
        finally {
            // Unblock join even on failure so grid shutdown is not stuck.
            lsnr.stopBlockJoin();
            for (IgniteInternalFuture fut : futs)
                fut.cancel();
            stopAllGrids();
        }
    }
/** Get {@link CollectionConfiguration} with number of backups equal to {@link AtomicConfiguration} default */
private CollectionConfiguration getCollectionConfiguration() {
return new CollectionConfiguration().setBackups(AtomicConfiguration.DFLT_BACKUPS);
}
}
| |
/**
* Copyright 2014 Simeon GIROUSSE
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gote.importexport;
import java.io.File;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.joda.time.DateTime;
import com.gote.pojo.Game;
import com.gote.pojo.Player;
import com.gote.pojo.Round;
import com.gote.pojo.Tournament;
import com.gote.pojo.TournamentRules;
import com.gote.util.ImportExportUtil;
import com.gote.util.xml.TournamentOpenGothaUtil;
/**
*
* Class to import tournament from OpenGotha file. It has the following structure :
*
* <pre>
* <Tournament >
* <Players>
* <Player agaExpirationDate="" agaId="" club="BuPe" country="HU" egfPin="16237463" ffgLicence="" ffgLicenceStatus="" firstName="Miklos" grade="12K" name="Csizmadia" participating="11111000001111111111" rank="11K" rating="1035" ratingOrigin="EGF" registeringStatus="FIN" smmsCorrection="0"/>
* </Players>
* <Games>
* <Game blackPlayer="ARFFMANRIINA" handicap="0" knownColor="true" result="RESULT_WHITEWINS" roundNumber="8" tableNumber="201" whitePlayer="SCHRAMMCHRISTINA"/>
* </Games>
* <TournamentParameterSet>
* <GeneralParameter SetbasicTime="150" beginDate="2013-07-28" canByoYomiTime="300" complementaryTimeSystem="STDBYOYOMI" director="" endDate="2013-08-10" fischerTime="10" genMMBar="4D" genMMFloor="20K" genMMS2ValueAbsent="1" genMMS2ValueBye="2" genNBW2ValueAbsent="0" genNBW2ValueBye="2" genRoundDownNBWMMS="true" komi="6.5" location="Olsztyn" name="EGC2013main" nbMovesCanTime="15" numberOfCategories="1" numberOfRounds="10" shortName="EGC2013" size="19" stdByoYomiTime="60" />
* <HandicapParameterSet hdBasedOnMMS="true" hdCeiling="9" hdCorrection="3" hdNoHdRankThreshold="30K"/>
* </TournamentParameterSet>
* <Tournament>
* </pre>
*
* @author SGirousse
*/
public class ImportTournamentFromOpenGotha extends ImportTournament {
/** Class logger */
private static Logger LOGGER = Logger.getLogger(ImportTournamentFromOpenGotha.class.getName());
@Override
public Tournament createTournamentFromConfig(File pFile) {
LOGGER.log(Level.INFO, "A new tournament is going to be created from the file : " + pFile.getPath());
Tournament tournament = new Tournament();
String content = ImportExportUtil.getFileContent(pFile);
if (content == null) {
LOGGER.log(Level.SEVERE, "File \"" + pFile.getPath() + "\" content is null");
return null;
}
SAXReader reader = new SAXReader();
Document document;
try {
document = reader.read(new StringReader(content));
} catch (DocumentException e) {
LOGGER.log(Level.SEVERE, "DocumentException, creation stopped : " + e);
return null;
}
Element pElementTournament = document.getRootElement();
boolean initSuccess = initTournament(tournament, pElementTournament);
if (initSuccess) {
return tournament;
} else {
return null;
}
}
public boolean initTournament(Tournament pTournament, Element pElementTournament) {
boolean init = false;
Element elTournamentRulesSet = null;
Element elPlayers = null;
Element elGames = null;
elTournamentRulesSet = pElementTournament.element(TournamentOpenGothaUtil.TAG_TOURNAMENT_PARAMETER_SET);
init = elTournamentRulesSet != null;
if (init) {
init = initTournamentRules(pTournament, elTournamentRulesSet);
} else {
LOGGER.log(Level.SEVERE, "While getting tournament parameters from open gotha file a problem occured.");
return false;
}
if (init) {
elPlayers = pElementTournament.element(TournamentOpenGothaUtil.TAG_PLAYERS);
init = elPlayers != null;
} else {
LOGGER.log(Level.SEVERE, "During tournament rules initialization a problem occured.");
return false;
}
if (init) {
init = initPlayers(pTournament, elPlayers);
} else {
LOGGER.log(Level.SEVERE, "While getting players data from open gotha file a problem occured.");
return false;
}
if (init) {
elGames = pElementTournament.element(TournamentOpenGothaUtil.TAG_GAMES);
init = elGames != null;
} else {
LOGGER.log(Level.SEVERE, "During players initialization a problem occured.");
return false;
}
if (init) {
init = initRounds(pTournament, elGames);
} else {
LOGGER.log(Level.SEVERE, "While getting games data from open gotha file a problem occured.");
return false;
}
return init;
}
/**
* Init tournament rules
*
* @param pTournament Tournament being builded
* @param pElementTournamentRulesSet Element in OpenGotha document
* @return boolean, true if everything worked as expected
*/
public boolean initTournamentRules(Tournament pTournament, Element pElementTournamentRulesSet) {
LOGGER.log(Level.INFO, "Tournament rules initialization");
boolean init = true;
TournamentRules tournamentRules = new TournamentRules();
Element elementGeneralParameters = pElementTournamentRulesSet
.element(TournamentOpenGothaUtil.TAG_GENERAL_PARAMETER_SET);
if (elementGeneralParameters == null) {
return false;
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_KOMI) != null) {
tournamentRules.setKomi(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_KOMI).getValue());
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_SIZE) != null) {
tournamentRules.setSize(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_SIZE).getValue());
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_TIME_SYSTEM) != null) {
tournamentRules.setTimeSystem(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_TIME_SYSTEM).getValue());
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BASIC_TIME) != null) {
tournamentRules.setBasicTime(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BASIC_TIME).getValue());
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BYOYOMI_TIME) != null) {
tournamentRules.setByoYomiDuration(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BYOYOMI_TIME).getValue());
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BYOYOMI_COUNT) != null) {
tournamentRules.setNumberOfByoYomi(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BYOYOMI_COUNT).getValue());
}
pTournament.setTournamentRules(tournamentRules);
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_NAME) != null) {
pTournament.setTitle(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_NAME).getValue());
} else {
LOGGER.log(Level.WARNING, "No title found for the tournament");
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BEGIN_DATE) != null) {
pTournament.setStartDate(new DateTime(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_BEGIN_DATE).getValue()));
} else {
LOGGER.log(Level.WARNING, "No start date tournament");
}
if (elementGeneralParameters.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_END_DATE) != null) {
pTournament.setEndDate(new DateTime(elementGeneralParameters.attribute(
TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_END_DATE).getValue()));
} else {
LOGGER.log(Level.WARNING, "No end date tournament");
}
// String numberOfRounds = elementGeneralParameters.attribute(
// TournamentOpenGothaUtil.ATTRIBUTE_GENERAL_PARAMETER_SET_ROUND_NUMBER).getValue();
return init;
}
/**
* Init players list
*
* @param pTournament Tournament being builded
* @param pElementPlayers Element in OpenGotha document
* @return boolean, true if everything worked as expected
*/
public boolean initPlayers(Tournament pTournament, Element pElementPlayers) {
LOGGER.log(Level.INFO, "Players initialization");
boolean init = false;
@SuppressWarnings("unchecked")
List<Element> listOfPlayers = (List<Element>) pElementPlayers.elements(TournamentOpenGothaUtil.TAG_PLAYER);
init = (listOfPlayers != null && !listOfPlayers.isEmpty());
if (init) {
List<Player> tournamentPlayers = new ArrayList<Player>();
for (Element playerElement : listOfPlayers) {
Player player = new Player();
player.setPseudo(playerElement.attribute(TournamentOpenGothaUtil.ATTRIBUTE_PLAYER_NAME).getValue());
player.setFirstname(playerElement.attribute(TournamentOpenGothaUtil.ATTRIBUTE_PLAYER_FIRSTNAME).getValue());
player.setRank(playerElement.attribute(TournamentOpenGothaUtil.ATTRIBUTE_PLAYER_RANK).getValue());
tournamentPlayers.add(player);
}
pTournament.setParticipantsList(tournamentPlayers);
}
return init;
}
/**
 * Init rounds and games.
 * <p>
 * Each {@code game} element is attached to the round whose number matches its
 * round attribute; the round is created on first use. Game elements without a
 * parseable round number are logged and skipped instead of aborting the import.
 *
 * @param pTournament Tournament being builded
 * @param pElementGames Element in OpenGotha document
 * @return boolean, true if everything worked as expected
 */
public boolean initRounds(Tournament pTournament, Element pElementGames) {
    LOGGER.log(Level.INFO, "Rounds initialization");
    @SuppressWarnings("unchecked")
    List<Element> listOfGames = (List<Element>) pElementGames.elements(TournamentOpenGothaUtil.TAG_GAME);
    boolean init = (listOfGames != null && !listOfGames.isEmpty());
    if (init) {
        List<Round> tournamentRounds = new ArrayList<Round>();
        for (Element gameElement : listOfGames) {
            // Check round game number, if it exists, add the game to that round;
            // if the round does not exist yet, create it first.
            String roundNumberAsText = gameElement.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GAME_ROUND).getValue();
            if (roundNumberAsText == null || roundNumberAsText.isEmpty()) {
                // BUG FIX: the previous code logged this case but then still called
                // new Integer(roundNumberAsText), crashing on a null/empty value.
                LOGGER.log(Level.WARNING, "No round number in configuration file. Line is : " + gameElement.toString());
                continue;
            }
            int roundNumber;
            try {
                roundNumber = Integer.parseInt(roundNumberAsText);
            } catch (NumberFormatException e) {
                LOGGER.log(Level.WARNING, "Invalid round number in configuration file. Line is : " + gameElement.toString());
                continue;
            }
            int roundPlacement = getRoundPlacement(tournamentRounds, roundNumber);
            List<Game> games = new ArrayList<Game>();
            Round round = new Round();
            if (roundPlacement > -1) {
                // Existing round: append to its game list.
                games = tournamentRounds.get(roundPlacement).getGameList();
            } else {
                LOGGER.log(Level.INFO, "Round " + roundNumberAsText + " is new and will be created.");
                round.setNumber(roundNumber);
            }
            Game game = new Game();
            Player black = pTournament.getParticipantWithCompleteName(gameElement.attribute(
                    TournamentOpenGothaUtil.ATTRIBUTE_GAME_BLACK_PLAYER).getValue());
            Player white = pTournament.getParticipantWithCompleteName(gameElement.attribute(
                    TournamentOpenGothaUtil.ATTRIBUTE_GAME_WHITE_PLAYER).getValue());
            String result = gameElement.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GAME_RESULT).getValue();
            String handicap = gameElement.attribute(TournamentOpenGothaUtil.ATTRIBUTE_GAME_HANDICAP).getValue();
            game.setBlack(black);
            game.setWhite(white);
            game.setResult(result);
            game.setHandicap(handicap);
            games.add(game);
            if (roundPlacement < 0) {
                // Newly created round: give it its game list, the tournament dates,
                // and register it with the tournament rounds.
                round.setGameList(games);
                round.setDateStart(pTournament.getStartDate());
                round.setDateEnd(pTournament.getEndDate());
                tournamentRounds.add(round);
            }
        }
        pTournament.setRounds(tournamentRounds);
    }
    return init;
}
/**
 * Get the round placement in the list with its number. Return -1 if no round has been found with
 * that number.
 *
 * @param pListOfRounds List<Round> of the tournament
 * @param pRoundNumber Round number used to find the Round placement
 * @return Position of the round, or -1 when absent
 */
public int getRoundPlacement(List<Round> pListOfRounds, int pRoundNumber) {
    int position = 0;
    for (Round round : pListOfRounds) {
        if (round.getNumber() == pRoundNumber) {
            return position;
        }
        position++;
    }
    return -1;
}
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server.usb;
import android.app.PendingIntent;
import android.app.admin.DevicePolicyManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.hardware.usb.IUsbManager;
import android.hardware.usb.UsbAccessory;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbManager;
import android.hardware.usb.UsbPort;
import android.hardware.usb.UsbPortStatus;
import android.os.Binder;
import android.os.Bundle;
import android.os.ParcelFileDescriptor;
import android.os.UserHandle;
import android.os.UserManager;
import android.util.Slog;
import android.util.SparseArray;
import com.android.internal.annotations.GuardedBy;
import com.android.internal.util.IndentingPrintWriter;
import com.android.internal.util.Preconditions;
import com.android.server.SystemService;
import java.io.File;
import java.io.FileDescriptor;
import java.io.PrintWriter;
/**
 * UsbService manages all USB related state, including both host and device support.
 * Host related events and calls are delegated to UsbHostManager, and device related
 * support is delegated to UsbDeviceManager.
 */
public class UsbService extends IUsbManager.Stub {

    /**
     * SystemService lifecycle adapter: creates the UsbService, publishes its binder,
     * and forwards the relevant boot phases to it.
     */
    public static class Lifecycle extends SystemService {
        private UsbService mUsbService;

        public Lifecycle(Context context) {
            super(context);
        }

        @Override
        public void onStart() {
            mUsbService = new UsbService(getContext());
            publishBinderService(Context.USB_SERVICE, mUsbService);
        }

        @Override
        public void onBootPhase(int phase) {
            // Two phases matter: activity-manager-ready wires up the managers,
            // boot-completed lets device mode apply any deferred configuration.
            if (phase == SystemService.PHASE_ACTIVITY_MANAGER_READY) {
                mUsbService.systemReady();
            } else if (phase == SystemService.PHASE_BOOT_COMPLETED) {
                mUsbService.bootCompleted();
            }
        }
    }

    private static final String TAG = "UsbService";

    private final Context mContext;

    // Sub-managers; each is null when the corresponding capability is absent
    // (no host support, no device-mode sysfs node, neither for ports).
    private UsbDeviceManager mDeviceManager;
    private UsbHostManager mHostManager;
    private UsbPortManager mPortManager;
    private final UsbAlsaManager mAlsaManager;

    private final Object mLock = new Object();

    /** Map from {@link UserHandle} to {@link UsbSettingsManager} */
    @GuardedBy("mLock")
    private final SparseArray<UsbSettingsManager>
            mSettingsByUser = new SparseArray<UsbSettingsManager>();

    /**
     * Returns the per-user USB settings, lazily creating them on first access.
     * Entries are evicted when the user is stopped (see mReceiver).
     */
    private UsbSettingsManager getSettingsForUser(int userId) {
        synchronized (mLock) {
            UsbSettingsManager settings = mSettingsByUser.get(userId);
            if (settings == null) {
                settings = new UsbSettingsManager(mContext, new UserHandle(userId));
                mSettingsByUser.put(userId, settings);
            }
            return settings;
        }
    }

    public UsbService(Context context) {
        mContext = context;
        mAlsaManager = new UsbAlsaManager(context);

        final PackageManager pm = mContext.getPackageManager();
        // Host support is advertised as a system feature.
        if (pm.hasSystemFeature(PackageManager.FEATURE_USB_HOST)) {
            mHostManager = new UsbHostManager(context, mAlsaManager);
        }
        // Device (gadget) support is detected from the kernel's android_usb sysfs node.
        if (new File("/sys/class/android_usb").exists()) {
            mDeviceManager = new UsbDeviceManager(context, mAlsaManager);
        }
        if (mHostManager != null || mDeviceManager != null) {
            mPortManager = new UsbPortManager(context);
        }

        setCurrentUser(UserHandle.USER_OWNER);

        // Track user switches/stops and device-policy changes so per-user settings
        // and restrictions stay current.
        final IntentFilter filter = new IntentFilter();
        filter.setPriority(IntentFilter.SYSTEM_HIGH_PRIORITY);
        filter.addAction(Intent.ACTION_USER_SWITCHED);
        filter.addAction(Intent.ACTION_USER_STOPPED);
        filter.addAction(DevicePolicyManager.ACTION_DEVICE_POLICY_MANAGER_STATE_CHANGED);
        mContext.registerReceiver(mReceiver, filter, null, null);
    }

    private BroadcastReceiver mReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            final int userId = intent.getIntExtra(Intent.EXTRA_USER_HANDLE, -1);
            final String action = intent.getAction();
            if (Intent.ACTION_USER_SWITCHED.equals(action)) {
                setCurrentUser(userId);
            } else if (Intent.ACTION_USER_STOPPED.equals(action)) {
                // Drop cached settings for stopped users; they are recreated on demand.
                synchronized (mLock) {
                    mSettingsByUser.remove(userId);
                }
            } else if (DevicePolicyManager.ACTION_DEVICE_POLICY_MANAGER_STATE_CHANGED
                    .equals(action)) {
                if (mDeviceManager != null) {
                    mDeviceManager.updateUserRestrictions();
                }
            }
        }
    };

    /** Points the host and device managers at the new foreground user's settings. */
    private void setCurrentUser(int userId) {
        final UsbSettingsManager userSettings = getSettingsForUser(userId);
        if (mHostManager != null) {
            mHostManager.setCurrentSettings(userSettings);
        }
        if (mDeviceManager != null) {
            mDeviceManager.setCurrentUser(userId, userSettings);
        }
    }

    /** Forwards the activity-manager-ready boot phase to every present sub-manager. */
    public void systemReady() {
        mAlsaManager.systemReady();

        if (mDeviceManager != null) {
            mDeviceManager.systemReady();
        }
        if (mHostManager != null) {
            mHostManager.systemReady();
        }
        if (mPortManager != null) {
            mPortManager.systemReady();
        }
    }

    /** Forwards the boot-completed phase; only device mode cares about it here. */
    public void bootCompleted() {
        if (mDeviceManager != null) {
            mDeviceManager.bootCompleted();
        }
    }

    /* Returns a list of all currently attached USB devices (host mode) */
    @Override
    public void getDeviceList(Bundle devices) {
        if (mHostManager != null) {
            mHostManager.getDeviceList(devices);
        }
    }

    /* Opens the specified USB device (host mode) */
    @Override
    public ParcelFileDescriptor openDevice(String deviceName) {
        if (mHostManager != null) {
            return mHostManager.openDevice(deviceName);
        } else {
            return null;
        }
    }

    /* returns the currently attached USB accessory (device mode) */
    @Override
    public UsbAccessory getCurrentAccessory() {
        if (mDeviceManager != null) {
            return mDeviceManager.getCurrentAccessory();
        } else {
            return null;
        }
    }

    /* opens the currently attached USB accessory (device mode) */
    @Override
    public ParcelFileDescriptor openAccessory(UsbAccessory accessory) {
        if (mDeviceManager != null) {
            return mDeviceManager.openAccessory(accessory);
        } else {
            return null;
        }
    }

    /** Sets the default-handler package for a device; requires MANAGE_USB. */
    @Override
    public void setDevicePackage(UsbDevice device, String packageName, int userId) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        getSettingsForUser(userId).setDevicePackage(device, packageName);
    }

    /** Sets the default-handler package for an accessory; requires MANAGE_USB. */
    @Override
    public void setAccessoryPackage(UsbAccessory accessory, String packageName, int userId) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        getSettingsForUser(userId).setAccessoryPackage(accessory, packageName);
    }

    // Permission queries/requests below operate on the *calling* user's settings.
    @Override
    public boolean hasDevicePermission(UsbDevice device) {
        final int userId = UserHandle.getCallingUserId();
        return getSettingsForUser(userId).hasPermission(device);
    }

    @Override
    public boolean hasAccessoryPermission(UsbAccessory accessory) {
        final int userId = UserHandle.getCallingUserId();
        return getSettingsForUser(userId).hasPermission(accessory);
    }

    @Override
    public void requestDevicePermission(UsbDevice device, String packageName, PendingIntent pi) {
        final int userId = UserHandle.getCallingUserId();
        getSettingsForUser(userId).requestPermission(device, packageName, pi);
    }

    @Override
    public void requestAccessoryPermission(
            UsbAccessory accessory, String packageName, PendingIntent pi) {
        final int userId = UserHandle.getCallingUserId();
        getSettingsForUser(userId).requestPermission(accessory, packageName, pi);
    }

    // Grant calls take a uid, so the target user is derived from it; MANAGE_USB required.
    @Override
    public void grantDevicePermission(UsbDevice device, int uid) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        final int userId = UserHandle.getUserId(uid);
        getSettingsForUser(userId).grantDevicePermission(device, uid);
    }

    @Override
    public void grantAccessoryPermission(UsbAccessory accessory, int uid) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        final int userId = UserHandle.getUserId(uid);
        getSettingsForUser(userId).grantAccessoryPermission(accessory, uid);
    }

    @Override
    public boolean hasDefaults(String packageName, int userId) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        return getSettingsForUser(userId).hasDefaults(packageName);
    }

    @Override
    public void clearDefaults(String packageName, int userId) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        getSettingsForUser(userId).clearDefaults(packageName);
    }

    @Override
    public boolean isFunctionEnabled(String function) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        return mDeviceManager != null && mDeviceManager.isFunctionEnabled(function);
    }

    /**
     * Switches the current device-mode USB function. Unsupported functions are
     * downgraded to USB_FUNCTION_NONE with a warning rather than rejected.
     */
    @Override
    public void setCurrentFunction(String function) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);

        if (!isSupportedCurrentFunction(function)) {
            Slog.w(TAG, "Caller of setCurrentFunction() requested unsupported USB function: "
                    + function);
            function = UsbManager.USB_FUNCTION_NONE;
        }

        if (mDeviceManager != null) {
            mDeviceManager.setCurrentFunctions(function);
        } else {
            throw new IllegalStateException("USB device mode not supported");
        }
    }

    /** Whitelist of functions callers may switch to; null means "default". */
    private static boolean isSupportedCurrentFunction(String function) {
        if (function == null) return true;

        switch (function) {
            case UsbManager.USB_FUNCTION_NONE:
            case UsbManager.USB_FUNCTION_AUDIO_SOURCE:
            case UsbManager.USB_FUNCTION_MIDI:
            case UsbManager.USB_FUNCTION_MTP:
            case UsbManager.USB_FUNCTION_PTP:
            case UsbManager.USB_FUNCTION_RNDIS:
                return true;
        }

        return false;
    }

    // NOTE(review): unlike setCurrentFunction(), the four methods below dereference
    // mDeviceManager without a null check and will NPE on hosts without device-mode
    // support — confirm whether callers are guaranteed device mode here.
    @Override
    public void setUsbDataUnlocked(boolean unlocked) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        mDeviceManager.setUsbDataUnlocked(unlocked);
    }

    @Override
    public void allowUsbDebugging(boolean alwaysAllow, String publicKey) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        mDeviceManager.allowUsbDebugging(alwaysAllow, publicKey);
    }

    @Override
    public void denyUsbDebugging() {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        mDeviceManager.denyUsbDebugging();
    }

    @Override
    public void clearUsbDebuggingKeys() {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);
        mDeviceManager.clearUsbDebuggingKeys();
    }

    // Port queries clear the calling identity so downstream checks run as system.
    @Override
    public UsbPort[] getPorts() {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);

        final long ident = Binder.clearCallingIdentity();
        try {
            return mPortManager != null ? mPortManager.getPorts() : null;
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override
    public UsbPortStatus getPortStatus(String portId) {
        Preconditions.checkNotNull(portId, "portId must not be null");
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);

        final long ident = Binder.clearCallingIdentity();
        try {
            return mPortManager != null ? mPortManager.getPortStatus(portId) : null;
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override
    public void setPortRoles(String portId, int powerRole, int dataRole) {
        Preconditions.checkNotNull(portId, "portId must not be null");
        UsbPort.checkRoles(powerRole, dataRole);
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.MANAGE_USB, null);

        final long ident = Binder.clearCallingIdentity();
        try {
            if (mPortManager != null) {
                mPortManager.setPortRoles(portId, powerRole, dataRole, null);
            }
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    /**
     * Dumps USB state, or executes a debugging sub-command ("set-port-roles",
     * "add-port", "connect-port", "disconnect-port", "remove-port", "reset",
     * "ports") for simulating type-C port behavior via dumpsys. Requires DUMP.
     */
    @Override
    public void dump(FileDescriptor fd, PrintWriter writer, String[] args) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.DUMP, TAG);

        final IndentingPrintWriter pw = new IndentingPrintWriter(writer, "  ");
        final long ident = Binder.clearCallingIdentity();
        try {
            if (args == null || args.length == 0 || "-a".equals(args[0])) {
                // Default path: full state dump of every manager and per-user settings.
                pw.println("USB Manager State:");
                pw.increaseIndent();
                if (mDeviceManager != null) {
                    mDeviceManager.dump(pw);
                }
                if (mHostManager != null) {
                    mHostManager.dump(pw);
                }
                if (mPortManager != null) {
                    mPortManager.dump(pw);
                }
                mAlsaManager.dump(pw);

                synchronized (mLock) {
                    for (int i = 0; i < mSettingsByUser.size(); i++) {
                        final int userId = mSettingsByUser.keyAt(i);
                        final UsbSettingsManager settings = mSettingsByUser.valueAt(i);
                        pw.println("Settings for user " + userId + ":");
                        pw.increaseIndent();
                        settings.dump(pw);
                        pw.decreaseIndent();
                    }
                }
            } else if (args.length == 4 && "set-port-roles".equals(args[0])) {
                final String portId = args[1];
                final int powerRole;
                switch (args[2]) {
                    case "source":
                        powerRole = UsbPort.POWER_ROLE_SOURCE;
                        break;
                    case "sink":
                        powerRole = UsbPort.POWER_ROLE_SINK;
                        break;
                    case "no-power":
                        powerRole = 0;
                        break;
                    default:
                        pw.println("Invalid power role: " + args[2]);
                        return;
                }
                final int dataRole;
                switch (args[3]) {
                    case "host":
                        dataRole = UsbPort.DATA_ROLE_HOST;
                        break;
                    case "device":
                        dataRole = UsbPort.DATA_ROLE_DEVICE;
                        break;
                    case "no-data":
                        dataRole = 0;
                        break;
                    default:
                        pw.println("Invalid data role: " + args[3]);
                        return;
                }
                if (mPortManager != null) {
                    mPortManager.setPortRoles(portId, powerRole, dataRole, pw);
                    // Note: It might take some time for the side-effects of this operation
                    // to be fully applied by the kernel since the driver may need to
                    // renegotiate the USB port mode.  If this proves to be an issue
                    // during debugging, it might be worth adding a sleep here before
                    // dumping the new state.
                    pw.println();
                    mPortManager.dump(pw);
                }
            } else if (args.length == 3 && "add-port".equals(args[0])) {
                final String portId = args[1];
                final int supportedModes;
                switch (args[2]) {
                    case "ufp":
                        supportedModes = UsbPort.MODE_UFP;
                        break;
                    case "dfp":
                        supportedModes = UsbPort.MODE_DFP;
                        break;
                    case "dual":
                        supportedModes = UsbPort.MODE_DUAL;
                        break;
                    case "none":
                        supportedModes = 0;
                        break;
                    default:
                        pw.println("Invalid mode: " + args[2]);
                        return;
                }
                if (mPortManager != null) {
                    mPortManager.addSimulatedPort(portId, supportedModes, pw);
                    pw.println();
                    mPortManager.dump(pw);
                }
            } else if (args.length == 5 && "connect-port".equals(args[0])) {
                final String portId = args[1];
                // A trailing '?' on a mode/role argument means "can be changed later".
                final int mode;
                final boolean canChangeMode = args[2].endsWith("?");
                switch (canChangeMode ? removeLastChar(args[2]) : args[2]) {
                    case "ufp":
                        mode = UsbPort.MODE_UFP;
                        break;
                    case "dfp":
                        mode = UsbPort.MODE_DFP;
                        break;
                    default:
                        pw.println("Invalid mode: " + args[2]);
                        return;
                }
                final int powerRole;
                final boolean canChangePowerRole = args[3].endsWith("?");
                switch (canChangePowerRole ? removeLastChar(args[3]) : args[3]) {
                    case "source":
                        powerRole = UsbPort.POWER_ROLE_SOURCE;
                        break;
                    case "sink":
                        powerRole = UsbPort.POWER_ROLE_SINK;
                        break;
                    default:
                        pw.println("Invalid power role: " + args[3]);
                        return;
                }
                final int dataRole;
                final boolean canChangeDataRole = args[4].endsWith("?");
                switch (canChangeDataRole ? removeLastChar(args[4]) : args[4]) {
                    case "host":
                        dataRole = UsbPort.DATA_ROLE_HOST;
                        break;
                    case "device":
                        dataRole = UsbPort.DATA_ROLE_DEVICE;
                        break;
                    default:
                        pw.println("Invalid data role: " + args[4]);
                        return;
                }
                if (mPortManager != null) {
                    mPortManager.connectSimulatedPort(portId, mode, canChangeMode,
                            powerRole, canChangePowerRole, dataRole, canChangeDataRole, pw);
                    pw.println();
                    mPortManager.dump(pw);
                }
            } else if (args.length == 2 && "disconnect-port".equals(args[0])) {
                final String portId = args[1];
                if (mPortManager != null) {
                    mPortManager.disconnectSimulatedPort(portId, pw);
                    pw.println();
                    mPortManager.dump(pw);
                }
            } else if (args.length == 2 && "remove-port".equals(args[0])) {
                final String portId = args[1];
                if (mPortManager != null) {
                    mPortManager.removeSimulatedPort(portId, pw);
                    pw.println();
                    mPortManager.dump(pw);
                }
            } else if (args.length == 1 && "reset".equals(args[0])) {
                if (mPortManager != null) {
                    mPortManager.resetSimulation(pw);
                    pw.println();
                    mPortManager.dump(pw);
                }
            } else if (args.length == 1 && "ports".equals(args[0])) {
                if (mPortManager != null) {
                    mPortManager.dump(pw);
                }
            } else {
                // Unknown command: print usage help with worked examples.
                pw.println("Dump current USB state or issue command:");
                pw.println("  ports");
                pw.println("  set-port-roles <id> <source|sink|no-power> <host|device|no-data>");
                pw.println("  add-port <id> <ufp|dfp|dual|none>");
                pw.println("  connect-port <id> <ufp|dfp><?> <source|sink><?> <host|device><?>");
                pw.println("    (add ? suffix if mode, power role, or data role can be changed)");
                pw.println("  disconnect-port <id>");
                pw.println("  remove-port <id>");
                pw.println("  reset");
                pw.println();
                pw.println("Example USB type C port role switch:");
                pw.println("  dumpsys usb set-port-roles \"default\" source device");
                pw.println();
                pw.println("Example USB type C port simulation with full capabilities:");
                pw.println("  dumpsys usb add-port \"matrix\" dual");
                pw.println("  dumpsys usb connect-port \"matrix\" ufp? sink? device?");
                pw.println("  dumpsys usb ports");
                pw.println("  dumpsys usb disconnect-port \"matrix\"");
                pw.println("  dumpsys usb remove-port \"matrix\"");
                pw.println("  dumpsys usb reset");
                pw.println();
                pw.println("Example USB type C port where only power role can be changed:");
                pw.println("  dumpsys usb add-port \"matrix\" dual");
                pw.println("  dumpsys usb connect-port \"matrix\" dfp source? host");
                pw.println("  dumpsys usb reset");
                pw.println();
                pw.println("Example USB OTG port where id pin determines function:");
                pw.println("  dumpsys usb add-port \"matrix\" dual");
                pw.println("  dumpsys usb connect-port \"matrix\" dfp source host");
                pw.println("  dumpsys usb reset");
                pw.println();
                pw.println("Example USB device-only port:");
                pw.println("  dumpsys usb add-port \"matrix\" ufp");
                pw.println("  dumpsys usb connect-port \"matrix\" ufp sink device");
                pw.println("  dumpsys usb reset");
            }
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    /** Strips the trailing character (used to drop the '?' suffix of dump args). */
    private static final String removeLastChar(String value) {
        return value.substring(0, value.length() - 1);
    }
}
| |
/* Copyright 2014-2016 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arm.wlauto.uiauto.gmail;
import android.os.Bundle;
import android.support.test.runner.AndroidJUnit4;
import android.support.test.uiautomator.UiObject;
import android.support.test.uiautomator.UiObjectNotFoundException;
import android.support.test.uiautomator.UiSelector;
import com.arm.wlauto.uiauto.ApplaunchInterface;
import com.arm.wlauto.uiauto.UiAutoUtils;
import com.arm.wlauto.uiauto.UxPerfUiAutomation;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.TimeUnit;
// Import the uiautomator libraries
@RunWith(AndroidJUnit4.class)
public class UiAutomation extends UxPerfUiAutomation implements ApplaunchInterface {

    private int networkTimeoutSecs = 30;
    // Millisecond form of networkTimeoutSecs, for UiAutomator APIs that take ms.
    private long networkTimeout = TimeUnit.SECONDS.toMillis(networkTimeoutSecs);

    /**
     * End-to-end Gmail scenario: launch, compose a mail with an attached image,
     * fill recipient/subject/body, send, and restore screen orientation.
     */
    @Test
    public void runUiAutomation() throws Exception {
        initialize_instrumentation();
        parameters = getParams();
        String recipient = parameters.getString("recipient");
        setScreenOrientation(ScreenOrientation.NATURAL);
        runApplicationInitialization();
        clickNewMail();
        attachImage();
        setToField(recipient);
        setSubjectField();
        setComposeField();
        clickSendButton();
        unsetScreenOrientation();
    }

    // Get application parameters and clear the initial run dialogues of the application launch.
    public void runApplicationInitialization() throws Exception {
        getPackageParameters();
        clearFirstRunDialogues();
    }

    // Sets the UiObject that marks the end of the application launch.
    public UiObject getLaunchEndObject() {
        UiObject launchEndObject =
                mDevice.findObject(new UiSelector().className("android.widget.ImageButton"));
        return launchEndObject;
    }

    // Returns the launch command for the application.
    public String getLaunchCommand() {
        String launch_command;
        launch_command = UiAutoUtils.createLaunchCommand(parameters);
        return launch_command;
    }

    // Pass the workload parameters, used for applaunch
    public void setWorkloadParameters(Bundle workload_parameters) {
        parameters = workload_parameters;
    }

    /**
     * Dismisses the welcome/sync dialogs that may appear on first run, then waits
     * for the initial mailbox sync to finish.
     *
     * @throws UiObjectNotFoundException if the mailbox never finishes syncing
     */
    public void clearFirstRunDialogues() throws Exception {
        // The first run dialogues vary on different devices so check if they are there and dismiss
        UiObject gotItBox =
                mDevice.findObject(new UiSelector().resourceId(packageID + "welcome_tour_got_it")
                        .className("android.widget.TextView"));
        if (gotItBox.exists()) {
            gotItBox.clickAndWaitForNewWindow(uiAutoTimeout);
        }
        UiObject takeMeToBox =
                mDevice.findObject(new UiSelector().textContains("Take me to Gmail")
                        .className("android.widget.TextView"));
        if (takeMeToBox.exists()) {
            takeMeToBox.clickAndWaitForNewWindow(uiAutoTimeout);
        }
        UiObject syncNowButton =
                mDevice.findObject(new UiSelector().textContains("Sync now")
                        .className("android.widget.Button"));
        if (syncNowButton.exists()) {
            syncNowButton.clickAndWaitForNewWindow(uiAutoTimeout);
            // On some devices we need to wait for a sync to occur after clearing the data
            // We also need to sleep here since waiting for a new window is not enough
            sleep(10);
        }
        // Wait an obnoxiously long period of time for the sync operation to finish
        // If it still fails, then there is a problem with the app obtaining the data it needs
        // Recommend restarting the phone and/or clearing the app data
        UiObject gettingMessages =
                mDevice.findObject(new UiSelector().textContains("Getting your messages")
                        .className("android.widget.TextView"));
        UiObject waitingSync =
                mDevice.findObject(new UiSelector().textContains("Waiting for sync")
                        .className("android.widget.TextView"));
        if (!waitUntilNoObject(gettingMessages, networkTimeoutSecs * 4) ||
                !waitUntilNoObject(waitingSync, networkTimeoutSecs * 4)) {
            throw new UiObjectNotFoundException("Device cannot sync! Try rebooting or clearing app data");
        }
    }

    /** Opens the compose screen from the conversation list, timing the click. */
    public void clickNewMail() throws Exception {
        String testTag = "click_new";
        ActionLogger logger = new ActionLogger(testTag, parameters);
        UiObject conversationView =
                mDevice.findObject(new UiSelector().resourceIdMatches(packageID + "conversation_list.*"));
        if (!conversationView.waitForExists(networkTimeout)) {
            throw new UiObjectNotFoundException("Could not find \"conversationView\".");
        }
        UiObject newMailButton =
                getUiObjectByDescription("Compose", "android.widget.ImageButton");
        logger.start();
        newMailButton.clickAndWaitForNewWindow(uiAutoTimeout);
        logger.stop();
    }

    /**
     * Attaches a test image from the "wa-working" folder, going through either the
     * Photos app (preferred) or the documents UI "Images" folder as a fallback.
     */
    public void attachImage() throws Exception {
        String testTag = "attach_img";
        ActionLogger logger = new ActionLogger(testTag, parameters);
        UiObject attachIcon =
                getUiObjectByResourceId(packageID + "add_attachment", "android.widget.TextView");
        logger.start();
        attachIcon.click();
        UiObject attachFile =
                getUiObjectByText("Attach file", "android.widget.TextView");
        attachFile.clickAndWaitForNewWindow(uiAutoTimeout);
        // Show Roots menu
        UiObject rootMenu =
                mDevice.findObject(new UiSelector().descriptionContains("Show roots")
                        .className("android.widget.ImageButton"));
        if (rootMenu.exists()) {
            rootMenu.click();
        }
        // Check for Photos
        UiObject photos =
                mDevice.findObject(new UiSelector().text("Photos")
                        .className("android.widget.TextView"));
        // If Photos does not exist use the images folder
        if (!photos.waitForExists(uiAutoTimeout)) {
            UiObject imagesEntry =
                    mDevice.findObject(new UiSelector().textContains("Images")
                            .className("android.widget.TextView"));
            if (imagesEntry.waitForExists(uiAutoTimeout)) {
                imagesEntry.click();
            }
            selectGalleryFolder("wa-working");
            // NOTE(review): "android.widget.Gridview" is likely a typo for
            // "android.widget.GridView"; kept as-is because the dir_list fallback
            // below compensates — confirm against the targeted documentsui version.
            UiObject imageButton =
                    mDevice.findObject(new UiSelector().resourceId("com.android.documentsui:id/grid")
                            .className("android.widget.Gridview")
                            .childSelector(new UiSelector().index(0)
                                    .className("android.widget.FrameLayout")));
            if (!imageButton.exists()) {
                imageButton =
                        mDevice.findObject(new UiSelector().resourceId("com.android.documentsui:id/dir_list")
                                .childSelector(new UiSelector().index(0)
                                        .classNameMatches("android.widget..*Layout")));
            }
            imageButton.click();
            imageButton.waitUntilGone(uiAutoTimeout);
        } else {
            photos.click();
            // Click wa folder image
            UiObject waFolder =
                    mDevice.findObject(new UiSelector().textContains("wa-working")
                            .className("android.widget.TextView"));
            if (!waFolder.waitForExists(uiAutoTimeout)) {
                // Folder not visible yet: refresh the Photos view and back out of
                // any action mode before retrying.
                UiObject refresh =
                        getUiObjectByResourceId("com.google.android.apps.photos:id/image");
                refresh.clickAndWaitForNewWindow();
                UiObject back =
                        getUiObjectByResourceId("com.google.android.apps.photos:id/action_mode_close_button");
                back.clickAndWaitForNewWindow();
            }
            waFolder.waitForExists(uiAutoTimeout);
            waFolder.click();
            // Click test image
            UiObject imageFileButton =
                    mDevice.findObject(new UiSelector().descriptionContains("Photo"));
            imageFileButton.click();
            UiObject accept = getUiObjectByText("DONE");
            if (accept.waitForExists(uiAutoTimeout)) {
                accept.click();
            }
        }
        logger.stop();
    }

    /** Types the recipient into the "to" field, timing the interaction. */
    public void setToField(String recipient) throws Exception {
        String testTag = "text_to";
        ActionLogger logger = new ActionLogger(testTag, parameters);
        UiObject toField = getUiObjectByResourceId(packageID + "to");
        logger.start();
        toField.setText(recipient);
        mDevice.pressEnter();
        logger.stop();
    }

    /** Types a fixed subject line, timing the interaction. */
    public void setSubjectField() throws Exception {
        String testTag = "text_subject";
        ActionLogger logger = new ActionLogger(testTag, parameters);
        UiObject subjectField = getUiObjectByText("Subject", "android.widget.EditText");
        logger.start();
        // Click on the subject field is required on some platforms to exit the To box cleanly
        subjectField.click();
        subjectField.setText("This is a test message");
        mDevice.pressEnter();
        logger.stop();
    }

    /** Types a fixed message body, timing the interaction. */
    public void setComposeField() throws Exception {
        String testTag = "text_body";
        ActionLogger logger = new ActionLogger(testTag, parameters);
        UiObject composeField = mDevice.findObject(new UiSelector().text("Compose email")
                .classNameMatches("android.widget.EditText"));
        if (!composeField.exists()) {
            composeField = mDevice.findObject(new UiSelector().description("Compose email")
                    .classNameMatches("android.view.View"));
        }
        logger.start();
        composeField.legacySetText("This is a test composition");
        mDevice.pressEnter();
        logger.stop();
    }

    /** Sends the mail and waits for the compose screen to disappear. */
    public void clickSendButton() throws Exception {
        String testTag = "click_send";
        ActionLogger logger = new ActionLogger(testTag, parameters);
        UiObject sendButton = getUiObjectByDescription("Send", "android.widget.TextView");
        logger.start();
        sendButton.clickAndWaitForNewWindow(uiAutoTimeout);
        logger.stop();
        // BUG FIX: waitUntilGone() takes milliseconds; the old code passed
        // networkTimeoutSecs (30), i.e. a 30 ms wait instead of 30 s.
        sendButton.waitUntilGone(networkTimeout);
    }
}
| |
/*
* Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eike Stepper - initial API and implementation
* Stefan Winkler - bug 271444: [DB] Multiple refactorings
* Stefan Winkler - bug 275303: [DB] DBStore does not handle BIG_INTEGER and BIG_DECIMAL
* Kai Schlamp - bug 282976: [DB] Influence Mappings through EAnnotations
* Stefan Winkler - bug 282976: [DB] Influence Mappings through EAnnotations
* Stefan Winkler - bug 285270: [DB] Support XSD based models
* Stefan Winkler - Bug 285426: [DB] Implement user-defined typeMapping support
*/
package org.eclipse.emf.cdo.server.db.mapping;
import org.eclipse.emf.cdo.common.revision.CDORevisionData;
import org.eclipse.emf.cdo.server.internal.db.DBAnnotation;
import org.eclipse.emf.cdo.server.internal.db.MetaDataManager;
import org.eclipse.emf.cdo.server.internal.db.bundle.OM;
import org.eclipse.emf.cdo.server.internal.db.mapping.TypeMappingRegistry;
import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision;
import org.eclipse.net4j.db.DBType;
import org.eclipse.net4j.db.ddl.IDBField;
import org.eclipse.net4j.db.ddl.IDBTable;
import org.eclipse.net4j.util.container.IManagedContainer;
import org.eclipse.net4j.util.om.trace.ContextTracer;
import org.eclipse.emf.ecore.EStructuralFeature;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* This is a default implementation for the {@link ITypeMapping} interface which provides default behavor for all common
* types. Implementors should provide a constructor which the factory (see below) can use and implement
* {@link #getResultSetValue(ResultSet)}. If needed, {@link #doSetValue(PreparedStatement, int, Object)} can also be
* overridden as a counterpart to {@link #getResultSetValue(ResultSet)}. Finally, an implementor should also implement a
* suitable factory for the {@link TypeMappingRegistry} and register it either manually using
* {@link IManagedContainer#registerFactory(org.eclipse.net4j.util.factory.IFactory)} or using the Net4j Extension Point
* <code>factories</code>.
*
* @author Eike Stepper
* @author Stefan Winkler
* @since 4.0
*/
public abstract class AbstractTypeMapping implements ITypeMapping
{
private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, AbstractTypeMapping.class);
private IMappingStrategy mappingStrategy;
private EStructuralFeature feature;
private IDBField field;
private DBType dbType;
/**
 * Create a new type mapping. Strategy, feature and DB field are injected
 * afterwards via the corresponding setters.
 */
public AbstractTypeMapping()
{
  super();
}
/** Injects the mapping strategy this type mapping belongs to. */
public final void setMappingStrategy(IMappingStrategy mappingStrategy)
{
  this.mappingStrategy = mappingStrategy;
}
/** Returns the mapping strategy set via {@link #setMappingStrategy}. */
public final IMappingStrategy getMappingStrategy()
{
  return mappingStrategy;
}
/** Injects the Ecore structural feature this mapping handles. */
public final void setFeature(EStructuralFeature feature)
{
  this.feature = feature;
}
/** Returns the structural feature set via {@link #setFeature}. */
public final EStructuralFeature getFeature()
{
  return feature;
}
/**
 * Binds this feature's value from the given revision into the statement
 * at the given parameter index, applying NIL/null/default conversion.
 */
public final void setValueFromRevision(PreparedStatement stmt, int index, InternalCDORevision revision)
    throws SQLException
{
  setValue(stmt, index, getRevisionValue(revision));
}
/** Binds the feature's default value into the statement at the given index. */
public final void setDefaultValue(PreparedStatement stmt, int index) throws SQLException
{
  setValue(stmt, index, getDefaultValue());
}
public final void setValue(PreparedStatement stmt, int index, Object value) throws SQLException
{
if (value == CDORevisionData.NIL)
{
if (TRACER.isEnabled())
{
TRACER.format("TypeMapping for {0}: converting Revision.NIL to DB-null", feature.getName()); //$NON-NLS-1$
}
stmt.setNull(index, getSqlType());
}
else if (value == null)
{
if (feature.isMany() || getDefaultValue() == null)
{
if (TRACER.isEnabled())
{
TRACER.format("TypeMapping for {0}: writing Revision.null as DB.null", feature.getName()); //$NON-NLS-1$
}
stmt.setNull(index, getSqlType());
}
else
{
if (TRACER.isEnabled())
{
TRACER.format("TypeMapping for {0}: converting Revision.null to default value", feature.getName()); //$NON-NLS-1$
}
setDefaultValue(stmt, index);
}
}
else
{
doSetValue(stmt, index, value);
}
}
public final void createDBField(IDBTable table)
{
createDBField(table, mappingStrategy.getFieldName(feature));
}
public final void createDBField(IDBTable table, String fieldName)
{
DBType fieldType = getDBType();
int fieldLength = getDBLength(fieldType);
field = table.addField(fieldName, fieldType, fieldLength);
}
public final void setDBField(IDBTable table, String fieldName)
{
field = table.getField(fieldName);
}
public final IDBField getField()
{
return field;
}
public final void readValueToRevision(ResultSet resultSet, InternalCDORevision revision) throws SQLException
{
Object value = readValue(resultSet);
revision.setValue(getFeature(), value);
}
public final Object readValue(ResultSet resultSet) throws SQLException
{
Object value = getResultSetValue(resultSet);
if (resultSet.wasNull())
{
if (feature.isMany())
{
if (TRACER.isEnabled())
{
TRACER.format("TypeMapping for {0}: read db.null - setting Revision.null", feature.getName()); //$NON-NLS-1$
}
value = null;
}
else
{
if (getDefaultValue() == null)
{
if (TRACER.isEnabled())
{
TRACER.format(
"TypeMapping for {0}: read db.null - setting Revision.null, because of default", feature.getName()); //$NON-NLS-1$
}
value = null;
}
else
{
if (TRACER.isEnabled())
{
TRACER.format("TypeMapping for {0}: read db.null - setting Revision.NIL", feature.getName()); //$NON-NLS-1$
}
value = CDORevisionData.NIL;
}
}
}
return value;
}
protected Object getDefaultValue()
{
return feature.getDefaultValue();
}
protected final Object getRevisionValue(InternalCDORevision revision)
{
return revision.getValue(getFeature());
}
/**
* Implementors could override this method to convert a given value to the database representation and set it to the
* prepared statement.
*
* @param stmt
* the {@link PreparedStatement} which is used for DB access
* @param index
* the parameter index in the statement which should be set
* @param value
* the value of the feature which should be written into the DB
*/
protected void doSetValue(PreparedStatement stmt, int index, Object value) throws SQLException
{
stmt.setObject(index, value, getSqlType());
}
/**
* Returns the SQL type of this TypeMapping. The default implementation considers the type map held by the
* {@link MetaDataManager meta-data manager}. Subclasses may override.
*
* @return The sql type of this TypeMapping.
*/
protected int getSqlType()
{
return getDBType().getCode();
}
public final void setDBType(DBType dbType)
{
this.dbType = dbType;
}
public DBType getDBType()
{
return dbType;
}
/**
* @since 4.1.1
*/
protected int getDBLength(DBType type)
{
String value = DBAnnotation.COLUMN_LENGTH.getValue(feature);
if (value != null)
{
try
{
return Integer.parseInt(value);
}
catch (NumberFormatException e)
{
OM.LOG.error("Illegal columnLength annotation of feature " + feature.getName());
}
}
// TODO: implement DBAdapter.getDBLength
// mappingStrategy.getStore().getDBAdapter().getDBLength(type);
// which should then return the correct default field length for the db type
// XXX (apeteri): we need a common maximum length that is acceptable by both H2 and MySQL
return type == DBType.VARCHAR ? 2048 : IDBField.DEFAULT;
}
/**
* Subclasses should implement this method to read the value from the result set. Typical implementations should look
* similar to this one: <code>resultSet.getString(getField().getName())</code>
*
* @param resultSet
* the result set to read from
* @return the result value read (this has to be compatible with the {@link #feature}.
*/
protected abstract Object getResultSetValue(ResultSet resultSet) throws SQLException;
}
| |
package org.openhome.net.device.providers;
import java.util.LinkedList;
import java.util.List;
import org.openhome.net.core.*;
import org.openhome.net.device.*;
interface IDvProviderAvOpenhomeOrgCredentials1
{
    /**
     * Set the value of the Ids property
     *
     * @param aValue new value for the property.
     * @return <tt>true</tt> if the value has been updated; <tt>false</tt> if <tt>aValue</tt> was the same as the previous value.
     */
    public boolean setPropertyIds(String aValue);

    /**
     * Get a copy of the value of the Ids property
     *
     * @return value of the Ids property.
     */
    public String getPropertyIds();

    /**
     * Set the value of the PublicKey property
     *
     * @param aValue new value for the property.
     * @return <tt>true</tt> if the value has been updated; <tt>false</tt> if <tt>aValue</tt> was the same as the previous value.
     */
    public boolean setPropertyPublicKey(String aValue);

    /**
     * Get a copy of the value of the PublicKey property
     *
     * @return value of the PublicKey property.
     */
    public String getPropertyPublicKey();

    /**
     * Set the value of the SequenceNumber property
     *
     * @param aValue new value for the property.
     * @return <tt>true</tt> if the value has been updated; <tt>false</tt> if <tt>aValue</tt> was the same as the previous value.
     */
    public boolean setPropertySequenceNumber(long aValue);

    /**
     * Get a copy of the value of the SequenceNumber property
     *
     * @return value of the SequenceNumber property.
     */
    public long getPropertySequenceNumber();
}
/**
* Provider for the av.openhome.org:Credentials:1 UPnP service.
*/
public class DvProviderAvOpenhomeOrgCredentials1 extends DvProvider implements IDvProviderAvOpenhomeOrgCredentials1
{
public class Get
{
private String iUserName;
private byte[] iPassword;
private boolean iEnabled;
private String iStatus;
private String iData;
public Get(
String aUserName,
byte[] aPassword,
boolean aEnabled,
String aStatus,
String aData
)
{
iUserName = aUserName;
iPassword = aPassword;
iEnabled = aEnabled;
iStatus = aStatus;
iData = aData;
}
public String getUserName()
{
return iUserName;
}
public byte[] getPassword()
{
return iPassword;
}
public boolean getEnabled()
{
return iEnabled;
}
public String getStatus()
{
return iStatus;
}
public String getData()
{
return iData;
}
}
    // Invocation dispatchers, one per optional action.
    // Each is non-null only after the corresponding enableActionXxx() call.
    private IDvInvocationListener iDelegateSet;
    private IDvInvocationListener iDelegateClear;
    private IDvInvocationListener iDelegateSetEnabled;
    private IDvInvocationListener iDelegateGet;
    private IDvInvocationListener iDelegateLogin;
    private IDvInvocationListener iDelegateReLogin;
    private IDvInvocationListener iDelegateGetIds;
    private IDvInvocationListener iDelegateGetPublicKey;
    private IDvInvocationListener iDelegateGetSequenceNumber;
    // Evented service properties.
    // Each is non-null only after the corresponding enablePropertyXxx() call.
    private PropertyString iPropertyIds;
    private PropertyString iPropertyPublicKey;
    private PropertyUint iPropertySequenceNumber;
    /**
     * Constructor
     *
     * @param aDevice device which owns this provider.
     */
    protected DvProviderAvOpenhomeOrgCredentials1(DvDevice aDevice)
    {
        super(aDevice, "av.openhome.org", "Credentials", 1);
    }
/**
* Enable the Ids property.
*/
public void enablePropertyIds()
{
List<String> allowedValues = new LinkedList<String>();
iPropertyIds = new PropertyString(new ParameterString("Ids", allowedValues));
addProperty(iPropertyIds);
}
/**
* Enable the PublicKey property.
*/
public void enablePropertyPublicKey()
{
List<String> allowedValues = new LinkedList<String>();
iPropertyPublicKey = new PropertyString(new ParameterString("PublicKey", allowedValues));
addProperty(iPropertyPublicKey);
}
/**
* Enable the SequenceNumber property.
*/
public void enablePropertySequenceNumber()
{
iPropertySequenceNumber = new PropertyUint(new ParameterUint("SequenceNumber"));
addProperty(iPropertySequenceNumber);
}
    /**
     * Set the value of the Ids property
     *
     * @param aValue new value for the property.
     * @return <tt>true</tt> if the value has been updated; <tt>false</tt>
     * if <tt>aValue</tt> was the same as the previous value.
     */
    public boolean setPropertyIds(String aValue)
    {
        return setPropertyString(iPropertyIds, aValue);
    }
    /**
     * Get a copy of the value of the Ids property
     *
     * <p>NOTE(review): {@link #enablePropertyIds} must have been called first;
     * otherwise this throws a {@code NullPointerException}.
     *
     * @return value of the Ids property.
     */
    public String getPropertyIds()
    {
        return iPropertyIds.getValue();
    }
    /**
     * Set the value of the PublicKey property
     *
     * @param aValue new value for the property.
     * @return <tt>true</tt> if the value has been updated; <tt>false</tt>
     * if <tt>aValue</tt> was the same as the previous value.
     */
    public boolean setPropertyPublicKey(String aValue)
    {
        return setPropertyString(iPropertyPublicKey, aValue);
    }
    /**
     * Get a copy of the value of the PublicKey property
     *
     * <p>NOTE(review): {@link #enablePropertyPublicKey} must have been called first;
     * otherwise this throws a {@code NullPointerException}.
     *
     * @return value of the PublicKey property.
     */
    public String getPropertyPublicKey()
    {
        return iPropertyPublicKey.getValue();
    }
    /**
     * Set the value of the SequenceNumber property
     *
     * @param aValue new value for the property.
     * @return <tt>true</tt> if the value has been updated; <tt>false</tt>
     * if <tt>aValue</tt> was the same as the previous value.
     */
    public boolean setPropertySequenceNumber(long aValue)
    {
        return setPropertyUint(iPropertySequenceNumber, aValue);
    }
    /**
     * Get a copy of the value of the SequenceNumber property
     *
     * <p>NOTE(review): {@link #enablePropertySequenceNumber} must have been called first;
     * otherwise this throws a {@code NullPointerException}.
     *
     * @return value of the SequenceNumber property.
     */
    public long getPropertySequenceNumber()
    {
        return iPropertySequenceNumber.getValue();
    }
/**
* Signal that the action Set is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* Set must be overridden if this is called.
*/
protected void enableActionSet()
{
Action action = new Action("Set"); List<String> allowedValues = new LinkedList<String>();
action.addInputParameter(new ParameterString("Id", allowedValues));
action.addInputParameter(new ParameterString("UserName", allowedValues));
action.addInputParameter(new ParameterBinary("Password"));
iDelegateSet = new DoSet();
enableAction(action, iDelegateSet);
}
/**
* Signal that the action Clear is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* Clear must be overridden if this is called.
*/
protected void enableActionClear()
{
Action action = new Action("Clear"); List<String> allowedValues = new LinkedList<String>();
action.addInputParameter(new ParameterString("Id", allowedValues));
iDelegateClear = new DoClear();
enableAction(action, iDelegateClear);
}
/**
* Signal that the action SetEnabled is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* SetEnabled must be overridden if this is called.
*/
protected void enableActionSetEnabled()
{
Action action = new Action("SetEnabled"); List<String> allowedValues = new LinkedList<String>();
action.addInputParameter(new ParameterString("Id", allowedValues));
action.addInputParameter(new ParameterBool("Enabled"));
iDelegateSetEnabled = new DoSetEnabled();
enableAction(action, iDelegateSetEnabled);
}
/**
* Signal that the action Get is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* Get must be overridden if this is called.
*/
protected void enableActionGet()
{
Action action = new Action("Get"); List<String> allowedValues = new LinkedList<String>();
action.addInputParameter(new ParameterString("Id", allowedValues));
action.addOutputParameter(new ParameterString("UserName", allowedValues));
action.addOutputParameter(new ParameterBinary("Password"));
action.addOutputParameter(new ParameterBool("Enabled"));
action.addOutputParameter(new ParameterString("Status", allowedValues));
action.addOutputParameter(new ParameterString("Data", allowedValues));
iDelegateGet = new DoGet();
enableAction(action, iDelegateGet);
}
/**
* Signal that the action Login is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* Login must be overridden if this is called.
*/
protected void enableActionLogin()
{
Action action = new Action("Login"); List<String> allowedValues = new LinkedList<String>();
action.addInputParameter(new ParameterString("Id", allowedValues));
action.addOutputParameter(new ParameterString("Token", allowedValues));
iDelegateLogin = new DoLogin();
enableAction(action, iDelegateLogin);
}
/**
* Signal that the action ReLogin is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* ReLogin must be overridden if this is called.
*/
protected void enableActionReLogin()
{
Action action = new Action("ReLogin"); List<String> allowedValues = new LinkedList<String>();
action.addInputParameter(new ParameterString("Id", allowedValues));
action.addInputParameter(new ParameterString("CurrentToken", allowedValues));
action.addOutputParameter(new ParameterString("NewToken", allowedValues));
iDelegateReLogin = new DoReLogin();
enableAction(action, iDelegateReLogin);
}
/**
* Signal that the action GetIds is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* GetIds must be overridden if this is called.
*/
protected void enableActionGetIds()
{
Action action = new Action("GetIds");
action.addOutputParameter(new ParameterRelated("Ids", iPropertyIds));
iDelegateGetIds = new DoGetIds();
enableAction(action, iDelegateGetIds);
}
/**
* Signal that the action GetPublicKey is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* GetPublicKey must be overridden if this is called.
*/
protected void enableActionGetPublicKey()
{
Action action = new Action("GetPublicKey");
action.addOutputParameter(new ParameterRelated("PublicKey", iPropertyPublicKey));
iDelegateGetPublicKey = new DoGetPublicKey();
enableAction(action, iDelegateGetPublicKey);
}
/**
* Signal that the action GetSequenceNumber is supported.
*
* <p>The action's availability will be published in the device's service.xml.
* GetSequenceNumber must be overridden if this is called.
*/
protected void enableActionGetSequenceNumber()
{
Action action = new Action("GetSequenceNumber");
action.addOutputParameter(new ParameterRelated("SequenceNumber", iPropertySequenceNumber));
iDelegateGetSequenceNumber = new DoGetSequenceNumber();
enableAction(action, iDelegateGetSequenceNumber);
}
    /**
     * Set action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * Set action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionSet} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @param aId
     * @param aUserName
     * @param aPassword
     */
    protected void set(IDvInvocation aInvocation, String aId, String aUserName, byte[] aPassword)
    {
        throw (new ActionDisabledError());
    }
    /**
     * Clear action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * Clear action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionClear} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @param aId
     */
    protected void clear(IDvInvocation aInvocation, String aId)
    {
        throw (new ActionDisabledError());
    }
    /**
     * SetEnabled action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * SetEnabled action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionSetEnabled} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @param aId
     * @param aEnabled
     */
    protected void setEnabled(IDvInvocation aInvocation, String aId, boolean aEnabled)
    {
        throw (new ActionDisabledError());
    }
    /**
     * Get action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * Get action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionGet} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @param aId
     * @return the action's output arguments (UserName, Password, Enabled, Status, Data).
     */
    protected Get get(IDvInvocation aInvocation, String aId)
    {
        throw (new ActionDisabledError());
    }
    /**
     * Login action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * Login action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionLogin} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @param aId
     * @return value of the Token output argument.
     */
    protected String login(IDvInvocation aInvocation, String aId)
    {
        throw (new ActionDisabledError());
    }
    /**
     * ReLogin action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * ReLogin action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionReLogin} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @param aId
     * @param aCurrentToken
     * @return value of the NewToken output argument.
     */
    protected String reLogin(IDvInvocation aInvocation, String aId, String aCurrentToken)
    {
        throw (new ActionDisabledError());
    }
    /**
     * GetIds action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * GetIds action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionGetIds} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @return value of the Ids output argument.
     */
    protected String getIds(IDvInvocation aInvocation)
    {
        throw (new ActionDisabledError());
    }
    /**
     * GetPublicKey action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * GetPublicKey action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionGetPublicKey} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @return value of the PublicKey output argument.
     */
    protected String getPublicKey(IDvInvocation aInvocation)
    {
        throw (new ActionDisabledError());
    }
    /**
     * GetSequenceNumber action.
     *
     * <p>Will be called when the device stack receives an invocation of the
     * GetSequenceNumber action for the owning device.
     *
     * <p>Must be implemented iff {@link #enableActionGetSequenceNumber} was called.
     *
     * @param aInvocation Interface allowing querying of aspects of this particular action invocation.
     * @return value of the SequenceNumber output argument.
     */
    protected long getSequenceNumber(IDvInvocation aInvocation)
    {
        throw (new ActionDisabledError());
    }
/**
* Must be called for each class instance. Must be called before Core.Library.Close().
*/
public void dispose()
{
synchronized (this)
{
if (iHandle == 0)
{
return;
}
super.dispose();
iHandle = 0;
}
}
    /**
     * Invocation listener for the Set action: reads the input arguments,
     * dispatches to {@link #set}, then writes the (empty) response.
     */
    private class DoSet implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String id;
            String userName;
            byte[] password;
            try
            {
                invocation.readStart();
                id = invocation.readString("Id");
                userName = invocation.readString("UserName");
                password = invocation.readBinary("Password");
                invocation.readEnd();
                set(invocation, id, userName, password);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "Set");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the Clear action: reads the input arguments,
     * dispatches to {@link #clear}, then writes the (empty) response.
     */
    private class DoClear implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String id;
            try
            {
                invocation.readStart();
                id = invocation.readString("Id");
                invocation.readEnd();
                clear(invocation, id);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "Clear");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the SetEnabled action: reads the input arguments,
     * dispatches to {@link #setEnabled}, then writes the (empty) response.
     */
    private class DoSetEnabled implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String id;
            boolean enabled;
            try
            {
                invocation.readStart();
                id = invocation.readString("Id");
                enabled = invocation.readBool("Enabled");
                invocation.readEnd();
                setEnabled(invocation, id, enabled);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "SetEnabled");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the Get action: reads the input arguments,
     * dispatches to {@link #get}, then writes the output arguments from the
     * returned {@link Get} holder.
     */
    private class DoGet implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String id;
            String userName;
            byte[] password;
            boolean enabled;
            String status;
            String data;
            try
            {
                invocation.readStart();
                id = invocation.readString("Id");
                invocation.readEnd();
                Get outArgs = get(invocation, id);
                userName = outArgs.getUserName();
                password = outArgs.getPassword();
                enabled = outArgs.getEnabled();
                status = outArgs.getStatus();
                data = outArgs.getData();
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "Get");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeString("UserName", userName);
                invocation.writeBinary("Password", password);
                invocation.writeBool("Enabled", enabled);
                invocation.writeString("Status", status);
                invocation.writeString("Data", data);
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the Login action: reads the input arguments,
     * dispatches to {@link #login}, then writes the Token output argument.
     */
    private class DoLogin implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String id;
            String token;
            try
            {
                invocation.readStart();
                id = invocation.readString("Id");
                invocation.readEnd();
                token = login(invocation, id);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "Login");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeString("Token", token);
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the ReLogin action: reads the input arguments,
     * dispatches to {@link #reLogin}, then writes the NewToken output argument.
     */
    private class DoReLogin implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String id;
            String currentToken;
            String newToken;
            try
            {
                invocation.readStart();
                id = invocation.readString("Id");
                currentToken = invocation.readString("CurrentToken");
                invocation.readEnd();
                newToken = reLogin(invocation, id, currentToken);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "ReLogin");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeString("NewToken", newToken);
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the GetIds action: dispatches to {@link #getIds}
     * (no input arguments), then writes the Ids output argument.
     */
    private class DoGetIds implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String ids;
            try
            {
                invocation.readStart();
                invocation.readEnd();
                ids = getIds(invocation);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "GetIds");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeString("Ids", ids);
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the GetPublicKey action: dispatches to
     * {@link #getPublicKey} (no input arguments), then writes the PublicKey
     * output argument.
     */
    private class DoGetPublicKey implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            String publicKey;
            try
            {
                invocation.readStart();
                invocation.readEnd();
                publicKey = getPublicKey(invocation);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "GetPublicKey");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeString("PublicKey", publicKey);
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
    /**
     * Invocation listener for the GetSequenceNumber action: dispatches to
     * {@link #getSequenceNumber} (no input arguments), then writes the
     * SequenceNumber output argument.
     */
    private class DoGetSequenceNumber implements IDvInvocationListener
    {
        public void actionInvoked(long aInvocation)
        {
            DvInvocation invocation = new DvInvocation(aInvocation);
            long sequenceNumber;
            try
            {
                invocation.readStart();
                invocation.readEnd();
                sequenceNumber = getSequenceNumber(invocation);
            }
            catch (ActionError ae)
            {
                invocation.reportActionError(ae, "GetSequenceNumber");
                return;
            }
            catch (PropertyUpdateError pue)
            {
                invocation.reportError(501, "Invalid XML");
                return;
            }
            catch (Exception e)
            {
                System.out.println("WARNING: unexpected exception: " + e.getMessage());
                System.out.println("         Only ActionError or PropertyUpdateError can be thrown by actions");
                e.printStackTrace();
                return;
            }
            try
            {
                invocation.writeStart();
                invocation.writeUint("SequenceNumber", sequenceNumber);
                invocation.writeEnd();
            }
            catch (ActionError ae)
            {
                return;
            }
            catch (Exception e)
            {
                System.out.println("ERROR: unexpected exception: " + e.getMessage());
                System.out.println("       Only ActionError can be thrown by action response writer");
                e.printStackTrace();
            }
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetMLTaskRun" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetMLTaskRunResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
    /**
     * <p>
     * The unique identifier of the task run.
     * </p>
     */
    private String transformId;
    /**
     * <p>
     * The unique run identifier associated with this run.
     * </p>
     */
    private String taskRunId;
    /**
     * <p>
     * The status for this task run.
     * </p>
     */
    private String status;
    /**
     * <p>
     * The names of the log groups that are associated with the task run.
     * </p>
     */
    private String logGroupName;
    /**
     * <p>
     * The list of properties that are associated with the task run.
     * </p>
     */
    private TaskRunProperties properties;
    /**
     * <p>
     * The error strings that are associated with the task run.
     * </p>
     */
    private String errorString;
    /**
     * <p>
     * The date and time when this task run started.
     * </p>
     */
    private java.util.Date startedOn;
    /**
     * <p>
     * The date and time when this task run was last modified.
     * </p>
     */
    private java.util.Date lastModifiedOn;
    /**
     * <p>
     * The date and time when this task run was completed.
     * </p>
     */
    private java.util.Date completedOn;
    /**
     * <p>
     * The amount of time (in seconds) that the task run consumed resources.
     * </p>
     */
    private Integer executionTime;
    /**
     * <p>
     * The unique identifier of the task run.
     * </p>
     *
     * @param transformId
     *        The unique identifier of the task run.
     */
    public void setTransformId(String transformId) {
        this.transformId = transformId;
    }
    /**
     * <p>
     * The unique identifier of the task run.
     * </p>
     *
     * @return The unique identifier of the task run.
     */
    public String getTransformId() {
        return this.transformId;
    }
    /**
     * <p>
     * The unique identifier of the task run.
     * </p>
     *
     * @param transformId
     *        The unique identifier of the task run.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMLTaskRunResult withTransformId(String transformId) {
        setTransformId(transformId);
        return this;
    }
    /**
     * <p>
     * The unique run identifier associated with this run.
     * </p>
     *
     * @param taskRunId
     *        The unique run identifier associated with this run.
     */
    public void setTaskRunId(String taskRunId) {
        this.taskRunId = taskRunId;
    }
    /**
     * <p>
     * The unique run identifier associated with this run.
     * </p>
     *
     * @return The unique run identifier associated with this run.
     */
    public String getTaskRunId() {
        return this.taskRunId;
    }
    /**
     * <p>
     * The unique run identifier associated with this run.
     * </p>
     *
     * @param taskRunId
     *        The unique run identifier associated with this run.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMLTaskRunResult withTaskRunId(String taskRunId) {
        setTaskRunId(taskRunId);
        return this;
    }
    /**
     * <p>
     * The status for this task run.
     * </p>
     *
     * @param status
     *        The status for this task run.
     * @see TaskStatusType
     */
    public void setStatus(String status) {
        this.status = status;
    }
    /**
     * <p>
     * The status for this task run.
     * </p>
     *
     * @return The status for this task run.
     * @see TaskStatusType
     */
    public String getStatus() {
        return this.status;
    }
    /**
     * <p>
     * The status for this task run.
     * </p>
     *
     * @param status
     *        The status for this task run.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TaskStatusType
     */
    public GetMLTaskRunResult withStatus(String status) {
        setStatus(status);
        return this;
    }
    /**
     * <p>
     * The status for this task run.
     * </p>
     * <p>
     * NOTE(review): unlike {@link #withStatus(String)}, this overload dereferences {@code status} directly and
     * therefore throws {@code NullPointerException} when passed {@code null}.
     * </p>
     *
     * @param status
     *        The status for this task run.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TaskStatusType
     */
    public GetMLTaskRunResult withStatus(TaskStatusType status) {
        this.status = status.toString();
        return this;
    }
    /**
     * <p>
     * The names of the log groups that are associated with the task run.
     * </p>
     *
     * @param logGroupName
     *        The names of the log groups that are associated with the task run.
     */
    public void setLogGroupName(String logGroupName) {
        this.logGroupName = logGroupName;
    }
    /**
     * <p>
     * The names of the log groups that are associated with the task run.
     * </p>
     *
     * @return The names of the log groups that are associated with the task run.
     */
    public String getLogGroupName() {
        return this.logGroupName;
    }
    /**
     * <p>
     * The names of the log groups that are associated with the task run.
     * </p>
     *
     * @param logGroupName
     *        The names of the log groups that are associated with the task run.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMLTaskRunResult withLogGroupName(String logGroupName) {
        setLogGroupName(logGroupName);
        return this;
    }
    /**
     * <p>
     * The list of properties that are associated with the task run.
     * </p>
     *
     * @param properties
     *        The list of properties that are associated with the task run.
     */
    public void setProperties(TaskRunProperties properties) {
        this.properties = properties;
    }
/**
* <p>
* The list of properties that are associated with the task run.
* </p>
*
* @return The list of properties that are associated with the task run.
*/
public TaskRunProperties getProperties() {
return this.properties;
}
/**
* <p>
* The list of properties that are associated with the task run.
* </p>
*
* @param properties
* The list of properties that are associated with the task run.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetMLTaskRunResult withProperties(TaskRunProperties properties) {
setProperties(properties);
return this;
}
/**
* <p>
* The error strings that are associated with the task run.
* </p>
*
* @param errorString
* The error strings that are associated with the task run.
*/
public void setErrorString(String errorString) {
this.errorString = errorString;
}
/**
* <p>
* The error strings that are associated with the task run.
* </p>
*
* @return The error strings that are associated with the task run.
*/
public String getErrorString() {
return this.errorString;
}
/**
* <p>
* The error strings that are associated with the task run.
* </p>
*
* @param errorString
* The error strings that are associated with the task run.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetMLTaskRunResult withErrorString(String errorString) {
setErrorString(errorString);
return this;
}
/**
* <p>
* The date and time when this task run started.
* </p>
*
* @param startedOn
* The date and time when this task run started.
*/
public void setStartedOn(java.util.Date startedOn) {
this.startedOn = startedOn;
}
/**
* <p>
* The date and time when this task run started.
* </p>
*
* @return The date and time when this task run started.
*/
public java.util.Date getStartedOn() {
return this.startedOn;
}
/**
* <p>
* The date and time when this task run started.
* </p>
*
* @param startedOn
* The date and time when this task run started.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetMLTaskRunResult withStartedOn(java.util.Date startedOn) {
setStartedOn(startedOn);
return this;
}
/**
* <p>
* The date and time when this task run was last modified.
* </p>
*
* @param lastModifiedOn
* The date and time when this task run was last modified.
*/
public void setLastModifiedOn(java.util.Date lastModifiedOn) {
this.lastModifiedOn = lastModifiedOn;
}
/**
* <p>
* The date and time when this task run was last modified.
* </p>
*
* @return The date and time when this task run was last modified.
*/
public java.util.Date getLastModifiedOn() {
return this.lastModifiedOn;
}
/**
* <p>
* The date and time when this task run was last modified.
* </p>
*
* @param lastModifiedOn
* The date and time when this task run was last modified.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetMLTaskRunResult withLastModifiedOn(java.util.Date lastModifiedOn) {
setLastModifiedOn(lastModifiedOn);
return this;
}
/**
* <p>
* The date and time when this task run was completed.
* </p>
*
* @param completedOn
* The date and time when this task run was completed.
*/
public void setCompletedOn(java.util.Date completedOn) {
this.completedOn = completedOn;
}
/**
* <p>
* The date and time when this task run was completed.
* </p>
*
* @return The date and time when this task run was completed.
*/
public java.util.Date getCompletedOn() {
return this.completedOn;
}
/**
* <p>
* The date and time when this task run was completed.
* </p>
*
* @param completedOn
* The date and time when this task run was completed.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetMLTaskRunResult withCompletedOn(java.util.Date completedOn) {
setCompletedOn(completedOn);
return this;
}
/**
* <p>
* The amount of time (in seconds) that the task run consumed resources.
* </p>
*
* @param executionTime
* The amount of time (in seconds) that the task run consumed resources.
*/
public void setExecutionTime(Integer executionTime) {
this.executionTime = executionTime;
}
/**
* <p>
* The amount of time (in seconds) that the task run consumed resources.
* </p>
*
* @return The amount of time (in seconds) that the task run consumed resources.
*/
public Integer getExecutionTime() {
return this.executionTime;
}
/**
* <p>
* The amount of time (in seconds) that the task run consumed resources.
* </p>
*
* @param executionTime
* The amount of time (in seconds) that the task run consumed resources.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetMLTaskRunResult withExecutionTime(Integer executionTime) {
setExecutionTime(executionTime);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getTransformId() != null)
sb.append("TransformId: ").append(getTransformId()).append(",");
if (getTaskRunId() != null)
sb.append("TaskRunId: ").append(getTaskRunId()).append(",");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getLogGroupName() != null)
sb.append("LogGroupName: ").append(getLogGroupName()).append(",");
if (getProperties() != null)
sb.append("Properties: ").append(getProperties()).append(",");
if (getErrorString() != null)
sb.append("ErrorString: ").append(getErrorString()).append(",");
if (getStartedOn() != null)
sb.append("StartedOn: ").append(getStartedOn()).append(",");
if (getLastModifiedOn() != null)
sb.append("LastModifiedOn: ").append(getLastModifiedOn()).append(",");
if (getCompletedOn() != null)
sb.append("CompletedOn: ").append(getCompletedOn()).append(",");
if (getExecutionTime() != null)
sb.append("ExecutionTime: ").append(getExecutionTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetMLTaskRunResult == false)
return false;
GetMLTaskRunResult other = (GetMLTaskRunResult) obj;
if (other.getTransformId() == null ^ this.getTransformId() == null)
return false;
if (other.getTransformId() != null && other.getTransformId().equals(this.getTransformId()) == false)
return false;
if (other.getTaskRunId() == null ^ this.getTaskRunId() == null)
return false;
if (other.getTaskRunId() != null && other.getTaskRunId().equals(this.getTaskRunId()) == false)
return false;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getLogGroupName() == null ^ this.getLogGroupName() == null)
return false;
if (other.getLogGroupName() != null && other.getLogGroupName().equals(this.getLogGroupName()) == false)
return false;
if (other.getProperties() == null ^ this.getProperties() == null)
return false;
if (other.getProperties() != null && other.getProperties().equals(this.getProperties()) == false)
return false;
if (other.getErrorString() == null ^ this.getErrorString() == null)
return false;
if (other.getErrorString() != null && other.getErrorString().equals(this.getErrorString()) == false)
return false;
if (other.getStartedOn() == null ^ this.getStartedOn() == null)
return false;
if (other.getStartedOn() != null && other.getStartedOn().equals(this.getStartedOn()) == false)
return false;
if (other.getLastModifiedOn() == null ^ this.getLastModifiedOn() == null)
return false;
if (other.getLastModifiedOn() != null && other.getLastModifiedOn().equals(this.getLastModifiedOn()) == false)
return false;
if (other.getCompletedOn() == null ^ this.getCompletedOn() == null)
return false;
if (other.getCompletedOn() != null && other.getCompletedOn().equals(this.getCompletedOn()) == false)
return false;
if (other.getExecutionTime() == null ^ this.getExecutionTime() == null)
return false;
if (other.getExecutionTime() != null && other.getExecutionTime().equals(this.getExecutionTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getTransformId() == null) ? 0 : getTransformId().hashCode());
hashCode = prime * hashCode + ((getTaskRunId() == null) ? 0 : getTaskRunId().hashCode());
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getLogGroupName() == null) ? 0 : getLogGroupName().hashCode());
hashCode = prime * hashCode + ((getProperties() == null) ? 0 : getProperties().hashCode());
hashCode = prime * hashCode + ((getErrorString() == null) ? 0 : getErrorString().hashCode());
hashCode = prime * hashCode + ((getStartedOn() == null) ? 0 : getStartedOn().hashCode());
hashCode = prime * hashCode + ((getLastModifiedOn() == null) ? 0 : getLastModifiedOn().hashCode());
hashCode = prime * hashCode + ((getCompletedOn() == null) ? 0 : getCompletedOn().hashCode());
hashCode = prime * hashCode + ((getExecutionTime() == null) ? 0 : getExecutionTime().hashCode());
return hashCode;
}
    /**
     * Creates a shallow copy of this result object via {@link Object#clone()}.
     *
     * @return a clone of this instance
     * @throws IllegalStateException if {@code Object.clone()} unexpectedly reports that this class
     *         is not cloneable (the message notes this should never happen)
     */
    @Override
    public GetMLTaskRunResult clone() {
        try {
            return (GetMLTaskRunResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal.locks;
import java.util.HashMap;
import java.util.Iterator;
import org.apache.logging.log4j.Logger;
import org.apache.geode.InternalGemFireError;
import org.apache.geode.distributed.internal.DM;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.Assert;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.logging.log4j.LocalizedMessage;
import org.apache.geode.internal.logging.log4j.LogMarker;
/**
* Keeps track of all the information kept by the elder.
*
* @since GemFire 4.0
*/
public class ElderState {
  private static final Logger logger = LogService.getLogger();
  /**
   * Maps service name keys to GrantorInfo values. All access is guarded by synchronizing on this
   * ElderState instance.
   */
  private final HashMap nameToInfo;
  /** The distribution manager this elder state belongs to; never null. */
  private final DM dm;
  /**
   * Constructs the ElderState for the given dm. Note that this constructor does not complete until
   * elder recovery is complete.
   */
  public ElderState(DM dm) {
    Assert.assertTrue(dm != null);
    this.dm = dm;
    this.nameToInfo = new HashMap();
    try {
      this.dm.getStats().incElders(1);
      // recover grantor info from the other members; populates nameToInfo
      ElderInitProcessor.init(this.dm, this.nameToInfo);
    } catch (NullPointerException e) {
      // log a diagnostic for the likely cause before rethrowing the original failure
      try {
        checkForProblem(dm);
      } finally {
        if (true)
          throw e; // conditional prevents eclipse warning
      }
    } catch (InternalGemFireError e) {
      try {
        checkForProblem(dm);
      } finally {
        if (true)
          throw e; // conditional prevents eclipse warning
      }
    } finally {
      if (logger.isTraceEnabled(LogMarker.DLS)) {
        StringBuffer sb = new StringBuffer("ElderState initialized with:");
        for (Iterator grantors = this.nameToInfo.keySet().iterator(); grantors.hasNext();) {
          Object key = grantors.next();
          // key=dlock svc name, value=GrantorInfo object
          sb.append("\n\t" + key + ": " + this.nameToInfo.get(key));
        }
        logger.trace(LogMarker.DLS, sb.toString());
      }
    }
  }
  /**
   * Logs warnings for conditions known to make construction fail: a null system, a system without
   * a distribution manager, or a dm that is not the system's current distribution manager.
   */
  private void checkForProblem(DM checkDM) {
    if (checkDM.getSystem() == null) {
      logger.warn(LogMarker.DLS, LocalizedMessage
          .create(LocalizedStrings.ElderState_ELDERSTATE_PROBLEM_SYSTEM_0, checkDM.getSystem()));
      return;
    }
    if (checkDM.getSystem().getDistributionManager() == null) {
      logger.warn(LogMarker.DLS,
          LocalizedMessage.create(
              LocalizedStrings.ElderState_ELDERSTATE_PROBLEM_SYSTEM_DISTRIBUTIONMANAGER_0,
              checkDM.getSystem().getDistributionManager()));
    }
    if (checkDM != checkDM.getSystem().getDistributionManager()) {
      logger.warn(LogMarker.DLS,
          LocalizedMessage.create(
              LocalizedStrings.ElderState_ELDERSTATE_PROBLEM_DM_0_BUT_SYSTEM_DISTRIBUTIONMANAGER_1,
              new Object[] {checkDM, checkDM.getSystem().getDistributionManager()}));
    }
  }
  /**
   * Atomically determine who is the current grantor of the given service. If no current grantor
   * exists then the caller is made the grantor.
   *
   * @param serviceName the name of the lock service we want the grantor of
   * @param requestor the id of the member who is making this request
   * @return the current grantor of <code>serviceName</code> and recoveryNeeded will be true if
   *         requestor has become the grantor and needs to recover lock info.
   */
  public GrantorInfo getGrantor(String serviceName, InternalDistributedMember requestor,
      int dlsSerialNumberRequestor) {
    synchronized (this) {
      GrantorInfo gi = (GrantorInfo) this.nameToInfo.get(serviceName);
      if (gi != null) {
        waitWhileInitiatingTransfer(gi);
        InternalDistributedMember currentGrantor = gi.getId();
        // Note that elder recovery may put GrantorInfo instances in
        // the map whose id is null and whose needRecovery is true
        if (currentGrantor != null
            && this.dm.getDistributionManagerIds().contains(currentGrantor)) {
          return gi;
        } else {
          if (logger.isTraceEnabled(LogMarker.DLS)) {
            logger.trace(LogMarker.DLS, "Elder setting grantor for {} to {} because {} ",
                serviceName, requestor, (currentGrantor != null ? "current grantor crashed"
                    : "of unclean grantor shutdown"));
          }
          // current grantor crashed; make new guy grantor and force recovery
          long myVersion = gi.getVersionId() + 1;
          this.nameToInfo.put(serviceName,
              new GrantorInfo(requestor, myVersion, dlsSerialNumberRequestor, false));
          // returned copy differs from stored one only in needsRecovery=true
          return new GrantorInfo(requestor, myVersion, dlsSerialNumberRequestor, true);
        }
      } else {
        if (logger.isTraceEnabled(LogMarker.DLS)) {
          logger.trace(LogMarker.DLS,
              "Elder setting grantor for {} to {} because of clean grantor shutdown", serviceName,
              requestor);
        }
        // no entry for this service yet: requestor becomes the first grantor
        gi = new GrantorInfo(requestor, 1, dlsSerialNumberRequestor, false);
        this.nameToInfo.put(serviceName, gi);
        return gi;
      }
    }
  }
  /**
   * Atomically determine who is the current grantor of the given service.
   * Unlike {@link #getGrantor}, this never installs a new grantor.
   *
   * @param serviceName the name of the lock service we want the grantor of
   * @return the current grantor of <code>serviceName</code> and recoveryNeeded will be true if
   *         requestor has become the grantor and needs to recover lock info.
   */
  public GrantorInfo peekGrantor(String serviceName) {
    synchronized (this) {
      GrantorInfo gi = (GrantorInfo) this.nameToInfo.get(serviceName);
      if (gi != null) {
        waitWhileInitiatingTransfer(gi);
        InternalDistributedMember currentGrantor = gi.getId();
        // Note that elder recovery may put GrantorInfo instances in
        // the map whose id is null and whose needRecovery is true
        if (currentGrantor != null
            && this.dm.getDistributionManagerIds().contains(currentGrantor)) {
          return gi;
        } else {
          return new GrantorInfo(null, 0, 0, true);
        }
      } else {
        return new GrantorInfo(null, 0, 0, false);
      }
    }
  }
  /**
   * Atomically sets the current grantor of the given service to <code>newGrantor</code>.
   *
   * @param serviceName the name of the lock service we want the grantor of
   * @param newGrantor the id of the member who is making this request
   * @param oldTurk if non-null then only do the become if the current grantor is the oldTurk
   * @return the previous grantor, which may be null, of <code>serviceName</code> and recoveryNeeded
   *         will be true if new grantor needs to recover lock info
   */
  public GrantorInfo becomeGrantor(String serviceName, InternalDistributedMember newGrantor,
      int newGrantorSerialNumber, InternalDistributedMember oldTurk) {
    GrantorInfo newInfo = null;
    InternalDistributedMember previousGrantor = null;
    long newGrantorVersion = -1;
    try {
      synchronized (this) {
        GrantorInfo gi = (GrantorInfo) this.nameToInfo.get(serviceName);
        // re-read after each wait: the entry may have been replaced while transferring
        while (gi != null && gi.isInitiatingTransfer()) {
          waitWhileInitiatingTransfer(gi);
          gi = (GrantorInfo) this.nameToInfo.get(serviceName);
        }
        if (gi != null) {
          previousGrantor = gi.getId();
          // Note that elder recovery may put GrantorInfo instances in
          // the map whose id is null and whose needRecovery is true
          // if previousGrantor still exists...
          if (previousGrantor != null
              && this.dm.getDistributionManagerIds().contains(previousGrantor)) {
            // if newGrantor is not previousGrantor...
            if (!newGrantor.equals(previousGrantor)) {
              // problem: specified oldTurk is not previousGrantor...
              if (oldTurk != null && !oldTurk.equals(previousGrantor)) {
                if (logger.isTraceEnabled(LogMarker.DLS)) {
                  logger.trace(LogMarker.DLS,
                      "Elder did not become grantor for {} to {} because oldT was {} and the current grantor is {}",
                      serviceName, newGrantor, oldTurk, previousGrantor);
                }
              }
              // no oldTurk or oldTurk matches previousGrantor... transfer might occur
              else {
                // install new grantor
                if (logger.isTraceEnabled(LogMarker.DLS)) {
                  logger.trace(LogMarker.DLS, "Elder forced to set grantor for {} to {}",
                      serviceName, newGrantor);
                }
                long myVersion = gi.getVersionId() + 1;
                newGrantorVersion = myVersion;
                newInfo = new GrantorInfo(newGrantor, myVersion, newGrantorSerialNumber, false);
                this.nameToInfo.put(serviceName, newInfo);
                // a real transfer from a live, different grantor: depose the old one
                // (DeposeGrantorProcessor.send happens in the finally block below)
                if (gi.getId() != null && (oldTurk == null || gi.getId().equals(oldTurk))
                    && !gi.getId().equals(newGrantor)) {
                  beginInitiatingTransfer(newInfo);
                }
              }
            }
            // return previous grantor
            return new GrantorInfo(gi.getId(), gi.getVersionId(), gi.getSerialNumber(), true);
          }
          // no previousGrantor in existence...
          else {
            long myVersion = gi.getVersionId() + 1;
            // problem: oldTurk was specified but there is no previousGrantor...
            if (oldTurk != null) {
              if (logger.isTraceEnabled(LogMarker.DLS)) {
                logger.trace(LogMarker.DLS,
                    "Elder did not become grantor for {} to {} because oldT was {} and the current grantor {} had crashed",
                    serviceName, newGrantor, oldTurk, previousGrantor);
              }
            }
            // no oldTurk was specified...
            else {
              if (logger.isTraceEnabled(LogMarker.DLS)) {
                logger.trace(LogMarker.DLS,
                    "Elder forced to set grantor for {} to {} and noticed previous grantor had crashed",
                    serviceName, newGrantor);
              }
              // current grantor crashed; make new guy grantor and force recovery
              this.nameToInfo.put(serviceName,
                  new GrantorInfo(newGrantor, myVersion, newGrantorSerialNumber, false));
            }
            return new GrantorInfo(null, myVersion - 1, gi.getSerialNumber(), true);
          }
        }
        // GrantorInfo was null...
        else {
          // problem: oldTurk was specified but the elder has no current grantor...
          if (oldTurk != null) {
            if (logger.isTraceEnabled(LogMarker.DLS)) {
              logger.trace(LogMarker.DLS,
                  "Elder did not become grantor for {} to {} because oldT was {} and elder had no current grantor",
                  serviceName, newGrantor, oldTurk);
            }
          }
          // no oldTurk was specified
          else {
            if (logger.isTraceEnabled(LogMarker.DLS)) {
              logger.trace(LogMarker.DLS,
                  "Elder forced to set grantor for {} to {} because of clean grantor shutdown",
                  serviceName, newGrantor);
            }
            // no current grantor; last one shutdown cleanly
            gi = new GrantorInfo(newGrantor, 1, newGrantorSerialNumber, false);
            this.nameToInfo.put(serviceName, gi);
          }
          return new GrantorInfo(null, 0, 0, false);
        }
      }
    } finally {
      // if a transfer was initiated above, depose the previous grantor outside the
      // synchronized block and then wake up any threads waiting on the transfer
      if (isInitiatingTransfer(newInfo)) {
        Assert.assertTrue(newGrantorVersion > -1);
        DeposeGrantorProcessor.send(serviceName, previousGrantor, newGrantor, newGrantorVersion,
            newGrantorSerialNumber, dm);
        finishInitiatingTransfer(newInfo);
      }
    }
  }
  /**
   * Atomically clears the current grantor of the given service if the current grantor is
   * <code>oldGrantor</code>. The next grantor for this service will not need to recover unless
   * <code>locksHeld</code> is true.
   *
   * @param locksHeld true if old grantor had held locks
   */
  public void clearGrantor(long grantorVersion, String serviceName, int dlsSerialNumber,
      InternalDistributedMember oldGrantor, boolean locksHeld) {
    synchronized (this) {
      if (grantorVersion == -1) {
        // not possible to clear grantor of non-initialized grantorVersion
        return;
      }
      GrantorInfo currentGI = (GrantorInfo) this.nameToInfo.get(serviceName);
      if (currentGI == null) {
        return; // KIRK added this null check because becomeGrantor may not have talked to elder
                // before destroy dls
      }
      if (currentGI.getVersionId() != grantorVersion
          || currentGI.getSerialNumber() != dlsSerialNumber) {
        // not possible to clear mismatched grantorVersion
        return;
      }
      GrantorInfo gi;
      if (locksHeld) {
        // locks outstanding: leave a placeholder entry that forces recovery
        gi = (GrantorInfo) this.nameToInfo.put(serviceName,
            new GrantorInfo(null, currentGI.getVersionId(), 0, true));
      } else {
        gi = (GrantorInfo) this.nameToInfo.remove(serviceName);
      }
      if (gi != null) {
        InternalDistributedMember currentGrantor = gi.getId();
        if (!oldGrantor.equals(currentGrantor)) { // fix for 32603
          // wrong grantor: undo the update above by restoring the previous entry
          this.nameToInfo.put(serviceName, gi);
          if (logger.isTraceEnabled(LogMarker.DLS)) {
            logger.trace(LogMarker.DLS,
                "Elder not making {} grantor shutdown for {} by {} because the current grantor is {}",
                (locksHeld ? "unclean" : "clean"), serviceName, oldGrantor, currentGrantor);
          }
        } else {
          if (logger.isTraceEnabled(LogMarker.DLS)) {
            logger.trace(LogMarker.DLS, "Elder making {} grantor shutdown for {} by {}",
                (locksHeld ? "unclean" : "clean"), serviceName, oldGrantor);
          }
        }
      }
    }
  }
  /** Returns whether a grantor transfer is in progress for gi; false for null. */
  private boolean isInitiatingTransfer(GrantorInfo gi) {
    if (gi == null)
      return false;
    synchronized (this) {
      return gi.isInitiatingTransfer();
    }
  }
  /** Marks gi as currently transferring grantorship. */
  private void beginInitiatingTransfer(GrantorInfo gi) {
    synchronized (this) {
      gi.setInitiatingTransfer(true);
    }
  }
  /** Clears the transferring flag on gi and wakes up threads blocked in waitWhileInitiatingTransfer. */
  private void finishInitiatingTransfer(GrantorInfo gi) {
    synchronized (this) {
      gi.setInitiatingTransfer(false);
      notifyAll();
    }
  }
  /** Blocks until the transfer in progress for gi completes; preserves interrupt status. */
  private void waitWhileInitiatingTransfer(GrantorInfo gi) {
    synchronized (this) {
      boolean interrupted = false;
      try {
        while (gi.isInitiatingTransfer()) {
          try {
            wait();
          } catch (InterruptedException e) {
            interrupted = true;
            dm.getCancelCriterion().checkCancelInProgress(e);
          }
        }
      } finally {
        if (interrupted)
          Thread.currentThread().interrupt();
      }
    }
  }
  /** Testing method to force grantor recovery state for named service */
  public void forceGrantorRecovery(String serviceName) {
    synchronized (this) {
      GrantorInfo gi = (GrantorInfo) this.nameToInfo.get(serviceName);
      if (gi.isInitiatingTransfer()) {
        throw new IllegalStateException(
            LocalizedStrings.ElderState_CANNOT_FORCE_GRANTOR_RECOVERY_FOR_GRANTOR_THAT_IS_TRANSFERRING
                .toLocalizedString());
      }
      // replace the entry with an equivalent one whose needsRecovery flag is true
      this.nameToInfo.put(serviceName,
          new GrantorInfo(gi.getId(), gi.getVersionId(), gi.getSerialNumber(), true));
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.resourceresolver.impl;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import org.apache.commons.collections4.BidiMap;
import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.api.resource.path.Path;
import org.apache.sling.resourceresolver.impl.console.ResourceResolverWebConsolePlugin;
import org.apache.sling.resourceresolver.impl.helper.ResourceDecoratorTracker;
import org.apache.sling.resourceresolver.impl.helper.ResourceResolverControl;
import org.apache.sling.resourceresolver.impl.mapping.MapConfigurationProvider;
import org.apache.sling.resourceresolver.impl.mapping.MapEntries;
import org.apache.sling.resourceresolver.impl.mapping.MapEntriesHandler;
import org.apache.sling.resourceresolver.impl.mapping.Mapping;
import org.apache.sling.resourceresolver.impl.providers.ResourceProviderTracker;
import org.apache.sling.spi.resource.provider.ResourceProvider;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The <code>CommonResourceResolverFactoryImpl</code> is a singleton
* implementing the shared/common functionality of all resource
* resolver factories.
*/
public class CommonResourceResolverFactoryImpl implements ResourceResolverFactory, MapConfigurationProvider {
    /** Logger. */
    private static final Logger LOG = LoggerFactory.getLogger(CommonResourceResolverFactoryImpl.class);
    /** Helper for the resource resolver. */
    private MapEntriesHandler mapEntries = MapEntriesHandler.EMPTY;
    /** The web console plugin. */
    private ResourceResolverWebConsolePlugin plugin;
    /** The activator */
    private final ResourceResolverFactoryActivator activator;
    /**
     * Thread local holding the resource resolver stack
     */
    private ThreadLocal<Stack<WeakReference<ResourceResolver>>> resolverStackHolder = new ThreadLocal<>();
    /** Flag indicating whether this factory is still active. */
    private final AtomicBoolean isActive = new AtomicBoolean(true);
    /** The reference queue to handle disposing of resource resolver instances. */
    private final ReferenceQueue<ResourceResolver> resolverReferenceQueue = new ReferenceQueue<>();
    /** Map of the ResourceResolverControl's hash code to the references to open resource resolver instances. */
    private final Map<Integer, ResolverReference> refs = new ConcurrentHashMap<>();
    /** Background thread handling disposing of resource resolver instances. */
    private final Thread refQueueThread;
    /** Whether unclosed resource resolvers should be logged; taken from the activator configuration. */
    private boolean logUnclosedResolvers;
    /** The JCR namespace mangler instance, or {@code null} if it could not be created (see constructor). */
    private final Object optionalNamespaceMangler;
    /**
     * Create a new common resource resolver factory.
     * Starts a daemon thread that closes resource resolvers whose weak references are
     * enqueued on {@link #resolverReferenceQueue}, i.e. resolvers that were garbage
     * collected without having been closed.
     */
    public CommonResourceResolverFactoryImpl(final ResourceResolverFactoryActivator activator) {
        this.activator = activator;
        this.logUnclosedResolvers = activator.isLogUnclosedResourceResolvers();
        this.refQueueThread = new Thread("Apache Sling Resource Resolver Finalizer Thread") {
            @Override
            public void run() {
                // runs until the factory is deactivated
                while (isLive()) {
                    try {
                        final ResolverReference ref = (ResolverReference) resolverReferenceQueue.remove();
                        ref.close();
                    } catch ( final InterruptedException ie) {
                        // restore the interrupt status; the isLive() check decides whether to stop
                        Thread.currentThread().interrupt();
                    }
                }
            }
        };
        this.refQueueThread.setDaemon(true);
        this.refQueueThread.start();
        // try create namespace mangler; creation failure is logged but not fatal,
        // leaving optionalNamespaceMangler null
        Object mangler = null;
        if ( this.isMangleNamespacePrefixes() ) {
            try {
                mangler = new JcrNamespaceMangler();
            } catch ( final Throwable t) {
                LOG.info("Unable to create JCR namespace mangler: {}", t.getMessage());
            }
        }
        this.optionalNamespaceMangler = mangler;
    }
// ---------- Resource Resolver Factory ------------------------------------
/**
* @see org.apache.sling.api.resource.ResourceResolverFactory#getAdministrativeResourceResolver(java.util.Map)
*/
@Nonnull
@Override
public ResourceResolver getAdministrativeResourceResolver(final Map<String, Object> passedAuthenticationInfo)
throws LoginException {
checkIsLive();
// create a copy of the passed authentication info as we modify the map
final Map<String, Object> authenticationInfo = new HashMap<>();
authenticationInfo.put(ResourceProvider.AUTH_ADMIN, Boolean.TRUE);
if ( passedAuthenticationInfo != null ) {
authenticationInfo.putAll(passedAuthenticationInfo);
// make sure there is no leaking of service info props
// (but the bundle info is passed on as we need it downstream)
authenticationInfo.remove(SUBSERVICE);
}
return getResourceResolverInternal(authenticationInfo, true);
}
/**
* @see org.apache.sling.api.resource.ResourceResolverFactory#getResourceResolver(java.util.Map)
*/
@Nonnull
@Override
public ResourceResolver getResourceResolver(final Map<String, Object> passedAuthenticationInfo)
throws LoginException {
checkIsLive();
// create a copy of the passed authentication info as we modify the map
final Map<String, Object> authenticationInfo = new HashMap<>();
if ( passedAuthenticationInfo != null ) {
authenticationInfo.putAll(passedAuthenticationInfo);
// make sure there is no leaking of service bundle and info props
authenticationInfo.remove(ResourceProvider.AUTH_SERVICE_BUNDLE);
authenticationInfo.remove(SUBSERVICE);
}
final ResourceResolver result = getResourceResolverInternal(authenticationInfo, false);
Stack<WeakReference<ResourceResolver>> resolverStack = resolverStackHolder.get();
if ( resolverStack == null ) {
resolverStack = new Stack<>();
resolverStackHolder.set(resolverStack);
}
resolverStack.push(new WeakReference<>(result));
return result;
}
/**
* @see org.apache.sling.api.resource.ResourceResolverFactory#getThreadResourceResolver()
*/
@Override
public ResourceResolver getThreadResourceResolver() {
if (!isLive()) {
return null;
}
ResourceResolver result = null;
final Stack<WeakReference<ResourceResolver>> resolverStack = resolverStackHolder.get();
if ( resolverStack != null) {
while ( result == null && !resolverStack.isEmpty() ) {
result = resolverStack.peek().get();
if ( result == null ) {
resolverStack.pop();
}
}
}
return result;
}
// ---------- Implementation helpers --------------------------------------
    /**
     * Inform about a new resource resolver instance.
     * We create a weak reference to be able to close the resolver if close on the
     * resource resolver is never called.
     * @param resolver The resource resolver
     * @param ctrl The resource resolver control
     */
    public void register(final ResourceResolver resolver,
            final ResourceResolverControl ctrl) {
        // create new weak reference
        // NOTE(review): the map is keyed by ctrl.hashCode(); two distinct controls
        // with colliding hash codes would overwrite each other's reference —
        // confirm ResourceResolverControl uses identity hash codes.
        refs.put(ctrl.hashCode(), new ResolverReference(resolver, this.resolverReferenceQueue, ctrl, this));
    }
    /**
     * Inform about a closed resource resolver.
     * Make sure to remove it from the current thread context.
     * @param resourceResolverImpl The resource resolver
     * @param ctrl The resource resolver control
     */
    public void unregister(final ResourceResolver resourceResolverImpl,
            final ResourceResolverControl ctrl) {
        unregisterControl(ctrl);
        // on shutdown, the factory might already be closed before the resolvers close
        // therefore we have to check for null
        final ThreadLocal<Stack<WeakReference<ResourceResolver>>> tl = resolverStackHolder;
        if ( tl != null ) {
            final Stack<WeakReference<ResourceResolver>> resolverStack = tl.get();
            if ( resolverStack != null ) {
                // remove the closed resolver plus any entries already cleared by GC
                final Iterator<WeakReference<ResourceResolver>> i = resolverStack.iterator();
                while ( i.hasNext() ) {
                    final WeakReference<ResourceResolver> ref = i.next();
                    if ( ref.get() == null || ref.get() == resourceResolverImpl ) {
                        i.remove();
                    }
                }
                // drop the thread-local value entirely once nothing is tracked anymore
                if ( resolverStack.isEmpty() ) {
                    tl.remove();
                }
            }
        }
    }
    /**
     * Create a new ResourceResolver
     * @param authenticationInfo The authentication map (used as-is, not copied here;
     *        callers are responsible for any defensive copying/filtering)
     * @param isAdmin is an administrative resolver requested?
     * @return A resource resolver
     * @throws LoginException if login to any of the required resource providers fails.
     */
    ResourceResolver getResourceResolverInternal(final Map<String, Object> authenticationInfo,
            final boolean isAdmin)
    throws LoginException {
        checkIsLive();
        return new ResourceResolverImpl(this, isAdmin, authenticationInfo);
    }
/**
* Close a resource resolver control and remove its corresponding
* resolver reference from the map of weak references.
*
* @param ctrl The resource resolver control
* @return true if the control was closed, false it had been closed before.
*/
private boolean unregisterControl(final ResourceResolverControl ctrl) {
// remove reference from the set of weak references and clear
final ResolverReference reference = refs.remove(ctrl.hashCode());
if (reference != null) {
reference.clear();
}
final boolean doCloseControl = !ctrl.isClosed();
if (doCloseControl) {
ctrl.close();
}
return doCloseControl;
}
private void checkIsLive() throws LoginException {
if ( !isLive() ) {
throw new LoginException("ResourceResolverFactory is deactivated.");
}
}
    /** @return the map entries handler (initialized in {@code activate}). */
    public MapEntriesHandler getMapEntries() {
        return mapEntries;
    }
    /** Activates this component */
    protected void activate(final BundleContext bundleContext) {
        final Logger logger = LoggerFactory.getLogger(getClass());
        // The web console plugin is optional: Throwable is caught because its
        // classes may not be resolvable at all, so failure is only debug-logged.
        try {
            plugin = new ResourceResolverWebConsolePlugin(bundleContext, this, this.activator.getRuntimeService());
        } catch (final Throwable ignore) {
            // an exception here probably means the web console plugin is not
            // available
            logger.debug("activate: unable to setup web console plugin.", ignore);
        }
        // set up the map entries from configuration
        try {
            mapEntries = new MapEntries(this, bundleContext, this.activator.getEventAdmin());
        } catch (final Exception e) {
            logger.error("activate: Cannot access repository, failed setting up Mapping Support", e);
        }
    }
    /**
     * Deactivates this component.
     * Idempotent: the compare-and-set guard makes repeated calls no-ops.
     * Shutdown order: stop the reference queue thread, dispose optional plugin
     * and map entries, then force-close all still-registered resolvers.
     */
    protected void deactivate() {
        if (!isActive.compareAndSet(true, false)) {
            return;
        }
        this.refQueueThread.interrupt();
        if (plugin != null) {
            plugin.dispose();
            plugin = null;
        }
        if (mapEntries instanceof MapEntries ) {
            ((MapEntries)mapEntries).dispose();
            mapEntries = MapEntries.EMPTY;
        }
        resolverStackHolder = null;
        // copy and clear map before closing the remaining references
        final Collection<ResolverReference> references = new ArrayList<>(refs.values());
        refs.clear();
        for(final ResolverReference ref : references) {
            ref.close();
        }
    }
    /** Delegates to the activator. */
    public ResourceDecoratorTracker getResourceDecoratorTracker() {
        return this.activator.getResourceDecoratorTracker();
    }

    /** Delegates to the activator. */
    public String[] getSearchPath() {
        return this.activator.getSearchPath();
    }

    /** Delegates to the activator. */
    public boolean isMangleNamespacePrefixes() {
        return this.activator.isMangleNamespacePrefixes();
    }

    // NOTE(review): returns the raw field; may be null when no mangler is
    // configured — confirm against the field's initialization site.
    public Object getNamespaceMangler() {
        return this.optionalNamespaceMangler;
    }

    /** Delegates to the activator. */
    @Override
    public String getMapRoot() {
        return this.activator.getMapRoot();
    }

    /** Delegates to the activator. */
    @Override
    public boolean isMapConfiguration(String path) {
        return this.activator.isMapConfiguration(path);
    }

    /** Delegates to the activator. */
    @Override
    public Mapping[] getMappings() {
        return this.activator.getMappings();
    }

    /** Delegates to the activator. */
    @Override
    public BidiMap getVirtualURLMap() {
        return this.activator.getVirtualURLMap();
    }

    /** Delegates to the activator. */
    @Override
    public int getDefaultVanityPathRedirectStatus() {
        return this.activator.getDefaultVanityPathRedirectStatus();
    }
    /**
     * Gets the ServiceTracker of the ResourceAccessSecurity service.
     */
    public ResourceAccessSecurityTracker getResourceAccessSecurityTracker () {
        return this.activator.getResourceAccessSecurityTracker();
    }
    /**
     * Creates a (non-admin) resolver for the given service authentication info.
     * NOTE(review): unlike getResourceResolver(Map), the passed map is forwarded
     * as-is — no defensive copy and no removal of service properties. Confirm
     * all callers pass a map they own (e.g. one built by
     * getServiceUserAuthenticationInfo).
     */
    @Nonnull
    @Override
    public ResourceResolver getServiceResourceResolver(
            final Map<String, Object> authenticationInfo) throws LoginException {
        checkIsLive();
        return getResourceResolverInternal(authenticationInfo, false);
    }
    /** Delegates to the activator's configuration. */
    @Override
    public boolean isVanityPathEnabled() {
        return this.activator.isVanityPathEnabled();
    }

    /** Delegates to the activator's configuration. */
    @Override
    public long getMaxCachedVanityPathEntries() {
        return this.activator.getMaxCachedVanityPathEntries();
    }

    /** Delegates to the activator's configuration. */
    @Override
    public boolean isMaxCachedVanityPathEntriesStartup() {
        return this.activator.isMaxCachedVanityPathEntriesStartup();
    }

    /** Delegates to the activator's configuration. */
    @Override
    public int getVanityBloomFilterMaxBytes() {
        return this.activator.getVanityBloomFilterMaxBytes();
    }

    /** Delegates to the activator's configuration. */
    @Override
    public boolean isOptimizeAliasResolutionEnabled() {
        return this.activator.isOptimizeAliasResolutionEnabled();
    }

    /** Delegates to the activator's configuration. */
    @Override
    public boolean hasVanityPathPrecedence() {
        return this.activator.hasVanityPathPrecedence();
    }

    /** Delegates to the activator's configuration. */
    @Override
    public Path[] getObservationPaths() {
        return this.activator.getObservationPaths();
    }
@Override
public List<VanityPathConfig> getVanityPathConfig() {
final String[] includes = this.activator.getVanityPathWhiteList();
final String[] excludes = this.activator.getVanityPathBlackList();
if ( includes == null && excludes == null ) {
return null;
}
final List<VanityPathConfig> configs = new ArrayList<>();
if ( includes != null ) {
for(final String val : includes) {
configs.add(new VanityPathConfig(val, false));
}
}
if ( excludes != null ) {
for(final String val : excludes) {
configs.add(new VanityPathConfig(val, true));
}
}
Collections.sort(configs);
return configs;
}
    /**
     * Is this factory still alive?
     */
    public boolean isLive() {
        return this.isActive.get();
    }

    /** Delegates to the activator's configuration. */
    public boolean shouldLogResourceResolverClosing() {
        return activator.shouldLogResourceResolverClosing();
    }

    /** Delegates to the activator. */
    public ResourceProviderTracker getResourceProviderTracker() {
        return activator.getResourceProviderTracker();
    }
    /**
     * Builds the authentication info for a service user of this bundle.
     * @param subServiceName the sub service name used for the user mapping
     * @return a new map containing sub service name, mapped user and bundle
     * @throws LoginException if no service user is mapped for this bundle
     *         and sub service
     */
    @Override
    public Map<String, Object> getServiceUserAuthenticationInfo(final String subServiceName)
    throws LoginException {
        // get an administrative resource resolver
        // Ensure a mapped user name: If no user is defined for a bundle
        // acting as a service, the user may be null. We can decide whether
        // this should yield guest access or no access at all. For now
        // no access is granted if there is no service user defined for
        // the bundle.
        final Bundle bundle = this.activator.getBundleContext().getBundle();
        final String userName = this.activator.getServiceUserMapper().getServiceUserID(bundle, subServiceName);
        if (userName == null) {
            throw new LoginException("Cannot derive user name for bundle "
                + bundle + " and sub service " + subServiceName);
        }
        final Map<String, Object> authenticationInfo = new HashMap<>();
        // ensure proper user name and service bundle
        authenticationInfo.put(ResourceResolverFactory.SUBSERVICE, subServiceName);
        authenticationInfo.put(ResourceResolverFactory.USER, userName);
        authenticationInfo.put(ResourceProvider.AUTH_SERVICE_BUNDLE, bundle);
        return authenticationInfo;
    }
    /**
     * Extension of a weak reference to be able to get the control object
     * that is used for cleaning up.
     */
    private static final class ResolverReference extends WeakReference<ResourceResolver> {
        // control used to actually close the resolver's underlying resources
        private final ResourceResolverControl control;
        // creation stack trace; only captured when unclosed-resolver logging is
        // configured AND info logging is enabled, since filling in a stack
        // trace is comparatively expensive
        private final Exception openingException;
        private final CommonResourceResolverFactoryImpl factory;

        ResolverReference(final ResourceResolver referent,
                final ReferenceQueue<? super ResourceResolver> q,
                final ResourceResolverControl ctrl,
                final CommonResourceResolverFactoryImpl factory) {
            super(referent, q);
            this.control = ctrl;
            this.factory = factory;
            this.openingException = factory.logUnclosedResolvers && LOG.isInfoEnabled() ? new Exception("Opening Stacktrace") : null;
        }

        /**
         * Closes the control if it is still open; when configured, logs the
         * creation stack trace of the resolver that was never closed.
         * Any error is caught so reference-queue processing never dies.
         */
        public void close() {
            try {
                if (factory.unregisterControl(this.control) && factory.logUnclosedResolvers) {
                    if (factory.isLive()) {
                        LOG.warn("Closed unclosed ResourceResolver. The creation stacktrace is available on info log level.");
                    } else {
                        LOG.warn("Forced close of ResourceResolver because the ResourceResolverFactory is shutting down.");
                    }
                    LOG.info("Unclosed ResourceResolver was created here: ", openingException);
                }
            } catch (Throwable t) {
                LOG.warn("Exception while closing ResolverReference", t);
            }
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.resourceGroups.db;
import com.facebook.presto.resourceGroups.AbstractResourceConfigurationManager;
import com.facebook.presto.resourceGroups.ManagerSpec;
import com.facebook.presto.resourceGroups.ResourceGroupIdTemplate;
import com.facebook.presto.resourceGroups.ResourceGroupSpec;
import com.facebook.presto.resourceGroups.SelectorSpec;
import com.facebook.presto.spi.memory.ClusterMemoryPoolManager;
import com.facebook.presto.spi.resourceGroups.ResourceGroup;
import com.facebook.presto.spi.resourceGroups.ResourceGroupId;
import com.facebook.presto.spi.resourceGroups.ResourceGroupSelector;
import com.facebook.presto.spi.resourceGroups.SelectionContext;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.inject.Inject;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkState;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
/**
 * Resource group configuration manager backed by a database.
 *
 * <p>The full spec tree is rebuilt from the db once a second (see {@link #start()}).
 * {@link #load()} diffs freshly built specs against the previous generation,
 * reconfigures only groups whose spec changed, and disables (rather than
 * removes) groups whose spec was deleted.</p>
 *
 * <p>Thread-safety: the mutable spec map is guarded by {@code this}; snapshots
 * handed to readers (root groups, selectors, cpu quota period) live in atomics.</p>
 */
public class DbResourceGroupConfigurationManager
        extends AbstractResourceConfigurationManager
{
    private static final Logger log = Logger.get(DbResourceGroupConfigurationManager.class);
    private final ResourceGroupsDao dao;
    // All concrete groups ever handed to configure(), keyed by group id.
    private final ConcurrentMap<ResourceGroupId, ResourceGroup> groups = new ConcurrentHashMap<>();
    // Previous generation of specs; load() diffs against it to find changes.
    @GuardedBy("this")
    private Map<ResourceGroupIdTemplate, ResourceGroupSpec> resourceGroupSpecs = new HashMap<>();
    // For each spec template, the concrete group ids instantiated from it.
    private final ConcurrentMap<ResourceGroupIdTemplate, List<ResourceGroupId>> configuredGroups = new ConcurrentHashMap<>();
    private final AtomicReference<List<ResourceGroupSpec>> rootGroups = new AtomicReference<>(ImmutableList.of());
    // NOTE(review): starts as null and is only set by a successful load(); if the
    // initial load in the constructor fails, getSelectors() returns null — confirm
    // callers tolerate that.
    private final AtomicReference<List<ResourceGroupSelector>> selectors = new AtomicReference<>();
    private final AtomicReference<Optional<Duration>> cpuQuotaPeriod = new AtomicReference<>(Optional.empty());
    private final ScheduledExecutorService configExecutor = newSingleThreadScheduledExecutor(daemonThreadsNamed("DbResourceGroupConfigurationManager"));
    private final AtomicBoolean started = new AtomicBoolean();

    @Inject
    public DbResourceGroupConfigurationManager(ClusterMemoryPoolManager memoryPoolManager, ResourceGroupsDao dao)
    {
        super(memoryPoolManager);
        requireNonNull(memoryPoolManager, "memoryPoolManager is null");
        requireNonNull(dao, "daoProvider is null");
        this.dao = dao;
        // create tables if missing, then do an initial synchronous load
        this.dao.createResourceGroupsGlobalPropertiesTable();
        this.dao.createResourceGroupsTable();
        this.dao.createSelectorsTable();
        load();
    }

    @Override
    protected Optional<Duration> getCpuQuotaPeriod()
    {
        return cpuQuotaPeriod.get();
    }

    @Override
    protected List<ResourceGroupSpec> getRootGroups()
    {
        return rootGroups.get();
    }

    /** Stops the periodic reload task. */
    @PreDestroy
    public void destroy()
    {
        configExecutor.shutdownNow();
    }

    /** Starts the once-per-second reload; safe to call multiple times. */
    @PostConstruct
    public void start()
    {
        if (started.compareAndSet(false, true)) {
            configExecutor.scheduleWithFixedDelay(this::load, 1, 1, TimeUnit.SECONDS);
        }
    }

    /**
     * Configures a newly selected group from its matching spec, recording the
     * group so later spec changes can be re-applied to it.
     */
    @Override
    public void configure(ResourceGroup group, SelectionContext context)
    {
        Map.Entry<ResourceGroupIdTemplate, ResourceGroupSpec> entry = getMatchingSpec(group, context);
        if (groups.putIfAbsent(group.getId(), group) == null) {
            // If a new spec replaces the spec returned from getMatchingSpec the group will be reconfigured on the next run of load().
            configuredGroups.computeIfAbsent(entry.getKey(), v -> new LinkedList<>()).add(group.getId());
        }
        // serialize configuration per resource group tree by locking its root
        synchronized (getRootGroup(group.getId())) {
            configureGroup(group, entry.getValue());
        }
    }

    @Override
    public List<ResourceGroupSelector> getSelectors()
    {
        return this.selectors.get();
    }

    private synchronized Optional<Duration> getCpuQuotaPeriodFromDb()
    {
        List<ResourceGroupGlobalProperties> globalProperties = dao.getResourceGroupGlobalProperties();
        checkState(globalProperties.size() <= 1, "There is more than one cpu_quota_period");
        return (!globalProperties.isEmpty()) ? globalProperties.get(0).getCpuQuotaPeriod() : Optional.empty();
    }

    /**
     * Reloads specs from the db, publishes the new snapshots, and re-applies
     * configuration to changed groups / disables deleted ones.
     * Catches Throwable so a bad db state never kills the scheduled task.
     */
    @VisibleForTesting
    public synchronized void load()
    {
        try {
            Map.Entry<ManagerSpec, Map<ResourceGroupIdTemplate, ResourceGroupSpec>> specsFromDb = buildSpecsFromDb();
            ManagerSpec managerSpec = specsFromDb.getKey();
            Map<ResourceGroupIdTemplate, ResourceGroupSpec> resourceGroupSpecs = specsFromDb.getValue();
            Set<ResourceGroupIdTemplate> changedSpecs = new HashSet<>();
            // Sets.difference returns a view over the captured key sets, so it stays
            // valid after this.resourceGroupSpecs is reassigned below.
            Set<ResourceGroupIdTemplate> deletedSpecs = Sets.difference(this.resourceGroupSpecs.keySet(), resourceGroupSpecs.keySet());
            for (Map.Entry<ResourceGroupIdTemplate, ResourceGroupSpec> entry : resourceGroupSpecs.entrySet()) {
                if (!entry.getValue().sameConfig(this.resourceGroupSpecs.get(entry.getKey()))) {
                    changedSpecs.add(entry.getKey());
                }
            }
            this.resourceGroupSpecs = resourceGroupSpecs;
            this.cpuQuotaPeriod.set(managerSpec.getCpuQuotaPeriod());
            this.rootGroups.set(managerSpec.getRootGroups());
            this.selectors.set(buildSelectors(managerSpec));
            configureChangedGroups(changedSpecs);
            disableDeletedGroups(deletedSpecs);
        }
        catch (Throwable e) {
            log.error(e, "Error loading configuration from db");
        }
    }

    // Populate temporary data structures to build resource group specs and selectors from db
    private synchronized void populateFromDbHelper(Map<Long, ResourceGroupSpecBuilder> recordMap,
            Set<Long> rootGroupIds,
            Map<Long, ResourceGroupIdTemplate> resourceGroupIdTemplateMap,
            Map<Long, Set<Long>> subGroupIdsToBuild)
    {
        List<ResourceGroupSpecBuilder> records = dao.getResourceGroups();
        for (ResourceGroupSpecBuilder record : records) {
            recordMap.put(record.getId(), record);
            if (!record.getParentId().isPresent()) {
                // no parent -> root group; its template id is just its name
                rootGroupIds.add(record.getId());
                resourceGroupIdTemplateMap.put(record.getId(), new ResourceGroupIdTemplate(record.getNameTemplate().toString()));
            }
            else {
                subGroupIdsToBuild.computeIfAbsent(record.getParentId().get(), k -> new HashSet<>()).add(record.getId());
            }
        }
    }

    /**
     * Builds the manager spec and the template-id -> spec map from db records.
     * Specs are built leaves-first: a group is only built once all of its
     * subgroups have been built and attached to it.
     */
    private synchronized Map.Entry<ManagerSpec, Map<ResourceGroupIdTemplate, ResourceGroupSpec>> buildSpecsFromDb()
    {
        // New resource group spec map
        Map<ResourceGroupIdTemplate, ResourceGroupSpec> resourceGroupSpecs = new HashMap<>();
        // Set of root group db ids
        Set<Long> rootGroupIds = new HashSet<>();
        // Map of id from db to resource group spec
        Map<Long, ResourceGroupSpec> resourceGroupSpecMap = new HashMap<>();
        // Map of id from db to resource group template id
        Map<Long, ResourceGroupIdTemplate> resourceGroupIdTemplateMap = new HashMap<>();
        // Map of id from db to resource group spec builder
        Map<Long, ResourceGroupSpecBuilder> recordMap = new HashMap<>();
        // Map of subgroup id's not yet built
        Map<Long, Set<Long>> subGroupIdsToBuild = new HashMap<>();
        populateFromDbHelper(recordMap, rootGroupIds, resourceGroupIdTemplateMap, subGroupIdsToBuild);
        // Build up resource group specs from leaf to root
        for (LinkedList<Long> queue = new LinkedList<>(rootGroupIds); !queue.isEmpty(); ) {
            Long id = queue.pollFirst();
            // derive the template id from the parent's template id on first visit
            resourceGroupIdTemplateMap.computeIfAbsent(id, k -> {
                ResourceGroupSpecBuilder builder = recordMap.get(id);
                return ResourceGroupIdTemplate.forSubGroupNamed(
                        resourceGroupIdTemplateMap.get(builder.getParentId().get()),
                        builder.getNameTemplate().toString());
            });
            Set<Long> childrenToBuild = subGroupIdsToBuild.getOrDefault(id, ImmutableSet.of());
            // Add to resource group specs if no more child resource groups are left to build
            if (childrenToBuild.isEmpty()) {
                ResourceGroupSpecBuilder builder = recordMap.get(id);
                ResourceGroupSpec resourceGroupSpec = builder.build();
                resourceGroupSpecMap.put(id, resourceGroupSpec);
                // Add newly built spec to spec map
                resourceGroupSpecs.put(resourceGroupIdTemplateMap.get(id), resourceGroupSpec);
                // Add this resource group spec to parent subgroups and remove id from subgroup ids to build
                builder.getParentId().ifPresent(parentId -> {
                    recordMap.get(parentId).addSubGroup(resourceGroupSpec);
                    subGroupIdsToBuild.get(parentId).remove(id);
                });
            }
            else {
                // Add this group back to queue since it still has subgroups to build
                queue.addFirst(id);
                // Add this group's subgroups to the queue so that when this id is dequeued again childrenToBuild will be empty
                queue.addAll(0, childrenToBuild);
            }
        }
        // Specs are built from db records, validate and return manager spec
        List<ResourceGroupSpec> rootGroups = rootGroupIds.stream().map(resourceGroupSpecMap::get).collect(Collectors.toList());
        List<SelectorSpec> selectors = dao.getSelectors().stream().map(selectorRecord ->
                new SelectorSpec(
                        selectorRecord.getUserRegex(),
                        selectorRecord.getSourceRegex(),
                        Optional.empty(),
                        resourceGroupIdTemplateMap.get(selectorRecord.getResourceGroupId()))
        ).collect(Collectors.toList());
        ManagerSpec managerSpec = new ManagerSpec(rootGroups, selectors, getCpuQuotaPeriodFromDb());
        validateRootGroups(managerSpec);
        return new AbstractMap.SimpleImmutableEntry<>(managerSpec, resourceGroupSpecs);
    }

    /** Re-applies configuration to every live group built from a changed spec. */
    private synchronized void configureChangedGroups(Set<ResourceGroupIdTemplate> changedSpecs)
    {
        for (ResourceGroupIdTemplate resourceGroupIdTemplate : changedSpecs) {
            for (ResourceGroupId resourceGroupId : configuredGroups.getOrDefault(resourceGroupIdTemplate, ImmutableList.of())) {
                // same per-tree root lock as configure()
                synchronized (getRootGroup(resourceGroupId)) {
                    configureGroup(groups.get(resourceGroupId), resourceGroupSpecs.get(resourceGroupIdTemplate));
                }
            }
        }
    }

    /** Disables every live group whose spec was removed from the db. */
    private synchronized void disableDeletedGroups(Set<ResourceGroupIdTemplate> deletedSpecs)
    {
        for (ResourceGroupIdTemplate resourceGroupIdTemplate : deletedSpecs) {
            for (ResourceGroupId resourceGroupId : configuredGroups.getOrDefault(resourceGroupIdTemplate, ImmutableList.of())) {
                disableGroup(groups.get(resourceGroupId));
            }
        }
    }

    private synchronized void disableGroup(ResourceGroup group)
    {
        // Disable groups that are removed from the db
        group.setMaxRunningQueries(0);
        group.setMaxQueuedQueries(0);
    }

    /** Walks the parent chain up to the root group of the given group id. */
    private ResourceGroup getRootGroup(ResourceGroupId groupId)
    {
        Optional<ResourceGroupId> parent = groupId.getParent();
        while (parent.isPresent()) {
            groupId = parent.get();
            parent = groupId.getParent();
        }
        // GroupId is guaranteed to be in groups: it is added before the first call to this method in configure()
        return groups.get(groupId);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avalon.fortress.testcase;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import org.apache.avalon.excalibur.logger.LoggerManager;
import org.apache.avalon.fortress.impl.DefaultContainer;
import org.apache.avalon.fortress.impl.DefaultContainerManager;
import org.apache.avalon.fortress.util.FortressConfig;
import org.apache.avalon.fortress.util.OverridableContext;
import org.apache.avalon.framework.activity.Disposable;
import org.apache.avalon.framework.activity.Initializable;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.apache.avalon.framework.container.ContainerUtil;
import org.apache.avalon.framework.context.Context;
import org.apache.avalon.framework.context.DefaultContext;
import org.apache.avalon.framework.logger.Logger;
import org.apache.avalon.framework.logger.LogKitLogger;
import org.apache.avalon.framework.service.ServiceException;
import org.apache.avalon.framework.service.ServiceManager;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import junit.framework.TestResult;
import org.apache.log.Hierarchy;
import org.apache.log.LogTarget;
import org.apache.log.Priority;
import org.apache.log.format.PatternFormatter;
import org.apache.log.output.io.StreamTarget;
/**
* JUnit TestCase for Avalon Components in Fortress.
* <p>
* This class extends the JUnit TestCase class to setup an environment which
* makes it possible to easily test Avalon Components. The following methods
* and instance variables are exposed for convenience testing:
* </p>
* <dl>
* <dt>m_serviceManager</dt>
* <dd>
* This instance variable contains an initialized ServiceLocator which
* can be used to lookup Components configured in the test configuration
* file. (see below)
* </dd>
* <dt>getLogger()</dt>
* <dd>
* This method returns the default logger for this test case
* </dd>
* </dl>
* <p>
* The following test case configuration can be used as a basis for new tests.
 * Detailed explanations of the configuration elements can be found after
* the example. The example will log all logger output to the console and to
* a log file.
* </p>
* <pre>
* <testcase>
* <annotation>
* <![CDATA[
* <title>{Name of test}</title>
* <para>
* {Description of test}
* The configuration is specified in the file located in
* <parameter>avalon-excalibur/src/test/{path and name of conf file}.xtext</parameter>.
* </para>
* ]]>
* </annotation>
*
* <logger log-level="INFO">
* <factories>
* <factory type="stream" class="org.apache.avalon.excalibur.logger.factory.StreamTargetFactory"/>
* <factory type="file" class="org.apache.avalon.excalibur.logger.factory.FileTargetFactory"/>
* </factories>
*
* <targets>
* <stream id="console">
* <stream>System.out</stream>
* <format type="avalon">
* %7.7{priority} %23.23{time:yyyy-MM-dd' 'HH:mm:ss.SSS} [%30.30{category}] (%{context}): %{message}\n%{throwable}
* </format>
* </stream>
* <file id="log-file">
* <filename>TEST-{full test class name}.log</filename>
* <format type="avalon">
* %7.7{priority} %23.23{time:yyyy-MM-dd' 'HH:mm:ss.SSS} [%30.30{category}] (%{context}): %{message}\n%{throwable}
* </format>
* </file>
* </targets>
*
* <categories>
* <category name="test" log-level="INFO">
* <log-target id-ref="console"/>
* <log-target id-ref="log-file"/>
* </category>
* <category name="jdbc" log-level="INFO">
* <log-target id-ref="console"/>
* <log-target id-ref="log-file"/>
* </category>
* </categories>
* </logger>
*
* <context>
* <entry name="foo" value="bar"/>
* <entry name="baz" class="my.context.Class"/>
* </context>
*
* <roles>
* <role name="org.apache.avalon.excalibur.datasource.DataSourceComponent"
* <component shorthand="jdbc"
* class="org.apache.avalon.excalibur.datasource.JdbcDataSource"
* handler="org.apache.avalon.fortress.impl.handler.ThreadSafeComponentHandler"/>
* </role>
* </roles>
*
* <components>
* <jdbc name="personell" logger="jdbc">
* <pool-controller min="5" max="10"/>
* <jdbc name="personnel"/>
* <dburl>jdbc:odbc:test</dburl>
* <user>test</user>
* <password>test</password>
* <driver>sun.jdbc.odbc.JdbcOdbcDriver</driver>
* </jdbc>
* </components>
* </testcase>
* </pre>
* <p>
* Element Explanation:
* <dl>
* <dt>testcase</dt>
* <dd>Defines a test case configuration. Must contain one each of the
* following elements: <code>annotation</code>, <code>logger</code>,
* <code>context</code>, <code>roles</code>, and <code>components</code>
* </dd>.
*
* <dt>annotation</dt>
* <dd>Defines a test annotation. This element should define a block of
* XML enclosed within a CDATA element. The XML should be made up of a
* <code>title</code> element, naming the test, and a <code>para</code>
* element which is used to describe the test.</dd>
*
* <dt>logger</dt>
* <dd>Configures the logger used by the test cases and the components used
* by the tests. The <code>logger</code> element takes two optional
* attributes:
* <dl>
* <dt>logger</dt><dd>Uses to name the logger which is used to bootstrap
* the LogKit logger. (Defaults to <code>"lm"</code>)</dd>
* <dt>log-level</dt><dd>Because the logger used by the LogKit must be
* created before the Log Kit Manager is initialized, it must be fully
* configured before the <code>logger</code> element is parsed. This
* attribute allows the Log Kit's log priority to be set. This log
* level will also become the default for the Role Manager, Service
* Manager, and all components if they do not have <code>category</code>
 * elements declared in the <code>logger</code> element.
* (Defaults to "INFO")</dd>
* </dl>
* The loggers used by test cases and components can be easily configured
* from within this file. The default test configuration, shown above,
* includes a "test" category. This category is used to configure the
* default logger for all test cases. If it is set to "DEBUG", then all
 * test debug logging will be enabled. To enable debug logging for a
* single test case, a child category must be defined for the
* "testCheckTotals" test case as follows:
* <pre>
* <categories>
* <category name="test" log-level="INFO">
* <log-target id-ref="console"/>
* <log-target id-ref="log-file"/>
*
* <category name="testCheckTotals" log-level="DEBUG">
* <log-target id-ref="console"/>
* <log-target id-ref="log-file"/>
* </category>
* </category>
* </categories>
* </pre>
* For general information on how to configure the Logger Manager, please
* refer to the Log Kit documentation.
* </dd>
*
* <dt>context</dt>
* <dd>Allows context properties to be set in the context passed to any
* Contextualizable components.</dd>
*
* <dt>roles</dt>
* <dd>Roles configuration for the Components configured in the
* <code>components</code> element. The logger used by the RoleManager
* can be configured using a <code>logger</code> attribute, which defaults
* to "rm". By default this logger will have the same log level and
* formatting as the LogKit logger. It can be configured by adding a
* <code>category</code> within the <code>logger</code> element.</dd>
*
* <dt>components</dt>
* <dd>Used to configure any Components used by the test cases. The logger
* used by the ServiceLocator can be configured using a <code>logger</code>
* attribute, which defaults to "cm". By default this logger will have the
* same log level and formatting as the LogKit logger. It can be configured
* by adding a <code>category</code> within the <code>logger</code> element.
* </dd>
*
* </dl>
*
* @author <a href="mailto:dev@avalon.apache.org">Avalon Development Team</a>
*/
public class FortressTestCase extends TestCase
{
    ///Format of default formatter
    private static final String FORMAT =
        "%7.7{priority} %23.23{time:yyyy-MM-dd' 'HH:mm:ss.SSS} [%30.30{category}] " +
        "(%{context}): %{message}\n%{throwable}";

    //The default logger
    private Logger m_logger;

    // The container manager
    private DefaultContainerManager m_containerManager;

    // The container itself
    private DefaultContainer m_container;

    // Logger manager used for per-test-case category loggers
    private LoggerManager m_loggerManager;

    // Service manager exposed to subclasses for component lookup
    private ServiceManager m_serviceManager;

    // Maps each concrete test class to the list of its not-yet-run "test*"
    // method names; run() uses this so each class runs its tests exactly once
    private static HashMap m_tests = new HashMap();
    /**
     * Creates the test case and, on the first instantiation of each concrete
     * class, caches the names of all of its public, void, parameterless
     * "test*" methods for later execution by {@link #run}.
     * @param name the test name (JUnit 3 convention)
     */
    public FortressTestCase( final String name )
    {
        super( name );
        ArrayList methodList = (ArrayList)FortressTestCase.m_tests.get( getClass() );
        Method[] methods = getClass().getMethods();
        if( null == methodList )
        {
            methodList = new ArrayList( methods.length );
            for( int i = 0; i < methods.length; i++ )
            {
                String methodName = methods[ i ].getName();
                // only public void no-arg methods whose name starts with "test" qualify
                if( methodName.startsWith( "test" ) &&
                    ( Modifier.isPublic( methods[ i ].getModifiers() ) ) &&
                    ( methods[ i ].getReturnType().equals( Void.TYPE ) ) &&
                    ( methods[ i ].getParameterTypes().length == 0 ) )
                {
                    methodList.add( methodName );
                }
            }
            FortressTestCase.m_tests.put( getClass(), methodList );
        }
    }
    /** @return true when the test container's service manager knows the key. */
    protected final boolean hasService( final String key )
    {
        return m_serviceManager.hasService( key );
    }

    /**
     * Looks up a component from the test container.
     * @throws ServiceException if no component is registered for the key
     */
    protected final Object lookup( final String key )
        throws ServiceException
    {
        return m_serviceManager.lookup( key );
    }

    /** Releases a component previously obtained via {@link #lookup}. */
    protected final void release( final Object object )
    {
        m_serviceManager.release( object );
    }

    /** Return the logger */
    protected Logger getLogger()
    {
        return m_logger;
    }
    /**
     * Override <code>run</code> so that we can have code that is run once.
     *
     * Runs ALL cached "test*" methods of this class inside a single container
     * lifecycle (prepare / initialize ... done / dispose), instead of JUnit's
     * usual one-fixture-per-test model. The cached method list is cleared at
     * the end so further run() calls for the same class become no-ops.
     */
    public final void run( TestResult result )
    {
        ArrayList methodList = (ArrayList)FortressTestCase.m_tests.get( getClass() );
        if( null == methodList || methodList.isEmpty() )
        {
            return; // The test was already run! NOTE: this is a hack.
        }
        // Set the logger for the initialization phase.
        setCurrentLogger( getBaseClassName( getClass() ) );
        try
        {
            prepare();
            if( this instanceof Initializable )
            {
                ( (Initializable)this ).initialize();
            }
            Iterator tests = methodList.iterator();
            while( tests.hasNext() )
            {
                String methodName = (String)tests.next();
                // point JUnit at the next test method and switch its logger
                setName( methodName );
                setCurrentLogger( methodName );
                if( getLogger().isDebugEnabled() )
                {
                    getLogger().debug( "" );
                    getLogger().debug( "========================================" );
                    getLogger().debug( "  begin test: " + methodName );
                    getLogger().debug( "========================================" );
                }
                super.run( result );
                if( getLogger().isDebugEnabled() )
                {
                    getLogger().debug( "========================================" );
                    getLogger().debug( "  end test: " + methodName );
                    getLogger().debug( "========================================" );
                    getLogger().debug( "" );
                }
            }
        }
        catch( Exception e )
        {
            System.out.println( e );
            e.printStackTrace();
            result.addError( this, e );
        }
        finally
        {
            // tear down the container even when setup or a test blew up
            done();
            if( this instanceof Disposable )
            {
                try
                {
                    ( (Disposable)this ).dispose();
                }
                catch( Exception e )
                {
                    result.addFailure( this, new AssertionFailedError( "Disposal Error" ) );
                }
            }
        }
        // mark this class as already run (see the guard at the top)
        methodList.clear();
        FortressTestCase.m_tests.put( getClass(), methodList );
    }
/**
 * Initializes Fortress.
 *
 * The configuration file is determined by the class name plus .xtest appended,
 * all '.' replaced by '/' and loaded as a resource via classpath.
 * Fix: the resource stream is now closed after use (it previously leaked).
 */
protected void prepare() throws Exception
{
    setCurrentLogger( "prepare" );
    final String resourceName = getClass().getName().replace( '.', '/' ) + ".xtest";
    final URL resource = getClass().getClassLoader().getResource( resourceName );
    if( resource != null )
    {
        getLogger().debug( "Loading resource " + resourceName );
        final InputStream stream = resource.openStream();
        try
        {
            prepare( stream );
        }
        finally
        {
            // Always release the underlying resource stream.
            stream.close();
        }
    }
    else
    {
        getLogger().warn( "Resource not found " + resourceName );
    }
}
/**
 * Initializes Fortress.
 *
 * @param testconf the configuration passed as an <code>InputStream</code>
 *
 * A common way to supply an InputStream is to override the prepare() method
 * in the subclass, do there whatever is needed to get the right InputStream
 * object supplying a conformant xtest configuration, and pass it to this
 * prepare method. That override is also the place to set a different logging
 * priority on the member variable m_logPriority.
 */
protected final void prepare( final InputStream testconf )
    throws Exception
{
    getLogger().debug( "FortressTestCase.initialize" );
    // Parse the .xtest document from the supplied stream.
    final DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
    final Configuration conf = builder.build( testconf );
    // Optional free-form <annotation> text, logged before the container starts.
    String annotation = conf.getChild( "annotation" ).getValue( null );
    if( ( null != annotation ) && !( "".equals( annotation ) ) )
    {
        m_logger.info( annotation );
    }
    // Assemble the Fortress container configuration from the test document.
    final FortressConfig config = new FortressConfig();
    config.setContainerClass( DefaultContainer.class );
    config.setContextDirectory( "./" );
    config.setWorkDirectory( "./" );
    config.setLoggerCategory( "fortress" );
    config.setLoggerManagerConfiguration( conf.getChild( "logger" ) );
    config.setRoleManagerConfiguration( conf.getChild( "roles" ) );
    config.setContainerConfiguration( conf.getChild( "components" ) );
    // Boot the container. The order matters: the container manager must be
    // initialized before the container, its ServiceManager and the
    // LoggerManager can be obtained from it.
    m_containerManager = new DefaultContainerManager(
        setupContext( conf.getChild( "context", true ), config.getContext() ) );
    ContainerUtil.initialize( m_containerManager );
    m_container = (DefaultContainer) m_containerManager.getContainer();
    m_serviceManager = m_container.getServiceManager();
    m_loggerManager = (LoggerManager) m_serviceManager.lookup( LoggerManager.class.getName() );
}
/**
 * Sets the logger which will be returned by getLogger(). Once the Fortress
 * LoggerManager is available the category is "test." + name; before that a
 * bootstrap LogKit logger writing to System.out is used.
 */
private void setCurrentLogger( String name )
{
    if( m_loggerManager != null )
    {
        m_logger = m_loggerManager.getLoggerForCategory( "test." + name );
        return;
    }
    // Bootstrap path: the container (and its LoggerManager) does not exist
    // yet, so build a LogKit logger that writes straight to System.out.
    final org.apache.log.Logger bootstrap =
        Hierarchy.getDefaultHierarchy().getLoggerFor( name );
    bootstrap.setPriority( Priority.INFO );
    final PatternFormatter formatter = new PatternFormatter( FORMAT );
    final StreamTarget target = new StreamTarget( System.out, formatter );
    bootstrap.setLogTargets( new LogTarget[]{target} );
    m_logger = new LogKitLogger( bootstrap );
}
/**
 * Set up a context according to the xtest configuration's context element.
 * Each &lt;entry&gt; either carries a literal "value" attribute or a "class"
 * attribute naming a class to instantiate.
 *
 * addContext(DefaultContext) is invoked before the context is made read-only
 * so subclasses can add additional objects programmatically.
 */
private Context setupContext( final Configuration conf, final Context parentContext )
    throws Exception
{
    //FIXME(GP): This method should setup the Context object according to the
    //           configuration spec.
    final DefaultContext context = new OverridableContext( parentContext );
    final Configuration[] entries = conf.getChildren( "entry" );
    for( int i = 0; i < entries.length; i++ )
    {
        final Configuration entry = entries[ i ];
        final String key = entry.getAttribute( "name" );
        final String value = entry.getAttribute( "value", null );
        if( value != null )
        {
            // Literal string entry.
            context.put( key, value );
            if( getLogger().isInfoEnabled() )
            {
                getLogger().info( "FortressTestCase: added value \"" + value
                                  + "\" to context entry " + key );
            }
        }
        else
        {
            // No value given: instantiate the named class instead.
            final String clazz = entry.getAttribute( "class" );
            final Object obj = getClass().getClassLoader().loadClass( clazz ).newInstance();
            context.put( key, obj );
            if( getLogger().isInfoEnabled() )
            {
                getLogger().info( "FortressTestCase: added an instance of class "
                                  + clazz + " to context entry " + key );
            }
        }
    }
    addContext( context );
    context.makeReadOnly();
    return context;
}
/**
 * This method may be overwritten by subclasses to put additional objects
 * into the context programmatically. The default implementation does
 * nothing. The context is still mutable when this hook runs; it is made
 * read-only immediately afterwards by setupContext.
 */
protected void addContext( DefaultContext context )
{
}
/**
 * Extracts the simple (unqualified) name of a class, i.e. everything after
 * the last '.' of the fully qualified name.
 */
private String getBaseClassName( Class clazz )
{
    final String fqcn = clazz.getName();
    final int lastDot = fqcn.lastIndexOf( '.' );
    return ( lastDot < 0 ) ? fqcn : fqcn.substring( lastDot + 1 );
}
/**
 * Disposes Fortress by shutting down the container manager created in
 * prepare(InputStream). Called from run()'s finally block, so it may run
 * even if preparation failed; ContainerUtil.dispose is presumably tolerant
 * of an uninitialized manager — TODO confirm.
 */
private void done()
{
    ContainerUtil.dispose( m_containerManager );
}
}
| |
package pl.tajchert.sample;
import android.animation.Animator;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.animation.TypeEvaluator;
import android.animation.ValueAnimator;
import android.animation.ValueAnimator.AnimatorUpdateListener;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Handler;
import android.os.Looper;
import android.text.SpannableString;
import android.text.Spanned;
import android.util.AttributeSet;
import android.widget.TextView;
import pl.tajchert.waitingdots.R;
/**
 * A TextView that renders three dots ("...") jumping one after another, as a
 * "typing/waiting" indicator. Configurable through the WaitingDots styleable
 * attributes (period, jumpHeight, autoplay) or the corresponding setters.
 */
public class DotsTextView extends TextView {

    /** Default jump-animation period in ms (same as the XML attribute default). */
    private static final int DEFAULT_PERIOD = 6000;

    private JumpingSpan dotOne;
    private JumpingSpan dotTwo;
    private JumpingSpan dotThree;

    /** Duration in ms of the horizontal hide/show slide animations. */
    private int showSpeed = 700;

    private int jumpHeight;
    private boolean autoPlay;
    private boolean isPlaying;
    private boolean isHide;
    private int period;

    // NOTE(review): startTime, lockDotOne/Two/Three and handler are assigned
    // or declared but never read in this class — likely leftovers; kept to
    // preserve the original field set. Confirm before removing.
    private long startTime;
    private boolean lockDotOne;
    private boolean lockDotTwo;
    private boolean lockDotThree;
    private Handler handler;

    private AnimatorSet mAnimatorSet = new AnimatorSet();
    private float textWidth;

    public DotsTextView(Context context) {
        super(context);
        init(context, null);
    }

    public DotsTextView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    public DotsTextView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs);
    }

    /**
     * Shared constructor logic: reads the styleable attributes, builds the
     * three jumping spans and wires up the jump animators.
     */
    private void init(Context context, AttributeSet attrs) {
        handler = new Handler(Looper.getMainLooper());

        // Fix: apply defaults up front so a programmatically created view
        // (attrs == null) does not end up with period == 0 / jumpHeight == 0
        // and autoPlay == false, which left the dots static.
        period = DEFAULT_PERIOD;
        jumpHeight = (int) (getTextSize() / 4);
        autoPlay = true;

        if (attrs != null) {
            TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.WaitingDots);
            try {
                period = typedArray.getInt(R.styleable.WaitingDots_period, DEFAULT_PERIOD);
                jumpHeight = typedArray.getInt(R.styleable.WaitingDots_jumpHeight, (int) (getTextSize() / 4));
                autoPlay = typedArray.getBoolean(R.styleable.WaitingDots_autoplay, true);
            } finally {
                // Always recycle, even if reading an attribute throws.
                typedArray.recycle();
            }
        }

        dotOne = new JumpingSpan();
        dotTwo = new JumpingSpan();
        dotThree = new JumpingSpan();

        SpannableString spannable = new SpannableString("...");
        spannable.setSpan(dotOne, 0, 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        spannable.setSpan(dotTwo, 1, 2, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        spannable.setSpan(dotThree, 2, 3, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        setText(spannable, BufferType.SPANNABLE);

        textWidth = getPaint().measureText(".", 0, 1);

        // Only the first dot's animator triggers invalidate(); since all three
        // animators run on the same clock, one redraw per frame repaints them all.
        ObjectAnimator dotOneJumpAnimator = createDotJumpAnimator(dotOne, 0);
        dotOneJumpAnimator.addUpdateListener(new AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator valueAnimator) {
                invalidate();
            }
        });
        mAnimatorSet.playTogether(dotOneJumpAnimator, createDotJumpAnimator(dotTwo,
                period / 6), createDotJumpAnimator(dotThree, period * 2 / 6));

        isPlaying = autoPlay;
        if (autoPlay) {
            start();
        }
    }

    /** Starts (or resumes) the jumping animation, repeating indefinitely. */
    public void start() {
        isPlaying = true;
        setAllAnimationsRepeatCount(ValueAnimator.INFINITE);
        mAnimatorSet.start();
    }

    /**
     * Builds the vertical jump animator for one dot. The evaluator maps the
     * animation fraction onto the positive half of a sine wave, so each dot
     * rests for part of the period and jumps during the rest.
     */
    private ObjectAnimator createDotJumpAnimator(JumpingSpan jumpingSpan, long delay) {
        ObjectAnimator jumpAnimator = ObjectAnimator.ofFloat(jumpingSpan, "translationY", 0, -jumpHeight);
        jumpAnimator.setEvaluator(new TypeEvaluator<Number>() {
            @Override
            public Number evaluate(float fraction, Number from, Number to) {
                return Math.max(0, Math.sin(fraction * Math.PI * 2)) * (to.floatValue() - from.floatValue());
            }
        });
        jumpAnimator.setDuration(period);
        jumpAnimator.setStartDelay(delay);
        jumpAnimator.setRepeatCount(ValueAnimator.INFINITE);
        jumpAnimator.setRepeatMode(ValueAnimator.RESTART);
        return jumpAnimator;
    }

    /**
     * Stops the animation gracefully: the repeat count is set to 0 so the
     * current cycle finishes instead of being cancelled mid-jump.
     */
    public void stop() {
        isPlaying = false;
        setAllAnimationsRepeatCount(0);
    }

    private void setAllAnimationsRepeatCount(int repeatCount) {
        for (Animator animator : mAnimatorSet.getChildAnimations()) {
            if (animator instanceof ObjectAnimator) {
                ((ObjectAnimator) animator).setRepeatCount(repeatCount);
            }
        }
    }

    /** Slides the second and third dots behind the first, leaving one dot visible. */
    public void hide() {
        createDotHideAnimator(dotThree, 2).start();

        ObjectAnimator dotTwoMoveRightToLeft = createDotHideAnimator(dotTwo, 1);
        dotTwoMoveRightToLeft.addUpdateListener(new AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator valueAnimator) {
                invalidate();
            }
        });
        dotTwoMoveRightToLeft.start();
        isHide = true;
    }

    /** Slides the second and third dots back to their normal positions. */
    public void show() {
        ObjectAnimator dotThreeMoveRightToLeft = createDotShowAnimator(dotThree, 2);
        dotThreeMoveRightToLeft.start();

        ObjectAnimator dotTwoMoveRightToLeft = createDotShowAnimator(dotTwo, 1);
        dotTwoMoveRightToLeft.addUpdateListener(new AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator valueAnimator) {
                invalidate();
            }
        });
        dotTwoMoveRightToLeft.start();
        isHide = false;
    }

    private ObjectAnimator createDotHideAnimator(JumpingSpan span, float widthMultiplier) {
        return createDotHorizontalAnimator(span, 0, -textWidth * widthMultiplier);
    }

    private ObjectAnimator createDotShowAnimator(JumpingSpan span, int widthMultiplier) {
        return createDotHorizontalAnimator(span, -textWidth * widthMultiplier, 0);
    }

    private ObjectAnimator createDotHorizontalAnimator(JumpingSpan span, float from, float to) {
        ObjectAnimator animator = ObjectAnimator.ofFloat(span, "translationX", from, to);
        animator.setDuration(showSpeed);
        return animator;
    }

    public void showAndPlay() {
        show();
        start();
    }

    public void hideAndStop() {
        hide();
        stop();
    }

    /** @return true if the trailing dots are currently hidden. */
    public boolean isHide() {
        return isHide;
    }

    /** @return true if the jump animation is running (or scheduled to repeat). */
    public boolean isPlaying() {
        return isPlaying;
    }

    /** Sets the jump height in px; affects animators created afterwards. */
    public void setJumpHeight(int jumpHeight) {
        this.jumpHeight = jumpHeight;
    }

    /** Sets the animation period in ms; affects animators created afterwards. */
    public void setPeriod(int period) {
        this.period = period;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client.coprocessor;
import static org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.getParsedGenericInstance;
import static org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.validateArgAndGetPB;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.com.google.protobuf.Message;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateService;
/**
* This client class is for invoking the aggregate functions deployed on the
* Region Server side via the AggregateService. This class will implement the
* supporting functionality for summing/processing the individual results
* obtained from the AggregateService for each region.
* <p>
* This will serve as the client side handler for invoking the aggregate
* functions.
* For all aggregate functions,
* <ul>
 * <li>start row &lt; end row is an essential condition (if they are not
* {@link HConstants#EMPTY_BYTE_ARRAY})
* <li>Column family can't be null. In case where multiple families are
* provided, an IOException will be thrown. An optional column qualifier can
* also be defined.</li>
* <li>For methods to find maximum, minimum, sum, rowcount, it returns the
* parameter type. For average and std, it returns a double value. For row
* count, it returns a long value.</li>
* </ul>
* <p>Call {@link #close()} when done.
*/
@InterfaceAudience.Public
public class AggregationClient implements Closeable {
// TODO: This class is not used. Move to examples?
private static final Logger log = LoggerFactory.getLogger(AggregationClient.class);
private final Connection connection;
/**
 * A minimal RpcController used by this endpoint. It only tracks the
 * failed/cancelled flags and the error text; notifyOnCancel is unsupported.
 */
static class AggregationClientRpcController implements RpcController {
  private String errorText;
  private boolean cancelled;
  private boolean failed;

  @Override
  public String errorText() {
    return errorText;
  }

  @Override
  public boolean failed() {
    return failed;
  }

  @Override
  public boolean isCanceled() {
    return cancelled;
  }

  @Override
  public void notifyOnCancel(RpcCallback<Object> callback) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void reset() {
    errorText = null;
    cancelled = false;
    failed = false;
  }

  @Override
  public void setFailed(String errorText) {
    this.failed = true;
    this.errorText = errorText;
  }

  @Override
  public void startCancel() {
    cancelled = true;
  }
}
/**
 * Constructor with Conf object. Creates the Connection used by all of the
 * aggregate calls; callers must invoke {@link #close()} when done.
 * @param cfg Configuration to use
 */
public AggregationClient(Configuration cfg) {
  Connection conn;
  try {
    // Create a connection on construction. Will use it making each of the calls below.
    conn = ConnectionFactory.createConnection(cfg);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  this.connection = conn;
}
@Override
public void close() throws IOException {
  // Idempotent: a null or already-closed connection is left alone.
  Connection conn = this.connection;
  if (conn == null || conn.isClosed()) {
    return;
  }
  conn.close();
}
/**
 * It gives the maximum value of a column for a given column family for the
 * given range. In case qualifier is null, a max of all values for the given
 * family is returned.
 * <p>
 * Opens a Table for {@code tableName}, delegates to the Table overload and
 * closes the table afterwards.
 * @param tableName the name of the table to scan
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return maximum value as interpreted by {@code ci}, or null if no region returned a value
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message> R max(
    final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  try (Table table = connection.getTable(tableName)) {
    return max(table, ci, scan);
  }
}
/**
 * It gives the maximum value of a column for a given column family for the
 * given range. In case qualifier is null, a max of all values for the given
 * family is returned.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return maximum value as interpreted by {@code ci}, or null if no region returned a value
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message>
R max(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
  // Folds the per-region maxima into one global maximum.
  class MaxCallBack implements Batch.Callback<R> {
    private R currentMax;
    R getMax() {
      return currentMax;
    }
    @Override
    public synchronized void update(byte[] region, byte[] row, R result) {
      if (result != null && (currentMax == null || ci.compare(currentMax, result) < 0)) {
        currentMax = result;
      }
    }
  }
  MaxCallBack aMaxCallBack = new MaxCallBack();
  table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
    new Batch.Call<AggregateService, R>() {
      @Override
      public R call(AggregateService instance) throws IOException {
        RpcController controller = new AggregationClientRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> callback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.getMax(controller, requestArg, callback);
        AggregateResponse response = callback.get();
        if (controller.failed()) {
          throw new IOException(controller.errorText());
        }
        if (response.getFirstPartCount() == 0) {
          return null;
        }
        // Generic parameter 3 of ColumnInterpreter is Q, the cell-value proto.
        Q q = getParsedGenericInstance(ci.getClass(), 3, response.getFirstPart(0));
        return ci.getCellValueFromProto(q);
      }
    }, aMaxCallBack);
  return aMaxCallBack.getMax();
}
/**
 * It gives the minimum value of a column for a given column family for the
 * given range. In case qualifier is null, a min of all values for the given
 * family is returned.
 * <p>
 * Opens a Table for {@code tableName}, delegates to the Table overload and
 * closes the table afterwards.
 * @param tableName the name of the table to scan
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return minimum value as interpreted by {@code ci}, or null if no region returned a value
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message> R min(
    final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  try (Table table = connection.getTable(tableName)) {
    return min(table, ci, scan);
  }
}
/**
 * It gives the minimum value of a column for a given column family for the
 * given range. In case qualifier is null, a min of all values for the given
 * family is returned.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return minimum value as interpreted by {@code ci}, or null if no region returned a value
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message>
R min(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
  // Folds the per-region minima into one global minimum.
  class MinCallBack implements Batch.Callback<R> {
    private R min = null;
    public R getMinimum() {
      return min;
    }
    @Override
    public synchronized void update(byte[] region, byte[] row, R result) {
      min = (min == null || (result != null && ci.compare(result, min) < 0)) ? result : min;
    }
  }
  MinCallBack minCallBack = new MinCallBack();
  table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
    new Batch.Call<AggregateService, R>() {
      @Override
      public R call(AggregateService instance) throws IOException {
        RpcController controller = new AggregationClientRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.getMin(controller, requestArg, rpcCallback);
        AggregateResponse response = rpcCallback.get();
        if (controller.failed()) {
          throw new IOException(controller.errorText());
        }
        if (response.getFirstPartCount() > 0) {
          ByteString b = response.getFirstPart(0);
          // Generic parameter 3 of ColumnInterpreter is Q, the cell-value proto.
          Q q = getParsedGenericInstance(ci.getClass(), 3, b);
          return ci.getCellValueFromProto(q);
        }
        return null;
      }
    }, minCallBack);
  // Fix: message previously read "Min fom all regions"; also switched to
  // parameterized SLF4J logging to avoid eager string concatenation.
  log.debug("Min from all regions is: {}", minCallBack.getMinimum());
  return minCallBack.getMinimum();
}
/**
 * It gives the row count, by summing up the individual results obtained from
 * regions. In case the qualifier is null, FirstKeyValueFilter is used to
 * optimise the operation. In case qualifier is provided, the filter cannot be
 * used as it may set the flag to skip to the next row while the value read is
 * not of the given qualifier: that row would then wrongly not be counted.
 * <p>
 * Opens a Table for {@code tableName}, delegates to the Table overload and
 * closes the table afterwards.
 * @param tableName the name of the table to scan
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return total row count across all regions
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message> long rowCount(
    final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  try (Table table = connection.getTable(tableName)) {
    return rowCount(table, ci, scan);
  }
}
/**
 * It gives the row count, by summing up the individual results obtained from
 * regions. In case the qualifier is null, FirstKeyValueFilter is used to
 * optimise the operation. In case qualifier is provided, the filter cannot be
 * used as it may set the flag to skip to the next row while the value read is
 * not of the given qualifier: that row would then wrongly not be counted.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return total row count across all regions
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message>
long rowCount(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, true);
  // Thread-safe accumulator for the per-region counts.
  class RowNumCallback implements Batch.Callback<Long> {
    private final AtomicLong totalRows = new AtomicLong(0);
    public long getRowNumCount() {
      return totalRows.get();
    }
    @Override
    public void update(byte[] region, byte[] row, Long result) {
      totalRows.addAndGet(result.longValue());
    }
  }
  RowNumCallback rowNum = new RowNumCallback();
  table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
    new Batch.Call<AggregateService, Long>() {
      @Override
      public Long call(AggregateService instance) throws IOException {
        RpcController controller = new AggregationClientRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> callback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.getRowNum(controller, requestArg, callback);
        AggregateResponse response = callback.get();
        if (controller.failed()) {
          throw new IOException(controller.errorText());
        }
        // The region's count comes back as 8 raw bytes; decode via ByteBuffer.
        byte[] bytes = getBytesFromResponse(response.getFirstPart(0));
        ByteBuffer buf = ByteBuffer.allocate(8).put(bytes);
        buf.rewind();
        return buf.getLong();
      }
    }, rowNum);
  return rowNum.getRowNumCount();
}
/**
 * It sums up the value returned from various regions. In case qualifier is
 * null, summation of all the column qualifiers in the given family is done.
 * <p>
 * Opens a Table for {@code tableName}, delegates to the Table overload and
 * closes the table afterwards.
 * @param tableName the name of the table to scan
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return the promoted sum, or null if no region returned a value
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message> S sum(
    final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  try (Table table = connection.getTable(tableName)) {
    return sum(table, ci, scan);
  }
}
/**
 * It sums up the value returned from various regions. In case qualifier is
 * null, summation of all the column qualifiers in the given family is done.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return the promoted sum, or null if no region returned a value
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message>
S sum(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
  // Accumulates the per-region partial sums into one promoted value.
  class SumCallBack implements Batch.Callback<S> {
    private S total = null;
    S getSumResult() {
      return total;
    }
    @Override
    public synchronized void update(byte[] region, byte[] row, S result) {
      total = ci.add(total, result);
    }
  }
  SumCallBack sumCallBack = new SumCallBack();
  table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
    new Batch.Call<AggregateService, S>() {
      @Override
      public S call(AggregateService instance) throws IOException {
        RpcController controller = new AggregationClientRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> callback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.getSum(controller, requestArg, callback);
        AggregateResponse response = callback.get();
        if (controller.failed()) {
          throw new IOException(controller.errorText());
        }
        if (response.getFirstPartCount() == 0) {
          return null;
        }
        // Generic parameter 4 of ColumnInterpreter is T, the promoted-value proto.
        T t = getParsedGenericInstance(ci.getClass(), 4, response.getFirstPart(0));
        return ci.getPromotedValueFromProto(t);
      }
    }, sumCallBack);
  return sumCallBack.getSumResult();
}
/**
 * It computes average while fetching sum and row count from all the
 * corresponding regions. Approach is to compute a global sum of region level
 * sum and rowcount and then compute the average.
 * <p>
 * Opens a Table for {@code tableName}, delegates to the Table overload and
 * closes the table afterwards.
 * @param tableName the name of the table to scan
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return global (sum, row count) pair across all regions
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
private <R, S, P extends Message, Q extends Message, T extends Message> Pair<S, Long> getAvgArgs(
    final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
    throws Throwable {
  try (Table table = connection.getTable(tableName)) {
    return getAvgArgs(table, ci, scan);
  }
}
/**
 * Fetches the (sum, row count) pair from every region and folds them into a
 * single global pair; the avg(...) methods divide the two to obtain the
 * average.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return global (sum, row count) pair across all regions
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
private <R, S, P extends Message, Q extends Message, T extends Message>
Pair<S, Long> getAvgArgs(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci,
    final Scan scan) throws Throwable {
  final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
  // Accumulates per-region (sum, rows) pairs.
  class AvgCallBack implements Batch.Callback<Pair<S, Long>> {
    private S totalSum = null;
    private Long totalRows = 0L;
    public synchronized Pair<S, Long> getAvgArgs() {
      return new Pair<>(totalSum, totalRows);
    }
    @Override
    public synchronized void update(byte[] region, byte[] row, Pair<S, Long> result) {
      totalSum = ci.add(totalSum, result.getFirst());
      totalRows += result.getSecond();
    }
  }
  AvgCallBack avgCallBack = new AvgCallBack();
  table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
    new Batch.Call<AggregateService, Pair<S, Long>>() {
      @Override
      public Pair<S, Long> call(AggregateService instance) throws IOException {
        RpcController controller = new AggregationClientRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> callback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.getAvg(controller, requestArg, callback);
        AggregateResponse response = callback.get();
        if (controller.failed()) {
          throw new IOException(controller.errorText());
        }
        if (response.getFirstPartCount() == 0) {
          // Region had no matching cells: contributes nothing to the average.
          return new Pair<>(null, 0L);
        }
        // First part: promoted sum proto (generic parameter 4 = T).
        T t = getParsedGenericInstance(ci.getClass(), 4, response.getFirstPart(0));
        S regionSum = ci.getPromotedValueFromProto(t);
        // Second part: the region's row count as 8 raw bytes.
        ByteBuffer bb = ByteBuffer.allocate(8).put(
            getBytesFromResponse(response.getSecondPart()));
        bb.rewind();
        return new Pair<>(regionSum, bb.getLong());
      }
    }, avgCallBack);
  return avgCallBack.getAvgArgs();
}
/**
 * This is the client side interface/handle for calling the average method for
 * a given cf-cq combination. It was necessary to add one more call stack as
 * its return type should be a decimal value, irrespective of what
 * columninterpreter says. So, this method collects the necessary parameters
 * to compute the average and returns the double value.
 * @param tableName the name of the table to scan
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return the average as a double, computed as global sum / global row count
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message>
double avg(final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci,
    Scan scan) throws Throwable {
  Pair<S, Long> p = getAvgArgs(tableName, ci, scan);
  return ci.divideForAvg(p.getFirst(), p.getSecond());
}
/**
 * This is the client side interface/handle for calling the average method for
 * a given cf-cq combination. It was necessary to add one more call stack as
 * its return type should be a decimal value, irrespective of what
 * columninterpreter says. So, this method collects the necessary parameters
 * to compute the average and returns the double value.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return the average as a double, computed as global sum / global row count
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
public <R, S, P extends Message, Q extends Message, T extends Message> double avg(
    final Table table, final ColumnInterpreter<R, S, P, Q, T> ci, Scan scan)
    throws Throwable {
  Pair<S, Long> p = getAvgArgs(table, ci, scan);
  return ci.divideForAvg(p.getFirst(), p.getSecond());
}
/**
 * It computes a global standard deviation for a given column and its value.
 * Standard deviation is square root of (average of squares -
 * average*average). From individual regions, it obtains sum, square sum and
 * number of rows. With these, the above values are computed to get the global
 * std.
 * @param table table to scan.
 * @param ci the user's ColumnInterpreter implementation
 * @param scan the HBase scan object to use to read data from HBase
 * @return a pair of (list of [sum, sum-of-squares], total row count) aggregated over all regions
 * @throws Throwable The caller is supposed to handle the exception as they are thrown
 *           &amp; propagated to it.
 */
private <R, S, P extends Message, Q extends Message, T extends Message>
Pair<List<S>, Long> getStdArgs(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci,
    final Scan scan) throws Throwable {
  final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
  // Accumulates per-region partial results: sum, sum of squares and row count.
  class StdCallback implements Batch.Callback<Pair<List<S>, Long>> {
    long rowCountVal = 0L;
    S sumVal = null, sumSqVal = null;
    public synchronized Pair<List<S>, Long> getStdParams() {
      List<S> l = new ArrayList<>(2);
      l.add(sumVal);
      l.add(sumSqVal);
      Pair<List<S>, Long> p = new Pair<>(l, rowCountVal);
      return p;
    }
    @Override
    public synchronized void update(byte[] region, byte[] row, Pair<List<S>, Long> result) {
      // Regions with no matching cells report an empty list; skip them.
      if (result.getFirst().size() > 0) {
        sumVal = ci.add(sumVal, result.getFirst().get(0));
        sumSqVal = ci.add(sumSqVal, result.getFirst().get(1));
        rowCountVal += result.getSecond();
      }
    }
  }
  StdCallback stdCallback = new StdCallback();
  table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
    new Batch.Call<AggregateService, Pair<List<S>, Long>>() {
      @Override
      public Pair<List<S>, Long> call(AggregateService instance) throws IOException {
        RpcController controller = new AggregationClientRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.getStd(controller, requestArg, rpcCallback);
        AggregateResponse response = rpcCallback.get();
        if (controller.failed()) {
          throw new IOException(controller.errorText());
        }
        Pair<List<S>, Long> pair = new Pair<>(new ArrayList<>(), 0L);
        if (response.getFirstPartCount() == 0) {
          return pair;
        }
        // First parts hold the promoted sum and sum-of-squares protos
        // (generic parameter 4 of ColumnInterpreter is T).
        List<S> list = new ArrayList<>();
        for (int i = 0; i < response.getFirstPartCount(); i++) {
          ByteString b = response.getFirstPart(i);
          T t = getParsedGenericInstance(ci.getClass(), 4, b);
          S s = ci.getPromotedValueFromProto(t);
          list.add(s);
        }
        pair.setFirst(list);
        // Second part: the region's row count as 8 raw bytes.
        ByteBuffer bb = ByteBuffer.allocate(8).put(
            getBytesFromResponse(response.getSecondPart()));
        bb.rewind();
        pair.setSecond(bb.getLong());
        return pair;
      }
    }, stdCallback);
  return stdCallback.getStdParams();
}
/**
* This is the client side interface/handle for calling the std method for a
* given cf-cq combination. It was necessary to add one more call stack as its
* return type should be a decimal value, irrespective of what
* columninterpreter says. So, this methods collects the necessary parameters
* to compute the std and returns the double value.
* @param tableName the name of the table to scan
* @param ci the user's ColumnInterpreter implementation
* @param scan the HBase scan object to use to read data from HBase
* @return <R, S>
* @throws Throwable The caller is supposed to handle the exception as they are thrown
* & propagated to it.
*/
public <R, S, P extends Message, Q extends Message, T extends Message>
double std(final TableName tableName, ColumnInterpreter<R, S, P, Q, T> ci,
Scan scan) throws Throwable {
try (Table table = connection.getTable(tableName)) {
return std(table, ci, scan);
}
}
/**
* This is the client side interface/handle for calling the std method for a
* given cf-cq combination. It was necessary to add one more call stack as its
* return type should be a decimal value, irrespective of what
* columninterpreter says. So, this methods collects the necessary parameters
* to compute the std and returns the double value.
* @param table table to scan.
* @param ci the user's ColumnInterpreter implementation
* @param scan the HBase scan object to use to read data from HBase
* @return <R, S>
* @throws Throwable The caller is supposed to handle the exception as they are thrown
* & propagated to it.
*/
public <R, S, P extends Message, Q extends Message, T extends Message> double std(
final Table table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
Pair<List<S>, Long> p = getStdArgs(table, ci, scan);
double res = 0d;
double avg = ci.divideForAvg(p.getFirst().get(0), p.getSecond());
double avgOfSumSq = ci.divideForAvg(p.getFirst().get(1), p.getSecond());
res = avgOfSumSq - (avg) * (avg); // variance
res = Math.pow(res, 0.5);
return res;
}
  /**
   * It helps locate the region with median for a given column whose weight
   * is specified in an optional column.
   * From individual regions, it obtains sum of values and sum of weights.
   * @param table table to scan.
   * @param ci the user's ColumnInterpreter implementation
   * @param scan the HBase scan object to use to read data from HBase
   * @return pair whose first element is a map between start row of the region
   * and (sum of values, sum of weights) for the region, the second element is
   * (sum of values, sum of weights) for all the regions chosen
   * @throws Throwable The caller is supposed to handle the exception as they are thrown
   *           &amp; propagated to it.
   */
  private <R, S, P extends Message, Q extends Message, T extends Message>
      Pair<NavigableMap<byte[], List<S>>, List<S>>
      getMedianArgs(final Table table,
          final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
    final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
    final NavigableMap<byte[], List<S>> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    // Collects each region's (sum of values, sum of weights) keyed by row and
    // accumulates the grand totals; update() may run concurrently, hence synchronized.
    class StdCallback implements Batch.Callback<List<S>> {
      S sumVal = null, sumWeights = null;
      public synchronized Pair<NavigableMap<byte[], List<S>>, List<S>> getMedianParams() {
        List<S> l = new ArrayList<>(2);
        l.add(sumVal);
        l.add(sumWeights);
        Pair<NavigableMap<byte[], List<S>>, List<S>> p = new Pair<>(map, l);
        return p;
      }
      @Override
      public synchronized void update(byte[] region, byte[] row, List<S> result) {
        map.put(row, result);
        sumVal = ci.add(sumVal, result.get(0));
        sumWeights = ci.add(sumWeights, result.get(1));
      }
    }
    StdCallback stdCallback = new StdCallback();
    // Fan the getMedian request out to every region overlapping the scan range.
    table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
        new Batch.Call<AggregateService, List<S>>() {
          @Override
          public List<S> call(AggregateService instance) throws IOException {
            RpcController controller = new AggregationClientRpcController();
            CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
                new CoprocessorRpcUtils.BlockingRpcCallback<>();
            instance.getMedian(controller, requestArg, rpcCallback);
            AggregateResponse response = rpcCallback.get();
            if (controller.failed()) {
              throw new IOException(controller.errorText());
            }
            // Decode each returned part into the promoted value type S.
            List<S> list = new ArrayList<>();
            for (int i = 0; i < response.getFirstPartCount(); i++) {
              ByteString b = response.getFirstPart(i);
              T t = getParsedGenericInstance(ci.getClass(), 4, b);
              S s = ci.getPromotedValueFromProto(t);
              list.add(s);
            }
            return list;
          }
        }, stdCallback);
    return stdCallback.getMedianParams();
  }
/**
* This is the client side interface/handler for calling the median method for a
* given cf-cq combination. This method collects the necessary parameters
* to compute the median and returns the median.
* @param tableName the name of the table to scan
* @param ci the user's ColumnInterpreter implementation
* @param scan the HBase scan object to use to read data from HBase
* @return R the median
* @throws Throwable The caller is supposed to handle the exception as they are thrown
* & propagated to it.
*/
public <R, S, P extends Message, Q extends Message, T extends Message>
R median(final TableName tableName, ColumnInterpreter<R, S, P, Q, T> ci,
Scan scan) throws Throwable {
try (Table table = connection.getTable(tableName)) {
return median(table, ci, scan);
}
}
/**
* This is the client side interface/handler for calling the median method for a
* given cf-cq combination. This method collects the necessary parameters
* to compute the median and returns the median.
* @param table table to scan.
* @param ci the user's ColumnInterpreter implementation
* @param scan the HBase scan object to use to read data from HBase
* @return R the median
* @throws Throwable The caller is supposed to handle the exception as they are thrown
* & propagated to it.
*/
public <R, S, P extends Message, Q extends Message, T extends Message>
R median(final Table table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
Pair<NavigableMap<byte[], List<S>>, List<S>> p = getMedianArgs(table, ci, scan);
byte[] startRow = null;
byte[] colFamily = scan.getFamilies()[0];
NavigableSet<byte[]> quals = scan.getFamilyMap().get(colFamily);
NavigableMap<byte[], List<S>> map = p.getFirst();
S sumVal = p.getSecond().get(0);
S sumWeights = p.getSecond().get(1);
double halfSumVal = ci.divideForAvg(sumVal, 2L);
double movingSumVal = 0;
boolean weighted = false;
if (quals.size() > 1) {
weighted = true;
halfSumVal = ci.divideForAvg(sumWeights, 2L);
}
for (Map.Entry<byte[], List<S>> entry : map.entrySet()) {
S s = weighted ? entry.getValue().get(1) : entry.getValue().get(0);
double newSumVal = movingSumVal + ci.divideForAvg(s, 1L);
if (newSumVal > halfSumVal) {
// we found the region with the median
break;
}
movingSumVal = newSumVal;
startRow = entry.getKey();
}
// scan the region with median and find it
Scan scan2 = new Scan(scan);
// inherit stop row from method parameter
if (startRow != null) {
scan2.withStartRow(startRow);
}
ResultScanner scanner = null;
try {
int cacheSize = scan2.getCaching();
if (!scan2.getCacheBlocks() || scan2.getCaching() < 2) {
scan2.setCacheBlocks(true);
cacheSize = 5;
scan2.setCaching(cacheSize);
}
scanner = table.getScanner(scan2);
Result[] results = null;
byte[] qualifier = quals.pollFirst();
// qualifier for the weight column
byte[] weightQualifier = weighted ? quals.pollLast() : qualifier;
R value = null;
do {
results = scanner.next(cacheSize);
if (results != null && results.length > 0) {
for (int i = 0; i < results.length; i++) {
Result r = results[i];
// retrieve weight
Cell kv = r.getColumnLatestCell(colFamily, weightQualifier);
R newValue = ci.getValue(colFamily, weightQualifier, kv);
S s = ci.castToReturnType(newValue);
double newSumVal = movingSumVal + ci.divideForAvg(s, 1L);
// see if we have moved past the median
if (newSumVal > halfSumVal) {
return value;
}
movingSumVal = newSumVal;
kv = r.getColumnLatestCell(colFamily, qualifier);
value = ci.getValue(colFamily, qualifier, kv);
}
}
} while (results != null && results.length > 0);
} finally {
if (scanner != null) {
scanner.close();
}
}
return null;
}
  /**
   * Extracts the raw bytes carried in an aggregate response part.
   * @param response the protobuf payload to unwrap
   * @return the payload bytes
   */
  byte[] getBytesFromResponse(ByteString response) {
    ByteBuffer bb = response.asReadOnlyByteBuffer();
    bb.rewind();
    byte[] bytes;
    if (bb.hasArray()) {
      // NOTE(review): a read-only buffer normally reports hasArray() == false,
      // so this branch is not expected to run; if it ever did, array() would
      // ignore the buffer's array offset -- confirm before relying on it.
      bytes = bb.array();
    } else {
      bytes = response.toByteArray();
    }
    return bytes;
  }
}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map.Entry;
import javax.annotation.Nullable;
/**
* An immutable {@link ListMultimap} with reliable user-specified key and value
* iteration order. Does not permit null keys or values.
*
* <p>Unlike {@link Multimaps#unmodifiableListMultimap(ListMultimap)}, which is
* a <i>view</i> of a separate multimap which can still change, an instance of
* {@code ImmutableListMultimap} contains its own data and will <i>never</i>
* change. {@code ImmutableListMultimap} is convenient for
* {@code public static final} multimaps ("constant multimaps") and also lets
* you easily make a "defensive copy" of a multimap provided to your class by
* a caller.
*
* <p><b>Note:</b> Although this class is not final, it cannot be subclassed as
* it has no public or protected constructors. Thus, instances of this class
* are guaranteed to be immutable.
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/ImmutableCollectionsExplained">
* immutable collections</a>.
*
* @author Jared Levy
* @since 2.0 (imported from Google Collections Library)
*/
@GwtCompatible(serializable = true, emulated = true)
public class ImmutableListMultimap<K, V>
    extends ImmutableMultimap<K, V>
    implements ListMultimap<K, V> {
  /** Returns the empty multimap. */
  // Casting is safe because the multimap will never hold any elements.
  @SuppressWarnings("unchecked")
  public static <K, V> ImmutableListMultimap<K, V> of() {
    return (ImmutableListMultimap<K, V>) EmptyImmutableListMultimap.INSTANCE;
  }
  /**
   * Returns an immutable multimap containing a single entry.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1) {
    ImmutableListMultimap.Builder<K, V> builder
        = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2) {
    ImmutableListMultimap.Builder<K, V> builder
        = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3) {
    ImmutableListMultimap.Builder<K, V> builder
        = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
    ImmutableListMultimap.Builder<K, V> builder
        = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
    ImmutableListMultimap.Builder<K, V> builder
        = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    builder.put(k5, v5);
    return builder.build();
  }
  // looking for of() with > 5 entries? Use the builder instead.
  /**
   * Returns a new builder. The generated builder is equivalent to the builder
   * created by the {@link Builder} constructor.
   */
  public static <K, V> Builder<K, V> builder() {
    return new Builder<K, V>();
  }
  /**
   * A builder for creating immutable {@code ListMultimap} instances, especially
   * {@code public static final} multimaps ("constant multimaps"). Example:
   * <pre>   {@code
   *
   *   static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
   *       new ImmutableListMultimap.Builder<String, Integer>()
   *           .put("one", 1)
   *           .putAll("several", 1, 2, 3)
   *           .putAll("many", 1, 2, 3, 4, 5)
   *           .build();}</pre>
   *
   * Builder instances can be reused; it is safe to call {@link #build} multiple
   * times to build multiple multimaps in series. Each multimap contains the
   * key-value mappings in the previously created multimaps.
   *
   * @since 2.0 (imported from Google Collections Library)
   */
  public static final class Builder<K, V>
      extends ImmutableMultimap.Builder<K, V> {
    /**
     * Creates a new builder. The returned builder is equivalent to the builder
     * generated by {@link ImmutableListMultimap#builder}.
     */
    public Builder() {}
    // Each override below only narrows the return type so chained calls keep
    // the Builder<K, V> type rather than the supertype's builder.
    @Override public Builder<K, V> put(K key, V value) {
      super.put(key, value);
      return this;
    }
    /**
     * {@inheritDoc}
     *
     * @since 11.0
     */
    @Override public Builder<K, V> put(
        Entry<? extends K, ? extends V> entry) {
      super.put(entry);
      return this;
    }
    @Override public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
      super.putAll(key, values);
      return this;
    }
    @Override public Builder<K, V> putAll(K key, V... values) {
      super.putAll(key, values);
      return this;
    }
    @Override public Builder<K, V> putAll(
        Multimap<? extends K, ? extends V> multimap) {
      super.putAll(multimap);
      return this;
    }
    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @Beta @Override
    public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
      super.orderKeysBy(keyComparator);
      return this;
    }
    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @Beta @Override
    public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
      super.orderValuesBy(valueComparator);
      return this;
    }
    /**
     * Returns a newly-created immutable list multimap.
     */
    @Override public ImmutableListMultimap<K, V> build() {
      return (ImmutableListMultimap<K, V>) super.build();
    }
  }
  /**
   * Returns an immutable multimap containing the same mappings as {@code
   * multimap}. The generated multimap's key and value orderings correspond to
   * the iteration ordering of the {@code multimap.asMap()} view.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying
   * the data when it is safe to do so. The exact circumstances under which a
   * copy will or will not be performed are undocumented and subject to change.
   *
   * @throws NullPointerException if any key or value in {@code multimap} is
   *     null
   */
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap) {
    if (multimap.isEmpty()) {
      return of();
    }
    // TODO(user): copy ImmutableSetMultimap by using asList() on the sets
    if (multimap instanceof ImmutableListMultimap) {
      @SuppressWarnings("unchecked") // safe since multimap is not writable
      ImmutableListMultimap<K, V> kvMultimap
          = (ImmutableListMultimap<K, V>) multimap;
      // A non-partial-view instance owns its data outright, so it can be
      // returned as-is without copying.
      if (!kvMultimap.isPartialView()) {
        return kvMultimap;
      }
    }
    ImmutableMap.Builder<K, ImmutableList<V>> builder = ImmutableMap.builder();
    int size = 0;
    for (Entry<? extends K, ? extends Collection<? extends V>> entry
        : multimap.asMap().entrySet()) {
      ImmutableList<V> list = ImmutableList.copyOf(entry.getValue());
      // Keys mapped to no values are dropped rather than stored empty.
      if (!list.isEmpty()) {
        builder.put(entry.getKey(), list);
        size += list.size();
      }
    }
    return new ImmutableListMultimap<K, V>(builder.build(), size);
  }
  ImmutableListMultimap(ImmutableMap<K, ImmutableList<V>> map, int size) {
    super(map, size);
  }
  // views
  /**
   * Returns an immutable list of the values for the given key. If no mappings
   * in the multimap have the provided key, an empty immutable list is
   * returned. The values are in the same order as the parameters used to build
   * this multimap.
   */
  @Override public ImmutableList<V> get(@Nullable K key) {
    // This cast is safe as its type is known in constructor.
    ImmutableList<V> list = (ImmutableList<V>) map.get(key);
    return (list == null) ? ImmutableList.<V>of() : list;
  }
  // Lazily computed, cached inverse; transient because it can be rebuilt.
  private transient ImmutableListMultimap<V, K> inverse;
  /**
   * {@inheritDoc}
   *
   * <p>Because an inverse of a list multimap can contain multiple pairs with
   * the same key and value, this method returns an {@code
   * ImmutableListMultimap} rather than the {@code ImmutableMultimap} specified
   * in the {@code ImmutableMultimap} class.
   *
   * @since 11.0
   */
  @Beta
  @Override
  public ImmutableListMultimap<V, K> inverse() {
    // Benign race: invert() is deterministic, so concurrent callers can at
    // worst compute the same value twice.
    ImmutableListMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
  }
  private ImmutableListMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
      builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableListMultimap<V, K> invertedMultimap = builder.build();
    // Share the cache in both directions: the inverse of the inverse is this.
    invertedMultimap.inverse = this;
    return invertedMultimap;
  }
  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated @Override public ImmutableList<V> removeAll(Object key) {
    throw new UnsupportedOperationException();
  }
  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated @Override public ImmutableList<V> replaceValues(
      K key, Iterable<? extends V> values) {
    throw new UnsupportedOperationException();
  }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.openvr;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* <h3>Layout</h3>
*
* <pre><code>
* struct HmdMatrix34_t {
* float m[12];
* }</code></pre>
*/
@NativeType("struct HmdMatrix34_t")
public class HmdMatrix34 extends Struct implements NativeResource {
    // NOTE(review): this file is machine generated (see the header); edits
    // here are limited to comments so regeneration stays trivial.
    /** The struct size in bytes. */
    public static final int SIZEOF;
    /** The struct alignment in bytes. */
    public static final int ALIGNOF;
    /** The struct member offsets. */
    public static final int
        M;
    static {
        // Single member: m, an array of 12 elements of 4 bytes each (float[12]).
        Layout layout = __struct(
            __array(4, 12)
        );
        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();
        M = layout.offsetof(0);
    }
    /**
     * Creates a {@code HmdMatrix34} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public HmdMatrix34(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }
    @Override
    public int sizeof() { return SIZEOF; }
    /** @return a {@link FloatBuffer} view of the {@code m} field. */
    @NativeType("float[12]")
    public FloatBuffer m() { return nm(address()); }
    /** @return the value at the specified index of the {@code m} field. */
    public float m(int index) { return nm(address(), index); }
    /** Copies the specified {@link FloatBuffer} to the {@code m} field. */
    public HmdMatrix34 m(@NativeType("float[12]") FloatBuffer value) { nm(address(), value); return this; }
    /** Sets the specified value at the specified index of the {@code m} field. */
    public HmdMatrix34 m(int index, float value) { nm(address(), index, value); return this; }
    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public HmdMatrix34 set(HmdMatrix34 src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }
    // -----------------------------------
    /** Returns a new {@code HmdMatrix34} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static HmdMatrix34 malloc() {
        return wrap(HmdMatrix34.class, nmemAllocChecked(SIZEOF));
    }
    /** Returns a new {@code HmdMatrix34} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static HmdMatrix34 calloc() {
        return wrap(HmdMatrix34.class, nmemCallocChecked(1, SIZEOF));
    }
    /** Returns a new {@code HmdMatrix34} instance allocated with {@link BufferUtils}. */
    public static HmdMatrix34 create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(HmdMatrix34.class, memAddress(container), container);
    }
    /** Returns a new {@code HmdMatrix34} instance for the specified memory address. */
    public static HmdMatrix34 create(long address) {
        return wrap(HmdMatrix34.class, address);
    }
    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static HmdMatrix34 createSafe(long address) {
        return address == NULL ? null : wrap(HmdMatrix34.class, address);
    }
    /**
     * Returns a new {@link HmdMatrix34.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static HmdMatrix34.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }
    /**
     * Returns a new {@link HmdMatrix34.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static HmdMatrix34.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }
    /**
     * Returns a new {@link HmdMatrix34.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static HmdMatrix34.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }
    /**
     * Create a {@link HmdMatrix34.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static HmdMatrix34.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }
    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static HmdMatrix34.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }
    // -----------------------------------
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34 mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34 callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34 mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34 callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static HmdMatrix34.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
    /**
     * Returns a new {@code HmdMatrix34} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static HmdMatrix34 malloc(MemoryStack stack) {
        return wrap(HmdMatrix34.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }
    /**
     * Returns a new {@code HmdMatrix34} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static HmdMatrix34 calloc(MemoryStack stack) {
        return wrap(HmdMatrix34.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }
    /**
     * Returns a new {@link HmdMatrix34.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static HmdMatrix34.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }
    /**
     * Returns a new {@link HmdMatrix34.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static HmdMatrix34.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }
    // -----------------------------------
    /** Unsafe version of {@link #m}. */
    public static FloatBuffer nm(long struct) { return memFloatBuffer(struct + HmdMatrix34.M, 12); }
    /** Unsafe version of {@link #m(int) m}. */
    public static float nm(long struct, int index) {
        return UNSAFE.getFloat(null, struct + HmdMatrix34.M + check(index, 12) * 4);
    }
    /** Unsafe version of {@link #m(FloatBuffer) m}. */
    public static void nm(long struct, FloatBuffer value) {
        if (CHECKS) { checkGT(value, 12); }
        memCopy(memAddress(value), struct + HmdMatrix34.M, value.remaining() * 4);
    }
    /** Unsafe version of {@link #m(int, float) m}. */
    public static void nm(long struct, int index, float value) {
        UNSAFE.putFloat(null, struct + HmdMatrix34.M + check(index, 12) * 4, value);
    }
    // -----------------------------------
    /** An array of {@link HmdMatrix34} structs. */
    public static class Buffer extends StructBuffer<HmdMatrix34, Buffer> implements NativeResource {
        // Placeholder instance used only as the element factory; its sentinel
        // address (-1) is never dereferenced directly.
        private static final HmdMatrix34 ELEMENT_FACTORY = HmdMatrix34.create(-1L);
        /**
         * Creates a new {@code HmdMatrix34.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link HmdMatrix34#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }
        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }
        @Override
        protected Buffer self() {
            return this;
        }
        @Override
        protected HmdMatrix34 getElementFactory() {
            return ELEMENT_FACTORY;
        }
        /** @return a {@link FloatBuffer} view of the {@code m} field. */
        @NativeType("float[12]")
        public FloatBuffer m() { return HmdMatrix34.nm(address()); }
        /** @return the value at the specified index of the {@code m} field. */
        public float m(int index) { return HmdMatrix34.nm(address(), index); }
        /** Copies the specified {@link FloatBuffer} to the {@code m} field. */
        public HmdMatrix34.Buffer m(@NativeType("float[12]") FloatBuffer value) { HmdMatrix34.nm(address(), value); return this; }
        /** Sets the specified value at the specified index of the {@code m} field. */
        public HmdMatrix34.Buffer m(int index, float value) { HmdMatrix34.nm(address(), index, value); return this; }
    }
}
| |
package by.kam32ar.server.helper;
import java.sql.*;
import java.util.*;
import java.util.Date;
/**
 * A thin wrapper around {@link PreparedStatement} that lets callers bind
 * parameters by name ({@code :name}) instead of by 1-based position. The query
 * is parsed once in the constructor; the JDBC statement itself is created
 * lazily via {@link #prepare()} and recreated after failures.
 */
public class NamedParameterStatement {
    /**
     * Native statement; {@code null} until {@link #prepare(int)} has run.
     */
    private PreparedStatement statement;
    /**
     * Index mapping: parameter name to the 1-based positions of its
     * {@code ?} markers in the parsed query (a name may occur several times).
     */
    private final Map<String, List<Integer>> indexMap;
    /**
     * Query string with every {@code :name} replaced by a {@code ?} marker.
     */
    private final String parsedQuery;
    /**
     * Database connection
     */
    private final AdvancedConnection connection;
    // Generated-keys flag last passed to prepare(int); reused by reset().
    private int autoGeneratedKeys;
    // Set when an execution threw; makes the next prepare() rebuild the statement.
    private boolean failed;
    /**
     * Initialize statement: parses {@code query}, replacing each named
     * parameter with a positional {@code ?} marker. The underlying
     * {@link PreparedStatement} is created lazily by {@link #prepare()}.
     * @param connection connection wrapper used to (re)create the statement
     * @param query SQL text containing {@code :name} placeholders
     */
    public NamedParameterStatement(AdvancedConnection connection, String query) {
        indexMap = new HashMap<String, List<Integer>>();
        parsedQuery = parse(query, indexMap);
        this.connection = connection;
    }
/**
* Parse query
*/
static String parse(String query, Map<String, List<Integer>> paramMap) {
int length = query.length();
StringBuilder parsedQuery = new StringBuilder(length);
boolean inSingleQuote = false;
boolean inDoubleQuote = false;
int index = 1;
for(int i = 0; i < length; i++) {
char c = query.charAt(i);
// String end
if (inSingleQuote) {
if (c == '\'') inSingleQuote = false;
} else if (inDoubleQuote) {
if (c == '"') inDoubleQuote = false;
} else {
// String begin
if (c == '\'') {
inSingleQuote = true;
} else if (c == '"') {
inDoubleQuote = true;
} else if (c == ':' && i + 1 < length &&
Character.isJavaIdentifierStart(query.charAt(i + 1))) {
// Identifier name
int j = i + 2;
while (j < length && Character.isJavaIdentifierPart(query.charAt(j))) j++;
String name = query.substring(i + 1, j);
c = '?';
i += name.length();
// Add to list
List<Integer> indexList = paramMap.get(name);
if (indexList == null) {
indexList = new LinkedList<Integer>();
paramMap.put(name, indexList);
}
indexList.add(index);
index++;
}
}
parsedQuery.append(c);
}
return parsedQuery.toString();
}
    /**
     * Discards the current {@link PreparedStatement} (if any) and prepares a
     * fresh one from the parsed query, optionally resetting the underlying
     * connection first. Clears the {@code failed} flag on success.
     * @param resetConnection whether to reset the database connection as well
     */
    public void reset(boolean resetConnection) throws SQLException {
        if (statement != null) {
            statement.close();
            statement = null;
        }
        if (resetConnection) {
            connection.reset();
        }
        statement = connection.getInstance().prepareStatement(parsedQuery, autoGeneratedKeys);
        failed = false;
    }
public void prepare(int autoGeneratedKeys) throws SQLException {
this.autoGeneratedKeys = autoGeneratedKeys;
try {
if (statement == null) {
reset(false);
} else if (failed || statement.getWarnings() != null) {
reset(true);
}
} catch (SQLException firstError) {
try {
reset(true);
} catch (SQLException secondError) {
// Log.warning(secondError);
failed = true;
throw secondError;
}
}
}
public void prepare() throws SQLException {
prepare(Statement.NO_GENERATED_KEYS);
}
/**
* Execute query with result
*/
public ResultSet executeQuery() throws SQLException {
try {
return statement.executeQuery();
} catch (SQLException error) {
failed = true;
throw error;
}
}
/**
* Executes query without result
*/
public int executeUpdate() throws SQLException {
try {
return statement.executeUpdate();
} catch (SQLException error) {
failed = true;
throw error;
}
}
/**
* Return generated keys
*/
public ResultSet getGeneratedKeys() throws SQLException {
return statement.getGeneratedKeys();
}
/**
* Immediately closes the statement
*/
public void close() throws SQLException {
statement.close();
}
public void setInt(String name, Integer value) throws SQLException {
List<Integer> indexList = indexMap.get(name);
if (indexList != null) for (Integer index: indexList) {
if (value != null) {
statement.setInt(index, value);
} else {
statement.setNull(index, Types.INTEGER);
}
}
}
public void setLong(String name, Long value) throws SQLException {
List<Integer> indexList = indexMap.get(name);
if (indexList != null) for (Integer index: indexList) {
if (value != null) {
statement.setLong(index, value);
} else {
statement.setNull(index, Types.INTEGER);
}
}
}
public void setBoolean(String name, Boolean value) throws SQLException {
List<Integer> indexList = indexMap.get(name);
if (indexList != null) for (Integer index: indexList) {
if (value != null) {
statement.setBoolean(index, value);
} else {
statement.setNull(index, Types.BOOLEAN);
}
}
}
public void setDouble(String name, Double value) throws SQLException {
List<Integer> indexList = indexMap.get(name);
if (indexList != null) for (Integer index: indexList) {
if (value != null) {
statement.setDouble(index, value);
} else {
statement.setNull(index, Types.DOUBLE);
}
}
}
public void setTimestamp(String name, Date value) throws SQLException {
List<Integer> indexList = indexMap.get(name);
if (indexList != null) for (Integer index: indexList) {
if (value != null) {
statement.setTimestamp(index, new Timestamp(value.getTime()));
} else {
statement.setNull(index, Types.TIMESTAMP);
}
}
}
public void setString(String name, String value) throws SQLException {
List<Integer> indexList = indexMap.get(name);
if (indexList != null) for (Integer index: indexList) {
if (value != null) {
statement.setString(index, value);
} else {
statement.setNull(index, Types.VARCHAR);
}
}
}
}
| |
package com.ota.updates.activities;
import in.uncod.android.bypass.Bypass;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Locale;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog.Builder;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.text.method.LinkMovementMethod;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toolbar;
import com.ota.updates.Addon;
import com.ota.updates.R;
import com.ota.updates.RomUpdate;
import com.ota.updates.download.DownloadAddon;
import com.ota.updates.tasks.AddonXmlParser;
import com.ota.updates.utils.Constants;
import com.ota.updates.utils.Preferences;
import com.ota.updates.utils.Utils;
/**
 * Lists the add-ons published for the current ROM and lets the user
 * download, cancel or delete each one. The add-on manifest is fetched and
 * parsed off the UI thread by {@link LoadAddonManifest}.
 */
public class AddonActivity extends Activity implements Constants {
    public final static String TAG = "AddonActivity";
    // NOTE(review): static Activity context and views outlive the Activity
    // and can leak it across rotations — consider refactoring separately.
    public static Context mContext;
    private static ListView mListview;
    private static DownloadAddon mDownloadAddon;
    private static Builder mNetworkDialog;
    @SuppressLint("NewApi") @Override
    public void onCreate(Bundle savedInstanceState) {
        mContext = this;
        setTheme(Preferences.getTheme(mContext));
        boolean isLollipop = Utils.isLollipop();
        super.onCreate(savedInstanceState);
        setContentView(R.layout.ota_addons);
        if (isLollipop) {
            Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar_addons);
            setActionBar(toolbar);
            toolbar.setTitle(getResources().getString(R.string.app_name));
        }
        mListview = (ListView) findViewById(R.id.listview);
        mDownloadAddon = new DownloadAddon();
        // RomHut-hosted manifests support server-side name ordering via query params
        String isRomhut = "";
        String urlDomain = RomUpdate.getUrlDomain(mContext);
        if(!urlDomain.equals("null")) {
            isRomhut = urlDomain.contains("romhut.com") ? "?order_by=name&order_direction=asc" : "";
        }
        new LoadAddonManifest(mContext).execute(RomUpdate.getAddonsUrl(mContext) + isRomhut);
    }
    /**
     * Binds the parsed add-ons to the list view.
     *
     * @param addonsList add-ons parsed from the downloaded manifest
     */
    public void setupListView(ArrayList<Addon> addonsList) {
        final AddonsArrayAdapter adapter = new AddonsArrayAdapter(mContext, addonsList);
        if(mListview != null) {
            mListview.setAdapter(adapter);
        }
    }
    /**
     * Downloads the add-on manifest XML and parses it off the UI thread,
     * showing an indeterminate progress dialog meanwhile.
     */
    private class LoadAddonManifest extends AsyncTask<Object, Void, ArrayList<Addon>> {
        public final String TAG = this.getClass().getSimpleName();
        private static final String MANIFEST = "addon_manifest.xml";
        private ProgressDialog mLoadingDialog;
        private Context mContext;
        public LoadAddonManifest(Context context) {
            mContext = context;
        }
        @Override
        protected void onPreExecute(){
            // Show a loading/progress dialog while the search is being performed
            mLoadingDialog = new ProgressDialog(mContext);
            mLoadingDialog.setIndeterminate(true);
            mLoadingDialog.setCancelable(false);
            mLoadingDialog.setMessage(mContext.getResources().getString(R.string.loading));
            mLoadingDialog.show();
            // Delete any existing manifest file before we attempt to download a new one
            File manifest = new File(mContext.getFilesDir().getPath(), MANIFEST);
            if(manifest.exists()) {
                manifest.delete();
            }
        }
        /**
         * Downloads the manifest given as {@code param[0]} into app-private
         * storage, then parses it. Returns null on any failure.
         */
        @Override
        protected ArrayList<Addon> doInBackground(Object... param) {
            InputStream input = null;
            OutputStream output = null;
            try {
                URL url = new URL((String) param[0]);
                URLConnection connection = url.openConnection();
                connection.connect();
                // download the file
                input = new BufferedInputStream(url.openStream());
                output = mContext.openFileOutput(
                        MANIFEST, Context.MODE_PRIVATE);
                byte data[] = new byte[1024];
                int count;
                while ((count = input.read(data)) != -1) {
                    output.write(data, 0, count);
                }
                output.flush();
                // file finished downloading, parse it!
                return AddonXmlParser.parse(new File(mContext.getFilesDir(), MANIFEST));
            } catch (Exception e) {
                Log.d(TAG, "Exception: " + e.getMessage());
            } finally {
                // Fix: close both streams even when the download fails part-way
                // (the previous version leaked them on any exception).
                if (output != null) {
                    try {
                        output.close();
                    } catch (Exception ignored) {
                        // best effort
                    }
                }
                if (input != null) {
                    try {
                        input.close();
                    } catch (Exception ignored) {
                        // best effort
                    }
                }
            }
            return null;
        }
        @Override
        protected void onPostExecute(ArrayList<Addon> result) {
            mLoadingDialog.cancel();
            if(result != null) {
                setupListView(result);
            }
            super.onPostExecute(result);
        }
    }
    /**
     * Renders one card per add-on with download/cancel/delete actions and a
     * per-row progress bar updated via the static helpers.
     */
    public static class AddonsArrayAdapter extends ArrayAdapter<Addon> {
        public AddonsArrayAdapter(Context context, ArrayList<Addon> users) {
            super(context, 0, users);
        }
        /**
         * Updates the progress bar of the visible row for {@code index};
         * rows scrolled off screen are skipped.
         */
        public static void updateProgress(int index, int progress, boolean finished) {
            // NOTE(review): this uses `index` directly while updateButtons uses
            // `index - 1`; callers appear to pass different bases — confirm
            // before unifying the two offsets.
            View v = mListview.getChildAt(index -
                    mListview.getFirstVisiblePosition());
            if(v == null) {
                return;
            }
            ProgressBar progressBar = (ProgressBar) v.findViewById(R.id.progress_bar);
            if (finished) {
                progressBar.setProgress(0);
            } else {
                progressBar.setProgress(progress);
                if(DEBUGGING) {
                    Log.d(TAG, "Setting Progress as " + progress);
                }
            }
        }
        /**
         * Switches the visible row for {@code index} between the
         * "finished" and "idle" button states.
         */
        public static void updateButtons(int index, boolean finished) {
            View v = mListview.getChildAt((index - 1) -
                    mListview.getFirstVisiblePosition());
            if(v == null) {
                return;
            }
            final Button download = (Button) v.findViewById(R.id.download_button);
            final Button cancel = (Button) v.findViewById(R.id.cancel_button);
            final Button delete = (Button) v.findViewById(R.id.delete_button);
            applyDownloadState(download, cancel, delete, finished);
        }
        /**
         * Shared button state used both when binding a row in getView and
         * when a download completes (previously duplicated in both places).
         */
        private static void applyDownloadState(Button download, Button cancel,
                Button delete, boolean finished) {
            if (finished) {
                download.setVisibility(View.VISIBLE);
                download.setText(mContext.getResources().getString(R.string.finished));
                download.setClickable(false);
                delete.setVisibility(View.VISIBLE);
                cancel.setVisibility(View.GONE);
            } else {
                download.setVisibility(View.VISIBLE);
                download.setText(mContext.getResources().getString(R.string.download));
                download.setClickable(true);
                cancel.setVisibility(View.GONE);
                delete.setVisibility(View.GONE);
            }
        }
        /** Warns that downloads are restricted to Wi-Fi and links to settings. */
        private void showNetworkDialog() {
            mNetworkDialog = new Builder(mContext);
            mNetworkDialog.setTitle(R.string.available_wrong_network_title)
                    .setMessage(R.string.available_wrong_network_message)
                    .setPositiveButton(R.string.ok, null)
                    .setNeutralButton(R.string.settings, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            Intent intent = new Intent(mContext, SettingsActivity.class);
                            mContext.startActivity(intent);
                        }
                    });
            mNetworkDialog.show();
        }
        /** Asks for confirmation before deleting a downloaded add-on zip. */
        private void deleteConfirm(final File file, final Addon item) {
            Builder deleteConfirm = new Builder(mContext);
            deleteConfirm.setTitle(R.string.delete);
            deleteConfirm.setMessage(mContext.getResources().getString(R.string.delete_confirm) + "\n\n" + file.getName());
            deleteConfirm.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    if (file.exists()) {
                        file.delete();
                        updateButtons(item.getId(), false);
                    }
                }
            });
            deleteConfirm.setNegativeButton(R.string.cancel, null);
            deleteConfirm.show();
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            final Addon item = getItem(position);
            final int index = position;
            if (convertView == null) {
                convertView = LayoutInflater.from(getContext()).inflate(R.layout.card_addons_list_item, parent, false);
            }
            TextView title = (TextView) convertView.findViewById(R.id.title);
            TextView desc = (TextView) convertView.findViewById(R.id.description);
            TextView updatedOn = (TextView) convertView.findViewById(R.id.updatedOn);
            TextView filesize = (TextView) convertView.findViewById(R.id.size);
            final Button download = (Button) convertView.findViewById(R.id.download_button);
            final Button cancel = (Button) convertView.findViewById(R.id.cancel_button);
            final Button delete = (Button) convertView.findViewById(R.id.delete_button);
            title.setText(item.getTitle());
            // Render the markdown description with link support
            Bypass byPass = new Bypass(mContext);
            String descriptionStr = item.getDesc();
            CharSequence string = byPass.markdownToSpannable(descriptionStr);
            desc.setText(string);
            desc.setMovementMethod(LinkMovementMethod.getInstance());
            // Reformat the manifest date (yyyy-MM-dd) for display
            String UpdatedOnStr = convertView.getResources().getString(R.string.addons_updated_on);
            String date = item.getPublishedAt();
            Locale locale = Locale.getDefault();
            DateFormat fromDate = new SimpleDateFormat("yyyy-MM-dd", locale);
            DateFormat toDate = new SimpleDateFormat("dd, MMMM yyyy", locale);
            try {
                date = toDate.format(fromDate.parse(date));
            } catch (ParseException e) {
                e.printStackTrace();
            }
            updatedOn.setText(UpdatedOnStr + " " + date);
            filesize.setText(Utils.formatDataFromBytes(item.getFilesize()));
            final File file = new File(SD_CARD
                    + File.separator
                    + OTA_DOWNLOAD_DIR, item.getTitle() + ".zip");
            if (DEBUGGING) {
                Log.d(TAG, "file path " + file.getAbsolutePath());
                Log.d(TAG, "file length " + file.length() + " remoteLength " + item.getFilesize());
            }
            // A completed download is detected purely by matching file size
            boolean finished = file.length() == item.getFilesize();
            applyDownloadState(download, cancel, delete, finished);
            download.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    boolean isMobile = Utils.isMobileNetwork(mContext);
                    boolean isSettingWiFiOnly = Preferences.getNetworkType(mContext).equals(WIFI_ONLY);
                    if (isMobile && isSettingWiFiOnly) {
                        showNetworkDialog();
                    } else {
                        mDownloadAddon.startDownload(mContext, item.getDownloadLink(), item.getTitle(), item.getId(), index);
                        download.setVisibility(View.GONE);
                        cancel.setVisibility(View.VISIBLE);
                    }
                }
            });
            cancel.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    mDownloadAddon.cancelDownload(mContext, index);
                    download.setVisibility(View.VISIBLE);
                    cancel.setVisibility(View.GONE);
                    updateProgress(index, 0, true);
                }
            });
            delete.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    deleteConfirm(file, item);
                }
            });
            return convertView;
        }
    }
}
| |
/**
*
* Copyright 2011 (C) Rainer Schneider,Roggenburg <schnurlei@googlemail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package de.jdynameta.persistence.manager.impl;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import de.jdynameta.base.metainfo.AssociationInfo;
import de.jdynameta.base.model.PersistentObjectReader;
import de.jdynameta.base.objectlist.ObjectList;
import de.jdynameta.base.objectlist.ObjectListModel;
import de.jdynameta.base.objectlist.ObjectListModelEvent;
import de.jdynameta.base.objectlist.ObjectListModelListener;
import de.jdynameta.base.objectlist.ProxyResolveException;
import de.jdynameta.base.value.ValueObject;
import de.jdynameta.persistence.manager.PersistentObjectManager;
import de.jdynameta.persistence.objectlist.ObjectFilter;
/**
* @author Rainer
* @param <TPersistentObject>
*
*/
/**
 * Observable list of an association's detail objects, filtered to those whose
 * master reference equals the given parent. The model listens to create,
 * delete and modify events from the persistence layer and forwards interval
 * events to registered {@link ObjectListModelListener}s.
 *
 * @author Rainer
 * @param <TPersistentObject> type of the listed persistent objects
 */
@SuppressWarnings("serial")
public class AssociationInfoListModel<TPersistentObject> implements ObjectListModel<TPersistentObject>
{
    /** Reacts to persistence events for the association's detail class. */
    private final PersistentObjectReader.PersistentListener<TPersistentObject> persistentListener;
    /** Objects currently accepted by {@link #filter}. */
    private final List<TPersistentObject> allObjects;
    /** Registered listeners, notified via the fire* helpers. */
    protected final ArrayList<ObjectListModelListener<TPersistentObject>> listenerList;
    /** Source list this model was created from; consumed by {@link #refresh()}. */
    private final ObjectList<TPersistentObject> wrappedObjectList;
    /** Accepts only objects whose master reference equals the parent. */
    private final ObjectFilter<TPersistentObject> filter;
    /**
     * Creates the model and registers a persistence listener for the
     * association's detail class.
     *
     * @param aPersistentReader reader to register the persistence listener with
     * @param aAssocInfo association whose detail objects are listed
     * @param aWrappedList source list of candidate objects
     * @param aParent master object whose details are accepted by the filter
     */
    public AssociationInfoListModel(PersistentObjectReader<TPersistentObject> aPersistentReader, final AssociationInfo aAssocInfo, ObjectList<TPersistentObject> aWrappedList, final Object aParent) throws ProxyResolveException
    {
        super();
        this.listenerList = new ArrayList<>();
        this.allObjects = new ArrayList<>(aWrappedList.size());
        wrappedObjectList = aWrappedList;
        this.filter = new ObjectFilter<TPersistentObject>()
        {
            @Override
            public boolean accept(Object object)
            {
                return aParent.equals(((ValueObject) object).getValue(aAssocInfo.getMasterClassReference()));
            }
        };
        this.persistentListener = (PersistentObjectManager.PersistentEvent<TPersistentObject> aEvent) ->
        {
            switch (aEvent.getState())
            {
                case OBJECT_CREATED:
                    // Only track objects belonging to this parent
                    if (filter.accept(aEvent.getChangedObject()))
                    {
                        allObjects.add(aEvent.getChangedObject());
                        fireIntervalAdded(AssociationInfoListModel.this, allObjects.size(), allObjects.size());
                    }
                    break;
                case OBJECT_DELETED:
                    int index = allObjects.indexOf(aEvent.getChangedObject());
                    if (index >= 0)
                    {
                        allObjects.remove(aEvent.getChangedObject());
                        fireIntervalRemoved(AssociationInfoListModel.this, index, index);
                    }
                    break;
                case OBJECT_MODIFIED:
                    int modIdx = allObjects.indexOf(aEvent.getChangedObject());
                    if (modIdx >= 0)
                    {
                        fireIntervalUpdated(AssociationInfoListModel.this, modIdx, modIdx);
                    }
                    break;
            }
            // (removed a leftover no-op call to aEvent.getState())
        };
        aPersistentReader.addListener(aAssocInfo.getDetailClass(), persistentListener);
    }
    /**
     * Retrieves the filter.
     *
     * @return The filter currently applied to the model.
     */
    public ObjectFilter<TPersistentObject> getFilter()
    {
        return filter;
    }
    /**
     * Refreshes the collection from the wrapped list, re-applying the filter,
     * and notifies listeners of the removed and added intervals.
     */
    protected void refresh() throws ProxyResolveException
    {
        // Fix: capture the size before clearing — the old code measured the
        // list after clear(), so listeners always received the bogus
        // interval (0, -1).
        final int oldSize = this.allObjects.size();
        this.allObjects.clear();
        if (oldSize > 0)
        {
            this.fireIntervalRemoved(this, 0, oldSize - 1);
        }
        if (this.wrappedObjectList != null)
        {
            for (Iterator<TPersistentObject> iterator = this.wrappedObjectList.iterator(); iterator.hasNext();)
            {
                TPersistentObject object = iterator.next();
                if (this.filter == null || this.filter.accept(object))
                {
                    this.addToAllObjects(object);
                }
            }
        }
        if (this.allObjects.size() > 0)
        {
            this.fireIntervalAdded(this, 0, this.allObjects.size() - 1);
        }
    }
    /**
     * Sychronize access to allObjects
     *
     * @param anObject
     */
    private synchronized void addToAllObjects(TPersistentObject anObject)
    {
        this.allObjects.add(anObject);
    }
    /* (non-Javadoc)
     * @see de.comafra.model.objectlist.ObjectList#get(int)
     */
    @Override
    public TPersistentObject get(int index)
    {
        return this.allObjects.get(index);
    }
    /* (non-Javadoc)
     * @see de.comafra.model.objectlist.ObjectList#iterator()
     */
    @Override
    public Iterator<TPersistentObject> iterator()
    {
        return this.allObjects.iterator();
    }
    /* (non-Javadoc)
     * @see de.comafra.model.objectlist.ObjectList#size()
     */
    @Override
    public int size()
    {
        return this.allObjects.size();
    }
    /* (non-Javadoc)
     * @see de.comafra.model.objectlist.ObjectList#indexOf(java.lang.Object)
     */
    @Override
    public int indexOf(Object anObject)
    {
        return this.allObjects.indexOf(anObject);
    }
    /* (non-Javadoc)
     * @see de.comafra.model.objectlist.ObjectListModel#addObjectListModelListener(de.comafra.model.objectlist.ObjectListModelListener)
     */
    @Override
    public void addObjectListModelListener(ObjectListModelListener<TPersistentObject> aListener)
    {
        listenerList.add(aListener);
    }
    /* (non-Javadoc)
     * @see de.comafra.model.objectlist.ObjectListModel#removeObjectListModelListener(de.comafra.model.objectlist.ObjectListModelListener)
     */
    @Override
    public void removeObjectListModelListener(ObjectListModelListener<TPersistentObject> aListener)
    {
        listenerList.remove(aListener);
    }
    /** Notifies all listeners that [index0, index1] was added. */
    protected final void fireIntervalAdded(ObjectListModel<TPersistentObject> source, int index0, int index1)
    {
        ObjectListModelEvent<TPersistentObject> event = new ObjectListModelEvent<TPersistentObject>(source, index0, index1);
        for (ObjectListModelListener<TPersistentObject> curListener : listenerList)
        {
            curListener.intervalAdded(event);
        }
    }
    /** Notifies all listeners that [index0, index1] was removed. */
    protected final void fireIntervalRemoved(ObjectListModel<TPersistentObject> source, int index0, int index1)
    {
        ObjectListModelEvent<TPersistentObject> event = new ObjectListModelEvent<TPersistentObject>(source, index0, index1);
        for (ObjectListModelListener<TPersistentObject> curListener : listenerList)
        {
            curListener.intervalRemoved(event);
        }
    }
    /** Notifies all listeners that [index0, index1] changed in place. */
    protected final void fireIntervalUpdated(ObjectListModel<TPersistentObject> source, int index0, int index1)
    {
        ObjectListModelEvent<TPersistentObject> event = new ObjectListModelEvent<TPersistentObject>(source, index0, index1);
        for (ObjectListModelListener<TPersistentObject> curListener : listenerList)
        {
            curListener.intervalUpdated(event);
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1/autoscaling_policies.proto
package com.google.cloud.dataproc.v1;
/**
*
*
* <pre>
* A request to list autoscaling policies in a project.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest}
*/
public final class ListAutoscalingPoliciesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest)
ListAutoscalingPoliciesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListAutoscalingPoliciesRequest.newBuilder() to construct.
  private ListAutoscalingPoliciesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor; proto3 string fields default to "".
  private ListAutoscalingPoliciesRequest() {
    parent_ = "";
    pageToken_ = "";
  }
  // Hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListAutoscalingPoliciesRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  /**
   * Wire-format parsing constructor (generated by protoc — do not hand-edit).
   * Recognized tags: 10 = parent (string, field 1), 16 = page_size (varint,
   * field 2), 26 = page_token (string, field 3); any other tag is preserved
   * in unknownFields.
   */
  private ListAutoscalingPoliciesRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 16:
            {
              pageSize_ = input.readInt32();
              break;
            }
          case 26:
            {
              java.lang.String s = input.readStringRequireUtf8();
              pageToken_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on error
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  // Message descriptor, shared via the generated AutoscalingPoliciesProto holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.AutoscalingPoliciesProto
        .internal_static_google_cloud_dataproc_v1_ListAutoscalingPoliciesRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.AutoscalingPoliciesProto
        .internal_static_google_cloud_dataproc_v1_ListAutoscalingPoliciesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.class,
            com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; UTF-8 decoding is lazy and cached.
  private volatile java.lang.Object parent_;
  /**
   *
   *
   * <pre>
   * Required. The "resource name" of the region or location, as described
   * in https://cloud.google.com/apis/design/resource_names.
   * * For `projects.regions.autoscalingPolicies.list`, the resource name
   * of the region has the following format:
   * `projects/{project_id}/regions/{region}`
   * * For `projects.locations.autoscalingPolicies.list`, the resource name
   * of the location has the following format:
   * `projects/{project_id}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s; // cache the decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The "resource name" of the region or location, as described
   * in https://cloud.google.com/apis/design/resource_names.
   * * For `projects.regions.autoscalingPolicies.list`, the resource name
   * of the region has the following format:
   * `projects/{project_id}/regions/{region}`
   * * For `projects.locations.autoscalingPolicies.list`, the resource name
   * of the location has the following format:
   * `projects/{project_id}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of results to return in each response.
   * Must be less than or equal to 1000. Defaults to 100.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  // Holds either a String or a ByteString; UTF-8 decoding is lazy and cached.
  private volatile java.lang.Object pageToken_;
  /**
   *
   *
   * <pre>
   * Optional. The page token, returned by a previous call, to request the
   * next page of results.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s; // cache the decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The page token, returned by a previous call, to request the
   * next page of results.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this proto3 message, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only fields that differ from their proto3 defaults.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    unknownFields.writeTo(output);
  }
  // Size computation mirrors writeTo; the result is memoized in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest other =
        (com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Hash over descriptor, each field (tag-number salted) and unknown fields; memoized.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
  /** CodedInputStream parse with an extension registry. */
  public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Instance hook required by the Message interface; delegates to the static factory.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates an empty builder (all fields at their proto3 defaults). */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s field values. */
  public static Builder newBuilder(
      com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance maps to a fresh builder; any other instance seeds the builder
  // with its current field values.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Framework hook: creates a builder attached to a parent so nested-builder changes propagate.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A request to list autoscaling policies in a project.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest}
   */
  // NOTE(review): protoc-generated builder. Comments below were added for navigation only;
  // regenerating from the .proto will discard them. Do not hand-edit the logic.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest)
      com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequestOrBuilder {
    // Message descriptor backing the reflection-based accessors below.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.AutoscalingPoliciesProto
          .internal_static_google_cloud_dataproc_v1_ListAutoscalingPoliciesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.AutoscalingPoliciesProto
          .internal_static_google_cloud_dataproc_v1_ListAutoscalingPoliciesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.class,
              com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.Builder.class);
    }
    // Construct using com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Body intentionally empty: this message has only scalar fields, so there are no
      // nested field builders to force-initialize.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    // Resets every field to its proto3 default ("" / 0).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.AutoscalingPoliciesProto
          .internal_static_google_cloud_dataproc_v1_ListAutoscalingPoliciesRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.getDefaultInstance();
    }
    // Builds and validates; isInitialized() is always true here, so this never throws in practice.
    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest build() {
      com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Copies the builder's current field values into a new immutable message instance.
    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest buildPartial() {
      com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest result =
          new com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest(this);
      result.parent_ = parent_;
      result.pageSize_ = pageSize_;
      result.pageToken_ = pageToken_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Uses the typed merge when 'other' is the same message type, otherwise falls back to
    // the reflection-based merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest) {
        return mergeFrom((com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 scalar merge semantics: only fields of 'other' that differ from their default
    // overwrite this builder's values.
    public Builder mergeFrom(com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest other) {
      if (other == com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses a message from the stream; on a parse error, the partially read message is
    // still merged (in the finally block) before the exception is rethrown as IOException.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Stored as String or ByteString; lazily converted and cached as String on first read.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The "resource name" of the region or location, as described
     * in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.autoscalingPolicies.list`, the resource name
     * of the region has the following format:
     * `projects/{project_id}/regions/{region}`
     * * For `projects.locations.autoscalingPolicies.list`, the resource name
     * of the location has the following format:
     * `projects/{project_id}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The "resource name" of the region or location, as described
     * in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.autoscalingPolicies.list`, the resource name
     * of the region has the following format:
     * `projects/{project_id}/regions/{region}`
     * * For `projects.locations.autoscalingPolicies.list`, the resource name
     * of the location has the following format:
     * `projects/{project_id}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The "resource name" of the region or location, as described
     * in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.autoscalingPolicies.list`, the resource name
     * of the region has the following format:
     * `projects/{project_id}/regions/{region}`
     * * For `projects.locations.autoscalingPolicies.list`, the resource name
     * of the location has the following format:
     * `projects/{project_id}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The "resource name" of the region or location, as described
     * in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.autoscalingPolicies.list`, the resource name
     * of the region has the following format:
     * `projects/{project_id}/regions/{region}`
     * * For `projects.locations.autoscalingPolicies.list`, the resource name
     * of the location has the following format:
     * `projects/{project_id}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The "resource name" of the region or location, as described
     * in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.autoscalingPolicies.list`, the resource name
     * of the region has the following format:
     * `projects/{project_id}/regions/{region}`
     * * For `projects.locations.autoscalingPolicies.list`, the resource name
     * of the location has the following format:
     * `projects/{project_id}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in each response.
     * Must be less than or equal to 1000. Defaults to 100.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in each response.
     * Must be less than or equal to 1000. Defaults to 100.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in each response.
     * Must be less than or equal to 1000. Defaults to 100.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      pageSize_ = 0;
      onChanged();
      return this;
    }
    // Same lazy String/ByteString storage scheme as parent_ above.
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The page token, returned by a previous call, to request the
     * next page of results.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, returned by a previous call, to request the
     * next page of results.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, returned by a previous call, to request the
     * next page of results.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, returned by a previous call, to request the
     * next page of results.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, returned by a previous call, to request the
     * next page of results.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest)
  // Canonical empty instance; shared by getDefaultInstance() and used as the merge sentinel.
  private static final com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest();
  }
  public static com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton wire-format parser; delegates to the message's parsing constructor.
  private static final com.google.protobuf.Parser<ListAutoscalingPoliciesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListAutoscalingPoliciesRequest>() {
        @java.lang.Override
        public ListAutoscalingPoliciesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ListAutoscalingPoliciesRequest(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<ListAutoscalingPoliciesRequest> parser() {
    return PARSER;
  }
  // Instance hook required by the Message interface; returns the shared parser.
  @java.lang.Override
  public com.google.protobuf.Parser<ListAutoscalingPoliciesRequest> getParserForType() {
    return PARSER;
  }
  // Instance hook required by the Message interface; returns the shared default instance.
  @java.lang.Override
  public com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* $Id: FreemarkerResult.java 832148 2009-11-02 22:23:28Z musachy $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.views.freemarker;
import java.io.CharArrayWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Locale;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.ServletActionContext;
import org.apache.struts2.dispatcher.StrutsResultSupport;
import org.apache.struts2.views.util.ResourceUtil;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionInvocation;
import com.opensymphony.xwork2.LocaleProvider;
import com.opensymphony.xwork2.inject.Inject;
import com.opensymphony.xwork2.util.ValueStack;
import freemarker.template.Configuration;
import freemarker.template.ObjectWrapper;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
/**
* <!-- START SNIPPET: description -->
*
* Renders a view using the Freemarker template engine.
* <p>
 * The FreemarkerManager class configures the template loaders so that the
* template location can be either
* </p>
*
* <ul>
*
* <li>relative to the web root folder. eg <code>/WEB-INF/views/home.ftl</code>
* </li>
*
 * <li>a classpath resource. eg <code>/com/company/web/views/home.ftl</code></li>
*
* </ul>
*
* <!-- END SNIPPET: description -->
*
* <b>This result type takes the following parameters:</b>
*
* <!-- START SNIPPET: params -->
*
* <ul>
*
* <li><b>location (default)</b> - the location of the template to process.</li>
*
* <li><b>parse</b> - true by default. If set to false, the location param will
* not be parsed for Ognl expressions.</li>
*
* <li><b>contentType</b> - defaults to "text/html" unless specified.</li>
*
* <li><b>writeIfCompleted</b> - false by default, write to stream only if there isn't any error
* processing the template. Setting template_exception_handler=rethrow in freemarker.properties
* will have the same effect.</li>
*
* </ul>
*
* <!-- END SNIPPET: params -->
*
* <b>Example:</b>
*
* <pre>
* <!-- START SNIPPET: example -->
*
* <result name="success" type="freemarker">foo.ftl</result>
*
* <!-- END SNIPPET: example -->
* </pre>
*/
public class FreemarkerResult extends StrutsResultSupport {
    private static final long serialVersionUID = -3778230771704661631L;
    // Per-execution state (a new result instance is created for each result execution).
    protected ActionInvocation invocation;
    protected Configuration configuration;
    protected ObjectWrapper wrapper;
    protected FreemarkerManager freemarkerManager;
    // Optional override target for template output; when null, the response writer is used.
    private Writer writer;
    private boolean writeIfCompleted = false;
    /*
     * Struts results are constructed for each result execution
     *
     * the current context is available to subclasses via these protected fields
     */
    protected String location;
    private String pContentType = "text/html";
    // Request-attribute key under which the top-level template's buffer is shared with
    // nested template executions (e.g. via the "action" tag).
    private static final String PARENT_TEMPLATE_WRITER = FreemarkerResult.class.getName() + ".parentWriter";
    public FreemarkerResult() {
        super();
    }
    public FreemarkerResult(String location) {
        super(location);
    }
    @Inject
    public void setFreemarkerManager(FreemarkerManager mgr) {
        this.freemarkerManager = mgr;
    }
    public void setContentType(String aContentType) {
        pContentType = aContentType;
    }
    /**
     * allow parameterization of the contentType
     * the default being text/html
     */
    public String getContentType() {
        return pContentType;
    }
    /**
     * Execute this result, using the specified template locationArg.
     * <p/>
     * The template locationArg has already been interpolated for any variable substitutions
     * <p/>
     * this method obtains the freemarker configuration and the object wrapper from the provided hooks.
     * It then implements the template processing workflow by calling the hooks for
     * preTemplateProcess and postTemplateProcess
     */
    public void doExecute(String locationArg, ActionInvocation invocation) throws IOException, TemplateException {
        this.location = locationArg;
        this.invocation = invocation;
        this.configuration = getConfiguration();
        this.wrapper = getObjectWrapper();
        ActionContext ctx = invocation.getInvocationContext();
        HttpServletRequest req = (HttpServletRequest) ctx.get(ServletActionContext.HTTP_REQUEST);
        // Relative template locations are resolved against the current request's resource base.
        if (!locationArg.startsWith("/")) {
            String base = ResourceUtil.getResourceBase(req);
            locationArg = base + "/" + locationArg;
        }
        Template template = configuration.getTemplate(locationArg, deduceLocale());
        TemplateModel model = createModel();
        // Give subclasses a chance to hook into preprocessing
        if (preTemplateProcess(template, model)) {
            try {
                // Process the template
                // NOTE(review): this local shadows the 'writer' field; getWriter() returns the
                // field when set, otherwise the response writer.
                Writer writer = getWriter();
                if (isWriteIfCompleted() || configuration.getTemplateExceptionHandler() == TemplateExceptionHandler.RETHROW_HANDLER) {
                    // Buffered path: render into a CharArrayWriter and copy to the real writer
                    // only after the top-level template completes, so that a failing template
                    // does not emit partial output.
                    CharArrayWriter parentCharArrayWriter = (CharArrayWriter) req.getAttribute(PARENT_TEMPLATE_WRITER);
                    boolean isTopTemplate = false;
                    // Intentional assignment inside the condition: we are the top template
                    // exactly when no parent buffer has been stored in the request yet.
                    if (isTopTemplate = (parentCharArrayWriter == null)) {
                        //this is the top template
                        parentCharArrayWriter = new CharArrayWriter();
                        //set it in the request because when the "action" tag is used a new VS and ActionContext is created
                        req.setAttribute(PARENT_TEMPLATE_WRITER, parentCharArrayWriter);
                    }
                    try {
                        template.process(model, parentCharArrayWriter);
                        if (isTopTemplate) {
                            parentCharArrayWriter.flush();
                            parentCharArrayWriter.writeTo(writer);
                        }
                    } finally {
                        // Only the top template owns the buffer; nested templates must leave it alone.
                        if (isTopTemplate && parentCharArrayWriter != null) {
                            req.removeAttribute(PARENT_TEMPLATE_WRITER);
                            parentCharArrayWriter.close();
                        }
                    }
                } else {
                    // Unbuffered path: stream straight to the writer.
                    template.process(model, writer);
                }
            } finally {
                // Give subclasses a chance to hook into postprocessing
                postTemplateProcess(template, model);
            }
        }
    }
    /**
     * This method is called from {@link #doExecute(String, ActionInvocation)} to obtain the
     * FreeMarker configuration object that this result will use for template loading. This is a
     * hook that allows you to custom-configure the configuration object in a subclass, or to fetch
     * it from an IoC container.
     * <p/>
     * <b>
     * The default implementation obtains the configuration from the ConfigurationManager instance.
     * </b>
     */
    protected Configuration getConfiguration() throws TemplateException {
        return freemarkerManager.getConfiguration(ServletActionContext.getServletContext());
    }
    /**
     * This method is called from {@link #doExecute(String, ActionInvocation)} to obtain the
     * FreeMarker object wrapper object that this result will use for adapting objects into template
     * models. This is a hook that allows you to custom-configure the wrapper object in a subclass.
     * <p/>
     * <b>
     * The default implementation returns {@link Configuration#getObjectWrapper()}
     * </b>
     */
    protected ObjectWrapper getObjectWrapper() {
        return configuration.getObjectWrapper();
    }
    public void setWriter(Writer writer) {
        this.writer = writer;
    }
    /**
     * The default writer writes directly to the response writer.
     */
    protected Writer getWriter() throws IOException {
        if(writer != null) {
            return writer;
        }
        return ServletActionContext.getResponse().getWriter();
    }
    /**
     * Build the instance of the ScopesHashModel, including JspTagLib support
     * <p/>
     * Objects added to the model are
     * <p/>
     * <ul>
     * <li>Application - servlet context attributes hash model
     * <li>JspTaglibs - jsp tag lib factory model
     * <li>Request - request attributes hash model
     * <li>Session - session attributes hash model
     * <li>request - the HttpServletRequst object for direct access
     * <li>response - the HttpServletResponse object for direct access
     * <li>stack - the OgnLValueStack instance for direct access
     * <li>ognl - the instance of the OgnlTool
     * <li>action - the action itself
     * <li>exception - optional : the JSP or Servlet exception as per the servlet spec (for JSP Exception pages)
     * <li>struts - instance of the StrutsUtil class
     * </ul>
     */
    protected TemplateModel createModel() throws TemplateModelException {
        ServletContext servletContext = ServletActionContext.getServletContext();
        HttpServletRequest request = ServletActionContext.getRequest();
        HttpServletResponse response = ServletActionContext.getResponse();
        ValueStack stack = ServletActionContext.getContext().getValueStack();
        Object action = null;
        if(invocation!= null ) action = invocation.getAction(); //Added for NullPointException
        return freemarkerManager.buildTemplateModel(stack, action, servletContext, request, response, wrapper);
    }
    /**
     * Returns the locale used for the {@link Configuration#getTemplate(String, Locale)} call. The base implementation
     * simply returns the locale setting of the action (assuming the action implements {@link LocaleProvider}) or, if
     * the action does not, the configuration's locale is returned. Override this method to provide different behaviour.
     */
    protected Locale deduceLocale() {
        if (invocation.getAction() instanceof LocaleProvider) {
            return ((LocaleProvider) invocation.getAction()).getLocale();
        } else {
            return configuration.getLocale();
        }
    }
    /**
     * the default implementation of postTemplateProcess applies the contentType parameter
     */
    protected void postTemplateProcess(Template template, TemplateModel data) throws IOException {
    }
    /**
     * Called before the execution is passed to template.process().
     * This is a generic hook you might use in subclasses to perform a specific
     * action before the template is processed. By default does nothing.
     * A typical action to perform here is to inject application-specific
     * objects into the model root
     *
     * @return true to process the template, false to suppress template processing.
     */
    protected boolean preTemplateProcess(Template template, TemplateModel model) throws IOException {
        Object attrContentType = template.getCustomAttribute("content_type");
        HttpServletResponse response = ServletActionContext.getResponse();
        // Only set a content type if nothing else (e.g. a filter or earlier result) has.
        if (response.getContentType() == null) {
            if (attrContentType != null) {
                // A template-level "content_type" custom attribute wins.
                response.setContentType(attrContentType.toString());
            } else {
                String contentType = getContentType();
                if (contentType == null) {
                    contentType = "text/html";
                }
                // Append the template's charset so the browser decodes the output correctly.
                String encoding = template.getEncoding();
                if (encoding != null) {
                    contentType = contentType + "; charset=" + encoding;
                }
                response.setContentType(contentType);
            }
        }
        return true;
    }
    /**
     * @return true write to the stream only when template processing completed successfully (false by default)
     */
    public boolean isWriteIfCompleted() {
        return writeIfCompleted;
    }
    /**
     * Writes to the stream only when template processing completed successfully
     */
    public void setWriteIfCompleted(boolean writeIfCompleted) {
        this.writeIfCompleted = writeIfCompleted;
    }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.interestrate;
import static org.testng.AssertJUnit.assertEquals;
import java.util.ArrayList;
import java.util.List;
import org.testng.annotations.Test;
import org.threeten.bp.Period;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.Annuity;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityCouponFixed;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityPaymentFixed;
import com.opengamma.analytics.financial.interestrate.bond.definition.BondFixedSecurity;
import com.opengamma.analytics.financial.interestrate.cash.derivative.Cash;
import com.opengamma.analytics.financial.interestrate.fra.derivative.ForwardRateAgreement;
import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponFixed;
import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponIborSpread;
import com.opengamma.analytics.financial.interestrate.payments.derivative.Payment;
import com.opengamma.analytics.financial.interestrate.payments.derivative.PaymentFixed;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldCurve;
import com.opengamma.analytics.math.curve.ConstantDoublesCurve;
import com.opengamma.financial.convention.businessday.BusinessDayConvention;
import com.opengamma.financial.convention.businessday.BusinessDayConventions;
import com.opengamma.financial.convention.daycount.DayCount;
import com.opengamma.financial.convention.daycount.DayCounts;
import com.opengamma.financial.convention.yield.SimpleYieldConvention;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;
/**
* @deprecated This class tests deprecated functionality.
*/
@Deprecated
@Test(groups = TestGroup.UNIT)
public class PresentValueCalculatorTest {
  // Calculator under test (singleton).
  private static final PresentValueCalculator PVC = PresentValueCalculator.getInstance();
  // Names of the flat yield curves registered in CURVES by the static initializer below.
  private static final String FIVE_PC_CURVE_NAME = "5%";
  private static final String FOUR_PC_CURVE_NAME = "4%";
  private static final String ZERO_PC_CURVE_NAME = "0%";
  private static final YieldCurveBundle CURVES;
  private static final Currency CUR = Currency.EUR;
  // Conventions for the 6M EUR Ibor index used across the tests.
  private static final Period TENOR = Period.ofMonths(6);
  private static final int SETTLEMENT_DAYS = 2;
  private static final DayCount DAY_COUNT_INDEX = DayCounts.ACT_360;
  private static final BusinessDayConvention BUSINESS_DAY = BusinessDayConventions.MODIFIED_FOLLOWING;
  private static final boolean IS_EOM = true;
  private static final IborIndex INDEX = new IborIndex(CUR, TENOR, SETTLEMENT_DAYS, DAY_COUNT_INDEX, BUSINESS_DAY, IS_EOM, "Ibor");
  // Populates CURVES with three flat (constant-rate) curves: 5%, 4% and 0%.
  static {
    YieldAndDiscountCurve curve = YieldCurve.from(ConstantDoublesCurve.from(0.05));
    CURVES = new YieldCurveBundle();
    CURVES.setCurve(FIVE_PC_CURVE_NAME, curve);
    curve = YieldCurve.from(ConstantDoublesCurve.from(0.04));
    CURVES.setCurve(FOUR_PC_CURVE_NAME, curve);
    curve = YieldCurve.from(ConstantDoublesCurve.from(0.0));
    CURVES.setCurve(ZERO_PC_CURVE_NAME, curve);
  }
@Test
public void testCash() {
final double t = 7 / 365.0;
final YieldAndDiscountCurve curve = CURVES.getCurve(FIVE_PC_CURVE_NAME);
double r = 1 / t * (1 / curve.getDiscountFactor(t) - 1);
Cash cash = new Cash(CUR, 0, t, 1, r, t, FIVE_PC_CURVE_NAME);
double pv = cash.accept(PVC, CURVES);
assertEquals(0.0, pv, 1e-12);
final double tradeTime = 2.0 / 365.0;
final double yearFrac = 5.0 / 360.0;
r = 1 / yearFrac * (curve.getDiscountFactor(tradeTime) / curve.getDiscountFactor(t) - 1);
cash = new Cash(CUR, tradeTime, t, 1, r, yearFrac, FIVE_PC_CURVE_NAME);
pv = cash.accept(PVC, CURVES);
assertEquals(0.0, pv, 1e-12);
}
  /**
   * Prices a FRA two ways: first with the fair (curve-implied) rate, which must give zero PV;
   * then with mismatched fixing/payment year fractions, checked against a hand-computed PV.
   * Note several locals (fundingCurveName, fixingTime, fixingYearFraction, paymentYearFraction)
   * are deliberately non-final and mutated between the two pricings.
   */
  @Test
  public void testFRA() {
    final double paymentTime = 0.5;
    final double fixingPeriodEnd = 7. / 12.;
    String fundingCurveName = ZERO_PC_CURVE_NAME;
    final String forwardCurveName = FIVE_PC_CURVE_NAME;
    double paymentYearFraction = fixingPeriodEnd - paymentTime;
    final double notional = 1;
    final IborIndex index = new IborIndex(CUR, Period.ofMonths(1), 2, DayCounts.ACT_365,
        BusinessDayConventions.FOLLOWING, true);
    double fixingTime = paymentTime;
    final double fixingPeriodStart = paymentTime;
    double fixingYearFraction = paymentYearFraction;
    final YieldAndDiscountCurve forwardCurve = CURVES.getCurve(FIVE_PC_CURVE_NAME);
    // Fair forward rate for the one-month period, simple-compounded (hence the *12).
    final double rate = (forwardCurve.getDiscountFactor(paymentTime) / forwardCurve.getDiscountFactor(fixingPeriodEnd) - 1.0) * 12.0;
    ForwardRateAgreement fra = new ForwardRateAgreement(CUR, paymentTime, fundingCurveName, paymentYearFraction, notional, index, fixingTime, fixingPeriodStart, fixingPeriodEnd, fixingYearFraction,
        rate, forwardCurveName);
    // Struck at the fair rate, the FRA should have zero present value.
    double pv = fra.accept(PVC, CURVES);
    assertEquals(0.0, pv, 1e-12);
    // Second case: shift the fixing time and use differing accrual conventions
    // (31/365 for fixing, 30/360 for payment) and fund off the 5% curve.
    fixingTime = paymentTime - 2. / 365.;
    fixingYearFraction = 31. / 365;
    paymentYearFraction = 30. / 360;
    fundingCurveName = FIVE_PC_CURVE_NAME;
    final double forwardRate = (forwardCurve.getDiscountFactor(fixingPeriodStart) / forwardCurve.getDiscountFactor(fixingPeriodEnd) - 1) / fixingYearFraction;
    // Expected PV: discounted, rate-locked payoff of a standard FRA settled in arrears.
    final double fv = (forwardRate - rate) * paymentYearFraction / (1 + forwardRate * paymentYearFraction);
    final double pv2 = fv * forwardCurve.getDiscountFactor(paymentTime);
    fra = new ForwardRateAgreement(CUR, paymentTime, fundingCurveName, paymentYearFraction, notional, index, fixingTime, fixingPeriodStart, fixingPeriodEnd, fixingYearFraction, rate, forwardCurveName);
    pv = fra.accept(PVC, CURVES);
    assertEquals(pv, pv2, 1e-12);
  }
// @Test
// public void testFutures() {
// final IborIndex iborIndex = new IborIndex(CUR, Period.ofMonths(3), 2, new MondayToFridayCalendar("A"), DayCounts.ACT_365,
// BusinessDayConventions.FOLLOWING, true);
// final double lastTradingTime = 1.473;
// final double fixingPeriodStartTime = 1.467;
// final double fixingPeriodEndTime = 1.75;
// final double fixingPeriodAccrualFactor = 0.267;
// final double paymentAccrualFactor = 0.25;
// final double referencePrice = 0.0; // TODO CASE - Future refactor - referencePrice = 0.0
// final YieldAndDiscountCurve curve = CURVES.getCurve(FIVE_PC_CURVE_NAME);
// final double rate = (curve.getDiscountFactor(fixingPeriodStartTime) / curve.getDiscountFactor(fixingPeriodEndTime) - 1.0) / fixingPeriodAccrualFactor;
// final double price = 1 - rate;
// final double notional = 1;
// final int quantity = 123;
// InterestRateFutureTransaction ir = new InterestRateFutureTransaction(lastTradingTime, iborIndex, fixingPeriodStartTime, fixingPeriodEndTime, fixingPeriodAccrualFactor, referencePrice, notional, paymentAccrualFactor,
// quantity, "A", FIVE_PC_CURVE_NAME, FIVE_PC_CURVE_NAME);
// double pv = ir.accept(PVC, CURVES);
// assertEquals(price * notional * paymentAccrualFactor * quantity, pv, 1e-12);
// final double deltaPrice = 0.01;
// ir = new InterestRateFutureTransaction(lastTradingTime, iborIndex, fixingPeriodStartTime, fixingPeriodEndTime, fixingPeriodAccrualFactor, deltaPrice, notional, paymentAccrualFactor, quantity, "A",
// FIVE_PC_CURVE_NAME, FIVE_PC_CURVE_NAME);
// pv = ir.accept(PVC, CURVES);
// assertEquals((price - deltaPrice) * notional * paymentAccrualFactor * quantity, pv, 1e-12);
// }
/**
 * Fixed-coupon annuity PV checks. Under the 0% curve a unit annuity of 10 payments
 * is worth exactly 10. Then, with coupons chosen to grow at the 5% curve's continuously
 * compounded rate, each discounted coupon collapses to yearFrac * rate * notional, so the
 * total PV is n * yearFrac * rate * notional. Payer and receiver legs must be exact negatives.
 */
@Test
public void testFixedCouponAnnuity() {
AnnuityCouponFixed annuityReceiver = new AnnuityCouponFixed(CUR, new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 1.0, ZERO_PC_CURVE_NAME, false);
double pv = annuityReceiver.accept(PVC, CURVES);
assertEquals(10.0, pv, 1e-12);
final int n = 15;
final double alpha = 0.49;
final double yearFrac = 0.51;
final double[] paymentTimes = new double[n];
final double[] coupons = new double[n];
final double[] yearFracs = new double[n];
final YieldAndDiscountCurve curve = CURVES.getCurve(FIVE_PC_CURVE_NAME);
final double rate = curve.getInterestRate(0.0);
for (int i = 0; i < n; i++) {
paymentTimes[i] = (i + 1) * alpha;
// Coupon grows to exactly cancel the discount factor exp(-(i+1)*rate*alpha).
coupons[i] = Math.exp((i + 1) * rate * alpha);
yearFracs[i] = yearFrac;
}
annuityReceiver = new AnnuityCouponFixed(CUR, paymentTimes, Math.PI, rate, yearFracs, ZERO_PC_CURVE_NAME, false);
pv = annuityReceiver.accept(PVC, CURVES);
assertEquals(n * yearFrac * rate * Math.PI, pv, 1e-12);
// The payer leg is the mirror image of the receiver leg.
final AnnuityCouponFixed annuityPayer = new AnnuityCouponFixed(CUR, paymentTimes, Math.PI, rate, yearFracs, ZERO_PC_CURVE_NAME, true);
assertEquals(pv, -annuityPayer.accept(PVC, CURVES), 1e-12);
}
/**
 * A bond whose coupon is set to the par rate of the 5% curve (coupon chosen so each
 * period's accrued interest exactly offsets discounting) must price at par, i.e. PV = 1.
 */
@Test
public void testBond() {
final int n = 20;
final double tau = 0.5;
final double yearFrac = 180 / 365.0;
final YieldAndDiscountCurve curve = CURVES.getCurve(FIVE_PC_CURVE_NAME);
// Par coupon: grows a unit notional by exactly the one-period discount factor.
final double coupon = (1.0 / curve.getDiscountFactor(tau) - 1.0) / yearFrac;
final CouponFixed[] coupons = new CouponFixed[n];
for (int i = 0; i < n; i++) {
coupons[i] = new CouponFixed(CUR, tau * (i + 1), FIVE_PC_CURVE_NAME, yearFrac, coupon);
}
final AnnuityPaymentFixed nominal = new AnnuityPaymentFixed(new PaymentFixed[] {new PaymentFixed(CUR, tau * n, 1, FIVE_PC_CURVE_NAME)});
final BondFixedSecurity bond = new BondFixedSecurity(nominal, new AnnuityCouponFixed(coupons), 0, 0, 0.5, SimpleYieldConvention.TRUE, 2, FIVE_PC_CURVE_NAME, "S");
final double pv = bond.accept(PVC, CURVES);
assertEquals(1.0, pv, 1e-12);
}
/**
 * A heterogeneous annuity (fixed payment + fixed coupon + ibor coupon) must price to the
 * sum of its parts; the expected value is accumulated payment-by-payment as the list is built.
 */
@Test
public void testGenericAnnuity() {
final double time = 3.4;
final double amount = 34.3;
final double coupon = 0.05;
final double yearFrac = 0.5;
final double resetTime = 2.9;
final double notional = 56;
final List<Payment> list = new ArrayList<>();
double expected = 0.0;
// 1) Plain fixed payment: amount discounted to today.
Payment temp = new PaymentFixed(CUR, time, amount, FIVE_PC_CURVE_NAME);
expected += amount * CURVES.getCurve(FIVE_PC_CURVE_NAME).getDiscountFactor(time);
list.add(temp);
// 2) Fixed coupon: notional * accrual * rate, discounted.
temp = new CouponFixed(CUR, time, FIVE_PC_CURVE_NAME, yearFrac, notional, coupon);
expected += notional * yearFrac * coupon * CURVES.getCurve(FIVE_PC_CURVE_NAME).getDiscountFactor(time);
list.add(temp);
// 3) Ibor coupon funded at 0%: PV reduces to the forward accrual over [resetTime, time].
temp = new CouponIborSpread(CUR, time, ZERO_PC_CURVE_NAME, yearFrac, notional, resetTime, INDEX, resetTime, time, yearFrac, 0.0, FIVE_PC_CURVE_NAME);
expected += notional * (CURVES.getCurve(FIVE_PC_CURVE_NAME).getDiscountFactor(resetTime) / CURVES.getCurve(FIVE_PC_CURVE_NAME).getDiscountFactor(time) - 1);
list.add(temp);
final Annuity<Payment> annuity = new Annuity<>(list, Payment.class, true);
final double pv = annuity.accept(PVC, CURVES);
assertEquals(expected, pv, 1e-12);
}
/**
 * PV of a single fixed payment is simply the amount times the discount factor at payment time.
 */
@Test
public void testFixedPayment() {
final double time = 1.23;
final double amount = 4345.3;
final PaymentFixed payment = new PaymentFixed(CUR, time, amount, FIVE_PC_CURVE_NAME);
final double expected = amount * CURVES.getCurve(FIVE_PC_CURVE_NAME).getDiscountFactor(time);
final double pv = payment.accept(PVC, CURVES);
assertEquals(expected, pv, 1e-8);
}
/**
 * PV of a fixed coupon funded on the 0% curve (discount factor 1) is just
 * notional * accrual * coupon with no discounting.
 */
@Test
public void testFixedCouponPayment() {
final double time = 1.23;
final double yearFrac = 0.56;
final double coupon = 0.07;
final double notional = 1000;
final CouponFixed payment = new CouponFixed(CUR, time, ZERO_PC_CURVE_NAME, yearFrac, notional, coupon);
final double expected = notional * yearFrac * coupon;
final double pv = payment.accept(PVC, CURVES);
assertEquals(expected, pv, 1e-8);
}
/**
 * Ibor coupon with spread, priced two ways. With a flat-zero forward curve the forward is 0,
 * so the PV is just the discounted spread leg. With a 5% forward curve and 0% funding,
 * the PV is accrual * (forward + spread) with the forward implied by exp growth.
 */
@Test
public void forwardLiborPayment() {
final double time = 2.45;
final double resetTime = 2.0;
final double maturity = 2.5;
final double paymentYF = 0.48;
final double forwardYF = 0.5;
final double spread = 0.04;
final double notional = 4.53;
// Forward curve is flat zero -> forward rate 0, only the spread contributes.
CouponIborSpread payment = new CouponIborSpread(CUR, time, FIVE_PC_CURVE_NAME, paymentYF, notional, resetTime, INDEX, resetTime, maturity, forwardYF, spread, ZERO_PC_CURVE_NAME);
double expected = notional * paymentYF * spread * CURVES.getCurve(FIVE_PC_CURVE_NAME).getDiscountFactor(time);
double pv = payment.accept(PVC, CURVES);
assertEquals(expected, pv, 1e-8);
// Swap the curves: 5% forward curve, 0% funding (no discounting of the payoff).
payment = new CouponIborSpread(CUR, time, ZERO_PC_CURVE_NAME, paymentYF, 1.0, resetTime, INDEX, resetTime, maturity, forwardYF, spread, FIVE_PC_CURVE_NAME);
final double forward = (Math.exp(0.05 * (maturity - resetTime)) - 1) / forwardYF;
expected = paymentYF * (forward + spread);
pv = payment.accept(PVC, CURVES);
assertEquals(expected, pv, 1e-8);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.apache.storm.daemon.worker;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.storm.Config;
import org.apache.storm.Constants;
import org.apache.storm.StormTimer;
import org.apache.storm.cluster.IStateStorage;
import org.apache.storm.cluster.IStormClusterState;
import org.apache.storm.cluster.VersionedData;
import org.apache.storm.daemon.StormCommon;
import org.apache.storm.daemon.supervisor.AdvancedFSOps;
import org.apache.storm.executor.IRunningExecutor;
import org.apache.storm.generated.Assignment;
import org.apache.storm.generated.DebugOptions;
import org.apache.storm.generated.Grouping;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.NodeInfo;
import org.apache.storm.generated.StormBase;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.generated.StreamInfo;
import org.apache.storm.generated.TopologyStatus;
import org.apache.storm.grouping.Load;
import org.apache.storm.grouping.LoadMapping;
import org.apache.storm.hooks.IWorkerHook;
import org.apache.storm.messaging.ConnectionWithStatus;
import org.apache.storm.messaging.DeserializingConnectionCallback;
import org.apache.storm.messaging.IConnection;
import org.apache.storm.messaging.IContext;
import org.apache.storm.messaging.TransportFactory;
import org.apache.storm.messaging.netty.BackPressureStatus;
import org.apache.storm.policy.IWaitStrategy;
import org.apache.storm.security.auth.IAutoCredentials;
import org.apache.storm.serialization.ITupleSerializer;
import org.apache.storm.serialization.KryoTupleSerializer;
import org.apache.storm.shade.com.google.common.collect.ImmutableMap;
import org.apache.storm.shade.com.google.common.collect.Sets;
import org.apache.storm.task.WorkerTopologyContext;
import org.apache.storm.tuple.AddressedTuple;
import org.apache.storm.tuple.Fields;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.JCQueue;
import org.apache.storm.utils.ObjectReader;
import org.apache.storm.utils.SupervisorClient;
import org.apache.storm.utils.ThriftTopologyUtils;
import org.apache.storm.utils.Utils;
import org.apache.storm.utils.Utils.SmartThread;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Shared, per-worker runtime state: topology/assignment metadata, messaging connections,
 * receive queues for local executors, timers, and back-pressure bookkeeping. One instance
 * exists per worker process and is shared by all executors running in it.
 */
public class WorkerState {
private static final Logger LOG = LoggerFactory.getLogger(WorkerState.class);
// Minimum interval between outgoing load-metric broadcasts.
private static final long LOAD_REFRESH_INTERVAL_MS = 5000L;
// NOTE(review): plain static long mutated from dropMessage(); not atomic. Appears to be
// updated only from the Netty receive path — confirm single-threaded access or make AtomicLong.
private static long dropCount = 0;
final Map<String, Object> conf;
final IContext mqContext;
final IConnection receiver;
final String topologyId;
final String assignmentId;
final int supervisorPort;
final int port;
final String workerId;
final IStateStorage stateStorage;
final IStormClusterState stormClusterState;
// when worker bootup, worker will start to setup initial connections to
// other workers. When all connection is ready, we will enable this flag
// and spout and bolt will be activated.
// used in worker only, keep it as atomic
final AtomicBoolean isWorkerActive;
final AtomicBoolean isTopologyActive;
final AtomicReference<Map<String, DebugOptions>> stormComponentToDebug;
// local executors and localTaskIds running in this worker
final Set<List<Long>> localExecutors;
final ArrayList<Integer> localTaskIds;
// [taskId]-> JCQueue : initialized after local executors are initialized
final Map<Integer, JCQueue> localReceiveQueues = new HashMap<>();
final Map<String, Object> topologyConf;
final StormTopology topology;
// Topology with system components (acker, metrics, etc.) injected.
final StormTopology systemTopology;
final Map<Integer, String> taskToComponent;
final Map<String, Map<String, Fields>> componentToStreamToFields;
final Map<String, List<Integer>> componentToSortedTasks;
final ConcurrentMap<String, Long> blobToLastKnownVersion;
// Guards the swap of cachedTaskToNodePort against readers resolving destinations.
final ReentrantReadWriteLock endpointSocketLock;
final AtomicReference<Map<Integer, NodeInfo>> cachedTaskToNodePort;
final AtomicReference<Map<NodeInfo, IConnection>> cachedNodeToPortSocket;
// executor id is in form [start_task_id end_task_id]
final Map<List<Long>, JCQueue> executorReceiveQueueMap;
final Map<Integer, JCQueue> taskToExecutorQueue;
final Runnable suicideCallback;
final Utils.UptimeComputer uptime;
final Map<String, Object> defaultSharedResources;
final Map<String, Object> userSharedResources;
final LoadMapping loadMapping;
final AtomicReference<Map<String, VersionedData<Assignment>>> assignmentVersions;
// Timers; each halts the whole process on an unhandled event error (see mkHaltingTimer).
final StormTimer heartbeatTimer = mkHaltingTimer("heartbeat-timer");
final StormTimer refreshLoadTimer = mkHaltingTimer("refresh-load-timer");
final StormTimer refreshConnectionsTimer = mkHaltingTimer("refresh-connections-timer");
final StormTimer refreshCredentialsTimer = mkHaltingTimer("refresh-credentials-timer");
final StormTimer checkForUpdatedBlobsTimer = mkHaltingTimer("check-for-updated-blobs-timer");
final StormTimer resetLogLevelsTimer = mkHaltingTimer("reset-log-levels-timer");
final StormTimer refreshActiveTimer = mkHaltingTimer("refresh-active-timer");
final StormTimer executorHeartbeatTimer = mkHaltingTimer("executor-heartbeat-timer");
final StormTimer flushTupleTimer = mkHaltingTimer("flush-tuple-timer");
final StormTimer userTimer = mkHaltingTimer("user-timer");
final StormTimer backPressureCheckTimer = mkHaltingTimer("backpressure-check-timer");
private final WorkerTransfer workerTransfer;
private final BackPressureTracker bpTracker;
private final List<IWorkerHook> deserializedWorkerHooks;
// global variables only used internally in class
private final Set<Integer> outboundTasks;
private final AtomicLong nextLoadUpdate = new AtomicLong(0);
private final boolean trySerializeLocal;
private final Collection<IAutoCredentials> autoCredentials;
/**
 * Builds the full worker state: reads this worker's executor assignment, binds the
 * receive socket, constructs per-executor receive queues, derives task/component maps
 * from the (system-augmented) topology, and wires transfer/back-pressure machinery.
 *
 * @throws IOException if the supervisor-local topology cannot be read
 * @throws InvalidTopologyException if the topology fails system-topology validation
 */
public WorkerState(Map<String, Object> conf, IContext mqContext, String topologyId, String assignmentId,
int supervisorPort, int port, String workerId, Map<String, Object> topologyConf, IStateStorage stateStorage,
IStormClusterState stormClusterState, Collection<IAutoCredentials> autoCredentials) throws IOException,
InvalidTopologyException {
this.autoCredentials = autoCredentials;
this.conf = conf;
// Which executors (and hence tasks) this worker hosts, from the current assignment.
this.localExecutors = new HashSet<>(readWorkerExecutors(stormClusterState, topologyId, assignmentId, port));
this.mqContext = (null != mqContext) ? mqContext : TransportFactory.makeContext(topologyConf);
this.receiver = this.mqContext.bind(topologyId, port);
this.topologyId = topologyId;
this.assignmentId = assignmentId;
this.supervisorPort = supervisorPort;
this.port = port;
this.workerId = workerId;
this.stateStorage = stateStorage;
this.stormClusterState = stormClusterState;
this.isWorkerActive = new AtomicBoolean(false);
this.isTopologyActive = new AtomicBoolean(false);
this.stormComponentToDebug = new AtomicReference<>();
this.executorReceiveQueueMap = mkReceiveQueueMap(topologyConf, localExecutors);
this.localTaskIds = new ArrayList<>();
this.taskToExecutorQueue = new HashMap<>();
this.blobToLastKnownVersion = new ConcurrentHashMap<>();
// Every task of an executor shares that executor's receive queue.
for (Map.Entry<List<Long>, JCQueue> entry : executorReceiveQueueMap.entrySet()) {
List<Integer> taskIds = StormCommon.executorIdToTasks(entry.getKey());
for (Integer taskId : taskIds) {
this.taskToExecutorQueue.put(taskId, entry.getValue());
}
this.localTaskIds.addAll(taskIds);
}
Collections.sort(localTaskIds);
this.topologyConf = topologyConf;
this.topology = ConfigUtils.readSupervisorTopology(conf, topologyId, AdvancedFSOps.make(conf));
this.systemTopology = StormCommon.systemTopology(topologyConf, topology);
this.taskToComponent = StormCommon.stormTaskInfo(topology, topologyConf);
// component -> (stream -> declared output fields), from the system topology.
this.componentToStreamToFields = new HashMap<>();
for (String c : ThriftTopologyUtils.getComponentIds(systemTopology)) {
Map<String, Fields> streamToFields = new HashMap<>();
for (Map.Entry<String, StreamInfo> stream :
ThriftTopologyUtils.getComponentCommon(systemTopology, c).get_streams().entrySet()) {
streamToFields.put(stream.getKey(), new Fields(stream.getValue().get_output_fields()));
}
componentToStreamToFields.put(c, streamToFields);
}
this.componentToSortedTasks = Utils.reverseMap(taskToComponent);
this.componentToSortedTasks.values().forEach(Collections::sort);
this.endpointSocketLock = new ReentrantReadWriteLock();
this.cachedNodeToPortSocket = new AtomicReference<>(new HashMap<>());
this.cachedTaskToNodePort = new AtomicReference<>(new HashMap<>());
this.suicideCallback = Utils.mkSuicideFn();
this.uptime = Utils.makeUptimeComputer();
this.defaultSharedResources = makeDefaultResources();
this.userSharedResources = makeUserResources();
this.loadMapping = new LoadMapping();
this.assignmentVersions = new AtomicReference<>(new HashMap<>());
this.outboundTasks = workerOutboundTasks();
this.trySerializeLocal = topologyConf.containsKey(Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE)
&& (Boolean) topologyConf.get(Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE);
if (trySerializeLocal) {
LOG.warn("WILL TRY TO SERIALIZE ALL TUPLES (Turn off {} for production", Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE);
}
int maxTaskId = getMaxTaskId(componentToSortedTasks);
this.workerTransfer = new WorkerTransfer(this, topologyConf, maxTaskId);
this.bpTracker = new BackPressureTracker(workerId, taskToExecutorQueue);
this.deserializedWorkerHooks = deserializeWorkerHooks();
}
/** Returns the queue's load as the occupied fraction of its capacity, in [0, 1]. */
private static double getQueueLoad(JCQueue q) {
    JCQueue.QueueMetrics metrics = q.getMetrics();
    double population = metrics.population();
    return population / metrics.capacity();
}
/**
 * True when the connection is usable: either it reports {@code Ready} status, or it is a
 * connection type that does not report status at all (assumed ready).
 */
public static boolean isConnectionReady(IConnection connection) {
    if (connection instanceof ConnectionWithStatus) {
        return ((ConnectionWithStatus) connection).status() == ConnectionWithStatus.Status.Ready;
    }
    return true;
}
/**
 * Returns the largest task id across all components, or -1 when there are no tasks.
 * Replaces the manual nested max loop (which called {@code Optional.get()} on the stream
 * max) with a single flattened primitive-stream pipeline; empty task lists contribute
 * nothing, and an entirely empty map yields the same -1 sentinel as before.
 */
private static int getMaxTaskId(Map<String, List<Integer>> componentToSortedTasks) {
    return componentToSortedTasks.values().stream()
        .flatMap(List::stream)
        .mapToInt(Integer::intValue)
        .max()
        .orElse(-1);
}
// ---------------------------------------------------------------------------
// Simple accessors. These expose internal state directly (no defensive copies);
// callers share the live collections/references held by this worker.
// ---------------------------------------------------------------------------
public List<IWorkerHook> getDeserializedWorkerHooks() {
return deserializedWorkerHooks;
}
public Map<String, Object> getConf() {
return conf;
}
public IConnection getReceiver() {
return receiver;
}
public String getTopologyId() {
return topologyId;
}
public int getPort() {
return port;
}
public String getWorkerId() {
return workerId;
}
public IStateStorage getStateStorage() {
return stateStorage;
}
public AtomicBoolean getIsTopologyActive() {
return isTopologyActive;
}
public AtomicReference<Map<String, DebugOptions>> getStormComponentToDebug() {
return stormComponentToDebug;
}
public Set<List<Long>> getLocalExecutors() {
return localExecutors;
}
public List<Integer> getLocalTaskIds() {
return localTaskIds;
}
public Map<Integer, JCQueue> getLocalReceiveQueues() {
return localReceiveQueues;
}
public Map<String, Object> getTopologyConf() {
return topologyConf;
}
public StormTopology getTopology() {
return topology;
}
public StormTopology getSystemTopology() {
return systemTopology;
}
public Map<Integer, String> getTaskToComponent() {
return taskToComponent;
}
public Map<String, Map<String, Fields>> getComponentToStreamToFields() {
return componentToStreamToFields;
}
public Map<String, List<Integer>> getComponentToSortedTasks() {
return componentToSortedTasks;
}
public Map<String, Long> getBlobToLastKnownVersion() {
return blobToLastKnownVersion;
}
public AtomicReference<Map<NodeInfo, IConnection>> getCachedNodeToPortSocket() {
return cachedNodeToPortSocket;
}
public Map<List<Long>, JCQueue> getExecutorReceiveQueueMap() {
return executorReceiveQueueMap;
}
public Runnable getSuicideCallback() {
return suicideCallback;
}
public Utils.UptimeComputer getUptime() {
return uptime;
}
public Map<String, Object> getDefaultSharedResources() {
return defaultSharedResources;
}
public Map<String, Object> getUserSharedResources() {
return userSharedResources;
}
public LoadMapping getLoadMapping() {
return loadMapping;
}
public AtomicReference<Map<String, VersionedData<Assignment>>> getAssignmentVersions() {
return assignmentVersions;
}
public StormTimer getUserTimer() {
return userTimer;
}
/**
 * Timer-driven entry point: refreshes connections now and reschedules itself on the
 * refresh-connections timer. Any checked exception is rethrown as a RuntimeException.
 */
public void refreshConnections() {
try {
refreshConnections(() -> refreshConnectionsTimer.schedule(0, this::refreshConnections));
} catch (Exception e) {
throw Utils.wrapInRuntime(e);
}
}
/** Creates (but does not start) the thread that drains this worker's outbound transfer queue. */
public SmartThread makeTransferThread() {
return workerTransfer.makeTransferThread();
}
/**
 * Reconciles this worker's outbound connections with the current assignment:
 * opens connections to newly needed remote (node, port) endpoints, atomically swaps the
 * task-to-endpoint routing map under the write lock, then closes and removes connections
 * that are no longer needed (close happens before removal so no route points at a dead
 * connection while it is still in the map).
 *
 * NOTE(review): the {@code callback} parameter is never used in this body — confirm
 * whether it was meant to be passed to the assignment lookup as a watch callback.
 */
public void refreshConnections(Runnable callback) throws Exception {
Assignment assignment = getLocalAssignment(conf, stormClusterState, topologyId);
Set<NodeInfo> neededConnections = new HashSet<>();
Map<Integer, NodeInfo> newTaskToNodePort = new HashMap<>();
if (null != assignment) {
Map<Integer, NodeInfo> taskToNodePort = StormCommon.taskToNodeport(assignment.get_executor_node_port());
for (Map.Entry<Integer, NodeInfo> taskToNodePortEntry : taskToNodePort.entrySet()) {
Integer task = taskToNodePortEntry.getKey();
// Only tasks we actually send to matter; local tasks need no socket.
if (outboundTasks.contains(task)) {
newTaskToNodePort.put(task, taskToNodePortEntry.getValue());
if (!localTaskIds.contains(task)) {
neededConnections.add(taskToNodePortEntry.getValue());
}
}
}
}
Set<NodeInfo> currentConnections = cachedNodeToPortSocket.get().keySet();
Set<NodeInfo> newConnections = Sets.difference(neededConnections, currentConnections);
Set<NodeInfo> removeConnections = Sets.difference(currentConnections, neededConnections);
// Add new connections atomically
cachedNodeToPortSocket.getAndUpdate(prev -> {
Map<NodeInfo, IConnection> next = new HashMap<>(prev);
for (NodeInfo nodeInfo : newConnections) {
next.put(nodeInfo,
mqContext.connect(
topologyId,
assignment.get_node_host().get(nodeInfo.get_node()), // Host
nodeInfo.get_port().iterator().next().intValue(), // Port
workerTransfer.getRemoteBackPressureStatus()));
}
return next;
});
// Publish the new routing table; writers exclude readers resolving destinations.
try {
endpointSocketLock.writeLock().lock();
cachedTaskToNodePort.set(newTaskToNodePort);
} finally {
endpointSocketLock.writeLock().unlock();
}
for (NodeInfo nodeInfo : removeConnections) {
cachedNodeToPortSocket.get().get(nodeInfo).close();
}
// Remove old connections atomically
cachedNodeToPortSocket.getAndUpdate(prev -> {
Map<NodeInfo, IConnection> next = new HashMap<>(prev);
removeConnections.forEach(next::remove);
return next;
});
}
/** Timer-driven entry point: refreshes topology-active state and reschedules itself. */
public void refreshStormActive() {
refreshStormActive(() -> refreshActiveTimer.schedule(0, this::refreshStormActive));
}
/**
 * Re-reads the StormBase for this topology (registering {@code callback} as the watch),
 * recomputes whether the topology is active (base exists, status ACTIVE, and this worker's
 * connections are up), and refreshes per-component debug options with defaults filled in.
 */
public void refreshStormActive(Runnable callback) {
StormBase base = stormClusterState.stormBase(topologyId, callback);
isTopologyActive.set(
(null != base)
&& (base.get_status() == TopologyStatus.ACTIVE) && (isWorkerActive.get()));
if (null != base) {
Map<String, DebugOptions> debugOptionsMap = new HashMap<>(base.get_component_debug());
for (DebugOptions debugOptions : debugOptionsMap.values()) {
// Fill in defaults for unset debug fields: 10% sampling, disabled.
if (!debugOptions.is_set_samplingpct()) {
debugOptions.set_samplingpct(10);
}
if (!debugOptions.is_set_enable()) {
debugOptions.set_enable(false);
}
}
stormComponentToDebug.set(debugOptionsMap);
LOG.debug("Events debug options {}", stormComponentToDebug.get());
}
}
/**
 * Refreshes the load map used by load-aware groupings: local load is each local executor's
 * receive-queue occupancy; remote load is fetched from the open outbound connections.
 * At most once per {@link #LOAD_REFRESH_INTERVAL_MS} the local load is broadcast to peers.
 *
 * Changes vs. previous version: {@code now} is a primitive {@code long} (the boxed
 * {@code Long} added pointless autoboxing), and the redundant {@code .stream().forEach}
 * is replaced by {@code Collection.forEach}.
 */
public void refreshLoad(List<IRunningExecutor> execs) {
    Set<Integer> remoteTasks = Sets.difference(new HashSet<>(outboundTasks), new HashSet<>(localTaskIds));
    long now = System.currentTimeMillis();
    Map<Integer, Double> localLoad = new HashMap<>();
    for (IRunningExecutor exec : execs) {
        double receiveLoad = getQueueLoad(exec.getReceiveQueue());
        // Executor id is [startTask, endTask]; keyed by its start task.
        localLoad.put(exec.getExecutorId().get(0).intValue(), receiveLoad);
    }
    Map<Integer, Load> remoteLoad = new HashMap<>();
    cachedNodeToPortSocket.get().values().forEach(conn -> remoteLoad.putAll(conn.getLoad(remoteTasks)));
    loadMapping.setLocal(localLoad);
    loadMapping.setRemote(remoteLoad);
    // Throttle outgoing load metrics to one broadcast per refresh interval.
    if (now > nextLoadUpdate.get()) {
        receiver.sendLoadMetrics(localLoad);
        nextLoadUpdate.set(now + LOAD_REFRESH_INTERVAL_MS);
    }
}
/**
 * Checks whether any tasks previously under back pressure have drained. If the
 * back-pressure picture changed, broadcasts the fresh status to remote workers.
 */
public void refreshBackPressureStatus() {
LOG.debug("Checking for change in Backpressure status on worker's tasks");
boolean bpSituationChanged = bpTracker.refreshBpTaskList();
if (bpSituationChanged) {
BackPressureStatus bpStatus = bpTracker.getCurrStatus();
receiver.sendBackPressureStatus(bpStatus);
}
}
/**
 * Waits (by polling once per second on the refresh-active timer) until every outbound
 * connection is ready, then flips {@code isWorkerActive} so spouts/bolts may start.
 * Invoked during worker boot.
 *
 * Changes vs. previous version: {@code true} instead of {@code Boolean.TRUE} for the
 * atomic set (avoids an unneeded boxed constant), method reference instead of a
 * wrapping lambda for the reschedule, and a grammatical javadoc.
 */
public void activateWorkerWhenAllConnectionsReady() {
    int delaySecs = 0;
    int recurSecs = 1;
    refreshActiveTimer.schedule(delaySecs,
        () -> {
            if (areAllConnectionsReady()) {
                LOG.info("All connections are ready for worker {}:{} with id {}", assignmentId, port, workerId);
                isWorkerActive.set(true);
            } else {
                // Not ready yet: poll again in recurSecs.
                refreshActiveTimer.schedule(recurSecs, this::activateWorkerWhenAllConnectionsReady, false, 0);
            }
        }
    );
}
/**
 * Registers the receive-side callbacks on this worker's server connection:
 * a deserializing callback that feeds incoming batches to {@link #transferLocalBatch},
 * and a new-connection hook that replies with the current back-pressure status so
 * freshly connected peers start with an accurate picture.
 */
public void registerCallbacks() {
LOG.info("Registering IConnectionCallbacks for {}:{}", assignmentId, port);
receiver.registerRecv(new DeserializingConnectionCallback(topologyConf,
getWorkerTopologyContext(),
this::transferLocalBatch));
// Send curr BackPressure status to new clients
receiver.registerNewConnectionResponse(
() -> {
BackPressureStatus bpStatus = bpTracker.getCurrStatus();
LOG.info("Sending BackPressure status to new client. BPStatus: {}", bpStatus);
return bpStatus;
}
);
}
/*
 * Thin delegates to WorkerTransfer for the remote (inter-worker) send path.
 */
/* Not a Blocking call. If cannot emit, will add 'tuple' to pendingEmits and return 'false'. 'pendingEmits' can be null */
public boolean tryTransferRemote(AddressedTuple tuple, Queue<AddressedTuple> pendingEmits, ITupleSerializer serializer) {
return workerTransfer.tryTransferRemote(tuple, pendingEmits, serializer);
}
/** Blocking flush of the outbound transfer queue. */
public void flushRemotes() throws InterruptedException {
workerTransfer.flushRemotes();
}
/** Non-blocking flush attempt; returns false if the flush could not complete. */
public boolean tryFlushRemotes() {
return workerTransfer.tryFlushRemotes();
}
// Receives msgs from remote workers and feeds them to local executors. If any receiving local executor is under Back Pressure,
// informs other workers about back pressure situation. Runs in the NettyWorker thread.
private void transferLocalBatch(ArrayList<AddressedTuple> tupleBatch) {
// overflowQ size at the time the last BPStatus was sent; used to rate-limit re-sends.
int lastOverflowCount = 0;
for (int i = 0; i < tupleBatch.size(); i++) {
AddressedTuple tuple = tupleBatch.get(i);
JCQueue queue = taskToExecutorQueue.get(tuple.dest);
// 1- try adding to main queue if its overflow is not empty
if (queue.isEmptyOverflow()) {
if (queue.tryPublish(tuple)) {
continue;
}
}
// 2- BP detected (i.e MainQ is full). So try adding to overflow
int currOverflowCount = queue.getOverflowCount();
if (bpTracker.recordBackPressure(tuple.dest)) {
// First time this task tripped BP: broadcast the new status immediately.
receiver.sendBackPressureStatus(bpTracker.getCurrStatus());
lastOverflowCount = currOverflowCount;
} else {
// Task was already under BP: only re-send after significant overflow growth.
if (currOverflowCount - lastOverflowCount > 10000) {
// resend BP status, in case prev notification was missed or reordered
BackPressureStatus bpStatus = bpTracker.getCurrStatus();
receiver.sendBackPressureStatus(bpStatus);
lastOverflowCount = currOverflowCount;
LOG.debug("Re-sent BackPressure Status. OverflowCount = {}, BP Status ID = {}. ", currOverflowCount, bpStatus.id);
}
}
// Overflow queue full too: the tuple is dropped (and the drop recorded).
if (!queue.tryPublishToOverflow(tuple)) {
dropMessage(tuple, queue);
}
}
}
/**
 * Records and logs a dropped tuple when a receive queue's overflow limit is exceeded.
 * NOTE(review): {@code dropCount} is a non-atomic static; safe only if all drops happen
 * on the single Netty receive thread — confirm.
 */
private void dropMessage(AddressedTuple tuple, JCQueue queue) {
++dropCount;
queue.recordMsgDrop();
LOG.warn(
"Dropping message as overflow threshold has reached for Q = {}. OverflowCount = {}. Total Drop Count= {}, Dropped Message : {}",
queue.getName(), queue.getOverflowCount(), dropCount, tuple);
}
/**
 * Test-only hook: when {@code topology.testing.always.try.serialize} is set, force-serializes
 * every locally routed tuple so serialization bugs surface even without remote transfer.
 */
public void checkSerialize(KryoTupleSerializer serializer, AddressedTuple tuple) {
if (trySerializeLocal) {
serializer.serialize(tuple.getTuple());
}
}
/**
 * Builds a fresh WorkerTopologyContext from this worker's state (topology maps, resource
 * dirs, shared resources, cached routing). IOExceptions from resolving the code/pid
 * directories are rethrown unchecked.
 */
public WorkerTopologyContext getWorkerTopologyContext() {
try {
String codeDir = ConfigUtils.supervisorStormResourcesPath(ConfigUtils.supervisorStormDistRoot(conf, topologyId));
String pidDir = ConfigUtils.workerPidsRoot(conf, topologyId);
return new WorkerTopologyContext(systemTopology, topologyConf, taskToComponent, componentToSortedTasks,
componentToStreamToFields, topologyId, codeDir, pidDir, port, localTaskIds,
defaultSharedResources,
userSharedResources, cachedTaskToNodePort, assignmentId);
} catch (IOException e) {
throw Utils.wrapInRuntime(e);
}
}
/**
 * Inflates the worker hooks shipped inside the topology as serialized byte blobs.
 * Returns an empty list when the topology declares no hooks.
 */
private List<IWorkerHook> deserializeWorkerHooks() {
    List<IWorkerHook> hooks = new ArrayList<>();
    if (!topology.is_set_worker_hooks()) {
        return hooks;
    }
    for (ByteBuffer serialized : topology.get_worker_hooks()) {
        hooks.add(Utils.javaDeserialize(Utils.toByteArray(serialized), IWorkerHook.class));
    }
    return hooks;
}
/** Invokes {@code start} on every deserialized worker hook with a fresh worker context. */
public void runWorkerStartHooks() {
WorkerTopologyContext workerContext = getWorkerTopologyContext();
for (IWorkerHook hook : getDeserializedWorkerHooks()) {
hook.start(topologyConf, workerContext);
}
}
/** Invokes {@code shutdown} on every deserialized worker hook. */
public void runWorkerShutdownHooks() {
for (IWorkerHook hook : getDeserializedWorkerHooks()) {
hook.shutdown();
}
}
/** Shuts down the default shared executor service (forcibly, via shutdownNow). */
public void closeResources() {
LOG.info("Shutting down default resources");
((ExecutorService) defaultSharedResources.get(WorkerTopologyContext.SHARED_EXECUTOR)).shutdownNow();
LOG.info("Shut down default resources");
}
/**
 * True when every cached outbound connection reports ready (vacuously true for none).
 * Uses {@code allMatch}, which short-circuits on the first unready connection and is
 * true for an empty stream — identical semantics to the previous
 * {@code reduce((l, r) -> l && r).orElse(true)} but simpler and without Optional.
 */
public boolean areAllConnectionsReady() {
    return cachedNodeToPortSocket.get().values().stream()
        .allMatch(WorkerState::isConnectionReady);
}
/** Returns the auto-credential providers supplied at construction time. */
public Collection<IAutoCredentials> getAutoCredentials() {
return this.autoCredentials;
}
/**
 * Returns the executor ids assigned to this worker (matching this node's assignment id and
 * port), always including the system executor id first.
 */
private List<List<Long>> readWorkerExecutors(IStormClusterState stormClusterState, String topologyId, String assignmentId,
int port) {
LOG.info("Reading assignments");
List<List<Long>> executorsAssignedToThisWorker = new ArrayList<>();
// The system executor (acker/metrics plumbing) always runs in every worker.
executorsAssignedToThisWorker.add(Constants.SYSTEM_EXECUTOR_ID);
Map<List<Long>, NodeInfo> executorToNodePort = getLocalAssignment(conf, stormClusterState, topologyId).get_executor_node_port();
for (Map.Entry<List<Long>, NodeInfo> entry : executorToNodePort.entrySet()) {
NodeInfo nodeInfo = entry.getValue();
if (nodeInfo.get_node().equals(assignmentId) && nodeInfo.get_port().iterator().next() == port) {
executorsAssignedToThisWorker.add(entry.getKey());
}
}
return executorsAssignedToThisWorker;
}
/**
 * Fetches this topology's assignment. In distributed mode it asks the local supervisor
 * first (cheaper, fresher) and falls back to ZooKeeper-backed cluster state on any error;
 * in local mode it reads cluster state directly.
 */
private Assignment getLocalAssignment(Map<String, Object> conf, IStormClusterState stormClusterState, String topologyId) {
if (!ConfigUtils.isLocalMode(conf)) {
try (SupervisorClient supervisorClient = SupervisorClient.getConfiguredClient(conf, Utils.hostname(),
supervisorPort)) {
Assignment assignment = supervisorClient.getClient().getLocalAssignmentForStorm(topologyId);
return assignment;
} catch (Throwable tr1) {
//if any error/exception thrown, fetch it from zookeeper
return stormClusterState.remoteAssignmentInfo(topologyId, null);
}
} else {
return stormClusterState.remoteAssignmentInfo(topologyId, null);
}
}
/**
 * Builds one receive JCQueue per local executor, sized from topology config.
 * Rejects a producer batch size larger than half the receive buffer (the batch could
 * never be published in one go).
 *
 * Change vs. previous version: removed the unused local {@code int port = this.getPort();}
 * that shadowed the worker's port field inside the loop without ever being read.
 */
private Map<List<Long>, JCQueue> mkReceiveQueueMap(Map<String, Object> topologyConf, Set<List<Long>> executors) {
    Integer recvQueueSize = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE));
    Integer recvBatchSize = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_PRODUCER_BATCH_SIZE));
    Integer overflowLimit = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_EXECUTOR_OVERFLOW_LIMIT));
    if (recvBatchSize > recvQueueSize / 2) {
        throw new IllegalArgumentException(Config.TOPOLOGY_PRODUCER_BATCH_SIZE + ":" + recvBatchSize
            + " is greater than half of " + Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE + ":" +
            recvQueueSize);
    }
    IWaitStrategy backPressureWaitStrategy = IWaitStrategy.createBackPressureWaitStrategy(topologyConf);
    Map<List<Long>, JCQueue> receiveQueueMap = new HashMap<>();
    for (List<Long> executor : executors) {
        receiveQueueMap.put(executor, new JCQueue("receive-queue" + executor.toString(),
            recvQueueSize, overflowLimit, recvBatchSize, backPressureWaitStrategy,
            this.getTopologyId(), Constants.SYSTEM_COMPONENT_ID, -1, this.getPort()));
    }
    return receiveQueueMap;
}
private Map<String, Object> makeDefaultResources() {
int threadPoolSize = ObjectReader.getInt(conf.get(Config.TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE));
return ImmutableMap.of(WorkerTopologyContext.SHARED_EXECUTOR, Executors.newFixedThreadPool(threadPoolSize));
}
    /**
     * Creates user-defined shared resources. Currently always empty (mutable)
     * because no topology hook for creating them exists yet — see TODO below.
     */
    private Map<String, Object> makeUserResources() {
        /* TODO: need to invoke a hook provided by the topology, giving it a chance to create user resources.
         * this would be part of the initialization hook
         * need to separate workertopologycontext into WorkerContext and WorkerUserContext.
         * actually just do it via interfaces. just need to make sure to hide setResource from tasks
         */
        return new HashMap<>();
    }
private StormTimer mkHaltingTimer(String name) {
return new StormTimer(name, (thread, exception) -> {
LOG.error("Error when processing event", exception);
Utils.exitProcess(20, "Error when processing an event");
});
}
/**
* @return seq of task ids that receive messages from this worker
*/
private Set<Integer> workerOutboundTasks() {
WorkerTopologyContext context = getWorkerTopologyContext();
Set<String> components = new HashSet<>();
for (Integer taskId : localTaskIds) {
for (Map<String, Grouping> value : context.getTargets(context.getComponentId(taskId)).values()) {
components.addAll(value.keySet());
}
}
Set<Integer> outboundTasks = new HashSet<>();
for (Map.Entry<String, List<Integer>> entry : Utils.reverseMap(taskToComponent).entrySet()) {
if (components.contains(entry.getKey())) {
outboundTasks.addAll(entry.getValue());
}
}
return outboundTasks;
}
    /** @return the set of task ids this worker sends messages to (may include local tasks). */
    public Set<Integer> getOutboundTasks() {
        return this.outboundTasks;
    }
/**
* Check if this worker has remote outbound tasks.
* @return true if this worker has remote outbound tasks; false otherwise.
*/
public boolean hasRemoteOutboundTasks() {
Set<Integer> remoteTasks = Sets.difference(new HashSet<>(outboundTasks), new HashSet<>(localTaskIds));
return !remoteTasks.isEmpty();
}
/**
* If all the tasks are local tasks, the topology has only one worker.
* @return true if this worker is the single worker; false otherwise.
*/
public boolean isSingleWorker() {
Set<Integer> nonLocalTasks = Sets.difference(getTaskToComponent().keySet(),
new HashSet<>(localTaskIds));
return nonLocalTasks.isEmpty();
}
    /** Halts the transfer thread of this worker's {@code workerTransfer}. */
    public void haltWorkerTransfer() {
        workerTransfer.haltTransferThd();
    }
    /** @return the transfer queue owned by {@code workerTransfer}. */
    public JCQueue getTransferQueue() {
        return workerTransfer.getTransferQueue();
    }
    /** Callback invoked with a batch of addressed tuples destined for local transfer. */
    public interface ILocalTransferCallback {
        void transfer(ArrayList<AddressedTuple> tupleBatch);
    }
}
| |
/**
Copyright (C) 2009 ModelJUnit Project
This file is part of the ModelJUnit project.
The ModelJUnit project contains free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
The ModelJUnit project is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ModelJUnit; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package nz.ac.waikato.modeljunit.gui;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URL;
import java.net.URI;
import java.util.regex.Matcher;
import java.lang.reflect.Method;
import nz.ac.waikato.modeljunit.gui.visualisaton.PanelJUNGVisualisation;
import nz.ac.waikato.modeljunit.Action;
import nz.ac.waikato.modeljunit.Model;
import nz.ac.waikato.modeljunit.Tester;
import nz.ac.waikato.modeljunit.GreedyTester;
import nz.ac.waikato.modeljunit.GraphListener;
import nz.ac.waikato.modeljunit.coverage.ActionCoverage;
import nz.ac.waikato.modeljunit.coverage.CoverageHistory;
import nz.ac.waikato.modeljunit.coverage.StateCoverage;
import nz.ac.waikato.modeljunit.coverage.TransitionCoverage;
import nz.ac.waikato.modeljunit.coverage.TransitionPairCoverage;
import org.objectweb.asm.ClassReader;
/** The main ModelJUnit GUI class.
*
* @author Gian Perrone <gian@waikato.ac.nz>
**/
public class ModelJUnitGUI implements Runnable
{
   public static final String MODELJUNIT_VERSION = "2.0";
   // Main application frame and its current title ("*" suffix marks unsaved work).
   private JFrame mAppWindow;
   private String mAppWindowTitle = "ModelJUnit - Untitled*";
   // Current project; also published globally via Project.setInstance(...).
   private Project mProject;
   // Currently loaded model, shared application-wide through static get/setModel.
   private static Model mModel;
   // Application-wide singleton panels, reused across the windows this class opens.
   private PanelJUNGVisualisation mVisualisation;
   private PanelCoverage mCoverage;
   private PanelResultViewer mResultViewer;
   private PanelTestDesign mTestDesign;
   // Modal welcome dialog shown at startup.
   private JDialog mSplash;
   // True while the explored graph matches the loaded model; cleared on model load.
   private boolean mGraphCurrent;
   /** Creates the project, the shared panels and the main window, then shows
    *  the (modal) splash dialog. */
   public ModelJUnitGUI() {
      // Create the project first so the singleton panels below can see it.
      mProject = new Project();
      Project.setInstance(mProject);
      mGraphCurrent = false;
      mVisualisation = PanelJUNGVisualisation.getGraphVisualisationInstance();
      mCoverage = PanelCoverage.getInstance();
      mResultViewer = PanelResultViewer.getResultViewerInstance();
      mTestDesign = PanelTestDesign.getTestDesignPanelInstance(this);
      buildGUI();
      displaySplashWindow(); // modal: blocks until the splash is dismissed
   }
   /** Construct an application window. **/
   public void buildGUI() {
      mAppWindow = new JFrame(mAppWindowTitle);
      // For now - set the default close action.
      //TODO: Change this to hook into a confirmation dialogue.
      mAppWindow.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
      // Add the elements to the GUI:
      // * MenuBar (Top)
      // * Toolbar (Top)
      // * Info Panel (Left)
      // * Visualizer (Right-Middle)
      // * Status panel (Bottom)
      // Add menu bar:
      mAppWindow.setJMenuBar(new ModelJUnitMenuBar(this));
      // Add tool bar:
      JPanel toolbar = new Toolbar(this);
      toolbar.setPreferredSize(new Dimension(750,40));
      mAppWindow.getContentPane().add(toolbar, BorderLayout.PAGE_START);
      // Add Info Panel:
      // NOTE(review): infopanel is built but the add() below is commented out,
      // so it never appears in the frame -- confirm whether it is still wanted.
      JPanel infopanel = new JPanel();
      infopanel.setPreferredSize(new Dimension(120,550));
      //mAppWindow.getContentPane().add(infopanel, BorderLayout.LINE_START);
      // Add visualiser
      mVisualisation.setPreferredSize(new Dimension(630,430));
      mAppWindow.getContentPane().add(mVisualisation, BorderLayout.CENTER);
      // Add status panel (read-only text area at the bottom of the frame)
      JPanel statuspanel = new JPanel();
      JTextArea statustext = new JTextArea();
      statustext.setEditable(false);
      statuspanel.add(statustext);
      statuspanel.setPreferredSize(new Dimension(750,90));
      mAppWindow.getContentPane().add(statuspanel, BorderLayout.PAGE_END);
      mAppWindow.pack();
   }
public void displayAboutWindow() {
final JDialog about = new JDialog(mAppWindow, "About ModelJUnit", true);
about.add(new JLabel("<html><h1>ModelJUnit v2.0-beta1</h1></html>"), BorderLayout.PAGE_START);
about.add(new JLabel("Copyright (c) 2009 The University of Waikato"), BorderLayout.CENTER);
JButton btn = new JButton("OK");
btn.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { about.setVisible(false); } } );
about.add(btn, BorderLayout.PAGE_END);
about.pack();
about.setLocationRelativeTo(null);
about.setVisible(true);
}
   /** Shows the modal startup dialog offering New Project, Open Project, and a
    *  double-clickable list of bundled example models. */
   public void displaySplashWindow() {
      mSplash = new JDialog(mAppWindow, "Welcome to ModelJUnit", true);
      JPanel pane = new JPanel();
      pane.setLayout(new GridBagLayout());
      mSplash.add(pane,BorderLayout.CENTER);
      mSplash.add(new JLabel("<html><h1>Welcome to ModelJUnit</h1></html>"), BorderLayout.PAGE_START);
      GridBagConstraints c = new GridBagConstraints();
      // Row 0: "New Project" button + description.
      c.gridx = 0;
      c.gridy = 0;
      c.ipady = 50;
      c.fill = GridBagConstraints.HORIZONTAL;
      JButton but = new JButton("New Project");
      but.addActionListener(new ActionListener() {
         public void actionPerformed(ActionEvent e) {
            mSplash.setVisible(false);
            showProjectDialog(null);
         }
      });
      pane.add(but, c);
      c.gridx = 1;
      c.gridy = 0;
      c.ipady = 0;
      pane.add(new JLabel("<html><em>Create a new empty ModelJUnit project</em><html>"), c);
      // Row 1: "Open Project" button + description.
      c.gridx = 0;
      c.gridy = 1;
      c.ipady = 50;
      but = new JButton("Open Project");
      but.addActionListener(new ActionListener() {
         public void actionPerformed(ActionEvent e) {
            mSplash.setVisible(false);
            displayProjectFileChooser(true);
         }
      });
      pane.add(but, c);
      c.gridx = 1;
      c.gridy = 1;
      c.ipady = 0;
      pane.add(new JLabel("<html><em>Open an existing ModelJUnit project</em><html>"), c);
      // Row 2: placeholder icon + hint for the examples list.
      c.gridx = 0;
      c.gridy = 2;
      c.ipady = 50;
      but = new JButton("(Example icon)");
      pane.add(but, c);
      c.gridx = 1;
      c.gridy = 2;
      c.ipady = 0;
      pane.add(new JLabel("<html><em>Double-click on any of the ModelJUnit examples below:</em><html>"), c);
      // Row 3 (both columns): scrollable list of example model class names.
      c.gridx = 0;
      c.gridy = 3;
      c.ipady = 60;
      c.gridwidth = 2;
      final DefaultListModel exampleModel = new DefaultListModel();
      final JList examples = new JList(exampleModel);
      exampleModel.addElement("FSM");
      exampleModel.addElement("SpecialFSMNoLoops");
      exampleModel.addElement("SimpleSet");
      exampleModel.addElement("StringSetTest");
      exampleModel.addElement("StringSetBuggy");
      exampleModel.addElement("AlarmClock");
      exampleModel.addElement("TrafficLight");
      exampleModel.addElement("SimpleSetWithAdaptor");
      exampleModel.addElement("StringSet");
      exampleModel.addElement("SpecialFSM");
      exampleModel.addElement("LargeSet");
      exampleModel.addElement("QuiDonc");
      exampleModel.addElement("gsm.GSM11Impl");
      exampleModel.addElement("gsm.SimCard");
      exampleModel.addElement("ecinema.ECinema");
      exampleModel.addElement("ecinema.User");
      exampleModel.addElement("ecinema.Showtime");
      pane.add(new JScrollPane(examples),c);
      // Double-click loads the example from the bundled examples package and
      // pre-selects state+transition coverage.
      MouseListener mouseListener = new MouseAdapter() {
         public void mouseClicked(MouseEvent e) {
            if (e.getClickCount() == 2) {
               int index = examples.locationToIndex(e.getPoint());
               String example = ""+exampleModel.get(index);
               mSplash.setVisible(false);
               loadModelClass(example, "nz.ac.waikato.modeljunit.examples");
               boolean[] coverage = {true,true,false,false,false};
               Parameter.setCoverageOption(coverage);
               mTestDesign.updatePanelSettings();
            }
         }
      };
      examples.addMouseListener(mouseListener);
      mSplash.pack();
      mSplash.setLocationRelativeTo(null);
      mSplash.setVisible(true);
   }
   /** Updates both the cached title and the visible frame title. */
   public void setTitle(String title) {
      mAppWindowTitle = title;
      mAppWindow.setTitle(title);
   }
   /** @return the current application window title. */
   public String getTitle() {
      return mAppWindowTitle;
   }
   /** @return the main application frame (parent for dialogs). */
   public JFrame getFrame() {
      return mAppWindow;
   }
   /** Runnable entry point: shows and maximises the main window. */
   public void run() {
      mAppWindow.setVisible(true);
      mAppWindow.setExtendedState(JFrame.MAXIMIZED_BOTH);
      // NOTE(review): the lines below create and save a throwaway "Test Project"
      // (test.mju) every time the window is shown -- this looks like leftover
      // debug code; confirm before removing.
      Project pr = new Project();
      pr.setName("Test Project");
      pr.setFileName(new File("test.mju"));
      //pr.setProperty("foobar",new Integer(123));
      //pr.setProperty("test","hello, world");
      Project.save(pr);
   }
/** Display a file chooser and load the model.
*
* This needs to be broken up into two routines so that any
* model-related logic can be called without displaying the file
* chooser. This is so that we can reload a model when opening
* a project.
**/
public void displayFileChooser()
{
// ------------ Open model from class file --------------
FileChooserFilter javaFileFilter = new FileChooserFilter("class",
"Java class Files");
JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(new File(Parameter.getModelChooserDirectory()));
chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
chooser.setDialogTitle("Select Model File");
chooser.addChoosableFileFilter(javaFileFilter);
int option = chooser.showOpenDialog(mAppWindow);
if (option == JFileChooser.APPROVE_OPTION) {
File f = chooser.getSelectedFile();
loadModelFile(f);
mProject.setModelFile(f);
}
}
   /** Loads a model class by name (e.g. an example from the splash screen).
    *
    *  Resets any previous model, records the class/package in Parameter,
    *  registers every @Action-annotated method with TestExeModel, then
    *  installs the new Model and notifies all panels via newModel().
    *
    * @param className   simple class name (without package)
    * @param packageName package containing the class
    * @throws RuntimeException if the model class cannot be loaded
    **/
   public void loadModelClass(String className, String packageName) {
      TestExeModel.reset();
      Parameter.setClassName(className);
      Parameter.setPackageName(packageName);
      int actionNumber = 0;
      if (TestExeModel.loadModelClassFromFile()) {
         Class<?> testcase = TestExeModel.getModelClass();
         for (Method method : testcase.getMethods()) {
            if (method.isAnnotationPresent(Action.class)) {
               actionNumber++;
               TestExeModel.addMethod(method);
               System.out.println("Added method #"+actionNumber);
            }
         }
      } else {
         // NOTE(review): this branch is taken whenever the load fails, not only
         // when @Action annotations are missing -- the message may mislead.
         throw new RuntimeException("Error Loading Model - No @Action annotations!");
      }
      String cName = Parameter.getPackageName()+"."+Parameter.getClassName();
      setTitle("ModelJUnit: " + cName);
      mProject.setName(cName);
      Model mod = new Model(TestExeModel.getModelObject());
      ModelJUnitGUI.setModel(mod);
      // Force the graph to be rebuilt for the new model.
      mGraphCurrent = false;
      // buildGraphGUI();
      //m_modelInfo1.setText("Model: "+cName);
      //m_modelInfo2.setText("Path: "+Parameter.getPackageLocation());
      //m_modelInfo3.setText("Actions: "+actionNumber + " actions were loaded.");
      newModel(); // tell the other panels about the new model
   }
   /** Loads a compiled model from a .class file chosen on disk.
    *
    *  Uses ASM to discover the package/class name, derives the classpath root
    *  from the file location, loads the class via TestExeModel, and registers
    *  its @Action methods. On any failure an error dialog is shown and all
    *  model-related Parameter state is reset.
    *
    * @param f the .class file containing the compiled model
    **/
   public void loadModelFile(File f) {
      String errmsg = null; // null means no errors yet
      String wholePath = f.getAbsolutePath();
      Parameter.setModelChooserDirectory(f.getParent());
      // Reset the existing model
      TestExeModel.reset();
      // Use ASM to read the package and class name from the .class file
      try {
         ClassReader reader = new ClassReader(new FileInputStream(f));
         String internalName = reader.getClassName();
         int slash = internalName.lastIndexOf('/');
         String className = internalName.substring(slash+1);
         String packageName = "";
         String classPath = "";
         if (slash >= 0) {
            packageName = internalName.substring(0, slash).replaceAll("/", ".");
         }
         //System.out.println("f.absolutePath="+f.getAbsolutePath());
         //System.out.println("internalName="+internalName);
         //System.out.println("className="+className);
         //System.out.println("packageName="+packageName);
         // now calculate the classpath for this .class file.
         // The chosen file must end with <package dirs>/<className>.class;
         // stripping that suffix yields the classpath root.
         String sep = Matcher.quoteReplacement(File.separator);
         String ignore = ("/"+internalName+".class").replaceAll("/", sep);
         //System.out.println("ignore="+ignore);
         if (wholePath.endsWith(ignore)) {
            classPath = wholePath.substring(0, wholePath.lastIndexOf(ignore));
            //System.out.println("MU: classPath="+classPath);
         }
         else {
            errmsg = "Error calculating top of package from: "+wholePath;
         }
         // Load model from file and initialize the model object
         int actionNumber = 0;
         if (errmsg == null) {
            Parameter.setModelPath(wholePath);
            Parameter.setClassName(className);
            Parameter.setPackageName(packageName);
            Parameter.setPackageLocation(classPath);
            if (TestExeModel.loadModelClassFromFile()) {
               Class<?> testcase = TestExeModel.getModelClass();
               for (Method method : testcase.getMethods()) {
                  if (method.isAnnotationPresent(Action.class)) {
                     actionNumber++;
                     TestExeModel.addMethod(method);
                     System.out.println("Added method #"+actionNumber);
                  }
               }
            }
            else {
               errmsg = "Invalid model class: no @Action methods.";
            }
         }
         if (errmsg == null) {
            // We have successfully loaded a new model
            //initializeTester(0);
            //initializeTester(1);
            //m_butExternalExecute.setEnabled(true);
            String cName = Parameter.getPackageName()+"."+Parameter.getClassName();
            setTitle("ModelJUnit: " + cName);
            mProject.setName(cName);
            //Tester tester = new Tester(TestExeModel.getModelObject());
            Model mod = new Model(TestExeModel.getModelObject());
            ModelJUnitGUI.setModel(mod);
            // Force the graph to be rebuilt for the new model.
            mGraphCurrent = false;
            // buildGraphGUI();
            //m_modelInfo1.setText("Model: "+cName);
            //m_modelInfo2.setText("Path: "+Parameter.getPackageLocation());
            //m_modelInfo3.setText("Actions: "+actionNumber + " actions were loaded.");
            newModel(); // tell the other panels about the new model
         }
      }
      catch (IOException ex) {
         errmsg = "Error reading .class file: "+ex.getLocalizedMessage();
      }
      if (errmsg != null) {
         // Report the failure and clear every piece of model state set above.
         ErrorMessage.DisplayErrorMessage("Error loading model", errmsg);
         TestExeModel.resetModelToNull();
         Parameter.setModelPath("");
         Parameter.setClassName("");
         Parameter.setPackageName("");
         Parameter.setPackageLocation("");
         //m_modelInfo1.setText(" ");
         //m_modelInfo2.setText(MSG_NO_MODEL);
         //m_modelInfo3.setText(" ");
         // TODO: could call m_gui.newModel() here too? (To reset all panels)
      }
   }
public void displayProjectFileChooser(boolean opening) {
FileChooserFilter javaFileFilter = new FileChooserFilter("mju",
"ModelJUnit Project Files");
JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(new File(Parameter.getModelChooserDirectory()));
chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
if(opening)
chooser.setDialogTitle("Select ModelJUnit Project File for Opening");
else
chooser.setDialogTitle("Select Location to Save ModelJUnit Project");
chooser.addChoosableFileFilter(javaFileFilter);
int option = 0;
if(opening)
option = chooser.showOpenDialog(mAppWindow);
else
option = chooser.showSaveDialog(mAppWindow);
if (option == JFileChooser.APPROVE_OPTION) {
String errmsg = null; // null means no errors yet
File f = chooser.getSelectedFile();
String wholePath = f.getAbsolutePath();
Parameter.setModelChooserDirectory(f.getParent());
if(opening) {
mProject = Project.load(f);
Project.setInstance(mProject);
loadModelFile(mProject.getModelFile());
mTestDesign.updatePanelSettings();
} else {
mProject.setFileName(f);
}
}
}
public void showProjectDialog(Project project) {
ProjectDialog pd;
if(project == null) pd = new ProjectDialog(this);
else pd = new ProjectDialog(this, project);
pd.pack();
pd.setLocationRelativeTo(null);
pd.setVisible(true);
}
public void saveProject() {
if(mProject.getFileName() == null) {
displayProjectFileChooser(false);
}
Project.save(mProject);
}
   /** Display the window that permits animation of models. **/
   public void displayAnimateWindow() {
      // if there is no model loaded, throw an error:
      if(getModel() == null) {
         System.err.println("Error: no model loaded");
         //XXX: throw up a dialog box.
         return;
      }
      JFrame animate = new JFrame("Animator - ModelJUnit");
      animate.setPreferredSize(new Dimension(760,500));
      PanelAnimator pa = PanelAnimator.getInstance();
      pa.newModel();
      // Add the action history, which the animator supplies.
      JScrollPane scroll = new JScrollPane(pa.getActionHistoryList());
      // Header: model name on top, current-state label underneath.
      JPanel labelPanel = new JPanel();
      labelPanel.add(new JLabel("<html><h1>"+getModel().getModelName()+"</h1></html>"),BorderLayout.PAGE_START);
      labelPanel.add(pa.getStateLabel(),BorderLayout.PAGE_END);
      animate.add(labelPanel, BorderLayout.PAGE_START);
      animate.add(pa, BorderLayout.CENTER);
      animate.add(scroll, BorderLayout.LINE_END);
      animate.add(pa.getResetButton(), BorderLayout.PAGE_END);
      animate.pack();
      animate.setVisible(true);
   }
   /** Display the window that shows coverage metrics for models. **/
   public void displayCoverageWindow() {
      JFrame coverage = new JFrame("Coverage - ModelJUnit");
      coverage.setMinimumSize(new Dimension(760,500));
      coverage.add(mCoverage, BorderLayout.CENTER);
      coverage.add(mCoverage.getProgress(), BorderLayout.PAGE_END);
      // NOTE(review): no pack() before setVisible, unlike the other windows --
      // layout relies solely on the minimum size; confirm this is intentional.
      coverage.setVisible(true);
   }
/** Display the window that shows test results. **/
public void displayResultsWindow() {
JFrame results = new JFrame("Results - ModelJUnit");
results.setMinimumSize(new Dimension(760,500));
results.add(mResultViewer);
results.setVisible(true);
}
   /** Installs the application-wide current model. */
   public static void setModel(Model model) {
      mModel = model;
   }
   /** @return the application-wide current model, or null if none is loaded. */
   public static Model getModel() {
      return mModel;
   }
   /** @return the current project. */
   public Project getProject() {
      return mProject;
   }
   /** Notifies every panel that a new model has been loaded. */
   public void newModel() {
      mVisualisation.newModel();
      mCoverage.newModel();
      mResultViewer.newModel();
      mTestDesign.newModel();
   }
public void displayAlgorithmPane() {
JFrame dialog = new JFrame("Edit Configuration");
JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT,
mTestDesign, mTestDesign.getCodeView());
splitPane.setOneTouchExpandable(true);
splitPane.setDividerLocation(400);
dialog.add(splitPane);
dialog.pack();
dialog.setVisible(true);
}
   /** Explores the model to build its graph, once per loaded model.
    *  No-op when the cached graph is already current. */
   public void buildGraphGUI() {
      if(mGraphCurrent) return;
      /* JDialog dialog = new JDialog(mAppWindow,"Graph building in progress",true);
      dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE);
      dialog.getContentPane().add(new JLabel("ModelJUnit is currently building a graph from your model.\nThis may take a few seconds."));
      dialog.pack();
      dialog.setVisible(true);*/
      // Indeterminate progress bar while the (potentially slow) exploration runs.
      mCoverage.getProgress().setIndeterminate(true);
      Tester tester = TestExeModel.getTester(0);
      tester.buildGraph();
      mCoverage.getProgress().setIndeterminate(false);
      mGraphCurrent = true;
      /*dialog.setVisible(false);*/
   }
   /** Runs the configured tester, optionally drawing a live coverage line chart
    *  first, then performs the full automatic test run. Called off the EDT via
    *  the SwingWorker in runModel(). */
   private void runClass()
   {
      // Draw line chart in coverage panel
      if (mTestDesign.isLineChartDrawable()) {
         mCoverage.clearCoverages();
         int[] stages = mCoverage.computeStages(TestExeModel.getWalkLength());
         mTestDesign.initializeTester(0);
         Tester tester = TestExeModel.getTester(0);
         /* tester.buildGraph();*/
         displayCoverageWindow();
         buildGraphGUI();
         // Track four coverage metrics with per-step history (interval 1).
         CoverageHistory[] coverage = new CoverageHistory[TestExeModel.COVERAGE_NUM];
         coverage[0] = new CoverageHistory(new StateCoverage(), 1);
         coverage[1] = new CoverageHistory(new TransitionCoverage(), 1);
         coverage[2] = new CoverageHistory(new TransitionPairCoverage(), 1);
         coverage[3] = new CoverageHistory(new ActionCoverage(), 1);
         tester.addCoverageMetric(coverage[0]);
         tester.addCoverageMetric(coverage[1]);
         tester.addCoverageMetric(coverage[2]);
         tester.addCoverageMetric(coverage[3]);
         // Run test several times to draw line chart
         for (int i = 0; i < stages.length; i++) {
            // NOTE(review): generates stages[0] steps on every pass while progress
            // is reported as stages[i]; this is correct only if computeStages()
            // returns evenly spaced checkpoints -- confirm, else use the delta.
            tester.generate(stages[0]);
            System.out.println("Progress: " + stages[i] + "/" + TestExeModel.getWalkLength());
            mCoverage.setProgress(stages[i],TestExeModel.getWalkLength());
            // Update the line chart and repaint
            mCoverage.addStateCoverage((int) coverage[0].getPercentage());
            mCoverage.addTransitionCoverage((int) coverage[1].getPercentage());
            mCoverage.addTransitionPairCoverage((int) coverage[2].getPercentage());
            mCoverage.addActionCoverage((int) coverage[3].getPercentage());
            mCoverage.redrawGraph();
            // Brief pause so the chart visibly animates.
            try {
               Thread.sleep(100);
            }
            catch (InterruptedException e) {
               e.printStackTrace();
            }
         }
      }
      // To reset tester, it solve the problem that coverage matrix incorrect.
      mTestDesign.initializeTester(0);
      //reset the visualisation panel
      mVisualisation.resetRunTimeInformation();
      //Try to fully explore the complete graph before running the test explorations
      Tester tester = TestExeModel.getTester(0);
      GraphListener graph = tester.buildGraph();
      mVisualisation.showEmptyExploredGraph(graph);
      // Clear the information in Result viewer text area
      mResultViewer.resetRunTimeInformation();
      // Run test and display test output
      TestExeModel.runTestAuto();
      // Finish the visualisation panel. This effectively starts the animation.
      mVisualisation.updateGUI(true);
   }
   /** Starts a model test run on a background SwingWorker so the EDT stays
    *  responsive. No-op when no model is loaded. */
   public void runModel() {
      if(mModel == null) return;
      SwingWorker<String,String> worker = new SwingWorker<String,String>() {
         public String doInBackground() {
            runClass();
            return "";
         }
      };
      worker.execute();
      //runClass();
      /*CoverageHistory hist = new CoverageHistory(new TransitionCoverage(), 1);
      tester.addCoverageMetric(hist);
      tester.addListener("verbose");
      while (hist.getPercentage() < 99.0)
         tester.generate();
      System.out.println("Transition Coverage ="+hist.toString());
      System.out.println("History = "+hist.toCSV());*/
   }
public static void main(String[] args) {
ModelJUnitGUI gui = new ModelJUnitGUI();
new Thread(gui).start();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.*;
import org.apache.thrift.TException;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.apache.zeppelin.display.Input;
import org.apache.zeppelin.interpreter.*;
import org.apache.zeppelin.interpreter.InterpreterResult.Type;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterContext;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterResult;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
* Proxy for Interpreter instance that runs on separate process
*/
public class RemoteInterpreter extends Interpreter {
  private final RemoteInterpreterProcessListener remoteInterpreterProcessListener;
  private final ApplicationEventListener applicationEventListener;
  Logger logger = LoggerFactory.getLogger(RemoteInterpreter.class);
  Gson gson = new Gson();
  // Launch configuration for a managed (spawned) interpreter process.
  private String interpreterRunner;
  private String interpreterPath;
  private String localRepoPath;
  private String className;
  private String noteId;
  // Lazily fetched from the remote process and cached (see getFormType()).
  FormType formType;
  // Set once init() has created this interpreter on the remote side.
  boolean initialized;
  // ALL_CAPS properties exported to the spawned process (see isEnvString()).
  private Map<String, String> env;
  private int connectTimeout;
  private int maxPoolSize;
  // Host/port of an already-running process (see connectToExistingProcess()).
  private String host;
  private int port;
  /**
   * Remote interpreter and manage interpreter process
   */
  public RemoteInterpreter(Properties property,
      String noteId,
      String className,
      String interpreterRunner,
      String interpreterPath,
      String localRepoPath,
      int connectTimeout,
      int maxPoolSize,
      RemoteInterpreterProcessListener remoteInterpreterProcessListener,
      ApplicationEventListener appListener) {
    super(property);
    this.noteId = noteId;
    this.className = className;
    initialized = false;
    this.interpreterRunner = interpreterRunner;
    this.interpreterPath = interpreterPath;
    this.localRepoPath = localRepoPath;
    // Export ALL_CAPS interpreter properties as environment variables.
    env = getEnvFromInterpreterProperty(property);
    this.connectTimeout = connectTimeout;
    this.maxPoolSize = maxPoolSize;
    this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
    this.applicationEventListener = appListener;
  }
  /**
   * Connect to existing process
   */
  public RemoteInterpreter(
      Properties property,
      String noteId,
      String className,
      String host,
      int port,
      int connectTimeout,
      int maxPoolSize,
      RemoteInterpreterProcessListener remoteInterpreterProcessListener,
      ApplicationEventListener appListener) {
    super(property);
    this.noteId = noteId;
    this.className = className;
    initialized = false;
    // A non-null host with a positive port selects the "connect to existing
    // process" path in getInterpreterProcess().
    this.host = host;
    this.port = port;
    this.connectTimeout = connectTimeout;
    this.maxPoolSize = maxPoolSize;
    this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
    this.applicationEventListener = appListener;
  }
  // VisibleForTesting
  public RemoteInterpreter(
      Properties property,
      String noteId,
      String className,
      String interpreterRunner,
      String interpreterPath,
      String localRepoPath,
      Map<String, String> env,
      int connectTimeout,
      RemoteInterpreterProcessListener remoteInterpreterProcessListener,
      ApplicationEventListener appListener) {
    super(property);
    this.className = className;
    this.noteId = noteId;
    this.interpreterRunner = interpreterRunner;
    this.interpreterPath = interpreterPath;
    this.localRepoPath = localRepoPath;
    // Caller-supplied env, augmented (and possibly overridden) by ALL_CAPS properties.
    env.putAll(getEnvFromInterpreterProperty(property));
    this.env = env;
    this.connectTimeout = connectTimeout;
    // NOTE(review): pool size is hard-coded to 10 here, unlike the other
    // constructors which take it as a parameter -- confirm this is intentional.
    this.maxPoolSize = 10;
    this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
    this.applicationEventListener = appListener;
  }
private Map<String, String> getEnvFromInterpreterProperty(Properties property) {
Map<String, String> env = new HashMap<String, String>();
for (Object key : property.keySet()) {
if (isEnvString((String) key)) {
env.put((String) key, property.getProperty((String) key));
}
}
return env;
}
static boolean isEnvString(String key) {
if (key == null || key.length() == 0) {
return false;
}
return key.matches("^[A-Z_0-9]*");
}
  /** @return the fully qualified class name of the wrapped remote interpreter. */
  @Override
  public String getClassName() {
    return className;
  }
  /** True when a host/port was supplied, i.e. we attach to a running process
   *  instead of spawning one. */
  private boolean connectToExistingProcess() {
    return host != null && port > 0;
  }
  /**
   * Returns the remote process shared by this interpreter's group, creating
   * and caching it on first use. Depending on configuration this either
   * attaches to an already-running process or spawns a managed one.
   * Returns null when the interpreter has no group.
   */
  public RemoteInterpreterProcess getInterpreterProcess() {
    InterpreterGroup intpGroup = getInterpreterGroup();
    if (intpGroup == null) {
      return null;
    }
    // The process is shared by the whole group; guard the lazy creation.
    synchronized (intpGroup) {
      if (intpGroup.getRemoteInterpreterProcess() == null) {
        RemoteInterpreterProcess remoteProcess;
        if (connectToExistingProcess()) {
          remoteProcess = new RemoteInterpreterRunningProcess(
              connectTimeout,
              remoteInterpreterProcessListener,
              applicationEventListener,
              host,
              port);
        } else {
          // create new remote process
          remoteProcess = new RemoteInterpreterManagedProcess(
              interpreterRunner, interpreterPath, localRepoPath, env, connectTimeout,
              remoteInterpreterProcessListener, applicationEventListener);
        }
        intpGroup.setRemoteInterpreterProcess(remoteProcess);
      }
      return intpGroup.getRemoteInterpreterProcess();
    }
  }
public synchronized void init() {
if (initialized == true) {
return;
}
RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
final InterpreterGroup interpreterGroup = getInterpreterGroup();
interpreterProcess.reference(interpreterGroup);
interpreterProcess.setMaxPoolSize(
Math.max(this.maxPoolSize, interpreterProcess.getMaxPoolSize()));
String groupId = interpreterGroup.getId();
synchronized (interpreterProcess) {
Client client = null;
try {
client = interpreterProcess.getClient();
} catch (Exception e1) {
throw new InterpreterException(e1);
}
boolean broken = false;
try {
logger.info("Create remote interpreter {}", getClassName());
if (localRepoPath != null) {
property.put("zeppelin.interpreter.localRepo", localRepoPath);
}
client.createInterpreter(groupId, noteId,
getClassName(), (Map) property);
// Push angular object loaded from JSON file to remote interpreter
if (!interpreterGroup.isAngularRegistryPushed()) {
pushAngularObjectRegistryToRemote(client);
interpreterGroup.setAngularRegistryPushed(true);
}
} catch (TException e) {
logger.error("Failed to create interpreter: {}", getClassName());
throw new InterpreterException(e);
} finally {
// TODO(jongyoul): Fixed it when not all of interpreter in same interpreter group are broken
interpreterProcess.releaseClient(client, broken);
}
}
initialized = true;
}
  /**
   * Initialises every interpreter of this note's group. An interpreter that
   * fails to initialise is removed from the group instead of aborting open().
   */
  @Override
  public void open() {
    InterpreterGroup interpreterGroup = getInterpreterGroup();
    synchronized (interpreterGroup) {
      // initialize all interpreters in this interpreter group
      // Iterate over a copy so failed interpreters can be removed from the live list.
      List<Interpreter> interpreters = interpreterGroup.get(noteId);
      for (Interpreter intp : new ArrayList<>(interpreters)) {
        // Unwrap down to the underlying RemoteInterpreter.
        Interpreter p = intp;
        while (p instanceof WrappedInterpreter) {
          p = ((WrappedInterpreter) p).getInnerInterpreter();
        }
        try {
          ((RemoteInterpreter) p).init();
        } catch (InterpreterException e) {
          logger.error("Failed to initialize interpreter: {}. Remove it from interpreterGroup",
              p.getClassName());
          interpreters.remove(p);
        }
      }
    }
  }
  /**
   * Closes this interpreter on the remote side, then drops one reference to
   * the shared process (dereference() happens even if the close call failed).
   */
  @Override
  public void close() {
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    Client client = null;
    boolean broken = false;
    try {
      client = interpreterProcess.getClient();
      if (client != null) {
        client.close(noteId, className);
      }
    } catch (TException e) {
      // Transport failure: flag the client so it is not reused from the pool.
      broken = true;
      throw new InterpreterException(e);
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    } finally {
      if (client != null) {
        interpreterProcess.releaseClient(client, broken);
      }
      getInterpreterProcess().dereference();
    }
  }
  /**
   * Sends the paragraph text to the remote process for execution and merges
   * the returned config and GUI state back into the local context.
   *
   * @param st      paragraph text to interpret
   * @param context execution context (config, GUI, runners)
   * @return the converted remote result
   * @throws InterpreterException on client-acquisition or transport failure
   */
  @Override
  public InterpreterResult interpret(String st, InterpreterContext context) {
    logger.debug("st: {}", st);
    FormType form = getFormType(); // also triggers init() on first use
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    Client client = null;
    try {
      client = interpreterProcess.getClient();
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    }
    // Replace the pooled runners for this note with the context's runners.
    InterpreterContextRunnerPool interpreterContextRunnerPool = interpreterProcess
        .getInterpreterContextRunnerPool();
    List<InterpreterContextRunner> runners = context.getRunners();
    if (runners != null && runners.size() != 0) {
      // assume all runners in this InterpreterContext have the same note id
      String noteId = runners.get(0).getNoteId();
      interpreterContextRunnerPool.clear(noteId);
      interpreterContextRunnerPool.addAll(noteId, runners);
    }
    boolean broken = false;
    try {
      final GUI currentGUI = context.getGui();
      RemoteInterpreterResult remoteResult = client.interpret(
          noteId, className, st, convert(context));
      // Replace the local paragraph config with the remote one (JSON round-trip).
      Map<String, Object> remoteConfig = (Map<String, Object>) gson.fromJson(
          remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
          }.getType());
      context.getConfig().clear();
      context.getConfig().putAll(remoteConfig);
      if (form == FormType.NATIVE) {
        // NATIVE forms: the remote GUI fully replaces the local one.
        GUI remoteGui = gson.fromJson(remoteResult.getGui(), GUI.class);
        currentGUI.clear();
        currentGUI.setParams(remoteGui.getParams());
        currentGUI.setForms(remoteGui.getForms());
      } else if (form == FormType.SIMPLE) {
        // SIMPLE forms: merge remote forms/params into the existing GUI.
        final Map<String, Input> currentForms = currentGUI.getForms();
        final Map<String, Object> currentParams = currentGUI.getParams();
        final GUI remoteGUI = gson.fromJson(remoteResult.getGui(), GUI.class);
        final Map<String, Input> remoteForms = remoteGUI.getForms();
        final Map<String, Object> remoteParams = remoteGUI.getParams();
        currentForms.putAll(remoteForms);
        currentParams.putAll(remoteParams);
      }
      InterpreterResult result = convert(remoteResult);
      return result;
    } catch (TException e) {
      broken = true;
      throw new InterpreterException(e);
    } finally {
      interpreterProcess.releaseClient(client, broken);
    }
  }
/**
 * Asks the remote interpreter process to cancel the paragraph described by
 * the given context.
 *
 * @param context paragraph execution context identifying what to cancel
 * @throws InterpreterException if the thrift client cannot be obtained or the call fails
 */
@Override
public void cancel(InterpreterContext context) {
  final RemoteInterpreterProcess process = getInterpreterProcess();
  final Client client;
  try {
    client = process.getClient();
  } catch (Exception e) {
    throw new InterpreterException(e);
  }
  // Marks the client as unusable so the pool discards it on release.
  boolean failed = false;
  try {
    client.cancel(noteId, className, convert(context));
  } catch (TException e) {
    failed = true;
    throw new InterpreterException(e);
  } finally {
    process.releaseClient(client, failed);
  }
}
/**
 * Returns the dynamic-form type of the remote interpreter, fetching it from
 * the remote process on first use and caching it in {@code formType}.
 *
 * @return the remote interpreter's form type
 * @throws InterpreterException if the thrift client cannot be obtained or the call fails
 */
@Override
public FormType getFormType() {
  init();
  // Serve the cached value once the remote side has been asked.
  if (formType != null) {
    return formType;
  }
  final RemoteInterpreterProcess process = getInterpreterProcess();
  final Client client;
  try {
    client = process.getClient();
  } catch (Exception e) {
    throw new InterpreterException(e);
  }
  boolean failed = false;
  try {
    formType = FormType.valueOf(client.getFormType(noteId, className));
    return formType;
  } catch (TException e) {
    failed = true;
    throw new InterpreterException(e);
  } finally {
    process.releaseClient(client, failed);
  }
}
/**
 * Fetches the execution progress (0-100) of the given paragraph from the
 * remote interpreter process.
 *
 * @param context paragraph execution context
 * @return remote progress, or 0 when the remote process is absent or stopped
 * @throws InterpreterException if the thrift client cannot be obtained or the call fails
 */
@Override
public int getProgress(InterpreterContext context) {
  final RemoteInterpreterProcess process = getInterpreterProcess();
  // A process that does not exist or is no longer running reports no progress.
  if (process == null || !process.isRunning()) {
    return 0;
  }
  final Client client;
  try {
    client = process.getClient();
  } catch (Exception e) {
    throw new InterpreterException(e);
  }
  boolean failed = false;
  try {
    return client.getProgress(noteId, className, convert(context));
  } catch (TException e) {
    failed = true;
    throw new InterpreterException(e);
  } finally {
    process.releaseClient(client, failed);
  }
}
/**
 * Asks the remote interpreter for code-completion candidates.
 *
 * @param buf    the buffer being edited
 * @param cursor the cursor offset within {@code buf}
 * @return completion candidates produced by the remote interpreter
 * @throws InterpreterException if the thrift client cannot be obtained or the call fails
 */
@Override
public List<InterpreterCompletion> completion(String buf, int cursor) {
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }
  boolean broken = false;
  try {
    // Return the remote result directly; the previous raw-typed `List` local
    // only introduced an unchecked-conversion warning without adding anything.
    return client.completion(noteId, className, buf, cursor);
  } catch (TException e) {
    broken = true;
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client, broken);
  }
}
/**
 * Returns the scheduler that serializes jobs for this remote interpreter.
 * The scheduler key combines the class name, note id and the process hash so
 * each remote process gets its own scheduler instance.
 *
 * @return a remote scheduler, or null when no remote process exists
 */
@Override
public Scheduler getScheduler() {
  final RemoteInterpreterProcess process = getInterpreterProcess();
  if (process == null) {
    return null;
  }
  final String schedulerKey =
      RemoteInterpreter.class.getName() + noteId + process.hashCode();
  return SchedulerFactory.singleton().createOrGetRemoteScheduler(
      schedulerKey, noteId, process, maxPoolSize);
}
/** Returns the identity key under which an interpreter group is registered. */
private String getInterpreterGroupKey(InterpreterGroup group) {
  return group.getId();
}
/**
 * Converts a local {@link InterpreterContext} into its thrift counterpart.
 * Non-primitive members are serialized to JSON for transport.
 */
private RemoteInterpreterContext convert(InterpreterContext ic) {
  final String authJson = gson.toJson(ic.getAuthenticationInfo());
  final String configJson = gson.toJson(ic.getConfig());
  final String guiJson = gson.toJson(ic.getGui());
  final String runnersJson = gson.toJson(ic.getRunners());
  return new RemoteInterpreterContext(
      ic.getNoteId(),
      ic.getParagraphId(),
      ic.getParagraphTitle(),
      ic.getParagraphText(),
      authJson,
      configJson,
      guiJson,
      runnersJson);
}
/** Converts a thrift result into the engine's native {@link InterpreterResult}. */
private InterpreterResult convert(RemoteInterpreterResult result) {
  final InterpreterResult.Code code = InterpreterResult.Code.valueOf(result.getCode());
  final Type type = Type.valueOf(result.getType());
  return new InterpreterResult(code, type, result.getMsg());
}
/**
 * Push local angular object registry to
 * remote interpreter. This method should be
 * call ONLY inside the init() method
 * @param client
 * @throws TException
 */
void pushAngularObjectRegistryToRemote(Client client) throws TException {
  final AngularObjectRegistry angularObjectRegistry = this.getInterpreterGroup()
      .getAngularObjectRegistry();
  // Nothing to push when no registry (or no content) exists yet.
  if (angularObjectRegistry == null || angularObjectRegistry.getRegistry() == null) {
    return;
  }
  final Map<String, Map<String, AngularObject>> registry =
      angularObjectRegistry.getRegistry();
  logger.info("Push local angular object registry from ZeppelinServer to" +
      " remote interpreter group {}", this.getInterpreterGroup().getId());
  final java.lang.reflect.Type registryType =
      new TypeToken<Map<String, Map<String, AngularObject>>>() {}.getType();
  client.angularRegistryPush(new Gson().toJson(registry, registryType));
}
/** Returns the environment variables handed to the remote interpreter process. */
public Map<String, String> getEnv() {
  return this.env;
}
/** Replaces the environment variables handed to the remote interpreter process. */
public void setEnv(Map<String, String> env) {
  this.env = env;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jexl3.parser;
/**
* Common constant strings utilities.
* <p>
* This package methods read JEXL string literals and handle escaping through the
* 'backslash' (ie: \) character. Escaping is used to neutralize string delimiters (the single
* and double quotes) and read Unicode hexadecimal encoded characters.
* </p>
* <p>
* The only escapable characters are the single and double quotes - ''' and '"' -,
* a Unicode sequence starting with 'u' followed by 4 hexadecimals and
* the backslash character - '\' - itself.
* </p>
* <p>
* A sequence where '\' occurs before any non-escapable character or sequence has no effect, the
* sequence output being the same as the input.
* </p>
*/
public class StringParser {
/** Default constructor. */
protected StringParser() {
// nothing to initialize
}
/**
* Builds a string, handles escaping through '\' syntax.
* @param str the string to build from
* @param eatsep whether the separator, the first character, should be considered
* @return the built string
*/
public static String buildString(final CharSequence str, final boolean eatsep) {
return buildString(str, eatsep, true);
}
/**
* Builds a template, does not escape characters.
* @param str the string to build from
* @param eatsep whether the separator, the first character, should be considered
* @return the built string
*/
public static String buildTemplate(final CharSequence str, final boolean eatsep) {
return buildString(str, eatsep, false);
}
/**
* Builds a string, handles escaping through '\' syntax.
* @param str the string to build from
* @param eatsep whether the separator, the first character, should be considered
* @param esc whether escape characters are interpreted or escaped
* @return the built string
*/
private static String buildString(final CharSequence str, final boolean eatsep, final boolean esc) {
final StringBuilder strb = new StringBuilder(str.length());
final char sep = eatsep ? str.charAt(0) : 0;
final int end = str.length() - (eatsep ? 1 : 0);
final int begin = (eatsep ? 1 : 0);
read(strb, str, begin, end, sep, esc);
return strb.toString();
}
/**
* Builds a regex pattern string, handles escaping '/' through '\/' syntax.
* @param str the string to build from
* @return the built string
*/
public static String buildRegex(final CharSequence str) {
return buildString(str.subSequence(1, str.length()), true);
}
/**
* Read the remainder of a string till a given separator,
* handles escaping through '\' syntax.
* @param strb the destination buffer to copy characters into
* @param str the origin
* @param index the offset into the origin
* @param sep the separator, single or double quote, marking end of string
* @return the offset in origin
*/
public static int readString(final StringBuilder strb, final CharSequence str, final int index, final char sep) {
return read(strb, str, index, str.length(), sep, true);
}
/** The length of an escaped unicode sequence. */
private static final int UCHAR_LEN = 4;
/**
* Read the remainder of a string till a given separator,
* handles escaping through '\' syntax.
* @param strb the destination buffer to copy characters into
* @param str the origin
* @param begin the relative offset in str to begin reading
* @param end the relative offset in str to end reading
* @param sep the separator, single or double quote, marking end of string
* @param esc whether escape characters are interpreted or escaped
* @return the last character offset handled in origin
*/
private static int read(final StringBuilder strb, final CharSequence str, final int begin, final int end, final char sep, final boolean esc) {
boolean escape = false;
int index = begin;
for (; index < end; ++index) {
final char c = str.charAt(index);
if (escape) {
if (c == 'u' && (index + UCHAR_LEN) < end && readUnicodeChar(strb, str, index + 1) > 0) {
index += UCHAR_LEN;
} else {
// if c is not an escapable character, re-emmit the backslash before it
final boolean notSeparator = sep == 0 ? c != '\'' && c != '"' : c != sep;
if (notSeparator && c != '\\') {
if (!esc) {
strb.append('\\').append(c);
} else {
switch (c) {
// http://es5.github.io/x7.html#x7.8.4
case 'b':
strb.append('\b');
break; // backspace \u0008
case 't':
strb.append('\t');
break; // horizontal tab \u0009
case 'n':
strb.append('\n');
break; // line feed \u000A
// We don't support vertical tab. If needed, the unicode (\u000B) should be used instead
case 'f':
strb.append('\f');
break; // form feed \u000C
case 'r':
strb.append('\r');
break; // carriage return \u000D
default:
strb.append('\\').append(c);
}
}
} else {
strb.append(c);
}
}
escape = false;
continue;
}
if (c == '\\') {
escape = true;
continue;
}
strb.append(c);
if (c == sep) {
break;
}
}
return index;
}
/** Initial shift value for composing a Unicode char from 4 nibbles (16 - 4). */
private static final int SHIFT = 12;
/** The base 10 offset used to convert hexa characters to decimal. */
private static final int BASE10 = 10;
/**
* Reads a Unicode escape character.
* @param strb the builder to write the character to
* @param str the sequence
* @param begin the begin offset in sequence (after the '\\u')
* @return 0 if char could not be read, 4 otherwise
*/
private static int readUnicodeChar(final StringBuilder strb, final CharSequence str, final int begin) {
char xc = 0;
int bits = SHIFT;
int value;
for (int offset = 0; offset < UCHAR_LEN; ++offset) {
final char c = str.charAt(begin + offset);
if (c >= '0' && c <= '9') {
value = (c - '0');
} else if (c >= 'a' && c <= 'h') {
value = (c - 'a' + BASE10);
} else if (c >= 'A' && c <= 'H') {
value = (c - 'A' + BASE10);
} else {
return 0;
}
xc |= value << bits;
bits -= UCHAR_LEN;
}
strb.append(xc);
return UCHAR_LEN;
}
/** The last 7bits ascii character. */
private static final char LAST_ASCII = 127;
/** The first printable 7bits ascii character. */
private static final char FIRST_ASCII = 32;
/**
* Escapes a String representation, expand non-ASCII characters as Unicode escape sequence.
* @param delim the delimiter character
* @param str the string to escape
* @return the escaped representation
*/
public static String escapeString(final String str, final char delim) {
if (str == null) {
return null;
}
final int length = str.length();
final StringBuilder strb = new StringBuilder(length + 2);
strb.append(delim);
for (int i = 0; i < length; ++i) {
final char c = str.charAt(i);
switch (c) {
case 0:
continue;
case '\b':
strb.append('\\');
strb.append('b');
break;
case '\t':
strb.append('\\');
strb.append('t');
break;
case '\n':
strb.append('\\');
strb.append('n');
break;
case '\f':
strb.append('\\');
strb.append('f');
break;
case '\r':
strb.append('\\');
strb.append('r');
break;
case '\\':
strb.append('\\');
strb.append('\\');
break;
default:
if (c == delim) {
strb.append('\\');
strb.append(delim);
} else if (c >= FIRST_ASCII && c <= LAST_ASCII) {
strb.append(c);
} else {
// convert to Unicode escape sequence
strb.append('\\');
strb.append('u');
final String hex = Integer.toHexString(c);
for (int h = hex.length(); h < UCHAR_LEN; ++h) {
strb.append('0');
}
strb.append(hex);
}
}
}
strb.append(delim);
return strb.toString();
}
/**
* Remove escape char ('\') from an identifier.
* @param str the identifier escaped string, ie with a backslash before space, quote, double-quote and backslash
* @return the string with no '\\' character
*/
public static String unescapeIdentifier(final String str) {
StringBuilder strb = null;
if (str != null) {
int n = 0;
final int last = str.length();
while (n < last) {
final char c = str.charAt(n);
if (c == '\\') {
if (strb == null) {
strb = new StringBuilder(last);
strb.append(str, 0, n);
}
} else if (strb != null) {
strb.append(c);
}
n += 1;
}
}
return strb == null ? str : strb.toString();
}
/**
* Adds a escape char ('\') where needed in a string form of an ide
* @param str the identifier un-escaped string
* @return the string with added backslash character before space, quote, double-quote and backslash
*/
public static String escapeIdentifier(final String str) {
StringBuilder strb = null;
if (str != null) {
int n = 0;
final int last = str.length();
while (n < last) {
final char c = str.charAt(n);
switch (c) {
case ' ':
case '\'':
case '"':
case '\\': {
if (strb == null) {
strb = new StringBuilder(last);
strb.append(str, 0, n);
}
strb.append('\\');
strb.append(c);
break;
}
default:
if (strb != null) {
strb.append(c);
}
}
n += 1;
}
}
return strb == null ? str : strb.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xerces.impl.xs;
import java.lang.reflect.Array;
import java.util.AbstractList;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import java.util.Vector;
import org.apache.xerces.impl.Constants;
import org.apache.xerces.impl.xs.util.StringListImpl;
import org.apache.xerces.impl.xs.util.XSNamedMap4Types;
import org.apache.xerces.impl.xs.util.XSNamedMapImpl;
import org.apache.xerces.impl.xs.util.XSObjectListImpl;
import org.apache.xerces.util.SymbolHash;
import org.apache.xerces.util.XMLSymbols;
import org.apache.xerces.xs.StringList;
import org.apache.xerces.xs.XSAttributeDeclaration;
import org.apache.xerces.xs.XSAttributeGroupDefinition;
import org.apache.xerces.xs.XSConstants;
import org.apache.xerces.xs.XSElementDeclaration;
import org.apache.xerces.xs.XSIDCDefinition;
import org.apache.xerces.xs.XSModel;
import org.apache.xerces.xs.XSModelGroupDefinition;
import org.apache.xerces.xs.XSNamedMap;
import org.apache.xerces.xs.XSNamespaceItem;
import org.apache.xerces.xs.XSNamespaceItemList;
import org.apache.xerces.xs.XSNotationDeclaration;
import org.apache.xerces.xs.XSObject;
import org.apache.xerces.xs.XSObjectList;
import org.apache.xerces.xs.XSTypeDefinition;
/**
* Implements XSModel: a read-only interface that represents an XML Schema,
* which could be components from different namespaces.
*
* @xerces.internal
*
* @author Sandy Gao, IBM
*
* @version $Id$
*/
public final class XSModelImpl extends AbstractList implements XSModel, XSNamespaceItemList {
// the max index / the max value of XSObject type
private static final short MAX_COMP_IDX = XSTypeDefinition.SIMPLE_TYPE;
private static final boolean[] GLOBAL_COMP = {false, // null
true, // attribute
true, // element
true, // type
false, // attribute use
true, // attribute group
true, // group
false, // model group
false, // particle
false, // wildcard
true, // idc
true, // notation
false, // annotation
false, // facet
false, // multi value facet
true, // complex type
true // simple type
};
// number of grammars/namespaces stored here
private final int fGrammarCount;
// all target namespaces
private final String[] fNamespaces;
// all schema grammar objects (for each namespace)
private final SchemaGrammar[] fGrammarList;
// a map from namespace to schema grammar
private final SymbolHash fGrammarMap;
// a map from element declaration to its substitution group
private final SymbolHash fSubGroupMap;
// store a certain kind of components from all namespaces
private final XSNamedMap[] fGlobalComponents;
// store a certain kind of components from one namespace
private final XSNamedMap[][] fNSComponents;
// a string list of all the target namespaces.
private final StringList fNamespacesList;
// store all annotations
private XSObjectList fAnnotations = null;
// whether there is any IDC in this XSModel
private final boolean fHasIDC;
/**
 * Construct an XSModelImpl, by storing some grammars and grammars imported
 * by them to this object.
 * Delegates to the two-argument constructor using the XML Schema 1.0
 * schema-for-schemas version.
 *
 * @param grammars the array of schema grammars
 */
public XSModelImpl(SchemaGrammar[] grammars) {
    this(grammars, Constants.SCHEMA_VERSION_1_0);
}
/**
 * Construct an XSModelImpl from the given grammars plus the transitive
 * closure of their imports, ensuring the schema-for-schemas grammar is
 * always present, and build the namespace map, component caches and
 * substitution groups.
 *
 * @param grammars   the array of schema grammars
 * @param s4sVersion the schema-for-schemas version to use when the S4S
 *                   grammar must be added
 */
public XSModelImpl(SchemaGrammar[] grammars, short s4sVersion) {
    // copy namespaces/grammars from the array to our arrays
    int len = grammars.length;
    // +1 leaves room for the S4S grammar if it has to be appended below.
    final int initialSize = Math.max(len+1, 5);
    String[] namespaces = new String[initialSize];
    SchemaGrammar[] grammarList = new SchemaGrammar[initialSize];
    boolean hasS4S = false;
    for (int i = 0; i < len; i++) {
        final SchemaGrammar sg = grammars[i];
        final String tns = sg.getTargetNamespace();
        namespaces[i] = tns;
        grammarList[i] = sg;
        // Identity comparison: namespace strings are interned symbols here.
        if (tns == SchemaSymbols.URI_SCHEMAFORSCHEMA) {
            hasS4S = true;
        }
    }
    // If a schema for the schema namespace isn't included, include it here.
    if (!hasS4S) {
        namespaces[len] = SchemaSymbols.URI_SCHEMAFORSCHEMA;
        grammarList[len++] = SchemaGrammar.getS4SGrammar(s4sVersion);
    }
    SchemaGrammar sg1, sg2;
    Vector gs;
    int i, j, k;
    // and recursively get all imported grammars, add them to our arrays
    // (len grows as imports are appended, so the loop also visits them).
    for (i = 0; i < len; i++) {
        // get the grammar
        sg1 = grammarList[i];
        gs = sg1.getImportedGrammars();
        // for each imported grammar
        for (j = gs == null ? -1 : gs.size() - 1; j >= 0; j--) {
            sg2 = (SchemaGrammar)gs.elementAt(j);
            // check whether this grammar is already in the list
            for (k = 0; k < len; k++) {
                if (sg2 == grammarList[k]) {
                    break;
                }
            }
            // if it's not, add it to the list
            if (k == len) {
                // ensure the capacity of the arrays (double on demand)
                if (len == grammarList.length) {
                    String[] newSA = new String[len*2];
                    System.arraycopy(namespaces, 0, newSA, 0, len);
                    namespaces = newSA;
                    SchemaGrammar[] newGA = new SchemaGrammar[len*2];
                    System.arraycopy(grammarList, 0, newGA, 0, len);
                    grammarList = newGA;
                }
                namespaces[len] = sg2.getTargetNamespace();
                grammarList[len] = sg2;
                len++;
            }
        }
    }
    fNamespaces = namespaces;
    fGrammarList = grammarList;
    boolean hasIDC = false;
    // establish the mapping from namespace to grammars
    fGrammarMap = new SymbolHash(len*2);
    for (i = 0; i < len; i++) {
        // null namespaces are keyed by the empty string in the map
        fGrammarMap.put(null2EmptyString(fNamespaces[i]), fGrammarList[i]);
        // update the idc field
        if (fGrammarList[i].hasIDConstraints()) {
            hasIDC = true;
        }
    }
    fHasIDC = hasIDC;
    fGrammarCount = len;
    // lazily-populated component caches, indexed by component type
    fGlobalComponents = new XSNamedMap[MAX_COMP_IDX+1];
    fNSComponents = new XSNamedMap[len][MAX_COMP_IDX+1];
    fNamespacesList = new StringListImpl(fNamespaces, fGrammarCount);
    // build substitution groups
    fSubGroupMap = buildSubGroups();
}
/**
 * Maps every global element declaration to its substitution group, using the
 * public {@link #getComponents} API to enumerate elements.
 * NOTE(review): appears to be the original implementation kept alongside the
 * newer {@link #buildSubGroups()} and seems unused — confirm before removing.
 *
 * @return a SymbolHash mapping each element declaration to an XSObjectList
 *         of its substitution-group members (EMPTY_LIST when none)
 */
private SymbolHash buildSubGroups_Org() {
    SubstitutionGroupHandler sgHandler = new SubstitutionGroupHandler(null);
    // Register the substitution groups declared by every grammar.
    for (int i = 0 ; i < fGrammarCount; i++) {
        sgHandler.addSubstitutionGroup(fGrammarList[i].getSubstitutionGroups());
    }
    final XSNamedMap elements = getComponents(XSConstants.ELEMENT_DECLARATION);
    final int len = elements.getLength();
    final SymbolHash subGroupMap = new SymbolHash(len*2);
    XSElementDecl head;
    XSElementDeclaration[] subGroup;
    for (int i = 0; i < len; i++) {
        head = (XSElementDecl)elements.item(i);
        subGroup = sgHandler.getSubstitutionGroup(head);
        // Empty groups share the immutable EMPTY_LIST to avoid per-entry garbage.
        subGroupMap.put(head, subGroup.length > 0 ?
                new XSObjectListImpl(subGroup, subGroup.length) : XSObjectListImpl.EMPTY_LIST);
    }
    return subGroupMap;
}
/**
 * Maps every global element declaration (gathered directly from the grammars
 * via {@link #getGlobalElements()}) to its substitution group.
 *
 * @return a SymbolHash mapping each element declaration to an XSObjectList
 *         of its substitution-group members (EMPTY_LIST when none)
 */
private SymbolHash buildSubGroups() {
    final SubstitutionGroupHandler handler = new SubstitutionGroupHandler(null);
    // Register the substitution groups declared by every grammar.
    for (int i = 0; i < fGrammarCount; i++) {
        handler.addSubstitutionGroup(fGrammarList[i].getSubstitutionGroups());
    }
    final XSObjectListImpl globalElements = getGlobalElements();
    final int count = globalElements.getLength();
    final SymbolHash map = new SymbolHash(count * 2);
    for (int i = 0; i < count; i++) {
        final XSElementDecl head = (XSElementDecl) globalElements.item(i);
        final XSElementDeclaration[] group = handler.getSubstitutionGroup(head);
        // Share the immutable EMPTY_LIST for elements with no substitutes.
        map.put(head, group.length == 0
                ? XSObjectListImpl.EMPTY_LIST
                : new XSObjectListImpl(group, group.length));
    }
    return map;
}
/**
 * Collects every global element declaration from all grammars into a single
 * list. Counts first so the result array is allocated exactly once.
 *
 * @return all global element declarations, or EMPTY_LIST when there are none
 */
private XSObjectListImpl getGlobalElements() {
    final SymbolHash[] perGrammar = new SymbolHash[fGrammarCount];
    int total = 0;
    for (int i = 0; i < fGrammarCount; i++) {
        perGrammar[i] = fGrammarList[i].fAllGlobalElemDecls;
        total += perGrammar[i].getLength();
    }
    if (total == 0) {
        return XSObjectListImpl.EMPTY_LIST;
    }
    final XSObject[] all = new XSObject[total];
    int offset = 0;
    for (int i = 0; i < fGrammarCount; i++) {
        // getValues copies this table's entries starting at the given offset.
        perGrammar[i].getValues(all, offset);
        offset += perGrammar[i].getLength();
    }
    return new XSObjectListImpl(all, total);
}
/**
 * Convenience method. Returns a list of all namespaces that belong to
 * this schema.
 * The list is built once in the constructor and shared thereafter.
 * @return A list of all namespaces that belong to this schema or
 * <code>null</code> if all components don't have a targetNamespace.
 */
public StringList getNamespaces() {
    return fNamespacesList;
}
/**
 * A set of namespace schema information information items (of type
 * <code>XSNamespaceItem</code>), one for each namespace name which
 * appears as the target namespace of any schema component in the schema
 * used for that assessment, and one for absent if any schema component
 * in the schema had no target namespace. For more information see
 * schema information.
 * This model itself implements XSNamespaceItemList (via AbstractList),
 * so it simply returns itself.
 */
public XSNamespaceItemList getNamespaceItems() {
    return this;
}
/**
 * Returns a list of top-level components, i.e. element declarations,
 * attribute declarations, etc., aggregated across all namespaces.
 * The result is built lazily on first request per component type and
 * cached in {@code fGlobalComponents}.
 * @param objectType The type of the declaration, i.e.
 *   <code>ELEMENT_DECLARATION</code>. Note that
 *   <code>XSTypeDefinition.SIMPLE_TYPE</code> and
 *   <code>XSTypeDefinition.COMPLEX_TYPE</code> can also be used as the
 *   <code>objectType</code> to retrieve only complex types or simple
 *   types, instead of all types.
 * @return A list of top-level definitions of the specified type in
 *   <code>objectType</code> or an empty <code>XSNamedMap</code> if no
 *   such definitions exist.
 */
public synchronized XSNamedMap getComponents(short objectType) {
    if (objectType <= 0 || objectType > MAX_COMP_IDX ||
        !GLOBAL_COMP[objectType]) {
        return XSNamedMapImpl.EMPTY_MAP;
    }
    // get all hashtables from all namespaces for this type of components
    if (fGlobalComponents[objectType] == null) {
        // Allocate the table array only on a cache miss; it was previously
        // created on every call, wasting work on cache hits.
        SymbolHash[] tables = new SymbolHash[fGrammarCount];
        for (int i = 0; i < fGrammarCount; i++) {
            switch (objectType) {
                case XSConstants.TYPE_DEFINITION:
                case XSTypeDefinition.COMPLEX_TYPE:
                case XSTypeDefinition.SIMPLE_TYPE:
                    tables[i] = fGrammarList[i].fGlobalTypeDecls;
                    break;
                case XSConstants.ATTRIBUTE_DECLARATION:
                    tables[i] = fGrammarList[i].fGlobalAttrDecls;
                    break;
                case XSConstants.ELEMENT_DECLARATION:
                    tables[i] = fGrammarList[i].fGlobalElemDecls;
                    break;
                case XSConstants.ATTRIBUTE_GROUP:
                    tables[i] = fGrammarList[i].fGlobalAttrGrpDecls;
                    break;
                case XSConstants.MODEL_GROUP_DEFINITION:
                    tables[i] = fGrammarList[i].fGlobalGroupDecls;
                    break;
                case XSConstants.NOTATION_DECLARATION:
                    tables[i] = fGrammarList[i].fGlobalNotationDecls;
                    break;
                case XSConstants.IDENTITY_CONSTRAINT:
                    tables[i] = fGrammarList[i].fGlobalIDConstraintDecls;
                    break;
            }
        }
        // for complex/simple types, create a special implementation,
        // which take specific types out of the hash table
        if (objectType == XSTypeDefinition.COMPLEX_TYPE ||
            objectType == XSTypeDefinition.SIMPLE_TYPE) {
            fGlobalComponents[objectType] = new XSNamedMap4Types(fNamespaces, tables, fGrammarCount, objectType);
        }
        else {
            fGlobalComponents[objectType] = new XSNamedMapImpl(fNamespaces, tables, fGrammarCount);
        }
    }
    return fGlobalComponents[objectType];
}
/**
 * Convenience method. Returns a list of top-level component declarations
 * that are defined within the specified namespace, i.e. element
 * declarations, attribute declarations, etc.
 * The result is built lazily per (namespace, type) and cached in
 * {@code fNSComponents}.
 * @param objectType The type of the declaration, i.e.
 *   <code>ELEMENT_DECLARATION</code>.
 * @param namespace The namespace to which the declaration belongs or
 *   <code>null</code> (for components with no target namespace).
 * @return A list of top-level definitions of the specified type in
 *   <code>objectType</code> and defined in the specified
 *   <code>namespace</code> or an empty <code>XSNamedMap</code>.
 */
public synchronized XSNamedMap getComponentsByNamespace(short objectType,
                                                        String namespace) {
    if (objectType <= 0 || objectType > MAX_COMP_IDX ||
        !GLOBAL_COMP[objectType]) {
        return XSNamedMapImpl.EMPTY_MAP;
    }
    // try to find the grammar (linear scan over the namespace array;
    // null target namespace is matched by identity against null)
    int i = 0;
    if (namespace != null) {
        for (; i < fGrammarCount; ++i) {
            if (namespace.equals(fNamespaces[i])) {
                break;
            }
        }
    }
    else {
        for (; i < fGrammarCount; ++i) {
            if (fNamespaces[i] == null) {
                break;
            }
        }
    }
    // no grammar registered for this namespace
    if (i == fGrammarCount) {
        return XSNamedMapImpl.EMPTY_MAP;
    }
    // get the hashtable for this type of components
    if (fNSComponents[i][objectType] == null) {
        SymbolHash table = null;
        switch (objectType) {
            case XSConstants.TYPE_DEFINITION:
            case XSTypeDefinition.COMPLEX_TYPE:
            case XSTypeDefinition.SIMPLE_TYPE:
                table = fGrammarList[i].fGlobalTypeDecls;
                break;
            case XSConstants.ATTRIBUTE_DECLARATION:
                table = fGrammarList[i].fGlobalAttrDecls;
                break;
            case XSConstants.ELEMENT_DECLARATION:
                table = fGrammarList[i].fGlobalElemDecls;
                break;
            case XSConstants.ATTRIBUTE_GROUP:
                table = fGrammarList[i].fGlobalAttrGrpDecls;
                break;
            case XSConstants.MODEL_GROUP_DEFINITION:
                table = fGrammarList[i].fGlobalGroupDecls;
                break;
            case XSConstants.NOTATION_DECLARATION:
                table = fGrammarList[i].fGlobalNotationDecls;
                break;
            case XSConstants.IDENTITY_CONSTRAINT:
                table = fGrammarList[i].fGlobalIDConstraintDecls;
                break;
        }
        // for complex/simple types, create a special implementation,
        // which take specific types out of the hash table
        if (objectType == XSTypeDefinition.COMPLEX_TYPE ||
            objectType == XSTypeDefinition.SIMPLE_TYPE) {
            fNSComponents[i][objectType] = new XSNamedMap4Types(namespace, table, objectType);
        }
        else {
            fNSComponents[i][objectType] = new XSNamedMapImpl(namespace, table);
        }
    }
    return fNSComponents[i][objectType];
}
/**
 * Convenience method. Returns a top-level simple or complex type
 * definition.
 * @param name The name of the definition.
 * @param namespace The namespace of the definition, otherwise null.
 * @return An <code>XSTypeDefinition</code> or null if such definition
 * does not exist.
 */
public XSTypeDefinition getTypeDefinition(String name,
                                          String namespace) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : (XSTypeDefinition) grammar.fGlobalTypeDecls.get(name);
}
/**
 * Convenience method. Returns a top-level simple or complex type
 * definition.
 * @param name The name of the definition.
 * @param namespace The namespace of the definition, otherwise null.
 * @param loc The schema location where the component was defined
 * @return An <code>XSTypeDefinition</code> or null if such definition
 * does not exist.
 */
public XSTypeDefinition getTypeDefinition(String name,
                                          String namespace,
                                          String loc) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : grammar.getGlobalTypeDecl(name, loc);
}
/**
 * Convenience method. Returns a top-level attribute declaration.
 * @param name The name of the declaration.
 * @param namespace The namespace of the definition, otherwise null.
 * @return A top-level attribute declaration or null if such declaration
 * does not exist.
 */
public XSAttributeDeclaration getAttributeDeclaration(String name,
                                                      String namespace) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : (XSAttributeDeclaration) grammar.fGlobalAttrDecls.get(name);
}
/**
 * Convenience method. Returns a top-level attribute declaration.
 * @param name The name of the declaration.
 * @param namespace The namespace of the definition, otherwise null.
 * @param loc The schema location where the component was defined
 * @return A top-level attribute declaration or null if such declaration
 * does not exist.
 */
public XSAttributeDeclaration getAttributeDeclaration(String name,
                                                      String namespace,
                                                      String loc) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : grammar.getGlobalAttributeDecl(name, loc);
}
/**
 * Convenience method. Returns a top-level element declaration.
 * @param name The name of the declaration.
 * @param namespace The namespace of the definition, otherwise null.
 * @return A top-level element declaration or null if such declaration
 * does not exist.
 */
public XSElementDeclaration getElementDeclaration(String name,
                                                  String namespace) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : (XSElementDeclaration) grammar.fGlobalElemDecls.get(name);
}
/**
 * Convenience method. Returns a top-level element declaration.
 * @param name The name of the declaration.
 * @param namespace The namespace of the definition, otherwise null.
 * @param loc The schema location where the component was defined
 * @return A top-level element declaration or null if such declaration
 * does not exist.
 */
public XSElementDeclaration getElementDeclaration(String name,
                                                  String namespace,
                                                  String loc) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : grammar.getGlobalElementDecl(name, loc);
}
/**
 * Convenience method. Returns a top-level attribute group definition.
 * @param name The name of the definition.
 * @param namespace The namespace of the definition, otherwise null.
 * @return A top-level attribute group definition or null if such
 * definition does not exist.
 */
public XSAttributeGroupDefinition getAttributeGroup(String name,
                                                    String namespace) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : (XSAttributeGroupDefinition) grammar.fGlobalAttrGrpDecls.get(name);
}
/**
 * Convenience method. Returns a top-level attribute group definition.
 * @param name The name of the definition.
 * @param namespace The namespace of the definition, otherwise null.
 * @param loc The schema location where the component was defined
 * @return A top-level attribute group definition or null if such
 * definition does not exist.
 */
public XSAttributeGroupDefinition getAttributeGroup(String name,
                                                    String namespace,
                                                    String loc) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : grammar.getGlobalAttributeGroupDecl(name, loc);
}
/**
 * Convenience method. Returns a top-level model group definition.
 *
 * @param name The name of the definition.
 * @param namespace The namespace of the definition, otherwise null.
 * @return A top-level model group definition or null if such
 * definition does not exist.
 */
public XSModelGroupDefinition getModelGroupDefinition(String name,
                                                      String namespace) {
    final SchemaGrammar grammar =
        (SchemaGrammar) fGrammarMap.get(null2EmptyString(namespace));
    return (grammar == null) ? null : (XSModelGroupDefinition) grammar.fGlobalGroupDecls.get(name);
}
/**
* Convenience method. Returns a top-level model group definition.
*
* @param name The name of the definition.
* @param namespace The namespace of the definition, otherwise null.
* @param loc The schema location where the component was defined
* @return A top-level model group definition definition or null if such
* definition does not exist.
*/
    public XSModelGroupDefinition getModelGroupDefinition(String name,
                                                          String namespace,
                                                          String loc) {
        // A null namespace maps to the empty-string key in the grammar map.
        SchemaGrammar sg = (SchemaGrammar)fGrammarMap.get(null2EmptyString(namespace));
        if (sg == null) {
            // No grammar registered for this namespace: no such component.
            return null;
        }
        // Location-aware lookup: resolves the declaration from schema location 'loc'.
        return sg.getGlobalGroupDecl(name, loc);
    }
    /**
     * Convenience method. Returns a top-level identity-constraint definition.
     * (Note: the previous doc text said "model group definition" — a
     * copy/paste error; this method looks up identity constraints.)
     *
     * @param name The name of the definition.
     * @param namespace The namespace of the definition, otherwise null.
     * @return A top-level identity-constraint definition or null if such
     * definition does not exist.
     */
    public XSIDCDefinition getIDCDefinition(String name, String namespace) {
        SchemaGrammar sg = (SchemaGrammar)fGrammarMap.get(null2EmptyString(namespace));
        if (sg == null) {
            return null;
        }
        return (XSIDCDefinition)sg.fGlobalIDConstraintDecls.get(name);
    }
    /**
     * Convenience method. Returns a top-level identity-constraint definition
     * declared in the document at the given schema location. (The previous
     * doc text said "model group definition" — a copy/paste error.)
     *
     * @param name The name of the definition.
     * @param namespace The namespace of the definition, otherwise null.
     * @param loc The schema location where the component was defined
     * @return A top-level identity-constraint definition or null if such
     * definition does not exist.
     */
    public XSIDCDefinition getIDCDefinition(String name, String namespace,
                                            String loc) {
        SchemaGrammar sg = (SchemaGrammar)fGrammarMap.get(null2EmptyString(namespace));
        if (sg == null) {
            return null;
        }
        return sg.getIDConstraintDecl(name, loc);
    }
/**
* @see org.apache.xerces.xs.XSModel#getNotationDeclaration(String, String)
*/
public XSNotationDeclaration getNotationDeclaration(String name,
String namespace) {
SchemaGrammar sg = (SchemaGrammar)fGrammarMap.get(null2EmptyString(namespace));
if (sg == null) {
return null;
}
return (XSNotationDeclaration)sg.fGlobalNotationDecls.get(name);
}
    /**
     * Convenience method. Returns the top-level notation declaration with the
     * given name that was declared in the document at schema location
     * {@code loc}.
     *
     * @param name The name of the declaration.
     * @param namespace The namespace of the declaration, otherwise null.
     * @param loc The schema location where the component was defined
     * @return A top-level notation declaration or null if no such declaration
     *   exists.
     */
    public XSNotationDeclaration getNotationDeclaration(String name,
                                                        String namespace,
                                                        String loc) {
        // A null namespace maps to the empty-string key in the grammar map.
        SchemaGrammar sg = (SchemaGrammar)fGrammarMap.get(null2EmptyString(namespace));
        if (sg == null) {
            return null;
        }
        return sg.getGlobalNotationDecl(name, loc);
    }
/**
* [annotations]: a set of annotations if it exists, otherwise an empty
* <code>XSObjectList</code>.
*/
    public synchronized XSObjectList getAnnotations() {
        // Return the lazily-built cached list on subsequent calls.
        if (fAnnotations != null) {
            return fAnnotations;
        }
        // do this in two passes to avoid inaccurate array size
        int totalAnnotations = 0;
        for (int i = 0; i < fGrammarCount; i++) {
            totalAnnotations += fGrammarList[i].fNumAnnotations;
        }
        if (totalAnnotations == 0) {
            // Cache the shared empty list so later calls return immediately.
            fAnnotations = XSObjectListImpl.EMPTY_LIST;
            return fAnnotations;
        }
        XSAnnotationImpl [] annotations = new XSAnnotationImpl [totalAnnotations];
        int currPos = 0;
        // Second pass: concatenate every grammar's annotations into one array.
        for (int i = 0; i < fGrammarCount; i++) {
            SchemaGrammar currGrammar = fGrammarList[i];
            if (currGrammar.fNumAnnotations > 0) {
                System.arraycopy(currGrammar.fAnnotations, 0, annotations, currPos, currGrammar.fNumAnnotations);
                currPos += currGrammar.fNumAnnotations;
            }
        }
        fAnnotations = new XSObjectListImpl(annotations, annotations.length);
        return fAnnotations;
    }
private static final String null2EmptyString(String str) {
return str == null ? XMLSymbols.EMPTY_STRING : str;
}
/**
* REVISIT: to expose identity constraints from XSModel.
* For now, we only expose whether there are any IDCs.
* We also need to add these methods to the public
* XSModel interface.
*/
    public boolean hasIDConstraints() {
        // Precomputed flag; true if any grammar in this model declares an IDC.
        // NOTE(review): flag is set elsewhere in the class — not visible here.
        return fHasIDC;
    }
/**
* Convenience method. Returns a list containing the members of the
* substitution group for the given <code>XSElementDeclaration</code>
* or an empty <code>XSObjectList</code> if the substitution group
* contains no members.
* @param head The substitution group head.
* @return A list containing the members of the substitution group
* for the given <code>XSElementDeclaration</code> or an empty
* <code>XSObjectList</code> if the substitution group contains
* no members.
*/
    public XSObjectList getSubstitutionGroup(XSElementDeclaration head) {
        // NOTE(review): a head with no map entry yields null here, while the
        // javadoc promises an empty XSObjectList — verify callers tolerate null.
        return (XSObjectList)fSubGroupMap.get(head);
    }
//
// XSNamespaceItemList methods
//
/**
* The number of <code>XSNamespaceItem</code>s in the list. The range of
* valid child object indices is 0 to <code>length-1</code> inclusive.
*/
    public int getLength() {
        // One XSNamespaceItem per grammar held by this model.
        return fGrammarCount;
    }
/**
* Returns the <code>index</code>th item in the collection or
* <code>null</code> if <code>index</code> is greater than or equal to
* the number of objects in the list. The index starts at 0.
* @param index index into the collection.
* @return The <code>XSNamespaceItem</code> at the <code>index</code>th
* position in the <code>XSNamespaceItemList</code>, or
* <code>null</code> if the index specified is not valid.
*/
public XSNamespaceItem item(int index) {
if (index < 0 || index >= fGrammarCount) {
return null;
}
return fGrammarList[index];
}
//
// java.util.List methods
//
public Object get(int index) {
if (index >= 0 && index < fGrammarCount) {
return fGrammarList[index];
}
throw new IndexOutOfBoundsException("Index: " + index);
}
    public int size() {
        // java.util.List view: identical to the XSNamespaceItemList length.
        return getLength();
    }
    public Iterator iterator() {
        // Iteration starts at the first grammar.
        return listIterator0(0);
    }
    public ListIterator listIterator() {
        // Equivalent to listIterator(0).
        return listIterator0(0);
    }
public ListIterator listIterator(int index) {
if (index >= 0 && index < fGrammarCount) {
return listIterator0(index);
}
throw new IndexOutOfBoundsException("Index: " + index);
}
    // Shared factory for all iterator views; 'index' is the initial cursor.
    private ListIterator listIterator0(int index) {
        return new XSNamespaceItemListIterator(index);
    }
    public Object[] toArray() {
        // Fresh array each call; callers may mutate it freely.
        Object[] a = new Object[fGrammarCount];
        toArray0(a);
        return a;
    }
public Object[] toArray(Object[] a) {
if (a.length < fGrammarCount) {
Class arrayClass = a.getClass();
Class componentType = arrayClass.getComponentType();
a = (Object[]) Array.newInstance(componentType, fGrammarCount);
}
toArray0(a);
if (a.length > fGrammarCount) {
a[fGrammarCount] = null;
}
return a;
}
    // Copies the first fGrammarCount grammars into the destination array.
    private void toArray0(Object[] a) {
        if (fGrammarCount > 0) {
            System.arraycopy(fGrammarList, 0, a, 0, fGrammarCount);
        }
    }
    /** Read-only ListIterator over the model's grammar list; mutators throw. */
    private final class XSNamespaceItemListIterator implements ListIterator {
        // Cursor: index of the element the next call to next() would return.
        private int index;
        public XSNamespaceItemListIterator(int index) {
            this.index = index;
        }
        public boolean hasNext() {
            return (index < fGrammarCount);
        }
        public Object next() {
            if (index < fGrammarCount) {
                // Return the element at the cursor, then advance.
                return fGrammarList[index++];
            }
            throw new NoSuchElementException();
        }
        public boolean hasPrevious() {
            return (index > 0);
        }
        public Object previous() {
            if (index > 0) {
                // Retreat the cursor, then return the element it lands on.
                return fGrammarList[--index];
            }
            throw new NoSuchElementException();
        }
        public int nextIndex() {
            return index;
        }
        public int previousIndex() {
            return index - 1;
        }
        // The grammar list is immutable through this view.
        public void remove() {
            throw new UnsupportedOperationException();
        }
        public void set(Object o) {
            throw new UnsupportedOperationException();
        }
        public void add(Object o) {
            throw new UnsupportedOperationException();
        }
    }
} // class XSModelImpl
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Objects;
import java.util.Collection;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
* A collection which forwards all its method calls to another collection.
* Subclasses should override one or more methods to modify the behavior of the
* backing collection as desired per the <a
* href="http://en.wikipedia.org/wiki/Decorator_pattern">decorator pattern</a>.
*
* <p><b>Warning:</b> The methods of {@code ForwardingCollection} forward
* <b>indiscriminately</b> to the methods of the delegate. For example,
* overriding {@link #add} alone <b>will not</b> change the behavior of {@link
* #addAll}, which can lead to unexpected behavior. In this case, you should
* override {@code addAll} as well, either providing your own implementation, or
* delegating to the provided {@code standardAddAll} method.
*
* <p>The {@code standard} methods are not guaranteed to be thread-safe, even
* when all of the methods that they depend on are thread-safe.
*
* @author Kevin Bourrillion
* @author Louis Wasserman
* @since 2.0 (imported from Google Collections Library)
*/
@GwtCompatible
public abstract class ForwardingCollection<E> extends ForwardingObject
    implements Collection<E> {
  // TODO(user): identify places where thread safety is actually lost
  /** Constructor for use by subclasses. */
  protected ForwardingCollection() {}
  @Override protected abstract Collection<E> delegate();
  @Override
  public Iterator<E> iterator() {
    return delegate().iterator();
  }
  @Override
  public int size() {
    return delegate().size();
  }
  @Override
  public boolean removeAll(Collection<?> collection) {
    return delegate().removeAll(collection);
  }
  @Override
  public boolean isEmpty() {
    return delegate().isEmpty();
  }
  @Override
  public boolean contains(Object object) {
    return delegate().contains(object);
  }
  @Override
  public boolean add(E element) {
    return delegate().add(element);
  }
  @Override
  public boolean remove(Object object) {
    return delegate().remove(object);
  }
  @Override
  public boolean containsAll(Collection<?> collection) {
    return delegate().containsAll(collection);
  }
  @Override
  public boolean addAll(Collection<? extends E> collection) {
    return delegate().addAll(collection);
  }
  @Override
  public boolean retainAll(Collection<?> collection) {
    return delegate().retainAll(collection);
  }
  @Override
  public void clear() {
    delegate().clear();
  }
  @Override
  public Object[] toArray() {
    return delegate().toArray();
  }
  @Override
  public <T> T[] toArray(T[] array) {
    return delegate().toArray(array);
  }
  /**
   * A sensible definition of {@link #contains} in terms of {@link #iterator}.
   * If you override {@link #iterator}, you may wish to override {@link
   * #contains} to forward to this implementation.
   *
   * @since 7.0
   */
  protected boolean standardContains(@Nullable Object object) {
    return Iterators.contains(iterator(), object);
  }
  /**
   * A sensible definition of {@link #containsAll} in terms of {@link #contains}
   * . If you override {@link #contains}, you may wish to override {@link
   * #containsAll} to forward to this implementation.
   *
   * @since 7.0
   */
  protected boolean standardContainsAll(Collection<?> collection) {
    return Collections2.containsAllImpl(this, collection);
  }
  /**
   * A sensible definition of {@link #addAll} in terms of {@link #add}. If you
   * override {@link #add}, you may wish to override {@link #addAll} to forward
   * to this implementation.
   *
   * @since 7.0
   */
  protected boolean standardAddAll(Collection<? extends E> collection) {
    return Iterators.addAll(this, collection.iterator());
  }
  /**
   * A sensible definition of {@link #remove} in terms of {@link #iterator},
   * using the iterator's {@code remove} method. If you override {@link
   * #iterator}, you may wish to override {@link #remove} to forward to this
   * implementation.
   *
   * @since 7.0
   */
  protected boolean standardRemove(@Nullable Object object) {
    Iterator<E> iterator = iterator();
    while (iterator.hasNext()) {
      if (Objects.equal(iterator.next(), object)) {
        iterator.remove();
        return true;
      }
    }
    return false;
  }
  /**
   * A sensible definition of {@link #removeAll} in terms of {@link #iterator},
   * using the iterator's {@code remove} method. If you override {@link
   * #iterator}, you may wish to override {@link #removeAll} to forward to this
   * implementation.
   *
   * @since 7.0
   */
  protected boolean standardRemoveAll(Collection<?> collection) {
    return Iterators.removeAll(iterator(), collection);
  }
  /**
   * A sensible definition of {@link #retainAll} in terms of {@link #iterator},
   * using the iterator's {@code remove} method. If you override {@link
   * #iterator}, you may wish to override {@link #retainAll} to forward to this
   * implementation.
   *
   * @since 7.0
   */
  protected boolean standardRetainAll(Collection<?> collection) {
    return Iterators.retainAll(iterator(), collection);
  }
  /**
   * A sensible definition of {@link #clear} in terms of {@link #iterator},
   * using the iterator's {@code remove} method. If you override {@link
   * #iterator}, you may wish to override {@link #clear} to forward to this
   * implementation.
   *
   * @since 7.0
   */
  protected void standardClear() {
    Iterators.clear(iterator());
  }
  /**
   * A sensible definition of {@link #isEmpty} as {@code !iterator().hasNext}.
   * If you override {@link #iterator}, you may wish to override {@link #isEmpty}
   * to forward to this implementation. Alternately, it may be more efficient to
   * implement {@code isEmpty} as {@code size() == 0}.
   *
   * @since 7.0
   */
  protected boolean standardIsEmpty() {
    return !iterator().hasNext();
  }
  /**
   * A sensible definition of {@link #toString} in terms of {@link #iterator}.
   * If you override {@link #iterator}, you may wish to override {@link
   * #toString} to forward to this implementation.
   *
   * @since 7.0
   */
  protected String standardToString() {
    return Collections2.toStringImpl(this);
  }
  /**
   * A sensible definition of {@link #toArray()} in terms of {@link
   * #toArray(Object[])}. If you override {@link #toArray(Object[])}, you may
   * wish to override {@link #toArray} to forward to this implementation.
   *
   * @since 7.0
   */
  protected Object[] standardToArray() {
    Object[] newArray = new Object[size()];
    return toArray(newArray);
  }
  /**
   * A sensible definition of {@link #toArray(Object[])} in terms of {@link
   * #size} and {@link #iterator}. If you override either of these methods, you
   * may wish to override {@link #toArray} to forward to this implementation.
   *
   * @since 7.0
   */
  protected <T> T[] standardToArray(T[] array) {
    return ObjectArrays.toArrayImpl(this, array);
  }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.luciddbbulkloader;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.SourceToTargetMapping;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.luciddbbulkloader.LucidDBBulkLoaderMeta;
import org.pentaho.di.ui.core.database.dialog.DatabaseExplorerDialog;
import org.pentaho.di.ui.core.database.dialog.SQLEditor;
import org.pentaho.di.ui.core.dialog.EnterMappingDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.ui.trans.step.TableItemInsertListener;
/**
* Dialog class for the LucidDB bulk loader step.
*
*/
public class LucidDBBulkLoaderDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = LucidDBBulkLoaderMeta.class; // for i18n purposes
  // Database connection selector.
  private CCombo wConnection;
  // Target schema (label + variable-aware text field).
  private Label wlSchema;
  private TextVar wSchema;
  private FormData fdlSchema, fdSchema;
  // Target table (label, browse button, variable-aware text field).
  private Label wlTable;
  private Button wbTable;
  private TextVar wTable;
  private FormData fdlTable, fdbTable, fdTable;
  // FIFO path (label, browse button, variable-aware text field).
  private Label wlFifoPath;
  private Button wbFifoPath;
  private TextVar wFifoPath;
  private FormData fdlFifoPath, fdbFifoPath, fdFifoPath;
  // FIFO server (label + variable-aware text field).
  private Label wlFifoServer;
  private TextVar wFifoServer;
  private FormData fdlFifoServer, fdFifoServer;
  // Table-field / stream-field mapping grid.
  private Label wlReturn;
  private TableView wReturn;
  private FormData fdlReturn, fdReturn;
  // "Get fields" button and its listener.
  private Button wGetLU;
  private FormData fdGetLU;
  private Listener lsGetLU;
  // Maximum number of errors (label + variable-aware text field).
  private Label wlMaxErrors;
  private TextVar wMaxErrors;
  private FormData fdlMaxErrors, fdMaxErrors;
  // "Edit mapping" button.
  private Button wDoMapping;
  private FormData fdDoMapping;
  // The step metadata being edited by this dialog.
  private LucidDBBulkLoaderMeta input;
  /**
   * @param parent the parent shell
   * @param in the step metadata to edit (a {@link LucidDBBulkLoaderMeta})
   * @param transMeta the transformation the step belongs to
   * @param sname the step name
   */
  public LucidDBBulkLoaderDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
    super( parent, (BaseStepMeta) in, transMeta, sname );
    input = (LucidDBBulkLoaderMeta) in;
  }
  /**
   * Builds and lays out the dialog, wires its listeners, opens it, and runs
   * the SWT event loop until the shell is disposed.
   *
   * @return the step name if OK was pressed, or null if the dialog was cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();
    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    setShellImage( shell, input );
    // Any widget edit flags the step metadata as changed.
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    changed = input.hasChanged();
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;
    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Shell.Title" ) );
    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;
    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.right = new FormAttachment( middle, -margin );
    fdlStepname.top = new FormAttachment( 0, margin );
    wlStepname.setLayoutData( fdlStepname );
    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );
    // Connection line
    wConnection = addConnectionLine( shell, wStepname, middle, margin );
    if ( input.getDatabaseMeta() == null && transMeta.nrDatabases() == 1 ) {
      // Exactly one connection defined: preselect it.
      wConnection.select( 0 );
    }
    wConnection.addModifyListener( lsMod );
    // Schema line...
    wlSchema = new Label( shell, SWT.RIGHT );
    wlSchema.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.TargetSchema.Label" ) );
    props.setLook( wlSchema );
    fdlSchema = new FormData();
    fdlSchema.left = new FormAttachment( 0, 0 );
    fdlSchema.right = new FormAttachment( middle, -margin );
    fdlSchema.top = new FormAttachment( wConnection, margin * 2 );
    wlSchema.setLayoutData( fdlSchema );
    wSchema = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wSchema );
    wSchema.addModifyListener( lsMod );
    fdSchema = new FormData();
    fdSchema.left = new FormAttachment( middle, 0 );
    fdSchema.top = new FormAttachment( wConnection, margin * 2 );
    fdSchema.right = new FormAttachment( 100, 0 );
    wSchema.setLayoutData( fdSchema );
    // Table line...
    wlTable = new Label( shell, SWT.RIGHT );
    wlTable.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.TargetTable.Label" ) );
    props.setLook( wlTable );
    fdlTable = new FormData();
    fdlTable.left = new FormAttachment( 0, 0 );
    fdlTable.right = new FormAttachment( middle, -margin );
    fdlTable.top = new FormAttachment( wSchema, margin );
    wlTable.setLayoutData( fdlTable );
    wbTable = new Button( shell, SWT.PUSH | SWT.CENTER );
    props.setLook( wbTable );
    wbTable.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Browse.Button" ) );
    fdbTable = new FormData();
    fdbTable.right = new FormAttachment( 100, 0 );
    fdbTable.top = new FormAttachment( wSchema, margin );
    wbTable.setLayoutData( fdbTable );
    wTable = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wTable );
    wTable.addModifyListener( lsMod );
    fdTable = new FormData();
    fdTable.left = new FormAttachment( middle, 0 );
    fdTable.top = new FormAttachment( wSchema, margin );
    fdTable.right = new FormAttachment( wbTable, -margin );
    wTable.setLayoutData( fdTable );
    // MaxErrors file line
    wlMaxErrors = new Label( shell, SWT.RIGHT );
    wlMaxErrors.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.MaxErrors.Label" ) );
    props.setLook( wlMaxErrors );
    fdlMaxErrors = new FormData();
    fdlMaxErrors.left = new FormAttachment( 0, 0 );
    fdlMaxErrors.top = new FormAttachment( wTable, margin );
    fdlMaxErrors.right = new FormAttachment( middle, -margin );
    wlMaxErrors.setLayoutData( fdlMaxErrors );
    wMaxErrors = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wMaxErrors );
    wMaxErrors.addModifyListener( lsMod );
    fdMaxErrors = new FormData();
    fdMaxErrors.left = new FormAttachment( middle, 0 );
    fdMaxErrors.top = new FormAttachment( wTable, margin );
    fdMaxErrors.right = new FormAttachment( 100, 0 );
    wMaxErrors.setLayoutData( fdMaxErrors );
    // Fifo directory line...
    //
    wlFifoPath = new Label( shell, SWT.RIGHT );
    wlFifoPath.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.FifoPath.Label" ) );
    props.setLook( wlFifoPath );
    fdlFifoPath = new FormData();
    fdlFifoPath.left = new FormAttachment( 0, 0 );
    fdlFifoPath.right = new FormAttachment( middle, -margin );
    fdlFifoPath.top = new FormAttachment( wMaxErrors, margin );
    wlFifoPath.setLayoutData( fdlFifoPath );
    wbFifoPath = new Button( shell, SWT.PUSH | SWT.CENTER );
    props.setLook( wbFifoPath );
    wbFifoPath.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Browse.Button" ) );
    fdbFifoPath = new FormData();
    fdbFifoPath.right = new FormAttachment( 100, 0 );
    fdbFifoPath.top = new FormAttachment( wMaxErrors, margin );
    wbFifoPath.setLayoutData( fdbFifoPath );
    wFifoPath = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wFifoPath );
    wFifoPath.addModifyListener( lsMod );
    fdFifoPath = new FormData();
    fdFifoPath.left = new FormAttachment( middle, 0 );
    fdFifoPath.top = new FormAttachment( wMaxErrors, margin );
    fdFifoPath.right = new FormAttachment( wbFifoPath, -margin );
    wFifoPath.setLayoutData( fdFifoPath );
    // FifoServer line...
    wlFifoServer = new Label( shell, SWT.RIGHT );
    wlFifoServer.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.FifoServer.Label" ) );
    props.setLook( wlFifoServer );
    fdlFifoServer = new FormData();
    fdlFifoServer.left = new FormAttachment( 0, 0 );
    fdlFifoServer.right = new FormAttachment( middle, -margin );
    fdlFifoServer.top = new FormAttachment( wFifoPath, margin * 2 );
    wlFifoServer.setLayoutData( fdlFifoServer );
    wFifoServer = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wFifoServer );
    wFifoServer.addModifyListener( lsMod );
    fdFifoServer = new FormData();
    fdFifoServer.left = new FormAttachment( middle, 0 );
    fdFifoServer.top = new FormAttachment( wFifoPath, margin * 2 );
    fdFifoServer.right = new FormAttachment( 100, 0 );
    wFifoServer.setLayoutData( fdFifoServer );
    // THE BUTTONS
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( "System.Button.OK" ) );
    wSQL = new Button( shell, SWT.PUSH );
    wSQL.setText( BaseMessages.getString( "System.Button.SQL" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( "System.Button.Cancel" ) );
    setButtonPositions( new Button[] { wOK, wSQL, wCancel }, margin, null );
    // The field Table
    wlReturn = new Label( shell, SWT.NONE );
    wlReturn.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Fields.Label" ) );
    props.setLook( wlReturn );
    fdlReturn = new FormData();
    fdlReturn.left = new FormAttachment( 0, 0 );
    fdlReturn.top = new FormAttachment( wFifoServer, margin );
    wlReturn.setLayoutData( fdlReturn );
    int UpInsCols = 3;
    int UpInsRows = ( input.getFieldTable() != null ? input.getFieldTable().length : 1 );
    ColumnInfo[] ciReturn = new ColumnInfo[UpInsCols];
    ciReturn[0] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.ColumnInfo.TableField" ),
        ColumnInfo.COLUMN_TYPE_TEXT, false );
    ciReturn[1] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.ColumnInfo.StreamField" ),
        ColumnInfo.COLUMN_TYPE_TEXT, false );
    ciReturn[2] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.ColumnInfo.FormatOK" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "Y", "N", }, true );
    wReturn =
      new TableView(
        transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciReturn,
        UpInsRows, lsMod, props );
    wGetLU = new Button( shell, SWT.PUSH );
    wGetLU.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.GetFields.Label" ) );
    fdGetLU = new FormData();
    fdGetLU.top = new FormAttachment( wlReturn, margin );
    fdGetLU.right = new FormAttachment( 100, 0 );
    wGetLU.setLayoutData( fdGetLU );
    wDoMapping = new Button( shell, SWT.PUSH );
    wDoMapping.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.EditMapping.Label" ) );
    fdDoMapping = new FormData();
    fdDoMapping.top = new FormAttachment( wGetLU, margin );
    fdDoMapping.right = new FormAttachment( 100, 0 );
    wDoMapping.setLayoutData( fdDoMapping );
    wDoMapping.addListener( SWT.Selection, new Listener() {
      public void handleEvent( Event arg0 ) {
        generateMappings();
      }
    } );
    fdReturn = new FormData();
    fdReturn.left = new FormAttachment( 0, 0 );
    fdReturn.top = new FormAttachment( wlReturn, margin );
    fdReturn.right = new FormAttachment( wGetLU, -margin );
    fdReturn.bottom = new FormAttachment( wOK, -2 * margin );
    wReturn.setLayoutData( fdReturn );
    // Add listeners
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };
    lsGetLU = new Listener() {
      public void handleEvent( Event e ) {
        getUpdate();
      }
    };
    lsSQL = new Listener() {
      public void handleEvent( Event e ) {
        create();
      }
    };
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };
    wOK.addListener( SWT.Selection, lsOK );
    wGetLU.addListener( SWT.Selection, lsGetLU );
    wSQL.addListener( SWT.Selection, lsSQL );
    wCancel.addListener( SWT.Selection, lsCancel );
    // Pressing Enter in any text field acts as OK.
    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };
    wStepname.addSelectionListener( lsDef );
    wSchema.addSelectionListener( lsDef );
    wFifoPath.addSelectionListener( lsDef );
    wFifoServer.addSelectionListener( lsDef );
    wTable.addSelectionListener( lsDef );
    wMaxErrors.addSelectionListener( lsDef );
    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );
    wbTable.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        getTableName();
      }
    } );
    // Set the shell size, based upon previous time...
    setSize();
    getData();
    input.setChanged( changed );
    shell.open();
    // Standard SWT event loop: run until the shell is disposed.
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }
  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    logDebug( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Log.GettingKeyInfo" ) );
    wMaxErrors.setText( "" + input.getMaxErrors() );
    if ( input.getFieldTable() != null ) {
      // Populate the mapping grid. NOTE(review): fieldStream and fieldFormatOk
      // are indexed by the fieldTable loop — assumes the three arrays are
      // allocated in parallel by the meta class.
      for ( int i = 0; i < input.getFieldTable().length; i++ ) {
        TableItem item = wReturn.table.getItem( i );
        if ( input.getFieldTable()[i] != null ) {
          item.setText( 1, input.getFieldTable()[i] );
        }
        if ( input.getFieldStream()[i] != null ) {
          item.setText( 2, input.getFieldStream()[i] );
        }
        item.setText( 3, input.getFieldFormatOk()[i] ? "Y" : "N" );
      }
    }
    if ( input.getDatabaseMeta() != null ) {
      wConnection.setText( input.getDatabaseMeta().getName() );
    } else {
      // No connection chosen yet: default to the only one, if unambiguous.
      if ( transMeta.nrDatabases() == 1 ) {
        wConnection.setText( transMeta.getDatabase( 0 ).getName() );
      }
    }
    if ( input.getSchemaName() != null ) {
      wSchema.setText( input.getSchemaName() );
    }
    if ( input.getTableName() != null ) {
      wTable.setText( input.getTableName() );
    }
    if ( input.getFifoDirectory() != null ) {
      wFifoPath.setText( input.getFifoDirectory() );
    }
    if ( input.getFifoServerName() != null ) {
      wFifoServer.setText( input.getFifoServerName() );
    }
    wReturn.setRowNums();
    wReturn.optWidth( true );
    wStepname.selectAll();
    wStepname.setFocus();
  }
/**
* Reads in the fields from the previous steps and from the ONE next step and opens an EnterMappingDialog with this
* information. After the user did the mapping, those information is put into the Select/Rename table.
*/
private void generateMappings() {
// Determine the source and target fields...
//
RowMetaInterface sourceFields;
RowMetaInterface targetFields;
try {
sourceFields = transMeta.getPrevStepFields( stepMeta );
} catch ( KettleException e ) {
new ErrorDialog( shell,
BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.DoMapping.UnableToFindSourceFields.Title" ),
BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.DoMapping.UnableToFindSourceFields.Message" ), e );
return;
}
// refresh data
input.setDatabaseMeta( transMeta.findDatabase( wConnection.getText() ) );
input.setTableName( transMeta.environmentSubstitute( wTable.getText() ) );
StepMetaInterface stepMetaInterface = stepMeta.getStepMetaInterface();
try {
targetFields = stepMetaInterface.getRequiredFields( transMeta );
} catch ( KettleException e ) {
new ErrorDialog( shell,
BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.DoMapping.UnableToFindTargetFields.Title" ),
BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.DoMapping.UnableToFindTargetFields.Message" ), e );
return;
}
String[] inputNames = new String[sourceFields.size()];
for ( int i = 0; i < sourceFields.size(); i++ ) {
ValueMetaInterface value = sourceFields.getValueMeta( i );
inputNames[i] = value.getName() + EnterMappingDialog.STRING_ORIGIN_SEPARATOR + value.getOrigin() + ")";
}
// Create the existing mapping list...
//
List<SourceToTargetMapping> mappings = new ArrayList<SourceToTargetMapping>();
StringBuffer missingSourceFields = new StringBuffer();
StringBuffer missingTargetFields = new StringBuffer();
int nrFields = wReturn.nrNonEmpty();
for ( int i = 0; i < nrFields; i++ ) {
TableItem item = wReturn.getNonEmpty( i );
String source = item.getText( 2 );
String target = item.getText( 1 );
int sourceIndex = sourceFields.indexOfValue( source );
if ( sourceIndex < 0 ) {
missingSourceFields.append( Const.CR + " " + source + " --> " + target );
}
int targetIndex = targetFields.indexOfValue( target );
if ( targetIndex < 0 ) {
missingTargetFields.append( Const.CR + " " + source + " --> " + target );
}
if ( sourceIndex < 0 || targetIndex < 0 ) {
continue;
}
SourceToTargetMapping mapping = new SourceToTargetMapping( sourceIndex, targetIndex );
mappings.add( mapping );
}
// show a confirm dialog if some missing field was found
//
if ( missingSourceFields.length() > 0 || missingTargetFields.length() > 0 ) {
String message = "";
if ( missingSourceFields.length() > 0 ) {
message +=
BaseMessages.getString(
PKG, "LucidDBBulkLoaderDialog.DoMapping.SomeSourceFieldsNotFound", missingSourceFields.toString() )
+ Const.CR;
}
if ( missingTargetFields.length() > 0 ) {
message +=
BaseMessages.getString(
PKG, "LucidDBBulkLoaderDialog.DoMapping.SomeTargetFieldsNotFound", missingSourceFields.toString() )
+ Const.CR;
}
message += Const.CR;
message +=
BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.DoMapping.SomeFieldsNotFoundContinue" ) + Const.CR;
MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
boolean goOn =
MessageDialog.openConfirm( shell, BaseMessages.getString(
PKG, "LucidDBBulkLoaderDialog.DoMapping.SomeFieldsNotFoundTitle" ), message );
if ( !goOn ) {
return;
}
}
EnterMappingDialog d =
new EnterMappingDialog( LucidDBBulkLoaderDialog.this.shell, sourceFields.getFieldNames(), targetFields
.getFieldNames(), mappings );
mappings = d.open();
// mappings == null if the user pressed cancel
//
if ( mappings != null ) {
// Clear and re-populate!
//
wReturn.table.removeAll();
wReturn.table.setItemCount( mappings.size() );
for ( int i = 0; i < mappings.size(); i++ ) {
SourceToTargetMapping mapping = mappings.get( i );
TableItem item = wReturn.table.getItem( i );
item.setText( 2, sourceFields.getValueMeta( mapping.getSourcePosition() ).getName() );
item.setText( 1, targetFields.getValueMeta( mapping.getTargetPosition() ).getName() );
}
wReturn.setRowNums();
wReturn.optWidth( true );
}
}
/**
 * Discards any edits: restores the meta's original changed-flag and closes
 * the dialog, signalling "cancelled" to the caller via a null step name.
 */
private void cancel() {
  input.setChanged( changed );
  stepname = null; // null step name tells the caller the dialog was cancelled
  dispose();
}
/**
 * Copies the dialog's widget state into the given step metadata.
 *
 * @param inf the metadata object to populate (field mappings, connection,
 *            target table and FIFO settings)
 */
private void getInfo( LucidDBBulkLoaderMeta inf ) {
  int fieldCount = wReturn.nrNonEmpty();
  inf.allocate( fieldCount );

  inf.setMaxErrors( Const.toInt( wMaxErrors.getText(), 0 ) );

  logDebug( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Log.FoundFields", "" + fieldCount ) );
  //CHECKSTYLE:Indentation:OFF
  for ( int row = 0; row < fieldCount; row++ ) {
    TableItem rowItem = wReturn.getNonEmpty( row );
    inf.getFieldTable()[row] = rowItem.getText( 1 );  // target table column
    inf.getFieldStream()[row] = rowItem.getText( 2 ); // incoming stream field
    inf.getFieldFormatOk()[row] = "Y".equalsIgnoreCase( rowItem.getText( 3 ) );
  }

  // Connection and target-table settings.
  inf.setSchemaName( wSchema.getText() );
  inf.setTableName( wTable.getText() );
  inf.setDatabaseMeta( transMeta.findDatabase( wConnection.getText() ) );
  inf.setFifoDirectory( wFifoPath.getText() );
  inf.setFifoServerName( wFifoServer.getText() );

  stepname = wStepname.getText(); // return value
}
/**
 * Accepts the dialog: validates the input, pushes the widget state into the
 * step metadata and closes the dialog.
 */
private void ok() {
  if ( Const.isEmpty( wStepname.getText() ) ) {
    return; // no step name -> nothing to accept
  }

  // Get the information for the dialog into the input structure.
  getInfo( input );

  if ( input.getDatabaseMeta() == null ) {
    MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
    mb.setMessage( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.InvalidConnection.DialogMessage" ) );
    mb.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.InvalidConnection.DialogTitle" ) );
    mb.open();
    // Bug fix: previously execution fell through to dispose(), closing the
    // dialog even though the connection was invalid. Keep it open so the
    // user can correct the connection.
    return;
  }

  dispose();
}
/**
 * Opens the database explorer for the currently selected connection and, if
 * the user picks a table, copies the chosen schema/table into the widgets.
 * Shows an error box when no valid connection is selected.
 */
private void getTableName() {
  // New class: SelectTableDialog
  int selectedIndex = wConnection.getSelectionIndex();
  DatabaseMeta databaseMeta = selectedIndex >= 0 ? transMeta.getDatabase( selectedIndex ) : null;

  if ( databaseMeta == null ) {
    // No usable connection selected: tell the user and bail out.
    MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
    mb.setMessage( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.InvalidConnection.DialogMessage" ) );
    mb.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.InvalidConnection.DialogTitle" ) );
    mb.open();
    return;
  }

  logDebug( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.Log.LookingAtConnection" ) + databaseMeta.toString() );

  DatabaseExplorerDialog explorer = new DatabaseExplorerDialog( shell, SWT.NONE, databaseMeta, transMeta.getDatabases() );
  explorer.setSelectedSchemaAndTable( wSchema.getText(), wTable.getText() );
  if ( explorer.open() ) {
    wSchema.setText( Const.NVL( explorer.getSchemaName(), "" ) );
    wTable.setText( Const.NVL( explorer.getTableName(), "" ) );
  }
}
/**
 * Populates the mapping grid with the fields coming from the previous step.
 * Shows an error dialog when the previous step's fields cannot be retrieved.
 */
private void getUpdate() {
  try {
    RowMetaInterface r = transMeta.getPrevStepFields( stepname );
    if ( r != null ) {
      TableItemInsertListener listener = new TableItemInsertListener() {
        public boolean tableItemInserted( TableItem tableItem, ValueMetaInterface v ) {
          // "Format OK" defaults to Y for every type, dates included. The
          // original code had an if/else on TYPE_DATE whose branches both
          // set "Y"; the dead branch is collapsed here (behavior unchanged).
          tableItem.setText( 3, "Y" );
          return true;
        }
      };
      BaseStepDialog.getFieldsFromPrevious( r, wReturn, 1, new int[] { 1, 2 }, new int[] {}, -1, -1, listener );
    }
  } catch ( KettleException ke ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.FailedToGetFields.DialogTitle" ),
      BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.FailedToGetFields.DialogMessage" ), ke );
  }
}
/**
 * Generates the SQL (create table, etc.) required for the configured target
 * table and shows it in the SQL editor. Conversions are done by Database.
 * Errors during generation are surfaced in message/error dialogs.
 */
private void create() {
  try {
    LucidDBBulkLoaderMeta loaderMeta = new LucidDBBulkLoaderMeta();
    getInfo( loaderMeta );

    String name = stepname; // new name might not yet be linked to other steps!
    StepMeta virtualStepMeta =
      new StepMeta( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.StepMeta.Title" ), name, loaderMeta );
    RowMetaInterface prevFields = transMeta.getPrevStepFields( stepname );

    SQLStatement sql = loaderMeta.getSQLStatements( transMeta, virtualStepMeta, prevFields, repository, metaStore );
    if ( sql.hasError() ) {
      // SQL generation failed: show the error text.
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
      mb.setMessage( sql.getError() );
      mb.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.SQLError.DialogTitle" ) );
      mb.open();
    } else if ( sql.hasSQL() ) {
      SQLEditor sqlEditor =
        new SQLEditor( transMeta, shell, SWT.NONE, loaderMeta.getDatabaseMeta(), transMeta.getDbCache(), sql
          .getSQL() );
      sqlEditor.open();
    } else {
      // Nothing to do: the target table is already up to date.
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
      mb.setMessage( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.NoSQLNeeds.DialogMessage" ) );
      mb.setText( BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.NoSQLNeeds.DialogTitle" ) );
      mb.open();
    }
  } catch ( KettleException ke ) {
    new ErrorDialog(
      shell, BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.CouldNotBuildSQL.DialogTitle" ),
      BaseMessages.getString( PKG, "LucidDBBulkLoaderDialog.CouldNotBuildSQL.DialogMessage" ), ke );
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kestrel;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import net.spy.memcached.ConnectionFactory;
import net.spy.memcached.ConnectionFactoryBuilder;
import net.spy.memcached.FailureMode;
import net.spy.memcached.MemcachedClient;
import org.apache.camel.CamelContext;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.spi.Metadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Camel component which offers queueing over the Memcached protocol
* as supported by Kestrel.
*/
/**
 * Camel component which offers queueing over the Memcached protocol
 * as supported by Kestrel.
 */
public class KestrelComponent extends UriEndpointComponent {
    private static final Logger LOG = LoggerFactory.getLogger(KestrelComponent.class);

    private ConnectionFactory memcachedConnectionFactory;

    /**
     * We cache the memcached clients by queue for reuse. A plain HashMap is
     * not safe for concurrent access, so EVERY read and write of this map
     * must be synchronized on the map itself (the previous code read it
     * unsynchronized before locking — a data race).
     */
    private final Map<String, MemcachedClient> memcachedClientCache = new HashMap<>();

    @Metadata(label = "advanced")
    private KestrelConfiguration configuration;

    public KestrelComponent() {
        this(new KestrelConfiguration());
    }

    public KestrelComponent(KestrelConfiguration configuration) {
        super(KestrelEndpoint.class);
        this.configuration = configuration;
    }

    public KestrelComponent(CamelContext context) {
        super(context, KestrelEndpoint.class);
        configuration = new KestrelConfiguration();
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();
        ConnectionFactoryBuilder builder = new ConnectionFactoryBuilder();
        // VERY IMPORTANT! Otherwise, spymemcached optimizes away concurrent gets
        builder.setShouldOptimize(false);
        // We never want spymemcached to time out
        builder.setOpTimeout(9999999);
        // Retry upon failure
        builder.setFailureMode(FailureMode.Retry);
        memcachedConnectionFactory = builder.build();
    }

    public KestrelConfiguration getConfiguration() {
        return configuration;
    }

    /**
     * To use a shared configured configuration as base for creating new endpoints.
     */
    public void setConfiguration(KestrelConfiguration configuration) {
        this.configuration = configuration;
    }

    /**
     * Creates a {@link KestrelEndpoint} for the given URI, resolving the queue
     * name and the (optional) address list from the URI.
     *
     * @param uri        the full endpoint URI
     * @param remaining  the URI part after the scheme (unused; parsing uses the full URI)
     * @param parameters URI query parameters, applied to the copied configuration
     * @return the endpoint bound to the resolved queue
     * @throws IllegalArgumentException if no queue, or no addresses, can be determined
     */
    @Override
    protected KestrelEndpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // Copy the configuration as each endpoint can override defaults
        KestrelConfiguration config = getConfiguration().copy();

        // Parse the URI, expected to be in one of the following formats:
        // 1. Use the base KestrelConfiguration for host addresses:
        //      kestrel://queue[?parameters]
        //      kestrel:///queue[?parameters]
        // 2. Override the host, but use the default port:
        //      kestrel://host/queue[?parameters]
        // 3. Override the host and port:
        //      kestrel://host:port/queue[?parameters]
        // 4. Supply a list of host addresses:
        //      kestrel://host[:port],host[:port]/queue[?parameters]
        URI u = new URI(uri);
        String queue;
        String[] addresses = null;
        if (u.getPath() == null || "".equals(u.getPath())) {
            // This would be the case when they haven't specified any explicit
            // address(es), and the queue ends up in the "authority" portion of
            // the URI. For example:
            //      kestrel://queue[?parameters]
            queue = u.getAuthority();
        } else if (u.getAuthority() == null || "".equals(u.getAuthority())) {
            // The "path" was present without an authority, such as:
            //      kestrel:///queue[?parameters]
            queue = u.getPath();
        } else {
            // Both "path" and "authority" were present in the URI, which
            // means both address(es) and the queue were specified, i.e.:
            //      kestrel://host/queue[?parameters]
            //      kestrel://host:port/queue[?parameters]
            //      kestrel://host[:port],host[:port]/queue[?parameters]
            addresses = u.getAuthority().split(",");
            queue = u.getPath();
        }

        // Trim off any slash(es), i.e. "/queue/" -> "queue"
        while (queue.startsWith("/")) {
            queue = queue.substring(1);
        }
        while (queue.endsWith("/")) {
            queue = queue.substring(0, queue.length() - 1);
        }

        if ("".equals(queue)) {
            // This would be the case if the URI didn't include a path, or if
            // the path was just "/" or something...throw an exception.
            throw new IllegalArgumentException("Queue not specified in endpoint URI: " + uri);
        }

        if (addresses != null && addresses.length > 0) {
            // Override the addresses on the copied config
            config.setAddresses(addresses);
        } else {
            // Explicit address(es) weren't specified on the URI, which is
            // no problem...just default the addresses to whatever was set on
            // the base KestrelConfiguration. And since we've already copied
            // the config, there's nothing else we need to do there. But let's
            // make sure the addresses field was indeed set on the base config.
            if (config.getAddresses() == null) {
                throw new IllegalArgumentException("Addresses not set in base configuration or endpoint: " + uri);
            }
        }

        LOG.info("Creating endpoint for queue \"{}\" on {}, parameters={}",
                queue, config.getAddressesAsString(), parameters);

        // Finally, override config with any supplied URI parameters
        setProperties(config, parameters);

        // Create the endpoint for the given queue with the config we built
        return new KestrelEndpoint(uri, this, config, queue);
    }

    /**
     * Returns the cached {@link MemcachedClient} for the given configuration
     * and queue, creating and caching one if none exists yet. Lookup and
     * creation happen under a single lock on the cache map; the previous
     * unsynchronized first read of the HashMap was a data race.
     *
     * @throws RuntimeCamelException if the client cannot be created
     */
    public MemcachedClient getMemcachedClient(KestrelConfiguration config, String queue) {
        String key = config.getAddressesAsString() + "/" + queue;
        synchronized (memcachedClientCache) {
            MemcachedClient memcachedClient = memcachedClientCache.get(key);
            if (memcachedClient == null) {
                LOG.info("Creating MemcachedClient for {}", key);
                try {
                    memcachedClient = new MemcachedClient(memcachedConnectionFactory, config.getInetSocketAddresses());
                } catch (Exception e) {
                    throw new RuntimeCamelException("Failed to connect to " + key, e);
                }
                memcachedClientCache.put(key, memcachedClient);
            }
            return memcachedClient;
        }
    }

    /**
     * Shuts down the given client and evicts it from the cache.
     * Failures are logged and swallowed on purpose (best-effort cleanup).
     */
    public void closeMemcachedClient(String key, MemcachedClient memcachedClient) {
        try {
            LOG.debug("Closing client connection to {}", key);
            memcachedClient.shutdown();
            // Mutation of the shared cache must also happen under the lock.
            synchronized (memcachedClientCache) {
                memcachedClientCache.remove(key);
            }
        } catch (Exception e) {
            LOG.warn("Failed to close client connection to " + key, e);
        }
    }

    @Override
    protected synchronized void doStop() throws Exception {
        // Use a copy so we can clear the memcached client cache eagerly
        Map<String, MemcachedClient> copy;
        synchronized (memcachedClientCache) {
            copy = new HashMap<>(memcachedClientCache);
            memcachedClientCache.clear();
        }
        for (Map.Entry<String, MemcachedClient> entry : copy.entrySet()) {
            closeMemcachedClient(entry.getKey(), entry.getValue());
        }
        super.doStop();
    }
}
| |
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.21.0.4666 modeling language!*/
import java.util.*;
// line 16 "TelephoneSystem.ump"
// line 71 "TelephoneSystem.ump"
/**
 * Generated Umple model class (see "PLEASE DO NOT EDIT" header): a telephone
 * number with its digits, a mandatory voice-mail box, an optional owning
 * "number" (for voicemail chaining), originated calls, and many-to-many links
 * to PhoneCall parties, Features and PhoneLines. The add/remove/set methods
 * keep both sides of each association consistent; do not hand-modify the
 * code, only the .ump model.
 */
public class TelephoneNumber
{

  //------------------------
  // MEMBER VARIABLES
  //------------------------

  //TelephoneNumber Attributes
  private String digits;

  //TelephoneNumber Associations
  private List<TelephoneNumber> voicemail;   // 1 -> * self-association; inverse is "number"
  private List<PhoneCall> call;              // calls this number originated; inverse is PhoneCall.originator
  private List<PhoneCall> phoneCalls;        // many-to-many; inverse is PhoneCall.party
  private TelephoneNumber number;            // owner of this entry in its voicemail list (may be null)
  private VoiceMailBox voiceMailBox;         // mandatory association
  private List<Feature> features;            // many-to-many with Feature
  private List<PhoneLine> phoneLines;        // many-to-many with PhoneLine

  //------------------------
  // CONSTRUCTOR
  //------------------------

  // The voice-mail box is mandatory: construction fails with a
  // RuntimeException if the association cannot be established.
  public TelephoneNumber(String aDigits, VoiceMailBox aVoiceMailBox)
  {
    digits = aDigits;
    voicemail = new ArrayList<TelephoneNumber>();
    call = new ArrayList<PhoneCall>();
    phoneCalls = new ArrayList<PhoneCall>();
    boolean didAddVoiceMailBox = setVoiceMailBox(aVoiceMailBox);
    if (!didAddVoiceMailBox)
    {
      throw new RuntimeException("Unable to create telephoneNumber due to voiceMailBox");
    }
    features = new ArrayList<Feature>();
    phoneLines = new ArrayList<PhoneLine>();
  }

  //------------------------
  // INTERFACE
  //------------------------

  public boolean setDigits(String aDigits)
  {
    boolean wasSet = false;
    digits = aDigits;
    wasSet = true;
    return wasSet;
  }

  public String getDigits()
  {
    return digits;
  }

  // --- Read-only accessors for the "voicemail" association ---

  public TelephoneNumber getVoicemail(int index)
  {
    TelephoneNumber aVoicemail = voicemail.get(index);
    return aVoicemail;
  }

  // Returns an unmodifiable view; mutate via addVoicemail/removeVoicemail.
  public List<TelephoneNumber> getVoicemail()
  {
    List<TelephoneNumber> newVoicemail = Collections.unmodifiableList(voicemail);
    return newVoicemail;
  }

  public int numberOfVoicemail()
  {
    int number = voicemail.size();
    return number;
  }

  public boolean hasVoicemail()
  {
    boolean has = voicemail.size() > 0;
    return has;
  }

  public int indexOfVoicemail(TelephoneNumber aVoicemail)
  {
    int index = voicemail.indexOf(aVoicemail);
    return index;
  }

  // --- Read-only accessors for the originated-"call" association ---

  public PhoneCall getCall(int index)
  {
    PhoneCall aCall = call.get(index);
    return aCall;
  }

  // Returns an unmodifiable view; mutate via addCall/removeCall.
  public List<PhoneCall> getCall()
  {
    List<PhoneCall> newCall = Collections.unmodifiableList(call);
    return newCall;
  }

  public int numberOfCall()
  {
    int number = call.size();
    return number;
  }

  public boolean hasCall()
  {
    boolean has = call.size() > 0;
    return has;
  }

  public int indexOfCall(PhoneCall aCall)
  {
    int index = call.indexOf(aCall);
    return index;
  }

  // --- Read-only accessors for the "phoneCalls" (party) association ---

  public PhoneCall getPhoneCall(int index)
  {
    PhoneCall aPhoneCall = phoneCalls.get(index);
    return aPhoneCall;
  }

  // Returns an unmodifiable view; mutate via addPhoneCall/removePhoneCall.
  public List<PhoneCall> getPhoneCalls()
  {
    List<PhoneCall> newPhoneCalls = Collections.unmodifiableList(phoneCalls);
    return newPhoneCalls;
  }

  public int numberOfPhoneCalls()
  {
    int number = phoneCalls.size();
    return number;
  }

  public boolean hasPhoneCalls()
  {
    boolean has = phoneCalls.size() > 0;
    return has;
  }

  public int indexOfPhoneCall(PhoneCall aPhoneCall)
  {
    int index = phoneCalls.indexOf(aPhoneCall);
    return index;
  }

  // Owning number of this voicemail entry; null when not in any voicemail list.
  public TelephoneNumber getNumber()
  {
    return number;
  }

  public boolean hasNumber()
  {
    boolean has = number != null;
    return has;
  }

  public VoiceMailBox getVoiceMailBox()
  {
    return voiceMailBox;
  }

  // --- Read-only accessors for the "features" association ---

  public Feature getFeature(int index)
  {
    Feature aFeature = features.get(index);
    return aFeature;
  }

  // Returns an unmodifiable view; mutate via addFeature/removeFeature.
  public List<Feature> getFeatures()
  {
    List<Feature> newFeatures = Collections.unmodifiableList(features);
    return newFeatures;
  }

  public int numberOfFeatures()
  {
    int number = features.size();
    return number;
  }

  public boolean hasFeatures()
  {
    boolean has = features.size() > 0;
    return has;
  }

  public int indexOfFeature(Feature aFeature)
  {
    int index = features.indexOf(aFeature);
    return index;
  }

  // --- Read-only accessors for the "phoneLines" association ---

  public PhoneLine getPhoneLine(int index)
  {
    PhoneLine aPhoneLine = phoneLines.get(index);
    return aPhoneLine;
  }

  // Returns an unmodifiable view; mutate via addPhoneLine/removePhoneLine.
  public List<PhoneLine> getPhoneLines()
  {
    List<PhoneLine> newPhoneLines = Collections.unmodifiableList(phoneLines);
    return newPhoneLines;
  }

  public int numberOfPhoneLines()
  {
    int number = phoneLines.size();
    return number;
  }

  public boolean hasPhoneLines()
  {
    boolean has = phoneLines.size() > 0;
    return has;
  }

  public int indexOfPhoneLine(PhoneLine aPhoneLine)
  {
    int index = phoneLines.indexOf(aPhoneLine);
    return index;
  }

  public static int minimumNumberOfVoicemail()
  {
    return 0;
  }

  // Adds a voicemail entry and maintains the inverse "number" reference.
  // An entry already owned by a different number is moved here.
  public boolean addVoicemail(TelephoneNumber aVoicemail)
  {
    boolean wasAdded = false;
    if (voicemail.contains(aVoicemail)) { return false; }
    TelephoneNumber existingNumber = aVoicemail.getNumber();
    if (existingNumber == null)
    {
      // Entry has no owner yet: setNumber(this) calls back into addVoicemail
      // and lands in the final else branch, doing the actual list insert.
      aVoicemail.setNumber(this);
    }
    else if (!this.equals(existingNumber))
    {
      // Re-seat from the previous owner, then retry the add.
      existingNumber.removeVoicemail(aVoicemail);
      addVoicemail(aVoicemail);
    }
    else
    {
      voicemail.add(aVoicemail);
    }
    wasAdded = true;
    return wasAdded;
  }

  // Removes the entry and clears its inverse "number" reference.
  public boolean removeVoicemail(TelephoneNumber aVoicemail)
  {
    boolean wasRemoved = false;
    if (voicemail.contains(aVoicemail))
    {
      voicemail.remove(aVoicemail);
      aVoicemail.setNumber(null);
      wasRemoved = true;
    }
    return wasRemoved;
  }

  // Positional insert; index is clamped to the valid range.
  public boolean addVoicemailAt(TelephoneNumber aVoicemail, int index)
  {
    boolean wasAdded = false;
    if(addVoicemail(aVoicemail))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfVoicemail()) { index = numberOfVoicemail() - 1; }
      voicemail.remove(aVoicemail);
      voicemail.add(index, aVoicemail);
      wasAdded = true;
    }
    return wasAdded;
  }

  // Moves an existing entry to the given index, or inserts it there.
  public boolean addOrMoveVoicemailAt(TelephoneNumber aVoicemail, int index)
  {
    boolean wasAdded = false;
    if(voicemail.contains(aVoicemail))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfVoicemail()) { index = numberOfVoicemail() - 1; }
      voicemail.remove(aVoicemail);
      voicemail.add(index, aVoicemail);
      wasAdded = true;
    }
    else
    {
      wasAdded = addVoicemailAt(aVoicemail, index);
    }
    return wasAdded;
  }

  public static int minimumNumberOfCall()
  {
    return 0;
  }

  // Convenience factory: creates a PhoneCall originated by this number.
  public PhoneCall addCall(String aIsOnHold, String aStartTime, String aDuration)
  {
    return new PhoneCall(aIsOnHold, aStartTime, aDuration, this);
  }

  // Adds an originated call; a call already originated elsewhere is
  // re-targeted via setOriginator (which inserts into this list).
  public boolean addCall(PhoneCall aCall)
  {
    boolean wasAdded = false;
    if (call.contains(aCall)) { return false; }
    TelephoneNumber existingOriginator = aCall.getOriginator();
    boolean isNewOriginator = existingOriginator != null && !this.equals(existingOriginator);
    if (isNewOriginator)
    {
      aCall.setOriginator(this);
    }
    else
    {
      call.add(aCall);
    }
    wasAdded = true;
    return wasAdded;
  }

  public boolean removeCall(PhoneCall aCall)
  {
    boolean wasRemoved = false;
    //Unable to remove aCall, as it must always have a originator
    if (!this.equals(aCall.getOriginator()))
    {
      call.remove(aCall);
      wasRemoved = true;
    }
    return wasRemoved;
  }

  // Positional insert; index is clamped to the valid range.
  public boolean addCallAt(PhoneCall aCall, int index)
  {
    boolean wasAdded = false;
    if(addCall(aCall))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfCall()) { index = numberOfCall() - 1; }
      call.remove(aCall);
      call.add(index, aCall);
      wasAdded = true;
    }
    return wasAdded;
  }

  // Moves an existing call to the given index, or inserts it there.
  public boolean addOrMoveCallAt(PhoneCall aCall, int index)
  {
    boolean wasAdded = false;
    if(call.contains(aCall))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfCall()) { index = numberOfCall() - 1; }
      call.remove(aCall);
      call.add(index, aCall);
      wasAdded = true;
    }
    else
    {
      wasAdded = addCallAt(aCall, index);
    }
    return wasAdded;
  }

  public static int minimumNumberOfPhoneCalls()
  {
    return 0;
  }

  // Many-to-many add: also registers this number as a party on the call,
  // rolling back the local insert if the other side rejects it.
  public boolean addPhoneCall(PhoneCall aPhoneCall)
  {
    boolean wasAdded = false;
    if (phoneCalls.contains(aPhoneCall)) { return false; }
    phoneCalls.add(aPhoneCall);
    if (aPhoneCall.indexOfParty(this) != -1)
    {
      wasAdded = true;
    }
    else
    {
      wasAdded = aPhoneCall.addParty(this);
      if (!wasAdded)
      {
        phoneCalls.remove(aPhoneCall);
      }
    }
    return wasAdded;
  }

  // Many-to-many remove: also deregisters this number as a party,
  // restoring the entry at its old index if the other side refuses.
  public boolean removePhoneCall(PhoneCall aPhoneCall)
  {
    boolean wasRemoved = false;
    if (!phoneCalls.contains(aPhoneCall))
    {
      return wasRemoved;
    }
    int oldIndex = phoneCalls.indexOf(aPhoneCall);
    phoneCalls.remove(oldIndex);
    if (aPhoneCall.indexOfParty(this) == -1)
    {
      wasRemoved = true;
    }
    else
    {
      wasRemoved = aPhoneCall.removeParty(this);
      if (!wasRemoved)
      {
        phoneCalls.add(oldIndex,aPhoneCall);
      }
    }
    return wasRemoved;
  }

  // Positional insert; index is clamped to the valid range.
  public boolean addPhoneCallAt(PhoneCall aPhoneCall, int index)
  {
    boolean wasAdded = false;
    if(addPhoneCall(aPhoneCall))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfPhoneCalls()) { index = numberOfPhoneCalls() - 1; }
      phoneCalls.remove(aPhoneCall);
      phoneCalls.add(index, aPhoneCall);
      wasAdded = true;
    }
    return wasAdded;
  }

  // Moves an existing entry to the given index, or inserts it there.
  public boolean addOrMovePhoneCallAt(PhoneCall aPhoneCall, int index)
  {
    boolean wasAdded = false;
    if(phoneCalls.contains(aPhoneCall))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfPhoneCalls()) { index = numberOfPhoneCalls() - 1; }
      phoneCalls.remove(aPhoneCall);
      phoneCalls.add(index, aPhoneCall);
      wasAdded = true;
    }
    else
    {
      wasAdded = addPhoneCallAt(aPhoneCall, index);
    }
    return wasAdded;
  }

  // Re-seats the inverse "voicemail" membership: detaches from the previous
  // owner (if different) and attaches to the new one. Accepts null.
  public boolean setNumber(TelephoneNumber aNumber)
  {
    boolean wasSet = false;
    TelephoneNumber existingNumber = number;
    number = aNumber;
    if (existingNumber != null && !existingNumber.equals(aNumber))
    {
      existingNumber.removeVoicemail(this);
    }
    if (aNumber != null)
    {
      aNumber.addVoicemail(this);
    }
    wasSet = true;
    return wasSet;
  }

  // Mandatory association: rejects null; moves this number between boxes,
  // keeping both boxes' telephone-number lists consistent.
  public boolean setVoiceMailBox(VoiceMailBox aVoiceMailBox)
  {
    boolean wasSet = false;
    if (aVoiceMailBox == null)
    {
      return wasSet;
    }
    VoiceMailBox existingVoiceMailBox = voiceMailBox;
    voiceMailBox = aVoiceMailBox;
    if (existingVoiceMailBox != null && !existingVoiceMailBox.equals(aVoiceMailBox))
    {
      existingVoiceMailBox.removeTelephoneNumber(this);
    }
    voiceMailBox.addTelephoneNumber(this);
    wasSet = true;
    return wasSet;
  }

  public static int minimumNumberOfFeatures()
  {
    return 0;
  }

  // Many-to-many add with rollback, mirroring addPhoneCall.
  public boolean addFeature(Feature aFeature)
  {
    boolean wasAdded = false;
    if (features.contains(aFeature)) { return false; }
    features.add(aFeature);
    if (aFeature.indexOfTelephoneNumber(this) != -1)
    {
      wasAdded = true;
    }
    else
    {
      wasAdded = aFeature.addTelephoneNumber(this);
      if (!wasAdded)
      {
        features.remove(aFeature);
      }
    }
    return wasAdded;
  }

  // Many-to-many remove with index-preserving rollback.
  public boolean removeFeature(Feature aFeature)
  {
    boolean wasRemoved = false;
    if (!features.contains(aFeature))
    {
      return wasRemoved;
    }
    int oldIndex = features.indexOf(aFeature);
    features.remove(oldIndex);
    if (aFeature.indexOfTelephoneNumber(this) == -1)
    {
      wasRemoved = true;
    }
    else
    {
      wasRemoved = aFeature.removeTelephoneNumber(this);
      if (!wasRemoved)
      {
        features.add(oldIndex,aFeature);
      }
    }
    return wasRemoved;
  }

  // Positional insert; index is clamped to the valid range.
  public boolean addFeatureAt(Feature aFeature, int index)
  {
    boolean wasAdded = false;
    if(addFeature(aFeature))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfFeatures()) { index = numberOfFeatures() - 1; }
      features.remove(aFeature);
      features.add(index, aFeature);
      wasAdded = true;
    }
    return wasAdded;
  }

  // Moves an existing feature to the given index, or inserts it there.
  public boolean addOrMoveFeatureAt(Feature aFeature, int index)
  {
    boolean wasAdded = false;
    if(features.contains(aFeature))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfFeatures()) { index = numberOfFeatures() - 1; }
      features.remove(aFeature);
      features.add(index, aFeature);
      wasAdded = true;
    }
    else
    {
      wasAdded = addFeatureAt(aFeature, index);
    }
    return wasAdded;
  }

  public static int minimumNumberOfPhoneLines()
  {
    return 0;
  }

  // Many-to-many add with rollback, mirroring addPhoneCall.
  public boolean addPhoneLine(PhoneLine aPhoneLine)
  {
    boolean wasAdded = false;
    if (phoneLines.contains(aPhoneLine)) { return false; }
    phoneLines.add(aPhoneLine);
    if (aPhoneLine.indexOfTelephoneNumber(this) != -1)
    {
      wasAdded = true;
    }
    else
    {
      wasAdded = aPhoneLine.addTelephoneNumber(this);
      if (!wasAdded)
      {
        phoneLines.remove(aPhoneLine);
      }
    }
    return wasAdded;
  }

  // Many-to-many remove with index-preserving rollback.
  public boolean removePhoneLine(PhoneLine aPhoneLine)
  {
    boolean wasRemoved = false;
    if (!phoneLines.contains(aPhoneLine))
    {
      return wasRemoved;
    }
    int oldIndex = phoneLines.indexOf(aPhoneLine);
    phoneLines.remove(oldIndex);
    if (aPhoneLine.indexOfTelephoneNumber(this) == -1)
    {
      wasRemoved = true;
    }
    else
    {
      wasRemoved = aPhoneLine.removeTelephoneNumber(this);
      if (!wasRemoved)
      {
        phoneLines.add(oldIndex,aPhoneLine);
      }
    }
    return wasRemoved;
  }

  // Positional insert; index is clamped to the valid range.
  public boolean addPhoneLineAt(PhoneLine aPhoneLine, int index)
  {
    boolean wasAdded = false;
    if(addPhoneLine(aPhoneLine))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfPhoneLines()) { index = numberOfPhoneLines() - 1; }
      phoneLines.remove(aPhoneLine);
      phoneLines.add(index, aPhoneLine);
      wasAdded = true;
    }
    return wasAdded;
  }

  // Moves an existing line to the given index, or inserts it there.
  public boolean addOrMovePhoneLineAt(PhoneLine aPhoneLine, int index)
  {
    boolean wasAdded = false;
    if(phoneLines.contains(aPhoneLine))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfPhoneLines()) { index = numberOfPhoneLines() - 1; }
      phoneLines.remove(aPhoneLine);
      phoneLines.add(index, aPhoneLine);
      wasAdded = true;
    }
    else
    {
      wasAdded = addPhoneLineAt(aPhoneLine, index);
    }
    return wasAdded;
  }

  // Tears down every association before this object is discarded:
  // detaches voicemail entries, deletes originated calls (mandatory inverse),
  // and deregisters from phoneCalls, the voice-mail box, features and lines.
  public void delete()
  {
    while( !voicemail.isEmpty() )
    {
      voicemail.get(0).setNumber(null);
    }
    // Iterate backwards: aCall.delete() mutates the "call" list.
    for(int i=call.size(); i > 0; i--)
    {
      PhoneCall aCall = call.get(i - 1);
      aCall.delete();
    }
    ArrayList<PhoneCall> copyOfPhoneCalls = new ArrayList<PhoneCall>(phoneCalls);
    phoneCalls.clear();
    for(PhoneCall aPhoneCall : copyOfPhoneCalls)
    {
      aPhoneCall.removeParty(this);
    }
    if (number != null)
    {
      TelephoneNumber placeholderNumber = number;
      this.number = null;
      placeholderNumber.removeVoicemail(this);
    }
    VoiceMailBox placeholderVoiceMailBox = voiceMailBox;
    this.voiceMailBox = null;
    placeholderVoiceMailBox.removeTelephoneNumber(this);
    ArrayList<Feature> copyOfFeatures = new ArrayList<Feature>(features);
    features.clear();
    for(Feature aFeature : copyOfFeatures)
    {
      aFeature.removeTelephoneNumber(this);
    }
    ArrayList<PhoneLine> copyOfPhoneLines = new ArrayList<PhoneLine>(phoneLines);
    phoneLines.clear();
    for(PhoneLine aPhoneLine : copyOfPhoneLines)
    {
      aPhoneLine.removeTelephoneNumber(this);
    }
  }

  // Debug representation: digits plus the identity hash of the voice-mail box.
  // (outputString is an unused generator artifact; left as generated.)
  public String toString()
  {
    String outputString = "";
    return super.toString() + "["+
            "digits" + ":" + getDigits()+ "]" + System.getProperties().getProperty("line.separator") +
            "  " + "voiceMailBox = "+(getVoiceMailBox()!=null?Integer.toHexString(System.identityHashCode(getVoiceMailBox())):"null")
            + outputString;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.coder;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.coder.util.HHUtil;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
/**
* Hitchhiker-XOR Erasure decoding step, a wrapper of all the necessary
* information to perform a decoding step involved in the whole process of
* decoding a block group.
*/
@InterfaceAudience.Private
public class HHXORErasureDecodingStep extends HHErasureCodingStep {
private int pbIndex;
private int[] piggyBackIndex;
private int[] piggyBackFullIndex;
private int[] erasedIndexes;
private RawErasureDecoder rsRawDecoder;
private RawErasureEncoder xorRawEncoder;
/**
 * The constructor with all the necessary info.
 * @param inputBlocks the data and parity blocks read as decoder input
 * @param erasedIndexes the indexes of erased blocks in inputBlocks array
 * @param outputBlocks the blocks into which recovered data is written
 * @param rawDecoder underlying RS decoder for hitchhiker decoding
 * @param rawEncoder underlying XOR encoder for hitchhiker decoding
 */
public HHXORErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
    ECBlock[] outputBlocks, RawErasureDecoder rawDecoder,
    RawErasureEncoder rawEncoder) {
  super(inputBlocks, outputBlocks);
  // Keep the underlying coders and erasure layout for the decoding steps.
  this.rsRawDecoder = rawDecoder;
  this.xorRawEncoder = rawEncoder;
  this.erasedIndexes = erasedIndexes;
  // The piggyback lives in the last parity unit.
  this.pbIndex = rawDecoder.getNumParityUnits() - 1;
  // Precompute the piggyback index vectors from the coder geometry.
  this.piggyBackIndex = HHUtil.initPiggyBackIndexWithoutPBVec(
      rawDecoder.getNumDataUnits(), rawDecoder.getNumParityUnits());
  this.piggyBackFullIndex = HHUtil.initPiggyBackFullIndexVec(
      rawDecoder.getNumDataUnits(), piggyBackIndex);
}
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
  // Nothing erased -> nothing to reconstruct.
  if (erasedIndexes.length == 0) {
    return;
  }
  // Delegate to the buffer-based implementation.
  performCoding(ECChunk.toBuffers(inputChunks), ECChunk.toBuffers(outputChunks));
}
/**
 * Buffer-level decoding: validates the flat input/output arrays, reshapes
 * them into [sub-stripe][unit] views, and dispatches either to the cheap
 * single-data-unit piggyback path or to the general multi/parity path.
 */
private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) {
  final int numDataUnits = rsRawDecoder.getNumDataUnits();
  final int numParityUnits = rsRawDecoder.getNumParityUnits();
  final int numTotalUnits = numDataUnits + numParityUnits;
  final int subPacketSize = getSubPacketSize();

  ByteBuffer firstValidInput = HHUtil.findFirstValidInput(inputs);
  final int bufSize = firstValidInput.remaining();

  if (inputs.length != numTotalUnits * subPacketSize) {
    throw new IllegalArgumentException("Invalid inputs length");
  }
  if (outputs.length != erasedIndexes.length * subPacketSize) {
    throw new IllegalArgumentException("Invalid outputs length");
  }

  // notes:inputs length = numDataUnits * subPacketizationSize
  // first numDataUnits length is first sub-stripe,
  // second numDataUnits length is second sub-stripe
  ByteBuffer[][] newIn = new ByteBuffer[subPacketSize][numTotalUnits];
  ByteBuffer[][] newOut = new ByteBuffer[subPacketSize][erasedIndexes.length];
  for (int stripe = 0; stripe < subPacketSize; ++stripe) {
    for (int unit = 0; unit < numTotalUnits; ++unit) {
      newIn[stripe][unit] = inputs[stripe * numTotalUnits + unit];
    }
    for (int e = 0; e < erasedIndexes.length; ++e) {
      newOut[stripe][e] = outputs[stripe * erasedIndexes.length + e];
    }
  }

  if (erasedIndexes.length == 1 && erasedIndexes[0] < numDataUnits) {
    // Only reconstruct one data unit missing
    doDecodeSingle(newIn, newOut, erasedIndexes[0], bufSize,
        firstValidInput.isDirect());
  } else {
    doDecodeMultiAndParity(newIn, newOut, erasedIndexes, bufSize);
  }
}
  /**
   * Recovers a single erased <em>data</em> unit via the piggyback shortcut:
   * the second sub-stripe is repaired with a plain RS decode, then the
   * piggyback extracted from it is XOR-ed into the first sub-stripe to
   * recover the remaining bytes.
   *
   * @param inputs [subPacket][unit] input buffers, {@code null} for unread
   *        units
   * @param outputs [subPacket][erased] buffers to receive the recovered data
   * @param erasedLocationToFix index of the erased data unit
   *        (assumed &lt; numDataUnits — callers dispatch on that)
   * @param bufSize number of bytes per buffer
   * @param isDirect whether the buffers are direct (off-heap)
   */
  private void doDecodeSingle(ByteBuffer[][] inputs, ByteBuffer[][] outputs,
                              int erasedLocationToFix, int bufSize,
                              boolean isDirect) {
    final int numDataUnits = rsRawDecoder.getNumDataUnits();
    final int numParityUnits = rsRawDecoder.getNumParityUnits();
    final int subPacketSize = getSubPacketSize();

    // Remember every input position so all buffers can be advanced by
    // exactly bufSize at the end (see the final loop).
    int[][] inputPositions = new int[subPacketSize][inputs[0].length];
    for (int i = 0; i < subPacketSize; ++i) {
      for (int j = 0; j < inputs[i].length; ++j) {
        if (inputs[i][j] != null) {
          inputPositions[i][j] = inputs[i][j].position();
        }
      }
    }

    // Working copy of the SECOND sub-stripe (inputs[1]) for the RS decode.
    ByteBuffer[] tempInputs = new ByteBuffer[numDataUnits + numParityUnits];
    for (int i = 0; i < tempInputs.length; ++i) {
      tempInputs[i] = inputs[1][i];
    }

    // tmpOutputs[i] holds the caller's output buffers first, then scratch
    // buffers so each row has a slot for every parity unit.
    ByteBuffer[][] tmpOutputs = new ByteBuffer[subPacketSize][numParityUnits];
    for (int i = 0; i < getSubPacketSize(); ++i) {
      for (int j = 0; j < erasedIndexes.length; ++j) {
        tmpOutputs[i][j] = outputs[i][j];
      }
      for (int m = erasedIndexes.length; m < numParityUnits; ++m) {
        tmpOutputs[i][m] = HHUtil.allocateByteBuffer(isDirect, bufSize);
      }
    }

    // First consider the second subPacket
    int[] erasedLocation = new int[numParityUnits];
    erasedLocation[0] = erasedLocationToFix;

    // assign the erased locations based on the locations not read for
    // second subPacket but from decoding; the trailing parities of the
    // second sub-stripe are deliberately nulled so RS treats them as erased.
    for (int i = 1; i < numParityUnits; i++) {
      erasedLocation[i] = numDataUnits + i;
      tempInputs[numDataUnits + i] = null;
    }

    rsRawDecoder.decode(tempInputs, erasedLocation, tmpOutputs[1]);

    int piggyBackParityIndex = piggyBackFullIndex[erasedLocationToFix];
    ByteBuffer piggyBack = HHUtil.getPiggyBackForDecode(inputs, tmpOutputs,
        piggyBackParityIndex, numDataUnits, numParityUnits, pbIndex);

    // Second consider the first subPacket.
    // get the value of the piggyback associated with the erased location
    if (isDirect) {
      // decode the erased value in the first subPacket by using the piggyback
      int idxToWrite = 0;
      doDecodeByPiggyBack(inputs[0], tmpOutputs[0][idxToWrite], piggyBack,
          erasedLocationToFix);
    } else {
      // Heap buffers: unwrap everything to byte[] + offset and use the
      // array-based piggyback decode.
      ByteBuffer buffer;
      byte[][][] newInputs = new byte[getSubPacketSize()][inputs[0].length][];
      int[][] inputOffsets = new int[getSubPacketSize()][inputs[0].length];
      byte[][][] newOutputs = new byte[getSubPacketSize()][numParityUnits][];
      int[][] outOffsets = new int[getSubPacketSize()][numParityUnits];

      for (int i = 0; i < getSubPacketSize(); ++i) {
        for (int j = 0; j < inputs[0].length; ++j) {
          buffer = inputs[i][j];
          if (buffer != null) {
            inputOffsets[i][j] = buffer.arrayOffset() + buffer.position();
            newInputs[i][j] = buffer.array();
          }
        }
      }

      for (int i = 0; i < getSubPacketSize(); ++i) {
        for (int j = 0; j < numParityUnits; ++j) {
          buffer = tmpOutputs[i][j];
          if (buffer != null) {
            outOffsets[i][j] = buffer.arrayOffset() + buffer.position();
            newOutputs[i][j] = buffer.array();
          }
        }
      }

      // NOTE(review): piggyBack.array() without arrayOffset() — assumes the
      // piggyback buffer wraps its array at offset 0; confirm with HHUtil.
      byte[] newPiggyBack = piggyBack.array();

      // decode the erased value in the first subPacket by using the piggyback
      int idxToWrite = 0;
      doDecodeByPiggyBack(newInputs[0], inputOffsets[0],
          newOutputs[0][idxToWrite], outOffsets[0][idxToWrite],
          newPiggyBack, erasedLocationToFix, bufSize);
    }

    // Advance all inputs by the bytes consumed, as a decoder call would.
    for (int i = 0; i < subPacketSize; ++i) {
      for (int j = 0; j < inputs[i].length; ++j) {
        if (inputs[i][j] != null) {
          inputs[i][j].position(inputPositions[i][j] + bufSize);
        }
      }
    }
  }
  /**
   * Direct-buffer variant: recovers the first-sub-stripe bytes of the erased
   * unit by XOR-ing the piggyback with the surviving inputs of its piggyback
   * set, writing the result into {@code outputs}.
   *
   * <p>NOTE(review): the inputs are indexed as
   * {@code inputs[j].position() + i} with {@code i} starting at
   * {@code piggyBack.position()} — this assumes the piggyback buffer's
   * position is aligned with the inputs' relative offsets (callers appear to
   * pass a freshly built piggyback at position 0); confirm with
   * {@code HHUtil.getPiggyBackForDecode}.
   *
   * @param inputs first sub-stripe input buffers, {@code null} for unread
   *        units
   * @param outputs buffer receiving the recovered data (absolute writes,
   *        position unchanged)
   * @param piggyBack piggyback bytes extracted from the parity units;
   *        mutated in place during the XOR accumulation
   * @param erasedLocationToFix index of the erased data unit
   */
  private void doDecodeByPiggyBack(ByteBuffer[] inputs,
                                   ByteBuffer outputs,
                                   ByteBuffer piggyBack,
                                   int erasedLocationToFix) {
    // The piggyback set covering the erased unit spans units
    // piggyBackIndex[set - 1] (inclusive) .. piggyBackIndex[set] (exclusive).
    final int thisPiggyBackSetIdx = piggyBackFullIndex[erasedLocationToFix];
    final int startIndex = piggyBackIndex[thisPiggyBackSetIdx - 1];
    final int endIndex = piggyBackIndex[thisPiggyBackSetIdx];

    // recover first sub-stripe data by XOR piggyback
    int bufSize = piggyBack.remaining();
    for (int i = piggyBack.position();
         i < piggyBack.position() + bufSize; i++) {
      for (int j = startIndex; j < endIndex; j++) {
        if (inputs[j] != null) {
          // Absolute get/put: neither buffer's position moves.
          piggyBack.put(i, (byte)
              (piggyBack.get(i) ^ inputs[j].get(inputs[j].position() + i)));
        }
      }
      outputs.put(outputs.position() + i, piggyBack.get(i));
    }
  }
private void doDecodeByPiggyBack(byte[][] inputs, int[] inputOffsets,
byte[] outputs, int outOffset,
byte[] piggyBack, int erasedLocationToFix,
int bufSize) {
final int thisPiggyBackSetIdx = piggyBackFullIndex[erasedLocationToFix];
final int startIndex = piggyBackIndex[thisPiggyBackSetIdx - 1];
final int endIndex = piggyBackIndex[thisPiggyBackSetIdx];
// recover first sub-stripe data by XOR piggyback
for (int i = 0; i < bufSize; i++) {
for (int j = startIndex; j < endIndex; j++) {
if (inputs[j] != null) {
piggyBack[i] = (byte) (piggyBack[i] ^ inputs[j][i + inputOffsets[j]]);
}
}
outputs[i + outOffset] = piggyBack[i];
}
}
  /**
   * Handles every erasure pattern other than a single missing data unit
   * (multiple data units and/or parity units).
   *
   * <p>Strategy: RS-decode the first sub-stripe directly; rebuild the
   * piggybacks from the (now complete) first-sub-stripe data; strip the
   * piggybacks from the surviving second-sub-stripe parities; RS-decode the
   * second sub-stripe; finally re-apply the piggybacks to any recovered
   * parity outputs.
   *
   * @param inputs [subPacket][unit] input buffers, {@code null} for unread
   *        units
   * @param outputs [subPacket][erased] buffers receiving the recovered data
   * @param erasedLocationToFix indexes of the erased units
   * @param bufSize number of bytes per buffer
   */
  private void doDecodeMultiAndParity(ByteBuffer[][] inputs,
                                      ByteBuffer[][] outputs,
                                      int[] erasedLocationToFix, int bufSize) {
    final int numDataUnits = rsRawDecoder.getNumDataUnits();
    final int numParityUnits = rsRawDecoder.getNumParityUnits();
    final int numTotalUnits = numDataUnits + numParityUnits;

    // Marks which parity units are themselves erased (1 = to be fixed).
    int[] parityToFixFlag = new int[numTotalUnits];
    for (int i = 0; i < erasedLocationToFix.length; ++i) {
      if (erasedLocationToFix[i] >= numDataUnits) {
        parityToFixFlag[erasedLocationToFix[i]] = 1;
      }
    }

    // Save positions so the first-sub-stripe buffers can be rewound after
    // the first decode and advanced once at the very end.
    int[] inputPositions = new int[inputs[0].length];
    for (int i = 0; i < inputPositions.length; i++) {
      if (inputs[0][i] != null) {
        inputPositions[i] = inputs[0][i].position();
      }
    }

    // decoded first sub-stripe
    rsRawDecoder.decode(inputs[0], erasedLocationToFix, outputs[0]);

    // The decode consumed the buffers; rewind so they can be re-read below.
    for (int i = 0; i < inputs[0].length; i++) {
      if (inputs[0][i] != null) {
        // dataLen bytes consumed
        inputs[0][i].position(inputPositions[i]);
      }
    }

    // First-sub-stripe data view with the recovered units substituted in;
    // used to recompute the piggybacks.
    ByteBuffer[] tempInput = new ByteBuffer[numDataUnits];
    for (int i = 0; i < numDataUnits; ++i) {
      tempInput[i] = inputs[0][i];
    }
    for (int i = 0; i < erasedLocationToFix.length; ++i) {
      if (erasedLocationToFix[i] < numDataUnits) {
        tempInput[erasedLocationToFix[i]] = outputs[0][i];
      }
    }

    ByteBuffer[] piggyBack = HHUtil.getPiggyBacksFromInput(tempInput,
        piggyBackIndex, numParityUnits, 0, xorRawEncoder);

    // Strip the piggyback from every surviving second-sub-stripe parity.
    // Parity 0 (unit numDataUnits) carries no piggyback, hence j starts at
    // numDataUnits + 1.
    for (int j = numDataUnits + 1; j < numTotalUnits; ++j) {
      if (parityToFixFlag[j] == 0 && inputs[1][j] != null) {
        // f(b) + f(a1,a2,a3....)
        for (int k = inputs[1][j].position(),
             m = piggyBack[j - numDataUnits - 1].position();
             k < inputs[1][j].limit(); ++k, ++m) {
          inputs[1][j].put(k, (byte)
              (inputs[1][j].get(k) ^
                  piggyBack[j - numDataUnits - 1].get(m)));
        }
      }
    }

    // decoded second sub-stripe
    rsRawDecoder.decode(inputs[1], erasedLocationToFix, outputs[1]);

    // Re-apply the piggyback to recovered parity outputs.
    // parity index = 0, the data have no piggyBack
    for (int j = 0; j < erasedLocationToFix.length; ++j) {
      if (erasedLocationToFix[j] < numTotalUnits
          && erasedLocationToFix[j] > numDataUnits) {
        int parityIndex = erasedLocationToFix[j] - numDataUnits - 1;
        for (int k = outputs[1][j].position(),
             m = piggyBack[parityIndex].position();
             k < outputs[1][j].limit(); ++k, ++m) {
          outputs[1][j].put(k, (byte)
              (outputs[1][j].get(k) ^ piggyBack[parityIndex].get(m)));
        }
      }
    }

    // Advance all first-sub-stripe inputs by the bytes consumed.
    for (int i = 0; i < inputs[0].length; i++) {
      if (inputs[0][i] != null) {
        // dataLen bytes consumed
        inputs[0][i].position(inputPositions[i] + bufSize);
      }
    }
  }
}
| |
/*
* Copyright (c) 2018.
* J. Melzer
*/
package com.jmelzer.jitty.service;
import com.jmelzer.jitty.dao.TournamentPlayerRepository;
import com.jmelzer.jitty.model.*;
import com.jmelzer.jitty.model.dto.*;
import org.springframework.beans.BeanUtils;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
/**
 * Utility for copying between JPA entities and their DTO counterparts.
 * Created by J. Melzer on 30.07.2016.
 */
public class CopyManager {
    // Locale-default short time formatter. DateFormat instances are NOT
    // thread-safe, so every format() call synchronizes on this object.
    final static DateFormat hourFormatter = SimpleDateFormat.getTimeInstance(DateFormat.SHORT);

    /** Copies a KO field including each game's sets (for bracket display). */
    static public KOFieldDTO copyForBracket(KOField koField) {
        return copy(koField, true);
    }

    /**
     * Copies a KO field and its linked list of rounds into DTOs.
     *
     * @param koField  source entity; must have at least one round
     * @param withSets whether each game's sets are copied as well
     */
    static public KOFieldDTO copy(KOField koField, boolean withSets) {
        KOFieldDTO dto = new KOFieldDTO();
        BeanUtils.copyProperties(koField, dto, "round");
        Round round = koField.getRound();
        RoundDTO lastRoundDto = null;
        do {
            RoundDTO rdto = new RoundDTO();
            BeanUtils.copyProperties(round, rdto, "games");
            rdto.setRoundType(copy(round.getRoundType()));
            if (lastRoundDto == null) {
                // the first round becomes the field's root round
                dto.setRound(rdto);
            } else {
                lastRoundDto.setNextRound(rdto);
            }
            lastRoundDto = rdto;
            for (TournamentSingleGame game : round.getGames()) {
                rdto.addGame(copy(game, withSets));
            }
            round = round.getNextRound();
        } while (round != null);
        return dto;
    }

    private static RoundTypeDTO copy(RoundType roundType) {
        RoundTypeDTO dto = new RoundTypeDTO();
        dto.setValue(roundType.getValue());
        dto.setName(roundType.getName());
        return dto;
    }

    /** Copies a game; players, group, sets and round are mapped explicitly. */
    static public TournamentSingleGameDTO copy(TournamentSingleGame game, boolean withSets) {
        TournamentSingleGameDTO dto = new TournamentSingleGameDTO();
        BeanUtils.copyProperties(game, dto, "player1", "player2", "sets", "round");
        if (game.getPlayer1() != null) {
            dto.setPlayer1(copy(game.getPlayer1()));
        }
        if (game.getPlayer2() != null) {
            dto.setPlayer2(copy(game.getPlayer2()));
        }
        if (game.getGroup() != null) {
            dto.setGroup(copy(game.getTcName(), game.getGroup()));
        }
        if (withSets && game.getSets() != null && !game.getSets().isEmpty()) {
            for (GameSet gameSet : game.getSets()) {
                dto.addSet(copy(gameSet));
            }
        }
        if (game.getRound() != null) {
            RoundDTO rdto = new RoundDTO();
            BeanUtils.copyProperties(game.getRound(), rdto, "games");
            rdto.setRoundType(copy(game.getRound().getRoundType()));
            dto.setRound(rdto);
        }
        return dto;
    }

    /** Copies a player and renders the "time since last game" display fields. */
    public static TournamentPlayerDTO copy(TournamentPlayer player) {
        TournamentPlayerDTO dto = new TournamentPlayerDTO();
        BeanUtils.copyProperties(player, dto, "playedGames", "games", "classes", "association", "tournaments");
        if (player.getLastGameAt() != null) {
            LocalDateTime time = LocalDateTime.ofInstant(player.getLastGameAt().toInstant(), ZoneId.systemDefault());
            // Evaluate "now" once so hours and minutes are consistent.
            LocalDateTime now = LocalDateTime.now();
            long h = ChronoUnit.HOURS.between(time, now);
            // Minutes past the full hours, e.g. "1h5m" instead of "1h65m".
            long m = ChronoUnit.MINUTES.between(time, now) % 60;
            dto.setPeriodSinceLastGame(h + "h" + m + "m");
            // DateFormat is not thread-safe; guard the shared instance.
            synchronized (hourFormatter) {
                dto.setLastGameAt("letztes Spiel um " + hourFormatter.format(player.getLastGameAt()));
            }
        } else {
            dto.setPeriodSinceLastGame("--");
            dto.setLastGameAt("noch nicht gespielt");
        }
        return dto;
    }

    /** Copies a group; only the class name is carried over, not the class graph. */
    public static TournamentGroupDTO copy(String clzName, TournamentGroup group) {
        TournamentGroupDTO dto = new TournamentGroupDTO();
        BeanUtils.copyProperties(group, dto, "players", "tournamentClass");
        dto.setTournamentClass(new TournamentClassDTO());
        dto.getTournamentClass().setName(clzName);
        return dto;
    }

    public static GameSetDTO copy(GameSet set) {
        GameSetDTO dto = new GameSetDTO();
        BeanUtils.copyProperties(set, dto);
        return dto;
    }

    /** Copies a KO field without the game sets. */
    static public KOFieldDTO copy(KOField koField) {
        return copy(koField, false);
    }

    /** Copies the class DTO's scalar properties onto the entity (players/system untouched). */
    static public void copy(TournamentClassDTO source, TournamentClass target, TournamentPlayerRepository playerRepository) {
        BeanUtils.copyProperties(source, target, "players", "system");
    }

    /** Maps game DTOs to entities, stamping each with the class name and tournament id. */
    public static List<TournamentSingleGame> copy(List<TournamentSingleGameDTO> dtos, String name, Long tid) {
        List<TournamentSingleGame> games = new ArrayList<>();
        for (TournamentSingleGameDTO dto : dtos) {
            TournamentSingleGame game = new TournamentSingleGame();
            copy(dto, game);
            game.setTcName(name);
            game.setTid(tid);
            games.add(game);
        }
        return games;
    }

    /** Copies a game DTO onto an entity, replacing the entity's sets. */
    public static void copy(TournamentSingleGameDTO dto, TournamentSingleGame game) {
        BeanUtils.copyProperties(dto, game, "group", "player1", "player2", "sets");
        game.getSets().clear();
        // Guard against DTOs that carry no set list at all.
        if (dto.getSets() != null) {
            for (GameSetDTO gameSet : dto.getSets()) {
                game.getSets().add(copy(gameSet));
            }
        }
    }

    public static GameSet copy(GameSetDTO dto) {
        GameSet gameSet = new GameSet();
        BeanUtils.copyProperties(dto, gameSet);
        return gameSet;
    }

    /** Copies a tournament and shallow copies of its classes. */
    public static TournamentDTO copy(Tournament tournament) {
        TournamentDTO dto = new TournamentDTO();
        BeanUtils.copyProperties(tournament, dto, "classes");
        for (TournamentClass tournamentClass : tournament.getClasses()) {
            dto.addClass(copyOnly(tournamentClass));
        }
        return dto;
    }

    /** Shallow class copy: no players, no system. */
    static public TournamentClassDTO copyOnly(TournamentClass clz) {
        TournamentClassDTO dto = new TournamentClassDTO();
        BeanUtils.copyProperties(clz, dto, "players", "system");
        return dto;
    }

    public static Tournament copy(TournamentDTO dto, Tournament t) {
        BeanUtils.copyProperties(dto, t, "classes");
        return t;
    }

    /**
     * Copies a group phase DTO onto the entity, resolving the groups' players
     * via the repository; returns the (unchanged) source for chaining.
     */
    static public GroupPhaseDTO copy(GroupPhaseDTO source, GroupPhase target, TournamentPlayerRepository playerRepository) {
        BeanUtils.copyProperties(source, target, "groups", "system");
        if (source.getGroups() != null) {
            for (TournamentGroupDTO group : source.getGroups()) {
                target.addGroup(copy(group, playerRepository));
            }
        }
        return source;
    }

    /** Maps a group DTO to an entity, resolving players via the repository. */
    public static TournamentGroup copy(TournamentGroupDTO dto, TournamentPlayerRepository playerRepository) {
        TournamentGroup group = new TournamentGroup();
        BeanUtils.copyProperties(dto, group, "players");
        for (TournamentPlayerDTO tournamentPlayerDTO : dto.getPlayers()) {
            TournamentPlayer player = playerRepository.getOne(tournamentPlayerDTO.getId());
            group.addPlayer(player);
        }
        return group;
    }

    /** Copies a class, optionally including its system's phases (phase shells only). */
    static public TournamentClassDTO copy(TournamentClass clz, boolean withSystem) {
        TournamentClassDTO dto = new TournamentClassDTO();
        BeanUtils.copyProperties(clz, dto, "system", "players");
        if (withSystem && clz.getSystem() != null) {
            dto.setSystem(new TournamentSystemDTO());
            for (Phase phase : clz.getSystem().getPhases()) {
                dto.getSystem().addPhase(copy(phase, true));
            }
        }
        return dto;
    }

    /** Dispatches to the phase-specific copy based on the concrete type. */
    public static PhaseDTO copy(Phase phase, boolean onlyPhase) {
        if (phase instanceof GroupPhase) {
            return copy((GroupPhase) phase, onlyPhase);
        } else if (phase instanceof KOPhase) {
            return copy((KOPhase) phase, onlyPhase);
        } else if (phase instanceof SwissSystemPhase) {
            return copy((SwissSystemPhase) phase, onlyPhase);
        } else {
            throw new IllegalArgumentException("unknown " + phase);
        }
    }

    /** Copies a group phase; with {@code onlyPhase} false the groups and players come along. */
    public static GroupPhaseDTO copy(GroupPhase groupPhase, boolean onlyPhase) {
        GroupPhaseDTO dto = new GroupPhaseDTO();
        BeanUtils.copyProperties(groupPhase, dto, "system", "groups");
        if (!onlyPhase) {
            for (TournamentGroup group : groupPhase.getGroups()) {
                TournamentGroupDTO dtoGroup = copy(groupPhase.getSystem().getTournamentClass().getName(), group);
                dto.addGroup(dtoGroup);
                for (TournamentPlayer player : group.getPlayers()) {
                    dtoGroup.addPlayer(copy(player));
                }
            }
        }
        return dto;
    }

    public static KOPhaseDTO copy(KOPhase phase, boolean onlyPhase) {
        KOPhaseDTO dto = new KOPhaseDTO();
        BeanUtils.copyProperties(phase, dto, "koField", "system");
        return dto;
    }

    /** Copies a Swiss-system phase; with {@code onlyPhase} false its group and players too. */
    public static SwissSystemPhaseDTO copy(SwissSystemPhase phase, boolean onlyPhase) {
        SwissSystemPhaseDTO dto = new SwissSystemPhaseDTO();
        BeanUtils.copyProperties(phase, dto, "group", "system");
        if (!onlyPhase) {
            TournamentGroupDTO dtoGroup = copy(phase.getSystem().getTournamentClass().getName(), phase.getGroup());
            dto.setGroup(dtoGroup);
            for (TournamentPlayer player : phase.getGroup().getPlayers()) {
                dtoGroup.addPlayer(copy(player));
            }
        }
        return dto;
    }

    /** Fills the group's ranking into the result DTO; positions start at 1. */
    public static void fillResults(TournamentGroup group, GroupResultDTO groupResultDTO) {
        int pos = 1;
        for (PlayerStatistic ps : group.getRanking()) {
            GroupResultEntryDTO entry = new GroupResultEntryDTO();
            entry.setPos(pos++);
            entry.setClub(ps.player.getClub() != null ? ps.player.getClub().getName() : "");
            entry.setPlayerName(ps.player.getFullName());
            entry.setGameStat(ps.win + ":" + ps.lose);
            entry.setSetStat(ps.setsWon + ":" + ps.setsLost);
            entry.setDetailResult(ps.detailResult);
            groupResultDTO.getEntries().add(entry);
        }
    }
}
| |
/*
* Copyright 2012 OSBI Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.saiku.plugin.resources;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.pentaho.platform.api.engine.IPluginManager;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.platform.plugin.services.pluginmgr.PluginClassLoader;
import org.saiku.plugin.util.ResourceManager;
import org.saiku.plugin.util.packager.Packager;
import org.saiku.service.datasource.DatasourceService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * PluginResource collects and serves the Saiku UI plugin JavaScript files
 * contributed by the Pentaho solution folder and registered plugins.
 * @author Paul Stoellberger
 *
 */
@Component
@Path("/saiku/{username}/plugin")
@XmlAccessorType(XmlAccessType.NONE)
public class PluginResource {

    private static final Logger log = LoggerFactory.getLogger(PluginResource.class);

    @Autowired
    private DatasourceService datasourceService;

    /**
     * Collects all Saiku UI plugin *.js files — from the solution folder
     * ("saiku/plugins") and from every registered Pentaho plugin's
     * "saiku/plugins" directory — packages them into a single scripts.js and
     * returns its content.
     *
     * @param debug when true the files are only concatenated, not minified
     * @return the combined script source, or an empty string if no plugin
     *         files were found or packaging failed
     */
    @GET
    @Produces({"application/javascript" })
    @Path("/plugins")
    public String getPlugins(@QueryParam("debug") @DefaultValue("false") Boolean debug)
    {
        try {
            Packager packager = Packager.getInstance();
            List<File> files = new ArrayList<File>();
            String searchRootDir = PentahoSystem.getApplicationContext().getSolutionPath("saiku/plugins");
            File searchRootFile = new File(searchRootDir);
            if (searchRootFile.exists()) {
                files.addAll(getJsFiles(searchRootFile));
            }
            final IPluginManager pluginManager = (IPluginManager) PentahoSystem.get(IPluginManager.class, PentahoSessionHolder.getSession());

            long start = System.currentTimeMillis();
            for (String plugin : pluginManager.getRegisteredPlugins()) {
                final PluginClassLoader pluginClassloader = (PluginClassLoader) pluginManager.getClassLoader(plugin);
                File pluginDir = pluginClassloader.getPluginDir();
                // Plugins contribute scripts under <plugin>/saiku/plugins.
                File saikuPluginDir = new File(new File(pluginDir, "saiku"), "plugins");
                if (saikuPluginDir.exists()) {
                    files.addAll(getJsFiles(saikuPluginDir));
                }
            }
            long end = System.currentTimeMillis();
            log.debug("Looking for all plugin files time: {}ms - Files: {}", end - start, files.size());

            if (!files.isEmpty()) {
                String pluginRootDir = PentahoSystem.getApplicationContext().getSolutionPath("system/saiku");
                File[] fileArray = files.toArray(new File[files.size()]);
                packager.registerPackage("scripts", Packager.Filetype.JS, searchRootDir, pluginRootDir + "/../../system/saiku/ui/js/scripts.js", fileArray);
                packager.minifyPackage("scripts", ( debug ? Packager.Mode.CONCATENATE : Packager.Mode.MINIFY));
                return ResourceManager.getInstance().getResourceAsString( "ui/js/scripts.js");
            }
        } catch (IOException ioe) {
            log.error("Error fetching plugins", ioe);
        }
        return "";
    }

    /**
     * Recursively collects all *.js files below {@code rootDir}.
     *
     * @param rootDir directory to search; a non-directory yields an empty list
     * @return the matching files (never null)
     */
    private List<File> getJsFiles(File rootDir) {
        List<File> result = new ArrayList<File>();
        File[] files = rootDir.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".js");
            }
        });
        if (files != null) {
            result.addAll(Arrays.asList(files));
        }
        // FilenameFilter.accept() receives the PARENT directory as its File
        // argument, so the entry itself must be tested for directory-ness
        // (the previous file.isDirectory() check always matched everything).
        File[] folders = rootDir.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return new File(dir, name).isDirectory();
            }
        });
        if (folders != null) {
            for (File f : folders) {
                result.addAll(getJsFiles(f));
            }
        }
        return result;
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package net.starlark.java.eval;
import static com.google.common.truth.Truth.assertThat;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** A test class for functions and scoping. */
@RunWith(JUnit4.class)
public final class FunctionTest {
private final EvaluationTestCase ev = new EvaluationTestCase();
  /** Verifies parsing of a def with defaults, *args, keyword-only params and **kwargs. */
  @Test
  public void testDef() throws Exception {
    ev.exec("def f(a, b=1, *args, c, d=2, **kwargs): pass");
    StarlarkFunction f = (StarlarkFunction) ev.lookup("f");
    assertThat(f).isNotNull();
    assertThat(f.getName()).isEqualTo("f");
    // Parameter names: positionals first, then keyword-only, then residuals.
    assertThat(f.getParameterNames())
        .containsExactly("a", "b", "c", "d", "args", "kwargs")
        .inOrder();
    assertThat(f.hasVarargs()).isTrue();
    assertThat(f.hasKwargs()).isTrue();
    assertThat(getDefaults(f))
        .containsExactly(null, StarlarkInt.of(1), null, StarlarkInt.of(2), null, null)
        .inOrder();

    // same, sans varargs
    ev.exec("def g(a, b=1, *, c, d=2, **kwargs): pass");
    StarlarkFunction g = (StarlarkFunction) ev.lookup("g");
    assertThat(g.getParameterNames()).containsExactly("a", "b", "c", "d", "kwargs").inOrder();
    assertThat(g.hasVarargs()).isFalse();
    assertThat(g.hasKwargs()).isTrue();
    assertThat(getDefaults(g))
        .containsExactly(null, StarlarkInt.of(1), null, StarlarkInt.of(2), null)
        .inOrder();
  }
  /** Returns the default value for each parameter of {@code fn}, null where absent. */
  private static List<Object> getDefaults(StarlarkFunction fn) {
    List<Object> defaults = new ArrayList<>();
    for (int i = 0; i < fn.getParameterNames().size(); i++) {
      defaults.add(fn.getDefaultValue(i));
    }
    return defaults;
  }
  /** A Starlark function can call a host-provided callable; arguments are recorded. */
  @Test
  public void testFunctionDefCallOuterFunc() throws Exception {
    List<Object> params = new ArrayList<>();
    createOuterFunction(params);
    ev.exec(
        "def func(a):", //
        "  outer_func(a)",
        "func(1)",
        "func(2)");
    assertThat(params).containsExactly(StarlarkInt.of(1), StarlarkInt.of(2)).inOrder();
  }
  /** Registers a callable named "outer_func" that appends its positional args to {@code params}. */
  private void createOuterFunction(final List<Object> params) throws Exception {
    StarlarkCallable outerFunc =
        new StarlarkCallable() {
          @Override
          public String getName() {
            return "outer_func";
          }

          @Override
          public NoneType call(StarlarkThread thread, Tuple args, Dict<String, Object> kwargs)
              throws EvalException {
            params.addAll(args);
            return Starlark.NONE;
          }
        };
    ev.update("outer_func", outerFunc);
  }
  /** Assignment inside a function does not leak into the global scope. */
  @Test
  public void testFunctionDefNoEffectOutsideScope() throws Exception {
    ev.update("a", StarlarkInt.of(1));
    ev.exec(
        "def func():", //
        "  a = 2",
        "func()\n");
    assertThat(ev.lookup("a")).isEqualTo(StarlarkInt.of(1));
  }
  // NOTE(review): method name typo "Varible" — renaming would change the
  // externally visible test name, so it is left as-is here.
  /** A function body can read a global variable. */
  @Test
  public void testFunctionDefGlobalVaribleReadInFunction() throws Exception {
    ev.exec(
        "a = 1", //
        "def func():",
        "  b = a",
        "  return b",
        "c = func()\n");
    assertThat(ev.lookup("c")).isEqualTo(StarlarkInt.of(1));
  }
  /** A local assignment shadows the global of the same name within the function. */
  @Test
  public void testFunctionDefLocalGlobalScope() throws Exception {
    ev.exec(
        "a = 1", //
        "def func():",
        "  a = 2",
        "  b = a",
        "  return b",
        "c = func()\n");
    assertThat(ev.lookup("c")).isEqualTo(StarlarkInt.of(2));
  }
  /** Reading a variable that is assigned later in the same function is an error. */
  @Test
  public void testFunctionDefLocalVariableReferencedBeforeAssignment() throws Exception {
    ev.checkEvalErrorContains(
        "local variable 'a' is referenced before assignment.",
        "a = 1",
        "def func():",
        "  b = a",
        "  a = 2",
        "  return b",
        "c = func()\n");
  }
  /** Same early-reference error when the variable is used as a call argument. */
  @Test
  public void testFunctionDefLocalVariableReferencedInCallBeforeAssignment() throws Exception {
    ev.checkEvalErrorContains(
        "local variable 'a' is referenced before assignment.",
        "def dummy(x):",
        "  pass",
        "a = 1",
        "def func():",
        "  dummy(a)",
        "  a = 2",
        "func()\n");
  }
  /** Reading a local after it is assigned is fine; later reassignment is irrelevant. */
  @Test
  public void testFunctionDefLocalVariableReferencedAfterAssignment() throws Exception {
    ev.exec(
        "a = 1", //
        "def func():",
        "  a = 2",
        "  b = a",
        "  a = 3",
        "  return b",
        "c = func()\n");
    assertThat(ev.lookup("c")).isEqualTo(StarlarkInt.of(2));
  }
  /** List comprehensions are allowed at the top level (global scope). */
  @SuppressWarnings("unchecked")
  @Test
  public void testStarlarkGlobalComprehensionIsAllowed() throws Exception {
    ev.exec("a = [i for i in [1, 2, 3]]\n");
    assertThat((Iterable<Object>) ev.lookup("a"))
        .containsExactly(StarlarkInt.of(1), StarlarkInt.of(2), StarlarkInt.of(3))
        .inOrder();
  }
  /** A plain return statement yields the function's value. */
  @Test
  public void testFunctionReturn() throws Exception {
    ev.exec(
        "def func():", //
        "  return 2",
        "b = func()\n");
    assertThat(ev.lookup("b")).isEqualTo(StarlarkInt.of(2));
  }
  /** A return inside a loop exits the function on the first iteration. */
  @Test
  public void testFunctionReturnFromALoop() throws Exception {
    ev.exec(
        "def func():", //
        "  for i in [1, 2, 3, 4, 5]:",
        "    return i",
        "b = func()\n");
    assertThat(ev.lookup("b")).isEqualTo(StarlarkInt.of(1));
  }
  /** Branching on the argument produces the expected value for each call. */
  @Test
  public void testFunctionExecutesProperly() throws Exception {
    ev.exec(
        "def func(a):",
        "  b = 1",
        "  if a:",
        "    b = 2",
        "  return b",
        "c = func(0)",
        "d = func(1)\n");
    assertThat(ev.lookup("c")).isEqualTo(StarlarkInt.of(1));
    assertThat(ev.lookup("d")).isEqualTo(StarlarkInt.of(2));
  }
  /** Starlark functions can call each other; arguments flow through to the host callable. */
  @Test
  public void testFunctionCallFromFunction() throws Exception {
    final List<Object> params = new ArrayList<>();
    createOuterFunction(params);
    ev.exec(
        "def func2(a):",
        "  outer_func(a)",
        "def func1(b):",
        "  func2(b)",
        "func1(1)",
        "func1(2)\n");
    assertThat(params).containsExactly(StarlarkInt.of(1), StarlarkInt.of(2)).inOrder();
  }
  /** A callee invoked from another function still sees the global scope. */
  @Test
  public void testFunctionCallFromFunctionReadGlobalVar() throws Exception {
    ev.exec(
        "a = 1", //
        "def func2():",
        "  return a",
        "def func1():",
        "  return func2()",
        "b = func1()\n");
    assertThat(ev.lookup("b")).isEqualTo(StarlarkInt.of(1));
  }
  /** A parameter named like a global shadows it, even if the global was read elsewhere. */
  @Test
  public void testFunctionParamCanShadowGlobalVarAfterGlobalVarIsRead() throws Exception {
    ev.exec(
        "a = 1",
        "def func2(a):",
        "  return 0",
        "def func1():",
        "  dummy = a",
        "  return func2(2)",
        "b = func1()\n");
    assertThat(ev.lookup("b")).isEqualTo(StarlarkInt.of(0));
  }
  /** A def whose body is on the same line works. */
  @Test
  public void testSingleLineFunction() throws Exception {
    ev.exec(
        "def func(): return 'a'", //
        "s = func()\n");
    assertThat(ev.lookup("s")).isEqualTo("a");
  }
  /** Dict return values support subsequent indexing. */
  @Test
  public void testFunctionReturnsDictionary() throws Exception {
    ev.exec(
        "def func(): return {'a' : 1}", //
        "d = func()",
        "a = d['a']\n");
    assertThat(ev.lookup("a")).isEqualTo(StarlarkInt.of(1));
  }
  /** List return values support subsequent indexing. */
  @Test
  public void testFunctionReturnsList() throws Exception {
    ev.exec(
        "def func(): return [1, 2, 3]", //
        "d = func()",
        "a = d[1]\n");
    assertThat(ev.lookup("a")).isEqualTo(StarlarkInt.of(2));
  }
  /** Functions are first-class values: an alias is callable like the original. */
  @Test
  public void testFunctionNameAliasing() throws Exception {
    ev.exec(
        "def func(a):", //
        "  return a + 1",
        "alias = func",
        "r = alias(1)");
    assertThat(ev.lookup("r")).isEqualTo(StarlarkInt.of(2));
  }
  /** Positional and keyword arguments can be mixed in one call. */
  @Test
  public void testCallingFunctionsWithMixedModeArgs() throws Exception {
    ev.exec(
        "def func(a, b, c):", //
        "  return a + b + c",
        "v = func(1, c = 2, b = 3)");
    assertThat(ev.lookup("v")).isEqualTo(StarlarkInt.of(6));
  }
  /**
   * Returns Starlark source for a function with two optional (None-default)
   * parameters; the result string encodes which arguments were truthy.
   */
  private String functionWithOptionalArgs() {
    return "def func(a, b = None, c = None):\n"
        + "  r = a + 'a'\n"
        + "  if b:\n"
        + "    r += 'b'\n"
        + "  if c:\n"
        + "    r += 'c'\n"
        + "  return r\n";
  }
  /** Optional args default to None (falsy) and only truthy ones affect the result. */
  @Test
  public void testWhichOptionalArgsAreDefinedForFunctions() throws Exception {
    ev.exec(
        functionWithOptionalArgs(),
        "v1 = func('1', 1, 1)",
        "v2 = func(b = 2, a = '2', c = 2)",
        "v3 = func('3')",
        "v4 = func('4', c = 1)\n");
    assertThat(ev.lookup("v1")).isEqualTo("1abc");
    assertThat(ev.lookup("v2")).isEqualTo("2abc");
    assertThat(ev.lookup("v3")).isEqualTo("3a");
    assertThat(ev.lookup("v4")).isEqualTo("4ac");
  }
  /** Default values are used exactly for the parameters not supplied by the caller. */
  @Test
  public void testDefaultArguments() throws Exception {
    ev.exec(
        "def func(a, b = 'b', c = 'c'):",
        "  return a + b + c",
        "v1 = func('a', 'x', 'y')",
        "v2 = func(b = 'x', a = 'a', c = 'y')",
        "v3 = func('a')",
        "v4 = func('a', c = 'y')\n");
    assertThat(ev.lookup("v1")).isEqualTo("axy");
    assertThat(ev.lookup("v2")).isEqualTo("axy");
    assertThat(ev.lookup("v3")).isEqualTo("abc");
    assertThat(ev.lookup("v4")).isEqualTo("aby");
  }
  /** Omitting a parameter without a default is reported as a missing argument. */
  @Test
  public void testDefaultArgumentsInsufficientArgNum() throws Exception {
    ev.checkEvalError(
        "func() missing 1 required positional argument: a",
        "def func(a, b = 'b', c = 'c'):",
        "  return a + b + c",
        "func()");
  }
  /** The *-operand must be iterable; ints and strings are rejected. */
  @Test
  public void testArgsIsNotIterable() throws Exception {
    ev.checkEvalError(
        "argument after * must be an iterable, not int",
        "def func1(a, b): return a + b",
        "func1('a', *42)");

    ev.checkEvalError(
        "argument after * must be an iterable, not string",
        "def func2(a, b): return a + b",
        "func2('a', *'str')");
  }
// Parameters after a bare '*' are keyword-only: they cannot be filled
// positionally and are required unless given a default.
@Test
public void testKeywordOnly() throws Exception {
  ev.checkEvalError(
      "func() missing 1 required keyword-only argument: b", //
      "def func(a, *, b): pass",
      "func(5)");
  ev.checkEvalError(
      "func() accepts no more than 1 positional argument but got 2",
      "def func(a, *, b): pass",
      "func(5, 6)");
  // Keyword-only parameters with defaults behave like ordinary defaults.
  ev.exec("def func(a, *, b, c = 'c'): return a + b + c");
  assertThat(ev.eval("func('a', b = 'b')")).isEqualTo("abc");
  assertThat(ev.eval("func('a', b = 'b', c = 'd')")).isEqualTo("abd");
}
// A *args parameter absorbs surplus positionals; parameters declared
// after it remain keyword-only and required.
@Test
public void testStarArgsAndKeywordOnly() throws Exception {
  ev.checkEvalError(
      "func() missing 1 required keyword-only argument: b",
      "def func(a, *args, b): pass",
      "func(5)");
  ev.checkEvalError(
      "func() missing 1 required keyword-only argument: b",
      "def func(a, *args, b): pass",
      "func(5, 6)");
  ev.exec("def func(a, *args, b, c = 'c'): return a + str(args) + b + c");
  assertThat(ev.eval("func('a', b = 'b')")).isEqualTo("a()bc");
  assertThat(ev.eval("func('a', b = 'b', c = 'd')")).isEqualTo("a()bd");
  assertThat(ev.eval("func('a', 1, 2, b = 'b')")).isEqualTo("a(1, 2)bc");
  assertThat(ev.eval("func('a', 1, 2, b = 'b', c = 'd')")).isEqualTo("a(1, 2)bd");
}
// The names of the residual parameters (*args, **kwargs) are not
// addressable as keywords; "kwargs=1" lands inside **kwargs instead.
@Test
public void testCannotPassResidualsByName() throws Exception {
  ev.checkEvalError(
      "f() got unexpected keyword argument: args", "def f(*args): pass", "f(args=[])");
  ev.exec("def f(**kwargs): return kwargs");
  assertThat(Starlark.repr(ev.eval("f(kwargs=1)"))).isEqualTo("{\"kwargs\": 1}");
}
// A required parameter after *b stays keyword-only no matter how many
// positionals are supplied; note the 1-tuple repr "a(1,)c".
@Test
public void testKeywordOnlyAfterStarArg() throws Exception {
  ev.checkEvalError(
      "func() missing 1 required keyword-only argument: c",
      "def func(a, *b, c): pass",
      "func(5)");
  ev.checkEvalError(
      "func() missing 1 required keyword-only argument: c",
      "def func(a, *b, c): pass",
      "func(5, 6, 7)");
  ev.exec("def func(a, *b, c): return a + str(b) + c");
  assertThat(ev.eval("func('a', c = 'c')")).isEqualTo("a()c");
  assertThat(ev.eval("func('a', 1, c = 'c')")).isEqualTo("a(1,)c");
  assertThat(ev.eval("func('a', 1, 2, c = 'c')")).isEqualTo("a(1, 2)c");
}
// Keys of a **kwargs dict must be strings; non-string keys are an error.
@Test
public void testKwargsBadKey() throws Exception {
  ev.checkEvalError(
      "keywords must be strings, not int", //
      "def func(a, b): return a + b",
      "func('a', **{3: 1})");
}
// The **kwargs call operand must be a dict; other types are rejected.
@Test
public void testKwargsIsNotDict() throws Exception {
  ev.checkEvalError(
      "argument after ** must be a dict, not int",
      "def func(a, b): return a + b",
      "func('a', **42)");
}
// A **kwargs entry must not target a parameter already bound positionally.
@Test
public void testKwargsCollision() throws Exception {
  ev.checkEvalError(
      "func() got multiple values for parameter 'b'",
      "def func(a, b): return a + b",
      "func('a', 'b', **{'b': 'foo'})");
}
// A **kwargs entry must not target a parameter already bound by keyword.
@Test
public void testKwargsCollisionWithNamed() throws Exception {
  ev.checkEvalError(
      "func() got multiple values for parameter 'b'",
      "def func(a, b): return a + b",
      "func('a', b = 'b', **{'b': 'foo'})");
}
// Default values are evaluated at definition time in the defining scope:
// bar's local a = 3 does not affect foo's captured default of 2.
@Test
public void testDefaultArguments2() throws Exception {
  ev.exec(
      "a = 2",
      "def foo(x=a): return x",
      "def bar():",
      " a = 3",
      " return foo()",
      "v = bar()\n");
  assertThat(ev.lookup("v")).isEqualTo(StarlarkInt.of(2));
}
// A positional argument fills the first optional parameter ('value'),
// leaving later optionals at their defaults.
@Test
public void testMixingPositionalOptional() throws Exception {
  ev.exec(
      "def f(name, value = '', optional = ''):", //
      " return value",
      "v = f('name', 'value')");
  assertThat(ev.lookup("v")).isEqualTo("value");
}
// *list call arguments splice after explicit positionals and combine
// with keyword arguments and defaults.
@Test
public void testStarArg() throws Exception {
  ev.exec(
      "def f(name, value = '1', optional = '2'): return name + value + optional",
      "v1 = f(*['name', 'value'])",
      "v2 = f('0', *['name', 'value'])",
      "v3 = f('0', optional = '3', *['b'])",
      "v4 = f(name='a', *[])\n");
  assertThat(ev.lookup("v1")).isEqualTo("namevalue2");
  assertThat(ev.lookup("v2")).isEqualTo("0namevalue");
  assertThat(ev.lookup("v3")).isEqualTo("0b3");
  assertThat(ev.lookup("v4")).isEqualTo("a12");
}
// A trailing *rest parameter collects the surplus positionals; the '|'
// marker in the result separates named parameters from the rest.
@Test
public void testStarParam() throws Exception {
  ev.exec(
      "def f(name, value = '1', optional = '2', *rest):",
      " r = name + value + optional + '|'",
      " for x in rest: r += x",
      " return r",
      "v1 = f('a', 'b', 'c', 'd', 'e')",
      "v2 = f('a', optional='b', value='c')",
      "v3 = f('a')");
  assertThat(ev.lookup("v1")).isEqualTo("abc|de");
  assertThat(ev.lookup("v2")).isEqualTo("acb|");
  assertThat(ev.lookup("v3")).isEqualTo("a12|");
}
// Full residual-parameter matrix: defaults, *args, **kwargs, and a *list
// splice all interacting in one signature; b1 also checks dict.items()
// ordering of the collected **kwargs.
@Test
public void testKwParam() throws Exception {
  ev.exec(
      "def foo(a, b, c=3, d=4, g=7, h=8, *args, **kwargs):\n"
          + " return (a, b, c, d, g, h, args, kwargs)\n"
          + "v1 = foo(1, 2)\n"
          + "v2 = foo(1, h=9, i=0, *['x', 'y', 'z', 't'])\n"
          + "v3 = foo(1, i=0, *[2, 3, 4, 5, 6, 7, 8])\n"
          + "def bar(**kwargs):\n"
          + " return kwargs\n"
          + "b1 = bar(name='foo', type='jpg', version=42).items()\n"
          + "b2 = bar()\n");
  assertThat(Starlark.repr(ev.lookup("v1"))).isEqualTo("(1, 2, 3, 4, 7, 8, (), {})");
  assertThat(Starlark.repr(ev.lookup("v2")))
      .isEqualTo("(1, \"x\", \"y\", \"z\", \"t\", 9, (), {\"i\": 0})");
  assertThat(Starlark.repr(ev.lookup("v3"))).isEqualTo("(1, 2, 3, 4, 5, 6, (7, 8), {\"i\": 0})");
  assertThat(Starlark.repr(ev.lookup("b1")))
      .isEqualTo("[(\"name\", \"foo\"), (\"type\", \"jpg\"), (\"version\", 42)]");
  assertThat(Starlark.repr(ev.lookup("b2"))).isEqualTo("{}");
}
// Trailing commas after the last argument — including after *args and
// **kwargs spreads — are accepted, matching Python 3 syntax.
@Test
public void testTrailingCommas() throws Exception {
  // Test that trailing commas are allowed in function definitions and calls
  // even after last *args or **kwargs expressions, like python3
  ev.exec(
      "def f(*args, **kwargs): pass\n"
          + "v1 = f(1,)\n"
          + "v2 = f(*(1,2),)\n"
          + "v3 = f(a=1,)\n"
          + "v4 = f(**{\"a\": 1},)\n");
  assertThat(Starlark.repr(ev.lookup("v1"))).isEqualTo("None");
  assertThat(Starlark.repr(ev.lookup("v2"))).isEqualTo("None");
  assertThat(Starlark.repr(ev.lookup("v3"))).isEqualTo("None");
  assertThat(Starlark.repr(ev.lookup("v4"))).isEqualTo("None");
}
// Exhaustive check of the calling convention for a two-parameter function
// (one required, one defaulted), then for a keyword-only signature.
@Test
public void testCalls() throws Exception {
  ev.exec("def f(a, b = None): return a, b");

  // All legal positional/keyword combinations.
  assertThat(Starlark.repr(ev.eval("f(1)"))).isEqualTo("(1, None)");
  assertThat(Starlark.repr(ev.eval("f(1, 2)"))).isEqualTo("(1, 2)");
  assertThat(Starlark.repr(ev.eval("f(a=1)"))).isEqualTo("(1, None)");
  assertThat(Starlark.repr(ev.eval("f(a=1, b=2)"))).isEqualTo("(1, 2)");
  assertThat(Starlark.repr(ev.eval("f(b=2, a=1)"))).isEqualTo("(1, 2)");

  // Error cases: missing, surplus, unknown, and duplicate arguments.
  ev.checkEvalError(
      "f() missing 1 required positional argument: a", //
      "f()");
  ev.checkEvalError(
      "f() accepts no more than 2 positional arguments but got 3", //
      "f(1, 2, 3)");
  ev.checkEvalError(
      "f() got unexpected keyword arguments: c, d", //
      "f(1, 2, c=3, d=4)");
  // NOTE(review): the original repeated this exact "f(b=2)" assertion twice
  // back to back; the redundant copy was removed — it added no coverage.
  ev.checkEvalError(
      "f() missing 1 required positional argument: a", //
      "f(b=2)");
  ev.checkEvalError(
      "f() got multiple values for parameter 'a'", //
      "f(2, a=1)");
  ev.checkEvalError(
      "f() got unexpected keyword argument: c", //
      "f(b=2, a=1, c=3)");

  // Keyword-only signature: typo suggestion and positional rejection.
  ev.exec("def g(*, one, two, three): pass");
  ev.checkEvalError(
      "g() got unexpected keyword argument: tree (did you mean 'three'?)", //
      "g(tree=3)");
  ev.checkEvalError(
      "g() does not accept positional arguments, but got 3", //
      "g(1, 2 ,3)");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.notebook.repo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
import org.apache.zeppelin.interpreter.InterpreterFactory;
import org.apache.zeppelin.interpreter.InterpreterOption;
import org.apache.zeppelin.interpreter.mock.MockInterpreter1;
import org.apache.zeppelin.interpreter.mock.MockInterpreter2;
import org.apache.zeppelin.notebook.JobListenerFactory;
import org.apache.zeppelin.notebook.Note;
import org.apache.zeppelin.notebook.Notebook;
import org.apache.zeppelin.notebook.Paragraph;
import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.Job.Status;
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests that {@link NotebookRepoSync} keeps two configured notebook storage
 * backends (a primary VFS repo and a secondary mock repo) in sync on create,
 * delete, save, and list operations.
 *
 * <p>Each test runs against a fresh temp Zeppelin home created in setUp()
 * and removed in tearDown(). The class implements JobListenerFactory so it
 * can be handed to the Notebook constructor directly.
 */
public class NotebookRepoSyncTest implements JobListenerFactory {
  // Temp Zeppelin home for one test run, plus the two notebook dirs backing
  // the primary (index 0) and secondary (index 1) storage repos.
  private File mainZepDir;
  private ZeppelinConfiguration conf;
  private SchedulerFactory schedulerFactory;
  private File mainNotebookDir;
  private File secNotebookDir;
  private Notebook notebookSync;
  private NotebookRepoSync notebookRepoSync;
  private InterpreterFactory factory;
  private static final Logger LOG = LoggerFactory.getLogger(NotebookRepoSyncTest.class);

  @Before
  public void setUp() throws Exception {
    // Unique temp home per run so parallel/repeated runs don't collide.
    String zpath = System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis();
    mainZepDir = new File(zpath);
    mainZepDir.mkdirs();
    new File(mainZepDir, "conf").mkdirs();
    String mainNotePath = zpath+"/notebook";
    String secNotePath = mainNotePath + "_secondary";
    mainNotebookDir = new File(mainNotePath);
    secNotebookDir = new File(secNotePath);
    mainNotebookDir.mkdirs();
    secNotebookDir.mkdirs();
    // Configure Zeppelin entirely through system properties; note the two
    // storage implementations — this is what makes getRepoCount() == 2.
    // NOTE(review): these properties are process-global and are not restored
    // in tearDown().
    System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), mainZepDir.getAbsolutePath());
    System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), mainNotebookDir.getAbsolutePath());
    System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2");
    System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.VFSNotebookRepo,org.apache.zeppelin.notebook.repo.mock.VFSNotebookRepoMock");
    LOG.info("main Note dir : " + mainNotePath);
    LOG.info("secondary note dir : " + secNotePath);
    conf = ZeppelinConfiguration.create();
    this.schedulerFactory = new SchedulerFactory();
    MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
    MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2");
    factory = new InterpreterFactory(conf, new InterpreterOption(false), null);
    notebookRepoSync = new NotebookRepoSync(conf);
    notebookSync = new Notebook(conf, notebookRepoSync, schedulerFactory, factory, this);
  }

  @After
  public void tearDown() throws Exception {
    // Recursively remove the temp Zeppelin home created in setUp().
    delete(mainZepDir);
  }

  // Sanity check: the configured repo count never exceeds the supported max.
  @Test
  public void testRepoCount() throws IOException {
    assertTrue(notebookRepoSync.getMaxRepoNum() >= notebookRepoSync.getRepoCount());
  }

  // Creating a note through the Notebook facade must persist it to both
  // storages with the same note id.
  @Test
  public void testSyncOnCreate() throws IOException {
    /* check that both storage systems are empty */
    assertTrue(notebookRepoSync.getRepoCount() > 1);
    assertEquals(0, notebookRepoSync.list(0).size());
    assertEquals(0, notebookRepoSync.list(1).size());
    /* create note */
    Note note = notebookSync.createNote();
    // check that automatically saved on both storages
    assertEquals(1, notebookRepoSync.list(0).size());
    assertEquals(1, notebookRepoSync.list(1).size());
    assertEquals(notebookRepoSync.list(0).get(0).getId(),notebookRepoSync.list(1).get(0).getId());
  }

  // Removing a note must delete it from both storages.
  @Test
  public void testSyncOnDelete() throws IOException {
    /* create note */
    assertTrue(notebookRepoSync.getRepoCount() > 1);
    assertEquals(0, notebookRepoSync.list(0).size());
    assertEquals(0, notebookRepoSync.list(1).size());
    Note note = notebookSync.createNote();
    /* check that created in both storage systems */
    assertEquals(1, notebookRepoSync.list(0).size());
    assertEquals(1, notebookRepoSync.list(1).size());
    assertEquals(notebookRepoSync.list(0).get(0).getId(),notebookRepoSync.list(1).get(0).getId());
    /* remove Note */
    notebookSync.removeNote(notebookRepoSync.list(0).get(0).getId());
    /* check that deleted in both storages */
    assertEquals(0, notebookRepoSync.list(0).size());
    assertEquals(0, notebookRepoSync.list(1).size());
  }

  // A paragraph saved only to the primary storage must be propagated to the
  // secondary storage by an explicit sync(), preserving the paragraph id.
  @Test
  public void testSyncUpdateMain() throws IOException {
    /* create note */
    Note note = notebookSync.createNote();
    Paragraph p1 = note.addParagraph();
    Map config = p1.getConfig();
    config.put("enabled", true);
    p1.setConfig(config);
    p1.setText("hello world");
    /* new paragraph exists in note instance */
    assertEquals(1, note.getParagraphs().size());
    /* new paragraph not yet saved into storages */
    assertEquals(0, notebookRepoSync.get(0,
        notebookRepoSync.list(0).get(0).getId()).getParagraphs().size());
    assertEquals(0, notebookRepoSync.get(1,
        notebookRepoSync.list(1).get(0).getId()).getParagraphs().size());
    /* save to storage under index 0 (first storage) */
    notebookRepoSync.save(0, note);
    /* check paragraph saved to first storage */
    assertEquals(1, notebookRepoSync.get(0,
        notebookRepoSync.list(0).get(0).getId()).getParagraphs().size());
    /* check paragraph isn't saved to second storage */
    assertEquals(0, notebookRepoSync.get(1,
        notebookRepoSync.list(1).get(0).getId()).getParagraphs().size());
    /* apply sync */
    notebookRepoSync.sync();
    /* check whether added to second storage */
    assertEquals(1, notebookRepoSync.get(1,
        notebookRepoSync.list(1).get(0).getId()).getParagraphs().size());
    /* check whether same paragraph id */
    assertEquals(p1.getId(), notebookRepoSync.get(0,
        notebookRepoSync.list(0).get(0).getId()).getLastParagraph().getId());
    assertEquals(p1.getId(), notebookRepoSync.get(1,
        notebookRepoSync.list(1).get(0).getId()).getLastParagraph().getId());
  }

  // list() only reconciles a note added out-of-band to the secondary storage
  // when ZEPPELIN_NOTEBOOK_RELOAD_FROM_STORAGE is enabled.
  @Test
  public void testSyncOnList() throws IOException {
    /* check that both storage repos are empty */
    assertTrue(notebookRepoSync.getRepoCount() > 1);
    assertEquals(0, notebookRepoSync.list(0).size());
    assertEquals(0, notebookRepoSync.list(1).size());
    File srcDir = new File("src/test/resources/2A94M5J1Z");
    File destDir = new File(secNotebookDir + "/2A94M5J1Z");
    /* copy manually new notebook into secondary storage repo and check repos */
    try {
      FileUtils.copyDirectory(srcDir, destDir);
    } catch (IOException e) {
      e.printStackTrace();
    }
    assertEquals(0, notebookRepoSync.list(0).size());
    assertEquals(1, notebookRepoSync.list(1).size());
    /* Although new notebook is added to secondary storage it's not displayed
     * on list() with ZEPPELIN_NOTEBOOK_RELOAD_FROM_STORAGE set to false
     */
    System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_RELOAD_FROM_STORAGE.getVarName(), "false");
    assertEquals(0, notebookRepoSync.list().size());
    /* notebook is synced after ZEPPELIN_NOTEBOOK_RELOAD_FROM_STORAGE variable is set to true */
    System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_RELOAD_FROM_STORAGE.getVarName(), "true");
    assertEquals(1, notebookRepoSync.list().size());
    assertEquals(1, notebookRepoSync.list(0).size());
    assertEquals(1, notebookRepoSync.list(1).size());
  }

  // Recursively deletes a file or directory tree. Return values of delete()
  // are intentionally ignored (best-effort cleanup).
  static void delete(File file){
    if(file.isFile()) file.delete();
    else if(file.isDirectory()){
      File [] files = file.listFiles();
      if(files!=null && files.length>0){
        for(File f : files){
          delete(f);
        }
      }
      file.delete();
    }
  }

  // No-op job listener: tests don't care about paragraph job progress.
  @Override
  public JobListener getParagraphJobListener(Note note) {
    return new JobListener(){
      @Override
      public void onProgressUpdate(Job job, int progress) {
      }
      @Override
      public void beforeStatusChange(Job job, Status before, Status after) {
      }
      @Override
      public void afterStatusChange(Job job, Status before, Status after) {
      }
    };
  }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.admin;
import org.jboss.resteasy.annotations.cache.NoCache;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.Policy;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.store.PolicyStore;
import org.keycloak.authorization.store.ResourceStore;
import org.keycloak.authorization.store.StoreFactory;
import org.keycloak.models.ClientModel;
import org.keycloak.models.Constants;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserProvider;
import org.keycloak.representations.idm.authorization.PolicyRepresentation;
import org.keycloak.representations.idm.authorization.ResourceOwnerRepresentation;
import org.keycloak.representations.idm.authorization.ResourceRepresentation;
import org.keycloak.representations.idm.authorization.ScopeRepresentation;
import org.keycloak.services.ErrorResponse;
import org.keycloak.services.resources.admin.RealmAuth;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.keycloak.models.utils.ModelToRepresentation.toRepresentation;
import static org.keycloak.models.utils.RepresentationToModel.toModel;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
/**
 * JAX-RS sub-resource exposing CRUD and query endpoints for the resources of
 * a single {@link ResourceServer} in the Keycloak authorization admin API.
 *
 * <p>All write endpoints require manage permission and all read endpoints
 * require view permission on the realm (enforced via {@link RealmAuth} when
 * one was supplied; a null auth skips the check).
 */
public class ResourceSetService {
  private final AuthorizationProvider authorization;
  private final RealmAuth auth;
  private ResourceServer resourceServer;

  public ResourceSetService(ResourceServer resourceServer, AuthorizationProvider authorization, RealmAuth auth) {
    this.resourceServer = resourceServer;
    this.authorization = authorization;
    this.auth = auth;
  }

  /**
   * Creates a new resource. If an owner is given and it is not the resource
   * server's own client id, it is resolved as a user (by id, then by
   * username) and normalized to the user's internal id. Returns 201 with a
   * representation carrying only the new id.
   */
  @POST
  @Consumes("application/json")
  @Produces("application/json")
  public Response create(ResourceRepresentation resource) {
    requireManage();
    StoreFactory storeFactory = this.authorization.getStoreFactory();
    Resource existingResource = storeFactory.getResourceStore().findByName(resource.getName(), this.resourceServer.getId());
    ResourceOwnerRepresentation owner = resource.getOwner();
    // NOTE(review): existingResource.getOwner() is compared against the
    // ResourceOwnerRepresentation from the request — verify both sides are
    // the same type/value, otherwise this duplicate-name guard may never fire.
    if (existingResource != null && existingResource.getResourceServer().getId().equals(this.resourceServer.getId())
        && existingResource.getOwner().equals(owner)) {
      return ErrorResponse.exists("Resource with name [" + resource.getName() + "] already exists.");
    }
    if (owner != null) {
      String ownerId = owner.getId();
      if (ownerId != null) {
        if (!resourceServer.getClientId().equals(ownerId)) {
          // Owner is not the client itself: resolve as a user, first by id,
          // then by username, and normalize to the internal user id.
          RealmModel realm = authorization.getRealm();
          KeycloakSession keycloakSession = authorization.getKeycloakSession();
          UserProvider users = keycloakSession.users();
          UserModel ownerModel = users.getUserById(ownerId, realm);
          if (ownerModel == null) {
            ownerModel = users.getUserByUsername(ownerId, realm);
          }
          if (ownerModel == null) {
            return ErrorResponse.error("Owner must be a valid username or user identifier. If the resource server, the client id or null.", Status.BAD_REQUEST);
          }
          owner.setId(ownerModel.getId());
        }
      }
    }
    Resource model = toModel(resource, this.resourceServer, authorization);
    ResourceRepresentation representation = new ResourceRepresentation();
    representation.setId(model.getId());
    return Response.status(Status.CREATED).entity(representation).build();
  }

  /**
   * Updates an existing resource by id. 404 if the id is unknown for this
   * resource server; 204 on success.
   */
  @Path("{id}")
  @PUT
  @Consumes("application/json")
  @Produces("application/json")
  public Response update(@PathParam("id") String id, ResourceRepresentation resource) {
    requireManage();
    resource.setId(id);
    StoreFactory storeFactory = this.authorization.getStoreFactory();
    ResourceStore resourceStore = storeFactory.getResourceStore();
    Resource model = resourceStore.findById(resource.getId(), resourceServer.getId());
    if (model == null) {
      return Response.status(Status.NOT_FOUND).build();
    }
    // toModel applies the representation onto the stored model in place.
    toModel(resource, resourceServer, authorization);
    return Response.noContent().build();
  }

  /**
   * Deletes a resource and untangles its permissions: a policy that
   * references only this resource is deleted outright; otherwise the
   * resource is just detached from the policy.
   */
  @Path("{id}")
  @DELETE
  public Response delete(@PathParam("id") String id) {
    requireManage();
    StoreFactory storeFactory = authorization.getStoreFactory();
    Resource resource = storeFactory.getResourceStore().findById(id, resourceServer.getId());
    if (resource == null) {
      return Response.status(Status.NOT_FOUND).build();
    }
    PolicyStore policyStore = storeFactory.getPolicyStore();
    List<Policy> policies = policyStore.findByResource(id, resourceServer.getId());
    for (Policy policyModel : policies) {
      if (policyModel.getResources().size() == 1) {
        policyStore.delete(policyModel.getId());
      } else {
        policyModel.removeResource(resource);
      }
    }
    storeFactory.getResourceStore().delete(id);
    return Response.noContent().build();
  }

  /** Returns the full (deep) representation of one resource, or 404. */
  @Path("{id}")
  @GET
  @NoCache
  @Produces("application/json")
  public Response findById(@PathParam("id") String id) {
    requireView();
    StoreFactory storeFactory = authorization.getStoreFactory();
    Resource model = storeFactory.getResourceStore().findById(id, resourceServer.getId());
    if (model == null) {
      return Response.status(Status.NOT_FOUND).build();
    }
    return Response.ok(toRepresentation(model, this.resourceServer, authorization, true)).build();
  }

  /**
   * Lists the scopes of one resource. When the resource has a type, scopes
   * of other server-owned resources of the same type are merged in
   * (deduplicated against the resource's own scopes).
   */
  @Path("{id}/scopes")
  @GET
  @NoCache
  @Produces("application/json")
  public Response getScopes(@PathParam("id") String id) {
    requireView();
    StoreFactory storeFactory = authorization.getStoreFactory();
    Resource model = storeFactory.getResourceStore().findById(id, resourceServer.getId());
    if (model == null) {
      return Response.status(Status.NOT_FOUND).build();
    }
    List<ScopeRepresentation> scopes = model.getScopes().stream().map(scope -> {
      ScopeRepresentation representation = new ScopeRepresentation();
      representation.setId(scope.getId());
      representation.setName(scope.getName());
      return representation;
    }).collect(Collectors.toList());
    if (model.getType() != null) {
      // Inherit scopes from sibling resources of the same type owned by the
      // resource server itself (skipping the resource we started from).
      ResourceStore resourceStore = authorization.getStoreFactory().getResourceStore();
      for (Resource typed : resourceStore.findByType(model.getType(), resourceServer.getId())) {
        if (typed.getOwner().equals(resourceServer.getClientId()) && !typed.getId().equals(model.getId())) {
          scopes.addAll(typed.getScopes().stream().map(model1 -> {
            ScopeRepresentation scope = new ScopeRepresentation();
            scope.setId(model1.getId());
            scope.setName(model1.getName());
            String iconUri = model1.getIconUri();
            if (iconUri != null) {
              scope.setIconUri(iconUri);
            }
            return scope;
          }).filter(scopeRepresentation -> !scopes.contains(scopeRepresentation)).collect(Collectors.toList()));
        }
      }
    }
    return Response.ok(scopes).build();
  }

  /**
   * Lists the policies attached to a resource, collected by direct resource
   * reference, by the resource's type, and by its scope ids (deduplicated).
   */
  @Path("{id}/permissions")
  @GET
  @NoCache
  @Produces("application/json")
  public Response getPermissions(@PathParam("id") String id) {
    requireView();
    StoreFactory storeFactory = authorization.getStoreFactory();
    Resource model = storeFactory.getResourceStore().findById(id, resourceServer.getId());
    if (model == null) {
      return Response.status(Status.NOT_FOUND).build();
    }
    PolicyStore policyStore = authorization.getStoreFactory().getPolicyStore();
    Set<Policy> policies = new HashSet<>();
    policies.addAll(policyStore.findByResource(model.getId(), resourceServer.getId()));
    policies.addAll(policyStore.findByResourceType(model.getType(), resourceServer.getId()));
    policies.addAll(policyStore.findByScopeIds(model.getScopes().stream().map(scope -> scope.getId()).collect(Collectors.toList()), resourceServer.getId()));
    List<PolicyRepresentation> representation = new ArrayList<>();
    for (Policy policyModel : policies) {
      PolicyRepresentation policy = new PolicyRepresentation();
      policy.setId(policyModel.getId());
      policy.setName(policyModel.getName());
      policy.setType(policyModel.getType());
      if (!representation.contains(policy)) {
        representation.add(policy);
      }
    }
    return Response.ok(representation).build();
  }

  /**
   * Finds a single resource by exact name. 400 without a name; 200 with an
   * empty body when no resource matches (by design, not 404).
   */
  @Path("/search")
  @GET
  @Produces("application/json")
  @NoCache
  public Response find(@QueryParam("name") String name) {
    this.auth.requireView();
    StoreFactory storeFactory = authorization.getStoreFactory();
    if (name == null) {
      return Response.status(Status.BAD_REQUEST).build();
    }
    Resource model = storeFactory.getResourceStore().findByName(name, this.resourceServer.getId());
    if (model == null) {
      return Response.status(Status.OK).build();
    }
    return Response.ok(toRepresentation(model, this.resourceServer, authorization)).build();
  }

  /**
   * General filtered listing. Blank filters are ignored; 'owner' accepts a
   * client id or a username and is resolved to an internal id; 'scope' is
   * resolved to scope ids (returning an empty list if none match); 'deep'
   * defaults to true and controls representation depth.
   */
  @GET
  @NoCache
  @Produces("application/json")
  public Response find(@QueryParam("_id") String id,
                       @QueryParam("name") String name,
                       @QueryParam("uri") String uri,
                       @QueryParam("owner") String owner,
                       @QueryParam("type") String type,
                       @QueryParam("scope") String scope,
                       @QueryParam("deep") Boolean deep,
                       @QueryParam("first") Integer firstResult,
                       @QueryParam("max") Integer maxResult) {
    requireView();
    StoreFactory storeFactory = authorization.getStoreFactory();
    if (deep == null) {
      deep = true;
    }
    Map<String, String[]> search = new HashMap<>();
    if (id != null && !"".equals(id.trim())) {
      search.put("id", new String[] {id});
    }
    if (name != null && !"".equals(name.trim())) {
      search.put("name", new String[] {name});
    }
    if (uri != null && !"".equals(uri.trim())) {
      search.put("uri", new String[] {uri});
    }
    if (owner != null && !"".equals(owner.trim())) {
      // Resolve the owner filter: client id first, then username; fall back
      // to the raw value if neither matches.
      RealmModel realm = authorization.getKeycloakSession().getContext().getRealm();
      ClientModel clientModel = realm.getClientByClientId(owner);
      if (clientModel != null) {
        owner = clientModel.getId();
      } else {
        UserModel user = authorization.getKeycloakSession().users().getUserByUsername(owner, realm);
        if (user != null) {
          owner = user.getId();
        }
      }
      search.put("owner", new String[] {owner});
    }
    if (type != null && !"".equals(type.trim())) {
      search.put("type", new String[] {type});
    }
    if (scope != null && !"".equals(scope.trim())) {
      HashMap<String, String[]> scopeFilter = new HashMap<>();
      scopeFilter.put("name", new String[] {scope});
      List<Scope> scopes = authorization.getStoreFactory().getScopeStore().findByResourceServer(scopeFilter, resourceServer.getId(), -1, -1);
      if (scopes.isEmpty()) {
        return Response.ok(Collections.emptyList()).build();
      }
      search.put("scope", scopes.stream().map(Scope::getId).toArray(String[]::new));
    }
    // Effectively-final copy for use inside the lambda below.
    Boolean finalDeep = deep;
    return Response.ok(
        storeFactory.getResourceStore().findByResourceServer(search, this.resourceServer.getId(), firstResult != null ? firstResult : -1, maxResult != null ? maxResult : Constants.DEFAULT_MAX_RESULTS).stream()
            .map(resource -> toRepresentation(resource, resourceServer, authorization, finalDeep))
            .collect(Collectors.toList()))
        .build();
  }

  // Permission guards; a null auth (e.g. internal callers) skips the check.
  private void requireView() {
    if (this.auth != null) {
      this.auth.requireView();
    }
  }

  private void requireManage() {
    if (this.auth != null) {
      this.auth.requireManage();
    }
  }
}
| |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.controller;
import com.thoughtworks.go.config.Agent;
import com.thoughtworks.go.config.SecurityConfig;
import com.thoughtworks.go.config.ServerConfig;
import com.thoughtworks.go.config.UpdateConfigCommand;
import com.thoughtworks.go.domain.JarDetector;
import com.thoughtworks.go.helper.AgentInstanceMother;
import com.thoughtworks.go.plugin.infra.commons.PluginsZip;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.*;
import com.thoughtworks.go.util.SystemEnvironment;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Base64;
import static com.thoughtworks.go.util.SystemEnvironment.AGENT_EXTRA_PROPERTIES;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Base64.getEncoder;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
import static org.springframework.http.HttpStatus.CONFLICT;
public class AgentRegistrationControllerTest {
// Servlet request/response doubles shared by every test in this class.
private final MockHttpServletRequest request = new MockHttpServletRequest();
private final MockHttpServletResponse response = new MockHttpServletResponse();
// Mocked collaborators of AgentRegistrationController, wired in setUp().
private AgentService agentService;
private GoConfigService goConfigService;
private AgentRegistrationController controller;
private SystemEnvironment systemEnvironment;
private PluginsZip pluginsZip;
// Real plugins.zip file on disk, since the controller checksums its path.
private File pluginZipFile;
private EphemeralAutoRegisterKeyService ephemeralAutoRegisterKeyService;
// Builds the controller under test with mocked services and a real (tiny)
// plugins.zip file, then primes the checksum caches the handlers rely on.
@BeforeEach
public void setUp(@TempDir Path temporaryFolder) throws Exception {
  agentService = mock(AgentService.class);
  systemEnvironment = mock(SystemEnvironment.class);
  goConfigService = mock(GoConfigService.class);
  ephemeralAutoRegisterKeyService = mock(EphemeralAutoRegisterKeyService.class);
  pluginZipFile = Files.createFile(temporaryFolder.resolve("plugins.zip")).toFile();
  FileUtils.writeStringToFile(pluginZipFile, "content", UTF_8);
  when(systemEnvironment.get(SystemEnvironment.ALL_PLUGINS_ZIP_PATH)).thenReturn(pluginZipFile.getAbsolutePath());
  when(systemEnvironment.get(AGENT_EXTRA_PROPERTIES)).thenReturn("");
  pluginsZip = mock(PluginsZip.class);
  controller = new AgentRegistrationController(agentService, goConfigService, systemEnvironment, pluginsZip, ephemeralAutoRegisterKeyService);
  // Pre-compute the agent/launcher/TFS-SDK checksums served to agents.
  controller.populateAgentChecksum();
  controller.populateLauncherChecksum();
  controller.populateTFSSDKChecksum();
}
// An unregistered agent with a valid token gets a registration request
// forwarded to AgentService with the details it supplied.
@Test
public void shouldRegisterWithProvidedAgentInformation() {
  when(agentService.isRegistered("blahAgent-uuid")).thenReturn(false);
  ServerConfig serverConfig = mockedServerConfig("token-generation-key", "someKey");
  when(goConfigService.serverConfig()).thenReturn(serverConfig);
  when(agentService.createAgentUsername("blahAgent-uuid", request.getRemoteAddr(), "blahAgent-host")).thenReturn(new Username("some-agent-login-name"));
  controller.agentRequest("blahAgent-host", "blahAgent-uuid", "blah-location", "34567", "osx", "", "", "", "", "", "", token("blahAgent-uuid", serverConfig.getTokenGenerationKey()), request);
  verify(agentService).requestRegistration(AgentRuntimeInfo.fromServer(new Agent("blahAgent-uuid", "blahAgent-host", request.getRemoteAddr()), false, "blah-location", 34567L, "osx"));
}
// Supplying the matching auto-register key ("someKey") causes the agent to
// be registered immediately, in addition to the registration request.
@Test
public void shouldAutoRegisterAgent() {
  String uuid = "uuid";
  final ServerConfig serverConfig = mockedServerConfig("token-generation-key", "someKey");
  final String token = token(uuid, serverConfig.getTokenGenerationKey());
  when(agentService.isRegistered(uuid)).thenReturn(false);
  when(goConfigService.serverConfig()).thenReturn(serverConfig);
  when(agentService.createAgentUsername(uuid, request.getRemoteAddr(), "host")).thenReturn(new Username("some-agent-login-name"));
  controller.agentRequest("host", uuid, "location", "233232", "osx", "someKey", "", "", "", "", "", token, request);
  verify(agentService).requestRegistration(AgentRuntimeInfo.fromServer(new Agent(uuid, "host", request.getRemoteAddr()), false, "location", 233232L, "osx"));
  verify(agentService).register(any(Agent.class));
}
    @Test
    public void shouldAutoRegisterAgentWithHostnameFromAutoRegisterProperties() {
        // When auto-register properties supply a hostname, it overrides the hostname
        // the agent itself reported ("host" vs "autoregister-hostname").
        String uuid = "uuid";
        when(agentService.isRegistered(uuid)).thenReturn(false);
        ServerConfig serverConfig = mockedServerConfig("token-generation-key", "someKey");
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.createAgentUsername(uuid, request.getRemoteAddr(), "autoregister-hostname")).thenReturn(new Username("some-agent-login-name"));
        controller.agentRequest("host", uuid, "location", "233232", "osx", "someKey", "", "", "autoregister-hostname", "", "", token(uuid, serverConfig.getTokenGenerationKey()), request);
        verify(agentService).requestRegistration(AgentRuntimeInfo.fromServer(
            new Agent(uuid, "autoregister-hostname", request.getRemoteAddr()), false, "location", 233232L, "osx"));
        verify(agentService).register(any(Agent.class));
    }
    @Test
    public void shouldNotAutoRegisterAgentIfKeysDoNotMatch() {
        // An empty auto-register key does not match the configured "someKey":
        // the agent is only queued for registration and the config is never updated.
        String uuid = "uuid";
        when(agentService.isRegistered(uuid)).thenReturn(false);
        ServerConfig serverConfig = mockedServerConfig("token-generation-key", "someKey");
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.createAgentUsername(uuid, request.getRemoteAddr(), "host")).thenReturn(new Username("some-agent-login-name"));
        controller.agentRequest("host", uuid, "location", "233232", "osx", "", "", "", "", "", "", token(uuid, serverConfig.getTokenGenerationKey()), request);
        verify(agentService).requestRegistration(AgentRuntimeInfo.fromServer(new Agent(uuid, "host", request.getRemoteAddr()), false, "location", 233232L, "osx"));
        verify(goConfigService, never()).updateConfig(any(UpdateConfigCommand.class));
    }
    @Test
    public void checkAgentStatusShouldIncludeMd5Checksum_forAgent_forLauncher_whenChecksumsAreCached() throws Exception {
        // The status endpoint must report the MD5 of every artifact the agent may download:
        // TFS SDK jar, launcher jar, agent jar and the plugins zip.
        when(pluginsZip.md5()).thenReturn("plugins-zip-md5");
        when(systemEnvironment.get(AGENT_EXTRA_PROPERTIES)).thenReturn("extra=property");
        controller.checkAgentStatus(response);
        try (InputStream stream = JarDetector.tfsJar(systemEnvironment).getJarURL().openStream()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader(SystemEnvironment.AGENT_TFS_SDK_MD5_HEADER));
        }
        try (InputStream stream = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent-launcher.jar").invoke()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader(SystemEnvironment.AGENT_LAUNCHER_CONTENT_MD5_HEADER));
        }
        try (InputStream stream = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent.jar").invoke()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader(SystemEnvironment.AGENT_CONTENT_MD5_HEADER));
        }
        assertEquals("plugins-zip-md5", response.getHeader(SystemEnvironment.AGENT_PLUGINS_ZIP_MD5_HEADER));
    }
@Test
public void checkAgentStatusShouldIncludeExtraPropertiesInBase64() {
final String extraPropertiesValue = "extra=property another=extra.property";
final String base64ExtraPropertiesValue = java.util.Base64.getEncoder().encodeToString(extraPropertiesValue.getBytes(UTF_8));
when(pluginsZip.md5()).thenReturn("plugins-zip-md5");
when(systemEnvironment.get(AGENT_EXTRA_PROPERTIES)).thenReturn(extraPropertiesValue);
controller.checkAgentStatus(response);
assertEquals(base64ExtraPropertiesValue, response.getHeader(SystemEnvironment.AGENT_EXTRA_PROPERTIES_HEADER));
}
    @Test
    public void headShouldIncludeMd5ChecksumAndServerUrl_forAgent() throws Exception {
        // HEAD for agent.jar reports the jar's MD5 so agents can skip unchanged downloads.
        controller.checkAgentVersion(response);
        try (InputStream stream = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent.jar").invoke()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader("Content-MD5"));
        }
    }
    @Test
    public void headShouldIncludeMd5ChecksumAndServerUrl_forAgentLauncher() throws Exception {
        // Same contract as the agent jar HEAD request, but for agent-launcher.jar.
        controller.checkAgentLauncherVersion(response);
        try (InputStream stream = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent-launcher.jar").invoke()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader("Content-MD5"));
        }
    }
    @Test
    public void contentShouldIncludeMd5Checksum_forAgent() throws Exception {
        // Downloading agent.jar must stream the exact jar bytes plus a matching Content-MD5 header.
        controller.downloadAgent(response);
        assertEquals("application/octet-stream", response.getContentType());
        try (InputStream stream = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent.jar").invoke()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader("Content-MD5"));
        }
        try (InputStream is = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent.jar").invoke()) {
            assertTrue(Arrays.equals(IOUtils.toByteArray(is), response.getContentAsByteArray()));
        }
    }
    @Test
    public void contentShouldIncludeExtraAgentPropertiesInBase64_forAgent() throws IOException {
        // The download response also carries the Base64-encoded extra properties header.
        final String extraPropertiesValue = "extra=property another=extra.property";
        final String base64ExtraPropertiesValue = getEncoder().encodeToString(extraPropertiesValue.getBytes(UTF_8));
        when(systemEnvironment.get(AGENT_EXTRA_PROPERTIES)).thenReturn(extraPropertiesValue);
        controller.downloadAgent(response);
        assertEquals(base64ExtraPropertiesValue, response.getHeader(SystemEnvironment.AGENT_EXTRA_PROPERTIES_HEADER));
    }
    @Test
    public void shouldSendAnEmptyStringInBase64_AsAgentExtraProperties_IfTheValueIsTooBigAfterConvertingToBase64() throws IOException {
        // Values whose Base64 form would exceed MAX_HEADER_LENGTH are replaced by the
        // Base64 encoding of an empty string rather than truncated.
        final String longExtraPropertiesValue = StringUtils.rightPad("", AgentRegistrationController.MAX_HEADER_LENGTH, "z");
        final String expectedValueToBeUsedForProperties = "";
        final String expectedBase64ExtraPropertiesValue = getEncoder().encodeToString(expectedValueToBeUsedForProperties.getBytes(UTF_8));
        when(systemEnvironment.get(AGENT_EXTRA_PROPERTIES)).thenReturn(longExtraPropertiesValue);
        controller.downloadAgent(response);
        assertEquals(expectedBase64ExtraPropertiesValue, response.getHeader(SystemEnvironment.AGENT_EXTRA_PROPERTIES_HEADER));
    }
    @Test
    public void contentShouldIncludeMd5Checksum_forAgentLauncher() throws Exception {
        // Same download contract as agent.jar, but for agent-launcher.jar.
        controller.downloadAgentLauncher(response);
        assertEquals("application/octet-stream", response.getContentType());
        try (InputStream stream = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent-launcher.jar").invoke()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader("Content-MD5"));
        }
        try (InputStream is = JarDetector.createFromRelativeDefaultFile(systemEnvironment, "agent-launcher.jar").invoke()) {
            assertTrue(Arrays.equals(IOUtils.toByteArray(is), response.getContentAsByteArray()));
        }
    }
    @Test
    public void headShouldIncludeMd5Checksum_forPluginsZip() {
        // The plugins-zip checksum comes from PluginsZip, not from hashing the file here.
        when(pluginsZip.md5()).thenReturn("md5");
        controller.checkAgentPluginsZipStatus(response);
        assertEquals("md5", response.getHeader("Content-MD5"));
        verify(pluginsZip).md5();
    }
@Test
public void shouldReturnAgentPluginsZipWhenRequested() throws Exception {
when(pluginsZip.md5()).thenReturn("md5");
controller.downloadPluginsZip(response);
String actual = response.getContentAsString();
assertEquals("application/octet-stream", response.getContentType());
assertEquals("content", actual);
}
    @Test
    public void shouldReturnChecksumOfTfsJar() throws Exception {
        // HEAD for the TFS SDK jar reports its MD5 checksum.
        controller.checkTfsImplVersion(response);
        try (InputStream stream = JarDetector.tfsJar(systemEnvironment).getJarURL().openStream()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader("Content-MD5"));
        }
    }
    @Test
    public void shouldRenderTheTfsJar() throws Exception {
        // Downloading the TFS SDK jar streams the exact bytes plus a matching Content-MD5 header.
        controller.downloadTfsImplJar(response);
        assertEquals("application/octet-stream", response.getContentType());
        try (InputStream stream = JarDetector.tfsJar(systemEnvironment).getJarURL().openStream()) {
            assertEquals(DigestUtils.md5Hex(stream), response.getHeader("Content-MD5"));
        }
        try (InputStream is = JarDetector.tfsJar(systemEnvironment).getJarURL().openStream()) {
            assertTrue(Arrays.equals(IOUtils.toByteArray(is), response.getContentAsByteArray()));
        }
    }
    @Test
    public void shouldGenerateToken() {
        // A token is issued for an idle, unregistered agent; the expected value is the
        // Base64 HMAC-SHA256 of the uuid under "token-generation-key".
        final ServerConfig serverConfig = mockedServerConfig("agent-auto-register-key", "someKey");
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.findAgent("uuid-from-agent")).thenReturn(AgentInstanceMother.idle());
        when(agentService.isRegistered("uuid-from-agent")).thenReturn(false);
        final ResponseEntity responseEntity = controller.getToken("uuid-from-agent");
        assertThat(responseEntity.getStatusCode(), is(HttpStatus.OK));
        assertThat(responseEntity.getBody(), is("JCmJaW6YbEA4fIUqf8L9lRV81ua10wV+wRYOFdaBLcM="));
    }
    @Test
    public void shouldRejectGenerateTokenRequestIfAgentIsInPendingState() {
        // A pending agent already holds a token, so a second request is answered with 409.
        final ServerConfig serverConfig = mockedServerConfig("agent-auto-register-key", "someKey");
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.findAgent("uuid-from-agent")).thenReturn(AgentInstanceMother.pendingInstance());
        when(agentService.isRegistered("uuid-from-agent")).thenReturn(false);
        final ResponseEntity responseEntity = controller.getToken("uuid-from-agent");
        assertThat(responseEntity.getStatusCode(), is(CONFLICT));
        assertThat(responseEntity.getBody(), is("A token has already been issued for this agent."));
    }
    @Test
    public void shouldRejectGenerateTokenRequestIfAgentIsInConfig() {
        // An already-registered agent must not receive a fresh token; answered with 409.
        final ServerConfig serverConfig = mockedServerConfig("agent-auto-register-key", "someKey");
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.findAgent("uuid-from-agent")).thenReturn(AgentInstanceMother.idle());
        when(agentService.isRegistered("uuid-from-agent")).thenReturn(true);
        final ResponseEntity responseEntity = controller.getToken("uuid-from-agent");
        assertThat(responseEntity.getStatusCode(), is(CONFLICT));
        assertThat(responseEntity.getBody(), is("A token has already been issued for this agent."));
    }
@Test
public void shouldRejectGenerateTokenRequestIfUUIDIsEmpty() {
final ResponseEntity responseEntity = controller.getToken(" ");
assertThat(responseEntity.getStatusCode(), is(CONFLICT));
assertThat(responseEntity.getBody(), is("UUID cannot be blank."));
}
    @Test
    public void shouldRejectRegistrationRequestWhenInvalidTokenProvided() {
        // A bad HMAC token is rejected with 403 before any registration logic runs.
        when(agentService.isRegistered("blahAgent-uuid")).thenReturn(false);
        ServerConfig serverConfig = mockedServerConfig("token-generation-key", "someKey");
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.createAgentUsername("blahAgent-uuid", request.getRemoteAddr(), "blahAgent-host")).thenReturn(new Username("some-agent-login-name"));
        ResponseEntity responseEntity = controller.agentRequest("blahAgent-host", "blahAgent-uuid", "blah-location", "34567", "osx", "", "", "", "", "", "", "an-invalid-token", request);
        assertThat(responseEntity.getBody(), is("Not a valid token."));
        assertThat(responseEntity.getStatusCode(), is(HttpStatus.FORBIDDEN));
        verify(serverConfig, times(0)).shouldAutoRegisterAgentWith("someKey");
        verifyNoMoreInteractions(agentService);
    }
    @Test
    public void shouldAutoRegisterElasticAgentIfEphemeralAutoRegisterKeyIsValid() {
        // Elastic agents auto-register when the ephemeral (one-shot) key validates;
        // their runtime info is augmented with the elastic agent/plugin ids.
        String uuid = "elastic-uuid";
        final ServerConfig serverConfig = mockedServerConfig("token-generation-key", "auto_register_key");
        final String token = token(uuid, serverConfig.getTokenGenerationKey());
        when(agentService.isRegistered(uuid)).thenReturn(false);
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.createAgentUsername(uuid, request.getRemoteAddr(), "host")).thenReturn(new Username("some-agent-login-name"));
        // "someKey" is the auto-register key sent below; validateAndRevoke consumes it.
        when(ephemeralAutoRegisterKeyService.validateAndRevoke("someKey")).thenReturn(true);
        String elasticAgentId = "elastic-agent-id";
        String elasticPluginId = "elastic-plugin-id";
        AgentRuntimeInfo agentRuntimeInfo = AgentRuntimeInfo.fromServer(new Agent(uuid, "host", request.getRemoteAddr()), false, "location", 233232L, "osx");
        controller.agentRequest("host", uuid, "location", "233232", "osx", "someKey", "", "e1", "", elasticAgentId, elasticPluginId, token, request);
        verify(agentService).findElasticAgent(elasticAgentId, elasticPluginId);
        verify(agentService, times(2)).isRegistered(uuid);
        verify(agentService).register(any(Agent.class));
        verify(agentService).requestRegistration(ElasticAgentRuntimeInfo.fromServer(agentRuntimeInfo, elasticAgentId, elasticPluginId));
    }
    @Test
    public void shouldNotRelyOnAutoRegisterKeyForRegisteringElasticAgents() {
        // Even with the ordinary (long-lived) auto-register key, an elastic agent must not
        // be registered unless the ephemeral key service validates the key.
        String uuid = "elastic-uuid";
        String autoRegisterKey = "auto_register_key";
        final ServerConfig serverConfig = mockedServerConfig("token-generation-key", autoRegisterKey);
        final String token = token(uuid, serverConfig.getTokenGenerationKey());
        when(agentService.isRegistered(uuid)).thenReturn(false);
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(agentService.createAgentUsername(uuid, request.getRemoteAddr(), "host")).thenReturn(new Username("some-agent-login-name"));
        when(ephemeralAutoRegisterKeyService.validateAndRevoke(any())).thenReturn(false);
        controller.agentRequest("host", uuid, "location", "233232", "osx", autoRegisterKey,
                "", "e1", "", "elastic-agent-id",
                "elastic-plugin-id", token, request);
        verify(agentService, never()).register(any(Agent.class));
    }
private String token(String uuid, String tokenGenerationKey) {
try {
Mac mac = Mac.getInstance("HmacSHA256");
SecretKeySpec secretKey = new SecretKeySpec(tokenGenerationKey.getBytes(), "HmacSHA256");
mac.init(secretKey);
return Base64.getEncoder().encodeToString(mac.doFinal(uuid.getBytes()));
} catch (NoSuchAlgorithmException | InvalidKeyException e) {
throw new RuntimeException(e);
}
}
private ServerConfig mockedServerConfig(String tokenGenerationKey, String agentAutoRegisterKey) {
final ServerConfig serverConfig = mock(ServerConfig.class);
when(serverConfig.getTokenGenerationKey()).thenReturn(tokenGenerationKey);
when(serverConfig.getAgentAutoRegisterKey()).thenReturn(agentAutoRegisterKey);
when(serverConfig.shouldAutoRegisterAgentWith(agentAutoRegisterKey)).thenReturn(true);
when(serverConfig.security()).thenReturn(new SecurityConfig());
return serverConfig;
}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.ui.laf.darcula;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.RecentProjectsManagerBase;
import com.intellij.ide.ReopenProjectAction;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.PresentationFactory;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.ui.ClickListener;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.UIUtil;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.util.ArrayList;
import static java.awt.GridBagConstraints.HORIZONTAL;
import static java.awt.GridBagConstraints.NORTHWEST;
/**
* @author Konstantin Bulenkov
*/
public class DarculaWelcomeScreenForm {
private final DarculaIntelliJWelcomeScreen myWelcomeScreen;
private JPanel myRoot;
private JPanel myRecentProjects;
private JPanel myQuickStartPanel;
private JPanel myHelpPanel;
private ArrayList<DarculaQuickStartButton> myQuickStartButtons = new ArrayList<DarculaQuickStartButton>();
private ArrayList<DarculaHelpButton> myHelpButtons = new ArrayList<DarculaHelpButton>();
public DarculaWelcomeScreenForm(DarculaIntelliJWelcomeScreen welcomeScreen) {
myWelcomeScreen = welcomeScreen;
createRecentProjectPanel(myRoot);
myQuickStartPanel.add(fillButtons(myRoot, true), BorderLayout.NORTH);
JPanel p = new JPanel(); p.setOpaque(false);
myQuickStartPanel.add(p, BorderLayout.CENTER);
myHelpPanel.add(fillButtons(myRoot, false), BorderLayout.NORTH);
p = new JPanel(); p.setOpaque(false);
myHelpPanel.add(p, BorderLayout.CENTER);
}
private void createRecentProjectPanel(final JPanel root) {
myRecentProjects.removeAll();
myRecentProjects.setBorder(new EmptyBorder(0, 20, 0, 20));
final AnAction[] recentProjectsActions = RecentProjectsManagerBase.getInstance().getRecentProjectsActions(false);
JLabel caption = new JLabel("Recent Projects");
caption.setUI(DarculaWelcomeScreenLabelUI.createUI(caption));
caption.setBorder(new EmptyBorder(10, 0, 0, 0));
caption.setHorizontalAlignment(SwingConstants.CENTER);
caption.setFont(new Font("Tahoma", Font.BOLD, 18));
final Color fg = UIUtil.getPanelBackground();
caption.setForeground(UIUtil.getPanelBackground());
myRecentProjects.add(caption, new GridBagConstraints(0, 0, 2, 1, 1, 0, NORTHWEST, HORIZONTAL, new Insets(0, 0, 10, 0), 0, 0));
int row = 1;
for (final AnAction action : recentProjectsActions) {
if (!(action instanceof ReopenProjectAction)) continue;
final SimpleColoredComponent pathLabel = new SimpleColoredComponent();
final SimpleColoredComponent nameLabel = new SimpleColoredComponent() {
@Override
public Dimension getPreferredSize() {
boolean hasIcon = getIcon() != null;
Dimension preferredSize = super.getPreferredSize();
return new Dimension(preferredSize.width + (hasIcon ? 0 : AllIcons.Actions.CloseNew.getIconWidth() + myIconTextGap),
preferredSize.height);
}
@Override
public Dimension getMinimumSize() {
return getPreferredSize();
}
};
nameLabel.append(String.valueOf(row) + ". ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, null));
nameLabel.append(((ReopenProjectAction)action).getProjectName(),
new SimpleTextAttributes(/*SimpleTextAttributes.STYLE_UNDERLINE | */SimpleTextAttributes.STYLE_BOLD, null));
nameLabel.setIconOnTheRight(true);
String path = ((ReopenProjectAction)action).getProjectPath();
File pathFile = new File(path);
if (pathFile.isDirectory() && pathFile.getName().equals(((ReopenProjectAction)action).getProjectName())) {
path = pathFile.getParent();
}
path = FileUtil.getLocationRelativeToUserHome(path);
pathLabel.append(" " + path, new SimpleTextAttributes(SimpleTextAttributes.STYLE_SMALLER, null));
nameLabel.setFont(new Font("Tahoma", Font.PLAIN, 14));
pathLabel.setFont(new Font("Tahoma", Font.PLAIN, 8));
for (final SimpleColoredComponent label : new SimpleColoredComponent[]{nameLabel, pathLabel}) {
label.setForeground(fg);
label.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
new ClickListener() {
@Override
public boolean onClick(MouseEvent e, int clickCount) {
if (e.getButton() == MouseEvent.BUTTON1) {
DataContext dataContext = DataManager.getInstance().getDataContext(root);
int fragment = label.findFragmentAt(e.getX());
if (fragment == SimpleColoredComponent.FRAGMENT_ICON) {
final int rc = Messages.showOkCancelDialog(PlatformDataKeys.PROJECT.getData(dataContext),
"Remove '" + action.getTemplatePresentation().getText() +
"' from recent projects list?",
"Remove Recent Project",
Messages.getQuestionIcon());
if (rc == 0) {
final RecentProjectsManagerBase manager = RecentProjectsManagerBase.getInstance();
assert action instanceof ReopenProjectAction : action;
manager.removePath(((ReopenProjectAction)action).getProjectPath());
final AnAction[] actions = manager.getRecentProjectsActions(false);
if (actions.length == 0) {
myRecentProjects.setVisible(false);
}
else {
for (int i = myRecentProjects.getComponentCount() - 1; i >= 0; i--) {
myRecentProjects.remove(i);
}
final Container parent = myRecentProjects.getParent();
parent.remove(myRecentProjects);
createRecentProjectPanel(root);
root.add(myRecentProjects, BorderLayout.CENTER);
myRecentProjects.revalidate();
}
}
}
else if (fragment != -1) {
AnActionEvent event = new AnActionEvent(e, dataContext, "", action.getTemplatePresentation(), ActionManager.getInstance(), 0);
action.actionPerformed(event);
}
}
return true;
}
}.installOn(label);
label.addMouseListener(new MouseAdapter() {
@Override
public void mouseEntered(MouseEvent e) {
nameLabel.setIcon(AllIcons.Actions.CloseNew);
nameLabel.setForeground(new Color(0xE09600));
pathLabel.setForeground(new Color(0xE09600));
}
@Override
public void mouseExited(MouseEvent e) {
nameLabel.setIcon(EmptyIcon.create(AllIcons.Actions.CloseNew));
nameLabel.setForeground(UIUtil.getPanelBackground());
pathLabel.setForeground(UIUtil.getPanelBackground());
}
});
}
nameLabel.setIcon(EmptyIcon.create(AllIcons.Actions.CloseNew));
nameLabel.setOpaque(false);
pathLabel.setOpaque(false);
nameLabel.setIconOpaque(false);
action.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_0 + row, InputEvent.ALT_DOWN_MASK)), root,
myWelcomeScreen);
myRecentProjects.add(nameLabel,
new GridBagConstraints(1, 2 * row - 1, 1, 1, 1, 0, NORTHWEST, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
myRecentProjects.add(pathLabel, new GridBagConstraints(1, 2*row, 1, 1, 1, 0, NORTHWEST, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
row++;
if (row == 10) break;
}
}
private JPanel fillButtons(final JPanel root, boolean quickStartActions) {
final ActionGroup actionGroup = (ActionGroup)ActionManager.getInstance()
.getAction(quickStartActions ? IdeActions.GROUP_WELCOME_SCREEN_QUICKSTART : IdeActions.GROUP_WELCOME_SCREEN_DOC);
fillActions(root, actionGroup, quickStartActions);
final JPanel panel = new JPanel() {
@Override
public Dimension getPreferredSize() {
return new Dimension((root.getWidth() - myRecentProjects.getPreferredSize().width) / 2, super.getPreferredSize().height+ 60);
}
@Override
public Dimension getMinimumSize() {
return new Dimension(getPreferredSize().width, super.getMinimumSize().height);
}
@Override
public Dimension getMaximumSize() {
return new Dimension(getPreferredSize().width, super.getMaximumSize().height);
}
};
panel.setBorder(new EmptyBorder(10, 10, 10, 10));
panel.setOpaque(false);
panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
if (quickStartActions) {
for (DarculaQuickStartButton button : myQuickStartButtons) {
panel.add(button.getComponent());
}
}
else {
for (DarculaHelpButton button : myHelpButtons) {
panel.add(button.getComponent());
}
}
return panel;
}
private void fillActions(JPanel root, final ActionGroup group, boolean quickStart) {
final AnAction[] actions = group.getChildren(null);
PresentationFactory factory = new PresentationFactory();
for (final AnAction action : actions) {
if (action instanceof ActionGroup) {
final ActionGroup childGroup = (ActionGroup)action;
fillActions(root, childGroup, quickStart);
}
else {
Presentation presentation = factory.getPresentation(action);
action.update(new AnActionEvent(null, DataManager.getInstance().getDataContext(root),
ActionPlaces.WELCOME_SCREEN, presentation, ActionManager.getInstance(), 0));
if (presentation.isVisible()) {
if (quickStart) {
myQuickStartButtons.add(new DarculaQuickStartButton(action));
} else {
myHelpButtons.add(new DarculaHelpButton(action));
}
}
}
}
}
public JComponent getComponent() {
return myRoot;
}
private void createUIComponents() {
myRecentProjects = new JPanel(new GridBagLayout()) {
@Override
public Dimension getMinimumSize() {
return getPreferredSize();
}
@Override
public Dimension getMaximumSize() {
return getPreferredSize();
}
};
myRecentProjects.setOpaque(false);
}
}
| |
/*
* Copyright 2003-2017 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.bugs;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.CommonClassNames;
import com.intellij.psi.PsiType;
import com.intellij.psi.util.InheritanceUtil;
import com.siyeh.InspectionGadgetsBundle;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
final class FormatDecode {
  // One java.util.Formatter specifier: %[argIndex$][flags][width][.precision][t|T]conversion
  // groups: 1=position, 2=flags, 3=width, 4=precision, 5=date prefix, 6=conversion char
  private static final Pattern fsPattern = Pattern.compile("%(\\d+\\$)?([-#+ 0,(<]*)?(\\d+)?(\\.\\d*)?([tT])?([a-zA-Z%])");
  private FormatDecode() {}
  private static final Validator ALL_VALIDATOR = new AllValidator();
  // Flag bit masks, one per Formatter flag character.
  private static final int LEFT_JUSTIFY = 1; // '-'
  private static final int ALTERNATE = 2; // '#'
  private static final int PLUS = 4; // '+'
  private static final int LEADING_SPACE = 8; // ' '
  private static final int ZERO_PAD = 16; // '0'
  private static final int GROUP = 32; // ','
  private static final int PARENTHESES = 64; // '('
  private static final int PREVIOUS = 128; // '<'
private static int flag(char c) {
switch (c) {
case '-': return LEFT_JUSTIFY;
case '#': return ALTERNATE;
case '+': return PLUS;
case ' ': return LEADING_SPACE;
case '0': return ZERO_PAD;
case ',': return GROUP;
case '(': return PARENTHESES;
case '<': return PREVIOUS;
default: return -1;
}
}
private static String flagString(int flags) {
final StringBuilder result = new StringBuilder(8);
if ((flags & LEFT_JUSTIFY) != 0) {
result.append('-');
}
if ((flags & ALTERNATE) != 0) {
result.append('#');
}
if ((flags & PLUS) != 0) {
result.append('+');
}
if ((flags & LEADING_SPACE) != 0) {
result.append(' ');
}
if ((flags & ZERO_PAD) != 0) {
result.append('0');
}
if ((flags & GROUP) != 0) {
result.append(',');
}
if ((flags & PARENTHESES) != 0) {
result.append('(');
}
if ((flags & PREVIOUS) != 0) {
result.append('<');
}
return result.toString();
}
private static void checkFlags(int value, int allowedFlags, String specifier) {
final int result = value & ~allowedFlags;
if (result != 0) {
final String flags = flagString(result);
throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.flags.not.allowed", flags, specifier, flags.length()));
}
}
  /**
   * Parses {@code formatString} (java.util.Formatter syntax) and returns one {@link Validator}
   * per argument position referenced by its specifiers; positions that are never referenced
   * hold {@code null}.
   *
   * @param formatString  the format string to analyze
   * @param argumentCount number of arguments actually supplied at the call site; limits how
   *                      far the result is padded when positional specifiers skip arguments
   * @throws IllegalFormatException if the format string is malformed
   */
  public static Validator @NotNull [] decode(String formatString, int argumentCount) {
    final ArrayList<Validator> parameters = new ArrayList<>();
    final Matcher matcher = fsPattern.matcher(formatString);
    boolean previousAllowed = false;
    // implicit: next zero-based index handed out to un-positioned specifiers
    // pos: zero-based argument index targeted by the current specifier
    int implicit = 0;
    int pos = 0;
    int i = 0;
    while (matcher.find(i)) {
      final int start = matcher.start();
      if (start != i) {
        // plain text between specifiers must not contain a stray '%'
        checkText(formatString.substring(i, start));
      }
      i = matcher.end();
      final String specifier = matcher.group();
      final String posSpec = matcher.group(1);
      final String flags = matcher.group(2);
      final String width = matcher.group(3);
      final String precision = matcher.group(4);
      final String dateSpec = matcher.group(5);
      @NonNls final String conversion = matcher.group(6);
      // fold the flag characters into a bit set, rejecting unknown and duplicate flags
      int flagBits = 0;
      for (int j = 0; j < flags.length(); j++) {
        final char flag = flags.charAt(j);
        final int bit = flag(flag);
        if (bit == -1) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.unexpected.flag", flag, specifier));
        }
        if ((flagBits | bit) == flagBits) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.duplicate.flag", flag, specifier));
        }
        flagBits |= bit;
      }
      // check this first because it should not affect "implicit"
      if ("n".equals(conversion)) {
        // no flags allowed
        checkFlags(flagBits, 0, specifier);
        if (!StringUtil.isEmpty(width)) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.width.not.allowed", width, specifier));
        }
        checkNoPrecision(precision, specifier);
        continue;
      }
      else if ("%".equals(conversion)) { // literal '%'
        checkFlags(flagBits, LEFT_JUSTIFY, specifier);
        checkNoPrecision(precision, specifier);
        continue;
      }
      // determine which argument this specifier consumes
      if (posSpec != null) {
        // explicit 1-based position like "%3$s"
        if (isAllBitsSet(flagBits, PREVIOUS)) {
          throw new IllegalFormatException(
            InspectionGadgetsBundle.message("format.string.error.unnecessary.position.specifier", posSpec, specifier));
        }
        final String num = posSpec.substring(0, posSpec.length() - 1);
        pos = Integer.parseInt(num) - 1;
        if (pos < 0) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.illegal.position.specifier", posSpec, specifier));
        }
        previousAllowed = true;
      }
      else if (isAllBitsSet(flagBits, PREVIOUS)) {
        // reuse last pos
        if (!previousAllowed) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.previous.element.not.found", specifier));
        }
      }
      else {
        previousAllowed = true;
        pos = implicit++;
      }
      // pick the validator for the conversion, checking its allowed flags/precision
      final Validator allowed;
      if (dateSpec != null) { // a t or T
        checkFlags(flagBits, LEFT_JUSTIFY | PREVIOUS, specifier);
        checkNoPrecision(precision, specifier);
        allowed = new DateValidator(specifier);
      }
      else {
        switch (conversion.charAt(0)) {
          case 'b': // boolean (general)
          case 'B':
          case 'h': // Integer hex string (general
          case 'H':
            checkFlags(flagBits, LEFT_JUSTIFY | PREVIOUS, specifier);
            allowed = ALL_VALIDATOR;
            break;
          case 's': // formatted string (general)
          case 'S':
            checkFlags(flagBits, LEFT_JUSTIFY | ALTERNATE | PREVIOUS, specifier);
            allowed = (flagBits & ALTERNATE) != 0 ? new FormattableValidator(specifier) : ALL_VALIDATOR;
            break;
          case 'c': // unicode character
          case 'C':
            checkFlags(flagBits, LEFT_JUSTIFY | PREVIOUS, specifier);
            checkNoPrecision(precision, specifier);
            allowed = new CharValidator(specifier);
            break;
          case 'd': // decimal integer
            checkFlags(flagBits, ~ALTERNATE, specifier);
            allowed = new IntValidator(specifier);
            break;
          case 'o': // octal integer
          case 'x': // hexadecimal integer
          case 'X':
            checkFlags(flagBits, ~(PLUS | LEADING_SPACE | GROUP), specifier);
            checkNoPrecision(precision, specifier);
            allowed = new IntValidator(specifier);
            break;
          case 'a': // hexadecimal floating-point number
          case 'A':
            checkFlags(flagBits, ~(PARENTHESES | GROUP), specifier);
            allowed = new FloatValidator(specifier);
            break;
          case 'e': // floating point -> decimal number in computerized scientific notation
          case 'E':
            checkFlags(flagBits, ~GROUP, specifier);
            allowed = new FloatValidator(specifier);
            break;
          case 'g': // scientific notation
          case 'G':
            checkFlags(flagBits, ~ALTERNATE, specifier);
            allowed = new FloatValidator(specifier);
            break;
          case 'f': // floating point -> decimal number
            allowed = new FloatValidator(specifier);
            break;
          default:
            throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.unknown.conversion", specifier));
        }
      }
      // a lone "." with no digits after it
      if (precision != null && precision.length() < 2) {
        throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.invalid.precision", specifier));
      }
      if (isAllBitsSet(flagBits, LEADING_SPACE | PLUS)) {
        throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.illegal.flag.combination", ' ', '+', specifier));
      }
      if (isAllBitsSet(flagBits, LEFT_JUSTIFY | ZERO_PAD)) {
        throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.illegal.flag.combination", '-', '0', specifier));
      }
      // '-' and '0' are meaningless without a width
      if (StringUtil.isEmpty(width)) {
        if (isAllBitsSet(flagBits, LEFT_JUSTIFY)) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.left.justify.no.width", specifier));
        }
        if (isAllBitsSet(flagBits, ZERO_PAD)) {
          throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.zero.padding.no.width", specifier));
        }
      }
      storeValidator(allowed, pos, parameters, argumentCount);
    }
    if (i < formatString.length()) {
      checkText(formatString.substring(i));
    }
    return parameters.toArray(new Validator[0]);
  }
private static void checkNoPrecision(String precision, String specifier) {
if (!StringUtil.isEmpty(precision)) {
throw new IllegalFormatException(InspectionGadgetsBundle.message("format.string.error.precision.not.allowed", precision, specifier));
}
}
private static boolean isAllBitsSet(int value, int mask) {
return (value & mask) == mask;
}
private static void checkText(String s) {
if (s.indexOf('%') != -1) {
throw new IllegalFormatException();
}
}
  /**
   * Records {@code validator} as a requirement for the argument at index {@code pos}.
   * If a different validator is already registered for that position, the two are combined
   * into a {@link MultiValidator} so the argument has to satisfy both.
   *
   * @param validator     the validator to register
   * @param pos           zero-based argument position the validator applies to
   * @param parameters    validators indexed by argument position; grown with null padding as needed
   * @param argumentCount total number of arguments referenced by the format string; bounds the padding
   */
  private static void storeValidator(Validator validator, int pos, ArrayList<Validator> parameters, int argumentCount) {
    if (pos < parameters.size()) {
      final Validator existing = parameters.get(pos);
      if (existing == null) {
        // slot was padded earlier; just fill it
        parameters.set(pos, validator);
      }
      else if (existing instanceof MultiValidator) {
        ((MultiValidator)existing).addValidator(validator);
      }
      else if (existing != validator) {
        // two distinct validators for the same position: require both
        final MultiValidator multiValidator = new MultiValidator(existing.getSpecifier());
        multiValidator.addValidator(existing);
        multiValidator.addValidator(validator);
        parameters.set(pos, multiValidator);
      }
    }
    else {
      // pad skipped positions with null placeholders, never beyond the argument count
      while (pos > parameters.size() && argumentCount > parameters.size()) {
        parameters.add(null);
      }
      parameters.add(validator);
    }
  }
  /** Thrown when the format string under inspection is malformed. */
  public static class IllegalFormatException extends RuntimeException {
    public IllegalFormatException(@Nls String message) {
      super(message);
    }
    // message-less variant for errors without a specific description (see checkText)
    public IllegalFormatException() {}
  }
  /** Accepts arguments of any type — for conversions that can format arbitrary objects. */
  private static class AllValidator extends Validator {
    AllValidator() {
      // no particular specifier is associated with this validator
      super("");
    }
    @Override
    public boolean valid(PsiType type) {
      return true;
    }
  }
private static class DateValidator extends Validator {
DateValidator(String specifier) {
super(specifier);
}
@Override
public boolean valid(PsiType type) {
final String text = type.getCanonicalText();
return PsiType.LONG.equals(type) ||
CommonClassNames.JAVA_LANG_LONG.equals(text) ||
InheritanceUtil.isInheritor(type, CommonClassNames.JAVA_UTIL_DATE) ||
InheritanceUtil.isInheritor(type, CommonClassNames.JAVA_UTIL_CALENDAR) ||
InheritanceUtil.isInheritor(type, "java.time.temporal.TemporalAccessor");
}
}
private static class CharValidator extends Validator {
CharValidator(String specifier) {
super(specifier);
}
@Override
public boolean valid(PsiType type) {
if (PsiType.CHAR.equals(type) || PsiType.BYTE.equals(type) || PsiType.SHORT.equals(type) || PsiType.INT.equals(type)) {
return true;
}
final String text = type.getCanonicalText();
return CommonClassNames.JAVA_LANG_CHARACTER.equals(text) ||
CommonClassNames.JAVA_LANG_BYTE.equals(text) ||
CommonClassNames.JAVA_LANG_SHORT.equals(text) ||
CommonClassNames.JAVA_LANG_INTEGER.equals(text);
}
}
private static class IntValidator extends Validator {
IntValidator(String specifier) {
super(specifier);
}
@Override
public boolean valid(PsiType type) {
final String text = type.getCanonicalText();
return PsiType.INT.equals(type) ||
CommonClassNames.JAVA_LANG_INTEGER.equals(text) ||
PsiType.LONG.equals(type) ||
CommonClassNames.JAVA_LANG_LONG.equals(text) ||
PsiType.SHORT.equals(type) ||
CommonClassNames.JAVA_LANG_SHORT.equals(text) ||
PsiType.BYTE.equals(type) ||
CommonClassNames.JAVA_LANG_BYTE.equals(text) ||
"java.math.BigInteger".equals(text);
}
}
private static class FloatValidator extends Validator {
FloatValidator(String specifier) {
super(specifier);
}
@Override
public boolean valid(PsiType type) {
final String text = type.getCanonicalText();
return PsiType.DOUBLE.equals(type) ||
CommonClassNames.JAVA_LANG_DOUBLE.equals(text) ||
PsiType.FLOAT.equals(type) ||
CommonClassNames.JAVA_LANG_FLOAT.equals(text) ||
"java.math.BigDecimal".equals(text);
}
}
  /**
   * Accepts only arguments implementing java.util.Formattable — presumably used for
   * conversions that require Formattable; confirm against the dispatching switch.
   */
  private static class FormattableValidator extends Validator {
    FormattableValidator(String specifier) {
      super(specifier);
    }
    @Override
    public boolean valid(PsiType type) {
      return InheritanceUtil.isInheritor(type, "java.util.Formattable");
    }
  }
private static class MultiValidator extends Validator {
private final Set<Validator> validators = new HashSet<>(3);
MultiValidator(String specifier) {
super(specifier);
}
@Override
public boolean valid(PsiType type) {
for (Validator validator : validators) {
if (!validator.valid(type)) {
return false;
}
}
return true;
}
public void addValidator(Validator validator) {
validators.add(validator);
}
}
  /**
   * Base class for argument validators: each one remembers the format specifier it was
   * created for and decides whether a given argument type is acceptable for it.
   */
  abstract static class Validator {
    private final String mySpecifier;
    Validator(String specifier) {
      mySpecifier = specifier;
    }
    /** Returns true if an argument of {@code type} is acceptable for this specifier. */
    public abstract boolean valid(PsiType type);
    /** Returns the format specifier this validator was created for. */
    public String getSpecifier() {
      return mySpecifier;
    }
  }
}
| |
/*******************************************************************************
* Copyright (c) 2006-2007 University of Toronto Database Group
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software
* is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package simfunctions;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Vector;
import utility.Config;
import dbdriver.MySqlDB;
import experiment.IdScore;
/**
 * Generalized edit similarity: a token-level weighted Levenshtein distance where token
 * weights come from IDF values and substitution cost is scaled by the character-level
 * normalized edit distance between the two tokens.
 */
public class GeneralizedEditSimilarity extends Preprocess {

    /** Cost multiplier for inserting a tuple token, relative to deleting a query token. */
    public static double insertionCost = 0.5;

    public String similarityMetric = "editdistance";

    /** Cache of normalized edit distances: query token -> (corpus token -> normalized ED). */
    public static HashMap<String, HashMap<String, Double>> tokenStokenTEDst=null;

    /**
     * Returns the term frequencies of the whitespace-separated tokens of {@code str}.
     * Q-gram tokenization is disabled because this metric works on whole words.
     */
    public HashMap<String, Double> getTF(String str) {
        tokenizeUsingQgrams = false;
        return getTokenTFMultiple(str);
    }

    /**
     * Splits {@code str} into lower-cased, whitespace-separated word tokens. Generalized edit
     * distance operates on these word-level tokens at the first level.
     */
    public Vector<String> getTokenVector(String str) {
        Vector<String> tokenVector = new Vector<String>();
        for (String token : str.toLowerCase().split(" ")) {
            tokenVector.add(token);
        }
        return tokenVector;
    }

    /**
     * Precomputes the normalized edit distance between every query token and every token
     * appearing in {@code qgramIDF}, caching them in {@link #tokenStokenTEDst} so repeated
     * lookups during scoring are cheap.
     */
    public static void initializeTokenTokenEDMap(String query, HashMap<String, Double> qgramIDF){
        tokenStokenTEDst = new HashMap<String, HashMap<String, Double>>();
        String[] tokens = query.toLowerCase().split(" ");
        for (int i = 0; i < tokens.length; i++) {
            HashMap<String, Double> tokenEDMap = new HashMap<String, Double>();
            for (String token : qgramIDF.keySet()) {
                tokenEDMap.put(token, normalizedEditDistance(tokens[i], token));
            }
            tokenStokenTEDst.put(tokens[i], tokenEDMap);
        }
    }

    /**
     * Looks up the cached normalized edit distance between {@code s} and {@code t}, falling
     * back to computing it directly (and logging to stderr) on a cache miss.
     */
    public double getNormalizedEditDistanceFromEDMap(String s, String t){
        // Probe the cache with null checks instead of using exceptions for control flow.
        HashMap<String, Double> row = (tokenStokenTEDst == null) ? null : tokenStokenTEDst.get(s);
        Double ed = (row == null) ? null : row.get(t);
        if (ed == null) {
            System.err.println("The ED MAP does not contain entry for "+s+" and "+t);
            return normalizedEditDistance(s, t);
        }
        return ed;
    }

    /**
     * Loads all rows of {@code tableName}, fills {@code records} (indexed by tid - 1),
     * accumulates document frequencies and term weights, and finally converts the DF counts
     * in {@code qgramIDF} into IDF values.
     */
    public void preprocessTable(Vector<String> records, HashMap<String, Double> qgramIDF,
            Vector<HashMap<String, Double>> recordTokenWeights, String tableName) {
        Config config = new Config();
        MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd);
        int numberOfRecords = 0, k = 0;
        try {
            String query = "select tid, " + config.preprocessingColumn + " from " + config.dbName + "." + tableName
                    + " order by tid asc";
            ResultSet rs = mysqlDB.executeQuery(query);
            if (rs != null) {
                while (rs.next()) {
                    String str = rs.getString(config.preprocessingColumn);
                    k = rs.getInt("tid");
                    // BUG FIX: test for null BEFORE dereferencing; the original called
                    // str.toLowerCase() first and would NPE on a NULL column value.
                    if ((str != null) && (!str.equals(""))) {
                        str = str.toLowerCase();
                        records.insertElementAt(str, k - 1);
                        // accumulate the tf's of all the tokens of this record
                        getDFandTFweight(k - 1, str, qgramIDF, recordTokenWeights);
                    }
                    numberOfRecords++;
                }
            }
            mysqlDB.close();
        } catch (Exception e) {
            // NOTE(review): on failure the connection is not closed here — consider a finally block.
            System.out.println("database error: cannot read table");
            e.printStackTrace();
        }
        if (logToDB) {
            storePreprocessedDataToDB(tableName, recordTokenWeights, extractMetricName(this.getClass().getName())
                    + "_tf");
        }
        // convert the accumulated document frequencies into IDF values
        convertDFtoIDF(qgramIDF, numberOfRecords, recordTokenWeights);
        if (logToDB) {
            storePreprocessedIDF(tableName, qgramIDF, extractMetricName(this.getClass().getName()) + "_idf");
        }
    }

    /**
     * Scores every record against {@code query} using generalized edit similarity and returns
     * the records with a positive score, sorted via IdScore's natural ordering.
     */
    public List<IdScore> getSimilarRecords(String query, HashMap<String, Double> qgramIDF,
            Vector<HashMap<String, Double>> recordTokenWeights, Vector<String> recordVector) {
        List<IdScore> scoreList = new ArrayList<IdScore>();
        initializeTokenTokenEDMap(query, qgramIDF);
        // Tokens not present in the corpus get the mean IDF as their weight.
        double meanIDF = 0;
        for (double idf : qgramIDF.values()) {
            meanIDF += idf;
        }
        if (!qgramIDF.isEmpty()) {
            // BUG FIX: guard against division by zero (the original yielded NaN for an empty map).
            meanIDF = meanIDF / qgramIDF.size();
        }
        // Precompute the prefix sums of the query-token weights once for all records.
        Vector<String> inputTokenVector = getTokenVector(query);
        Vector<Double> weightedTokenVector = incrementalWeightedSumVector(inputTokenVector, qgramIDF, meanIDF);
        for (int k = 0; k < recordVector.size(); k++) {
            double score = generalizedEditSimilarity(query, recordVector.get(k), qgramIDF, meanIDF, weightedTokenVector);
            // only records with some similarity are kept
            if (score > 0) {
                scoreList.add(new IdScore(k, score));
            }
        }
        Collections.sort(scoreList);
        return scoreList;
    }

    /**
     * Similarity in [0, 1]: 1 minus the weighted transformation cost normalized by the total
     * weight of the query tokens (capped at 1). Returns 0 when the query has no weighted tokens.
     */
    public double generalizedEditSimilarity(String s, String t, HashMap<String, Double> qgramIDF, double meanIDF,
            Vector<Double> weightedTokenVector) {
        int weightVectorSize = weightedTokenVector.size();
        // the last prefix-sum entry is the total weight of all query tokens
        if ((weightVectorSize <= 1) || (weightedTokenVector.get(weightVectorSize - 1) <= 0)) {
            return 0;
        } else {
            double transformationCost = weightedLD(s, t, qgramIDF, meanIDF, weightedTokenVector);
            return 1.0 - Math.min(1, transformationCost / weightedTokenVector.get(weightVectorSize - 1));
        }
    }

    /** Sum of the IDF weights of {@code tokenVector}, using {@code meanIDF} for unknown tokens. */
    public double weightedSumTokens(Vector<String> tokenVector, HashMap<String, Double> qgramIDF, double meanIDF) {
        double weightedTokenVectorSum = 0;
        for (String token : tokenVector) {
            // consistency: reuse the single weight-lookup helper
            weightedTokenVectorSum += getIDFWeight(token, qgramIDF, meanIDF);
        }
        return weightedTokenVectorSum;
    }

    /**
     * Returns the vector v such that v(i) = sum of weight(u(j)) for 0 <= j < i,
     * where u is the token vector; v(0) is 0 and the last entry is the total weight.
     */
    public Vector<Double> incrementalWeightedSumVector(Vector<String> tokenVector, HashMap<String, Double> qgramIDF,
            double meanIDF) {
        Vector<Double> incrementalTokenWeightSumVector = new Vector<Double>();
        double weightedTokenVectorSum = 0;
        incrementalTokenWeightSumVector.add(0.0);
        for (int i = 0; i < tokenVector.size(); i++) {
            String token = tokenVector.get(i);
            weightedTokenVectorSum += getIDFWeight(token, qgramIDF, meanIDF);
            incrementalTokenWeightSumVector.add(weightedTokenVectorSum);
        }
        return incrementalTokenWeightSumVector;
    }

    /** IDF weight of {@code token}, falling back to {@code meanIDF} for unseen tokens. */
    public double getIDFWeight(String token, HashMap<String, Double> qgramIDF, double meanIDF) {
        if (qgramIDF.containsKey(token)) {
            return qgramIDF.get(token);
        } else {
            return meanIDF;
        }
    }

    /**
     * Weighted token-level Levenshtein distance between query {@code s} and tuple {@code t}.
     * Deleting a query token costs its IDF weight, inserting a tuple token costs
     * {@code insertionCost} times its weight, and substituting costs the query token's weight
     * scaled by the character-level normalized edit distance of the two tokens.
     */
    public double weightedLD(String s, String t, HashMap<String, Double> qgramIDF, double meanIDF,
            Vector<Double> weightedTokenVector) {
        Vector<String> inputTokenVector = getTokenVector(s);
        Vector<String> tupleTokenVector = getTokenVector(t);
        Vector<Double> tupleTokenIncrementalWeightedSum = incrementalWeightedSumVector(tupleTokenVector, qgramIDF,
                meanIDF);
        double d[][];     // DP cost matrix
        double ed[][];    // per-pair character-level distances (kept for debugging output)
        double track[][]; // DP back-pointers (kept for debugging output)
        int n, m, i, j;
        String tok_i, tok_j;
        double cost = 0, wt_i, wt_j;
        n = inputTokenVector.size();
        m = tupleTokenVector.size();
        // degenerate cases: one side empty means pure insertions or pure deletions
        if (n == 0) {
            return insertionCost * tupleTokenIncrementalWeightedSum.get(m);
        }
        if (m == 0) {
            return weightedTokenVector.get(n);
        }
        d = new double[n + 1][m + 1];
        ed = new double[n][m];
        track = new double[n][m];
        // first column/row: cumulative weight of deleting/inserting a prefix
        for (i = 0; i <= n; i++) {
            d[i][0] = weightedTokenVector.get(i);
        }
        for (j = 0; j <= m; j++) {
            d[0][j] = insertionCost * tupleTokenIncrementalWeightedSum.get(j);
        }
        for (i = 1; i <= n; i++) {
            tok_i = inputTokenVector.get(i - 1);
            wt_i = getIDFWeight(tok_i, qgramIDF, meanIDF);
            for (j = 1; j <= m; j++) {
                tok_j = tupleTokenVector.get(j - 1);
                wt_j = getIDFWeight(tok_j, qgramIDF, meanIDF);
                ed[i - 1][j - 1] = normalizedEditDistance(tok_i, tok_j);
                cost = wt_i * ed[i - 1][j - 1];
                d[i][j] = Minimum(d[i - 1][j] + wt_i, d[i][j - 1] + insertionCost*wt_j, d[i - 1][j - 1] + cost);
                track[i - 1][j - 1] = trackMinimum(d[i - 1][j] + wt_i, d[i][j - 1] + insertionCost*wt_j, d[i - 1][j - 1] + cost);
                if ((track[i - 1][j - 1] == 211) && (ed[i - 1][j - 1] == 0)) {
                    track[i - 1][j - 1] = 200; // mark exact token matches taken on the diagonal
                }
            }
        }
        // Debug dump for one hard-coded query/tuple pair; harmless but noisy — candidate for removal.
        if (s.contains("union") && t.contains("union") && t.contains("elec")) {
            System.out.println("QUERY TOKENS");
            printTokenIDFs(inputTokenVector, qgramIDF, meanIDF);
            System.out.println("TUPLE TOKENS");
            printTokenIDFs(tupleTokenVector, qgramIDF, meanIDF);
            System.out.println("Edit Distance Metric");
            print2DMatrix(ed);
            System.out.println("GENERALIZED Edit Distance Metric");
            print2DMatrix(d);
            System.out.println("GENERALIZED Edit Distance TRACK Metric");
            print2DMatrix(track);
        }
        return d[n][m];
    }

    /** Debug helper: prints each token with its effective IDF weight. */
    public void printTokenIDFs(Vector<String> tokenVector, HashMap<String, Double> qgramIDF, double meanIDF) {
        for (String token : tokenVector) {
            System.out.print(token + ": " + getIDFWeight(token, qgramIDF, meanIDF) + " ");
        }
        System.out.println();
    }

    /** Debug helper: prints a matrix row by row. */
    public void print2DMatrix(double[][] matrix) {
        for (int i = 0; i < matrix.length; i++) {
            for (int j = 0; j < matrix[i].length; j++) {
                System.out.print(matrix[i][j] + " ");
            }
            System.out.println();
        }
    }

    /**
     * Returns which argument is the minimum, encoded as a direction:
     * 211 for diagonal (c), 210 for up (a), 201 for left (b).
     */
    private double trackMinimum(double a, double b, double c) {
        double mi, mj;
        mi = a;
        mj = 210;
        if (b < mi) {
            mj = 201;
            mi = b;
        }
        if (c < mi) {
            mj = 211;
        }
        return mj;
    }

    /** Minimum of three doubles. */
    private double Minimum(double a, double b, double c) {
        double mi;
        mi = a;
        if (b < mi) {
            mi = b;
        }
        if (c < mi) {
            mi = c;
        }
        return mi;
    }

    /**
     * Levenshtein distance normalized by the length of the longer string, in [0, 1].
     * Two empty strings are identical and yield 0.
     */
    public static double normalizedEditDistance(String s, String t) {
        int maxLength = Math.max(s.length(), t.length());
        if (maxLength == 0) {
            return 0; // BUG FIX: the original computed 0/0 -> NaN for two empty strings
        }
        return LD(s, t) * 1.0 / maxLength;
    }

    /** Minimum of three ints. */
    private static int Minimum(int a, int b, int c) {
        int mi;
        mi = a;
        if (b < mi) {
            mi = b;
        }
        if (c < mi) {
            mi = c;
        }
        return mi;
    }

    /**
     * Computes the (character-level) Levenshtein distance between {@code s} and {@code t}
     * with the classic O(n*m) dynamic program.
     * Adapted from http://www.merriampark.com/ld.htm#JAVA
     */
    public static int LD(String s, String t) {
        int d[][]; // DP matrix
        int n;     // length of s
        int m;     // length of t
        int i;     // iterates through s
        int j;     // iterates through t
        char s_i;  // ith character of s
        char t_j;  // jth character of t
        int cost;  // substitution cost (0 on match, 1 otherwise)
        n = s.length();
        m = t.length();
        if (n == 0) {
            return m;
        }
        if (m == 0) {
            return n;
        }
        d = new int[n + 1][m + 1];
        for (i = 0; i <= n; i++) {
            d[i][0] = i;
        }
        for (j = 0; j <= m; j++) {
            d[0][j] = j;
        }
        for (i = 1; i <= n; i++) {
            s_i = s.charAt(i - 1);
            for (j = 1; j <= m; j++) {
                t_j = t.charAt(j - 1);
                if (s_i == t_j) {
                    cost = 0;
                } else {
                    cost = 1;
                }
                d[i][j] = Minimum(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1] + cost);
            }
        }
        return d[n][m];
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.util.Collections;
import java.util.Iterator;
import com.google.common.base.Objects;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.rows.EncodingStats;
import org.apache.cassandra.utils.ObjectSizes;
import org.apache.cassandra.utils.memory.AbstractAllocator;
/**
* A mutable implementation of {@code DeletionInfo}.
*/
public class MutableDeletionInfo implements DeletionInfo
{
    private static final long EMPTY_SIZE = ObjectSizes.measure(new MutableDeletionInfo(0, 0));

    /**
     * This represents a deletion of the entire partition. We can't represent this within the RangeTombstoneList, so it's
     * kept separately. This also slightly optimizes the common case of a full partition deletion.
     */
    private DeletionTime partitionDeletion;

    /**
     * A list of range tombstones within the partition. This is left as null if there are no range tombstones
     * (to save an allocation, since that's a common case).
     */
    private RangeTombstoneList ranges;

    /**
     * Creates a DeletionInfo with only a top-level (row) tombstone.
     * @param markedForDeleteAt the time after which the entire row should be considered deleted
     * @param localDeletionTime what time the deletion write was applied locally (for purposes of
     *                          purging the tombstone after gc_grace_seconds).
     */
    public MutableDeletionInfo(long markedForDeleteAt, int localDeletionTime)
    {
        // Pre-1.1 node may return MIN_VALUE for non-deleted container, but the new default is MAX_VALUE
        // (see CASSANDRA-3872)
        this(new DeletionTime(markedForDeleteAt, localDeletionTime == Integer.MIN_VALUE ? Integer.MAX_VALUE : localDeletionTime));
    }

    public MutableDeletionInfo(DeletionTime partitionDeletion)
    {
        this(partitionDeletion, null);
    }

    public MutableDeletionInfo(DeletionTime partitionDeletion, RangeTombstoneList ranges)
    {
        this.partitionDeletion = partitionDeletion;
        this.ranges = ranges;
    }

    /**
     * Returns a new DeletionInfo that has no top-level tombstone or any range tombstones.
     */
    public static MutableDeletionInfo live()
    {
        return new MutableDeletionInfo(DeletionTime.LIVE);
    }

    /** Returns a deep copy of this deletion info (the range list is copied, not shared). */
    public MutableDeletionInfo mutableCopy()
    {
        return new MutableDeletionInfo(partitionDeletion, ranges == null ? null : ranges.copy());
    }

    /** Returns a copy whose range tombstone list is allocated through {@code allocator}. */
    public MutableDeletionInfo copy(AbstractAllocator allocator)
    {
        RangeTombstoneList rangesCopy = null;
        if (ranges != null)
             rangesCopy = ranges.copy(allocator);

        return new MutableDeletionInfo(partitionDeletion, rangesCopy);
    }

    /**
     * Returns whether this DeletionInfo is live, that is deletes no columns.
     */
    public boolean isLive()
    {
        return partitionDeletion.isLive() && (ranges == null || ranges.isEmpty());
    }

    /**
     * Potentially replaces the top-level tombstone with another, keeping whichever has the higher markedForDeleteAt
     * timestamp.
     * @param newInfo the deletion time to add to this deletion info.
     */
    public void add(DeletionTime newInfo)
    {
        if (newInfo.supersedes(partitionDeletion))
            partitionDeletion = newInfo;
    }

    public void add(RangeTombstone tombstone, ClusteringComparator comparator)
    {
        // Lazily allocate the list, pre-sized with the configured initial allocation size.
        if (ranges == null)
            ranges = new RangeTombstoneList(comparator, DatabaseDescriptor.getInitialRangeTombstoneListAllocationSize());

        ranges.add(tombstone);
    }

    /**
     * Combines another DeletionInfo with this one and returns the result. Whichever top-level tombstone
     * has the higher markedForDeleteAt timestamp will be kept, along with its localDeletionTime. The
     * range tombstones will be combined.
     *
     * @return this object.
     */
    public DeletionInfo add(DeletionInfo newInfo)
    {
        add(newInfo.getPartitionDeletion());

        // We know MutableDeletionInfo is the only implementation and we're not mutating it, it's just to get access to the
        // RangeTombstoneList directly.
        assert newInfo instanceof MutableDeletionInfo;
        RangeTombstoneList newRanges = ((MutableDeletionInfo)newInfo).ranges;

        if (ranges == null)
            ranges = newRanges == null ? null : newRanges.copy();
        else if (newRanges != null)
            ranges.addAll(newRanges);

        return this;
    }

    public DeletionTime getPartitionDeletion()
    {
        return partitionDeletion;
    }

    // Use sparingly, not the most efficient thing
    public Iterator<RangeTombstone> rangeIterator(boolean reversed)
    {
        return ranges == null ? Collections.emptyIterator() : ranges.iterator(reversed);
    }

    public Iterator<RangeTombstone> rangeIterator(Slice slice, boolean reversed)
    {
        return ranges == null ? Collections.emptyIterator() : ranges.iterator(slice, reversed);
    }

    /** Returns the range tombstone covering {@code name}, or null if none does. */
    public RangeTombstone rangeCovering(Clustering name)
    {
        return ranges == null ? null : ranges.search(name);
    }

    public int dataSize()
    {
        int size = TypeSizes.sizeof(partitionDeletion.markedForDeleteAt());
        return size + (ranges == null ? 0 : ranges.dataSize());
    }

    public boolean hasRanges()
    {
        return ranges != null && !ranges.isEmpty();
    }

    public int rangeCount()
    {
        return hasRanges() ? ranges.size() : 0;
    }

    /** The highest deletion timestamp across the partition deletion and all range tombstones. */
    public long maxTimestamp()
    {
        return ranges == null ? partitionDeletion.markedForDeleteAt() : Math.max(partitionDeletion.markedForDeleteAt(), ranges.maxMarkedAt());
    }

    /**
     * Whether this deletion info may modify the provided one if added to it.
     */
    public boolean mayModify(DeletionInfo delInfo)
    {
        return partitionDeletion.compareTo(delInfo.getPartitionDeletion()) > 0 || hasRanges();
    }

    @Override
    public String toString()
    {
        if (ranges == null || ranges.isEmpty())
            return String.format("{%s}", partitionDeletion);
        else
            return String.format("{%s, ranges=%s}", partitionDeletion, rangesAsString());
    }

    private String rangesAsString()
    {
        assert !ranges.isEmpty();
        StringBuilder sb = new StringBuilder();
        ClusteringComparator cc = ranges.comparator();
        Iterator<RangeTombstone> iter = rangeIterator(false);
        while (iter.hasNext())
        {
            RangeTombstone i = iter.next();
            sb.append(i.deletedSlice().toString(cc));
            sb.append('@');
            sb.append(i.deletionTime());
        }
        return sb.toString();
    }

    // Updates all the timestamp of the deletion contained in this DeletionInfo to be {@code timestamp}.
    public DeletionInfo updateAllTimestamp(long timestamp)
    {
        if (partitionDeletion.markedForDeleteAt() != Long.MIN_VALUE)
            partitionDeletion = new DeletionTime(timestamp, partitionDeletion.localDeletionTime());

        if (ranges != null)
            ranges.updateAllTimestamp(timestamp);
        return this;
    }

    @Override
    public boolean equals(Object o)
    {
        if(!(o instanceof MutableDeletionInfo))
            return false;
        MutableDeletionInfo that = (MutableDeletionInfo)o;
        return partitionDeletion.equals(that.partitionDeletion) && Objects.equal(ranges, that.ranges);
    }

    @Override
    public final int hashCode()
    {
        return Objects.hashCode(partitionDeletion, ranges);
    }

    @Override
    public long unsharedHeapSize()
    {
        return EMPTY_SIZE + partitionDeletion.unsharedHeapSize() + (ranges == null ? 0 : ranges.unsharedHeapSize());
    }

    public void collectStats(EncodingStats.Collector collector)
    {
        collector.update(partitionDeletion);
        if (ranges != null)
            ranges.collectStats(collector);
    }

    public static Builder builder(DeletionTime partitionLevelDeletion, ClusteringComparator comparator, boolean reversed)
    {
        return new Builder(partitionLevelDeletion, comparator, reversed);
    }

    /**
     * Builds DeletionInfo object from (in order) range tombstone markers.
     */
    public static class Builder
    {
        private final MutableDeletionInfo deletion;
        private final ClusteringComparator comparator;

        private final boolean reversed;

        // The currently open range tombstone marker, if any; closed by a later marker.
        private RangeTombstoneMarker openMarker;

        private Builder(DeletionTime partitionLevelDeletion, ClusteringComparator comparator, boolean reversed)
        {
            this.deletion = new MutableDeletionInfo(partitionLevelDeletion);
            this.comparator = comparator;
            this.reversed = reversed;
        }

        public void add(RangeTombstoneMarker marker)
        {
            // Handle the close side first: a boundary marker both closes the current range and opens a new one.
            if (marker.isClose(reversed))
            {
                DeletionTime openDeletion = openMarker.openDeletionTime(reversed);
                assert marker.closeDeletionTime(reversed).equals(openDeletion);

                ClusteringBound open = openMarker.openBound(reversed);
                ClusteringBound close = marker.closeBound(reversed);

                Slice slice = reversed ? Slice.make(close, open) : Slice.make(open, close);
                deletion.add(new RangeTombstone(slice, openDeletion), comparator);
            }

            if (marker.isOpen(reversed))
            {
                openMarker = marker;
            }
        }

        public MutableDeletionInfo build()
        {
            return deletion;
        }
    }
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.config.exceptions.ConfigFileHasChangedException;
import com.thoughtworks.go.config.exceptions.ConfigMergeException;
import com.thoughtworks.go.config.materials.svn.SvnMaterialConfig;
import com.thoughtworks.go.config.materials.tfs.TfsMaterialConfig;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.domain.GoConfigRevision;
import com.thoughtworks.go.helper.ConfigFileFixture;
import com.thoughtworks.go.helper.PipelineConfigMother;
import com.thoughtworks.go.helper.StageConfigMother;
import com.thoughtworks.go.server.util.ServerVersion;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.*;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.hamcrest.core.Is;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.security.GrantedAuthority;
import org.springframework.security.context.SecurityContext;
import org.springframework.security.context.SecurityContextHolder;
import org.springframework.security.providers.UsernamePasswordAuthenticationToken;
import org.springframework.security.userdetails.User;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.Vector;
import static com.thoughtworks.go.helper.ConfigFileFixture.VALID_XML_3169;
import static com.thoughtworks.go.util.GoConfigFileHelper.loadAndMigrate;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.nullValue;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class GoFileConfigDataSourceTest {
private GoFileConfigDataSource dataSource;
private GoConfigFileHelper configHelper;
private SystemEnvironment systemEnvironment;
private ConfigRepository configRepository;
private TimeProvider timeProvider;
private ConfigCache configCache = new ConfigCache();
private GoConfigDao goConfigDao;
    @Before
    public void setup() throws Exception {
        systemEnvironment = new SystemEnvironment();
        configHelper = new GoConfigFileHelper();
        configHelper.onSetUp();
        configRepository = new ConfigRepository(systemEnvironment);
        configRepository.initialize();
        // Mock the clock so individual tests can control revision timestamps.
        timeProvider = mock(TimeProvider.class);
        when(timeProvider.currentTime()).thenReturn(new Date());
        ServerVersion serverVersion = new ServerVersion();
        ConfigElementImplementationRegistry registry = ConfigElementImplementationRegistryMother.withNoPlugins();
        // Fail fast if the config migration fails instead of silently continuing.
        dataSource = new GoFileConfigDataSource(new GoConfigMigration(new GoConfigMigration.UpgradeFailedHandler() {
            public void handle(Exception e) {
                throw new RuntimeException(e);
            }
        }, configRepository, new TimeProvider(), configCache, registry),
                configRepository, systemEnvironment, timeProvider, configCache, serverVersion, registry, mock(ServerHealthService.class));
        dataSource.upgradeIfNecessary();
        CachedFileGoConfig fileService = new CachedFileGoConfig(dataSource, new ServerHealthService());
        fileService.loadConfigIfNull();
        goConfigDao = new GoConfigDao(fileService);
        configHelper.load();
        configHelper.usingCruiseConfigDao(goConfigDao);
    }
    @After
    public void teardown() throws Exception {
        configHelper.onTearDown();
        // Restore the merge-feature toggle so state does not leak into other tests.
        systemEnvironment.reset(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE);
    }
    /**
     * Update command that adds a pipeline to the "my-grp" group and reports a fixed
     * username as the config-modifying user via the UserAware contract.
     */
    private static class UserAwarePipelineAddingCommand implements UpdateConfigCommand, UserAware {
        private final String pipelineName;
        private final String username;

        UserAwarePipelineAddingCommand(String pipelineName, String username) {
            this.pipelineName = pipelineName;
            this.username = username;
        }

        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            cruiseConfig.addPipeline("my-grp", PipelineConfigMother.createPipelineConfig(pipelineName, "stage-other", "job-yet-another"));
            return cruiseConfig;
        }

        public ConfigModifyingUser user() {
            return new ConfigModifyingUser(username);
        }
    }
    @Test
    public void shouldUse_UserFromSession_asConfigModifyingUserWhenNoneGiven() throws GitAPIException, IOException {
        // Put an authenticated user into the security context; the save path should record
        // that user as the modifier of the resulting config revision.
        SecurityContext context = SecurityContextHolder.getContext();
        context.setAuthentication(new UsernamePasswordAuthenticationToken(new User("loser_boozer", "pass", true, true, true, true, new GrantedAuthority[]{}), null));
        goConfigDao.updateMailHost(getMailHost("mailhost.local"));
        CruiseConfig cruiseConfig = goConfigDao.load();
        GoConfigRevision revision = configRepository.getRevision(cruiseConfig.getMd5());
        assertThat(revision.getUsername(), is("loser_boozer"));
    }
    @Test
    public void shouldVersionTheCruiseConfigXmlWhenSaved() throws Exception {
        CachedGoConfig cachedGoConfig = configHelper.getCachedGoConfig();
        CruiseConfig configForEdit = cachedGoConfig.loadForEditing();
        GoConfigHolder configHolder = new GoConfigHolder(cachedGoConfig.currentConfig(), configForEdit);

        // First save: user "loser", at a controlled time two days from now.
        Date loserChangedAt = new DateTime().plusDays(2).toDate();
        when(timeProvider.currentTime()).thenReturn(loserChangedAt);
        GoConfigHolder afterFirstSave = dataSource.writeWithLock(new UserAwarePipelineAddingCommand("foo-pipeline", "loser"), configHolder).getConfigHolder();

        // Second save: user "bigger_loser", at a later controlled time.
        Date biggerLoserChangedAt = new DateTime().plusDays(4).toDate();
        when(timeProvider.currentTime()).thenReturn(biggerLoserChangedAt);
        GoConfigHolder afterSecondSave = dataSource.writeWithLock(new UserAwarePipelineAddingCommand("bar-pipeline", "bigger_loser"), afterFirstSave).getConfigHolder();

        // The first revision must carry the first user's name, time, md5 and content.
        String expectedMd5 = afterFirstSave.config.getMd5();
        GoConfigRevision firstRev = configRepository.getRevision(expectedMd5);
        assertThat(firstRev.getUsername(), is("loser"));
        assertThat(firstRev.getGoVersion(), is("N/A"));
        assertThat(firstRev.getMd5(), is(expectedMd5));
        assertThat(firstRev.getTime(), is(loserChangedAt));
        assertThat(firstRev.getSchemaVersion(), is(GoConstants.CONFIG_SCHEMA_VERSION));
        assertThat(com.thoughtworks.go.config.ConfigMigrator.load(firstRev.getContent()), is(afterFirstSave.configForEdit));

        // The second revision must likewise reflect the second save.
        CruiseConfig config = afterSecondSave.config;
        assertThat(config.hasPipelineNamed(new CaseInsensitiveString("bar-pipeline")), is(true));
        expectedMd5 = config.getMd5();
        GoConfigRevision secondRev = configRepository.getRevision(expectedMd5);
        assertThat(secondRev.getUsername(), is("bigger_loser"));
        assertThat(secondRev.getGoVersion(), is("N/A"));
        assertThat(secondRev.getMd5(), is(expectedMd5));
        assertThat(secondRev.getTime(), is(biggerLoserChangedAt));
        assertThat(secondRev.getSchemaVersion(), is(GoConstants.CONFIG_SCHEMA_VERSION));
        assertThat(com.thoughtworks.go.config.ConfigMigrator.load(secondRev.getContent()), is(afterSecondSave.configForEdit));
    }
@Test
public void shouldSaveTheCruiseConfigXml() throws Exception {
    // Writing a migrated, valid config must land in the data source's file location.
    File configFile = dataSource.fileLocation();
    dataSource.write(ConfigMigrator.migrate(VALID_XML_3169), false);
    String writtenContent = FileUtils.readFileToString(configFile);
    assertThat(writtenContent, containsString("http://hg-server/hg/connectfour"));
}
@Test
public void shouldNotCorruptTheCruiseConfigXml() throws Exception {
    // Writing syntactically invalid XML must fail fast and leave the on-disk config untouched.
    File file = dataSource.fileLocation();
    String originalCopy = FileUtils.readFileToString(file);
    try {
        dataSource.write("abc", false);
        fail("Should not allow us to write an invalid config");
    } catch (Exception e) {
        // XML parser error for content before the prolog — proves validation ran.
        assertThat(e.getMessage(), containsString("Content is not allowed in prolog"));
    }
    // Fixed: use the statically imported is() like every other assertion in this class,
    // instead of the fully-qualified Is.is().
    assertThat(FileUtils.readFileToString(file), is(originalCopy));
}
@Test
public void shouldLoadAsUser_Filesystem_WithMd5Sum() throws Exception {
    // A load must expose the md5 of the on-disk XML; an out-of-band edit of the file must
    // yield a new md5 and commit the new content to the config repo as the FILESYSTEM user.
    GoConfigHolder configHolder = goConfigDao.loadConfigHolder();
    String md5 = DigestUtils.md5Hex(FileUtils.readFileToString(dataSource.fileLocation()));
    assertThat(configHolder.configForEdit.getMd5(), is(md5));
    assertThat(configHolder.config.getMd5(), is(md5));

    // Simulate a user editing the config file directly on disk.
    CruiseConfig forEdit = configHolder.configForEdit;
    forEdit.addPipeline("my-awesome-group", PipelineConfigMother.createPipelineConfig("pipeline-foo", "stage-bar", "job-baz"));
    FileOutputStream fos = new FileOutputStream(dataSource.fileLocation());
    try {
        new MagicalGoConfigXmlWriter(configCache, ConfigElementImplementationRegistryMother.withNoPlugins()).write(forEdit, fos, false);
    } finally {
        // Fixed: the stream was previously leaked; closing flushes the write deterministically
        // and releases the file handle before the subsequent reads.
        fos.close();
    }

    configHolder = dataSource.load();
    String xmlText = FileUtils.readFileToString(dataSource.fileLocation());
    String secondMd5 = DigestUtils.md5Hex(xmlText);
    assertThat(configHolder.configForEdit.getMd5(), is(secondMd5));
    assertThat(configHolder.config.getMd5(), is(secondMd5));
    assertThat(configHolder.configForEdit.getMd5(), is(not(md5)));

    GoConfigRevision commitedVersion = configRepository.getRevision(secondMd5);
    assertThat(commitedVersion.getContent(), is(xmlText));
    assertThat(commitedVersion.getUsername(), is(GoFileConfigDataSource.FILESYSTEM));
}
@Test
public void shouldEncryptSvnPasswordWhenConfigIsChangedViaFileSystem() throws Exception {
    // A plain-text SVN password written straight to the config file must come back
    // encrypted after the data source loads it.
    String pipelineXml = String.format(
            "<pipeline name='pipeline1'>"
            + " <materials>"
            + " <svn url='svnurl' username='admin' password='%s'/>"
            + " </materials>"
            + " <stage name='mingle'>"
            + " <jobs>"
            + " <job name='do-something'>"
            + " </job>"
            + " </jobs>"
            + " </stage>"
            + "</pipeline>", "hello");
    String fullConfig = ConfigFileFixture.configWithPipeline(pipelineXml, GoConstants.CONFIG_SCHEMA_VERSION);
    FileUtils.writeStringToFile(dataSource.fileLocation(), fullConfig);

    GoConfigHolder holder = dataSource.load();
    PipelineConfig pipeline = holder.config.pipelineConfigByName(new CaseInsensitiveString("pipeline1"));
    SvnMaterialConfig svnMaterial = (SvnMaterialConfig) pipeline.materialConfigs().get(0);
    assertThat(svnMaterial.getEncryptedPassword(), is(not(nullValue())));
}
@Test
public void shouldEncryptTfsPasswordWhenConfigIsChangedViaFileSystem() throws Exception {
    // A plain-text TFS password written straight to the config file must come back
    // encrypted after the data source loads it.
    //
    // Fixed: the template had password='password' with no %s, so the "hello" argument to
    // String.format was silently ignored (Formatter drops extra arguments). Restore the
    // placeholder so the test actually parameterizes the password like its SVN twin above.
    String configContent = ConfigFileFixture.configWithPipeline(String.format(
            "<pipeline name='pipeline1'>"
            + " <materials>"
            + " <tfs url='http://some.repo.local' username='username@domain' password='%s' projectPath='$/project_path' />"
            + " </materials>"
            + " <stage name='mingle'>"
            + " <jobs>"
            + " <job name='do-something'>"
            + " </job>"
            + " </jobs>"
            + " </stage>"
            + "</pipeline>", "hello"), GoConstants.CONFIG_SCHEMA_VERSION);
    FileUtils.writeStringToFile(dataSource.fileLocation(), configContent);

    GoConfigHolder configHolder = dataSource.load();
    PipelineConfig pipelineConfig = configHolder.config.pipelineConfigByName(new CaseInsensitiveString("pipeline1"));
    TfsMaterialConfig tfsMaterial = (TfsMaterialConfig) pipelineConfig.materialConfigs().get(0);
    assertThat(tfsMaterial.getEncryptedPassword(), is(not(nullValue())));
}
@Test
public void shouldNotReloadIfConfigDoesNotChange() throws Exception {
    // First load after reloadIfModified() logs a change and returns a holder; a second
    // load with no file change logs nothing and returns null.
    // NOTE(review): the fixture is never stopped here — confirm whether LogFixture
    // requires explicit cleanup between tests.
    LogFixture logFixture = LogFixture.startListening();
    dataSource.reloadIfModified();
    GoConfigHolder firstLoad = dataSource.load();
    assertThat(logFixture.getLog(), containsString("Config file changed at"));
    assertThat(firstLoad, not(nullValue()));

    logFixture.clear();
    GoConfigHolder secondLoad = dataSource.load();
    assertThat(logFixture.getLog(), not(containsString("Config file changed at")));
    assertThat(secondLoad, is(nullValue()));
}
@Test
public void shouldUpdateFileAttributesIfFileContentsHaveNotChanged() throws Exception {
    // The reload strategy caches file metadata (mtime, size) so the next check can skip
    // the content-checksum computation entirely.
    dataSource.reloadIfModified();
    assertThat(dataSource.load(), not(nullValue()));

    GoFileConfigDataSource.ReloadIfModified strategy =
            (GoFileConfigDataSource.ReloadIfModified) ReflectionUtil.getField(dataSource, "reloadStrategy");
    // Invalidate the cached attributes so the next load must re-inspect the file.
    ReflectionUtil.setField(strategy, "lastModified", -1);
    ReflectionUtil.setField(strategy, "prevSize", -1);

    assertThat(dataSource.load(), is(nullValue()));
    Long recordedLastModified = (Long) ReflectionUtil.getField(strategy, "lastModified");
    Long recordedSize = (Long) ReflectionUtil.getField(strategy, "prevSize");
    assertThat(recordedLastModified, is(dataSource.fileLocation().lastModified()));
    assertThat(recordedSize, is(dataSource.fileLocation().length()));
}
@Test
public void shouldBeAbleToConcurrentAccess() throws Exception {
    // One thread repeatedly updates the mail host through the DAO while another rewrites
    // the raw XML through the data source; neither path may throw.
    GoConfigFileHelper helper = new GoConfigFileHelper(loadAndMigrate(ConfigFileFixture.CONFIG_WITH_NANT_AND_EXEC_BUILDER));
    final String originalXml = FileUtil.readContentFromFile(helper.getConfigFile());
    // Vector gives us a thread-safe sink for failures from both workers.
    final List<Exception> failures = new Vector<Exception>();

    Runnable mailHostUpdater = new Runnable() {
        public void run() {
            for (int attempt = 0; attempt < 5; attempt++) {
                try {
                    goConfigDao.updateMailHost(new MailHost("hostname", 9999, "user", "password", false, false, "from@local", "admin@local"));
                } catch (Exception e) {
                    e.printStackTrace();
                    failures.add(e);
                }
            }
        }
    };
    Runnable configRewriter = new Runnable() {
        public void run() {
            for (int attempt = 0; attempt < 5; attempt++) {
                try {
                    dataSource.write(originalXml, false);
                } catch (Exception e) {
                    e.printStackTrace();
                    failures.add(e);
                }
            }
        }
    };

    Thread updaterThread = new Thread(mailHostUpdater, "Update-license");
    Thread writerThread = new Thread(configRewriter, "Modify-config");
    updaterThread.start();
    writerThread.start();
    updaterThread.join();
    writerThread.join();
    assertThat(failures.size(), is(0));
}
@Test
public void shouldGetMergedConfig() throws Exception {
    // Save against an md5 taken before another change landed: the result must contain
    // both the newer mail host and the pipeline added by this save (i.e. a merge).
    configHelper.addMailHost(getMailHost("mailhost.local.old"));
    GoConfigHolder staleHolder = dataSource.forceLoad(dataSource.fileLocation());
    CruiseConfig staleConfigForEdit = staleHolder.configForEdit;
    final String staleMd5 = staleConfigForEdit.getMd5();
    MailHost staleMailHost = staleConfigForEdit.server().mailHost();
    assertThat(staleMailHost.getHostName(), is("mailhost.local.old"));
    assertThat(staleMailHost.getHostName(), is(not("mailhost.local")));

    // Config changes underneath the stale md5.
    goConfigDao.updateMailHost(getMailHost("mailhost.local"));
    GoConfigHolder freshHolder = dataSource.forceLoad(dataSource.fileLocation());

    GoFileConfigDataSource.GoConfigSaveResult result = dataSource.writeWithLock(new NoOverwriteUpdateConfigCommand() {
        @Override
        public String unmodifiedMd5() {
            return staleMd5;
        }

        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            cruiseConfig.addPipeline("g", PipelineConfigMother.pipelineConfig("p1", StageConfigMother.custom("s", "b")));
            return cruiseConfig;
        }
    }, freshHolder);

    CruiseConfig mergedConfig = result.getConfigHolder().config;
    assertThat(mergedConfig.server().mailHost().getHostName(), is("mailhost.local"));
    assertThat(mergedConfig.hasPipelineNamed(new CaseInsensitiveString("p1")), is(true));
}
@Test
public void shouldPropagateConfigHasChangedException() throws Exception {
    // Saving with a stale md5 whose change cannot be merged must surface a
    // ConfigMergeException as the cause of the failure.
    String staleMd5 = dataSource.forceLoad(dataSource.fileLocation()).configForEdit.getMd5();
    goConfigDao.updateConfig(configHelper.addPipelineCommand(staleMd5, "p1", "s1", "b1"));
    GoConfigHolder currentHolder = dataSource.forceLoad(dataSource.fileLocation());
    try {
        dataSource.writeWithLock(configHelper.addPipelineCommand(staleMd5, "p2", "s", "b"), currentHolder);
        fail("Should throw ConfigFileHasChanged exception");
    } catch (Exception e) {
        assertThat(e.getCause().getClass().getName(), e.getCause() instanceof ConfigMergeException, is(true));
    }
}
@Test
public void shouldThrowConfigMergeExceptionWhenConfigMergeFeatureIsTurnedOff() throws Exception {
    // With the merge feature disabled, a save against a stale md5 must fail outright
    // with the "config changed, please refresh" message instead of merging.
    String firstMd5 = dataSource.forceLoad(dataSource.fileLocation()).configForEdit.getMd5();
    goConfigDao.updateConfig(configHelper.addPipelineCommand(firstMd5, "p0", "s0", "b0"));
    String staleMd5 = dataSource.forceLoad(dataSource.fileLocation()).configForEdit.getMd5();
    goConfigDao.updateConfig(configHelper.addPipelineCommand(staleMd5, "p1", "s1", "j1"));
    GoConfigHolder currentHolder = dataSource.forceLoad(dataSource.fileLocation());

    // NOTE(review): this flag is never restored; confirm the harness resets
    // SystemEnvironment between tests, otherwise this leaks into later tests.
    systemEnvironment.set(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE, Boolean.FALSE);
    try {
        dataSource.writeWithLock(configHelper.changeJobNameCommand(staleMd5, "p0", "s0", "b0", "j0"), currentHolder);
        fail("Should throw ConfigMergeException");
    } catch (RuntimeException e) {
        ConfigMergeException cme = (ConfigMergeException) e.getCause();
        assertThat(cme.getMessage(), is(ConfigFileHasChangedException.CONFIG_CHANGED_PLEASE_REFRESH));
    }
}
@Test
public void shouldGetConfigMergedStateWhenAMergerOccurs() throws Exception {
    // Saving with an md5 taken before another change landed must report MERGED.
    configHelper.addMailHost(getMailHost("mailhost.local.old"));
    String staleMd5 = dataSource.forceLoad(dataSource.fileLocation()).configForEdit.getMd5();
    configHelper.addMailHost(getMailHost("mailhost.local"));
    GoConfigHolder currentHolder = dataSource.forceLoad(dataSource.fileLocation());

    GoFileConfigDataSource.GoConfigSaveResult saveResult =
            dataSource.writeWithLock(configHelper.addPipelineCommand(staleMd5, "p1", "s", "b"), currentHolder);
    assertThat(saveResult.getConfigSaveState(), is(ConfigSaveState.MERGED));
}
/**
 * Builds a throwaway mail-host configuration for the given host name, with fixed
 * port/credentials and TLS enabled, as used by the merge-state tests above.
 */
private MailHost getMailHost(String hostName) {
    final int smtpPort = 9999;
    final boolean tls = true;
    final boolean auth = false;
    return new MailHost(hostName, smtpPort, "user", "password", tls, auth, "from@local", "admin@local");
}
@Test
public void shouldGetConfigUpdateStateWhenAnUpdateOccurs() throws Exception {
    // Saving with the current md5 (no intervening change) must report UPDATED, not MERGED.
    String currentMd5 = dataSource.forceLoad(dataSource.fileLocation()).configForEdit.getMd5();
    GoConfigHolder currentHolder = dataSource.forceLoad(dataSource.fileLocation());

    GoFileConfigDataSource.GoConfigSaveResult saveResult =
            dataSource.writeWithLock(configHelper.addPipelineCommand(currentMd5, "p1", "s", "b"), currentHolder);
    assertThat(saveResult.getConfigSaveState(), is(ConfigSaveState.UPDATED));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.undertow;
import java.io.IOException;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.Collection;
import io.undertow.Handlers;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.handlers.form.EagerFormParsingHandler;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import io.undertow.util.Methods;
import io.undertow.util.MimeMappings;
import io.undertow.util.StatusCodes;
import io.undertow.websockets.core.WebSocketChannel;
import org.apache.camel.AsyncCallback;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.TypeConverter;
import org.apache.camel.component.undertow.UndertowConstants.EventType;
import org.apache.camel.component.undertow.handlers.CamelWebSocketHandler;
import org.apache.camel.impl.DefaultConsumer;
import org.apache.camel.util.CollectionStringBuffer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Undertow consumer which is also an Undertow HttpHandler implementation to handle incoming request.
*/
/**
 * The Undertow consumer which is also an Undertow HttpHandler implementation to handle incoming request.
 */
public class UndertowConsumer extends DefaultConsumer implements HttpHandler {

    private static final Logger LOG = LoggerFactory.getLogger(UndertowConsumer.class);

    // Set only while a WebSocket endpoint is started; doStop() detaches this consumer from it.
    private CamelWebSocketHandler webSocketHandler;

    public UndertowConsumer(UndertowEndpoint endpoint, Processor processor) {
        super(endpoint, processor);
    }

    @Override
    public UndertowEndpoint getEndpoint() {
        return (UndertowEndpoint) super.getEndpoint();
    }

    /**
     * Registers this consumer with the Undertow component: as a WebSocket handler when the
     * endpoint is a WebSocket endpoint, otherwise as a plain HTTP handler wrapped so that
     * HTTP/1.1 "100-continue" requests and form payloads are handled before this handler runs.
     */
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        final UndertowEndpoint endpoint = getEndpoint();
        if (endpoint.isWebSocket()) {
            /*
             * note that the new CamelWebSocketHandler() we pass to registerEndpoint() does not necessarily have to be
             * the same instance that is returned from there
             */
            this.webSocketHandler = (CamelWebSocketHandler) endpoint.getComponent().registerEndpoint(endpoint.getHttpHandlerRegistrationInfo(), endpoint.getSslContext(), new CamelWebSocketHandler());
            this.webSocketHandler.setConsumer(this);
        } else {
            // allow for HTTP 1.1 continue
            endpoint.getComponent().registerEndpoint(endpoint.getHttpHandlerRegistrationInfo(), endpoint.getSslContext(), Handlers.httpContinueRead(
                    // wrap with EagerFormParsingHandler to enable undertow form parsers
                    new EagerFormParsingHandler().setNext(UndertowConsumer.this)));
        }
    }

    /**
     * Detaches this consumer from the WebSocket handler (if any) and unregisters the
     * endpoint from the shared Undertow component.
     */
    @Override
    protected void doStop() throws Exception {
        super.doStop();
        if (this.webSocketHandler != null) {
            this.webSocketHandler.setConsumer(null);
        }
        UndertowEndpoint endpoint = getEndpoint();
        endpoint.getComponent().unregisterEndpoint(endpoint.getHttpHandlerRegistrationInfo(), endpoint.getSslContext());
    }

    /**
     * Handles an incoming HTTP request. OPTIONS requests (unless explicitly enabled on the
     * endpoint) are answered directly with an Allow header computed from all handlers
     * registered for this URI. Any other request is re-dispatched off the I/O thread, routed
     * through the Camel processor, and the result is written back as the HTTP response.
     */
    @Override
    public void handleRequest(HttpServerExchange httpExchange) throws Exception {
        HttpString requestMethod = httpExchange.getRequestMethod();

        if (Methods.OPTIONS.equals(requestMethod) && !getEndpoint().isOptionsEnabled()) {
            CollectionStringBuffer csb = new CollectionStringBuffer(",");
            Collection<HttpHandlerRegistrationInfo> handlers = getEndpoint().getComponent().getHandlers();
            for (HttpHandlerRegistrationInfo reg : handlers) {
                URI uri = reg.getUri();
                // what other HTTP methods may exists for the same path
                if (reg.getMethodRestrict() != null && getEndpoint().getHttpURI().equals(uri)) {
                    String restrict = reg.getMethodRestrict();
                    // strip a trailing ",OPTIONS" (8 characters) so OPTIONS is not listed twice
                    if (restrict.endsWith(",OPTIONS")) {
                        restrict = restrict.substring(0, restrict.length() - 8);
                    }
                    csb.append(restrict);
                }
            }
            String allowedMethods = csb.toString();
            if (ObjectHelper.isEmpty(allowedMethods)) {
                // fall back to the endpoint's own restriction, then to all standard methods
                allowedMethods = getEndpoint().getHttpMethodRestrict();
            }
            if (ObjectHelper.isEmpty(allowedMethods)) {
                allowedMethods = "GET,HEAD,POST,PUT,DELETE,TRACE,OPTIONS,CONNECT,PATCH";
            }
            if (!allowedMethods.contains("OPTIONS")) {
                allowedMethods = allowedMethods + ",OPTIONS";
            }
            //return list of allowed methods in response headers
            httpExchange.setStatusCode(StatusCodes.OK);
            httpExchange.getResponseHeaders().put(ExchangeHeaders.CONTENT_LENGTH, 0);
            // do not include content-type as that would indicate to the caller that we can only do text/plain
            httpExchange.getResponseHeaders().put(Headers.ALLOW, allowedMethods);
            httpExchange.getResponseSender().close();
            return;
        }

        //perform blocking operation on exchange
        if (httpExchange.isInIoThread()) {
            // re-dispatch this same handler onto a worker thread; Camel routing may block
            httpExchange.dispatch(this);
            return;
        }

        //create new Exchange
        //binding is used to extract header and payload(if available)
        Exchange camelExchange = getEndpoint().createExchange(httpExchange);

        //Unit of Work to process the Exchange
        createUoW(camelExchange);
        try {
            getProcessor().process(camelExchange);
        } catch (Exception e) {
            // delegate to the consumer's exception handler; a response is still produced below
            getExceptionHandler().handleException(e);
        } finally {
            doneUoW(camelExchange);
        }

        Object body = getResponseBody(httpExchange, camelExchange);
        TypeConverter tc = getEndpoint().getCamelContext().getTypeConverter();
        if (body == null) {
            LOG.trace("No payload to send as reply for exchange: {}", camelExchange);
            httpExchange.getResponseHeaders().put(ExchangeHeaders.CONTENT_TYPE, MimeMappings.DEFAULT_MIME_MAPPINGS.get("txt"));
            httpExchange.getResponseSender().send("No response available");
        } else {
            ByteBuffer bodyAsByteBuffer = tc.convertTo(ByteBuffer.class, body);
            httpExchange.getResponseSender().send(bodyAsByteBuffer);
        }
        httpExchange.getResponseSender().close();
    }

    /**
     * Create an {@link Exchange} from the associated {@link UndertowEndpoint} and set the {@code in} {@link Message}'s
     * body to the given {@code message} and {@link UndertowConstants#CONNECTION_KEY} header to the given
     * {@code connectionKey}.
     *
     * @param connectionKey an identifier of {@link WebSocketChannel} through which the {@code message} was received
     * @param message the message received via the {@link WebSocketChannel}
     */
    public void sendMessage(final String connectionKey, final Object message) {
        final Exchange exchange = getEndpoint().createExchange();

        // set header and body
        exchange.getIn().setHeader(UndertowConstants.CONNECTION_KEY, connectionKey);
        exchange.getIn().setBody(message);

        // send exchange using the async routing engine
        getAsyncProcessor().process(exchange, new AsyncCallback() {
            public void done(boolean doneSync) {
                if (exchange.getException() != null) {
                    getExceptionHandler().handleException("Error processing exchange", exchange,
                            exchange.getException());
                }
            }
        });
    }

    /**
     * Send a notification related a WebSocket peer.
     *
     * @param connectionKey of WebSocket peer
     * @param eventType the type of the event
     */
    public void sendEventNotification(String connectionKey, EventType eventType) {
        final Exchange exchange = getEndpoint().createExchange();

        final Message in = exchange.getIn();
        in.setHeader(UndertowConstants.CONNECTION_KEY, connectionKey);
        // the event is exposed both as its numeric code and as the enum constant
        in.setHeader(UndertowConstants.EVENT_TYPE, eventType.getCode());
        in.setHeader(UndertowConstants.EVENT_TYPE_ENUM, eventType);

        // send exchange using the async routing engine
        getAsyncProcessor().process(exchange, new AsyncCallback() {
            public void done(boolean doneSync) {
                if (exchange.getException() != null) {
                    getExceptionHandler().handleException("Error processing exchange", exchange, exchange.getException());
                }
            }
        });
    }

    /**
     * Extracts the reply payload for the HTTP response via the endpoint's binding,
     * preferring the out message when the exchange has one.
     */
    private Object getResponseBody(HttpServerExchange httpExchange, Exchange camelExchange) throws IOException {
        Object result;
        if (camelExchange.hasOut()) {
            result = getEndpoint().getUndertowHttpBinding().toHttpResponse(httpExchange, camelExchange.getOut());
        } else {
            result = getEndpoint().getUndertowHttpBinding().toHttpResponse(httpExchange, camelExchange.getIn());
        }
        return result;
    }
}
| |
/*******************************************************************************
* Copyright 2014 United States Government as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package gov.nasa.ensemble.core.plan.advisor.view.fixing;
import gov.nasa.ensemble.common.type.IStringifier;
import gov.nasa.ensemble.common.type.StringifierRegistry;
import gov.nasa.ensemble.common.ui.EnsembleComposite;
import gov.nasa.ensemble.core.model.plan.EActivityGroup;
import gov.nasa.ensemble.core.model.plan.EPlanElement;
import gov.nasa.ensemble.core.plan.advisor.PlanAdvisor;
import gov.nasa.ensemble.core.plan.advisor.fixing.SuggestedStartTime;
import gov.nasa.ensemble.core.plan.advisor.fixing.SuggestionComparator;
import gov.nasa.ensemble.core.plan.advisor.fixing.ViolationFixes;
import gov.nasa.ensemble.core.plan.editor.PlanPrinter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeColumn;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.PlatformUI;
/**
 * Wizard page that presents the plan advisor's suggested violation fixes: a checkable tree
 * of activities to move (with suggested start times), a table of activities that could not
 * be fixed, and — when invoked on a selection — a checkable table of activities to add to
 * that selection. The user's checkbox choices are folded back into a {@link ViolationFixes}
 * via {@link #getAcceptedViolationFixes()}.
 */
public class FixingViolationsWizardFixesPage extends WizardPage {

    // Renders suggested start times in the tree's second column.
    private static final IStringifier<Date> DATE_STRINGIFIER = StringifierRegistry.getStringifier(Date.class);

    // Selection the wizard was invoked on; when null, no "opposing items" table is created.
    private final ISelection selection;
    // Latest fixes supplied by the advisor; accessed only from synchronized methods.
    private ViolationFixes violationFixes;
    // Checkable tree of activities to be moved, grouped under their activity groups.
    private Tree movedItems;
    // Activities the advisor could not fix (no checkboxes).
    private Table unfixedItems;
    // Activities to add to the user's selection (checkable); null when selection == null.
    private Table opposingItems;

    public FixingViolationsWizardFixesPage(ISelection selection) {
        super("Suggested fixes");
        this.selection = selection;
    }

    /**
     * Stores the advisor's fixes and refreshes all three widgets to display them.
     */
    public synchronized void setViolationFixes(ViolationFixes violationFixes) {
        this.violationFixes = violationFixes;
        setMessage("The plan advisor suggests the following changes.");
        updateViolationFixes();
    }

    /**
     * Builds the page: the "moved" tree on top, and below it the "unfixed" table plus —
     * only when a selection exists — the "opposing" table side by side.
     */
    @Override
    public void createControl(Composite parent) {
        Composite controlComposite = new EnsembleComposite(parent, SWT.NONE);
        controlComposite.setLayout(new FillLayout(SWT.VERTICAL));

        Composite movedComposite = new EnsembleComposite(controlComposite, SWT.NONE);
        GridLayout movedLayout = new GridLayout(1, false);
        movedComposite.setLayout(movedLayout);
        Label movedLabel = new Label(movedComposite, SWT.WRAP);
        movedLabel.setText("Items to be moved:");
        movedItems = createTree(movedComposite);
        movedItems.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));

        Composite bottomComposite = new EnsembleComposite(controlComposite, SWT.NONE);
        bottomComposite.setLayout(new FillLayout());
        Composite unfixedComposite = new EnsembleComposite(bottomComposite, SWT.NONE);
        GridLayout unfixedLayout = new GridLayout(1, false);
        unfixedComposite.setLayout(unfixedLayout);
        Label unfixedLabel = new Label(unfixedComposite, SWT.WRAP);
        unfixedLabel.setText("Items that couldn't be fixed:");
        unfixedItems = createActivityTable(unfixedComposite, false);
        unfixedItems.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));

        if (selection != null) {
            Composite opposingComposite = new EnsembleComposite(bottomComposite, SWT.NONE);
            GridLayout opposingLayout = new GridLayout(1, false);
            opposingComposite.setLayout(opposingLayout);
            Label opposingLabel = new Label(opposingComposite, SWT.WRAP);
            opposingLabel.setText("Items to be added to the selection:");
            opposingItems = createActivityTable(opposingComposite, true);
            opposingItems.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        }
        // Fixes may have arrived before the widgets existed; render them now.
        updateViolationFixes();
        setControl(controlComposite);
    }

    /**
     * Folds the user's checkbox choices back into a {@link ViolationFixes}: only checked
     * start-time suggestions and checked table rows are included. Returns null when no
     * fixes were ever set.
     */
    public ViolationFixes getAcceptedViolationFixes() {
        if (violationFixes == null) {
            return null;
        }
        PlanAdvisor advisor = violationFixes.getAdvisor();
        java.util.List<SuggestedStartTime> acceptedStartTimes = getAcceptedStartTimes();
        java.util.List<EPlanElement> unsatisfiedNodes = getAffectedNodes(unfixedItems);
        java.util.List<EPlanElement> opposingNodes = getAffectedNodes(opposingItems);
        return new ViolationFixes(advisor, acceptedStartTimes, unsatisfiedNodes, opposingNodes);
    }

    /*
     * Utility methods
     */

    // Builds the checkable two-column tree (name / suggested start time) for moved items.
    private Tree createTree(Composite parent) {
        final Tree tree = new Tree(parent, SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | SWT.CHECK | SWT.FULL_SELECTION);
        // new TableColumn(table, SWT.LEFT).setText("");
        new TreeColumn(tree, SWT.LEFT).setText("Name");
        new TreeColumn(tree, SWT.LEFT).setText("Suggested Start Time");
        tree.setFont(parent.getFont());
        tree.addMouseListener(new TreeDoubleClickToggleMouseListener(tree));
        tree.addListener(SWT.Selection, new CheckboxTreeListener());
        return tree;
    }

    // Builds a single-column activity table; checkboxes only when 'checked' is true.
    private Table createActivityTable(Composite parent, boolean checked) {
        final Table table = new Table(parent, SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | (checked ? SWT.CHECK : 0) | SWT.FULL_SELECTION);
        new TableColumn(table, SWT.LEFT).setText("Activity Name");
        table.setFont(parent.getFont());
        table.addMouseListener(new TableDoubleClickToggleMouseListener(table));
        return table;
    }

    /**
     * Re-renders all three widgets from the current fixes. Each widget is guarded with a
     * null check because fixes can be set before createControl() has run.
     */
    private synchronized void updateViolationFixes() {
        if (violationFixes != null) {
            if (movedItems != null) {
                java.util.List<SuggestedStartTime> startTimes = violationFixes.getStartTimes();
                if (startTimes.isEmpty()) {
                    showNoActivitiesToMove(movedItems, "<not moving any activities>");
                } else {
                    showActivitiesToMove(startTimes);
                }
            }
            if (unfixedItems != null) {
                java.util.List<EPlanElement> unsatisfiedNodes = violationFixes.getUnsatisfiedNodes();
                if (unsatisfiedNodes.isEmpty()) {
                    showNoActivitiesToAffect(unfixedItems, "<no unfixed activities>");
                } else {
                    showPlanElementsToAffect(unsatisfiedNodes, unfixedItems);
                }
            }
            if (opposingItems != null) {
                java.util.List<EPlanElement> opposingNodes = violationFixes.getOpposingNodes();
                if (opposingNodes.isEmpty()) {
                    showNoActivitiesToAffect(opposingItems, "<no activities to be selected>");
                } else {
                    showPlanElementsToAffect(opposingNodes, opposingItems);
                }
            }
        }
    }

    // Populates the moved-items tree with sorted suggestions, one item per suggestion,
    // grouped under a shared parent item per activity group.
    private void showActivitiesToMove(java.util.List<SuggestedStartTime> startTimes) {
        movedItems.clearAll(true);
        Map<EActivityGroup, TreeItem> groupToItemMap = new HashMap<EActivityGroup, TreeItem>();
        // copy before sorting so the caller's list is not reordered
        startTimes = new ArrayList<SuggestedStartTime>(startTimes);
        Collections.sort(startTimes, new SuggestionComparator());
        for (SuggestedStartTime suggestion : startTimes) {
            createSuggestionItem(groupToItemMap, suggestion);
        }
        // movedItems.setHeaderVisible(true);
        movedItems.setEnabled(true);
        for (TreeColumn column : movedItems.getColumns()) {
            column.pack();
        }
    }

    // Creates one checked tree item for a suggestion, nesting it under its activity
    // group's item when the suggestion's node lives inside an EActivityGroup.
    private void createSuggestionItem(Map<EActivityGroup, TreeItem> groupToItemMap, SuggestedStartTime suggestion) {
        EObject parent = suggestion.node.eContainer();
        TreeItem item = null;
        if (parent instanceof EActivityGroup) {
            EActivityGroup group = (EActivityGroup) parent;
            final TreeItem groupItem = findGroupItem(groupToItemMap, group);
            item = new TreeItem(groupItem, SWT.NONE);
            groupItem.setExpanded(true);
        } else {
            item = new TreeItem(movedItems, SWT.NONE);
        }
        Date start = suggestion.ideal;
        String activityText = PlanPrinter.getPrintName(suggestion.node);
        String dateText = DATE_STRINGIFIER.getDisplayString(start);
        item.setChecked(true);
        item.setData(suggestion);
        item.setText(0, activityText);
        item.setText(1, dateText);
    }

    // Returns the (lazily created, checked) parent tree item for an activity group.
    private TreeItem findGroupItem(Map<EActivityGroup, TreeItem> groupToItemMap, EActivityGroup group) {
        TreeItem groupItem = groupToItemMap.get(group);
        if (groupItem == null) {
            groupItem = new TreeItem(movedItems, SWT.NONE);
            String groupText = PlanPrinter.getPrintName(group);
            groupItem.setChecked(true);
            groupItem.setText(0, groupText);
            groupItem.setText(1, "");
            groupToItemMap.put(group, groupItem);
        }
        return groupItem;
    }

    // Fills a table with one checked row per plan element and packs its columns.
    private void showPlanElementsToAffect(java.util.List<EPlanElement> nodes, Table items) {
        items.clearAll();
        for (EPlanElement node : nodes) {
            String printName = PlanPrinter.getPrintName(node);
            TableItem item = new TableItem(items, SWT.NONE);
            item.setChecked(true);
            item.setData(node);
            item.setText(0, printName);
        }
        items.setEnabled(true);
        for (TableColumn column : items.getColumns()) {
            column.pack();
        }
    }

    // Shows a single placeholder row in the (disabled) tree when there is nothing to move.
    private void showNoActivitiesToMove(Tree items, String string) {
        items.clearAll(true);
        TreeItem item = new TreeItem(items, SWT.NONE);
        item.setText(string);
        items.setEnabled(false);
        for (TreeColumn column : items.getColumns()) {
            column.pack();
        }
    }

    // Shows a single placeholder row in the (disabled) table when there are no entries.
    private void showNoActivitiesToAffect(Table items, String string) {
        items.clearAll();
        TableItem item = new TableItem(items, SWT.NONE);
        item.setText(string);
        items.setEnabled(false);
        for (TableColumn column : items.getColumns()) {
            column.pack();
        }
    }

    // Walks the whole tree breadth-first and collects the suggestions behind checked
    // items; group rows carry no SuggestedStartTime data and are skipped naturally.
    private java.util.List<SuggestedStartTime> getAcceptedStartTimes() {
        java.util.List<SuggestedStartTime> acceptedStartTimes = new ArrayList<SuggestedStartTime>();
        Queue<TreeItem> treeItems = new LinkedList<TreeItem>(Arrays.asList(movedItems.getItems()));
        while (!treeItems.isEmpty()) {
            TreeItem item = treeItems.poll();
            treeItems.addAll(Arrays.asList(item.getItems()));
            if (item.getChecked()) {
                Object data = item.getData();
                if (data instanceof SuggestedStartTime) {
                    SuggestedStartTime suggestion = (SuggestedStartTime) data;
                    acceptedStartTimes.add(suggestion);
                }
            }
        }
        return acceptedStartTimes;
    }

    // Collects the plan elements behind checked rows; a null table yields an empty list.
    private java.util.List<EPlanElement> getAffectedNodes(Table items) {
        java.util.List<EPlanElement> affectedNodes = new ArrayList<EPlanElement>();
        if (items != null) {
            for (TableItem item : items.getItems()) {
                if (item.getChecked()) {
                    Object data = item.getData();
                    affectedNodes.add((EPlanElement)data);
                }
            }
        }
        return affectedNodes;
    }

    /**
     * Listens to checkbox changes on tree items: checking/unchecking an item cascades to
     * its children, and the parent's checked/grayed state is recomputed from its children
     * (grayed+checked when the children are mixed).
     *
     * @author Andrew
     *
     */
    private final class CheckboxTreeListener implements Listener {
        @Override
        public void handleEvent(Event event) {
            if ((event.detail == SWT.CHECK)
                    && (event.item instanceof TreeItem)) {
                TreeItem item = (TreeItem) event.item;
                item.setGrayed(false);
                boolean checked = item.getChecked();
                // cascade the new state down to all children
                for (TreeItem childItem : item.getItems()) {
                    childItem.setChecked(checked);
                }
                TreeItem parentItem = item.getParentItem();
                if (parentItem != null) {
                    boolean allChecked = true;
                    boolean allUnchecked = true;
                    for (TreeItem siblings : parentItem.getItems()) {
                        if (siblings.getChecked()) {
                            allUnchecked = false;
                        } else {
                            allChecked = false;
                        }
                    }
                    if (allChecked) {
                        parentItem.setGrayed(false);
                        parentItem.setChecked(true);
                    } else if (allUnchecked) {
                        parentItem.setGrayed(false);
                        parentItem.setChecked(false);
                    } else {
                        // mixed children: show the parent as grayed-but-checked
                        parentItem.setGrayed(true);
                        parentItem.setChecked(true);
                    }
                }
            }
        }
    }

    /**
     * This listener will toggle the checkbox on a table item when it is double-clicked.
     *
     * @author Andrew
     *
     */
    private static final class TableDoubleClickToggleMouseListener extends MouseAdapter {
        private final Table table;

        private TableDoubleClickToggleMouseListener(Table table) {
            this.table = table;
        }

        @Override
        public void mouseDoubleClick(MouseEvent event) {
            TableItem item = table.getItem(new Point(event.x,event.y));
            if (item != null) {
                item.setChecked(!item.getChecked());
            }
        }
    }

    /**
     * This listener will toggle the checkbox on a tree item when it is double-clicked.
     *
     * @author Andrew
     *
     */
    private static final class TreeDoubleClickToggleMouseListener extends MouseAdapter {
        private final Tree tree;

        private TreeDoubleClickToggleMouseListener(Tree tree) {
            this.tree = tree;
        }

        @Override
        public void mouseDoubleClick(MouseEvent event) {
            TreeItem item = tree.getItem(new Point(event.x,event.y));
            if (item != null) {
                item.setChecked(!item.getChecked());
            }
        }
    }

    // Opens this page's context-sensitive help before delegating to the default behavior.
    @Override
    public void performHelp() {
        PlatformUI.getWorkbench().getHelpSystem().displayHelp("gov.nasa.arc.spife.core.plan.advisor.fixing.FixingViolationsWizardFixesPage");
        super.performHelp();
    }
}
| |
// @formatter:off
/*
*
* This file is part of streaming pool (http://www.streamingpool.org).
*
* Copyright (c) 2017-present, CERN. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// @formatter:on
package org.streamingpool.core.service.stream;
import static io.reactivex.Flowable.interval;
import static io.reactivex.Flowable.just;
import static io.reactivex.Flowable.merge;
import static io.reactivex.Flowable.never;
import static io.reactivex.Flowable.timer;
import static java.util.Arrays.asList;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.ImmutableSet;
import io.reactivex.Flowable;
import io.reactivex.flowables.ConnectableFlowable;
import io.reactivex.schedulers.TestScheduler;
import io.reactivex.subscribers.TestSubscriber;
import org.streamingpool.core.conf.PoolConfiguration;
import org.streamingpool.core.service.StreamId;
import org.streamingpool.core.service.impl.LocalPool;
import org.streamingpool.core.service.impl.PoolContent;
import org.streamingpool.core.service.streamfactory.DelayedStreamFactory;
import org.streamingpool.core.service.streamfactory.OverlapBufferStreamFactory;
import org.streamingpool.core.service.streamid.BufferSpecification;
import org.streamingpool.core.service.streamid.BufferSpecification.EndStreamMatcher;
import org.streamingpool.core.service.streamid.OverlapBufferStreamId;
/**
 * Tests for buffers created by {@link OverlapBufferStreamFactory}: a buffer opens on every
 * emission of a "start" stream and closes on a matching "end" emission and/or a timeout.
 * All timing is driven by a {@link TestScheduler}, so tests advance virtual time instead of
 * sleeping.
 */
public class OverlapBufferStreamTest {
    private LocalPool pool;
    // Receives the buffered lists produced by the stream under test.
    private TestSubscriber<List<Long>> testSubscriber;
    private PoolConfiguration poolConfiguration;
    private TestScheduler testScheduler;

    @Before
    public void setUp() {
        OverlapBufferStreamFactory factory = new OverlapBufferStreamFactory();
        testScheduler = new TestScheduler();
        poolConfiguration = new PoolConfiguration(testScheduler);
        pool = new LocalPool(asList(factory, new DelayedStreamFactory()), poolConfiguration, new PoolContent());
        testSubscriber = new TestSubscriber<>();
    }

    /**
     * Sanity check of the plain RxJava buffer operator (no pool involved): when the start
     * stream emits once and the per-buffer close selector never emits, the buffer still
     * terminates once the source completes.
     */
    @Test
    public void ifStartEmitsOnlyOnceBeforeDataStreamNeverEnds() throws InterruptedException {
        CountDownLatch sync = new CountDownLatch(1);
        ConnectableFlowable<?> sourceStream = just(0L).publish();
        ConnectableFlowable<?> startStream = just(new Object()).publish();
        sourceStream.buffer(startStream, opening -> never()).doOnTerminate(sync::countDown)
                .subscribe(System.out::println);
        sourceStream.connect();
        startStream.connect();
        // This test uses real time (no TestScheduler), hence the bounded await.
        sync.await(5, SECONDS);
        assertThat(sync.getCount()).isEqualTo(0L);
    }

    @Test
    public void dataStreamEndsBeforeEndStreamEmitsShouldBufferEverything() {
        StreamId<Long> sourceId = registerRx(merge(never(), interval(1, SECONDS, testScheduler).take(5)));
        StreamId<Object> startId = registerRx(merge(just(new Object()).delay(2, SECONDS, testScheduler), never()));
        StreamId<Object> endId = registerRx(never());
        subscribe(OverlapBufferStreamId.of(sourceId,
                BufferSpecification.ofStartEnd(startId, Collections.singleton(EndStreamMatcher.endingOnEvery(endId)))));
        testScheduler.advanceTimeBy(10, DAYS);
        // No onNext event should happen if the end trigger never yields a value
        // (and the start stream does not complete).
        testSubscriber.assertValueCount(0);
    }

    @Test
    public void dataStreamEndsBeforeStartStreamEmitsShouldNotEmitAnything() {
        StreamId<Long> sourceId = registerRx(interval(1, SECONDS, testScheduler).take(10));
        StreamId<Object> startId = registerRx(never());
        StreamId<Object> endId = registerRx(never());
        OverlapBufferStreamId<Long> buffer = OverlapBufferStreamId.of(sourceId,
                BufferSpecification.ofStartEnd(startId, Collections.singleton(EndStreamMatcher.endingOnEvery(endId))));
        subscribe(buffer);
        testScheduler.advanceTimeBy(365, TimeUnit.DAYS);
        // Without a start trigger no buffer is ever opened.
        testSubscriber.assertEmpty();
    }

    /**
     * The end stream is the start stream delayed by 3s, so each buffer closes when the
     * matching (equal) start object arrives on the end stream. Buffers are checked one
     * 3-second window at a time.
     */
    @Test
    public void bufferEndsStreamUsingDelayedStart() {
        Flowable<Object> startStream = interval(0, 3, SECONDS, testScheduler).cast(Object.class);
        StreamId<Long> sourceId = registerRx(interval(1, 1000, MILLISECONDS, testScheduler).take(10));
        StreamId<Object> startId = registerRx(startStream);
        StreamId<Object> endId = registerRx(startStream.delay(3, SECONDS, testScheduler));
        subscribe(OverlapBufferStreamId.of(sourceId, BufferSpecification
                .ofStartEnd(startId, Collections.singleton(EndStreamMatcher.endingOnMatch(endId, Objects::equals)))));
        testScheduler.advanceTimeBy(3, SECONDS);
        testSubscriber.assertValueCount(1);
        testSubscriber.assertValueAt(0, v -> asList(0L, 1L, 2L).equals(v));
        testScheduler.advanceTimeBy(3, SECONDS);
        testSubscriber.assertValueCount(2);
        testSubscriber.assertValueAt(1, v -> asList(3L, 4L, 5L).equals(v));
        testScheduler.advanceTimeBy(3, SECONDS);
        testSubscriber.assertValueCount(3);
        testSubscriber.assertValueAt(2, v -> asList(6L, 7L, 8L).equals(v));
        testScheduler.advanceTimeBy(3, SECONDS);
        testSubscriber.assertValueCount(4);
        testSubscriber.assertValueAt(3, v -> asList(9L).equals(v));
    }

    /**
     * The end stream never emits, so every buffer is closed by the 5.2s timeout. Buffers
     * opened every 3s therefore overlap and share elements with their neighbors.
     */
    @Test
    public void bufferEndsWithTimeout() {
        Flowable<Object> startStream = interval(0, 3, SECONDS, testScheduler).take(3).cast(Object.class);
        StreamId<Long> sourceId = registerRx(interval(0, 1, SECONDS, testScheduler).take(10));
        StreamId<Object> startId = registerRx(startStream);
        StreamId<Object> endId = registerRx(never());
        Flowable<?> timeout = timer(5200, MILLISECONDS, testScheduler);
        subscribe(OverlapBufferStreamId.of(sourceId, BufferSpecification
                .ofStartEndTimeout(startId, ImmutableSet.of(EndStreamMatcher.endingOnEvery(endId)), timeout)));
        testScheduler.advanceTimeBy(5300L, MILLISECONDS);
        testSubscriber.assertValueCount(1);
        testSubscriber.assertValueAt(0, v -> asList(0L, 1L, 2L, 3L, 4L, 5L).equals(v));
        testScheduler.advanceTimeBy(3100L, MILLISECONDS);
        testSubscriber.assertValueCount(2);
        testSubscriber.assertValueAt(1, v -> asList(3L, 4L, 5L, 6L, 7L, 8L).equals(v));
        testScheduler.advanceTimeBy(3100L, MILLISECONDS);
        testSubscriber.assertValueCount(3);
        testSubscriber.assertValueAt(2, v -> asList(6L, 7L, 8L, 9L).equals(v));
    }

    /**
     * Two start events arrive at the same virtual time, so the two resulting buffers cover
     * the exact same window and must contain the same elements.
     */
    @Test
    public void bufferCompletelyOverlap() {
        StreamId<Long> sourceId = registerRx(interval(1, SECONDS, testScheduler).take(10));
        StreamId<Object> startId = registerRx(just(new Object(), new Object()).delay(5500, MILLISECONDS, testScheduler).onBackpressureBuffer());
        StreamId<Object> endId = registerRx(never());
        Flowable<?> timeout = timer(5200, MILLISECONDS, testScheduler);
        subscribe(OverlapBufferStreamId.of(sourceId, BufferSpecification
                .ofStartEndTimeout(startId, ImmutableSet.of(EndStreamMatcher.endingOnEvery(endId)), timeout)));
        testScheduler.advanceTimeBy(11, SECONDS);
        testSubscriber.awaitCount(2);
        testSubscriber.assertValueCount(2);
        assertThat( testSubscriber.values().get(0)).containsExactlyElementsOf( testSubscriber.values().get(1));
    }

    // Discovers the buffered stream from the pool and attaches the shared test subscriber.
    private void subscribe(OverlapBufferStreamId<Long> bufferId) {
        Flowable.fromPublisher(pool.discover(bufferId)).subscribe(testSubscriber);
    }

    // Provides the given Flowable to the pool under a fresh mocked StreamId and returns that id.
    private <T> StreamId<T> registerRx(Flowable<T> stream) {
        @SuppressWarnings("unchecked")
        StreamId<T> id = mock(StreamId.class);
        pool.provide(id, stream);
        return id;
    }
}
| |
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2013-2016 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
package org.cocos2dx.lib;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Looper;
import android.text.Editable;
import android.text.InputType;
import android.text.Selection;
import android.text.TextWatcher;
import android.util.Log;
import android.util.SparseArray;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.FrameLayout;
import android.widget.TextView;
/**
 * Bridge between native cocos2d-x edit boxes and Android {@code Cocos2dxEditBox} widgets.
 * Widgets are kept in a static registry keyed by an integer tag, and all mutating calls are
 * posted to the UI thread, while editing callbacks are forwarded to native code on the GL
 * thread. The static {@code __editBox*} wrappers exist so native code can call back into
 * Java through a stable public entry point.
 *
 * NOTE(review): all state is static, so this helper assumes a single activity/GL surface
 * at a time — confirm against the hosting Cocos2dxActivity lifecycle.
 */
public class Cocos2dxEditBoxHelper {
    private static final String TAG = Cocos2dxEditBoxHelper.class.getSimpleName();
    private static Cocos2dxActivity mCocos2dxActivity;
    private static ResizeLayout mFrameLayout;
    // Registry of live edit boxes, keyed by the tag returned from createEditBox().
    private static SparseArray<Cocos2dxEditBox> mEditBoxArray;
    // Monotonically increasing tag; incremented once per createEditBox() call.
    private static int mViewTag = 0;
    // Left padding in (unscaled) pixels; scaled by the GL view's scaleX in getPadding().
    private static float mPadding = 5.0f;

    //Call native methods
    private static native void editBoxEditingDidBegin(int index);
    // Public wrapper so native code can invoke the private native callback.
    public static void __editBoxEditingDidBegin(int index){
        editBoxEditingDidBegin(index);
    }
    private static native void editBoxEditingChanged(int index, String text);
    public static void __editBoxEditingChanged(int index, String text){
        editBoxEditingChanged(index, text);
    }
    private static native void editBoxEditingDidEnd(int index, String text);
    public static void __editBoxEditingDidEnd(int index, String text){
        editBoxEditingDidEnd(index, text);
    }
    private static native void editBoxEditingReturn(int index);
    public static void __editBoxEditingReturn(int index) {
        editBoxEditingReturn(index);
    }

    /**
     * Initializes the static registry and caches the layout/activity. Note the instance
     * constructor only populates static fields.
     */
    public Cocos2dxEditBoxHelper(ResizeLayout layout) {
        Cocos2dxEditBoxHelper.mFrameLayout = layout;
        Cocos2dxEditBoxHelper.mCocos2dxActivity = (Cocos2dxActivity) Cocos2dxActivity.getContext();
        Cocos2dxEditBoxHelper.mEditBoxArray = new SparseArray<Cocos2dxEditBox>();
    }

    // Returns the left padding scaled by the OpenGL view's horizontal scale factor.
    public static int getPadding(float scaleX) {
        Resources r = mCocos2dxActivity.getResources();
        return (int) (mPadding * scaleX);
    }

    /**
     * Creates a hidden edit box at the given position/size (asynchronously, on the UI
     * thread) and returns its tag immediately. The tag can be used with the other static
     * setters even before the UI-thread creation has run, since those setters also post
     * to the UI thread.
     */
    public static int createEditBox(final int left, final int top, final int width, final int height, final float scaleX) {
        // Capture the tag now; mViewTag is incremented on return, before the Runnable runs.
        final int index = mViewTag;
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                final Cocos2dxEditBox editBox = new Cocos2dxEditBox(mCocos2dxActivity);
                editBox.setFocusable(true);
                editBox.setFocusableInTouchMode(true);
                editBox.setInputFlag(5); //kEditBoxInputFlagLowercaseAllCharacters
                editBox.setInputMode(6); //kEditBoxInputModeSingleLine
                editBox.setReturnType(0); //kKeyboardReturnTypeDefault
                editBox.setHintTextColor(Color.GRAY);
                // Starts hidden; shown later via setVisible() / focus handling.
                editBox.setVisibility(View.GONE);
                editBox.setBackgroundColor(Color.TRANSPARENT);
                editBox.setTextColor(Color.WHITE);
                editBox.setSingleLine();
                editBox.setOpenGLViewScaleX(scaleX);
                editBox.setPadding(getPadding(scaleX), 0, 0, 0);
                FrameLayout.LayoutParams lParams = new FrameLayout.LayoutParams(
                        FrameLayout.LayoutParams.WRAP_CONTENT,
                        FrameLayout.LayoutParams.WRAP_CONTENT);
                lParams.leftMargin = left;
                lParams.topMargin = top;
                lParams.width = width;
                lParams.height = height;
                lParams.gravity = Gravity.TOP | Gravity.LEFT;
                mFrameLayout.addView(editBox, lParams);
                // Tag doubles as a "has ever been focused" flag, read in afterTextChanged().
                editBox.setTag(false);
                editBox.addTextChangedListener(new TextWatcher() {
                    @Override
                    public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                    }
                    @Override
                    public void onTextChanged(CharSequence s, int start, int before, int count) {
                    }
                    //http://stackoverflow.com/questions/21713246/addtextchangedlistener-and-ontextchanged-are-always-called-when-android-fragment
                    @Override
                    public void afterTextChanged(final Editable s) {
                        // Only forward user-driven edits; programmatic setText() calls are suppressed.
                        if (!editBox.getChangedTextProgrammatically()) {
                            if ((Boolean) editBox.getTag()) {
                                mCocos2dxActivity.runOnGLThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        Cocos2dxEditBoxHelper.__editBoxEditingChanged(index, s.toString());
                                    }
                                });
                            }
                        }
                        editBox.setChangedTextProgrammatically(false);
                    }
                });
                editBox.setOnFocusChangeListener(new View.OnFocusChangeListener() {
                    @Override
                    public void onFocusChange(View v, boolean hasFocus) {
                        editBox.setTag(true);
                        editBox.setChangedTextProgrammatically(false);
                        if (hasFocus) {
                            mCocos2dxActivity.runOnGLThread(new Runnable() {
                                @Override
                                public void run() {
                                    Cocos2dxEditBoxHelper.__editBoxEditingDidBegin(index);
                                }
                            });
                            // Place the cursor at the end of the existing text.
                            editBox.setSelection(editBox.getText().length());
                            mFrameLayout.setEnableForceDoLayout(true);
                            mCocos2dxActivity.getGLSurfaceView().setSoftKeyboardShown(true);
                            Log.d(TAG, "edit box get focus");
                        } else {
                            editBox.setVisibility(View.GONE);
                            // Copy the text into a fresh String so the GL thread never reads
                            // an Editable that the UI thread may still be mutating.
                            final String text = new String(editBox.getText().toString());
                            mCocos2dxActivity.runOnGLThread(new Runnable() {
                                @Override
                                public void run() {
                                    Cocos2dxEditBoxHelper.__editBoxEditingDidEnd(index, text);
                                }
                            });
                            mCocos2dxActivity.hideVirtualButton();
                            mFrameLayout.setEnableForceDoLayout(false);
                            Log.d(TAG, "edit box lose focus");
                        }
                    }
                });
                editBox.setOnKeyListener(new View.OnKeyListener() {
                    public boolean onKey(View v, int keyCode, KeyEvent event) {
                        // If the event is a key-down event on the "enter" button
                        if ((event.getAction() == KeyEvent.ACTION_DOWN) &&
                                (keyCode == KeyEvent.KEYCODE_ENTER)) {
                            //if editbox doesn't support multiline, just hide the keyboard
                            if ((editBox.getInputType() & InputType.TYPE_TEXT_FLAG_MULTI_LINE) != InputType.TYPE_TEXT_FLAG_MULTI_LINE) {
                                Cocos2dxEditBoxHelper.runEditBoxEditingReturnInGLThread(index);
                                Cocos2dxEditBoxHelper.closeKeyboardOnUiThread(index);
                                return true;
                            } else {
                                Cocos2dxEditBoxHelper.runEditBoxEditingReturnInGLThread(index);
                            }
                        }
                        return false;
                    }
                });
                editBox.setOnEditorActionListener(new TextView.OnEditorActionListener() {
                    @Override
                    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                        if (actionId == EditorInfo.IME_ACTION_DONE) {
                            Cocos2dxEditBoxHelper.closeKeyboardOnUiThread(index);
                        }
                        return false;
                    }
                });
                mEditBoxArray.put(index, editBox);
            }
        });
        // Post-increment: returns the tag captured above and advances it for the next box.
        return mViewTag++;
    }

    // Forwards the "return pressed" event to native code on the GL thread.
    private static void runEditBoxEditingReturnInGLThread(final int index) {
        mCocos2dxActivity.runOnGLThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBoxHelper.__editBoxEditingReturn(index);
            }
        });
    }

    /** Removes the edit box with the given tag from the registry and layout (UI thread). */
    public static void removeEditBox(final int index) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    mEditBoxArray.remove(index);
                    mFrameLayout.removeView(editBox);
                    Log.e(TAG, "remove EditBox");
                }
            }
        });
    }

    /**
     * Sets the typeface and size. A name ending in ".ttf" is loaded as an asset font;
     * any other non-empty name is resolved as a system font; an empty name keeps the
     * default typeface. A negative fontSize leaves the current size unchanged.
     */
    public static void setFont(final int index, final String fontName, final float fontSize){
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    Typeface tf;
                    if (!fontName.isEmpty()) {
                        if (fontName.endsWith(".ttf")) {
                            try {
                                tf = Cocos2dxTypefaces.get(mCocos2dxActivity.getContext(), fontName);
                            } catch (final Exception e) {
                                Log.e("Cocos2dxEditBoxHelper", "error to create ttf type face: "
                                        + fontName);
                                // The font file may not be found; fall back to a system font.
                                tf = Typeface.create(fontName, Typeface.NORMAL);
                            }
                        } else {
                            tf = Typeface.create(fontName, Typeface.NORMAL);
                        }
                    }else{
                        tf = Typeface.DEFAULT;
                    }
                    if (fontSize >= 0){
                        editBox.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize);
                    }
                    editBox.setTypeface(tf);
                }
            }
        });
    }

    /** Sets the text color from RGBA components (0-255 each). */
    public static void setFontColor(final int index, final int red, final int green, final int blue, final int alpha){
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setTextColor(Color.argb(alpha, red, green, blue));
                }
            }
        });
    }

    /** Sets the placeholder (hint) text shown while the box is empty. */
    public static void setPlaceHolderText(final int index, final String text){
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setHint(text);
                }
            }
        });
    }

    /** Sets the placeholder (hint) text color from RGBA components (0-255 each). */
    public static void setPlaceHolderTextColor(final int index, final int red, final int green, final int blue, final int alpha){
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setHintTextColor(Color.argb(alpha, red, green, blue));
                }
            }
        });
    }

    /** Limits the number of characters that can be entered. */
    public static void setMaxLength(final int index, final int maxLength) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setMaxLength(maxLength);
                }
            }
        });
    }

    /** Shows or hides the edit box. */
    public static void setVisible(final int index, final boolean visible) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setVisibility(visible ? View.VISIBLE : View.GONE);
                }
            }
        });
    }

    /**
     * Replaces the text programmatically (cursor moves to the end). The programmatic flag
     * suppresses the editing-changed callback for this change.
     */
    public static void setText(final int index, final String text){
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setChangedTextProgrammatically(true);
                    editBox.setText(text);
                    int position = text.length();
                    editBox.setSelection(position);
                }
            }
        });
    }

    /** Sets the keyboard return-key type (cocos2d-x kKeyboardReturnType* constant). */
    public static void setReturnType(final int index, final int returnType) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setReturnType(returnType);
                }
            }
        });
    }

    /** Sets the input mode (cocos2d-x kEditBoxInputMode* constant). */
    public static void setInputMode(final int index, final int inputMode) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setInputMode(inputMode);
                }
            }
        });
    }

    /** Sets the input flag (cocos2d-x kEditBoxInputFlag* constant). */
    public static void setInputFlag(final int index, final int inputFlag) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setInputFlag(inputFlag);
                }
            }
        });
    }

    /** Updates the edit box's on-screen rectangle. */
    public static void setEditBoxViewRect(final int index, final int left, final int top, final int maxWidth, final int maxHeight) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBox editBox = mEditBoxArray.get(index);
                if (editBox != null) {
                    editBox.setEditBoxViewRect(left, top, maxWidth, maxHeight);
                }
            }
        });
    }

    /** Requests focus and shows the soft keyboard (callable from any thread). */
    public static void openKeyboard(final int index) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                openKeyboardOnUiThread(index);
            }
        });
    }

    // Must run on the UI thread; guarded by an explicit Looper check.
    private static void openKeyboardOnUiThread(int index) {
        if (Looper.myLooper() != Looper.getMainLooper()) {
            Log.e(TAG, "openKeyboardOnUiThread doesn't run on UI thread!");
            return;
        }
        final InputMethodManager imm = (InputMethodManager) mCocos2dxActivity.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
        Cocos2dxEditBox editBox = mEditBoxArray.get(index);
        if (null != editBox) {
            editBox.requestFocus();
            mCocos2dxActivity.getGLSurfaceView().requestLayout();
            imm.showSoftInput(editBox, 0);
            mCocos2dxActivity.getGLSurfaceView().setSoftKeyboardShown(true);
        }
    }

    // Must run on the UI thread; guarded by an explicit Looper check.
    private static void closeKeyboardOnUiThread(int index) {
        if (Looper.myLooper() != Looper.getMainLooper()) {
            Log.e(TAG, "closeKeyboardOnUiThread doesn't run on UI thread!");
            return;
        }
        final InputMethodManager imm = (InputMethodManager) mCocos2dxActivity.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
        Cocos2dxEditBox editBox = mEditBoxArray.get(index);
        if (null != editBox) {
            imm.hideSoftInputFromWindow(editBox.getWindowToken(), 0);
            mCocos2dxActivity.getGLSurfaceView().setSoftKeyboardShown(false);
            mCocos2dxActivity.getGLSurfaceView().requestFocus();
            // can take effect after GLSurfaceView has focus
            mCocos2dxActivity.hideVirtualButton();
        }
    }

    // Note that closeKeyboard will be invoked on GL thread
    public static void closeKeyboard(final int index) {
        mCocos2dxActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                closeKeyboardOnUiThread(index);
            }
        });
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.cluster;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.TransportOpenIndexAction;
import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.action.support.master.TransportMasterNodeActionUtils;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.EmptyClusterInfoService;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.AliasValidator;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService;
import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.FailedShard;
import org.elasticsearch.cluster.routing.allocation.RandomAllocationDeciderTests;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.gateway.TestGatewayAllocator;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.elasticsearch.env.Environment.PATH_HOME_SETTING;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ClusterStateChanges extends AbstractComponent {
private final AllocationService allocationService;
private final ClusterService clusterService;
private final ShardStateAction.ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor;
private final ShardStateAction.ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor;
// transport actions
private final TransportCloseIndexAction transportCloseIndexAction;
private final TransportOpenIndexAction transportOpenIndexAction;
private final TransportDeleteIndexAction transportDeleteIndexAction;
private final TransportUpdateSettingsAction transportUpdateSettingsAction;
private final TransportClusterRerouteAction transportClusterRerouteAction;
private final TransportCreateIndexAction transportCreateIndexAction;
/**
 * Wires up the master-level services and transport actions needed to apply cluster state
 * changes in tests, backed by mocks instead of a running node. ThreadPool and Transport
 * are deliberately null because the wired code paths never touch them.
 */
public ClusterStateChanges() {
    super(Settings.builder().put(PATH_HOME_SETTING.getKey(), "dummy").build());
    // Allocation service with a randomized decider so shard placement varies across runs.
    allocationService = new AllocationService(settings, new AllocationDeciders(settings,
        new HashSet<>(Arrays.asList(new SameShardAllocationDecider(settings),
            new ReplicaAfterPrimaryActiveAllocationDecider(settings),
            new RandomAllocationDeciderTests.RandomAllocationDecider(getRandom())))),
        new TestGatewayAllocator(), new BalancedShardsAllocator(settings),
        EmptyClusterInfoService.INSTANCE);
    shardFailedClusterStateTaskExecutor = new ShardStateAction.ShardFailedClusterStateTaskExecutor(allocationService, null, logger);
    shardStartedClusterStateTaskExecutor = new ShardStateAction.ShardStartedClusterStateTaskExecutor(allocationService, logger);
    ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
    ActionFilters actionFilters = new ActionFilters(Collections.emptySet());
    IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(settings);
    DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterSettings);
    Environment environment = new Environment(settings);
    ThreadPool threadPool = null; // it's not used
    Transport transport = null; // it's not used
    // mocks
    clusterService = mock(ClusterService.class);
    IndicesService indicesService = mock(IndicesService.class);
    // MetaDataCreateIndexService creates indices using its IndicesService instance to check mappings -> fake it here
    try {
        when(indicesService.createIndex(any(NodeServicesProvider.class), any(IndexMetaData.class), anyList()))
            .then(invocationOnMock -> {
                // Return a minimal IndexService mock: just enough for mapping checks to pass.
                IndexService indexService = mock(IndexService.class);
                IndexMetaData indexMetaData = (IndexMetaData)invocationOnMock.getArguments()[1];
                when(indexService.index()).thenReturn(indexMetaData.getIndex());
                MapperService mapperService = mock(MapperService.class);
                when(indexService.mapperService()).thenReturn(mapperService);
                when(mapperService.docMappers(anyBoolean())).thenReturn(Collections.emptyList());
                when(indexService.getIndexEventListener()).thenReturn(new IndexEventListener() {});
                return indexService;
            });
    } catch (IOException e) {
        // createIndex declares IOException, but the stubbing itself cannot actually throw.
        throw new IllegalStateException(e);
    }
    // services
    TransportService transportService = new TransportService(settings, transport, threadPool,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR);
    MetaDataIndexUpgradeService metaDataIndexUpgradeService = new MetaDataIndexUpgradeService(settings, null, null) {
        // metaData upgrader should do nothing
        @Override
        public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) {
            return indexMetaData;
        }
    };
    NodeServicesProvider nodeServicesProvider = new NodeServicesProvider(threadPool, null, null, null, null, null, clusterService);
    MetaDataIndexStateService indexStateService = new MetaDataIndexStateService(settings, clusterService, allocationService,
        metaDataIndexUpgradeService, nodeServicesProvider, indicesService);
    MetaDataDeleteIndexService deleteIndexService = new MetaDataDeleteIndexService(settings, clusterService, allocationService);
    MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(settings, clusterService,
        allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, nodeServicesProvider);
    MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(settings, clusterService, indicesService,
        allocationService, new AliasValidator(settings), environment,
        nodeServicesProvider, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, threadPool);
    // Transport actions exposed through the public helper methods below.
    transportCloseIndexAction = new TransportCloseIndexAction(settings, transportService, clusterService, threadPool,
        indexStateService, clusterSettings, actionFilters, indexNameExpressionResolver, destructiveOperations);
    transportOpenIndexAction = new TransportOpenIndexAction(settings, transportService,
        clusterService, threadPool, indexStateService, actionFilters, indexNameExpressionResolver, destructiveOperations);
    transportDeleteIndexAction = new TransportDeleteIndexAction(settings, transportService,
        clusterService, threadPool, deleteIndexService, actionFilters, indexNameExpressionResolver, destructiveOperations);
    transportUpdateSettingsAction = new TransportUpdateSettingsAction(settings,
        transportService, clusterService, threadPool, metaDataUpdateSettingsService, actionFilters, indexNameExpressionResolver);
    transportClusterRerouteAction = new TransportClusterRerouteAction(settings,
        transportService, clusterService, threadPool, allocationService, actionFilters, indexNameExpressionResolver);
    transportCreateIndexAction = new TransportCreateIndexAction(settings,
        transportService, clusterService, threadPool, createIndexService, actionFilters, indexNameExpressionResolver);
}
public ClusterState createIndex(ClusterState state, CreateIndexRequest request) {
return execute(transportCreateIndexAction, request, state);
}
public ClusterState closeIndices(ClusterState state, CloseIndexRequest request) {
return execute(transportCloseIndexAction, request, state);
}
public ClusterState openIndices(ClusterState state, OpenIndexRequest request) {
return execute(transportOpenIndexAction, request, state);
}
public ClusterState deleteIndices(ClusterState state, DeleteIndexRequest request) {
return execute(transportDeleteIndexAction, request, state);
}
public ClusterState updateSettings(ClusterState state, UpdateSettingsRequest request) {
return execute(transportUpdateSettingsAction, request, state);
}
public ClusterState reroute(ClusterState state, ClusterRerouteRequest request) {
return execute(transportClusterRerouteAction, request, state);
}
/**
 * Delegates directly to the allocation service to deassociate dead nodes
 * from the given cluster state.
 *
 * @param clusterState the cluster state to update
 * @param reroute      whether a reroute should be performed afterwards
 * @param reason       reason string passed through to the allocation service
 * @return the updated cluster state
 */
public ClusterState deassociateDeadNodes(ClusterState clusterState, boolean reroute, String reason) {
    return allocationService.deassociateDeadNodes(clusterState, reroute, reason);
}
/**
 * Applies the given shard failures to the cluster state by feeding them through the
 * shard-failed cluster state task executor and returning the resulting state.
 * Any exception thrown by the executor is rethrown as a runtime exception.
 *
 * @param clusterState the cluster state to update
 * @param failedShards the shards that failed
 * @return the cluster state after processing the failures
 */
public ClusterState applyFailedShards(ClusterState clusterState, List<FailedShard> failedShards) {
    // Translate each failure into the executor's task-entry representation.
    List<ShardStateAction.ShardEntry> shardEntries = failedShards.stream()
        .map(shard -> new ShardStateAction.ShardEntry(
            shard.getRoutingEntry().shardId(),
            shard.getRoutingEntry().allocationId().getId(),
            0L,
            shard.getMessage(),
            shard.getFailure()))
        .collect(Collectors.toList());
    try {
        return shardFailedClusterStateTaskExecutor.execute(clusterState, shardEntries).resultingState;
    } catch (Exception e) {
        throw ExceptionsHelper.convertToRuntime(e);
    }
}
/**
 * Applies the given started shards to the cluster state by feeding them through the
 * shard-started cluster state task executor and returning the resulting state.
 * Any exception thrown by the executor is rethrown as a runtime exception.
 *
 * @param clusterState  the cluster state to update
 * @param startedShards the shards that started
 * @return the cluster state after processing the started shards
 */
public ClusterState applyStartedShards(ClusterState clusterState, List<ShardRouting> startedShards) {
    // Translate each started shard into the executor's task-entry representation.
    List<ShardStateAction.ShardEntry> shardEntries = startedShards.stream()
        .map(shard -> new ShardStateAction.ShardEntry(
            shard.shardId(),
            shard.allocationId().getId(),
            0L,
            "shard started",
            null))
        .collect(Collectors.toList());
    try {
        return shardStartedClusterStateTaskExecutor.execute(clusterState, shardEntries).resultingState;
    } catch (Exception e) {
        throw ExceptionsHelper.convertToRuntime(e);
    }
}
/**
 * Runs a transport master-node action against the given cluster state and returns
 * the state produced by the update task that the action submits.
 * Checked exceptions from the master operation are wrapped in a RuntimeException.
 *
 * @param masterNodeAction the action to run
 * @param request          the request passed to the action
 * @param clusterState     the starting cluster state
 * @return the cluster state after the action's update task has executed
 */
private <Request extends MasterNodeRequest<Request>, Response extends ActionResponse> ClusterState execute(
    TransportMasterNodeAction<Request, Response> masterNodeAction, Request request, ClusterState clusterState) {
    return executeClusterStateUpdateTask(clusterState, () -> {
        try {
            // Drives the master operation directly; the stubbed clusterService captures
            // the update task the operation submits (see executeClusterStateUpdateTask).
            TransportMasterNodeActionUtils.runMasterOperation(masterNodeAction, request, clusterState, new PlainActionFuture<>());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
}
/**
 * Stubs the (Mockito-mocked) cluster service so that any cluster state update task
 * submitted while {@code runnable} runs is executed immediately against {@code state},
 * then returns the state that task produced. Fails the test if no task was submitted.
 *
 * @param state    the cluster state the captured task is executed against
 * @param runnable the code expected to submit exactly one update task
 * @return the cluster state produced by the captured task
 */
private ClusterState executeClusterStateUpdateTask(ClusterState state, Runnable runnable) {
    // Single-element array used as a mutable holder writable from the doAnswer lambda.
    ClusterState[] result = new ClusterState[1];
    doAnswer(invocationOnMock -> {
        // Argument 1 of submitStateUpdateTask(String, ClusterStateUpdateTask) is the task.
        ClusterStateUpdateTask task = (ClusterStateUpdateTask)invocationOnMock.getArguments()[1];
        result[0] = task.execute(state);
        return null;
    }).when(clusterService).submitStateUpdateTask(anyString(), any(ClusterStateUpdateTask.class));
    runnable.run();
    assertThat(result[0], notNullValue());
    return result[0];
}
}
| |
package org.basex.query.value.map;
import static org.basex.query.QueryError.*;
import static org.basex.query.QueryText.*;
import org.basex.query.*;
import org.basex.query.util.collation.*;
import org.basex.query.util.list.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.query.value.type.*;
import org.basex.util.*;
/**
* A single binding of a {@link XQMap}.
*
* @author BaseX Team 2005-20, BSD License
* @author Leo Woerteler
*/
final class TrieLeaf extends TrieNode {
  /** Hash code of the key, stored for performance. */
  final int hash;
  /** Key of this binding. */
  final Item key;
  /** Value of this binding. */
  final Value value;

  /**
   * Constructor.
   * @param hash hash code of the key
   * @param key key
   * @param value value
   */
  TrieLeaf(final int hash, final Item key, final Value value) {
    super(1);
    this.hash = hash;
    this.key = key;
    this.value = value;
    assert verify();
  }

  // Inserts (ky -> vl). Replaces this binding when the key is equal, creates a
  // collision list when only the hashes collide, otherwise splits into a branch.
  @Override
  TrieNode put(final int hs, final Item ky, final Value vl, final int level, final InputInfo ii)
      throws QueryException {
    // same hash, replace or merge
    if(hs == hash) return key.sameKey(ky, ii) ? new TrieLeaf(hs, ky, vl) :
      new TrieList(hash, key, value, ky, vl);
    // different hash, branch
    final TrieNode[] ch = new TrieNode[KIDS];
    final int a = key(hs, level), b = key(hash, level);
    final int used;
    if(a == b) {
      // both hashes map to the same slot at this level: descend one level deeper
      ch[a] = put(hs, ky, vl, level + 1, ii);
      used = 1 << a;
    } else {
      ch[a] = new TrieLeaf(hs, ky, vl);
      ch[b] = this;
      used = 1 << a | 1 << b;
    }
    return new TrieBranch(ch, used, 2);
  }

  // Removes the binding; returns null (empty node) on a match, this node otherwise.
  @Override
  TrieNode delete(final int hs, final Item ky, final int level, final InputInfo ii)
      throws QueryException {
    return hs == hash && key.sameKey(ky, ii) ? null : this;
  }

  // Returns the bound value, or null if hash or key do not match.
  @Override
  Value get(final int hs, final Item ky, final int level, final InputInfo ii)
      throws QueryException {
    return hs == hash && key.sameKey(ky, ii) ? value : null;
  }

  // Checks hash first (cheap), then key equality.
  @Override
  boolean contains(final int hs, final Item ky, final int level, final InputInfo ii)
      throws QueryException {
    return hs == hash && key.sameKey(ky, ii);
  }

  // Double dispatch: lets the other node's add(TrieLeaf, ...) handle the merge.
  @Override
  TrieNode addAll(final TrieNode node, final int level, final MergeDuplicates merge,
      final QueryContext qc, final InputInfo ii) throws QueryException {
    return node.add(this, level, merge, qc, ii);
  }

  // Merges another leaf into this one, honoring the duplicate-merge policy on an
  // equal key; otherwise builds a collision list or a deeper branch.
  @Override
  TrieNode add(final TrieLeaf leaf, final int level, final MergeDuplicates merge,
      final QueryContext qc, final InputInfo ii) throws QueryException {
    qc.checkStop();
    if(hash == leaf.hash) {
      // equal hashes but distinct keys: keep both in a collision list
      if(!key.sameKey(leaf.key, ii))
        return new TrieList(hash, key, value, leaf.key, leaf.value);
      switch(merge) {
        case USE_FIRST:
        case UNSPECIFIED:
          return leaf;
        case USE_LAST:
          return this;
        case COMBINE:
          // concatenates the other leaf's value before this one's
          return new TrieLeaf(hash, key, ValueBuilder.concat(leaf.value, value, qc));
        default:
          throw MERGE_DUPLICATE_X.get(ii, key);
      }
    }
    final TrieNode[] ch = new TrieNode[KIDS];
    final int k = key(hash, level), ok = key(leaf.hash, level), nu;
    // same key? add recursively
    if(k == ok) {
      ch[k] = add(leaf, level + 1, merge, qc, ii);
      nu = 1 << k;
    } else {
      ch[k] = this;
      ch[ok] = leaf;
      nu = 1 << k | 1 << ok;
    }
    return new TrieBranch(ch, nu, 2);
  }

  // Merges this leaf into a collision list: replaces the matching entry (per the
  // merge policy), appends on a new key, or branches when the hashes differ.
  @Override
  TrieNode add(final TrieList list, final int level, final MergeDuplicates merge,
      final QueryContext qc, final InputInfo ii) throws QueryException {
    // same hash? insert binding
    if(hash == list.hash) {
      for(int i = 0; i < list.size; i++) {
        if(key.sameKey(list.keys[i], ii)) {
          // key already present: copy arrays and resolve the duplicate
          final Item[] ks = list.keys.clone();
          final Value[] vs = list.values.clone();
          ks[i] = key;
          switch(merge) {
            case USE_FIRST:
            case UNSPECIFIED:
              break;
            case USE_LAST:
              vs[i] = value;
              break;
            case COMBINE:
              vs[i] = ValueBuilder.concat(list.values[i], value, qc);
              break;
            default:
              throw MERGE_DUPLICATE_X.get(ii, key);
          }
          return new TrieList(hash, ks, vs);
        }
      }
      // new key with the same hash: extend the collision list
      return new TrieList(hash, Array.add(list.keys, key), Array.add(list.values, value));
    }
    final TrieNode[] ch = new TrieNode[KIDS];
    final int k = key(hash, level), ok = key(list.hash, level), nu;
    // same key? add recursively
    if(k == ok) {
      ch[k] = add(list, level + 1, merge, qc, ii);
      nu = 1 << k;
    } else {
      ch[k] = this;
      ch[ok] = list;
      nu = 1 << k | 1 << ok;
    }
    return new TrieBranch(ch, nu, list.size + 1);
  }

  // Inserts this leaf into the matching child slot of a branch, merging recursively
  // if that slot is already occupied.
  @Override
  TrieNode add(final TrieBranch branch, final int level, final MergeDuplicates merge,
      final QueryContext qc, final InputInfo ii) throws QueryException {
    final int k = key(hash, level);
    final TrieNode[] ch = branch.copyKids();
    final TrieNode old = ch[k];
    ch[k] = old == null ? this : old.addAll(this, level + 1, merge, qc, ii);
    // size delta: new child size minus whatever the old child contributed
    return new TrieBranch(ch, branch.used | 1 << k,
        branch.size + ch[k].size - (old != null ? old.size : 0));
  }

  // Invariant check: the cached hash must equal the key's computed hash.
  @Override
  boolean verify() {
    try {
      return key.hash(null) == hash;
    } catch(final QueryException ex) {
      Util.debug(ex);
      return false;
    }
  }

  @Override
  void keys(final ItemList keys) {
    keys.add(key);
  }

  @Override
  void values(final ValueBuilder vs) {
    vs.add(value);
  }

  @Override
  void cache(final boolean lazy, final InputInfo ii) throws QueryException {
    key.cache(lazy, ii);
    value.cache(lazy, ii);
  }

  // True if every item of the value can be materialized
  // (persistent items or failed materialization disqualify).
  @Override
  boolean materialized() {
    for(final Item item : value) {
      if(item.persistent() || item.materialize(null, false) == null) return false;
    }
    return true;
  }

  // Applies the supplied function to this (key, value) pair and collects the result.
  @Override
  void forEach(final ValueBuilder vb, final FItem func, final QueryContext qc, final InputInfo ii)
      throws QueryException {
    vb.add(func.invokeValue(qc, ii, key, value));
  }

  // Type check: null arguments act as wildcards.
  @Override
  boolean instanceOf(final AtomType kt, final SeqType dt) {
    return (kt == null || key.type.instanceOf(kt)) && (dt == null || dt.instance(value));
  }

  // Deep equality: the other node must be a leaf with an equal key and deep-equal value.
  @Override
  boolean deep(final TrieNode node, final Collation coll, final InputInfo ii)
      throws QueryException {
    return node instanceof TrieLeaf && key.sameKey(((TrieLeaf) node).key, ii) &&
      deep(value, ((TrieLeaf) node).value, coll, ii);
  }

  // Combines the cached key hash with the value's hash.
  @Override
  int hash(final InputInfo ii) throws QueryException {
    return 31 * hash + value.hash(ii);
  }

  // Tree-style debug rendering of this binding.
  @Override
  StringBuilder append(final StringBuilder sb, final String indent) {
    return sb.append(indent).append("`-- ").append(key).append(" => ").append(value).append('\n');
  }

  // Compact "key := value" rendering, guarded by the shared length check.
  @Override
  StringBuilder append(final StringBuilder sb) {
    if(more(sb)) sb.append(key).append(MAPASG).append(value).append(SEP);
    return sb;
  }
}
| |
package net.sf.jsqlparser.test.tablesfinder;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import net.sf.jsqlparser.expression.AllComparisonExpression;
import net.sf.jsqlparser.expression.AnyComparisonExpression;
import net.sf.jsqlparser.expression.BinaryExpression;
import net.sf.jsqlparser.expression.CaseExpression;
import net.sf.jsqlparser.expression.DateValue;
import net.sf.jsqlparser.expression.DoubleValue;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.ExpressionVisitor;
import net.sf.jsqlparser.expression.Function;
import net.sf.jsqlparser.expression.InverseExpression;
import net.sf.jsqlparser.expression.JdbcParameter;
import net.sf.jsqlparser.expression.LongValue;
import net.sf.jsqlparser.expression.NullValue;
import net.sf.jsqlparser.expression.Parenthesis;
import net.sf.jsqlparser.expression.StringValue;
import net.sf.jsqlparser.expression.TimeValue;
import net.sf.jsqlparser.expression.TimestampValue;
import net.sf.jsqlparser.expression.WhenClause;
import net.sf.jsqlparser.expression.operators.arithmetic.Addition;
import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseAnd;
import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseOr;
import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseXor;
import net.sf.jsqlparser.expression.operators.arithmetic.Concat;
import net.sf.jsqlparser.expression.operators.arithmetic.Division;
import net.sf.jsqlparser.expression.operators.arithmetic.Multiplication;
import net.sf.jsqlparser.expression.operators.arithmetic.Subtraction;
import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
import net.sf.jsqlparser.expression.operators.conditional.OrExpression;
import net.sf.jsqlparser.expression.operators.relational.Between;
import net.sf.jsqlparser.expression.operators.relational.EqualsTo;
import net.sf.jsqlparser.expression.operators.relational.ExpressionList;
import net.sf.jsqlparser.expression.operators.relational.GreaterThan;
import net.sf.jsqlparser.expression.operators.relational.GreaterThanEquals;
import net.sf.jsqlparser.expression.operators.relational.InExpression;
import net.sf.jsqlparser.expression.operators.relational.IsNullExpression;
import net.sf.jsqlparser.expression.operators.relational.ItemsListVisitor;
import net.sf.jsqlparser.expression.operators.relational.LikeExpression;
import net.sf.jsqlparser.expression.operators.relational.ExistsExpression;
import net.sf.jsqlparser.expression.operators.relational.Matches;
import net.sf.jsqlparser.expression.operators.relational.MinorThan;
import net.sf.jsqlparser.expression.operators.relational.MinorThanEquals;
import net.sf.jsqlparser.expression.operators.relational.NotEqualsTo;
import net.sf.jsqlparser.schema.Column;
import net.sf.jsqlparser.schema.Table;
import net.sf.jsqlparser.statement.select.FromItem;
import net.sf.jsqlparser.statement.select.FromItemVisitor;
import net.sf.jsqlparser.statement.select.Join;
import net.sf.jsqlparser.statement.select.PlainSelect;
import net.sf.jsqlparser.statement.select.Select;
import net.sf.jsqlparser.statement.select.SelectVisitor;
import net.sf.jsqlparser.statement.select.SubJoin;
import net.sf.jsqlparser.statement.select.SubSelect;
import net.sf.jsqlparser.statement.select.Union;
/**
 * Collects the names of all tables referenced by a SELECT statement by walking its
 * FROM items, joins, WHERE clause, and any nested sub-selects. Acts simultaneously
 * as a select, from-item, expression and items-list visitor so every reachable
 * part of the statement funnels back through this single object.
 */
public class TablesNamesFinder implements SelectVisitor, FromItemVisitor, ExpressionVisitor, ItemsListVisitor {

    // Accumulates the whole table names encountered during the traversal
    // (raw type: matches this library's pre-generics API).
    private List tables;

    /**
     * Walks the given SELECT and returns the list of table names it references.
     */
    public List getTableList(Select select) {
        tables = new ArrayList();
        select.getSelectBody().accept(this);
        return tables;
    }

    public void visit(PlainSelect plainSelect) {
        // FROM item first, then the right-hand side of each join, then the WHERE clause.
        plainSelect.getFromItem().accept(this);
        if (plainSelect.getJoins() != null) {
            Iterator joinsIt = plainSelect.getJoins().iterator();
            while (joinsIt.hasNext()) {
                ((Join) joinsIt.next()).getRightItem().accept(this);
            }
        }
        if (plainSelect.getWhere() != null) {
            plainSelect.getWhere().accept(this);
        }
    }

    public void visit(Union union) {
        // A UNION is a sequence of plain selects; visit each in turn.
        Iterator iter = union.getPlainSelects().iterator();
        while (iter.hasNext()) {
            visit((PlainSelect) iter.next());
        }
    }

    public void visit(Table tableName) {
        // A concrete table reference: record its fully-qualified name.
        tables.add(tableName.getWholeTableName());
    }

    public void visit(SubSelect subSelect) {
        // Descend into the nested select body.
        subSelect.getSelectBody().accept(this);
    }

    public void visit(Addition addition) {
        visitBinaryExpression(addition);
    }

    public void visit(AndExpression andExpression) {
        visitBinaryExpression(andExpression);
    }

    public void visit(Between between) {
        // Tables may hide in any of the three operands.
        between.getLeftExpression().accept(this);
        between.getBetweenExpressionStart().accept(this);
        between.getBetweenExpressionEnd().accept(this);
    }

    public void visit(Column tableColumn) {
        // Column references carry no table name of their own to record here.
    }

    public void visit(Division division) {
        visitBinaryExpression(division);
    }

    public void visit(DoubleValue doubleValue) {
        // Literal: nothing to collect.
    }

    public void visit(EqualsTo equalsTo) {
        visitBinaryExpression(equalsTo);
    }

    public void visit(Function function) {
        // Function calls are not descended into.
    }

    public void visit(GreaterThan greaterThan) {
        visitBinaryExpression(greaterThan);
    }

    public void visit(GreaterThanEquals greaterThanEquals) {
        visitBinaryExpression(greaterThanEquals);
    }

    public void visit(InExpression inExpression) {
        // Both the tested expression and the IN list (which may be a sub-select).
        inExpression.getLeftExpression().accept(this);
        inExpression.getItemsList().accept(this);
    }

    public void visit(InverseExpression inverseExpression) {
        inverseExpression.getExpression().accept(this);
    }

    public void visit(IsNullExpression isNullExpression) {
        // Nothing to collect.
    }

    public void visit(JdbcParameter jdbcParameter) {
        // Placeholder: nothing to collect.
    }

    public void visit(LikeExpression likeExpression) {
        visitBinaryExpression(likeExpression);
    }

    public void visit(ExistsExpression existsExpression) {
        existsExpression.getRightExpression().accept(this);
    }

    public void visit(LongValue longValue) {
        // Literal: nothing to collect.
    }

    public void visit(MinorThan minorThan) {
        visitBinaryExpression(minorThan);
    }

    public void visit(MinorThanEquals minorThanEquals) {
        visitBinaryExpression(minorThanEquals);
    }

    public void visit(Multiplication multiplication) {
        visitBinaryExpression(multiplication);
    }

    public void visit(NotEqualsTo notEqualsTo) {
        visitBinaryExpression(notEqualsTo);
    }

    public void visit(NullValue nullValue) {
        // Literal: nothing to collect.
    }

    public void visit(OrExpression orExpression) {
        visitBinaryExpression(orExpression);
    }

    public void visit(Parenthesis parenthesis) {
        parenthesis.getExpression().accept(this);
    }

    public void visit(StringValue stringValue) {
        // Literal: nothing to collect.
    }

    public void visit(Subtraction subtraction) {
        visitBinaryExpression(subtraction);
    }

    /**
     * Shared handler for all binary operators: visit both operands.
     */
    public void visitBinaryExpression(BinaryExpression binaryExpression) {
        binaryExpression.getLeftExpression().accept(this);
        binaryExpression.getRightExpression().accept(this);
    }

    public void visit(ExpressionList expressionList) {
        Iterator iter = expressionList.getExpressions().iterator();
        while (iter.hasNext()) {
            ((Expression) iter.next()).accept(this);
        }
    }

    public void visit(DateValue dateValue) {
        // Literal: nothing to collect.
    }

    public void visit(TimestampValue timestampValue) {
        // Literal: nothing to collect.
    }

    public void visit(TimeValue timeValue) {
        // Literal: nothing to collect.
    }

    public void visit(CaseExpression caseExpression) {
        // Not descended into (original left this unimplemented).
    }

    public void visit(WhenClause whenClause) {
        // Not descended into (original left this unimplemented).
    }

    public void visit(AllComparisonExpression allComparisonExpression) {
        allComparisonExpression.GetSubSelect().getSelectBody().accept(this);
    }

    public void visit(AnyComparisonExpression anyComparisonExpression) {
        anyComparisonExpression.GetSubSelect().getSelectBody().accept(this);
    }

    public void visit(SubJoin subjoin) {
        // Both sides of the sub-join can reference tables.
        subjoin.getLeft().accept(this);
        subjoin.getJoin().getRightItem().accept(this);
    }

    public void visit(Concat concat) {
        visitBinaryExpression(concat);
    }

    public void visit(Matches matches) {
        visitBinaryExpression(matches);
    }

    public void visit(BitwiseAnd bitwiseAnd) {
        visitBinaryExpression(bitwiseAnd);
    }

    public void visit(BitwiseOr bitwiseOr) {
        visitBinaryExpression(bitwiseOr);
    }

    public void visit(BitwiseXor bitwiseXor) {
        visitBinaryExpression(bitwiseXor);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jndi;
import javax.naming.Binding;
import javax.naming.CompositeName;
import javax.naming.Context;
import javax.naming.LinkRef;
import javax.naming.Name;
import javax.naming.NameClassPair;
import javax.naming.NameNotFoundException;
import javax.naming.NameParser;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.NotContextException;
import javax.naming.OperationNotSupportedException;
import javax.naming.Reference;
import javax.naming.spi.NamingManager;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import org.apache.activemq.artemis.core.client.ActiveMQClientLogger;
/**
* A read-only Context
* <p>
* This version assumes it and all its subcontext are
* read-only and any attempt to modify (e.g. through bind) will result in an
* OperationNotSupportedException. Each Context in the tree builds a cache of
* the entries in all sub-contexts to optimise the performance of lookup.
* <p>
* This implementation is intended to optimise the performance of lookup(String)
* to about the level of a HashMap get. It has been observed that the scheme
* resolution phase performed by the JVM takes considerably longer, so for
* optimum performance lookups should be coded like:
* </p>
* <code>
* Context componentContext = (Context)new InitialContext().lookup("java:comp");
* String envEntry = (String) componentContext.lookup("env/myEntry");
* String envEntry2 = (String) componentContext.lookup("env/myEntry2");
* </code>
*/
@SuppressWarnings("unchecked")
public class ReadOnlyContext implements Context, Serializable {

   public static final String SEPARATOR = "/";

   protected static final NameParser NAME_PARSER = new NameParserImpl();

   private static final long serialVersionUID = -5754338187296859149L;

   protected final Hashtable<String, Object> environment; // environment for this context
   protected final Map<String, Object> bindings; // bindings at my level
   protected final Map<String, Object> treeBindings; // all bindings under me

   // Once frozen, internalBind must no longer be called (it asserts !frozen).
   private boolean frozen;

   // Absolute name of this context within the namespace; "" for the root.
   private String nameInNamespace = "";

   /** Creates an empty, mutable context (used for subcontexts built by internalBind). */
   public ReadOnlyContext() {
      environment = new Hashtable<>();
      bindings = new HashMap<>();
      treeBindings = new HashMap<>();
   }

   /**
    * Creates a context with the given environment and permanently empty bindings
    * (EMPTY_MAP is immutable, so this instance can never be bound into).
    */
   public ReadOnlyContext(Hashtable env) {
      if (env == null) {
         this.environment = new Hashtable<>();
      } else {
         this.environment = new Hashtable<>(env);
      }
      this.bindings = Collections.EMPTY_MAP;
      this.treeBindings = Collections.EMPTY_MAP;
   }

   /**
    * Creates a context pre-populated with the given bindings and then freezes it.
    * Failures to bind an individual entry are logged and skipped, not propagated.
    */
   public ReadOnlyContext(Hashtable environment, Map<String, Object> bindings) {
      if (environment == null) {
         this.environment = new Hashtable<>();
      } else {
         this.environment = new Hashtable<>(environment);
      }
      this.bindings = new HashMap<>();
      treeBindings = new HashMap<>();
      if (bindings != null) {
         for (Map.Entry<String, Object> binding : bindings.entrySet()) {
            try {
               internalBind(binding.getKey(), binding.getValue());
            } catch (Throwable e) {
               ActiveMQClientLogger.LOGGER.error("Failed to bind " + binding.getKey() + "=" + binding.getValue(), e);
            }
         }
      }
      frozen = true;
   }

   /** Same as the two-argument constructor, but also sets the context's absolute name. */
   public ReadOnlyContext(Hashtable environment, Map<String, Object> bindings, String nameInNamespace) {
      this(environment, bindings);
      this.nameInNamespace = nameInNamespace;
   }

   /** Shallow clone sharing the bindings of {@code clone} but with its own environment copy. */
   protected ReadOnlyContext(ReadOnlyContext clone, Hashtable env) {
      this.bindings = clone.bindings;
      this.treeBindings = clone.treeBindings;
      this.environment = new Hashtable<>(env);
   }

   /** Clone constructor that additionally sets the context's absolute name. */
   protected ReadOnlyContext(ReadOnlyContext clone, Hashtable<String, Object> env, String nameInNamespace) {
      this(clone, env);
      this.nameInNamespace = nameInNamespace;
   }

   /** Marks this context as read-only; internalBind asserts against further binds. */
   public void freeze() {
      frozen = true;
   }

   boolean isFrozen() {
      return frozen;
   }

   /**
    * internalBind is intended for use only during setup or possibly by
    * suitably synchronized superclasses. It binds every possible lookup into a
    * map in each context. To do this, each context strips off one name segment
    * and if necessary creates a new context for it. Then it asks that context
    * to bind the remaining name. It returns a map containing all the bindings
    * from the next context, plus the context it just created (if it in fact
    * created it). (the names are suitably extended by the segment originally
    * lopped off).
    *
    * @param name  non-empty, '/'-separated name to bind
    * @param value object to bind at that name
    * @return the bindings newly added at this level (names relative to this context)
    * @throws javax.naming.NamingException if the name (or a required subcontext slot)
    *         is already bound to something incompatible
    */
   protected Map<String, Object> internalBind(String name, Object value) throws NamingException {
      assert name != null && name.length() > 0;
      assert !frozen;
      Map<String, Object> newBindings = new HashMap<>();
      int pos = name.indexOf('/');
      if (pos == -1) {
         // Leaf segment: bind directly at this level.
         if (treeBindings.put(name, value) != null) {
            throw new NamingException("Something already bound at " + name);
         }
         bindings.put(name, value);
         newBindings.put(name, value);
      } else {
         // Compound name: ensure a subcontext for the first segment, then recurse.
         String segment = name.substring(0, pos);
         assert segment != null;
         assert !segment.equals("");
         Object o = treeBindings.get(segment);
         if (o == null) {
            o = newContext();
            treeBindings.put(segment, o);
            bindings.put(segment, o);
            newBindings.put(segment, o);
         } else if (!(o instanceof ReadOnlyContext)) {
            throw new NamingException("Something already bound where a subcontext should go");
         }
         ReadOnlyContext readOnlyContext = (ReadOnlyContext) o;
         String remainder = name.substring(pos + 1);
         Map<String, Object> subBindings = readOnlyContext.internalBind(remainder, value);
         // Mirror the child's new bindings into this level's treeBindings,
         // prefixed with the segment we stripped off.
         for (Map.Entry<String, Object> entry : subBindings.entrySet()) {
            String subName = segment + "/" + entry.getKey();
            Object bound = entry.getValue();
            treeBindings.put(subName, bound);
            newBindings.put(subName, bound);
         }
      }
      return newBindings;
   }

   /** Factory hook for subcontexts created by internalBind; subclasses may override. */
   protected ReadOnlyContext newContext() {
      return new ReadOnlyContext();
   }

   @Override
   public Object addToEnvironment(String propName, Object propVal) throws NamingException {
      return environment.put(propName, propVal);
   }

   /** Returns a defensive copy so callers cannot mutate this context's environment. */
   @Override
   public Hashtable<String, Object> getEnvironment() throws NamingException {
      return (Hashtable<String, Object>) environment.clone();
   }

   @Override
   public Object removeFromEnvironment(String propName) throws NamingException {
      return environment.remove(propName);
   }

   /**
    * Looks up a name. Fast path: a direct hit in the pre-computed treeBindings
    * (or bindings) map. Otherwise falls back to URL-scheme resolution
    * ("scheme:rest") or segment-by-segment traversal of subcontexts.
    * LinkRefs are followed, References are resolved via NamingManager, and a
    * resolved subcontext is re-wrapped with its full name in the namespace.
    */
   @Override
   public Object lookup(String name) throws NamingException {
      if (name.length() == 0) {
         // Empty name names this context itself (JNDI convention).
         return this;
      }
      Object result = treeBindings.get(name);
      if (result == null) {
         result = bindings.get(name);
      }
      if (result == null) {
         int pos = name.indexOf(':');
         if (pos > 0) {
            // URL-style name: delegate to the context for its scheme.
            String scheme = name.substring(0, pos);
            Context ctx = NamingManager.getURLContext(scheme, environment);
            if (ctx == null) {
               throw new NamingException("scheme " + scheme + " not recognized");
            }
            return ctx.lookup(name);
         } else {
            // Split out the first name of the path
            // and look for it in the bindings map.
            CompositeName path = new CompositeName(name);
            if (path.size() == 0) {
               return this;
            } else {
               String first = path.get(0);
               Object obj = bindings.get(first);
               if (obj == null) {
                  throw new NameNotFoundException(name);
               } else if (obj instanceof Context && path.size() > 1) {
                  Context subContext = (Context) obj;
                  obj = subContext.lookup(path.getSuffix(1));
               }
               return obj;
            }
         }
      }
      if (result instanceof LinkRef) {
         // Follow the link to its target name.
         LinkRef ref = (LinkRef) result;
         result = lookup(ref.getLinkName());
      }
      if (result instanceof Reference) {
         // Resolve the reference into the actual object.
         try {
            result = NamingManager.getObjectInstance(result, null, null, this.environment);
         } catch (NamingException e) {
            throw e;
         } catch (Exception e) {
            throw (NamingException) new NamingException("could not look up : " + name).initCause(e);
         }
      }
      if (result instanceof ReadOnlyContext) {
         // Re-wrap so the returned context knows its absolute name.
         String prefix = getNameInNamespace();
         if (prefix.length() > 0) {
            prefix = prefix + SEPARATOR;
         }
         result = new ReadOnlyContext((ReadOnlyContext) result, environment, prefix + name);
      }
      return result;
   }

   @Override
   public Object lookup(Name name) throws NamingException {
      return lookup(name.toString());
   }

   // Link resolution already happens inside lookup(String), so this is a plain lookup.
   @Override
   public Object lookupLink(String name) throws NamingException {
      return lookup(name);
   }

   @Override
   public Name composeName(Name name, Name prefix) throws NamingException {
      Name result = (Name) prefix.clone();
      result.addAll(name);
      return result;
   }

   @Override
   public String composeName(String name, String prefix) throws NamingException {
      CompositeName result = new CompositeName(prefix);
      result.addAll(new CompositeName(name));
      return result.toString();
   }

   /** Lists name/class pairs of the context named by {@code name}. */
   @Override
   public NamingEnumeration<NameClassPair> list(String name) throws NamingException {
      Object o = lookup(name);
      if (o == this) {
         return new ListEnumeration();
      } else if (o instanceof Context) {
         return ((Context) o).list("");
      } else {
         throw new NotContextException();
      }
   }

   /** Lists name/object bindings of the context named by {@code name}. */
   @Override
   public NamingEnumeration<Binding> listBindings(String name) throws NamingException {
      Object o = lookup(name);
      if (o == this) {
         return new ListBindingEnumeration();
      } else if (o instanceof Context) {
         return ((Context) o).listBindings("");
      } else {
         throw new NotContextException();
      }
   }

   @Override
   public Object lookupLink(Name name) throws NamingException {
      return lookupLink(name.toString());
   }

   @Override
   public NamingEnumeration<NameClassPair> list(Name name) throws NamingException {
      return list(name.toString());
   }

   @Override
   public NamingEnumeration<Binding> listBindings(Name name) throws NamingException {
      return listBindings(name.toString());
   }

   // ---------------------------------------------------------------------
   // Mutation operations: this context is read-only, so all of them fail
   // with OperationNotSupportedException (see class javadoc).
   // ---------------------------------------------------------------------

   @Override
   public void bind(Name name, Object obj) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void bind(String name, Object obj) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void close() throws NamingException {
      // ignore
   }

   @Override
   public Context createSubcontext(Name name) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public Context createSubcontext(String name) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void destroySubcontext(Name name) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void destroySubcontext(String name) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public String getNameInNamespace() throws NamingException {
      return nameInNamespace;
   }

   @Override
   public NameParser getNameParser(Name name) throws NamingException {
      return NAME_PARSER;
   }

   @Override
   public NameParser getNameParser(String name) throws NamingException {
      return NAME_PARSER;
   }

   @Override
   public void rebind(Name name, Object obj) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void rebind(String name, Object obj) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void rename(Name oldName, Name newName) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void rename(String oldName, String newName) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void unbind(Name name) throws NamingException {
      throw new OperationNotSupportedException();
   }

   @Override
   public void unbind(String name) throws NamingException {
      throw new OperationNotSupportedException();
   }

   /**
    * Base enumeration over this context's own (single-level) bindings;
    * subclasses choose what to produce for each entry.
    */
   private abstract class LocalNamingEnumeration implements NamingEnumeration {

      private final Iterator<Map.Entry<String, Object>> i = bindings.entrySet().iterator();

      @Override
      public boolean hasMore() throws NamingException {
         return i.hasNext();
      }

      @Override
      public boolean hasMoreElements() {
         return i.hasNext();
      }

      protected Map.Entry<String, Object> getNext() {
         return i.next();
      }

      @Override
      public void close() throws NamingException {
      }
   }

   /** Enumeration of NameClassPair entries, backing list(). */
   private class ListEnumeration extends LocalNamingEnumeration {

      ListEnumeration() {
      }

      @Override
      public Object next() throws NamingException {
         return nextElement();
      }

      @Override
      public Object nextElement() {
         Map.Entry<String, Object> entry = getNext();
         return new NameClassPair(entry.getKey(), entry.getValue().getClass().getName());
      }
   }

   /** Enumeration of Binding entries, backing listBindings(). */
   private class ListBindingEnumeration extends LocalNamingEnumeration {

      ListBindingEnumeration() {
      }

      @Override
      public Object next() throws NamingException {
         return nextElement();
      }

      @Override
      public Object nextElement() {
         Map.Entry<String, Object> entry = getNext();
         return new Binding(entry.getKey(), entry.getValue());
      }
   }
}
| |
/*
* Copyright 2021 Jim Voris.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qvcsos.server.dataaccess.impl;
import com.qvcsos.server.DatabaseManager;
import com.qvcsos.server.dataaccess.FileRevisionDAO;
import com.qvcsos.server.datamodel.Branch;
import com.qvcsos.server.datamodel.FileRevision;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Jim Voris
*/
public class FileRevisionDAOImpl implements FileRevisionDAO {
/**
* Create our logger object.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(FileRevisionDAOImpl.class);
private static final int ID_RESULT_SET_INDEX = 1;
private static final int BRANCH_ID_RESULT_SET_INDEX = 2;
private static final int FILE_ID_RESULT_SET_INDEX = 3;
private static final int ANCESTOR_REVISION_ID_RESULT_SET_INDEX = 4;
private static final int REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX = 5;
private static final int COMMIT_ID_RESULT_SET_INDEX = 6;
private static final int PROMOTED_FLAG_RESULT_SET_INDEX = 7;
private static final int WORKFILE_EDIT_DATE_RESULT_SET_INDEX = 8;
private static final int REVISION_DIGEST_RESULT_SET_INDEX = 9;
private static final int REVISION_SIZE_RESULT_SET_INDEX = 10;
private static final int REVISION_DATA_RESULT_SET_INDEX = 11;
private final String schemaName;
private final String findById;
private final String findFileRevisions;
private final String findAllFileRevisions;
private final String findNewestRevisionOnBranch;
private final String findNewestRevisionAllBranches;
private final String findNewestBranchRevision;
private final String findPromotionCandidates;
private final String findNewestPromotedRevision;
private final String findByBranchIdAndAncestorRevisionAndFileId;
private final String findCommonAncestorRevision;
private final String findFileIdListForCommitId;
private final String insertFileRevision;
private final String updateAncestorRevision;
private final String markPromoted;
    /**
     * Build all SQL statement strings for the given database schema.
     * <p>
     * Two SELECT column lists are used: {@code selectAllSegment} includes the
     * REVISION_DATA blob; {@code selectHeaderSegment} fetches only the revision
     * header (size is computed via LENGTH(REVISION_DATA) in both cases).
     *
     * @param schema the schema name used to qualify the FILE_REVISION table.
     */
    public FileRevisionDAOImpl(String schema) {
        this.schemaName = schema;
        String selectAllSegment = "SELECT ID, BRANCH_ID, FILE_ID, ANCESTOR_REVISION_ID, REVERSE_DELTA_REVISION_ID, COMMIT_ID, PROMOTED_FLAG, WORKFILE_EDIT_DATE, REVISION_DIGEST, "
                + "LENGTH(REVISION_DATA) AS REVISION_SIZE, "
                + "REVISION_DATA FROM ";
        String selectHeaderSegment = "SELECT ID, BRANCH_ID, FILE_ID, ANCESTOR_REVISION_ID, REVERSE_DELTA_REVISION_ID, COMMIT_ID, PROMOTED_FLAG, WORKFILE_EDIT_DATE, REVISION_DIGEST, "
                + "LENGTH(REVISION_DATA) AS REVISION_SIZE FROM ";
        this.findById = selectAllSegment + this.schemaName + ".FILE_REVISION WHERE ID = ?";
        // The %s placeholder is substituted with a comma-separated branch-id list via String.format.
        this.findFileRevisions = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE FILE_ID = ? AND BRANCH_ID IN (%s) ORDER BY ID DESC";
        this.findAllFileRevisions = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE FILE_ID = ? ORDER BY ID DESC";
        this.findNewestRevisionOnBranch = selectAllSegment + this.schemaName + ".FILE_REVISION WHERE FILE_ID = ? AND BRANCH_ID = ? ORDER BY ID DESC LIMIT 1";
        this.findNewestRevisionAllBranches = selectAllSegment + this.schemaName + ".FILE_REVISION WHERE FILE_ID = ? ORDER BY ID DESC LIMIT 1";
        this.findNewestBranchRevision = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE BRANCH_ID = ? ORDER BY ID DESC LIMIT 1";
        // Ordered FILE_ID, ID DESC so callers can keep the first row seen per file as the newest.
        this.findPromotionCandidates = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE BRANCH_ID = ? AND PROMOTED_FLAG = FALSE ORDER BY FILE_ID, ID DESC";
        this.findNewestPromotedRevision = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE BRANCH_ID = ? AND FILE_ID = ? AND PROMOTED_FLAG = TRUE ORDER BY ID DESC LIMIT 1";
        this.findByBranchIdAndAncestorRevisionAndFileId = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE BRANCH_ID = ? AND ANCESTOR_REVISION_ID = ? AND "
                + "FILE_ID = ? ORDER BY ID DESC LIMIT 1";
        // Two separate ID <= ? bounds: the newest revision at or below BOTH supplied revision ids.
        this.findCommonAncestorRevision = selectHeaderSegment + this.schemaName + ".FILE_REVISION WHERE BRANCH_ID = ? AND ID <= ? AND "
                + "ID <= ? AND "
                + "FILE_ID = ? ORDER BY ID DESC LIMIT 1";
        this.findFileIdListForCommitId = "SELECT FILE_ID FROM " + this.schemaName + ".FILE_REVISION WHERE COMMIT_ID = ?";
        // PostgreSQL-style INSERT ... RETURNING ID, executed via executeQuery to fetch the new key.
        this.insertFileRevision = "INSERT INTO " + this.schemaName
                + ".FILE_REVISION (BRANCH_ID, FILE_ID, ANCESTOR_REVISION_ID, REVERSE_DELTA_REVISION_ID, COMMIT_ID, PROMOTED_FLAG, WORKFILE_EDIT_DATE, REVISION_DIGEST, REVISION_DATA) "
                + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING ID";
        this.updateAncestorRevision = "UPDATE " + this.schemaName + ".FILE_REVISION SET REVERSE_DELTA_REVISION_ID = ?, REVISION_DATA = ? WHERE ID = ? RETURNING ID";
        // NOTE(review): no revision-id predicate — this flags ALL revisions for the branch/file pair.
        this.markPromoted = "UPDATE " + this.schemaName + ".FILE_REVISION SET PROMOTED_FLAG = TRUE WHERE BRANCH_ID = ? AND FILE_ID = ?";
    }
@Override
public FileRevision findById(Integer id) {
FileRevision revision = null;
ResultSet resultSet = null;
PreparedStatement preparedStatement = null;
try {
Connection connection = DatabaseManager.getInstance().getConnection();
preparedStatement = connection.prepareStatement(this.findById, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
preparedStatement.setInt(1, id);
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
Integer fetchedAncestorRevisionId = null;
if (fetchedAncestorRevisionObject != null) {
fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
}
Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
Integer fetchedReverseDeltaRevisionId = null;
if (fetchedReverseDeltaRevisionObject != null) {
fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
}
Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
byte[] fetchedData = resultSet.getBytes(REVISION_DATA_RESULT_SET_INDEX);
revision = new FileRevision();
revision.setId(fetchedId);
revision.setBranchId(fetchedBranchId);
revision.setFileId(fetchedFileId);
revision.setAncestorRevisionId(fetchedAncestorRevisionId);
revision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
revision.setCommitId(fetchedCommitId);
revision.setPromotedFlag(fetchedPromotedFlag);
revision.setWorkfileEditDate(fetchedWorkfileEditDate);
revision.setRevisionDigest(fetchedDigest);
revision.setRevisionDataSize(fetchedSize);
revision.setRevisionData(fetchedData);
}
} catch (SQLException e) {
LOGGER.error("FileRevisionDAOImpl: SQL exception in findById", e);
} catch (IllegalStateException e) {
LOGGER.error("FileRevisionDAOImpl: exception in findById", e);
throw e;
} finally {
DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
}
return revision;
}
    /**
     * Find the revision headers (no REVISION_DATA) for a file on the given set of branches.
     *
     * @param branchesToSearch comma-separated branch-id list spliced into the IN (%s) clause.
     * NOTE(review): this is interpolated into the SQL text, not bound as a parameter —
     * confirm it is always server-generated and never user-supplied.
     * @param fileId the file id.
     * @return revisions ordered newest first; empty list when none found or on SQL error.
     */
    @Override
    public List<FileRevision> findFileRevisions(String branchesToSearch, Integer fileId) {
        List<FileRevision> fileRevisionList = new ArrayList<>();
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            // Substitute the branch-id list into the IN (%s) placeholder.
            String queryString = String.format(findFileRevisions, branchesToSearch);
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(queryString, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            preparedStatement.setInt(1, fileId);
            resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                FileRevision fileRevision = new FileRevision();
                fileRevision.setId(fetchedId);
                fileRevision.setBranchId(fetchedBranchId);
                fileRevision.setFileId(fetchedFileId);
                fileRevision.setAncestorRevisionId(fetchedAncestorRevisionId);
                fileRevision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                fileRevision.setCommitId(fetchedCommitId);
                fileRevision.setPromotedFlag(fetchedPromotedFlag);
                fileRevision.setWorkfileEditDate(fetchedWorkfileEditDate);
                fileRevision.setRevisionDigest(fetchedDigest);
                fileRevision.setRevisionDataSize(fetchedSize);
                fileRevisionList.add(fileRevision);
            }
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findFileRevisions", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findFileRevisions", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return fileRevisionList;
    }
    /**
     * Find the revision headers (no REVISION_DATA) for a file across ALL branches.
     *
     * @param fileId the file id.
     * @return revisions ordered newest first; empty list when none found or on SQL error.
     */
    @Override
    public List<FileRevision> findAllFileRevisions(Integer fileId) {
        List<FileRevision> fileRevisionList = new ArrayList<>();
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(findAllFileRevisions, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            preparedStatement.setInt(1, fileId);
            resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                FileRevision fileRevision = new FileRevision();
                fileRevision.setId(fetchedId);
                fileRevision.setBranchId(fetchedBranchId);
                fileRevision.setFileId(fetchedFileId);
                fileRevision.setAncestorRevisionId(fetchedAncestorRevisionId);
                fileRevision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                fileRevision.setCommitId(fetchedCommitId);
                fileRevision.setPromotedFlag(fetchedPromotedFlag);
                fileRevision.setWorkfileEditDate(fetchedWorkfileEditDate);
                fileRevision.setRevisionDigest(fetchedDigest);
                fileRevision.setRevisionDataSize(fetchedSize);
                fileRevisionList.add(fileRevision);
            }
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findAllFileRevisions", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findAllFileRevisions", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return fileRevisionList;
    }
    /**
     * Find the newest revision (including the REVISION_DATA blob) for a file on one branch.
     *
     * @param branchId the branch id.
     * @param fileId the file id.
     * @return the newest revision, or null when none exists or a SQLException occurs.
     */
    @Override
    public FileRevision findNewestRevisionOnBranch(Integer branchId, Integer fileId) {
        FileRevision newestRevision = null;
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(this.findNewestRevisionOnBranch, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            // Bind order matches the SQL: WHERE FILE_ID = ? AND BRANCH_ID = ?.
            preparedStatement.setInt(1, fileId);
            preparedStatement.setInt(2, branchId);
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                byte[] fetchedData = resultSet.getBytes(REVISION_DATA_RESULT_SET_INDEX);
                newestRevision = new FileRevision();
                newestRevision.setId(fetchedId);
                newestRevision.setBranchId(fetchedBranchId);
                newestRevision.setFileId(fetchedFileId);
                newestRevision.setAncestorRevisionId(fetchedAncestorRevisionId);
                newestRevision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                newestRevision.setCommitId(fetchedCommitId);
                newestRevision.setPromotedFlag(fetchedPromotedFlag);
                newestRevision.setWorkfileEditDate(fetchedWorkfileEditDate);
                newestRevision.setRevisionDigest(fetchedDigest);
                newestRevision.setRevisionDataSize(fetchedSize);
                newestRevision.setRevisionData(fetchedData);
            }
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findNewestRevisionOnBranch", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findNewestRevisionOnBranch", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return newestRevision;
    }
    /**
     * Find the newest revision (including the REVISION_DATA blob) for a file
     * regardless of which branch it lives on.
     *
     * @param fileId the file id.
     * @return the newest revision, or null when none exists or a SQLException occurs.
     */
    @Override
    public FileRevision findNewestRevisionAllBranches(Integer fileId) {
        FileRevision newestRevision = null;
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(this.findNewestRevisionAllBranches, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            preparedStatement.setInt(1, fileId);
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                byte[] fetchedData = resultSet.getBytes(REVISION_DATA_RESULT_SET_INDEX);
                newestRevision = new FileRevision();
                newestRevision.setId(fetchedId);
                newestRevision.setBranchId(fetchedBranchId);
                newestRevision.setFileId(fetchedFileId);
                newestRevision.setAncestorRevisionId(fetchedAncestorRevisionId);
                newestRevision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                newestRevision.setCommitId(fetchedCommitId);
                newestRevision.setPromotedFlag(fetchedPromotedFlag);
                newestRevision.setWorkfileEditDate(fetchedWorkfileEditDate);
                newestRevision.setRevisionDigest(fetchedDigest);
                newestRevision.setRevisionDataSize(fetchedSize);
                newestRevision.setRevisionData(fetchedData);
            }
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findNewestRevisionAllBranches", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findNewestRevisionAllBranches", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return newestRevision;
    }
    /**
     * Find the newest revision header (no REVISION_DATA) on a branch, across all files.
     *
     * @param branchId the branch id.
     * @return the newest revision header on the branch, or null when the branch has
     * no revisions or a SQLException occurs.
     */
    @Override
    public FileRevision findNewestBranchRevision(int branchId) {
        FileRevision revision = null;
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(this.findNewestBranchRevision, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            preparedStatement.setInt(1, branchId);
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                revision = new FileRevision();
                revision.setId(fetchedId);
                revision.setBranchId(fetchedBranchId);
                revision.setFileId(fetchedFileId);
                revision.setAncestorRevisionId(fetchedAncestorRevisionId);
                revision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                revision.setCommitId(fetchedCommitId);
                revision.setPromotedFlag(fetchedPromotedFlag);
                revision.setWorkfileEditDate(fetchedWorkfileEditDate);
                revision.setRevisionDigest(fetchedDigest);
                revision.setRevisionDataSize(fetchedSize);
            }
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findNewestBranchRevision", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findNewestBranchRevision", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return revision;
    }
    /**
     * Find, per file, the newest un-promoted revision header on the promote-from branch,
     * then prune revisions that have already been promoted.
     *
     * @param promoteFromBranch the branch revisions are promoted from; only its id is bound
     * into the query.
     * @param promoteToBranch the target branch. NOTE(review): this parameter is not used in
     * this method's query — presumably consumed by prunePromotedRevisions' logic elsewhere;
     * confirm against the interface contract.
     * @return pruned list of promotion-candidate revision headers; empty on SQL error.
     */
    @Override
    public List<FileRevision> findPromotionCandidates(Branch promoteFromBranch, Branch promoteToBranch) {
        List<FileRevision> fileRevisionList = new ArrayList<>();
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(this.findPromotionCandidates, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            preparedStatement.setInt(1, promoteFromBranch.getId());
            Map<Integer, FileRevision> fileRevisionMap = new TreeMap<>();
            resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                // We only need the newest revision.
                // (Rows arrive ORDER BY FILE_ID, ID DESC, so the first row seen per file is its newest.)
                if (!fileRevisionMap.containsKey(fetchedFileId)) {
                    FileRevision fileRevision = new FileRevision();
                    fileRevision.setId(fetchedId);
                    fileRevision.setBranchId(fetchedBranchId);
                    fileRevision.setFileId(fetchedFileId);
                    fileRevision.setAncestorRevisionId(fetchedAncestorRevisionId);
                    fileRevision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                    fileRevision.setCommitId(fetchedCommitId);
                    fileRevision.setPromotedFlag(fetchedPromotedFlag);
                    fileRevision.setWorkfileEditDate(fetchedWorkfileEditDate);
                    fileRevision.setRevisionDigest(fetchedDigest);
                    fileRevision.setRevisionDataSize(fetchedSize);
                    fileRevisionMap.put(fetchedFileId, fileRevision);
                }
            }
            fileRevisionList = prunePromotedRevisions(fileRevisionMap);
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findPromotionCandidates", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findPromotionCandidates", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return fileRevisionList;
    }
    /**
     * Find the common ancestor revision for a promotion: the newest revision header on
     * the promote-to branch whose id is at or below BOTH supplied ancestor ids.
     *
     * @param promoteToBranchId the promote-to branch id.
     * @param newestBranchAncestorId upper bound from the feature branch's ancestry.
     * @param newestPromoteToAncestorId upper bound from the promote-to branch's ancestry.
     * @param fileId the file id.
     * @return the common ancestor revision header, or null when none exists or a
     * SQLException occurs.
     */
    @Override
    public FileRevision findCommonAncestorRevision(Integer promoteToBranchId, Integer newestBranchAncestorId, Integer newestPromoteToAncestorId, Integer fileId) {
        FileRevision revision = null;
        ResultSet resultSet = null;
        PreparedStatement preparedStatement = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            preparedStatement = connection.prepareStatement(this.findCommonAncestorRevision, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            // <editor-fold>
            preparedStatement.setInt(1, promoteToBranchId);
            preparedStatement.setInt(2, newestBranchAncestorId);
            preparedStatement.setInt(3, newestPromoteToAncestorId);
            preparedStatement.setInt(4, fileId);
            // </editor-fold>
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
                Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
                Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
                // Nullable column: probe with getObject so SQL NULL maps to a null Integer.
                Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedAncestorRevisionId = null;
                if (fetchedAncestorRevisionObject != null) {
                    fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
                }
                // Nullable column as well.
                Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                Integer fetchedReverseDeltaRevisionId = null;
                if (fetchedReverseDeltaRevisionObject != null) {
                    fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
                }
                Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
                Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
                Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
                byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
                Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
                revision = new FileRevision();
                revision.setId(fetchedId);
                revision.setBranchId(fetchedBranchId);
                revision.setFileId(fetchedFileId);
                revision.setAncestorRevisionId(fetchedAncestorRevisionId);
                revision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
                revision.setCommitId(fetchedCommitId);
                revision.setPromotedFlag(fetchedPromotedFlag);
                revision.setWorkfileEditDate(fetchedWorkfileEditDate);
                revision.setRevisionDigest(fetchedDigest);
                revision.setRevisionDataSize(fetchedSize);
            }
        } catch (SQLException e) {
            LOGGER.error("FileRevisionDAOImpl: SQL exception in findCommonAncestorRevision", e);
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in findCommonAncestorRevision", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
        }
        return revision;
    }
@Override
public FileRevision findNewestPromotedRevision(int promoteFromBranchId, Integer fileId) {
FileRevision revision = null;
ResultSet resultSet = null;
PreparedStatement preparedStatement = null;
try {
Connection connection = DatabaseManager.getInstance().getConnection();
preparedStatement = connection.prepareStatement(this.findNewestPromotedRevision, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
preparedStatement.setInt(1, promoteFromBranchId);
preparedStatement.setInt(2, fileId);
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
Integer fetchedAncestorRevisionId = null;
if (fetchedAncestorRevisionObject != null) {
fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
}
Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
Integer fetchedReverseDeltaRevisionId = null;
if (fetchedReverseDeltaRevisionObject != null) {
fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
}
Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
revision = new FileRevision();
revision.setId(fetchedId);
revision.setBranchId(fetchedBranchId);
revision.setFileId(fetchedFileId);
revision.setAncestorRevisionId(fetchedAncestorRevisionId);
revision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
revision.setCommitId(fetchedCommitId);
revision.setPromotedFlag(fetchedPromotedFlag);
revision.setWorkfileEditDate(fetchedWorkfileEditDate);
revision.setRevisionDigest(fetchedDigest);
revision.setRevisionDataSize(fetchedSize);
}
} catch (SQLException e) {
LOGGER.error("FileRevisionDAOImpl: SQL exception in findBranchTipRevisionByBranchIdAndFileId", e);
} catch (IllegalStateException e) {
LOGGER.error("FileRevisionDAOImpl: exception in findBranchTipRevisionByBranchIdAndFileId", e);
throw e;
} finally {
DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
}
return revision;
}
@Override
public FileRevision findByBranchIdAndAncestorRevisionAndFileId(int promoteToBranchId, Integer ancestorRevisionId, Integer fileId) {
FileRevision revision = null;
ResultSet resultSet = null;
PreparedStatement preparedStatement = null;
try {
Connection connection = DatabaseManager.getInstance().getConnection();
preparedStatement = connection.prepareStatement(this.findByBranchIdAndAncestorRevisionAndFileId, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
// <editor-fold>
preparedStatement.setInt(1, promoteToBranchId);
preparedStatement.setInt(2, ancestorRevisionId);
preparedStatement.setInt(3, fileId);
// </editor-fold>
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
Integer fetchedId = resultSet.getInt(ID_RESULT_SET_INDEX);
Integer fetchedBranchId = resultSet.getInt(BRANCH_ID_RESULT_SET_INDEX);
Integer fetchedFileId = resultSet.getInt(FILE_ID_RESULT_SET_INDEX);
Object fetchedAncestorRevisionObject = resultSet.getObject(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
Integer fetchedAncestorRevisionId = null;
if (fetchedAncestorRevisionObject != null) {
fetchedAncestorRevisionId = resultSet.getInt(ANCESTOR_REVISION_ID_RESULT_SET_INDEX);
}
Object fetchedReverseDeltaRevisionObject = resultSet.getObject(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
Integer fetchedReverseDeltaRevisionId = null;
if (fetchedReverseDeltaRevisionObject != null) {
fetchedReverseDeltaRevisionId = resultSet.getInt(REVERSE_DELTA_REVISION_ID_RESULT_SET_INDEX);
}
Integer fetchedCommitId = resultSet.getInt(COMMIT_ID_RESULT_SET_INDEX);
Boolean fetchedPromotedFlag = resultSet.getBoolean(PROMOTED_FLAG_RESULT_SET_INDEX);
Timestamp fetchedWorkfileEditDate = resultSet.getTimestamp(WORKFILE_EDIT_DATE_RESULT_SET_INDEX);
byte[] fetchedDigest = resultSet.getBytes(REVISION_DIGEST_RESULT_SET_INDEX);
Integer fetchedSize = resultSet.getInt(REVISION_SIZE_RESULT_SET_INDEX);
revision = new FileRevision();
revision.setId(fetchedId);
revision.setBranchId(fetchedBranchId);
revision.setFileId(fetchedFileId);
revision.setAncestorRevisionId(fetchedAncestorRevisionId);
revision.setReverseDeltaRevisionId(fetchedReverseDeltaRevisionId);
revision.setCommitId(fetchedCommitId);
revision.setPromotedFlag(fetchedPromotedFlag);
revision.setWorkfileEditDate(fetchedWorkfileEditDate);
revision.setRevisionDigest(fetchedDigest);
revision.setRevisionDataSize(fetchedSize);
}
} catch (SQLException e) {
LOGGER.error("FileRevisionDAOImpl: SQL exception in findBranchTipRevisionByBranchIdAndFileId", e);
} catch (IllegalStateException e) {
LOGGER.error("FileRevisionDAOImpl: exception in findBranchTipRevisionByBranchIdAndFileId", e);
throw e;
} finally {
DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
}
return revision;
}
@Override
public List<Integer> findFileIdListForCommitId(Integer commitId) {
List<Integer> fileIdList = new ArrayList<>();
ResultSet resultSet = null;
PreparedStatement preparedStatement = null;
try {
Connection connection = DatabaseManager.getInstance().getConnection();
preparedStatement = connection.prepareStatement(this.findFileIdListForCommitId, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
preparedStatement.setInt(1, commitId);
resultSet = preparedStatement.executeQuery();
while (resultSet.next()) {
Integer fetchedFileId = resultSet.getInt(1);
fileIdList.add(fetchedFileId);
}
} catch (SQLException e) {
LOGGER.error("FileRevisionDAOImpl: SQL exception in findPromotionCandidates", e);
} catch (IllegalStateException e) {
LOGGER.error("FileRevisionDAOImpl: exception in findPromotionCandidates", e);
throw e;
} finally {
DAOHelper.closeDbResources(LOGGER, resultSet, preparedStatement);
}
return fileIdList;
}
    /**
     * Insert a new file revision row and return its generated id.
     * <p>
     * Uses INSERT ... RETURNING ID executed via executeQuery() to fetch the new key
     * (PostgreSQL idiom). Auto-commit is disabled here and no commit is issued in
     * this method — presumably the caller owns the transaction boundary; confirm.
     * PROMOTED_FLAG is always inserted as FALSE regardless of the value carried by
     * the supplied fileRevision object.
     *
     * @param fileRevision the revision to insert; ancestor and reverse-delta revision
     * ids may be null and are written as SQL NULL.
     * @return the generated revision id, or null if no row was returned.
     * @throws SQLException on any database error (not caught here).
     */
    @Override
    public Integer insert(FileRevision fileRevision) throws SQLException {
        PreparedStatement preparedStatement = null;
        ResultSet rs = null;
        Integer returnId = null;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            connection.setAutoCommit(false);
            preparedStatement = connection.prepareStatement(this.insertFileRevision);
            // <editor-fold>
            preparedStatement.setInt(1, fileRevision.getBranchId());
            preparedStatement.setInt(2, fileRevision.getFileId());
            if (fileRevision.getAncestorRevisionId() != null) {
                preparedStatement.setInt(3, fileRevision.getAncestorRevisionId());
            } else {
                preparedStatement.setNull(3, java.sql.Types.INTEGER);
            }
            if (fileRevision.getReverseDeltaRevisionId() != null) {
                preparedStatement.setInt(4, fileRevision.getReverseDeltaRevisionId());
            } else {
                preparedStatement.setNull(4, java.sql.Types.INTEGER);
            }
            preparedStatement.setInt(5, fileRevision.getCommitId());
            // New revisions always start un-promoted.
            preparedStatement.setBoolean(6, false);
            preparedStatement.setTimestamp(7, fileRevision.getWorkfileEditDate());
            preparedStatement.setBytes(8, fileRevision.getRevisionDigest());
            preparedStatement.setBytes(9, fileRevision.getRevisionData());
            // </editor-fold>
            rs = preparedStatement.executeQuery();
            if (rs.next()) {
                returnId = rs.getInt(1);
            }
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in insert", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, rs, preparedStatement);
        }
        return returnId;
    }
    /**
     * Update an existing revision row, setting its reverse-delta revision id and
     * replacing its revision data with the supplied reverse-delta script.
     * <p>
     * Auto-commit is disabled and no commit is issued here — presumably the caller
     * owns the transaction boundary; confirm.
     *
     * @param id the id of the revision row to update.
     * @param reverseDeltaRevisionId the new reverse-delta revision id.
     * @param reverseDeltaScript the bytes to store in REVISION_DATA.
     * @return the value of PreparedStatement.execute(): true when the statement
     * produced a result set (the RETURNING ID clause), not a success/failure flag.
     * @throws SQLException on any database error (not caught here).
     */
    @Override
    public boolean updateAncestorRevision(Integer id, Integer reverseDeltaRevisionId, byte[] reverseDeltaScript) throws SQLException {
        PreparedStatement preparedStatement = null;
        boolean returnFlag = false;
        try {
            Connection connection = DatabaseManager.getInstance().getConnection();
            connection.setAutoCommit(false);
            preparedStatement = connection.prepareStatement(this.updateAncestorRevision);
            // <editor-fold>
            preparedStatement.setInt(1, reverseDeltaRevisionId);
            preparedStatement.setBytes(2, reverseDeltaScript);
            preparedStatement.setInt(3, id);
            // </editor-fold>
            returnFlag = preparedStatement.execute();
        } catch (IllegalStateException e) {
            LOGGER.error("FileRevisionDAOImpl: exception in updateAncestorRevision", e);
            throw e;
        } finally {
            DAOHelper.closeDbResources(LOGGER, null, preparedStatement);
        }
        return returnFlag;
    }
@Override
public boolean markPromoted(Integer fileRevisionId) throws SQLException {
PreparedStatement preparedStatement = null;
boolean returnFlag = false;
try {
FileRevision fileRevision = findById(fileRevisionId);
Connection connection = DatabaseManager.getInstance().getConnection();
connection.setAutoCommit(false);
preparedStatement = connection.prepareStatement(this.markPromoted);
// <editor-fold>
preparedStatement.setInt(1, fileRevision.getBranchId());
preparedStatement.setInt(2, fileRevision.getFileId());
// </editor-fold>
returnFlag = preparedStatement.execute();
connection.commit();
} catch (IllegalStateException e) {
LOGGER.error("FileRevisionDAOImpl: exception in markPromoted", e);
throw e;
} finally {
DAOHelper.closeDbResources(LOGGER, null, preparedStatement);
}
return returnFlag;
}
private List<FileRevision> prunePromotedRevisions(Map<Integer, FileRevision> fileRevisionMap) {
List<FileRevision> fileRevisionList = new ArrayList<>();
// Discard any revisions that have been promoted already.
Set<Integer> fileIdSet = fileRevisionMap.keySet();
// Prune any revisions already promoted...
Map<Integer, FileRevision> prunedRevisionMap = new TreeMap<>();
for (Integer fileId : fileIdSet) {
FileRevision fileRevision = fileRevisionMap.get(fileId);
if (!fileRevision.getPromotedFlag()) {
prunedRevisionMap.put(fileId, fileRevision);
}
}
fileIdSet = prunedRevisionMap.keySet();
for (Integer revisionId : fileIdSet) {
fileRevisionList.add(prunedRevisionMap.get(revisionId));
}
return fileRevisionList;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.run.v1;
/**
 * CloudRun request.
 *
 * <p>Generated base class for every Cloud Run API request; it carries the standard query
 * parameters ({@code $.xgafv}, {@code access_token}, {@code alt}, {@code callback},
 * {@code fields}, {@code key}, {@code oauth_token}, {@code prettyPrint}, {@code quotaUser},
 * {@code uploadType}, {@code upload_protocol}) shared by all requests. This file is
 * code-generated -- do not edit by hand.
 *
 * @since 1.3
 */
@SuppressWarnings("javadoc")
public abstract class CloudRunRequest<T> extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest<T> {
  /**
   * @param client Google client
   * @param method HTTP Method
   * @param uriTemplate URI template for the path relative to the base URL. If it starts with a "/"
   *        the base path from the base URL will be stripped out. The URI template can also be a
   *        full URL. URI template expansion is done using
   *        {@link com.google.api.client.http.UriTemplate#expand(String, String, Object, boolean)}
   * @param content A POJO that can be serialized into JSON or {@code null} for none
   * @param responseClass response class to parse into
   */
  public CloudRunRequest(
      CloudRun client, String method, String uriTemplate, Object content, Class<T> responseClass) {
    super(
        client,
        method,
        uriTemplate,
        content,
        responseClass);
  }
  /** V1 error format. */
  @com.google.api.client.util.Key("$.xgafv")
  private java.lang.String $Xgafv;
  /**
   * V1 error format.
   */
  public java.lang.String get$Xgafv() {
    return $Xgafv;
  }
  /** V1 error format. */
  public CloudRunRequest<T> set$Xgafv(java.lang.String $Xgafv) {
    this.$Xgafv = $Xgafv;
    return this;
  }
  /** OAuth access token. */
  @com.google.api.client.util.Key("access_token")
  private java.lang.String accessToken;
  /**
   * OAuth access token.
   */
  public java.lang.String getAccessToken() {
    return accessToken;
  }
  /** OAuth access token. */
  public CloudRunRequest<T> setAccessToken(java.lang.String accessToken) {
    this.accessToken = accessToken;
    return this;
  }
  /** Data format for response. */
  @com.google.api.client.util.Key
  private java.lang.String alt;
  /**
   * Data format for response. [default: json]
   */
  public java.lang.String getAlt() {
    return alt;
  }
  /** Data format for response. */
  public CloudRunRequest<T> setAlt(java.lang.String alt) {
    this.alt = alt;
    return this;
  }
  /** JSONP */
  @com.google.api.client.util.Key
  private java.lang.String callback;
  /**
   * JSONP
   */
  public java.lang.String getCallback() {
    return callback;
  }
  /** JSONP */
  public CloudRunRequest<T> setCallback(java.lang.String callback) {
    this.callback = callback;
    return this;
  }
  /** Selector specifying which fields to include in a partial response. */
  @com.google.api.client.util.Key
  private java.lang.String fields;
  /**
   * Selector specifying which fields to include in a partial response.
   */
  public java.lang.String getFields() {
    return fields;
  }
  /** Selector specifying which fields to include in a partial response. */
  public CloudRunRequest<T> setFields(java.lang.String fields) {
    this.fields = fields;
    return this;
  }
  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  @com.google.api.client.util.Key
  private java.lang.String key;
  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  public java.lang.String getKey() {
    return key;
  }
  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  public CloudRunRequest<T> setKey(java.lang.String key) {
    this.key = key;
    return this;
  }
  /** OAuth 2.0 token for the current user. */
  @com.google.api.client.util.Key("oauth_token")
  private java.lang.String oauthToken;
  /**
   * OAuth 2.0 token for the current user.
   */
  public java.lang.String getOauthToken() {
    return oauthToken;
  }
  /** OAuth 2.0 token for the current user. */
  public CloudRunRequest<T> setOauthToken(java.lang.String oauthToken) {
    this.oauthToken = oauthToken;
    return this;
  }
  /** Returns response with indentations and line breaks. */
  @com.google.api.client.util.Key
  private java.lang.Boolean prettyPrint;
  /**
   * Returns response with indentations and line breaks. [default: true]
   */
  public java.lang.Boolean getPrettyPrint() {
    return prettyPrint;
  }
  /** Returns response with indentations and line breaks. */
  public CloudRunRequest<T> setPrettyPrint(java.lang.Boolean prettyPrint) {
    this.prettyPrint = prettyPrint;
    return this;
  }
  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters.
   */
  @com.google.api.client.util.Key
  private java.lang.String quotaUser;
  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters.
   */
  public java.lang.String getQuotaUser() {
    return quotaUser;
  }
  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters.
   */
  public CloudRunRequest<T> setQuotaUser(java.lang.String quotaUser) {
    this.quotaUser = quotaUser;
    return this;
  }
  /** Legacy upload protocol for media (e.g. "media", "multipart"). */
  @com.google.api.client.util.Key
  private java.lang.String uploadType;
  /**
   * Legacy upload protocol for media (e.g. "media", "multipart").
   */
  public java.lang.String getUploadType() {
    return uploadType;
  }
  /** Legacy upload protocol for media (e.g. "media", "multipart"). */
  public CloudRunRequest<T> setUploadType(java.lang.String uploadType) {
    this.uploadType = uploadType;
    return this;
  }
  /** Upload protocol for media (e.g. "raw", "multipart"). */
  @com.google.api.client.util.Key("upload_protocol")
  private java.lang.String uploadProtocol;
  /**
   * Upload protocol for media (e.g. "raw", "multipart").
   */
  public java.lang.String getUploadProtocol() {
    return uploadProtocol;
  }
  /** Upload protocol for media (e.g. "raw", "multipart"). */
  public CloudRunRequest<T> setUploadProtocol(java.lang.String uploadProtocol) {
    this.uploadProtocol = uploadProtocol;
    return this;
  }
  /** Returns the typed client so callers don't have to down-cast. */
  @Override
  public final CloudRun getAbstractGoogleClient() {
    return (CloudRun) super.getAbstractGoogleClient();
  }
  /* The covariant overrides below narrow the superclass return type to CloudRunRequest<T>
     so that the builder-style setters can be chained. */
  @Override
  public CloudRunRequest<T> setDisableGZipContent(boolean disableGZipContent) {
    return (CloudRunRequest<T>) super.setDisableGZipContent(disableGZipContent);
  }
  @Override
  public CloudRunRequest<T> setRequestHeaders(com.google.api.client.http.HttpHeaders headers) {
    return (CloudRunRequest<T>) super.setRequestHeaders(headers);
  }
  @Override
  public CloudRunRequest<T> set(String parameterName, Object value) {
    return (CloudRunRequest<T>) super.set(parameterName, value);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.math;
import java.io.Serializable;
// Stub declaration of java.math.BigDecimal: constructors are empty shells and all
// arithmetic is declared native. Presumably this is a compile-time API stub (e.g. an
// SDK/robolectric-style stub jar), not a functional implementation -- TODO confirm.
// The commented-out inplaceRound(mc) calls mark where the real implementation rounds.
public class BigDecimal extends Number implements Comparable<BigDecimal>, Serializable {
    // Legacy int rounding-mode constants (pre-RoundingMode enum API).
    public static final int ROUND_UP = 0;
    public static final int ROUND_DOWN = 1;
    public static final int ROUND_CEILING = 2;
    public static final int ROUND_FLOOR = 3;
    public static final int ROUND_HALF_UP = 4;
    public static final int ROUND_HALF_DOWN = 5;
    public static final int ROUND_HALF_EVEN = 6;
    public static final int ROUND_UNNECESSARY = 7;
    // Common cached values, each constructed with scale 0.
    public static final BigDecimal ZERO = new BigDecimal(0, 0);
    public static final BigDecimal ONE = new BigDecimal(1, 0);
    public static final BigDecimal TEN = new BigDecimal(10, 0);
    // Private "small value" constructors; bodies intentionally empty in this stub.
    private BigDecimal(int smallValue, int scale) {
    }
    private BigDecimal(long smallValue, int scale) {
    }
    public BigDecimal(char[] in, int offset, int len) {
    }
    public BigDecimal(char[] in, int offset, int len, MathContext mc) {
        this(in, offset, len);
        //inplaceRound(mc);
    }
    public BigDecimal(char[] in) {
        this(in, 0, in.length);
    }
    public BigDecimal(char[] in, MathContext mc) {
        this(in, 0, in.length);
        //inplaceRound(mc);
    }
    public BigDecimal(String val) {
        this(val.toCharArray(), 0, val.length());
    }
    public BigDecimal(String val, MathContext mc) {
        this(val.toCharArray(), 0, val.length());
        //inplaceRound(mc);
    }
    public BigDecimal(double val) {
    }
    public BigDecimal(double val, MathContext mc) {
        this(val);
        //inplaceRound(mc);
    }
    public BigDecimal(BigInteger val) {
        this(val, 0);
    }
    public BigDecimal(BigInteger val, MathContext mc) {
        this(val);
        //inplaceRound(mc);
    }
    public BigDecimal(BigInteger unscaledVal, int scale) {
        // Null check is the only behavior this stub keeps from the real constructor.
        if (unscaledVal == null) {
            throw new NullPointerException("unscaledVal == null");
        }
        //this.scale = scale;
        //setUnscaledValue(unscaledVal);
    }
    public BigDecimal(BigInteger unscaledVal, int scale, MathContext mc) {
        this(unscaledVal, scale);
        //inplaceRound(mc);
    }
    public BigDecimal(int val) {
        this(val, 0);
    }
    public BigDecimal(int val, MathContext mc) {
        this(val, 0);
        //inplaceRound(mc);
    }
    public BigDecimal(long val) {
        this(val, 0);
    }
    public BigDecimal(long val, MathContext mc) {
        this(val);
        //inplaceRound(mc);
    }
    // Everything below mirrors the public java.math.BigDecimal API surface;
    // implementations are provided natively elsewhere.
    native public static BigDecimal valueOf(long unscaledVal, int scale);
    native public static BigDecimal valueOf(long unscaledVal);
    native public static BigDecimal valueOf(double val);
    native public BigDecimal add(BigDecimal augend);
    native public BigDecimal add(BigDecimal augend, MathContext mc);
    native public BigDecimal subtract(BigDecimal subtrahend);
    native public BigDecimal subtract(BigDecimal subtrahend, MathContext mc);
    native public BigDecimal multiply(BigDecimal multiplicand);
    native public BigDecimal multiply(BigDecimal multiplicand, MathContext mc);
    native public BigDecimal divide(BigDecimal divisor, int scale, int roundingMode);
    native public BigDecimal divide(BigDecimal divisor, int scale, RoundingMode roundingMode);
    native public BigDecimal divide(BigDecimal divisor, int roundingMode);
    native public BigDecimal divide(BigDecimal divisor, RoundingMode roundingMode);
    native public BigDecimal divide(BigDecimal divisor);
    native public BigDecimal divide(BigDecimal divisor, MathContext mc);
    native public BigDecimal divideToIntegralValue(BigDecimal divisor);
    native public BigDecimal divideToIntegralValue(BigDecimal divisor, MathContext mc);
    native public BigDecimal remainder(BigDecimal divisor);
    native public BigDecimal remainder(BigDecimal divisor, MathContext mc);
    native public BigDecimal[] divideAndRemainder(BigDecimal divisor);
    native public BigDecimal[] divideAndRemainder(BigDecimal divisor, MathContext mc);
    native public BigDecimal pow(int n);
    native public BigDecimal pow(int n, MathContext mc);
    native public BigDecimal abs();
    native public BigDecimal abs(MathContext mc);
    native public BigDecimal negate();
    native public BigDecimal negate(MathContext mc);
    native public BigDecimal plus();
    native public BigDecimal plus(MathContext mc);
    native public int signum();
    native public int scale();
    native public int precision();
    native public BigInteger unscaledValue();
    native public BigDecimal round(MathContext mc);
    native public BigDecimal setScale(int newScale, RoundingMode roundingMode);
    native public BigDecimal setScale(int newScale, int roundingMode);
    native public BigDecimal setScale(int newScale);
    native public BigDecimal movePointLeft(int n);
    native public BigDecimal movePointRight(int n);
    native public BigDecimal scaleByPowerOfTen(int n);
    native public BigDecimal stripTrailingZeros();
    native public int compareTo(BigDecimal val);
    native public boolean equals(Object x);
    native public BigDecimal min(BigDecimal val);
    native public BigDecimal max(BigDecimal val);
    native public int hashCode();
    native public String toString();
    native public String toEngineeringString();
    native public String toPlainString();
    native public BigInteger toBigInteger();
    native public BigInteger toBigIntegerExact();
    native public long longValue();
    native public long longValueExact();
    native public int intValue();
    native public int intValueExact();
    native public short shortValueExact();
    native public byte byteValueExact();
    native public float floatValue();
    native public double doubleValue();
    native public BigDecimal ulp();
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.notification.impl.actions;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.notification.*;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.MessageDialogBuilder;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.util.messages.MessageBus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Developer/test action that pops a modeless dialog in which notifications can be
 * described as plain text ("Key:value" lines, blank line between notifications) and
 * fired through the {@link Notifications#TOPIC} message bus.
 *
 * @author spleaner
 * @author Sergey.Malenkov
 */
public class NotificationTestAction extends AnAction implements DumbAware {
  public static final String TEST_GROUP_ID = "Test Notification";
  // Group used when a notification is marked "Sticky:true" (sticky balloon display type).
  private static final NotificationGroup TEST_STICKY_GROUP =
    new NotificationGroup("Test Sticky Notification", NotificationDisplayType.STICKY_BALLOON, true);
  // Group used when "Toolwindow:true"; renders in the TODO tool window.
  private static final NotificationGroup TEST_TOOLWINDOW_GROUP =
    NotificationGroup.toolWindowGroup("Test ToolWindow Notification", ToolWindowId.TODO_VIEW, true);
  // PropertiesComponent key under which the last-entered message text is persisted.
  private static final String MESSAGE_KEY = "NotificationTestAction_Message";
  @Override
  public void actionPerformed(@NotNull AnActionEvent event) {
    new NotificationDialog(event.getProject()).show();
  }
  /** Modeless dialog holding the free-form notification description text. */
  private static final class NotificationDialog extends DialogWrapper {
    private final JTextArea myMessage = new JTextArea(10, 50);
    private final MessageBus myMessageBus;
    private NotificationDialog(@Nullable Project project) {
      super(project, true, IdeModalityType.MODELESS);
      // Fall back to the application-level bus when invoked without a project.
      myMessageBus = project != null ? project.getMessageBus() : ApplicationManager.getApplication().getMessageBus();
      init();
      setOKButtonText("Notify");
      setTitle("Test Notification");
      // Restore the previously saved text, or seed a template of recognized keys.
      myMessage.setText(
        PropertiesComponent.getInstance().getValue(MESSAGE_KEY, "GroupID:\nTitle:\nSubtitle:\nContent:\nContent:\nActions:\nSticky:\n"));
    }
    @Nullable
    @Override
    protected String getDimensionServiceKey() {
      return "NotificationTestAction";
    }
    @Override
    protected JComponent createCenterPanel() {
      JPanel panel = new JPanel(new BorderLayout(10, 10));
      panel.add(BorderLayout.CENTER, new JScrollPane(myMessage));
      return panel;
    }
    @NotNull
    @Override
    protected Action[] createActions() {
      return new Action[]{getOKAction(), getCancelAction()};
    }
    @Override
    public void doCancelAction() {
      // Persist the text on cancel so it is restored next time the dialog opens.
      PropertiesComponent.getInstance().setValue(MESSAGE_KEY, myMessage.getText());
      super.doCancelAction();
    }
    @Override
    protected void doOKAction() {
      // NOTE(review): deliberately (?) does not call super.doOKAction(), so the dialog
      // stays open and more notifications can be fired -- confirm intended.
      newNotification(myMessage.getText());
    }
    /**
     * Parses the text area content into NotificationInfo records and publishes them.
     * Format: "Key:value" lines; a blank line ends the current notification; "//" lines
     * are comments; a "--" line stops parsing entirely.
     */
    private void newNotification(String text) {
      final List<NotificationInfo> notifications = new ArrayList<>();
      NotificationInfo notification = null;
      for (String line : StringUtil.splitByLines(text, false)) {
        // A blank line terminates the current notification; note the lack of
        // "continue" when notification is already null -- the empty line then falls
        // through to the prefix checks below, which are all false for "", so
        // consecutive blank lines are effectively ignored (NOTE(review): likely
        // intended, but an explicit continue would be clearer).
        if (line.length() == 0) {
          if (notification != null) {
            notification = null;
            continue;
          }
        }
        if (line.startsWith("//")) {
          continue;
        }
        if (line.startsWith("--")) {
          break;
        }
        // First content line after a blank (or at start) opens a new notification.
        if (notification == null) {
          notification = new NotificationInfo();
          notifications.add(notification);
        }
        if (line.startsWith("GroupID:")) {
          notification.setGroupId(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Title:")) {
          notification.setTitle(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Content:")) {
          String value = StringUtil.substringAfter(line, ":");
          if (value != null) {
            notification.addContent(value);
          }
        }
        else if (line.startsWith("Subtitle:")) {
          notification.setSubtitle(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Actions:")) {
          String value = StringUtil.substringAfter(line, ":");
          if (value != null) {
            notification.setActions(StringUtil.split(value, ","));
          }
        }
        else if (line.startsWith("Type:")) {
          notification.setType(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Sticky:")) {
          notification.setSticky("true".equals(StringUtil.substringAfter(line, ":")));
        }
        else if (line.startsWith("Listener:")) {
          notification.setAddListener("true".equals(StringUtil.substringAfter(line, ":")));
        }
        else if (line.startsWith("Toolwindow:")) {
          notification.setToolwindow("true".equals(StringUtil.substringAfter(line, ":")));
        }
      }
      // Publish off the EDT; syncPublisher delivers to all Notifications.TOPIC subscribers.
      ApplicationManager.getApplication().executeOnPooledThread(() -> {
        for (NotificationInfo info : notifications) {
          myMessageBus.syncPublisher(Notifications.TOPIC).notify(info.getNotification());
        }
      });
    }
  }
  /** One parsed notification description; lazily builds the platform Notification. */
  private static class NotificationInfo implements NotificationListener {
    private String myGroupId;
    private String myTitle;
    private String mySubtitle;
    private List<String> myContent;   // one entry per "Content:" line, joined with newlines
    private List<String> myActions;   // action texts / icon paths from the "Actions:" line
    private NotificationType myType = NotificationType.INFORMATION;
    private boolean mySticky;
    private boolean myAddListener;
    private boolean myToolwindow;
    private Notification myNotification;
    public Notification getNotification() {
      // Built on first use and cached so expire() in the callbacks hits the same instance.
      if (myNotification == null) {
        Icon icon = null;
        // GroupID doubles as an icon path here (findIcon returns null if it isn't one).
        if (!StringUtil.isEmpty(myGroupId)) {
          icon = IconLoader.findIcon(myGroupId);
        }
        String displayId = mySticky ? TEST_STICKY_GROUP.getDisplayId() : TEST_GROUP_ID;
        if (myToolwindow) {
          displayId = TEST_TOOLWINDOW_GROUP.getDisplayId();
        }
        String content = myContent == null ? "" : StringUtil.join(myContent, "\n");
        // The icon-less constructor has no subtitle parameter, so mySubtitle is only
        // used when an icon was resolved.
        if (icon == null) {
          myNotification =
            new Notification(displayId, StringUtil.notNullize(myTitle), content, myType, getListener());
        }
        else {
          myNotification = new Notification(displayId, icon, myTitle, mySubtitle, content, myType, getListener());
        }
        if (myActions != null) {
          for (String action : myActions) {
            myNotification.addAction(new MyAnAction(action));
          }
        }
      }
      return myNotification;
    }
    @Nullable
    private NotificationListener getListener() {
      return myAddListener ? this : null;
    }
    public void setGroupId(@Nullable String groupId) {
      myGroupId = groupId;
    }
    public void setTitle(@Nullable String title) {
      myTitle = title;
    }
    public void setSubtitle(@Nullable String subtitle) {
      mySubtitle = subtitle;
    }
    public void setAddListener(boolean addListener) {
      myAddListener = addListener;
    }
    public void addContent(@NotNull String content) {
      if (myContent == null) {
        myContent = new ArrayList<>();
      }
      myContent.add(content);
    }
    public void setActions(@NotNull List<String> actions) {
      myActions = actions;
    }
    public void setSticky(boolean sticky) {
      mySticky = sticky;
    }
    public void setToolwindow(boolean toolwindow) {
      myToolwindow = toolwindow;
    }
    public void setType(@Nullable String type) {
      // Unrecognized values silently keep the INFORMATION default.
      if ("info".equals(type)) {
        myType = NotificationType.INFORMATION;
      }
      else if ("error".equals(type)) {
        myType = NotificationType.ERROR;
      }
      else if ("warn".equals(type)) {
        myType = NotificationType.WARNING;
      }
    }
    @Override
    public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
      // Lets the tester interactively expire the notification from a hyperlink click.
      if (MessageDialogBuilder.yesNo("Notification Listener", event.getDescription() + " Expire?").isYes()) {
        myNotification.expire();
        myNotification = null;
      }
    }
    /** Notification action stub: text ending in ".png" is treated as an icon path. */
    private class MyAnAction extends AnAction {
      private MyAnAction(@Nullable String text) {
        if (text != null) {
          if (text.endsWith(".png")) {
            Icon icon = IconLoader.findIcon(text);
            if (icon != null) {
              getTemplatePresentation().setIcon(icon);
              return;
            }
          }
          getTemplatePresentation().setText(text);
        }
      }
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        Notification.get(e);
        if (MessageDialogBuilder.yesNo("AnAction", getTemplatePresentation().getText() + " Expire?").isYes()) {
          myNotification.expire();
          myNotification = null;
        }
      }
    }
  }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.react.views.view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.DashPathEffect;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathEffect;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Region;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.view.View;
import androidx.annotation.Nullable;
import com.facebook.react.common.annotations.VisibleForTesting;
import com.facebook.react.modules.i18nmanager.I18nUtil;
import com.facebook.react.uimanager.FloatUtil;
import com.facebook.react.uimanager.Spacing;
import com.facebook.yoga.YogaConstants;
import java.util.Arrays;
import java.util.Locale;
/**
* A subclass of {@link Drawable} used for background of {@link ReactViewGroup}. It supports drawing
* background color and borders (including rounded borders) by providing a react friendly API
* (setter for each of those properties).
*
* <p>The implementation tries to allocate as few objects as possible depending on which properties
* are set. E.g. for views with rounded background/borders we allocate {@code
* mInnerClipPathForBorderRadius} and {@code mInnerClipTempRectForBorderRadius}. In case when view
* have a rectangular borders we allocate {@code mBorderWidthResult} and similar. When only
* background color is set we won't allocate any extra/unnecessary objects.
*/
public class ReactViewBackgroundDrawable extends Drawable {
  private static final int DEFAULT_BORDER_COLOR = Color.BLACK;
  // RGB and alpha halves of the default border color, stored separately because
  // border color is tracked as two Spacing objects (see mBorderRGB / mBorderAlpha).
  private static final int DEFAULT_BORDER_RGB = 0x00FFFFFF & DEFAULT_BORDER_COLOR;
  private static final int DEFAULT_BORDER_ALPHA = (0xFF000000 & DEFAULT_BORDER_COLOR) >>> 24;
  // ~0 == 0xFFFFFFFF, all bits set to 1.
  private static final int ALL_BITS_SET = ~0;
  // 0 == 0x00000000, all bits set to 0.
  private static final int ALL_BITS_UNSET = 0;
  /** Supported border styles; maps each style to the PathEffect used when stroking. */
  private enum BorderStyle {
    SOLID,
    DASHED,
    DOTTED;
    // Returns null for SOLID (plain stroke); dash/dot segment lengths scale with the
    // border width. Note: throws NPE if style is null (switch on null).
    public static @Nullable PathEffect getPathEffect(BorderStyle style, float borderWidth) {
      switch (style) {
        case SOLID:
          return null;
        case DASHED:
          return new DashPathEffect(
              new float[] {borderWidth * 3, borderWidth * 3, borderWidth * 3, borderWidth * 3}, 0);
        case DOTTED:
          return new DashPathEffect(
              new float[] {borderWidth, borderWidth, borderWidth, borderWidth}, 0);
        default:
          return null;
      }
    }
  };
  /* Value at Spacing.ALL index used for rounded borders, whole array used by rectangular borders */
  private @Nullable Spacing mBorderWidth;
  private @Nullable Spacing mBorderRGB;
  private @Nullable Spacing mBorderAlpha;
  private @Nullable BorderStyle mBorderStyle;
  /* Used for rounded border and rounded background */
  private @Nullable PathEffect mPathEffectForBorderStyle;
  // Paths/rects below are allocated lazily, only when rounded borders are in use.
  private @Nullable Path mInnerClipPathForBorderRadius;
  private @Nullable Path mOuterClipPathForBorderRadius;
  private @Nullable Path mPathForBorderRadiusOutline;
  private @Nullable Path mPathForBorder;
  private @Nullable Path mCenterDrawPath;
  private @Nullable RectF mInnerClipTempRectForBorderRadius;
  private @Nullable RectF mOuterClipTempRectForBorderRadius;
  private @Nullable RectF mTempRectForBorderRadiusOutline;
  private @Nullable RectF mTempRectForCenterDrawPath;
  private @Nullable PointF mInnerTopLeftCorner;
  private @Nullable PointF mInnerTopRightCorner;
  private @Nullable PointF mInnerBottomRightCorner;
  private @Nullable PointF mInnerBottomLeftCorner;
  // Set whenever geometry-affecting state changes; consumed by updatePath().
  private boolean mNeedUpdatePathForBorderRadius = false;
  // Uniform radius; YogaConstants.UNDEFINED means "not set".
  private float mBorderRadius = YogaConstants.UNDEFINED;
  /* Used by all types of background and for drawing borders */
  private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
  private int mColor = Color.TRANSPARENT;
  private int mAlpha = 255;
  // Per-corner radii; the BorderRadiusLocation ordinal is the index into this array.
  private @Nullable float[] mBorderCornerRadii;
  private final Context mContext;
  private int mLayoutDirection;
  /** Corner identifiers; ordinal() indexes mBorderCornerRadii. */
  public enum BorderRadiusLocation {
    TOP_LEFT,
    TOP_RIGHT,
    BOTTOM_RIGHT,
    BOTTOM_LEFT,
    TOP_START,
    TOP_END,
    BOTTOM_START,
    BOTTOM_END
  }
  public ReactViewBackgroundDrawable(Context context) {
    mContext = context;
  }
@Override
public void draw(Canvas canvas) {
updatePathEffect();
if (!hasRoundedBorders()) {
drawRectangularBackgroundWithBorders(canvas);
} else {
drawRoundedBackgroundWithBorders(canvas);
}
}
public boolean hasRoundedBorders() {
if (!YogaConstants.isUndefined(mBorderRadius) && mBorderRadius > 0) {
return true;
}
if (mBorderCornerRadii != null) {
for (final float borderRadii : mBorderCornerRadii) {
if (!YogaConstants.isUndefined(borderRadii) && borderRadii > 0) {
return true;
}
}
}
return false;
}
  @Override
  protected void onBoundsChange(Rect bounds) {
    super.onBoundsChange(bounds);
    // New bounds invalidate the cached clip/outline paths; rebuilt lazily in updatePath().
    mNeedUpdatePathForBorderRadius = true;
  }
@Override
public void setAlpha(int alpha) {
if (alpha != mAlpha) {
mAlpha = alpha;
invalidateSelf();
}
}
  @Override
  public int getAlpha() {
    // Drawable-level alpha set via setAlpha(); combined with mColor when painting.
    return mAlpha;
  }
  @Override
  public void setColorFilter(ColorFilter cf) {
    // do nothing -- color filters are intentionally unsupported by this drawable
  }
@Override
public int getOpacity() {
return ColorUtil.getOpacityFromColor(ColorUtil.multiplyColorAlpha(mColor, mAlpha));
}
/* Android's elevation implementation requires this to be implemented to know where to draw the shadow. */
@Override
public void getOutline(Outline outline) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
super.getOutline(outline);
return;
}
if ((!YogaConstants.isUndefined(mBorderRadius) && mBorderRadius > 0)
|| mBorderCornerRadii != null) {
updatePath();
outline.setConvexPath(mPathForBorderRadiusOutline);
} else {
outline.setRect(getBounds());
}
}
  /**
   * Sets the border width for one edge (or Spacing.ALL); lazily creates the backing
   * Spacing and only invalidates when the value actually changes.
   */
  public void setBorderWidth(int position, float width) {
    if (mBorderWidth == null) {
      mBorderWidth = new Spacing();
    }
    if (!FloatUtil.floatsEqual(mBorderWidth.getRaw(position), width)) {
      mBorderWidth.set(position, width);
      // Intentional fall-through: any of the listed edge positions affects the
      // rounded-border geometry, so the cached paths must be rebuilt.
      switch (position) {
        case Spacing.ALL:
        case Spacing.LEFT:
        case Spacing.BOTTOM:
        case Spacing.RIGHT:
        case Spacing.TOP:
        case Spacing.START:
        case Spacing.END:
          mNeedUpdatePathForBorderRadius = true;
      }
      invalidateSelf();
    }
  }
public void setBorderColor(int position, float rgb, float alpha) {
this.setBorderRGB(position, rgb);
this.setBorderAlpha(position, alpha);
}
private void setBorderRGB(int position, float rgb) {
// set RGB component
if (mBorderRGB == null) {
mBorderRGB = new Spacing(DEFAULT_BORDER_RGB);
}
if (!FloatUtil.floatsEqual(mBorderRGB.getRaw(position), rgb)) {
mBorderRGB.set(position, rgb);
invalidateSelf();
}
}
private void setBorderAlpha(int position, float alpha) {
// set Alpha component
if (mBorderAlpha == null) {
mBorderAlpha = new Spacing(DEFAULT_BORDER_ALPHA);
}
if (!FloatUtil.floatsEqual(mBorderAlpha.getRaw(position), alpha)) {
mBorderAlpha.set(position, alpha);
invalidateSelf();
}
}
public void setBorderStyle(@Nullable String style) {
BorderStyle borderStyle =
style == null ? null : BorderStyle.valueOf(style.toUpperCase(Locale.US));
if (mBorderStyle != borderStyle) {
mBorderStyle = borderStyle;
mNeedUpdatePathForBorderRadius = true;
invalidateSelf();
}
}
public void setRadius(float radius) {
if (!FloatUtil.floatsEqual(mBorderRadius, radius)) {
mBorderRadius = radius;
mNeedUpdatePathForBorderRadius = true;
invalidateSelf();
}
}
public void setRadius(float radius, int position) {
if (mBorderCornerRadii == null) {
mBorderCornerRadii = new float[8];
Arrays.fill(mBorderCornerRadii, YogaConstants.UNDEFINED);
}
if (!FloatUtil.floatsEqual(mBorderCornerRadii[position], radius)) {
mBorderCornerRadii[position] = radius;
mNeedUpdatePathForBorderRadius = true;
invalidateSelf();
}
}
public float getFullBorderRadius() {
return YogaConstants.isUndefined(mBorderRadius) ? 0 : mBorderRadius;
}
  /** Returns the per-corner radius, or YogaConstants.UNDEFINED when not set. */
  public float getBorderRadius(final BorderRadiusLocation location) {
    return getBorderRadiusOrDefaultTo(YogaConstants.UNDEFINED, location);
  }
public float getBorderRadiusOrDefaultTo(
final float defaultValue, final BorderRadiusLocation location) {
if (mBorderCornerRadii == null) {
return defaultValue;
}
final float radius = mBorderCornerRadii[location.ordinal()];
if (YogaConstants.isUndefined(radius)) {
return defaultValue;
}
return radius;
}
  /** Sets the background fill color and redraws unconditionally. */
  public void setColor(int color) {
    mColor = color;
    invalidateSelf();
  }
  /** Similar to Drawable.getLayoutDirection, but available in APIs < 23. */
  public int getResolvedLayoutDirection() {
    return mLayoutDirection;
  }
/** Similar to Drawable.setLayoutDirection, but available in APIs < 23. */
public boolean setResolvedLayoutDirection(int layoutDirection) {
if (mLayoutDirection != layoutDirection) {
mLayoutDirection = layoutDirection;
return onResolvedLayoutDirectionChanged(layoutDirection);
}
return false;
}
  /** Similar to Drawable.onLayoutDirectionChanged, but available in APIs < 23. */
  public boolean onResolvedLayoutDirectionChanged(int layoutDirection) {
    // Default implementation: layout-direction changes do not require a redraw.
    return false;
  }
/** Returns the background fill color (ARGB int). Exposed for tests only. */
@VisibleForTesting
public int getColor() {
  return mColor;
}
/**
 * Draws the background and borders for the rounded-corner case: fills the inner
 * (content) rounded rect with the background color, then draws the border either
 * as a single stroked center path (uniform width/color fast path) or as four
 * clipped quadrilaterals (uneven widths or colors). Relies on the paths and
 * corner points prepared by updatePath().
 */
private void drawRoundedBackgroundWithBorders(Canvas canvas) {
  updatePath();
  canvas.save();
  // Background fill, pre-multiplied by the drawable's alpha.
  int useColor = ColorUtil.multiplyColorAlpha(mColor, mAlpha);
  if (Color.alpha(useColor) != 0) { // color is not transparent
    mPaint.setColor(useColor);
    mPaint.setStyle(Paint.Style.FILL);
    canvas.drawPath(mInnerClipPathForBorderRadius, mPaint);
  }
  final RectF borderWidth = getDirectionAwareBorderInsets();
  int colorLeft = getBorderColor(Spacing.LEFT);
  int colorTop = getBorderColor(Spacing.TOP);
  int colorRight = getBorderColor(Spacing.RIGHT);
  int colorBottom = getBorderColor(Spacing.BOTTOM);
  if (borderWidth.top > 0
      || borderWidth.bottom > 0
      || borderWidth.left > 0
      || borderWidth.right > 0) {
    // If it's a full and even border draw inner rect path with stroke
    final float fullBorderWidth = getFullBorderWidth();
    int borderColor = getBorderColor(Spacing.ALL);
    if (borderWidth.top == fullBorderWidth
        && borderWidth.bottom == fullBorderWidth
        && borderWidth.left == fullBorderWidth
        && borderWidth.right == fullBorderWidth
        && colorLeft == borderColor
        && colorTop == borderColor
        && colorRight == borderColor
        && colorBottom == borderColor) {
      if (fullBorderWidth > 0) {
        mPaint.setColor(ColorUtil.multiplyColorAlpha(borderColor, mAlpha));
        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setStrokeWidth(fullBorderWidth);
        // Stroke along the path centered between the outer and inner edges.
        canvas.drawPath(mCenterDrawPath, mPaint);
      }
    }
    // In the case of uneven border widths/colors draw quadrilateral in each direction
    else {
      mPaint.setStyle(Paint.Style.FILL);
      // Draw border
      // Clip to the ring between the outer and inner rounded rects so the
      // quadrilaterals below are trimmed to the border area.
      canvas.clipPath(mOuterClipPathForBorderRadius, Region.Op.INTERSECT);
      canvas.clipPath(mInnerClipPathForBorderRadius, Region.Op.DIFFERENCE);
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        // Resolve logical start/end border colors into physical left/right,
        // honoring RTL and the per-app left/right-swap setting.
        final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
        int colorStart = getBorderColor(Spacing.START);
        int colorEnd = getBorderColor(Spacing.END);
        if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
          // Swap mode: start/end always win, falling back to left/right when unset.
          if (!isBorderColorDefined(Spacing.START)) {
            colorStart = colorLeft;
          }
          if (!isBorderColorDefined(Spacing.END)) {
            colorEnd = colorRight;
          }
          final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
          final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
          colorLeft = directionAwareColorLeft;
          colorRight = directionAwareColorRight;
        } else {
          // Non-swap mode: start/end only override left/right when explicitly defined.
          final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
          final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
          final boolean isColorStartDefined = isBorderColorDefined(Spacing.START);
          final boolean isColorEndDefined = isBorderColorDefined(Spacing.END);
          final boolean isDirectionAwareColorLeftDefined =
              isRTL ? isColorEndDefined : isColorStartDefined;
          final boolean isDirectionAwareColorRightDefined =
              isRTL ? isColorStartDefined : isColorEndDefined;
          if (isDirectionAwareColorLeftDefined) {
            colorLeft = directionAwareColorLeft;
          }
          if (isDirectionAwareColorRightDefined) {
            colorRight = directionAwareColorRight;
          }
        }
      }
      final float left = mOuterClipTempRectForBorderRadius.left;
      final float right = mOuterClipTempRectForBorderRadius.right;
      final float top = mOuterClipTempRectForBorderRadius.top;
      final float bottom = mOuterClipTempRectForBorderRadius.bottom;
      // Each side is a quadrilateral from two outer corners to the two inner-corner
      // points computed in updatePath(); the clip above trims it to the border ring.
      if (borderWidth.left > 0) {
        final float x1 = left;
        final float y1 = top;
        final float x2 = mInnerTopLeftCorner.x;
        final float y2 = mInnerTopLeftCorner.y;
        final float x3 = mInnerBottomLeftCorner.x;
        final float y3 = mInnerBottomLeftCorner.y;
        final float x4 = left;
        final float y4 = bottom;
        drawQuadrilateral(canvas, colorLeft, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      if (borderWidth.top > 0) {
        final float x1 = left;
        final float y1 = top;
        final float x2 = mInnerTopLeftCorner.x;
        final float y2 = mInnerTopLeftCorner.y;
        final float x3 = mInnerTopRightCorner.x;
        final float y3 = mInnerTopRightCorner.y;
        final float x4 = right;
        final float y4 = top;
        drawQuadrilateral(canvas, colorTop, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      if (borderWidth.right > 0) {
        final float x1 = right;
        final float y1 = top;
        final float x2 = mInnerTopRightCorner.x;
        final float y2 = mInnerTopRightCorner.y;
        final float x3 = mInnerBottomRightCorner.x;
        final float y3 = mInnerBottomRightCorner.y;
        final float x4 = right;
        final float y4 = bottom;
        drawQuadrilateral(canvas, colorRight, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      if (borderWidth.bottom > 0) {
        final float x1 = left;
        final float y1 = bottom;
        final float x2 = mInnerBottomLeftCorner.x;
        final float y2 = mInnerBottomLeftCorner.y;
        final float x3 = mInnerBottomRightCorner.x;
        final float y3 = mInnerBottomRightCorner.y;
        final float x4 = right;
        final float y4 = bottom;
        drawQuadrilateral(canvas, colorBottom, x1, y1, x2, y2, x3, y3, x4, y4);
      }
    }
  }
  canvas.restore();
}
/**
 * Lazily (re)builds every cached Path and RectF used for rounded-rect rendering:
 * the inner/outer clip paths, the outline path, the center stroke path, and the
 * four inner-corner intersection points used by the quadrilateral border drawing.
 * No-op unless a setter flagged mNeedUpdatePathForBorderRadius.
 */
private void updatePath() {
  if (!mNeedUpdatePathForBorderRadius) {
    return;
  }
  mNeedUpdatePathForBorderRadius = false;
  // Allocate the scratch path/rect objects lazily on first use.
  if (mInnerClipPathForBorderRadius == null) {
    mInnerClipPathForBorderRadius = new Path();
  }
  if (mOuterClipPathForBorderRadius == null) {
    mOuterClipPathForBorderRadius = new Path();
  }
  if (mPathForBorderRadiusOutline == null) {
    mPathForBorderRadiusOutline = new Path();
  }
  if (mCenterDrawPath == null) {
    mCenterDrawPath = new Path();
  }
  if (mInnerClipTempRectForBorderRadius == null) {
    mInnerClipTempRectForBorderRadius = new RectF();
  }
  if (mOuterClipTempRectForBorderRadius == null) {
    mOuterClipTempRectForBorderRadius = new RectF();
  }
  if (mTempRectForBorderRadiusOutline == null) {
    mTempRectForBorderRadiusOutline = new RectF();
  }
  if (mTempRectForCenterDrawPath == null) {
    mTempRectForCenterDrawPath = new RectF();
  }
  mInnerClipPathForBorderRadius.reset();
  mOuterClipPathForBorderRadius.reset();
  mPathForBorderRadiusOutline.reset();
  mCenterDrawPath.reset();
  // Start all rects at the drawable bounds, then inset as needed below.
  mInnerClipTempRectForBorderRadius.set(getBounds());
  mOuterClipTempRectForBorderRadius.set(getBounds());
  mTempRectForBorderRadiusOutline.set(getBounds());
  mTempRectForCenterDrawPath.set(getBounds());
  final RectF borderWidth = getDirectionAwareBorderInsets();
  // Inner rect = bounds inset by the full border widths (the content area).
  mInnerClipTempRectForBorderRadius.top += borderWidth.top;
  mInnerClipTempRectForBorderRadius.bottom -= borderWidth.bottom;
  mInnerClipTempRectForBorderRadius.left += borderWidth.left;
  mInnerClipTempRectForBorderRadius.right -= borderWidth.right;
  // Center rect = bounds inset by half the border widths (stroke centerline).
  mTempRectForCenterDrawPath.top += borderWidth.top * 0.5f;
  mTempRectForCenterDrawPath.bottom -= borderWidth.bottom * 0.5f;
  mTempRectForCenterDrawPath.left += borderWidth.left * 0.5f;
  mTempRectForCenterDrawPath.right -= borderWidth.right * 0.5f;
  final float borderRadius = getFullBorderRadius();
  // Per-corner radii default to the uniform radius when unset.
  float topLeftRadius = getBorderRadiusOrDefaultTo(borderRadius, BorderRadiusLocation.TOP_LEFT);
  float topRightRadius = getBorderRadiusOrDefaultTo(borderRadius, BorderRadiusLocation.TOP_RIGHT);
  float bottomLeftRadius =
      getBorderRadiusOrDefaultTo(borderRadius, BorderRadiusLocation.BOTTOM_LEFT);
  float bottomRightRadius =
      getBorderRadiusOrDefaultTo(borderRadius, BorderRadiusLocation.BOTTOM_RIGHT);
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
    // Resolve logical start/end corner radii into physical left/right corners,
    // honoring RTL and the per-app left/right-swap setting (mirrors the color
    // resolution in the draw methods).
    final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
    float topStartRadius = getBorderRadius(BorderRadiusLocation.TOP_START);
    float topEndRadius = getBorderRadius(BorderRadiusLocation.TOP_END);
    float bottomStartRadius = getBorderRadius(BorderRadiusLocation.BOTTOM_START);
    float bottomEndRadius = getBorderRadius(BorderRadiusLocation.BOTTOM_END);
    if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
      // Swap mode: start/end always win, falling back to left/right when unset.
      if (YogaConstants.isUndefined(topStartRadius)) {
        topStartRadius = topLeftRadius;
      }
      if (YogaConstants.isUndefined(topEndRadius)) {
        topEndRadius = topRightRadius;
      }
      if (YogaConstants.isUndefined(bottomStartRadius)) {
        bottomStartRadius = bottomLeftRadius;
      }
      if (YogaConstants.isUndefined(bottomEndRadius)) {
        bottomEndRadius = bottomRightRadius;
      }
      final float directionAwareTopLeftRadius = isRTL ? topEndRadius : topStartRadius;
      final float directionAwareTopRightRadius = isRTL ? topStartRadius : topEndRadius;
      final float directionAwareBottomLeftRadius = isRTL ? bottomEndRadius : bottomStartRadius;
      final float directionAwareBottomRightRadius = isRTL ? bottomStartRadius : bottomEndRadius;
      topLeftRadius = directionAwareTopLeftRadius;
      topRightRadius = directionAwareTopRightRadius;
      bottomLeftRadius = directionAwareBottomLeftRadius;
      bottomRightRadius = directionAwareBottomRightRadius;
    } else {
      // Non-swap mode: start/end only override left/right when explicitly defined.
      final float directionAwareTopLeftRadius = isRTL ? topEndRadius : topStartRadius;
      final float directionAwareTopRightRadius = isRTL ? topStartRadius : topEndRadius;
      final float directionAwareBottomLeftRadius = isRTL ? bottomEndRadius : bottomStartRadius;
      final float directionAwareBottomRightRadius = isRTL ? bottomStartRadius : bottomEndRadius;
      if (!YogaConstants.isUndefined(directionAwareTopLeftRadius)) {
        topLeftRadius = directionAwareTopLeftRadius;
      }
      if (!YogaConstants.isUndefined(directionAwareTopRightRadius)) {
        topRightRadius = directionAwareTopRightRadius;
      }
      if (!YogaConstants.isUndefined(directionAwareBottomLeftRadius)) {
        bottomLeftRadius = directionAwareBottomLeftRadius;
      }
      if (!YogaConstants.isUndefined(directionAwareBottomRightRadius)) {
        bottomRightRadius = directionAwareBottomRightRadius;
      }
    }
  }
  // Inner radii shrink by the adjacent border widths, clamped at 0
  // (per CSS: inner corner curve = outer radius - border width).
  final float innerTopLeftRadiusX = Math.max(topLeftRadius - borderWidth.left, 0);
  final float innerTopLeftRadiusY = Math.max(topLeftRadius - borderWidth.top, 0);
  final float innerTopRightRadiusX = Math.max(topRightRadius - borderWidth.right, 0);
  final float innerTopRightRadiusY = Math.max(topRightRadius - borderWidth.top, 0);
  final float innerBottomRightRadiusX = Math.max(bottomRightRadius - borderWidth.right, 0);
  final float innerBottomRightRadiusY = Math.max(bottomRightRadius - borderWidth.bottom, 0);
  final float innerBottomLeftRadiusX = Math.max(bottomLeftRadius - borderWidth.left, 0);
  final float innerBottomLeftRadiusY = Math.max(bottomLeftRadius - borderWidth.bottom, 0);
  mInnerClipPathForBorderRadius.addRoundRect(
      mInnerClipTempRectForBorderRadius,
      new float[] {
        innerTopLeftRadiusX,
        innerTopLeftRadiusY,
        innerTopRightRadiusX,
        innerTopRightRadiusY,
        innerBottomRightRadiusX,
        innerBottomRightRadiusY,
        innerBottomLeftRadiusX,
        innerBottomLeftRadiusY,
      },
      Path.Direction.CW);
  mOuterClipPathForBorderRadius.addRoundRect(
      mOuterClipTempRectForBorderRadius,
      new float[] {
        topLeftRadius,
        topLeftRadius,
        topRightRadius,
        topRightRadius,
        bottomRightRadius,
        bottomRightRadius,
        bottomLeftRadius,
        bottomLeftRadius
      },
      Path.Direction.CW);
  // Outline path grows by half the border width so shadows/outlines hug the
  // outer edge of the stroke.
  float extraRadiusForOutline = 0;
  if (mBorderWidth != null) {
    extraRadiusForOutline = mBorderWidth.get(Spacing.ALL) / 2f;
  }
  mPathForBorderRadiusOutline.addRoundRect(
      mTempRectForBorderRadiusOutline,
      new float[] {
        topLeftRadius + extraRadiusForOutline,
        topLeftRadius + extraRadiusForOutline,
        topRightRadius + extraRadiusForOutline,
        topRightRadius + extraRadiusForOutline,
        bottomRightRadius + extraRadiusForOutline,
        bottomRightRadius + extraRadiusForOutline,
        bottomLeftRadius + extraRadiusForOutline,
        bottomLeftRadius + extraRadiusForOutline
      },
      Path.Direction.CW);
  // Center path radii: radius minus half the adjacent border width.
  // NOTE(review): the fallback term (radius / borderWidth) looks dimensionally
  // odd for a radius — confirm against the intended clamping behavior.
  mCenterDrawPath.addRoundRect(
      mTempRectForCenterDrawPath,
      new float[] {
        Math.max(topLeftRadius - borderWidth.left * 0.5f,
            (borderWidth.left > 0.0f) ? (topLeftRadius / borderWidth.left) : 0.0f),
        Math.max(topLeftRadius - borderWidth.top * 0.5f,
            (borderWidth.top > 0.0f) ? (topLeftRadius / borderWidth.top) : 0.0f),
        Math.max(topRightRadius - borderWidth.right * 0.5f,
            (borderWidth.right > 0.0f) ? (topRightRadius / borderWidth.right) : 0.0f),
        Math.max(topRightRadius - borderWidth.top * 0.5f,
            (borderWidth.top > 0.0f) ? (topRightRadius / borderWidth.top) : 0.0f),
        Math.max(bottomRightRadius - borderWidth.right * 0.5f,
            (borderWidth.right > 0.0f) ? (bottomRightRadius / borderWidth.right) : 0.0f),
        Math.max(bottomRightRadius - borderWidth.bottom * 0.5f,
            (borderWidth.bottom > 0.0f) ? (bottomRightRadius / borderWidth.bottom) : 0.0f),
        Math.max(bottomLeftRadius - borderWidth.left * 0.5f,
            (borderWidth.left > 0.0f) ? (bottomLeftRadius / borderWidth.left) : 0.0f),
        Math.max(bottomLeftRadius - borderWidth.bottom * 0.5f,
            (borderWidth.bottom > 0.0f) ? (bottomLeftRadius / borderWidth.bottom) : 0.0f)
      },
      Path.Direction.CW);
  /**
   * Rounded Multi-Colored Border Algorithm:
   *
   * <p>Let O (for outer) = (top, left, bottom, right) be the rectangle that represents the size
   * and position of a view V. Since the box-sizing of all React Native views is border-box, any
   * border of V will render inside O.
   *
   * <p>Let BorderWidth = (borderTop, borderLeft, borderBottom, borderRight).
   *
   * <p>Let I (for inner) = O - BorderWidth.
   *
   * <p>Then, remembering that O and I are rectangles and that I is inside O, O - I gives us the
   * border of V. Therefore, we can use canvas.clipPath to draw V's border.
   *
   * <p>canvas.clipPath(O, Region.OP.INTERSECT);
   *
   * <p>canvas.clipPath(I, Region.OP.DIFFERENCE);
   *
   * <p>canvas.drawRect(O, paint);
   *
   * <p>This lets us draw non-rounded single-color borders.
   *
   * <p>To extend this algorithm to rounded single-color borders, we:
   *
   * <p>1. Curve the corners of O by the (border radii of V) using Path#addRoundRect.
   *
   * <p>2. Curve the corners of I by (border radii of V - border widths of V) using
   * Path#addRoundRect.
   *
   * <p>Let O' = curve(O, border radii of V).
   *
   * <p>Let I' = curve(I, border radii of V - border widths of V)
   *
   * <p>The rationale behind this decision is the (first sentence of the) following section in the
   * CSS Backgrounds and Borders Module Level 3:
   * https://www.w3.org/TR/css3-background/#the-border-radius.
   *
   * <p>After both O and I have been curved, we can execute the following lines once again to
   * render curved single-color borders:
   *
   * <p>canvas.clipPath(O, Region.OP.INTERSECT);
   *
   * <p>canvas.clipPath(I, Region.OP.DIFFERENCE);
   *
   * <p>canvas.drawRect(O, paint);
   *
   * <p>To extend this algorithm to rendering multi-colored rounded borders, we render each side
   * of the border as its own quadrilateral. Suppose that we were handling the case where all the
   * border radii are 0. Then, the four quadrilaterals would be:
   *
   * <p>Left: (O.left, O.top), (I.left, I.top), (I.left, I.bottom), (O.left, O.bottom)
   *
   * <p>Top: (O.left, O.top), (I.left, I.top), (I.right, I.top), (O.right, O.top)
   *
   * <p>Right: (O.right, O.top), (I.right, I.top), (I.right, I.bottom), (O.right, O.bottom)
   *
   * <p>Bottom: (O.right, O.bottom), (I.right, I.bottom), (I.left, I.bottom), (O.left, O.bottom)
   *
   * <p>Now, lets consider what happens when we render a rounded border (radii != 0). For the sake
   * of simplicity, let's focus on the top edge of the Left border:
   *
   * <p>Let borderTopLeftRadius = 5. Let borderLeftWidth = 1. Let borderTopWidth = 2.
   *
   * <p>We know that O is curved by the ellipse E_O (a = 5, b = 5). We know that I is curved by
   * the ellipse E_I (a = 5 - 1, b = 5 - 2).
   *
   * <p>Since we have clipping, it should be safe to set the top-left point of the Left
   * quadrilateral's top edge to (O.left, O.top).
   *
   * <p>But, what should the top-right point be?
   *
   * <p>The fact that the border is curved shouldn't change the slope (nor the position) of the
   * line connecting the top-left and top-right points of the Left quadrilateral's top edge.
   * Therefore, The top-right point should lie somewhere on the line L = (1 - a) * (O.left, O.top)
   * + a * (I.left, I.top).
   *
   * <p>a != 0, because then the top-left and top-right points would be the same and
   * borderLeftWidth = 1. a != 1, because then the top-right point would not touch an edge of the
   * ellipse E_I. We want the top-right point to touch an edge of the inner ellipse because the
   * border curves with E_I on the top-left corner of V.
   *
   * <p>Therefore, it must be the case that a > 1. Two natural locations of the top-right point
   * exist: 1. The first intersection of L with E_I. 2. The second intersection of L with E_I.
   *
   * <p>We choose the top-right point of the top edge of the Left quadrilateral to be an arbitrary
   * intersection of L with E_I.
   */
  if (mInnerTopLeftCorner == null) {
    mInnerTopLeftCorner = new PointF();
  }
  /** Compute mInnerTopLeftCorner */
  mInnerTopLeftCorner.x = mInnerClipTempRectForBorderRadius.left;
  mInnerTopLeftCorner.y = mInnerClipTempRectForBorderRadius.top;
  getEllipseIntersectionWithLine(
      // Ellipse Bounds
      mInnerClipTempRectForBorderRadius.left,
      mInnerClipTempRectForBorderRadius.top,
      mInnerClipTempRectForBorderRadius.left + 2 * innerTopLeftRadiusX,
      mInnerClipTempRectForBorderRadius.top + 2 * innerTopLeftRadiusY,
      // Line Start
      mOuterClipTempRectForBorderRadius.left,
      mOuterClipTempRectForBorderRadius.top,
      // Line End
      mInnerClipTempRectForBorderRadius.left,
      mInnerClipTempRectForBorderRadius.top,
      // Result
      mInnerTopLeftCorner);
  /** Compute mInnerBottomLeftCorner */
  if (mInnerBottomLeftCorner == null) {
    mInnerBottomLeftCorner = new PointF();
  }
  mInnerBottomLeftCorner.x = mInnerClipTempRectForBorderRadius.left;
  mInnerBottomLeftCorner.y = mInnerClipTempRectForBorderRadius.bottom;
  getEllipseIntersectionWithLine(
      // Ellipse Bounds
      mInnerClipTempRectForBorderRadius.left,
      mInnerClipTempRectForBorderRadius.bottom - 2 * innerBottomLeftRadiusY,
      mInnerClipTempRectForBorderRadius.left + 2 * innerBottomLeftRadiusX,
      mInnerClipTempRectForBorderRadius.bottom,
      // Line Start
      mOuterClipTempRectForBorderRadius.left,
      mOuterClipTempRectForBorderRadius.bottom,
      // Line End
      mInnerClipTempRectForBorderRadius.left,
      mInnerClipTempRectForBorderRadius.bottom,
      // Result
      mInnerBottomLeftCorner);
  /** Compute mInnerTopRightCorner */
  if (mInnerTopRightCorner == null) {
    mInnerTopRightCorner = new PointF();
  }
  mInnerTopRightCorner.x = mInnerClipTempRectForBorderRadius.right;
  mInnerTopRightCorner.y = mInnerClipTempRectForBorderRadius.top;
  getEllipseIntersectionWithLine(
      // Ellipse Bounds
      mInnerClipTempRectForBorderRadius.right - 2 * innerTopRightRadiusX,
      mInnerClipTempRectForBorderRadius.top,
      mInnerClipTempRectForBorderRadius.right,
      mInnerClipTempRectForBorderRadius.top + 2 * innerTopRightRadiusY,
      // Line Start
      mOuterClipTempRectForBorderRadius.right,
      mOuterClipTempRectForBorderRadius.top,
      // Line End
      mInnerClipTempRectForBorderRadius.right,
      mInnerClipTempRectForBorderRadius.top,
      // Result
      mInnerTopRightCorner);
  /** Compute mInnerBottomRightCorner */
  if (mInnerBottomRightCorner == null) {
    mInnerBottomRightCorner = new PointF();
  }
  mInnerBottomRightCorner.x = mInnerClipTempRectForBorderRadius.right;
  mInnerBottomRightCorner.y = mInnerClipTempRectForBorderRadius.bottom;
  getEllipseIntersectionWithLine(
      // Ellipse Bounds
      mInnerClipTempRectForBorderRadius.right - 2 * innerBottomRightRadiusX,
      mInnerClipTempRectForBorderRadius.bottom - 2 * innerBottomRightRadiusY,
      mInnerClipTempRectForBorderRadius.right,
      mInnerClipTempRectForBorderRadius.bottom,
      // Line Start
      mOuterClipTempRectForBorderRadius.right,
      mOuterClipTempRectForBorderRadius.bottom,
      // Line End
      mInnerClipTempRectForBorderRadius.right,
      mInnerClipTempRectForBorderRadius.bottom,
      // Result
      mInnerBottomRightCorner);
}
/**
 * Computes one intersection of the line through (lineStartX, lineStartY) and
 * (lineEndX, lineEndY) with the axis-aligned ellipse inscribed in the given
 * bounds, and writes it into {@code result}. If no real intersection exists
 * (the math produces NaN), {@code result} is left untouched.
 *
 * <p>Of the two possible intersections, this picks the root x = -B/(2A) - D.
 * NOTE(review): a vertical line (lineStartX == lineEndX) makes the slope m
 * infinite; the NaN guard at the end appears to be what handles that case —
 * confirm.
 */
private static void getEllipseIntersectionWithLine(
    double ellipseBoundsLeft,
    double ellipseBoundsTop,
    double ellipseBoundsRight,
    double ellipseBoundsBottom,
    double lineStartX,
    double lineStartY,
    double lineEndX,
    double lineEndY,
    PointF result) {
  final double ellipseCenterX = (ellipseBoundsLeft + ellipseBoundsRight) / 2;
  final double ellipseCenterY = (ellipseBoundsTop + ellipseBoundsBottom) / 2;
  /**
   * Step 1:
   *
   * <p>Translate the line so that the ellipse is at the origin.
   *
   * <p>Why? It makes the math easier by changing the ellipse equation from ((x -
   * ellipseCenterX)/a)^2 + ((y - ellipseCenterY)/b)^2 = 1 to (x/a)^2 + (y/b)^2 = 1.
   */
  lineStartX -= ellipseCenterX;
  lineStartY -= ellipseCenterY;
  lineEndX -= ellipseCenterX;
  lineEndY -= ellipseCenterY;
  /**
   * Step 2:
   *
   * <p>Ellipse equation: (x/a)^2 + (y/b)^2 = 1 Line equation: y = mx + c
   */
  final double a = Math.abs(ellipseBoundsRight - ellipseBoundsLeft) / 2;
  final double b = Math.abs(ellipseBoundsBottom - ellipseBoundsTop) / 2;
  final double m = (lineEndY - lineStartY) / (lineEndX - lineStartX);
  final double c = lineStartY - m * lineStartX; // y-intercept of the translated line
  /**
   * Step 3:
   *
   * <p>Substitute the Line equation into the Ellipse equation. Solve for x. Eventually, you'll
   * have to use the quadratic formula.
   *
   * <p>Quadratic formula: Ax^2 + Bx + C = 0
   */
  final double A = (b * b + a * a * m * m);
  final double B = 2 * a * a * c * m;
  final double C = (a * a * (c * c - b * b));
  /**
   * Step 4:
   *
   * <p>Apply the quadratic formula. D = sqrt(discriminant) / (2A), written in a
   * form that avoids computing B^2 - 4AC directly; D is NaN when there is no real
   * intersection.
   */
  final double D = Math.sqrt(-C / A + Math.pow(B / (2 * A), 2));
  final double x2 = -B / (2 * A) - D;
  final double y2 = m * x2 + c;
  /**
   * Step 5:
   *
   * <p>Undo the translation applied in Step 1.
   */
  final double x = x2 + ellipseCenterX;
  final double y = y2 + ellipseCenterY;
  // Only report a result when a real intersection was found.
  if (!Double.isNaN(x) && !Double.isNaN(y)) {
    result.x = (float) x;
    result.y = (float) y;
  }
}
/**
 * Returns the raw border width for {@code spacingType}, falling back to
 * {@code defaultValue} when widths were never set or the slot is UNDEFINED.
 */
public float getBorderWidthOrDefaultTo(final float defaultValue, final int spacingType) {
  if (mBorderWidth == null) {
    return defaultValue;
  }
  final float raw = mBorderWidth.getRaw(spacingType);
  return YogaConstants.isUndefined(raw) ? defaultValue : raw;
}
/** Rebuilds the paint's PathEffect from the current border style and full border width. */
private void updatePathEffect() {
  if (mBorderStyle == null) {
    mPathEffectForBorderStyle = null;
  } else {
    mPathEffectForBorderStyle = BorderStyle.getPathEffect(mBorderStyle, getFullBorderWidth());
  }
  mPaint.setPathEffect(mPathEffectForBorderStyle);
}
/** For rounded borders we use default "borderWidth" property (Spacing.ALL), else 0. */
public float getFullBorderWidth() {
  if (mBorderWidth == null) {
    return 0f;
  }
  final float width = mBorderWidth.getRaw(Spacing.ALL);
  return YogaConstants.isUndefined(width) ? 0f : width;
}
/**
 * Quickly determine if all the set border colors are equal. Bitwise AND all the set colors
 * together, then OR them all together. If the AND and the OR are the same, then the colors are
 * compatible, so return this color.
 *
 * <p>Sides with zero width contribute the AND/OR identity (ALL_BITS_SET /
 * ALL_BITS_UNSET) and so never affect the outcome.
 *
 * <p>Used to avoid expensive path creation and expensive calls to canvas.drawPath
 *
 * @return A compatible border color, or zero if the border colors are not compatible.
 */
private static int fastBorderCompatibleColorOrZero(
    int borderLeft,
    int borderTop,
    int borderRight,
    int borderBottom,
    int colorLeft,
    int colorTop,
    int colorRight,
    int colorBottom) {
  final int leftAnd = borderLeft > 0 ? colorLeft : ALL_BITS_SET;
  final int topAnd = borderTop > 0 ? colorTop : ALL_BITS_SET;
  final int rightAnd = borderRight > 0 ? colorRight : ALL_BITS_SET;
  final int bottomAnd = borderBottom > 0 ? colorBottom : ALL_BITS_SET;
  final int andSmear = leftAnd & topAnd & rightAnd & bottomAnd;
  final int leftOr = borderLeft > 0 ? colorLeft : ALL_BITS_UNSET;
  final int topOr = borderTop > 0 ? colorTop : ALL_BITS_UNSET;
  final int rightOr = borderRight > 0 ? colorRight : ALL_BITS_UNSET;
  final int bottomOr = borderBottom > 0 ? colorBottom : ALL_BITS_UNSET;
  final int orSmear = leftOr | topOr | rightOr | bottomOr;
  if (andSmear != orSmear) {
    return 0;
  }
  return andSmear;
}
/**
 * Draws the background and borders for the non-rounded (rectangular) case:
 * fills the bounds with the background color, then draws borders either as four
 * axis-aligned rects (when all set border colors agree — the fast path) or as
 * four quadrilaterals with anti-aliasing temporarily disabled.
 */
private void drawRectangularBackgroundWithBorders(Canvas canvas) {
  mPaint.setStyle(Paint.Style.FILL);
  // Background fill, pre-multiplied by the drawable's alpha.
  int useColor = ColorUtil.multiplyColorAlpha(mColor, mAlpha);
  if (Color.alpha(useColor) != 0) { // color is not transparent
    mPaint.setColor(useColor);
    canvas.drawRect(getBounds(), mPaint);
  }
  final RectF borderWidth = getDirectionAwareBorderInsets();
  final int borderLeft = Math.round(borderWidth.left);
  final int borderTop = Math.round(borderWidth.top);
  final int borderRight = Math.round(borderWidth.right);
  final int borderBottom = Math.round(borderWidth.bottom);
  // maybe draw borders?
  if (borderLeft > 0 || borderRight > 0 || borderTop > 0 || borderBottom > 0) {
    Rect bounds = getBounds();
    int colorLeft = getBorderColor(Spacing.LEFT);
    int colorTop = getBorderColor(Spacing.TOP);
    int colorRight = getBorderColor(Spacing.RIGHT);
    int colorBottom = getBorderColor(Spacing.BOTTOM);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
      // Resolve logical start/end border colors into physical left/right,
      // honoring RTL and the per-app left/right-swap setting (same scheme as
      // drawRoundedBackgroundWithBorders).
      final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
      int colorStart = getBorderColor(Spacing.START);
      int colorEnd = getBorderColor(Spacing.END);
      if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
        // Swap mode: start/end always win, falling back to left/right when unset.
        if (!isBorderColorDefined(Spacing.START)) {
          colorStart = colorLeft;
        }
        if (!isBorderColorDefined(Spacing.END)) {
          colorEnd = colorRight;
        }
        final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
        final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
        colorLeft = directionAwareColorLeft;
        colorRight = directionAwareColorRight;
      } else {
        // Non-swap mode: start/end only override left/right when explicitly defined.
        final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
        final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
        final boolean isColorStartDefined = isBorderColorDefined(Spacing.START);
        final boolean isColorEndDefined = isBorderColorDefined(Spacing.END);
        final boolean isDirectionAwareColorLeftDefined =
            isRTL ? isColorEndDefined : isColorStartDefined;
        final boolean isDirectionAwareColorRightDefined =
            isRTL ? isColorStartDefined : isColorEndDefined;
        if (isDirectionAwareColorLeftDefined) {
          colorLeft = directionAwareColorLeft;
        }
        if (isDirectionAwareColorRightDefined) {
          colorRight = directionAwareColorRight;
        }
      }
    }
    int left = bounds.left;
    int top = bounds.top;
    // Check for fast path to border drawing.
    int fastBorderColor =
        fastBorderCompatibleColorOrZero(
            borderLeft,
            borderTop,
            borderRight,
            borderBottom,
            colorLeft,
            colorTop,
            colorRight,
            colorBottom);
    if (fastBorderColor != 0) {
      if (Color.alpha(fastBorderColor) != 0) {
        // Border color is not transparent.
        int right = bounds.right;
        int bottom = bounds.bottom;
        mPaint.setColor(fastBorderColor);
        // Draw four non-overlapping rects, each inset so adjacent sides meet
        // without double-painting.
        if (borderLeft > 0) {
          int leftInset = left + borderLeft;
          canvas.drawRect(left, top, leftInset, bottom - borderBottom, mPaint);
        }
        if (borderTop > 0) {
          int topInset = top + borderTop;
          canvas.drawRect(left + borderLeft, top, right, topInset, mPaint);
        }
        if (borderRight > 0) {
          int rightInset = right - borderRight;
          canvas.drawRect(rightInset, top + borderTop, right, bottom, mPaint);
        }
        if (borderBottom > 0) {
          int bottomInset = bottom - borderBottom;
          canvas.drawRect(left, bottomInset, right - borderRight, bottom, mPaint);
        }
      }
    } else {
      // If the path drawn previously is of the same color,
      // there would be a slight white space between borders
      // with anti-alias set to true.
      // Therefore we need to disable anti-alias, and
      // after drawing is done, we will re-enable it.
      mPaint.setAntiAlias(false);
      int width = bounds.width();
      int height = bounds.height();
      // Each side is a trapezoid whose slanted edges form the mitered corners.
      if (borderLeft > 0) {
        final float x1 = left;
        final float y1 = top;
        final float x2 = left + borderLeft;
        final float y2 = top + borderTop;
        final float x3 = left + borderLeft;
        final float y3 = top + height - borderBottom;
        final float x4 = left;
        final float y4 = top + height;
        drawQuadrilateral(canvas, colorLeft, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      if (borderTop > 0) {
        final float x1 = left;
        final float y1 = top;
        final float x2 = left + borderLeft;
        final float y2 = top + borderTop;
        final float x3 = left + width - borderRight;
        final float y3 = top + borderTop;
        final float x4 = left + width;
        final float y4 = top;
        drawQuadrilateral(canvas, colorTop, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      if (borderRight > 0) {
        final float x1 = left + width;
        final float y1 = top;
        final float x2 = left + width;
        final float y2 = top + height;
        final float x3 = left + width - borderRight;
        final float y3 = top + height - borderBottom;
        final float x4 = left + width - borderRight;
        final float y4 = top + borderTop;
        drawQuadrilateral(canvas, colorRight, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      if (borderBottom > 0) {
        final float x1 = left;
        final float y1 = top + height;
        final float x2 = left + width;
        final float y2 = top + height;
        final float x3 = left + width - borderRight;
        final float y3 = top + height - borderBottom;
        final float x4 = left + borderLeft;
        final float y4 = top + height - borderBottom;
        drawQuadrilateral(canvas, colorBottom, x1, y1, x2, y2, x3, y3, x4, y4);
      }
      // re-enable anti alias
      mPaint.setAntiAlias(true);
    }
  }
}
/**
 * Fills the quadrilateral (x1,y1)-(x2,y2)-(x3,y3)-(x4,y4) with {@code fillColor}.
 * No-op for fully transparent colors. Reuses a cached Path instance to avoid
 * per-frame allocations; mutates mPaint's color.
 */
private void drawQuadrilateral(
    Canvas canvas,
    int fillColor,
    float x1,
    float y1,
    float x2,
    float y2,
    float x3,
    float y3,
    float x4,
    float y4) {
  if (fillColor == Color.TRANSPARENT) {
    return;
  }
  // Lazily allocate the scratch path on first use.
  if (mPathForBorder == null) {
    mPathForBorder = new Path();
  }
  mPaint.setColor(fillColor);
  mPathForBorder.reset();
  mPathForBorder.moveTo(x1, y1);
  mPathForBorder.lineTo(x2, y2);
  mPathForBorder.lineTo(x3, y3);
  mPathForBorder.lineTo(x4, y4);
  // Explicitly return to the start point to close the contour.
  mPathForBorder.lineTo(x1, y1);
  canvas.drawPath(mPathForBorder, mPaint);
}
/**
 * Returns the rounded border width for {@code position}; 0 when no widths were
 * ever set, and -1 as a sentinel when this particular slot is UNDEFINED.
 */
private int getBorderWidth(int position) {
  if (mBorderWidth == null) {
    return 0;
  }
  final float width = mBorderWidth.get(position);
  if (YogaConstants.isUndefined(width)) {
    // NOTE(review): -1 sentinel (not 0) for an unset slot — callers appear to
    // distinguish "never set" from "set but undefined"; confirm.
    return -1;
  }
  return Math.round(width);
}
/**
 * Packs a float-encoded alpha (0-255) and a float-encoded RGB value into a
 * single ARGB int. Both floats are truncated to ints before packing.
 */
private static int colorFromAlphaAndRGBComponents(float alpha, float rgb) {
  final int alphaBits = (((int) alpha) << 24) & 0xFF000000;
  final int rgbBits = ((int) rgb) & 0x00FFFFFF;
  return alphaBits | rgbBits;
}
/** True when both the RGB and alpha components were explicitly set for this side. */
private boolean isBorderColorDefined(int position) {
  if (mBorderRGB == null || mBorderAlpha == null) {
    return false;
  }
  return !YogaConstants.isUndefined(mBorderRGB.get(position))
      && !YogaConstants.isUndefined(mBorderAlpha.get(position));
}
/**
 * Returns the ARGB border color for one side, substituting the class defaults
 * for components that were never set.
 */
private int getBorderColor(int position) {
  final float rgb = mBorderRGB == null ? DEFAULT_BORDER_RGB : mBorderRGB.get(position);
  final float alpha = mBorderAlpha == null ? DEFAULT_BORDER_ALPHA : mBorderAlpha.get(position);
  return colorFromAlphaAndRGBComponents(alpha, rgb);
}
/**
 * Returns the physical (left, top, right, bottom) border widths after resolving
 * the logical start/end widths against the current layout direction, using the
 * same swap/non-swap scheme as the border-color resolution in the draw methods.
 */
public RectF getDirectionAwareBorderInsets() {
  final float borderWidth = getBorderWidthOrDefaultTo(0, Spacing.ALL);
  final float borderTopWidth = getBorderWidthOrDefaultTo(borderWidth, Spacing.TOP);
  final float borderBottomWidth = getBorderWidthOrDefaultTo(borderWidth, Spacing.BOTTOM);
  float borderLeftWidth = getBorderWidthOrDefaultTo(borderWidth, Spacing.LEFT);
  float borderRightWidth = getBorderWidthOrDefaultTo(borderWidth, Spacing.RIGHT);
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 && mBorderWidth != null) {
    final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
    float borderStartWidth = mBorderWidth.getRaw(Spacing.START);
    float borderEndWidth = mBorderWidth.getRaw(Spacing.END);
    if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
      // Swap mode: start/end always win, falling back to left/right when unset.
      if (YogaConstants.isUndefined(borderStartWidth)) {
        borderStartWidth = borderLeftWidth;
      }
      if (YogaConstants.isUndefined(borderEndWidth)) {
        borderEndWidth = borderRightWidth;
      }
      final float directionAwareBorderLeftWidth = isRTL ? borderEndWidth : borderStartWidth;
      final float directionAwareBorderRightWidth = isRTL ? borderStartWidth : borderEndWidth;
      borderLeftWidth = directionAwareBorderLeftWidth;
      borderRightWidth = directionAwareBorderRightWidth;
    } else {
      // Non-swap mode: start/end only override left/right when explicitly defined.
      final float directionAwareBorderLeftWidth = isRTL ? borderEndWidth : borderStartWidth;
      final float directionAwareBorderRightWidth = isRTL ? borderStartWidth : borderEndWidth;
      if (!YogaConstants.isUndefined(directionAwareBorderLeftWidth)) {
        borderLeftWidth = directionAwareBorderLeftWidth;
      }
      if (!YogaConstants.isUndefined(directionAwareBorderRightWidth)) {
        borderRightWidth = directionAwareBorderRightWidth;
      }
    }
  }
  return new RectF(borderLeftWidth, borderTopWidth, borderRightWidth, borderBottomWidth);
}
}
| |
/*
* Copyright [2013] [Cloud4SOA, www.cloud4soa.eu]
*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright 2009-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.client.lib.util;
import org.cloudfoundry.client.lib.domain.CloudApplication;
import org.cloudfoundry.client.lib.domain.CloudEntity;
import org.cloudfoundry.client.lib.domain.CloudOrganization;
import org.cloudfoundry.client.lib.domain.CloudService;
import org.cloudfoundry.client.lib.domain.CloudServiceOffering;
import org.cloudfoundry.client.lib.domain.CloudServicePlan;
import org.cloudfoundry.client.lib.domain.CloudSpace;
import org.cloudfoundry.client.lib.domain.Staging;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Class handling the mapping of the cloud domain objects
*
* @author Thomas Risberg
*/
//TODO: use some more advanced JSON mapping framework?
public class CloudEntityResourceMapper {

    // Pattern for the "created_at"/"updated_at" metadata timestamps.
    // SimpleDateFormat is NOT thread-safe, so instead of sharing a static
    // instance (the previous implementation did, risking corrupt parses under
    // concurrency) a fresh formatter is created per parse.
    private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss Z";

    /**
     * Returns the "name" attribute of the resource's entity, or {@code null}
     * if the resource, its entity, or the name attribute is absent.
     */
    public String getNameOfResource(Map<String, Object> resource) {
        return getEntityAttribute(resource, "name", String.class);
    }

    /**
     * Returns the GUID from the resource's metadata section.
     */
    public UUID getGuidOfResource(Map<String, Object> resource) {
        return getMeta(resource).getGuid();
    }

    /**
     * Maps a raw resource map (parsed JSON) onto one of the supported cloud
     * domain classes.
     *
     * @param resource the raw resource map
     * @param targetClass one of CloudSpace, CloudApplication, CloudService,
     *        CloudServiceOffering
     * @return the mapped domain object
     * @throws IllegalArgumentException if {@code targetClass} is unsupported
     */
    @SuppressWarnings("unchecked")
    public <T> T mapResource(Map<String, Object> resource, Class<T> targetClass) {
        if (targetClass == CloudSpace.class) {
            return (T) mapSpaceResource(resource);
        }
        if (targetClass == CloudApplication.class) {
            return (T) mapApplicationResource(resource);
        }
        if (targetClass == CloudService.class) {
            return (T) mapServiceInstanceResource(resource);
        }
        if (targetClass == CloudServiceOffering.class) {
            return (T) mapServiceResource(resource);
        }
        throw new IllegalArgumentException(
                "Error during mapping - unsupported class for entity mapping " + targetClass.getName());
    }

    /** Maps a space resource, including its (optional) embedded organization. */
    private CloudSpace mapSpaceResource(Map<String, Object> resource) {
        Map<String, Object> organizationMap = getEmbeddedResource(resource, "organization");
        CloudOrganization organization = null;
        if (organizationMap != null) {
            organization = mapOrganizationResource(organizationMap);
        }
        return new CloudSpace(getMeta(resource), getNameOfResource(resource), organization);
    }

    /** Maps an organization resource (meta + name only). */
    private CloudOrganization mapOrganizationResource(Map<String, Object> resource) {
        return new CloudOrganization(getMeta(resource), getNameOfResource(resource));
    }

    /**
     * Maps an application resource: state, instances, staging (runtime,
     * framework, command), environment, resource limits, and bound services.
     */
    @SuppressWarnings("unchecked")
    private CloudApplication mapApplicationResource(Map<String, Object> resource) {
        CloudApplication app = new CloudApplication(
                getMeta(resource),
                getNameOfResource(resource));
        app.setInstances(getEntityAttribute(resource, "instances", Integer.class));
        app.setServices(new ArrayList<String>());
        app.setState(CloudApplication.AppState.valueOf(getEntityAttribute(resource, "state", String.class)));
        //TODO: debug
        Integer runningInstancesAttribute = getEntityAttribute(resource, "running_instances", Integer.class);
        if (runningInstancesAttribute != null) {
            app.setRunningInstances(runningInstancesAttribute);
        }
        app.setDebug(null);
        // Runtime and framework live in embedded resources; either may be absent.
        String runtime = null;
        Map<String, Object> runtimeResource = getEmbeddedResource(resource, "runtime");
        if (runtimeResource != null) {
            runtime = getEntityAttribute(runtimeResource, "name", String.class);
        }
        String framework = null;
        Map<String, Object> frameworkResource = getEmbeddedResource(resource, "framework");
        if (frameworkResource != null) {
            framework = getEntityAttribute(frameworkResource, "name", String.class);
        }
        Staging staging = new Staging(runtime, framework);
        String command = getEntityAttribute(resource, "command", String.class);
        if (command != null) {
            staging.setCommand(command);
        }
        app.setStaging(staging);
        // Guard against a missing "environment_json" attribute, which
        // previously caused a NullPointerException on envMap.size().
        Map envMap = getEntityAttribute(resource, "environment_json", Map.class);
        if (envMap != null && !envMap.isEmpty()) {
            app.setEnv(envMap);
        }
        Map<String, Integer> resources = app.getResources();
        resources.put("memory", getEntityAttribute(resource, "memory", Integer.class));
        resources.put("file_descriptors", getEntityAttribute(resource, "file_descriptors", Integer.class));
        resources.put("disk_quota", getEntityAttribute(resource, "disk_quota", Integer.class));
        // add v1 resources under their legacy key names
        resources.put("fds", getEntityAttribute(resource, "file_descriptors", Integer.class));
        resources.put("disk", getEntityAttribute(resource, "disk_quota", Integer.class));
        app.setResources(resources);
        // "service_bindings" may be absent; previously a missing list NPEd here.
        List<Map<String, Object>> serviceBindings = getEntityAttribute(resource, "service_bindings", List.class);
        List<String> serviceList = new ArrayList<String>();
        if (serviceBindings != null) {
            for (Map<String, Object> binding : serviceBindings) {
                Map<String, Object> service = getEntityAttribute(binding, "service_instance", Map.class);
                serviceList.add(getNameOfResource(service));
            }
        }
        app.setServices(serviceList);
        return app;
    }

    /**
     * Maps a service-instance resource, pulling label/provider/version from
     * the embedded service and the plan name from the embedded service plan.
     */
    private CloudService mapServiceInstanceResource(Map<String, Object> resource) {
        CloudService cloudService = new CloudService(
                getMeta(resource),
                getNameOfResource(resource));
        Map<String, Object> servicePlanResource = getEmbeddedResource(resource, "service_plan");
        Map<String, Object> serviceResource = null;
        if (servicePlanResource != null) {
            serviceResource = getEmbeddedResource(servicePlanResource, "service");
        }
        if (serviceResource != null) {
            //TODO: assuming vendor corresponds to the service.provider and not service_instance.vendor_data
            cloudService.setLabel(getEntityAttribute(serviceResource, "label", String.class));
            cloudService.setProvider(getEntityAttribute(serviceResource, "provider", String.class));
            cloudService.setVersion(getEntityAttribute(serviceResource, "version", String.class));
        }
        if (servicePlanResource != null) {
            cloudService.setPlan(getEntityAttribute(servicePlanResource, "name", String.class));
        }
        return cloudService;
    }

    /** Maps a service (offering) resource along with its service plans. */
    private CloudServiceOffering mapServiceResource(Map<String, Object> resource) {
        CloudServiceOffering cloudServiceOffering = new CloudServiceOffering(
                getMeta(resource),
                getEntityAttribute(resource, "label", String.class),
                getEntityAttribute(resource, "provider", String.class),
                getEntityAttribute(resource, "version", String.class));
        cloudServiceOffering.setDescription(getEntityAttribute(resource, "description", String.class));
        List<Map<String, Object>> servicePlanList = getEmbeddedResourceList(getEntity(resource), "service_plans");
        if (servicePlanList != null) {
            for (Map<String, Object> servicePlanResource : servicePlanList) {
                CloudServicePlan servicePlan =
                        new CloudServicePlan(
                                getMeta(servicePlanResource),
                                getEntityAttribute(servicePlanResource, "name", String.class),
                                cloudServiceOffering);
                cloudServiceOffering.addCloudServicePlan(servicePlan);
            }
        }
        return cloudServiceOffering;
    }

    /**
     * Extracts the standard metadata section (guid, created_at, updated_at)
     * from a resource map. Missing or unparseable timestamps yield null dates.
     */
    @SuppressWarnings("unchecked")
    public static CloudEntity.Meta getMeta(Map<String, Object> resource) {
        Map<String, Object> metadata = (Map<String, Object>) resource.get("metadata");
        UUID guid = UUID.fromString(String.valueOf(metadata.get("guid")));
        Date createdDate = parseDate(metadata.get("created_at"));
        Date updatedDate = parseDate(metadata.get("updated_at"));
        return new CloudEntity.Meta(guid, createdDate, updatedDate);
    }

    // Parses a raw timestamp value into a Date, returning null when the value
    // is absent or unparseable. The previous implementation wrapped the value
    // in String.valueOf(), which turned a missing timestamp into the literal
    // string "null" and made its subsequent null check unreachable.
    private static Date parseDate(Object rawValue) {
        if (rawValue == null) {
            return null;
        }
        try {
            return new SimpleDateFormat(DATE_FORMAT).parse(rawValue.toString());
        } catch (Exception ignored) {
            // unexpected timestamp format - treat as unknown
            return null;
        }
    }

    /** Returns the "entity" section of a resource map. */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> getEntity(Map<String, Object> resource) {
        return (Map<String, Object>) resource.get("entity");
    }

    /**
     * Reads a single attribute from the resource's "entity" map, converted to
     * the requested type. Returns {@code null} when the resource, its entity,
     * or the attribute is absent.
     *
     * @throws IllegalArgumentException if {@code targetClass} is unsupported
     */
    @SuppressWarnings("unchecked")
    public static <T> T getEntityAttribute(Map<String, Object> resource, String attributeName, Class<T> targetClass) {
        if (resource == null) {
            return null;
        }
        Map<String, Object> entity = (Map<String, Object>) resource.get("entity");
        if (entity == null) {
            return null;
        }
        Object value = entity.get(attributeName);
        if (targetClass == String.class) {
            // Previously String.valueOf() was used here, which converted a
            // missing attribute into the literal string "null".
            return value == null ? null : (T) value.toString();
        }
        if (targetClass == Integer.class || targetClass == Map.class || targetClass == List.class) {
            return (T) value;
        }
        throw new IllegalArgumentException(
                "Error during mapping - unsupported class for attribute mapping " + targetClass.getName());
    }

    /**
     * Returns an embedded resource map from the resource's entity, or
     * {@code null} when the entity or the named resource is absent.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> getEmbeddedResource(Map<String, Object> resource, String embeddedResourceName) {
        Map<String, Object> entity = (Map<String, Object>) resource.get("entity");
        return entity == null ? null : (Map<String, Object>) entity.get(embeddedResourceName);
    }

    /**
     * Returns an embedded list of resource maps from the given map, or
     * {@code null} when absent. Note: unlike {@link #getEmbeddedResource},
     * this reads directly from the supplied map (callers pass the entity).
     */
    @SuppressWarnings("unchecked")
    public static List<Map<String, Object>> getEmbeddedResourceList(Map<String, Object> resource, String embeddedResourceName) {
        return (List<Map<String, Object>>) resource.get(embeddedResourceName);
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Properties;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.MaskingLibrary.CompiledZapperMask;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2012-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Prop_InstantDeath extends Property
{
	@Override public String ID(){return "Prop_InstantDeath";}
	@Override public long flags() { return super.flags()|Ability.FLAG_POTENTIALLY_DEADLY; }
	@Override protected int canAffectCode(){return Ability.CAN_ROOMS|Ability.CAN_AREAS|Ability.CAN_ITEMS|Ability.CAN_MOBS;}

	/** Optional zapper mask restricting which MOBs may be killed; null means no restriction. */
	protected CompiledZapperMask mask=null;

	/** One-element array used both as the pending-kill flag and as its lock object. */
	protected volatile boolean[] killTrigger={false};

	public Prop_InstantDeath()
	{
		super();
	}

	/**
	 * Re-parses the property parameters. Recognizes "mask=&lt;zapper mask&gt;"
	 * to restrict who this property will kill.
	 */
	@Override
	public void setMiscText(String newMiscText)
	{
		super.setMiscText(newMiscText);
		final String maskStr=CMParms.getParmStr(newMiscText,"mask","");
		mask=null;
		if((maskStr!=null)&&(maskStr.trim().length()>0))
			mask=CMLib.masking().getPreCompiledMask(maskStr);
	}

	@Override
	public String accountForYourself()
	{
		return "instant killing";
	}

	/**
	 * Collects the killable inhabitants of the given room: everyone except the
	 * spared MOB, immortals, and MOBs excluded by the zapper mask.
	 *
	 * @param spareMe a MOB to exclude from the result, or null
	 * @param R the room to scan; null yields an empty set
	 * @return the set of MOBs eligible to be killed
	 */
	public Set<MOB> getEveryoneHere(MOB spareMe, Room R)
	{
		final Set<MOB> V=new HashSet<MOB>();
		if(R==null)
			return V;
		for(int i=0;i<R.numInhabitants();i++)
		{
			final MOB M=R.fetchInhabitant(i);
			if((spareMe!=null)&&(spareMe==M))
				continue;
			if((M!=null)
			&&(!CMSecurity.isAllowed(M,R,CMSecurity.SecFlag.IMMORT))
			&&((mask==null)||(CMLib.masking().maskCheck(mask, M, false))))
				V.add(M);
		}
		return V;
	}

	/**
	 * Resolves the MOB associated with a ticking object: the object itself if
	 * it is a MOB, or the owner of a ticking Item when that owner is a MOB.
	 *
	 * @param ticking the ticking object, or null
	 * @return the associated MOB, or null if none
	 */
	protected MOB getTickersMOB(Tickable ticking)
	{
		if(ticking==null)
			return null;
		if(ticking instanceof MOB)
			return (MOB)ticking;
		else
		if(ticking instanceof Item)
			if(((Item)ticking).owner() != null)
				if(((Item)ticking).owner() instanceof MOB)
					return (MOB)((Item)ticking).owner();
		return null;
	}

	/**
	 * Resolves the Room associated with a ticking object: the object itself,
	 * its MOB's location, or the room owning a ticking Item.
	 *
	 * @param ticking the ticking object, or null
	 * @return the associated Room, or null if none
	 */
	protected Room getTickersRoom(Tickable ticking)
	{
		if(ticking==null)
			return null;
		if(ticking instanceof Room)
			return (Room)ticking;
		final MOB mob=getTickersMOB(ticking);
		if(mob!=null)
			return mob.location();
		if(ticking instanceof Item)
			if(((Item)ticking).owner() != null)
				if(((Item)ticking).owner() instanceof Room)
					return (Room)((Item)ticking).owner();
		return null;
	}

	/**
	 * Determines the set of MOBs that should die because of the given affected
	 * host: everyone else in a MOB host's room, the holder or room occupants
	 * for an Item host, the room's occupants for a Room host, or everyone in
	 * every metro room for an Area host.
	 *
	 * @param whoE the environmental hosting this property
	 * @return the set of MOBs to kill (possibly empty, never null)
	 */
	public Set<MOB> getDeadMOBsFrom(Environmental whoE)
	{
		if(whoE instanceof MOB)
		{
			final MOB mob=(MOB)whoE;
			final Room room=mob.location();
			if(room!=null)
				return getEveryoneHere(mob,room);
		}
		else
		if(whoE instanceof Item)
		{
			final Item item=(Item)whoE;
			final Environmental E=item.owner();
			if(E!=null)
			{
				final Room room=getTickersRoom(whoE);
				if(room!=null)
				{
					if((E instanceof MOB)&&((mask==null)||(CMLib.masking().maskCheck(mask, E, false))))
						return new XHashSet<MOB>((MOB)E);
					else
					if(E instanceof Room)
						return getEveryoneHere(null,(Room)E);
					room.recoverRoomStats();
				}
			}
		}
		else
		if(whoE instanceof Room)
			return getEveryoneHere(null,(Room)whoE);
		else
		if(whoE instanceof Area)
		{
			final Set<MOB> allMobs=new HashSet<MOB>();
			for(final Enumeration<Room> r=((Area)whoE).getMetroMap();r.hasMoreElements();)
			{
				final Room R=r.nextElement();
				allMobs.addAll(getEveryoneHere(null,R));
			}
			// BUGFIX: this set was previously built and then discarded (the
			// method fell through to the empty-set return), so an area-wide
			// instant-death property never killed anyone.
			return allMobs;
		}
		return new HashSet<MOB>();
	}

	/**
	 * Processes any pending kill trigger: gathers the victims under the
	 * killTrigger lock, then posts their deaths outside of it.
	 * Returns false so the one-shot tick-down is not rescheduled.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(tickID!=Tickable.TICKID_MISCELLANEOUS)
			return super.tick(ticking, tickID);
		while(killTrigger[0])
		{
			final LinkedList<MOB> killThese=new LinkedList<MOB>();
			synchronized(killTrigger)
			{
				killThese.addAll(getDeadMOBsFrom(affected));
				killTrigger[0]=false;
			}
			for(final MOB M : killThese)
			{
				CMLib.combat().postDeath(null, M, null);
			}
		}
		return false;
	}

	/**
	 * Watches messages targeting the host and, when a triggering interaction
	 * occurs (attacking a MOB host, eating/drinking a Food/Drink host, wearing
	 * or wielding an Armor/Weapon host, handling any other Item host, or any
	 * message for other host types), schedules a kill pass on a short tick.
	 */
	@Override
	public void executeMsg(Environmental affecting, CMMsg msg)
	{
		super.executeMsg(affecting,msg);
		if(msg.amITarget(affecting))
		{
			boolean activated=false;
			if(affecting instanceof MOB)
			{
				// only player-initiated malicious acts trigger a MOB host
				if((msg.targetMajor(CMMsg.MASK_MALICIOUS))
				&&(!msg.source().isMonster()))
					activated=true;
			}
			else
			if((affecting instanceof Food)
			||(affecting instanceof Drink))
			{
				if((msg.targetMinor()==CMMsg.TYP_EAT)
				||(msg.targetMinor()==CMMsg.TYP_DRINK))
					activated=true;
			}
			else
			if((affecting instanceof Armor)
			||(affecting instanceof Weapon))
			{
				if((msg.targetMinor()==CMMsg.TYP_WEAR)
				||(msg.targetMinor()==CMMsg.TYP_HOLD)
				||(msg.targetMinor()==CMMsg.TYP_WIELD))
					activated=true;
			}
			else
			if(affecting instanceof Item)
			{
				if((msg.targetMinor()==CMMsg.TYP_GET)||(msg.targetMinor()==CMMsg.TYP_PUSH)||(msg.targetMinor()==CMMsg.TYP_PULL))
					activated=true;
			}
			else
				activated=true;
			if(activated)
			{
				synchronized(killTrigger)
				{
					killTrigger[0]=true;
					// defer the actual killing to the tick thread, half a second out
					if(!CMLib.threads().isTicking(this, Tickable.TICKID_MISCELLANEOUS))
						CMLib.threads().startTickDown(this, Tickable.TICKID_MISCELLANEOUS, 500,1);
				}
			}
		}
	}
}
| |
package org.cryptoworkshop.ximix.node.crypto.signature;
import java.math.BigInteger;
import java.util.concurrent.TimeUnit;
import it.unisa.dia.gas.jpbc.Element;
import it.unisa.dia.gas.jpbc.Pairing;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.math.ec.ECPoint;
import org.cryptoworkshop.ximix.client.connection.ServiceConnectionException;
import org.cryptoworkshop.ximix.client.connection.signing.Participant;
import org.cryptoworkshop.ximix.common.asn1.message.AlgorithmServiceMessage;
import org.cryptoworkshop.ximix.common.asn1.message.BigIntegerMessage;
import org.cryptoworkshop.ximix.common.asn1.message.CommandMessage;
import org.cryptoworkshop.ximix.common.asn1.message.ECPointMessage;
import org.cryptoworkshop.ximix.common.asn1.message.ElementMessage;
import org.cryptoworkshop.ximix.common.asn1.message.MessageReply;
import org.cryptoworkshop.ximix.common.asn1.message.ShareMessage;
import org.cryptoworkshop.ximix.common.asn1.message.SignatureMessage;
import org.cryptoworkshop.ximix.common.crypto.Algorithm;
import org.cryptoworkshop.ximix.common.crypto.threshold.LagrangeWeightCalculator;
import org.cryptoworkshop.ximix.node.service.NodeContext;
/**
* Base class for engine classes that generate threshold signatures.
*/
public abstract class SignerEngine
{
protected final Algorithm algorithm;
protected final NodeContext nodeContext;
/**
* Base constrcutor.
*
* @param algorithm the algorithm this engine is associated with.
* @param nodeContext the context for the node this engine is associated with.
*/
protected SignerEngine(Algorithm algorithm, NodeContext nodeContext)
{
this.algorithm = algorithm;
this.nodeContext = nodeContext;
}
public Algorithm getAlgorithm()
{
return algorithm;
}
protected MessageReply sendMessage(String node, Enum type, ASN1Encodable message)
throws ServiceConnectionException
{
if (node.equals(nodeContext.getName()))
{
return handle(new SignatureMessage(algorithm, type, message));
}
else
{
return nodeContext.getPeerMap().get(node).sendMessage(CommandMessage.Type.SIGNATURE_MESSAGE, new AlgorithmServiceMessage(getAlgorithm(), new SignatureMessage(algorithm, type, message)));
}
}
protected MessageReply replyOkay(ASN1Encodable payload)
{
return new MessageReply(MessageReply.Type.OKAY, payload);
}
protected BigInteger accumulateBigInteger(Participant[] nodes, Enum fetchOperatorType, ASN1Encodable request, BigInteger fieldSize)
throws ServiceConnectionException
{
ASN1Encodable[] valueShares = getShareData(nodes, fetchOperatorType, request);
//
// we don't need to know how many peers, just the maximum index (max(sequenceNo) + 1) of the one available
//
LagrangeWeightCalculator calculator = new LagrangeWeightCalculator(valueShares.length, fieldSize);
BigInteger[] weights = calculator.computeWeights(valueShares);
int baseIndex = getBaseIndex(valueShares);
BigInteger baseValue = BigIntegerMessage.getInstance(valueShares[baseIndex]).getValue();
BigInteger baseWeight = weights[baseIndex];
// weighting
BigInteger value = baseValue.multiply(baseWeight);
for (int i = baseIndex + 1; i < weights.length; i++)
{
if (valueShares[i] != null)
{
value = value.add(BigIntegerMessage.getInstance(valueShares[i]).getValue().multiply(weights[i])).mod(fieldSize);
}
}
return value;
}
protected ECPoint accumulateECPoint(Participant[] nodes, Enum fetchOperatorType, ASN1Encodable request, ECCurve curve, BigInteger fieldSize)
throws ServiceConnectionException
{
ASN1Encodable[] valueShares = getShareData(nodes, fetchOperatorType, request);
//
// we don't need to know how many peers, just the maximum index (max(sequenceNo) + 1) of the one available
//
LagrangeWeightCalculator calculator = new LagrangeWeightCalculator(valueShares.length, fieldSize);
BigInteger[] weights = calculator.computeWeights(valueShares);
int baseIndex = getBaseIndex(valueShares);
ECPoint baseValue = ECPointMessage.getInstance(curve, valueShares[baseIndex]).getPoint();
BigInteger baseWeight = weights[baseIndex];
// weighting
ECPoint value = baseValue.multiply(baseWeight);
for (int i = baseIndex + 1; i < weights.length; i++)
{
if (valueShares[i] != null)
{
value = value.add(ECPointMessage.getInstance(curve, valueShares[i]).getPoint().multiply(weights[i]));
}
}
return value.normalize();
}
protected Element accumulateElement(Participant[] nodes, Enum fetchOperatorType, ASN1Encodable request, Pairing pairing, BigInteger fieldSize)
throws ServiceConnectionException
{
ASN1Encodable[] valueShares = getShareData(nodes, fetchOperatorType, request);
//
// we don't need to know how many peers, just the maximum index (max(sequenceNo) + 1) of the one available
//
LagrangeWeightCalculator calculator = new LagrangeWeightCalculator(valueShares.length, fieldSize);
BigInteger[] weights = calculator.computeWeights(valueShares);
int baseIndex = getBaseIndex(valueShares);
Element baseValue = ElementMessage.getInstance(pairing, valueShares[baseIndex]).getValue();
BigInteger baseWeight = weights[baseIndex];
// weighting
Element value = baseValue.powZn(pairing.getZr().newElement(baseWeight));
for (int i = baseIndex + 1; i < weights.length; i++)
{
if (valueShares[i] != null)
{
value = value.mul(ElementMessage.getInstance(pairing, valueShares[i]).getValue().powZn(pairing.getZr().newElement(weights[i])));
}
}
return value;
}
void execute(Runnable task)
{
nodeContext.execute(task);
}
void schedule(Runnable task, long time, TimeUnit timeUnit)
{
nodeContext.schedule(task, time, timeUnit);
}
/**
* Find the first non-null element in a share array.
*
* @param valueShares share array to examine
* @return the index of the first non-null element.
*/
private int getBaseIndex(ASN1Encodable[] valueShares)
{
int baseIndex = 0;
for (int i = 0; i != valueShares.length; i++)
{
if (valueShares[i] != null)
{
baseIndex = i;
break;
}
}
return baseIndex;
}
/**
* Return a properly distributed list of shares with null values occupying any gaps.
*
* @throws ServiceConnectionException
*/
private ASN1Encodable[] getShareData(Participant[] nodes, Enum fetchOperatorType, ASN1Encodable request)
throws ServiceConnectionException
{
MessageReply[] replys = new MessageReply[nodes.length];
// TODO: deal with drop outs
int count = 0;
while (count != nodes.length)
{
replys[count] = sendMessage(nodes[count].getName(), fetchOperatorType, request);
if (replys[count].getType() != MessageReply.Type.OKAY)
{
// TODO: maybe log
replys[count] = null;
}
count++;
}
ShareMessage[] shareMessages = new ShareMessage[nodes.length];
int maxSequenceNo = 0;
for (int i = 0; i != shareMessages.length; i++)
{
shareMessages[i] = ShareMessage.getInstance(replys[i].getPayload());
if (maxSequenceNo < shareMessages[i].getSequenceNo())
{
maxSequenceNo = shareMessages[i].getSequenceNo();
}
}
ASN1Encodable[] valueShares = new ASN1Encodable[maxSequenceNo + 1];
for (int i = 0; i != shareMessages.length; i++)
{
ShareMessage shareMsg = shareMessages[i];
valueShares[shareMsg.getSequenceNo()] = shareMsg.getShareData();
}
return valueShares;
}
public abstract MessageReply handle(SignatureMessage signatureMessage);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.