gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.formatter.java; import com.intellij.formatting.Spacing; import com.intellij.lang.ASTNode; import com.intellij.lang.StdLanguages; import com.intellij.lexer.JavaLexer; import com.intellij.lexer.Lexer; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.formatter.FormatterUtil; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.codeStyle.ImportHelper; import com.intellij.psi.impl.source.javadoc.PsiDocMethodOrFieldRef; import com.intellij.psi.impl.source.jsp.jspJava.JspClassLevelDeclarationStatement; import com.intellij.psi.impl.source.jsp.jspJava.JspCodeBlock; import com.intellij.psi.impl.source.jsp.jspJava.JspJavaComment; import com.intellij.psi.impl.source.tree.*; import com.intellij.psi.javadoc.PsiDocTag; import com.intellij.psi.tree.ChildRoleBase; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; import com.intellij.psi.tree.java.IJavaElementType; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.util.containers.ConcurrentHashMap; import org.jetbrains.annotations.Nullable; import java.util.Map; public class JavaSpacePropertyProcessor extends 
JavaElementVisitor { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.formatter.java.JavaSpacePropertyProcessor"); private PsiElement myParent; private int myRole1; private int myRole2; private CodeStyleSettings mySettings; private Spacing myResult; private ASTNode myChild1; private ASTNode myChild2; private IElementType myType1; private IElementType myType2; private ImportHelper myImportHelper; private static final ThreadLocal<JavaSpacePropertyProcessor> mySharedProcessorAllocator = new ThreadLocal<JavaSpacePropertyProcessor>(); private void doInit(final ASTNode child, final CodeStyleSettings settings) { init(child); mySettings = settings; if (myChild1.getPsi().getLanguage() != StdLanguages.JAVA || myChild2.getPsi().getLanguage() != StdLanguages.JAVA) { return; } if (myChild2 != null && StdTokenSets.COMMENT_BIT_SET.contains(myChild2.getElementType())) { if (mySettings.KEEP_FIRST_COLUMN_COMMENT) { myResult = Spacing .createKeepingFirstColumnSpacing(0, Integer.MAX_VALUE, true, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, Integer.MAX_VALUE, 0, true, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { if (myParent != null) { myParent.accept(this); if (myResult == null) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == JavaTokenType.END_OF_LINE_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (!canStickChildrenTogether(myChild1, myChild2)) { myResult = Spacing .createSpacing(1, Integer.MIN_VALUE, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myChild1.getElementType() == JavaTokenType.C_STYLE_COMMENT){ myResult = null; } else if (!shouldKeepSpace(myParent)){ myResult = Spacing.createSpacing(0, 0, 0, true, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } } } private void clear() { myResult = null; myChild2 = myChild1 = null; myParent = null; myImportHelper 
= null; myRole1 = myRole2 = -1; myType1 = myType2 = null; } private static boolean shouldKeepSpace(final PsiElement parent) { ASTNode node = parent.getNode(); if (node == null) { return true; } final IElementType type = node.getElementType(); if (type == JavaDocTokenType.DOC_TAG_VALUE_TOKEN) { return PsiTreeUtil.getParentOfType(parent, PsiDocMethodOrFieldRef.class) != null; } return type == JavaDocElementType.DOC_COMMENT || type == JavaDocElementType.DOC_TAG || type == JavaDocElementType.DOC_INLINE_TAG; } private void init(final ASTNode child) { if (child == null) return; ASTNode treePrev = child.getTreePrev(); while (treePrev != null && isWhiteSpace(treePrev)) { treePrev = treePrev.getTreePrev(); } if (treePrev == null) { init(child.getTreeParent()); } else { myChild2 = child; myChild1 = treePrev; final CompositeElement parent = (CompositeElement)treePrev.getTreeParent(); myParent = SourceTreeToPsiMap.treeElementToPsi(parent); myRole1 = parent.getChildRole(treePrev); myType1 = treePrev.getElementType(); myRole2 = parent.getChildRole(child); myType2 = child.getElementType(); } } private static boolean isWhiteSpace(final ASTNode treePrev) { return treePrev != null && (treePrev.getElementType() == TokenType.WHITE_SPACE || treePrev.getTextLength() == 0); } private Spacing getResult() { final Spacing result = myResult; clear(); return result; } @Override public void visitArrayAccessExpression(PsiArrayAccessExpression expression) { if (myRole1 == ChildRole.ARRAY && myRole2 == ChildRole.LBRACKET) { final boolean space = false; createSpaceInCode(space); } else if (myRole1 == ChildRole.LBRACKET || myRole2 == ChildRole.RBRACKET) { createSpaceInCode(mySettings.SPACE_WITHIN_BRACKETS); } } private void createSpaceInCode(final boolean space) { createSpaceProperty(space, mySettings.KEEP_BLANK_LINES_IN_CODE); } @Override public void visitNewExpression(PsiNewExpression expression) { if (myRole2 == ChildRole.ARRAY_INITIALIZER) { 
createSpaceInCode(mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE);
    }
    else if (myRole1 == ChildRole.NEW_KEYWORD) {
      createSpaceInCode(true);
    }
    else if (myRole2 == ChildRole.ARGUMENT_LIST) {
      createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES);
    }
    // We don't want to insert space between brackets in case of expression like 'new int[] {1}', hence, we check that exactly
    // one of the children is bracket.
    else if (myRole1 == ChildRole.LBRACKET ^ myRole2 == ChildRole.RBRACKET) {
      createSpaceInCode(mySettings.SPACE_WITHIN_BRACKETS);
    }
  }

  @Override public void visitArrayInitializerExpression(PsiArrayInitializerExpression expression) {
    // Delegates to the shared array-initializer handling (defined elsewhere in this file).
    visitArrayInitializer();
  }

  @Override public void visitClass(PsiClass aClass) {
    // A doc comment is always separated from the class it documents by exactly one line break.
    if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) {
      myResult = Spacing
        .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS);
      return;
    }
    if (myRole2 == ChildRole.LBRACE) {
      PsiIdentifier nameIdentifier = aClass.getNameIdentifier();
      // Dependent spacing before the class LBRACE starts at the name identifier when one exists,
      // otherwise at the start of the whole declaration.
      int dependanceStart = nameIdentifier == null ?
myParent.getTextRange().getStartOffset() : nameIdentifier.getTextRange().getStartOffset(); myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CLASS_LBRACE, mySettings.CLASS_BRACE_STYLE, new TextRange(dependanceStart, myChild1.getTextRange().getEndOffset()), false, true); } else if (myRole1 == ChildRole.LBRACE) { if (aClass.isEnum()) { createParenSpace(true, false); } else if (aClass instanceof PsiAnonymousClass) { if (myRole2 == ChildRole.CLASS_INITIALIZER && isTheOnlyClassMember(myChild2)) { myResult = Spacing.createSpacing(0, 0, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else { myResult = Spacing.createSpacing(0, 0, mySettings.BLANK_LINES_AFTER_ANONYMOUS_CLASS_HEADER + 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else { myResult = Spacing.createSpacing( 0, 0, mySettings.BLANK_LINES_AFTER_CLASS_HEADER + 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS ); } } else if (myRole2 == ChildRole.RBRACE && aClass.isEnum()) { createParenSpace(true, false); } else processClassBody(); } private static boolean isTheOnlyClassMember(final ASTNode node) { ASTNode next = node.getTreeNext(); if (next == null || !(next.getElementType() == JavaTokenType.RBRACE)) return false; ASTNode prev = node.getTreePrev(); if (prev == null || !(prev.getElementType() == JavaTokenType.LBRACE)) return false; return true; } private void processClassBody() { if (myChild1 instanceof JspJavaComment || myChild2 instanceof JspJavaComment) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else if (processMethod()) { } else if (myRole2 == ChildRole.CLASS_INITIALIZER) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myRole1 == ChildRole.FIELD) { int lines = Math.max(getLinesAroundField(), getLinesAroundMethod()) + 1; myResult = Spacing.createSpacing(0, 
mySettings.SPACE_BEFORE_CLASS_LBRACE ? 1 : 0, 0, true, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE, lines); } else if (myRole1 == ChildRole.CLASS) { setAroundClassSpacing(); } else { final int blankLines = getLinesAroundMethod() + 1; myResult = Spacing .createSpacing(0, 0, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.CLASS_INITIALIZER) { if (myRole2 == ChildRole.RBRACE) { int minLineFeeds = getMinLineFeedsBetweenRBraces(myChild1); myResult = Spacing.createSpacing( 0, Integer.MAX_VALUE, minLineFeeds, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE ); } else if (myRole2 == ChildRole.CLASS) { setAroundClassSpacing(); } else { final int blankLines = getLinesAroundMethod() + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.CLASS) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.CLASS) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.FIELD) { if (myRole1 == ChildRole.COMMA) { createSpaceProperty(true, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 
0); } else { final int blankLines = getLinesAroundField() + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.FIELD) { if (myRole2 == ChildRole.COMMA) { ASTNode lastChildNode = myChild1.getLastChildNode(); if (lastChildNode != null && lastChildNode.getElementType() == JavaTokenType.SEMICOLON) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else { createSpaceProperty(false, false, 0); } } else if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = getLinesAroundField() + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.COMMA || myChild2.getElementType() == JavaTokenType.SEMICOLON) { createSpaceProperty(false, false, 0); } else if (myRole1 == ChildRole.COMMA) { createSpaceProperty(mySettings.SPACE_AFTER_COMMA, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (myRole1 == ChildRole.LBRACE && myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else if (myRole2 == ChildRole.EXTENDS_LIST || myRole2 == ChildRole.IMPLEMENTS_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_TYPE_PARAMETER_LIST); } else if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(false); } else if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } 
} /** * Initializes {@link #myResult} property with {@link Spacing} which <code>'min line feeds'</code> property is defined * from {@link CodeStyleSettings#BLANK_LINES_AROUND_CLASS} value. */ private void setAroundClassSpacing() { myResult = Spacing.createSpacing(0, Integer.MAX_VALUE, mySettings.BLANK_LINES_AROUND_CLASS + 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } private boolean processMethod() { if (myRole2 == ChildRole.METHOD || myChild2.getElementType() == JavaElementType.METHOD) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = getLinesAroundMethod() + 1; myResult = Spacing .createSpacing(0, 0, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.METHOD || myChild1.getElementType() == JavaElementType.METHOD) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = getLinesAroundMethod() + 1; myResult = Spacing .createSpacing(0, 0, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = getLinesAroundMethod() + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } return myResult != null; } /** * Allows to calculate <code>'min line feed'</code> setting of the {@link Spacing} to be used between two closing braces * (assuming that left AST node that ends with closing brace is given to this method). 
*
   * @param leftNode left AST node that ends with closing brace
   * @return <code>'min line feed'</code> setting of {@link Spacing} object to use for the given AST node and
   *         closing brace
   */
  private static int getMinLineFeedsBetweenRBraces(ASTNode leftNode) {
    // The general idea is to return zero in situation when opening curly braces goes one after other, e.g.
    //     new Expectations() {{
    //         foo();}}
    // We don't want line feed between closing curly braces here.
    if (leftNode == null || leftNode.getElementType() != JavaElementType.CLASS_INITIALIZER) {
      return 1;
    }
    ASTNode lbraceCandidate = leftNode.getTreePrev();
    return (lbraceCandidate != null && lbraceCandidate.getElementType() == JavaTokenType.LBRACE) ? 0 : 1;
  }

  // Blank lines around a method: interface-style spacing applies when the parent is not a
  // plain class, or when both neighbors are abstract methods.
  private int getLinesAroundMethod() {
    boolean useInterfaceMethodSpacing = !isClass(myParent) || (isAbstractMethod(myChild1) && isAbstractMethod(myChild2));
    return useInterfaceMethodSpacing ? mySettings.BLANK_LINES_AROUND_METHOD_IN_INTERFACE : mySettings.BLANK_LINES_AROUND_METHOD;
  }

  // Blank lines around a field, depending on whether the parent is a (non-interface) class.
  private int getLinesAroundField() {
    if (isClass(myParent)) {
      return mySettings.BLANK_LINES_AROUND_FIELD;
    }
    else {
      return mySettings.BLANK_LINES_AROUND_FIELD_IN_INTERFACE;
    }
  }

  // True for a PsiClass that is not an interface.
  private static boolean isClass(final PsiElement parent) {
    if (parent instanceof PsiClass) {
      return !((PsiClass)parent).isInterface();
    }
    return false;
  }

  // True when the node's PSI is a method carrying the 'abstract' modifier.
  private static boolean isAbstractMethod(ASTNode node) {
    PsiElement element = node.getPsi();
    if (element instanceof PsiMethod) {
      PsiMethod method = (PsiMethod)element;
      return method.getModifierList().hasModifierProperty(PsiModifier.ABSTRACT);
    }
    return false;
  }

  @Override public void visitInstanceOfExpression(PsiInstanceOfExpression expression) {
    // 'instanceof' operands are always separated by a space.
    createSpaceInCode(true);
  }

  @Override public void visitEnumConstantInitializer(PsiEnumConstantInitializer enumConstantInitializer) {
    if (myRole2 == ChildRole.EXTENDS_LIST || myRole2 == ChildRole.IMPLEMENTS_LIST) {
      createSpaceInCode(true);
    }
    else {
      // Enum constant bodies are spaced like anonymous-class method members.
      processMethod();
    }
  }

  @Override public void visitImportList(PsiImportList list) {
if (ElementType.IMPORT_STATEMENT_BASE_BIT_SET.contains(myChild1.getElementType()) && ElementType.IMPORT_STATEMENT_BASE_BIT_SET.contains(myChild2.getElementType())) { if (myImportHelper == null) myImportHelper = new ImportHelper(mySettings); int emptyLines = myImportHelper.getEmptyLinesBetween( (PsiImportStatementBase)SourceTreeToPsiMap.treeElementToPsi(myChild1), (PsiImportStatementBase)SourceTreeToPsiMap.treeElementToPsi(myChild2) ) + 1; myResult = Spacing.createSpacing(0, 0, emptyLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } @Override public void visitFile(PsiFile file) { if (myType1 == JavaElementType.PACKAGE_STATEMENT) { int lf = mySettings.BLANK_LINES_AFTER_PACKAGE + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myType2 == JavaElementType.PACKAGE_STATEMENT) { int lf = mySettings.BLANK_LINES_BEFORE_PACKAGE + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myType1 == JavaElementType.IMPORT_LIST) { int lf = mySettings.BLANK_LINES_AFTER_IMPORTS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myType2 == JavaElementType.IMPORT_LIST) { int lf = mySettings.BLANK_LINES_BEFORE_IMPORTS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myType2 == JavaElementType.CLASS) { int lf = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } @Override public void visitWhileStatement(PsiWhileStatement statement) { if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_WHILE_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { 
createSpaceInCode(mySettings.SPACE_WITHIN_WHILE_PARENTHESES); } else if (myRole2 == ChildRole.LOOP_BODY || myChild2.getElementType() == JavaElementType.CODE_BLOCK) { if (myChild2.getElementType() == JavaElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_WHILE_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild1.getTextRange().getEndOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } else { createSpacingBeforeElementInsideControlStatement(); } } } @Override public void visitDoWhileStatement(PsiDoWhileStatement statement) { if (myRole1 == ChildRole.WHILE_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_WHILE_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_WHILE_PARENTHESES); } else if (myRole2 == ChildRole.LOOP_BODY) { if (myChild2.getElementType() == JavaElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_DO_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } else { createSpacingBeforeElementInsideControlStatement(); } } else if (myRole1 == ChildRole.LOOP_BODY || myChild2.getElementType() == JavaElementType.CODE_BLOCK) { processOnNewLineCondition(mySettings.WHILE_ON_NEW_LINE, mySettings.SPACE_BEFORE_WHILE_KEYWORD); } } private void processOnNewLineCondition(final boolean onNewLine) { processOnNewLineCondition(onNewLine, true); } private void processOnNewLineCondition(final boolean onNewLine, final boolean createSpaceInline) { if (onNewLine) { if (!mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { 
createSpaceProperty(createSpaceInline, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } @Override public void visitThrowStatement(PsiThrowStatement statement) { if (myChild1.getElementType() == JavaTokenType.THROW_KEYWORD) { createSpaceInCode(true); } } @Override public void visitTryStatement(PsiTryStatement statement) { if (myRole2 == ChildRole.FINALLY_KEYWORD || myRole2 == ChildRole.CATCH_SECTION) { boolean putRightChildOnNewLine = myRole2 == ChildRole.FINALLY_KEYWORD ? mySettings.FINALLY_ON_NEW_LINE : mySettings.CATCH_ON_NEW_LINE; if (putRightChildOnNewLine) { processOnNewLineCondition(true); } else { boolean useSpace = (myRole2 == ChildRole.CATCH_SECTION && mySettings.SPACE_BEFORE_CATCH_KEYWORD) || (myRole2 == ChildRole.FINALLY_KEYWORD && mySettings.SPACE_BEFORE_FINALLY_KEYWORD); createSpaceProperty(useSpace, false, 0); } return; } if (myRole2 == ChildRole.TRY_BLOCK || myRole2 == ChildRole.FINALLY_BLOCK) { boolean useSpaceBeforeLBrace = myRole2 == ChildRole.TRY_BLOCK ? 
mySettings.SPACE_BEFORE_TRY_LBRACE : mySettings.SPACE_BEFORE_FINALLY_LBRACE; myResult = getSpaceBeforeLBrace(useSpaceBeforeLBrace, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } } @Override public void visitForeachStatement(PsiForeachStatement statement) { if (myRole1 == ChildRole.FOR_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_FOR_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else if (myRole1 == ChildRole.FOR_ITERATION_PARAMETER && myRole2 == ChildRole.COLON) { createSpaceInCode(true); } else if (myRole1 == ChildRole.COLON && myRole2 == ChildRole.FOR_ITERATED_VALUE) { createSpaceInCode(true); } else if (myRole2 == ChildRole.LOOP_BODY) { if (myChild2.getElementType() == JavaElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FOR_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild1.getTextRange().getEndOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } else if (mySettings.KEEP_CONTROL_STATEMENT_IN_ONE_LINE) { myResult = Spacing .createDependentLFSpacing(1, 1, myParent.getTextRange(), false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } @Override public void visitAssignmentExpression(PsiAssignmentExpression expression) { if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } } @Override public void visitParenthesizedExpression(PsiParenthesizedExpression expression) { if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.PARENTHESES_EXPRESSION_LPAREN_WRAP, mySettings.SPACE_WITHIN_PARENTHESES); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.PARENTHESES_EXPRESSION_RPAREN_WRAP, 
mySettings.SPACE_WITHIN_PARENTHESES); } } @Override public void visitCodeBlock(PsiCodeBlock block) { processCodeBlock(keepInOneLine(block), block.getTextRange()); } @Override public void visitCodeFragment(JavaCodeFragment codeFragment) { final TokenSet statementBitSet = ElementType.STATEMENT_BIT_SET; if (statementBitSet.contains(myChild1.getElementType()) && statementBitSet.contains(myChild2.getElementType())) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private void processCodeBlock(final boolean keepInOneLine, final TextRange textRange) { if (myParent instanceof JspCodeBlock) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myRole1 == ChildRoleBase.NONE || myRole2 == ChildRoleBase.NONE) { final IElementType firstElementType = myChild1.getElementType(); if ( firstElementType == JavaTokenType.END_OF_LINE_COMMENT || firstElementType == JavaTokenType.C_STYLE_COMMENT) { myResult = Spacing.createDependentLFSpacing(0, 1, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = null; } } else if (myRole1 == ChildRole.LBRACE) { if (!keepInOneLine) { int blankLines = 1; if (myParent != null) { ASTNode parentNode = myParent.getNode(); if (parentNode != null) { ASTNode grandPa = parentNode.getTreeParent(); if (grandPa != null && grandPa.getElementType() == JavaElementType.METHOD) { blankLines += mySettings.BLANK_LINES_BEFORE_METHOD_BODY; } } } myResult = Spacing.createSpacing( 0, 0, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE ); } else { myResult = Spacing .createDependentLFSpacing(0, 1, textRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else if (myRole2 == ChildRole.RBRACE) { if (!keepInOneLine) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } 
else { myResult = Spacing .createDependentLFSpacing(0, 1, textRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } } else if (myChild1.getElementType() == JavaElementType.SWITCH_LABEL_STATEMENT && myChild2.getElementType() == JavaElementType .BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SWITCH_LBRACE, mySettings.BRACE_STYLE, null, false, true); } else if (myRole1 == ChildRole.STATEMENT_IN_BLOCK && myRole2 == ChildRole.STATEMENT_IN_BLOCK) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private boolean keepInOneLine(final PsiCodeBlock block) { if (block.getParent() instanceof PsiMethod) { return mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE; } else { return mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE; } } @Override public void visitIfStatement(PsiIfStatement statement) { if (myRole2 == ChildRole.ELSE_KEYWORD) { if (myChild1.getElementType() != JavaElementType.BLOCK_STATEMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { if (mySettings.ELSE_ON_NEW_LINE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceProperty(mySettings.SPACE_BEFORE_ELSE_KEYWORD, false, 0); } } } else if (myRole1 == ChildRole.ELSE_KEYWORD) { if (myChild2.getElementType() == JavaElementType.IF_STATEMENT) { if (mySettings.SPECIAL_ELSE_IF_TREATMENT) { createSpaceProperty(false, false, 0); } else { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { if (myChild2.getElementType() == JavaElementType.BLOCK_STATEMENT || myChild2.getElementType() == JavaElementType.CODE_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_ELSE_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } else { 
createSpacingBeforeElementInsideControlStatement();
        }
      }
    }
    else if (myChild2.getElementType() == JavaElementType.BLOCK_STATEMENT || myChild2.getElementType() == JavaElementType.CODE_BLOCK) {
      // Braced then/else branch: pick the matching LBRACE setting by which branch this is.
      boolean space = myRole2 == ChildRole.ELSE_BRANCH ? mySettings.SPACE_BEFORE_ELSE_LBRACE : mySettings.SPACE_BEFORE_IF_LBRACE;
      myResult = getSpaceBeforeLBrace(space, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(),
                                                                                   myChild1.getTextRange().getEndOffset()),
                                      mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true);
    }
    else if (myRole2 == ChildRole.LPARENTH) {
      createSpaceInCode(mySettings.SPACE_BEFORE_IF_PARENTHESES);
    }
    else if (myRole1 == ChildRole.LPARENTH) {
      createSpaceInCode(mySettings.SPACE_WITHIN_IF_PARENTHESES);
    }
    else if (myRole2 == ChildRole.RPARENTH) {
      createSpaceInCode(mySettings.SPACE_WITHIN_IF_PARENTHESES);
    }
    else if (myRole2 == ChildRole.THEN_BRANCH) {
      createSpacingBeforeElementInsideControlStatement();
    }
  }

  // Spacing before an unbraced statement inside a control statement (e.g. 'if (c) stmt;'):
  // keep it on the same line when the setting allows and the left neighbor is not a '//'
  // comment (which would swallow the statement); otherwise force a line break.
  private void createSpacingBeforeElementInsideControlStatement() {
    if (mySettings.KEEP_CONTROL_STATEMENT_IN_ONE_LINE && myChild1.getElementType() != JavaTokenType.END_OF_LINE_COMMENT) {
      //createNonLFSpace(1, null, mySettings.KEEP_LINE_BREAKS);
      createSpaceProperty(true, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE);
      //myResult = Spacing.createSpacing(1, 1, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE);
    }
    else {
      myResult = Spacing.createSpacing(1, 1, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE);
    }
  }

  // Spacing with no line feed unless forced: a trailing '//' comment on the left always forces
  // a break; a dependent range makes the break conditional on that range spanning lines.
  private Spacing createNonLFSpace(int spaces, final TextRange dependantRange, final boolean keepLineBreaks) {
    final ASTNode prev = getPrevElementType(myChild2);
    if (prev != null && prev.getElementType() == JavaTokenType.END_OF_LINE_COMMENT) {
      return Spacing
        .createSpacing(0, Integer.MAX_VALUE, 1, keepLineBreaks, mySettings.KEEP_BLANK_LINES_IN_CODE);
    }
    else if (dependantRange != null) {
      return Spacing
        .createDependentLFSpacing(spaces, spaces, dependantRange, keepLineBreaks,
mySettings.KEEP_BLANK_LINES_IN_CODE); } else { return Spacing.createSpacing(spaces, spaces, 0, keepLineBreaks, mySettings.KEEP_BLANK_LINES_IN_CODE); } } @Nullable private static ASTNode getPrevElementType(final ASTNode child) { return FormatterUtil.getLeafNonSpaceBefore(child); } private Spacing getSpaceBeforeLBrace(final boolean spaceBeforeLbrace, int braceStyle, TextRange dependantRange, boolean keepOneLine, boolean useParentBlockAsDependencyAllTheTime) { int space = spaceBeforeLbrace ? 1 : 0; if (dependantRange != null && braceStyle == CodeStyleSettings.NEXT_LINE_IF_WRAPPED) { return createNonLFSpace(space, dependantRange, false); } else if (braceStyle == CodeStyleSettings.END_OF_LINE || braceStyle == CodeStyleSettings.NEXT_LINE_IF_WRAPPED) { return createNonLFSpace(space, null, false); } else if (braceStyle == CodeStyleSettings.NEXT_LINE && !mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE) { return Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (keepOneLine) { TextRange dependencyRangeToUse = dependantRange == null || useParentBlockAsDependencyAllTheTime ? 
myParent.getTextRange() : dependantRange; return Spacing.createDependentLFSpacing( space, space, dependencyRangeToUse, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE ); } else { return Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } @Override public void visitBinaryExpression(PsiBinaryExpression expression) { PsiJavaToken sign = expression.getOperationSign(); IElementType i = sign.getTokenType(); if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { if (i == JavaTokenType.OROR || i == JavaTokenType.ANDAND) { createSpaceInCode(mySettings.SPACE_AROUND_LOGICAL_OPERATORS); } else if (i == JavaTokenType.OR || i == JavaTokenType.AND || i == JavaTokenType.XOR) { createSpaceInCode(mySettings.SPACE_AROUND_BITWISE_OPERATORS); } else if (i == JavaTokenType.EQEQ || i == JavaTokenType.NE) { createSpaceInCode(mySettings.SPACE_AROUND_EQUALITY_OPERATORS); } else if (i == JavaTokenType.GT || i == JavaTokenType.LT || i == JavaTokenType.GE || i == JavaTokenType.LE) { createSpaceInCode(mySettings.SPACE_AROUND_RELATIONAL_OPERATORS); } else if (i == JavaTokenType.PLUS || i == JavaTokenType.MINUS) { createSpaceInCode(mySettings.SPACE_AROUND_ADDITIVE_OPERATORS); } else if (i == JavaTokenType.ASTERISK || i == JavaTokenType.DIV || i == JavaTokenType.PERC) { createSpaceInCode(mySettings.SPACE_AROUND_MULTIPLICATIVE_OPERATORS); } else if (i == JavaTokenType.LTLT || i == JavaTokenType.GTGT || i == JavaTokenType.GTGTGT) { createSpaceInCode(mySettings.SPACE_AROUND_SHIFT_OPERATORS); } else { createSpaceInCode(false); } } } @Override public void visitField(PsiField field) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole1 == ChildRole.INITIALIZER_EQ || myRole2 == ChildRole.INITIALIZER_EQ) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } else if 
(myRole1 == ChildRole.TYPE || myRole2 == ChildRole.TYPE) { createSpaceInCode(true); } else if (myChild2.getElementType() == JavaTokenType.SEMICOLON) { createSpaceProperty(false, false, 0); } else if (myRole1 == ChildRole.MODIFIER_LIST) { createSpaceProperty(true, false, 0); } } @Override public void visitLocalVariable(PsiLocalVariable variable) { if (myRole1 == ChildRole.INITIALIZER_EQ || myRole2 == ChildRole.INITIALIZER_EQ) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } else if (myRole1 == ChildRole.MODIFIER_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE_REFERENCE || myRole1 == ChildRole.TYPE_REFERENCE) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE || myRole1 == ChildRole.TYPE) { createSpaceInCode(true); } else if (myChild2.getElementType() == JavaTokenType.SEMICOLON) { final PsiElement pp = myParent.getParent(); if (pp instanceof PsiDeclarationStatement) { final PsiElement ppp = pp.getParent(); if (ppp instanceof PsiForStatement) { createSpaceInCode(mySettings.SPACE_BEFORE_SEMICOLON); return; } } createSpaceProperty(false, false, 0); } } @Override public void visitMethod(PsiMethod method) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole2 == ChildRole.PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_PARENTHESES); } else if (myRole1 == ChildRole.PARAMETER_LIST && myRole2 == ChildRole.THROWS_LIST || myRole1 == ChildRole.TYPE_PARAMETER_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.METHOD_BODY) { PsiElement methodName = method.getNameIdentifier(); int dependencyStart = methodName == null ? 
myParent.getTextRange().getStartOffset() : methodName.getTextRange().getStartOffset(); PsiModifierList modifierList = method.getModifierList(); PsiAnnotation[] annotations = modifierList.getAnnotations(); boolean useParentBlockAsDependencyAllTheTime = true; if (annotations.length > 0) { useParentBlockAsDependencyAllTheTime = false; PsiAnnotation annotation = annotations[annotations.length - 1]; ASTNode nextModifier = FormattingAstUtil.getNextNonWhiteSpaceNode(annotation.getNode()); if (nextModifier == null) { PsiElement element = modifierList.getNextSibling(); if (element != null) { ASTNode node = element.getNode(); if (node != null && node.getTextLength() > 0) { dependencyStart = element.getTextRange().getStartOffset(); } } } else { dependencyStart = nextModifier.getStartOffset(); } } ASTNode dependencyEndAnchor = mySettings.METHOD_BRACE_STYLE == CodeStyleSettings.NEXT_LINE ? myChild2 : myChild1; int dependencyEnd = dependencyEndAnchor.getTextRange().getEndOffset(); myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_METHOD_LBRACE, mySettings.METHOD_BRACE_STYLE, new TextRange(dependencyStart, dependencyEnd), mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE, useParentBlockAsDependencyAllTheTime); } else if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (StdTokenSets.COMMENT_BIT_SET.contains(myChild1.getElementType()) && (myRole2 == ChildRole.MODIFIER_LIST || myRole2 == ChildRole.TYPE_REFERENCE)) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else if (myRole2 == ChildRole.DEFAULT_KEYWORD || myRole2 == ChildRole.ANNOTATION_DEFAULT_VALUE) { createSpaceInCode(true); } else if (myChild2.getElementType() == JavaTokenType.SEMICOLON) { createSpaceInCode(false); } else if (myRole1 == ChildRole.TYPE) { createSpaceInCode(true); } } private void processModifierList() { if (mySettings.MODIFIER_LIST_WRAP) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { 
createSpaceProperty(true, false, 0); } } @Override public void visitModifierList(PsiModifierList list) { createSpaceInCode(true); } @Override public void visitParameterList(PsiParameterList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE, false); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_PARENTHESES); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_PARENTHESES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } private void createParenSpace(final boolean onNewLine, final boolean space) { createParenSpace(onNewLine, space, myParent.getTextRange()); } private void createParenSpace(final boolean onNewLine, final boolean space, final TextRange dependance) { if (onNewLine) { final int spaces = space ? 
1 : 0; myResult = Spacing .createDependentLFSpacing(spaces, spaces, dependance, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceInCode(space); } } @Override public void visitElement(PsiElement element) { if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (myRole1 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_UNARY_OPERATOR); } else if (myChild1.getElementType() == JavaDocTokenType.DOC_TAG_VALUE_TOKEN && myChild2.getElementType() == JavaDocTokenType.DOC_TAG_VALUE_TOKEN) { createSpaceInCode(true); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_AFTER_COMMA); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_BEFORE_COMMA); } } @Override public void visitExpressionList(PsiExpressionList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE, false); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE, myRole1 == ChildRole.COMMA || mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } @Override public void visitSynchronizedStatement(PsiSynchronizedStatement statement) { if (myRole1 == ChildRole.SYNCHRONIZED_KEYWORD || myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_SYNCHRONIZED_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_SYNCHRONIZED_PARENTHESES); } else if (myRole2 == ChildRole.BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SYNCHRONIZED_LBRACE, mySettings.BRACE_STYLE, 
null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } } @Override public void visitSwitchLabelStatement(PsiSwitchLabelStatement statement) { if (myRole1 == ChildRole.CASE_KEYWORD || myRole2 == ChildRole.CASE_EXPRESSION) { createSpaceProperty(true, false, 0); } } @Override public void visitSwitchStatement(PsiSwitchStatement statement) { if (myRole1 == ChildRole.SWITCH_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_SWITCH_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_SWITCH_PARENTHESES); } else if (myRole2 == ChildRole.SWITCH_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SWITCH_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } } @Override public void visitForStatement(PsiForStatement statement) { if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_FOR_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { ASTNode rparenth = findFrom(myChild2, JavaTokenType.RPARENTH, true); if (rparenth == null) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else { createParenSpace(mySettings.FOR_STATEMENT_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_FOR_PARENTHESES, new TextRange(myChild1.getTextRange().getStartOffset(), rparenth.getTextRange().getEndOffset())); if (myChild2.getElementType() == JavaElementType.EMPTY_STATEMENT) { createSpaceInCode(mySettings.SPACE_BEFORE_SEMICOLON); } } } else if (myRole2 == ChildRole.RPARENTH) { ASTNode lparenth = findFrom(myChild2, JavaTokenType.LPARENTH, false); if (lparenth == null) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else { createParenSpace(mySettings.FOR_STATEMENT_RPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_FOR_PARENTHESES, new TextRange(lparenth.getTextRange().getStartOffset(), myChild2.getTextRange().getEndOffset())); } } else if (myRole1 == ChildRole.FOR_INITIALIZATION) { 
createSpaceInCode(mySettings.SPACE_AFTER_SEMICOLON); } else if (myRole1 == ChildRole.CONDITION) { createSpaceInCode(mySettings.SPACE_BEFORE_SEMICOLON); } else if (myRole1 == ChildRole.FOR_SEMICOLON) { createSpaceInCode(mySettings.SPACE_AFTER_SEMICOLON); } else if (myRole2 == ChildRole.LOOP_BODY || myChild2.getElementType() == JavaElementType.CODE_BLOCK) { if (myChild2.getElementType() == JavaElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FOR_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild1.getTextRange().getEndOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } else if (mySettings.KEEP_CONTROL_STATEMENT_IN_ONE_LINE) { myResult = Spacing .createDependentLFSpacing(1, 1, myParent.getTextRange(), false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } @Nullable private static ASTNode findFrom(ASTNode current, final IElementType expected, boolean forward) { while (current != null) { if (current.getElementType() == expected) return current; current = forward ? 
current.getTreeNext() : current.getTreePrev(); } return null; } @Override public void visitCatchSection(PsiCatchSection section) { if (myRole2 == ChildRole.CATCH_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CATCH_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE, true); } else if (myRole2 == ChildRole.CATCH_BLOCK_PARAMETER_LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_CATCH_PARENTHESES); } else if (myRole1 == ChildRole.CATCH_BLOCK_PARAMETER_LPARENTH || myRole2 == ChildRole.CATCH_BLOCK_PARAMETER_RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_CATCH_PARENTHESES); } } @Override public void visitReferenceParameterList(PsiReferenceParameterList list) { if (myRole1 == ChildRole.LT_IN_TYPE_LIST && myRole2 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LT_IN_TYPE_LIST && myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(true); } else if (myRole1 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST && myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA && myRole2 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_AFTER_COMMA_IN_TYPE_ARGUMENTS); } else if (myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(false); } } @Override public void visitTypeCastExpression(PsiTypeCastExpression expression) { if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_CAST_PARENTHESES); } else if (myRole1 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_AFTER_TYPE_CAST); } } private void createSpaceProperty(boolean space, int keepBlankLines) { createSpaceProperty(space, mySettings.KEEP_LINE_BREAKS, keepBlankLines); } private void createSpaceProperty(boolean space, boolean keepLineBreaks, final int keepBlankLines) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == 
JavaTokenType.END_OF_LINE_COMMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { if (!space && !canStickChildrenTogether(myChild1, myChild2)) { space = true; } if (!keepLineBreaks && myRole2 == ChildRoleBase.NONE) { keepLineBreaks = true; } myResult = Spacing.createSpacing(space ? 1 : 0, space ? 1 : 0, 0, keepLineBreaks, keepBlankLines); } } @Override public void visitReferenceList(PsiReferenceList list) { if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.EXTENDS_KEYWORD || myRole2 == ChildRole.EXTENDS_KEYWORD) { createSpaceInCode(true); } else if (myRole1 == ChildRole.AMPERSAND_IN_BOUNDS_LIST || myRole2 == ChildRole.AMPERSAND_IN_BOUNDS_LIST) { createSpaceInCode(true); } else if (myRole1 == ChildRole.IMPLEMENTS_KEYWORD || myRole2 == ChildRole.IMPLEMENTS_KEYWORD) { createSpaceInCode(true); } else if (myRole1 == ChildRole.THROWS_KEYWORD) { createSpaceInCode(true); } } @Override public void visitReferenceExpression(PsiReferenceExpression expression) { visitReferenceElement(expression); } @Override public void visitConditionalExpression(PsiConditionalExpression expression) { if (myRole2 == ChildRole.QUEST) { createSpaceInCode(mySettings.SPACE_BEFORE_QUEST); } else if (myRole1 == ChildRole.QUEST) { createSpaceInCode(mySettings.SPACE_AFTER_QUEST); } else if (myRole2 == ChildRole.COLON) { createSpaceInCode(mySettings.SPACE_BEFORE_COLON); } else if (myRole1 == ChildRole.COLON) { createSpaceInCode(mySettings.SPACE_AFTER_COLON); } } @Override public void visitStatement(PsiStatement statement) { if (myRole2 == ChildRole.CLOSING_SEMICOLON) { createSpaceInCode(false); } if (statement instanceof JspClassLevelDeclarationStatement) { processClassBody(); } } @Override public void visitReturnStatement(PsiReturnStatement statement) { if (myChild2.getElementType() == JavaTokenType.SEMICOLON) { 
createSpaceInCode(false); } else if (myRole1 == ChildRole.RETURN_KEYWORD) { createSpaceInCode(true); } else { super.visitReturnStatement(statement); } } @Override public void visitMethodCallExpression(PsiMethodCallExpression expression) { if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } @Override public void visitTypeParameter(PsiTypeParameter classParameter) { createSpaceInCode(true); } @Override public void visitTypeElement(PsiTypeElement type) { if (myChild2.getElementType() == JavaTokenType.ELLIPSIS) { createSpaceInCode(false); } else if (myChild2.getElementType() == JavaTokenType.LBRACKET || myChild2.getElementType() == JavaTokenType.RBRACKET) { createSpaceInCode(false); } else { createSpaceInCode(true); } } @Override public void visitDeclarationStatement(PsiDeclarationStatement declarationStatement) { if (myRole2 == ChildRole.COMMA) { createSpaceProperty(false, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } @Override public void visitTypeParameterList(PsiTypeParameterList list) { if (myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_AFTER_COMMA_IN_TYPE_ARGUMENTS); } } @Override public void visitReferenceElement(PsiJavaCodeReferenceElement reference) { if (myRole2 == ChildRole.REFERENCE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_TYPE_PARAMETER_LIST); } else if (myRole2 == ChildRole.DOT){ createSpaceInCode(false); } } @Override public void visitAnnotation(PsiAnnotation annotation) { if (myRole2 == ChildRole.PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_ANOTATION_PARAMETER_LIST); } else if (myChild1.getElementType() == JavaTokenType.AT && myChild2.getElementType() == JavaElementType.JAVA_CODE_REFERENCE) { createSpaceInCode(false); } } @Override public void visitClassInitializer(PsiClassInitializer initializer) 
{ if (myChild2.getElementType() == JavaElementType.CODE_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_METHOD_LBRACE, mySettings.BRACE_STYLE, null, false, true); } } @Override public void visitAnnotationParameterList(PsiAnnotationParameterList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createSpaceInCode(false); } // There is a possible case that annotation key-value pair is used in 'shorten' form (with implicit name 'values'). It's also // possible that target value is surrounded by curly braces. We want to define child role accordingly then. else if (myRole1 == ChildRole.LPARENTH && mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE && myRole2 == ChildRole.ANNOTATION_VALUE) { createSpaceInCode(true); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_ANNOTATION_PARENTHESES); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } @Override public void visitNameValuePair(PsiNameValuePair pair) { if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } } @Override public void visitAnnotationArrayInitializer(PsiArrayInitializerMemberValue initializer) { visitArrayInitializer(); } private void visitArrayInitializer() { if (myRole1 == ChildRole.LBRACE) { if (mySettings.ARRAY_INITIALIZER_LBRACE_ON_NEXT_LINE) { int spaces = mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES ? 
1 : 0; myResult = Spacing .createDependentLFSpacing(spaces, spaces, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceProperty(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else if (myRole2 == ChildRole.LBRACE) { createSpaceInCode(mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE); } else if (myRole2 == ChildRole.RBRACE) { if (mySettings.ARRAY_INITIALIZER_RBRACE_ON_NEXT_LINE) { int spaces = mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES ? 1 : 0; myResult = Spacing .createDependentLFSpacing(spaces, spaces, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { createSpaceProperty(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_AFTER_COMMA); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_BEFORE_COMMA); } } @Override public void visitEnumConstant(PsiEnumConstant enumConstant) { if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } else if (myRole2 == ChildRole.ANONYMOUS_CLASS) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CLASS_LBRACE, mySettings.METHOD_BRACE_STYLE, enumConstant.getTextRange(), mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE, true); } } @Override public void visitDocTag(PsiDocTag tag) { if (myChild1.getElementType() == JavaDocTokenType.DOC_TAG_NAME && myChild2.getElementType() == JavaDocTokenType.DOC_TAG_VALUE_TOKEN) { myResult = Spacing.createSpacing(1, 1, 0, false, 0); } } @Override public void visitAssertStatement(PsiAssertStatement statement) { if (myChild1.getElementType() == JavaTokenType.ASSERT_KEYWORD) { createSpaceInCode(true); } else if (myChild1.getElementType() == JavaTokenType.COLON){ createSpaceInCode(mySettings.SPACE_AFTER_COLON); } else if 
(myChild2.getElementType() == JavaTokenType.COLON) { createSpaceInCode(mySettings.SPACE_BEFORE_COLON); } } @Override public void visitParameter(PsiParameter parameter) { if (myRole1 == ChildRole.TYPE || myRole1 == ChildRole.MODIFIER_LIST) { createSpaceInCode(true); } } @SuppressWarnings({"ConstantConditions"}) public static Spacing getSpacing(ASTNode node, CodeStyleSettings settings) { JavaSpacePropertyProcessor spacePropertyProcessor = mySharedProcessorAllocator.get(); try { if (spacePropertyProcessor == null) { spacePropertyProcessor = new JavaSpacePropertyProcessor(); mySharedProcessorAllocator.set(spacePropertyProcessor); } spacePropertyProcessor.doInit(node, settings); return spacePropertyProcessor.getResult(); } finally { spacePropertyProcessor.clear(); } } private static boolean isWS(final ASTNode lastChild) { return lastChild != null && lastChild.getElementType() == TokenType.WHITE_SPACE; } private static final Map<Pair<IElementType, IElementType>, Boolean> myCanStickJavaTokensMatrix = new ConcurrentHashMap<Pair<IElementType, IElementType>, Boolean>(); public static boolean canStickChildrenTogether(final ASTNode child1, final ASTNode child2) { if (child1 == null || child2 == null) return true; if (isWS(child1) || isWS(child2)) return true; ASTNode token1 = TreeUtil.findLastLeaf(child1); ASTNode token2 = TreeUtil.findFirstLeaf(child2); LOG.assertTrue(token1 != null); LOG.assertTrue(token2 != null); return !(token1.getElementType() instanceof IJavaElementType && token2.getElementType()instanceof IJavaElementType) || canStickJavaTokens(token1,token2); } private static boolean canStickJavaTokens(ASTNode token1, ASTNode token2) { IElementType type1 = token1.getElementType(); IElementType type2 = token2.getElementType(); Pair<IElementType, IElementType> pair = new Pair<IElementType, IElementType>(type1, type2); Boolean res = myCanStickJavaTokensMatrix.get(pair); if (res == null) { if (!checkToken(token1) || !checkToken(token2)) return true; String text = 
token1.getText() + token2.getText(); Lexer lexer = new JavaLexer(LanguageLevel.HIGHEST); lexer.start(text); boolean canMerge = lexer.getTokenType() == type1; lexer.advance(); canMerge &= lexer.getTokenType() == type2; res = canMerge; myCanStickJavaTokensMatrix.put(pair, res); } return res.booleanValue(); } private static boolean checkToken(final ASTNode token1) { Lexer lexer = new JavaLexer(LanguageLevel.HIGHEST); final String text = token1.getText(); lexer.start(text); if (lexer.getTokenType() != token1.getElementType()) return false; lexer.advance(); return lexer.getTokenType() == null; } }
/** * */ package test.api; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import org.junit.Test; import javastrava.api.API; import javastrava.auth.ref.AuthorisationScope; import javastrava.model.StravaEntity; import javastrava.service.exception.NotFoundException; import javastrava.service.exception.UnauthorizedException; import test.api.callback.APICreateCallback; import test.utils.RateLimitedTestRunner; /** * <p> * Tests for API create methods * </p> * * @author Dan Shannon * @param <T> * Class of object being created * @param <U> * Class of identifier of the parent (so mostly, Integer) * */ public abstract class APICreateTest<T extends StravaEntity, U> extends APITest<T> { /** * Attempt to create an object inside a parent that does not exist. Creation call should return a {@link NotFoundException}. * * @throws Exception * if the test fails in an unexpected way */ @Test public void create_invalidParent() throws Exception { RateLimitedTestRunner.run(() -> { final API api = apiWithWriteAccess(); T createdObject = null; try { createdObject = creator().create(api, createObject(), invalidParentId()); } catch (final NotFoundException e) { // Expected return; } forceDelete(createdObject); fail("Created an object with an invalid parent!"); //$NON-NLS-1$ }); } /** * Attempt to create an object inside a parent that is private and belongs to another user. 
Creation call should throw an {@link UnauthorizedException} * * @throws Exception * if the test fails in an unexpected way */ @Test public void create_privateParentBelongsToOtherUser() throws Exception { RateLimitedTestRunner.run(() -> { final API api = apiWithFullAccess(); T createdObject = null; try { createdObject = creator().create(api, createObject(), privateParentOtherUserId()); } catch (final UnauthorizedException e) { // Expected return; } forceDelete(createdObject); fail("Created an object with a private parent that belongs to another user!"); //$NON-NLS-1$ }); } /** * Attempt to create an object inside a private parent, using a token that does not have {@link AuthorisationScope#VIEW_PRIVATE view_private} scope. Creation call should throw an * {@link UnauthorizedException} * * @throws Exception * if the test fails in an unexpected way */ @Test public void create_privateParentWithoutViewPrivate() throws Exception { RateLimitedTestRunner.run(() -> { final API api = apiWithWriteAccess(); T createdObject = null; try { createdObject = creator().create(api, createObject(), privateParentId()); } catch (final UnauthorizedException e) { // Expected return; } forceDelete(createdObject); fail("Created an object with a private parent, but without view_private"); //$NON-NLS-1$ }); } /** * Attempt to create an object inside a private parent, using a token that does have {@link AuthorisationScope#VIEW_PRIVATE view_private} scope. Creation call should succeed. * * @throws Exception * if the test fails in an unexpected way */ @Test public void create_privateParentWithViewPrivate() throws Exception { RateLimitedTestRunner.run(() -> { final API api = apiWithFullAccess(); final T result = creator().create(api, createObject(), privateParentId()); if (!this.createAPIResponseIsNull()) { forceDelete(result); assertNotNull(result); validate(result); } }); } /** * Attempt to create a valid object. Creation call should succeed. 
* * @throws Exception * if the test fails in an unexpected way */ @Test public void create_valid() throws Exception { RateLimitedTestRunner.run(() -> { final API api = apiWithWriteAccess(); final T result = creator().create(api, createObject(), validParentId()); if (!this.createAPIResponseIsNull()) { forceDelete(result); assertNotNull(result); validate(result); } }); } /** * Attempt to create a valid object inside a parent that belongs to another user (but is not private). Creation call should succeed. * * @throws Exception * if the test fails in an unexpected way */ @Test public void create_validParentBelongsToOtherUser() throws Exception { RateLimitedTestRunner.run(() -> { final API api = apiWithWriteAccess(); final T result = creator().create(api, createObject(), validParentOtherUserId()); if (this.createAPIResponseIsNull() == false) { forceDelete(result); assertNotNull(result); validate(result); } }); } /** * Attempt to create a valid object using a token that does not have {@link AuthorisationScope#WRITE} scope. 
Creation call should throw an {@link UnauthorizedException} * * @throws Exception * if the test fails in an unexpected way */ @Test public void create_validParentNoWriteAccess() throws Exception { RateLimitedTestRunner.run(() -> { final API api = api(); T createdObject = null; try { createdObject = creator().create(api, createObject(), validParentId()); } catch (final UnauthorizedException e) { // Expected return; } forceDelete(createdObject); fail("Created an object with a valid parent, but without write access!"); //$NON-NLS-1$ }); } /** * @return <code>true</code> if the response from the API when creating an object is null */ @SuppressWarnings("static-method") protected boolean createAPIResponseIsNull() { return false; } /** * Create an object * * @return The object created */ protected abstract T createObject(); /** * Callback used to call the API create method * * @return The creator */ protected abstract APICreateCallback<T, U> creator(); /** * Force delete the object * * @param objectToDelete * The object to be deleted */ protected abstract void forceDelete(T objectToDelete); /** * Get an invalid identifier of a parent (i.e. one that doesn't exist) * * @return The id */ protected abstract U invalidParentId(); /** * Get an identifier of a private parent object that belongs to the authenticated user * * @return The id */ protected abstract U privateParentId(); /** * Get an identifier of a private parent object that does NOT to the authenticated user * * @return The id */ protected abstract U privateParentOtherUserId(); /** * Get a valid identifier of a parent object that belongs to the authenticated user * * @return The id */ protected abstract U validParentId(); /** * Get a valid identifier of a parent object that does NOT belong to the authenticated user * * @return The id */ protected abstract U validParentOtherUserId(); }
/**
 * Copyright 2007-2015, Kaazing Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaazing.k3po.driver.internal.behavior.visitor;

import java.util.List;

import org.kaazing.k3po.driver.internal.behavior.visitor.InjectBarriersVisitor.State;
import org.kaazing.k3po.lang.internal.RegionInfo;
import org.kaazing.k3po.lang.internal.ast.AstAcceptNode;
import org.kaazing.k3po.lang.internal.ast.AstAcceptableNode;
import org.kaazing.k3po.lang.internal.ast.AstAcceptedNode;
import org.kaazing.k3po.lang.internal.ast.AstBoundNode;
import org.kaazing.k3po.lang.internal.ast.AstChildClosedNode;
import org.kaazing.k3po.lang.internal.ast.AstChildOpenedNode;
import org.kaazing.k3po.lang.internal.ast.AstCloseNode;
import org.kaazing.k3po.lang.internal.ast.AstClosedNode;
import org.kaazing.k3po.lang.internal.ast.AstConnectAbortNode;
import org.kaazing.k3po.lang.internal.ast.AstConnectAbortedNode;
import org.kaazing.k3po.lang.internal.ast.AstConnectNode;
import org.kaazing.k3po.lang.internal.ast.AstConnectedNode;
import org.kaazing.k3po.lang.internal.ast.AstDisconnectNode;
import org.kaazing.k3po.lang.internal.ast.AstDisconnectedNode;
import org.kaazing.k3po.lang.internal.ast.AstNode;
import org.kaazing.k3po.lang.internal.ast.AstOpenedNode;
import org.kaazing.k3po.lang.internal.ast.AstPropertyNode;
import org.kaazing.k3po.lang.internal.ast.AstReadAbortNode;
import org.kaazing.k3po.lang.internal.ast.AstReadAbortedNode;
import org.kaazing.k3po.lang.internal.ast.AstReadAdviseNode;
import org.kaazing.k3po.lang.internal.ast.AstReadAdvisedNode;
import org.kaazing.k3po.lang.internal.ast.AstReadAwaitNode;
import org.kaazing.k3po.lang.internal.ast.AstReadClosedNode;
import org.kaazing.k3po.lang.internal.ast.AstReadConfigNode;
import org.kaazing.k3po.lang.internal.ast.AstReadNotifyNode;
import org.kaazing.k3po.lang.internal.ast.AstReadOptionNode;
import org.kaazing.k3po.lang.internal.ast.AstReadValueNode;
import org.kaazing.k3po.lang.internal.ast.AstRejectedNode;
import org.kaazing.k3po.lang.internal.ast.AstScriptNode;
import org.kaazing.k3po.lang.internal.ast.AstStreamNode;
import org.kaazing.k3po.lang.internal.ast.AstStreamableNode;
import org.kaazing.k3po.lang.internal.ast.AstUnbindNode;
import org.kaazing.k3po.lang.internal.ast.AstUnboundNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteAbortNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteAbortedNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteAdviseNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteAdvisedNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteAwaitNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteCloseNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteConfigNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteFlushNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteNotifyNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteOptionNode;
import org.kaazing.k3po.lang.internal.ast.AstWriteValueNode;

/**
 * AST visitor that produces a copy of a script in which an implicit barrier is
 * injected at every READ-to-WRITE transition within a stream: a
 * {@link AstReadNotifyNode} followed by a {@link AstWriteAwaitNode}, both bound
 * to a generated barrier named {@code "~read~write~N"} (see
 * {@link #conditionallyInjectWriteBarrier}). Presumably this serializes a write
 * behind the completion of the preceding reads — TODO confirm against the
 * driver's barrier semantics.
 *
 * <p>Traversal state is carried in {@link State}; visiting a stream node resets
 * {@code readWriteState} to {@code NONE}, read-side nodes set it to
 * {@code READ}, and write-value/write-config nodes set it to {@code WRITE}
 * after conditionally injecting the barrier pair. Most other streamable nodes
 * are copied through unchanged.
 */
public class InjectBarriersVisitor implements AstNode.Visitor<AstScriptNode, State> {

    // Tracks whether the most recent data-bearing streamable was a read or a write.
    public enum ReadWriteState {
        NONE, READ, WRITE
    }

    /**
     * Mutable traversal state shared across visits of a single script.
     */
    public static final class State {
        // Stream list of the new (copied) script; streams are appended here.
        private List<AstStreamNode> streams;
        // Streamable list of the stream currently being copied.
        private List<AstStreamableNode> streamables;
        // Last observed read/write direction on the current stream.
        private ReadWriteState readWriteState;
        // Monotonic counter used to generate unique "~read~write~N" barrier names.
        private int readWriteBarrierCount;
    }

    @Override
    public AstScriptNode visit(AstScriptNode script, State state) {
        // Copy the script shell, then visit each stream so its (possibly
        // augmented) copy is appended to state.streams.
        AstScriptNode newScript = new AstScriptNode();
        newScript.setRegionInfo(script.getRegionInfo());
        newScript.getProperties().addAll(script.getProperties());
        state.streams = newScript.getStreams();
        for (AstStreamNode stream : script.getStreams()) {
            stream.accept(this, state);
        }
        return newScript;
    }

    @Override
    public AstScriptNode visit(AstPropertyNode propertyNode, State state) {
        // Properties are already copied wholesale in visit(AstScriptNode).
        return null;
    }

    @Override
    public AstScriptNode visit(AstAcceptNode acceptNode, State state) {
        state.readWriteState = ReadWriteState.NONE;

        AstAcceptNode newAcceptNode = new AstAcceptNode(acceptNode);
        state.streamables = newAcceptNode.getStreamables();
        for (AstStreamableNode streamable : acceptNode.getStreamables()) {
            streamable.accept(this, state);
        }
        // Accepted/rejected child streams are visited after the accept's own
        // streamables; each appends itself to state.streams.
        for (AstAcceptableNode acceptableNode : acceptNode.getAcceptables()) {
            acceptableNode.accept(this, state);
        }
        state.streams.add(newAcceptNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstAcceptedNode acceptedNode, State state) {
        state.readWriteState = ReadWriteState.NONE;

        AstAcceptedNode newAcceptedNode = new AstAcceptedNode();
        newAcceptedNode.setRegionInfo(acceptedNode.getRegionInfo());
        newAcceptedNode.setAcceptName(acceptedNode.getAcceptName());
        state.streamables = newAcceptedNode.getStreamables();
        for (AstStreamableNode streamable : acceptedNode.getStreamables()) {
            streamable.accept(this, state);
        }
        state.streams.add(newAcceptedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstRejectedNode rejectedNode, State state) {
        state.readWriteState = ReadWriteState.NONE;

        AstRejectedNode newRejectedNode = new AstRejectedNode();
        newRejectedNode.setRegionInfo(rejectedNode.getRegionInfo());
        newRejectedNode.setAcceptName(rejectedNode.getAcceptName());
        state.streamables = newRejectedNode.getStreamables();
        for (AstStreamableNode streamable : rejectedNode.getStreamables()) {
            streamable.accept(this, state);
        }
        state.streams.add(newRejectedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstConnectNode connectNode, State state) {
        state.readWriteState = ReadWriteState.NONE;

        AstConnectNode newConnectNode = new AstConnectNode(connectNode);
        state.streamables = newConnectNode.getStreamables();
        for (AstStreamableNode streamable : connectNode.getStreamables()) {
            streamable.accept(this, state);
        }
        state.streams.add(newConnectNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstConnectAbortNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstConnectAbortedNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadAwaitNode node, State state) {
        // An explicit read-await resets the direction tracking; a following
        // write then needs no injected barrier.
        state.readWriteState = ReadWriteState.NONE;
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteAwaitNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadNotifyNode node, State state) {
        state.readWriteState = ReadWriteState.NONE;
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteNotifyNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteValueNode node, State state) {
        // Inject the read-notify / write-await pair if the previous data
        // operation on this stream was a read.
        conditionallyInjectWriteBarrier(state, node.getRegionInfo());
        state.streamables.add(node);
        state.readWriteState = ReadWriteState.WRITE;
        return null;
    }

    @Override
    public AstScriptNode visit(AstDisconnectNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstUnbindNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstCloseNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteAbortNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadAbortNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadAbortedNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteAbortedNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstChildOpenedNode childOpenedNode, State state) {
        state.streamables.add(childOpenedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstChildClosedNode childClosedNode, State state) {
        state.streamables.add(childClosedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstOpenedNode openedNode, State state) {
        state.streamables.add(openedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstBoundNode boundNode, State state) {
        state.streamables.add(boundNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstConnectedNode connectedNode, State state) {
        state.streamables.add(connectedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadValueNode node, State state) {
        state.streamables.add(node);
        // Mark the stream as read-last; a subsequent write triggers injection.
        state.readWriteState = ReadWriteState.READ;
        return null;
    }

    @Override
    public AstScriptNode visit(AstDisconnectedNode disconnectedNode, State state) {
        state.streamables.add(disconnectedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstUnboundNode unboundNode, State state) {
        state.streamables.add(unboundNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstClosedNode closedNode, State state) {
        state.streamables.add(closedNode);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadConfigNode node, State state) {
        state.streamables.add(node);
        state.readWriteState = ReadWriteState.READ;
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteConfigNode node, State state) {
        // Same barrier rule as for write-value nodes.
        conditionallyInjectWriteBarrier(state, node.getRegionInfo());
        state.streamables.add(node);
        state.readWriteState = ReadWriteState.WRITE;
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadAdviseNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteAdviseNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadAdvisedNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteAdvisedNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadClosedNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteCloseNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteFlushNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstReadOptionNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    @Override
    public AstScriptNode visit(AstWriteOptionNode node, State state) {
        state.streamables.add(node);
        return null;
    }

    /**
     * If the last data operation on the current stream was a READ, appends a
     * read-notify followed by a write-await on a freshly generated barrier
     * ({@code "~read~write~N"}) before the upcoming write is added. No-op for
     * NONE and WRITE states.
     *
     * @param regionInfo region of the write node that triggered the check;
     *                   reused for both injected nodes
     */
    private void conditionallyInjectWriteBarrier(State state, RegionInfo regionInfo) {
        List<AstStreamableNode> streamables = state.streamables;
        switch (state.readWriteState) {
        case READ:
            String barrierName = String.format("~read~write~%d", ++state.readWriteBarrierCount);

            AstReadNotifyNode readNotify = new AstReadNotifyNode();
            readNotify.setRegionInfo(regionInfo);
            readNotify.setBarrierName(barrierName);

            AstWriteAwaitNode writeAwait = new AstWriteAwaitNode();
            writeAwait.setRegionInfo(regionInfo);
            writeAwait.setBarrierName(barrierName);

            streamables.add(readNotify);
            streamables.add(writeAwait);
            break;
        default:
            break;
        }
    }
}
/*
 Copyright (C) 2005-2012, by the President and Fellows of Harvard College.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.

 Dataverse Network - A web application to share, preserve and analyze research data.
 Developed at the Institute for Quantitative Social Science, Harvard University.
 Version 3.0.
*/
package edu.harvard.iq.dataverse;

import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.io.File;
import java.io.FileInputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import java.security.PrivateKey;

/* Handlenet imports: */
import net.handle.hdllib.AbstractMessage;
import net.handle.hdllib.AbstractResponse;
import net.handle.hdllib.AdminRecord;
import net.handle.hdllib.ClientSessionTracker;
import net.handle.hdllib.CreateHandleRequest;
import net.handle.hdllib.DeleteHandleRequest;
import net.handle.hdllib.Encoder;
import net.handle.hdllib.HandleException;
import net.handle.hdllib.HandleResolver;
import net.handle.hdllib.HandleValue;
import net.handle.hdllib.ModifyValueRequest;
import net.handle.hdllib.PublicKeyAuthenticationInfo;
import net.handle.hdllib.ResolutionRequest;
import net.handle.hdllib.Util;
import org.apache.commons.lang.NotImplementedException;

/**
 *
 * @author Leonid Andreev
 *
 * This is a *partial* implementation of the Handles global id
 * service.
 * As of now, it only does the registration updates, to accommodate
 * the modifyRegistration datasets API sub-command.
 */
@Stateless
public class HandlenetServiceBean extends AbstractGlobalIdServiceBean {

    @EJB
    DataverseServiceBean dataverseService;
    @EJB
    SettingsServiceBean settingsService;
    private static final Logger logger = Logger.getLogger(HandlenetServiceBean.class.getCanonicalName());

    private static final String HANDLE_PROTOCOL_TAG = "hdl";

    // Admin index used in HS_ADMIN records; configurable, defaults to 300.
    // Read once at bean construction; getAuthInfo()/deleteIdentifier() re-read
    // the property at call time (see the shadowing locals there).
    int handlenetIndex = System.getProperty("dataverse.handlenet.index") != null
            ? Integer.parseInt(System.getProperty("dataverse.handlenet.index")) : 300;

    public HandlenetServiceBean() {
        logger.log(Level.FINE, "Constructor");
    }

    @Override
    public boolean registerWhenPublished() {
        return false; // TODO current value plays safe, can we loosen up?
    }

    /**
     * Re-registers (modifies) the handle for the given DvObject if it already
     * exists at the handle server; otherwise creates it from scratch.
     */
    public void reRegisterHandle(DvObject dvObject) {
        logger.log(Level.FINE, "reRegisterHandle");
        if (!HANDLE_PROTOCOL_TAG.equals(dvObject.getProtocol())) {
            logger.log(Level.WARNING, "reRegisterHandle called on a dvObject with the non-handle global id: {0}",
                    dvObject.getId());
        }

        String handle = getDvObjectHandle(dvObject);
        boolean handleRegistered = isHandleRegistered(handle);

        if (handleRegistered) {
            // Rebuild/Modify an existing handle
            logger.log(Level.INFO, "Re-registering an existing handle id {0}", handle);

            String authHandle = getHandleAuthority(dvObject);
            HandleResolver resolver = new HandleResolver();

            String datasetUrl = getRegistrationUrl(dvObject);
            logger.log(Level.INFO, "New registration URL: {0}", datasetUrl);

            PublicKeyAuthenticationInfo auth = getAuthInfo(dvObject.getAuthority());

            try {
                AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex,
                        true, true, true, true, true, true, true, true, true, true, true, true);

                int timestamp = (int) (System.currentTimeMillis() / 1000);

                // Two values: the HS_ADMIN record (index 100) and the URL (index 1).
                HandleValue[] val = {
                        new HandleValue(100, "HS_ADMIN".getBytes("UTF8"), Encoder.encodeAdminRecord(admin),
                                HandleValue.TTL_TYPE_RELATIVE, 86400, timestamp, null, true, true, true, false),
                        new HandleValue(1, "URL".getBytes("UTF8"), datasetUrl.getBytes(),
                                HandleValue.TTL_TYPE_RELATIVE, 86400, timestamp, null, true, true, true, false)};

                ModifyValueRequest req = new ModifyValueRequest(handle.getBytes("UTF8"), val, auth);

                resolver.traceMessages = true;
                AbstractResponse response = resolver.processRequest(req);
                if (response.responseCode == AbstractMessage.RC_SUCCESS) {
                    logger.info("\nGot Response: \n" + response);
                } else {
                    logger.info("\nGot Error: \n" + response);
                }
            } catch (Throwable t) {
                logger.fine("\nError: " + t);
            }
        } else {
            // Create a new handle from scratch:
            logger.log(Level.INFO, "Handle {0} not registered. Registering (creating) from scratch.", handle);
            registerNewHandle(dvObject);
        }
    }

    /**
     * Registers a brand-new handle for the DvObject.
     *
     * @return {@code null} on success, otherwise the failure as a Throwable
     *         (callers such as {@link #createIdentifier} rethrow it).
     */
    public Throwable registerNewHandle(DvObject dvObject) {
        logger.log(Level.FINE, "registerNewHandle");
        String handlePrefix = dvObject.getAuthority();
        String handle = getDvObjectHandle(dvObject);
        String datasetUrl = getRegistrationUrl(dvObject);

        logger.log(Level.INFO, "Creating NEW handle {0}", handle);

        String authHandle = getHandleAuthority(dvObject);

        PublicKeyAuthenticationInfo auth = getAuthInfo(handlePrefix);
        HandleResolver resolver = new HandleResolver();

        try {
            AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex,
                    true, true, true, true, true, true, true, true, true, true, true, true);

            int timestamp = (int) (System.currentTimeMillis() / 1000);

            HandleValue[] val = {
                    new HandleValue(100, "HS_ADMIN".getBytes("UTF8"), Encoder.encodeAdminRecord(admin),
                            HandleValue.TTL_TYPE_RELATIVE, 86400, timestamp, null, true, true, true, false),
                    new HandleValue(1, "URL".getBytes("UTF8"), datasetUrl.getBytes(),
                            HandleValue.TTL_TYPE_RELATIVE, 86400, timestamp, null, true, true, true, false)};

            CreateHandleRequest req = new CreateHandleRequest(handle.getBytes("UTF8"), val, auth);

            resolver.traceMessages = true;
            AbstractResponse response = resolver.processRequest(req);
            if (response.responseCode == AbstractMessage.RC_SUCCESS) {
                logger.log(Level.INFO, "Success! Response: \n{0}", response);
                return null;
            } else {
                logger.log(Level.WARNING, "RegisterNewHandle failed. Error response: {0}", response);
                return new Exception("registerNewHandle failed: " + response);
            }
        } catch (Throwable t) {
            logger.log(Level.WARNING, "registerNewHandle failed", t);
            return t;
        }
    }

    /** Returns true if the handle resolves successfully at the handle server. */
    public boolean isHandleRegistered(String handle) {
        logger.log(Level.FINE, "isHandleRegistered");
        boolean handleRegistered = false;
        ResolutionRequest req = buildResolutionRequest(handle);
        AbstractResponse response = null;
        HandleResolver resolver = new HandleResolver();
        try {
            response = resolver.processRequest(req);
        } catch (HandleException ex) {
            logger.log(Level.WARNING, "Caught exception trying to process lookup request", ex);
        }
        if ((response != null && response.responseCode == AbstractMessage.RC_SUCCESS)) {
            logger.log(Level.INFO, "Handle {0} registered.", handle);
            handleRegistered = true;
        }
        return handleRegistered;
    }

    private ResolutionRequest buildResolutionRequest(final String handle) {
        logger.log(Level.FINE, "buildResolutionRequest");
        // The prefix is everything before the first "/" in "prefix/identifier".
        String handlePrefix = handle.substring(0, handle.indexOf("/"));

        PublicKeyAuthenticationInfo auth = getAuthInfo(handlePrefix);

        byte[][] types = null;
        int[] indexes = null;
        ResolutionRequest req = new ResolutionRequest(Util.encodeString(handle), types, indexes, auth);
        req.certify = false;
        req.cacheCertify = true;
        req.authoritative = false;
        req.ignoreRestrictedValues = true;
        return req;
    }

    /** Builds public-key authentication info from the configured admin credential file. */
    private PublicKeyAuthenticationInfo getAuthInfo(String handlePrefix) {
        logger.log(Level.FINE, "getAuthInfo");
        byte[] key = null;
        String adminCredFile = System.getProperty("dataverse.handlenet.admcredfile");
        // NOTE: deliberately re-reads the property at call time (shadows the field).
        int handlenetIndex = System.getProperty("dataverse.handlenet.index") != null
                ? Integer.parseInt(System.getProperty("dataverse.handlenet.index")) : 300;

        key = readKey(adminCredFile);
        PrivateKey privkey = null;
        privkey = readPrivKey(key, adminCredFile);
        String authHandle = getHandleAuthority(handlePrefix);
        PublicKeyAuthenticationInfo auth =
                new PublicKeyAuthenticationInfo(Util.encodeString(authHandle), handlenetIndex, privkey);
        return auth;
    }

    /** Target URL registered for the handle: site URL + object path + "hdl:authority/identifier". */
    private String getRegistrationUrl(DvObject dvObject) {
        logger.log(Level.FINE, "getRegistrationUrl");
        String siteUrl = systemConfig.getDataverseSiteUrl();
        String targetUrl = siteUrl + dvObject.getTargetUrl() + "hdl:" + dvObject.getAuthority()
                + "/" + dvObject.getIdentifier();
        return targetUrl;
    }

    public String getSiteUrl() {
        logger.log(Level.FINE, "getSiteUrl");
        String hostUrl = System.getProperty("dataverse.siteUrl");
        if (hostUrl != null && !"".equals(hostUrl)) {
            return hostUrl;
        }
        String hostName = System.getProperty("dataverse.fqdn");
        if (hostName == null) {
            try {
                hostName = InetAddress.getLocalHost().getCanonicalHostName();
            } catch (UnknownHostException e) {
                return null;
            }
        }
        hostUrl = "https://" + hostName;
        return hostUrl;
    }

    /**
     * Reads the entire admin credential file into a byte array.
     *
     * @return the file contents, or {@code null} if the file could not be opened
     */
    private byte[] readKey(final String file) {
        logger.log(Level.FINE, "readKey");
        byte[] key = null;
        // FIX: try-with-resources — the original leaked the FileInputStream.
        File f = new File(file);
        try (FileInputStream fs = new FileInputStream(f)) {
            key = new byte[(int) f.length()];
            int n = 0;
            while (n < key.length) {
                key[n++] = (byte) fs.read();
            }
        } catch (Throwable t) {
            logger.log(Level.SEVERE, "Cannot read private key {0}: {1}", new Object[]{file, t});
        }
        return key;
    }

    /**
     * Decrypts (if passphrase-protected) and decodes the private key bytes.
     *
     * @return the private key, or {@code null} on any failure (logged SEVERE)
     */
    private PrivateKey readPrivKey(byte[] key, final String file) {
        logger.log(Level.FINE, "readPrivKey");
        PrivateKey privkey = null;

        String secret = System.getProperty("dataverse.handlenet.admprivphrase");
        byte secKey[] = null;
        try {
            if (Util.requiresSecretKey(key)) {
                // The key file is encrypted with the configured passphrase.
                secKey = secret.getBytes();
            }
            key = Util.decrypt(key, secKey);
            privkey = Util.getPrivateKeyFromBytes(key, 0);
        } catch (Throwable t) {
            logger.log(Level.SEVERE, "Can''t load private key in {0}: {1}", new Object[]{file, t});
        }
        return privkey;
    }

    private String getDvObjectHandle(DvObject dvObject) {
        /*
         * This is different from dataset.getGlobalId() in that we don't
         * need the "hdl:" prefix.
         */
        String handle = dvObject.getAuthority() + "/" + dvObject.getIdentifier();
        return handle;
    }

    private String getHandleAuthority(DvObject dvObject) {
        return getHandleAuthority(dvObject.getAuthority());
    }

    // The "authority handle" administering a prefix is "0.NA/<prefix>".
    private String getHandleAuthority(String handlePrefix) {
        logger.log(Level.FINE, "getHandleAuthority");
        return "0.NA/" + handlePrefix;
    }

    @Override
    public boolean alreadyExists(DvObject dvObject) throws Exception {
        String handle = getDvObjectHandle(dvObject);
        return isHandleRegistered(handle);
    }

    @Override
    public boolean alreadyExists(GlobalId pid) throws Exception {
        String handle = pid.getAuthority() + "/" + pid.getIdentifier();
        return isHandleRegistered(handle);
    }

    @Override
    public Map<String, String> getIdentifierMetadata(DvObject dvObject) {
        throw new NotImplementedException();
    }

    @Override
    public HashMap lookupMetadataFromIdentifier(String protocol, String authority, String identifier) {
        throw new NotImplementedException();
    }

    @Override
    public String modifyIdentifierTargetURL(DvObject dvObject) throws Exception {
        logger.log(Level.FINE, "modifyIdentifier");
        reRegisterHandle(dvObject);
        if (dvObject instanceof Dataset) {
            // Also refresh the handles of all files in the dataset.
            Dataset dataset = (Dataset) dvObject;
            dataset.getFiles().forEach((df) -> {
                reRegisterHandle(df);
            });
        }
        return getIdentifier(dvObject);
    }

    @Override
    public void deleteIdentifier(DvObject dvObject) throws Exception {
        String handle = getDvObjectHandle(dvObject);
        String authHandle = getAuthHandle(dvObject);

        String adminCredFile = System.getProperty("dataverse.handlenet.admcredfile");
        // NOTE: deliberately re-reads the property at call time (shadows the field).
        int handlenetIndex = System.getProperty("dataverse.handlenet.index") != null
                ? Integer.parseInt(System.getProperty("dataverse.handlenet.index")) : 300;

        byte[] key = readKey(adminCredFile);
        PrivateKey privkey = readPrivKey(key, adminCredFile);

        HandleResolver resolver = new HandleResolver();
        resolver.setSessionTracker(new ClientSessionTracker());

        PublicKeyAuthenticationInfo auth =
                new PublicKeyAuthenticationInfo(Util.encodeString(authHandle), handlenetIndex, privkey);

        DeleteHandleRequest req = new DeleteHandleRequest(Util.encodeString(handle), auth);
        AbstractResponse response = null;
        try {
            response = resolver.processRequest(req);
        } catch (HandleException ex) {
            // FIX: log through the class logger instead of ex.printStackTrace().
            logger.log(Level.WARNING, "Caught exception trying to delete handle " + handle, ex);
        }
        if (response == null || response.responseCode != AbstractMessage.RC_SUCCESS) {
            logger.fine("error deleting '" + handle + "': " + response);
        } else {
            logger.fine("deleted " + handle);
        }
    }

    private boolean updateIdentifierStatus(DvObject dvObject, String statusIn) {
        logger.log(Level.FINE, "updateIdentifierStatus");
        reRegisterHandle(dvObject); // No Need to register new - this is only called when a handle exists
        return true;
    }

    private String getAuthHandle(DvObject dvObject) {
        // TODO hack: GNRSServiceBean retrieved this from vdcNetworkService
        return "0.NA/" + dvObject.getAuthority();
    }

    @Override
    public List<String> getProviderInformation() {
        ArrayList<String> providerInfo = new ArrayList<>();
        String providerName = "Handle";
        String providerLink = "https://hdl.handle.net";
        providerInfo.add(providerName);
        providerInfo.add(providerLink);
        return providerInfo;
    }

    @Override
    public String createIdentifier(DvObject dvObject) throws Throwable {
        Throwable result = registerNewHandle(dvObject);
        if (result != null) {
            throw result;
        }
        // TODO get exceptions from under the carpet
        return getDvObjectHandle(dvObject);
    }

    @Override
    public boolean publicizeIdentifier(DvObject dvObject) {
        if (dvObject.getIdentifier() == null || dvObject.getIdentifier().isEmpty()) {
            generateIdentifier(dvObject);
        }
        return updateIdentifierStatus(dvObject, "public");
    }
}
/*
 *
 * Copyright 2016 Big Data Curation Lab, University of Toronto,
 * 		Patricia Arocena,
 * 		Boris Glavic,
 * 		Renee J. Miller
 *
 * This software also contains code derived from STBenchmark as described in
 * with the permission of the authors:
 *
 * 		Bogdan Alexe, Wang-Chiew Tan, Yannis Velegrakis
 *
 * This code was originally described in:
 *
 * 		STBenchmark: Towards a Benchmark for Mapping Systems
 * 		Alexe, Bogdan and Tan, Wang-Chiew and Velegrakis, Yannis
 * 		PVLDB: Proceedings of the VLDB Endowment archive
 * 		2008, vol. 1, no. 1, pp. 230-244
 *
 * The copyright of the ToxGene (included as a jar file: toxgene.jar) belongs to
 * Denilson Barbosa. The iBench distribution contains this jar file with the
 * permission of the author of ToxGene
 * (http://www.cs.toronto.edu/tox/toxgene/index.html)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package tresc.benchmark.schemaGen;

import java.util.ArrayList;
import java.util.List;

import org.vagabond.util.CollectionUtils;
import org.vagabond.xmlmodel.MappingType;
import org.vagabond.xmlmodel.RelationType;

import tresc.benchmark.Constants.ScenarioName;
import tresc.benchmark.utils.Utils;
import vtools.dataModel.expression.Path;
import vtools.dataModel.expression.Projection;
import vtools.dataModel.expression.Query;
import vtools.dataModel.expression.SPJQuery;
import vtools.dataModel.expression.SelectClauseList;
import vtools.dataModel.expression.Variable;

//MN ENHANCED genTargetRels to pass types of attributes as argument to addRelation - 11 May 2014
//MN ENHANCED genSourceRels to pass types of attributes as argument to addRelation - 11 May 2014
//MN FIXED keySize in chooseTargetRels - 11 May 2014

/**
 * Copies the source relation and deletes an attribute from the target relation.
 *
 * @author mdangelo
 */
public class DeleteAttributeScenarioGenerator extends AbstractScenarioGenerator {

    // Number of attributes in the generated source table.
    private int numOfSrcTblAttr;
    // Number of attributes deleted from the target (always 1 <= numDelAttr < numOfSrcTblAttr).
    private int numDelAttr;
    // Number of primary-key attributes (clamped so the key survives the deletion).
    private int keySize;

    //MN added new attribute to check whether we are reusing target relation or not - 11 May 2014
    // When true, genSourceRels copies attribute types from the reused target relation.
    private boolean targetReuse;
    //MN

    public DeleteAttributeScenarioGenerator() {
        ;
    }

    /**
     * Randomizes the per-mapping parameters and clamps them to consistent
     * values (source has >= 2 attributes; at least one but not all attributes
     * are deleted; key fits into the surviving attributes).
     */
    @Override
    protected void initPartialMapping() {
        super.initPartialMapping();
        numOfSrcTblAttr =
                Utils.getRandomNumberAroundSomething(_generator, numOfElements,
                        numOfElementsDeviation);

        numDelAttr =
                Utils.getRandomNumberAroundSomething(_generator,
                        numRemovedAttr, numRemovedAttrDeviation);

        // PRG BUG FIX - Generate source relation with at least 2 elements - Sep 16, 2012
        numOfSrcTblAttr = (numOfSrcTblAttr > 1 ? numOfSrcTblAttr : 2);

        // make sure we never delete all attributes, and that we never delete no attributes
        numDelAttr = (numDelAttr > 0) ? numDelAttr : 1;
        numDelAttr = (numDelAttr < numOfSrcTblAttr) ? numDelAttr : (numOfSrcTblAttr - 1);

        // PRG FIX - DO NOT ENFORCE KEY UNLESS EXPLICITLY REQUESTED - Sep 16, 2012
        keySize =
                Utils.getRandomNumberAroundSomething(_generator, primaryKeySize,
                        primaryKeySizeDeviation);

        // PRG Adjust keySize w.r.t number of source table attributes and number of to be deleted attributes
        // e.g. numOfSrcTblAttr = 3, numDelAttr = 2 and ConfigOptions.PrimaryKeySize = 2. Then keySize should be 1
        // e.g. numOfSrcTblAttr = 3, numDelAttr = 1 and ConfigOptions.PrimaryKeySize = 2. Then keySize should be 2
        keySize = (keySize > numOfSrcTblAttr - numDelAttr) ? numOfSrcTblAttr - numDelAttr : keySize;

        //MN BEGIN - 11 May 2014
        targetReuse = false;
        //MN END
    }

    /**
     * Tries to reuse an existing source relation with enough attributes
     * (key + to-be-deleted); adds/adapts a primary key as needed.
     *
     * @return false if no suitable relation is available
     */
    @Override
    protected boolean chooseSourceRels() throws Exception {
        int minAttrs = keySize + numDelAttr;
        RelationType rel;

        // get a random relation
        rel = getRandomRel(true, minAttrs);

        if (rel == null)
            return false;

        numOfSrcTblAttr = rel.sizeOfAttrArray();

        // create primary key if necessary
        if (!rel.isSetPrimaryKey() && keySize > 0) {
            fac.addPrimaryKey(rel.getName(),
                    CollectionUtils.createSequence(0, keySize), true);
        }
        // adapt keySize
        else if (rel.isSetPrimaryKey()) {
            keySize = rel.getPrimaryKey().sizeOfAttrArray();
            // shrink numDelAttr so the key attributes are never deleted
            if (rel.sizeOfAttrArray() - keySize < numDelAttr)
                numDelAttr = rel.sizeOfAttrArray() - keySize;
        }

        m.addSourceRel(rel);

        return true;
    }

    /**
     * Generates a fresh source relation. When a target relation is being
     * reused ({@code targetReuse}), source attribute types are copied from the
     * target where available (falling back to "TEXT").
     */
    @Override
    // PRG ADD Source Code to Support Key Generation - Sep 17, 2012
    protected void genSourceRels() throws Exception {
        String srcName = randomRelName(0);
        String[] attrs = new String[numOfSrcTblAttr];

        //MN considered an array to store types of attributes of sourc relation - 11 May 2014
        String[] attrsType = new String[numOfSrcTblAttr];
        //MN

        // First, generate the appropriate number of key elements
        // Note: keySize should be > 0 to generate any key elements
        String[] keys = new String[keySize];
        for (int j = 0; j < keySize; j++)
            keys[j] = randomAttrName(0, 0) + "ke" + j;

        // Second, generate remaining source attributes
        int keyCount = 0;
        for (int i = 0; i < numOfSrcTblAttr; i++) {
            String attrName = randomAttrName(0, i);

            // Note: the body of this IF construct would not be executed when keySize = 0
            if (keyCount < keySize)
                attrName = keys[keyCount];

            keyCount++;

            attrs[i] = attrName;

            //MN BEGIN - 11 May 2014
            if (targetReuse)
                if (i < m.getTargetRels().get(0).getAttrArray().length)
                    attrsType[i] = m.getTargetRels().get(0).getAttrArray(i).getDataType();
                else
                    attrsType[i] = "TEXT";
            //MN END
        }

        //MN BEGIN - 11 May 2014
        if (!targetReuse)
            fac.addRelation(getRelHook(0), srcName, attrs, true);
        else
            fac.addRelation(getRelHook(0), srcName, attrs, attrsType, true);
        //MN END

        // Add primary key if explicitly requested
        if (keySize > 0)
            fac.addPrimaryKey(srcName, keys, true);

        //MN BEGIN - 11 May 2014
        // reset so the flag does not leak into the next partial mapping
        targetReuse = false;
        //MN END
    }

    /**
     * Tries to reuse an existing target relation; on success sets
     * {@code targetReuse} and recomputes {@code numOfSrcTblAttr} as
     * target size + deleted attributes.
     *
     * @return false if no suitable relation is available
     */
    @Override
    protected boolean chooseTargetRels() throws Exception {
        RelationType rel;
        int minAttr = keySize;

        rel = getRandomRel(false, minAttr);

        //MN we can consider Max_Num_Tries to add more flexibility - 11 May 2014
        if (rel == null)
            return false;

        if (keySize > 0 && !rel.isSetPrimaryKey()) {
            fac.addPrimaryKey(rel.getName(),
                    CollectionUtils.createSequence(0, keySize), false);
        }
        else if (rel.isSetPrimaryKey()) {
            keySize = rel.getPrimaryKey().sizeOfAttrArray();
        }

        numOfSrcTblAttr = rel.sizeOfAttrArray() + numDelAttr;

        m.addTargetRel(rel);

        //MN BEGIN - 11 May 2014
        targetReuse = true;
        //MN END

        return true;
    }

    /**
     * Generates the target relation: a copy of the source minus the last
     * {@code numDelAttr} attributes, carrying over the source attribute types.
     */
    @Override
    // PRG ADD Source Code to Support Key Generation - Sep 17, 2012
    protected void genTargetRels() throws Exception {
        String trgName = randomRelName(0);
        String[] attrs = new String[numOfSrcTblAttr - numDelAttr];
        String[] srcAttrs = m.getAttrIds(0, true);

        //MN considered an array to store types of attributes of target relation - 4 May 2014
        List<String> attrsType = new ArrayList<String>();
        //MN

        // copy all the source attributes except the last few (to account for the ones we want to delete)
        System.arraycopy(srcAttrs, 0, attrs, 0, numOfSrcTblAttr - numDelAttr);

        //MN BEGIN - 4 May 2014
        for (int i = 0; i < numOfSrcTblAttr - numDelAttr; i++)
            attrsType.add(m.getSourceRels().get(0).getAttrArray(i).getDataType());
        //MN END

        //MN changed - 4 May 2014
        fac.addRelation(getRelHook(0), trgName, attrs, attrsType.toArray(new String[] {}), false);

        // PRG ADD primary key to target relation if necessary
        String[] keys = new String[keySize];
        for (int j = 0; j < keySize; j++)
            keys[j] = srcAttrs[j];

        if (keySize > 0)
            fac.addPrimaryKey(trgName, keys, false);
    }

    // One 1:1 correspondence per surviving (non-deleted) attribute.
    @Override
    protected void genCorrespondences() {
        for (int i = 0; i < numOfSrcTblAttr - numDelAttr; i++)
            addCorr(0, i, 0, i);
    }

    // Source atom covers all attributes; target atom only the surviving ones.
    @Override
    protected void genMappings() throws Exception {
        MappingType m1 = fac.addMapping(m.getCorrs());

        // source and target tables get fresh variables
        fac.addForeachAtom(m1, 0, fac.getFreshVars(0, numOfSrcTblAttr));
        fac.addExistsAtom(m1, 0, fac.getFreshVars(0, numOfSrcTblAttr - numDelAttr));
    }

    @Override
    protected void genTransformations() throws Exception {
        String creates = m.getRelName(0, false);
        Query q;

        q = genQueries();
        q.storeCode(q.toTrampString(m.getMapIds()));
        q = addQueryOrUnion(creates, q);

        fac.addTransformation(q.getStoredCode(), m.getMapIds(), creates);
    }

    /**
     * Builds the SPJ query projecting the target's attributes from the source
     * relation (variable "X").
     */
    private Query genQueries() throws Exception {
        String sourceRelName = m.getRelName(0, true);
        String[] tAttrs = m.getAttrIds(0, false);

        // create the query for the source table and add the from clause
        SPJQuery q = new SPJQuery();
        q.getFrom().add(new Variable("X"), new Projection(Path.ROOT, sourceRelName));

        SelectClauseList sel = q.getSelect();

        // add entries to the select clause
        for (String a : tAttrs) {
            Projection att = new Projection(new Variable("X"), a);
            sel.add(a, att);
        }

        return q;
    }

    @Override
    public ScenarioName getScenType() {
        return ScenarioName.DELATTRIBUTE;
    }
}
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2016 Eric Lafortune @ GuardSquare * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.classfile.visitor; import proguard.classfile.*; import proguard.classfile.attribute.*; import proguard.classfile.attribute.annotation.*; import proguard.classfile.attribute.annotation.target.*; import proguard.classfile.attribute.annotation.target.visitor.*; import proguard.classfile.attribute.annotation.visitor.*; import proguard.classfile.attribute.preverification.*; import proguard.classfile.attribute.preverification.visitor.*; import proguard.classfile.attribute.visitor.*; import proguard.classfile.constant.*; import proguard.classfile.constant.visitor.ConstantVisitor; import proguard.classfile.instruction.*; import proguard.classfile.instruction.visitor.InstructionVisitor; import proguard.classfile.util.*; import java.io.PrintStream; /** * This <code>ClassVisitor</code> prints out the complete internal * structure of the classes it visits. 
* * @author Eric Lafortune */ public class ClassPrinter extends SimplifiedVisitor implements ClassVisitor, ConstantVisitor, MemberVisitor, AttributeVisitor, BootstrapMethodInfoVisitor, InnerClassesInfoVisitor, ExceptionInfoVisitor, StackMapFrameVisitor, VerificationTypeVisitor, LineNumberInfoVisitor, ParameterInfoVisitor, LocalVariableInfoVisitor, LocalVariableTypeInfoVisitor, AnnotationVisitor, TypeAnnotationVisitor, TargetInfoVisitor, LocalVariableTargetElementVisitor, TypePathInfoVisitor, ElementValueVisitor, InstructionVisitor { private static final String INDENTATION = " "; private final PrintStream ps; private int indentation; /** * Creates a new ClassPrinter that prints to <code>System.out</code>. */ public ClassPrinter() { this(System.out); } /** * Creates a new ClassPrinter that prints to the given * <code>PrintStream</code>. */ public ClassPrinter(PrintStream printStream) { ps = printStream; } // Implementations for ClassVisitor. public void visitProgramClass(ProgramClass programClass) { println("_____________________________________________________________________"); println(visitorInfo(programClass) + " " + "Program class: " + programClass.getName()); indent(); println("Superclass: " + programClass.getSuperName()); println("Major version: 0x" + Integer.toHexString(ClassUtil.internalMajorClassVersion(programClass.u4version))); println("Minor version: 0x" + Integer.toHexString(ClassUtil.internalMinorClassVersion(programClass.u4version))); println(" = target " + ClassUtil.externalClassVersion(programClass.u4version)); println("Access flags: 0x" + Integer.toHexString(programClass.u2accessFlags)); println(" = " + ((programClass.u2accessFlags & ClassConstants.ACC_ANNOTATTION) != 0 ? "@ " : "") + ClassUtil.externalClassAccessFlags(programClass.u2accessFlags) + ((programClass.u2accessFlags & ClassConstants.ACC_ENUM) != 0 ? "enum " : (programClass.u2accessFlags & ClassConstants.ACC_INTERFACE) == 0 ? 
"class " : "") + ClassUtil.externalClassName(programClass.getName()) + (programClass.u2superClass == 0 ? "" : " extends " + ClassUtil.externalClassName(programClass.getSuperName()))); outdent(); println(); println("Interfaces (count = " + programClass.u2interfacesCount + "):"); indent(); programClass.interfaceConstantsAccept(this); outdent(); println(); println("Constant Pool (count = " + programClass.u2constantPoolCount + "):"); indent(); programClass.constantPoolEntriesAccept(this); outdent(); println(); println("Fields (count = " + programClass.u2fieldsCount + "):"); indent(); programClass.fieldsAccept(this); outdent(); println(); println("Methods (count = " + programClass.u2methodsCount + "):"); indent(); programClass.methodsAccept(this); outdent(); println(); println("Class file attributes (count = " + programClass.u2attributesCount + "):"); indent(); programClass.attributesAccept(this); outdent(); println(); } public void visitLibraryClass(LibraryClass libraryClass) { println("_____________________________________________________________________"); println(visitorInfo(libraryClass) + " " + "Library class: " + libraryClass.getName()); indent(); println("Superclass: " + libraryClass.getSuperName()); println("Access flags: 0x" + Integer.toHexString(libraryClass.u2accessFlags)); println(" = " + ((libraryClass.u2accessFlags & ClassConstants.ACC_ANNOTATTION) != 0 ? "@ " : "") + ClassUtil.externalClassAccessFlags(libraryClass.u2accessFlags) + ((libraryClass.u2accessFlags & ClassConstants.ACC_ENUM) != 0 ? "enum " : (libraryClass.u2accessFlags & ClassConstants.ACC_INTERFACE) == 0 ? "class " : "") + ClassUtil.externalClassName(libraryClass.getName()) + (libraryClass.getSuperName() == null ? 
"" : " extends " + ClassUtil.externalClassName(libraryClass.getSuperName()))); outdent(); println(); println("Interfaces (count = " + libraryClass.interfaceClasses.length + "):"); for (int index = 0; index < libraryClass.interfaceClasses.length; index++) { Clazz interfaceClass = libraryClass.interfaceClasses[index]; if (interfaceClass != null) { println(" + " + interfaceClass.getName()); } } println("Fields (count = " + libraryClass.fields.length + "):"); libraryClass.fieldsAccept(this); println("Methods (count = " + libraryClass.methods.length + "):"); libraryClass.methodsAccept(this); } // Implementations for ConstantVisitor. public void visitIntegerConstant(Clazz clazz, IntegerConstant integerConstant) { println(visitorInfo(integerConstant) + " Integer [" + integerConstant.getValue() + "]"); } public void visitLongConstant(Clazz clazz, LongConstant longConstant) { println(visitorInfo(longConstant) + " Long [" + longConstant.getValue() + "]"); } public void visitFloatConstant(Clazz clazz, FloatConstant floatConstant) { println(visitorInfo(floatConstant) + " Float [" + floatConstant.getValue() + "]"); } public void visitDoubleConstant(Clazz clazz, DoubleConstant doubleConstant) { println(visitorInfo(doubleConstant) + " Double [" + doubleConstant.getValue() + "]"); } public void visitStringConstant(Clazz clazz, StringConstant stringConstant) { println(visitorInfo(stringConstant) + " String [" + stringConstant.getString(clazz) + "]"); } public void visitUtf8Constant(Clazz clazz, Utf8Constant utf8Constant) { println(visitorInfo(utf8Constant) + " Utf8 [" + utf8Constant.getString() + "]"); } public void visitInvokeDynamicConstant(Clazz clazz, InvokeDynamicConstant invokeDynamicConstant) { println(visitorInfo(invokeDynamicConstant) + " InvokeDynamic [bootstrap method index = " + invokeDynamicConstant.u2bootstrapMethodAttributeIndex + "]:"); indent(); clazz.constantPoolEntryAccept(invokeDynamicConstant.u2nameAndTypeIndex, this); outdent(); } public void 
visitMethodHandleConstant(Clazz clazz, MethodHandleConstant methodHandleConstant) { println(visitorInfo(methodHandleConstant) + " MethodHandle [kind = " + methodHandleConstant.u1referenceKind + "]:"); indent(); clazz.constantPoolEntryAccept(methodHandleConstant.u2referenceIndex, this); outdent(); } public void visitFieldrefConstant(Clazz clazz, FieldrefConstant fieldrefConstant) { println(visitorInfo(fieldrefConstant) + " Fieldref [" + clazz.getClassName(fieldrefConstant.u2classIndex) + "." + clazz.getName(fieldrefConstant.u2nameAndTypeIndex) + " " + clazz.getType(fieldrefConstant.u2nameAndTypeIndex) + "]"); } public void visitInterfaceMethodrefConstant(Clazz clazz, InterfaceMethodrefConstant interfaceMethodrefConstant) { println(visitorInfo(interfaceMethodrefConstant) + " InterfaceMethodref [" + clazz.getClassName(interfaceMethodrefConstant.u2classIndex) + "." + clazz.getName(interfaceMethodrefConstant.u2nameAndTypeIndex) + " " + clazz.getType(interfaceMethodrefConstant.u2nameAndTypeIndex) + "]"); } public void visitMethodrefConstant(Clazz clazz, MethodrefConstant methodrefConstant) { println(visitorInfo(methodrefConstant) + " Methodref [" + clazz.getClassName(methodrefConstant.u2classIndex) + "." + clazz.getName(methodrefConstant.u2nameAndTypeIndex) + " " + clazz.getType(methodrefConstant.u2nameAndTypeIndex) + "]"); } public void visitClassConstant(Clazz clazz, ClassConstant classConstant) { println(visitorInfo(classConstant) + " Class [" + classConstant.getName(clazz) + "]"); } public void visitMethodTypeConstant(Clazz clazz, MethodTypeConstant methodTypeConstant) { println(visitorInfo(methodTypeConstant) + " MethodType [" + methodTypeConstant.getType(clazz) + "]"); } public void visitNameAndTypeConstant(Clazz clazz, NameAndTypeConstant nameAndTypeConstant) { println(visitorInfo(nameAndTypeConstant) + " NameAndType [" + nameAndTypeConstant.getName(clazz) + " " + nameAndTypeConstant.getType(clazz) + "]"); } // Implementations for MemberVisitor. 
public void visitProgramField(ProgramClass programClass, ProgramField programField) { println(visitorInfo(programField) + " " + "Field: " + programField.getName(programClass) + " " + programField.getDescriptor(programClass)); indent(); println("Access flags: 0x" + Integer.toHexString(programField.u2accessFlags)); println(" = " + ClassUtil.externalFullFieldDescription(programField.u2accessFlags, programField.getName(programClass), programField.getDescriptor(programClass))); visitMember(programClass, programField); outdent(); } public void visitProgramMethod(ProgramClass programClass, ProgramMethod programMethod) { println(visitorInfo(programMethod) + " " + "Method: " + programMethod.getName(programClass) + programMethod.getDescriptor(programClass)); indent(); println("Access flags: 0x" + Integer.toHexString(programMethod.u2accessFlags)); println(" = " + ClassUtil.externalFullMethodDescription(programClass.getName(), programMethod.u2accessFlags, programMethod.getName(programClass), programMethod.getDescriptor(programClass))); visitMember(programClass, programMethod); outdent(); } private void visitMember(ProgramClass programClass, ProgramMember programMember) { if (programMember.u2attributesCount > 0) { println("Class member attributes (count = " + programMember.u2attributesCount + "):"); programMember.attributesAccept(programClass, this); } } public void visitLibraryField(LibraryClass libraryClass, LibraryField libraryField) { println(visitorInfo(libraryField) + " " + "Field: " + libraryField.getName(libraryClass) + " " + libraryField.getDescriptor(libraryClass)); indent(); println("Access flags: 0x" + Integer.toHexString(libraryField.u2accessFlags)); println(" = " + ClassUtil.externalFullFieldDescription(libraryField.u2accessFlags, libraryField.getName(libraryClass), libraryField.getDescriptor(libraryClass))); outdent(); } public void visitLibraryMethod(LibraryClass libraryClass, LibraryMethod libraryMethod) { println(visitorInfo(libraryMethod) + " " + "Method: " + 
libraryMethod.getName(libraryClass) + " " + libraryMethod.getDescriptor(libraryClass)); indent(); println("Access flags: 0x" + Integer.toHexString(libraryMethod.u2accessFlags)); println(" = " + ClassUtil.externalFullMethodDescription(libraryClass.getName(), libraryMethod.u2accessFlags, libraryMethod.getName(libraryClass), libraryMethod.getDescriptor(libraryClass))); outdent(); } // Implementations for AttributeVisitor. // Note that attributes are typically only referenced once, so we don't // test if they are marked already. public void visitUnknownAttribute(Clazz clazz, UnknownAttribute unknownAttribute) { println(visitorInfo(unknownAttribute) + " Unknown attribute (" + unknownAttribute.getAttributeName(clazz) + ")"); } public void visitBootstrapMethodsAttribute(Clazz clazz, BootstrapMethodsAttribute bootstrapMethodsAttribute) { println(visitorInfo(bootstrapMethodsAttribute) + " Bootstrap methods attribute (count = " + bootstrapMethodsAttribute.u2bootstrapMethodsCount + "):"); indent(); bootstrapMethodsAttribute.bootstrapMethodEntriesAccept(clazz, this); outdent(); } public void visitSourceFileAttribute(Clazz clazz, SourceFileAttribute sourceFileAttribute) { println(visitorInfo(sourceFileAttribute) + " Source file attribute:"); indent(); clazz.constantPoolEntryAccept(sourceFileAttribute.u2sourceFileIndex, this); outdent(); } public void visitSourceDirAttribute(Clazz clazz, SourceDirAttribute sourceDirAttribute) { println(visitorInfo(sourceDirAttribute) + " Source dir attribute:"); indent(); clazz.constantPoolEntryAccept(sourceDirAttribute.u2sourceDirIndex, this); outdent(); } public void visitInnerClassesAttribute(Clazz clazz, InnerClassesAttribute innerClassesAttribute) { println(visitorInfo(innerClassesAttribute) + " Inner classes attribute (count = " + innerClassesAttribute.u2classesCount + ")"); indent(); innerClassesAttribute.innerClassEntriesAccept(clazz, this); outdent(); } public void visitEnclosingMethodAttribute(Clazz clazz, EnclosingMethodAttribute 
enclosingMethodAttribute) { println(visitorInfo(enclosingMethodAttribute) + " Enclosing method attribute:"); indent(); clazz.constantPoolEntryAccept(enclosingMethodAttribute.u2classIndex, this); if (enclosingMethodAttribute.u2nameAndTypeIndex != 0) { clazz.constantPoolEntryAccept(enclosingMethodAttribute.u2nameAndTypeIndex, this); } outdent(); } public void visitDeprecatedAttribute(Clazz clazz, DeprecatedAttribute deprecatedAttribute) { println(visitorInfo(deprecatedAttribute) + " Deprecated attribute"); } public void visitSyntheticAttribute(Clazz clazz, SyntheticAttribute syntheticAttribute) { println(visitorInfo(syntheticAttribute) + " Synthetic attribute"); } public void visitSignatureAttribute(Clazz clazz, SignatureAttribute signatureAttribute) { println(visitorInfo(signatureAttribute) + " Signature attribute:"); indent(); clazz.constantPoolEntryAccept(signatureAttribute.u2signatureIndex, this); outdent(); } public void visitConstantValueAttribute(Clazz clazz, Field field, ConstantValueAttribute constantValueAttribute) { println(visitorInfo(constantValueAttribute) + " Constant value attribute:"); clazz.constantPoolEntryAccept(constantValueAttribute.u2constantValueIndex, this); } public void visitMethodParametersAttribute(Clazz clazz, Method method, MethodParametersAttribute methodParametersAttribute) { println(visitorInfo(methodParametersAttribute) + " Method parameters attribute (count = " + methodParametersAttribute.u1parametersCount + ")"); indent(); methodParametersAttribute.parametersAccept(clazz, method, this); outdent(); } public void visitExceptionsAttribute(Clazz clazz, Method method, ExceptionsAttribute exceptionsAttribute) { println(visitorInfo(exceptionsAttribute) + " Exceptions attribute (count = " + exceptionsAttribute.u2exceptionIndexTableLength + ")"); indent(); exceptionsAttribute.exceptionEntriesAccept((ProgramClass)clazz, this); outdent(); } public void visitCodeAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute) { 
println(visitorInfo(codeAttribute) + " Code attribute instructions (code length = "+ codeAttribute.u4codeLength + ", locals = "+ codeAttribute.u2maxLocals + ", stack = "+ codeAttribute.u2maxStack + "):"); indent(); codeAttribute.instructionsAccept(clazz, method, this); println("Code attribute exceptions (count = " + codeAttribute.u2exceptionTableLength + "):"); codeAttribute.exceptionsAccept(clazz, method, this); println("Code attribute attributes (attribute count = " + codeAttribute.u2attributesCount + "):"); codeAttribute.attributesAccept(clazz, method, this); outdent(); } public void visitStackMapAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, StackMapAttribute stackMapAttribute) { println(visitorInfo(codeAttribute) + " Stack map attribute (count = "+ stackMapAttribute.u2stackMapFramesCount + "):"); indent(); stackMapAttribute.stackMapFramesAccept(clazz, method, codeAttribute, this); outdent(); } public void visitStackMapTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, StackMapTableAttribute stackMapTableAttribute) { println(visitorInfo(codeAttribute) + " Stack map table attribute (count = "+ stackMapTableAttribute.u2stackMapFramesCount + "):"); indent(); stackMapTableAttribute.stackMapFramesAccept(clazz, method, codeAttribute, this); outdent(); } public void visitLineNumberTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, LineNumberTableAttribute lineNumberTableAttribute) { println(visitorInfo(lineNumberTableAttribute) + " Line number table attribute (count = " + lineNumberTableAttribute.u2lineNumberTableLength + ")"); indent(); lineNumberTableAttribute.lineNumbersAccept(clazz, method, codeAttribute, this); outdent(); } public void visitLocalVariableTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableTableAttribute localVariableTableAttribute) { println(visitorInfo(localVariableTableAttribute) + " Local variable table attribute (count = " + 
localVariableTableAttribute.u2localVariableTableLength + ")"); indent(); localVariableTableAttribute.localVariablesAccept(clazz, method, codeAttribute, this); outdent(); } public void visitLocalVariableTypeTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableTypeTableAttribute localVariableTypeTableAttribute) { println(visitorInfo(localVariableTypeTableAttribute) + " Local variable type table attribute (count = "+ localVariableTypeTableAttribute.u2localVariableTypeTableLength + ")"); indent(); localVariableTypeTableAttribute.localVariablesAccept(clazz, method, codeAttribute, this); outdent(); } public void visitRuntimeVisibleAnnotationsAttribute(Clazz clazz, RuntimeVisibleAnnotationsAttribute runtimeVisibleAnnotationsAttribute) { println(visitorInfo(runtimeVisibleAnnotationsAttribute) + " Runtime visible annotations attribute:"); indent(); runtimeVisibleAnnotationsAttribute.annotationsAccept(clazz, this); outdent(); } public void visitRuntimeInvisibleAnnotationsAttribute(Clazz clazz, RuntimeInvisibleAnnotationsAttribute runtimeInvisibleAnnotationsAttribute) { println(visitorInfo(runtimeInvisibleAnnotationsAttribute) + " Runtime invisible annotations attribute:"); indent(); runtimeInvisibleAnnotationsAttribute.annotationsAccept(clazz, this); outdent(); } public void visitRuntimeVisibleParameterAnnotationsAttribute(Clazz clazz, Method method, RuntimeVisibleParameterAnnotationsAttribute runtimeVisibleParameterAnnotationsAttribute) { println(visitorInfo(runtimeVisibleParameterAnnotationsAttribute) + " Runtime visible parameter annotations attribute (parameter count = " + runtimeVisibleParameterAnnotationsAttribute.u1parametersCount + "):"); indent(); runtimeVisibleParameterAnnotationsAttribute.annotationsAccept(clazz, method, this); outdent(); } public void visitRuntimeInvisibleParameterAnnotationsAttribute(Clazz clazz, Method method, RuntimeInvisibleParameterAnnotationsAttribute runtimeInvisibleParameterAnnotationsAttribute) { 
println(visitorInfo(runtimeInvisibleParameterAnnotationsAttribute) + " Runtime invisible parameter annotations attribute (parameter count = " + runtimeInvisibleParameterAnnotationsAttribute.u1parametersCount + "):"); indent(); runtimeInvisibleParameterAnnotationsAttribute.annotationsAccept(clazz, method, this); outdent(); } public void visitRuntimeVisibleTypeAnnotationsAttribute(Clazz clazz, RuntimeVisibleTypeAnnotationsAttribute runtimeVisibleTypeAnnotationsAttribute) { println(visitorInfo(runtimeVisibleTypeAnnotationsAttribute) + " Runtime visible type annotations attribute"); indent(); runtimeVisibleTypeAnnotationsAttribute.typeAnnotationsAccept(clazz, this); outdent(); } public void visitRuntimeInvisibleTypeAnnotationsAttribute(Clazz clazz, RuntimeInvisibleTypeAnnotationsAttribute runtimeInvisibleTypeAnnotationsAttribute) { println(visitorInfo(runtimeInvisibleTypeAnnotationsAttribute) + " Runtime invisible type annotations attribute"); indent(); runtimeInvisibleTypeAnnotationsAttribute.typeAnnotationsAccept(clazz, this); outdent(); } public void visitAnnotationDefaultAttribute(Clazz clazz, Method method, AnnotationDefaultAttribute annotationDefaultAttribute) { println(visitorInfo(annotationDefaultAttribute) + " Annotation default attribute:"); indent(); annotationDefaultAttribute.defaultValueAccept(clazz, this); outdent(); } // Implementations for BootstrapMethodInfoVisitor. public void visitBootstrapMethodInfo(Clazz clazz, BootstrapMethodInfo bootstrapMethodInfo) { println(visitorInfo(bootstrapMethodInfo) + " BootstrapMethodInfo (argument count = " + bootstrapMethodInfo.u2methodArgumentCount+ "):"); indent(); clazz.constantPoolEntryAccept(bootstrapMethodInfo.u2methodHandleIndex, this); bootstrapMethodInfo.methodArgumentsAccept(clazz, this); outdent(); } // Implementations for InnerClassesInfoVisitor. 
public void visitInnerClassesInfo(Clazz clazz, InnerClassesInfo innerClassesInfo) { println(visitorInfo(innerClassesInfo) + " InnerClassesInfo:"); indent(); println("Access flags: 0x" + Integer.toHexString(innerClassesInfo.u2innerClassAccessFlags) + " = " + ClassUtil.externalClassAccessFlags(innerClassesInfo.u2innerClassAccessFlags)); innerClassesInfo.innerClassConstantAccept(clazz, this); innerClassesInfo.outerClassConstantAccept(clazz, this); innerClassesInfo.innerNameConstantAccept(clazz, this); outdent(); } // Implementations for InstructionVisitor. public void visitAnyInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction instruction) { println(instruction.toString(offset)); } public void visitConstantInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, ConstantInstruction constantInstruction) { println(constantInstruction.toString(offset)); indent(); clazz.constantPoolEntryAccept(constantInstruction.constantIndex, this); outdent(); } public void visitTableSwitchInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, TableSwitchInstruction tableSwitchInstruction) { println(tableSwitchInstruction.toString(offset)); indent(); int[] jumpOffsets = tableSwitchInstruction.jumpOffsets; for (int index = 0; index < jumpOffsets.length; index++) { int jumpOffset = jumpOffsets[index]; println(Integer.toString(tableSwitchInstruction.lowCase + index) + ": offset = " + jumpOffset + ", target = " + (offset + jumpOffset)); } int defaultOffset = tableSwitchInstruction.defaultOffset; println("default: offset = " + defaultOffset + ", target = "+ (offset + defaultOffset)); outdent(); } public void visitLookUpSwitchInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, LookUpSwitchInstruction lookUpSwitchInstruction) { println(lookUpSwitchInstruction.toString(offset)); indent(); int[] cases = lookUpSwitchInstruction.cases; int[] jumpOffsets = 
lookUpSwitchInstruction.jumpOffsets; for (int index = 0; index < jumpOffsets.length; index++) { int jumpOffset = jumpOffsets[index]; println(Integer.toString(cases[index]) + ": offset = " + jumpOffset + ", target = " + (offset + jumpOffset)); } int defaultOffset = lookUpSwitchInstruction.defaultOffset; println("default: offset = " + defaultOffset + ", target = "+ (offset + defaultOffset)); outdent(); } // Implementations for ExceptionInfoVisitor. public void visitExceptionInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, ExceptionInfo exceptionInfo) { println(visitorInfo(exceptionInfo) + " ExceptionInfo (" + exceptionInfo.u2startPC + " -> " + exceptionInfo.u2endPC + ": " + exceptionInfo.u2handlerPC + "):"); if (exceptionInfo.u2catchType != 0) { clazz.constantPoolEntryAccept(exceptionInfo.u2catchType, this); } } // Implementations for StackMapFrameVisitor. public void visitSameZeroFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, SameZeroFrame sameZeroFrame) { println(visitorInfo(sameZeroFrame) + " [" + offset + "]" + " Var: ..., Stack: (empty)"); } public void visitSameOneFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, SameOneFrame sameOneFrame) { print(visitorInfo(sameOneFrame) + " [" + offset + "]" + " Var: ..., Stack: "); sameOneFrame.stackItemAccept(clazz, method, codeAttribute, offset, this); println(); } public void visitLessZeroFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, LessZeroFrame lessZeroFrame) { println(visitorInfo(lessZeroFrame) + " [" + offset + "]" + " Var: -" + lessZeroFrame.choppedVariablesCount + ", Stack: (empty)"); } public void visitMoreZeroFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, MoreZeroFrame moreZeroFrame) { print(visitorInfo(moreZeroFrame) + " [" + offset + "]" + " Var: ..."); moreZeroFrame.additionalVariablesAccept(clazz, method, codeAttribute, offset, this); ps.println(", Stack: (empty)"); } public void 
visitFullFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, FullFrame fullFrame) { print(visitorInfo(fullFrame) + " [" + offset + "]" + " Var: "); fullFrame.variablesAccept(clazz, method, codeAttribute, offset, this); ps.print(", Stack: "); fullFrame.stackAccept(clazz, method, codeAttribute, offset, this); println(); } // Implementations for VerificationTypeVisitor. public void visitIntegerType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, IntegerType integerType) { ps.print("[i]"); } public void visitFloatType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, FloatType floatType) { ps.print("[f]"); } public void visitLongType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, LongType longType) { ps.print("[l]"); } public void visitDoubleType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, DoubleType doubleType) { ps.print("[d]"); } public void visitTopType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, TopType topType) { ps.print("[T]"); } public void visitObjectType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, ObjectType objectType) { ps.print("[a:" + clazz.getClassName(objectType.u2classIndex) + "]"); } public void visitNullType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, NullType nullType) { ps.print("[n]"); } public void visitUninitializedType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, UninitializedType uninitializedType) { ps.print("[u:" + uninitializedType.u2newInstructionOffset + "]"); } public void visitUninitializedThisType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, UninitializedThisType uninitializedThisType) { ps.print("[u:this]"); } // Implementations for LineNumberInfoVisitor. 
public void visitLineNumberInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, LineNumberInfo lineNumberInfo) { println("[" + lineNumberInfo.u2startPC + "] -> line " + lineNumberInfo.u2lineNumber + (lineNumberInfo.getSource() == null ? "" : " [" + lineNumberInfo.getSource() + "]")); } // Implementations for ParameterInfoVisitor. public void visitParameterInfo(Clazz clazz, Method method, int parameterIndex, ParameterInfo parameterInfo) { println("p" + parameterIndex + ": access flags: 0x" + Integer.toHexString(parameterInfo.u2accessFlags) + " = " + ClassUtil.externalParameterAccessFlags(parameterInfo.u2accessFlags) + (parameterInfo.u2nameIndex == 0 ? "" : " [" + parameterInfo.getName(clazz) + "]")); } // Implementations for LocalVariableInfoVisitor. public void visitLocalVariableInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableInfo localVariableInfo) { println("v" + localVariableInfo.u2index + ": " + localVariableInfo.u2startPC + " -> " + (localVariableInfo.u2startPC + localVariableInfo.u2length) + " [" + localVariableInfo.getDescriptor(clazz) + " " + localVariableInfo.getName(clazz) + "]"); } // Implementations for LocalVariableTypeInfoVisitor. public void visitLocalVariableTypeInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableTypeInfo localVariableTypeInfo) { println("v" + localVariableTypeInfo.u2index + ": " + localVariableTypeInfo.u2startPC + " -> " + (localVariableTypeInfo.u2startPC + localVariableTypeInfo.u2length) + " [" + localVariableTypeInfo.getSignature(clazz) + " " + localVariableTypeInfo.getName(clazz) + "]"); } // Implementations for AnnotationVisitor. public void visitAnnotation(Clazz clazz, Annotation annotation) { println(visitorInfo(annotation) + " Annotation [" + annotation.getType(clazz) + "]:"); indent(); annotation.elementValuesAccept(clazz, this); outdent(); } // Implementations for TypeAnnotationVisitor. 
public void visitTypeAnnotation(Clazz clazz, TypeAnnotation typeAnnotation) { println(visitorInfo(typeAnnotation) + " Type annotation [" + typeAnnotation.getType(clazz) + "]:"); indent(); typeAnnotation.targetInfoAccept(clazz, this); println("Type path (count = " + typeAnnotation.typePath.length + "):"); indent(); typeAnnotation.typePathInfosAccept(clazz, this); outdent(); typeAnnotation.elementValuesAccept(clazz, this); outdent(); } // Implementations for TargetInfoVisitor. public void visitTypeParameterTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo) { println("Target (type = 0x" + Integer.toHexString(typeParameterTargetInfo.u1targetType) + "): Parameter #" + typeParameterTargetInfo.u1typeParameterIndex); } public void visitSuperTypeTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, SuperTypeTargetInfo superTypeTargetInfo) { println("Target (type = 0x" + Integer.toHexString(superTypeTargetInfo.u1targetType) + "): " + (superTypeTargetInfo.u2superTypeIndex == 0xffff ? 
"super class" : "interface #" + superTypeTargetInfo.u2superTypeIndex)); } public void visitTypeParameterBoundTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo) { println("Target (type = 0x" + Integer.toHexString(typeParameterBoundTargetInfo.u1targetType) + "): parameter #" + typeParameterBoundTargetInfo.u1typeParameterIndex + ", bound #" + typeParameterBoundTargetInfo.u1boundIndex); } public void visitEmptyTargetInfo(Clazz clazz, Member member, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo) { println("Target (type = 0x" + Integer.toHexString(emptyTargetInfo.u1targetType) + ")"); } public void visitFormalParameterTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, FormalParameterTargetInfo formalParameterTargetInfo) { println("Target (type = 0x" + Integer.toHexString(formalParameterTargetInfo.u1targetType) + "): formal parameter #" + formalParameterTargetInfo.u1formalParameterIndex); } public void visitThrowsTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, ThrowsTargetInfo throwsTargetInfo) { println("Target (type = 0x" + Integer.toHexString(throwsTargetInfo.u1targetType) + "): throws #" + throwsTargetInfo.u2throwsTypeIndex); } public void visitLocalVariableTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, LocalVariableTargetInfo localVariableTargetInfo) { println("Target (type = 0x" + Integer.toHexString(localVariableTargetInfo.u1targetType) + "): local variables (count = " + localVariableTargetInfo.u2tableLength + ")"); indent(); localVariableTargetInfo.targetElementsAccept(clazz, method, codeAttribute, typeAnnotation, this); outdent(); } public void visitCatchTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, CatchTargetInfo catchTargetInfo) { println("Target (type = 0x" + Integer.toHexString(catchTargetInfo.u1targetType) + "): catch #" + 
catchTargetInfo.u2exceptionTableIndex); } public void visitOffsetTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, OffsetTargetInfo offsetTargetInfo) { println("Target (type = 0x" + Integer.toHexString(offsetTargetInfo.u1targetType) + "): offset " + offsetTargetInfo.u2offset); } public void visitTypeArgumentTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, TypeArgumentTargetInfo typeArgumentTargetInfo) { println("Target (type = 0x" + Integer.toHexString(typeArgumentTargetInfo.u1targetType) + "): offset " + typeArgumentTargetInfo.u2offset + ", type argument " + typeArgumentTargetInfo.u1typeArgumentIndex); } // Implementations for TypePathInfoVisitor. public void visitTypePathInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypePathInfo typePathInfo) { println("kind = " + typePathInfo.u1typePathKind + ", argument index = " + typePathInfo.u1typeArgumentIndex); } // Implementations for LocalVariableTargetElementVisitor. public void visitLocalVariableTargetElement(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, LocalVariableTargetInfo localVariableTargetInfo, LocalVariableTargetElement localVariableTargetElement) { println("v" + localVariableTargetElement.u2index + ": " + localVariableTargetElement.u2startPC + " -> " + (localVariableTargetElement.u2startPC + localVariableTargetElement.u2length)); } // Implementations for ElementValueVisitor. public void visitConstantElementValue(Clazz clazz, Annotation annotation, ConstantElementValue constantElementValue) { println(visitorInfo(constantElementValue) + " Constant element value [" + (constantElementValue.u2elementNameIndex == 0 ? 
"(default)" : constantElementValue.getMethodName(clazz)) + " '" + constantElementValue.u1tag + "']"); indent(); clazz.constantPoolEntryAccept(constantElementValue.u2constantValueIndex, this); outdent(); } public void visitEnumConstantElementValue(Clazz clazz, Annotation annotation, EnumConstantElementValue enumConstantElementValue) { println(visitorInfo(enumConstantElementValue) + " Enum constant element value [" + (enumConstantElementValue.u2elementNameIndex == 0 ? "(default)" : enumConstantElementValue.getMethodName(clazz)) + ", " + enumConstantElementValue.getTypeName(clazz) + ", " + enumConstantElementValue.getConstantName(clazz) + "]"); } public void visitClassElementValue(Clazz clazz, Annotation annotation, ClassElementValue classElementValue) { println(visitorInfo(classElementValue) + " Class element value [" + (classElementValue.u2elementNameIndex == 0 ? "(default)" : classElementValue.getMethodName(clazz)) + ", " + classElementValue.getClassName(clazz) + "]"); } public void visitAnnotationElementValue(Clazz clazz, Annotation annotation, AnnotationElementValue annotationElementValue) { println(visitorInfo(annotationElementValue) + " Annotation element value [" + (annotationElementValue.u2elementNameIndex == 0 ? "(default)" : annotationElementValue.getMethodName(clazz)) + "]:"); indent(); annotationElementValue.annotationAccept(clazz, this); outdent(); } public void visitArrayElementValue(Clazz clazz, Annotation annotation, ArrayElementValue arrayElementValue) { println(visitorInfo(arrayElementValue) + " Array element value [" + (arrayElementValue.u2elementNameIndex == 0 ? "(default)" : arrayElementValue.getMethodName(clazz)) + "]:"); indent(); arrayElementValue.elementValuesAccept(clazz, annotation, this); outdent(); } // Small utility methods. 
private void indent() { indentation++; } private void outdent() { indentation--; } private void println(String string) { print(string); println(); } private void print(String string) { for (int index = 0; index < indentation; index++) { ps.print(INDENTATION); } ps.print(string); } private void println() { ps.println(); } private String visitorInfo(VisitorAccepter visitorAccepter) { return visitorAccepter.getVisitorInfo() == null ? "-" : "+"; } }
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.erd.model;

import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ext.erd.ERDActivator;
import org.jkiss.dbeaver.ext.erd.ERDConstants;
import org.jkiss.dbeaver.model.DBPNamedObject;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.model.struct.rdb.DBSTable;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.utils.CommonUtils;

import java.lang.reflect.InvocationTargetException;
import java.util.*;

/**
 * Table collector.
 *
 * Walks a set of database objects (containers, folders, aliases, entities)
 * and turns the entities found into ERD diagram entities for the given
 * {@link EntityDiagram}.
 */
public class DiagramObjectCollector {

    private static final Log log = Log.getLog(DiagramObjectCollector.class);

    // Diagram the collected entities are created for.
    private final EntityDiagram diagram;
    // Entities created so far, in discovery order.
    private final List<ERDEntity> erdEntities = new ArrayList<>();
    // When false, DBSTable views are skipped during generation.
    private boolean showViews;

    public DiagramObjectCollector(EntityDiagram diagram) {
        this.diagram = diagram;
    }

    /**
     * Recursively collects all entities reachable from the given roots.
     * Order of discovery is preserved (LinkedHashSet).
     */
    public static Collection<DBSEntity> collectTables(
        DBRProgressMonitor monitor,
        Collection<? extends DBSObject> roots)
        throws DBException
    {
        Set<DBSEntity> tables = new LinkedHashSet<>();
        collectTables(monitor, roots, tables);
        return tables;
    }

    public boolean isShowViews() {
        return showViews;
    }

    public void setShowViews(boolean showViews) {
        this.showViews = showViews;
    }

    // Resolves each root (dereferencing aliases), descends into folders and
    // object containers, and accumulates entities. Honors monitor cancellation.
    private static void collectTables(
        DBRProgressMonitor monitor,
        Collection<? extends DBSObject> roots,
        Set<DBSEntity> tables)
        throws DBException
    {
        for (DBSObject root : roots) {
            if (monitor.isCanceled()) {
                break;
            }
            if (root instanceof DBSAlias) {
                root = ((DBSAlias) root).getTargetObject(monitor);
            }
            if (root instanceof DBSFolder) {
                collectTables(monitor, ((DBSFolder) root).getChildrenObjects(monitor), tables);
            } else if (root instanceof DBSEntity) {
                tables.add((DBSEntity) root);
            }
            // Note: an entity can also be a container; both branches above and
            // this one may apply to the same root.
            if (root instanceof DBSObjectContainer) {
                collectTables(monitor, (DBSObjectContainer) root, tables);
            }
        }
    }

    // Descends into a single object container, applying the data source's
    // configured object filter for the container's child type.
    private static void collectTables(
        DBRProgressMonitor monitor,
        DBSObjectContainer container,
        Set<DBSEntity> tables)
        throws DBException
    {
        if (monitor.isCanceled()) {
            return;
        }
        container.cacheStructure(monitor, DBSObjectContainer.STRUCT_ALL);
        final Collection<? extends DBSObject> children = container.getChildren(monitor);
        if (!CommonUtils.isEmpty(children)) {
            Class<? extends DBSObject> childType = container.getChildType(monitor);
            DBSObjectFilter objectFilter = container.getDataSource().getContainer().getObjectFilter(childType, container, true);
            for (DBSObject entity : children) {
                if (monitor.isCanceled()) {
                    break;
                }
                if (objectFilter != null && !objectFilter.matches(entity.getName())) {
                    continue;
                }
                if (entity instanceof DBSEntity) {
                    tables.add((DBSEntity) entity);
                } else if (entity instanceof DBSObjectContainer) {
                    collectTables(monitor, (DBSObjectContainer) entity, tables);
                }
            }
        }
    }

    /**
     * Collects entities under the given roots and adds them (and then their
     * model relations) to the diagram. Hidden objects are skipped; views are
     * skipped unless {@link #setShowViews} was enabled.
     */
    public void generateDiagramObjects(
        DBRProgressMonitor monitor,
        Collection<? extends DBSObject> roots)
        throws DBException
    {
        Collection<DBSEntity> tables = collectTables(monitor, roots);
        for (DBSEntity table : tables) {
            if (DBUtils.isHiddenObject(table)) {
                // Skip hidden tables
                continue;
            }
            if (!showViews && table instanceof DBSTable && ((DBSTable) table).isView()) {
                // Skip views
                continue;
            }
            addDiagramEntity(monitor, table);
        }

        // Add new relations
        for (ERDEntity erdEntity : erdEntities) {
            erdEntity.addModelRelations(monitor, diagram, true, false);
        }
    }

    // Creates an ERD entity for the table unless the diagram already contains
    // it and duplicates are not allowed by the diagram decorator.
    private void addDiagramEntity(DBRProgressMonitor monitor, DBSEntity table) {
        if (diagram.containsTable(table) && !diagram.getDecorator().allowEntityDuplicates()) {
            // Avoid duplicates
            return;
        }
        ERDEntity erdEntity = ERDUtils.makeEntityFromObject(monitor, diagram, erdEntities, table, null);
        if (erdEntity != null) {
            erdEntities.add(erdEntity);
        }
    }

    // Returns true if any already-collected entity uses the given alias.
    // NOTE(review): not referenced within this excerpt.
    private boolean aliasExist(String alias) {
        for (ERDEntity entity : erdEntities) {
            if (CommonUtils.equalObjects(entity.getAlias(), alias)) {
                return true;
            }
        }
        return false;
    }

    public List<ERDEntity> getDiagramEntities() {
        return erdEntities;
    }

    /**
     * Convenience entry point: filters the named objects down to DBSObjects,
     * runs the collection inside the UI progress service, and returns the
     * resulting ERD entities. Interruption yields a partial (possibly empty)
     * result; collection errors are logged, not thrown.
     */
    public static List<ERDEntity> generateEntityList(final EntityDiagram diagram, Collection<DBPNamedObject> objects, boolean showViews) {
        final List<DBSObject> roots = new ArrayList<>();
        for (DBPNamedObject object : objects) {
            if (object instanceof DBSObject) {
                roots.add((DBSObject) object);
            }
        }

        final List<ERDEntity> entities = new ArrayList<>();

        try {
            UIUtils.runInProgressService(monitor -> {
                DiagramObjectCollector collector = new DiagramObjectCollector(diagram);
                collector.setShowViews(showViews);
                //boolean showViews = ERDActivator.getDefault().getPreferenceStore().getBoolean(ERDConstants.PREF_DIAGRAM_SHOW_VIEWS);
                try {
                    collector.generateDiagramObjects(monitor, roots);
                } catch (DBException e) {
                    throw new InvocationTargetException(e);
                }
                entities.addAll(collector.getDiagramEntities());
            });
        } catch (InvocationTargetException e) {
            log.error(e.getTargetException());
        } catch (InterruptedException e) {
            // interrupted
        }
        return entities;
    }
}
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.InconsistentFilesystemException;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.concurrent.BatchCallback;
import com.google.devtools.build.lib.concurrent.ParallelVisitor.UnusedException;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.pkgcache.AbstractRecursivePackageProvider;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.rules.repository.RepositoryDirectoryValue;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.skyframe.SkyFunction.Environment;
import com.google.devtools.build.skyframe.SkyKey;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * A {@link com.google.devtools.build.lib.pkgcache.RecursivePackageProvider} backed by an {@link
 * Environment}. Its methods may throw {@link MissingDepException} if the package values this
 * depends on haven't been calculated and added to its environment.
 *
 * <p>This implementation never emits events through the {@link ExtendedEventHandler}s passed to its
 * methods. Instead, it emits events through its environment's {@link Environment#getListener()}.
 */
public final class EnvironmentBackedRecursivePackageProvider
    extends AbstractRecursivePackageProvider {

  private final Environment env;
  // Sticky flag: set once any queried package turns out to be in error.
  private final AtomicBoolean encounteredPackageErrors = new AtomicBoolean(false);

  EnvironmentBackedRecursivePackageProvider(Environment env) {
    this.env = env;
  }

  /**
   * Whether any of the calls to {@link #getPackage}, {@link #getTarget}, {@link #bulkGetPackages},
   * or {@link
   * com.google.devtools.build.lib.pkgcache.RecursivePackageProvider#streamPackagesUnderDirectory}
   * encountered a package in error.
   *
   * <p>The client of {@link EnvironmentBackedRecursivePackageProvider} may want to check this. See
   * comments in {@link #getPackage} for details.
   */
  boolean encounteredPackageErrors() {
    return encounteredPackageErrors.get();
  }

  @Override
  public Package getPackage(ExtendedEventHandler eventHandler, PackageIdentifier packageName)
      throws NoSuchPackageException, MissingDepException, InterruptedException {
    SkyKey pkgKey = PackageValue.key(packageName);
    PackageValue pkgValue;
    try {
      pkgValue = (PackageValue) env.getValueOrThrow(pkgKey, NoSuchPackageException.class);
      if (pkgValue == null) {
        // Null value with no exception means the dep is not yet computed.
        throw new MissingDepException();
      }
    } catch (NoSuchPackageException e) {
      encounteredPackageErrors.set(true);
      throw e;
    }

    Package pkg = pkgValue.getPackage();
    if (pkg.containsErrors()) {
      // If this is a nokeep_going build, we must shut the build down by throwing an exception. To
      // do that, we request a node that will throw an exception, and then try to catch it and
      // continue. This gives the framework notification to shut down the build if it should.
      try {
        env.getValueOrThrow(
            PackageErrorFunction.key(packageName), BuildFileContainsErrorsException.class);
        Preconditions.checkState(env.valuesMissing(), "Should have thrown for %s", packageName);
        throw new MissingDepException();
      } catch (BuildFileContainsErrorsException e) {
        // If this is a keep_going build, then the user of this RecursivePackageProvider has two
        // options for handling the "package in error" case. The user must either inspect the
        // package returned by this method, or else determine whether any errors have been seen via
        // the "encounteredPackageErrors" method.
        encounteredPackageErrors.set(true);
      }
    }
    return pkgValue.getPackage();
  }

  @Override
  public Map<PackageIdentifier, Package> bulkGetPackages(Iterable<PackageIdentifier> pkgIds)
      throws NoSuchPackageException, InterruptedException {
    // Delegates to getPackage per id; events go to the environment's listener.
    ImmutableMap.Builder<PackageIdentifier, Package> builder = ImmutableMap.builder();
    for (PackageIdentifier pkgId : pkgIds) {
      builder.put(pkgId, getPackage(env.getListener(), pkgId));
    }
    return builder.build();
  }

  @Override
  public boolean isPackage(ExtendedEventHandler eventHandler, PackageIdentifier packageId)
      throws MissingDepException, InterruptedException {
    SkyKey packageLookupKey = PackageLookupValue.key(packageId);
    try {
      PackageLookupValue packageLookupValue =
          (PackageLookupValue)
              env.getValueOrThrow(
                  packageLookupKey,
                  NoSuchPackageException.class,
                  InconsistentFilesystemException.class);
      if (packageLookupValue == null) {
        throw new MissingDepException();
      }
      return packageLookupValue.packageExists();
    } catch (NoSuchPackageException | InconsistentFilesystemException e) {
      // Lookup failures are reported but treated as "not a package".
      env.getListener().handle(Event.error(e.getMessage()));
      encounteredPackageErrors.set(true);
      return false;
    }
  }

  @Override
  public void streamPackagesUnderDirectory(
      BatchCallback<PackageIdentifier, UnusedException> results,
      ExtendedEventHandler eventHandler,
      RepositoryName repository,
      PathFragment directory,
      ImmutableSet<PathFragment> ignoredSubdirectories,
      ImmutableSet<PathFragment> excludedSubdirectories)
      throws MissingDepException, InterruptedException {
    PathPackageLocator packageLocator = PrecomputedValue.PATH_PACKAGE_LOCATOR.get(env);
    if (packageLocator == null) {
      throw new MissingDepException();
    }

    List<Root> roots = new ArrayList<>();
    if (repository.isMain()) {
      roots.addAll(packageLocator.getPathEntries());
    } else {
      // External repository: resolve its single root directory.
      RepositoryDirectoryValue repositoryValue =
          (RepositoryDirectoryValue) env.getValue(RepositoryDirectoryValue.key(repository));
      if (repositoryValue == null) {
        throw new MissingDepException();
      }
      if (!repositoryValue.repositoryExists()) {
        eventHandler.handle(Event.error(String.format("No such repository '%s'", repository)));
        return;
      }
      roots.add(Root.fromPath(repositoryValue.getPath()));
    }

    if (ignoredSubdirectories.contains(directory)) {
      return;
    }
    // Keep only ignored dirs strictly below the requested directory.
    ImmutableSet<PathFragment> filteredIgnoredSubdirectories =
        ImmutableSet.copyOf(
            Iterables.filter(
                ignoredSubdirectories,
                path -> !path.equals(directory) && path.startsWith(directory)));

    for (Root root : roots) {
      RecursivePkgValue lookup =
          (RecursivePkgValue)
              env.getValue(
                  RecursivePkgValue.key(
                      repository,
                      RootedPath.toRootedPath(root, directory),
                      filteredIgnoredSubdirectories));
      if (lookup == null) {
        // Typically a null value from Environment.getValue(k) means that either the key k is
        // missing a dependency or an exception was thrown during evaluation of k. Here, if this
        // getValue call returns null in a keep_going build, it can only mean a missing dependency
        // because RecursivePkgFunction#compute never throws.
        // In a nokeep_going build, a lower-level exception that RecursivePkgFunction ignored may
        // bubble up to here, but we ignore it and depend on the top-level caller to be flexible in
        // the exception types it can accept.
        throw new MissingDepException();
      }
      if (lookup.hasErrors()) {
        encounteredPackageErrors.set(true);
      }

      for (String packageName : lookup.getPackages().toList()) {
        // TODO(bazel-team): Make RecursivePkgValue return NestedSet<PathFragment> so this transform
        // is unnecessary.
        PathFragment packageNamePathFragment = PathFragment.create(packageName);
        if (!Iterables.any(excludedSubdirectories, packageNamePathFragment::startsWith)) {
          results.process(
              ImmutableList.of(PackageIdentifier.create(repository, packageNamePathFragment)));
        }
      }
    }
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.api.runtime.migration; import java.util.Collection; import org.camunda.bpm.model.bpmn.BpmnModelInstance; import org.camunda.bpm.model.bpmn.builder.AbstractActivityBuilder; import org.camunda.bpm.model.bpmn.builder.AbstractBaseElementBuilder; import org.camunda.bpm.model.bpmn.builder.AbstractFlowNodeBuilder; import org.camunda.bpm.model.bpmn.builder.CallActivityBuilder; import org.camunda.bpm.model.bpmn.builder.EndEventBuilder; import org.camunda.bpm.model.bpmn.builder.IntermediateCatchEventBuilder; import org.camunda.bpm.model.bpmn.builder.ServiceTaskBuilder; import org.camunda.bpm.model.bpmn.builder.StartEventBuilder; import org.camunda.bpm.model.bpmn.builder.SubProcessBuilder; import org.camunda.bpm.model.bpmn.builder.UserTaskBuilder; import org.camunda.bpm.model.bpmn.instance.Activity; import org.camunda.bpm.model.bpmn.instance.Association; import org.camunda.bpm.model.bpmn.instance.BaseElement; import org.camunda.bpm.model.bpmn.instance.BpmnModelElementInstance; import org.camunda.bpm.model.bpmn.instance.Definitions; import org.camunda.bpm.model.bpmn.instance.FlowElement; import org.camunda.bpm.model.bpmn.instance.FlowNode; import org.camunda.bpm.model.bpmn.instance.Message; import org.camunda.bpm.model.bpmn.instance.MultiInstanceLoopCharacteristics; import org.camunda.bpm.model.bpmn.instance.SequenceFlow; import org.camunda.bpm.model.bpmn.instance.Signal; import 
org.camunda.bpm.model.bpmn.instance.SubProcess; import org.camunda.bpm.model.xml.Model; import org.camunda.bpm.model.xml.instance.DomDocument; import org.camunda.bpm.model.xml.instance.ModelElementInstance; import org.camunda.bpm.model.xml.type.ModelElementType; import org.camunda.bpm.model.xml.validation.ModelElementValidator; import org.camunda.bpm.model.xml.validation.ValidationResults; public class ModifiableBpmnModelInstance implements BpmnModelInstance { protected BpmnModelInstance modelInstance; public ModifiableBpmnModelInstance(BpmnModelInstance modelInstance) { this.modelInstance = modelInstance; } public static ModifiableBpmnModelInstance modify(BpmnModelInstance modelInstance) { return new ModifiableBpmnModelInstance(modelInstance.clone()); } public Definitions getDefinitions() { return modelInstance.getDefinitions(); } public void setDefinitions(Definitions definitions) { modelInstance.setDefinitions(definitions); } @Override public BpmnModelInstance clone() { return modelInstance.clone(); } public DomDocument getDocument() { return modelInstance.getDocument(); } public ModelElementInstance getDocumentElement() { return modelInstance.getDocumentElement(); } public void setDocumentElement(ModelElementInstance documentElement) { modelInstance.setDocumentElement(documentElement); } public <T extends ModelElementInstance> T newInstance(Class<T> type) { return modelInstance.newInstance(type); } public <T extends ModelElementInstance> T newInstance(ModelElementType type) { return modelInstance.newInstance(type); } public Model getModel() { return modelInstance.getModel(); } public <T extends ModelElementInstance> T getModelElementById(String id) { return modelInstance.getModelElementById(id); } public Collection<ModelElementInstance> getModelElementsByType(ModelElementType referencingType) { return modelInstance.getModelElementsByType(referencingType); } public <T extends ModelElementInstance> Collection<T> getModelElementsByType(Class<T> referencingClass) { 
return modelInstance.getModelElementsByType(referencingClass); } @SuppressWarnings("unchecked") public <T extends AbstractBaseElementBuilder> T getBuilderForElementById(String id, Class<T> builderClass) { BaseElement modelElementById = modelInstance.getModelElementById(id); return (T) modelElementById.builder(); } public AbstractActivityBuilder activityBuilder(String activityId) { return getBuilderForElementById(activityId, AbstractActivityBuilder.class); } public AbstractFlowNodeBuilder flowNodeBuilder(String flowNodeId) { return getBuilderForElementById(flowNodeId, AbstractFlowNodeBuilder.class); } public UserTaskBuilder userTaskBuilder(String userTaskId) { return getBuilderForElementById(userTaskId, UserTaskBuilder.class); } public ServiceTaskBuilder serviceTaskBuilder(String serviceTaskId) { return getBuilderForElementById(serviceTaskId, ServiceTaskBuilder.class); } public CallActivityBuilder callActivityBuilder(String callActivityId) { return getBuilderForElementById(callActivityId, CallActivityBuilder.class); } public IntermediateCatchEventBuilder intermediateCatchEventBuilder(String eventId) { return getBuilderForElementById(eventId, IntermediateCatchEventBuilder.class); } public StartEventBuilder startEventBuilder(String eventId) { return getBuilderForElementById(eventId, StartEventBuilder.class); } public EndEventBuilder endEventBuilder(String eventId) { return getBuilderForElementById(eventId, EndEventBuilder.class); } public ModifiableBpmnModelInstance changeElementId(String oldId, String newId) { BaseElement element = getModelElementById(oldId); element.setId(newId); return this; } public ModifiableBpmnModelInstance changeElementName(String elementId, String newName) { FlowElement flowElement = getModelElementById(elementId); flowElement.setName(newName); return this; } public ModifiableBpmnModelInstance removeChildren(String elementId) { BaseElement element = getModelElementById(elementId); Collection<BaseElement> children = 
element.getChildElementsByType(BaseElement.class); for (BaseElement child : children) { element.removeChildElement(child); } return this; } public ModifiableBpmnModelInstance renameMessage(String oldMessageName, String newMessageName) { Collection<Message> messages = modelInstance.getModelElementsByType(Message.class); for (Message message : messages) { if (message.getName().equals(oldMessageName)) { message.setName(newMessageName); } } return this; } public ModifiableBpmnModelInstance renameSignal(String oldSignalName, String newSignalName) { Collection<Signal> signals = modelInstance.getModelElementsByType(Signal.class); for (Signal signal : signals) { if (signal.getName().equals(oldSignalName)) { signal.setName(newSignalName); } } return this; } public ModifiableBpmnModelInstance swapElementIds(String firstElementId, String secondElementId) { BaseElement firstElement = getModelElementById(firstElementId); BaseElement secondElement = getModelElementById(secondElementId); secondElement.setId("___TEMP___ID___"); firstElement.setId(secondElementId); secondElement.setId(firstElementId); return this; } public SubProcessBuilder addSubProcessTo(String parentId) { SubProcess eventSubProcess = modelInstance.newInstance(SubProcess.class); BpmnModelElementInstance parent = getModelElementById(parentId); parent.addChildElement(eventSubProcess); return eventSubProcess.builder(); } public ModifiableBpmnModelInstance removeFlowNode(String flowNodeId) { FlowNode flowNode = getModelElementById(flowNodeId); ModelElementInstance scope = flowNode.getParentElement(); for (SequenceFlow outgoingFlow : flowNode.getOutgoing()) { scope.removeChildElement(outgoingFlow); } for (SequenceFlow incomingFlow : flowNode.getIncoming()) { scope.removeChildElement(incomingFlow); } Collection<Association> associations = scope.getChildElementsByType(Association.class); for (Association association : associations) { if (flowNode.equals(association.getSource()) || 
flowNode.equals(association.getTarget())) { scope.removeChildElement(association); } } scope.removeChildElement(flowNode); return this; } public ModifiableBpmnModelInstance asyncBeforeInnerMiActivity(String activityId) { Activity activity = modelInstance.getModelElementById(activityId); MultiInstanceLoopCharacteristics miCharacteristics = (MultiInstanceLoopCharacteristics) activity.getUniqueChildElementByType(MultiInstanceLoopCharacteristics.class); miCharacteristics.setCamundaAsyncBefore(true); return this; } public ModifiableBpmnModelInstance asyncAfterInnerMiActivity(String activityId) { Activity activity = modelInstance.getModelElementById(activityId); MultiInstanceLoopCharacteristics miCharacteristics = (MultiInstanceLoopCharacteristics) activity.getUniqueChildElementByType(MultiInstanceLoopCharacteristics.class); miCharacteristics.setCamundaAsyncAfter(true); return this; } public ValidationResults validate(Collection<ModelElementValidator<?>> validators) { return null; } }
/**
 * Copyright Google Inc. All Rights Reserved.
 * <p/>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.firebase.codelab.friendlychat;

import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.Editable;
import android.text.InputFilter;
import android.text.TextWatcher;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.firebase.ui.database.FirebaseRecyclerAdapter;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.google.android.gms.appinvite.AppInvite;
import com.google.android.gms.appinvite.AppInviteInvitation;
import com.google.android.gms.auth.api.Auth;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.analytics.FirebaseAnalytics;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.crash.FirebaseCrash;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.remoteconfig.FirebaseRemoteConfig;
import com.google.firebase.remoteconfig.FirebaseRemoteConfigSettings;
import com.google.firebase.appindexing.Action;
import com.google.firebase.appindexing.FirebaseAppIndex;
import com.google.firebase.appindexing.FirebaseUserActions;
import com.google.firebase.appindexing.Indexable;
import com.google.firebase.appindexing.builders.Indexables;
import com.google.firebase.appindexing.builders.PersonBuilder;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;

import java.util.HashMap;
import java.util.Map;

import de.hdodenhof.circleimageview.CircleImageView;

/**
 * Main chat screen of the FriendlyChat codelab: sets up the message list,
 * the message input with a length filter, and the Google API client used for
 * sign-in. Several features (sending, sign-in check) are intentionally left
 * as codelab TODOs.
 */
public class MainActivity extends AppCompatActivity
        implements GoogleApiClient.OnConnectionFailedListener {

    // Holds the views of one chat-message row in the RecyclerView.
    public static class MessageViewHolder extends RecyclerView.ViewHolder {
        public TextView messageTextView;
        public ImageView messageImageView;
        public TextView messengerTextView;
        public CircleImageView messengerImageView;

        public MessageViewHolder(View v) {
            super(v);
            messageTextView = (TextView) itemView.findViewById(R.id.messageTextView);
            messageImageView = (ImageView) itemView.findViewById(R.id.messageImageView);
            messengerTextView = (TextView) itemView.findViewById(R.id.messengerTextView);
            messengerImageView = (CircleImageView) itemView.findViewById(R.id.messengerImageView);
        }
    }

    private static final String TAG = "MainActivity";
    public static final String MESSAGES_CHILD = "messages";
    private static final int REQUEST_INVITE = 1;
    private static final int REQUEST_IMAGE = 2;
    private static final String LOADING_IMAGE_URL = "https://www.google.com/images/spin-32.gif";
    // Fallback message length limit; the effective limit is read from
    // shared preferences in onCreate.
    public static final int DEFAULT_MSG_LENGTH_LIMIT = 10;
    public static final String ANONYMOUS = "anonymous";
    private static final String MESSAGE_SENT_EVENT = "message_sent";
    private String mUsername;
    private String mPhotoUrl;
    private SharedPreferences mSharedPreferences;
    private GoogleApiClient mGoogleApiClient;

    private static final String MESSAGE_URL = "http://friendlychat.firebase.google.com/message/";

    private Button mSendButton;
    private RecyclerView mMessageRecyclerView;
    private LinearLayoutManager mLinearLayoutManager;
    private ProgressBar mProgressBar;
    private EditText mMessageEditText;
    private ImageView mAddMessageImageView;

    // Firebase instance variables

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);

        // Set default username is anonymous.
        mUsername = ANONYMOUS;

        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .enableAutoManage(this /* FragmentActivity */, this /* OnConnectionFailedListener */)
                .addApi(Auth.GOOGLE_SIGN_IN_API)
                .build();

        // Initialize ProgressBar and RecyclerView.
        mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
        mMessageRecyclerView = (RecyclerView) findViewById(R.id.messageRecyclerView);
        mLinearLayoutManager = new LinearLayoutManager(this);
        // Show newest messages at the bottom of the list.
        mLinearLayoutManager.setStackFromEnd(true);
        mMessageRecyclerView.setLayoutManager(mLinearLayoutManager);

        mProgressBar.setVisibility(ProgressBar.INVISIBLE);
        mMessageEditText = (EditText) findViewById(R.id.messageEditText);
        // Message length limit comes from preferences, defaulting to
        // DEFAULT_MSG_LENGTH_LIMIT.
        mMessageEditText.setFilters(new InputFilter[]{new InputFilter.LengthFilter(mSharedPreferences
                .getInt(CodelabPreferences.FRIENDLY_MSG_LENGTH, DEFAULT_MSG_LENGTH_LIMIT))});
        // Enable the send button only while the trimmed input is non-empty.
        mMessageEditText.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
            }

            @Override
            public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                if (charSequence.toString().trim().length() > 0) {
                    mSendButton.setEnabled(true);
                } else {
                    mSendButton.setEnabled(false);
                }
            }

            @Override
            public void afterTextChanged(Editable editable) {
            }
        });

        mSendButton = (Button) findViewById(R.id.sendButton);
        mSendButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Send messages on click.
            }
        });

        mAddMessageImageView = (ImageView) findViewById(R.id.addMessageImageView);
        mAddMessageImageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Select image for image message on click.
            }
        });
    }

    @Override
    public void onStart() {
        super.onStart();
        // Check if user is signed in.
        // TODO: Add code to check if user is signed in.
    }

    @Override
    public void onPause() {
        super.onPause();
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.main_menu, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
        // An unresolvable error has occurred and Google APIs (including Sign-In) will not
        // be available.
        Log.d(TAG, "onConnectionFailed:" + connectionResult);
        Toast.makeText(this, "Google Play Services error.", Toast.LENGTH_SHORT).show();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.interpreter; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.NullArgumentException; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.dep.Dependency; import org.apache.zeppelin.dep.DependencyResolver; import org.apache.zeppelin.display.AngularObjectRegistry; import org.apache.zeppelin.display.AngularObjectRegistryListener; import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter; import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry; import org.apache.zeppelin.interpreter.remote.RemoteInterpreter; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; import org.apache.zeppelin.scheduler.Job; import org.apache.zeppelin.scheduler.Job.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sonatype.aether.RepositoryException; import org.sonatype.aether.repository.Authentication; import org.sonatype.aether.repository.RemoteRepository; import java.io.*; import 
java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;

/**
 * Manage interpreters.
 *
 * Registry of {@link InterpreterSetting}s plus per-note interpreter lifecycle
 * (create/remove/restart). Settings are persisted to a JSON file via
 * {@link #saveToFile()} and reloaded via {@link #loadFromFile()}.
 *
 * Thread-safety: mutable state is guarded by synchronizing on
 * {@code interpreterSettings}; per-note creation/removal additionally
 * synchronizes on the owning {@link InterpreterGroup}.
 */
public class InterpreterFactory {
  Logger logger = LoggerFactory.getLogger(InterpreterFactory.class);

  // Per-interpreter-dir classloaders, keyed by absolute directory path.
  private Map<String, URLClassLoader> cleanCl = Collections
      .synchronizedMap(new HashMap<String, URLClassLoader>());

  private ZeppelinConfiguration conf;
  // Interpreter class names from ZEPPELIN_INTERPRETERS (comma-separated);
  // the order here defines the display/sort order used by get().
  String[] interpreterClassList;

  // Setting id -> setting. Also serves as the lock object for factory state.
  private Map<String, InterpreterSetting> interpreterSettings =
      new HashMap<String, InterpreterSetting>();
  // Note id -> list of bound setting ids.
  private Map<String, List<String>> interpreterBindings =
      new HashMap<String, List<String>>();
  private List<RemoteRepository> interpreterRepositories;

  private Gson gson;
  private InterpreterOption defaultOption;

  AngularObjectRegistryListener angularObjectRegistryListener;
  private final RemoteInterpreterProcessListener remoteInterpreterProcessListener;

  private DependencyResolver depResolver;

  /**
   * Convenience constructor using a default remote {@link InterpreterOption}.
   */
  public InterpreterFactory(ZeppelinConfiguration conf,
      AngularObjectRegistryListener angularObjectRegistryListener,
      RemoteInterpreterProcessListener remoteInterpreterProcessListener,
      DependencyResolver depResolver)
      throws InterpreterException, IOException, RepositoryException {
    this(conf, new InterpreterOption(true), angularObjectRegistryListener,
        remoteInterpreterProcessListener, depResolver);
  }

  /**
   * Main constructor: wires collaborators, parses the interpreter class list
   * from configuration, builds the Gson instance used for persistence, then
   * runs {@link #init()} (which scans interpreter dirs and loads settings).
   */
  public InterpreterFactory(ZeppelinConfiguration conf,
      InterpreterOption defaultOption,
      AngularObjectRegistryListener angularObjectRegistryListener,
      RemoteInterpreterProcessListener remoteInterpreterProcessListener,
      DependencyResolver depResolver)
      throws InterpreterException, IOException, RepositoryException {
    this.conf = conf;
    this.defaultOption = defaultOption;
    this.angularObjectRegistryListener = angularObjectRegistryListener;
    this.depResolver = depResolver;
    this.interpreterRepositories = depResolver.getRepos();
    this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
    String replsConf = conf.getString(ConfVars.ZEPPELIN_INTERPRETERS);
    interpreterClassList = replsConf.split(",");

    GsonBuilder builder = new GsonBuilder();
    builder.setPrettyPrinting();
    builder.registerTypeAdapter(
        InterpreterSetting.InterpreterInfo.class, new InterpreterInfoSerializer());
    gson = builder.create();

    init();
  }

  /**
   * Scans each interpreter directory, loads the configured interpreter
   * classes through a per-directory URLClassLoader (which populates
   * Interpreter.registeredInterpreters as a side effect of class init),
   * then loads persisted settings; if none exist, creates one default
   * setting per interpreter group.
   */
  private void init() throws InterpreterException, IOException, RepositoryException {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();

    // Load classes
    File[] interpreterDirs = new File(conf.getInterpreterDir()).listFiles();
    if (interpreterDirs != null) {
      for (File path : interpreterDirs) {
        logger.info("Reading " + path.getAbsolutePath());
        URL[] urls = null;
        try {
          urls = recursiveBuildLibList(path);
        } catch (MalformedURLException e1) {
          logger.error("Can't load jars ", e1);
        }
        URLClassLoader ccl = new URLClassLoader(urls, oldcl);

        for (String className : interpreterClassList) {
          try {
            // Loading with initialize=true triggers the interpreter's static
            // registration into Interpreter.registeredInterpreters.
            Class.forName(className, true, ccl);
            Set<String> keys = Interpreter.registeredInterpreters.keySet();
            for (String intName : keys) {
              if (className.equals(
                  Interpreter.registeredInterpreters.get(intName).getClassName())) {
                Interpreter.registeredInterpreters.get(intName).setPath(path.getAbsolutePath());
                logger.info("Interpreter " + intName + " found. class=" + className);
                cleanCl.put(path.getAbsolutePath(), ccl);
              }
            }
          } catch (ClassNotFoundException e) {
            // nothing to do
          }
        }
      }
    }

    loadFromFile();

    // if no interpreter settings are loaded, create default set
    synchronized (interpreterSettings) {
      if (interpreterSettings.size() == 0) {
        // Bucket registered interpreters by group name.
        HashMap<String, List<RegisteredInterpreter>> groupClassNameMap =
            new HashMap<String, List<RegisteredInterpreter>>();
        for (String k : Interpreter.registeredInterpreters.keySet()) {
          RegisteredInterpreter info = Interpreter.registeredInterpreters.get(k);
          if (!groupClassNameMap.containsKey(info.getGroup())) {
            groupClassNameMap.put(info.getGroup(), new LinkedList<RegisteredInterpreter>());
          }
          groupClassNameMap.get(info.getGroup()).add(info);
        }

        // Walk the configured class list so default settings follow its order.
        for (String className : interpreterClassList) {
          for (String groupName : groupClassNameMap.keySet()) {
            List<RegisteredInterpreter> infos = groupClassNameMap.get(groupName);

            boolean found = false;
            Properties p = new Properties();
            for (RegisteredInterpreter info : infos) {
              if (found == false && info.getClassName().equals(className)) {
                found = true;
              }
              // Collect the group's default property values regardless of match.
              for (String k : info.getProperties().keySet()) {
                p.put(k, info.getProperties().get(k).getDefaultValue());
              }
            }

            if (found) {
              // add all interpreters in group
              add(groupName, groupName, new LinkedList<Dependency>(), defaultOption, p);
              // Remove-then-break keeps the keySet iteration safe here.
              groupClassNameMap.remove(groupName);
              break;
            }
          }
        }
      }
    }

    for (String settingId : interpreterSettings.keySet()) {
      InterpreterSetting setting = interpreterSettings.get(settingId);
      logger.info("Interpreter setting group {} : id={}, name={}",
          setting.getGroup(), settingId, setting.getName());
    }
  }

  /**
   * Loads interpreter settings, bindings and repositories from the JSON
   * settings file, rebuilding each setting's InterpreterGroup. No-op when
   * the file does not exist.
   */
  private void loadFromFile() throws IOException {
    // NOTE(review): this local Gson shadows the instance field built in the
    // constructor with the same configuration — presumably redundant; verify.
    GsonBuilder builder = new GsonBuilder();
    builder.setPrettyPrinting();
    builder.registerTypeAdapter(
        InterpreterSetting.InterpreterInfo.class, new InterpreterInfoSerializer());
    Gson gson = builder.create();

    File settingFile = new File(conf.getInterpreterSettingPath());
    if (!settingFile.exists()) {
      // nothing to read
      return;
    }
    // NOTE(review): streams are closed outside a finally/try-with-resources,
    // so an IOException mid-read leaks the file handle.
    FileInputStream fis = new FileInputStream(settingFile);
    InputStreamReader isr = new InputStreamReader(fis);
    BufferedReader bufferedReader = new BufferedReader(isr);
    StringBuilder sb = new StringBuilder();
    String line;
    while ((line = bufferedReader.readLine()) != null) {
      sb.append(line);
    }
    isr.close();
    fis.close();

    String json = sb.toString();
    InterpreterInfoSaving info = gson.fromJson(json, InterpreterInfoSaving.class);

    for (String k : info.interpreterSettings.keySet()) {
      InterpreterSetting setting = info.interpreterSettings.get(k);

      // Always use separate interpreter process
      // While we decided to turn this feature on always (without providing
      // enable/disable option on GUI).
      // previously created setting should turn this feature on here.
      setting.getOption().setRemote(true);

      InterpreterSetting intpSetting = new InterpreterSetting(
          setting.id(),
          setting.getName(),
          setting.getGroup(),
          setting.getInterpreterInfos(),
          setting.getProperties(),
          setting.getDependencies(),
          setting.getOption());

      InterpreterGroup interpreterGroup =
          createInterpreterGroup(setting.id(), setting.getOption());
      intpSetting.setInterpreterGroup(interpreterGroup);
      interpreterSettings.put(k, intpSetting);
    }

    this.interpreterBindings = info.interpreterBindings;

    if (info.interpreterRepositories != null) {
      for (RemoteRepository repo : info.interpreterRepositories) {
        if (!depResolver.getRepos().contains(repo)) {
          this.interpreterRepositories.add(repo);
        }
      }
    }
  }

  /**
   * Wipes the setting's local repo dir and resolves its declared
   * dependencies into the configured local repository.
   */
  private void loadInterpreterDependencies(InterpreterSetting intSetting)
      throws IOException, RepositoryException {
    // dependencies to prevent library conflict
    File localRepoDir = new File(conf.getInterpreterLocalRepoPath() + "/" + intSetting.id());
    if (localRepoDir.exists()) {
      FileUtils.cleanDirectory(localRepoDir);
    }

    // load dependencies
    List<Dependency> deps = intSetting.getDependencies();
    if (deps != null) {
      for (Dependency d: deps) {
        if (d.getExclusions() != null) {
          depResolver.load(
              d.getGroupArtifactVersion(), d.getExclusions(),
              conf.getString(ConfVars.ZEPPELIN_DEP_LOCALREPO) + "/" + intSetting.id());
        } else {
          depResolver.load(
              d.getGroupArtifactVersion(),
              conf.getString(ConfVars.ZEPPELIN_DEP_LOCALREPO) + "/" + intSetting.id());
        }
      }
    }
  }

  /**
   * Serializes settings/bindings/repositories to the settings file.
   * JSON is built under the settings lock; file I/O happens outside it.
   */
  private void saveToFile() throws IOException {
    String jsonString;

    synchronized (interpreterSettings) {
      InterpreterInfoSaving info = new InterpreterInfoSaving();
      info.interpreterBindings = interpreterBindings;
      info.interpreterSettings = interpreterSettings;
      info.interpreterRepositories = interpreterRepositories;
      jsonString = gson.toJson(info);
    }

    File settingFile = new File(conf.getInterpreterSettingPath());
    if (!settingFile.exists()) {
      settingFile.createNewFile();
    }

    // NOTE(review): writer closed outside finally; an I/O error mid-write
    // leaks the handle and may truncate the settings file.
    FileOutputStream fos = new FileOutputStream(settingFile, false);
    OutputStreamWriter out = new OutputStreamWriter(fos);
    out.append(jsonString);
    out.close();
    fos.close();
  }

  /**
   * Looks up a RegisteredInterpreter by its implementation class name,
   * or null when none matches.
   */
  private RegisteredInterpreter getRegisteredReplInfoFromClassName(String clsName) {
    Set<String> keys = Interpreter.registeredInterpreters.keySet();
    for (String intName : keys) {
      RegisteredInterpreter info = Interpreter.registeredInterpreters.get(intName);
      if (clsName.equals(info.getClassName())) {
        return info;
      }
    }
    return null;
  }

  /**
   * Return ordered interpreter setting list.
   * The list does not contain more than one setting from the same interpreter class.
   * Order by InterpreterClass (order defined by ZEPPELIN_INTERPRETERS), Interpreter setting name
   * @return list of setting ids, first-per-group, in sorted-setting order
   */
  public List<String> getDefaultInterpreterSettingList() {
    // this list will contain default interpreter setting list
    List<String> defaultSettings = new LinkedList<String>();
    // to ignore the same interpreter group
    Map<String, Boolean> interpreterGroupCheck = new HashMap<String, Boolean>();

    List<InterpreterSetting> sortedSettings = get();

    for (InterpreterSetting setting : sortedSettings) {
      if (defaultSettings.contains(setting.id())) {
        continue;
      }
      if (!interpreterGroupCheck.containsKey(setting.getGroup())) {
        defaultSettings.add(setting.id());
        interpreterGroupCheck.put(setting.getGroup(), true);
      }
    }
    return defaultSettings;
  }

  /**
   * Returns the RegisteredInterpreter for each configured class name that is
   * actually registered, preserving ZEPPELIN_INTERPRETERS order.
   */
  public List<RegisteredInterpreter> getRegisteredInterpreterList() {
    List<RegisteredInterpreter> registeredInterpreters =
        new LinkedList<RegisteredInterpreter>();
    for (String className : interpreterClassList) {
      RegisteredInterpreter ri = Interpreter.findRegisteredInterpreterByClassName(className);
      if (ri != null) {
        registeredInterpreters.add(ri);
      }
    }
    return registeredInterpreters;
  }

  /**
   * Creates and persists a new interpreter setting for the given group.
   *
   * @param name user defined name
   * @param groupName interpreter group name to instantiate
   * @param dependencies artifact dependencies to resolve before use
   * @param option remote/per-note options for the new setting
   * @param properties initial interpreter properties
   * @return the InterpreterGroup created for the new setting
   * @throws InterpreterException
   * @throws IOException
   */
  public InterpreterGroup add(String name, String groupName,
      List<Dependency> dependencies, InterpreterOption option, Properties properties)
      throws InterpreterException, IOException, RepositoryException {
    synchronized (interpreterSettings) {
      List<InterpreterSetting.InterpreterInfo> interpreterInfos =
          new LinkedList<InterpreterSetting.InterpreterInfo>();
      for (RegisteredInterpreter registeredInterpreter :
          Interpreter.registeredInterpreters.values()) {
        if (registeredInterpreter.getGroup().equals(groupName)) {
          // Only include classes that are in the configured class list.
          for (String className : interpreterClassList) {
            if (registeredInterpreter.getClassName().equals(className)) {
              interpreterInfos.add(
                  new InterpreterSetting.InterpreterInfo(
                      className, registeredInterpreter.getName()));
            }
          }
        }
      }

      InterpreterSetting intpSetting = new InterpreterSetting(
          name, groupName, interpreterInfos, properties, dependencies, option);

      if (dependencies.size() > 0) {
        loadInterpreterDependencies(intpSetting);
      }

      InterpreterGroup interpreterGroup = createInterpreterGroup(intpSetting.id(), option);
      intpSetting.setInterpreterGroup(interpreterGroup);

      interpreterSettings.put(intpSetting.id(), intpSetting);
      saveToFile();
      return interpreterGroup;
    }
  }

  /**
   * Builds an InterpreterGroup with a remote or local AngularObjectRegistry
   * depending on {@code option.isRemote()}.
   */
  private InterpreterGroup createInterpreterGroup(String id, InterpreterOption option)
      throws InterpreterException, NullArgumentException {
    //When called from REST API without option we receive NPE
    if (option == null) throw new NullArgumentException("option");

    AngularObjectRegistry angularObjectRegistry;

    InterpreterGroup interpreterGroup = new InterpreterGroup(id);
    if (option.isRemote()) {
      angularObjectRegistry = new RemoteAngularObjectRegistry(
          id, angularObjectRegistryListener, interpreterGroup);
    } else {
      angularObjectRegistry = new AngularObjectRegistry(
          id, angularObjectRegistryListener);
    }

    interpreterGroup.setAngularObjectRegistry(angularObjectRegistry);
    return interpreterGroup;
  }

  /**
   * Closes and removes the per-note interpreters of a setting; no-op unless
   * the setting is configured for per-note sessions. Wakes any thread waiting
   * in createInterpretersForNote().
   */
  public void removeInterpretersForNote(InterpreterSetting interpreterSetting,
      String noteId) {
    if (!interpreterSetting.getOption().isPerNoteSession()) {
      return;
    }
    InterpreterGroup interpreterGroup = interpreterSetting.getInterpreterGroup();
    interpreterGroup.close(noteId);
    interpreterGroup.destroy(noteId);
    synchronized (interpreterGroup) {
      interpreterGroup.remove(noteId);
      interpreterGroup.notifyAll(); // notify createInterpreterForNote()
    }
    logger.info("Interpreter instance {} for note {} is removed",
        interpreterSetting.getName(), noteId);
  }

  /**
   * Instantiates the setting's interpreters for a note. If interpreters for
   * the note already exist, waits (bounded) for a concurrent removal to
   * finish before creating new ones.
   */
  public void createInterpretersForNote(
      InterpreterSetting interpreterSetting, String noteId) {
    InterpreterGroup interpreterGroup = interpreterSetting.getInterpreterGroup();
    String groupName = interpreterSetting.getGroup();
    InterpreterOption option = interpreterSetting.getOption();
    Properties properties = interpreterSetting.getProperties();

    // if interpreters are already there, wait until they're being removed
    synchronized (interpreterGroup) {
      long interpreterRemovalWaitStart = System.nanoTime();
      // interpreter process supposed to be terminated by RemoteInterpreterProcess.dereference()
      // in ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT msec. However, if termination of the process and
      // removal from interpreter group take too long, throw an error.
      // NOTE(review): 10 * 1000 * 1000000 is evaluated in int arithmetic and
      // overflows (result 1_410_065_408 ≈ 1.41 s in ns, not 10 s). Should be
      // 10L * 1000 * 1000000 — confirm and fix.
      long minTimeout = 10 * 1000 * 1000000; // 10 sec
      long interpreterRemovalWaitTimeout = Math.max(minTimeout,
          conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT) * 2);
      while (interpreterGroup.containsKey(noteId)) {
        if (System.nanoTime() - interpreterRemovalWaitStart > interpreterRemovalWaitTimeout) {
          throw new InterpreterException("Can not create interpreter");
        }
        try {
          interpreterGroup.wait(1000);
        } catch (InterruptedException e) {
          logger.debug(e.getMessage(), e);
        }
      }
    }

    logger.info("Create interpreter instance {} for note {}",
        interpreterSetting.getName(), noteId);

    for (String className : interpreterClassList) {
      Set<String> keys = Interpreter.registeredInterpreters.keySet();
      for (String intName : keys) {
        RegisteredInterpreter info = Interpreter.registeredInterpreters.get(intName);
        if (info.getClassName().equals(className)
            && info.getGroup().equals(groupName)) {
          Interpreter intp;
          if (option.isRemote()) {
            intp = createRemoteRepl(info.getPath(), noteId, info.getClassName(),
                properties, interpreterGroup.id);
          } else {
            intp = createRepl(info.getPath(), info.getClassName(), properties);
          }
          synchronized (interpreterGroup) {
            List<Interpreter> interpreters = interpreterGroup.get(noteId);
            if (interpreters == null) {
              interpreters = new LinkedList<Interpreter>();
              interpreterGroup.put(noteId, interpreters);
            }
            interpreters.add(intp);
          }
          logger.info("Interpreter " + intp.getClassName() + " " + intp.hashCode() + " created");
          intp.setInterpreterGroup(interpreterGroup);
          break;
        }
      }
    }
  }

  /**
   * Removes a setting by id: closes/destroys its group, purges it from all
   * note bindings, persists, then deletes its local dependency repo dir.
   */
  public void remove(String id) throws IOException {
    synchronized (interpreterSettings) {
      if (interpreterSettings.containsKey(id)) {
        InterpreterSetting intp = interpreterSettings.get(id);
        intp.getInterpreterGroup().close();
        intp.getInterpreterGroup().destroy();

        interpreterSettings.remove(id);
        for (List<String> settings : interpreterBindings.values()) {
          Iterator<String> it = settings.iterator();
          while (it.hasNext()) {
            String settingId = it.next();
            if (settingId.equals(id)) {
              it.remove();
            }
          }
        }
        saveToFile();
      }
    }

    File localRepoDir = new File(conf.getInterpreterLocalRepoPath() + "/" + id);
    FileUtils.deleteDirectory(localRepoDir);
  }

  /**
   * Get interpreter settings
   * @return settings ordered by configured class order, then by name
   */
  public List<InterpreterSetting> get() {
    synchronized (interpreterSettings) {
      List<InterpreterSetting> orderedSettings = new LinkedList<InterpreterSetting>();
      List<InterpreterSetting> settings = new LinkedList<InterpreterSetting>(
          interpreterSettings.values());
      Collections.sort(settings, new Comparator<InterpreterSetting>(){
        @Override
        public int compare(InterpreterSetting o1, InterpreterSetting o2) {
          return o1.getName().compareTo(o2.getName());
        }
      });

      for (String className : interpreterClassList) {
        for (InterpreterSetting setting : settings) {
          // NOTE(review): this 'continue' only skips iterations of the
          // innermost loop, so this whole loop is a no-op; dedup is actually
          // done by the alreadyAdded check below. Looks like dead code.
          for (InterpreterSetting orderedSetting : orderedSettings) {
            if (orderedSetting.id().equals(setting.id())) {
              continue;
            }
          }

          for (InterpreterSetting.InterpreterInfo intp : setting.getInterpreterInfos()) {
            if (className.equals(intp.getClassName())) {
              boolean alreadyAdded = false;
              for (InterpreterSetting st : orderedSettings) {
                if (setting.id().equals(st.id())) {
                  alreadyAdded = true;
                }
              }
              if (alreadyAdded == false) {
                orderedSettings.add(setting);
              }
            }
          }
        }
      }
      return orderedSettings;
    }
  }

  /**
   * Looks up a setting by its id (parameter name 'name' is historical —
   * the map is keyed by setting id).
   */
  public InterpreterSetting get(String name) {
    synchronized (interpreterSettings) {
      return interpreterSettings.get(name);
    }
  }

  /**
   * Replaces a note's setting bindings, persists, and tears down per-note
   * interpreters for settings that were unbound by this call.
   */
  public void putNoteInterpreterSettingBinding(String noteId,
      List<String> settingList) throws IOException {
    List<String> unBindedSettings = new LinkedList<String>();

    synchronized (interpreterSettings) {
      List<String> oldSettings = interpreterBindings.get(noteId);
      if (oldSettings != null) {
        for (String oldSettingId : oldSettings) {
          if (!settingList.contains(oldSettingId)) {
            unBindedSettings.add(oldSettingId);
          }
        }
      }
      interpreterBindings.put(noteId, settingList);
      saveToFile();

      for (String settingId : unBindedSettings) {
        InterpreterSetting setting = get(settingId);
        removeInterpretersForNote(setting, noteId);
      }
    }
  }

  /**
   * Drops all bindings for a note and removes its per-note interpreters.
   */
  public void removeNoteInterpreterSettingBinding(String noteId) {
    synchronized (interpreterSettings) {
      // NOTE(review): remove() returns null when the note has no bindings,
      // which would NPE in the for-each below — confirm callers guarantee a
      // binding exists, or add a null check.
      List<String> settingIds = interpreterBindings.remove(noteId);
      for (String settingId : settingIds) {
        this.removeInterpretersForNote(get(settingId), noteId);
      }
    }
  }

  /**
   * Returns a copy of the setting ids bound to a note (empty when none).
   */
  public List<String> getNoteInterpreterSettingBinding(String noteId) {
    LinkedList<String> bindings = new LinkedList<String>();
    synchronized (interpreterSettings) {
      List<String> settingIds = interpreterBindings.get(noteId);
      if (settingIds != null) {
        bindings.addAll(settingIds);
      }
    }
    return bindings;
  }

  /**
   * Change interpreter property and restart
   * @param id
   * @param option
   * @param properties
   * @param dependencies
   * @throws IOException
   */
  public void setPropertyAndRestart(String id, InterpreterOption option,
      Properties properties, List<Dependency> dependencies)
      throws IOException, RepositoryException {
    synchronized (interpreterSettings) {
      InterpreterSetting intpsetting = interpreterSettings.get(id);
      if (intpsetting != null) {
        stopJobAllInterpreter(intpsetting);

        intpsetting.getInterpreterGroup().close();
        intpsetting.getInterpreterGroup().destroy();

        intpsetting.setOption(option);
        intpsetting.setDependencies(dependencies);

        InterpreterGroup interpreterGroup = createInterpreterGroup(intpsetting.id(), option);
        intpsetting.setInterpreterGroup(interpreterGroup);
        loadInterpreterDependencies(intpsetting);
        saveToFile();
      } else {
        throw new InterpreterException("Interpreter setting id " + id + " not found");
      }
    }
  }

  /**
   * Aborts running/waiting jobs, closes the setting's group, and rebuilds it
   * with the setting's existing options. Throws when the id is unknown.
   */
  public void restart(String id) {
    synchronized (interpreterSettings) {
      InterpreterSetting intpsetting = interpreterSettings.get(id);
      if (intpsetting != null) {
        stopJobAllInterpreter(intpsetting);

        intpsetting.getInterpreterGroup().close();
        intpsetting.getInterpreterGroup().destroy();

        InterpreterGroup interpreterGroup = createInterpreterGroup(
            intpsetting.id(), intpsetting.getOption());
        intpsetting.setInterpreterGroup(interpreterGroup);
      } else {
        throw new InterpreterException("Interpreter setting id " + id + " not found");
      }
    }
  }

  /**
   * Aborts every running and waiting job on every interpreter of a setting.
   */
  private void stopJobAllInterpreter(InterpreterSetting intpsetting) {
    if (intpsetting != null) {
      for (List<Interpreter> interpreters : intpsetting.getInterpreterGroup().values()) {
        for (Interpreter intp : interpreters) {
          for (Job job : intp.getScheduler().getJobsRunning()) {
            job.abort();
            job.setStatus(Status.ABORT);
            logger.info("Job " + job.getJobName() + " aborted ");
          }
          for (Job job : intp.getScheduler().getJobsWaiting()) {
            job.abort();
            job.setStatus(Status.ABORT);
            logger.info("Job " + job.getJobName() + " aborted ");
          }
        }
      }
    }
  }

  /**
   * Closes/destroys every interpreter group, one thread per setting, and
   * joins them so close() returns only after shutdown completes.
   */
  public void close() {
    List<Thread> closeThreads = new LinkedList<Thread>();

    synchronized (interpreterSettings) {
      Collection<InterpreterSetting> intpsettings = interpreterSettings.values();
      for (final InterpreterSetting intpsetting : intpsettings) {
        Thread t = new Thread() {
          public void run() {
            intpsetting.getInterpreterGroup().close();
            intpsetting.getInterpreterGroup().destroy();
          }
        };
        t.start();
        closeThreads.add(t);
      }
    }

    for (Thread t : closeThreads) {
      try {
        t.join();
      } catch (InterruptedException e) {
        logger.error("Can't close interpreterGroup", e);
      }
    }
  }

  /**
   * Instantiates a local (in-process) interpreter, wrapping it in
   * Classloader- and LazyOpen- decorators. Uses the per-directory
   * classloader cached during init(); context classloader is restored
   * in the finally block.
   */
  private Interpreter createRepl(String dirName, String className,
      Properties property)
      throws InterpreterException {
    logger.info("Create repl {} from {}", className, dirName);

    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    try {

      URLClassLoader ccl = cleanCl.get(dirName);
      if (ccl == null) {
        // classloader fallback
        ccl = URLClassLoader.newInstance(new URL[] {}, oldcl);
      }

      boolean separateCL = true;
      try { // check if server's classloader has driver already.
        // NOTE(review): forName is static — this is equivalent to
        // Class.forName(className) with this class's defining loader; the
        // instance-qualified call is misleading and flagged by linters.
        Class cls = this.getClass().forName(className);
        if (cls != null) {
          separateCL = false;
        }
      } catch (Exception e) {
        logger.error("exception checking server classloader driver" , e);
      }

      URLClassLoader cl;

      if (separateCL == true) {
        cl = URLClassLoader.newInstance(new URL[] {}, ccl);
      } else {
        cl = ccl;
      }
      Thread.currentThread().setContextClassLoader(cl);

      Class<Interpreter> replClass = (Class<Interpreter>) cl.loadClass(className);
      Constructor<Interpreter> constructor =
          replClass.getConstructor(new Class[] {Properties.class});
      Interpreter repl = constructor.newInstance(property);
      repl.setClassloaderUrls(ccl.getURLs());
      LazyOpenInterpreter intp = new LazyOpenInterpreter(
          new ClassloaderInterpreter(repl, cl));
      return intp;
    } catch (SecurityException e) {
      throw new InterpreterException(e);
    } catch (NoSuchMethodException e) {
      throw new InterpreterException(e);
    } catch (IllegalArgumentException e) {
      throw new InterpreterException(e);
    } catch (InstantiationException e) {
      throw new InterpreterException(e);
    } catch (IllegalAccessException e) {
      throw new InterpreterException(e);
    } catch (InvocationTargetException e) {
      throw new InterpreterException(e);
    } catch (ClassNotFoundException e) {
      throw new InterpreterException(e);
    } finally {
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  /**
   * Builds a lazily-opened RemoteInterpreter configured from
   * ZeppelinConfiguration (connect timeout, pool size, runner path,
   * per-setting local repo).
   */
  private Interpreter createRemoteRepl(String interpreterPath, String noteId,
      String className, Properties property, String interpreterId) {
    int connectTimeout = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT);
    String localRepoPath = conf.getInterpreterLocalRepoPath() + "/" + interpreterId;
    int maxPoolSize = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_MAX_POOL_SIZE);
    LazyOpenInterpreter intp = new LazyOpenInterpreter(new RemoteInterpreter(
        property, noteId, className, conf.getInterpreterRemoteRunnerPath(),
        interpreterPath, localRepoPath, connectTimeout, maxPoolSize,
        remoteInterpreterProcessListener));
    return intp;
  }

  /**
   * Recursively collects jar/file URLs under a path, skipping dot-files.
   * Returns an empty array for null/missing paths.
   */
  private URL[] recursiveBuildLibList(File path) throws MalformedURLException {
    URL[] urls = new URL[0];
    if (path == null || path.exists() == false) {
      return urls;
    } else if (path.getName().startsWith(".")) {
      return urls;
    } else if (path.isDirectory()) {
      File[] files = path.listFiles();
      if (files != null) {
        for (File f : files) {
          urls = (URL[]) ArrayUtils.addAll(urls, recursiveBuildLibList(f));
        }
      }
      return urls;
    } else {
      return new URL[] {path.toURI().toURL()};
    }
  }

  /** Returns the live list of configured remote repositories. */
  public List<RemoteRepository> getRepositories() {
    return this.interpreterRepositories;
  }

  /** Registers a remote repository with the resolver and persists settings. */
  public void addRepository(String id, String url, boolean snapshot,
      Authentication auth) throws IOException {
    depResolver.addRepo(id, url, snapshot, auth);
    saveToFile();
  }

  /** Unregisters a remote repository and persists settings. */
  public void removeRepository(String id) throws IOException {
    depResolver.delRepo(id);
    saveToFile();
  }
}
package com.bagri.xquery.saxon;

import static com.bagri.core.Constants.bg_prefix;
import static com.bagri.xquery.saxon.SaxonUtils.*;

import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;

import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import net.sf.saxon.Configuration;
import net.sf.saxon.expr.instruct.UserFunction;
import net.sf.saxon.expr.instruct.UserFunctionParameter;
import net.sf.saxon.functions.FunctionLibrary;
import net.sf.saxon.functions.FunctionLibraryList;
import net.sf.saxon.lib.ExtensionFunctionDefinition;
import net.sf.saxon.lib.ModuleURIResolver;
import net.sf.saxon.lib.UnfailingErrorListener;
import net.sf.saxon.lib.Validation;
import net.sf.saxon.query.Annotation;
import net.sf.saxon.query.AnnotationList;
import net.sf.saxon.query.StaticQueryContext;
import net.sf.saxon.query.XQueryExpression;
import net.sf.saxon.query.XQueryFunction;
import net.sf.saxon.query.XQueryFunctionLibrary;
import net.sf.saxon.trans.XPathException;
import net.sf.saxon.value.AtomicValue;

import com.bagri.core.api.BagriException;
import com.bagri.core.system.DataType;
import com.bagri.core.system.Function;
import com.bagri.core.system.Library;
import com.bagri.core.system.Module;
import com.bagri.core.system.Parameter;
import com.bagri.core.system.XQueryTrigger;
import com.bagri.core.xquery.api.XQCompiler;
import com.bagri.xquery.saxon.ext.util.StaticFunctionExtension;

/**
 * Saxon-backed {@link XQCompiler}: compiles XQuery queries, modules and
 * triggers, and introspects module functions (including REST-annotated ones)
 * via Saxon's query compiler. Compilation errors are converted into
 * {@link BagriException}s carrying the collected Saxon messages.
 */
public class XQCompilerImpl implements XQCompiler {

  private static final Logger logger = LoggerFactory.getLogger(XQCompilerImpl.class);

  private Properties props = new Properties();
  // Saxon configuration; rebuilt by initializeConfig() whenever libraries change.
  private Configuration config;
  private List<Library> libraries = new ArrayList<>();

  public XQCompilerImpl() {
    initializeConfig();
  }

  @Override
  public Properties getProperties() {
    return props;
  }

  @Override
  public void setProperty(String name, Object value) {
    props.setProperty(name, value.toString());
  }

  /**
   * Builds a human-readable error string for a compilation failure: either
   * the messages collected by our LocalErrorListener, or the exception's
   * cause chain when a different listener is installed.
   */
  private String getError(XPathException ex, StaticQueryContext sqc) {
    StringBuilder buff = new StringBuilder();
    if (sqc.getErrorListener() instanceof LocalErrorListener) {
      List<TransformerException> errors =
          ((LocalErrorListener) sqc.getErrorListener()).getErrors();
      for (TransformerException tex: errors) {
        buff.append(tex.getMessageAndLocation()).append("\n");
      }
    } else {
      Throwable err = ex;
      while (err != null) {
        buff.append(err.getMessage()).append("\n");
        err = err.getCause();
      }
    }
    return buff.toString();
  }

  /**
   * Compiles a standalone query for validation purposes; throws
   * BagriException (ecQueryCompile) with collected messages on failure.
   */
  @Override
  public void compileQuery(String query) throws BagriException {
    long stamp = System.currentTimeMillis();
    logger.trace("compileQuery.enter; got query: {}", query);
    StaticQueryContext sqc = null;
    try {
      sqc = prepareStaticContext(null);
      sqc.compileQuery(query);
    } catch (XPathException ex) {
      String error = getError(ex, sqc);
      logger.info("compileQuery.error; message: {}", error);
      throw new BagriException(error, BagriException.ecQueryCompile);
    }
    stamp = System.currentTimeMillis() - stamp;
    logger.trace("compileQuery.exit; time taken: {}", stamp);
  }

  /**
   * Validates a module by compiling it (result discarded); see
   * {@link #getModuleExpression(Module)} for the failure behavior.
   */
  @Override
  public void compileModule(Module module) throws BagriException {
    long stamp = System.currentTimeMillis();
    logger.trace("compileModule.enter; got module: {}", module);
    getModuleExpression(module);
    stamp = System.currentTimeMillis() - stamp;
    logger.trace("compileModule.exit; time taken: {}", stamp);
  }

  /**
   * Builds and compiles the wrapper query invoking a trigger function from
   * its module (with an external $doc variable). Returns the generated query
   * text on success; throws BagriException on compile failure.
   */
  @Override
  public String compileTrigger(Module module, XQueryTrigger trigger) throws BagriException {
    long stamp = System.currentTimeMillis();
    logger.trace("compileTrigger.enter; got trigger: {}", trigger);
    String query = "import module namespace " + module.getPrefix() +
        "=\"" + module.getNamespace() + "\" at \"" + module.getName() + "\";\n" +
        "declare variable $doc external;\n\n" +
        trigger.getFunction() + "($doc)\n";
    StaticQueryContext sqc = prepareStaticContext(module.getBody());
    logger.trace("getModuleExpression; compiling query: {}", query);
    try {
      sqc.compileQuery(query);
    } catch (XPathException ex) {
      String error = getError(ex, sqc);
      //logger.error("compileQuery.error", ex);
      logger.info("compileTrigger.error; message: {}", error);
      throw new BagriException(error, BagriException.ecQueryCompile);
    }
    stamp = System.currentTimeMillis() - stamp;
    logger.trace("compileTrigger.exit; time taken: {}", stamp);
    return query;
  }

  /**
   * Compiles the module and returns textual declarations (annotations +
   * signature) of its functions; %private functions are omitted.
   */
  @Override
  public List<String> getModuleFunctions(Module module) throws BagriException {
    long stamp = System.currentTimeMillis();
    logger.trace("getModuleFunctions.enter; got module: {}", module);
    XQueryExpression exp = getModuleExpression(module);
    List<String> result = lookupFunctions(exp.getExecutable().getFunctionLibrary(),
        new FunctionExtractor<String>() {

      @Override
      public String extractFunction(UserFunction fn) {
        String decl = getFunctionDeclaration(fn);
        AnnotationList atns = fn.getAnnotations();
        // AnnotationList.toString() throws NPE!
        logger.trace("lookupFunctions; fn annotations: {}", atns.size());
        StringBuilder buff = new StringBuilder();
        for (Annotation atn: atns) {
          if (Annotation.PRIVATE.equals(atn.getAnnotationQName())) {
            // do not expose private functions
            return null;
          }
          buff.append(atn.getAnnotationQName().getDisplayName());
          if (atn.getAnnotationParameters() != null) {
            buff.append("(");
            int cnt = 0;
            for (AtomicValue av: atn.getAnnotationParameters()) {
              if (cnt > 0) {
                buff.append(", ");
              }
              buff.append("\"").append(av.getStringValue()).append("\"");
              cnt++;
            }
            buff.append(")");
          }
          buff.append("\n");
        }
        decl = buff.toString() + decl;
        return decl;
      }
    });
    stamp = System.currentTimeMillis() - stamp;
    logger.trace("getModuleFunctions.exit; time taken: {}; returning: {}", stamp, result);
    return result;
  }

  /**
   * Renders a UserFunction signature as text, e.g.
   * "function pfx:name($p as xs:string) as xs:string".
   */
  private String getFunctionDeclaration(UserFunction function) {
    //declare function hw:helloworld($name as xs:string)
    logger.trace("getFunctionDeclaration.enter; function: {}", function);
    StringBuilder buff = new StringBuilder("function ");
    buff.append(function.getFunctionName());
    buff.append("(");
    int idx =0;
    for (UserFunctionParameter ufp: function.getParameterDefinitions()) {
      if (idx > 0) {
        buff.append(", ");
      }
      buff.append("$");
      buff.append(ufp.getVariableQName());
      buff.append(" as ");
      buff.append(ufp.getRequiredType().toString());
      idx++;
    }
    buff.append(") as ");
    // TODO: get rid of Q{} notation..
    buff.append(function.getDeclaredResultType().toString());
    String result = buff.toString();
    logger.trace("getFunctionDeclaration.exit; returning: {}", result);
    return result;
  }

  /**
   * Returns true iff the module currently compiles (errors are swallowed
   * and reported as false).
   */
  @Override
  public boolean getModuleState(Module module) {
    try {
      String query = "import module namespace test=\"" + module.getNamespace() +
          "\" at \"" + module.getName() + "\";\n\n";
      query += "1213";
      StaticQueryContext sqc = prepareStaticContext(module.getBody());
      logger.trace("getModuleExpression; compiling query: {}", query);
      sqc.compileQuery(query);
      return true;
    } catch (XPathException ex) {
      return false;
    }
  }

  /**
   * Replaces the known extension-function libraries and rebuilds the Saxon
   * configuration so the new functions are registered.
   */
  @Override
  public void setLibraries(Collection<Library> libraries) {
    this.libraries.clear();
    this.libraries.addAll(libraries);
    //config.registerExtensionFunction(function);
    initializeConfig();
  }

  /**
   * Creates a fresh Saxon Configuration (schema validation stripped),
   * registers built-in extensions and any configured library functions.
   */
  private void initializeConfig() {
    logger.trace("initializeConfig.enter; current config: {}", config);
    config = Configuration.newConfiguration();
    //config.setHostLanguage(Configuration.XQUERY);
    config.setSchemaValidationMode(Validation.STRIP);
    //config.setConfigurationProperty(FeatureKeys.ALLOW_EXTERNAL_FUNCTIONS, Boolean.TRUE);
    SaxonUtils.registerExtensions(config, null);
    if (libraries != null) {
      registerExtensions(config, libraries);
    }
    logger.trace("initializeConfig.exit; new config: {}", config);
  }

  /**
   * Registers every function of every library as a Saxon extension function;
   * individual registration failures are logged and skipped.
   * NOTE(review): "funtion" in the trace message is a typo, but it is a
   * runtime log string — fix in a code change, not here.
   */
  static void registerExtensions(Configuration config, Collection<Library> libraries) {
    for (Library lib: libraries) {
      for (Function func: lib.getFunctions()) {
        try {
          ExtensionFunctionDefinition efd = new StaticFunctionExtension(func, config);
          logger.trace("registerExtensions; funtion {} registered as {}",
              func.toString(), efd.getFunctionQName());
          config.registerExtensionFunction(efd);
        } catch (Exception ex) {
          logger.warn("registerExtensions; error registering function {}: {}; skipped",
              func.toString(), ex.getMessage());
        }
      }
    }
  }

  /**
   * Builds a StaticQueryContext with our error listener; when a module body
   * is supplied, a LocalModuleURIResolver serves it to the compiler.
   */
  private StaticQueryContext prepareStaticContext(String body) {
    StaticQueryContext sqc = config.newStaticQueryContext();
    sqc.setErrorListener(new LocalErrorListener());
    //sqc.setSchemaAware(true); - requires Saxon-EE
    sqc.setLanguageVersion(saxon_xquery_version);
    if (body != null) {
      sqc.setModuleURIResolver(new LocalModuleURIResolver(body));
    }
    return sqc;
  }

  /**
   * Compiles a module by wrapping it in a trivial importing query
   * ("...;\n\n1213") since compileLibrary is Saxon-EE only. Throws
   * BagriException (ecQueryCompile) on failure.
   */
  private XQueryExpression getModuleExpression(Module module) throws BagriException {
    //logger.trace("getModuleExpression.enter; got namespace: {}, name: {}, body: {}", namespace, name, body);
    String query = "import module namespace test=\"" + module.getNamespace() +
        "\" at \"" + module.getName() + "\";\n\n1213";
    StaticQueryContext sqc = null;
    try {
      //sqc.compileLibrary(query); - works in Saxon-EE only
      sqc = prepareStaticContext(module.getBody());
      logger.trace("getModuleExpression; compiling query: {}", query);
      //logger.trace("getModuleExpression.exit; time taken: {}", stamp);
      return sqc.compileQuery(query);
      //sqc.getCompiledLibrary("test")...
    } catch (XPathException ex) {
      String error = getError(ex, sqc);
      logger.error("getModuleExpression.error; " + error, ex);
      //logger.info("getModuleExpression.error; message: {}", error);
      throw new BagriException(error, BagriException.ecQueryCompile);
    }
  }

  /**
   * Recursively walks a Saxon function-library tree, applying the extractor
   * to each XQuery-defined function; null extractor results are skipped.
   */
  private <R> List<R> lookupFunctions(FunctionLibraryList fll, FunctionExtractor<R> ext) {
    List<R> fl = new ArrayList<>();
    for (FunctionLibrary lib: fll.getLibraryList()) {
      logger.trace("lookupFunctions; function library: {}; class: {}",
          lib.toString(), lib.getClass().getName());
      if (lib instanceof FunctionLibraryList) {
        fl.addAll(lookupFunctions((FunctionLibraryList) lib, ext));
      //} else if (lib instanceof ExecutableFunctionLibrary) {
      //  ExecutableFunctionLibrary efl = (ExecutableFunctionLibrary) lib;
      //  Iterator<UserFunction> itr = efl.iterateFunctions();
      //  while (itr.hasNext()) {
      //    fl.add(getFunctionDeclaration(itr.next()));
      //  }
      } else if (lib instanceof XQueryFunctionLibrary) {
        XQueryFunctionLibrary xqfl = (XQueryFunctionLibrary) lib;
        Iterator<XQueryFunction> itr = xqfl.getFunctionDefinitions();
        while (itr.hasNext()) {
          XQueryFunction fn = itr.next();
          logger.trace("lookupFunctions; fn: {}", fn.getDisplayName());
          R result = ext.extractFunction(fn.getUserFunction());
          if (result != null) {
            fl.add(result);
          }
        }
      }
    }
    return fl;
  }

  /**
   * Compiles the module and extracts Function descriptors for functions
   * carrying REST annotations (rest: prefix or bg_prefix); others are
   * skipped.
   */
  @Override
  public List<Function> getRestFunctions(Module module) throws BagriException {
    long stamp = System.currentTimeMillis();
    logger.trace("getRestFunctions.enter; got module: {}", module);
    XQueryExpression exp = getModuleExpression(module);
    List<Function> result = lookupFunctions(exp.getExecutable().getFunctionLibrary(),
        new FunctionExtractor<Function>() {

      @Override
      public Function extractFunction(UserFunction fn) {
        logger.trace("extractFunction.enter; function: {}", fn);
        AnnotationList atns = fn.getAnnotations();
        if (!hasRestAnnotations(atns)) {
          logger.debug("extractFunction; no REST annotations found for function {}, skipping it",
              fn.getFunctionName().getDisplayName());
          return null;
        }
        DataType type = new DataType(getTypeName(fn.getResultType().getPrimaryType()),
            getCardinality(fn.getResultType().getCardinality()));
        Function result = new Function(null, fn.getFunctionName().getLocalPart(),
            type, null, fn.getFunctionName().getPrefix());
        for (UserFunctionParameter ufp: fn.getParameterDefinitions()) {
          Parameter param = new Parameter(ufp.getVariableQName().getLocalPart(),
              getTypeName(ufp.getRequiredType().getPrimaryType()),
              getCardinality(ufp.getRequiredType().getCardinality()));
          result.getParameters().add(param);
        }
        for (Annotation atn: atns) {
          String aName = atn.getAnnotationQName().getDisplayName();
          if (aName.startsWith(bg_prefix) || aName.startsWith("rest:")) {
            List<String> values = null;
            if (atn.getAnnotationParameters() != null) {
              values = new ArrayList<>(atn.getAnnotationParameters().size());
              for (AtomicValue av: atn.getAnnotationParameters()) {
                values.add(av.getStringValue());
              }
            }
            result.addAnnotation(aName, values);
          }
        }
        logger.trace("extractFunction.exit; returning: {}", result);
        return result;
      }
    });
    stamp = System.currentTimeMillis() - stamp;
    logger.trace("getRestFunctions.exit; time taken: {}; returning: {}", stamp, result);
    return result;
  }

  /**
   * True when any annotation uses the "rest" prefix (case-insensitive).
   */
  private boolean hasRestAnnotations(AnnotationList annotations) {
    for (Annotation atn: annotations) {
      if ("rest".equalsIgnoreCase(atn.getAnnotationQName().getPrefix())) {
        return true;
      }
    }
    return false;
  }

  /** Callback used by lookupFunctions to map a UserFunction to a result. */
  private interface FunctionExtractor<R> {

    R extractFunction(UserFunction fn);
  }

  /**
   * Error listener that accumulates every warning/error/fatal so
   * getError() can report all compilation messages at once.
   */
  private class LocalErrorListener implements UnfailingErrorListener {

    private List<TransformerException> errors = new ArrayList<>();

    public List<TransformerException> getErrors() {
      return errors;
    }

    @Override
    public void error(TransformerException txEx) {
      errors.add(txEx);
    }

    @Override
    public void fatalError(TransformerException txEx) {
      errors.add(txEx);
    }

    @Override
    public void warning(TransformerException txEx) {
      errors.add(txEx);
    }
  }

  /**
   * Module resolver serving the in-memory module body to the compiler
   * (implementation continues beyond this view).
   */
  private class LocalModuleURIResolver implements ModuleURIResolver {

    // In-memory XQuery module source served by this resolver.
    private String body;
LocalModuleURIResolver(String body) { this.body = body; } @Override public StreamSource[] resolve(String moduleURI, String baseURI, String[] locations) throws XPathException { logger.trace("resolve.enter; got moduleURI: {}, baseURI: {}, locations: {}, body: {}", moduleURI, baseURI, locations, body); Reader reader = new StringReader(body); return new StreamSource[] {new StreamSource(reader)}; } } }
package sagex.phoenix.plugin;

import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
import sage.SageTVEventListener;
import sage.SageTVPluginRegistry;
import sagex.api.Configuration;
import sagex.api.MediaFileAPI;
import sagex.api.PluginAPI;
import sagex.phoenix.Phoenix;
import sagex.phoenix.common.HasFileConfigurations;
import sagex.phoenix.common.SystemConfigurationFileManager;
import sagex.phoenix.configuration.ConfigurationManager;
import sagex.phoenix.configuration.Group;
import sagex.phoenix.configuration.proxy.GroupProxy;
import sagex.phoenix.event.PhoenixEventID;
import sagex.phoenix.event.SageEventBus;
import sagex.phoenix.event.SageSystemMessageListener;
import sagex.phoenix.event.SystemMessageID;
import sagex.phoenix.fanart.FanartUtil;
import sagex.phoenix.metadata.*;
import sagex.phoenix.task.ITaskOperation;
import sagex.phoenix.task.ITaskProgressHandler;
import sagex.phoenix.task.TaskItem;
import sagex.phoenix.util.Hints;
import sagex.phoenix.util.LogUtil;
import sagex.phoenix.util.Loggers;
import sagex.phoenix.util.PropertiesUtils;
import sagex.phoenix.vfs.IMediaFile;
import sagex.phoenix.vfs.IMediaFolder;
import sagex.phoenix.vfs.IMediaResource;
import sagex.phoenix.vfs.filters.HomeVideosConfiguration;
import sagex.phoenix.vfs.filters.HomeVideosFilter;
import sagex.phoenix.vfs.sage.SageMediaFile;
import sagex.phoenix.vfs.util.PathUtils;
import sagex.plugin.*;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Sage7 Plugin for Phoenix.
 *
 * <p>Listens to SageTV events (media import, recording completion, plugin load) and drives
 * automatic metadata/fanart updates, missing-episode detection, retry of failed metadata scans
 * via the Phoenix task manager (this class is its own {@link ITaskOperation} and
 * {@link ITaskProgressHandler}), and one-time migration from the legacy BMT plugin.
 *
 * @author seans
 */
public class PhoenixPlugin extends AbstractPlugin implements ITaskOperation, ITaskProgressHandler {
    // Proxy onto the "phoenix" configuration group; assigned in start(), so event handlers
    // null-check it before use.
    private MetadataConfiguration config = null;

    // Filter used to decide whether an imported file is a "home video" (skips metadata scan).
    private HomeVideosFilter homeVideoFilter = new HomeVideosFilter();

    private HomeVideosConfiguration homeVideoCfg = null;

    // Shared progress monitor for the (single) fanart-rescale background job; static so a second
    // button click can find and cancel a run already in flight.
    private static PluginPropertyProgressMonitor monitor = null;

    public PhoenixPlugin(SageTVPluginRegistry registry) {
        super(registry);
    }

    /**
     * SageTV event hook: after a media import pass completes, scan for TV episode gaps.
     */
    @SageEvent(value = SageEvents.ImportingCompleted, background = true)
    public void importingCompleted(@SuppressWarnings("rawtypes") Map vars) {
        checkForMissingEpisodes();
    }

    // if the system level configuration option is enabled we will check for missing episodes
    // (null/null means: review ALL shows, not a single series/season)
    private void checkForMissingEpisodes() {
        checkForMissingEpisodes(null, null);
    }

    /**
     * Builds the "missingEpisodes" utility view and raises one system message per show that has
     * gaps. When {@code seriesID} is non-null the view is narrowed to that series/season.
     * No-op unless the "system messages for TV episode gaps" option is enabled.
     */
    private void checkForMissingEpisodes(String seriesID, String seasonNum) {
        if (config.getEnableSystemMessagesForTVEpisodeGaps()) {
            String BaseView = "phoenix.view.util.missingEpisodes";
            Map<String, Object> viewOptions = new HashMap<String, Object>();
            IMediaFolder folder = null;
            if (seriesID == null) {
                folder = phoenix.umb.CreateView(BaseView);
            } else {
                viewOptions.put("seriesID", seriesID);
                viewOptions.put("seasonNum", seasonNum);
                folder = phoenix.umb.CreateView(BaseView, viewOptions);
            }
            if (folder.getChildren().isEmpty()) {
                LogUtil.logTVEpisodeGapReview("Missing episode review found NO missing episodes");
                return;
            }
            for (IMediaResource show : folder.getChildren()) {
                // first level is the show
                StringBuilder sb = new StringBuilder();
                for (IMediaResource episode : phoenix.media.GetChildren(show)) {
                    sb.append("S").append(phoenix.metadata.GetSeasonNumber(episode));
                    sb.append("E").append(phoenix.metadata.GetEpisodeNumber(episode)).append(";");
                    // only include episode titles when the list is short (<= 7 entries)
                    if (phoenix.media.GetChildren(show).size() <= 7) {
                        sb.append(phoenix.metadata.GetEpisodeName(episode));
                        sb.append("\n");
                    }
                }
                // raise an event for each show that has missing episodes
                Phoenix.getInstance()
                        .getEventBus()
                        .fireEvent(
                                PhoenixEventID.SystemMessageEvent,
                                SageSystemMessageListener.createEvent(SystemMessageID.PHOENIX_MISSING_EPISODES,
                                        SageSystemMessageListener.INFO, "Missing Episodes: " + show.getTitle(),
                                        sb.toString(), null), false);
                LogUtil.logTVEpisodeGapReview("System message created for missing episodes for show: " + show.getTitle());
            }
        }
    }

    /**
     * SageTV event hook for a newly imported media file. Skips pictures; honors a sidecar
     * .properties file (only applying the X- watched/library flags SageTV's own parser ignores);
     * tags home videos with the configured category; otherwise runs an automatic metadata update.
     */
    @SageEvent(value = SageEvents.MediaFileImported, background = true)
    public void mediaFileImported(@SuppressWarnings("rawtypes") Map vars) {
        if (config != null && config.isAutomatedFanartEnabled()) {
            Object mediaFile = vars.get("MediaFile");
            if (mediaFile == null) {
                log.warn("MediaFileImported was called, but no mediafile was passed");
                return;
            }

            if (MediaFileAPI.IsPictureFile(mediaFile)) {
                // just skip images
                return;
            }

            SageMediaFile smf = new SageMediaFile(null, mediaFile);

            File propFile = FanartUtil.resolvePropertiesFile(PathUtils.getFirstFile(smf));
            if (propFile != null && propFile.exists()) {
                // Assume that SageTV has ALREADY updated the metadata, or
                // something else has.
                // but check to see if we should see if the watched/library
                // flags are set.
                // X- flags are not processed by sagetv's native metadata
                // parser, so we
                // update those X- flags from here
                try {
                    Properties props = PropertiesUtils.load(propFile);
                    if (props.containsKey(IMetadata.XWatched)) {
                        smf.setWatched(BooleanUtils.toBoolean(props.getProperty(IMetadata.XWatched)));
                    }

                    if (props.containsKey(IMetadata.XLibraryFile)) {
                        smf.setLibraryFile(BooleanUtils.toBoolean(props.getProperty(IMetadata.XLibraryFile)));
                    }
                } catch (IOException e) {
                    // best effort: a malformed/unreadable sidecar file is silently ignored
                }

                // don't process other metadata
                return;
            }

            // check for home videos
            if (homeVideoFilter.accept(smf)) {
                // we have a home video
                if (!StringUtils.isEmpty(homeVideoCfg.getCategory())) {
                    // assign the home video category to this
                    smf.getMetadata().getGenres().add(homeVideoCfg.getCategory());
                }
                return;
            }

            // regular file, let's update.
            LogUtil.logAutoUpdate("MEDIA", smf);
            updateMetadata(smf, false);
        }
    }

    /**
     * SageTV event hook for a completed recording: runs an automatic metadata update (flagged as
     * a recording) and, for TV files, re-checks that series/season for missing episodes.
     */
    @SageEvent(value = SageEvents.RecordingCompleted, background = true)
    public void recordingCompleted(@SuppressWarnings("rawtypes") Map vars) {
        if (config != null && config.isAutomatedFanartEnabled()) {
            Object mediaFile = vars.get("MediaFile");
            if (mediaFile == null) {
                log.warn("MediaFileImported was called, but no mediafile was passed");
                return;
            }

            SageMediaFile smf = new SageMediaFile(null, mediaFile);
            LogUtil.logAutoUpdate("RECORDING", smf);
            updateMetadata(smf, true);
            // see if this is a TV item and if so check for missing episodes
            if (MediaFileAPI.IsTVFile(mediaFile)) {
                String seriesID = phoenix.metadata.GetMediaProviderDataID(smf);
                Integer seasonNum = phoenix.metadata.GetSeasonNumber(smf);
                checkForMissingEpisodes(seriesID, seasonNum.toString());
            }
        }
    }

    /**
     * Runs an automatic metadata update on {@code file}. On a retryable failure (see
     * {@link #canRetry(Throwable)}) the work is re-queued as a TaskItem handled by this class;
     * otherwise the failure is reported via {@link #reportFailure(IMediaFile, Throwable)}.
     */
    private void updateMetadata(IMediaFile file, boolean recording) {
        try {
            Hints options = Phoenix.getInstance().getMetadataManager().getDefaultMetadataOptions();
            options.setBooleanHint(MetadataHints.KNOWN_RECORDING, recording);
            options.setBooleanHint(MetadataHints.AUTOMATIC, true);
            if (Phoenix.getInstance().getMetadataManager().canScanMediaFile(file, options)) {
                Phoenix.getInstance().getMetadataManager().automaticUpdate(file, options);
            } else {
                LogUtil.logMetadataSkipped(file);
            }
        } catch (Exception me) {
            if (canRetry(me)) {
                log.info("Automatic Metadata Failed. Will try to re-queued for later for " + file);
                TaskItem ti = new TaskItem();
                ti.getUserData().put("file", file);
                ti.getUserData().put("recording", recording);
                ti.setOperation(this);
                ti.setHandler(this);
                // waits 5 minutes for a retry
                Phoenix.getInstance().getTaskManager().submitTaskWithRetry(ti);
            } else {
                reportFailure(file, me);
            }
        }
    }

    /**
     * Logs a metadata-update error and, if configured, raises a system message for the failure.
     */
    private void reportFailure(IMediaFile file, Throwable e) {
        LogUtil.logMetadataUpdatedError(file, e);
        if (config.getEnableSystemMessagesForFailures()) {
            Phoenix.getInstance()
                    .getEventBus()
                    .fireEvent(
                            PhoenixEventID.SystemMessageEvent,
                            SageSystemMessageListener.createEvent(SystemMessageID.AUTOMATIC_METADATA_LOOKUP_FAILED,
                                    SageSystemMessageListener.STATUS, "Automatic Metadata Failed: " + file.getTitle(),
                                    e.getMessage(), e), false);
        }
    }

    /**
     * SageTV event hook fired once all plugins are loaded: scans every installed plugin for a
     * "Phoenix" contribution directory and registers its sub-folders (Configuration, vfs, Menus,
     * metadata, Skins, scrapers...) with the matching Phoenix file-configuration manager, then
     * reloads any manager that received contributions and (always) re-initializes Phoenix services.
     */
    @SageEvent(value = SageEvents.AllPluginsLoaded, background = false)
    public void onPluginsLoaded() {
        log.info("Begin: Phoenix looking for plugins that contribute to the Phoenix Core...");
        List<HasFileConfigurations> managers = new ArrayList<HasFileConfigurations>();
        try {
            Object[] plugins = PluginAPI.GetAllAvailablePlugins();
            if (plugins != null && plugins.length > 0) {
                for (Object plugin : plugins) {
                    File dir = null;
                    String pluginArea = PluginAPI.GetPluginResourcePath(plugin);
                    if (pluginArea != null) {
                        dir = new File(pluginArea);
                    }
                    // fall back to plugins/<identifier> when no resource path is registered
                    if (dir == null || !dir.exists()) {
                        dir = new File(new File("plugins"), PluginAPI.GetPluginIdentifier(plugin));
                    }
                    if (dir == null || !dir.exists()) {
                        // skip this plugin, it has nothing to offer
                        continue;
                    }

                    dir = new File(dir, "Phoenix");
                    if (dir == null || !dir.exists()) {
                        // skip this plugin, it has no phoenix contribution
                        continue;
                    }

                    managePlugin(new File(dir, "Configuration"), Phoenix.getInstance().getConfigurationMetadataManager(), managers);
                    managePlugin(new File(dir, "vfs"), Phoenix.getInstance().getVFSManager(), managers);
                    managePlugin(new File(dir, "Menus"), Phoenix.getInstance().getMenuManager(), managers);
                    managePlugin(new File(dir, "metadata"), Phoenix.getInstance().getMetadataManager(), managers);
                    managePlugin(new File(dir, "Skins"), Phoenix.getInstance().getSkinManager(), managers);
                    managePlugin(new File(dir, "scrapers"), Phoenix.getInstance().getMediaTitlesManager(), managers);
                    managePlugin(new File(dir, "scrapers/movies"), Phoenix.getInstance().getMovieScrapers(), managers);
                    managePlugin(new File(dir, "scrapers/tv"), Phoenix.getInstance().getTVScrapers(), managers);
                }
            }

            if (managers.size() > 0) {
                log.info("Begin Reloading some configurations because of plugin contributions");
                for (HasFileConfigurations m : managers) {
                    m.loadConfigurations();
                }
                log.info("End Reloading some configurations because of plugin contributions");
            }
        } catch (Throwable t) {
            log.warn("Phoenix failed to discover additional phoenix enhancements from other plugins", t);
        } finally {
            log.info("Reloading Phoenix Services in case Plugins have contributed.");
            // now that the plugins are loaded, start the services.
            Phoenix.getInstance().initServices();
        }
        log.info("End: Phoenix looking for plugins that contribute to the Phoenix Core");
    }

    // Registers dir with the manager (if dir exists) and remembers the manager so its
    // configurations can be reloaded afterwards. A manager may be added more than once.
    private void managePlugin(File dir, SystemConfigurationFileManager manager, List<HasFileConfigurations> managers) {
        if (dir.exists()) {
            manager.addPluginConfiguration(dir);
            managers.add(manager);
        }
    }

    /**
     * Plugin lifecycle start: wires the SageTV event bus into Phoenix, exposes the "phoenix"
     * configuration group as plugin settings, subscribes to configuration button events, performs
     * the one-time BMT upgrade (guarded by the "phoenix/configured" server property), and syncs
     * the custom metadata field list. All failures are logged, never propagated.
     */
    @Override
    public void start() {
        try {
            log.info("Phoenix Plugin starting...");
            super.start();

            try {
                log.info("Registering SageTV EventListener in Phoenix...");
                // bind sagetv event system to phoenix
                Phoenix.getInstance().getEventBus().setEventBus(new SageEventBus(pluginRegistry));

                // get the plugin configuration
                Group el = (Group) Phoenix.getInstance().getConfigurationMetadataManager().findElement("phoenix");
                PluginConfigurationHelper.addConfiguration(this, el);

                config = GroupProxy.get(MetadataConfiguration.class);
                homeVideoCfg = GroupProxy.get(HomeVideosConfiguration.class);

                // register ourself to listen configuration button events
                // and dispatch them to the setConfigValue so that it triggers
                // an button event
                Phoenix.getInstance().getEventBus().addListener(ConfigurationManager.BUTTON_EVENT, new SageTVEventListener() {
                    @Override
                    public void sageEvent(String evt, Map args) {
                        if (ConfigurationManager.BUTTON_EVENT.equals(evt)) {
                            setConfigValue((String) args.get(ConfigurationManager.EVENT_PROPERTY), "true");
                        }
                    }
                });
            } catch (Throwable t) {
                t.printStackTrace();
            }

            // first-run marker: only attempt the BMT upgrade once per server
            if (StringUtils.isEmpty((String) phoenix.config.GetServerProperty("phoenix/configured"))) {
                phoenix.config.SetServerProperty("phoenix/configured", String.valueOf(Calendar.getInstance().getTime().getTime()));
                try {
                    upgradeFromBMT();
                } catch (Throwable t) {
                    log.warn("Failed to upgrade BMT to Phoenix", t);
                }
            }

            updateCustomMetadataFields();
        } catch (Throwable e) {
            e.printStackTrace();
            log.warn("Phoenix Plugin failed to start!", e);
        }
    }

    /**
     * Merges Phoenix's known custom metadata property keys into SageTV's
     * "custom_metadata_properties" server property (semicolon-separated, deduplicated and sorted
     * via TreeSet) and writes the result back.
     */
    public static void updateCustomMetadataFields() {
        String fieldProp = Configuration.GetServerProperty("custom_metadata_properties", "");
        String fields[] = fieldProp.split(";");
        Set<String> fieldList = new TreeSet<String>(Arrays.asList(fields));

        // remove al the know props
        // sean 2010-01-08
        // disabling this because sagetv apparently adds the fields
        // automatically
        // String all[] = MetadataUtil.getPropertyKeys(IMetadata.class);
        // for (String s : all) {
        // fieldList.remove(s);
        // }

        // add known custom props
        String custom[] = MetadataUtil.getPropertyKeys(ISageCustomMetadataRW.class);
        for (String s : custom) {
            fieldList.add(s);
        }

        fieldProp = StringUtils.join(fieldList, ";");
        Configuration.SetServerProperty("custom_metadata_properties", fieldProp);
        Loggers.LOG.info("Setting Custom Metadata Fields: " + fieldProp);
    }

    /**
     * One-time migration from the legacy BMT (Batch Metadata Tools) plugin: unregisters the old
     * metadata parser plugin from client and server properties (enabling automated fanart if it
     * was present), deletes legacy config files, reloads configuration/VFS managers, moves/renames
     * the old scrapers data, and raises a system message describing the upgrade.
     */
    private void upgradeFromBMT() {
        boolean autoplugin = false;

        // check if bmt is configured, and if so, then remove it.
        String plugin = Configuration.GetProperty("mediafile_metadata_parser_plugins", null);
        if (plugin != null && plugin.contains("org.jdna.sage.MetadataUpdaterPlugin")) {
            Configuration.SetProperty("mediafile_metadata_parser_plugins", null);
            autoplugin = true;
        }

        plugin = Configuration.GetServerProperty("mediafile_metadata_parser_plugins", null);
        if (plugin != null && plugin.contains("org.jdna.sage.MetadataUpdaterPlugin")) {
            Configuration.SetServerProperty("mediafile_metadata_parser_plugins", null);
            autoplugin = true;
        }

        if (autoplugin) {
            MetadataConfiguration config = GroupProxy.get(MetadataConfiguration.class);
            config.setAutomatedFanartEnabled(true);
        }

        removeFile("STVs/Phoenix/Configuration/ext/bmt.xml");
        removeFile("STVs/Phoenix/Configuration/ext/log4j.xml");
        removeFile("STVs/Phoenix/Configuration/ext/Sage.xml");
        removeFile("STVs/Phoenix/vfs/ext/bmt.xml");

        // reload configuration metadata and vfs, since it may have changed.
        Phoenix.getInstance().getConfigurationMetadataManager().loadConfigurations();
        Phoenix.getInstance().getVFSManager().loadConfigurations();

        // move the media titles, in case it's been created, modified.
        File titles = new File("scrapers/MediaTitles.xml");
        File newTitles = new File("STVs/Phoenix/scrapers/MediaTitles.xml");
        if (titles.exists() && !newTitles.exists()) {
            try {
                org.apache.commons.io.FileUtils.moveFile(titles, newTitles);
            } catch (IOException e) {
                log.warn("Failed to copy/move the MediaTitles.xml");
            }
        }

        File scrapers = new File("scrapers");
        if (scrapers.exists()) {
            File oldScrapers = new File("scrapers.old");
            boolean renamed = scrapers.renameTo(oldScrapers);
            if (!renamed) {
                log.warn("Failed to rename scraper dir: " + scrapers);
            }
        }

        // send a system message stating that we've upgrade the bmt plugin
        Phoenix.getInstance()
                .getEventBus()
                .fireEvent(
                        PhoenixEventID.SystemMessageEvent,
                        SageSystemMessageListener.createEvent(SystemMessageID.PHOENIX_METADATA,
                                SageSystemMessageListener.INFO, "Batch Metadata Tools updated",
                                "Phoenix Metadata has been installed and the legacy bmt files/plugin have been removed.",
                                null), false);
    }

    // Deletes the file now if possible, otherwise schedules deletion on JVM exit.
    private void removeFile(String f) {
        File file = new File(f);
        if (file.exists()) {
            if (!file.delete()) {
                file.deleteOnExit();
            }
        }
    }

    @Override
    public void onStart(TaskItem item) {
        // ignore, already logged
    }

    @Override
    public void onComplete(TaskItem item) {
        // ignore, it'll be logged when the item is written
        // release references so the completed item doesn't pin the file/operation
        item.setHandler(null);
        item.setOperation(null);
        item.getUserData().clear();
    }

    @Override
    public void onError(TaskItem item) {
        // finally failed, so report the error now
        reportFailure((IMediaFile) item.getUserData().get("file"), item.getError());
        item.setHandler(null);
        item.setOperation(null);
        item.getUserData().clear();
    }

    /**
     * Task retry body: re-runs the automatic metadata update for the file/recording flag stashed
     * in the TaskItem's user data by {@link #updateMetadata(IMediaFile, boolean)}.
     */
    @Override
    public void performAction(TaskItem item) throws Throwable {
        IMediaFile file = (IMediaFile) item.getUserData().get("file");
        boolean recording = (Boolean) item.getUserData().get("recording");
        log.info("Retrying Metadata Scan for " + file);
        Hints options = Phoenix.getInstance().getMetadataManager().getDefaultMetadataOptions();
        options.setBooleanHint(MetadataHints.KNOWN_RECORDING, recording);
        options.setBooleanHint(MetadataHints.AUTOMATIC, true);
        Phoenix.getInstance().getMetadataManager().automaticUpdate(file, options);
    }

    /**
     * A failure is retryable only when it is a {@link MetadataException} that says so itself.
     */
    @Override
    public boolean canRetry(Throwable t) {
        if (t != null && t instanceof MetadataException) {
            return ((MetadataException) t).canRetry();
        }
        return false;
    }

    /**
     * Configuration-button handler: toggles the fanart-rescale background job. A click while a
     * job is running requests cancellation; otherwise a new job is submitted that rescales all
     * images under the central fanart folder to the configured max screen size.
     */
    @ButtonClickHandler("phoenix/fanart/rescaleFanart")
    public void rescaleFanart() {
        final PluginProperty prop = getPluginPropertyForSetting("phoenix/fanart/rescaleFanart");
        if (monitor!=null && !(monitor.isCancelled() || monitor.isDone())) {
            log.info("Cancelling Rescale fanart");
            monitor.setTaskName("Cancelling...");
            monitor.setCancelled(true);
            return;
        }

        monitor = new PluginPropertyProgressMonitor(prop);
        Phoenix.getInstance().getTaskManager().submit(new Runnable() {
            @Override
            public void run() {
                log.info("Rescaling Fanart started...");
                FanartUtil.applyScreenScalingToAllImageFiles(new File(config.getFanartCentralFolder()),
                        config.getMaxScreenSize(), monitor);
                log.info("Rescaling Fanart complete...");
            }
        });
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
 * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package org.apache.storm.trident.topology;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.storm.Config;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.trident.spout.ITridentSpout;
import org.apache.storm.trident.topology.state.TransactionalState;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.WindowedTimeThrottler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Spout that coordinates Trident transactions. It emits batch attempts on {@link #BATCH_STREAM_ID},
 * commit signals on {@link #COMMIT_STREAM_ID} (one transaction at a time, in txid order), and
 * success notifications on {@link #SUCCESS_STREAM_ID}, driving each transaction through the
 * PROCESSING -> PROCESSED -> COMMITTING lifecycle tracked in {@code _activeTx}. Current txid and
 * per-transaction attempt counters are persisted in {@link TransactionalState} so the coordinator
 * can resume after restart.
 */
public class MasterBatchCoordinator extends BaseRichSpout {
    public static final Logger LOG = LoggerFactory.getLogger(MasterBatchCoordinator.class);

    // First transaction id ever issued.
    public static final long INIT_TXID = 1L;

    public static final String BATCH_STREAM_ID = "$batch";
    public static final String COMMIT_STREAM_ID = "$commit";
    public static final String SUCCESS_STREAM_ID = "$success";

    // Keys used in TransactionalState for the persisted current txid / attempt counters.
    private static final String CURRENT_TX = "currtx";
    private static final String CURRENT_ATTEMPTS = "currattempts";

    // In-flight transactions keyed by txid; TreeMap keeps them ordered so commit order == txid order.
    TreeMap<Long, TransactionStatus> _activeTx = new TreeMap<Long, TransactionStatus>();
    // Monotonically increasing attempt counter per txid (persisted under CURRENT_ATTEMPTS).
    TreeMap<Long, Integer> _attemptIds;

    // Next transaction to commit (lowest uncommitted txid).
    Long _currTransaction;
    int _maxTransactionActive;

    List<ITridentSpout.BatchCoordinator> _coordinators = new ArrayList();

    List<String> _managedSpoutIds;
    List<ITridentSpout> _spouts;
    WindowedTimeThrottler _throttler;

    boolean _active = true;

    // One coordinator state per managed spout id.
    private List<TransactionalState> _states = new ArrayList();

    private SpoutOutputCollector _collector;

    /**
     * @param spoutIds ids of the managed Trident spouts (must be non-empty)
     * @param spouts   the spouts themselves, parallel to {@code spoutIds}
     * @throws IllegalArgumentException if {@code spoutIds} is empty
     */
    public MasterBatchCoordinator(List<String> spoutIds, List<ITridentSpout> spouts) {
        if (spoutIds.isEmpty()) {
            throw new IllegalArgumentException("Must manage at least one spout");
        }
        _managedSpoutIds = spoutIds;
        _spouts = spouts;
        LOG.debug("Created {}", this);
    }

    public List<String> getManagedSpoutIds() {
        return _managedSpoutIds;
    }

    @Override
    public void activate() {
        _active = true;
    }

    @Override
    public void deactivate() {
        _active = false;
    }

    /**
     * Opens coordinator state for every managed spout, restores the current txid and attempt
     * counters from persisted state, and creates each spout's batch coordinator.
     * Max concurrently-active transactions comes from TOPOLOGY_MAX_SPOUT_PENDING (default 1).
     */
    @Override
    public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
        _throttler = new WindowedTimeThrottler((Number) conf.get(Config.TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS), 1);
        for (String spoutId : _managedSpoutIds) {
            _states.add(TransactionalState.newCoordinatorState(conf, spoutId));
        }
        _currTransaction = getStoredCurrTransaction();

        _collector = collector;
        Number active = (Number) conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING);
        if (active == null) {
            _maxTransactionActive = 1;
        } else {
            _maxTransactionActive = active.intValue();
        }
        _attemptIds = getStoredCurrAttempts(_currTransaction, _maxTransactionActive);

        for (int i = 0; i < _spouts.size(); i++) {
            String txId = _managedSpoutIds.get(i);
            _coordinators.add(_spouts.get(i).getCoordinator(txId, conf, context));
        }
        LOG.debug("Opened {}", this);
    }

    @Override
    public void close() {
        for (TransactionalState state : _states) {
            state.close();
        }
        LOG.debug("Closed {}", this);
    }

    @Override
    public void nextTuple() {
        sync();
    }

    /**
     * Ack of a batch or commit tuple (msgId is the TransactionAttempt). A stale attempt (not the
     * one currently tracked for that txid) is ignored. PROCESSING -> PROCESSED marks the batch
     * done; an ack while COMMITTING completes the transaction: it is removed from the active set,
     * a success tuple is emitted, and the persisted current txid is advanced.
     */
    @Override
    public void ack(Object msgId) {
        TransactionAttempt tx = (TransactionAttempt) msgId;
        TransactionStatus status = _activeTx.get(tx.getTransactionId());
        LOG.debug("Ack. [tx_attempt = {}], [tx_status = {}], [{}]", tx, status, this);
        if (status != null && tx.equals(status.attempt)) {
            if (status.status == AttemptStatus.PROCESSING) {
                status.status = AttemptStatus.PROCESSED;
                LOG.debug("Changed status. [tx_attempt = {}] [tx_status = {}]", tx, status);
            } else if (status.status == AttemptStatus.COMMITTING) {
                _activeTx.remove(tx.getTransactionId());
                _attemptIds.remove(tx.getTransactionId());
                _collector.emit(SUCCESS_STREAM_ID, new Values(tx));
                _currTransaction = nextTransactionId(tx.getTransactionId());
                // persist the new current txid before starting anything else
                for (TransactionalState state : _states) {
                    state.setData(CURRENT_TX, _currTransaction);
                }
                LOG.debug("Emitted on [stream = {}], [tx_attempt = {}], [tx_status = {}], [{}]", SUCCESS_STREAM_ID, tx, status, this);
            }
            sync();
        }
    }

    /**
     * Failure of a batch/commit tuple: drops the failed transaction and every later active
     * transaction (tailMap view clear), since later batches may depend on the failed one; they
     * will be re-attempted by {@link #sync()} with a higher attempt id.
     */
    @Override
    public void fail(Object msgId) {
        TransactionAttempt tx = (TransactionAttempt) msgId;
        TransactionStatus stored = _activeTx.remove(tx.getTransactionId());
        LOG.debug("Fail. [tx_attempt = {}], [tx_status = {}], [{}]", tx, stored, this);
        if (stored != null && tx.equals(stored.attempt)) {
            _activeTx.tailMap(tx.getTransactionId()).clear();
            sync();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // in partitioned example, in case an emitter task receives a later transaction than it's emitted so far,
        // when it sees the earlier txid it should know to emit nothing
        declarer.declareStream(BATCH_STREAM_ID, new Fields("tx"));
        declarer.declareStream(COMMIT_STREAM_ID, new Fields("tx"));
        declarer.declareStream(SUCCESS_STREAM_ID, new Fields("tx"));
    }

    /**
     * Core scheduling step, called from nextTuple/ack/fail: (1) if the current transaction is
     * fully PROCESSED, emit its commit tuple; (2) while active and under the max-pending limit,
     * start (or restart) batches for the next txids that are ready, persisting the bumped attempt
     * id before each emit and honoring the batch-emit-interval throttler.
     */
    private void sync() {
        // note that sometimes the tuples active may be less than max_spout_pending, e.g.
        // max_spout_pending = 3
        // tx 1, 2, 3 active, tx 2 is acked. there won't be a commit for tx 2 (because tx 1 isn't committed yet),
        // and there won't be a batch for tx 4 because there's max_spout_pending tx active
        TransactionStatus maybeCommit = _activeTx.get(_currTransaction);
        if (maybeCommit != null && maybeCommit.status == AttemptStatus.PROCESSED) {
            maybeCommit.status = AttemptStatus.COMMITTING;
            _collector.emit(COMMIT_STREAM_ID, new Values(maybeCommit.attempt), maybeCommit.attempt);
            LOG.debug("Emitted on [stream = {}], [tx_status = {}], [{}]", COMMIT_STREAM_ID, maybeCommit, this);
        }

        if (_active) {
            if (_activeTx.size() < _maxTransactionActive) {
                Long curr = _currTransaction;
                for (int i = 0; i < _maxTransactionActive; i++) {
                    if (!_activeTx.containsKey(curr) && isReady(curr)) {
                        // by using a monotonically increasing attempt id, downstream tasks
                        // can be memory efficient by clearing out state for old attempts
                        // as soon as they see a higher attempt id for a transaction
                        Integer attemptId = _attemptIds.get(curr);
                        if (attemptId == null) {
                            attemptId = 0;
                        } else {
                            attemptId++;
                        }
                        _attemptIds.put(curr, attemptId);
                        for (TransactionalState state : _states) {
                            state.setData(CURRENT_ATTEMPTS, _attemptIds);
                        }

                        TransactionAttempt attempt = new TransactionAttempt(curr, attemptId);
                        final TransactionStatus newTransactionStatus = new TransactionStatus(attempt);
                        _activeTx.put(curr, newTransactionStatus);
                        _collector.emit(BATCH_STREAM_ID, new Values(attempt), attempt);
                        LOG.debug("Emitted on [stream = {}], [tx_attempt = {}], [tx_status = {}], [{}]", BATCH_STREAM_ID, attempt, newTransactionStatus, this);
                        _throttler.markEvent();
                    }
                    curr = nextTransactionId(curr);
                }
            }
        }
    }

    /**
     * A txid is ready to start when the throttler allows it and at least one spout coordinator
     * says it is ready.
     */
    private boolean isReady(long txid) {
        if (_throttler.isThrottled()) {
            return false;
        }
        //TODO: make this strategy configurable?... right now it goes if anyone is ready
        for (ITridentSpout.BatchCoordinator coord : _coordinators) {
            if (coord.isReady(txid)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        Config ret = new Config();
        // coordination must be single-task to keep txids globally ordered
        ret.setMaxTaskParallelism(1);
        ret.registerSerialization(TransactionAttempt.class);
        return ret;
    }

    private Long nextTransactionId(Long id) {
        return id + 1;
    }

    /**
     * Restores the current txid: the max persisted CURRENT_TX across all spout states,
     * defaulting to {@link #INIT_TXID}.
     */
    private Long getStoredCurrTransaction() {
        Long ret = INIT_TXID;
        for (TransactionalState state : _states) {
            Long curr = (Long) state.getData(CURRENT_TX);
            if (curr != null && curr.compareTo(ret) > 0) {
                ret = curr;
            }
        }
        return ret;
    }

    /**
     * Restores attempt counters: for each txid keeps the max persisted attempt id across all
     * spout states, then trims to the window [currTransaction, currTransaction + maxBatches - 1).
     */
    private TreeMap<Long, Integer> getStoredCurrAttempts(long currTransaction, int maxBatches) {
        TreeMap<Long, Integer> ret = new TreeMap<Long, Integer>();
        for (TransactionalState state : _states) {
            Map<Object, Number> attempts = (Map) state.getData(CURRENT_ATTEMPTS);
            if (attempts == null) {
                attempts = new HashMap();
            }
            for (Entry<Object, Number> e : attempts.entrySet()) {
                // this is because json doesn't allow numbers as keys...
                // TODO: replace json with a better form of encoding
                Number txidObj;
                if (e.getKey() instanceof String) {
                    txidObj = Long.parseLong((String) e.getKey());
                } else {
                    txidObj = (Number) e.getKey();
                }
                long txid = ((Number) txidObj).longValue();
                int attemptId = ((Number) e.getValue()).intValue();
                Integer curr = ret.get(txid);
                if (curr == null || attemptId > curr) {
                    ret.put(txid, attemptId);
                }
            }
        }
        ret.headMap(currTransaction).clear();
        ret.tailMap(currTransaction + maxBatches - 1).clear();
        return ret;
    }

    @Override
    public String toString() {
        return "MasterBatchCoordinator{" + "_states=" + _states + ", _activeTx=" + _activeTx + ", _attemptIds=" + _attemptIds
                + ", _collector=" + _collector + ", _currTransaction=" + _currTransaction + ", _maxTransactionActive="
                + _maxTransactionActive + ", _coordinators=" + _coordinators + ", _managedSpoutIds=" + _managedSpoutIds
                + ", _spouts=" + _spouts + ", _throttler=" + _throttler + ", _active=" + _active + "}";
    }

    // Lifecycle of a transaction attempt: batch emitted (PROCESSING), batch acked (PROCESSED),
    // commit emitted (COMMITTING); a COMMITTING ack removes the transaction entirely.
    private static enum AttemptStatus {
        PROCESSING,
        PROCESSED,
        COMMITTING
    }

    // Pairs the current attempt of a transaction with its lifecycle status.
    private static class TransactionStatus {
        TransactionAttempt attempt;
        AttemptStatus status;

        public TransactionStatus(TransactionAttempt attempt) {
            this.attempt = attempt;
            this.status = AttemptStatus.PROCESSING;
        }

        @Override
        public String toString() {
            return attempt.toString() + " <" + status.toString() + ">";
        }
    }
}
package com.anttoolkit.general.tasks.concurrent.util;

import java.io.*;
import java.util.*;

import org.apache.tools.ant.*;
import org.apache.tools.ant.property.LocalProperties;
import org.apache.tools.ant.util.*;

import com.anttoolkit.general.loggers.*;
import com.anttoolkit.general.entities.*;
import com.anttoolkit.general.entities.EntityManager.*;
import com.anttoolkit.general.tasks.orchestration.util.*;

/**
 * Worker thread that executes a fixed, ordered list of Ant {@link Task}s,
 * optionally as one step of an {@link Orchestration}.
 *
 * Responsibilities visible in this class:
 * <ul>
 * <li>carries an entity-manager scope stack from the spawning thread into this one
 *     (created in the constructor, installed at the start of {@link #processStep});</li>
 * <li>mirrors log output to an optional per-thread log file unless a thread-aware
 *     logger is registered with the project;</li>
 * <li>records the first {@link Throwable} thrown by any task and, if configured,
 *     forbids creation of further threads on failure.</li>
 * </ul>
 *
 * NOTE(review): implements raw {@code Comparable} (not {@code Comparable<TasksThread>}),
 * and {@link #compareTo} is not consistent with {@code equals} — it compares display
 * names only. Left as-is; callers may depend on the raw signature.
 */
public class TasksThread extends Thread implements Comparable, StepProcessor
{
	// Logical group this thread belongs to; required, validated in the constructor.
	private String threadGroup = null;
	// Optional per-thread log file; null/blank means "no file logging".
	private String logFile = null;
	// Tasks to run, in order. Never null/empty after construction.
	private List<Task> tasks = null;
	// First failure seen while running tasks; null while/if none occurred.
	private Throwable error = null;
	private boolean echo = true;
	private String[] associatedValues = null;
	// When true, a failure in this thread blocks creation of new threads.
	private boolean forbidNewThreadsOnFailure = true;
	// Cached once in the constructor; controls whether we also write to logFile.
	private boolean isThreadAwareLoggerRegistered = false;
	// Entity scope stack captured on the spawning thread; handed over to this
	// thread in processStep() and then nulled so it cannot be reused.
	private List<Map<EntityType, Map<String, Object[]>>> scopeStack = null;
	// Orchestration context captured from the spawning thread (may be null).
	private Orchestration orchestration = null;
	private OrchestrationStep orcStep = null;

	/**
	 * Builds the thread but does not start it.
	 *
	 * @param threadName   display name passed to {@link Thread#Thread(String)}
	 * @param threadGroup  logical group name; must be non-blank
	 * @param associatedValues  opaque values attached to this thread for callers
	 * @param logFile      optional log file path (null = no file logging)
	 * @param tasks        tasks to execute; must be non-empty
	 * @param step         orchestration step this thread executes, or null
	 * @param echo         whether callers should echo this thread's output
	 * @param forbidNewThreadsOnFailure  block new thread creation on failure
	 * @throws IllegalArgumentException if threadGroup is blank or tasks is empty
	 */
	TasksThread(String threadName, String threadGroup, String[] associatedValues, String logFile, List<Task> tasks, OrchestrationStep step, boolean echo, boolean forbidNewThreadsOnFailure)
	{
		super(threadName);

		if (threadGroup == null || threadGroup.trim().isEmpty())
		{
			throw new IllegalArgumentException("Thread group can't be empty");
		}

		this.threadGroup = threadGroup;

		if (tasks == null || tasks.isEmpty())
		{
			throw new IllegalArgumentException("No tasks specified for thread: " + threadName);
		}

		this.logFile = logFile;
		this.tasks = tasks;
		this.echo = echo;
		this.associatedValues = associatedValues;
		this.forbidNewThreadsOnFailure = forbidNewThreadsOnFailure;

		// The project is taken from the first task; all tasks are assumed to
		// belong to the same project — TODO confirm with callers.
		isThreadAwareLoggerRegistered = ThreadAwareLogger.isThreadAwareLoggerRegistered(this.tasks.get(0).getProject());

		// Capture the entity scope stack on the SPAWNING thread, to be
		// installed on this thread when it starts running.
		scopeStack = EntityManager.createScopeStackForNewThread();

		orcStep = step;
		orchestration = OrchestrationManager.getCurrentOrchestration();
	}

	/** @return the logical thread group name (never blank). */
	public String getGroup()
	{
		return threadGroup;
	}

	@Override
	public void run()
	{
		// Copy Ant local-property scope into this thread.
		LocalProperties.get(getProject()).copy();

		if (orchestration != null)
		{
			OrchestrationManager.joinOrchestration(orchestration);
		}

		// When attached to an orchestration step, the orchestration drives the
		// step and calls back into processStep(); otherwise run directly.
		if (orcStep != null && orchestration != null)
		{
			orchestration.processStep(orcStep, this);
			return;
		}

		processStep(orchestration, null);
	}

	/**
	 * Executes all tasks in order. On the first failure: optionally forbids new
	 * thread creation, records the error, logs it, and — only when invoked as an
	 * orchestration step (both args non-null) — rethrows as a BuildException so
	 * the orchestration sees the failure.
	 */
	@Override
	public void processStep(Orchestration orc, OrchestrationStep step)
	{
		Task currentTask = null;

		try
		{
			// Install the scope stack captured in the constructor, then drop
			// our reference so it is owned solely by the EntityManager.
			EntityManager.initScopeStackForNewThread(scopeStack);
			scopeStack = null;

			long startTime = System.currentTimeMillis();

			log("Thread execution started");

			for (Task task : tasks)
			{
				currentTask = task;
				task.perform();
			}

			log("Thread execution completed, duration: " + getDurationInfo(startTime));
		}
		catch (Throwable e)
		{
			if (forbidNewThreadsOnFailure)
			{
				ThreadManager.forbidNewThreadsCreation();
			}

			error = e;

			// NOTE(review): "occured" is a typo in the runtime log message;
			// left byte-identical here since it is program output.
			log(currentTask, "Thread exception occured:", e);

			// Rethrow only in orchestration mode; standalone threads just
			// record the error (see isFailed()/getError()).
			if (orc != null && step != null)
			{
				if (e instanceof BuildException)
				{
					throw (BuildException)e;
				}

				throw new BuildException(e);
			}
		}
		finally
		{
			scopeStack = null;
			EntityManager.destroyScopeStackForCompletedThread();
		}
	}

	/** @return true once any task has thrown. */
	public boolean isFailed()
	{
		return error != null;
	}

	// NOTE(review): deliberately widens Thread.isInterrupted() — a failed
	// thread reports itself as interrupted as well.
	public boolean isInterrupted()
	{
		return isFailed() || super.isInterrupted();
	}

	public boolean showEcho()
	{
		return echo;
	}

	/** @return the first failure, or null if all tasks succeeded (so far). */
	public Throwable getError()
	{
		return error;
	}

	public String getLogFile()
	{
		return logFile;
	}

	public boolean hasLogFile()
	{
		return logFile != null;
	}

	public String[] getAssociatedValues()
	{
		return associatedValues;
	}

	public String toString()
	{
		return getThreadDisplayName();
	}

	// Orders threads by display name (raw Comparable — see class note).
	public int compareTo(Object obj)
	{
		return this.toString().compareTo(obj.toString());
	}

	/**
	 * Appends a line to the per-thread log file, if one is configured.
	 * Best-effort: any I/O failure is deliberately swallowed so logging can
	 * never break task execution.
	 */
	public synchronized void writeToLogFile(String message)
	{
		if (logFile == null || logFile.trim().length() == 0)
		{
			return;
		}

		PrintWriter writer = null;

		try
		{
			// Open in append mode so concurrent runs accumulate, not truncate.
			writer = new PrintWriter(new FileOutputStream(logFile, true));
			writer.println(message);
		}
		catch (Throwable e)
		{
			// intentionally ignored — file logging is best-effort
		}
		finally
		{
			if (writer != null)
			{
				try
				{
					writer.close();
				}
				catch (Throwable e) {}
			}
		}
	}

	/** Logs to the Ant project and, when no thread-aware logger exists, to the file. */
	public void log(String message)
	{
		getProject().log(getLoggingPrefix() + message);

		if (!isThreadAwareLoggerRegistered)
		{
			writeToLogFile(message);
		}
	}

	// Error-level logging variant; includes the stack trace in the file log.
	private void log(Task task, String message, Throwable e)
	{
		if (task == null)
		{
			getProject().log(getLoggingPrefix() + message, e, Project.MSG_ERR);
		}
		else
		{
			getProject().log(task, getLoggingPrefix() + message, e, Project.MSG_ERR);
		}

		if (!isThreadAwareLoggerRegistered)
		{
			writeToLogFile(message + StringUtils.getStackTrace(e));
		}
	}

	// Falls back to the numeric thread id when no name was given.
	private String getThreadDisplayName()
	{
		if (getName() != null && getName().trim().length() != 0)
		{
			return getName();
		}

		return Long.toString(getId());
	}

	// Human-readable elapsed time; sub-second durations stay in milliseconds.
	private String getDurationInfo(long startTime)
	{
		long milliseconds = System.currentTimeMillis() - startTime;

		if (milliseconds < 1000)
		{
			return milliseconds + " milliseconds";
		}

		long seconds = milliseconds / 1000;

		return seconds + " seconds";
	}

	// All tasks share one project (taken from the first) — TODO confirm.
	private Project getProject()
	{
		return tasks.get(0).getProject();
	}

	// Prefix "<name> " only when the logger cannot distinguish threads itself.
	private String getLoggingPrefix()
	{
		return isThreadAwareLoggerRegistered ? "" : "<" + getThreadDisplayName() + "> ";
	}
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.trace.database;

import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;

import com.google.common.cache.RemovalNotification;
import com.google.common.collect.Range;

import db.DBHandle;
import generic.depends.DependentService;
import generic.depends.err.ServiceConstructionException;
import ghidra.framework.options.Options;
import ghidra.lifecycle.Internal;
import ghidra.program.model.address.*;
import ghidra.program.model.data.*;
import ghidra.program.model.lang.*;
import ghidra.program.util.DefaultLanguageService;
import ghidra.trace.database.address.DBTraceOverlaySpaceAdapter;
import ghidra.trace.database.address.TraceAddressFactory;
import ghidra.trace.database.bookmark.DBTraceBookmarkManager;
import ghidra.trace.database.breakpoint.DBTraceBreakpointManager;
import ghidra.trace.database.context.DBTraceRegisterContextManager;
import ghidra.trace.database.data.DBTraceDataSettingsAdapter;
import ghidra.trace.database.data.DBTraceDataTypeManager;
import ghidra.trace.database.language.DBTraceLanguageManager;
import ghidra.trace.database.listing.DBTraceCodeManager;
import ghidra.trace.database.listing.DBTraceCommentAdapter;
import ghidra.trace.database.memory.DBTraceMemoryManager;
import ghidra.trace.database.module.DBTraceModuleManager;
import ghidra.trace.database.module.DBTraceStaticMappingManager;
import ghidra.trace.database.program.DBTraceProgramView;
import ghidra.trace.database.program.DBTraceVariableSnapProgramView;
import ghidra.trace.database.property.DBTraceAddressPropertyManager;
import ghidra.trace.database.stack.DBTraceStackManager;
import ghidra.trace.database.symbol.*;
import ghidra.trace.database.target.DBTraceObjectManager;
import ghidra.trace.database.thread.DBTraceThreadManager;
import ghidra.trace.database.time.DBTraceTimeManager;
import ghidra.trace.model.Trace;
import ghidra.trace.model.memory.TraceMemoryRegion;
import ghidra.trace.util.TraceChangeManager;
import ghidra.trace.util.TraceChangeRecord;
import ghidra.util.*;
import ghidra.util.database.*;
import ghidra.util.datastruct.WeakValueHashMap;
import ghidra.util.exception.CancelledException;
import ghidra.util.exception.VersionException;
import ghidra.util.task.TaskMonitor;

// TODO: Need some subscription model to ensure record lifespans stay within lifespan of threads
// Applies to creation, and to setting end snap
// Also to deleting a thread altogether.
/**
 * Database-backed implementation of a {@link Trace}.
 *
 * The trace is composed of many {@code DBTraceManager}s, each constructed via a
 * {@code create...Manager} factory method. The {@code @DependentService} annotations
 * declare construction-order dependencies between managers; they are resolved
 * reflectively by the dependency framework during {@code init()} (inherited —
 * not visible in this file; TODO confirm).
 *
 * Thread-safety: reads/writes are guarded by the inherited {@code rwLock}
 * (see {@link #lockRead()} / {@link #lockWrite()}); the program-view caches
 * additionally use their own monitor locks.
 */
public class DBTrace extends DBCachedDomainObjectAdapter implements Trace, TraceChangeManager {
	// Keys of the "Trace Information" options block persisted with the database.
	protected static final String TRACE_INFO = "Trace Information";
	protected static final String NAME = "Name";
	protected static final String DATE_CREATED = "Date Created";
	protected static final String BASE_LANGUAGE = "Base Language";
	protected static final String BASE_COMPILER = "Base Compiler";
	protected static final String PLATFORM = "Platform";
	protected static final String EXECUTABLE_PATH = "Executable Location";

	protected static final int DB_TIME_INTERVAL = 500;
	protected static final int DB_BUFFER_SIZE = 1000;

	// NOTE: Using a list ensures they are invalidated in the same order as created
	// Useful since refreshing likely follows the same dependency graph and creation.
	protected List<DBTraceManager> managers = new ArrayList<>(20);

	// Managers wired up by the @DependentService framework (see class comment).
	@DependentService
	protected DBTraceAddressPropertyManager addressPropertyManager;
	@DependentService
	protected DBTraceBookmarkManager bookmarkManager;
	@DependentService
	protected DBTraceBreakpointManager breakpointManager;
	@DependentService
	protected DBTraceCodeManager codeManager;
	@DependentService
	protected DBTraceCommentAdapter commentAdapter;
	@DependentService
	protected DBTraceDataSettingsAdapter dataSettingsAdapter;
	@DependentService
	protected DBTraceDataTypeManager dataTypeManager;
	@DependentService
	protected DBTraceEquateManager equateManager;
	@DependentService
	protected DBTraceLanguageManager languageManager;
	@DependentService
	protected DBTraceMemoryManager memoryManager;
	@DependentService
	protected DBTraceModuleManager moduleManager;
	@DependentService
	protected DBTraceObjectManager objectManager;
	@DependentService
	protected DBTraceOverlaySpaceAdapter overlaySpaceAdapter;
	@DependentService
	protected DBTraceReferenceManager referenceManager;
	@DependentService
	protected DBTraceRegisterContextManager registerContextManager;
	@DependentService
	protected DBTraceStackManager stackManager;
	@DependentService
	protected DBTraceStaticMappingManager staticMappingManager;
	@DependentService
	protected DBTraceSymbolManager symbolManager;
	@DependentService
	protected DBTraceThreadManager threadManager;
	@DependentService
	protected DBTraceTimeManager timeManager;

	private final DBCachedObjectStoreFactory storeFactory;

	protected Language baseLanguage;
	protected CompilerSpec baseCompilerSpec;
	protected TraceAddressFactory baseAddressFactory;

	protected DBTraceChangeSet traceChangeSet;
	// Gates change-set recording; off while constructing, on afterwards.
	protected boolean recordChanges = false;

	// The canonical variable-snap view, created at snap 0 in the constructors.
	protected DBTraceVariableSnapProgramView programView;
	// Weak caches: views are dropped when no longer referenced by clients.
	protected Map<DBTraceVariableSnapProgramView, Void> programViews = new WeakHashMap<>();
	protected Map<Long, DBTraceProgramView> fixedProgramViews = new WeakValueHashMap<>();

	/**
	 * Creates a brand-new trace database.
	 *
	 * @param name trace name
	 * @param baseCompilerSpec compiler spec; "downgraded" to the language's own
	 *        spec so nothing program-specific seeps in
	 * @param consumer object registered as the consumer of this domain object
	 * @throws IOException on database error
	 * @throws LanguageNotFoundException if the language cannot be resolved
	 */
	public DBTrace(String name, CompilerSpec baseCompilerSpec, Object consumer)
			throws IOException, LanguageNotFoundException {
		super(new DBHandle(), DBOpenMode.CREATE, TaskMonitor.DUMMY, name, DB_TIME_INTERVAL,
			DB_BUFFER_SIZE, consumer);

		this.storeFactory = new DBCachedObjectStoreFactory(this);
		this.baseLanguage = baseCompilerSpec.getLanguage();
		// Need to "downgrade" the compiler spec, so nothing program-specific seeps in
		// TODO: Should there be a TraceCompilerSpec?
		this.baseCompilerSpec =
			baseLanguage.getCompilerSpecByID(baseCompilerSpec.getCompilerSpecID());
		this.baseAddressFactory =
			new TraceAddressFactory(this.baseLanguage, this.baseCompilerSpec);

		// Options and manager construction happen inside one transaction.
		try (UndoableTransaction tid = UndoableTransaction.start(this, "Create", false)) {
			initOptions(DBOpenMode.CREATE);
			init();
			tid.commit();
		}
		catch (VersionException | CancelledException e) {
			// Cannot happen in CREATE mode with a dummy monitor.
			throw new AssertionError(e);
		}
		catch (ServiceConstructionException e) {
			// Surface a missing language directly; anything else is a bug.
			e.unwrap(LanguageNotFoundException.class);
			throw new AssertionError(e);
		}
		changeSet = traceChangeSet = new DBTraceChangeSet();
		recordChanges = true;

		programView = createProgramView(0);
	}

	/**
	 * Opens an existing trace database.
	 *
	 * @param dbh handle to the existing database
	 * @param openMode open mode (e.g. READ_ONLY, UPDATE)
	 * @param monitor task monitor for the (possibly long) open/upgrade
	 * @param consumer object registered as the consumer of this domain object
	 */
	public DBTrace(DBHandle dbh, DBOpenMode openMode, TaskMonitor monitor, Object consumer)
			throws CancelledException, VersionException, IOException, LanguageNotFoundException {
		// Name is placeholder "Untitled" until initOptions reads the stored name.
		super(dbh, openMode, monitor, "Untitled", DB_TIME_INTERVAL, DB_BUFFER_SIZE, consumer);

		this.storeFactory = new DBCachedObjectStoreFactory(this);

		try {
			initOptions(openMode);
			init();
		}
		catch (ServiceConstructionException e) {
			e.unwrap(LanguageNotFoundException.class);
			throw new AssertionError(e);
		}
		changeSet = traceChangeSet = new DBTraceChangeSet();
		recordChanges = true;

		programView = createProgramView(0);
	}

	/**
	 * CREATE mode: persist name/date/language/compiler into the options.
	 * Any other mode: read them back and rebuild language, compiler spec,
	 * and address factory from the stored IDs.
	 */
	protected void initOptions(DBOpenMode openMode) throws IOException, CancelledException {
		Options traceInfo = getOptions(TRACE_INFO);
		if (openMode == DBOpenMode.CREATE) {
			traceInfo.setString(NAME, name);
			traceInfo.setDate(DATE_CREATED, new Date());
			traceInfo.setString(BASE_LANGUAGE, baseLanguage.getLanguageID().getIdAsString());
			traceInfo.setString(BASE_COMPILER,
				baseCompilerSpec.getCompilerSpecID().getIdAsString());
		}
		else {
			name = traceInfo.getString(NAME, "Unnamed?");
			baseLanguage = DefaultLanguageService.getLanguageService()
					.getLanguage(
						new LanguageID(traceInfo.getString(BASE_LANGUAGE, null)));
			baseCompilerSpec = baseLanguage.getCompilerSpecByID(
				new CompilerSpecID(traceInfo.getString(BASE_COMPILER, null)));
			baseAddressFactory = new TraceAddressFactory(baseLanguage, baseCompilerSpec);
		}
	}

	// Eviction callback for the fixed-view cache (debug logging only).
	protected void fixedProgramViewRemoved(RemovalNotification<Long, DBTraceProgramView> rn) {
		Msg.debug(this, "Dropped cached fixed view at snap=" + rn.getKey());
	}

	/** Asserts the address (if non-null) belongs to this trace's address factory. */
	@Internal
	public void assertValidAddress(Address pc) {
		if (pc == null) {
			return;
		}
		assertValidSpace(pc.getAddressSpace());
	}

	/**
	 * Asserts the space belongs to this trace's address factory.
	 * OTHER_SPACE is always allowed.
	 *
	 * @throws IllegalArgumentException if the space is foreign to this trace
	 */
	@Internal
	public void assertValidSpace(AddressSpace as) {
		if (as == AddressSpace.OTHER_SPACE) {
			return;
		}
		if (baseAddressFactory.getAddressSpace(as.getSpaceID()) != as) {
			throw new IllegalArgumentException(
				"AddressSpace '" + as + "' is not in this trace (language=" +
					getBaseLanguage() + ")");
		}
	}

	@Override
	public DBTraceChangeSet getChangeSet() {
		return traceChangeSet;
	}

	// Internal
	public DBCachedObjectStoreFactory getStoreFactory() {
		return storeFactory;
	}

	@Override
	public String getDescription() {
		return "Trace";
	}

	/**
	 * Creates a manager via {@code createManager} (inherited) and records it so
	 * {@link #clearCache(boolean)} can invalidate all managers in creation order.
	 */
	protected <T extends DBTraceManager> T createTraceManager(String managerName,
			ManagerSupplier<T> supplier) throws CancelledException, IOException {
		T manager = createManager(managerName, supplier);
		managers.add(manager);
		return manager;
	}

	// ---- @DependentService factories: parameters declare construction order ----

	@DependentService
	protected DBTraceAddressPropertyManager createAddressPropertyManager(
			DBTraceThreadManager threadManager) throws CancelledException, IOException {
		return createTraceManager("Address Property Manager",
			(openMode, monitor) -> new DBTraceAddressPropertyManager(dbh, openMode, rwLock,
				monitor, baseLanguage, this, threadManager));
	}

	@DependentService
	protected DBTraceBookmarkManager createBookmarkManager(DBTraceThreadManager threadManager)
			throws CancelledException, IOException {
		return createTraceManager("Bookmark Manager",
			(openMode, monitor) -> new DBTraceBookmarkManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager));
	}

	@DependentService
	protected DBTraceBreakpointManager createBreakpointManager(DBTraceThreadManager threadManager)
			throws CancelledException, IOException {
		return createTraceManager("Breakpoint Manager",
			(openMode, monitor) -> new DBTraceBreakpointManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager));
	}

	@DependentService
	protected DBTraceCodeManager createCodeManager(DBTraceThreadManager threadManager,
			DBTraceLanguageManager languageManager, DBTraceDataTypeManager dataTypeManager,
			DBTraceOverlaySpaceAdapter overlayAdapter,
			DBTraceReferenceManager referenceManager) throws CancelledException, IOException {
		return createTraceManager("Code Manager",
			(openMode, monitor) -> new DBTraceCodeManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager, languageManager, dataTypeManager,
				overlayAdapter, referenceManager));
	}

	@DependentService
	protected DBTraceCommentAdapter createCommentAdapter(DBTraceThreadManager threadManager)
			throws CancelledException, IOException {
		return createTraceManager("Comment Adapter",
			(openMode, monitor) -> new DBTraceCommentAdapter(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager));
	}

	@DependentService
	protected DBTraceDataSettingsAdapter createDataSettingsAdapter(
			DBTraceThreadManager threadManager) throws CancelledException, IOException {
		return createTraceManager("Data Settings Adapter",
			(openMode, monitor) -> new DBTraceDataSettingsAdapter(dbh, openMode, rwLock,
				monitor, baseLanguage, this, threadManager));
	}

	@DependentService
	protected DBTraceDataTypeManager createDataTypeManager()
			throws CancelledException, IOException {
		return createTraceManager("Data Type Manager",
			(openMode, monitor) -> new DBTraceDataTypeManager(dbh, openMode, rwLock, monitor,
				this));
	}

	@DependentService
	protected DBTraceEquateManager createEquateManager(DBTraceThreadManager threadManager)
			throws CancelledException, IOException {
		return createTraceManager("Equate Manager",
			(openMode, monitor) -> new DBTraceEquateManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager));
	}

	@DependentService
	protected DBTraceLanguageManager createLanguageManager()
			throws CancelledException, IOException {
		return createTraceManager("Language Manager",
			(openMode, monitor) -> new DBTraceLanguageManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this));
	}

	@DependentService
	protected DBTraceMemoryManager createMemoryManager(DBTraceThreadManager threadManager,
			DBTraceOverlaySpaceAdapter overlayAdapter) throws IOException, CancelledException {
		return createTraceManager("Memory Manager",
			(openMode, monitor) -> new DBTraceMemoryManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager, overlayAdapter));
	}

	@DependentService
	protected DBTraceModuleManager createModuleManager()
			throws CancelledException, IOException {
		return createTraceManager("Module Manager",
			(openMode, monitor) -> new DBTraceModuleManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this));
	}

	@DependentService
	protected DBTraceObjectManager createObjectManager()
			throws CancelledException, IOException {
		return createTraceManager("Object Manager",
			(openMode, monitor) -> new DBTraceObjectManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this));
	}

	@DependentService
	protected DBTraceOverlaySpaceAdapter createOverlaySpaceAdapter()
			throws CancelledException, IOException {
		return createTraceManager("Overlay Space Adapter",
			(openMode, monitor) -> new DBTraceOverlaySpaceAdapter(dbh, openMode, rwLock,
				monitor, this));
	}

	@DependentService
	protected DBTraceReferenceManager createReferenceManager(DBTraceThreadManager threadManager,
			DBTraceOverlaySpaceAdapter overlayAdapter) throws CancelledException, IOException {
		return createTraceManager("Reference Manager",
			(openMode, monitor) -> new DBTraceReferenceManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager, overlayAdapter));
	}

	@DependentService
	protected DBTraceRegisterContextManager createRegisterContextManager(
			DBTraceThreadManager threadManager, DBTraceLanguageManager languageManager)
			throws CancelledException, IOException {
		return createTraceManager("Context Manager",
			(openMode, monitor) -> new DBTraceRegisterContextManager(dbh, openMode, rwLock,
				monitor, baseLanguage, this, threadManager, languageManager));
	}

	@DependentService
	protected DBTraceStackManager createStackManager(DBTraceThreadManager threadManager,
			DBTraceOverlaySpaceAdapter overlayAdapter) throws CancelledException, IOException {
		return createTraceManager("Stack Manager",
			(openMode, monitor) -> new DBTraceStackManager(dbh, openMode, rwLock, monitor,
				this, threadManager, overlayAdapter));
	}

	@DependentService
	protected DBTraceStaticMappingManager createStaticMappingManager(
			DBTraceOverlaySpaceAdapter overlayAdapter) throws CancelledException, IOException {
		return createTraceManager("Static Mapping Manager",
			(openMode, monitor) -> new DBTraceStaticMappingManager(dbh, openMode, rwLock,
				monitor, this, overlayAdapter));
	}

	@DependentService
	protected DBTraceSymbolManager createSymbolManager(DBTraceThreadManager threadManager,
			DBTraceDataTypeManager dataTypeManager, DBTraceOverlaySpaceAdapter overlayAdapter)
			throws CancelledException, IOException {
		return createTraceManager("Symbol Manager",
			(openMode, monitor) -> new DBTraceSymbolManager(dbh, openMode, rwLock, monitor,
				baseLanguage, this, threadManager, dataTypeManager, overlayAdapter));
	}

	@DependentService
	protected DBTraceThreadManager createThreadManager(DBTraceObjectManager objectManager)
			throws IOException, CancelledException {
		return createTraceManager("Thread Manager",
			(openMode, monitor) -> new DBTraceThreadManager(dbh, openMode, rwLock, monitor,
				this, objectManager));
	}

	@DependentService
	protected DBTraceTimeManager createTimeManager(DBTraceThreadManager threadManager)
			throws IOException, CancelledException {
		return createTraceManager("Time Manager",
			(openMode, monitor) -> new DBTraceTimeManager(dbh, openMode, rwLock, monitor,
				this, threadManager));
	}

	// ---- Simple accessors ----

	@Override
	public Language getBaseLanguage() {
		return baseLanguage;
	}

	@Override
	public CompilerSpec getBaseCompilerSpec() {
		// TODO: Incorporate guest specs into guest languages?
		return baseCompilerSpec;
	}

	protected void setTraceUserData(DBTraceUserData traceUserData) {
		// TODO:
	}

	@Override
	// Make accessible in this package
	protected void setChanged(boolean b) {
		super.setChanged(b);
	}

	@Override
	public boolean isChangeable() {
		return true;
	}

	@Override
	public AddressFactory getBaseAddressFactory() {
		return baseAddressFactory;
	}

	@Internal
	public TraceAddressFactory getInternalAddressFactory() {
		return baseAddressFactory;
	}

	@Internal
	public DBTraceAddressPropertyManager getAddressPropertyManager() {
		return addressPropertyManager;
	}

	@Override
	public DBTraceBookmarkManager getBookmarkManager() {
		return bookmarkManager;
	}

	@Override
	public DBTraceBreakpointManager getBreakpointManager() {
		return breakpointManager;
	}

	@Override
	public DBTraceCodeManager getCodeManager() {
		return codeManager;
	}

	@Internal
	public DBTraceCommentAdapter getCommentAdapter() {
		return commentAdapter;
	}

	@Internal
	public DBTraceDataSettingsAdapter getDataSettingsAdapter() {
		return dataSettingsAdapter;
	}

	@Override
	public DBTraceDataTypeManager getDataTypeManager() {
		return dataTypeManager;
	}

	@Override
	public DBTraceEquateManager getEquateManager() {
		return equateManager;
	}

	@Override
	public DBTraceLanguageManager getLanguageManager() {
		return languageManager;
	}

	@Override
	public DBTraceMemoryManager getMemoryManager() {
		return memoryManager;
	}

	@Override
	public DBTraceModuleManager getModuleManager() {
		return moduleManager;
	}

	@Override
	public DBTraceObjectManager getObjectManager() {
		return objectManager;
	}

	@Internal
	public DBTraceOverlaySpaceAdapter getOverlaySpaceAdapter() {
		return overlaySpaceAdapter;
	}

	@Override
	public DBTraceReferenceManager getReferenceManager() {
		return referenceManager;
	}

	@Override
	public DBTraceRegisterContextManager getRegisterContextManager() {
		return registerContextManager;
	}

	@Override
	public DBTraceStackManager getStackManager() {
		return stackManager;
	}

	@Override
	public DBTraceStaticMappingManager getStaticMappingManager() {
		return staticMappingManager;
	}

	@Override
	public DBTraceSymbolManager getSymbolManager() {
		return symbolManager;
	}

	@Override
	public DBTraceThreadManager getThreadManager() {
		return threadManager;
	}

	@Override
	public DBTraceTimeManager getTimeManager() {
		return timeManager;
	}

	// ---- Change notification ----

	/** Marks the domain object changed and broadcasts the trace event. */
	@Override
	public void setChanged(TraceChangeRecord<?, ?> event) {
		changed = true;
		fireEvent(event);
	}

	/**
	 * Gets (or lazily creates and caches) a program view fixed at the given snap.
	 * The cache is weak-valued, so unused views may be dropped
	 * (see {@link #fixedProgramViewRemoved}).
	 */
	@Override
	// NOTE: addListener synchronizes on this and might generate callbacks immediately
	public synchronized DBTraceProgramView getFixedProgramView(long snap) {
		// NOTE: The new viewport will need to read from the time manager during init
		try (LockHold hold = lockRead()) {
			synchronized (fixedProgramViews) {
				DBTraceProgramView view = fixedProgramViews.computeIfAbsent(snap, t -> {
					Msg.debug(this, "Creating fixed view at snap=" + snap);
					return new DBTraceProgramView(this, snap, baseCompilerSpec);
				});
				return view;
			}
		}
	}

	/**
	 * Creates a new variable-snap view starting at the given snap and registers
	 * it (weakly) so memory-update broadcasts reach it (see {@link #allViews}).
	 */
	@Override
	// NOTE: Ditto getFixedProgramView
	public synchronized DBTraceVariableSnapProgramView createProgramView(long snap) {
		// NOTE: The new viewport will need to read from the time manager during init
		try (LockHold hold = lockRead()) {
			synchronized (programViews) {
				DBTraceVariableSnapProgramView view =
					new DBTraceVariableSnapProgramView(this, snap, baseCompilerSpec);
				programViews.put(view, null);
				return view;
			}
		}
	}

	@Override
	public DBTraceVariableSnapProgramView getProgramView() {
		return programView;
	}

	@Override
	public LockHold lockRead() {
		return LockHold.lock(rwLock.readLock());
	}

	@Override
	public LockHold lockWrite() {
		return LockHold.lock(rwLock.writeLock());
	}

	// ---- Data-type / category change hooks: record into the change set
	//      (when enabled) and broadcast a trace event ----

	public void sourceArchiveChanged(UniversalID sourceArchiveID) {
		if (recordChanges) {
			traceChangeSet.sourceArchiveChanged(sourceArchiveID.getValue());
		}
		setChanged(
			new TraceChangeRecord<>(TraceSourceArchiveChangeType.CHANGED, null, sourceArchiveID));
	}

	public void sourceArchiveAdded(UniversalID sourceArchiveID) {
		if (recordChanges) {
			traceChangeSet.sourceArchiveAdded(sourceArchiveID.getValue());
		}
		setChanged(
			new TraceChangeRecord<>(TraceSourceArchiveChangeType.ADDED, null, sourceArchiveID));
	}

	public void dataTypeChanged(long changedID, DataType changedType) {
		if (recordChanges) {
			traceChangeSet.dataTypeChanged(changedID);
		}
		setChanged(
			new TraceChangeRecord<>(TraceDataTypeChangeType.CHANGED, null, changedID,
				changedType));
	}

	public void dataTypeAdded(long addedID, DataType addedType) {
		if (recordChanges) {
			traceChangeSet.dataTypeAdded(addedID);
		}
		setChanged(
			new TraceChangeRecord<>(TraceDataTypeChangeType.ADDED, null, addedID, addedType));
	}

	public void dataTypeReplaced(long replacedID, DataTypePath replacedPath,
			DataTypePath newPath) {
		if (recordChanges) {
			traceChangeSet.dataTypeChanged(replacedID);
		}
		setChanged(new TraceChangeRecord<>(TraceDataTypeChangeType.REPLACED, null, replacedID,
			replacedPath, newPath));
	}

	public void dataTypeMoved(long movedID, DataTypePath oldPath, DataTypePath newPath) {
		if (recordChanges) {
			traceChangeSet.dataTypeChanged(movedID);
		}
		setChanged(new TraceChangeRecord<>(TraceDataTypeChangeType.MOVED, null, movedID,
			oldPath, newPath));
	}

	public void dataTypeNameChanged(long renamedID, String oldName, String newName) {
		if (recordChanges) {
			traceChangeSet.dataTypeChanged(renamedID);
		}
		setChanged(new TraceChangeRecord<>(TraceDataTypeChangeType.RENAMED, null, renamedID,
			oldName, newName));
	}

	public void dataTypeDeleted(long deletedID, DataTypePath deletedPath) {
		if (recordChanges) {
			traceChangeSet.dataTypeChanged(deletedID);
		}
		setChanged(new TraceChangeRecord<>(TraceDataTypeChangeType.DELETED, null, deletedID,
			deletedPath, null));
	}

	public void categoryAdded(long addedID, Category addedCategory) {
		if (recordChanges) {
			traceChangeSet.categoryAdded(addedID);
		}
		setChanged(
			new TraceChangeRecord<>(TraceCategoryChangeType.ADDED, null, addedID,
				addedCategory));
	}

	public void categoryMoved(long movedID, CategoryPath oldPath, CategoryPath newPath) {
		if (recordChanges) {
			traceChangeSet.categoryChanged(movedID);
		}
		setChanged(new TraceChangeRecord<>(TraceCategoryChangeType.MOVED, null, movedID,
			oldPath, newPath));
	}

	public void categoryRenamed(long renamedID, String oldName, String newName) {
		if (recordChanges) {
			traceChangeSet.categoryChanged(renamedID);
		}
		setChanged(new TraceChangeRecord<>(TraceCategoryChangeType.RENAMED, null, renamedID,
			oldName, newName));
	}

	public void categoryDeleted(long deletedID, CategoryPath deletedPath) {
		if (recordChanges) {
			traceChangeSet.categoryChanged(deletedID);
		}
		setChanged(new TraceChangeRecord<>(TraceCategoryChangeType.DELETED, null, deletedID,
			deletedPath, null));
	}

	/** Invalidates every manager's cache, in creation order, under the write lock. */
	@Override
	protected void clearCache(boolean all) {
		try (LockHold hold = LockHold.lock(rwLock.writeLock())) {
			for (DBTraceManager m : managers) {
				m.invalidateCache(all);
			}
		}
	}

	// TODO: Platform option?
	public void setExecutablePath(String path) {
		getOptions(TRACE_INFO).setString(EXECUTABLE_PATH, path);
	}

	public String getExecutablePath() {
		return getOptions(TRACE_INFO).getString(EXECUTABLE_PATH, null);
	}

	public Date getCreationDate() {
		return getOptions(TRACE_INFO).getDate(DATE_CREATED, new Date(0));
	}

	/**
	 * Applies an action to every live program view (variable-snap and fixed).
	 * Views are snapshotted into a local collection first so the action runs
	 * outside the cache monitors.
	 */
	protected void allViews(Consumer<DBTraceProgramView> action) {
		Collection<DBTraceProgramView> all = new ArrayList<>();
		synchronized (programViews) {
			all.addAll(programViews.keySet());
		}
		synchronized (fixedProgramViews) {
			all.addAll(fixedProgramViews.values());
		}
		for (DBTraceProgramView view : all) {
			action.accept(view);
		}
	}

	// ---- Memory-region/space update broadcasts to all live views ----

	public void updateViewsAddRegionBlock(TraceMemoryRegion region) {
		allViews(v -> v.updateMemoryAddRegionBlock(region));
	}

	public void updateViewsChangeRegionBlockName(TraceMemoryRegion region) {
		allViews(v -> v.updateMemoryChangeRegionBlockName(region));
	}

	public void updateViewsChangeRegionBlockFlags(TraceMemoryRegion region,
			Range<Long> lifespan) {
		allViews(v -> v.updateMemoryChangeRegionBlockFlags(region, lifespan));
	}

	public void updateViewsChangeRegionBlockRange(TraceMemoryRegion region,
			AddressRange oldRange, AddressRange newRange) {
		allViews(v -> v.updateMemoryChangeRegionBlockRange(region, oldRange, newRange));
	}

	public void updateViewsChangeRegionBlockLifespan(TraceMemoryRegion region,
			Range<Long> oldLifespan, Range<Long> newLifespan) {
		allViews(v -> v.updateMemoryChangeRegionBlockLifespan(region, oldLifespan, newLifespan));
	}

	public void updateViewsDeleteRegionBlock(TraceMemoryRegion region) {
		allViews(v -> v.updateMemoryDeleteRegionBlock(region));
	}

	public void updateViewsAddSpaceBlock(AddressSpace space) {
		allViews(v -> v.updateMemoryAddSpaceBlock(space));
	}

	public void updateViewsDeleteSpaceBlock(AddressSpace space) {
		allViews(v -> v.updateMemoryDeleteSpaceBlock(space));
	}

	public void updateViewsRefreshBlocks() {
		allViews(v -> v.updateMemoryRefreshBlocks());
	}
}
/* * Yet Another UserAgent Analyzer * Copyright (C) 2013-2022 Niels Basjes * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.basjes.parse.useragent.utils; import com.esotericsoftware.kryo.Kryo; import nl.basjes.collections.prefixmap.StringPrefixMap; import nl.basjes.parse.useragent.AbstractUserAgentAnalyzerDirect; import nl.basjes.parse.useragent.AgentField; import nl.basjes.parse.useragent.UserAgent; import nl.basjes.parse.useragent.analyze.Analyzer; import nl.basjes.parse.useragent.analyze.Matcher; import nl.basjes.parse.useragent.analyze.MatcherAction; import nl.basjes.parse.useragent.analyze.MatcherExtractAction; import nl.basjes.parse.useragent.analyze.MatcherFailIfFoundAction; import nl.basjes.parse.useragent.analyze.MatcherList; import nl.basjes.parse.useragent.analyze.MatcherRequireAction; import nl.basjes.parse.useragent.analyze.MatcherVariableAction; import nl.basjes.parse.useragent.analyze.MatchesList; import nl.basjes.parse.useragent.analyze.WordRangeVisitor; import nl.basjes.parse.useragent.analyze.treewalker.TreeExpressionEvaluator; import nl.basjes.parse.useragent.analyze.treewalker.steps.WalkList; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepContains; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepDefaultIfNull; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepEndsWith; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepEquals; import 
nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepIsInSet; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepIsNotInSet; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepIsNull; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepNotContains; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepNotEquals; import nl.basjes.parse.useragent.analyze.treewalker.steps.compare.StepStartsWith; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepIsInLookupContains; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepIsInLookupPrefix; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepIsNotInLookupContains; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepIsNotInLookupPrefix; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepLookup; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepLookupContains; import nl.basjes.parse.useragent.analyze.treewalker.steps.lookup.StepLookupPrefix; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepBackToFull; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepCleanVersion; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepConcat; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepConcatPostfix; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepConcatPrefix; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepExtractBrandFromUrl; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepNormalizeBrand; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepReplaceString; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepSegmentRange; import nl.basjes.parse.useragent.analyze.treewalker.steps.value.StepWordRange; import nl.basjes.parse.useragent.analyze.treewalker.steps.walk.StepDown; import 
nl.basjes.parse.useragent.analyze.treewalker.steps.walk.StepNext;
import nl.basjes.parse.useragent.analyze.treewalker.steps.walk.StepNextN;
import nl.basjes.parse.useragent.analyze.treewalker.steps.walk.StepPrev;
import nl.basjes.parse.useragent.analyze.treewalker.steps.walk.StepPrevN;
import nl.basjes.parse.useragent.analyze.treewalker.steps.walk.StepUp;
import nl.basjes.parse.useragent.calculate.CalculateAgentClass;
import nl.basjes.parse.useragent.calculate.CalculateAgentEmail;
import nl.basjes.parse.useragent.calculate.CalculateAgentName;
import nl.basjes.parse.useragent.calculate.CalculateDeviceBrand;
import nl.basjes.parse.useragent.calculate.CalculateDeviceName;
import nl.basjes.parse.useragent.calculate.CalculateNetworkType;
import nl.basjes.parse.useragent.calculate.ConcatNONDuplicatedCalculator;
import nl.basjes.parse.useragent.calculate.FieldCalculator;
import nl.basjes.parse.useragent.calculate.MajorVersionCalculator;
import nl.basjes.parse.useragent.config.AnalyzerConfig;
import nl.basjes.parse.useragent.config.MatcherConfig;
import nl.basjes.parse.useragent.config.TestCase;
import nl.basjes.parse.useragent.parse.UserAgentTreeFlattener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.TreeMap;
import java.util.TreeSet;

/**
 * Utility holder for the Kryo configuration of the analyzer.
 * All classes that can appear in a serialized analyzer are registered here.
 *
 * NOTE: the registration ORDER determines the numeric class ids Kryo assigns,
 * so existing registrations must not be reordered without breaking
 * compatibility with previously serialized data.
 */
public final class KryoConfig {

    private KryoConfig() {
        // Utility class: no instances.
    }

    /**
     * This is used to configure the provided Kryo instance if Kryo serialization is desired.
     * @param kryo The instance of com.esotericsoftware.kryo.Kryo that needs to be configured.
     */
    public static void configureKryo(Kryo kryo) {
        // Since kryo 5.0.0-RC3 the default is to not use references.
        // With Yauaa you will go into a StackOverflow if you do not support references in Kryo because of
        // circular references in the data structures.
        // See https://github.com/EsotericSoftware/kryo/issues/617
        //     https://github.com/EsotericSoftware/kryo/issues/789
        kryo.setReferences(true);

        // Let Kryo output a lot of debug information
        // Log.DEBUG();
        // kryo.setRegistrationRequired(true);
        // kryo.setWarnUnregisteredClasses(true);

        // Register the Java classes we need
        kryo.register(Collections.emptySet().getClass());
        kryo.register(Collections.emptyList().getClass());
        kryo.register(Collections.emptyMap().getClass());
        kryo.register(ArrayList.class);
        kryo.register(LinkedHashSet.class);
        kryo.register(LinkedHashMap.class);
        kryo.register(HashSet.class);
        kryo.register(HashMap.class);
        kryo.register(TreeSet.class);
        kryo.register(TreeMap.class);

        // This class
        kryo.register(AbstractUserAgentAnalyzerDirect.class);

        // The config classes
        kryo.register(MatcherConfig.class);
        kryo.register(MatcherConfig.ConfigLine.class);
        kryo.register(MatcherConfig.ConfigLine.Type.class);
        kryo.register(AnalyzerConfig.class);
        // NOTE: a second kryo.register(MatcherConfig.class) used to appear here.
        // Registering an already-registered class is a no-op in Kryo (the existing
        // registration is returned and no new id is consumed), so the duplicate
        // was removed without affecting the assigned class ids.
        kryo.register(TestCase.class);

        // All classes we have under this.
        kryo.register(Analyzer.class);
        kryo.register(UserAgent.ImmutableUserAgent.class);
        kryo.register(AgentField.ImmutableAgentField.class);
        kryo.register(UserAgent.MutableUserAgent.class);
        kryo.register(AgentField.MutableAgentField.class);
        kryo.register(Matcher.class);
        kryo.register(Matcher.MatcherDemotedExtractAction.class);
        kryo.register(MatcherAction.class);
        kryo.register(MatcherList.class);
        kryo.register(MatchesList.class);
        kryo.register(MatcherExtractAction.class);
        kryo.register(MatcherVariableAction.class);
        kryo.register(MatcherRequireAction.class);
        kryo.register(MatcherFailIfFoundAction.class);
        kryo.register(WordRangeVisitor.Range.class);
        kryo.register(CalculateAgentEmail.class);
        kryo.register(CalculateAgentName.class);
        kryo.register(CalculateAgentClass.class);
        kryo.register(CalculateDeviceBrand.class);
        kryo.register(CalculateDeviceName.class);
        kryo.register(CalculateNetworkType.class);
        kryo.register(ConcatNONDuplicatedCalculator.class);
        kryo.register(FieldCalculator.class);
        kryo.register(MajorVersionCalculator.class);
        kryo.register(UserAgentTreeFlattener.class);
        kryo.register(TreeExpressionEvaluator.class);
        kryo.register(WalkList.class);
        kryo.register(StepContains.class);
        kryo.register(StepNotContains.class);
        kryo.register(StepDefaultIfNull.class);
        kryo.register(StepEndsWith.class);
        kryo.register(StepEquals.class);
        kryo.register(StepIsInSet.class);
        kryo.register(StepIsNotInSet.class);
        kryo.register(StepIsNull.class);
        kryo.register(StepNotEquals.class);
        kryo.register(StepStartsWith.class);
        kryo.register(StepIsInLookupContains.class);
        kryo.register(StepIsNotInLookupContains.class);
        kryo.register(StepIsInLookupPrefix.class);
        kryo.register(StepIsNotInLookupPrefix.class);
        kryo.register(StepLookup.class);
        kryo.register(StepLookupContains.class);
        kryo.register(StepLookupPrefix.class);
        kryo.register(StepBackToFull.class);
        kryo.register(StepCleanVersion.class);
        kryo.register(StepConcat.class);
        kryo.register(StepConcatPostfix.class);
        kryo.register(StepConcatPrefix.class);
        kryo.register(StepNormalizeBrand.class);
        kryo.register(StepExtractBrandFromUrl.class);
        kryo.register(StepReplaceString.class);
        kryo.register(StepSegmentRange.class);
        kryo.register(StepWordRange.class);
        kryo.register(StepDown.class);
        kryo.register(StepNext.class);
        kryo.register(StepNextN.class);
        kryo.register(StepPrev.class);
        kryo.register(StepPrevN.class);
        kryo.register(StepUp.class);

        StringPrefixMap.configureKryo(kryo);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jorphan.gui;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import javax.swing.table.DefaultTableModel;

import org.apache.jorphan.reflect.Functor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The ObjectTableModel is a TableModel whose rows are objects;
 * columns are defined as Functors on the object.
 */
public class ObjectTableModel extends DefaultTableModel {

    private static final Logger log = LoggerFactory.getLogger(ObjectTableModel.class);

    private static final long serialVersionUID = 240L;

    // Row data: one arbitrary object per table row.
    private transient ArrayList<Object> objects = new ArrayList<>();

    // Column names, editor classes, and per-column accessor/mutator Functors.
    // All lists are indexed by column and expected to have the same length
    // (mismatches are only warned about in the constructor, not rejected).
    private transient List<String> headers = new ArrayList<>();

    private transient ArrayList<Class<?>> classes = new ArrayList<>();

    private transient ArrayList<Functor> readFunctors = new ArrayList<>();

    private transient ArrayList<Functor> writeFunctors = new ArrayList<>();

    private transient Class<?> objectClass = null; // if provided

    private transient boolean cellEditable = true;

    /**
     * The ObjectTableModel is a TableModel whose rows are objects;
     * columns are defined as Functors on the object.
     *
     * @param headers - Column names
     * @param _objClass - Object class that will be used
     * @param readFunctors - used to get the values
     * @param writeFunctors - used to set the values
     * @param editorClasses - class for each column
     */
    public ObjectTableModel(String[] headers, Class<?> _objClass, Functor[] readFunctors, Functor[] writeFunctors, Class<?>[] editorClasses) {
        this(headers, readFunctors, writeFunctors, editorClasses);
        this.objectClass=_objClass;
    }

    /**
     * The ObjectTableModel is a TableModel whose rows are objects;
     * columns are defined as Functors on the object.
     *
     * @param headers - Column names
     * @param _objClass - Object class that will be used
     * @param readFunctors - used to get the values
     * @param writeFunctors - used to set the values
     * @param editorClasses - class for each column
     * @param cellEditable - if cell must editable (false to allow double click on cell)
     */
    public ObjectTableModel(String[] headers, Class<?> _objClass, Functor[] readFunctors, Functor[] writeFunctors, Class<?>[] editorClasses, boolean cellEditable) {
        this(headers, readFunctors, writeFunctors, editorClasses);
        this.objectClass=_objClass;
        this.cellEditable = cellEditable;
    }

    /**
     * The ObjectTableModel is a TableModel whose rows are objects;
     * columns are defined as Functors on the object.
     *
     * @param headers - Column names
     * @param readFunctors - used to get the values
     * @param writeFunctors - used to set the values
     * @param editorClasses - class for each column
     */
    public ObjectTableModel(String[] headers, Functor[] readFunctors, Functor[] writeFunctors, Class<?>[] editorClasses) {
        this.headers.addAll(Arrays.asList(headers));
        this.classes.addAll(Arrays.asList(editorClasses));
        this.readFunctors = new ArrayList<>(Arrays.asList(readFunctors));
        this.writeFunctors = new ArrayList<>(Arrays.asList(writeFunctors));

        // Sanity-check the column configuration; mismatches are logged as
        // warnings only, so the model is still usable afterwards.
        int numHeaders = headers.length;

        int numClasses = classes.size();
        if (numClasses != numHeaders){
            log.warn("Header count={} but classes count={}", numHeaders, numClasses);
        }

        // Functor count = 0 is handled specially
        int numWrite = writeFunctors.length;
        if (numWrite > 0 && numWrite != numHeaders){
            log.warn("Header count={} but writeFunctor count={}", numHeaders, numWrite);
        }

        int numRead = readFunctors.length;
        if (numRead > 0 && numRead != numHeaders){
            log.warn("Header count={} but readFunctor count={}", numHeaders, numRead);
        }
    }

    // Called by Java serialization on deserialization; re-creates the
    // transient collections (row/column state is NOT restored, only emptied).
    private Object readResolve() {
        objects = new ArrayList<>();
        headers = new ArrayList<>();
        classes = new ArrayList<>();
        readFunctors = new ArrayList<>();
        writeFunctors = new ArrayList<>();
        return this;
    }

    // Iterator over the row objects (view backed by the internal list).
    public Iterator<?> iterator() {
        return objects.iterator();
    }

    // Removes all rows and notifies listeners of a full data change.
    public void clearData() {
        objects.clear();
        super.fireTableDataChanged();
    }

    // Appends a row object; if an objectClass was supplied at construction,
    // the value must be assignable to it (IllegalArgumentException otherwise).
    public void addRow(Object value) {
        log.debug("Adding row value: {}", value);
        if (objectClass != null) {
            final Class<?> valueClass = value.getClass();
            if (!objectClass.isAssignableFrom(valueClass)){
                throw new IllegalArgumentException("Trying to add class: "+valueClass.getName()
                        +"; expecting class: "+objectClass.getName());
            }
        }
        objects.add(value);
        super.fireTableRowsInserted(objects.size() - 1, objects.size() - 1);
    }

    // Inserts a row at the given index.
    // NOTE(review): unlike addRow, this does NOT validate against objectClass.
    public void insertRow(Object value, int index) {
        objects.add(index, value);
        super.fireTableRowsInserted(index, index);
    }

    /** {@inheritDoc} */
    @Override
    public int getColumnCount() {
        return headers.size();
    }

    /** {@inheritDoc} */
    @Override
    public String getColumnName(int col) {
        return headers.get(col);
    }

    /** {@inheritDoc} */
    @Override
    public int getRowCount() {
        // objects can be null when DefaultTableModel's constructor calls back
        // into this method before our field initializers have run.
        if (objects == null) {
            return 0;
        }
        return objects.size();
    }

    /** {@inheritDoc} */
    @Override
    public Object getValueAt(int row, int col) {
        log.debug("Getting row value");
        Object value = objects.get(row);
        // Single-column model without a read functor: the row object itself
        // is the cell value.
        if(headers.size() == 1 && col >= readFunctors.size()) {
            return value;
        }
        Functor getMethod = readFunctors.get(col);
        if (getMethod != null && value != null) {
            return getMethod.invoke(value);
        }
        return null;
    }

    /** {@inheritDoc} */
    @Override
    public boolean isCellEditable(int arg0, int arg1) {
        // Uniform editability for the whole table, set at construction time.
        return cellEditable;
    }

    /** {@inheritDoc} */
    @Override
    public void moveRow(int start, int end, int to) {
        // Move rows [start, end) to position 'to' by cutting and re-inserting.
        List<Object> subList = objects.subList(start, end);
        List<Object> backup = new ArrayList<>(subList);
        subList.clear(); // also removes the range from 'objects' (subList is a view)
        objects.addAll(to, backup);
        super.fireTableDataChanged();
    }

    /** {@inheritDoc} */
    @Override
    public void removeRow(int row) {
        objects.remove(row);
        super.fireTableRowsDeleted(row, row);
    }

    /** {@inheritDoc} */
    @Override
    public void setValueAt(Object cellValue, int row, int col) {
        if (row < objects.size()) {
            Object value = objects.get(row);
            if (col < writeFunctors.size()) {
                Functor setMethod = writeFunctors.get(col);
                if (setMethod != null) {
                    setMethod.invoke(value, new Object[] { cellValue });
                    // NOTE(review): fires a full data-changed event rather than
                    // fireTableCellUpdated(row, col); listeners see a complete refresh.
                    super.fireTableDataChanged();
                }
            }
            else if(headers.size() == 1) {
                // Single-column model without write functors: replace the row object.
                objects.set(row,cellValue);
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public Class<?> getColumnClass(int arg0) {
        return classes.get(arg0);
    }

    /**
     * Check all registered functors.
     * <p>
     * <b>** only for use in unit test code **</b>
     * </p>
     *
     * @param _value - an instance of the table model row data item
     * (if null, use the class passed to the constructor).
     *
     * @param caller - class of caller.
     *
     * @return false if at least one Functor cannot be found.
     */
    @SuppressWarnings("deprecation")
    public boolean checkFunctors(Object _value, Class<?> caller){
        Object value;
        if (_value == null && objectClass != null) {
            try {
                value = objectClass.getDeclaredConstructor().newInstance();
            } catch (ReflectiveOperationException e) {
                log.error("Cannot create instance of class {}", objectClass.getName(),e);
                return false;
            }
        } else {
            value = _value;
        }
        boolean status = true;
        for(int i=0;i<getColumnCount();i++){
            Functor setMethod = writeFunctors.get(i);
            if (setMethod != null && !setMethod.checkMethod(value,getColumnClass(i))) {
                status=false;
                log.warn("{} is attempting to use nonexistent {}", caller.getName(), setMethod);
            }
            Functor getMethod = readFunctors.get(i);
            if (getMethod != null && !getMethod.checkMethod(value)) {
                status=false;
                log.warn("{} is attempting to use nonexistent {}", caller.getName(), getMethod);
            }
        }
        return status;
    }

    /**
     * @return Object (List of Object)
     */
    public Object getObjectList() { // used by TableEditor
        // NOTE(review): returns the internal (mutable) list, not a copy.
        return objects;
    }

    /**
     * @return List of Object
     */
    public List<Object> getObjectListAsList() {
        // NOTE(review): also exposes the internal list directly.
        return objects;
    }

    // Replaces all rows with the given values (fires events per addRow).
    public void setRows(Iterable<?> rows) { // used by TableEditor
        clearData();
        for(Object val : rows) {
            addRow(val);
        }
    }
}
package org.flowninja.persistence.mongodb.services;

import static org.fest.assertions.api.Assertions.assertThat;

import java.util.HashMap;

import org.bson.types.Binary;
import org.fest.assertions.data.MapEntry;
import org.flowninja.persistence.generic.types.IOAuth2Authentication;
import org.flowninja.persistence.generic.types.IOAuth2Request;
import org.flowninja.persistence.generic.types.impl.OAuth2AuthenticationImpl;
import org.flowninja.persistence.generic.types.impl.OAuth2RequestImpl;
import org.flowninja.persistence.mongodb.data.MongoOAuth2Authentication;
import org.flowninja.persistence.mongodb.repositories.IMongoOAuth2AuthenticationRepository;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * Integration tests for the Mongo OAuth2 authentication persistence manager.
 *
 * Each "persist*" test builds an {@link IOAuth2Authentication}, persists it and
 * verifies the stored {@link MongoOAuth2Authentication} document field by field;
 * each "restore*" test does the reverse mapping. Fields that were not populated
 * are expected to come back as null.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes=MongoTestConfig.class)
public class MongoOAuth2AuthenticationPersistenceManagerTest {

    @Autowired
    private MongoOAuth2AuthenticationPersistenceManager persistence;

    @Autowired
    private IMongoOAuth2AuthenticationRepository repository;

    @Test
    public void persistBasicAuthentication() {
        IOAuth2Authentication auth = basicAuthentication();

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    // Renamed from "persistuthenticationSimpleFields" (typo: missing 'A').
    @Test
    public void persistAuthenticationSimpleFields() {
        IOAuth2Authentication auth = basicAuthentication();

        auth.getStoredRequest().setRedirectUri("http://foo.bar/boo");

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isEqualTo("http://foo.bar/boo");
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void persistAuthenticationWithUser() {
        IOAuth2Authentication auth = basicAuthentication();

        byte[] data = createData(0);
        auth.setAuthentication(data);

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isEqualTo(new Binary(data));
    }

    @Test
    public void persistAuthenticationWithExtensions() {
        IOAuth2Authentication auth = basicAuthentication();

        byte[] dataOne = createData(1);
        byte[] dataTwo = createData(2);

        auth.getStoredRequest().setExtensions(new HashMap<String, byte[]>());
        auth.getStoredRequest().getExtensions().put("one", dataOne);
        auth.getStoredRequest().getExtensions().put("two", dataTwo);

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions())
                .contains(MapEntry.entry("one", new Binary(dataOne)))
                .contains(MapEntry.entry("two", new Binary(dataTwo)))
                .hasSize(2);
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void persistBasicAuthenticationWithGrantedAuthorities() {
        IOAuth2Authentication auth = basicAuthentication();

        auth.getStoredRequest().setGrantedAuthorities(Sets.newHashSet("one", "two"));

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).containsOnly("one", "two");
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void persistBasicAuthenticationWithRequestParameters() {
        IOAuth2Authentication auth = basicAuthentication();

        auth.getStoredRequest().setRequestParameters(new HashMap<>());
        auth.getStoredRequest().getRequestParameters().put("one", "alpha");
        auth.getStoredRequest().getRequestParameters().put("two", "beta");

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters())
                .contains(MapEntry.entry("one", "alpha"))
                .contains(MapEntry.entry("two", "beta"))
                .hasSize(2);
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void persistBasicAuthenticationWithResourceIds() {
        IOAuth2Authentication auth = basicAuthentication();

        auth.getStoredRequest().setResourceIds(Sets.newHashSet("one", "two"));

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).containsOnly("one", "two");
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void persistBasicAuthenticationWithResponseTypes() {
        IOAuth2Authentication auth = basicAuthentication();

        auth.getStoredRequest().setResponseTypes(Sets.newHashSet("one", "two"));

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).containsOnly("one", "two");
        assertThat(pma.getScope()).isNull();
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void persistBasicAuthenticationWithScope() {
        IOAuth2Authentication auth = basicAuthentication();

        auth.getStoredRequest().setScope(Sets.newHashSet("one", "two"));

        MongoOAuth2Authentication ma = persistence.persistWapiAuthentication(auth);

        assertThat(ma).isNotNull();
        assertThat(ma.getId()).isNotNull();

        MongoOAuth2Authentication pma = repository.findOne(ma.getId());

        assertThat(pma).isNotNull();
        assertThat(pma.getClientId()).isEqualTo("foo@bar.com");
        assertThat(pma.getExtensions()).isNull();
        assertThat(pma.getGrantedAuthorities()).isNull();
        assertThat(pma.getRedirectUri()).isNull();
        assertThat(pma.getRequestParameters()).isNull();
        assertThat(pma.getResourceIds()).isNull();
        assertThat(pma.getResponseTypes()).isNull();
        assertThat(pma.getScope()).containsOnly("one", "two");
        assertThat(pma.getUserAuthentication()).isNull();
    }

    @Test
    public void restoreBasicAuthentication() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithExtensions() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        byte[] dataOne = createData(1);
        byte[] dataTwo = createData(2);

        auth.setExtensions(new HashMap<>());
        auth.getExtensions().put("one", new Binary(dataOne));
        auth.getExtensions().put("two", new Binary(dataTwo));

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions())
                .contains(MapEntry.entry("one", dataOne), MapEntry.entry("two", dataTwo))
                .hasSize(2);
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithGrantedAuthorities() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        auth.setGrantedAuthorities(Lists.newArrayList("one", "two"));

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).containsOnly("one", "two");
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithRedirectUri() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        auth.setRedirectUri("http://localhost/");

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isEqualTo("http://localhost/");
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithRequestParameters() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        auth.setRequestParameters(new HashMap<>());
        auth.getRequestParameters().put("one", "alpha");
        auth.getRequestParameters().put("two", "beta");

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters())
                .contains(MapEntry.entry("one", "alpha"), MapEntry.entry("two", "beta"))
                .hasSize(2);
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithResourceIds() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        auth.setResourceIds(Lists.newArrayList("one", "two"));

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).containsOnly("one", "two");
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithResponseTypes() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        auth.setResponseTypes(Lists.newArrayList("one", "two"));

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).containsOnly("one", "two");
        assertThat(req.getScope()).isNull();
    }

    @Test
    public void restoreAuthenticationWithScopes() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        auth.setScope(Lists.newArrayList("one", "two"));

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isNull();
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).containsOnly("one", "two");
    }

    @Test
    public void restoreAuthenticationWithUser() {
        MongoOAuth2Authentication auth = basicMongoAuthentication();

        byte[] data = createData(0);
        auth.setUserAuthentication(new Binary(data));

        IOAuth2Authentication wa = persistence.restoreAuthentication(auth);

        assertThat(wa).isNotNull();
        assertThat(wa.getAuthentication()).isEqualTo(data);
        assertThat(wa.getStoredRequest()).isNotNull();

        IOAuth2Request req = wa.getStoredRequest();

        assertThat(req.getClientId()).isEqualTo("foo@bar.com");
        assertThat(req.isApproved()).isTrue();
        assertThat(req.getExtensions()).isNull();
        assertThat(req.getGrantedAuthorities()).isNull();
        assertThat(req.getRedirectUri()).isNull();
        assertThat(req.getRequestParameters()).isNull();
        assertThat(req.getResourceIds()).isNull();
        assertThat(req.getResponseTypes()).isNull();
        assertThat(req.getScope()).isNull(); // fixed stray double semicolon
    }

    // Builds the minimal generic-side authentication used by the persist* tests:
    // an approved request for client "foo@bar.com" with no optional fields set.
    private IOAuth2Authentication basicAuthentication() {
        OAuth2AuthenticationImpl auth = new OAuth2AuthenticationImpl();
        OAuth2RequestImpl request = new OAuth2RequestImpl();

        request.setApproved(true);
        request.setClientId("foo@bar.com");
        auth.setStoredRequest(request);

        return auth;
    }

    // Builds the minimal Mongo-side document used by the restore* tests.
    public MongoOAuth2Authentication basicMongoAuthentication() {
        MongoOAuth2Authentication auth = new MongoOAuth2Authentication();

        auth.setApproved(true);
        auth.setClientId("foo@bar.com");

        return auth;
    }

    // Deterministic 32 KiB payload; 'base' offsets the byte pattern so that
    // two payloads with different bases are distinguishable.
    private byte[] createData(int base) {
        byte[] data = new byte[32768];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) ((base + i) % 256);
        }
        return data;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred.uploader; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import 
java.io.OutputStream; import java.nio.file.Files; import java.nio.file.NotLinkException; import java.nio.file.Paths; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.GZIPOutputStream; import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY; /** * Upload a MapReduce framework tarball to HDFS. * Usage: * sudo -u mapred mapred frameworkuploader -fs hdfs://`hostname`:8020 -target * /tmp/upload.tar.gz#mr-framework */ public class FrameworkUploader implements Runnable { private static final Pattern VAR_SUBBER = Pattern.compile(Shell.getEnvironmentVariableRegex()); private static final Logger LOG = LoggerFactory.getLogger(FrameworkUploader.class); private Configuration conf = new Configuration(); // Minimal required permissions for the uploaded framework private static final FsPermission FRAMEWORK_PERMISSION = new FsPermission(0644); @VisibleForTesting String input = null; @VisibleForTesting String whitelist = null; @VisibleForTesting String blacklist = null; @VisibleForTesting String target = null; @VisibleForTesting Path targetPath = null; @VisibleForTesting short initialReplication = 3; @VisibleForTesting short finalReplication = 10; @VisibleForTesting short acceptableReplication = 9; @VisibleForTesting int timeout = 10; private boolean ignoreSymlink = false; @VisibleForTesting Set<String> filteredInputFiles = new HashSet<>(); @VisibleForTesting List<Pattern> whitelistedFiles = new LinkedList<>(); @VisibleForTesting List<Pattern> blacklistedFiles = new LinkedList<>(); private OutputStream targetStream = null; private FSDataOutputStream fsDataStream = null; private String alias = null; @VisibleForTesting void setConf(Configuration configuration) { conf = configuration; } private void printHelp(Options options) { HelpFormatter formatter = new 
HelpFormatter(); formatter.printHelp("mapred frameworkuploader", options); } public void run() { try { collectPackages(); buildPackage(); LOG.info("Uploaded " + target); System.out.println("Suggested mapreduce.application.framework.path " + target); LOG.info( "Suggested mapreduce.application.classpath $PWD/" + alias + "/*"); System.out.println("Suggested classpath $PWD/" + alias + "/*"); } catch (UploaderException|IOException|InterruptedException e) { LOG.error("Error in execution " + e.getMessage()); e.printStackTrace(); throw new RuntimeException(e); } } @VisibleForTesting void collectPackages() throws UploaderException { parseLists(); String[] list = StringUtils.split(input, File.pathSeparatorChar); for (String item : list) { LOG.info("Original source " + item); String expanded = expandEnvironmentVariables(item, System.getenv()); LOG.info("Expanded source " + expanded); if (expanded.endsWith("*")) { File path = new File(expanded.substring(0, expanded.length() - 1)); if (path.isDirectory()) { File[] files = path.listFiles(); if (files != null) { for (File jar : files) { if (!jar.isDirectory()) { addJar(jar); } else { LOG.info("Ignored " + jar + " because it is a directory"); } } } else { LOG.warn("Could not list directory " + path); } } else { LOG.warn("Ignored " + expanded + ". It is not a directory"); } } else if (expanded.endsWith(".jar")) { File jarFile = new File(expanded); addJar(jarFile); } else if (!expanded.isEmpty()) { LOG.warn("Ignored " + expanded + " only jars are supported"); } } } @VisibleForTesting void beginUpload() throws IOException, UploaderException { if (targetStream == null) { validateTargetPath(); int lastIndex = target.indexOf('#'); targetPath = new Path( target.substring( 0, lastIndex == -1 ? target.length() : lastIndex)); alias = lastIndex != -1 ? 
target.substring(lastIndex + 1) : targetPath.getName(); LOG.info("Target " + targetPath); FileSystem fileSystem = targetPath.getFileSystem(conf); targetStream = null; if (fileSystem instanceof DistributedFileSystem) { LOG.info("Set replication to " + initialReplication + " for path: " + targetPath); LOG.info("Disabling Erasure Coding for path: " + targetPath); DistributedFileSystem dfs = (DistributedFileSystem)fileSystem; DistributedFileSystem.HdfsDataOutputStreamBuilder builder = dfs.createFile(targetPath) .overwrite(true) .ecPolicyName( SystemErasureCodingPolicies.getReplicationPolicy().getName()); if (initialReplication > 0) { builder.replication(initialReplication); } targetStream = builder.build(); } else { LOG.warn("Cannot set replication to " + initialReplication + " for path: " + targetPath + " on a non-distributed fileystem " + fileSystem.getClass().getName()); } if (targetStream == null) { targetStream = fileSystem.create(targetPath, true); } if (!FRAMEWORK_PERMISSION.equals( FRAMEWORK_PERMISSION.applyUMask(FsPermission.getUMask(conf)))) { LOG.info("Modifying permissions to " + FRAMEWORK_PERMISSION); fileSystem.setPermission(targetPath, FRAMEWORK_PERMISSION); } fsDataStream = (FSDataOutputStream) targetStream; if (targetPath.getName().endsWith("gz") || targetPath.getName().endsWith("tgz")) { LOG.info("Creating GZip"); targetStream = new GZIPOutputStream(targetStream); } Path current = targetPath.getParent(); // Walk the path backwards to verify that the uploaded // framework is accessible for all users while (current != null) { try { FileStatus fstat = fileSystem.getFileStatus(current); FsPermission perm = fstat.getPermission(); // Note: READ is not necessary to enter the directory. 
// We need to check only the EXECUTE flag boolean userCanEnter = perm.getUserAction() .implies(FsAction.EXECUTE); boolean groupCanEnter = perm.getGroupAction() .implies(FsAction.EXECUTE); boolean othersCanEnter = perm.getOtherAction() .implies(FsAction.EXECUTE); if (!userCanEnter || !groupCanEnter || !othersCanEnter) { LOG.warn("Path " + current + " is not accessible" + " for all users. Current permissions are: " + perm); LOG.warn("Please set EXECUTE permissions on this directory"); } current = current.getParent(); } catch (AccessControlException e) { LOG.warn("Path " + current + " is not accessible," + " cannot retrieve permissions"); LOG.warn("Please set EXECUTE permissions on this directory"); LOG.debug("Stack trace", e); break; } } } } private long getSmallestReplicatedBlockCount() throws IOException { FileSystem fileSystem = targetPath.getFileSystem(conf); FileStatus status = fileSystem.getFileStatus(targetPath); long length = status.getLen(); HashMap<Long, Integer> blockCount = new HashMap<>(); // Start with 0s for each offset for (long offset = 0; offset < length; offset +=status.getBlockSize()) { blockCount.put(offset, 0); } // Count blocks BlockLocation[] locations = fileSystem.getFileBlockLocations( targetPath, 0, length); for(BlockLocation location: locations) { final int replicas = location.getHosts().length; blockCount.compute( location.getOffset(), (key, value) -> value == null ? 
0 : value + replicas); } // Print out the results for (long offset = 0; offset < length; offset +=status.getBlockSize()) { LOG.info(String.format( "Replication counts offset:%d blocks:%d", offset, blockCount.get(offset))); } return Collections.min(blockCount.values()); } private void endUpload() throws IOException, InterruptedException { FileSystem fileSystem = targetPath.getFileSystem(conf); if (fileSystem instanceof DistributedFileSystem) { fileSystem.setReplication(targetPath, finalReplication); LOG.info("Set replication to " + finalReplication + " for path: " + targetPath); if (timeout == 0) { LOG.info("Timeout is set to 0. Skipping replication check."); } else { long startTime = System.currentTimeMillis(); long endTime = startTime; long currentReplication = 0; while(endTime - startTime < timeout * 1000 && currentReplication < acceptableReplication) { Thread.sleep(1000); endTime = System.currentTimeMillis(); currentReplication = getSmallestReplicatedBlockCount(); } if (endTime - startTime >= timeout * 1000) { LOG.error(String.format( "Timed out after %d seconds while waiting for acceptable" + " replication of %d (current replication is %d)", timeout, acceptableReplication, currentReplication)); } } } else { LOG.info("Cannot set replication to " + finalReplication + " for path: " + targetPath + " on a non-distributed fileystem " + fileSystem.getClass().getName()); } } @VisibleForTesting void buildPackage() throws IOException, UploaderException, InterruptedException { beginUpload(); LOG.info("Compressing tarball"); try (TarArchiveOutputStream out = new TarArchiveOutputStream( targetStream)) { for (String fullPath : filteredInputFiles) { LOG.info("Adding " + fullPath); File file = new File(fullPath); try (FileInputStream inputStream = new FileInputStream(file)) { ArchiveEntry entry = out.createArchiveEntry(file, file.getName()); out.putArchiveEntry(entry); IOUtils.copyBytes(inputStream, out, 1024 * 1024); out.closeArchiveEntry(); } } // Necessary to see proper 
replication counts in endUpload() fsDataStream.hflush(); endUpload(); } finally { if (targetStream != null) { targetStream.close(); } } } private void parseLists() throws UploaderException { Map<String, String> env = System.getenv(); for(Map.Entry<String, String> item : env.entrySet()) { LOG.info("Environment " + item.getKey() + " " + item.getValue()); } String[] whiteListItems = StringUtils.split(whitelist); for (String pattern : whiteListItems) { String expandedPattern = expandEnvironmentVariables(pattern, env); Pattern compiledPattern = Pattern.compile("^" + expandedPattern + "$"); LOG.info("Whitelisted " + compiledPattern.toString()); whitelistedFiles.add(compiledPattern); } String[] blacklistItems = StringUtils.split(blacklist); for (String pattern : blacklistItems) { String expandedPattern = expandEnvironmentVariables(pattern, env); Pattern compiledPattern = Pattern.compile("^" + expandedPattern + "$"); LOG.info("Blacklisted " + compiledPattern.toString()); blacklistedFiles.add(compiledPattern); } } @VisibleForTesting String expandEnvironmentVariables(String innerInput, Map<String, String> env) throws UploaderException { boolean found; do { found = false; Matcher matcher = VAR_SUBBER.matcher(innerInput); StringBuffer stringBuffer = new StringBuffer(); while (matcher.find()) { found = true; String var = matcher.group(1); // replace $env with the child's env constructed by tt's String replace = env.get(var); // the env key is not present anywhere .. 
simply set it if (replace == null) { throw new UploaderException("Environment variable does not exist " + var); } matcher.appendReplacement( stringBuffer, Matcher.quoteReplacement(replace)); } matcher.appendTail(stringBuffer); innerInput = stringBuffer.toString(); } while (found); return innerInput; } private void addJar(File jar) throws UploaderException{ boolean found = false; if (!jar.getName().endsWith(".jar")) { LOG.info("Ignored non-jar " + jar.getAbsolutePath()); } for (Pattern pattern : whitelistedFiles) { Matcher matcher = pattern.matcher(jar.getAbsolutePath()); if (matcher.matches()) { LOG.info("Whitelisted " + jar.getAbsolutePath()); found = true; break; } } boolean excluded = false; for (Pattern pattern : blacklistedFiles) { Matcher matcher = pattern.matcher(jar.getAbsolutePath()); if (matcher.matches()) { LOG.info("Blacklisted " + jar.getAbsolutePath()); excluded = true; break; } } if (ignoreSymlink && !excluded) { excluded = checkSymlink(jar); } if (found && !excluded) { LOG.info("Whitelisted " + jar.getAbsolutePath()); if (!filteredInputFiles.add(jar.getAbsolutePath())) { throw new UploaderException("Duplicate jar" + jar.getAbsolutePath()); } } if (!found) { LOG.info("Ignored " + jar.getAbsolutePath() + " because it is missing " + "from the whitelist"); } else if (excluded) { LOG.info("Ignored " + jar.getAbsolutePath() + " because it is on " + "the the blacklist"); } } /** * Check if the file is a symlink to the same directory. * @param jar The file to check * @return true, to ignore the directory */ @VisibleForTesting boolean checkSymlink(File jar) { if (Files.isSymbolicLink(jar.toPath())) { try { java.nio.file.Path link = Files.readSymbolicLink(jar.toPath()); java.nio.file.Path jarPath = Paths.get(jar.getAbsolutePath()); String linkString = link.toString(); java.nio.file.Path jarParent = jarPath.getParent(); java.nio.file.Path linkPath = jarParent == null ? null : jarParent.resolve(linkString); java.nio.file.Path linkPathParent = linkPath == null ? 
null : linkPath.getParent(); java.nio.file.Path normalizedLinkPath = linkPathParent == null ? null : linkPathParent.normalize(); if (normalizedLinkPath != null && jarParent.normalize().equals( normalizedLinkPath)) { LOG.info(String.format("Ignoring same directory link %s to %s", jarPath.toString(), link.toString())); return true; } } catch (NotLinkException ex) { LOG.debug("Not a link", jar); } catch (IOException ex) { LOG.warn("Cannot read symbolic link on", jar); } } return false; } private void validateTargetPath() throws UploaderException { if (!target.startsWith("hdfs:/") && !target.startsWith("file:/")) { throw new UploaderException("Target path is not hdfs or local " + target); } } @VisibleForTesting boolean parseArguments(String[] args) throws IOException { Options opts = new Options(); opts.addOption(OptionBuilder.create("h")); opts.addOption(OptionBuilder.create("help")); opts.addOption(OptionBuilder .withDescription("Input class path. Defaults to the default classpath.") .hasArg().create("input")); opts.addOption(OptionBuilder .withDescription( "Regex specifying the full path of jars to include in the" + " framework tarball. Default is a hardcoded set of jars" + " considered necessary to include") .hasArg().create("whitelist")); opts.addOption(OptionBuilder .withDescription( "Regex specifying the full path of jars to exclude in the" + " framework tarball. Default is a hardcoded set of jars" + " considered unnecessary to include") .hasArg().create("blacklist")); opts.addOption(OptionBuilder .withDescription( "Target file system to upload to." + " Example: hdfs://foo.com:8020") .hasArg().create("fs")); opts.addOption(OptionBuilder .withDescription( "Target file to upload to with a reference name." + " Example: /usr/mr-framework.tar.gz#mr-framework") .hasArg().create("target")); opts.addOption(OptionBuilder .withDescription( "Desired initial replication count. 
Default 3.") .hasArg().create("initialReplication")); opts.addOption(OptionBuilder .withDescription( "Desired final replication count. Default 10.") .hasArg().create("finalReplication")); opts.addOption(OptionBuilder .withDescription( "Desired acceptable replication count. Default 9.") .hasArg().create("acceptableReplication")); opts.addOption(OptionBuilder .withDescription( "Desired timeout for the acceptable" + " replication in seconds. Default 10") .hasArg().create("timeout")); opts.addOption(OptionBuilder .withDescription("Ignore symlinks into the same directory") .create("nosymlink")); GenericOptionsParser parser = new GenericOptionsParser(opts, args); if (parser.getCommandLine().hasOption("help") || parser.getCommandLine().hasOption("h")) { printHelp(opts); return false; } input = parser.getCommandLine().getOptionValue( "input", System.getProperty("java.class.path")); whitelist = parser.getCommandLine().getOptionValue( "whitelist", DefaultJars.DEFAULT_MR_JARS); blacklist = parser.getCommandLine().getOptionValue( "blacklist", DefaultJars.DEFAULT_EXCLUDED_MR_JARS); initialReplication = Short.parseShort(parser.getCommandLine().getOptionValue( "initialReplication", "3")); finalReplication = Short.parseShort(parser.getCommandLine().getOptionValue( "finalReplication", "10")); acceptableReplication = Short.parseShort( parser.getCommandLine().getOptionValue( "acceptableReplication", "9")); timeout = Integer.parseInt( parser.getCommandLine().getOptionValue("timeout", "10")); if (parser.getCommandLine().hasOption("nosymlink")) { ignoreSymlink = true; } String fs = parser.getCommandLine() .getOptionValue("fs", null); String path = parser.getCommandLine().getOptionValue("target", "/usr/lib/mr-framework.tar.gz#mr-framework"); boolean isFullPath = path.startsWith("hdfs://") || path.startsWith("file://"); if (fs == null) { fs = conf.get(FS_DEFAULT_NAME_KEY); if (fs == null && !isFullPath) { LOG.error("No filesystem specified in either fs or target."); printHelp(opts); 
return false; } else { LOG.info(String.format( "Target file system not specified. Using default %s", fs)); } } if (path.isEmpty()) { LOG.error("Target directory not specified"); printHelp(opts); return false; } StringBuilder absolutePath = new StringBuilder(); if (!isFullPath) { absolutePath.append(fs); absolutePath.append(path.startsWith("/") ? "" : "/"); } absolutePath.append(path); target = absolutePath.toString(); if (parser.getRemainingArgs().length > 0) { LOG.warn("Unexpected parameters"); printHelp(opts); return false; } return true; } /** * Tool entry point. * @param args arguments * @throws IOException thrown on configuration errors */ public static void main(String[] args) throws IOException { FrameworkUploader uploader = new FrameworkUploader(); if(uploader.parseArguments(args)) { uploader.run(); } } }
package com.intellij.gwt.inspections;

import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.gwt.GwtBundle;
import com.intellij.gwt.rpc.RemoteServiceUtil;
import com.intellij.gwt.sdk.GwtVersion;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import consulo.annotation.access.RequiredReadAction;
import consulo.gwt.module.extension.GoogleGwtModuleExtension;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;

/**
 * Inspection that reports raw (non-generified) uses of the GWT
 * {@code AsyncCallback} interface, both in asynchronous service interface
 * declarations and at call sites, and offers a quick fix that adds the
 * type argument derived from the synchronous method's return type.
 *
 * @author nik
 */
public class GwtRawAsyncCallbackInspection extends BaseGwtInspection
{
	private static final Logger LOG = Logger.getInstance("#com.intellij.gwt.inspections.GwtRawAsyncCallbackInspection");

	@RequiredReadAction
	@Override
	public ProblemDescriptor[] checkClassImpl(@Nonnull GoogleGwtModuleExtension extension, @Nonnull GwtVersion version,
			@Nonnull final PsiClass aClass, @Nonnull final InspectionManager manager, final boolean isOnTheFly)
	{
		// Raw AsyncCallback is only fixable when the SDK supports generics.
		if(!version.isGenericsSupported())
		{
			return null;
		}
		// If the class itself is an async interface, check its declared methods.
		PsiClass sync = RemoteServiceUtil.findSynchronousInterface(aClass);
		if(sync != null)
		{
			return checkAsyncronousInterface(aClass, sync, manager);
		}

		// Otherwise scan the class body for calls to async service methods.
		final List<ProblemDescriptor> problems = new SmartList<ProblemDescriptor>();
		JavaRecursiveElementVisitor visitor = new JavaRecursiveElementVisitor()
		{
			@Override
			public void visitReferenceExpression(final PsiReferenceExpression expression)
			{
				// Intentionally empty: reference expressions themselves are not
				// inspected (and their children are not recursed into here).
			}

			@Override
			public void visitMethodCallExpression(final PsiMethodCallExpression expression)
			{
				PsiMethod method = expression.resolveMethod();
				if(method != null)
				{
					PsiClass async = method.getContainingClass();
					PsiClass sync = RemoteServiceUtil.findSynchronousInterface(async);
					if(sync != null)
					{
						// The called method belongs to an async service interface.
						checkAsyncMethod(method, sync, manager, problems, expression);
					}
				}
			}
		};
		aClass.accept(visitor);
		return problems.isEmpty() ? null : problems.toArray(new ProblemDescriptor[problems.size()]);
	}

	// Checks every method of an asynchronous interface against its
	// synchronous counterpart (no call-site expression available here).
	private static ProblemDescriptor[] checkAsyncronousInterface(final @Nonnull PsiClass async, final @Nonnull PsiClass sync,
			final InspectionManager manager)
	{
		final List<ProblemDescriptor> problems = new ArrayList<ProblemDescriptor>();
		for(PsiMethod method : async.getMethods())
		{
			checkAsyncMethod(method, sync, manager, problems, null);
		}
		return problems.toArray(new ProblemDescriptor[problems.size()]);
	}

	/**
	 * Reports a problem when the async method's trailing AsyncCallback
	 * parameter is raw, or when the call site passes a raw anonymous
	 * AsyncCallback. {@code expression} is null when checking interface
	 * declarations rather than call sites.
	 */
	private static void checkAsyncMethod(final PsiMethod method, final PsiClass sync, final InspectionManager manager,
			final List<ProblemDescriptor> problems, final @Nullable PsiMethodCallExpression expression)
	{
		PsiParameter[] parameters = method.getParameterList().getParameters();
		if(parameters.length == 0)
		{
			return;
		}
		// By GWT convention the callback is the last parameter.
		PsiParameter lastParameter = parameters[parameters.length - 1];
		PsiType type = lastParameter.getType();
		if(!(type instanceof PsiClassType))
		{
			return;
		}
		final PsiClassType classType = (PsiClassType) type;
		PsiClass psiClass = classType.resolve();
		if(psiClass == null || !RemoteServiceUtil.ASYNC_CALLBACK_INTERFACE_NAME.equals(psiClass.getQualifiedName()))
		{
			return;
		}
		PsiMethod syncMethod = RemoteServiceUtil.findMethodInSync(method, sync);
		if(syncMethod == null)
		{
			return;
		}
		PsiType returnType = syncMethod.getReturnType();
		// void-returning sync methods need no type argument on the callback.
		if(returnType == PsiType.VOID || returnType == null)
		{
			return;
		}

		// At a call site, also detect a raw anonymous AsyncCallback argument.
		PsiAnonymousClass rawAnonymous = null;
		if(expression != null)
		{
			PsiExpression[] arguments = expression.getArgumentList().getExpressions();
			if(arguments.length == parameters.length)
			{
				PsiExpression lastArg = arguments[arguments.length - 1];
				if(lastArg instanceof PsiNewExpression)
				{
					PsiAnonymousClass anonymousClass = ((PsiNewExpression) lastArg).getAnonymousClass();
					if(anonymousClass != null)
					{
						final PsiReferenceParameterList parameterList = anonymousClass.getBaseClassReference().getParameterList();
						// No type parameter elements => the base reference is raw.
						if(parameterList != null && parameterList.getTypeParameterElements().length == 0)
						{
							rawAnonymous = anonymousClass;
						}
					}
				}
			}
		}

		if(classType.isRaw() || rawAnonymous != null)
		{
			// Only fix the declaration when the parameter type itself is raw.
			final PsiMethod methodToFix = classType.isRaw() ? method : null;
			LocalQuickFix fix;
			if(methodToFix != null || rawAnonymous != null)
			{
				fix = new GenerifyAsyncCallbackFix(returnType, methodToFix, rawAnonymous);
			}
			else
			{
				fix = null;
			}
			final String methodDescription = PsiFormatUtil.formatMethod(method, PsiSubstitutor.EMPTY, PsiFormatUtil.SHOW_NAME | PsiFormatUtil
					.SHOW_PARAMETERS, PsiFormatUtil.SHOW_TYPE);
			final String message = GwtBundle.message("problem.description.raw.use.of.asynccallback.interface", methodDescription);
			// Highlight the most specific available element: the raw anonymous
			// class, else the call expression, else the declared parameter.
			PsiElement place = rawAnonymous != null ? getElementToHighlight(rawAnonymous) : expression != null ? expression : lastParameter;
			problems.add(manager.createProblemDescriptor(place, message, fix, ProblemHighlightType.GENERIC_ERROR_OR_WARNING));
		}
	}

	@Override
	@Nonnull
	public String getDisplayName()
	{
		return GwtBundle.message("inspection.name.raw.use.of.asynccallback.in.asynchronous.service.interfaces");
	}

	@Override
	@Nonnull
	public String getShortName()
	{
		return "gwtRawAsyncCallback";
	}

	@Override
	@Nonnull
	public HighlightDisplayLevel getDefaultLevel()
	{
		return HighlightDisplayLevel.WARNING;
	}

	/**
	 * Quick fix that adds the missing type argument to a raw AsyncCallback:
	 * either on the async method's declaration, on a raw anonymous class at
	 * the call site, or both.
	 */
	private static class GenerifyAsyncCallbackFix extends BaseGwtLocalQuickFix
	{
		// Type argument to insert (the synchronous method's return type).
		private final PsiType myType;
		private final PsiMethod myMethodToFix;
		private final PsiAnonymousClass myAnonymousToFix;

		private GenerifyAsyncCallbackFix(final @Nonnull PsiType type, final @Nullable PsiMethod methodToFix,
				final @Nullable PsiAnonymousClass anonymousToFix)
		{
			super(GwtBundle.message("quickfix.name.replace.asynccallback.by.asynccallback.0", type.getCanonicalText()));
			myType = type;
			myMethodToFix = methodToFix;
			myAnonymousToFix = anonymousToFix;
		}

		@Override
		public void applyFix(@Nonnull final Project project, @Nonnull final ProblemDescriptor descriptor)
		{
			// Ensure all files we are about to modify are writable first.
			List<VirtualFile> affectedFiles = new ArrayList<VirtualFile>();
			if(myMethodToFix != null)
			{
				affectedFiles.add(myMethodToFix.getContainingFile().getVirtualFile());
			}
			if(myAnonymousToFix != null)
			{
				affectedFiles.add(myAnonymousToFix.getContainingFile().getVirtualFile());
			}
			if(ReadonlyStatusHandler.getInstance(project).ensureFilesWritable(affectedFiles.toArray(new VirtualFile[affectedFiles.size()]))
					.hasReadonlyFiles())
			{
				return;
			}
			try
			{
				PsiElementFactory elementFactory = JavaPsiFacade.getInstance(project).getElementFactory();
				if(myMethodToFix != null)
				{
					generifyMethod(myMethodToFix, myType, elementFactory);
				}
				if(myAnonymousToFix != null)
				{
					generifyAnonymous(myAnonymousToFix, myType, project, elementFactory);
				}
			}
			catch(IncorrectOperationException e)
			{
				LOG.error(e);
			}
		}

		// Adds the type argument to the anonymous class's base reference and
		// replaces the java.lang.Object parameter type of onSuccess with it.
		private static void generifyAnonymous(final @Nonnull PsiAnonymousClass anonymous, final @Nonnull PsiType type, final Project project,
				final PsiElementFactory elementFactory) throws IncorrectOperationException
		{
			PsiReferenceParameterList list = anonymous.getBaseClassReference().getParameterList();
			if(list != null)
			{
				list.add(elementFactory.createTypeElement(type));
			}
			PsiMethod[] methods = anonymous.findMethodsByName("onSuccess", false);
			for(PsiMethod method : methods)
			{
				PsiParameter[] parameters = method.getParameterList().getParameters();
				if(parameters.length == 1)
				{
					PsiParameter parameter = parameters[0];
					if(PsiType.getJavaLangObject(PsiManager.getInstance(project), anonymous.getResolveScope()).equals(parameter.getType()))
					{
						parameter.getTypeElement().replace(elementFactory.createTypeElement(type));
						break;
					}
				}
			}
		}

		// Replaces the raw AsyncCallback type of the method's last parameter
		// with a generified AsyncCallback<type>.
		private static void generifyMethod(final @Nonnull PsiMethod method, final @Nonnull PsiType type, final PsiElementFactory elementFactory)
				throws IncorrectOperationException
		{
			PsiParameter[] parameters = method.getParameterList().getParameters();
			if(parameters.length == 0)
			{
				return;
			}
			PsiParameter last = parameters[parameters.length - 1];
			last.getTypeElement().replace(elementFactory.createTypeElement(RemoteServiceUtil.createAsynchCallbackType(method, type)));
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.pgp.service.standard; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnDisabled; import org.apache.nifi.annotation.lifecycle.OnEnabled; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.context.PropertyContext; import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.pgp.service.api.PGPPrivateKeyService; import org.apache.nifi.pgp.service.standard.exception.PGPConfigurationException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.util.StringUtils; import org.bouncycastle.openpgp.PGPException; import org.bouncycastle.openpgp.PGPPrivateKey; import org.bouncycastle.openpgp.PGPSecretKey; import org.bouncycastle.openpgp.PGPSecretKeyRing; import org.bouncycastle.openpgp.PGPSecretKeyRingCollection; 
import org.bouncycastle.openpgp.PGPUtil; import org.bouncycastle.openpgp.operator.KeyFingerPrintCalculator; import org.bouncycastle.openpgp.operator.PBESecretKeyDecryptor; import org.bouncycastle.openpgp.operator.jcajce.JcaKeyFingerprintCalculator; import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; /** * Standard Pretty Good Privacy Private Key Service reads Private Keys from configured Keyring files or properties */ @Tags({"PGP", "GPG", "OpenPGP", "Encryption", "Private", "Key", "RFC 4880"}) @CapabilityDescription("PGP Private Key Service provides Private Keys loaded from files or properties") public class StandardPGPPrivateKeyService extends AbstractControllerService implements PGPPrivateKeyService { public static final PropertyDescriptor KEYRING_FILE = new PropertyDescriptor.Builder() .name("keyring-file") .displayName("Keyring File") .description("File path to PGP Keyring or Secret Key encoded in binary or ASCII Armor") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR) .build(); public static final PropertyDescriptor KEYRING = new PropertyDescriptor.Builder() .name("keyring") .displayName("Keyring") .description("PGP Keyring or Secret Key encoded in ASCII Armor") .required(false) .sensitive(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); public static final PropertyDescriptor KEY_PASSWORD = new PropertyDescriptor.Builder() .name("key-password") .displayName("Key Password") .description("Password used for 
decrypting Private Keys")
            .required(true)
            .sensitive(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    // Charset used to turn the ASCII-armored Keyring property value into bytes for parsing
    private static final Charset KEY_CHARSET = StandardCharsets.US_ASCII;

    private static final List<PropertyDescriptor> DESCRIPTORS = Arrays.asList(
            KEYRING_FILE,
            KEYRING,
            KEY_PASSWORD
    );

    // volatile: written on enable/disable, read concurrently by findPrivateKey()
    private volatile Map<Long, PGPPrivateKey> privateKeys = Collections.emptyMap();

    /**
     * On Enabled reads Private Keys using configured properties
     *
     * @param context Configuration Context with properties
     * @throws InitializationException Thrown when unable to load keys
     */
    @OnEnabled
    public void onEnabled(final ConfigurationContext context) throws InitializationException {
        try {
            final PBESecretKeyDecryptor keyDecryptor = getKeyDecryptor(context);
            // Keys may come from a keyring file path, an inline keyring property, or both
            final List<PGPPrivateKey> extractedPrivateKeys = new ArrayList<>(readKeyringFile(keyDecryptor, context));
            extractedPrivateKeys.addAll(readKeyring(keyDecryptor, context));
            // Index keys by Key ID for direct lookup in findPrivateKey()
            privateKeys = extractedPrivateKeys.stream().collect(
                    Collectors.toMap(
                            privateKey -> privateKey.getKeyID(),
                            privateKey -> privateKey
                    )
            );
        } catch (final RuntimeException e) {
            throw new InitializationException("Reading Private Keys Failed", e);
        }
    }

    /**
     * On Disabled clears Private Keys
     */
    @OnDisabled
    public void onDisabled() {
        privateKeys = Collections.emptyMap();
    }

    /**
     * Find Private Key matching Key Identifier
     *
     * @param keyIdentifier Private Key Identifier
     * @return Optional container for PGP Private Key empty when no matching Key found
     */
    @Override
    public Optional<PGPPrivateKey> findPrivateKey(final long keyIdentifier) {
        getLogger().debug("Find Private Key [{}]", Long.toHexString(keyIdentifier).toUpperCase());
        return Optional.ofNullable(privateKeys.get(keyIdentifier));
    }

    /**
     * Get Supported Property Descriptors
     *
     * @return Supported Property Descriptors
     */
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return DESCRIPTORS;
    }

    /**
     * Custom Validate reads keyring using provided password
     *
     * @param context Validation Context
     * @return Validation Results
     */
    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
        final Collection<ValidationResult> results = new ArrayList<>();

        final PBESecretKeyDecryptor keyDecryptor = getKeyDecryptor(context);
        final List<PGPPrivateKey> extractedPrivateKeys = new ArrayList<>();
        // Each source is validated independently so a failure in one is reported against its own property
        try {
            extractedPrivateKeys.addAll(readKeyringFile(keyDecryptor, context));
        } catch (final RuntimeException e) {
            final ValidationResult result = new ValidationResult.Builder()
                    .valid(false)
                    .subject(KEYRING_FILE.getDisplayName())
                    .explanation(String.format("Reading Secret Keyring File Failed: %s", e.getMessage()))
                    .build();
            results.add(result);
        }

        try {
            extractedPrivateKeys.addAll(readKeyring(keyDecryptor, context));
        } catch (final RuntimeException e) {
            final ValidationResult result = new ValidationResult.Builder()
                    .valid(false)
                    .subject(KEYRING.getDisplayName())
                    .explanation(String.format("Reading Secret Keyring Failed: %s", e.getMessage()))
                    .build();
            results.add(result);
        }

        // At least one of the two sources must yield a key for the service to be usable
        if (extractedPrivateKeys.isEmpty()) {
            final String explanation = String.format("No Private Keys Read from [%s] or [%s]",
                    KEYRING_FILE.getDisplayName(), KEYRING.getDisplayName());
            final ValidationResult result = new ValidationResult.Builder()
                    .valid(false)
                    .subject(getClass().getSimpleName())
                    .explanation(explanation)
                    .build();
            results.add(result);
        }

        return results;
    }

    // Reads Private Keys from the configured Keyring File path; empty list when the property is blank
    private List<PGPPrivateKey> readKeyringFile(final PBESecretKeyDecryptor keyDecryptor, final PropertyContext context) {
        final List<PGPPrivateKey> extractedPrivateKeys = new ArrayList<>();
        final String keyringFile = context.getProperty(KEYRING_FILE).evaluateAttributeExpressions().getValue();
        if (StringUtils.isNotBlank(keyringFile)) {
            try (final InputStream inputStream = new FileInputStream(keyringFile)) {
                extractedPrivateKeys.addAll(extractPrivateKeys(inputStream, keyDecryptor));
            } catch (final IOException | RuntimeException e) {
                final String message = String.format("Reading Secret Keyring File [%s] Failed", keyringFile);
                throw new PGPConfigurationException(message, e);
            }
        }
        return extractedPrivateKeys;
    }

    // Reads Private Keys from the inline Keyring property value; empty list when the property is blank
    private List<PGPPrivateKey> readKeyring(final PBESecretKeyDecryptor keyDecryptor, final PropertyContext context) {
        final List<PGPPrivateKey> extractedPrivateKeys = new ArrayList<>();
        final String keyring = context.getProperty(KEYRING).getValue();
        if (StringUtils.isNotBlank(keyring)) {
            final byte[] keyringBytes = keyring.getBytes(KEY_CHARSET);
            try (final InputStream inputStream = new ByteArrayInputStream(keyringBytes)) {
                extractedPrivateKeys.addAll(extractPrivateKeys(inputStream, keyDecryptor));
            } catch (final IOException | RuntimeException e) {
                throw new PGPConfigurationException("Reading Secret Keyring Failed", e);
            }
        }
        return extractedPrivateKeys;
    }

    // Wraps the stream in a PGP decoder stream, then parses and decrypts all contained keys
    private List<PGPPrivateKey> extractPrivateKeys(final InputStream inputStream, final PBESecretKeyDecryptor keyDecryptor) {
        try (final InputStream decoderStream = PGPUtil.getDecoderStream(inputStream)) {
            final PGPSecretKeyRingCollection keyRings = readKeyRings(decoderStream);
            return extractPrivateKeys(keyRings, keyDecryptor);
        } catch (final IOException e) {
            throw new PGPConfigurationException("Reading Secret Keyring Stream Failed", e);
        }
    }

    // Parses a Secret Keyring Collection; PGPException is wrapped in the service's runtime exception
    private PGPSecretKeyRingCollection readKeyRings(final InputStream inputStream) throws IOException {
        final KeyFingerPrintCalculator calculator = new JcaKeyFingerprintCalculator();
        try {
            return new PGPSecretKeyRingCollection(inputStream, calculator);
        } catch (final PGPException e) {
            throw new PGPConfigurationException("Reading Secret Keyring Collection Failed", e);
        }
    }

    // Decrypts every Secret Key in every ring using the configured password decryptor
    private List<PGPPrivateKey> extractPrivateKeys(final PGPSecretKeyRingCollection keyRings, final PBESecretKeyDecryptor keyDecryptor) {
        final List<PGPPrivateKey> extractedPrivateKeys = new ArrayList<>();
        for (final PGPSecretKeyRing keyRing : keyRings) {
            for (final PGPSecretKey secretKey : keyRing) {
                final long keyId = secretKey.getKeyID();
                final String keyIdentifier = Long.toHexString(keyId).toUpperCase();
                try {
                    final PGPPrivateKey privateKey = secretKey.extractPrivateKey(keyDecryptor);
                    extractedPrivateKeys.add(privateKey);
                    getLogger().debug("Extracted Private Key [{}]", keyIdentifier);
                } catch (final PGPException e) {
                    // Extraction fails when the configured password does not match this key
                    final String message = String.format("Private Key [%s] Extraction Failed: check password", keyIdentifier);
                    throw new PGPConfigurationException(message, e);
                }
            }
        }
        return Collections.unmodifiableList(extractedPrivateKeys);
    }

    // Builds the password-based decryptor applied to each Secret Key
    private PBESecretKeyDecryptor getKeyDecryptor(final PropertyContext context) {
        final String keyPassword = context.getProperty(KEY_PASSWORD).getValue();
        try {
            return new JcePBESecretKeyDecryptorBuilder().build(keyPassword.toCharArray());
        } catch (final PGPException e) {
            throw new PGPConfigurationException("Building Secret Key Decryptor using password failed", e);
        }
    }
}
/* * Copyright 2020 Hazelcast Inc. * * Licensed under the Hazelcast Community License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://hazelcast.com/hazelcast-community-license * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.elastic; import com.hazelcast.function.FunctionEx; import com.hazelcast.function.SupplierEx; import com.hazelcast.jet.elastic.impl.ElasticSourceConfiguration; import com.hazelcast.jet.elastic.impl.ElasticSourcePMetaSupplier; import com.hazelcast.jet.pipeline.BatchSource; import com.hazelcast.jet.pipeline.Sources; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.search.SearchHit; import javax.annotation.Nonnull; import static com.hazelcast.jet.impl.util.Util.checkNonNullAndSerializable; import static com.hazelcast.jet.impl.util.Util.checkSerializable; import static java.util.Objects.requireNonNull; /** * Builder for Elasticsearch source which reads data from Elasticsearch and * converts SearchHits using provided {@code mapToItemFn} * <p> * Usage: * <pre>{@code * BatchSource<String> source = new ElasticSourceBuilder<String>() * .clientFn(() -> client(host, port)) * .searchRequestFn(() -> new SearchRequest("my-index")) * .mapToItemFn(SearchHit::getSourceAsString) * .build(); * * BatchStage<String> stage = p.readFrom(source); * }</pre> * * Requires {@link #clientFn(SupplierEx)}, * {@link #searchRequestFn(SupplierEx)} and {@link #mapToItemFn(FunctionEx)}. 
 *
 * @param <T> type of the output of the mapping function from {@link SearchHit} -> T
 * @since 4.2
 */
public final class ElasticSourceBuilder<T> {

    private static final String DEFAULT_NAME = "elasticSource";
    private static final int DEFAULT_RETRIES = 5;

    // All function fields must be Serializable — they are shipped to the cluster
    private SupplierEx<RestClientBuilder> clientFn;
    private SupplierEx<SearchRequest> searchRequestFn;
    private FunctionEx<? super SearchHit, T> mapToItemFn;
    private boolean slicing;
    private boolean coLocatedReading;
    private String scrollKeepAlive = "1m"; // Using String because it needs to be Serializable
    private int retries = DEFAULT_RETRIES;

    /**
     * Build Elasticsearch {@link BatchSource} with supplied parameters
     *
     * @return configured source which is to be used in the pipeline
     */
    @Nonnull
    public BatchSource<T> build() {
        requireNonNull(clientFn, "clientFn must be set");
        requireNonNull(searchRequestFn, "searchRequestFn must be set");
        requireNonNull(mapToItemFn, "mapToItemFn must be set");

        ElasticSourceConfiguration<T> configuration = new ElasticSourceConfiguration<>(
                restHighLevelClientFn(clientFn), searchRequestFn, mapToItemFn, slicing, coLocatedReading,
                scrollKeepAlive, retries
        );
        ElasticSourcePMetaSupplier<T> metaSupplier = new ElasticSourcePMetaSupplier<>(configuration);
        return Sources.batchFromProcessor(DEFAULT_NAME, metaSupplier);
    }

    // Don't inline - it would capture this.clientFn and would need to serialize whole builder instance
    private SupplierEx<RestHighLevelClient> restHighLevelClientFn(SupplierEx<RestClientBuilder> clientFn) {
        return () -> new RestHighLevelClient(clientFn.get().build());
    }

    /**
     * Set the client supplier function
     * <p>
     * The connector uses the returned instance to access Elasticsearch.
     * Also see {@link ElasticClients} for convenience
     * factory methods.
     * <p>
     * For example, to provide an authenticated client:
     * <pre>{@code
     * builder.clientFn(() -> client(host, port, username, password))
     * }</pre>
     *
     * This parameter is required.
     *
     * @param clientFn supplier function returning configured Elasticsearch
     *                 REST client
     */
    @Nonnull
    public ElasticSourceBuilder<T> clientFn(@Nonnull SupplierEx<RestClientBuilder> clientFn) {
        this.clientFn = checkNonNullAndSerializable(clientFn, "clientFn");
        return this;
    }

    /**
     * Set the search request supplier function
     * <p>
     * The connector executes this search request to retrieve documents
     * from Elasticsearch.
     * <p>
     * For example, to create SearchRequest limited to an index `logs`:
     * <pre>{@code
     * builder.searchRequestFn(() -> new SearchRequest("logs"))
     * }</pre>
     *
     * This parameter is required.
     *
     * @param searchRequestFn search request supplier function
     */
    @Nonnull
    public ElasticSourceBuilder<T> searchRequestFn(@Nonnull SupplierEx<SearchRequest> searchRequestFn) {
        this.searchRequestFn = checkSerializable(searchRequestFn, "searchRequestFn");
        return this;
    }

    /**
     * Set the function to map SearchHit to a pipeline item
     * <p>
     * For example, to map a SearchHit to a value of a field `productId`:
     * <pre>{@code
     * builder.mapToItemFn(hit -> (String) hit.getSourceAsMap().get("productId"))
     * }</pre>
     *
     * This parameter is required.
     *
     * @param mapToItemFn maps search hits to output items
     */
    @Nonnull
    @SuppressWarnings("unchecked")
    // Cast is safe: mapToItemFn is the only T-typed state and it is replaced right after the cast
    public <T_NEW> ElasticSourceBuilder<T_NEW> mapToItemFn(@Nonnull FunctionEx<? super SearchHit, T_NEW> mapToItemFn) {
        ElasticSourceBuilder<T_NEW> newThis = (ElasticSourceBuilder<T_NEW>) this;
        newThis.mapToItemFn = checkSerializable(mapToItemFn, "mapToItemFn");
        return newThis;
    }

    /**
     * Enable slicing
     * <p>
     * Number of slices is equal to {@code globalParallelism
     * (localParallelism * numberOfNodes)} when only slicing is enabled. When
     * co-located reading is enabled as well then number of slices for
     * particular node is equal to {@code localParallelism}.
     * <p>
     * Use this option to read from multiple shards in parallel. It can
     * also be used on single shard, but it may increase initial latency.
* See Elastic documentation for * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html#sliced-scroll"> * Sliced Scroll</a> for details. */ @Nonnull public ElasticSourceBuilder<T> enableSlicing() { this.slicing = true; return this; } /** * Enable co-located reading * * Jet cluster member must run exactly on the same nodes as Elastic cluster. */ @Nonnull public ElasticSourceBuilder<T> enableCoLocatedReading() { this.coLocatedReading = true; return this; } /** * Set the keepAlive for Elastic search scroll * <p> * The value must be in Elastic time unit format, e.g. 500ms for 500 milliseconds, 30s for 30 seconds, * 5m for 5 minutes. See {@link SearchRequest#scroll(String)}. * * @param scrollKeepAlive keepAlive value, this must be high enough to * process all results from a single scroll, default * value 1m */ @Nonnull public ElasticSourceBuilder<T> scrollKeepAlive(@Nonnull String scrollKeepAlive) { this.scrollKeepAlive = requireNonNull(scrollKeepAlive, scrollKeepAlive); return this; } /** * Number of retries the connector will do in addition to Elastic * client retries * * Elastic client tries to connect to a node only once for each * request. When a request fails the node is marked dead and is * not retried again for the request. This causes problems with * single node clusters or in a situation where whole cluster * becomes unavailable at the same time (e.g. due to a network * issue). * * The initial delay is 2s, increasing by factor of 2 with each retry (4s, 8s, 16s, ..). * * @param retries number of retries, defaults to 5 */ @Nonnull public ElasticSourceBuilder<T> retries(int retries) { if (retries < 0) { throw new IllegalArgumentException("retries must be positive"); } this.retries = retries; return this; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jasper.servlet; import org.apache.jasper.Constants; import org.apache.jasper.JasperException; import org.apache.jasper.compiler.Localizer; import org.apache.jasper.util.ExceptionUtils; import org.apache.tomcat.JarScanType; import org.apache.tomcat.util.descriptor.web.FragmentJarScannerCallback; import org.apache.tomcat.util.descriptor.web.WebXml; import org.apache.tomcat.util.descriptor.web.WebXmlParser; import org.apache.tomcat.util.scan.StandardJarScanFilter; import org.apache.tomcat.util.scan.StandardJarScanner; import javax.servlet.*; import javax.servlet.FilterRegistration.Dynamic; import javax.servlet.descriptor.JspConfigDescriptor; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.net.MalformedURLException; import java.net.URL; import java.util.*; import java.util.concurrent.ConcurrentHashMap; /** * Simple <code>ServletContext</code> implementation without * HTTP-specific methods. * * @author Peter Rossbach (pr@webapp.de) */ public class JspCServletContext implements ServletContext { // ----------------------------------------------------- Instance Variables /** * Servlet context attributes. 
*/ private final Map<String,Object> myAttributes; /** * Servlet context initialization parameters. */ private final ConcurrentHashMap<String,String> myParameters; /** * The log writer we will write log messages to. */ private final PrintWriter myLogWriter; /** * The base URL (document root) for this context. */ private final URL myResourceBaseURL; /** * Merged web.xml for the application. */ private WebXml webXml; private JspConfigDescriptor jspConfigDescriptor; /** * Web application class loader. */ private final ClassLoader loader; // ----------------------------------------------------------- Constructors /** * Create a new instance of this ServletContext implementation. * * @param aLogWriter PrintWriter which is used for <code>log()</code> calls * @param aResourceBaseURL Resource base URL * @param classLoader Class loader for this {@link ServletContext} * @param validate Should a validating parser be used to parse web.xml? * @param blockExternal Should external entities be blocked when parsing * web.xml? 
     * @throws JasperException if the merged web.xml for the application cannot be built
     */
    public JspCServletContext(PrintWriter aLogWriter, URL aResourceBaseURL,
            ClassLoader classLoader, boolean validate, boolean blockExternal)
            throws JasperException {

        myAttributes = new HashMap<>();
        myParameters = new ConcurrentHashMap<>();
        // Record the external-entity blocking choice as an init parameter for downstream XML parsing
        myParameters.put(Constants.XML_BLOCK_EXTERNAL_INIT_PARAM,
                String.valueOf(blockExternal));
        myLogWriter = aLogWriter;
        myResourceBaseURL = aResourceBaseURL;
        this.loader = classLoader;
        this.webXml = buildMergedWebXml(validate, blockExternal);
        jspConfigDescriptor = webXml.getJspConfigDescriptor();
    }

    // Parses the application's web.xml and, unless metadata-complete or an empty
    // absolute-ordering short-circuits it, merges in ordered web fragments.
    private WebXml buildMergedWebXml(boolean validate, boolean blockExternal)
            throws JasperException {
        WebXml webXml = new WebXml();
        WebXmlParser webXmlParser = new WebXmlParser(validate, validate, blockExternal);
        // Use this class's classloader as Ant will have set the TCCL to its own
        webXmlParser.setClassLoader(getClass().getClassLoader());

        try {
            URL url = getResource(
                    org.apache.tomcat.util.descriptor.web.Constants.WEB_XML_LOCATION);
            if (!webXmlParser.parseWebXml(url, webXml, false)) {
                throw new JasperException(Localizer.getMessage("jspc.error.invalidWebXml"));
            }
        } catch (IOException e) {
            throw new JasperException(e);
        }

        // if the application is metadata-complete then we can skip fragment processing
        if (webXml.isMetadataComplete()) {
            return webXml;
        }

        // If an empty absolute ordering element is present, fragment processing
        // may be skipped.
        Set<String> absoluteOrdering = webXml.getAbsoluteOrdering();
        if (absoluteOrdering != null && absoluteOrdering.isEmpty()) {
            return webXml;
        }

        Map<String, WebXml> fragments = scanForFragments(webXmlParser);
        Set<WebXml> orderedFragments = WebXml.orderWebFragments(webXml, fragments, this);

        // JspC is not affected by annotations so skip that processing, proceed to merge
        webXml.merge(orderedFragments);
        return webXml;
    }

    // Scans JARs for web-fragment.xml descriptors and returns them keyed by fragment name
    private Map<String, WebXml> scanForFragments(WebXmlParser webXmlParser) throws JasperException {
        StandardJarScanner scanner = new StandardJarScanner();
        // TODO - enabling this means initializing the classloader first in JspC
        scanner.setScanClassPath(false);
        // TODO - configure filter rules from Ant rather then system properties
        scanner.setJarScanFilter(new StandardJarScanFilter());

        FragmentJarScannerCallback callback =
                new FragmentJarScannerCallback(webXmlParser, false, true);
        scanner.scan(JarScanType.PLUGGABILITY, this, callback);
        if (!callback.isOk()) {
            throw new JasperException(Localizer.getMessage("jspc.error.invalidFragment"));
        }
        return callback.getFragments();
    }


    // --------------------------------------------------------- Public Methods


    /**
     * Return the specified context attribute, if any.
     *
     * @param name Name of the requested attribute
     */
    @Override
    public Object getAttribute(String name) {
        return myAttributes.get(name);
    }


    /**
     * Return an enumeration of context attribute names.
     */
    @Override
    public Enumeration<String> getAttributeNames() {
        return Collections.enumeration(myAttributes.keySet());
    }


    /**
     * Return the servlet context for the specified path.
     * Always null: JspC serves a single context only.
     *
     * @param uripath Server-relative path starting with '/'
     */
    @Override
    public ServletContext getContext(String uripath) {
        return null;
    }


    /**
     * Return the context path.
     */
    @Override
    public String getContextPath() {
        return null;
    }


    /**
     * Return the specified context initialization parameter.
     *
     * @param name Name of the requested parameter
     */
    @Override
    public String getInitParameter(String name) {
        return myParameters.get(name);
    }


    /**
     * Return an enumeration of the names of context initialization
     * parameters.
     */
    @Override
    public Enumeration<String> getInitParameterNames() {
        return myParameters.keys();
    }


    /**
     * Return the Servlet API major version number.
     */
    @Override
    public int getMajorVersion() {
        return 3;
    }


    /**
     * Return the MIME type for the specified filename.
     * Always null: JspC does not resolve MIME types.
     *
     * @param file Filename whose MIME type is requested
     */
    @Override
    public String getMimeType(String file) {
        return null;
    }


    /**
     * Return the Servlet API minor version number.
     */
    @Override
    public int getMinorVersion() {
        return 1;
    }


    /**
     * Return a request dispatcher for the specified servlet name.
     * Always null: request dispatching is not supported by JspC.
     *
     * @param name Name of the requested servlet
     */
    @Override
    public RequestDispatcher getNamedDispatcher(String name) {
        return null;
    }


    /**
     * Return the real path for the specified context-relative
     * virtual path. Only resolvable when the resource base is a
     * local "file" URL; otherwise null.
     *
     * @param path The context-relative virtual path to resolve
     */
    @Override
    public String getRealPath(String path) {
        if (!myResourceBaseURL.getProtocol().equals("file"))
            return (null);
        if (!path.startsWith("/"))
            return (null);
        try {
            // Convert URL separators to the platform's file separator
            return (getResource(path).getFile().replace('/', File.separatorChar));
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            return (null);
        }
    }


    /**
     * Return a request dispatcher for the specified context-relative path.
     * Always null: request dispatching is not supported by JspC.
     *
     * @param path Context-relative path for which to acquire a dispatcher
     */
    @Override
    public RequestDispatcher getRequestDispatcher(String path) {
        return null;
    }


    /**
     * Return a URL object of a resource that is mapped to the
     * specified context-relative path.
* * @param path Context-relative path of the desired resource * * @exception MalformedURLException if the resource path is * not properly formed */ @Override public URL getResource(String path) throws MalformedURLException { if (!path.startsWith("/")) throw new MalformedURLException("Path '" + path + "' does not start with '/'"); URL url = new URL(myResourceBaseURL, path.substring(1)); try (InputStream is = url.openStream()) { } catch (Throwable t) { ExceptionUtils.handleThrowable(t); url = null; } return url; } /** * Return an InputStream allowing access to the resource at the * specified context-relative path. * * @param path Context-relative path of the desired resource */ @Override public InputStream getResourceAsStream(String path) { try { return (getResource(path).openStream()); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); return (null); } } /** * Return the set of resource paths for the "directory" at the * specified context path. * * @param path Context-relative base path */ @Override public Set<String> getResourcePaths(String path) { Set<String> thePaths = new HashSet<>(); if (!path.endsWith("/")) path += "/"; String basePath = getRealPath(path); if (basePath == null) return (thePaths); File theBaseDir = new File(basePath); if (!theBaseDir.exists() || !theBaseDir.isDirectory()) return (thePaths); String theFiles[] = theBaseDir.list(); for (int i = 0; i < theFiles.length; i++) { File testFile = new File(basePath + File.separator + theFiles[i]); if (testFile.isFile()) thePaths.add(path + theFiles[i]); else if (testFile.isDirectory()) thePaths.add(path + theFiles[i] + "/"); } return (thePaths); } /** * Return descriptive information about this server. */ @Override public String getServerInfo() { return ("JspC/ApacheTomcat8"); } /** * Return a null reference for the specified servlet name. 
* * @param name Name of the requested servlet * * @deprecated This method has been deprecated with no replacement */ @Override @Deprecated public Servlet getServlet(String name) throws ServletException { return null; } /** * Return the name of this servlet context. */ @Override public String getServletContextName() { return (getServerInfo()); } /** * Return an empty enumeration of servlet names. * * @deprecated This method has been deprecated with no replacement */ @Override @Deprecated public Enumeration<String> getServletNames() { return (new Vector<String>().elements()); } /** * Return an empty enumeration of servlets. * * @deprecated This method has been deprecated with no replacement */ @Override @Deprecated public Enumeration<Servlet> getServlets() { return (new Vector<Servlet>().elements()); } /** * Log the specified message. * * @param message The message to be logged */ @Override public void log(String message) { myLogWriter.println(message); } /** * Log the specified message and exception. * * @param exception The exception to be logged * @param message The message to be logged * * @deprecated Use log(String,Throwable) instead */ @Override @Deprecated public void log(Exception exception, String message) { log(message, exception); } /** * Log the specified message and exception. * * @param message The message to be logged * @param exception The exception to be logged */ @Override public void log(String message, Throwable exception) { myLogWriter.println(message); exception.printStackTrace(myLogWriter); } /** * Remove the specified context attribute. * * @param name Name of the attribute to remove */ @Override public void removeAttribute(String name) { myAttributes.remove(name); } /** * Set or replace the specified context attribute. 
* * @param name Name of the context attribute to set * @param value Corresponding attribute value */ @Override public void setAttribute(String name, Object value) { myAttributes.put(name, value); } @Override public FilterRegistration.Dynamic addFilter(String filterName, String className) { return null; } @Override public ServletRegistration.Dynamic addServlet(String servletName, String className) { return null; } @Override public Set<SessionTrackingMode> getDefaultSessionTrackingModes() { return EnumSet.noneOf(SessionTrackingMode.class); } @Override public Set<SessionTrackingMode> getEffectiveSessionTrackingModes() { return EnumSet.noneOf(SessionTrackingMode.class); } @Override public SessionCookieConfig getSessionCookieConfig() { return null; } @Override public void setSessionTrackingModes( Set<SessionTrackingMode> sessionTrackingModes) { // Do nothing } @Override public Dynamic addFilter(String filterName, Filter filter) { return null; } @Override public Dynamic addFilter(String filterName, Class<? extends Filter> filterClass) { return null; } @Override public ServletRegistration.Dynamic addServlet(String servletName, Servlet servlet) { return null; } @Override public ServletRegistration.Dynamic addServlet(String servletName, Class<? extends Servlet> servletClass) { return null; } @Override public <T extends Filter> T createFilter(Class<T> c) throws ServletException { return null; } @Override public <T extends Servlet> T createServlet(Class<T> c) throws ServletException { return null; } @Override public FilterRegistration getFilterRegistration(String filterName) { return null; } @Override public ServletRegistration getServletRegistration(String servletName) { return null; } @Override public boolean setInitParameter(String name, String value) { return myParameters.putIfAbsent(name, value) == null; } @Override public void addListener(Class<? 
extends EventListener> listenerClass) { // NOOP } @Override public void addListener(String className) { // NOOP } @Override public <T extends EventListener> void addListener(T t) { // NOOP } @Override public <T extends EventListener> T createListener(Class<T> c) throws ServletException { return null; } @Override public void declareRoles(String... roleNames) { // NOOP } @Override public ClassLoader getClassLoader() { return loader; } @Override public int getEffectiveMajorVersion() { return webXml.getMajorVersion(); } @Override public int getEffectiveMinorVersion() { return webXml.getMinorVersion(); } @Override public Map<String, ? extends FilterRegistration> getFilterRegistrations() { return null; } @Override public JspConfigDescriptor getJspConfigDescriptor() { return jspConfigDescriptor; } @Override public Map<String, ? extends ServletRegistration> getServletRegistrations() { return null; } @Override public String getVirtualServerName() { return null; } }
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.siddhi.core.query.join;

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.query.output.callback.QueryCallback;
import io.siddhi.core.stream.input.InputHandler;
import io.siddhi.core.util.EventPrinter;
import io.siddhi.core.util.SiddhiTestHelper;
import io.siddhi.query.api.exception.SiddhiAppValidationException;
import org.apache.log4j.Logger;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests for Siddhi stream join queries.
 */
public class JoinTestCase {
    private static final Logger log = Logger.getLogger(JoinTestCase.class);
    // Shared counters/flags, reset by init() before every test method
    private AtomicInteger inEventCount;
    private AtomicInteger removeEventCount;
    private boolean eventArrived;

    @BeforeMethod
    public void init() {
        inEventCount = new AtomicInteger(0);
        removeEventCount = new AtomicInteger(0);
        eventArrived = false;
    }

    /**
     * Joins two different streams over 1-second windows using fully
     * qualified stream names in the ON / SELECT clauses.
     */
    @Test
    public void joinTest1() throws InterruptedException {
        log.info("Join test1");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream cseEventStream (symbol string, price float, volume int); " +
                "define stream twitterStream (user string, tweet string, company string); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(1 sec) join twitterStream#window.time(1 sec) " +
                "on cseEventStream.symbol== twitterStream.company " +
                "select cseEventStream.symbol as symbol, twitterStream.tweet, cseEventStream.price " +
                "insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        try {
            siddhiAppRuntime.addCallback("query1", new QueryCallback() {
                @Override
                public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
                    EventPrinter.print(timestamp, inEvents, removeEvents);
                    if (inEvents != null) {
                        inEventCount.addAndGet(inEvents.length);
                    }
                    if (removeEvents != null) {
                        removeEventCount.addAndGet(removeEvents.length);
                    }
                    eventArrived = true;
                }
            });

            InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
            InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream");
            siddhiAppRuntime.start();
            cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100});
            twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"});
            cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100});
            Thread.sleep(500);
            cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100});
            // NOTE(review): these waits use a 6s timeout while the otherwise
            // identical joinTest2 uses 60s — confirm whether this is intended.
            SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 6000);
            SiddhiTestHelper.waitForEvents(100, 2, removeEventCount, 6000);
            AssertJUnit.assertEquals(2, inEventCount.get());
            AssertJUnit.assertEquals(2, removeEventCount.get());
            AssertJUnit.assertTrue(eventArrived);
        } finally {
            siddhiAppRuntime.shutdown();
        }
    }

    /**
     * Same join as joinTest1 but using stream aliases (as a / as b)
     * in the ON / SELECT clauses.
     */
    @Test
    public void joinTest2() throws InterruptedException {
        log.info("Join test2");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream cseEventStream (symbol string, price float, volume int); " +
                "define stream twitterStream (user string, tweet string, company string); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(1 sec) as a join twitterStream#window.time(1 sec) as b " +
                "on a.symbol== b.company " +
                "select a.symbol as symbol, b.tweet, a.price " +
                "insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        try {
            siddhiAppRuntime.addCallback("query1", new QueryCallback() {
                @Override
                public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
                    EventPrinter.print(timestamp, inEvents, removeEvents);
                    if (inEvents != null) {
                        inEventCount.addAndGet(inEvents.length);
                    }
                    if (removeEvents != null) {
                        removeEventCount.addAndGet(removeEvents.length);
                    }
                    eventArrived = true;
                }
            });

            InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
            InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream");
            siddhiAppRuntime.start();
            cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100});
            twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"});
            cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100});
            Thread.sleep(500);
            cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100});
            SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 60000);
            SiddhiTestHelper.waitForEvents(100, 2, removeEventCount, 60000);
            AssertJUnit.assertEquals(2, inEventCount.get());
            AssertJUnit.assertEquals(2, removeEventCount.get());
            AssertJUnit.assertTrue(eventArrived);
        } finally {
            siddhiAppRuntime.shutdown();
        }
    }

    /**
     * Self-join of a single stream over 500 ms windows, events sent one by one.
     */
    @Test
    public void joinTest3() throws InterruptedException {
        log.info("Join test3");
        SiddhiManager siddhiManager = new SiddhiManager();

        String streams = "" +
                "define stream cseEventStream (symbol string, price float, volume int); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(500 milliseconds) as a " +
                "join cseEventStream#window.time(500 milliseconds) as b " +
                "on a.symbol== b.symbol " +
                "select a.symbol as symbol, a.price as priceA, b.price as priceB " +
                "insert all events into outputStream ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        try {
            siddhiAppRuntime.addCallback("query1",
new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); if (inEvents != null) { inEventCount.addAndGet(inEvents.length); } if (removeEvents != null) { removeEventCount.getAndAdd(removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 60000); SiddhiTestHelper.waitForEvents(100, 2, removeEventCount, 60000); AssertJUnit.assertEquals(2, inEventCount.get()); AssertJUnit.assertEquals(2, removeEventCount.get()); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest3_1() throws InterruptedException { log.info("Join test3"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.time(500 milliseconds) as a " + "join cseEventStream#window.time(500 milliseconds) as b " + "on a.symbol== b.symbol " + "select a.symbol as symbol, a.price as priceA, b.price as priceB " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); if (inEvents != null) { inEventCount.addAndGet(inEvents.length); } if (removeEvents != null) { removeEventCount.getAndAdd(removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); 
siddhiAppRuntime.start(); long timestamp = System.currentTimeMillis(); Event[] events = new Event[]{new Event(timestamp, new Object[]{"IBM", 75.6f, 100}), new Event(timestamp, new Object[]{"WSO2", 57.6f, 100})}; cseEventStreamHandler.send(events); SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 60000); SiddhiTestHelper.waitForEvents(100, 2, removeEventCount, 60000); AssertJUnit.assertEquals(2, inEventCount.get()); AssertJUnit.assertEquals(2, removeEventCount.get()); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest4() throws InterruptedException { log.info("Join test4"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.time(2 sec) join twitterStream#window.time(2 sec) " + "on cseEventStream.symbol== twitterStream.company " + "select cseEventStream.symbol as symbol, twitterStream.tweet, cseEventStream.price " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { org.testng.AssertJUnit.assertTrue("IBM".equals(event.getData(0)) || "WSO2".equals(event.getData(0))); } inEventCount.addAndGet(inEvents.length); } if (removeEvents != null) { for (Event event : removeEvents) { org.testng.AssertJUnit.assertTrue("IBM".equals(event.getData(0)) || "WSO2".equals(event.getData(0))); } removeEventCount.getAndAdd(removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = 
siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(1000); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 60000); SiddhiTestHelper.waitForEvents(100, 2, removeEventCount, 60000); AssertJUnit.assertEquals(2, inEventCount.get()); AssertJUnit.assertEquals(2, removeEventCount.get()); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest5() throws InterruptedException { log.info("Join test5"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.length(1) join twitterStream#window.length(1) " + "select cseEventStream.symbol as symbol, twitterStream.tweet, cseEventStream.price " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); 
cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); Thread.sleep(500); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test(expectedExceptions = SiddhiAppValidationException.class) public void joinTest6() throws InterruptedException { log.info("Join test6"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, symbol string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream join twitterStream " + "select symbol, twitterStream.tweet, cseEventStream.price " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); siddhiAppRuntime.shutdown(); } @Test(expectedExceptions = SiddhiAppValidationException.class) public void joinTest7() throws InterruptedException { log.info("Join test7"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, symbol string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream as a join twitterStream as b " + "select a.symbol, twitterStream.tweet, a.price " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); siddhiAppRuntime.shutdown(); } @Test public void joinTest8() throws InterruptedException { log.info("Join test8"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.length(1) join 
twitterStream#window.length(1) " + "select cseEventStream.symbol as symbol, tweet, price " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); Thread.sleep(500); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest9() throws InterruptedException { log.info("Join test9"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream join twitterStream " + "select count() as events, symbol " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = 
siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); Thread.sleep(500); AssertJUnit.assertFalse(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest10() throws InterruptedException { log.info("Join test10"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream join twitterStream#window.length(1) " + "select count() as events, symbol " + "insert into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); if (inEvents != null) { inEventCount.getAndAdd(inEvents.length); } if (removeEvents != null) { removeEventCount.getAndAdd(removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 60000); AssertJUnit.assertEquals("inEventCount", 2, inEventCount.get()); 
AssertJUnit.assertEquals("removeEventCount", 0, removeEventCount.get()); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest11() throws InterruptedException { log.info("Join test11"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream unidirectional join twitterStream#window.length(1) " + "select count() as events, symbol, tweet " + "insert all events into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); if (inEvents != null) { inEventCount.getAndAdd(inEvents.length); } if (removeEvents != null) { removeEventCount.getAndAdd(removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); SiddhiTestHelper.waitForEvents(100, 2, inEventCount, 60000); SiddhiTestHelper.waitForEvents(100, 2, removeEventCount, 60000); AssertJUnit.assertEquals("inEventCount", 2, inEventCount.get()); AssertJUnit.assertEquals("removeEventCount", 2, removeEventCount.get()); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest12() throws 
InterruptedException { log.info("Join test12"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.time(1 sec) join twitterStream#window.time(1 sec) " + "on cseEventStream.symbol== twitterStream.company " + "select * " + "insert into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); if (inEvents != null) { inEventCount.getAndAdd(inEvents.length); } if (removeEvents != null) { removeEventCount.getAndAdd(removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = siddhiAppRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = siddhiAppRuntime.getInputHandler("twitterStream"); siddhiAppRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); SiddhiTestHelper.waitForEvents(100, 1, inEventCount, 60000); AssertJUnit.assertEquals("inEventCount", 1, inEventCount.get()); AssertJUnit.assertEquals("removeEventCount", 0, removeEventCount.get()); AssertJUnit.assertTrue(eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test(expectedExceptions = SiddhiAppValidationException.class) public void joinTest13() throws InterruptedException { log.info("Join test13"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, symbol string); "; String query = "" + "@info(name = 
'query1') " + "from cseEventStream#window.time(1 sec) join twitterStream#window.time(1 sec) " + "on cseEventStream.symbol== twitterStream.symbol " + "select * " + "insert into outputStream ;"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { siddhiAppRuntime.start(); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest14() throws InterruptedException { log.info("Join test14"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream order (billnum string, custid string, items string, dow string, timestamp long); " + "define table dow_items (custid string, dow string, item string) ; " + "define stream dow_items_stream (custid string, dow string, item string); "; String query = "" + "@info(name = 'query1') " + "from order join dow_items \n" + "on order.custid == dow_items.custid \n" + "select dow_items.item\n" + "having order.items == \"item1\" \n" + "insert into recommendationStream ;" + "@info(name = 'query2') " + "from dow_items_stream " + "insert into dow_items ;" + "" + ""; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler orderStream = siddhiAppRuntime.getInputHandler("order"); InputHandler itemsStream = siddhiAppRuntime.getInputHandler("dow_items_stream"); siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); siddhiAppRuntime.start(); Thread.sleep(100); itemsStream.send(new Object[]{"cust1", "bill1", "item1"}); orderStream.send(new Object[]{"bill1", "cust1", "item1", "dow1", 12323232L}); Thread.sleep(100); AssertJUnit.assertEquals("Event Arrived", true, eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest15() throws InterruptedException { log.info("Join test15"); SiddhiManager siddhiManager = new 
SiddhiManager(); String streams = "" + "define stream order (billnum string, custid string, items string, dow string, timestamp long); " + "define table dow_items (custid string, dow string, item string) ; " + "define stream dow_items_stream (custid string, dow string, item string); "; String query = "" + "@info(name = 'query1') " + "from order join dow_items \n" + "on order.custid == dow_items.custid \n" + "select dow_items.item\n" + "having dow_items.item == \"item1\" \n" + "insert into recommendationStream ;" + "@info(name = 'query2') " + "from dow_items_stream " + "insert into dow_items ;" + "" + ""; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler orderStream = siddhiAppRuntime.getInputHandler("order"); InputHandler itemsStream = siddhiAppRuntime.getInputHandler("dow_items_stream"); siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); siddhiAppRuntime.start(); Thread.sleep(100); itemsStream.send(new Object[]{"cust1", "bill1", "item1"}); orderStream.send(new Object[]{"bill1", "cust1", "item1", "dow1", 12323232L}); Thread.sleep(100); AssertJUnit.assertEquals("Event Arrived", true, eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest16() throws InterruptedException { log.info("Join test16"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream order (billnum string, custid string, items string, dow string, timestamp long); " + "define table dow_items (custid string, dow string, item string) ; " + "define stream dow_items_stream (custid string, dow string, item string); "; String query = "" + "@info(name = 'query1') " + "from order join dow_items \n" + "on order.custid == dow_items.custid \n" + "select order.custid\n" + "having dow_items.item == \"item1\" \n" + 
"insert into recommendationStream ;" + "@info(name = 'query2') " + "from dow_items_stream " + "insert into dow_items ;" + "" + ""; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler orderStream = siddhiAppRuntime.getInputHandler("order"); InputHandler itemsStream = siddhiAppRuntime.getInputHandler("dow_items_stream"); siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); siddhiAppRuntime.start(); Thread.sleep(100); itemsStream.send(new Object[]{"cust1", "bill1", "item1"}); orderStream.send(new Object[]{"bill1", "cust1", "item1", "dow1", 12323232L}); Thread.sleep(100); AssertJUnit.assertEquals("Event Arrived", true, eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest17() throws InterruptedException { log.info("Join test17"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream order (billnum string, custid string, items string, dow string, timestamp long); " + "define table dow_items (custid string, dow string, item string) ; " + "define stream dow_items_stream (custid string, dow string, item string); "; String query = "" + "@info(name = 'query1') " + "from order join dow_items \n" + "select dow_items.custid\n" + "having order.items == \"item1\" \n" + "insert into recommendationStream ;" + "@info(name = 'query2') " + "from dow_items_stream " + "insert into dow_items ;" + "" + ""; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler orderStream = siddhiAppRuntime.getInputHandler("order"); InputHandler itemsStream = siddhiAppRuntime.getInputHandler("dow_items_stream"); siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] 
removeEvents) { EventPrinter.print(timestamp, inEvents, removeEvents); eventArrived = true; } }); siddhiAppRuntime.start(); Thread.sleep(100); itemsStream.send(new Object[]{"cust1", "bill1", "item1"}); orderStream.send(new Object[]{"bill1", "cust1", "item1", "dow1", 12323232L}); Thread.sleep(100); AssertJUnit.assertEquals("Event Arrived", true, eventArrived); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest18() throws InterruptedException { log.info("Join test18"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream dataIn (id int, data string); " + "define stream countOutA (count long); " + "define stream countOutB (count long); " + "define stream deleteIn (id int); " + "define table dataTable (id int, data string); " + "define table dow_items (custid string, dow string, item string) ; " + "define stream dow_items_stream (custid string, dow string, item string); "; String query = "" + "from dataIn\n" + "insert into dataTable;\n" + "" + "from deleteIn \n" + "delete dataTable\n" + " on dataTable.id == id;\n" + "" + "from deleteIn \n" + "select id " + "insert into countIn;\n" + "" + "@info(name = 'query1') " + "from countIn#window.length(0) as c join dataTable as d\n" + "select count() as count\n" + "insert into countOutA;\n" + "\n" + "" + "@info(name = 'query2') " + "from countIn#window.length(1) as c join dataTable as d\n" + "select count() as count\n" + "insert into countOutB;\n"; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler dataIn = siddhiAppRuntime.getInputHandler("dataIn"); InputHandler countIn = siddhiAppRuntime.getInputHandler("countIn"); InputHandler deleteIn = siddhiAppRuntime.getInputHandler("deleteIn"); siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { inEventCount.incrementAndGet(); if (inEventCount.get() == 1) { 
EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 4L); } else { EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 3L); } } }); siddhiAppRuntime.addCallback("query2", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { removeEventCount.incrementAndGet(); if (removeEventCount.get() == 1) { EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 4L); } else { EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 4L); } } }); siddhiAppRuntime.start(); Thread.sleep(100); dataIn.send(new Object[]{1, "item1"}); dataIn.send(new Object[]{2, "item2"}); dataIn.send(new Object[]{3, "item3"}); dataIn.send(new Object[]{4, "item4"}); countIn.send(new Object[]{1}); deleteIn.send(new Object[]{1}); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest19() throws InterruptedException { log.info("Join test19"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream dataIn (id int, data string); " + "define stream countIn (id int); " + "define stream deleteIn (id int); " + "define table dataTable (id int, data string); "; String query = "" + "from dataIn\n" + "insert into dataTable;\n" + "" + "from deleteIn \n" + "delete dataTable\n" + " on dataTable.id == id;\n" + "" + "@info(name = 'query1') " + "from countIn as c join dataTable as d\n" + "select count() as count\n" + "insert into countOut; "; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler dataIn = siddhiAppRuntime.getInputHandler("dataIn"); InputHandler countIn = siddhiAppRuntime.getInputHandler("countIn"); InputHandler deleteIn = siddhiAppRuntime.getInputHandler("deleteIn"); siddhiAppRuntime.addCallback("query1", new QueryCallback() 
{ @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { inEventCount.incrementAndGet(); if (inEventCount.get() == 1) { EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 3L); } else { EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 2L); } } }); siddhiAppRuntime.start(); Thread.sleep(100); dataIn.send(new Object[]{1, "item1"}); dataIn.send(new Object[]{2, "item2"}); dataIn.send(new Object[]{3, "item3"}); countIn.send(new Object[]{1}); deleteIn.send(new Object[]{1}); countIn.send(new Object[]{1}); } finally { siddhiAppRuntime.shutdown(); } } @Test public void joinTest20() throws InterruptedException { log.info("Join test20"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream dataIn (id int, data string); " + "define stream countIn (id int); " + "define stream deleteIn (id int); " + "define table dataTable (id int, data string); "; String query = "" + "from dataIn\n" + "insert into dataTable;\n" + "" + "from deleteIn \n" + "delete dataTable\n" + " on dataTable.id == id;\n" + "" + "@info(name = 'query1') " + "from countIn as c left outer join dataTable as d\n" + "on d.data == 'abc'\n" + "select count() as count\n" + "insert into countOut; "; SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query); try { InputHandler dataIn = siddhiAppRuntime.getInputHandler("dataIn"); InputHandler countIn = siddhiAppRuntime.getInputHandler("countIn"); InputHandler deleteIn = siddhiAppRuntime.getInputHandler("deleteIn"); siddhiAppRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) { inEventCount.incrementAndGet(); if (inEventCount.get() == 1) { EventPrinter.print(timestamp, inEvents, removeEvents); AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 3L); } else { 
EventPrinter.print(timestamp, inEvents, removeEvents);
// After the delete, the left-outer join matches one fewer row.
AssertJUnit.assertTrue((Long) inEvents[0].getData(0) == 2L);
}
}
});
siddhiAppRuntime.start();
Thread.sleep(100);

dataIn.send(new Object[]{1, "abc"});
dataIn.send(new Object[]{2, "abc"});
dataIn.send(new Object[]{3, "abc"});
countIn.send(new Object[]{1});
deleteIn.send(new Object[]{1});
countIn.send(new Object[]{1});
} finally {
    siddhiAppRuntime.shutdown();
}
}
}
/* * NOTE: This copyright does *not* cover user programs that use HQ * program services by normal system calls through the application * program interfaces provided as part of the Hyperic Plug-in Development * Kit or the Hyperic Client Development Kit - this is merely considered * normal use of the program, and does *not* fall under the heading of * "derived work". * * Copyright (C) [2004, 2005, 2006], Hyperic, Inc. * This file is part of HQ. * * HQ is free software; you can redistribute it and/or modify * it under the terms version 2 of the GNU General Public License as * published by the Free Software Foundation. This program is distributed * in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA. 
*/
package org.hyperic.hq.plugin.websphere;

import java.io.File;
import java.util.Arrays;
import java.util.List;

import org.hyperic.hq.product.PluginException;
import org.hyperic.hq.product.PluginManager;
import org.hyperic.hq.product.ServerControlPlugin;
import org.hyperic.util.InetPortPinger;
import org.hyperic.util.config.ConfigResponse;

/**
 * Control plugin for WebSphere servers.  Exposes start/stop/restart
 * actions that delegate to per-action control scripts
 * (configured as &lt;program&gt;.&lt;action&gt;) and detects whether the
 * server is up by pinging the configured admin host/port.
 */
public class WebsphereControlPlugin
    extends ServerControlPlugin {

    // Supported control actions, in the order reported by getActions().
    private static final String[] ACTIONS = {"start", "stop", "restart"};

    private static final List COMMANDS = Arrays.asList(ACTIONS);

    // Timeout in milliseconds for the admin-port TCP ping.
    private static final int PING_TIMEOUT = 30000;

    private InetPortPinger portPinger;
    private String[] ctlArgs = new String[0];

    protected String getAdminHost() {
        return getConfig().getValue(WebsphereProductPlugin.PROP_ADMIN_HOST);
    }

    protected String getAdminPort() {
        return getConfig().getValue(WebsphereProductPlugin.PROP_ADMIN_PORT);
    }

    protected String getServerNode() {
        return getConfig().getValue(WebsphereProductPlugin.PROP_SERVER_NODE);
    }

    protected String getServerName() {
        return getConfig().getValue(WebsphereProductPlugin.PROP_SERVER_NAME);
    }

    protected String getUsername() {
        return getConfig().getValue(WebsphereProductPlugin.PROP_USERNAME);
    }

    protected String getPassword() {
        return getConfig().getValue(WebsphereProductPlugin.PROP_PASSWORD);
    }

    //for isRunning()
    protected String getRunningHost() {
        return getAdminHost();
    }

    protected String getRunningPort() {
        return getAdminPort();
    }

    /**
     * Builds the default control-script argument list (server name plus
     * credentials when both username and password are configured) and
     * sets up the admin-port pinger used by isRunning().
     *
     * @throws PluginException propagated from the superclass configure.
     */
    public void configure(ConfigResponse config)
        throws PluginException {

        super.configure(config);

        String username = getUsername();
        String password = getPassword();

        // Pass credentials to the control script only when both are set.
        if ((username != null) && (password != null)) {
            this.ctlArgs = new String[] {
                getServerName(),
                "-username", username,
                "-password", password
            };
        }
        else {
            this.ctlArgs = new String[] { getServerName() };
        }

        try {
            int iport = Integer.parseInt(getRunningPort());
            this.portPinger =
                new InetPortPinger(getRunningHost(), iport, PING_TIMEOUT);
        } catch (NumberFormatException e) {
            //unlikely: already validated by ConfigSchema
        }
    }

    //XXX websphere has a status port we should try first
    //com.ibm.ws.management.tools.WsServerLauncher falls back
    //to similar code as below; it'll do for now.
    protected boolean isRunning() {
        if (this.portPinger == null) {
            return false; //unlikely
        }
        return this.portPinger.check();
    }

    /**
     * Sanity check before running an action: refuse to start a server
     * that is already up, or to stop one that is already down.
     *
     * @return RESULT_SUCCESS when the action may proceed,
     *         RESULT_FAILURE (with a message) otherwise.
     */
    protected int checkIsRunning(String action) {
        if (action.equals("start")) {
            if (isRunning()) {
                setMessage("Server already running on port " + getRunningPort());
                return RESULT_FAILURE;
            }
        }
        else if (action.equals("stop")) {
            if (!isRunning()) {
                setMessage("No server running on port " + getRunningPort());
                return RESULT_FAILURE;
            }
        }
        return RESULT_SUCCESS;
    }

    public List getActions() {
        return COMMANDS;
    }

    /** Dispatches a control action to the matching private handler. */
    public void doAction(String action, String[] args)
        throws PluginException {

        if (action.equals("start")) {
            setResult(start(args));
        }
        else if (action.equals("stop")) {
            setResult(stop(args));
        }
        else if (action.equals("restart")) {
            setResult(restart(args));
        }
        else {
            // Shouldn't happen
            throw new PluginException("Action '" + action + "' not supported");
        }
    }

    /**
     * Runs the control script configured for the given action, falling
     * back to the default argument list built in configure() when no
     * explicit args were supplied.
     */
    protected int doCommand(String action, String[] args) {
        // Each action maps to its own script: <program>.<action>
        String script = getConfig().getValue(PROP_PROGRAM + "." + action);
        setControlProgram(script);
        getLog().debug("command script=" + script);
        if ((args == null) || (args.length == 0)) {
            args = this.ctlArgs;
        }
        return super.doCommand(script, args);
    }

    // Define control methods
    private int start(String[] args) {
        int res = doCommand("start", args);
        if (res == RESULT_SUCCESS) {
            waitForState(STATE_STARTED);
        }
        return res;
    }

    private int stop(String[] args) {
        int res = doCommand("stop", args);
        if (res == RESULT_SUCCESS) {
            waitForState(STATE_STOPPED);
        }
        return res;
    }

    private int restart(String[] args) {
        int res = stop(args);
        if (res != RESULT_SUCCESS) {
            return res;
        }
        return start(args);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.recovery;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryState.*;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.*;

/**
 * Tests for {@link RecoveryState} and its sub-components (Timer, Index,
 * Translog, VerifyIndex, File): accounting invariants, serialization
 * round-trips (often performed concurrently with mutation), and the
 * enforcement of the legal recovery stage sequence.
 */
public class RecoveryStateTests extends ESTestCase {

    /**
     * Background thread that repeatedly serializes {@code source} and
     * deserializes it again until {@code shouldStop} is set, capturing the
     * last deserialized copy and any error. Used both to exercise concurrent
     * read-while-write serialization and to obtain a snapshot for comparison.
     */
    abstract class Streamer<T extends Streamable> extends Thread {
        // Last successfully deserialized copy of source.
        private T lastRead;
        final private AtomicBoolean shouldStop;
        final private T source;
        // First throwable raised in run(); rethrown by lastRead().
        final AtomicReference<Throwable> error = new AtomicReference<>();
        // NOTE(review): captured but not applied to the streams in
        // serializeDeserialize() — the round-trip always uses the
        // default wire version.
        final Version streamVersion;

        Streamer(AtomicBoolean shouldStop, T source) {
            this(shouldStop, source, randomVersion(random()));
        }

        Streamer(AtomicBoolean shouldStop, T source, Version streamVersion) {
            this.shouldStop = shouldStop;
            this.source = source;
            this.streamVersion = streamVersion;
        }

        /** Returns the last deserialized copy, rethrowing any background error. */
        public T lastRead() throws Throwable {
            Throwable t = error.get();
            if (t != null) {
                throw t;
            }
            return lastRead;
        }

        /** One full write-then-read round trip of {@code source}. */
        public T serializeDeserialize() throws IOException {
            BytesStreamOutput out = new BytesStreamOutput();
            source.writeTo(out);
            out.close();
            StreamInput in = StreamInput.wrap(out.bytes());
            T obj = deserialize(in);
            lastRead = obj;
            return obj;
        }

        protected T deserialize(StreamInput in) throws IOException {
            T obj = createObj();
            obj.readFrom(in);
            return obj;
        }

        /** Factory for an empty instance to deserialize into. */
        abstract T createObj();

        @Override
        public void run() {
            try {
                while (shouldStop.get() == false) {
                    serializeDeserialize();
                }
                // One final round trip after the stop flag, so lastRead
                // reflects the source's final state.
                serializeDeserialize();
            } catch (Throwable t) {
                error.set(t);
            }
        }
    }

    /**
     * Timer semantics (start/stop/reset and elapsed time), checked on a
     * randomly chosen Timer subclass, including serialization snapshots.
     */
    public void testTimers() throws Throwable {
        final Timer timer;
        Streamer<Timer> streamer;
        AtomicBoolean stop = new AtomicBoolean();
        // Pick one of the concrete Timer subclasses at random; the timer
        // contract must hold for all of them.
        if (randomBoolean()) {
            timer = new Timer();
            streamer = new Streamer<Timer>(stop, timer) {
                @Override
                Timer createObj() {
                    return new Timer();
                }
            };
        } else if (randomBoolean()) {
            timer = new Index();
            streamer = new Streamer<Timer>(stop, timer) {
                @Override
                Timer createObj() {
                    return new Index();
                }
            };
        } else if (randomBoolean()) {
            timer = new VerifyIndex();
            streamer = new Streamer<Timer>(stop, timer) {
                @Override
                Timer createObj() {
                    return new VerifyIndex();
                }
            };
        } else {
            timer = new Translog();
            streamer = new Streamer<Timer>(stop, timer) {
                @Override
                Timer createObj() {
                    return new Translog();
                }
            };
        }
        timer.start();
        assertThat(timer.startTime(), greaterThan(0l));
        assertThat(timer.stopTime(), equalTo(0l));
        Timer lastRead = streamer.serializeDeserialize();
        final long time = lastRead.time();
        assertThat(time, lessThanOrEqualTo(timer.time()));
        // A running timer keeps advancing past the serialized snapshot.
        assertBusy(new Runnable() {
            @Override
            public void run() {
                assertThat("timer timer should progress compared to captured one ", time, lessThan(timer.time()));
            }
        });
        // ... while the snapshot itself is frozen.
        assertThat("captured time shouldn't change", lastRead.time(), equalTo(time));
        if (randomBoolean()) {
            timer.stop();
            assertThat(timer.stopTime(), greaterThanOrEqualTo(timer.startTime()));
            assertThat(timer.time(), greaterThan(0l));
            lastRead = streamer.serializeDeserialize();
            assertThat(lastRead.startTime(), equalTo(timer.startTime()));
            assertThat(lastRead.time(), equalTo(timer.time()));
            assertThat(lastRead.stopTime(), equalTo(timer.stopTime()));
        }
        // reset() zeroes everything, both live and through serialization.
        timer.reset();
        assertThat(timer.startTime(), equalTo(0l));
        assertThat(timer.time(), equalTo(0l));
        assertThat(timer.stopTime(), equalTo(0l));
        lastRead = streamer.serializeDeserialize();
        assertThat(lastRead.startTime(), equalTo(0l));
        assertThat(lastRead.time(), equalTo(0l));
        assertThat(lastRead.stopTime(), equalTo(0l));
    }

    /**
     * Index recovery accounting: file/byte totals, reused vs. recovered
     * counts, percentages, and throttling sums — verified while a background
     * Streamer concurrently serializes the Index being mutated.
     */
    public void testIndex() throws Throwable {
        File[] files = new File[randomIntBetween(1, 20)];
        ArrayList<File> filesToRecover = new ArrayList<>();
        long totalFileBytes = 0;
        long totalReusedBytes = 0;
        int totalReused = 0;
        // Build a random mix of reused and to-be-recovered files and track
        // the expected totals independently.
        for (int i = 0; i < files.length; i++) {
            final int fileLength = randomIntBetween(1, 1000);
            final boolean reused = randomBoolean();
            totalFileBytes += fileLength;
            files[i] = new RecoveryState.File("f_" + i, fileLength, reused);
            if (reused) {
                totalReused++;
                totalReusedBytes += fileLength;
            } else {
                filesToRecover.add(files[i]);
            }
        }

        Collections.shuffle(Arrays.asList(files), random());
        final RecoveryState.Index index = new RecoveryState.Index();

        if (randomBoolean()) {
            // initialize with some data and then reset
            index.start();
            for (int i = randomIntBetween(0, 10); i > 0; i--) {
                index.addFileDetail("t_" + i, randomIntBetween(1, 100), randomBoolean());
                if (randomBoolean()) {
                    index.addSourceThrottling(randomIntBetween(0, 20));
                }
                if (randomBoolean()) {
                    index.addTargetThrottling(randomIntBetween(0, 20));
                }
            }
            if (randomBoolean()) {
                index.stop();
            }
            index.reset();
        }

        // before we start we must report 0
        assertThat(index.recoveredFilesPercent(), equalTo((float) 0.0));
        assertThat(index.recoveredBytesPercent(), equalTo((float) 0.0));
        assertThat(index.sourceThrottling().nanos(), equalTo(Index.UNKNOWN));
        assertThat(index.targetThrottling().nanos(), equalTo(Index.UNKNOWN));

        index.start();
        for (File file : files) {
            index.addFileDetail(file.name(), file.length(), file.reused());
        }

        logger.info("testing initial information");
        assertThat(index.totalBytes(), equalTo(totalFileBytes));
        assertThat(index.reusedBytes(), equalTo(totalReusedBytes));
        assertThat(index.totalRecoverBytes(), equalTo(totalFileBytes - totalReusedBytes));
        assertThat(index.totalFileCount(), equalTo(files.length));
        assertThat(index.reusedFileCount(), equalTo(totalReused));
        assertThat(index.totalRecoverFiles(), equalTo(filesToRecover.size()));
        assertThat(index.recoveredFileCount(), equalTo(0));
        assertThat(index.recoveredBytes(), equalTo(0l));
        // With nothing to recover, percentages report 100% immediately.
        assertThat(index.recoveredFilesPercent(), equalTo(filesToRecover.size() == 0 ? 100.0f : 0.0f));
        assertThat(index.recoveredBytesPercent(), equalTo(filesToRecover.size() == 0 ? 100.0f : 0.0f));

        long bytesToRecover = totalFileBytes - totalReusedBytes;
        boolean completeRecovery = bytesToRecover == 0 || randomBoolean();
        if (completeRecovery == false) {
            bytesToRecover = randomIntBetween(1, (int) bytesToRecover);
            logger.info("performing partial recovery ([{}] bytes of [{}])", bytesToRecover, totalFileBytes - totalReusedBytes);
        }
        AtomicBoolean streamShouldStop = new AtomicBoolean();

        // Serialize concurrently with mutation below to catch races.
        Streamer<Index> backgroundReader = new Streamer<RecoveryState.Index>(streamShouldStop, index) {
            @Override
            Index createObj() {
                return new Index();
            }
        };

        backgroundReader.start();

        long recoveredBytes = 0;
        long sourceThrottling = Index.UNKNOWN;
        long targetThrottling = Index.UNKNOWN;
        // Recover random chunks of random files, tracking expected totals.
        while (bytesToRecover > 0) {
            File file = randomFrom(filesToRecover);
            final long toRecover = Math.min(bytesToRecover, randomIntBetween(1, (int) (file.length() - file.recovered())));
            final long throttledOnSource = rarely() ? randomIntBetween(10, 200) : 0;
            index.addSourceThrottling(throttledOnSource);
            if (sourceThrottling == Index.UNKNOWN) {
                sourceThrottling = throttledOnSource;
            } else {
                sourceThrottling += throttledOnSource;
            }
            index.addRecoveredBytesToFile(file.name(), toRecover);
            file.addRecoveredBytes(toRecover);
            final long throttledOnTarget = rarely() ? randomIntBetween(10, 200) : 0;
            if (targetThrottling == Index.UNKNOWN) {
                targetThrottling = throttledOnTarget;
            } else {
                targetThrottling += throttledOnTarget;
            }
            index.addTargetThrottling(throttledOnTarget);
            bytesToRecover -= toRecover;
            recoveredBytes += toRecover;
            if (file.reused() || file.fullyRecovered()) {
                filesToRecover.remove(file);
            }
        }

        if (completeRecovery) {
            assertThat(filesToRecover.size(), equalTo(0));
            index.stop();
            assertThat(index.time(), greaterThanOrEqualTo(0l));
        }

        logger.info("testing serialized information");
        streamShouldStop.set(true);
        backgroundReader.join();
        // The final background snapshot must match the live Index.
        final Index lastRead = backgroundReader.lastRead();
        assertThat(lastRead.fileDetails().toArray(), arrayContainingInAnyOrder(index.fileDetails().toArray()));
        assertThat(lastRead.startTime(), equalTo(index.startTime()));
        if (completeRecovery) {
            assertThat(lastRead.time(), equalTo(index.time()));
        } else {
            assertThat(lastRead.time(), lessThanOrEqualTo(index.time()));
        }
        assertThat(lastRead.stopTime(), equalTo(index.stopTime()));
        assertThat(lastRead.targetThrottling(), equalTo(index.targetThrottling()));
        assertThat(lastRead.sourceThrottling(), equalTo(index.sourceThrottling()));

        logger.info("testing post recovery");
        assertThat(index.totalBytes(), equalTo(totalFileBytes));
        assertThat(index.reusedBytes(), equalTo(totalReusedBytes));
        assertThat(index.totalRecoverBytes(), equalTo(totalFileBytes - totalReusedBytes));
        assertThat(index.totalFileCount(), equalTo(files.length));
        assertThat(index.reusedFileCount(), equalTo(totalReused));
        assertThat(index.totalRecoverFiles(), equalTo(files.length - totalReused));
        assertThat(index.recoveredFileCount(), equalTo(index.totalRecoverFiles() - filesToRecover.size()));
        assertThat(index.recoveredBytes(), equalTo(recoveredBytes));
        assertThat(index.targetThrottling().nanos(), equalTo(targetThrottling));
        assertThat(index.sourceThrottling().nanos(), equalTo(sourceThrottling));
        if (index.totalRecoverFiles() == 0) {
            assertThat((double) index.recoveredFilesPercent(), equalTo(100.0));
            assertThat((double) index.recoveredBytesPercent(), equalTo(100.0));
        } else {
            assertThat((double) index.recoveredFilesPercent(),
                    closeTo(100.0 * index.recoveredFileCount() / index.totalRecoverFiles(), 0.1));
            assertThat((double) index.recoveredBytesPercent(),
                    closeTo(100.0 * index.recoveredBytes() / index.totalRecoverBytes(), 0.1));
        }
    }

    /**
     * Applying stages out of order must throw IllegalStateException, while
     * restarting the sequence from the beginning (a reset) is always legal.
     */
    public void testStageSequenceEnforcement() {
        final DiscoveryNode discoveryNode = new DiscoveryNode("1", DummyTransportAddress.INSTANCE, Version.CURRENT);
        Stage[] stages = Stage.values();
        // Swap two distinct stages to build an illegal order.
        int i = randomIntBetween(0, stages.length - 1);
        int j;
        do {
            j = randomIntBetween(0, stages.length - 1);
        } while (j == i);
        Stage t = stages[i];
        stages[i] = stages[j];
        stages[j] = t;
        try {
            RecoveryState state = new RecoveryState(new ShardId("bla", 0), randomBoolean(), randomFrom(Type.values()),
                    discoveryNode, discoveryNode);
            for (Stage stage : stages) {
                state.setStage(stage);
            }
            fail("succeeded in performing the illegal sequence [" + Strings.arrayToCommaDelimitedString(stages) + "]");
        } catch (IllegalStateException e) {
            // cool
        }

        // but reset should be always possible.
        stages = Stage.values();
        i = randomIntBetween(1, stages.length - 1);
        // Run a prefix of the sequence, then the full sequence from the top.
        ArrayList<Stage> list = new ArrayList<>(Arrays.asList(Arrays.copyOfRange(stages, 0, i)));
        list.addAll(Arrays.asList(stages));
        RecoveryState state = new RecoveryState(new ShardId("bla", 0), randomBoolean(), randomFrom(Type.values()),
                discoveryNode, discoveryNode);
        for (Stage stage : list) {
            state.setStage(stage);
        }

        assertThat(state.getStage(), equalTo(Stage.DONE));
    }

    /**
     * Translog operation accounting (recovered/total counts and percentage)
     * under concurrent serialization, plus stop/reset behavior.
     */
    public void testTranslog() throws Throwable {
        final Translog translog = new Translog();
        AtomicBoolean stop = new AtomicBoolean();
        Streamer<Translog> streamer = new Streamer<Translog>(stop, translog) {
            @Override
            Translog createObj() {
                return new Translog();
            }
        };

        // we don't need to test the time aspect, it's done in the timer test
        translog.start();
        assertThat(translog.recoveredOperations(), equalTo(0));
        assertThat(translog.totalOperations(), equalTo(Translog.UNKNOWN));
        assertThat(translog.totalOperationsOnStart(), equalTo(Translog.UNKNOWN));
        streamer.start();
        // force one
        streamer.serializeDeserialize();
        int ops = 0;
        int totalOps = 0;
        int totalOpsOnStart = randomIntBetween(10, 200);
        translog.totalOperationsOnStart(totalOpsOnStart);
        for (int i = scaledRandomIntBetween(10, 200); i > 0; i--) {
            final int iterationOps = randomIntBetween(1, 10);
            totalOps += iterationOps;
            translog.totalOperations(totalOps);
            assertThat((double) translog.recoveredPercent(), closeTo(100.0 * ops / totalOps, 0.1));
            for (int j = iterationOps; j > 0; j--) {
                ops++;
                translog.incrementRecoveredOperations();
                if (randomBoolean()) {
                    // Decrement/increment must cancel out exactly.
                    translog.decrementRecoveredOperations(1);
                    translog.incrementRecoveredOperations();
                }
            }
            assertThat(translog.recoveredOperations(), equalTo(ops));
            assertThat(translog.totalOperations(), equalTo(totalOps));
            // At the end of each iteration recovered == total, i.e. 100%.
            assertThat(translog.recoveredPercent(), equalTo(100.f));
            // Background snapshots lag the live object but never exceed it.
            assertThat(streamer.lastRead().recoveredOperations(), greaterThanOrEqualTo(0));
            assertThat(streamer.lastRead().recoveredOperations(), lessThanOrEqualTo(ops));
            assertThat(streamer.lastRead().totalOperations(), lessThanOrEqualTo(totalOps));
            assertThat(streamer.lastRead().totalOperationsOnStart(), lessThanOrEqualTo(totalOpsOnStart));
            assertThat(streamer.lastRead().recoveredPercent(), either(greaterThanOrEqualTo(0.f)).or(equalTo(-1.f)));
        }

        boolean stopped = false;
        if (randomBoolean()) {
            translog.stop();
            stopped = true;
        }

        if (randomBoolean()) {
            translog.reset();
            ops = 0;
            totalOps = Translog.UNKNOWN;
            totalOpsOnStart = Translog.UNKNOWN;
            assertThat(translog.recoveredOperations(), equalTo(0));
            assertThat(translog.totalOperationsOnStart(), equalTo(Translog.UNKNOWN));
            assertThat(translog.totalOperations(), equalTo(Translog.UNKNOWN));
        }

        stop.set(true);
        streamer.join();
        // Final snapshot (taken after the stop flag) matches the live object.
        final Translog lastRead = streamer.lastRead();
        assertThat(lastRead.recoveredOperations(), equalTo(ops));
        assertThat(lastRead.totalOperations(), equalTo(totalOps));
        assertThat(lastRead.totalOperationsOnStart(), equalTo(totalOpsOnStart));
        assertThat(lastRead.startTime(), equalTo(translog.startTime()));
        assertThat(lastRead.stopTime(), equalTo(translog.stopTime()));

        if (stopped) {
            assertThat(lastRead.time(), equalTo(translog.time()));
        } else {
            assertThat(lastRead.time(), lessThanOrEqualTo(translog.time()));
        }
    }

    /**
     * VerifyIndex checkIndexTime accounting through set, stop/reset, and
     * serialization round trips.
     */
    public void testStart() throws IOException {
        final VerifyIndex verifyIndex = new VerifyIndex();
        AtomicBoolean stop = new AtomicBoolean();
        Streamer<VerifyIndex> streamer = new Streamer<VerifyIndex>(stop, verifyIndex) {
            @Override
            VerifyIndex createObj() {
                return new VerifyIndex();
            }
        };

        // we don't need to test the time aspect, it's done in the timer test
        verifyIndex.start();
        assertThat(verifyIndex.checkIndexTime(), equalTo(0l));
        // force one
        VerifyIndex lastRead = streamer.serializeDeserialize();
        assertThat(lastRead.checkIndexTime(), equalTo(0l));

        // Pick a non-negative random duration.
        long took = randomLong();
        if (took < 0) {
            took = -took;
            took = Math.max(0l, took);
        }
        verifyIndex.checkIndexTime(took);
        assertThat(verifyIndex.checkIndexTime(), equalTo(took));

        boolean stopped = false;
        if (randomBoolean()) {
            verifyIndex.stop();
            stopped = true;
        }

        if (randomBoolean()) {
            verifyIndex.reset();
            took = 0;
            assertThat(verifyIndex.checkIndexTime(), equalTo(took));
        }

        lastRead = streamer.serializeDeserialize();
        assertThat(lastRead.checkIndexTime(), equalTo(took));
        assertThat(lastRead.startTime(), equalTo(verifyIndex.startTime()));
        assertThat(lastRead.stopTime(), equalTo(verifyIndex.stopTime()));

        if (stopped) {
            assertThat(lastRead.time(), equalTo(verifyIndex.time()));
        } else {
            assertThat(lastRead.time(), lessThanOrEqualTo(verifyIndex.time()));
        }
    }

    /**
     * Regression test: serializing an Index while another thread adds file
     * details must not throw (e.g. ConcurrentModificationException).
     */
    public void testConcurrentModificationIndexFileDetailsMap() throws InterruptedException {
        final Index index = new Index();
        final AtomicBoolean stop = new AtomicBoolean(false);
        Streamer<Index> readWriteIndex = new Streamer<Index>(stop, index) {
            @Override
            Index createObj() {
                return new Index();
            }
        };
        Thread modifyThread = new Thread() {
            @Override
            public void run() {
                for (int i = 0; i < 1000; i++) {
                    index.addFileDetail(randomAsciiOfLength(10), 100, true);
                }
                stop.set(true);
            }
        };
        readWriteIndex.start();
        modifyThread.start();
        modifyThread.join();
        readWriteIndex.join();
        // Any exception in the streamer thread is surfaced here.
        assertThat(readWriteIndex.error.get(), equalTo(null));
    }

    /**
     * File equals/hashCode contract: equal files share a hash code, and
     * differing hash codes imply inequality.
     */
    public void testFileHashCodeAndEquals() {
        File f = new File("foo", randomIntBetween(0, 100), randomBoolean());
        File anotherFile = new File(f.name(), f.length(), f.reused());
        assertEquals(f, anotherFile);
        assertEquals(f.hashCode(), anotherFile.hashCode());
        int iters = randomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            f = new File("foo", randomIntBetween(0, 100), randomBoolean());
            anotherFile = new File(f.name(), randomIntBetween(0, 100), randomBoolean());
            if (f.equals(anotherFile)) {
                assertEquals(f.hashCode(), anotherFile.hashCode());
            } else if (f.hashCode() != anotherFile.hashCode()) {
                assertFalse(f.equals(anotherFile));
            }
        }
    }
}
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.apple;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;

import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.FakeProcess;
import com.facebook.buck.util.FakeProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

import org.junit.Test;

import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * Tests for {@link XctoolRunTestsStep}: each test configures the step with a
 * combination of logic-test bundles and app-test bundle/host-app pairs,
 * registers the exact xctool command line it expects with a
 * {@link FakeProcessExecutor}, and checks the step's exit code. Because the
 * fake executor only matches the registered {@link ProcessExecutorParams},
 * these tests pin the precise argument order the step must produce.
 */
public class XctoolRunTestsStepTest {

  // Logic tests only: expect "run-tests -logicTest <bundle>" with no
  // -destination argument (none was configured).
  @Test
  public void xctoolCommandWithOnlyLogicTests() throws Exception {
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    XctoolRunTestsStep step = new XctoolRunTestsStep(
        projectFilesystem,
        Paths.get("/path/to/xctool"),
        Optional.<Long>absent(),
        "iphonesimulator",
        Optional.<String>absent(),
        ImmutableSet.of(Paths.get("/path/to/Foo.xctest")),
        ImmutableMap.<Path, Path>of(),
        Paths.get("/path/to/output.json"),
        Optional.<XctoolRunTestsStep.StdoutReadingCallback>absent());
    ProcessExecutorParams xctoolParams =
        ProcessExecutorParams.builder()
            .setCommand(
                ImmutableList.of(
                    "/path/to/xctool",
                    "-reporter",
                    "json-stream",
                    "-sdk",
                    "iphonesimulator",
                    "run-tests",
                    "-logicTest",
                    "/path/to/Foo.xctest"))
            .setDirectory(projectFilesystem.getRootPath().toAbsolutePath().toFile())
            .setRedirectOutput(ProcessBuilder.Redirect.PIPE)
            .build();
    FakeProcess fakeXctoolSuccess = new FakeProcess(0, "", "");
    FakeProcessExecutor processExecutor = new FakeProcessExecutor(
        ImmutableMap.of(xctoolParams, fakeXctoolSuccess));
    ExecutionContext executionContext = TestExecutionContext.newBuilder()
        .setProcessExecutor(processExecutor)
        .setEnvironment(ImmutableMap.<String, String>of())
        .build();
    assertThat(
        step.execute(executionContext),
        equalTo(0));
  }

  // App tests only: expect "-destination <spec>" before "run-tests" and an
  // "-appTest <testBundle>:<hostApp>" pair.
  @Test
  public void xctoolCommandWithOnlyAppTests() throws Exception {
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    XctoolRunTestsStep step = new XctoolRunTestsStep(
        projectFilesystem,
        Paths.get("/path/to/xctool"),
        Optional.<Long>absent(),
        "iphonesimulator",
        Optional.of("name=iPhone 5s"),
        ImmutableSet.<Path>of(),
        ImmutableMap.of(
            Paths.get("/path/to/FooAppTest.xctest"),
            Paths.get("/path/to/Foo.app")),
        Paths.get("/path/to/output.json"),
        Optional.<XctoolRunTestsStep.StdoutReadingCallback>absent());
    ProcessExecutorParams xctoolParams =
        ProcessExecutorParams.builder()
            .setCommand(
                ImmutableList.of(
                    "/path/to/xctool",
                    "-reporter",
                    "json-stream",
                    "-sdk",
                    "iphonesimulator",
                    "-destination",
                    "name=iPhone 5s",
                    "run-tests",
                    "-appTest",
                    "/path/to/FooAppTest.xctest:/path/to/Foo.app"))
            .setDirectory(projectFilesystem.getRootPath().toAbsolutePath().toFile())
            .setRedirectOutput(ProcessBuilder.Redirect.PIPE)
            .build();
    FakeProcess fakeXctoolSuccess = new FakeProcess(0, "", "");
    FakeProcessExecutor processExecutor = new FakeProcessExecutor(
        ImmutableMap.of(xctoolParams, fakeXctoolSuccess));
    ExecutionContext executionContext = TestExecutionContext.newBuilder()
        .setProcessExecutor(processExecutor)
        .setEnvironment(ImmutableMap.<String, String>of())
        .build();
    assertThat(
        step.execute(executionContext),
        equalTo(0));
  }

  // Mixed logic and app tests: logic tests must be listed before app tests.
  @Test
  public void xctoolCommandWithAppAndLogicTests() throws Exception {
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    XctoolRunTestsStep step = new XctoolRunTestsStep(
        projectFilesystem,
        Paths.get("/path/to/xctool"),
        Optional.<Long>absent(),
        "iphonesimulator",
        Optional.of("name=iPhone 5s,OS=8.2"),
        ImmutableSet.of(
            Paths.get("/path/to/FooLogicTest.xctest")),
        ImmutableMap.of(
            Paths.get("/path/to/FooAppTest.xctest"),
            Paths.get("/path/to/Foo.app")),
        Paths.get("/path/to/output.json"),
        Optional.<XctoolRunTestsStep.StdoutReadingCallback>absent());
    ProcessExecutorParams xctoolParams =
        ProcessExecutorParams.builder()
            .setCommand(
                ImmutableList.of(
                    "/path/to/xctool",
                    "-reporter",
                    "json-stream",
                    "-sdk",
                    "iphonesimulator",
                    "-destination",
                    "name=iPhone 5s,OS=8.2",
                    "run-tests",
                    "-logicTest",
                    "/path/to/FooLogicTest.xctest",
                    "-appTest",
                    "/path/to/FooAppTest.xctest:/path/to/Foo.app"))
            .setDirectory(projectFilesystem.getRootPath().toAbsolutePath().toFile())
            .setRedirectOutput(ProcessBuilder.Redirect.PIPE)
            .build();
    FakeProcess fakeXctoolSuccess = new FakeProcess(0, "", "");
    FakeProcessExecutor processExecutor = new FakeProcessExecutor(
        ImmutableMap.of(xctoolParams, fakeXctoolSuccess));
    ExecutionContext executionContext = TestExecutionContext.newBuilder()
        .setProcessExecutor(processExecutor)
        .setEnvironment(ImmutableMap.<String, String>of())
        .build();
    assertThat(
        step.execute(executionContext),
        equalTo(0));
  }

  // xctool exits 1 when tests ran but some failed; the step itself must
  // still report success (test failures are reported via the JSON output,
  // not the step's exit code).
  @Test
  public void xctoolCommandWhichReturnsExitCode1DoesNotFailStep() throws Exception {
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    XctoolRunTestsStep step = new XctoolRunTestsStep(
        projectFilesystem,
        Paths.get("/path/to/xctool"),
        Optional.<Long>absent(),
        "iphonesimulator",
        Optional.<String>absent(),
        ImmutableSet.of(Paths.get("/path/to/Foo.xctest")),
        ImmutableMap.<Path, Path>of(),
        Paths.get("/path/to/output.json"),
        Optional.<XctoolRunTestsStep.StdoutReadingCallback>absent());
    ProcessExecutorParams xctoolParams =
        ProcessExecutorParams.builder()
            .setCommand(
                ImmutableList.of(
                    "/path/to/xctool",
                    "-reporter",
                    "json-stream",
                    "-sdk",
                    "iphonesimulator",
                    "run-tests",
                    "-logicTest",
                    "/path/to/Foo.xctest"))
            .setDirectory(projectFilesystem.getRootPath().toAbsolutePath().toFile())
            .setRedirectOutput(ProcessBuilder.Redirect.PIPE)
            .build();
    FakeProcess fakeXctoolSuccess = new FakeProcess(1, "", "");
    FakeProcessExecutor processExecutor = new FakeProcessExecutor(
        ImmutableMap.of(xctoolParams, fakeXctoolSuccess));
    ExecutionContext executionContext = TestExecutionContext.newBuilder()
        .setProcessExecutor(processExecutor)
        .setEnvironment(ImmutableMap.<String, String>of())
        .build();
    assertThat(
        step.execute(executionContext),
        equalTo(0));
  }

  // Any other non-zero exit code (here 400) indicates xctool itself failed,
  // so the step must fail.
  @Test
  public void xctoolCommandWhichReturnsExitCode400FailsStep() throws Exception {
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    XctoolRunTestsStep step = new XctoolRunTestsStep(
        projectFilesystem,
        Paths.get("/path/to/xctool"),
        Optional.<Long>absent(),
        "iphonesimulator",
        Optional.<String>absent(),
        ImmutableSet.of(Paths.get("/path/to/Foo.xctest")),
        ImmutableMap.<Path, Path>of(),
        Paths.get("/path/to/output.json"),
        Optional.<XctoolRunTestsStep.StdoutReadingCallback>absent());
    ProcessExecutorParams xctoolParams =
        ProcessExecutorParams.builder()
            .setCommand(
                ImmutableList.of(
                    "/path/to/xctool",
                    "-reporter",
                    "json-stream",
                    "-sdk",
                    "iphonesimulator",
                    "run-tests",
                    "-logicTest",
                    "/path/to/Foo.xctest"))
            .setDirectory(projectFilesystem.getRootPath().toAbsolutePath().toFile())
            .setRedirectOutput(ProcessBuilder.Redirect.PIPE)
            .build();
    FakeProcess fakeXctoolSuccess = new FakeProcess(400, "", "");
    FakeProcessExecutor processExecutor = new FakeProcessExecutor(
        ImmutableMap.of(xctoolParams, fakeXctoolSuccess));
    ExecutionContext executionContext = TestExecutionContext.newBuilder()
        .setProcessExecutor(processExecutor)
        .setEnvironment(ImmutableMap.<String, String>of())
        .build();
    assertThat(
        step.execute(executionContext),
        not(equalTo(0)));
  }
}
package com.thaiopensource.datatype.xsd;

import com.thaiopensource.datatype.Datatype2;
import com.thaiopensource.util.Localizer;
import org.relaxng.datatype.DatatypeException;
import org.relaxng.datatype.DatatypeStreamingValidator;
import org.relaxng.datatype.ValidationContext;
import org.relaxng.datatype.helpers.StreamingValidatorImpl;

/**
 * Base class for XSD datatype implementations. Splits validation into a
 * lexical check ({@link #lexicallyAllows}) and a value-space check
 * ({@link #getValue}), after first normalizing whitespace according to the
 * datatype's whiteSpace facet (preserve, replace, or collapse).
 */
abstract class DatatypeBase implements Datatype2 {
  /** Returns true if {@code str} is in the datatype's lexical space. */
  abstract boolean lexicallyAllows(String str);

  // whiteSpace facet for this datatype; one of the constants below.
  private final int whiteSpace;

  static final int WHITE_SPACE_PRESERVE = 0;
  static final int WHITE_SPACE_REPLACE = 1;
  static final int WHITE_SPACE_COLLAPSE = 2;

  // Default facet is collapse (the XSD default for most built-in types).
  DatatypeBase() {
    whiteSpace = WHITE_SPACE_COLLAPSE;
  }

  DatatypeBase(int whiteSpace) {
    this.whiteSpace = whiteSpace;
  }

  int getWhiteSpace() {
    return whiteSpace;
  }

  /** True if {@code str} passes both the lexical and the value-space check. */
  public boolean isValid(String str, ValidationContext vc) {
    str = normalizeWhiteSpace(str);
    return lexicallyAllows(str) && allowsValue(str, vc);
  }

  /** Like {@link #isValid} but throws a descriptive exception on failure. */
  public void checkValid(String str, ValidationContext vc) throws DatatypeException {
    str = normalizeWhiteSpace(str);
    checkLexicallyAllows(str);
    getValue(str, vc);
  }

  /** Returns the value object for {@code str}, or null if invalid. */
  public Object createValue(String str, ValidationContext vc) {
    str = normalizeWhiteSpace(str);
    if (!lexicallyAllows(str))
      return null;
    try {
      return getValue(str, vc);
    }
    catch (DatatypeException e) {
      return null;
    }
  }

  /** Applies this datatype's whiteSpace facet to {@code str}. */
  final String normalizeWhiteSpace(String str) {
    switch (whiteSpace) {
    case WHITE_SPACE_COLLAPSE:
      return collapseWhiteSpace(str);
    case WHITE_SPACE_REPLACE:
      return replaceWhiteSpace(str);
    }
    // WHITE_SPACE_PRESERVE: no change.
    return str;
  }

  /** Throws a localized exception if {@code str} fails the lexical check. */
  void checkLexicallyAllows(String str) throws DatatypeException {
    if (!lexicallyAllows(str))
      throw createLexicallyInvalidException();
  }

  String getDescriptionForRestriction() {
    return getLexicalSpaceDescription(getLexicalSpaceKey());
  }

  final String getLexicalSpaceDescription(String key) {
    return localizer().message("lexical_space_" + key);
  }

  /** Message key identifying this datatype's lexical space. */
  abstract String getLexicalSpaceKey();

  DatatypeException createLexicallyInvalidException() {
    return new DatatypeException(localizer().message("lexical_violation",
                                                     getLexicalSpaceDescription(getLexicalSpaceKey())));
  }

  // Requires lexicallyAllows to be true
  boolean allowsValue(String str, ValidationContext vc) {
    try {
      getValue(str, vc);
      return true;
    }
    catch (DatatypeException e) {
      return false;
    }
  }

  /* Requires lexicallyAllows to be true.  Throws DatatypeException if
     value does not satisfy constraints on value space. */
  abstract Object getValue(String str, ValidationContext vc) throws DatatypeException;

  // Subclasses with an ordered value space override this; null = unordered.
  OrderRelation getOrderRelation() {
    return null;
  }

  /* For datatypes that have a length. */
  Measure getMeasure() {
    return null;
  }

  /**
   * Implements whiteSpace="collapse": #xD/#xA/#x9 become spaces, runs of
   * spaces collapse to one, and leading/trailing space is stripped.
   * Returns the original string unchanged when no work is needed
   * (collapseStart returns -1), avoiding any allocation on the fast path.
   */
  static private String collapseWhiteSpace(String s) {
    int i = collapseStart(s);
    if (i < 0)
      return s;
    // Copy the untouched prefix, then process from the first offending char.
    StringBuffer buf = new StringBuffer(s.substring(0, i));
    // Treat a leading space (or a space just before i) as already collapsing
    // so it gets dropped/merged rather than duplicated.
    boolean collapsing = (i == 0 || s.charAt(i - 1) == ' ');
    for (int len = s.length(); i < len; i++) {
      char c = s.charAt(i);
      switch (c) {
      case '\r':
      case '\n':
      case '\t':
      case ' ':
        if (!collapsing) {
          buf.append(' ');
          collapsing = true;
        }
        break;
      default:
        collapsing = false;
        buf.append(c);
        break;
      }
    }
    // Strip a single trailing space left by the loop.
    if (buf.length() > 0 && buf.charAt(buf.length() - 1) == ' ')
      buf.setLength(buf.length() - 1);
    return buf.toString();
  }

  /**
   * Returns the index of the first character that would change under
   * collapse (any #xD/#xA/#x9, or a space that is leading, doubled, or
   * trailing), or -1 if the string is already fully collapsed.
   */
  static private int collapseStart(String s) {
    for (int i = 0, len = s.length(); i < len; i++) {
      switch (s.charAt(i)) {
      case ' ':
        if (i == 0 || s.charAt(i - 1) == ' ' || i == len - 1)
          return i;
        break;
      case '\r':
      case '\n':
      case '\t':
        return i;
      }
    }
    return -1;
  }

  /**
   * Implements whiteSpace="replace": each #xD/#xA/#x9 becomes a single
   * space. Returns the original string unchanged (no allocation) when it
   * contains none of those characters.
   */
  static private String replaceWhiteSpace(String s) {
    int len = s.length();
    for (int i = 0; i < len; i++)
      switch (s.charAt(i)) {
      case '\r':
      case '\n':
      case '\t':
        {
          // First offending char found: copy once and fix up the rest.
          char[] buf = s.toCharArray();
          buf[i] = ' ';
          for (++i; i < len; i++)
            switch (buf[i]) {
            case '\r':
            case '\n':
            case '\t':
              buf[i] = ' ';
            }
          return new String(buf);
        }
      }
    return s;
  }

  // Derived types override this to point at their primitive base type.
  DatatypeBase getPrimitive() {
    return this;
  }

  public boolean isContextDependent() {
    return false;
  }

  public boolean alwaysValid() {
    return false;
  }

  public int getIdType() {
    return ID_TYPE_NULL;
  }

  public int valueHashCode(Object value) {
    return value.hashCode();
  }

  public boolean sameValue(Object value1, Object value2) {
    return value1.equals(value2);
  }

  public DatatypeStreamingValidator createStreamingValidator(ValidationContext vc) {
    return new StreamingValidatorImpl(this, vc);
  }

  protected static Localizer localizer() {
    return DatatypeBuilderImpl.localizer;
  }
}
/* Copyright 2011-2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.Database.AbstractSQLProvider;
import com.google.security.zynamics.binnavi.Database.CConnection;
import com.google.security.zynamics.binnavi.Database.CTableNames;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.PostgreSQLHelpers;
import com.google.security.zynamics.binnavi.disassembly.INaviProject;
import com.google.security.zynamics.binnavi.disassembly.Modules.CModule;

/**
 * This class provides PostgreSQL queries for working with settings.
 */
public final class PostgreSQLSettingsFunctions {
  /**
   * Do not instantiate this class.
   */
  private PostgreSQLSettingsFunctions() {
    // You are not supposed to instantiate this class
  }

  /**
   * Reads a view container setting from the database.
   *
   * @param connection The connection to the database.
   * @param containerId The ID of the view container.
   * @param key The name of the setting to read.
   * @param column The name of the column that identifies the view container ID.
   * @param table The name of the table that identifies the view container table.
   *
   * @return The loaded setting value, or null if no row matches.
   *
   * @throws CouldntLoadDataException Thrown if the setting could not be loaded.
   */
  private static String readSetting(final CConnection connection, final int containerId,
      final String key, final String column, final String table) throws CouldntLoadDataException {
    try {
      // Table and column names come from trusted internal constants; the
      // user-influenced values (key, container ID) are bound as parameters.
      final PreparedStatement statement = connection.getConnection().prepareStatement(
          "select value from " + table + " where name = ? and " + column + " = ?");

      try {
        statement.setString(1, key);
        statement.setInt(2, containerId);

        final ResultSet resultSet = statement.executeQuery();

        try {
          // Return the first matching row; null when the setting is absent.
          while (resultSet.next()) {
            return PostgreSQLHelpers.readString(resultSet, "value");
          }
        } finally {
          resultSet.close();
        }

        return null;
      } finally {
        statement.close();
      }
    } catch (final SQLException exception) {
      throw new CouldntLoadDataException(exception);
    }
  }

  /**
   * Writes a view container setting to the database by deleting any previous
   * value and inserting the new one inside a single transaction.
   *
   * @param connection The connection to the database.
   * @param containerId The ID of the view container.
   * @param key The name of the setting to write.
   * @param value The value of the setting to write.
   * @param table The name of the table that identifies the view container table.
   *
   * @throws CouldntSaveDataException Thrown if the setting could not be written to the database.
   */
  private static void writeSetting(final CConnection connection, final int containerId,
      final String key, final String value, final String table) throws CouldntSaveDataException {
    // Only two settings tables exist; everything that is not the module table
    // is keyed by project_id.
    final String idColumn =
        table.equalsIgnoreCase(CTableNames.MODULE_SETTINGS_TABLE) ? "module_id" : "project_id";

    // Bind key and value as parameters instead of concatenating them into the
    // SQL text: the previous implementation broke on embedded quotes and was
    // an SQL injection risk. Table/column names remain trusted constants.
    final String deleteQuery =
        "DELETE FROM " + table + " WHERE " + idColumn + " = ? AND \"name\" = ?";
    final String insertQuery = "INSERT INTO " + table + " VALUES(?, ?, ?)";

    try {
      PostgreSQLHelpers.beginTransaction(connection);

      final PreparedStatement deleteStatement =
          connection.getConnection().prepareStatement(deleteQuery);
      try {
        deleteStatement.setInt(1, containerId);
        deleteStatement.setString(2, key);
        deleteStatement.executeUpdate();
      } finally {
        deleteStatement.close();
      }

      final PreparedStatement insertStatement =
          connection.getConnection().prepareStatement(insertQuery);
      try {
        insertStatement.setInt(1, containerId);
        insertStatement.setString(2, key);
        // The value column is read back as a string in readSetting, so binding
        // it as a string matches the schema.
        insertStatement.setString(3, value);
        insertStatement.executeUpdate();
      } finally {
        insertStatement.close();
      }

      PostgreSQLHelpers.endTransaction(connection);
    } catch (final SQLException exception) {
      throw new CouldntSaveDataException("E00058: Could not update setting on " + table);
    }
  }

  /**
   * Reads a module setting from the database.
   *
   * @param provider The connection to the database.
   * @param module The module whose setting is read.
   * @param key The name of the setting to read.
   *
   * @return The loaded setting.
   *
   * @throws CouldntLoadDataException Thrown if the setting could not be read.
   */
  public static String readSetting(final AbstractSQLProvider provider, final CModule module,
      final String key) throws CouldntLoadDataException {
    // Null-check the provider for consistency with the write methods.
    Preconditions.checkNotNull(provider, "Provider argument can not be null");
    Preconditions.checkNotNull(module, "IE00534: Module argument can not be null");
    Preconditions.checkNotNull(key, "IE00535: Key argument can not be null");
    Preconditions.checkArgument(module.inSameDatabase(provider),
        "IE00536: Module is not part of this database");

    return readSetting(provider.getConnection(), module.getConfiguration().getId(), key,
        "module_id", CTableNames.MODULE_SETTINGS_TABLE);
  }

  /**
   * Reads a project setting from the database.
   *
   * @param provider The connection to the database.
   * @param project The project whose setting is read.
   * @param key The name of the setting to read.
   *
   * @return The loaded setting.
   *
   * @throws CouldntLoadDataException Thrown if the setting could not be read.
   */
  public static String readSetting(final AbstractSQLProvider provider, final INaviProject project,
      final String key) throws CouldntLoadDataException {
    // Null-check the provider for consistency with the write methods.
    Preconditions.checkNotNull(provider, "Provider argument can not be null");
    Preconditions.checkNotNull(project, "IE00537: Project argument can not be null");
    Preconditions.checkNotNull(key, "IE00538: Key argument can not be null");
    Preconditions.checkArgument(project.inSameDatabase(provider),
        "IE00539: Project is not part of this database");

    return readSetting(provider.getConnection(), project.getConfiguration().getId(), key,
        "project_id", CTableNames.PROJECT_SETTINGS_TABLE);
  }

  /**
   * Writes a module setting to the database.
   *
   * @param provider The connection to the database.
   * @param module The module whose setting is written.
   * @param key Name of the setting to write.
   * @param value Value of the setting to write.
   *
   * @throws CouldntSaveDataException Thrown if the setting could not be written.
   */
  public static void writeSetting(final AbstractSQLProvider provider, final CModule module,
      final String key, final String value) throws CouldntSaveDataException {
    Preconditions.checkNotNull(provider, "IE01999: Provider argument can not be null");
    Preconditions.checkNotNull(module, "IE00540: Module argument can not be null");
    Preconditions.checkNotNull(key, "IE00541: Key argument can not be null");
    Preconditions.checkNotNull(value, "IE02011: Value argument can not be null");
    Preconditions.checkArgument(module.inSameDatabase(provider),
        "IE00542: Module is not part of this database");

    writeSetting(provider.getConnection(), module.getConfiguration().getId(), key, value,
        CTableNames.MODULE_SETTINGS_TABLE);
  }

  /**
   * Writes a project setting to the database.
   *
   * @param provider The connection to the database.
   * @param project The project whose setting is written.
   * @param key Name of the setting to write.
   * @param value Value of the setting to write.
   *
   * @throws CouldntSaveDataException Thrown if the setting could not be written.
   */
  public static void writeSetting(final AbstractSQLProvider provider, final INaviProject project,
      final String key, final String value) throws CouldntSaveDataException {
    Preconditions.checkNotNull(provider, "IE02050: Provider argument can not be null");
    Preconditions.checkNotNull(project, "IE00543: Project argument can not be null");
    Preconditions.checkNotNull(key, "IE00544: Key argument can not be null");
    Preconditions.checkNotNull(value, "IE02082: Value argument can not be null");
    Preconditions.checkArgument(project.inSameDatabase(provider),
        "IE00545: Project is not part of this database");

    writeSetting(provider.getConnection(), project.getConfiguration().getId(), key, value,
        CTableNames.PROJECT_SETTINGS_TABLE);
  }
}
/*
 * Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package master.flame.danmaku.danmaku.model.android;

import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Danmaku;
import master.flame.danmaku.danmaku.model.IDanmakuIterator;
import master.flame.danmaku.danmaku.model.IDanmakus;
import master.flame.danmaku.danmaku.util.DanmakuUtils;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * A container of {@link BaseDanmaku} items backed either by a sorted
 * {@link TreeSet} (time- or y-position-ordered sort types) or by a plain
 * {@link ArrayList} ({@link #ST_BY_LIST}). The list-backed mode supports
 * iteration/insert/remove only; range queries ({@link #sub}/{@link #subnew})
 * require a sorted backing set.
 *
 * NOTE(review): only the inner iterator's methods are synchronized; the
 * container itself does not appear to be thread-safe — confirm callers
 * serialize structural modifications externally.
 */
public class Danmakus implements IDanmakus {

    /** Sort items by display time. */
    public static final int ST_BY_TIME = 0;

    /** Sort items by y position, ascending. */
    public static final int ST_BY_YPOS = 1;

    /** Sort items by y position, descending. */
    public static final int ST_BY_YPOS_DESC = 2;

    /**
     * this type is used to iterate/remove/insert elements, not support sub/subnew
     */
    public static final int ST_BY_LIST = 4;

    // Backing collection: TreeSet for sorted types, ArrayList for ST_BY_LIST.
    public Collection<BaseDanmaku> items;

    // Cached container returned by sub(); reused across calls.
    private Danmakus subItems;

    // Mutable sentinel danmakus used as range bounds for sub(); their time
    // fields are rewritten on every query.
    private BaseDanmaku startItem, endItem;

    // Separate sentinels for subset()/subnew() so they do not disturb the
    // cache-validity check in sub() that relies on startItem/endItem.
    private BaseDanmaku endSubItem;

    private BaseDanmaku startSubItem;

    // Single reusable iterator wrapper; reset() is called on each iterator().
    private DanmakuIterator iterator;

    // Item count tracked manually so remove-via-iterator can decrement it.
    private int mSize = 0;

    private int mSortType = ST_BY_TIME;

    // Comparator of the backing TreeSet; null in ST_BY_LIST mode.
    private BaseComparator mComparator;

    // When true, items considered duplicates compare equal and are merged
    // (i.e. silently dropped) by the TreeSet.
    private boolean mDuplicateMergingEnabled;

    public Danmakus() {
        this(ST_BY_TIME, false);
    }

    public Danmakus(int sortType) {
        this(sortType, false);
    }

    /**
     * Creates a container with the given sort type.
     *
     * NOTE(review): an unrecognized sortType (other than the four ST_BY_*
     * constants) leaves comparator null and throws NPE below — confirm callers
     * only pass the declared constants.
     */
    public Danmakus(int sortType, boolean duplicateMergingEnabled) {
        BaseComparator comparator = null;
        if (sortType == ST_BY_TIME) {
            comparator = new TimeComparator(duplicateMergingEnabled);
        } else if (sortType == ST_BY_YPOS) {
            comparator = new YPosComparator(duplicateMergingEnabled);
        } else if (sortType == ST_BY_YPOS_DESC) {
            comparator = new YPosDescComparator(duplicateMergingEnabled);
        }
        if(sortType == ST_BY_LIST) {
            items = new ArrayList<BaseDanmaku>();
        } else {
            mDuplicateMergingEnabled = duplicateMergingEnabled;
            comparator.setDuplicateMergingEnabled(duplicateMergingEnabled);
            items = new TreeSet<BaseDanmaku>(comparator);
            mComparator = comparator;
        }
        mSortType = sortType;
        mSize = 0;
        iterator = new DanmakuIterator(items);
    }

    public Danmakus(Collection<BaseDanmaku> items) {
        setItems(items);
    }

    public Danmakus(boolean duplicateMergingEnabled) {
        this(ST_BY_TIME, duplicateMergingEnabled);
    }

    /**
     * Replaces the backing collection. With duplicate merging enabled on a
     * sorted container, the incoming items are copied into the existing
     * TreeSet (so merging applies); otherwise the collection is adopted
     * directly. Passing a List switches this container to ST_BY_LIST mode.
     */
    public void setItems(Collection<BaseDanmaku> items) {
        if (mDuplicateMergingEnabled && mSortType != ST_BY_LIST) {
            // Re-add through the merging comparator instead of adopting the
            // collection as-is.
            this.items.clear();
            this.items.addAll(items);
            items = this.items;
        } else {
            this.items = items;
        }
        if (items instanceof List) {
            mSortType = ST_BY_LIST;
        }
        mSize = (items == null ? 0 : items.size());
        if (iterator == null) {
            iterator = new DanmakuIterator(items);
        } else {
            // Re-point the shared iterator; it invalidates itself when the
            // underlying collection identity changes.
            iterator.setDatas(items);
        }
    }

    /**
     * Returns the shared iterator, reset to the start. Note: the same
     * instance is reused across calls, so nested iteration is not supported.
     */
    public IDanmakuIterator iterator() {
        iterator.reset();
        return iterator;
    }

    @Override
    public boolean addItem(BaseDanmaku item) {
        if (items != null) {
            try {
                if (items.add(item)) {
                    mSize++;
                    return true;
                }
            } catch (Exception e) {
                // Best effort: a comparator failure must not crash the caller.
                e.printStackTrace();
            }
        }
        return false;
    }

    /**
     * Removes the item; an off-screen item is additionally hidden.
     *
     * NOTE(review): unlike addItem, items is dereferenced without a null
     * check here — confirm removeItem is never called before items is set.
     */
    @Override
    public boolean removeItem(BaseDanmaku item) {
        if (item == null) {
            return false;
        }
        if (item.isOutside()) {
            item.setVisibility(false);
        }
        if (items.remove(item)) {
            mSize--;
            return true;
        }
        return false;
    }

    /**
     * Returns a live subSet view of the sorted backing set covering
     * [startTime, endTime), or null in list mode / when empty. The sentinel
     * bound items are reused and mutated on every call.
     */
    private Collection<BaseDanmaku> subset(long startTime, long endTime) {
        if (mSortType == ST_BY_LIST || items == null || items.size() == 0) {
            return null;
        }
        if (subItems == null) {
            subItems = new Danmakus(mDuplicateMergingEnabled);
        }
        if (startSubItem == null) {
            startSubItem = createItem("start");
        }
        if (endSubItem == null) {
            endSubItem = createItem("end");
        }

        startSubItem.time = startTime;
        endSubItem.time = endTime;

        return ((SortedSet<BaseDanmaku>) items).subSet(startSubItem, endSubItem);
    }

    /**
     * Like {@link #sub} but returns an independent snapshot copy instead of a
     * cached live view; null when the range is empty or unsupported.
     */
    @Override
    public IDanmakus subnew(long startTime, long endTime) {
        Collection<BaseDanmaku> subset = subset(startTime, endTime);
        if (subset == null || subset.isEmpty()) {
            return null;
        }
        ArrayList<BaseDanmaku> newSet = new ArrayList<BaseDanmaku>(subset);
        return new Danmakus(newSet);
    }

    /**
     * Returns a cached container of items in [startTime, endTime). In list
     * mode the whole list is returned instead of a range.
     */
    @Override
    public IDanmakus sub(long startTime, long endTime) {
        if (items == null || items.size() == 0) {
            return null;
        }
        if (subItems == null) {
            if(mSortType == ST_BY_LIST) {
                // List mode cannot compute ranges; expose the full contents.
                subItems = new Danmakus(Danmakus.ST_BY_LIST);
                subItems.setItems(items);
            } else {
                subItems = new Danmakus(mDuplicateMergingEnabled);
            }
        }
        if (mSortType == ST_BY_LIST) {
            return subItems;
        }
        if (startItem == null) {
            startItem = createItem("start");
        }
        if (endItem == null) {
            endItem = createItem("end");
        }

        // NOTE(review): subItems is always non-null here (created above), so
        // this guard is redundant. The check reuses the previously computed
        // window whenever the requested range lies inside it, even though the
        // cached view may then contain items outside the narrower range —
        // confirm that over-approximation is intended.
        if (subItems != null) {
            long dtime = startTime - startItem.time;
            if (dtime >= 0 && endTime <= endItem.time) {
                return subItems;
            }
        }

        startItem.time = startTime;
        endItem.time = endTime;

        // Hand the live subSet view to the cached container.
        subItems.setItems(((SortedSet<BaseDanmaku>) items).subSet(startItem, endItem));
        return subItems;
    }

    // Creates a sentinel danmaku used purely as a comparison bound.
    private BaseDanmaku createItem(String text) {
        return new Danmaku(text);
    }

    public int size() {
        return mSize;
    }

    @Override
    public void clear() {
        if (items != null){
            items.clear();
            mSize = 0;
        }
        if (subItems != null) {
            subItems.clear();
        }
    }

    @Override
    public BaseDanmaku first() {
        if (items != null && !items.isEmpty()) {
            if (mSortType == ST_BY_LIST) {
                return ((ArrayList<BaseDanmaku>) items).get(0);
            }
            return ((SortedSet<BaseDanmaku>) items).first();
        }
        return null;
    }

    @Override
    public BaseDanmaku last() {
        if (items != null && !items.isEmpty()) {
            if (mSortType == ST_BY_LIST) {
                return ((ArrayList<BaseDanmaku>) items).get(items.size() - 1);
            }
            return ((SortedSet<BaseDanmaku>) items).last();
        }
        return null;
    }

    /**
     * Reusable iterator wrapper over the backing collection. A fresh
     * underlying iterator is created by reset() only after the previous one
     * has actually been consumed (mIteratorUsed) or after the data changed.
     * All methods are synchronized on this iterator instance.
     */
    private class DanmakuIterator implements IDanmakuIterator{

        private Collection<BaseDanmaku> mData;
        private Iterator<BaseDanmaku> it;
        // True once next()/remove() has been called on the current iterator.
        private boolean mIteratorUsed;

        public DanmakuIterator(Collection<BaseDanmaku> datas){
            setDatas(datas);
        }

        public synchronized void reset() {
            if (!mIteratorUsed && it != null) {
                // Current iterator was never consumed; keep it.
                return;
            }
            if (mData != null && mSize > 0) {
                it = mData.iterator();
            } else {
                it = null;
            }
        }

        public synchronized void setDatas(Collection<BaseDanmaku> datas){
            if (mData != datas) {
                // New backing collection: drop the stale iterator.
                mIteratorUsed = false;
                it = null;
            }
            mData = datas;
        }

        @Override
        public synchronized BaseDanmaku next() {
            mIteratorUsed = true;
            return it != null ? it.next() : null;
        }

        @Override
        public synchronized boolean hasNext() {
            return it != null && it.hasNext();
        }

        @Override
        public synchronized void remove() {
            mIteratorUsed = true;
            if (it != null) {
                it.remove();
                // Keep the container's manual size counter in sync.
                mSize--;
            }
        }
    }

    /**
     * Base ordering: delegates to DanmakuUtils.compare, but reports equality
     * (causing TreeSet merging) for duplicates when merging is enabled.
     */
    private class BaseComparator implements Comparator<BaseDanmaku> {

        protected boolean mDuplicateMergingEnable;

        public BaseComparator(boolean duplicateMergingEnabled) {
            setDuplicateMergingEnabled(duplicateMergingEnabled);
        }

        public void setDuplicateMergingEnabled(boolean enable) {
            mDuplicateMergingEnable = enable;
        }

        @Override
        public int compare(BaseDanmaku obj1, BaseDanmaku obj2) {
            if (mDuplicateMergingEnable && DanmakuUtils.isDuplicate(obj1, obj2)) {
                return 0;
            }
            return DanmakuUtils.compare(obj1, obj2);
        }
    }

    /** Time ordering; identical to the base comparison. */
    private class TimeComparator extends BaseComparator {

        public TimeComparator(boolean duplicateMergingEnabled) {
            super(duplicateMergingEnabled);
        }

        @Override
        public int compare(BaseDanmaku obj1, BaseDanmaku obj2) {
            return super.compare(obj1, obj2);
        }
    }

    /** Orders by top y coordinate, ascending. */
    private class YPosComparator extends BaseComparator {

        public YPosComparator(boolean duplicateMergingEnabled) {
            super(duplicateMergingEnabled);
        }

        @Override
        public int compare(BaseDanmaku obj1, BaseDanmaku obj2) {
            if (mDuplicateMergingEnable && DanmakuUtils.isDuplicate(obj1, obj2)) {
                return 0;
            }
            return Float.compare(obj1.getTop(), obj2.getTop());
        }
    }

    /** Orders by top y coordinate, descending. */
    private class YPosDescComparator extends BaseComparator {

        public YPosDescComparator(boolean duplicateMergingEnabled) {
            super(duplicateMergingEnabled);
        }

        @Override
        public int compare(BaseDanmaku obj1, BaseDanmaku obj2) {
            if (mDuplicateMergingEnable && DanmakuUtils.isDuplicate(obj1, obj2)) {
                return 0;
            }
            return Float.compare(obj2.getTop(), obj1.getTop());
        }
    }

    @Override
    public boolean contains(BaseDanmaku item) {
        return this.items != null && this.items.contains(item);
    }

    @Override
    public boolean isEmpty() {
        return this.items == null || this.items.isEmpty();
    }

    // Propagates the merging flag into the backing TreeSet's comparator.
    private void setDuplicateMergingEnabled(boolean enable) {
        mComparator.setDuplicateMergingEnabled(enable);
        mDuplicateMergingEnabled = enable;
    }

    /**
     * Enables/disables duplicate merging for the cached sub() container and
     * invalidates the cached range sentinels so the next sub() recomputes.
     */
    @Override
    public void setSubItemsDuplicateMergingEnabled(boolean enable) {
        mDuplicateMergingEnabled = enable;
        startItem = endItem = null;
        if (subItems == null) {
            subItems = new Danmakus(enable);
        }
        subItems.setDuplicateMergingEnabled(enable);
    }
}
package kg.apc.charting;

import kg.apc.charting.rows.GraphRowAverages;
import kg.apc.emulators.TestGraphics;
import org.apache.jorphan.gui.NumberRenderer;
import org.junit.*;

import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.Transferable;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.concurrent.ConcurrentSkipListMap;

/**
 * Smoke tests for {@link GraphPanelChart}: each test drives one public method
 * against an off-screen {@link TestGraphics} surface and primarily checks
 * that no exception is thrown.
 */
public class GraphPanelChartTest {

    /** Default constructor required by JUnit. */
    public GraphPanelChartTest() {
    }

    /**
     * One-time fixture setup; nothing to prepare for these tests.
     *
     * @throws Exception never
     */
    @BeforeClass
    public static void setUpClass() throws Exception {
    }

    /**
     * One-time fixture teardown; nothing to release.
     *
     * @throws Exception never
     */
    @AfterClass
    public static void tearDownClass() throws Exception {
    }

    /** Per-test setup; intentionally empty. */
    @Before
    public void setUp() {
    }

    /** Per-test teardown; intentionally empty. */
    @After
    public void tearDown() {
    }

    /**
     * Test of paintComponent method, of class GraphPanelChart: paints a chart
     * three times with growing data and varying renderer settings.
     */
    @Test
    public void testPaintComponent() {
        System.out.println("paintComponent");
        Graphics canvas = new TestGraphics();

        GraphPanelChart chart = new GraphPanelChart();
        chart.setSize(500, 500);
        chart.getChartSettings().setDrawFinalZeroingLines(true);
        chart.getChartSettings().setDrawCurrentX(true);
        chart.getChartSettings().setExpendRows(true);

        final ConcurrentSkipListMap<String, AbstractGraphRow> model =
                new ConcurrentSkipListMap<String, AbstractGraphRow>();
        chart.setRows(model);

        // A single row with every drawing option switched on.
        final GraphRowAverages averagesRow = new GraphRowAverages();
        averagesRow.setDrawThickLines(true);
        averagesRow.setDrawLine(true);
        averagesRow.setDrawBar(true);
        averagesRow.setDrawValueLabel(true);
        averagesRow.setMarkerSize(AbstractGraphRow.MARKER_SIZE_BIG);
        model.put("test 1", averagesRow);

        // Paint after each data point to exercise re-layout paths.
        averagesRow.add(System.currentTimeMillis(), 20);
        chart.paintComponent(canvas);

        averagesRow.add(System.currentTimeMillis(), 540);
        chart.setxAxisLabelRenderer(new DateTimeRenderer("HH:mm:ss"));
        chart.paintComponent(canvas);

        averagesRow.add(System.currentTimeMillis(), 8530);
        chart.paintComponent(canvas);
    }

    /** Painting an empty model must not throw. */
    @Test
    public void testPaintComponent_empty() {
        System.out.println("paintComponent_empty");
        Graphics canvas = new TestGraphics();

        GraphPanelChart chart = new GraphPanelChart();
        chart.setSize(500, 500);
        chart.getChartSettings().setDrawFinalZeroingLines(false);

        final ConcurrentSkipListMap<String, AbstractGraphRow> model =
                new ConcurrentSkipListMap<String, AbstractGraphRow>();
        chart.setRows(model);
        chart.paintComponent(canvas);
    }

    /**
     * Test of setRows method, of class GraphPanelChart: a null model must be
     * accepted without throwing.
     */
    @Test
    public void testSetRows() {
        System.out.println("setRows");
        ConcurrentSkipListMap<String, AbstractGraphRow> nullModel = null;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setRows(nullModel);
    }

    /** Test of setyAxisLabelRenderer method, of class GraphPanelChart. */
    @Test
    public void testSetyAxisLabelRenderer() {
        System.out.println("setyAxisLabelRenderer");
        NumberRenderer nullRenderer = null;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setyAxisLabelRenderer(nullRenderer);
    }

    /** Test of setxAxisLabelRenderer method, of class GraphPanelChart. */
    @Test
    public void testSetxAxisLabelRenderer() {
        System.out.println("setxAxisLabelRenderer");
        NumberRenderer nullRenderer = null;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setxAxisLabelRenderer(nullRenderer);
    }

    /** Test of setCurrentX method, of class GraphPanelChart. */
    @Test
    public void testSetCurrentX() {
        System.out.println("setCurrentX");
        long xValue = 0L;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setCurrentX(xValue);
    }

    /** Test of setForcedMinX method, of class GraphPanelChart. */
    @Test
    public void testSetForcedMinX() {
        System.out.println("setForcedMinX");
        int minX = 0;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setForcedMinX(minX);
    }

    /**
     * Test of lostOwnership method, of class GraphPanelChart: the clipboard
     * callback must tolerate null arguments.
     */
    @Test
    public void testLostOwnership() {
        System.out.println("lostOwnership");
        Clipboard board = null;
        Transferable data = null;
        GraphPanelChart chart = new GraphPanelChart();
        chart.lostOwnership(board, data);
    }

    /** Test of clearErrorMessage method, of class GraphPanelChart. */
    @Test
    public void testClearErrorMessage() {
        System.out.println("clearErrorMessage");
        GraphPanelChart chart = new GraphPanelChart();
        chart.clearErrorMessage();
    }

    /**
     * Test of setErrorMessage method, of class GraphPanelChart: non-empty,
     * empty and null messages must all be accepted.
     */
    @Test
    public void testSetErrorMessage() {
        System.out.println("setErrorMessage");
        String message = "error";
        GraphPanelChart chart = new GraphPanelChart();
        chart.setErrorMessage(message);
        message = "";
        chart.setErrorMessage(message);
        message = null;
        chart.setErrorMessage(message);
    }

    /** Test of setChartType method, of class GraphPanelChart. */
    @Test
    public void testSetChartType() {
        System.out.println("setChartType");
        int chartType = GraphPanelChart.CHART_PERCENTAGE;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setChartType(chartType);
    }

    /**
     * Test of isModelContainsRow method, of class GraphPanelChart: a row that
     * was put into the model is reported present, one that wasn't is not.
     */
    @Test
    public void testIsModelContainsRow() {
        System.out.println("isModelContainsRow");
        ConcurrentSkipListMap<String, AbstractGraphRow> model =
                new ConcurrentSkipListMap<String, AbstractGraphRow>();

        AbstractGraphRow rowIncluded = new GraphRowAverages();
        rowIncluded.setLabel("rowIncluded");
        AbstractGraphRow rowExcluded = new GraphRowAverages();
        rowExcluded.setLabel("rowExcluded");
        model.put("rowIncluded", rowIncluded);

        GraphPanelChart chart = new GraphPanelChart();
        chart.setRows(model);

        Assert.assertEquals(true, chart.isModelContainsRow(rowIncluded));
        Assert.assertEquals(false, chart.isModelContainsRow(rowExcluded));
    }

    /** Test of setReSetColors method, of class GraphPanelChart. */
    @Test
    public void testSetReSetColors() {
        System.out.println("setReSetColors");
        boolean resetColors = false;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setReSetColors(resetColors);
    }

    /** Test of setDisplayPrecision method, of class GraphPanelChart. */
    @Test
    public void testSetDisplayPrecision() {
        System.out.println("setDisplayPrecision");
        boolean showPrecision = false;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setDisplayPrecision(showPrecision);
    }

    /** Test of setxAxisLabel method, of class GraphPanelChart. */
    @Test
    public void testSetxAxisLabel() {
        System.out.println("setxAxisLabel");
        String label = "";
        GraphPanelChart chart = new GraphPanelChart();
        chart.setxAxisLabel(label);
    }

    /** Test of setYAxisLabel method, of class GraphPanelChart (lowercase-y variant). */
    @Test
    public void testSetyAxisLabel() {
        System.out.println("setyAxisLabel");
        String label = "";
        GraphPanelChart chart = new GraphPanelChart();
        chart.setYAxisLabel(label);
    }

    /** Test of setPrecisionLabel method, of class GraphPanelChart. */
    @Test
    public void testSetPrecisionLabel() {
        System.out.println("setPrecisionLabel");
        int digits = 0;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setPrecisionLabel(digits);
    }

    /** Test of setIsPreview method, of class GraphPanelChart. */
    @Test
    public void testSetIsPreview() {
        System.out.println("setIsPreview");
        boolean preview = false;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setIsPreview(preview);
    }

    /** Test of setUseRelativeTime method, of class GraphPanelChart. */
    @Test
    public void testSetUseRelativeTime() {
        System.out.println("setUseRelativeTime");
        boolean relative = false;
        GraphPanelChart chart = new GraphPanelChart();
        chart.setUseRelativeTime(relative);
    }

    /** Test of setTestStartTime method, of class GraphPanelChart. */
    @Test
    public void testSetTestStartTime() {
        System.out.println("setTestStartTime");
        long startTime = System.currentTimeMillis();
        GraphPanelChart chart = new GraphPanelChart();
        chart.setTestStartTime(startTime);
    }

    /**
     * Test of saveGraphToPNG method, of class GraphPanelChart: renders an
     * empty chart into a temporary PNG file.
     *
     * @throws Exception on I/O failure
     */
    @Test
    public void testSaveGraphToPNG() throws Exception {
        System.out.println("saveGraphToPNG");
        File target = File.createTempFile("test", ".png");
        int width = 100;
        int height = 100;
        GraphPanelChart chart = new GraphPanelChart();
        final ConcurrentSkipListMap<String, AbstractGraphRow> model =
                new ConcurrentSkipListMap<String, AbstractGraphRow>();
        chart.setRows(model);
        chart.saveGraphToPNG(target, width, height);
    }

    /**
     * Test of saveGraphToCSV method, of class GraphPanelChart: exports an
     * empty chart into a temporary CSV file.
     *
     * @throws Exception on I/O failure
     */
    @Test
    public void testSaveGraphToCSV() throws Exception {
        System.out.println("saveGraphToCSV");
        File target = File.createTempFile("test", ".csv");
        GraphPanelChart chart = new GraphPanelChart();
        final ConcurrentSkipListMap<String, AbstractGraphRow> model =
                new ConcurrentSkipListMap<String, AbstractGraphRow>();
        chart.setRows(model);
        chart.saveGraphToCSV(target);
    }

    /** Test of setYAxisLabel method, of class GraphPanelChart. */
    @Test
    public void testSetYAxisLabel() {
        System.out.println("setYAxisLabel");
        String label = "";
        GraphPanelChart chart = new GraphPanelChart();
        chart.setYAxisLabel(label);
    }

    /**
     * Test of getChartSettings method, of class GraphPanelChart: settings are
     * always available on a fresh instance.
     */
    @Test
    public void testGetChartSettings() {
        System.out.println("getChartSettings");
        GraphPanelChart chart = new GraphPanelChart();
        ChartSettings settings = chart.getChartSettings();
        Assert.assertNotNull(settings);
    }

    /** Test of invalidateCache method, of class GraphPanelChart. */
    @Test
    public void testInvalidateCache() {
        System.out.println("invalidateCache");
        GraphPanelChart chart = new GraphPanelChart();
        chart.invalidateCache();
    }

    /**
     * Verifies a fresh chart can be written through Java serialization.
     *
     * @throws IOException if serialization fails
     */
    @Test
    public void testSerialization() throws IOException {
        GraphPanelChart chart = new GraphPanelChart();
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        ObjectOutputStream objectStream = new ObjectOutputStream(buffer);
        objectStream.writeObject(chart);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.azure.storage; import com.azure.storage.file.datalake.models.DataLakeStorageException; import com.google.common.collect.Sets; import org.apache.nifi.processor.Processor; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.provenance.ProvenanceEventRecord; import org.apache.nifi.provenance.ProvenanceEventType; import org.apache.nifi.util.MockFlowFile; import org.junit.Ignore; import org.junit.Test; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT { @Override protected Class<? 
extends Processor> getProcessorClass() { return FetchAzureDataLakeStorage.class; } @Test public void testFetchFileFromDirectory() { // GIVEN String directory = "TestDirectory"; String filename = "testFile.txt"; String fileContent = "AzureFileContent"; String inputFlowFileContent = "InputFlowFileContent"; createDirectoryAndUploadFile(directory, filename, fileContent); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory, filename, inputFlowFileContent, fileContent); } @Test public void testFetchFileFromRoot() { // GIVEN String directory= ""; String filename = "testFile.txt"; String fileContent = "AzureFileContent"; String inputFlowFileContent = "InputFlowFileContent"; uploadFile(directory, filename, fileContent); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory, filename, inputFlowFileContent, fileContent); } @Test public void testFetchFileFromDirectoryWithWhitespace() { // GIVEN String directory= "A Test Directory"; String filename = "testFile.txt"; String fileContent = "AzureFileContent"; String inputFlowFileContent = "InputFlowFileContent"; createDirectoryAndUploadFile(directory, filename, fileContent); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory, filename, inputFlowFileContent, fileContent); } @Test public void testFetchFileWithWhitespaceFromDirectory() { // GIVEN String directory= "TestDirectory"; String filename = "A test file.txt"; String fileContent = "AzureFileContent"; String inputFlowFileContent = "InputFlowFileContent"; createDirectoryAndUploadFile(directory, filename, fileContent); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory, filename, inputFlowFileContent, fileContent); } @Test public void testFetchFileCaseSensitiveFilename() { // GIVEN String directory = "TestDirectory"; String filename1 = "testFile.txt"; String filename2 = "testfile.txt"; String fileContent1 = "ContentOfFile1"; String fileContent2 = "ContentOfFile2"; String inputFlowFileContent = "InputFlowFileContent"; 
createDirectoryAndUploadFile(directory, filename1, fileContent1); uploadFile(directory, filename2, fileContent2); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory, filename1, inputFlowFileContent, fileContent1); runner.clearProvenanceEvents(); runner.clearTransferState(); testSuccessfulFetch(fileSystemName, directory, filename2, inputFlowFileContent, fileContent2); } @Test public void testFetchFileCaseSensitiveDirectoryName() { // GIVEN String directory1 = "TestDirectory"; String directory2 = "Testdirectory"; String filename1 = "testFile1.txt"; String filename2 = "testFile2.txt"; String fileContent1 = "ContentOfFile1"; String fileContent2 = "ContentOfFile2"; String inputFlowFileContent = "InputFlowFileContent"; createDirectoryAndUploadFile(directory1, filename1, fileContent1); createDirectoryAndUploadFile(directory2, filename2, fileContent2); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory1, filename1, inputFlowFileContent, fileContent1); runner.clearProvenanceEvents(); runner.clearTransferState(); testSuccessfulFetch(fileSystemName, directory2, filename2, inputFlowFileContent, fileContent2); } @Test public void testFetchFileFromDeepDirectoryStructure() { // GIVEN String directory= "Directory01/Directory02/Directory03/Directory04/Directory05/Directory06/Directory07/" + "Directory08/Directory09/Directory10/Directory11/Directory12/Directory13/Directory14/Directory15/" + "Directory16/Directory17/Directory18/Directory19/Directory20/TestDirectory"; String filename = "testFile.txt"; String fileContent = "AzureFileContent"; String inputFlowFileContent = "InputFlowFileContent"; createDirectoryAndUploadFile(directory, filename, fileContent); // WHEN // THEN testSuccessfulFetch(fileSystemName, directory, filename, inputFlowFileContent, fileContent); } @Test public void testFetchDirectory() { // GIVEN String parentDirectory = "ParentDirectory"; String childDirectory = "ChildDirectory"; String filename = "testFile.txt"; String fileContent = 
"AzureFileContent";
        String inputFlowFileContent = "InputFlowFileContent";

        createDirectoryAndUploadFile(parentDirectory + "/" + childDirectory, filename, fileContent);

        // WHEN
        // THEN
        // NOTE(review): parentDirectory and childDirectory are passed as the directory and
        // filename arguments of the helper — presumably fetching a directory as if it were a
        // file, which surfaces as a ProcessException; confirm against the processor contract.
        testFailedFetchWithProcessException(fileSystemName, parentDirectory, childDirectory, inputFlowFileContent, inputFlowFileContent);
    }

    // Fetching from a file system that was never created fails with HTTP 400 and the
    // incoming flow file is routed to failure with its content unchanged.
    @Test
    public void testFetchNonExistentFileSystem() {
        // GIVEN
        String fileSystem = "NonExistentFileSystem";
        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String inputFlowFileContent = "InputFlowFileContent";

        // WHEN
        // THEN
        testFailedFetch(fileSystem, directory, filename, inputFlowFileContent, inputFlowFileContent, 400);
    }

    // Fetching from a directory that does not exist fails with HTTP 404.
    @Test
    public void testFetchNonExistentDirectory() {
        // GIVEN
        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String inputFlowFileContent = "InputFlowFileContent";

        // WHEN
        // THEN
        testFailedFetch(fileSystemName, directory, filename, inputFlowFileContent, inputFlowFileContent, 404);
    }

    // The directory exists but the file does not: HTTP 404 expected.
    @Test
    public void testFetchNonExistentFile() {
        // GIVEN
        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String inputFlowFileContent = "InputFlowFileContent";

        fileSystemClient.createDirectory(directory);

        // WHEN
        // THEN
        testFailedFetch(fileSystemName, directory, filename, inputFlowFileContent, inputFlowFileContent, 404);
    }

    // Manual-only round trip of ~120 MB of random data.
    @Ignore("Takes some time, only recommended for manual testing.")
    @Test
    public void testFetchLargeFile() {
        // GIVEN
        String directory = "TestDirectory";
        String filename = "testFile.txt";
        Random random = new Random();
        byte[] fileContentBytes = new byte[120_000_000];
        random.nextBytes(fileContentBytes);
        // NOTE(review): decoding random bytes with the platform default charset is lossy,
        // but the comparison still holds because the same String is uploaded and expected.
        String fileContent = new String(fileContentBytes);
        String inputFlowFileContent = "InputFlowFileContent";

        createDirectoryAndUploadFile(directory, filename, fileContent);

        // WHEN
        // THEN
        testSuccessfulFetch(fileSystemName, directory, filename, inputFlowFileContent, fileContent);
    }

    // A directory name containing path separators is rejected by the service (404).
    @Test
    public void testFetchInvalidDirectoryName() {
        // GIVEN
        String directory = "TestDirectory";
        String invalidDirectoryName = "Test/\\Directory";
        String filename = "testFile.txt";
        String fileContent = "AzureFileContent";
        String inputFlowFileContent = "InputFlowFileContent";

        createDirectoryAndUploadFile(directory, filename, fileContent);

        // WHEN
        // THEN
        testFailedFetch(fileSystemName, invalidDirectoryName, filename, inputFlowFileContent, inputFlowFileContent, 404);
    }

    // A filename containing path separators is rejected by the service (404).
    @Test
    public void testFetchInvalidFilename() {
        // GIVEN
        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String invalidFilename = "test/\\File.txt";
        String fileContent = "AzureFileContent";
        String inputFlowFileContent = "InputFlowFileContent";

        createDirectoryAndUploadFile(directory, filename, fileContent);

        // WHEN
        // THEN
        testFailedFetch(fileSystemName, directory, invalidFilename, inputFlowFileContent, inputFlowFileContent, 404);
    }

    // All three processor properties are resolved from flow-file attributes via
    // Expression Language.
    @Test
    public void testFetchUsingExpressionLanguage() {
        // GIVEN
        String expLangFileSystem = "az.filesystem";
        String expLangDirectory = "az.directory";
        String expLangFilename = "az.filename";

        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String fileContent = "AzureFileContent";
        String inputFlowFileContent = "InputFlowFileContent";

        Map<String, String> attributes = new HashMap<>();
        attributes.put(expLangFileSystem, fileSystemName);
        attributes.put(expLangDirectory, directory);
        attributes.put(expLangFilename, filename);

        createDirectoryAndUploadFile(directory, filename, fileContent);

        // WHEN
        // THEN
        testSuccessfulFetch("${" + expLangFileSystem + "}", "${" + expLangDirectory + "}", "${" + expLangFilename + "}", attributes, inputFlowFileContent, fileContent);
    }

    // The file-system attribute is never set, so the EL expression resolves to empty
    // and the processor fails with a ProcessException.
    @Test
    public void testFetchUsingExpressionLanguageFileSystemIsNotSpecified() {
        // GIVEN
        String expLangFileSystem = "az.filesystem";
        String expLangDirectory = "az.directory";
        String expLangFilename = "az.filename";

        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String fileContent = "AzureFileContent";
        String inputFlowFileContent = "InputFlowFileContent";

        Map<String, String> attributes = new HashMap<>();
        attributes.put(expLangDirectory, directory);
        attributes.put(expLangFilename, filename);

        createDirectoryAndUploadFile(directory, filename, fileContent);

        // WHEN
        // THEN
        testFailedFetchWithProcessException("${" + expLangFileSystem + "}", "${" + expLangDirectory + "}", "${" + expLangFilename + "}", attributes, inputFlowFileContent, inputFlowFileContent);
    }

    // Same as above, but the filename attribute is the one missing.
    @Test
    public void testFetchUsingExpressionLanguageFilenameIsNotSpecified() {
        // GIVEN
        String expLangFileSystem = "az.filesystem";
        String expLangDirectory = "az.directory";
        String expLangFilename = "az.filename";

        String directory = "TestDirectory";
        String filename = "testFile.txt";
        String fileContent = "AzureFileContent";
        String inputFlowFileContent = "InputFlowFileContent";

        Map<String, String> attributes = new HashMap<>();
        attributes.put(expLangFileSystem, fileSystemName);
        attributes.put(expLangDirectory, directory);

        createDirectoryAndUploadFile(directory, filename, fileContent);

        // WHEN
        // THEN
        testFailedFetchWithProcessException("${" + expLangFileSystem + "}", "${" + expLangDirectory + "}", "${" + expLangFilename + "}", attributes, inputFlowFileContent, inputFlowFileContent);
    }

    // Convenience overload: successful fetch without extra flow-file attributes.
    private void testSuccessfulFetch(String fileSystem, String directory, String filename, String inputFlowFileContent, String expectedFlowFileContent) {
        testSuccessfulFetch(fileSystem, directory, filename, Collections.emptyMap(), inputFlowFileContent, expectedFlowFileContent);
    }

    // Runs the processor and asserts the fetched content plus the expected provenance
    // events (FETCH and CONTENT_MODIFIED).
    private void testSuccessfulFetch(String fileSystem, String directory, String filename, Map<String, String> attributes, String inputFlowFileContent, String expectedFlowFileContent) {
        // GIVEN
        Set<ProvenanceEventType> expectedEventTypes = Sets.newHashSet(ProvenanceEventType.CONTENT_MODIFIED, ProvenanceEventType.FETCH);

        setRunnerProperties(fileSystem, directory, filename);

        // WHEN
        startRunner(inputFlowFileContent, attributes);

        // THEN
        assertSuccess(expectedFlowFileContent, expectedEventTypes);
    }

    // Convenience overload: failed fetch (storage error) without extra attributes.
    private void testFailedFetch(String fileSystem, String directory, String filename, String inputFlowFileContent, String expectedFlowFileContent, int expectedErrorCode) {
        testFailedFetch(fileSystem, directory, filename, Collections.emptyMap(), inputFlowFileContent, expectedFlowFileContent, expectedErrorCode);
    }

    // Runs the processor and asserts that the logged error is a DataLakeStorageException
    // with the expected HTTP status code, and that the flow file went to failure.
    private void testFailedFetch(String fileSystem, String directory, String filename, Map<String, String> attributes, String inputFlowFileContent, String expectedFlowFileContent, int expectedErrorCode) {
        // GIVEN
        setRunnerProperties(fileSystem, directory, filename);

        // WHEN
        startRunner(inputFlowFileContent, attributes);

        // THEN
        DataLakeStorageException e = (DataLakeStorageException)runner.getLogger().getErrorMessages().get(0).getThrowable();
        assertEquals(expectedErrorCode, e.getStatusCode());
        assertFailure(expectedFlowFileContent);
    }

    // Convenience overload: failed fetch (ProcessException) without extra attributes.
    private void testFailedFetchWithProcessException(String fileSystem, String directory, String filename, String inputFlowFileContent, String expectedFlowFileContent) {
        testFailedFetchWithProcessException(fileSystem, directory, filename, Collections.emptyMap(), inputFlowFileContent, expectedFlowFileContent);
    }

    // Runs the processor and asserts that the logged error is a ProcessException and
    // that the flow file went to failure with its content unchanged.
    private void testFailedFetchWithProcessException(String fileSystem, String directory, String filename, Map<String, String> attributes, String inputFlowFileContent, String expectedFlowFileContent) {
        // GIVEN
        setRunnerProperties(fileSystem, directory, filename);

        // WHEN
        startRunner(inputFlowFileContent, attributes);

        // THEN
        Throwable exception = runner.getLogger().getErrorMessages().get(0).getThrowable();
        assertEquals(ProcessException.class, exception.getClass());
        assertFailure(expectedFlowFileContent);
    }

    // Configures the processor's filesystem/directory/file properties and validates them.
    private void setRunnerProperties(String fileSystem, String directory, String filename) {
        runner.setProperty(FetchAzureDataLakeStorage.FILESYSTEM, fileSystem);
        runner.setProperty(FetchAzureDataLakeStorage.DIRECTORY, directory);
        runner.setProperty(FetchAzureDataLakeStorage.FILE, filename);
        runner.assertValid();
    }

    // Enqueues one flow file with the given content/attributes and triggers the processor.
    private void startRunner(String inputFlowFileContent, Map<String, String> attributes) {
        runner.enqueue(inputFlowFileContent, attributes);
        runner.run();
    }

    // Asserts exactly one flow file on success with the expected content and that the
    // emitted provenance event types match exactly.
    private void assertSuccess(String expectedFlowFileContent, Set<ProvenanceEventType> expectedEventTypes) {
        runner.assertAllFlowFilesTransferred(FetchAzureDataLakeStorage.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureDataLakeStorage.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(expectedFlowFileContent);

        Set<ProvenanceEventType> actualEventTypes = runner.getProvenanceEvents().stream()
                .map(ProvenanceEventRecord::getEventType)
                .collect(Collectors.toSet());
        assertEquals(expectedEventTypes, actualEventTypes);
    }

    // Asserts exactly one flow file on failure, with its content unchanged.
    private void assertFailure(String expectedFlowFileContent) {
        runner.assertAllFlowFilesTransferred(FetchAzureDataLakeStorage.REL_FAILURE, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureDataLakeStorage.REL_FAILURE).get(0);
        flowFile.assertContentEquals(expectedFlowFileContent);
    }
}
package controller; import java.io.IOException; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import model.dao.MemoDAO; import model.dao.MusicDAO; import model.domain.MemberBean; import model.domain.MemoBean; import model.domain.MusicBean; import emotionExtractor.EmotionExtractor; import emotionExtractor.TendencyCheck; public class Memo extends javax.servlet.http.HttpServlet { protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ process(request,response); } protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ process(request,response); } protected void process(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ request.setCharacterEncoding("euc-kr"); String command=request.getParameter("command"); if(command == null){ command = "list"; } if(command.equals("list")){ HttpSession session = request.getSession(); int memberNum = (int)(session.getAttribute("memberNum")); MemoBean [] list = MemoDAO.getAllContents(memberNum); request.setAttribute("list", list); RequestDispatcher rd = request.getRequestDispatcher("list.jsp"); rd.forward(request, response); return; }else if(command.equals("write")){ String member = request.getParameter("member"); String title = request.getParameter("title"); String content = request.getParameter("content"); String hashTag1 = request.getParameter("hashtag1"); String hashTag2 = request.getParameter("hashtag2"); String hashTag3 = request.getParameter("hashtag3"); String back = request.getParameter("back"); if(hashTag1 == null || hashTag1.trim().length() == 0 || hashTag2 == null || hashTag2.trim().length() == 0 || hashTag3 == null || hashTag3.trim().length() == 0){ response.sendRedirect("write.jsp"); return; } 
String[] hash = {hashTag1, hashTag2, hashTag3, ""}; HttpSession session = request.getSession(); MemberBean bean = (MemberBean) session.getAttribute("member"); String emotionResult = EmotionExtractor.test(hash, back); if(emotionResult!=null){ int memberNum = Integer.parseInt(member); MemoBean gContent = new MemoBean(memberNum, title, content, hashTag1, hashTag2, hashTag3, back); if(bean.getTendency() != null){ emotionResult = TendencyCheck.result(bean.getTendency(), emotionResult); } MusicBean music = MusicDAO.selectMusic(emotionResult); if(music == null){ response.sendRedirect("error.jsp"); return; }else{ request.setAttribute("resultContent", gContent); request.setAttribute("resultMusic", music); RequestDispatcher rd = request.getRequestDispatcher("/recommend.jsp"); rd.forward(request, response); return; } } }else if(command.equals("read")){ String strNum = request.getParameter("num"); if(strNum == null){ response.sendRedirect("memo.do"); return; } int num = Integer.parseInt(strNum); MemoBean gContent = MemoDAO.getContent(num, true); MusicBean music = MusicDAO.selectMusic(gContent.getMusicNum()); if(gContent == null){ response.sendRedirect("error.jsp"); return; }else{ request.setAttribute("resultContent", gContent); request.setAttribute("resultMusic", music); RequestDispatcher rd = request.getRequestDispatcher("/read.jsp"); rd.forward(request, response); return; } }else if(command.equals("updateForm")){ String strNum=request.getParameter("num"); if(strNum == null || strNum.trim().length() == 0){ response.sendRedirect("memo.do"); return; } int num = Integer.parseInt(strNum); MemoBean gContent = MemoDAO.getContent(num, false); if(gContent == null){ response.sendRedirect("error.jsp"); return; }else{ request.setAttribute("resultContent", gContent); RequestDispatcher rd=request.getRequestDispatcher("update.jsp"); rd.forward(request, response); return; } }else if(command.equals("update")){ String strNum = request.getParameter("num"); String member = 
request.getParameter("member"); String title = request.getParameter("title"); String content = request.getParameter("content"); if(strNum == null || strNum.trim().length() == 0 || content == null || content.trim().length() == 0 ){ response.sendRedirect("memo.do"); return; } int num = Integer.parseInt(strNum); int memberNum = Integer.parseInt(member); MemoBean gContent = new MemoBean(num, memberNum, title, content); boolean result = MemoDAO.updateContent(gContent); if(result){ response.sendRedirect("memo.do"); return; }else{ response.sendRedirect("error.jsp"); return; } }else if(command.equals("delete")){ String strNum=request.getParameter("num"); if(strNum == null || strNum.trim().length() == 0){ response.sendRedirect("memo.do"); return; } int num = Integer.parseInt(strNum); boolean result = MemoDAO.deleteContent(num); if(result){ response.sendRedirect("memo.do"); return; }else{ response.sendRedirect("error.jsp"); return; } }else if(command.equals("save")){ String member = request.getParameter("member"); String title = request.getParameter("title"); String content = request.getParameter("content"); String hashTag1 = request.getParameter("hashtag1"); String hashTag2 = request.getParameter("hashtag2"); String hashTag3 = request.getParameter("hashtag3"); String back = request.getParameter("back"); String music = request.getParameter("musicNum"); int memberNum = Integer.parseInt(member); int musicNum = Integer.parseInt(music); MemoBean gContent = new MemoBean(memberNum, title, content, hashTag1, hashTag2, hashTag3, back, musicNum); boolean result = MemoDAO.writeContent(gContent); if(result){ response.sendRedirect("memo.do"); return; }else{ response.sendRedirect("error.jsp"); return; } } } private char[] getPath() { // TODO Auto-generated method stub return null; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myriad; import com.codahale.metrics.JmxReporter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.health.HealthCheckRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.google.inject.Guice; import com.google.inject.Injector; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.commons.collections.MapUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler; import org.apache.myriad.configuration.MyriadBadConfigurationException; import org.apache.myriad.configuration.MyriadConfiguration; import org.apache.myriad.configuration.NodeManagerConfiguration; import org.apache.myriad.configuration.ServiceConfiguration; import org.apache.myriad.health.MesosDriverHealthCheck; import org.apache.myriad.health.MesosMasterHealthCheck; import 
org.apache.myriad.health.ZookeeperHealthCheck; import org.apache.myriad.scheduler.ExtendedResourceProfile; import org.apache.myriad.scheduler.MyriadDriverManager; import org.apache.myriad.scheduler.MyriadOperations; import org.apache.myriad.scheduler.NMProfile; import org.apache.myriad.scheduler.Rebalancer; import org.apache.myriad.scheduler.ServiceProfileManager; import org.apache.myriad.scheduler.ServiceResourceProfile; import org.apache.myriad.scheduler.ServiceTaskConstraints; import org.apache.myriad.scheduler.TaskConstraintsManager; import org.apache.myriad.scheduler.TaskFactory; import org.apache.myriad.scheduler.TaskTerminator; import org.apache.myriad.scheduler.TaskUtils; import org.apache.myriad.scheduler.yarn.interceptor.InterceptorRegistry; import org.apache.myriad.state.SchedulerState; import org.apache.myriad.webapp.MyriadWebServer; import org.apache.myriad.webapp.WebAppGuiceModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Main entry point for myriad scheduler */ public class Main { private static final Logger LOGGER = LoggerFactory.getLogger(Main.class); private MyriadWebServer webServer; private ScheduledExecutorService terminatorService; private ScheduledExecutorService rebalancerService; private HealthCheckRegistry healthCheckRegistry; private static Injector injector; public static void initialize(Configuration hadoopConf, AbstractYarnScheduler yarnScheduler, RMContext rmContext, InterceptorRegistry registry) throws Exception { ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); MyriadConfiguration cfg = mapper.readValue(Thread.currentThread().getContextClassLoader().getResource( "myriad-config-default.yml"), MyriadConfiguration.class); MyriadModule myriadModule = new MyriadModule(cfg, hadoopConf, yarnScheduler, rmContext, registry); MesosModule mesosModule = new MesosModule(); injector = Guice.createInjector(myriadModule, mesosModule, new WebAppGuiceModule()); new Main().run(cfg); } // TODO (Kannan Rajah) Hack to get 
injector in unit test. public static Injector getInjector() { return injector; } public void run(MyriadConfiguration cfg) throws Exception { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Bindings: " + injector.getAllBindings()); } JmxReporter.forRegistry(new MetricRegistry()).build().start(); initWebApp(injector); initHealthChecks(injector); initProfiles(injector); validateNMInstances(injector); initServiceConfigurations(cfg, injector); initDisruptors(injector); initRebalancerService(cfg, injector); initTerminatorService(injector); startMesosDriver(injector); startNMInstances(injector); startJavaBasedTaskInstance(injector); } private void startMesosDriver(Injector injector) { LOGGER.info("starting mesosDriver.."); injector.getInstance(MyriadDriverManager.class).startDriver(); LOGGER.info("started mesosDriver.."); } /** * Brings up the embedded jetty webserver for serving REST APIs. * * @param injector */ private void initWebApp(Injector injector) throws Exception { webServer = injector.getInstance(MyriadWebServer.class); webServer.start(); } /** * Initializes health checks. 
* * @param injector */ private void initHealthChecks(Injector injector) { LOGGER.info("Initializing HealthChecks"); healthCheckRegistry = new HealthCheckRegistry(); healthCheckRegistry.register(MesosMasterHealthCheck.NAME, injector.getInstance(MesosMasterHealthCheck.class)); healthCheckRegistry.register(ZookeeperHealthCheck.NAME, injector.getInstance(ZookeeperHealthCheck.class)); healthCheckRegistry.register(MesosDriverHealthCheck.NAME, injector.getInstance(MesosDriverHealthCheck.class)); } private void initProfiles(Injector injector) { LOGGER.info("Initializing Profiles"); ServiceProfileManager profileManager = injector.getInstance(ServiceProfileManager.class); TaskConstraintsManager taskConstraintsManager = injector.getInstance(TaskConstraintsManager.class); taskConstraintsManager.addTaskConstraints(NodeManagerConfiguration.NM_TASK_PREFIX, new TaskFactory.NMTaskConstraints()); Map<String, Map<String, String>> profiles = injector.getInstance(MyriadConfiguration.class).getProfiles(); TaskUtils taskUtils = injector.getInstance(TaskUtils.class); if (MapUtils.isNotEmpty(profiles)) { for (Map.Entry<String, Map<String, String>> profile : profiles.entrySet()) { Map<String, String> profileResourceMap = profile.getValue(); if (MapUtils.isNotEmpty(profiles) && profileResourceMap.containsKey("cpu") && profileResourceMap.containsKey("mem")) { Long cpu = Long.parseLong(profileResourceMap.get("cpu")); Long mem = Long.parseLong(profileResourceMap.get("mem")); ServiceResourceProfile serviceProfile = new ExtendedResourceProfile(new NMProfile(profile.getKey(), cpu, mem), taskUtils.getNodeManagerCpus(), taskUtils.getNodeManagerMemory()); serviceProfile.setExecutorCpu(taskUtils.getExecutorCpus()); serviceProfile.setExecutorMemory(taskUtils.getExecutorMemory()); profileManager.add(serviceProfile); } else { LOGGER.error("Invalid definition for profile: " + profile.getKey()); } } } } private void validateNMInstances(Injector injector) { LOGGER.info("Validating nmInstances.."); 
Map<String, Integer> nmInstances = injector.getInstance(MyriadConfiguration.class).getNmInstances(); ServiceProfileManager profileManager = injector.getInstance(ServiceProfileManager.class); long maxCpu = Long.MIN_VALUE; long maxMem = Long.MIN_VALUE; for (Map.Entry<String, Integer> entry : nmInstances.entrySet()) { String profile = entry.getKey(); ServiceResourceProfile nodeManager = profileManager.get(profile); if (nodeManager == null) { throw new RuntimeException("Invalid profile name '" + profile + "' specified in 'nmInstances'"); } if (entry.getValue() > 0) { if (nodeManager.getCpus() > maxCpu) { // find the profile with largest number of cpus maxCpu = nodeManager.getCpus().longValue(); maxMem = nodeManager.getMemory().longValue(); // use the memory from the same profile } } } if (maxCpu <= 0 || maxMem <= 0) { throw new RuntimeException( "Please configure 'nmInstances' with at least one instance/profile " + "with non-zero cpu/mem resources."); } } private void startNMInstances(Injector injector) { Map<String, Integer> nmInstances = injector.getInstance(MyriadConfiguration.class).getNmInstances(); MyriadOperations myriadOperations = injector.getInstance(MyriadOperations.class); ServiceProfileManager profileManager = injector.getInstance(ServiceProfileManager.class); SchedulerState schedulerState = injector.getInstance(SchedulerState.class); Set<org.apache.myriad.state.NodeTask> launchedNMTasks = new HashSet<>(); launchedNMTasks.addAll(schedulerState.getPendingTasksByType(NodeManagerConfiguration.NM_TASK_PREFIX)); if (!launchedNMTasks.isEmpty()) { LOGGER.info("{} NM(s) in pending state. Not launching additional NMs", launchedNMTasks.size()); return; } launchedNMTasks.addAll(schedulerState.getStagingTasksByType(NodeManagerConfiguration.NM_TASK_PREFIX)); if (!launchedNMTasks.isEmpty()) { LOGGER.info("{} NM(s) in staging state. 
Not launching additional NMs", launchedNMTasks.size()); return; } launchedNMTasks.addAll(schedulerState.getActiveTasksByType(NodeManagerConfiguration.NM_TASK_PREFIX)); if (!launchedNMTasks.isEmpty()) { LOGGER.info("{} NM(s) in active state. Not launching additional NMs", launchedNMTasks.size()); return; } for (Map.Entry<String, Integer> entry : nmInstances.entrySet()) { LOGGER.info("Launching {} NM(s) with profile {}", entry.getValue(), entry.getKey()); myriadOperations.flexUpCluster(profileManager.get(entry.getKey()), entry.getValue(), null); } } /** * Create ServiceProfile for any configured service * * @param cfg * @param injector */ private void initServiceConfigurations(MyriadConfiguration cfg, Injector injector) { LOGGER.info("Initializing initServiceConfigurations"); ServiceProfileManager profileManager = injector.getInstance(ServiceProfileManager.class); TaskConstraintsManager taskConstraintsManager = injector.getInstance(TaskConstraintsManager.class); Map<String, ServiceConfiguration> servicesConfigs = injector.getInstance(MyriadConfiguration.class).getServiceConfigurations(); if (servicesConfigs != null) { for (Map.Entry<String, ServiceConfiguration> entry : servicesConfigs.entrySet()) { final String taskPrefix = entry.getKey(); ServiceConfiguration config = entry.getValue(); final Double cpu = config.getCpus().or(ServiceConfiguration.DEFAULT_CPU); final Double mem = config.getJvmMaxMemoryMB().or(ServiceConfiguration.DEFAULT_MEMORY); profileManager.add(new ServiceResourceProfile(taskPrefix, cpu, mem)); taskConstraintsManager.addTaskConstraints(taskPrefix, new ServiceTaskConstraints(cfg, taskPrefix)); } } } private void initTerminatorService(Injector injector) { LOGGER.info("Initializing Terminator"); terminatorService = Executors.newScheduledThreadPool(1); final int initialDelay = 100; final int period = 2000; terminatorService.scheduleAtFixedRate(injector.getInstance(TaskTerminator.class), initialDelay, period, TimeUnit.MILLISECONDS); } private void 
initRebalancerService(MyriadConfiguration cfg, Injector injector) { if (cfg.isRebalancer()) { LOGGER.info("Initializing Rebalancer"); rebalancerService = Executors.newScheduledThreadPool(1); final int initialDelay = 100; final int period = 5000; rebalancerService.scheduleAtFixedRate(injector.getInstance(Rebalancer.class), initialDelay, period, TimeUnit.MILLISECONDS); } else { LOGGER.info("Rebalancer is not turned on"); } } private void initDisruptors(Injector injector) { LOGGER.info("Initializing Disruptors"); DisruptorManager disruptorManager = injector.getInstance(DisruptorManager.class); disruptorManager.init(injector); } /** * Start tasks for configured services * * @param injector */ private void startJavaBasedTaskInstance(Injector injector) { Map<String, ServiceConfiguration> auxServicesConfigs = injector.getInstance(MyriadConfiguration.class) .getServiceConfigurations(); if (auxServicesConfigs != null) { MyriadOperations myriadOperations = injector.getInstance(MyriadOperations.class); for (Map.Entry<String, ServiceConfiguration> entry : auxServicesConfigs.entrySet()) { try { myriadOperations.flexUpAService(entry.getValue().getMaxInstances().or(1), entry.getKey()); } catch (MyriadBadConfigurationException e) { LOGGER.warn("Exception while trying to flexup service: {}", entry.getKey(), e); } } } } }
/**
 * Copyright (C) 2009-2013 Enstratius, Inc.
 *
 * ====================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ====================================================================
 */

package org.dasein.cloud.aws.compute;

import java.io.UnsupportedEncodingException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.Logger;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.aws.AWSCloud;
import org.dasein.cloud.compute.AutoScalingSupport;
import org.dasein.cloud.compute.LaunchConfiguration;
import org.dasein.cloud.compute.ScalingGroup;
import org.dasein.cloud.compute.VirtualMachineProduct;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.util.APITrace;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

// Dasein Cloud implementation of AWS Auto Scaling: builds signed query-API
// requests via EC2Method and parses the XML responses into Dasein model objects.
// NOTE: this class continues beyond the end of this chunk.
public class AutoScaling implements AutoScalingSupport {
    static private final Logger logger = Logger.getLogger(AutoScaling.class);

    // Cloud connection context; supplies credentials, region and API helpers.
    private AWSCloud provider = null;

    AutoScaling(AWSCloud provider) {
        this.provider = provider;
    }

    // Creates an auto-scaling group over the given zones; min/max sizes are
    // clamped to sane values. Returns the group name as its provider id.
    @Override
    public String createAutoScalingGroup(String name, String launchConfigurationId, int minServers, int maxServers, int cooldown, String ... zoneIds) throws InternalException, CloudException {
        APITrace.begin(provider, "AutoScaling.createAutoScalingGroup");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.CREATE_AUTO_SCALING_GROUP);
            EC2Method method;

            // Clamp: sizes may not be negative and max may not be below min.
            if( minServers < 0 ) {
                minServers = 0;
            }
            if( maxServers < minServers ) {
                maxServers = minServers;
            }
            parameters.put("AutoScalingGroupName", name);
            parameters.put("LaunchConfigurationName", launchConfigurationId);
            parameters.put("MinSize", String.valueOf(minServers));
            parameters.put("MaxSize", String.valueOf(maxServers));
            parameters.put("Cooldown", String.valueOf(cooldown));
            int i = 1;
            for( String zoneId : zoneIds ) {
                parameters.put("AvailabilityZones.member." + (i++), zoneId);
            }
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            return name;
        }
        finally {
            APITrace.end();
        }
    }

    // Creates a launch configuration tied to an image, instance type and
    // security groups. Returns the configuration name as its provider id.
    // NOTE(review): the trace label "createLaunchConfigursation" is misspelled;
    // it is a runtime string, so it is left untouched here — fix upstream.
    @Override
    public String createLaunchConfiguration(String name, String imageId, VirtualMachineProduct size, String ... firewalls) throws InternalException, CloudException {
        APITrace.begin(provider, "AutoScaling.createLaunchConfigursation");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.CREATE_LAUNCH_CONFIGURATION);
            EC2Method method;

            parameters.put("LaunchConfigurationName", name);
            parameters.put("ImageId", imageId);
            parameters.put("InstanceType", size.getProviderProductId());
            int i = 1;
            for( String fw : firewalls ) {
                parameters.put("SecurityGroup.member." + (i++), fw);
            }
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            return name;
        }
        finally {
            APITrace.end();
        }
    }

    // Deletes an auto-scaling group by name.
    @Override
    public void deleteAutoScalingGroup(String providerAutoScalingGroupId) throws InternalException, CloudException {
        APITrace.begin(provider, "AutoScaling.deleteAutoScalingGroup");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DELETE_AUTO_SCALING_GROUP);
            EC2Method method;

            parameters.put("AutoScalingGroupName", providerAutoScalingGroupId);
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
        }
        finally {
            APITrace.end();
        }
    }

    // Deletes a launch configuration by name.
    @Override
    public void deleteLaunchConfiguration(String providerLaunchConfigurationId) throws InternalException, CloudException {
        APITrace.begin(provider, "AutoScaling.deleteLaunchConfiguration");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DELETE_LAUNCH_CONFIGURATION);
            EC2Method method;

            parameters.put("LaunchConfigurationName", providerLaunchConfigurationId);
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
        }
        finally {
            APITrace.end();
        }
    }

    // Creates or updates a CloudWatch-style scaling trigger on a group.
    // NOTE(review): lowerIncrementAbsolute/upperIncrementAbsolute are accepted
    // but never sent to the API — presumably AWS only supports the default
    // interpretation here; confirm against the legacy trigger API.
    @Override
    public String setTrigger(String name, String scalingGroupId, String statistic, String unitOfMeasure, String metric, int periodInSeconds, double lowerThreshold, double upperThreshold, int lowerIncrement, boolean lowerIncrementAbsolute, int upperIncrement, boolean upperIncrementAbsolute, int breachDuration) throws InternalException, CloudException {
        APITrace.begin(provider, "AutoScaling.setTrigger");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.CREATE_OR_UPDATE_SCALING_TRIGGER);
            EC2Method method;

            parameters.put("AutoScalingGroupName", scalingGroupId);
            parameters.put("MeasureName", metric);
            parameters.put("Period", String.valueOf(periodInSeconds));
            parameters.put("LowerThreshold", String.valueOf(lowerThreshold));
            parameters.put("UpperThreshold", String.valueOf(upperThreshold));
            parameters.put("UpperBreachScaleIncrement", String.valueOf(upperIncrement));
            parameters.put("LowerBreachScaleIncrement", String.valueOf(lowerIncrement));
            parameters.put("BreachDuration", String.valueOf(breachDuration));
            parameters.put("TriggerName", name);
            parameters.put("Unit", unitOfMeasure);
            parameters.put("Statistic", statistic);
            parameters.put("Dimensions.member.1.Name", "AutoScalingGroupName");
            parameters.put("Dimensions.member.1.Value", scalingGroupId);
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            return name;
        }
        finally {
            APITrace.end();
        }
    }

    // Builds the common signed-request parameter set (action, credentials,
    // signature algorithm, timestamp, API version) for every call.
    private Map<String,String> getAutoScalingParameters(ProviderContext ctx, String action) throws InternalException {
        APITrace.begin(provider, "AutoScaling.getAutoScalingParameters");
        try {
            HashMap<String,String> parameters = new HashMap<String,String>();

            parameters.put(AWSCloud.P_ACTION, action);
            parameters.put(AWSCloud.P_SIGNATURE_VERSION, AWSCloud.SIGNATURE);
            try {
                parameters.put(AWSCloud.P_ACCESS, new String(ctx.getAccessPublic(), "utf-8"));
            }
            catch( UnsupportedEncodingException e ) {
                logger.error(e);
                e.printStackTrace();
                throw new InternalException(e);
            }
            parameters.put(AWSCloud.P_SIGNATURE_METHOD, AWSCloud.EC2_ALGORITHM);
            parameters.put(AWSCloud.P_TIMESTAMP, provider.getTimestamp(System.currentTimeMillis(), true));
            parameters.put(AWSCloud.P_VERSION, provider.getAutoScaleVersion());
            return parameters;
        }
        finally {
            APITrace.end();
        }
    }

    // Region-scoped endpoint for the Auto Scaling query API.
    private String getAutoScalingUrl() throws CloudException {
        ProviderContext ctx = provider.getContext();

        if( ctx == null ) {
            throw new CloudException("No context has been set for this request");
        }
        return "https://autoscaling." + ctx.getRegionId() + ".amazonaws.com";
    }

    // Looks up a single launch configuration by name; null if not found.
    @Override
    public LaunchConfiguration getLaunchConfiguration(String providerLaunchConfigurationId) throws CloudException, InternalException {
        APITrace.begin(provider, "AutoScaling.getLaunchConfiguration");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_LAUNCH_CONFIGURATIONS);
            EC2Method method;
            NodeList blocks;
            Document doc;

            parameters.put("LaunchConfigurationNames.member.1", providerLaunchConfigurationId);
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                doc = method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            blocks = doc.getElementsByTagName("LaunchConfigurations");
            for( int i=0; i<blocks.getLength(); i++ ) {
                NodeList items = blocks.item(i).getChildNodes();

                for( int j=0; j<items.getLength(); j++ ) {
                    Node item = items.item(j);

                    if( item.getNodeName().equals("member") ) {
                        LaunchConfiguration cfg = toLaunchConfiguration(item);

                        if( cfg != null ) {
                            return cfg;
                        }
                    }
                }
            }
            return null;
        }
        finally {
            APITrace.end();
        }
    }

    // Looks up a single scaling group by name; null if not found.
    @Override
    public ScalingGroup getScalingGroup(String providerScalingGroupId) throws CloudException, InternalException {
        APITrace.begin(provider, "AutoScaling.getScalingGroup");
        try {
            ProviderContext ctx = provider.getContext();

            if( ctx == null ) {
                throw new CloudException("No context has been set for this request");
            }
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_AUTO_SCALING_GROUPS);
            EC2Method method;
            NodeList blocks;
            Document doc;

            parameters.put("AutoScalingGroupNames.member.1", providerScalingGroupId);
            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                doc = method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            blocks = doc.getElementsByTagName("AutoScalingGroups");
            for( int i=0; i<blocks.getLength(); i++ ) {
                NodeList members = blocks.item(i).getChildNodes();

                for( int j=0; j<members.getLength(); j++ ) {
                    Node item = members.item(j);

                    if( item.getNodeName().equals("member") ) {
                        ScalingGroup group = toScalingGroup(ctx, item);

                        if( group != null ) {
                            return group;
                        }
                    }
                }
            }
            return null;
        }
        finally {
            APITrace.end();
        }
    }

    // Probes the account's access to the Auto Scaling API: a credential
    // validation error means "not subscribed"; other errors propagate.
    @Override
    public boolean isSubscribed() throws CloudException, InternalException {
        APITrace.begin(provider, "AutoScaling.isSubscribed");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_AUTO_SCALING_GROUPS);
            EC2Method method;

            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                method.invoke();
                return true;
            }
            catch( EC2Exception e ) {
                String msg = e.getSummary();

                if( msg != null && msg.contains("not able to validate the provided access credentials") ) {
                    return false;
                }
                logger.error("AWS Error checking subscription: " + e.getCode() + "/" + e.getSummary());
                if( logger.isDebugEnabled() ) {
                    e.printStackTrace();
                }
                throw new CloudException(e);
            }
        }
        finally {
            APITrace.end();
        }
    }

    // Lists lightweight id/status tuples for all launch configurations.
    @Override
    public @Nonnull Iterable<ResourceStatus> listLaunchConfigurationStatus() throws CloudException, InternalException {
        APITrace.begin(provider, "AutoScaling.listLaunchConfigurationStatus");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_LAUNCH_CONFIGURATIONS);
            ArrayList<ResourceStatus> list = new ArrayList<ResourceStatus>();
            EC2Method method;
            NodeList blocks;
            Document doc;

            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                doc = method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            blocks = doc.getElementsByTagName("LaunchConfigurations");
            for( int i=0; i<blocks.getLength(); i++ ) {
                NodeList items = blocks.item(i).getChildNodes();

                for( int j=0; j<items.getLength(); j++ ) {
                    Node item = items.item(j);

                    if( item.getNodeName().equals("member") ) {
                        ResourceStatus status = toLCStatus(item);

                        if( status != null ) {
                            list.add(status);
                        }
                    }
                }
            }
            return list;
        }
        finally {
            APITrace.end();
        }
    }

    // Lists all launch configurations in the current region.
    @Override
    public Collection<LaunchConfiguration> listLaunchConfigurations() throws CloudException, InternalException {
        APITrace.begin(provider, "AutoScaling.listLaunchConfigurations");
        try {
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_LAUNCH_CONFIGURATIONS);
            ArrayList<LaunchConfiguration> list = new ArrayList<LaunchConfiguration>();
            EC2Method method;
            NodeList blocks;
            Document doc;

            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                doc = method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            blocks = doc.getElementsByTagName("LaunchConfigurations");
            for( int i=0; i<blocks.getLength(); i++ ) {
                NodeList items = blocks.item(i).getChildNodes();

                for( int j=0; j<items.getLength(); j++ ) {
                    Node item = items.item(j);

                    if( item.getNodeName().equals("member") ) {
                        LaunchConfiguration cfg = toLaunchConfiguration(item);

                        if( cfg != null ) {
                            list.add(cfg);
                        }
                    }
                }
            }
            return list;
        }
        finally {
            APITrace.end();
        }
    }

    // Lists lightweight id/status tuples for all scaling groups.
    // NOTE: this method continues past the end of the visible chunk.
    @Override
    public Iterable<ResourceStatus> listScalingGroupStatus() throws CloudException, InternalException {
        APITrace.begin(provider, "AutoScaling.listScalingGroupStatus");
        try {
            ProviderContext ctx = provider.getContext();

            if( ctx == null ) {
                throw new CloudException("No context has been set for this request");
            }
            ArrayList<ResourceStatus> list = new ArrayList<ResourceStatus>();
            Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_AUTO_SCALING_GROUPS);
            EC2Method method;
            NodeList blocks;
            Document doc;

            method = new EC2Method(provider, getAutoScalingUrl(), parameters);
            try {
                doc = method.invoke();
            }
            catch( EC2Exception e ) {
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
            blocks =
doc.getElementsByTagName("AutoScalingGroups"); for( int i=0; i<blocks.getLength(); i++ ) { NodeList items = blocks.item(i).getChildNodes(); for( int j=0; j<items.getLength(); j++ ) { Node item = items.item(j); if( item.getNodeName().equals("member") ) { ResourceStatus status = toGroupStatus(item); if( status != null ) { list.add(status); } } } } return list; } finally { APITrace.end(); } } @Override public Collection<ScalingGroup> listScalingGroups() throws CloudException, InternalException { APITrace.begin(provider, "AutoScaling.listScalingGroups"); try { ProviderContext ctx = provider.getContext(); if( ctx == null ) { throw new CloudException("No context has been set for this request"); } ArrayList<ScalingGroup> list = new ArrayList<ScalingGroup>(); Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.DESCRIBE_AUTO_SCALING_GROUPS); EC2Method method; NodeList blocks; Document doc; method = new EC2Method(provider, getAutoScalingUrl(), parameters); try { doc = method.invoke(); } catch( EC2Exception e ) { logger.error(e.getSummary()); throw new CloudException(e); } blocks = doc.getElementsByTagName("AutoScalingGroups"); for( int i=0; i<blocks.getLength(); i++ ) { NodeList items = blocks.item(i).getChildNodes(); for( int j=0; j<items.getLength(); j++ ) { Node item = items.item(j); if( item.getNodeName().equals("member") ) { ScalingGroup group = toScalingGroup(ctx, item); if( group != null ) { list.add(group); } } } } return list; } finally { APITrace.end(); } } @Override public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) { if( action.equals(AutoScalingSupport.ANY) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + "*" }; } if( action.equals(AutoScalingSupport.CREATE_LAUNCH_CONFIGURATION) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.CREATE_LAUNCH_CONFIGURATION }; } else if( action.equals(AutoScalingSupport.CREATE_SCALING_GROUP) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + 
EC2Method.CREATE_AUTO_SCALING_GROUP }; } else if( action.equals(AutoScalingSupport.GET_LAUNCH_CONFIGURATION) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.DESCRIBE_LAUNCH_CONFIGURATIONS }; } else if( action.equals(AutoScalingSupport.GET_SCALING_GROUP) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.DESCRIBE_AUTO_SCALING_GROUPS }; } else if( action.equals(AutoScalingSupport.LIST_LAUNCH_CONFIGURATION) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.DESCRIBE_LAUNCH_CONFIGURATIONS }; } else if( action.equals(AutoScalingSupport.LIST_SCALING_GROUP) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.DESCRIBE_AUTO_SCALING_GROUPS }; } else if( action.equals(AutoScalingSupport.REMOVE_LAUNCH_CONFIGURATION) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.DELETE_LAUNCH_CONFIGURATION }; } else if( action.equals(AutoScalingSupport.REMOVE_SCALING_GROUP) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.DELETE_AUTO_SCALING_GROUP }; } else if( action.equals(AutoScalingSupport.SET_CAPACITY) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.SET_DESIRED_CAPACITY }; } else if( action.equals(AutoScalingSupport.SET_SCALING_TRIGGER) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.CREATE_OR_UPDATE_SCALING_TRIGGER }; } else if( action.equals(AutoScalingSupport.UPDATE_SCALING_GROUP) ) { return new String[] { EC2Method.AUTOSCALING_PREFIX + EC2Method.UPDATE_AUTO_SCALING_GROUP }; } return new String[0]; } @Override public void setDesiredCapacity(String scalingGroupId, int capacity) throws CloudException, InternalException { APITrace.begin(provider, "AutoScaling.setDesiredCapacity"); try { Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.SET_DESIRED_CAPACITY); EC2Method method; parameters.put("AutoScalingGroupName", scalingGroupId); parameters.put("DesiredCapacity", String.valueOf(capacity)); method = new 
EC2Method(provider, getAutoScalingUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { logger.error(e.getSummary()); throw new CloudException(e); } } finally { APITrace.end(); } } private @Nullable ResourceStatus toGroupStatus( @Nullable Node item) { if( item == null ) { return null; } NodeList attrs = item.getChildNodes(); String groupId = null; for( int i=0; i<attrs.getLength(); i++ ) { Node attr = attrs.item(i); if( attr.getNodeName().equalsIgnoreCase("AutoScalingGroupName") ) { groupId = attr.getFirstChild().getNodeValue(); } } if( groupId == null ) { return null; } return new ResourceStatus(groupId, true); } private @Nullable LaunchConfiguration toLaunchConfiguration(@Nullable Node item) { if( item == null ) { return null; } LaunchConfiguration cfg = new LaunchConfiguration(); NodeList attrs = item.getChildNodes(); for( int i=0; i<attrs.getLength(); i++ ) { Node attr = attrs.item(i); String name; name = attr.getNodeName(); if( name.equalsIgnoreCase("ImageId") ) { cfg.setProviderImageId(attr.getFirstChild().getNodeValue()); } else if( name.equalsIgnoreCase("InstanceType") ) { cfg.setServerSizeId(attr.getFirstChild().getNodeValue()); } else if( name.equalsIgnoreCase("LaunchConfigurationName") ) { String lcname = attr.getFirstChild().getNodeValue(); cfg.setProviderLaunchConfigurationId(lcname); cfg.setName(lcname); } else if( name.equalsIgnoreCase("CreatedTime") ) { SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); try { cfg.setCreationTimestamp(fmt.parse(attr.getFirstChild().getNodeValue()).getTime()); } catch( ParseException e ) { logger.error("Could not parse timestamp: " + attr.getFirstChild().getNodeValue()); cfg.setCreationTimestamp(System.currentTimeMillis()); } } else if( name.equalsIgnoreCase("SecurityGroups") ) { String[] ids; if( attr.hasChildNodes() ) { ArrayList<String> instanceIds = new ArrayList<String>(); NodeList instances = attr.getChildNodes(); for( int j=0; j<instances.getLength(); j++ ) { Node 
instance = instances.item(j); if( instance.getNodeName().equalsIgnoreCase("member") ) { if( instance.hasChildNodes() ) { NodeList items = instance.getChildNodes(); for( int k=0; k<items.getLength(); k++ ) { Node val = items.item(k); if( val.getNodeName().equalsIgnoreCase("InstanceId") ) { instanceIds.add(val.getFirstChild().getNodeValue()); } } } } } ids = new String[instanceIds.size()]; int j=0; for( String id : instanceIds ) { ids[j++] = id; } } else { ids = new String[0]; } cfg.setProviderFirewallIds(ids); } } return cfg; } private @Nullable ResourceStatus toLCStatus(@Nullable Node item) { if( item == null ) { return null; } NodeList attrs = item.getChildNodes(); String lcId = null; for( int i=0; i<attrs.getLength(); i++ ) { Node attr = attrs.item(i); if( attr.getNodeName().equalsIgnoreCase("LaunchConfigurationName") ) { lcId = attr.getFirstChild().getNodeValue(); } } if( lcId == null ) { return null; } return new ResourceStatus(lcId, true); } private @Nullable ScalingGroup toScalingGroup(@Nonnull ProviderContext ctx, @Nullable Node item) { if( item == null ) { return null; } NodeList attrs = item.getChildNodes(); ScalingGroup group = new ScalingGroup(); group.setProviderOwnerId(ctx.getAccountNumber()); group.setProviderRegionId(ctx.getRegionId()); for( int i=0; i<attrs.getLength(); i++ ) { Node attr = attrs.item(i); String name; name = attr.getNodeName(); if( name.equalsIgnoreCase("MinSize") ) { group.setMinServers(Integer.parseInt(attr.getFirstChild().getNodeValue())); } else if( name.equalsIgnoreCase("MaxSize") ) { group.setMaxServers(Integer.parseInt(attr.getFirstChild().getNodeValue())); } else if( name.equalsIgnoreCase("Cooldown") ) { group.setCooldown(Integer.parseInt(attr.getFirstChild().getNodeValue())); } else if( name.equalsIgnoreCase("CreatedTime") ) { SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); try { group.setCreationTimestamp(fmt.parse(attr.getFirstChild().getNodeValue()).getTime()); } catch( ParseException e ) { 
logger.error("Could not parse timestamp: " + attr.getFirstChild().getNodeValue()); group.setCreationTimestamp(System.currentTimeMillis()); } } else if( name.equalsIgnoreCase("DesiredCapacity") ) { group.setTargetCapacity(Integer.parseInt(attr.getFirstChild().getNodeValue())); } else if( name.equalsIgnoreCase("LaunchConfigurationName") ) { group.setProviderLaunchConfigurationId(attr.getFirstChild().getNodeValue()); } else if( name.equalsIgnoreCase("AutoScalingGroupName") ) { String gname = attr.getFirstChild().getNodeValue(); group.setProviderScalingGroupId(gname); group.setName(gname); group.setDescription(gname); } else if( name.equalsIgnoreCase("Instances") ) { String[] ids; if( attr.hasChildNodes() ) { ArrayList<String> instanceIds = new ArrayList<String>(); NodeList instances = attr.getChildNodes(); for( int j=0; j<instances.getLength(); j++ ) { Node instance = instances.item(j); if( instance.getNodeName().equals("member") ) { if( instance.hasChildNodes() ) { NodeList items = instance.getChildNodes(); for( int k=0; k<items.getLength(); k++ ) { Node val = items.item(k); if( val.getNodeName().equalsIgnoreCase("InstanceId") ) { instanceIds.add(val.getFirstChild().getNodeValue()); } } } } } ids = new String[instanceIds.size()]; int j=0; for( String id : instanceIds ) { ids[j++] = id; } } else { ids = new String[0]; } group.setProviderServerIds(ids); } else if( name.equalsIgnoreCase("AvailabilityZones") ) { String[] ids; if( attr.hasChildNodes() ) { ArrayList<String> zoneIds = new ArrayList<String>(); NodeList zones = attr.getChildNodes(); for( int j=0; j<zones.getLength(); j++ ) { Node zone = zones.item(j); if( zone.getNodeName().equalsIgnoreCase("member") ) { zoneIds.add(zone.getFirstChild().getNodeValue()); } } ids = new String[zoneIds.size()]; int j=0; for( String zoneId : zoneIds ) { ids[j++] = zoneId; } } else { ids = new String[0]; } group.setProviderDataCenterIds(ids); } } return group; } @Override public void updateAutoScalingGroup(@Nonnull String 
scalingGroupId, @Nonnull String launchConfigurationId, @Nonnegative int minServers, @Nonnegative int maxServers, @Nonnegative int cooldown, @Nonnull String ... zoneIds) throws InternalException, CloudException { APITrace.begin(provider, "AutoScaling.updateAutoScalingGroup"); try { Map<String,String> parameters = getAutoScalingParameters(provider.getContext(), EC2Method.UPDATE_AUTO_SCALING_GROUP); EC2Method method; if( minServers < 0 ) { minServers = 0; } if( maxServers < minServers ) { maxServers = minServers; } parameters.put("AutoScalingGroupName", scalingGroupId); parameters.put("LaunchConfigurationName", launchConfigurationId); parameters.put("MinSize", String.valueOf(minServers)); parameters.put("MaxSize", String.valueOf(maxServers)); parameters.put("Cooldown", String.valueOf(cooldown)); int i = 1; for( String zoneId : zoneIds ) { parameters.put("AvailabilityZones.member." + (i++), zoneId); } method = new EC2Method(provider, getAutoScalingUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { logger.error(e.getSummary()); throw new CloudException(e); } } finally { APITrace.end(); } } }
package com.instacart.library.truetime;

import android.content.Context;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.FlowableEmitter;
import io.reactivex.FlowableOnSubscribe;
import io.reactivex.FlowableTransformer;
import io.reactivex.Single;
import io.reactivex.annotations.NonNull;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.functions.Predicate;
import io.reactivex.schedulers.Schedulers;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import org.reactivestreams.Publisher;

/**
 * Reactive (RxJava 2) facade over {@link TrueTime}.
 *
 * All {@code with*} configuration methods delegate to the {@link TrueTime} superclass and
 * return {@code this} so calls can be chained fluently off {@link #build()}.
 * The heavy lifting happens in {@link #initializeNtp(String)}: resolve the pool host to IPs,
 * query each IP several times over UDP, and pick a representative response.
 */
public class TrueTimeRx extends TrueTime {

    // Eagerly-created singleton returned by build(); configuration mutates this shared instance.
    private static final TrueTimeRx RX_INSTANCE = new TrueTimeRx();
    private static final String TAG = TrueTimeRx.class.getSimpleName();

    // How many times a failed single-IP NTP request is retried (see bestResponseAgainstSingleIp);
    // defaults to 50, overridable via withRetryCount.
    private int _retryCount = 50;

    /** Entry point: returns the shared singleton to start a fluent configuration chain. */
    public static TrueTimeRx build() {
        return RX_INSTANCE;
    }

    /** Cache true-time info in Android SharedPreferences; returns {@code this} for chaining. */
    public TrueTimeRx withSharedPreferencesCache(Context context) {
        super.withSharedPreferencesCache(context);
        return this;
    }

    /**
     * Provide your own cache interface to cache the true time information.
     * @param cacheInterface the customized cache interface to save the true time data.
     */
    public TrueTimeRx withCustomizedCache(CacheInterface cacheInterface) {
        super.withCustomizedCache(cacheInterface);
        return this;
    }

    /** Socket/connection timeout for the underlying NTP request; returns {@code this}. */
    public TrueTimeRx withConnectionTimeout(int timeout) {
        super.withConnectionTimeout(timeout);
        return this;
    }

    /** Upper bound on acceptable NTP root delay; returns {@code this}. */
    public TrueTimeRx withRootDelayMax(float rootDelay) {
        super.withRootDelayMax(rootDelay);
        return this;
    }

    /** Upper bound on acceptable NTP root dispersion; returns {@code this}. */
    public TrueTimeRx withRootDispersionMax(float rootDispersion) {
        super.withRootDispersionMax(rootDispersion);
        return this;
    }

    /** Upper bound on acceptable server response delay (millis); returns {@code this}. */
    public TrueTimeRx withServerResponseDelayMax(int serverResponseDelayInMillis) {
        super.withServerResponseDelayMax(serverResponseDelayInMillis);
        return this;
    }

    /** Toggle TrueLog output; returns {@code this}. */
    public TrueTimeRx withLoggingEnabled(boolean isLoggingEnabled) {
        super.withLoggingEnabled(isLoggingEnabled);
        return this;
    }

    /** Per-IP retry count used by the request pipeline (default 50); returns {@code this}. */
    public TrueTimeRx withRetryCount(int retryCount) {
        _retryCount = retryCount;
        return this;
    }

    /**
     * Initialize TrueTime
     * See {@link #initializeNtp(String)} for details on working
     *
     * @return accurate NTP Date
     */
    public Single<Date> initializeRx(String ntpPoolAddress) {
        // Short-circuit: if a previous initialization already succeeded, return "now" immediately
        // instead of re-running the full NTP algorithm.
        return isInitialized()
               ? Single.just(now())
               : initializeNtp(ntpPoolAddress).map(new Function<long[], Date>() {
                   @Override
                   public Date apply(long[] longs) throws Exception {
                       // The raw NTP response is discarded here; callers only want the Date.
                       return now();
                   }
               });
    }

    /**
     * Initialize TrueTime
     * A single NTP pool server is provided.
     * Using DNS we resolve that to multiple IP hosts (See {@link #initializeNtp(List)} for manually resolved IPs)
     *
     * Use this instead of {@link #initializeRx(String)} if you wish to also get additional info for
     * instrumentation/tracking actual NTP response data
     *
     * @param ntpPool NTP pool server e.g. time.apple.com, 0.us.pool.ntp.org
     * @return Observable of detailed long[] containing most important parts of the actual NTP response
     * See RESPONSE_INDEX_ prefixes in {@link SntpClient} for details
     */
    public Single<long[]> initializeNtp(String ntpPool) {
        return Flowable
              .just(ntpPool)
              .compose(resolveNtpPoolToIpAddresses())
              .compose(performNtpAlgorithm())
              .firstOrError();
    }

    /**
     * Initialize TrueTime
     * Use this if you want to resolve the NTP Pool address to individual IPs yourself
     *
     * See https://github.com/instacart/truetime-android/issues/42
     * to understand why you may want to do something like this.
     *
     * @param resolvedNtpAddresses list of resolved IP addresses for an NTP
     * @return Observable of detailed long[] containing most important parts of the actual NTP response
     * See RESPONSE_INDEX_ prefixes in {@link SntpClient} for details
     */
    public Single<long[]> initializeNtp(List<InetAddress> resolvedNtpAddresses) {
        return Flowable.fromIterable(resolvedNtpAddresses)
              .compose(performNtpAlgorithm())
              .firstOrError();
    }

    /**
     * Transformer that takes in a pool of NTP addresses
     * Against each IP host we issue a UDP call and retrieve the best response using the NTP algorithm
     */
    private FlowableTransformer<InetAddress, long[]> performNtpAlgorithm() {
        return new FlowableTransformer<InetAddress, long[]>() {
            @Override
            public Flowable<long[]> apply(Flowable<InetAddress> inetAddressObservable) {
                return inetAddressObservable
                      .map(new Function<InetAddress, String>() {
                          @Override
                          public String apply(InetAddress inetAddress) {
                              return inetAddress.getHostAddress();
                          }
                      })
                      .flatMap(bestResponseAgainstSingleIp(5)) // get best response from querying the ip 5 times
                      .take(5) // take 5 of the best results
                      .toList()
                      .toFlowable()
                      .filter(new Predicate<List<long[]>>() {
                          @Override
                          public boolean test(List<long[]> longs) throws Exception {
                              // Guard: filterMedianResponse would fail on an empty list.
                              return longs.size() > 0;
                          }
                      })
                      .map(filterMedianResponse())
                      .doOnNext(new Consumer<long[]>() {
                          @Override
                          public void accept(long[] ntpResponse) {
                              // Side effect: remember the winning response in memory and on disk.
                              cacheTrueTimeInfo(ntpResponse);
                              saveTrueTimeInfoToDisk();
                          }
                      });
            }
        };
    }

    // Resolves the pool hostname to all of its A records on the io() scheduler;
    // DNS failures surface as an error Flowable rather than a thrown exception.
    private FlowableTransformer<String, InetAddress> resolveNtpPoolToIpAddresses() {
        return new FlowableTransformer<String, InetAddress>() {
            @Override
            public Publisher<InetAddress> apply(Flowable<String> ntpPoolFlowable) {
                return ntpPoolFlowable
                      .observeOn(Schedulers.io())
                      .flatMap(new Function<String, Flowable<InetAddress>>() {
                          @Override
                          public Flowable<InetAddress> apply(String ntpPoolAddress) {
                              try {
                                  TrueLog.d(TAG, "---- resolving ntpHost : " + ntpPoolAddress);
                                  return Flowable.fromArray(InetAddress.getAllByName(ntpPoolAddress));
                              } catch (UnknownHostException e) {
                                  return Flowable.error(e);
                              }
                          }
                      });
            }
        };
    }

    // Queries one IP `repeatCount` times (each attempt retried up to _retryCount times on error)
    // and maps the collected responses to the single one with the least round-trip delay.
    private Function<String, Flowable<long[]>> bestResponseAgainstSingleIp(final int repeatCount) {
        return new Function<String, Flowable<long[]>>() {
            @Override
            public Flowable<long[]> apply(String singleIp) {
                return Flowable
                      .just(singleIp)
                      .repeat(repeatCount)
                      .flatMap(new Function<String, Flowable<long[]>>() {
                          @Override
                          public Flowable<long[]> apply(final String singleIpHostAddress) {
                              return Flowable.create(new FlowableOnSubscribe<long[]>() {
                                  @Override
                                  public void subscribe(@NonNull FlowableEmitter<long[]> o) throws Exception {
                                      TrueLog.d(TAG, "---- requestTime from: " + singleIpHostAddress);
                                      try {
                                          o.onNext(requestTime(singleIpHostAddress));
                                          o.onComplete();
                                      } catch (IOException e) {
                                          // tryOnError avoids UndeliverableException if downstream already cancelled.
                                          o.tryOnError(e);
                                      }
                                  }
                              }, BackpressureStrategy.BUFFER)
                                    .subscribeOn(Schedulers.io())
                                    .doOnError(new Consumer<Throwable>() {
                                        @Override
                                        public void accept(Throwable throwable) {
                                            TrueLog.e(TAG, "---- Error requesting time", throwable);
                                        }
                                    })
                                    .retry(_retryCount);
                          }
                      })
                      .toList()
                      .toFlowable()
                      .map(filterLeastRoundTripDelay()); // pick best response for each ip
            }
        };
    }

    // Sorts responses by SNTP round-trip delay (ascending) and returns the fastest one.
    private Function<List<long[]>, long[]> filterLeastRoundTripDelay() {
        return new Function<List<long[]>, long[]>() {
            @Override
            public long[] apply(List<long[]> responseTimeList) {
                Collections.sort(responseTimeList, new Comparator<long[]>() {
                    @Override
                    public int compare(long[] lhsParam, long[] rhsLongParam) {
                        long lhs = SntpClient.getRoundTripDelay(lhsParam);
                        long rhs = SntpClient.getRoundTripDelay(rhsLongParam);
                        return lhs < rhs ? -1 : (lhs == rhs ? 0 : 1);
                    }
                });

                TrueLog.d(TAG, "---- filterLeastRoundTrip: " + responseTimeList);

                return responseTimeList.get(0);
            }
        };
    }

    // Sorts responses by SNTP clock offset and returns the median, discarding outliers.
    private Function<List<long[]>, long[]> filterMedianResponse() {
        return new Function<List<long[]>, long[]>() {
            @Override
            public long[] apply(List<long[]> bestResponses) {
                Collections.sort(bestResponses, new Comparator<long[]>() {
                    @Override
                    public int compare(long[] lhsParam, long[] rhsParam) {
                        long lhs = SntpClient.getClockOffset(lhsParam);
                        long rhs = SntpClient.getClockOffset(rhsParam);
                        return lhs < rhs ? -1 : (lhs == rhs ? 0 : 1);
                    }
                });

                TrueLog.d(TAG, "---- bestResponse: " + Arrays.toString(bestResponses.get(bestResponses.size() / 2)));

                return bestResponses.get(bestResponses.size() / 2);
            }
        };
    }
}
/* * Copyright (c) 2014, Victor Nazarov <asviraspossible@gmail.com> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Victor Nazarov nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.github.sviperll.stream; import java.util.concurrent.BlockingQueue; import java.util.concurrent.SynchronousQueue; /** * * @author Victor Nazarov &lt;asviraspossible@gmail.com&gt; */ class Drainer<T> implements Runnable, SaturableConsuming<T> { private final Streamable<T> streamable; private final BlockingQueue<DrainerRequest> requestQueue = new SynchronousQueue<>(); private final BlockingQueue<DrainerResponse<T>> responseQueue = new SynchronousQueue<>(); private DrainerState<T> state = new CommunicatingState(); Drainer(Streamable<T> streamable) { this.streamable = streamable; } @Override public void run() { try { streamable.forEach(this); } catch (RuntimeException ex) { state.setException(ex); } state.finish(); } @Override public void accept(T value) { state.accept(value); } @Override public boolean needsMore() { return state.needsMore(); } public DrainerResponse<T> fetch() { return request(DrainerRequest.fetch()); } public DrainerResponse<T> close() { return request(DrainerRequest.close()); } private void setExceptionCaughtState(RuntimeException ex) { state = new ExceptionCaughtState(ex); } private void setClosedState() { state = new ClosedState(); } private DrainerResponse<T> request(DrainerRequest request) { for (;;) { try { requestQueue.put(request); break; } catch (InterruptedException ex) { } } for (;;) { try { return responseQueue.take(); } catch (InterruptedException ex) { } } } private DrainerRequest takeRequest() { for (;;) { try { DrainerRequest request = requestQueue.take(); return request; } catch (InterruptedException ex) { } } } private void putResponse(DrainerResponse<T> response) { for (;;) { try { responseQueue.put(response); return; } catch (InterruptedException ex) { } } } private interface DrainerState<T> extends SaturableConsuming<T> { void finish(); void setException(RuntimeException ex); } private class CommunicatingState implements DrainerState<T> { @Override public void accept(final T value) { DrainerRequest request = 
takeRequest(); try { request.accept(new DrainerRequestVisitor<Void>() { @Override public Void fetch() { putResponse(DrainerResponse.<T>fetched(value)); return null; } @Override public Void close() { setClosedState(); return null; } }); } catch (RuntimeException ex) { setExceptionCaughtState(ex); throw ex; } } @Override public boolean needsMore() { return true; } @Override public void finish() { takeRequest(); putResponse(DrainerResponse.<T>closed()); } @Override public void setException(RuntimeException ex) { takeRequest(); setExceptionCaughtState(ex); } } private class ClosedState implements DrainerState<T> { @Override public void accept(T value) { } @Override public boolean needsMore() { return false; } @Override public void finish() { putResponse(DrainerResponse.<T>closed()); } @Override public void setException(RuntimeException ex) { setExceptionCaughtState(ex); } } private class ExceptionCaughtState implements DrainerState<T> { private final RuntimeException exception; private ExceptionCaughtState(RuntimeException exception) { this.exception = exception; } @Override public void accept(T value) { } @Override public boolean needsMore() { return false; } @Override public void finish() { putResponse(DrainerResponse.<T>error(exception)); } @Override public void setException(RuntimeException ex) { setExceptionCaughtState(ex); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.utils; import java.lang.management.CompilationMXBean; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; import java.text.DecimalFormat; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.atomic.LongAdder; import org.apache.sysml.api.DMLScript; import org.apache.sysml.conf.ConfigurationManager; import org.apache.sysml.hops.OptimizerUtils; import org.apache.sysml.runtime.controlprogram.caching.CacheStatistics; import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext; import org.apache.sysml.runtime.instructions.Instruction; import org.apache.sysml.runtime.instructions.InstructionUtils; import org.apache.sysml.runtime.instructions.MRJobInstruction; import org.apache.sysml.runtime.instructions.cp.FunctionCallCPInstruction; import org.apache.sysml.runtime.instructions.spark.SPInstruction; import org.apache.sysml.runtime.matrix.data.LibMatrixDNN; /** * This class captures all statistics. 
*/ public class Statistics { private static long compileStartTime = 0; private static long compileEndTime = 0; private static long execStartTime = 0; private static long execEndTime = 0; // number of compiled/executed MR jobs private static final LongAdder numExecutedMRJobs = new LongAdder(); private static final LongAdder numCompiledMRJobs = new LongAdder(); // number of compiled/executed SP instructions private static final LongAdder numExecutedSPInst = new LongAdder(); private static final LongAdder numCompiledSPInst = new LongAdder(); //JVM stats (low frequency updates) private static long jitCompileTime = 0; //in milli sec private static long jvmGCTime = 0; //in milli sec private static long jvmGCCount = 0; //count //HOP DAG recompile stats (potentially high update frequency) private static final LongAdder hopRecompileTime = new LongAdder(); //in nano sec private static final LongAdder hopRecompilePred = new LongAdder(); //count private static final LongAdder hopRecompileSB = new LongAdder(); //count //CODEGEN private static final LongAdder codegenCompileTime = new LongAdder(); //in nano private static final LongAdder codegenClassCompileTime = new LongAdder(); //in nano private static final LongAdder codegenHopCompile = new LongAdder(); //count private static final LongAdder codegenCPlanCompile = new LongAdder(); //count private static final LongAdder codegenClassCompile = new LongAdder(); //count private static final LongAdder codegenEnumAll = new LongAdder(); //count private static final LongAdder codegenEnumEval = new LongAdder(); //count private static final LongAdder codegenEnumEvalP = new LongAdder(); //count private static final LongAdder codegenPlanCacheHits = new LongAdder(); //count private static final LongAdder codegenPlanCacheTotal = new LongAdder(); //count //Function recompile stats private static final LongAdder funRecompileTime = new LongAdder(); //in nano sec private static final LongAdder funRecompiles = new LongAdder(); //count 
//Spark-specific stats private static long sparkCtxCreateTime = 0; private static final LongAdder sparkParallelize = new LongAdder(); private static final LongAdder sparkParallelizeCount = new LongAdder(); private static final LongAdder sparkCollect = new LongAdder(); private static final LongAdder sparkCollectCount = new LongAdder(); private static final LongAdder sparkBroadcast = new LongAdder(); private static final LongAdder sparkBroadcastCount = new LongAdder(); //PARFOR optimization stats (low frequency updates) private static long parforOptTime = 0; //in milli sec private static long parforOptCount = 0; //count private static long parforInitTime = 0; //in milli sec private static long parforMergeTime = 0; //in milli sec //heavy hitter counts and times private static HashMap<String,Long> _cpInstTime = new HashMap<String, Long>(); private static HashMap<String,Long> _cpInstCounts = new HashMap<String, Long>(); private static final LongAdder lTotalUIPVar = new LongAdder(); private static final LongAdder lTotalLix = new LongAdder(); private static final LongAdder lTotalLixUIP = new LongAdder(); public static synchronized long getNoOfExecutedMRJobs() { return numExecutedMRJobs.longValue(); } private static LongAdder numNativeFailures = new LongAdder(); public static LongAdder numNativeLibMatrixMultCalls = new LongAdder(); public static LongAdder numNativeConv2dCalls = new LongAdder(); public static LongAdder numNativeConv2dBwdDataCalls = new LongAdder(); public static LongAdder numNativeConv2dBwdFilterCalls = new LongAdder(); public static LongAdder numNativeSparseConv2dCalls = new LongAdder(); public static LongAdder numNativeSparseConv2dBwdFilterCalls = new LongAdder(); public static LongAdder numNativeSparseConv2dBwdDataCalls = new LongAdder(); public static long nativeLibMatrixMultTime = 0; public static long nativeConv2dTime = 0; public static long nativeConv2dBwdDataTime = 0; public static long nativeConv2dBwdFilterTime = 0; public static long 
recomputeNNZTime = 0; public static long examSparsityTime = 0; public static long allocateDoubleArrTime = 0; public static void incrementNativeFailuresCounter() { numNativeFailures.increment(); // This is very rare and am not sure it is possible at all. Our initial experiments never encountered this case. // Note: all the native calls have a fallback to Java; so if the user wants she can recompile SystemML by // commenting this exception and everything should work fine. throw new RuntimeException("Unexpected ERROR: OOM caused during JNI transfer. Please disable native BLAS by setting enviroment variable: SYSTEMML_BLAS=none"); } public static void incrementNoOfExecutedMRJobs() { numExecutedMRJobs.increment(); } public static void decrementNoOfExecutedMRJobs() { numExecutedMRJobs.decrement(); } public static long getNoOfCompiledMRJobs() { return numCompiledMRJobs.longValue(); } public static void incrementNoOfCompiledMRJobs() { numCompiledMRJobs.increment(); } public static long getNoOfExecutedSPInst() { return numExecutedSPInst.longValue(); } public static void incrementNoOfExecutedSPInst() { numExecutedSPInst.increment(); } public static void decrementNoOfExecutedSPInst() { numExecutedSPInst.decrement(); } public static long getNoOfCompiledSPInst() { return numCompiledSPInst.longValue(); } public static void incrementNoOfCompiledSPInst() { numCompiledSPInst.increment(); } public static long getTotalUIPVar() { return lTotalUIPVar.longValue(); } public static void incrementTotalUIPVar() { lTotalUIPVar.increment(); } public static long getTotalLixUIP() { return lTotalLixUIP.longValue(); } public static void incrementTotalLixUIP() { lTotalLixUIP.increment(); } public static long getTotalLix() { return lTotalLix.longValue(); } public static void incrementTotalLix() { lTotalLix.increment(); } public static void resetNoOfCompiledJobs( int count ) { //reset both mr/sp for multiple tests within one jvm numCompiledSPInst.reset(); numCompiledMRJobs.reset(); if( 
OptimizerUtils.isSparkExecutionMode() ) numCompiledSPInst.add(count); else numCompiledMRJobs.add(count); } public static void resetNoOfExecutedJobs() { //reset both mr/sp for multiple tests within one jvm numExecutedSPInst.reset(); numExecutedMRJobs.reset(); if( DMLScript.USE_ACCELERATOR ) GPUStatistics.setNoOfExecutedGPUInst(0); } public static synchronized void incrementJITCompileTime( long time ) { jitCompileTime += time; } public static synchronized void incrementJVMgcTime( long time ) { jvmGCTime += time; } public static synchronized void incrementJVMgcCount( long delta ) { jvmGCCount += delta; } public static void incrementHOPRecompileTime( long delta ) { hopRecompileTime.add(delta); } public static void incrementHOPRecompilePred() { hopRecompilePred.increment(); } public static void incrementHOPRecompilePred(long delta) { hopRecompilePred.add(delta); } public static void incrementHOPRecompileSB() { hopRecompileSB.increment(); } public static void incrementHOPRecompileSB(long delta) { hopRecompileSB.add(delta); } public static void incrementCodegenDAGCompile() { codegenHopCompile.increment(); } public static void incrementCodegenCPlanCompile(long delta) { codegenCPlanCompile.add(delta); } public static void incrementCodegenEnumAll(long delta) { codegenEnumAll.add(delta); } public static void incrementCodegenEnumEval(long delta) { codegenEnumEval.add(delta); } public static void incrementCodegenEnumEvalP(long delta) { codegenEnumEvalP.add(delta); } public static void incrementCodegenClassCompile() { codegenClassCompile.increment(); } public static void incrementCodegenCompileTime(long delta) { codegenCompileTime.add(delta); } public static void incrementCodegenClassCompileTime(long delta) { codegenClassCompileTime.add(delta); } public static void incrementCodegenPlanCacheHits() { codegenPlanCacheHits.increment(); } public static void incrementCodegenPlanCacheTotal() { codegenPlanCacheTotal.increment(); } public static long getCodegenDAGCompile() { return 
codegenHopCompile.longValue(); } public static long getCodegenCPlanCompile() { return codegenCPlanCompile.longValue(); } public static long getCodegenEnumAll() { return codegenEnumAll.longValue(); } public static long getCodegenEnumEval() { return codegenEnumEval.longValue(); } public static long getCodegenEnumEvalP() { return codegenEnumEvalP.longValue(); } public static long getCodegenClassCompile() { return codegenClassCompile.longValue(); } public static long getCodegenCompileTime() { return codegenCompileTime.longValue(); } public static long getCodegenClassCompileTime() { return codegenClassCompileTime.longValue(); } public static long getCodegenPlanCacheHits() { return codegenPlanCacheHits.longValue(); } public static long getCodegenPlanCacheTotal() { return codegenPlanCacheTotal.longValue(); } public static void incrementFunRecompileTime( long delta ) { funRecompileTime.add(delta); } public static void incrementFunRecompiles() { funRecompiles.increment(); } public static synchronized void incrementParForOptimCount(){ parforOptCount ++; } public static synchronized void incrementParForOptimTime( long time ) { parforOptTime += time; } public static synchronized void incrementParForInitTime( long time ) { parforInitTime += time; } public static synchronized void incrementParForMergeTime( long time ) { parforMergeTime += time; } public static void startCompileTimer() { if( DMLScript.STATISTICS ) compileStartTime = System.nanoTime(); } public static void stopCompileTimer() { if( DMLScript.STATISTICS ) compileEndTime = System.nanoTime(); } public static long getCompileTime() { return compileEndTime - compileStartTime; } /** * Starts the timer, should be invoked immediately before invoking * Program.execute() */ public static void startRunTimer() { execStartTime = System.nanoTime(); } /** * Stops the timer, should be invoked immediately after invoking * Program.execute() */ public static void stopRunTimer() { execEndTime = System.nanoTime(); } /** * Returns the 
total time of run in nanoseconds. * * @return run time in nanoseconds */ public static long getRunTime() { return execEndTime - execStartTime; } public static void reset() { hopRecompileTime.reset(); hopRecompilePred.reset(); hopRecompileSB.reset(); funRecompiles.reset(); funRecompileTime.reset(); codegenHopCompile.reset(); codegenCPlanCompile.reset(); codegenClassCompile.reset(); codegenEnumAll.reset(); codegenEnumEval.reset(); codegenEnumEvalP.reset(); codegenCompileTime.reset(); codegenClassCompileTime.reset(); codegenPlanCacheHits.reset(); codegenPlanCacheTotal.reset(); parforOptCount = 0; parforOptTime = 0; parforInitTime = 0; parforMergeTime = 0; lTotalLix.reset(); lTotalLixUIP.reset(); lTotalUIPVar.reset(); CacheStatistics.reset(); resetJITCompileTime(); resetJVMgcTime(); resetJVMgcCount(); resetCPHeavyHitters(); GPUStatistics.reset(); numNativeLibMatrixMultCalls.reset(); numNativeSparseConv2dCalls.reset(); numNativeSparseConv2dBwdDataCalls.reset(); numNativeSparseConv2dBwdFilterCalls.reset(); numNativeConv2dCalls.reset(); numNativeConv2dBwdDataCalls.reset(); numNativeConv2dBwdFilterCalls.reset(); numNativeFailures.reset(); nativeLibMatrixMultTime = 0; nativeConv2dTime = 0; nativeConv2dBwdFilterTime = 0; nativeConv2dBwdDataTime = 0; LibMatrixDNN.resetStatistics(); } public static void resetJITCompileTime(){ jitCompileTime = -1 * getJITCompileTime(); } public static void resetJVMgcTime(){ jvmGCTime = -1 * getJVMgcTime(); } public static void resetJVMgcCount(){ jvmGCTime = -1 * getJVMgcCount(); } public static void resetCPHeavyHitters(){ _cpInstTime.clear(); _cpInstCounts.clear(); } public static void setSparkCtxCreateTime(long ns) { sparkCtxCreateTime = ns; } public static void accSparkParallelizeTime(long t) { sparkParallelize.add(t); } public static void incSparkParallelizeCount(long c) { sparkParallelizeCount.add(c); } public static void accSparkCollectTime(long t) { sparkCollect.add(t); } public static void incSparkCollectCount(long c) { 
sparkCollectCount.add(c); } public static void accSparkBroadCastTime(long t) { sparkBroadcast.add(t); } public static void incSparkBroadcastCount(long c) { sparkBroadcastCount.add(c); } public static String getCPHeavyHitterCode( Instruction inst ) { String opcode = null; if( inst instanceof MRJobInstruction ) { MRJobInstruction mrinst = (MRJobInstruction) inst; opcode = "MR-Job_"+mrinst.getJobType(); } else if( inst instanceof SPInstruction ) { opcode = "SP_"+InstructionUtils.getOpCode(inst.toString()); if( inst instanceof FunctionCallCPInstruction ) { FunctionCallCPInstruction extfunct = (FunctionCallCPInstruction)inst; opcode = extfunct.getFunctionName(); } } else //CPInstructions { opcode = InstructionUtils.getOpCode(inst.toString()); if( inst instanceof FunctionCallCPInstruction ) { FunctionCallCPInstruction extfunct = (FunctionCallCPInstruction)inst; opcode = extfunct.getFunctionName(); } } return opcode; } /** * "Maintains" or adds time to per instruction/op timers, also increments associated count * @param instructionName name of the instruction/op * @param timeNanos time in nano seconds */ public synchronized static void maintainCPHeavyHitters( String instructionName, long timeNanos ) { Long oldVal = _cpInstTime.getOrDefault(instructionName, 0L); _cpInstTime.put(instructionName, oldVal + timeNanos); Long oldCnt = _cpInstCounts.getOrDefault(instructionName, 0L); _cpInstCounts.put(instructionName, oldCnt + 1); } public static Set<String> getCPHeavyHitterOpCodes() { return _cpInstTime.keySet(); } public static long getCPHeavyHitterCount(String opcode) { return _cpInstCounts.get(opcode); } /** * Obtain a string tabular representation of the heavy hitter instructions * that displays the time, instruction count, and optionally GPU stats about * each instruction. 
* * @param num * the maximum number of heavy hitters to display * @return string representing the heavy hitter instructions in tabular * format */ @SuppressWarnings("unchecked") public static String getHeavyHitters(int num) { int len = _cpInstTime.size(); if (num <= 0 || len <= 0) return "-"; // get top k via sort Entry<String, Long>[] tmp = _cpInstTime.entrySet().toArray(new Entry[len]); Arrays.sort(tmp, new Comparator<Entry<String, Long>>() { public int compare(Entry<String, Long> e1, Entry<String, Long> e2) { return e1.getValue().compareTo(e2.getValue()); } }); final String numCol = "#"; final String instCol = "Instruction"; final String timeSCol = "Time(s)"; final String countCol = "Count"; final String gpuCol = "Misc Timers"; StringBuilder sb = new StringBuilder(); int numHittersToDisplay = Math.min(num, len); int maxNumLen = String.valueOf(numHittersToDisplay).length(); int maxInstLen = instCol.length(); int maxTimeSLen = timeSCol.length(); int maxCountLen = countCol.length(); DecimalFormat sFormat = new DecimalFormat("#,##0.000"); for (int i = 0; i < numHittersToDisplay; i++) { Entry<String, Long> hh = tmp[len - 1 - i]; String instruction = hh.getKey(); Long timeNs = hh.getValue(); double timeS = (double) timeNs / 1000000000.0; maxInstLen = Math.max(maxInstLen, instruction.length()); String timeSString = sFormat.format(timeS); maxTimeSLen = Math.max(maxTimeSLen, timeSString.length()); maxCountLen = Math.max(maxCountLen, String.valueOf(_cpInstCounts.get(instruction)).length()); } maxInstLen = Math.min(maxInstLen, DMLScript.STATISTICS_MAX_WRAP_LEN); sb.append(String.format( " %" + maxNumLen + "s %-" + maxInstLen + "s %" + maxTimeSLen + "s %" + maxCountLen + "s", numCol, instCol, timeSCol, countCol)); if (GPUStatistics.DISPLAY_STATISTICS || DMLScript.FINEGRAINED_STATISTICS) { sb.append(" "); sb.append(gpuCol); } sb.append("\n"); for (int i = 0; i < numHittersToDisplay; i++) { String instruction = tmp[len - 1 - i].getKey(); String [] wrappedInstruction = 
wrap(instruction, maxInstLen); Long timeNs = tmp[len - 1 - i].getValue(); double timeS = (double) timeNs / 1000000000.0; String timeSString = sFormat.format(timeS); Long count = _cpInstCounts.get(instruction); int numLines = wrappedInstruction.length; String [] miscTimers = null; if (GPUStatistics.DISPLAY_STATISTICS || DMLScript.FINEGRAINED_STATISTICS) { miscTimers = wrap(GPUStatistics.getStringForCPMiscTimesPerInstruction(instruction), DMLScript.STATISTICS_MAX_WRAP_LEN); numLines = Math.max(numLines, miscTimers.length); } String miscFormatString = (GPUStatistics.DISPLAY_STATISTICS || DMLScript.FINEGRAINED_STATISTICS) ? " %" + DMLScript.STATISTICS_MAX_WRAP_LEN + "s" : "%s"; for(int wrapIter = 0; wrapIter < numLines; wrapIter++) { String instStr = (wrapIter < wrappedInstruction.length) ? wrappedInstruction[wrapIter] : ""; String miscTimerStr = ( (GPUStatistics.DISPLAY_STATISTICS || DMLScript.FINEGRAINED_STATISTICS) && wrapIter < miscTimers.length) ? miscTimers[wrapIter] : ""; if(wrapIter == 0) { // Display instruction count sb.append(String.format( " %" + maxNumLen + "d %-" + maxInstLen + "s %" + maxTimeSLen + "s %" + maxCountLen + "d" + miscFormatString, (i + 1), instStr, timeSString, count, miscTimerStr)); } else { sb.append(String.format( " %" + maxNumLen + "s %-" + maxInstLen + "s %" + maxTimeSLen + "s %" + maxCountLen + "s" + miscFormatString, "", instStr, "", "", miscTimerStr)); } sb.append("\n"); } } return sb.toString(); } /** * Returns the total time of asynchronous JIT compilation in milliseconds. 
* * @return JIT compile time */ public static long getJITCompileTime(){ long ret = -1; //unsupported CompilationMXBean cmx = ManagementFactory.getCompilationMXBean(); if( cmx.isCompilationTimeMonitoringSupported() ) { ret = cmx.getTotalCompilationTime(); ret += jitCompileTime; //add from remote processes } return ret; } public static long getJVMgcTime(){ long ret = 0; List<GarbageCollectorMXBean> gcxs = ManagementFactory.getGarbageCollectorMXBeans(); for( GarbageCollectorMXBean gcx : gcxs ) ret += gcx.getCollectionTime(); if( ret>0 ) ret += jvmGCTime; return ret; } public static long getJVMgcCount(){ long ret = 0; List<GarbageCollectorMXBean> gcxs = ManagementFactory.getGarbageCollectorMXBeans(); for( GarbageCollectorMXBean gcx : gcxs ) ret += gcx.getCollectionCount(); if( ret>0 ) ret += jvmGCCount; return ret; } public static long getHopRecompileTime(){ return hopRecompileTime.longValue(); } public static long getHopRecompiledPredDAGs(){ return hopRecompilePred.longValue(); } public static long getHopRecompiledSBDAGs(){ return hopRecompileSB.longValue(); } public static long getFunRecompileTime(){ return funRecompileTime.longValue(); } public static long getFunRecompiles(){ return funRecompiles.longValue(); } public static long getParforOptCount(){ return parforOptCount; } public static long getParforOptTime(){ return parforOptTime; } public static long getParforInitTime(){ return parforInitTime; } public static long getParforMergeTime(){ return parforMergeTime; } /** * Returns statistics of the DML program that was recently completed as a string * @return statistics as a string */ public static String display() { return display(DMLScript.STATISTICS_COUNT); } private static String [] wrap(String str, int wrapLength) { int numLines = (int) Math.ceil( ((double)str.length()) / wrapLength); int len = str.length(); String [] ret = new String[numLines]; for(int i = 0; i < numLines; i++) { ret[i] = str.substring(i*wrapLength, Math.min((i+1)*wrapLength, len)); } return 
ret; } /** * Returns statistics as a string * @param maxHeavyHitters The maximum number of heavy hitters that are printed * @return statistics as string */ public static String display(int maxHeavyHitters) { StringBuilder sb = new StringBuilder(); sb.append("SystemML Statistics:\n"); if( DMLScript.STATISTICS ) { sb.append("Total elapsed time:\t\t" + String.format("%.3f", (getCompileTime()+getRunTime())*1e-9) + " sec.\n"); // nanoSec --> sec sb.append("Total compilation time:\t\t" + String.format("%.3f", getCompileTime()*1e-9) + " sec.\n"); // nanoSec --> sec } sb.append("Total execution time:\t\t" + String.format("%.3f", getRunTime()*1e-9) + " sec.\n"); // nanoSec --> sec if( OptimizerUtils.isSparkExecutionMode() ) { if( DMLScript.STATISTICS ) //moved into stats on Shiv's request sb.append("Number of compiled Spark inst:\t" + getNoOfCompiledSPInst() + ".\n"); sb.append("Number of executed Spark inst:\t" + getNoOfExecutedSPInst() + ".\n"); } else { if( DMLScript.STATISTICS ) //moved into stats on Shiv's request sb.append("Number of compiled MR Jobs:\t" + getNoOfCompiledMRJobs() + ".\n"); sb.append("Number of executed MR Jobs:\t" + getNoOfExecutedMRJobs() + ".\n"); } if( DMLScript.USE_ACCELERATOR && DMLScript.STATISTICS) sb.append(GPUStatistics.getStringForCudaTimers()); //show extended caching/compilation statistics if( DMLScript.STATISTICS ) { if(NativeHelper.blasType != null) { String blas = NativeHelper.blasType != null ? 
NativeHelper.blasType : ""; sb.append("Native " + blas + " calls (dense mult/conv/bwdF/bwdD):\t" + numNativeLibMatrixMultCalls.longValue() + "/" + numNativeConv2dCalls.longValue() + "/" + numNativeConv2dBwdFilterCalls.longValue() + "/" + numNativeConv2dBwdDataCalls.longValue() + ".\n"); sb.append("Native " + blas + " calls (sparse conv/bwdF/bwdD):\t" + numNativeSparseConv2dCalls.longValue() + "/" + numNativeSparseConv2dBwdFilterCalls.longValue() + "/" + numNativeSparseConv2dBwdDataCalls.longValue() + ".\n"); sb.append("Native " + blas + " times (dense mult/conv/bwdF/bwdD):\t" + String.format("%.3f", nativeLibMatrixMultTime*1e-9) + "/" + String.format("%.3f", nativeConv2dTime*1e-9) + "/" + String.format("%.3f", nativeConv2dBwdFilterTime*1e-9) + "/" + String.format("%.3f", nativeConv2dBwdDataTime*1e-9) + ".\n"); } if(recomputeNNZTime != 0 || examSparsityTime != 0 || allocateDoubleArrTime != 0) { sb.append("MatrixBlock times (recomputeNNZ/examSparsity/allocateDoubleArr):\t" + String.format("%.3f", recomputeNNZTime*1e-9) + "/" + String.format("%.3f", examSparsityTime*1e-9) + "/" + String.format("%.3f", allocateDoubleArrTime*1e-9) + ".\n"); } sb.append("Cache hits (Mem, WB, FS, HDFS):\t" + CacheStatistics.displayHits() + ".\n"); sb.append("Cache writes (WB, FS, HDFS):\t" + CacheStatistics.displayWrites() + ".\n"); sb.append("Cache times (ACQr/m, RLS, EXP):\t" + CacheStatistics.displayTime() + " sec.\n"); sb.append("HOP DAGs recompiled (PRED, SB):\t" + getHopRecompiledPredDAGs() + "/" + getHopRecompiledSBDAGs() + ".\n"); sb.append("HOP DAGs recompile time:\t" + String.format("%.3f", ((double)getHopRecompileTime())/1000000000) + " sec.\n"); if( getFunRecompiles()>0 ) { sb.append("Functions recompiled:\t\t" + getFunRecompiles() + ".\n"); sb.append("Functions recompile time:\t" + String.format("%.3f", ((double)getFunRecompileTime())/1000000000) + " sec.\n"); } if( ConfigurationManager.isCodegenEnabled() ) { sb.append("Codegen compile (DAG,CP,JC):\t" + getCodegenDAGCompile() 
+ "/" + getCodegenCPlanCompile() + "/" + getCodegenClassCompile() + ".\n"); sb.append("Codegen enum (All,Eval,EvalP):\t" + getCodegenEnumAll() + "/" + getCodegenEnumEval() + "/" + getCodegenEnumEvalP() + ".\n"); sb.append("Codegen compile times (DAG,JC):\t" + String.format("%.3f", (double)getCodegenCompileTime()/1000000000) + "/" + String.format("%.3f", (double)getCodegenClassCompileTime()/1000000000) + " sec.\n"); sb.append("Codegen plan cache hits:\t" + getCodegenPlanCacheHits() + "/" + getCodegenPlanCacheTotal() + ".\n"); } if( OptimizerUtils.isSparkExecutionMode() ){ String lazy = SparkExecutionContext.isLazySparkContextCreation() ? "(lazy)" : "(eager)"; sb.append("Spark ctx create time "+lazy+":\t"+ String.format("%.3f", ((double)sparkCtxCreateTime)*1e-9) + " sec.\n" ); // nanoSec --> sec sb.append("Spark trans counts (par,bc,col):" + String.format("%d/%d/%d.\n", sparkParallelizeCount.longValue(), sparkBroadcastCount.longValue(), sparkCollectCount.longValue())); sb.append("Spark trans times (par,bc,col):\t" + String.format("%.3f/%.3f/%.3f secs.\n", ((double)sparkParallelize.longValue())*1e-9, ((double)sparkBroadcast.longValue())*1e-9, ((double)sparkCollect.longValue())*1e-9)); } if( parforOptCount>0 ){ sb.append("ParFor loops optimized:\t\t" + getParforOptCount() + ".\n"); sb.append("ParFor optimize time:\t\t" + String.format("%.3f", ((double)getParforOptTime())/1000) + " sec.\n"); sb.append("ParFor initialize time:\t\t" + String.format("%.3f", ((double)getParforInitTime())/1000) + " sec.\n"); sb.append("ParFor result merge time:\t" + String.format("%.3f", ((double)getParforMergeTime())/1000) + " sec.\n"); sb.append("ParFor total update in-place:\t" + lTotalUIPVar + "/" + lTotalLixUIP + "/" + lTotalLix + "\n"); } sb.append("Total JIT compile time:\t\t" + ((double)getJITCompileTime())/1000 + " sec.\n"); sb.append("Total JVM GC count:\t\t" + getJVMgcCount() + ".\n"); sb.append("Total JVM GC time:\t\t" + ((double)getJVMgcTime())/1000 + " sec.\n"); 
LibMatrixDNN.appendStatistics(sb); sb.append("Heavy hitter instructions:\n" + getHeavyHitters(maxHeavyHitters)); } return sb.toString(); } }
package org.bouncycastle.cert.ocsp;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.List;
import java.util.Set;

import org.bouncycastle.asn1.ASN1Encoding;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ocsp.BasicOCSPResponse;
import org.bouncycastle.asn1.ocsp.ResponseData;
import org.bouncycastle.asn1.ocsp.SingleResponse;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.Certificate;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.operator.ContentVerifier;
import org.bouncycastle.operator.ContentVerifierProvider;
import org.bouncycastle.util.Encodable;

/**
 * Holder for a BasicOCSPResponse structure:
 * <pre>
 * BasicOCSPResponse ::= SEQUENCE {
 *    tbsResponseData      ResponseData,
 *    signatureAlgorithm   AlgorithmIdentifier,
 *    signature            BIT STRING,
 *    certs                [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
 * </pre>
 */
public class BasicOCSPResp
    implements Encodable
{
    private BasicOCSPResponse resp;
    private ResponseData data;
    private Extensions extensions;

    public BasicOCSPResp(
        BasicOCSPResponse resp)
    {
        this.resp = resp;
        this.data = resp.getTbsResponseData();
        this.extensions = Extensions.getInstance(resp.getTbsResponseData().getResponseExtensions());
    }

    /**
     * Return the DER encoding of the tbsResponseData field.
     * @return DER encoding of tbsResponseData, or null if the encoding fails
     */
    public byte[] getTBSResponseData()
    {
        try
        {
            return resp.getTbsResponseData().getEncoded(ASN1Encoding.DER);
        }
        catch (IOException e)
        {
            return null;
        }
    }

    /**
     * Return the algorithm identifier describing the signature used in the response.
     *
     * @return an AlgorithmIdentifier
     */
    public AlgorithmIdentifier getSignatureAlgorithmID()
    {
        return resp.getSignatureAlgorithm();
    }

    /**
     * Return the OCSP response version (the ASN.1 value plus one).
     */
    public int getVersion()
    {
        return data.getVersion().getValue().intValue() + 1;
    }

    public RespID getResponderId()
    {
        return new RespID(data.getResponderID());
    }

    public Date getProducedAt()
    {
        return OCSPUtils.extractDate(data.getProducedAt());
    }

    /**
     * Return the individual certificate status responses contained in this response.
     */
    public SingleResp[] getResponses()
    {
        ASN1Sequence respSeq = data.getResponses();
        SingleResp[] singleResps = new SingleResp[respSeq.size()];

        for (int idx = 0; idx < singleResps.length; idx++)
        {
            singleResps[idx] = new SingleResp(SingleResponse.getInstance(respSeq.getObjectAt(idx)));
        }

        return singleResps;
    }

    public boolean hasExtensions()
    {
        return extensions != null;
    }

    public Extension getExtension(ASN1ObjectIdentifier oid)
    {
        return (extensions != null) ? extensions.getExtension(oid) : null;
    }

    public List getExtensionOIDs()
    {
        return OCSPUtils.getExtensionOIDs(extensions);
    }

    public Set getCriticalExtensionOIDs()
    {
        return OCSPUtils.getCriticalExtensionOIDs(extensions);
    }

    public Set getNonCriticalExtensionOIDs()
    {
        return OCSPUtils.getNonCriticalExtensionOIDs(extensions);
    }

    public ASN1ObjectIdentifier getSignatureAlgOID()
    {
        return resp.getSignatureAlgorithm().getAlgorithm();
    }

    public byte[] getSignature()
    {
        return resp.getSignature().getOctets();
    }

    /**
     * Return any certificates carried in the optional certs field,
     * or an empty array if the field is absent.
     */
    public X509CertificateHolder[] getCerts()
    {
        //load the certificates if we have any
        ASN1Sequence certSeq = resp.getCerts();
        if (certSeq == null)
        {
            return OCSPUtils.EMPTY_CERTS;
        }

        X509CertificateHolder[] holders = new X509CertificateHolder[certSeq.size()];
        for (int idx = 0; idx < holders.length; idx++)
        {
            holders[idx] = new X509CertificateHolder(Certificate.getInstance(certSeq.getObjectAt(idx)));
        }

        return holders;
    }

    /**
     * verify the signature against the tbsResponseData object we contain.
     */
    public boolean isSignatureValid(
        ContentVerifierProvider verifierProvider)
        throws OCSPException
    {
        try
        {
            ContentVerifier verifier = verifierProvider.get(resp.getSignatureAlgorithm());
            OutputStream sigOut = verifier.getOutputStream();

            sigOut.write(resp.getTbsResponseData().getEncoded(ASN1Encoding.DER));
            sigOut.close();

            return verifier.verify(this.getSignature());
        }
        catch (Exception e)
        {
            throw new OCSPException("exception processing sig: " + e, e);
        }
    }

    /**
     * return the ASN.1 encoded representation of this object.
     */
    public byte[] getEncoded()
        throws IOException
    {
        return resp.getEncoded();
    }

    public boolean equals(Object o)
    {
        if (o == this)
        {
            return true;
        }

        if (!(o instanceof BasicOCSPResp))
        {
            return false;
        }

        BasicOCSPResp other = (BasicOCSPResp)o;

        return resp.equals(other.resp);
    }

    public int hashCode()
    {
        return resp.hashCode();
    }
}
/* Bento * * $Id: BentoRunner.java,v 1.2 2014/11/01 19:49:43 sthippo Exp $ * * Copyright (c) 2014 bentodev.org * * Use of this code in source or compiled form is subject to the * Bento Poetic License at http://www.bentodev.org/poetic-license.html */ package bento.runtime; import bento.lang.*; import java.io.*; import java.util.*; /** * Run a Bento program from the command line. * * BentoRunner compiles a Bento site, instantiates an object and outputs the result. * * @author * @version */ public class BentoRunner implements bento_processor { public static final String NAME = "BentoRunner"; public static final String VERSION = "1.0"; public static final String NAME_AND_VERSION = NAME + " " + VERSION; protected Exception exception = null; protected BentoSite mainSite = null; protected Map<String, BentoSite> sites = new HashMap<String, BentoSite>(); private boolean initedOk = false; private String logFileName = null; private boolean appendToLog = true; private String fileBase = "."; private boolean multithreaded = false; private String bentoPath = "."; private boolean recursive = false; private boolean customCore = false; private Core sharedCore = null; private HashMap<String, Object> properties = new HashMap<String, Object>(); protected String fileHandlerName = null; private String request = null; /** Main entry point. The following flags are recognized (in any order). All flags are optional. * <table><tr><th>argument</th><th>default</th><th>effect</th></tr><tr> * * <td> -bentopath <path>[<sep><path>]* </td><td> current directory </td><td> Sets the initial bentopath, which is a string of pathnames separated by the * platform-specific path separator character (e.g., colon on Unix and semicolon * on Windows). Pathnames may specify either files or directories. 
At startup, * for each pathname, the Bento server loads either the indicated file (if the * pathname specifies a file) or all the files with a .bento extension in the * indicated directory (if the pathname specifies a directory). * <td> -multithreaded </td><td> not multithreaded </td><td> Multithreaded compilation. If this flag is present, then bento * files are compiled in independent threads. </td> * <td> -recursive </td><td> not recursive </td><td> Recursive bentopath option. </td> * <td> -log <path> </td><td> no logging </td><td> All output messages are logged in the specified file. The file is overwritten * if it already exists. </td> * <td> -log.append <path> </td><td> no logging </td><td> All output messages are logged in the specified file. If the file exists, the * current content is preserved, and messages are appended to the end of the file. </td> * <td> -verbose </td><td> not verbose </td><td> Verbose output messages for debugging. </td>. * <td> -debug </td><td> debugging not enabled </td><td> Enable the built-in debugger. </td>. 
* */ public static void main(String[] args) { boolean noProblems = true; Map<String, String> initParams = paramsFromArgs(args); String problems = initParams.get("problems"); if (problems != null && !problems.equals("0")) { noProblems = false; } if (noProblems) { BentoRunner runner = new BentoRunner(initParams); Writer writer = new OutputStreamWriter(System.out); if (runner.initedOk) { try { runner.loadSite(); runner.run(writer); } catch (Throwable t) { noProblems = false; System.err.println("Problem running BentoRunner: " + t.getMessage()); t.printStackTrace(System.err); } } else { noProblems = false; } } else { System.out.println("Usage:"); System.out.println(" java -jar bento.jar [flags] obj_name\n"); System.out.println("where obj_name is the name of the object to be instantiated and\n"); System.out.println("the optional flags are among the following (in any order):\n"); System.out.println("Flag Effect"); System.out.println("----------------------------------------------------------------------------"); System.out.println("-bentopath <pathnames> Sets the initial bentopath, which is a string"); System.out.println(" of pathnames separated by the platform-specific"); System.out.println(" path separator character (e.g., colon on Unix"); System.out.println(" and semicolon on Windows). Pathnames may"); System.out.println(" specify either files or directories. At"); System.out.println(" startup, for each pathname, the Bento runner"); System.out.println(" loads either the indicated file (if the pathname"); System.out.println(" specifies a file) or all the files with a .bento"); System.out.println(" extension in the indicated directory (if the"); System.out.println(" pathname specifies a directory).\n"); System.out.println("-recursive Recursive bentopath option.\n"); System.out.println("-multithreaded Multithreaded compilation. 
If this flag is"); System.out.println(" present, then bento files are compiled in"); System.out.println(" independent threads.\n"); System.out.println("-customcore Custom core definitions supplied in bentopath;"); System.out.println(" core files will not be autoloaded from"); System.out.println(" bento.jar.\n"); System.out.println("-log <path> All output messages are logged in the specified"); System.out.println(" file. The file is overwritten if it already"); System.out.println(" exists.\n"); System.out.println("-log.append <path> All output messages are logged in the specified"); System.out.println(" file. If the file exists, the current content"); System.out.println(" is preserved, and messages are appended to the"); System.out.println(" end of the file./n"); System.out.println("-verbose Verbose output messages for debugging.\n"); System.out.println("-debug Enable the built-in debugger.\n"); System.out.println("-? This screen.\n\n"); System.out.println("Flags may be abbreviated to the initial letters, e.g. -r instead of -recursive,"); System.out.println("or -l.a instead of -log.append.\n"); } } /** Constructor */ public BentoRunner(Map<String, String> initParams) { initedOk = init(initParams); request = initParams.get("request"); } private static Map<String, String> paramsFromArgs(String[] args) { Map<String, String> initParams = new HashMap<String, String>(); int numProblems = 0; for (int i = 0; i < args.length; i++) { String arg = args[i]; String nextArg = (i + 1 < args.length ? 
args[i + 1] : null); boolean noNextArg = (nextArg == null || nextArg.startsWith("-")); if (arg.equals("-site") || arg.equals("-s")) { if (noNextArg) { numProblems++; String msg = "site name not provided"; initParams.put("problem" + numProblems, msg); i++; } else { initParams.put("site", nextArg); } } else if (arg.equals("-address") || arg.equals("-a")) { if (noNextArg) { numProblems++; String msg = "address not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("address", nextArg); i++; } } else if (arg.equals("-port") || arg.equals("-p")) { if (noNextArg) { numProblems++; String msg = "port not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("port", nextArg); i++; } } else if (arg.equals("-host") || arg.equals("-h")) { if (noNextArg) { numProblems++; String msg = "host not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("host", nextArg); i++; } } else if (arg.equals("-filebase") || arg.equals("-docbase") || arg.equals("-d")) { if (noNextArg) { numProblems++; String msg = "filebase not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("filebase", nextArg); i++; } } else if (arg.equals("-filesfirst") || arg.equals("-f")) { initParams.put("filesfirst", "true"); } else if (arg.equals("-bentopath") || arg.equals("-b")) { if (noNextArg) { numProblems++; String msg = "bentopath not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("bentopath", nextArg); i++; } } else if (arg.equals("-recursive") || arg.equals("-r")) { initParams.put("recursive", "true"); } else if (arg.equals("-multithreaded") || arg.equals("-m")) { initParams.put("multithreaded", "true"); } else if (arg.equals("-customcore") || arg.equals("-cc")) { initParams.put("customcore", "true"); } else if (arg.equals("-sharecore") || arg.equals("-sc")) { initParams.put("sharecore", "true"); } else if (arg.equals("-log") || arg.equals("-l")) { if (noNextArg) 
{ numProblems++; String msg = "log file not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("log", nextArg); i++; } } else if (arg.equals("-log.append") || arg.equals("-l.a")) { if (noNextArg) { numProblems++; String msg = "log.append file not provided"; initParams.put("problem" + numProblems, msg); } else { initParams.put("log", nextArg); initParams.put("log.append", "true"); i++; } } else if (arg.equals("-verbose") || arg.equals("-v")) { initParams.put("verbose", "true"); } else if (arg.equals("-debug")) { initParams.put("debug", "true"); } else if (arg.startsWith("-")) { numProblems++; String msg = "unrecognized option: " + arg; initParams.put("problem" + numProblems, msg); } else { while (i++ < args.length - 1) { arg = arg + " " + args[i]; } initParams.put("request", arg); break; } } initParams.put("problems", Integer.toString(numProblems)); return initParams; } private boolean init(Map<String, String> initParams) { try { initGlobalSettings(initParams); } catch (Exception e) { exception = e; return false; } return true; } protected void initGlobalSettings(Map<String, String> initParams) throws Exception { String param; param = initParams.get("verbose"); if ("true".equalsIgnoreCase(param)) { SiteBuilder.verbosity = SiteBuilder.VERBOSE; } logFileName = initParams.get("log"); String appendLog = initParams.get("log.append"); appendToLog = isTrue(appendLog); if (logFileName != null) { SiteBuilder.setLogFile(logFileName, appendToLog); } bentoPath = initParams.get("bentopath"); if (bentoPath == null) { bentoPath = "."; } fileBase = initParams.get("filebase"); if (fileBase == null) { fileBase = "."; } recursive = isTrue(initParams.get("recursive")); multithreaded = isTrue(initParams.get("multithreaded")); //shareCore = isTrue(initParams.get("sharecore")); //debuggingEnabled = isTrue(initParams.get("debug")); } /** Returns true if the passed string is a valid servlet parameter representation * of true. 
*/ private static boolean isTrue(String param) { return ("true".equalsIgnoreCase(param) || "yes".equalsIgnoreCase(param) || "1".equalsIgnoreCase(param)); } private void loadSite() throws Exception { mainSite = load("[runner]", bentoPath, recursive); if (mainSite == null) { System.err.println("Unable to run."); return; } else if (mainSite.getException() != null) { throw mainSite.getException(); } mainSite.globalInit(); mainSite.siteInit(); } // // Run a program // private void run(Writer out) throws Redirection, IOException { String req = request; BentoSite site = mainSite; if (req != null && req.length() > 0) { if (sites != null) { int ix = req.indexOf('/'); while (ix == 0) { req = req.substring(1); ix = req.indexOf('/'); } if (ix < 0) { if (sites.containsKey(req)) { site = (BentoSite) sites.get(req); req = null; } } else if (ix > 0) { String siteName = req.substring(0, ix); if (sites.containsKey(siteName)) { site = (BentoSite) sites.get(siteName); req = req.substring(ix + 1); } } } } if (req == null || req.length() == 0) { req = "run"; } if (site != null) { site.run(req, out); } } // // BentoProcessor interface // /** Returns the name of this processor. **/ public String name() { return NAME; } /** The highest Bento version number supported by this processor. **/ public String version() { return VERSION; } /** Properties associated with this processor. **/ public Map<String, Object> props() { return properties; } public BentoSite getMainSite () { return mainSite; } public Map<String, BentoSite> getSiteMap() { return sites; } /** Compile the Bento source files found at the locations specified in <code>bentopath</code> * and return a bento_domain object. If a location is a directory and <code>recursive</code> * is true, scan subdirectories recursively for Bento source files. 
If <code>autoloadCore</code> * is true, and the core definitions required by the system cannot be found in the files * specified in <code>bentopath</code>, the processor will attempt to load the core * definitions automatically from a known source (e.g. from the same jar file that the * processor was loaded from). */ public bento_domain compile(String siteName, String bentopath, boolean recursive, boolean autoloadCore) { BentoSite site = new BentoSite(siteName, this); site.load(bentopath, "*.bento", recursive, multithreaded, autoloadCore, sharedCore); return site; } /** Compile Bento source code passed in as a string and return a bento_domain object. If * <code>autoloadCore</code> is true, and the core definitions required by the system cannot * be found in the files specified in <code>bentopath</code>, the processor will attempt to * load the core definitions automatically from a known source (e.g. from the same jar file * that the processor was loaded from). */ public bento_domain compile(String siteName, String bentotext, boolean autoloadCore) { return null; } /** Compile Bento source code passed in as a string and merge the result into the specified * bento_domain. If there is a fatal error in the code, the result is not merged and * a Redirection is thrown. */ public void compile_into(bento_domain domain, String bentotext) throws Redirection { ; } public String domain_type() { return Name.SITE; } /** Writes to log file and system out. **/ static void slog(String msg) { SiteBuilder.log(msg); // avoid redundant echo if (!SiteBuilder.echoSystemOut) { System.out.println(msg); } } /** Load the site files */ public BentoSite load(String sitename, String bentoPath, boolean recurse) throws Exception { BentoSite site = null; slog(NAME_AND_VERSION); slog("Loading site " + (sitename == null ? 
"(no name yet)" : sitename)); site = (BentoSite) compile(sitename, bentoPath, recurse, !customCore); Exception e = site.getException(); if (e != null) { slog("Exception loading site " + site.getName() + ": " + e); throw e; } return site; } }
/*************************************************************************
 * Copyright (c) 2015 Lemberg Solutions
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **************************************************************************/
package com.ls.mappwidget.slicingtool.cutter;

import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

import javax.imageio.ImageIO;

import com.ls.mappwidget.slicingtool.vo.PointVO;

/**
 * Slices a source image into a pyramid of map tiles (one directory per zoom
 * level, deepest level first), reporting progress and completion through the
 * supplied listeners.
 */
public class Cutter {
    private OnProgressUpdateListener onUpdate;
    private OnCompliteListener onComplite;

    public Cutter(OnProgressUpdateListener listener, OnCompliteListener onComplite) {
        this.onUpdate = listener;
        this.onComplite = onComplite;
    }

    /**
     * Cuts the input image into tiles on a background thread. The number of
     * zoom levels is ceil(log2(max side)); each level halves the image size.
     * An XML descriptor is written for the deepest level only.
     *
     * @param inFile           source image path
     * @param outDir           output root directory
     * @param mapName          map name used for directory and XML file names
     * @param tileSize         tile edge length in pixels
     * @param pointTopLeft     geographic anchor of the top-left corner
     * @param pointBottomRight geographic anchor of the bottom-right corner
     */
    public void startCuttingAndroid(final String inFile, final String outDir, final String mapName,
            final int tileSize, final PointVO pointTopLeft, final PointVO pointBottomRight) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                File fileXml = new File(outDir, mapName);
                createDir(fileXml);
                File file = new File(outDir, mapName + File.separator + mapName + "_files");
                createDir(file);
                // work on a temporary copy so the source file is never modified
                String temp = file.getAbsolutePath() + File.separator + "tmp.png";
                saveImage(inFile, "png", temp);
                int count = (int) Math.ceil(Math.log(getMaxSide(temp)) / Math.log(2));
                for (int i = count; i >= 0; i--) {
                    File fdir = new File(file.getAbsoluteFile(), File.separator + i);
                    createDir(fdir);
                    if (i == count) {
                        // deepest level also produces the XML descriptor
                        imageCut(temp, fdir.getAbsolutePath(), tileSize,
                                fileXml.getAbsoluteFile() + File.separator + mapName + ".xml",
                                true, "", pointTopLeft, pointBottomRight);
                    } else {
                        imageCut(temp, fdir.getAbsolutePath(), tileSize, null, false, "",
                                pointTopLeft, pointBottomRight);
                    }
                    // shrink to 50% for the next (coarser) zoom level
                    imageResize(temp, temp, 50);
                    onUpdate.onProgressUpdate(count - i + 1);
                }
                File tmp = new File(temp);
                // FIX: delete() result was silently ignored; warn when the
                // temporary file could not be removed.
                if (!tmp.delete()) {
                    System.err.println("Warning: could not delete temporary file " + temp);
                }
                onComplite.onComplite();
            }
        }).start();
    }

    /** Creates the directory (and parents) if it does not already exist. */
    private void createDir(File file) {
        if (!file.exists()) {
            file.mkdirs();
        }
    }

    /**
     * Cuts one image into tileSize x tileSize PNG tiles named "col_row.png".
     * Edge tiles are clipped to the remaining width/height. When {@code xml}
     * is true an XML descriptor is written first.
     */
    private void imageCut(String inFile, String outDir, int tileSize, String mapName, boolean xml,
            String concut, PointVO pointTopLeft, PointVO pointBottomRight) {
        String s = "";
        if (!outDir.endsWith(File.separator)) {
            s = File.separator;
        }
        BufferedImage image = getImage(inFile);
        // FIX: getImage returns null on read failure; previously this caused
        // a bare NPE inside the worker thread.
        if (image == null) {
            System.err.println("Cannot read image: " + inFile);
            return;
        }
        int w = image.getWidth();
        int h = image.getHeight();
        if (xml) {
            ImageXML.createXML(mapName, tileSize, w, h, pointTopLeft, pointBottomRight);
        }
        if (w < tileSize && h < tileSize) {
            // image fits in a single tile
            saveImage(image, "png", outDir + s + concut + "0_0.png");
            return;
        }
        for (int i = 0, k = 0; i < w - 1; i += tileSize, k++) {
            for (int j = 0, l = 0; j < h - 1; j += tileSize, l++) {
                int tileWidth = tileSize;
                int tileHeight = tileSize;
                if (tileWidth > (w - i - 1)) {
                    tileWidth = w - i - 1;
                }
                if (tileHeight > (h - j - 1)) {
                    tileHeight = h - j - 1;
                }
                BufferedImage part = image.getSubimage(i, j, tileWidth, tileHeight);
                saveImage(part, "png", outDir + s + concut + k + "_" + l + ".png");
            }
        }
    }

    /**
     * Scales the input image by the given percentage and writes it to outFile.
     *
     * @return true on success, false if the input could not be read
     */
    private boolean imageResize(String outFile, String inFile, int percents) {
        BufferedImage originalImage = getImage(inFile);
        // FIX: guard against a failed read (previously an NPE)
        if (originalImage == null) {
            return false;
        }
        // type 0 is TYPE_CUSTOM, which BufferedImage's constructor rejects
        int type = originalImage.getType() == 0 ? BufferedImage.TYPE_INT_ARGB : originalImage.getType();
        BufferedImage resizedImage = doResize(originalImage, type, percents);
        saveImage(resizedImage, "png", outFile);
        return true;
    }

    /** Performs the actual high-quality bicubic resize; sides are clamped to >= 1px. */
    private BufferedImage doResize(BufferedImage originalImage, int type, int percents) {
        int h = (int) Math.round((originalImage.getHeight() * percents / 100.0));
        int w = (int) Math.round((originalImage.getWidth() * percents / 100.0));
        if (h <= 0) {
            h = 1;
        }
        if (w <= 0) {
            w = 1;
        }
        BufferedImage resizedImage = new BufferedImage(w, h, type);
        Graphics2D g = resizedImage.createGraphics();
        g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
        g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        g.drawImage(originalImage, 0, 0, w, h, null);
        g.dispose();
        return resizedImage;
    }

    /**
     * @return the image width in pixels, or -1 if the file cannot be read
     */
    public int getWidth(String fileName) {
        BufferedImage image = getImage(fileName);
        if (image == null) {
            return -1;
        }
        int width = image.getWidth();
        return width;
    }

    /**
     * @return the larger of the image's width and height, or -1 if the file
     *         cannot be read (FIX: previously threw an NPE)
     */
    private int getMaxSide(String fileName) {
        BufferedImage image = getImage(fileName);
        if (image == null) {
            return -1;
        }
        int width = image.getWidth();
        int height = image.getHeight();
        if (width > height) {
            return width;
        } else {
            return height;
        }
    }

    /**
     * Reads an image from disk.
     *
     * @return the image, or null if it could not be read
     */
    private BufferedImage getImage(String fileName) {
        try {
            BufferedImage image = ImageIO.read(new File(fileName));
            return image;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Copies an image file, re-encoding it in the given format. */
    private boolean saveImage(String inFile, String formatName, String fileName) {
        try {
            BufferedImage image = getImage(inFile);
            // FIX: ImageIO.write(null, ...) throws IllegalArgumentException;
            // fail gracefully when the input could not be read.
            if (image == null) {
                return false;
            }
            ImageIO.write(image, formatName, new File(fileName));
            return true;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    /** Writes an in-memory image to disk in the given format. */
    private boolean saveImage(BufferedImage image, String formatName, String fileName) {
        try {
            ImageIO.write(image, formatName, new File(fileName));
            return true;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }
}
// ========================================================================
// Copyright (c) 2009-2010 Mortbay, Inc.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// Contributors:
// Greg Wilkins - initial API and implementation
// ========================================================================
package org.eclipse.jetty.osgi.boot;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.Set;

import org.eclipse.jetty.deploy.App;
import org.eclipse.jetty.deploy.AppProvider;
import org.eclipse.jetty.deploy.DeploymentManager;
import org.eclipse.jetty.deploy.providers.ContextProvider;
import org.eclipse.jetty.deploy.providers.ScanningAppProvider;
import org.eclipse.jetty.osgi.boot.utils.internal.PackageAdminServiceTracker;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.util.Scanner;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.webapp.WebAppContext;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.framework.Constants;

/**
 * AppProvider for OSGi. Supports the configuration of ContextHandlers and
 * WebApps. Extends the AbstractAppProvider to support the scanning of context
 * files located outside of the bundles.
 * <p>
 * This provider must not be called outside of jetty.boot: it should always be
 * called via the OSGi service listener.
 * </p>
 * <p>
 * This provider supports the same set of parameters than the WebAppProvider as
 * it supports the deployment of WebAppContexts. Except for the scanning of the
 * webapps directory.
 * </p>
 * <p>
 * When the parameter autoInstallOSGiBundles is set to true, OSGi bundles that
 * are located in the monitored directory are installed and started after the
 * framework as finished auto-starting all the other bundles.
 * Warning: only use this for development.
 * </p>
 */
public class OSGiAppProvider extends ScanningAppProvider implements AppProvider
{

    // whether WARs are extracted when deployed (mirrors WebAppProvider)
    private boolean _extractWars = true;
    private boolean _parentLoaderPriority = false;
    private String _defaultsDescriptor;
    private String _tldBundles;
    private String[] _configurationClasses;
    private boolean _autoInstallOSGiBundles = true;

    //Keep track of the bundles that were installed and that are waiting for the
    //framework to complete its initialization.
    Set<Bundle> _pendingBundlesToStart = null;

    /**
     * When a context file corresponds to a deployed bundle and is changed we
     * reload the corresponding bundle.
     */
    private static class Filter implements FilenameFilter
    {
        // back-reference to the provider; wired up in the OSGiAppProvider ctor
        OSGiAppProvider _enclosedInstance;

        public boolean accept(File dir, String name)
        {
            File file = new File(dir,name);
            if (fileMightBeAnOSGiBundle(file))
            {
                return true;
            }
            if (!file.isDirectory())
            {
                String contextName = getDeployedAppName(name);
                if (contextName != null)
                {
                    // only accept context files that match an already-deployed app
                    App app = _enclosedInstance.getDeployedApps().get(contextName);
                    return app != null;
                }
            }
            return false;
        }
    }

    /**
     * @param contextFileName
     *            for example myContext.xml
     * @return The context, for example: myContext; null if this was not a
     *         suitable contextFileName.
     */
    private static String getDeployedAppName(String contextFileName)
    {
        String lowername = contextFileName.toLowerCase();
        if (lowername.endsWith(".xml"))
        {
            String contextName = contextFileName.substring(0,lowername.length() - ".xml".length());
            return contextName;
        }
        return null;
    }

    /**
     * Reading the display name of a webapp is really not sufficient for indexing the various
     * deployed ContextHandlers.
     * Falls back on the context path, then on a synthetic "noDisplayName" + class + hashCode
     * key when the display name is missing or already taken.
     *
     * @param context the context handler to derive a unique app name for
     * @return a key that is unique within getDeployedApps()
     */
    private String getContextHandlerAppName(ContextHandler context)
    {
        String appName = context.getDisplayName();
        if (appName == null || appName.length() == 0 || getDeployedApps().containsKey(appName))
        {
            if (context instanceof WebAppContext)
            {
                appName = ((WebAppContext)context).getContextPath();
                if (getDeployedApps().containsKey(appName))
                {
                    appName = "noDisplayName"+context.getClass().getSimpleName()+context.hashCode();
                }
            }
            else
            {
                appName = "noDisplayName"+context.getClass().getSimpleName()+context.hashCode();
            }
        }
        return appName;
    }

    /**
     * Default OSGiAppProvider consutructed when none are defined in the
     * jetty.xml configuration.
     */
    public OSGiAppProvider()
    {
        super(new Filter());
        // wire the filter back to this instance so it can see deployed apps
        ((Filter)super._filenameFilter)._enclosedInstance = this;
    }

    /**
     * Default OSGiAppProvider consutructed when none are defined in the
     * jetty.xml configuration.
     *
     * @param contextsDir the directory to monitor for context files
     */
    public OSGiAppProvider(File contextsDir) throws IOException
    {
        this();
        setMonitoredDirResource(Resource.newResource(contextsDir.toURI()));
    }

    /**
     * Returns the ContextHandler that was created by WebappRegistractionHelper
     *
     * @see AppProvider
     */
    public ContextHandler createContextHandler(App app) throws Exception
    {
        // return pre-created Context
        ContextHandler wah = app.getContextHandler();
        if (wah == null)
        {
            // for some reason it was not defined when the App was constructed.
            // we don't support this situation at this point.
            // once the WebAppRegistrationHelper is refactored, the code
            // that creates the ContextHandler will actually be here.
            throw new IllegalStateException("The App must be passed the " + "instance of the ContextHandler when it is construsted");
        }
        if (_configurationClasses != null && wah instanceof WebAppContext)
        {
            ((WebAppContext)wah).setConfigurationClasses(_configurationClasses);
        }
        return app.getContextHandler();
    }

    /**
     * @see AppProvider
     */
    public void setDeploymentManager(DeploymentManager deploymentManager)
    {
        // _manager=deploymentManager;
        super.setDeploymentManager(deploymentManager);
    }

    // Builds a stable origin id of the form symbolicName-version/pathInBundle
    private static String getOriginId(Bundle contributor, String pathInBundle)
    {
        return contributor.getSymbolicName() + "-" + contributor.getVersion().toString() + (pathInBundle.startsWith("/") ? pathInBundle : "/" + pathInBundle);
    }

    /**
     * @param context the context handler contributed by the bundle
     * @throws Exception
     */
    public void addContext(Bundle contributor, String pathInBundle, ContextHandler context) throws Exception
    {
        addContext(getOriginId(contributor, pathInBundle), context);
    }

    /**
     * @param context the context handler to register and deploy
     * @throws Exception
     */
    public void addContext(String originId, ContextHandler context) throws Exception
    {
        // TODO apply configuration specific to this provider
        if (context instanceof WebAppContext)
        {
            ((WebAppContext)context).setExtractWAR(isExtract());
        }
        // wrap context as an App
        App app = new App(getDeploymentManager(),this,originId,context);
        String appName = getContextHandlerAppName(context);
        getDeployedApps().put(appName,app);
        getDeploymentManager().addApp(app);
    }

    /**
     * Called by the scanner of the context files directory. If we find the
     * corresponding deployed App we reload it by returning the App. Otherwise
     * we return null and nothing happens: presumably the corresponding OSGi
     * webapp is not ready yet.
     *
     * @return the corresponding already deployed App so that it will be
     *         reloaded. Otherwise returns null.
     */
    @Override
    protected App createApp(String filename)
    {
        // find the corresponding bundle and ContextHandler or WebAppContext
        // and reload the corresponding App.
        // see the 2 pass of the refactoring of the WebAppRegistrationHelper.
        String name = getDeployedAppName(filename);
        if (name != null)
        {
            return getDeployedApps().get(name);
        }
        return null;
    }

    /**
     * Undeploys the App wrapping the given context. Looks up by display name
     * first, then scans all deployed apps for an identity match.
     */
    public void removeContext(ContextHandler context) throws Exception
    {
        String appName = getContextHandlerAppName(context);
        App app = getDeployedApps().remove(context.getDisplayName());
        if (app == null)
        {
            //try harder to undeploy this context handler.
            //see bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=330098
            appName = null;
            for (Entry<String,App> deployedApp : getDeployedApps().entrySet())
            {
                if (deployedApp.getValue().getContextHandler() == context)
                {
                    app = deployedApp.getValue();
                    appName = deployedApp.getKey();
                    break;
                }
            }
            if (appName != null)
            {
                getDeployedApps().remove(appName);
            }
        }
        if (app != null)
        {
            getDeploymentManager().removeApp(app);
        }
    }

    //
    //copied from WebAppProvider as the parameters are identical.
    //
    //only removed the parameer related to extractWars.

    /* ------------------------------------------------------------ */
    /**
     * Get the parentLoaderPriority.
     *
     * @return the parentLoaderPriority
     */
    public boolean isParentLoaderPriority()
    {
        return _parentLoaderPriority;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the parentLoaderPriority.
     *
     * @param parentLoaderPriority
     *            the parentLoaderPriority to set
     */
    public void setParentLoaderPriority(boolean parentLoaderPriority)
    {
        _parentLoaderPriority = parentLoaderPriority;
    }

    /* ------------------------------------------------------------ */
    /**
     * Get the defaultsDescriptor.
     *
     * @return the defaultsDescriptor
     */
    public String getDefaultsDescriptor()
    {
        return _defaultsDescriptor;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the defaultsDescriptor.
     *
     * @param defaultsDescriptor
     *            the defaultsDescriptor to set
     */
    public void setDefaultsDescriptor(String defaultsDescriptor)
    {
        _defaultsDescriptor = defaultsDescriptor;
    }

    /**
     * The context xml directory. In fact it is the directory watched by the
     * scanner.
     */
    public File getContextXmlDirAsFile()
    {
        try
        {
            Resource monitoredDir = getMonitoredDirResource();
            if (monitoredDir == null)
                return null;
            return monitoredDir.getFile();
        }
        catch (IOException e)
        {
            e.printStackTrace();
            return null;
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * The context xml directory. In fact it is the directory watched by the
     * scanner.
     */
    public String getContextXmlDir()
    {
        try
        {
            Resource monitoredDir = getMonitoredDirResource();
            if (monitoredDir == null)
                return null;
            return monitoredDir.getFile().toURI().toString();
        }
        catch (IOException e)
        {
            e.printStackTrace();
            return null;
        }
    }

    public boolean isExtract()
    {
        return _extractWars;
    }

    public void setExtract(boolean extract)
    {
        _extractWars=extract;
    }

    /**
     * @return true when this app provider locates osgi bundles and features in
     *         its monitored directory and installs them. By default true if there is a folder to monitor.
     */
    public boolean isAutoInstallOSGiBundles()
    {
        return _autoInstallOSGiBundles;
    }

    /**
     * &lt;autoInstallOSGiBundles&gt;true&lt;/autoInstallOSGiBundles&gt;
     * @param installingOSGiBundles
     */
    public void setAutoInstallOSGiBundles(boolean installingOSGiBundles)
    {
        _autoInstallOSGiBundles=installingOSGiBundles;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the directory in which to look for context XML files.
     * <p>
     * If a webapp call "foo/" or "foo.war" is discovered in the monitored
     * directory, then the ContextXmlDir is examined to see if a foo.xml file
     * exists. If it does, then this deployer will not deploy the webapp and the
     * ContextProvider should be used to act on the foo.xml file.
     * </p>
     * <p>
     * Also if this directory contains some osgi bundles, it will install them.
     * </p>
     *
     * @see ContextProvider
     * @param contextsDir
     */
    public void setContextXmlDir(String contextsDir)
    {
        setMonitoredDirName(contextsDir);
    }

    /**
     * @param tldBundles Comma separated list of bundles that contain tld jars
     *        that should be setup on the jetty instances created here.
     */
    public void setTldBundles(String tldBundles)
    {
        _tldBundles = tldBundles;
    }

    /**
     * @return The list of bundles that contain tld jars that should be setup
     *         on the jetty instances created here.
     */
    public String getTldBundles()
    {
        return _tldBundles;
    }

    /**
     * @param configurations The configuration class names.
     */
    public void setConfigurationClasses(String[] configurations)
    {
        // defensive copy so callers cannot mutate our array afterwards
        _configurationClasses = configurations==null?null:(String[])configurations.clone();
    }

    /* ------------------------------------------------------------ */
    /**
     * NOTE(review): returns the internal array without a defensive copy,
     * unlike the setter -- callers could mutate it; confirm if intentional.
     */
    public String[] getConfigurationClasses()
    {
        return _configurationClasses;
    }

    /**
     * Overridden to install the OSGi bundles found in the monitored folder.
     */
    protected void doStart() throws Exception
    {
        if (isAutoInstallOSGiBundles())
        {
            if (getMonitoredDirResource() == null)
            {
                // nothing to monitor: disable the feature rather than fail
                setAutoInstallOSGiBundles(false);
                Log.info("Disable autoInstallOSGiBundles as there is not contexts folder to monitor.");
            }
            else
            {
                File scandir = null;
                try
                {
                    scandir = getMonitoredDirResource().getFile();
                    if (!scandir.exists() || !scandir.isDirectory())
                    {
                        setAutoInstallOSGiBundles(false);
                        Log.warn("Disable autoInstallOSGiBundles as the contexts folder '" + scandir.getAbsolutePath() + " does not exist.");
                        scandir = null;
                    }
                }
                catch (IOException ioe)
                {
                    setAutoInstallOSGiBundles(false);
                    Log.warn("Disable autoInstallOSGiBundles as the contexts folder '" + getMonitoredDirResource().getURI() + " does not exist.");
                    scandir = null;
                }
                if (scandir != null)
                {
                    // install (but do not start) any bundle already present
                    for (File file : scandir.listFiles())
                    {
                        if (fileMightBeAnOSGiBundle(file))
                        {
                            installBundle(file, false);
                        }
                    }
                }
            }
        }
        super.doStart();
        if (isAutoInstallOSGiBundles())
        {
            // start pending bundles once the framework finishes auto-starting
            Scanner.ScanCycleListener scanCycleListner = new AutoStartWhenFrameworkHasCompleted(this);
            super.addScannerListener(scanCycleListner);
        }
    }

    /**
     * When the file is a jar or a folder, we look if it looks like an OSGi bundle.
     * In that case we install it and start it.
     * <p>
     * Really a simple trick to get going quickly with development.
     * </p>
     */
    @Override
    protected void fileAdded(String filename) throws Exception
    {
        File file = new File(filename);
        if (isAutoInstallOSGiBundles() && file.exists() && fileMightBeAnOSGiBundle(file))
        {
            installBundle(file, true);
        }
        else
        {
            super.fileAdded(filename);
        }
    }

    /**
     * Heuristic check: a directory with a META-INF/MANIFEST.MF, or any .jar
     * file, is treated as a candidate OSGi bundle.
     *
     * @param file the file or directory to test
     * @return true if the file might be an OSGi bundle
     */
    private static boolean fileMightBeAnOSGiBundle(File file)
    {
        if (file.isDirectory())
        {
            if (new File(file,"META-INF/MANIFEST.MF").exists())
            {
                return true;
            }
        }
        else if (file.getName().endsWith(".jar"))
        {
            return true;
        }
        return false;
    }

    @Override
    protected void fileChanged(String filename) throws Exception
    {
        File file = new File(filename);
        if (isAutoInstallOSGiBundles() && fileMightBeAnOSGiBundle(file))
        {
            updateBundle(file);
        }
        else
        {
            super.fileChanged(filename);
        }
    }

    @Override
    protected void fileRemoved(String filename) throws Exception
    {
        File file = new File(filename);
        if (isAutoInstallOSGiBundles() && fileMightBeAnOSGiBundle(file))
        {
            uninstallBundle(file);
        }
        else
        {
            super.fileRemoved(filename);
        }
    }

    /**
     * Returns a bundle according to its location.
     * In the version 1.6 of org.osgi.framework, BundleContext.getBundle(String) is what we want.
     * However to support older versions of OSGi. We use our own local refrence mechanism.
     *
     * @param location the bundle location URI string
     * @return the installed bundle at that location, or null if none
     */
    protected Bundle getBundle(BundleContext bc, String location)
    {
        //not available in older versions of OSGi:
        //return bc.getBundle(location);
        for (Bundle b : bc.getBundles())
        {
            if (b.getLocation().equals(location))
            {
                return b;
            }
        }
        return null;
    }

    /**
     * Installs (and optionally starts) the bundle backed by the given file.
     * Fragments are never started; bundles requested to start before the
     * framework has completed its autostarts are queued in
     * _pendingBundlesToStart instead.
     *
     * @return the installed bundle, or null when it was queued or failed
     */
    protected synchronized Bundle installBundle(File file, boolean start)
    {
        try
        {
            BundleContext bc = JettyBootstrapActivator.getBundleContext();
            String location = file.toURI().toString();
            Bundle b = getBundle(bc, location);
            if (b == null)
            {
                b = bc.installBundle(location);
            }
            if (b == null)
            {
                //not sure we will ever be here,
                //most likely a BundleException was thrown
                Log.warn("The file " + location + " is not an OSGi bundle.");
                return null;
            }
            if (start && b.getHeaders().get(Constants.FRAGMENT_HOST) == null)
            {//not a fragment, try to start it. if the framework has finished auto-starting.
                if (!PackageAdminServiceTracker.INSTANCE.frameworkHasCompletedAutostarts())
                {
                    if (_pendingBundlesToStart == null)
                    {
                        _pendingBundlesToStart = new HashSet<Bundle>();
                    }
                    _pendingBundlesToStart.add(b);
                    return null;
                }
                else
                {
                    b.start();
                }
            }
            return b;
        }
        catch (BundleException e)
        {
            Log.warn("Unable to " + (start? "start":"install") + " the bundle " + file.getAbsolutePath(), e);
        }
        return null;
    }

    /** Stops and uninstalls the bundle located at the given file's URI. */
    protected void uninstallBundle(File file)
    {
        try
        {
            Bundle b = getBundle(JettyBootstrapActivator.getBundleContext(), file.toURI().toString());
            b.stop();
            b.uninstall();
        }
        catch (BundleException e)
        {
            Log.warn("Unable to uninstall the bundle " + file.getAbsolutePath(), e);
        }
    }

    /**
     * Updates the bundle at the given file: installs it if absent, calls
     * update() if active, otherwise starts it.
     */
    protected void updateBundle(File file)
    {
        try
        {
            Bundle b = getBundle(JettyBootstrapActivator.getBundleContext(), file.toURI().toString());
            if (b == null)
            {
                installBundle(file, true);
            }
            else if (b.getState() == Bundle.ACTIVE)
            {
                b.update();
            }
            else
            {
                b.start();
            }
        }
        catch (BundleException e)
        {
            Log.warn("Unable to update the bundle " + file.getAbsolutePath(), e);
        }
    }

}

/**
 * At the end of each scan, if there are some bundles to be started,
 * look if the framework has completed its autostart. In that case start those bundles.
 */
class AutoStartWhenFrameworkHasCompleted implements Scanner.ScanCycleListener
{
    private final OSGiAppProvider _appProvider;

    AutoStartWhenFrameworkHasCompleted(OSGiAppProvider appProvider)
    {
        _appProvider = appProvider;
    }

    public void scanStarted(int cycle) throws Exception
    {
    }

    public void scanEnded(int cycle) throws Exception
    {
        if (_appProvider._pendingBundlesToStart != null && PackageAdminServiceTracker.INSTANCE.frameworkHasCompletedAutostarts())
        {
            Iterator<Bundle> it = _appProvider._pendingBundlesToStart.iterator();
            while (it.hasNext())
            {
                Bundle b = it.next();
                if (b.getHeaders().get(Constants.FRAGMENT_HOST) != null)
                {
                    // fragments cannot be started; skip them
                    continue;
                }
                try
                {
                    b.start();
                }
                catch (BundleException e)
                {
                    Log.warn("Unable to start the bundle " + b.getLocation(), e);
                }
            }
            _appProvider._pendingBundlesToStart = null;
        }
    }

}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * GetConsoleOutputType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
 */

package com.amazon.ec2;

/**
 * GetConsoleOutputType bean class.
 * <p>
 * Axis2 ADB (data binding) bean holding the single {@code instanceId} element
 * of the EC2 GetConsoleOutput request. NOTE(review): auto-generated code — do
 * not hand-edit the serialization logic; regenerate from the WSDL instead.
 */
public class GetConsoleOutputType implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
       name = GetConsoleOutputType
       Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
       Namespace Prefix = ns1 */

    // Returns the canonical prefix for the EC2 namespace, or a fresh unique
    // prefix for any other namespace.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for InstanceId
     */
    protected java.lang.String localInstanceId ;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getInstanceId(){
        return localInstanceId;
    }

    /**
     * Auto generated setter method
     * @param param InstanceId
     */
    public void setInstanceId(java.lang.String param){
        this.localInstanceId=param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            // the reader does not recognise the property: treat as not MTOM-aware
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Builds a lazily-serialized OM element backed by this bean.
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        // serialize() below runs only when the OM tree is actually expanded
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                GetConsoleOutputType.this.serialize(parentQName,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    // Convenience overload: serialize without writing an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Writes this bean as XML: the parent element, an optional xsi:type
    // attribute (when serializeType is true), and the mandatory instanceId
    // child element. Throws ADBException if instanceId is null.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        // Open the parent element, binding/reusing a namespace prefix as needed.
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":GetConsoleOutputType",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "GetConsoleOutputType",
                        xmlWriter);
            }
        }

        // Write the <instanceId> child in the EC2 namespace.
        namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix,"instanceId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"instanceId");
            }
        } else {
            xmlWriter.writeStartElement("instanceId");
        }

        if (localInstanceId==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("instanceId cannot be null!!");
        }else{
            xmlWriter.writeCharacters(localInstanceId);
        }

        xmlWriter.writeEndElement();

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName,attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as character data, registering
    // any namespace prefixes first (namespaces cannot be declared after
    // character data has been written).
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // keep generating until we find a prefix not already bound
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                "instanceId"));
        if (localInstanceId != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localInstanceId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("instanceId cannot be null!!");
        }

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *               If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static GetConsoleOutputType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            GetConsoleOutputType object = new GetConsoleOutputType();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                // advance to the first start/end element
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // If xsi:type names a subtype, delegate to the ExtensionMapper.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;

                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"GetConsoleOutputType".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (GetConsoleOutputType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Expect exactly one <instanceId> child element.
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","instanceId").equals(reader.getName())){
                    java.lang.String content = reader.getElementText();
                    object.setInstanceId(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                }  // End of if for expected property start element
                else{
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }//end of factory class

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License * for the specific language governing permissions and limitations under the License. */ package org.apache.phoenix.end2end; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.util.Map; import java.util.Properties; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; import org.apache.phoenix.jdbc.PhoenixDriver; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; import org.apache.phoenix.schema.PIndexState; import org.apache.phoenix.schema.PMetaData; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.schema.PTableKey; import org.apache.phoenix.schema.TableNotFoundException; import org.apache.phoenix.util.IndexUtil; import org.apache.phoenix.util.PhoenixRuntime; import org.apache.phoenix.util.ReadOnlyProps; import org.apache.phoenix.util.SchemaUtil; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.collect.Maps; public class UpdateCacheAcrossDifferentClientsIT extends BaseUniqueNamesOwnClusterIT { 
@BeforeClass public static void doSetup() throws Exception { Map<String, String> props = Maps.newConcurrentMap(); props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.TRUE.toString()); props.put(QueryServices.MUTATE_BATCH_SIZE_ATTRIB, Integer.toString(3000)); //When we run all tests together we are using global cluster(driver) //so to make drop work we need to re register driver with DROP_METADATA_ATTRIB property destroyDriver(); setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator())); //Registering real Phoenix driver to have multiple ConnectionQueryServices created across connections //so that metadata changes doesn't get propagated across connections DriverManager.registerDriver(PhoenixDriver.INSTANCE); } @Test public void testUpdateCacheFrequencyWithAddAndDropTable() throws Exception { // Create connections 1 and 2 Properties longRunningProps = new Properties(); // Must update config before starting server longRunningProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS); longRunningProps.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.TRUE.toString()); Connection conn1 = DriverManager.getConnection(getUrl(), longRunningProps); String url2 = getUrl() + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + "LongRunningQueries"; Connection conn2 = DriverManager.getConnection(url2, longRunningProps); conn1.setAutoCommit(true); conn2.setAutoCommit(true); String tableName = generateUniqueName(); String tableCreateQuery = "create table "+tableName+" (k VARCHAR PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)" + " UPDATE_CACHE_FREQUENCY=1000000000"; String dropTableQuery = "DROP table "+tableName; try { conn1.createStatement().execute(tableCreateQuery); conn1.createStatement() .execute("upsert into "+tableName+" values ('row1', 'value1', 'key1')"); conn1.createStatement() .execute("upsert into "+tableName+" values ('row2', 'value2', 'key2')"); conn1.commit(); ResultSet rs =conn1.createStatement() .executeQuery("select * from 
"+tableName); assertTrue(rs.next()); assertTrue(rs.next()); rs = conn2.createStatement().executeQuery("select * from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); //Drop table from conn1 conn1.createStatement().execute(dropTableQuery); try { rs = conn1.createStatement().executeQuery("select * from "+tableName); fail("Should throw TableNotFoundException after dropping table"); } catch (TableNotFoundException e) { //Expected } try { rs = conn2.createStatement().executeQuery("select * from "+tableName); fail("Should throw TableNotFoundException after dropping table"); } catch (TableNotFoundException e) { //Expected } } finally { conn1.close(); conn2.close(); } } @Test public void testTableSentWhenIndexStateChanges() throws Throwable { // Create connections 1 and 2 Properties longRunningProps = new Properties(); // Must update config before starting server longRunningProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS); longRunningProps.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.TRUE.toString()); Connection conn1 = DriverManager.getConnection(getUrl(), longRunningProps); String url2 = getUrl() + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + "LongRunningQueries"; Connection conn2 = DriverManager.getConnection(url2, longRunningProps); conn1.setAutoCommit(true); conn2.setAutoCommit(true); try { String schemaName = generateUniqueName(); String tableName = generateUniqueName(); String indexName = generateUniqueName(); final String fullTableName = SchemaUtil.getTableName(schemaName, tableName); String fullIndexName = SchemaUtil.getTableName(schemaName, indexName); conn1.createStatement().execute("CREATE TABLE " + fullTableName + "(k INTEGER PRIMARY KEY, v1 INTEGER, v2 INTEGER) COLUMN_ENCODED_BYTES = 0, STORE_NULLS=true"); conn1.createStatement().execute("CREATE INDEX " + indexName + " ON " + fullTableName + " (v1) INCLUDE (v2)"); HTableInterface metaTable = 
conn2.unwrap(PhoenixConnection.class).getQueryServices().getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES); IndexUtil.updateIndexState(fullIndexName, 0, metaTable, PIndexState.DISABLE); conn2.createStatement().execute("UPSERT INTO " + fullTableName + " VALUES(1,2,3)"); conn2.commit(); conn1.createStatement().execute("UPSERT INTO " + fullTableName + " VALUES(4,5,6)"); conn1.commit(); PTableKey key = new PTableKey(null,fullTableName); PMetaData metaCache = conn1.unwrap(PhoenixConnection.class).getMetaDataCache(); PTable table = metaCache.getTableRef(key).getTable(); for (PTable index : table.getIndexes()) { assertEquals(PIndexState.DISABLE, index.getIndexState()); } } finally { conn1.close(); conn2.close(); } } @Test public void testUpdateCacheFrequencyWithAddColumn() throws Exception { // Create connections 1 and 2 Properties longRunningProps = new Properties(); // Must update config before starting server Connection conn1 = DriverManager.getConnection(getUrl(), longRunningProps); Connection conn2 = DriverManager.getConnection(getUrl(), longRunningProps); conn1.setAutoCommit(true); conn2.setAutoCommit(true); String tableName = generateUniqueName(); String createTableQuery = "create table "+tableName+" (k UNSIGNED_DOUBLE not null primary key, " + "v1 UNSIGNED_DOUBLE, v2 UNSIGNED_DOUBLE, v3 UNSIGNED_DOUBLE, " + "v4 UNSIGNED_DOUBLE) UPDATE_CACHE_FREQUENCY=1000000000"; try { conn1.createStatement().execute(createTableQuery); conn1.createStatement() .execute("upsert into "+tableName+" values (1, 2, 3, 4, 5)"); conn1.createStatement() .execute("upsert into "+tableName+" values (6, 7, 8, 9, 10)"); conn1.commit(); ResultSet rs = conn1.createStatement() .executeQuery("select k,v1,v2,v3 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); rs = conn2.createStatement() .executeQuery("select k,v1,v2,v3 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); PreparedStatement alterStatement = conn1.prepareStatement( "ALTER TABLE "+tableName+" ADD v9 
UNSIGNED_DOUBLE"); alterStatement.execute(); rs = conn1.createStatement() .executeQuery("select k,v1,v2,v3,v9 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); rs = conn2.createStatement() .executeQuery("select k,v1,v2,v3,V9 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); } finally { conn1.close(); conn2.close(); } } @Test public void testUpdateCacheFrequencyWithAddAndDropIndex() throws Exception { // Create connections 1 and 2 Properties longRunningProps = new Properties(); longRunningProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS); Connection conn1 = DriverManager.getConnection(getUrl(), longRunningProps); String url2 = getUrl() + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + "LongRunningQueries"; Connection conn2 = DriverManager.getConnection(url2, longRunningProps); conn1.setAutoCommit(true); conn2.setAutoCommit(true); String tableName = generateUniqueName(); String indexName = "I_"+tableName; String tableCreateQuery = "create table "+tableName+" (k VARCHAR PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)" + " UPDATE_CACHE_FREQUENCY=1000000000"; String value1SelQuery = "SELECT v2 FROM "+tableName+" WHERE v1 = 'value1'"; String indexCreateQuery = "CREATE INDEX "+indexName+" ON "+tableName+" (v1) INCLUDE (v2)"; String indexDropQuery = "DROP INDEX "+indexName+" ON "+tableName; try { conn1.createStatement().execute(tableCreateQuery); conn1.createStatement() .execute("upsert into "+tableName+" values ('row1', 'value1', 'key1')"); conn1.createStatement() .execute("upsert into "+tableName+" values ('row2', 'value2', 'key2')"); conn1.commit(); ResultSet rs =conn1.createStatement() .executeQuery("select k,v1,v2 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); rs = conn2.createStatement().executeQuery("select k,v1,v2 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); PreparedStatement createIndexStatement =conn1.prepareStatement(indexCreateQuery); 
createIndexStatement.execute(); rs = conn1.createStatement().executeQuery(value1SelQuery); assertTrue(rs.next()); rs = conn2.createStatement().executeQuery(value1SelQuery); assertTrue(rs.next()); PreparedStatement dropIndexStatement = conn1.prepareStatement(indexDropQuery); dropIndexStatement.execute(); rs = conn2.createStatement().executeQuery(value1SelQuery); assertTrue(rs.next()); rs = conn1.createStatement().executeQuery(value1SelQuery); assertTrue(rs.next()); } finally { conn1.close(); conn2.close(); } } @Test public void testUpdateCacheFrequencyWithAddAndDropView() throws Exception { // Create connections 1 and 2 Properties longRunningProps = new Properties(); longRunningProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS); Connection conn1 = DriverManager.getConnection(getUrl(), longRunningProps); String url2 = getUrl() + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + "LongRunningQueries"; Connection conn2 = DriverManager.getConnection(url2, longRunningProps); conn1.setAutoCommit(true); conn2.setAutoCommit(true); String tableName = generateUniqueName(); String viewName = "V_"+tableName; String createQry = "create table "+tableName+" (k VARCHAR PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)" + " UPDATE_CACHE_FREQUENCY=1000000000"; String valueSelQuery = "SELECT * FROM "+tableName+" WHERE v1 = 'value1'"; String viewCreateQuery = "CREATE VIEW "+viewName+" (v43 VARCHAR) AS SELECT * FROM "+tableName+" WHERE v1 = 'value1'"; try { conn1.createStatement().execute(createQry); conn1.createStatement() .execute("upsert into "+tableName+" values ('row1', 'value1', 'key1')"); conn1.createStatement() .execute("upsert into "+tableName+" values ('row2', 'value2', 'key2')"); conn1.commit(); ResultSet rs = conn1.createStatement().executeQuery("select k,v1,v2 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); rs = conn2.createStatement().executeQuery("select k,v1,v2 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); 
conn1.createStatement().execute(viewCreateQuery); rs = conn2.createStatement().executeQuery(valueSelQuery); assertTrue(rs.next()); rs = conn1.createStatement().executeQuery(valueSelQuery); assertTrue(rs.next()); conn1.createStatement().execute("DROP VIEW "+viewName); rs = conn2.createStatement().executeQuery(valueSelQuery); assertTrue(rs.next()); rs = conn1.createStatement().executeQuery(valueSelQuery); assertTrue(rs.next()); } finally { conn1.close(); conn2.close(); } } @Test public void testUpdateCacheFrequencyWithCreateTableAndViewOnDiffConns() throws Exception { // Create connections 1 and 2 Properties longRunningProps = new Properties(); longRunningProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS); Connection conn1 = DriverManager.getConnection(getUrl(), longRunningProps); String url2 = getUrl() + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + "LongRunningQueries"; Connection conn2 = DriverManager.getConnection(url2, longRunningProps); conn1.setAutoCommit(true); conn2.setAutoCommit(true); String tableName = generateUniqueName(); String viewName = "V1_"+tableName; String valueSelQuery = "SELECT * FROM "+tableName+" WHERE v1 = 'value1'"; try { //Create table on conn1 String createQry = "create table "+tableName+" (k VARCHAR PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)" + " UPDATE_CACHE_FREQUENCY=1000000000"; conn1.createStatement().execute(createQry); //Load few rows conn1.createStatement() .execute("upsert into "+tableName+" values ('row1', 'value1', 'key1')"); conn1.createStatement() .execute("upsert into "+tableName+" values ('row2', 'value2', 'key2')"); conn1.commit(); ResultSet rs = conn1.createStatement().executeQuery("select k,v1,v2 from "+tableName); assertTrue(rs.next()); assertTrue(rs.next()); //Create View on conn2 String viewCreateQuery = "CREATE VIEW "+viewName+" (v43 VARCHAR) AS SELECT * FROM "+tableName+" WHERE v1 = 'value1'"; conn2.createStatement().execute(viewCreateQuery); //Read from view on conn2 rs = 
conn2.createStatement().executeQuery(valueSelQuery); assertTrue(rs.next()); //Read from view on conn1 rs = conn1.createStatement().executeQuery(valueSelQuery); assertTrue(rs.next()); } finally { conn1.close(); conn2.close(); } } }
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.CaseFormat; import com.google.debugging.sourcemap.proto.Mapping.OriginalMapping; import com.google.javascript.jscomp.JsMessage.Builder; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.jscomp.parsing.parser.util.format.SimpleFormat; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import javax.annotation.Nullable; /** * Traverses across parsed tree and finds I18N messages. Then it passes it to * {@link JsMessageVisitor#processJsMessage(JsMessage, JsMessageDefinition)}. * * @author anatol@google.com (Anatol Pomazau) */ @GwtIncompatible("JsMessage, java.util.regex") public abstract class JsMessageVisitor extends AbstractPostOrderCallback implements CompilerPass { private static final String MSG_FUNCTION_NAME = "goog.getMsg"; private static final String MSG_FALLBACK_FUNCTION_NAME = "goog.getMsgWithFallback"; static final DiagnosticType MESSAGE_HAS_NO_DESCRIPTION = DiagnosticType.warning("JSC_MSG_HAS_NO_DESCRIPTION", "Message {0} has no description. 
Add @desc JsDoc tag."); static final DiagnosticType MESSAGE_HAS_NO_TEXT = DiagnosticType.warning("JSC_MSG_HAS_NO_TEXT", "Message value of {0} is just an empty string. " + "Empty messages are forbidden."); static final DiagnosticType MESSAGE_TREE_MALFORMED = DiagnosticType.error("JSC_MSG_TREE_MALFORMED", "Message parse tree malformed. {0}"); static final DiagnosticType MESSAGE_HAS_NO_VALUE = DiagnosticType.error("JSC_MSG_HAS_NO_VALUE", "message node {0} has no value"); static final DiagnosticType MESSAGE_DUPLICATE_KEY = DiagnosticType.error("JSC_MSG_KEY_DUPLICATED", "duplicate message variable name found for {0}, " + "initial definition {1}:{2}"); static final DiagnosticType MESSAGE_NODE_IS_ORPHANED = DiagnosticType.warning("JSC_MSG_ORPHANED_NODE", MSG_FUNCTION_NAME + "() function could be used only with MSG_* property or variable"); static final DiagnosticType MESSAGE_NOT_INITIALIZED_USING_NEW_SYNTAX = DiagnosticType.warning("JSC_MSG_NOT_INITIALIZED_USING_NEW_SYNTAX", "message not initialized using " + MSG_FUNCTION_NAME); static final DiagnosticType BAD_FALLBACK_SYNTAX = DiagnosticType.error("JSC_MSG_BAD_FALLBACK_SYNTAX", SimpleFormat.format( "Bad syntax. " + "Expected syntax: %s(MSG_1, MSG_2)", MSG_FALLBACK_FUNCTION_NAME)); static final DiagnosticType FALLBACK_ARG_ERROR = DiagnosticType.error("JSC_MSG_FALLBACK_ARG_ERROR", "Could not find message entry for fallback argument {0}"); private static final String PH_JS_PREFIX = "{$"; private static final String PH_JS_SUFFIX = "}"; static final String MSG_PREFIX = "MSG_"; /** * Pattern for unnamed messages. * <p> * All JS messages in JS code should have unique name but messages in * generated code (i.e. from soy template) could have duplicated message names. * Later we replace the message names with ids constructed as a hash of the * message content. * <p> * <a href="https://github.com/google/closure-templates"> * Soy</a> generates messages with names MSG_UNNAMED.* . This * pattern recognizes such messages. 
*/ private static final Pattern MSG_UNNAMED_PATTERN = Pattern.compile("MSG_UNNAMED.*"); private static final Pattern CAMELCASE_PATTERN = Pattern.compile("[a-z][a-zA-Z\\d]*[_\\d]*"); static final String HIDDEN_DESC_PREFIX = "@hidden"; // For old-style JS messages private static final String DESC_SUFFIX = "_HELP"; private final boolean needToCheckDuplications; private final JsMessage.Style style; private final JsMessage.IdGenerator idGenerator; final AbstractCompiler compiler; /** * The names encountered associated with their defining node and source. We * use it for tracking duplicated message ids in the source code. */ private final Map<String, MessageLocation> messageNames = new HashMap<>(); private final Map<Var, JsMessage> unnamedMessages = new HashMap<>(); /** * List of found goog.getMsg call nodes. * * When we visit goog.getMsg() node we add it, and later * when we visit its parent we remove it. All nodes that are left at * the end of traversing are orphaned nodes. It means have no corresponding * var or property node. */ private final Set<Node> googMsgNodes = new HashSet<>(); private final CheckLevel checkLevel; /** * Creates JS message visitor. * * @param compiler the compiler instance * @param needToCheckDuplications whether to check duplicated messages in * traversed * @param style style that should be used during parsing * @param idGenerator generator that used for creating unique ID for the * message */ protected JsMessageVisitor(AbstractCompiler compiler, boolean needToCheckDuplications, JsMessage.Style style, JsMessage.IdGenerator idGenerator) { this.compiler = compiler; this.needToCheckDuplications = needToCheckDuplications; this.style = style; this.idGenerator = idGenerator; checkLevel = (style == JsMessage.Style.CLOSURE) ? CheckLevel.ERROR : CheckLevel.WARNING; // TODO(anatol): add flag that decides whether to process UNNAMED messages. // Some projects would not want such functionality (unnamed) as they don't // use SOY templates. 
} @Override public void process(Node externs, Node root) { NodeTraversal.traverseEs6(compiler, root, this); for (Node msgNode : googMsgNodes) { compiler.report(JSError.make(msgNode, checkLevel, MESSAGE_NODE_IS_ORPHANED)); } } @Override public void visit(NodeTraversal traversal, Node node, Node parent) { String messageKey; String originalMessageKey; boolean isVar; Node msgNode; switch (node.getToken()) { case NAME: // var MSG_HELLO = 'Message' if ((parent != null) && (NodeUtil.isNameDeclaration(parent))) { messageKey = node.getString(); originalMessageKey = node.getOriginalName(); isVar = true; } else { return; } msgNode = node.getFirstChild(); break; case ASSIGN: // somenamespace.someclass.MSG_HELLO = 'Message' isVar = false; Node getProp = node.getFirstChild(); if (!getProp.isGetProp()) { return; } Node propNode = getProp.getLastChild(); messageKey = propNode.getString(); originalMessageKey = getProp.getOriginalName(); msgNode = node.getLastChild(); break; case STRING_KEY: if (node.isQuotedString() || node.getFirstChild() == null) { return; } isVar = false; messageKey = node.getString(); originalMessageKey = node.getOriginalName(); msgNode = node.getFirstChild(); break; case CALL: // goog.getMsg() if (node.getFirstChild().matchesQualifiedName(MSG_FUNCTION_NAME)) { googMsgNodes.add(node); } else if (node.getFirstChild().matchesQualifiedName( MSG_FALLBACK_FUNCTION_NAME)) { visitFallbackFunctionCall(traversal, node); } return; default: return; } if (originalMessageKey != null) { messageKey = originalMessageKey; } // Is this a message name? 
boolean isNewStyleMessage = msgNode != null && msgNode.isCall(); if (!isMessageName(messageKey, isNewStyleMessage)) { return; } if (msgNode == null) { compiler.report( traversal.makeError(node, MESSAGE_HAS_NO_VALUE, messageKey)); return; } if (msgNode.isGetProp() && msgNode.isQualifiedName() && msgNode.getLastChild().getString().equals(messageKey)) { // foo.Thing.MSG_EXAMPLE = bar.OtherThing.MSG_EXAMPLE; // This kind of construct is created by Es6ToEs3ClassSideInheritance. Just ignore it; the // message will have already been extracted from the base class. return; } // Report a warning if a qualified messageKey that looks like a message // (e.g. "a.b.MSG_X") doesn't use goog.getMsg(). if (isNewStyleMessage) { googMsgNodes.remove(msgNode); } else if (style != JsMessage.Style.LEGACY) { // TODO(johnlenz): promote this to an error once existing conflicts have been // cleaned up. compiler.report(traversal.makeError(node, MESSAGE_NOT_INITIALIZED_USING_NEW_SYNTAX)); if (style == JsMessage.Style.CLOSURE) { // Don't extract the message if we aren't accepting LEGACY messages return; } } boolean isUnnamedMsg = isUnnamedMessageName(messageKey); Builder builder = new Builder( isUnnamedMsg ? null : messageKey); OriginalMapping mapping = compiler.getSourceMapping( traversal.getSourceName(), traversal.getLineNumber(), traversal.getCharno()); if (mapping != null) { builder.setSourceName(mapping.getOriginalFile()); } else { builder.setSourceName(traversal.getSourceName()); } try { if (isVar) { extractMessageFromVariable(builder, node, parent, parent.getParent()); } else { extractMessageFrom(builder, msgNode, node); } } catch (MalformedException ex) { compiler.report(traversal.makeError(ex.getNode(), MESSAGE_TREE_MALFORMED, ex.getMessage())); return; } JsMessage extractedMessage = builder.build(idGenerator); // If asked to check named internal messages. 
if (needToCheckDuplications && !isUnnamedMsg && !extractedMessage.isExternal()) { checkIfMessageDuplicated(messageKey, msgNode); } trackMessage(traversal, extractedMessage, messageKey, msgNode, isUnnamedMsg); if (extractedMessage.isEmpty()) { // value of the message is an empty string. Translators do not like it. compiler.report(traversal.makeError(node, MESSAGE_HAS_NO_TEXT, messageKey)); } // New-style messages must have descriptions. We don't emit a warning // for legacy-style messages, because there are thousands of // them in legacy code that are not worth the effort to fix, since they've // already been translated anyway. String desc = extractedMessage.getDesc(); if (isNewStyleMessage && (desc == null || desc.trim().isEmpty()) && !extractedMessage.isExternal()) { compiler.report(traversal.makeError(node, MESSAGE_HAS_NO_DESCRIPTION, messageKey)); } JsMessageDefinition msgDefinition = new JsMessageDefinition(msgNode); processJsMessage(extractedMessage, msgDefinition); } /** * Track a message for later retrieval. * * This is used for tracking duplicates, and for figuring out message * fallback. Not all message types are trackable, because that would * require a more sophisticated analysis. e.g., * function f(s) { s.MSG_UNNAMED_X = 'Some untrackable message'; } */ private void trackMessage( NodeTraversal t, JsMessage message, String msgName, Node msgNode, boolean isUnnamedMessage) { if (!isUnnamedMessage) { MessageLocation location = new MessageLocation(message, msgNode); messageNames.put(msgName, location); } else { Var var = t.getScope().getVar(msgName); if (var != null) { unnamedMessages.put(var, message); } } } /** Get a previously tracked message. */ private JsMessage getTrackedMessage(NodeTraversal t, String msgName) { boolean isUnnamedMessage = isUnnamedMessageName(msgName); if (!isUnnamedMessage) { MessageLocation location = messageNames.get(msgName); return location == null ? 
null : location.message; } else { Var var = t.getScope().getVar(msgName); if (var != null) { return unnamedMessages.get(var); } } return null; } /** * Checks if message already processed. If so - it generates 'message * duplicated' compiler error. * * @param msgName the name of the message * @param msgNode the node that represents JS message */ private void checkIfMessageDuplicated(String msgName, Node msgNode) { if (messageNames.containsKey(msgName)) { MessageLocation location = messageNames.get(msgName); compiler.report(JSError.make(msgNode, MESSAGE_DUPLICATE_KEY, msgName, location.messageNode.getSourceFileName(), Integer.toString(location.messageNode.getLineno()))); } } /** * Creates a {@link JsMessage} for a JS message defined using a JS variable * declaration (e.g <code>var MSG_X = ...;</code>). * * @param builder the message builder * @param nameNode a NAME node for a JS message variable * @param parentNode a VAR node, parent of {@code nameNode} * @param grandParentNode the grandparent of {@code nameNode}. This node is * only used to get meta data about the message that might be * surrounding it (e.g. a message description). This argument may be * null if the meta data is not needed. 
* @throws MalformedException if {@code varNode} does not * correspond to a valid JS message VAR node */ private void extractMessageFromVariable( Builder builder, Node nameNode, Node parentNode, @Nullable Node grandParentNode) throws MalformedException { // Determine the message's value Node valueNode = nameNode.getFirstChild(); switch (valueNode.getToken()) { case STRING: case ADD: maybeInitMetaDataFromJsDocOrHelpVar(builder, parentNode, grandParentNode); builder.appendStringPart(extractStringFromStringExprNode(valueNode)); break; case FUNCTION: maybeInitMetaDataFromJsDocOrHelpVar(builder, parentNode, grandParentNode); extractFromFunctionNode(builder, valueNode); break; case CALL: maybeInitMetaDataFromJsDoc(builder, parentNode); extractFromCallNode(builder, valueNode); break; default: throw new MalformedException("Cannot parse value of message " + builder.getKey(), valueNode); } } /** * Creates a {@link JsMessage} for a JS message defined using an assignment to * a qualified name (e.g <code>a.b.MSG_X = goog.getMsg(...);</code>). * * @param builder the message builder * @param valueNode a node in a JS message value * @param docNode the node containing the jsdoc. * @throws MalformedException if {@code getPropNode} does not * correspond to a valid JS message node */ private void extractMessageFrom( Builder builder, Node valueNode, Node docNode) throws MalformedException { maybeInitMetaDataFromJsDoc(builder, docNode); extractFromCallNode(builder, valueNode); } /** * Initializes the meta data in a JsMessage by examining the nodes just before * and after a message VAR node. 
* * @param builder the message builder whose meta data will be initialized * @param varNode the message VAR node * @param parentOfVarNode {@code varNode}'s parent node */ private void maybeInitMetaDataFromJsDocOrHelpVar( Builder builder, Node varNode, @Nullable Node parentOfVarNode) throws MalformedException { // First check description in @desc if (maybeInitMetaDataFromJsDoc(builder, varNode)) { return; } // Check the preceding node for meta data if ((parentOfVarNode != null) && maybeInitMetaDataFromHelpVar(builder, varNode.getPrevious())) { return; } // Check the subsequent node for meta data maybeInitMetaDataFromHelpVar(builder, varNode.getNext()); } /** * Initializes the meta data in a JsMessage by examining a node just before or * after a message VAR node. * * @param builder the message builder whose meta data will be initialized * @param sibling a node adjacent to the message VAR node * @return true iff message has corresponding description variable */ private static boolean maybeInitMetaDataFromHelpVar(Builder builder, @Nullable Node sibling) throws MalformedException { if ((sibling != null) && (sibling.isVar())) { Node nameNode = sibling.getFirstChild(); String name = nameNode.getString(); if (name.equals(builder.getKey() + DESC_SUFFIX)) { Node valueNode = nameNode.getFirstChild(); String desc = extractStringFromStringExprNode(valueNode); if (desc.startsWith(HIDDEN_DESC_PREFIX)) { builder.setDesc(desc.substring(HIDDEN_DESC_PREFIX.length()).trim()); builder.setIsHidden(true); } else { builder.setDesc(desc); } return true; } } return false; } /** * Initializes the meta data in a message builder given a node that may * contain JsDoc properties. 
* * @param builder the message builder whose meta data will be initialized * @param node the node with the message's JSDoc properties * @return true if message has JsDoc with valid description in @desc * annotation */ private static boolean maybeInitMetaDataFromJsDoc(Builder builder, Node node) { boolean messageHasDesc = false; JSDocInfo info = node.getJSDocInfo(); if (info != null) { String desc = info.getDescription(); if (desc != null) { builder.setDesc(desc); messageHasDesc = true; } if (info.isHidden()) { builder.setIsHidden(true); } if (info.getMeaning() != null) { builder.setMeaning(info.getMeaning()); } } return messageHasDesc; } /** * Returns the string value associated with a node representing a JS string or * several JS strings added together (e.g. {@code 'str'} or {@code 's' + 't' + * 'r'}). * * @param node the node from where we extract the string * @return String representation of the node * @throws MalformedException if the parsed message is invalid */ private static String extractStringFromStringExprNode(Node node) throws MalformedException { switch (node.getToken()) { case STRING: return node.getString(); case ADD: StringBuilder sb = new StringBuilder(); for (Node child : node.children()) { sb.append(extractStringFromStringExprNode(child)); } return sb.toString(); default: throw new MalformedException( "STRING or ADD node expected; found: " + node.getToken(), node); } } /** * Initializes a message builder from a FUNCTION node. 
* <p> * <pre> * The tree should look something like: * * function * |-- name * |-- lp * | |-- name <arg1> * | -- name <arg2> * -- block * | * --return * | * --add * |-- string foo * -- name <arg1> * </pre> * * @param builder the message builder * @param node the function node that contains a message * @throws MalformedException if the parsed message is invalid */ private void extractFromFunctionNode(Builder builder, Node node) throws MalformedException { Set<String> phNames = new HashSet<>(); for (Node fnChild : node.children()) { switch (fnChild.getToken()) { case NAME: // This is okay. The function has a name, but it is empty. break; case PARAM_LIST: // Parse the placeholder names from the function argument list. for (Node argumentNode : fnChild.children()) { if (argumentNode.isName()) { String phName = argumentNode.getString(); if (phNames.contains(phName)) { throw new MalformedException("Duplicate placeholder name: " + phName, argumentNode); } else { phNames.add(phName); } } } break; case BLOCK: // Build the message's value by examining the return statement Node returnNode = fnChild.getFirstChild(); if (!returnNode.isReturn()) { throw new MalformedException( "RETURN node expected; found: " + returnNode.getToken(), returnNode); } for (Node child : returnNode.children()) { extractFromReturnDescendant(builder, child); } // Check that all placeholders from the message text have appropriate // object literal keys for (String phName : builder.getPlaceholders()) { if (!phNames.contains(phName)) { throw new MalformedException( "Unrecognized message placeholder referenced: " + phName, returnNode); } } break; default: throw new MalformedException( "NAME, PARAM_LIST, or BLOCK node expected; found: " + node, fnChild); } } } /** * Appends value parts to the message builder by traversing the descendants * of the given RETURN node. 
* * @param builder the message builder * @param node the node from where we extract a message * @throws MalformedException if the parsed message is invalid */ private static void extractFromReturnDescendant(Builder builder, Node node) throws MalformedException { switch (node.getToken()) { case STRING: builder.appendStringPart(node.getString()); break; case NAME: builder.appendPlaceholderReference(node.getString()); break; case ADD: for (Node child : node.children()) { extractFromReturnDescendant(builder, child); } break; default: throw new MalformedException( "STRING, NAME, or ADD node expected; found: " + node.getToken(), node); } } /** * Initializes a message builder from a CALL node. * <p> * The tree should look something like: * * <pre> * call * |-- getprop * | |-- name 'goog' * | +-- string 'getMsg' * | * |-- string 'Hi {$userName}! Welcome to {$product}.' * +-- objlit * |-- string 'userName' * |-- name 'someUserName' * |-- string 'product' * +-- call * +-- name 'getProductName' * </pre> * * @param builder the message builder * @param node the call node from where we extract the message * @throws MalformedException if the parsed message is invalid */ private void extractFromCallNode(Builder builder, Node node) throws MalformedException { // Check the function being called if (!node.isCall()) { throw new MalformedException( "Message must be initialized using " + MSG_FUNCTION_NAME + " function.", node); } Node fnNameNode = node.getFirstChild(); if (!fnNameNode.matchesQualifiedName(MSG_FUNCTION_NAME)) { throw new MalformedException( "Message initialized using unrecognized function. 
" + "Please use " + MSG_FUNCTION_NAME + "() instead.", fnNameNode); } // Get the message string Node stringLiteralNode = fnNameNode.getNext(); if (stringLiteralNode == null) { throw new MalformedException("Message string literal expected", stringLiteralNode); } // Parse the message string and append parts to the builder parseMessageTextNode(builder, stringLiteralNode); Node objLitNode = stringLiteralNode.getNext(); Set<String> phNames = new HashSet<>(); if (objLitNode != null) { // Register the placeholder names if (!objLitNode.isObjectLit()) { throw new MalformedException("OBJLIT node expected", objLitNode); } for (Node aNode = objLitNode.getFirstChild(); aNode != null; aNode = aNode.getNext()) { if (!aNode.isStringKey()) { throw new MalformedException("STRING_KEY node expected as OBJLIT key", aNode); } String phName = aNode.getString(); if (!isLowerCamelCaseWithNumericSuffixes(phName)) { throw new MalformedException( "Placeholder name not in lowerCamelCase: " + phName, aNode); } if (phNames.contains(phName)) { throw new MalformedException("Duplicate placeholder name: " + phName, aNode); } phNames.add(phName); } } // Check that all placeholders from the message text have appropriate objlit // values Set<String> usedPlaceholders = builder.getPlaceholders(); for (String phName : usedPlaceholders) { if (!phNames.contains(phName)) { throw new MalformedException( "Unrecognized message placeholder referenced: " + phName, node); } } // Check that objLiteral have only names that are present in the // message text for (String phName : phNames) { if (!usedPlaceholders.contains(phName)) { throw new MalformedException( "Unused message placeholder: " + phName, node); } } } /** * Appends the message parts in a JS message value extracted from the given * text node. 
 *
 * @param builder the JS message builder to append parts to
 * @param node the node with string literal that contains the message text
 * @throws MalformedException if {@code value} contains a reference to
 *     an unregistered placeholder
 */
private static void parseMessageTextNode(Builder builder, Node node)
    throws MalformedException {
  String value = extractStringFromStringExprNode(node);

  // Scan the text left-to-right, splitting it into literal string parts and
  // "{$name}" placeholder references (PH_JS_PREFIX / PH_JS_SUFFIX).
  while (true) {
    int phBegin = value.indexOf(PH_JS_PREFIX);
    if (phBegin < 0) {
      // Just a string literal — no (more) placeholders in the remainder.
      builder.appendStringPart(value);
      return;
    } else {
      if (phBegin > 0) {
        // A string literal followed by a placeholder
        builder.appendStringPart(value.substring(0, phBegin));
      }

      // A placeholder. Find where it ends
      int phEnd = value.indexOf(PH_JS_SUFFIX, phBegin);
      if (phEnd < 0) {
        // Opening "{$" with no closing "}" — malformed message.
        throw new MalformedException(
            "Placeholder incorrectly formatted in: " + builder.getKey(),
            node);
      }

      String phName = value.substring(phBegin + PH_JS_PREFIX.length(),
          phEnd);
      builder.appendPlaceholderReference(phName);
      int nextPos = phEnd + PH_JS_SUFFIX.length();
      if (nextPos < value.length()) {
        // Iterate on the rest of the message value
        value = value.substring(nextPos);
      } else {
        // The message is parsed
        return;
      }
    }
  }
}

/** Visit a call to goog.getMsgWithFallback. 
*/ private void visitFallbackFunctionCall(NodeTraversal t, Node call) { // Check to make sure the function call looks like: // goog.getMsgWithFallback(MSG_1, MSG_2); if (call.getChildCount() != 3 || !call.getSecondChild().isName() || !call.getLastChild().isName()) { compiler.report(t.makeError(call, BAD_FALLBACK_SYNTAX)); return; } Node firstArg = call.getSecondChild(); JsMessage firstMessage = getTrackedMessage(t, firstArg.getString()); if (firstMessage == null) { compiler.report( t.makeError(firstArg, FALLBACK_ARG_ERROR, firstArg.getString())); return; } Node secondArg = firstArg.getNext(); JsMessage secondMessage = getTrackedMessage( t, call.getChildAtIndex(2).getString()); if (secondMessage == null) { compiler.report( t.makeError(secondArg, FALLBACK_ARG_ERROR, secondArg.getString())); return; } processMessageFallback(call, firstMessage, secondMessage); } /** * Processes found JS message. Several examples of "standard" processing * routines are: * <ol> * <li>extract all JS messages * <li>replace JS messages with localized versions for some specific language * <li>check that messages have correct syntax and present in localization * bundle * </ol> * * @param message the found message * @param definition the definition of the object and usually contains all * additional message information like message node/parent's node */ protected abstract void processJsMessage(JsMessage message, JsMessageDefinition definition); /** * Processes the goog.getMsgWithFallback primitive. * goog.getMsgWithFallback(MSG_1, MSG_2); * * By default, does nothing. */ void processMessageFallback(Node callNode, JsMessage message1, JsMessage message2) {} /** * Returns whether the given JS identifier is a valid JS message name. 
 */
boolean isMessageName(String identifier, boolean isNewStyleMessage) {
  // In CLOSURE style every MSG_*-prefixed name is a message; otherwise
  // old-style "MSG_*_HELP" description variables are excluded unless the
  // value was a goog.getMsg() call (new-style).
  return identifier.startsWith(MSG_PREFIX)
      && (style == JsMessage.Style.CLOSURE
          || isNewStyleMessage
          || !identifier.endsWith(DESC_SUFFIX));
}

/**
 * Returns whether the given message name is in the unnamed namespace
 * (i.e. matches the {@code MSG_UNNAMED.*} pattern).
 */
private static boolean isUnnamedMessageName(String identifier) {
  return MSG_UNNAMED_PATTERN.matcher(identifier).matches();
}

/**
 * Returns whether a string is nonempty, begins with a lowercase letter, and
 * contains only digits and underscores after the first underscore.
 */
static boolean isLowerCamelCaseWithNumericSuffixes(String input) {
  return CAMELCASE_PATTERN.matcher(input).matches();
}

/**
 * Converts the given string from upper-underscore case to lower-camel case,
 * preserving numeric suffixes. For example: "NAME" -> "name" "A4_LETTER" ->
 * "a4Letter" "START_SPAN_1_23" -> "startSpan_1_23".
 */
static String toLowerCamelCaseWithNumericSuffixes(String input) {
  // Determine where the numeric suffixes begin
  int suffixStart = input.length();
  while (suffixStart > 0) {
    // Walk left over a run of digits ending just before suffixStart.
    char ch = '\0';
    int numberStart = suffixStart;
    while (numberStart > 0) {
      ch = input.charAt(numberStart - 1);
      if (Character.isDigit(ch)) {
        numberStart--;
      } else {
        break;
      }
    }
    // A nonempty digit run preceded by '_' extends the suffix (including
    // that '_'); keep scanning left for further "_<digits>" groups.
    if ((numberStart > 0) && (numberStart < suffixStart) && (ch == '_')) {
      suffixStart = numberStart - 1;
    } else {
      break;
    }
  }
  if (suffixStart == input.length()) {
    // No numeric suffix: convert the whole identifier.
    return CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, input);
  } else {
    // Convert only the prefix; append the "_<digits>..." suffix verbatim.
    return CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL,
        input.substring(0, suffixStart)) + input.substring(suffixStart);
  }
}

/**
 * Checks a node's type. 
 *
 * @throws MalformedException if the node is null or the wrong type
 */
protected void checkNode(@Nullable Node node, Token type) throws MalformedException {
  if (node == null) {
    throw new MalformedException(
        "Expected node type " + type + "; found: null", node);
  }
  if (node.getToken() != type) {
    throw new MalformedException(
        "Expected node type " + type + "; found: " + node.getToken(), node);
  }
}

/**
 * Thrown when a message definition does not have the expected structure.
 * Carries the offending node so callers can report MESSAGE_TREE_MALFORMED
 * at its source location.
 */
static class MalformedException extends Exception {
  private static final long serialVersionUID = 1L;

  // Node at which the malformed construct was found; may be null (see
  // checkNode above) — callers should tolerate that.
  private final Node node;

  MalformedException(String message, Node node) {
    super(message);
    this.node = node;
  }

  Node getNode() {
    return node;
  }
}

/**
 * A tracked message paired with the node that defined it; used for
 * duplicate-key reporting and fallback lookup.
 */
private static class MessageLocation {
  private final JsMessage message;
  private final Node messageNode;

  private MessageLocation(JsMessage message, Node messageNode) {
    this.message = message;
    this.messageNode = messageNode;
  }
}
}
/* * Jython Database Specification API 2.0 * * * Copyright (c) 2001 brian zimmer <bzimmer@ziclix.com> * */ package com.ziclix.python.sql; import java.sql.Connection; import java.sql.SQLException; import java.util.Collections; import java.util.WeakHashMap; import java.util.Set; import org.python.core.ClassDictInit; import org.python.core.ContextManager; import org.python.core.Py; import org.python.core.PyBuiltinMethodSet; import org.python.core.PyException; import org.python.core.PyInteger; import org.python.core.PyList; import org.python.core.PyObject; import org.python.core.PyString; import org.python.core.PyUnicode; import org.python.core.ThreadState; import org.python.core.Traverseproc; import org.python.core.Visitproc; import com.ziclix.python.sql.util.PyArgParser; /** * A connection to the database. * * @author brian zimmer */ public class PyConnection extends PyObject implements ClassDictInit, ContextManager, Traverseproc { /** True if closed. */ protected boolean closed; /** Whether transactions are supported. */ protected boolean supportsTransactions; /** Whether multiple ResultSets are supported. */ protected boolean supportsMultipleResultSets; /** The underlying java.sql.Connection. */ protected Connection connection; /** Underlying cursors. */ private Set<PyCursor> cursors; /** Underlying statements. 
*/ private Set<PyStatement> statements; /** Field __members__ */ protected static PyList __members__; /** Field __methods__ */ protected static PyList __methods__; static { PyObject[] m = new PyObject[5]; m[0] = new PyString("close"); m[1] = new PyString("commit"); m[2] = new PyString("cursor"); m[3] = new PyString("rollback"); m[4] = new PyString("nativesql"); __methods__ = new PyList(m); m = new PyObject[10]; m[0] = new PyString("autocommit"); m[1] = new PyString("dbname"); m[2] = new PyString("dbversion"); m[3] = new PyString("drivername"); m[4] = new PyString("driverversion"); m[5] = new PyString("url"); m[6] = new PyString("__connection__"); m[7] = new PyString("__cursors__"); m[8] = new PyString("__statements__"); m[9] = new PyString("closed"); __members__ = new PyList(m); } /** * Create a PyConnection with the open connection. * * @param connection * @throws SQLException */ public PyConnection(Connection connection) throws SQLException { this.closed = false; cursors = Collections.newSetFromMap(new WeakHashMap<PyCursor, Boolean>()); cursors = Collections.synchronizedSet(cursors); this.connection = connection; statements = Collections.newSetFromMap(new WeakHashMap<PyStatement, Boolean>()); statements = Collections.synchronizedSet(statements); this.supportsTransactions = this.connection.getMetaData().supportsTransactions(); this.supportsMultipleResultSets = this.connection.getMetaData().supportsMultipleResultSets(); if (this.supportsTransactions) { this.connection.setAutoCommit(false); } } /** * Produces a string representation of the object. * * @return string representation of the object. 
*/ @Override public String toString() { try { return String.format("<PyConnection object at %s user='%s', url='%s'>", Py.idstr(this), connection.getMetaData().getUserName(), connection.getMetaData().getURL()); } catch (SQLException e) { return String.format("<PyConnection object at %s", Py.idstr(this)); } } /** * Method classDictInit * * @param dict */ static public void classDictInit(PyObject dict) { dict.__setitem__("autocommit", new PyInteger(0)); dict.__setitem__("close", new ConnectionFunc("close", 0, 0, 0, zxJDBC.getString("close"))); dict.__setitem__("commit", new ConnectionFunc("commit", 1, 0, 0, zxJDBC.getString("commit"))); dict.__setitem__("cursor", new ConnectionFunc("cursor", 2, 0, 4, zxJDBC.getString("cursor"))); dict.__setitem__("rollback", new ConnectionFunc("rollback", 3, 0, 0, zxJDBC.getString("rollback"))); dict.__setitem__("nativesql", new ConnectionFunc("nativesql", 4, 1, 1, zxJDBC.getString("nativesql"))); dict.__setitem__("__enter__", new ConnectionFunc("__enter__", 5, 0, 0, "__enter__")); dict.__setitem__("__exit__", new ConnectionFunc("__exit__", 6, 3, 3, "__exit__")); // hide from python dict.__setitem__("initModule", null); dict.__setitem__("toString", null); dict.__setitem__("setConnection", null); dict.__setitem__("getPyClass", null); dict.__setitem__("connection", null); dict.__setitem__("classDictInit", null); dict.__setitem__("cursors", null); } /** * Sets the attribute. * * @param name * @param value */ @Override public void __setattr__(String name, PyObject value) { if ("autocommit".equals(name)) { try { if (this.supportsTransactions) { this.connection.setAutoCommit(value.__nonzero__()); } } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } return; } super.__setattr__(name, value); } /** * Finds the attribute. 
 *
 * @param name the name of the attribute of interest
 * @return the value for the attribute of the specified name
 */
@Override
public PyObject __findattr_ex__(String name) {
  // DB-API / zxJDBC introspection attributes, mostly backed by JDBC
  // DatabaseMetaData; SQLExceptions are mapped to zxJDBC.DatabaseError.
  if ("autocommit".equals(name)) {
    try {
      return connection.getAutoCommit() ? Py.One : Py.Zero;
    } catch (SQLException e) {
      throw zxJDBC.makeException(zxJDBC.DatabaseError, e);
    }
  } else if ("dbname".equals(name)) {
    try {
      return Py.newString(this.connection.getMetaData().getDatabaseProductName());
    } catch (SQLException e) {
      throw zxJDBC.makeException(zxJDBC.DatabaseError, e);
    }
  } else if ("dbversion".equals(name)) {
    try {
      return Py.newString(this.connection.getMetaData().getDatabaseProductVersion());
    } catch (SQLException e) {
      throw zxJDBC.makeException(zxJDBC.DatabaseError, e);
    }
  } else if ("drivername".equals(name)) {
    try {
      return Py.newString(this.connection.getMetaData().getDriverName());
    } catch (SQLException e) {
      throw zxJDBC.makeException(zxJDBC.DatabaseError, e);
    }
  } else if ("driverversion".equals(name)) {
    try {
      return Py.newString(this.connection.getMetaData().getDriverVersion());
    } catch (SQLException e) {
      throw zxJDBC.makeException(zxJDBC.DatabaseError, e);
    }
  } else if ("url".equals(name)) {
    try {
      return Py.newString(this.connection.getMetaData().getURL());
    } catch (SQLException e) {
      throw zxJDBC.makeException(zxJDBC.DatabaseError, e);
    }
  } else if ("__connection__".equals(name)) {
    return Py.java2py(this.connection);
  } else if ("__cursors__".equals(name)) {
    // Read-only views so Python code cannot mutate the tracking sets.
    return Py.java2py(Collections.unmodifiableSet(this.cursors));
  } else if ("__statements__".equals(name)) {
    return Py.java2py(Collections.unmodifiableSet(this.statements));
  } else if ("__methods__".equals(name)) {
    return __methods__;
  } else if ("__members__".equals(name)) {
    return __members__;
  } else if ("closed".equals(name)) {
    return Py.newBoolean(closed);
  }

  return super.__findattr_ex__(name);
}

/**
 * Close the connection now (rather than whenever __del__ is called). 
The connection
 * will be unusable from this point forward; an Error (or subclass) exception will be
 * raised if any operation is attempted with the connection. The same applies to all
 * cursor objects trying to use the connection.
 */
public void close() {
  if (closed) {
    throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed");
  }

  // mark ourselves closed now so that any callbacks we get from closing down
  // cursors and statements do not try to modify our internal sets
  this.closed = true;

  // The sets are synchronized wrappers; lock each across iteration + clear.
  synchronized (this.cursors) {
    for (PyCursor cursor : cursors) {
      cursor.close();
    }
    this.cursors.clear();
  }
  synchronized (this.statements) {
    for (PyStatement statement : statements) {
      statement.close();
    }
    this.statements.clear();
  }

  try {
    this.connection.close();
  } catch (SQLException e) {
    throw zxJDBC.makeException(e);
  }
}

/**
 * Commit any pending transaction to the database. Note that if the database supports
 * an auto-commit feature, this must be initially off. An interface method may be
 * provided to turn it back on.
 * <p/>
 * Database modules that do not support transactions should implement this method with
 * void functionality.
 */
public void commit() {
  if (closed) {
    throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed");
  }

  // Per the class contract above: a no-op when transactions are unsupported.
  if (!this.supportsTransactions) {
    return;
  }

  try {
    this.connection.commit();
  } catch (SQLException e) {
    throw zxJDBC.makeException(e);
  }
}

/**
 * <i>This method is optional since not all databases provide transaction support.</i>
 * <p/>
 * In case a database does provide transactions this method causes the database to
 * roll back to the start of any pending transaction. Closing a connection without
 * committing the changes first will cause an implicit rollback to be performed. 
 */
public void rollback() {
  if (closed) {
    throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed");
  }

  // Silently succeed when the driver has no transaction support.
  if (!this.supportsTransactions) {
    return;
  }

  try {
    this.connection.rollback();
  } catch (SQLException e) {
    throw zxJDBC.makeException(e);
  }
}

/**
 * Converts the given SQL statement into the system's native SQL grammar. A driver may
 * convert the JDBC sql grammar into its system's native SQL grammar prior to sending
 * it; this method returns the native form of the statement that the driver would have
 * sent.
 *
 * @param nativeSQL the statement to convert; Py.None passes through unchanged
 * @return the native form of this statement
 */
public PyObject nativesql(PyObject nativeSQL) {
  if (closed) {
    throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed");
  }

  if (nativeSQL == Py.None) {
    return Py.None;
  }

  try {
    // Preserve unicode-ness of the input in the returned value.
    if (nativeSQL instanceof PyUnicode) {
      return Py.newUnicode(this.connection.nativeSQL(nativeSQL.toString()));
    }
    return Py.newString(this.connection.nativeSQL(nativeSQL.__str__().toString()));
  } catch (SQLException e) {
    throw zxJDBC.makeException(e);
  }
}

/**
 * Return a new Cursor Object using the connection. If the database does not provide a
 * direct cursor concept, the module will have to emulate cursors using other means to
 * the extent needed by this specification.
 *
 * @return a new cursor using this connection
 */
public PyCursor cursor() {
  return cursor(false);
}

/**
 * Return a new Cursor Object using the connection. If the database does not provide a
 * direct cursor concept, the module will have to emulate cursors using other means to
 * the extent needed by this specification.
 *
 * @param dynamicFetch if true, dynamically iterate the result
 * @return a new cursor using this connection
 */
public PyCursor cursor(boolean dynamicFetch) {
  return this.cursor(dynamicFetch, Py.None, Py.None);
}

/**
 * Return a new Cursor Object using the connection. 
If the database does not provide a * direct cursor concept, the module will have to emulate cursors using other means to * the extent needed by this specification. * * @param dynamicFetch if true, dynamically iterate the result * @param rsType the type of the underlying ResultSet * @param rsConcur the concurrency of the underlying ResultSet * @return a new cursor using this connection */ public PyCursor cursor(boolean dynamicFetch, PyObject rsType, PyObject rsConcur) { if (closed) { throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed"); } PyCursor cursor = new PyExtendedCursor(this, dynamicFetch, rsType, rsConcur); this.cursors.add(cursor); return cursor; } /** * Remove an open PyCursor. * * @param cursor */ void remove(PyCursor cursor) { if (closed) { return; } this.cursors.remove(cursor); } /** * Method register * * @param statement statement */ void add(PyStatement statement) { if (closed) { return; } this.statements.add(statement); } /** * Method contains * * @param statement statement * @return boolean */ boolean contains(PyStatement statement) { if (closed) { return false; } return this.statements.contains(statement); } public PyObject __enter__(ThreadState ts) { return this; } public PyObject __enter__() { return this; } public boolean __exit__(ThreadState ts, PyException exception) { if (exception == null) { commit(); } else { rollback(); } return false; } public boolean __exit__(PyObject type, PyObject value, PyObject traceback) { if (type == null || type == Py.None) { commit(); } else { rollback(); } return false; } /* Traverseproc implementation */ @Override public int traverse(Visitproc visit, Object arg) { int retVal; for (PyObject ob: cursors) { if (ob != null) { retVal = visit.visit(ob, arg); if (retVal != 0) { return retVal; } } } for (PyObject ob: statements) { if (ob != null) { retVal = visit.visit(ob, arg); if (retVal != 0) { return retVal; } } } return 0; } @Override public boolean refersDirectlyTo(PyObject ob) { if (ob == 
null) {
        return false;
    }
    // Direct references are exactly the tracked cursors and statements.
    if (cursors != null && cursors.contains(ob)) {
        return true;
    } else if (statements != null && statements.contains(ob)) {
        return true;
    } else {
        return false;
    }
}
}

/**
 * Arity-based dispatcher that exposes the PyConnection methods as Python
 * built-in methods. The {@code index} passed to the constructor selects the
 * target method: 0=close, 1=commit, 2=cursor, 3=rollback, 4=nativesql,
 * 5=__enter__, 6=__exit__. Each __call__ overload handles one call arity.
 */
class ConnectionFunc extends PyBuiltinMethodSet {

    ConnectionFunc(String name, int index, int minargs, int maxargs, String doc) {
        super(name, index, minargs, maxargs, doc, PyConnection.class);
    }

    // Zero-argument calls: close(), commit(), cursor(), rollback(), __enter__().
    @Override
    public PyObject __call__() {
        PyConnection c = (PyConnection) __self__;
        switch (index) {
            case 0:
                c.close();
                return Py.None;
            case 1:
                c.commit();
                return Py.None;
            case 2:
                return c.cursor();
            case 3:
                c.rollback();
                return Py.None;
            case 5:
                return c.__enter__();
            default:
                throw info.unexpectedCall(0, false);
        }
    }

    // One-argument calls: cursor(dynamicFetch) and nativesql(sql).
    @Override
    public PyObject __call__(PyObject arg) {
        PyConnection c = (PyConnection) __self__;
        switch (index) {
            case 2:
                return c.cursor(arg.__nonzero__());
            case 4:
                return c.nativesql(arg);
            default:
                throw info.unexpectedCall(1, false);
        }
    }

    // Three-argument calls: cursor(dynamic, rstype, rsconcur) and
    // __exit__(type, value, traceback).
    @Override
    public PyObject __call__(PyObject arg1, PyObject arg2, PyObject arg3) {
        PyConnection c = (PyConnection) __self__;
        switch (index) {
            case 2:
                return c.cursor(arg1.__nonzero__(), arg2, arg3);
            case 6:
                return Py.newBoolean(c.__exit__(arg1, arg2, arg3));
            default:
                throw info.unexpectedCall(3, false);
        }
    }

    // Keyword-style calls: only cursor() accepts keywords. Positional
    // arguments, when supplied, take precedence over the keyword values.
    @Override
    public PyObject __call__(PyObject[] args, String[] keywords) {
        PyConnection c = (PyConnection) __self__;
        PyArgParser parser = new PyArgParser(args, keywords);
        switch (index) {
            case 2:
                PyObject dynamic = parser.kw("dynamic", Py.None);
                PyObject rstype = parser.kw("rstype", Py.None);
                PyObject rsconcur = parser.kw("rsconcur", Py.None);
                dynamic = (parser.numArg() >= 1) ? parser.arg(0) : dynamic;
                rstype = (parser.numArg() >= 2) ? parser.arg(1) : rstype;
                rsconcur = (parser.numArg() >= 3) ? parser.arg(2) : rsconcur;
                return c.cursor(dynamic.__nonzero__(), rstype, rsconcur);
            default:
                throw info.unexpectedCall(args.length, true);
        }
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.externalSystem.service.project.manage; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.externalSystem.model.DataNode; import com.intellij.openapi.externalSystem.model.Key; import com.intellij.openapi.externalSystem.model.ProjectKeys; import com.intellij.openapi.externalSystem.model.ProjectSystemId; import com.intellij.openapi.externalSystem.model.project.ContentRootData; import com.intellij.openapi.externalSystem.model.project.ContentRootData.SourceRoot; import com.intellij.openapi.externalSystem.model.project.ExternalSystemSourceType; import com.intellij.openapi.externalSystem.model.project.ModuleData; import com.intellij.openapi.externalSystem.model.project.ProjectData; import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider; import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings; import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings; import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; import com.intellij.openapi.externalSystem.util.ExternalSystemConstants; import com.intellij.openapi.externalSystem.util.Order; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ContentEntry; import com.intellij.openapi.roots.ModifiableRootModel; import 
com.intellij.openapi.roots.SourceFolder;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.java.JavaModuleSourceRootTypes;
import org.jetbrains.jps.model.java.JavaResourceRootType;
import org.jetbrains.jps.model.java.JavaSourceRootProperties;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;

import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

/**
 * Imports {@link ContentRootData} nodes from an external build system (e.g. Gradle)
 * into the IDE project model: content roots, source/test/resource folders and
 * excluded folders of the matching IDE modules.
 *
 * @author Denis Zhdanov
 * @since 2/7/12 3:20 PM
 */
@Order(ExternalSystemConstants.BUILTIN_SERVICE_ORDER)
public class ContentRootDataService extends AbstractProjectDataService<ContentRootData, ContentEntry> {

  private static final Logger LOG = Logger.getInstance("#" + ContentRootDataService.class.getName());

  @NotNull
  @Override
  public Key<ContentRootData> getTargetDataKey() {
    return ProjectKeys.CONTENT_ROOT;
  }

  /**
   * Groups the incoming content-root nodes by owning module, resolves each module
   * in the IDE (preferring the one cached by AbstractModuleDataService), and
   * delegates the per-module import. Modules that cannot be found are logged
   * and skipped rather than failing the whole import.
   */
  @Override
  public void importData(@NotNull Collection<DataNode<ContentRootData>> toImport,
                         @Nullable ProjectData projectData,
                         @NotNull Project project,
                         @NotNull IdeModifiableModelsProvider modelsProvider) {
    if (toImport.isEmpty()) {
      return;
    }

    MultiMap<DataNode<ModuleData>, DataNode<ContentRootData>> byModule = ExternalSystemApiUtil.groupBy(toImport, ModuleData.class);
    for (Map.Entry<DataNode<ModuleData>, Collection<DataNode<ContentRootData>>> entry : byModule.entrySet()) {
      // Prefer the module created earlier in this import session; fall back to lookup.
      Module module = entry.getKey().getUserData(AbstractModuleDataService.MODULE_KEY);
      module = module != null ? module : modelsProvider.findIdeModule(entry.getKey().getData());
      if (module == null) {
        LOG.warn(String.format(
          "Can't import content roots. Reason: target module (%s) is not found at the ide. Content roots: %s",
          entry.getKey(), entry.getValue()
        ));
        continue;
      }
      importData(modelsProvider, entry.getValue(), module);
    }
  }

  /**
   * Applies the given content-root nodes to one module's modifiable root model:
   * finds/creates each content entry, re-registers its source folders from the
   * external data, and finally removes content entries that are no longer present
   * in the external model.
   */
  private static void importData(@NotNull IdeModifiableModelsProvider modelsProvider,
                                 @NotNull final Collection<DataNode<ContentRootData>> data,
                                 @NotNull final Module module) {
    final ModifiableRootModel modifiableRootModel = modelsProvider.getModifiableRootModel(module);
    final ContentEntry[] contentEntries = modifiableRootModel.getContentEntries();
    // URL -> existing entry; whatever remains unmatched at the end is stale and removed.
    final Map<String, ContentEntry> contentEntriesMap = ContainerUtilRt.newHashMap();
    for (ContentEntry contentEntry : contentEntries) {
      contentEntriesMap.put(contentEntry.getUrl(), contentEntry);
    }

    // Whether missing source-root directories should be physically created is a
    // per-linked-project setting of the owning external system.
    boolean createEmptyContentRootDirectories = false;
    if (!data.isEmpty()) {
      ProjectSystemId projectSystemId = data.iterator().next().getData().getOwner();
      AbstractExternalSystemSettings externalSystemSettings = ExternalSystemApiUtil.getSettings(module.getProject(), projectSystemId);

      String path = module.getOptionValue(ExternalSystemConstants.ROOT_PROJECT_PATH_KEY);
      if (path != null) {
        ExternalProjectSettings projectSettings = externalSystemSettings.getLinkedProjectSettings(path);
        createEmptyContentRootDirectories = projectSettings != null && projectSettings.isCreateEmptyContentRootDirectories();
      }
    }

    // Identity set: several ContentRootData nodes may map to the same ContentEntry,
    // and its source folders must be cleared only once.
    final Set<ContentEntry> importedContentEntries = ContainerUtil.newIdentityTroveSet();
    for (final DataNode<ContentRootData> node : data) {
      final ContentRootData contentRoot = node.getData();

      final ContentEntry contentEntry = findOrCreateContentRoot(modifiableRootModel, contentRoot.getRootPath());
      if(!importedContentEntries.contains(contentEntry)) {
        // clear source folders but do not remove existing excluded folders
        contentEntry.clearSourceFolders();
        importedContentEntries.add(contentEntry);
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("Importing content root '%s' for module '%s'", contentRoot.getRootPath(), module.getName()));
      }

      // Re-register every kind of root the external system knows about.
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.SOURCE)) {
        createSourceRootIfAbsent(
          contentEntry, path, module.getName(), JavaSourceRootType.SOURCE, false, createEmptyContentRootDirectories);
      }
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.TEST)) {
        createSourceRootIfAbsent(
          contentEntry, path, module.getName(), JavaSourceRootType.TEST_SOURCE, false, createEmptyContentRootDirectories);
      }
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.SOURCE_GENERATED)) {
        createSourceRootIfAbsent(
          contentEntry, path, module.getName(), JavaSourceRootType.SOURCE, true, createEmptyContentRootDirectories);
      }
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.TEST_GENERATED)) {
        createSourceRootIfAbsent(
          contentEntry, path, module.getName(), JavaSourceRootType.TEST_SOURCE, true, createEmptyContentRootDirectories);
      }
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.RESOURCE)) {
        createSourceRootIfAbsent(
          contentEntry, path, module.getName(), JavaResourceRootType.RESOURCE, false, createEmptyContentRootDirectories);
      }
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.TEST_RESOURCE)) {
        createSourceRootIfAbsent(
          contentEntry, path, module.getName(), JavaResourceRootType.TEST_RESOURCE, false, createEmptyContentRootDirectories);
      }
      for (SourceRoot path : contentRoot.getPaths(ExternalSystemSourceType.EXCLUDED)) {
        createExcludedRootIfAbsent(contentEntry, path, module.getName(), module.getProject());
      }
      contentEntriesMap.remove(contentEntry.getUrl());
    }
    // Entries never touched above are no longer part of the external model.
    for (ContentEntry contentEntry : contentEntriesMap.values()) {
      modifiableRootModel.removeContentEntry(contentEntry);
    }
  }

  /**
   * Returns the content entry whose root matches {@code path}, creating a new
   * entry when none of the existing ones does.
   */
  @NotNull
  private static ContentEntry findOrCreateContentRoot(@NotNull ModifiableRootModel model, @NotNull String path) {
    ContentEntry[] entries = model.getContentEntries();

    for (ContentEntry entry : entries) {
      VirtualFile file = entry.getFile();
      if (file == null) {
        continue;
      }
      if (ExternalSystemApiUtil.getLocalFileSystemPath(file).equals(path)) {
        return entry;
      }
    }
    return model.addContentEntry(toVfsUrl(path));
  }

  /**
   * Registers {@code root} as a source folder of the given type unless an
   * equivalent folder already exists. An existing folder at the same path is
   * kept when it is a plain SOURCE root or already has the requested type
   * (and a TEST_SOURCE root is not downgraded to TEST_RESOURCE); otherwise it
   * is replaced. Optionally creates the directory on disk.
   */
  private static void createSourceRootIfAbsent(@NotNull ContentEntry entry,
                                               @NotNull final SourceRoot root,
                                               @NotNull String moduleName,
                                               @NotNull JpsModuleSourceRootType<?> sourceRootType,
                                               boolean generated,
                                               boolean createEmptyContentRootDirectories) {
    SourceFolder[] folders = entry.getSourceFolders();
    for (SourceFolder folder : folders) {
      VirtualFile file = folder.getFile();
      if (file == null) {
        continue;
      }
      if (ExternalSystemApiUtil.getLocalFileSystemPath(file).equals(root.getPath())) {
        final JpsModuleSourceRootType<?> folderRootType = folder.getRootType();
        if(JavaSourceRootType.SOURCE.equals(folderRootType) || sourceRootType.equals(folderRootType)) {
          return;
        }
        if(JavaSourceRootType.TEST_SOURCE.equals(folderRootType) && JavaResourceRootType.TEST_RESOURCE.equals(sourceRootType)) {
          return;
        }
        // Same path but a different (non-protected) type: replace the folder below.
        entry.removeSourceFolder(folder);
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug(String.format("Importing %s for content root '%s' of module '%s'", root, entry.getUrl(), moduleName));
    }
    SourceFolder sourceFolder = entry.addSourceFolder(toVfsUrl(root.getPath()), sourceRootType);
    if (!StringUtil.isEmpty(root.getPackagePrefix())) {
      sourceFolder.setPackagePrefix(root.getPackagePrefix());
    }
    if (generated) {
      JavaSourceRootProperties properties = sourceFolder.getJpsElement().getProperties(JavaModuleSourceRootTypes.SOURCES);
      if(properties != null) {
        properties.setForGeneratedSources(true);
      }
    }
    if(createEmptyContentRootDirectories) {
      // Directory creation touches the VFS and must run inside a write action.
      ExternalSystemApiUtil.doWriteAction(new Runnable() {
        @Override
        public void run() {
          try {
            VfsUtil.createDirectoryIfMissing(root.getPath());
          }
          catch (IOException e) {
            LOG.warn(String.format("Unable to create directory for the path: %s", root.getPath()), e);
          }
        }
      });
    }
  }

  /**
   * Adds {@code root} as an excluded folder of the entry unless it is already
   * excluded, and (unless the IDE hides excluded files globally) also tells the
   * VCS change-list manager to ignore the directory.
   */
  private static void createExcludedRootIfAbsent(@NotNull ContentEntry entry,
                                                 @NotNull SourceRoot root,
                                                 @NotNull String moduleName,
                                                 @NotNull Project project) {
    String rootPath = root.getPath();
    for (VirtualFile file : entry.getExcludeFolderFiles()) {
      if (ExternalSystemApiUtil.getLocalFileSystemPath(file).equals(rootPath)) {
        return;
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug(String.format("Importing excluded root '%s' for content root '%s' of module '%s'", root, entry.getUrl(), moduleName));
    }
    entry.addExcludeFolder(toVfsUrl(rootPath));
    if (!Registry.is("ide.hide.excluded.files")) {
      ChangeListManager.getInstance(project).addDirectoryToIgnoreImplicitly(rootPath);
    }
  }

  // Converts a local file-system path to a file:// VFS URL.
  private static String toVfsUrl(@NotNull String path) {
    return LocalFileSystem.PROTOCOL_PREFIX + path;
  }
}
package com.winterwell.utils.io; import java.io.BufferedWriter; import java.io.Closeable; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.Flushable; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.Collection; import java.util.List; import com.winterwell.utils.StrUtils; import com.winterwell.utils.Utils; import com.winterwell.utils.WrappedException; import com.winterwell.utils.containers.Containers; /** * Support for creating .csv files. * * Implements "standard" CSV behaviour as per * http://en.wikipedia.org/wiki/Comma-separated_values * <p> * TODO is this thread safe?? depends on whether {@link BufferedWriter} is. if * needed maybe use a lock-free queue instead of a lock for high concurrency?? * * TODO: Add method for writing a comment, quote fields containing comment * chars?? * * @author daniel * @testedby CSVWriterTest} */ public class CSVWriter implements Closeable, Flushable { /** * Convenience method to write a list of rows out to a String in CSV format (comma separated, " encoded). 
* @param rows * @return csv */ public static String writeToString(Collection rows) { StringWriter sout = new StringWriter(); CSVWriter w = new CSVWriter(sout, ',', '"'); for (Object row : rows) { // class check to pick the correct write method if (row instanceof List) { w.write(row); } else if (row instanceof String[]) { w.write((String[])row); } else { w.write((Object[])row); } } w.close(); return sout.toString(); } private static BufferedWriter CSVWriter2_fileWriter(File file, boolean append) { try { return FileUtils.getWriter(new FileOutputStream(file, append)); } catch (FileNotFoundException e) { throw Utils.runtime(e); } } private final CSVSpec spec; private File file; private CharSequence LINEEND = StrUtils.LINEEND; int linesWritten = 0; private BufferedWriter out; private final String quotedQuote; /** * Create a CSV file with the standard double-quote quote character. * * @param file * This will be overwritten if it does exist. * @param delimiter * @throws FileNotFoundException */ public CSVWriter(File file, char delimiter) { this(file, delimiter, '"', false); } /** * Work with CSV file with the standard double-quote quote character. * * @param file * @param delimiter * @throws FileNotFoundException */ public CSVWriter(File file, char delimiter, boolean append) { this(file, delimiter, '"', append); } public CSVWriter(File file, char delimiter, char quote) throws FileNotFoundException { this(file, delimiter, quote, false); } public CSVWriter(File file, char delimiter, char quote, boolean append) { this(file, new CSVSpec(delimiter, quote, CSVSpec.UNSET), append); } public CSVWriter(Writer out, char delimiter, char quote) { this(out, new CSVSpec(delimiter, quote, CSVSpec.UNSET)); } public CSVSpec getSpec() { return spec; } /** * The root constructor, often called via others * @param out * @param spec */ public CSVWriter(Writer out, CSVSpec spec) { Utils.check4null(out, spec); file = null; this.out = out instanceof BufferedWriter ? 
(BufferedWriter) out : new BufferedWriter(out); this.spec = spec; // Possibly this is too restrictive, but actually other values don't // really make sense assert spec.quote == '\'' || spec.quote == '"'; this.quotedQuote = "" + spec.quote + spec.quote; } public CSVWriter(File dest, CSVSpec spec) { this(dest, spec, false); } public CSVWriter(File dest, CSVSpec spec, boolean append) { this(CSVWriter2_fileWriter(dest, append), spec); this.file = dest; } public CSVWriter(File dest) { this(dest, new CSVSpec()); } /** * Flush & close the underlying file writer */ @Override public void close() { FileUtils.close(out); } public void flush() { try { out.flush(); } catch (IOException e) { throw new WrappedException(e); } } /** * @return file (if created using the file constructor) or null. null does * not imply that this is not a file-based writer. */ public File getFile() { return file; } /** * Set this writer to append to the end of an existing file. Must be called * before any lines are written * * @param append */ public void setAppend(boolean append) { assert linesWritten == 0; if (!append) return; try { out = FileUtils.getWriter(new FileOutputStream(file, true)); } catch (FileNotFoundException e) { throw new WrappedException(e); } } public void setCommentMarker(char commentMarker) { setCommentMarker(Character.toString(commentMarker)); } /** * @param commentMarker * If set (eg to '#'), then items beginning with this character * will be quoted to avoid them being interpreted as comments at * the other end. 0 by default. Comment markers are not standard * csv to the extent that there is such a thing. * @return */ public CSVWriter setCommentMarker(String commentMarker) { this.spec.comment = commentMarker.charAt(0); // can be null, but "" is not allowed assert commentMarker==null || commentMarker.length()==1 : commentMarker; return this; } /** * Change the default line-end. E.g. 
if you want to force M$ style \r\n * output * * @param lineEnd */ public void setLineEnd(CharSequence lineEnd) { LINEEND = lineEnd; } @Override public String toString() { return file == null ? getClass().getSimpleName() : getClass() .getSimpleName() + "[" + file + "]"; } /** * Convenience for {@link #write(Object[])} * * @param line */ public void write(List line) { write(line.toArray()); } /** * Write out a row. * * @param objects * These will be converted by {@link String#valueOf(Object)}, * with escaping of the delimiter and the escape char. Quotes: * added if set, otherwise line-breaks are converted into spaces. */ public void write(Object... strings) { // defend against accidentally routing to the wrong method if (strings.length == 1 && strings[0].getClass().isArray()) {// instanceof String[]) { List<Object> array = Containers.asList(strings[0]); write(array); return; } String[] ss = new String[strings.length]; for (int i = 0; i < strings.length; i++) { ss[i] = strings[i] == null ? null : String.valueOf(strings[i]); } write(ss); } /** * Write out a row. * * @param objects * These will be escaping for the delimiter and the escape char. * Quotes: added if set, otherwise line-breaks are converted into * spaces.<br> * No checking is done on the line-length. Can be length 0 to * output an empty line. */ public void write(String... strings) { linesWritten++; try { // empty line? if (strings.length == 0) { out.append(LINEEND); return; } StringBuilder sb = new StringBuilder(); for (int i = 0, n = strings.length; i < n; i++) { String si = strings[i] == null ? 
"" : strings[i]; // TODO: Add an option to suppress in-field line breaks // NB: Line breaking within a quote is okay per the standard // If field contains the delimiter, quote-char, newline, or // comment-char it must be quoted if (si.indexOf(spec.delimiter) != -1 || si.indexOf(spec.quote) != -1 || si.indexOf('\n') != -1 || (spec.comment != 0 && si.indexOf(spec.comment) != -1)) { // Quote character must be replaced by double quote si = si.replace(String.valueOf(spec.quote), quotedQuote); si = spec.quote + si + spec.quote; } sb.append(si); sb.append(spec.delimiter); } // remove final delimiter StrUtils.pop(sb, 1); sb.append(LINEEND); // write out.append(sb); } catch (IOException ex) { throw new WrappedException(ex); } } public void writeComment(String comment) { if (spec.comment == 0) throw new IllegalStateException( "You must specify a comment marker before writing comments"); // ?? if (comment.startsWith(String.valueOf(spec.comment))) { comment = comment.substring(1); } try { out.append(spec.comment); out.append(' '); out.append(comment); out.append(LINEEND); } catch (IOException e) { throw Utils.runtime(e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdmodel.graphics.image; import java.awt.Transparency; import java.awt.color.ColorSpace; import java.awt.color.ICC_ColorSpace; import java.awt.image.BufferedImage; import java.awt.image.ColorConvertOp; import java.awt.image.WritableRaster; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Iterator; import javax.imageio.IIOImage; import javax.imageio.ImageIO; import javax.imageio.ImageReader; import javax.imageio.ImageTypeSpecifier; import javax.imageio.ImageWriter; import javax.imageio.metadata.IIOMetadata; import javax.imageio.plugins.jpeg.JPEGImageWriteParam; import javax.imageio.stream.ImageInputStream; import javax.imageio.stream.ImageOutputStream; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.filter.MissingImageReaderException; import org.apache.pdfbox.io.IOUtils; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace; import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceCMYK; import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceGray; import 
org.apache.pdfbox.pdmodel.graphics.color.PDDeviceRGB;
import org.w3c.dom.Element;

/**
 * Factory for creating a PDImageXObject containing a JPEG compressed image.
 * @author John Hewson
 */
public final class JPEGFactory
{
    // utility class: no instances
    private JPEGFactory()
    {
    }

    /**
     * Creates a new JPEG Image XObject from an input stream containing JPEG data.
     *
     * The input stream data will be preserved and embedded in the PDF file without modification.
     * @param document the document where the image will be created
     * @param stream a stream of JPEG data
     * @return a new Image XObject
     *
     * @throws IOException if the input stream cannot be read
     */
    public static PDImageXObject createFromStream(PDDocument document, InputStream stream)
            throws IOException
    {
        // copy stream — the bytes are buffered so they can be both decoded
        // (to read width/height/colorspace) and embedded verbatim
        ByteArrayInputStream byteStream = new ByteArrayInputStream(IOUtils.toByteArray(stream));

        // read image
        BufferedImage awtImage = readJPEG(byteStream);
        // rewind so the XObject embeds the stream from the beginning
        byteStream.reset();

        // create Image XObject from stream
        PDImageXObject pdImage = new PDImageXObject(document, byteStream,
                COSName.DCT_DECODE, awtImage.getWidth(), awtImage.getHeight(),
                awtImage.getColorModel().getComponentSize(0),
                getColorSpaceFromAWT(awtImage));

        // no alpha — raw JPEG data cannot carry an alpha channel
        if (awtImage.getColorModel().hasAlpha())
        {
            throw new UnsupportedOperationException("alpha channel not implemented");
        }

        return pdImage;
    }

    // Decodes the JPEG header/data with a raster-capable ImageIO reader.
    private static BufferedImage readJPEG(InputStream stream) throws IOException
    {
        // find suitable image reader — one that can read raw rasters
        // (needed for e.g. CMYK JPEGs)
        Iterator readers = ImageIO.getImageReadersByFormatName("JPEG");
        ImageReader reader = null;
        while (readers.hasNext())
        {
            reader = (ImageReader) readers.next();
            if (reader.canReadRaster())
            {
                break;
            }
        }

        if (reader == null)
        {
            throw new MissingImageReaderException("Cannot read JPEG image: " +
                    "a suitable JAI I/O image filter is not installed");
        }

        ImageInputStream iis = null;
        try
        {
            iis = ImageIO.createImageInputStream(stream);
            reader.setInput(iis);
            // avoid temp-file caching for this in-memory decode
            ImageIO.setUseCache(false);
            return reader.read(0);
        }
        finally
        {
            if (iis != null)
            {
                iis.close();
            }
            reader.dispose();
        }
    }

    /**
     * Creates a new JPEG Image XObject from a Buffered Image.
     * @param document the document where the image will be created
     * @param image the buffered image to embed
     * @return a new Image XObject
     * @throws IOException if the JPEG data cannot be written
     */
    public static PDImageXObject createFromImage(PDDocument document, BufferedImage image)
            throws IOException
    {
        // default quality 0.75
        return createFromImage(document, image, 0.75f);
    }

    /**
     * Creates a new JPEG Image XObject from a Buffered Image and a given quality.
     * The image will be created at 72 DPI.
     * @param document the document where the image will be created
     * @param image the buffered image to embed
     * @param quality the desired JPEG compression quality
     * @return a new Image XObject
     * @throws IOException if the JPEG data cannot be written
     */
    public static PDImageXObject createFromImage(PDDocument document, BufferedImage image,
                                                 float quality) throws IOException
    {
        return createFromImage(document, image, quality, 72);
    }

    /**
     * Creates a new JPEG Image XObject from a Buffered Image, a given quality and DPI.
     * @param document the document where the image will be created
     * @param image the buffered image to embed
     * @param quality the desired JPEG compression quality
     * @param dpi the desired DPI (resolution) of the JPEG
     * @return a new Image XObject
     * @throws IOException if the JPEG data cannot be written
     */
    public static PDImageXObject createFromImage(PDDocument document, BufferedImage image,
                                                 float quality, int dpi) throws IOException
    {
        return createJPEG(document, image, quality, dpi);
    }

    // returns the alpha channel of an image
    private static BufferedImage getAlphaImage(BufferedImage image) throws IOException
    {
        if (!image.getColorModel().hasAlpha())
        {
            return null;
        }
        if (image.getTransparency() == Transparency.BITMASK)
        {
            throw new UnsupportedOperationException("BITMASK Transparency JPEG compression is not" +
                    " useful, use LosslessImageFactory instead");
        }
        WritableRaster alphaRaster = image.getAlphaRaster();
        if (alphaRaster == null)
        {
            // happens sometimes (PDFBOX-2654) despite colormodel claiming to have alpha
            return null;
        }
        // extract the alpha samples into a standalone grayscale image
        BufferedImage alphaImage = new BufferedImage(image.getWidth(), image.getHeight(),
                BufferedImage.TYPE_BYTE_GRAY);
        alphaImage.setData(alphaRaster);
        return alphaImage;
    }

    // Creates an Image XObject from a Buffered Image using JAI Image I/O
    private static PDImageXObject createJPEG(PDDocument document, BufferedImage image,
                                             float quality, int dpi) throws IOException
    {
        // extract alpha channel (if any)
        BufferedImage awtColorImage = getColorImage(image);
        BufferedImage awtAlphaImage = getAlphaImage(image);

        // create XObject — the color channels are JPEG-encoded into a byte buffer
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        encodeImageToJPEGStream(awtColorImage, quality, dpi, baos);
        ByteArrayInputStream byteStream = new ByteArrayInputStream(baos.toByteArray());

        PDImageXObject pdImage = new PDImageXObject(document, byteStream,
                COSName.DCT_DECODE, awtColorImage.getWidth(), awtColorImage.getHeight(),
                awtColorImage.getColorModel().getComponentSize(0),
                getColorSpaceFromAWT(awtColorImage));

        // alpha -> soft mask: the alpha channel becomes a separate grayscale
        // JPEG attached as the image's SMask
        if (awtAlphaImage != null)
        {
            PDImage xAlpha = JPEGFactory.createFromImage(document, awtAlphaImage, quality);
            pdImage.getCOSObject().setItem(COSName.SMASK, xAlpha);
        }

        return pdImage;
    }

    // Encodes the image as JPEG with explicit quality, and stamps the JFIF
    // APP0 segment with the requested DPI via the ImageIO metadata tree.
    private static void encodeImageToJPEGStream(BufferedImage image, float quality, int dpi,
                                                OutputStream out) throws IOException
    {
        // encode to JPEG
        ImageOutputStream ios = null;
        ImageWriter imageWriter = null;
        try
        {
            // find JAI writer
            imageWriter = ImageIO.getImageWritersBySuffix("jpeg").next();
            ios = ImageIO.createImageOutputStream(out);
            imageWriter.setOutput(ios);

            // add compression
            JPEGImageWriteParam jpegParam = (JPEGImageWriteParam)imageWriter.getDefaultWriteParam();
            jpegParam.setCompressionMode(JPEGImageWriteParam.MODE_EXPLICIT);
            jpegParam.setCompressionQuality(quality);

            // add metadata
            ImageTypeSpecifier imageTypeSpecifier = new ImageTypeSpecifier(image);
            IIOMetadata data = imageWriter.getDefaultImageMetadata(imageTypeSpecifier, jpegParam);
            // "javax_imageio_jpeg_image_1.0" is the ImageIO native JPEG metadata format
            Element tree = (Element)data.getAsTree("javax_imageio_jpeg_image_1.0");
            Element jfif = (Element)tree.getElementsByTagName("app0JFIF").item(0);
            jfif.setAttribute("Xdensity", Integer.toString(dpi));
            jfif.setAttribute("Ydensity", Integer.toString(dpi));
            jfif.setAttribute("resUnits", "1"); // 1 = dots/inch

            // write
            imageWriter.write(data, new IIOImage(image, null, null), jpegParam);
        }
        finally
        {
            // clean up
            // NOTE(review): this closes the caller-supplied stream as well —
            // only the in-file caller passes a ByteArrayOutputStream, so it is
            // harmless here, but confirm before reusing with external streams.
            IOUtils.closeQuietly(out);
            if (ios != null)
            {
                ios.close();
            }
            if (imageWriter != null)
            {
                imageWriter.dispose();
            }
        }
    }

    // returns a PDColorSpace for a given BufferedImage
    private static PDColorSpace getColorSpaceFromAWT(BufferedImage awtImage)
    {
        if (awtImage.getColorModel().getNumComponents() == 1)
        {
            // 256 color (gray) JPEG
            return PDDeviceGray.INSTANCE;
        }

        ColorSpace awtColorSpace = awtImage.getColorModel().getColorSpace();
        if (awtColorSpace instanceof ICC_ColorSpace && !awtColorSpace.isCS_sRGB())
        {
            throw new UnsupportedOperationException("ICC color spaces not implemented");
        }

        switch (awtColorSpace.getType())
        {
            case ColorSpace.TYPE_RGB:
                return PDDeviceRGB.INSTANCE;
            case ColorSpace.TYPE_GRAY:
                return PDDeviceGray.INSTANCE;
            case ColorSpace.TYPE_CMYK:
                return PDDeviceCMYK.INSTANCE;
            default:
                throw new UnsupportedOperationException("color space not implemented: "
                        + awtColorSpace.getType());
        }
    }

    // returns the color channels of an image
    private static BufferedImage getColorImage(BufferedImage image)
    {
        if (!image.getColorModel().hasAlpha())
        {
            return image;
        }

        if (image.getColorModel().getColorSpace().getType() != ColorSpace.TYPE_RGB)
        {
            throw new UnsupportedOperationException("only RGB color spaces are implemented");
        }

        // create an RGB image without alpha
        //BEWARE: the previous solution in the history
        // g.setComposite(AlphaComposite.Src) and g.drawImage()
        // didn't work properly for TYPE_4BYTE_ABGR.
        // alpha values of 0 result in a black dest pixel!!!
        BufferedImage rgbImage = new BufferedImage(
                image.getWidth(),
                image.getHeight(),
                BufferedImage.TYPE_3BYTE_BGR);
        return new ColorConvertOp(null).filter(image, rgbImage);
    }
}
// Copyright (C) 2010 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.reviewdb.client; import com.google.gwtorm.client.Column; /** Diff formatting preferences of an account */ public class AccountDiffPreference { /** Default number of lines of context. */ public static final short DEFAULT_CONTEXT = 10; /** Context setting to display the entire file. */ public static final short WHOLE_FILE_CONTEXT = -1; /** Typical valid choices for the default context setting. 
*/ public static final short[] CONTEXT_CHOICES = {3, 10, 25, 50, 75, 100, WHOLE_FILE_CONTEXT}; public static enum Whitespace implements CodedEnum { IGNORE_NONE('N'), // IGNORE_SPACE_AT_EOL('E'), // IGNORE_SPACE_CHANGE('S'), // IGNORE_ALL_SPACE('A'); private final char code; private Whitespace(final char c) { code = c; } public char getCode() { return code; } public static Whitespace forCode(final char c) { for (final Whitespace s : Whitespace.values()) { if (s.code == c) { return s; } } return null; } } public static enum Theme { // Light themes DEFAULT, ECLIPSE, ELEGANT, NEAT, // Dark themes MIDNIGHT, NIGHT, TWILIGHT; public boolean isDark() { switch (this) { case MIDNIGHT: case NIGHT: case TWILIGHT: return true; default: return false; } } } public static AccountDiffPreference createDefault(Account.Id accountId) { AccountDiffPreference p = new AccountDiffPreference(accountId); p.setIgnoreWhitespace(Whitespace.IGNORE_NONE); p.setTheme(Theme.DEFAULT); p.setTabSize(8); p.setLineLength(100); p.setSyntaxHighlighting(true); p.setShowWhitespaceErrors(true); p.setShowLineEndings(true); p.setIntralineDifference(true); p.setShowTabs(true); p.setContext(DEFAULT_CONTEXT); p.setManualReview(false); return p; } @Column(id = 1, name = Column.NONE) protected Account.Id accountId; @Column(id = 2) protected char ignoreWhitespace; @Column(id = 3) protected int tabSize; @Column(id = 4) protected int lineLength; @Column(id = 5) protected boolean syntaxHighlighting; @Column(id = 6) protected boolean showWhitespaceErrors; @Column(id = 7) protected boolean intralineDifference; @Column(id = 8) protected boolean showTabs; /** Number of lines of context when viewing a patch. 
*/ @Column(id = 9) protected short context; @Column(id = 10) protected boolean skipDeleted; @Column(id = 11) protected boolean skipUncommented; @Column(id = 12) protected boolean expandAllComments; @Column(id = 13) protected boolean retainHeader; @Column(id = 14) protected boolean manualReview; @Column(id = 15) protected boolean showLineEndings; @Column(id = 16) protected boolean hideTopMenu; @Column(id = 17) protected boolean hideLineNumbers; @Column(id = 18) protected boolean renderEntireFile; @Column(id = 19, length = 20, notNull = false) protected String theme; protected AccountDiffPreference() { } public AccountDiffPreference(Account.Id accountId) { this.accountId = accountId; } public AccountDiffPreference(AccountDiffPreference p) { this.accountId = p.accountId; this.ignoreWhitespace = p.ignoreWhitespace; this.tabSize = p.tabSize; this.lineLength = p.lineLength; this.syntaxHighlighting = p.syntaxHighlighting; this.showWhitespaceErrors = p.showWhitespaceErrors; this.showLineEndings = p.showLineEndings; this.intralineDifference = p.intralineDifference; this.showTabs = p.showTabs; this.skipDeleted = p.skipDeleted; this.skipUncommented = p.skipUncommented; this.expandAllComments = p.expandAllComments; this.context = p.context; this.retainHeader = p.retainHeader; this.manualReview = p.manualReview; this.hideTopMenu = p.hideTopMenu; this.hideLineNumbers = p.hideLineNumbers; this.renderEntireFile = p.renderEntireFile; } public Account.Id getAccountId() { return accountId; } public Whitespace getIgnoreWhitespace() { return Whitespace.forCode(ignoreWhitespace); } public void setIgnoreWhitespace(Whitespace ignoreWhitespace) { this.ignoreWhitespace = ignoreWhitespace.getCode(); } public int getTabSize() { return tabSize; } public void setTabSize(int tabSize) { this.tabSize = tabSize; } public int getLineLength() { return lineLength; } public void setLineLength(int lineLength) { this.lineLength = lineLength; } public boolean isSyntaxHighlighting() { return 
syntaxHighlighting; } public void setSyntaxHighlighting(boolean syntaxHighlighting) { this.syntaxHighlighting = syntaxHighlighting; } public boolean isShowWhitespaceErrors() { return showWhitespaceErrors; } public void setShowWhitespaceErrors(boolean showWhitespaceErrors) { this.showWhitespaceErrors = showWhitespaceErrors; } public boolean isShowLineEndings() { return showLineEndings; } public void setShowLineEndings(boolean showLineEndings) { this.showLineEndings = showLineEndings; } public boolean isIntralineDifference() { return intralineDifference; } public void setIntralineDifference(boolean intralineDifference) { this.intralineDifference = intralineDifference; } public boolean isShowTabs() { return showTabs; } public void setShowTabs(boolean showTabs) { this.showTabs = showTabs; } /** Get the number of lines of context when viewing a patch. */ public short getContext() { return context; } /** Set the number of lines of context when viewing a patch. */ public void setContext(final short context) { assert 0 <= context || context == WHOLE_FILE_CONTEXT; this.context = context; } public boolean isSkipDeleted() { return skipDeleted; } public void setSkipDeleted(boolean skip) { skipDeleted = skip; } public boolean isSkipUncommented() { return skipUncommented; } public void setSkipUncommented(boolean skip) { skipUncommented = skip; } public boolean isExpandAllComments() { return expandAllComments; } public void setExpandAllComments(boolean expand) { expandAllComments = expand; } public boolean isRetainHeader() { return retainHeader; } public void setRetainHeader(boolean retain) { retainHeader = retain; } public boolean isManualReview() { return manualReview; } public void setManualReview(boolean manual) { manualReview = manual; } public boolean isHideTopMenu() { return hideTopMenu; } public void setHideTopMenu(boolean hide) { hideTopMenu = hide; } public boolean isHideLineNumbers() { return hideLineNumbers; } public void setHideLineNumbers(boolean hide) { 
hideLineNumbers = hide; } public boolean isRenderEntireFile() { return renderEntireFile; } public void setRenderEntireFile(boolean render) { renderEntireFile = render; } public Theme getTheme() { return theme != null ? Theme.valueOf(theme) : null; } public void setTheme(Theme theme) { this.theme = theme != null ? theme.name() : null; } }
package org.hisp.dhis.preheat;

/*
 * Copyright (c) 2004-2016, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.attribute.Attribute;
import org.hisp.dhis.attribute.AttributeService;
import org.hisp.dhis.attribute.AttributeValue;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.legend.LegendSet;
import org.hisp.dhis.option.OptionSet;
import org.hisp.dhis.render.RenderFormat;
import org.hisp.dhis.render.RenderService;
import org.hisp.dhis.user.User;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Integration tests for the preheat service: reference collection,
 * preheating by UID/CODE, and connecting preheated references back
 * onto imported objects.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class PreheatServiceTest
    extends DhisSpringTest
{
    @Autowired
    private PreheatService preheatService;

    @Autowired
    private IdentifiableObjectManager manager;

    @Autowired
    private RenderService _renderService;

    @Autowired
    private AttributeService attributeService;

    // Expose the autowired render service through the base-class field so
    // the fromJson()/fromMetadata() helpers used below can find it.
    @Override
    protected void setUpTest() throws Exception
    {
        renderService = _renderService;
    }

    // PreheatMode.ALL with no classes configured should be rejected.
    @Ignore
    @Test( expected = PreheatException.class )
    public void testValidateAllFail()
    {
        PreheatParams params = new PreheatParams().setPreheatMode( PreheatMode.ALL );
        preheatService.validate( params );
    }

    // PreheatMode.ALL with at least one class passes validation.
    @Test
    public void testValidateAll()
    {
        PreheatParams params = new PreheatParams().setPreheatMode( PreheatMode.ALL );
        params.getClasses().add( DataElement.class );
        preheatService.validate( params );
    }

    // A fresh data element only references its (default) category combo.
    @Test
    public void testCollectNoObjectsDE()
    {
        DataElement dataElement = createDataElement( 'A' );
        Map<Class<? extends IdentifiableObject>, Set<String>> references = preheatService.collectReferences( dataElement ).get( PreheatIdentifier.UID );

        assertFalse( references.containsKey( OptionSet.class ) );
        assertFalse( references.containsKey( LegendSet.class ) );
        assertTrue( references.containsKey( DataElementCategoryCombo.class ) );
        assertFalse( references.containsKey( User.class ) );
    }

    // An empty data element group references nothing.
    @Test
    public void testCollectNoObjectsDEG()
    {
        DataElementGroup dataElementGroup = createDataElementGroup( 'A' );
        Map<Class<? extends IdentifiableObject>, Set<String>> references = preheatService.collectReferences( dataElementGroup ).get( PreheatIdentifier.UID );

        assertFalse( references.containsKey( DataElement.class ) );
        assertFalse( references.containsKey( User.class ) );
    }

    // UID references of a group's members and owner are collected.
    @Test
    public void testCollectReferenceUidDEG1()
    {
        DataElementGroup deg1 = createDataElementGroup( 'A' );

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );
        User user = createUser( 'A' );

        deg1.addDataElement( de1 );
        deg1.addDataElement( de2 );
        deg1.addDataElement( de3 );

        deg1.setUser( user );

        Map<Class<? extends IdentifiableObject>, Set<String>> references = preheatService.collectReferences( deg1 )
            .get( PreheatIdentifier.UID );

        assertTrue( references.containsKey( DataElement.class ) );
        assertTrue( references.containsKey( User.class ) );

        assertEquals( 3, references.get( DataElement.class ).size() );
        assertEquals( 1, references.get( User.class ).size() );

        assertTrue( references.get( DataElement.class ).contains( de1.getUid() ) );
        assertTrue( references.get( DataElement.class ).contains( de2.getUid() ) );
        assertTrue( references.get( DataElement.class ).contains( de3.getUid() ) );

        assertTrue( references.get( User.class ).contains( user.getUid() ) );
    }

    // UID references are merged across multiple scanned objects.
    @Test
    public void testCollectReferenceUidDEG2()
    {
        DataElementGroup deg1 = createDataElementGroup( 'A' );
        DataElementGroup deg2 = createDataElementGroup( 'B' );

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        deg1.addDataElement( de1 );
        deg1.addDataElement( de2 );
        deg2.addDataElement( de3 );

        Map<Class<? extends IdentifiableObject>, Set<String>> references = preheatService.collectReferences( Lists.newArrayList( deg1, deg2 ) ).get( PreheatIdentifier.UID );

        assertTrue( references.containsKey( DataElement.class ) );
        assertEquals( 3, references.get( DataElement.class ).size() );
        assertTrue( references.get( DataElement.class ).contains( de1.getUid() ) );
        assertTrue( references.get( DataElement.class ).contains( de2.getUid() ) );
        assertTrue( references.get( DataElement.class ).contains( de3.getUid() ) );
    }

    // Same as the UID variant above, but collected by CODE identifier.
    @Test
    public void testCollectReferenceCodeDEG1()
    {
        DataElementGroup dataElementGroup = createDataElementGroup( 'A' );

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );
        User user = createUser( 'A' );

        dataElementGroup.addDataElement( de1 );
        dataElementGroup.addDataElement( de2 );
        dataElementGroup.addDataElement( de3 );

        dataElementGroup.setUser( user );

        Map<Class<? extends IdentifiableObject>, Set<String>> references = preheatService.collectReferences( dataElementGroup )
            .get( PreheatIdentifier.CODE );

        assertTrue( references.containsKey( DataElement.class ) );
        assertTrue( references.containsKey( User.class ) );

        assertEquals( 3, references.get( DataElement.class ).size() );
        assertEquals( 1, references.get( User.class ).size() );

        assertTrue( references.get( DataElement.class ).contains( de1.getCode() ) );
        assertTrue( references.get( DataElement.class ).contains( de2.getCode() ) );
        assertTrue( references.get( DataElement.class ).contains( de3.getCode() ) );

        assertTrue( references.get( User.class ).contains( user.getCode() ) );
    }

    // CODE references merged across multiple scanned objects.
    @Test
    public void testCollectReferenceCodeDEG2()
    {
        DataElementGroup deg1 = createDataElementGroup( 'A' );
        DataElementGroup deg2 = createDataElementGroup( 'B' );

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        deg1.addDataElement( de1 );
        deg1.addDataElement( de2 );
        deg2.addDataElement( de3 );

        Map<Class<? extends IdentifiableObject>, Set<String>> references = preheatService.collectReferences( Lists.newArrayList( deg1, deg2 ) ).get( PreheatIdentifier.CODE );

        assertTrue( references.containsKey( DataElement.class ) );
        assertEquals( 3, references.get( DataElement.class ).size() );
        assertTrue( references.get( DataElement.class ).contains( de1.getCode() ) );
        assertTrue( references.get( DataElement.class ).contains( de2.getCode() ) );
        assertTrue( references.get( DataElement.class ).contains( de3.getCode() ) );
    }

    // PreheatMode.ALL with explicit classes loads every saved object by UID.
    @Test
    @SuppressWarnings( "unchecked" )
    public void testPreheatAllUID()
    {
        DataElementGroup dataElementGroup = new DataElementGroup( "DataElementGroupA" );
        dataElementGroup.setAutoFields();

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );

        dataElementGroup.addDataElement( de1 );
        dataElementGroup.addDataElement( de2 );
        dataElementGroup.addDataElement( de3 );

        dataElementGroup.setUser( user );
        manager.save( dataElementGroup );

        PreheatParams params = new PreheatParams();
        params.setPreheatMode( PreheatMode.ALL );
        params.setClasses( Sets.newHashSet( DataElement.class, DataElementGroup.class, User.class ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertFalse( preheat.isEmpty() );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElement.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElementGroup.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, User.class ) );

        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de1.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de2.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de3.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElementGroup.class, dataElementGroup.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, User.class, user.getUid() ) );
    }

    // PreheatMode.ALL with no class filter still loads all metadata by UID.
    @Test
    public void testPreheatAllMetadataUID()
    {
        DataElementGroup dataElementGroup = new DataElementGroup( "DataElementGroupA" );
        dataElementGroup.setAutoFields();

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );

        dataElementGroup.addDataElement( de1 );
        dataElementGroup.addDataElement( de2 );
        dataElementGroup.addDataElement( de3 );

        dataElementGroup.setUser( user );
        manager.save( dataElementGroup );

        PreheatParams params = new PreheatParams();
        params.setPreheatMode( PreheatMode.ALL );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertFalse( preheat.isEmpty() );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElement.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElementGroup.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, User.class ) );

        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de1.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de2.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de3.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElementGroup.class, dataElementGroup.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, User.class, user.getUid() ) );
    }

    // PreheatMode.REFERENCE only loads the explicitly referenced objects (UID).
    @Test
    public void testPreheatReferenceUID()
    {
        DataElementGroup dataElementGroup = new DataElementGroup( "DataElementGroupA" );
        dataElementGroup.setAutoFields();

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );

        dataElementGroup.addDataElement( de1 );
        dataElementGroup.addDataElement( de2 );
        dataElementGroup.addDataElement( de3 );

        dataElementGroup.setUser( user );
        manager.save( dataElementGroup );

        PreheatParams params = new PreheatParams();
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElement.class, Lists.newArrayList( de1, de2 ) );
        params.getObjects().put( User.class, Lists.newArrayList( user ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertFalse( preheat.isEmpty() );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElement.class ) );
        assertTrue( preheat.isEmpty( PreheatIdentifier.UID, DataElementGroup.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, User.class ) );

        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de1.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de2.getUid() ) );
        assertFalse( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, de3.getUid() ) );
        assertFalse( preheat.containsKey( PreheatIdentifier.UID, DataElementGroup.class, dataElementGroup.getUid() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, User.class, user.getUid() ) );
    }

    // Same as above, but keyed by CODE identifier.
    @Test
    public void testPreheatReferenceCODE()
    {
        DataElementGroup dataElementGroup = new DataElementGroup( "DataElementGroupA" );
        dataElementGroup.setAutoFields();

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );

        dataElementGroup.addDataElement( de1 );
        dataElementGroup.addDataElement( de2 );
        dataElementGroup.addDataElement( de3 );

        dataElementGroup.setUser( user );
        manager.save( dataElementGroup );

        PreheatParams params = new PreheatParams();
        params.setPreheatIdentifier( PreheatIdentifier.CODE );
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElement.class, Lists.newArrayList( de1, de2 ) );
        params.getObjects().put( User.class, Lists.newArrayList( user ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertFalse( preheat.isEmpty() );
        assertFalse( preheat.isEmpty( PreheatIdentifier.CODE ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.CODE, DataElement.class ) );
        assertTrue( preheat.isEmpty( PreheatIdentifier.CODE, DataElementGroup.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.CODE, User.class ) );

        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, DataElement.class, de1.getCode() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, DataElement.class, de2.getCode() ) );
        assertFalse( preheat.containsKey( PreheatIdentifier.CODE, DataElement.class, de3.getCode() ) );
        assertFalse( preheat.containsKey( PreheatIdentifier.CODE, DataElementGroup.class, dataElementGroup.getCode() ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, User.class, user.getCode() ) );
    }

    // Scanning a JSON-deserialized group resolves its UID references.
    @Test
    public void testPreheatReferenceWithScanUID()
    {
        DataElementGroup dataElementGroup = fromJson( "preheat/degAUidRef.json", DataElementGroup.class );
        defaultSetup();

        PreheatParams params = new PreheatParams();
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElementGroup.class, Lists.newArrayList( dataElementGroup ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertFalse( preheat.isEmpty() );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElement.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, DataElementGroup.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.UID, User.class ) );

        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, "deabcdefghA" ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, "deabcdefghB" ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, DataElement.class, "deabcdefghC" ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.UID, User.class, "userabcdefA" ) );
    }

    // Scanning a JSON-deserialized group resolves its CODE references.
    @Test
    public void testPreheatReferenceWithScanCODE()
    {
        DataElementGroup dataElementGroup = fromJson( "preheat/degACodeRef.json", DataElementGroup.class );
        defaultSetup();

        PreheatParams params = new PreheatParams();
        params.setPreheatIdentifier( PreheatIdentifier.CODE );
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElementGroup.class, Lists.newArrayList( dataElementGroup ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertFalse( preheat.isEmpty() );
        assertFalse( preheat.isEmpty( PreheatIdentifier.CODE ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.CODE, DataElement.class ) );
        assertTrue( preheat.isEmpty( PreheatIdentifier.CODE, DataElementGroup.class ) );
        assertFalse( preheat.isEmpty( PreheatIdentifier.CODE, User.class ) );

        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, DataElement.class, "DataElementCodeA" ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, DataElement.class, "DataElementCodeB" ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, DataElement.class, "DataElementCodeC" ) );
        assertTrue( preheat.containsKey( PreheatIdentifier.CODE, User.class, "UserCodeA" ) );
    }

    // Invalid UID references produce one error report per broken reference.
    @Test
    public void testPreheatReferenceCheckUID()
    {
        DataElementGroup dataElementGroup = fromJson( "preheat/degAUidRef_invalid.json", DataElementGroup.class );
        defaultSetup();

        PreheatParams params = new PreheatParams();
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElementGroup.class, Lists.newArrayList( dataElementGroup ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        List<PreheatErrorReport> referenceErrors = preheatService.checkReferences( DataElementGroup.class, dataElementGroup, preheat, PreheatIdentifier.UID );

        assertEquals( 3, referenceErrors.size() );

        assertEquals( PreheatIdentifier.UID, referenceErrors.get( 0 ).getPreheatIdentifier() );
        assertEquals( PreheatIdentifier.UID, referenceErrors.get( 1 ).getPreheatIdentifier() );
        assertEquals( PreheatIdentifier.UID, referenceErrors.get( 2 ).getPreheatIdentifier() );
    }

    // connectReferences replaces UID stubs with the full persisted objects.
    @Test
    public void testPreheatReferenceConnectUID()
    {
        DataElementGroup dataElementGroup = fromJson( "preheat/degAUidRef.json", DataElementGroup.class );
        defaultSetup();

        PreheatParams params = new PreheatParams();
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElementGroup.class, Lists.newArrayList( dataElementGroup ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        preheatService.connectReferences( dataElementGroup, preheat, PreheatIdentifier.UID );

        List<DataElement> members = new ArrayList<>( dataElementGroup.getMembers() );

        assertEquals( "DataElementA", members.get( 0 ).getName() );
        assertEquals( "DataElementCodeA", members.get( 0 ).getCode() );
        assertEquals( "DataElementB", members.get( 1 ).getName() );
        assertEquals( "DataElementCodeB", members.get( 1 ).getCode() );
        assertEquals( "DataElementC", members.get( 2 ).getName() );
        assertEquals( "DataElementCodeC", members.get( 2 ).getCode() );

        assertEquals( "FirstNameA", dataElementGroup.getUser().getFirstName() );
        assertEquals( "SurnameA", dataElementGroup.getUser().getSurname() );
        assertEquals( "UserCodeA", dataElementGroup.getUser().getCode() );
    }

    // connectReferences replaces CODE stubs with the full persisted objects.
    @Test
    public void testPreheatReferenceConnectCODE()
    {
        DataElementGroup dataElementGroup = fromJson( "preheat/degACodeRef.json", DataElementGroup.class );
        defaultSetup();

        PreheatParams params = new PreheatParams();
        params.setPreheatIdentifier( PreheatIdentifier.CODE );
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElementGroup.class, Lists.newArrayList( dataElementGroup ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        preheatService.connectReferences( dataElementGroup, preheat, PreheatIdentifier.CODE );

        List<DataElement> members = new ArrayList<>( dataElementGroup.getMembers() );

        assertEquals( "DataElementA", members.get( 0 ).getName() );
        assertEquals( "DataElementCodeA", members.get( 0 ).getCode() );
        assertEquals( "DataElementB", members.get( 1 ).getName() );
        assertEquals( "DataElementCodeB", members.get( 1 ).getCode() );
        assertEquals( "DataElementC", members.get( 2 ).getName() );
        assertEquals( "DataElementCodeC", members.get( 2 ).getCode() );

        assertEquals( "FirstNameA", dataElementGroup.getUser().getFirstName() );
        assertEquals( "SurnameA", dataElementGroup.getUser().getSurname() );
        assertEquals( "UserCodeA", dataElementGroup.getUser().getCode() );
    }

    // connectReferences with AUTO picks the identifier per reference.
    @Test
    public void testPreheatReferenceConnectAUTO()
    {
        DataElementGroup dataElementGroup = fromJson( "preheat/degAAutoRef.json", DataElementGroup.class );
        defaultSetup();

        PreheatParams params = new PreheatParams();
        params.setPreheatIdentifier( PreheatIdentifier.AUTO );
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.getObjects().put( DataElementGroup.class, Lists.newArrayList( dataElementGroup ) );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        preheatService.connectReferences( dataElementGroup, preheat, PreheatIdentifier.AUTO );

        List<DataElement> members = new ArrayList<>( dataElementGroup.getMembers() );

        assertEquals( "DataElementA", members.get( 0 ).getName() );
        assertEquals( "DataElementCodeA", members.get( 0 ).getCode() );
        assertEquals( "DataElementB", members.get( 1 ).getName() );
        assertEquals( "DataElementCodeB", members.get( 1 ).getCode() );
        assertEquals( "DataElementC", members.get( 2 ).getName() );
        assertEquals( "DataElementCodeC", members.get( 2 ).getCode() );

        assertEquals( "FirstNameA", dataElementGroup.getUser().getFirstName() );
        assertEquals( "SurnameA", dataElementGroup.getUser().getSurname() );
        assertEquals( "UserCodeA", dataElementGroup.getUser().getCode() );
    }

    /**
     * Fails with:
     * java.lang.ClassCastException: java.util.HashMap cannot be cast to java.util.Set at org.hisp.dhis.preheat.PreheatServiceTest.testPreheatWithAttributeValues(PreheatServiceTest.java:597)
     * @throws IOException
     */
    @Ignore
    @Test
    public void testPreheatWithAttributeValues() throws IOException
    {
        defaultSetupWithAttributes();

        Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> metadata = renderService.fromMetadata(
            new ClassPathResource( "preheat/dataset_with_sections.json" ).getInputStream(), RenderFormat.JSON );

        PreheatParams params = new PreheatParams();
        params.setPreheatIdentifier( PreheatIdentifier.AUTO );
        params.setPreheatMode( PreheatMode.REFERENCE );
        params.setObjects( metadata );

        preheatService.validate( params );
        Preheat preheat = preheatService.preheat( params );

        assertEquals( 1, preheat.getUniqueAttributeValues().get( DataElement.class ).size() );
        List<String> keys = new ArrayList<>( preheat.getUniqueAttributeValues().get( DataElement.class ).keySet() );
        assertEquals( 3, preheat.getUniqueAttributeValues().get( DataElement.class ).get( keys.get( 0 ) ).size() );

        assertFalse( preheat.getMandatoryAttributes().isEmpty() );
        assertEquals( 1, preheat.getMandatoryAttributes().get( DataElement.class ).size() );
    }

    // Persists three data elements (A, B, C) and one user (A) used by the
    // JSON-fixture based tests above.
    private void defaultSetup()
    {
        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );
    }

    // Same as defaultSetup(), but also attaches a unique, mandatory data
    // element attribute with a distinct value per data element.
    private void defaultSetupWithAttributes()
    {
        Attribute attribute = new Attribute( "AttributeA", ValueType.TEXT );
        attribute.setUnique( true );
        attribute.setMandatory( true );
        attribute.setDataElementAttribute( true );

        manager.save( attribute );

        AttributeValue attributeValue1 = new AttributeValue( "Value1", attribute );
        AttributeValue attributeValue2 = new AttributeValue( "Value2", attribute );
        AttributeValue attributeValue3 = new AttributeValue( "Value3", attribute );

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        attributeService.addAttributeValue( de1, attributeValue1 );
        attributeService.addAttributeValue( de2, attributeValue2 );
        attributeService.addAttributeValue( de3, attributeValue3 );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );
    }
}
/**
 * Package: MAG - VistA Imaging
 * WARNING: Per VHA Directive 2004-038, this routine should not be modified.
 * Date Created: Jan 4, 2013
 * Site Name:  Washington OI Field Office, Silver Spring, MD
 * Developer:  VHAISWWERFEJ
 * Description:
 *
 *        ;; +--------------------------------------------------------------------+
 *        ;; Property of the US Government.
 *        ;; No permission to copy or redistribute this software is given.
 *        ;; Use of unreleased versions of this software requires the user
 *        ;;  to execute a written test agreement with the VistA Imaging
 *        ;;  Development Office of the Department of Veterans Affairs,
 *        ;;  telephone (301) 734-0100.
 *        ;;
 *        ;; The Food and Drug Administration classifies this software as
 *        ;; a Class II medical device.  As such, it may not be changed
 *        ;; in any way. Modifications to this software may result in an
 *        ;; adulterated medical device under 21CFR820, the use of which
 *        ;; is considered to be a violation of US Federal Statutes.
 *        ;; +--------------------------------------------------------------------+
 */
package gov.va.med.imaging;

import gov.va.med.PatientIdentifierType;
import gov.va.med.SERIALIZATION_FORMAT;
import gov.va.med.URNFactory;
import org.junit.Test;
import static org.junit.Assert.*;

/**
 * Tests that {@link ImageURN} and {@link BhieImageURN} round-trip the optional
 * patient-identifier-type component (icn/dfn) through all serialization formats
 * (NATIVE, CDTP, RAW, VFTP), both when parsing incoming URN strings and when
 * building URNs programmatically.
 *
 * @author VHAISWWERFEJ
 */
public class ImageURNPatientIdentifierTest
{

	/**
	 * Parses BHIE image URNs (CDTP and RAW forms) and verifies:
	 * no identifier type means ICN by default, and an explicit [dfn]
	 * suffix is preserved across all serialization formats.
	 */
	@Test
	public void testParsingBhiePatientIdentifier()
	{
		try
		{
			// CDTP-encoded vaimage URN with no identifier type: parses as a
			// BhieImageURN whose type is null but defaults to ICN.
			ImageURN imageUrn = URNFactory.create("urn:vaimage:200-haims%2df7890fb1%2dc1ad%2d4606%2d99e5%2dcfe31e31ae65%3a1-[1006184063V088473][haims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3][US]", SERIALIZATION_FORMAT.CDTP, ImageURN.class);
			assertTrue(imageUrn instanceof BhieImageURN);
			BhieImageURN bhieImageUrn = (BhieImageURN)imageUrn;
			assertNull(bhieImageUrn.getPatientIdentifierType());
			assertEquals(PatientIdentifierType.icn, bhieImageUrn.getPatientIdentifierTypeOrDefault());

			// test another incoming format to be sure its good
			ImageURN anotherImageUrn = URNFactory.create("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1[1006184063V088473][haims-e15506fe-454f-4298-b328-93c34fd9c3e3][US]", SERIALIZATION_FORMAT.RAW, ImageURN.class);
			compareImageURNs(bhieImageUrn, anotherImageUrn);

			// every serialization format of the parsed URN must match the expected literal
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString());
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:200-haims%2df7890fb1%2dc1ad%2d4606%2d99e5%2dcfe31e31ae65%3a1-[1006184063V088473][haims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3][US]", bhieImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1[1006184063V088473][haims-e15506fe-454f-4298-b328-93c34fd9c3e3][US]", bhieImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1%5b1006184063V088473%5d%5bhaims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3%5d%5bUS%5d", bhieImageUrn.toString(SERIALIZATION_FORMAT.VFTP));

			// ---------------------------------------------------------------------------------------------------
			// do it again using a DFN
			imageUrn = URNFactory.create("urn:vaimage:200-haims%2df7890fb1%2dc1ad%2d4606%2d99e5%2dcfe31e31ae65%3a1-[12345][urn%3abhiestudy%3ahaims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3][CR][dfn]", SERIALIZATION_FORMAT.CDTP, ImageURN.class);
			assertTrue(imageUrn instanceof BhieImageURN);
			bhieImageUrn = (BhieImageURN)imageUrn;
			// explicit [dfn] suffix: the raw identifier type is now non-null
			assertNotNull(bhieImageUrn.getPatientIdentifierType());
			assertEquals(PatientIdentifierType.dfn, bhieImageUrn.getPatientIdentifierType());

			// test another incoming format to be sure its good
			anotherImageUrn = URNFactory.create("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1[12345][urn:bhiestudy:haims-e15506fe-454f-4298-b328-93c34fd9c3e3][CR][dfn]", SERIALIZATION_FORMAT.RAW, ImageURN.class);
			compareImageURNs(bhieImageUrn, anotherImageUrn);

			/*
			System.out.println("toString(): " + bhieImageUrn.toString());
			System.out.println("toString(NATIVE): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			System.out.println("toStringCDTP(): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			System.out.println("toString(RAW): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			System.out.println("toString(VFTP): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.VFTP));
			*/

			// the [dfn] marker must survive in CDTP, RAW and VFTP but not NATIVE
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString());
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:200-haims%2df7890fb1%2dc1ad%2d4606%2d99e5%2dcfe31e31ae65%3a1-[12345][urn%3abhiestudy%3ahaims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3][CR][dfn]", bhieImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1[12345][urn:bhiestudy:haims-e15506fe-454f-4298-b328-93c34fd9c3e3][CR][dfn]", bhieImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1%5b12345%5d%5burn%3abhiestudy%3ahaims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3%5d%5bCR%5d%5bdfn%5d", bhieImageUrn.toString(SERIALIZATION_FORMAT.VFTP));
		}
		catch(Exception ex)
		{
			fail(ex.getMessage());
		}
	}

	/**
	 * Asserts two ImageURNs are equal field-by-field and serialize identically
	 * in every format except VFTP (see inline note).
	 *
	 * @param imageUrn1 expected URN
	 * @param imageUrn2 actual URN parsed from a different serialization
	 */
	private void compareImageURNs(ImageURN imageUrn1, ImageURN imageUrn2)
	{
		assertEquals(imageUrn1, imageUrn2);
		assertEquals(imageUrn1.getImageId(), imageUrn2.getImageId());
		assertEquals(imageUrn1.getStudyId(), imageUrn2.getStudyId());
		assertEquals(imageUrn1.getPatientId(), imageUrn2.getPatientId());
		// compare the defaulted type (null collapses to icn) rather than the raw type
		assertEquals(imageUrn1.getPatientIdentifierTypeOrDefault(), imageUrn2.getPatientIdentifierTypeOrDefault());
		assertEquals(imageUrn1.getImageModality(), imageUrn2.getImageModality());
		assertEquals(imageUrn1.toString(), imageUrn2.toString());
		assertEquals(imageUrn1.toString(SERIALIZATION_FORMAT.CDTP), imageUrn2.toString(SERIALIZATION_FORMAT.CDTP));
		// vftp is weird...
		//assertEquals(imageUrn1.toString(SERIALIZATION_FORMAT.VFTP), imageUrn2.toString(SERIALIZATION_FORMAT.VFTP));
		assertEquals(imageUrn1.toString(SERIALIZATION_FORMAT.RAW), imageUrn2.toString(SERIALIZATION_FORMAT.RAW));
		assertEquals(imageUrn1.toString(SERIALIZATION_FORMAT.NATIVE), imageUrn2.toString(SERIALIZATION_FORMAT.NATIVE));
	}

	/**
	 * Builds a BhieImageURN programmatically: starts from a bare bhieimage URN,
	 * sets modality/study/patient, and checks serialization before and after
	 * setting an explicit DFN identifier type.
	 */
	@Test
	public void testCreatingBhiePatientIdentifier()
	{
		try
		{
			ImageURN imageUrn = URNFactory.create("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", ImageURN.class);
			assertTrue(imageUrn instanceof BhieImageURN);
			BhieImageURN bhieImageUrn = (BhieImageURN)imageUrn;
			// freshly created: all optional components are absent
			assertNull(bhieImageUrn.getPatientIdentifierType());
			assertNull(bhieImageUrn.getPatientId());
			assertNull(bhieImageUrn.getPatientIdentifier());
			assertNull(bhieImageUrn.getStudyId());
			assertNull(bhieImageUrn.getImageModality());
			bhieImageUrn.setImageModality("CR");
			bhieImageUrn.setStudyId("urn:bhiestudy:haims-e15506fe-454f-4298-b328-93c34fd9c3e3");
			bhieImageUrn.setPatientId("12345");
			assertNotNull(bhieImageUrn.getPatientId());
			assertNotNull(bhieImageUrn.getPatientIdentifier());
			assertEquals("12345", bhieImageUrn.getPatientId());
			assertEquals("CR", bhieImageUrn.getImageModality());
			assertEquals("urn:bhiestudy:haims-e15506fe-454f-4298-b328-93c34fd9c3e3", bhieImageUrn.getStudyId());
			// still null
			assertNull(bhieImageUrn.getPatientIdentifierType());
			// assume ICN by default
			assertEquals(PatientIdentifierType.icn, bhieImageUrn.getPatientIdentifierTypeOrDefault());
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString());
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:200-haims%2df7890fb1%2dc1ad%2d4606%2d99e5%2dcfe31e31ae65%3a1-[12345][urn%3abhiestudy%3ahaims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3][CR]", bhieImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1[12345][urn:bhiestudy:haims-e15506fe-454f-4298-b328-93c34fd9c3e3][CR]", bhieImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString(SERIALIZATION_FORMAT.VFTP));

			// now make the identifier type explicit and re-check every format
			bhieImageUrn.setPatientIdentifierType(PatientIdentifierType.dfn);
			assertNotNull(bhieImageUrn.getPatientIdentifierType());
			assertEquals(PatientIdentifierType.dfn, bhieImageUrn.getPatientIdentifierType());
			assertEquals("CR", bhieImageUrn.getImageModality());

			/*
			System.out.println("toString(): " + bhieImageUrn.toString());
			System.out.println("toString(NATIVE): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			System.out.println("toStringCDTP(): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			System.out.println("toString(RAW): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			System.out.println("toString(VFTP): " + bhieImageUrn.toString(SERIALIZATION_FORMAT.VFTP));
			*/

			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString());
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1", bhieImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:200-haims%2df7890fb1%2dc1ad%2d4606%2d99e5%2dcfe31e31ae65%3a1-[12345][urn%3abhiestudy%3ahaims%2de15506fe%2d454f%2d4298%2db328%2d93c34fd9c3e3][CR][dfn]", bhieImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1[12345][urn:bhiestudy:haims-e15506fe-454f-4298-b328-93c34fd9c3e3][CR][dfn]", bhieImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			// i don't really think this one is right but i don't think we are really using it either
			assertEquals("urn:bhieimage:haims-f7890fb1-c1ad-4606-99e5-cfe31e31ae65:1%5b%5d%5b%5d%5b%5d%5bdfn%5d", bhieImageUrn.toString(SERIALIZATION_FORMAT.VFTP));
		}
		catch(Exception ex)
		{
			fail(ex.getMessage());
		}
	}

	/**
	 * Creates a plain (non-BHIE) ImageURN via the factory method and verifies
	 * the [dfn] suffix appears only in CDTP/RAW/VFTP after the identifier type
	 * is explicitly set.
	 */
	@Test
	public void testCreatingImageURNPatientIdentifier()
	{
		try
		{
			ImageURN imageUrn = ImageURN.create("660", "123", "456", "789V432", "CR");
			assertEquals("660", imageUrn.getOriginatingSiteId());
			assertEquals("123", imageUrn.getImageId());
			assertEquals("456", imageUrn.getStudyId());
			assertEquals("789V432", imageUrn.getPatientId());
			assertEquals("CR", imageUrn.getImageModality());
			assertNull(imageUrn.getPatientIdentifierType());
			assertEquals(PatientIdentifierType.icn, imageUrn.getPatientIdentifierTypeOrDefault());
			// with no explicit type, every format serializes identically
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.VFTP));

			imageUrn.setPatientIdentifierType(PatientIdentifierType.dfn);
			assertNotNull(imageUrn.getPatientIdentifierType());
			assertEquals(PatientIdentifierType.dfn, imageUrn.getPatientIdentifierTypeOrDefault());
			assertEquals(PatientIdentifierType.dfn, imageUrn.getPatientIdentifierType());

			/*
			System.out.println("toString(): " + imageUrn.toString());
			System.out.println("toString(NATIVE): " + imageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			System.out.println("toStringCDTP(): " + imageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			System.out.println("toString(RAW): " + imageUrn.toString(SERIALIZATION_FORMAT.RAW));
			System.out.println("toString(VFTP): " + imageUrn.toString(SERIALIZATION_FORMAT.VFTP));
			*/

			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:660-123-456-789V432-CR[dfn]", imageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:vaimage:660-123-456-789V432-CR[dfn]", imageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:vaimage:660-123-456-789V432-CR%5bdfn%5d", imageUrn.toString(SERIALIZATION_FORMAT.VFTP));
		}
		catch(Exception ex)
		{
			// NOTE(review): unlike every other test in this class this catch only
			// prints the stack trace, so a create/serialization failure here does
			// NOT fail the test — presumably it should call fail(ex.getMessage());
			// confirm and align with the sibling tests.
			ex.printStackTrace();
		}
	}

	/**
	 * Parses plain vaimage URN strings without a type suffix and with explicit
	 * [icn] and [dfn] suffixes, verifying the parsed fields and the
	 * serialization of each form.
	 */
	@Test
	public void testParsingImageURNPatientIdentifier()
	{
		try
		{
			// no suffix: raw type is null, defaulted type is ICN
			ImageURN imageUrn = URNFactory.create("urn:vaimage:660-123-456-789V432-CR");
			assertEquals("660", imageUrn.getOriginatingSiteId());
			assertEquals("123", imageUrn.getImageId());
			assertEquals("456", imageUrn.getStudyId());
			assertEquals("789V432", imageUrn.getPatientId());
			assertEquals("CR", imageUrn.getImageModality());
			assertNull(imageUrn.getPatientIdentifierType());
			assertEquals(PatientIdentifierType.icn, imageUrn.getPatientIdentifierTypeOrDefault());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:vaimage:660-123-456-789V432-CR", imageUrn.toString(SERIALIZATION_FORMAT.VFTP));

			// explicit [icn] suffix: raw type is non-null and the suffix round-trips
			ImageURN anotherImageUrn = URNFactory.create("urn:vaimage:660-123-456-789V432-CR[icn]");
			assertEquals("660", anotherImageUrn.getOriginatingSiteId());
			assertEquals("123", anotherImageUrn.getImageId());
			assertEquals("456", anotherImageUrn.getStudyId());
			assertEquals("789V432", anotherImageUrn.getPatientId());
			assertEquals("CR", anotherImageUrn.getImageModality());
			assertEquals(PatientIdentifierType.icn, anotherImageUrn.getPatientIdentifierType());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", anotherImageUrn.toString());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", anotherImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:660-123-456-789V432-CR[icn]", anotherImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:vaimage:660-123-456-789V432-CR[icn]", anotherImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:vaimage:660-123-456-789V432-CR%5bicn%5d", anotherImageUrn.toString(SERIALIZATION_FORMAT.VFTP));

			// explicit [dfn] suffix
			anotherImageUrn = URNFactory.create("urn:vaimage:660-123-456-789V432-CR[dfn]");
			assertEquals("660", anotherImageUrn.getOriginatingSiteId());
			assertEquals("123", anotherImageUrn.getImageId());
			assertEquals("456", anotherImageUrn.getStudyId());
			assertEquals("789V432", anotherImageUrn.getPatientId());
			assertEquals("CR", anotherImageUrn.getImageModality());
			assertEquals(PatientIdentifierType.dfn, anotherImageUrn.getPatientIdentifierType());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", anotherImageUrn.toString());
			assertEquals("urn:vaimage:660-123-456-789V432-CR", anotherImageUrn.toString(SERIALIZATION_FORMAT.NATIVE));
			assertEquals("urn:vaimage:660-123-456-789V432-CR[dfn]", anotherImageUrn.toString(SERIALIZATION_FORMAT.CDTP));
			assertEquals("urn:vaimage:660-123-456-789V432-CR[dfn]", anotherImageUrn.toString(SERIALIZATION_FORMAT.RAW));
			assertEquals("urn:vaimage:660-123-456-789V432-CR%5bdfn%5d", anotherImageUrn.toString(SERIALIZATION_FORMAT.VFTP));
		}
		catch(Exception ex)
		{
			fail(ex.getMessage());
		}
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred.lib;

import java.io.IOException;
import java.util.TreeMap;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;

/**
 * This abstract class extends the FileOutputFormat, allowing to write the
 * output data to different output files. There are three basic use cases for
 * this class.
 *
 * Case one: This class is used for a map reduce job with at least one reducer.
 * The reducer wants to write data to different files depending on the actual
 * keys. It is assumed that a key (or value) encodes the actual key (value)
 * and the desired location for the actual key (value).
 *
 * Case two: This class is used for a map only job. The job wants to use an
 * output file name that is either a part of the input file name of the input
 * data, or some derivation of it.
 *
 * Case three: This class is used for a map only job. The job wants to use an
 * output file name that depends on both the keys and the input file name.
 */
public abstract class MultipleOutputFormat<K, V>
    extends FileOutputFormat<K, V> {

  /**
   * Create a composite record writer that can write key/value data to
   * different output files. Writers for individual files are created lazily
   * on the first record routed to each file and cached for reuse.
   *
   * @param fs
   *          the file system to use
   * @param job
   *          the job conf for the job
   * @param name
   *          the leaf file name for the output file (such as "part-00000")
   * @param arg3
   *          a progressable for reporting progress.
   * @return a composite record writer
   * @throws IOException
   */
  public RecordWriter<K, V> getRecordWriter(FileSystem fs, JobConf job,
      String name, Progressable arg3) throws IOException {

    final FileSystem myFS = fs;
    final String myName = generateLeafFileName(name);
    final JobConf myJob = job;
    final Progressable myProgressable = arg3;

    return new RecordWriter<K, V>() {

      // a cache storing the record writers for different output files.
      TreeMap<String, RecordWriter<K, V>> recordWriters =
          new TreeMap<String, RecordWriter<K, V>>();

      public void write(K key, V value) throws IOException {
        // get the file name based on the key
        String keyBasedPath = generateFileNameForKeyValue(key, value, myName);

        // get the file name based on the input file name
        String finalPath = getInputFileBasedOutputFileName(myJob, keyBasedPath);

        // get the actual key and value that should be written out
        K actualKey = generateActualKey(key, value);
        V actualValue = generateActualValue(key, value);

        RecordWriter<K, V> rw = this.recordWriters.get(finalPath);
        if (rw == null) {
          // if we don't have the record writer yet for the final path, create
          // one and add it to the cache
          rw = getBaseRecordWriter(myFS, myJob, finalPath, myProgressable);
          this.recordWriters.put(finalPath, rw);
        }
        rw.write(actualKey, actualValue);
      }

      public void close(Reporter reporter) throws IOException {
        // close every cached writer; iterate the values directly instead of
        // looking each key up again
        for (RecordWriter<K, V> rw : this.recordWriters.values()) {
          rw.close(reporter);
        }
        this.recordWriters.clear();
      }
    };
  }

  /**
   * Generate the leaf name for the output file name. The default behavior does
   * not change the leaf file name (such as part-00000)
   *
   * @param name
   *          the leaf file name for the output file
   * @return the given leaf file name
   */
  protected String generateLeafFileName(String name) {
    return name;
  }

  /**
   * Generate the file output file name based on the given key/value and the
   * leaf file name. The default behavior is that the file name does not depend
   * on the key or the value.
   *
   * @param key
   *          the key of the output data
   * @param value
   *          the value of the output data
   * @param name
   *          the leaf file name
   * @return generated file name
   */
  protected String generateFileNameForKeyValue(K key, V value, String name) {
    return name;
  }

  /**
   * Generate the actual key from the given key/value. The default behavior is
   * that the actual key is equal to the given key
   *
   * @param key
   *          the key of the output data
   * @param value
   *          the value of the output data
   * @return the actual key derived from the given key/value
   */
  protected K generateActualKey(K key, V value) {
    return key;
  }

  /**
   * Generate the actual value from the given key and value. The default
   * behavior is that the actual value is equal to the given value
   *
   * @param key
   *          the key of the output data
   * @param value
   *          the value of the output data
   * @return the actual value derived from the given key/value
   */
  protected V generateActualValue(K key, V value) {
    return value;
  }

  /**
   * Generate the output file name based on the given name and the input file
   * name. If the map input file does not exist (i.e. this is not for a map
   * only job), the given name is returned unchanged. If the config value for
   * "mapred.outputformat.numOfTrailingLegs" is not set, or set to 0 or
   * negative, the given name is returned unchanged. Otherwise, return a file
   * name consisting of the N trailing legs of the input file name where N is
   * the config value for "mapred.outputformat.numOfTrailingLegs".
   *
   * @param job
   *          the job config
   * @param name
   *          the output file name
   * @return the output file name based on the given name and the input file
   *         name.
   */
  protected String getInputFileBasedOutputFileName(JobConf job, String name) {
    String infilepath = job.get("map.input.file");
    if (infilepath == null) {
      // if the map input file does not exist, then return the given name
      return name;
    }
    int numOfTrailingLegsToUse =
        job.getInt("mapred.outputformat.numOfTrailingLegs", 0);
    if (numOfTrailingLegsToUse <= 0) {
      return name;
    }
    // walk up the input path, collecting up to N trailing path components
    Path infile = new Path(infilepath);
    Path parent = infile.getParent();
    String midName = infile.getName();
    Path outPath = new Path(midName);
    for (int i = 1; i < numOfTrailingLegsToUse; i++) {
      if (parent == null) {
        break;
      }
      midName = parent.getName();
      if (midName.length() == 0) {
        break;
      }
      parent = parent.getParent();
      outPath = new Path(midName, outPath);
    }
    return outPath.toString();
  }

  /**
   * Create a record writer for a single underlying output file.
   *
   * @param fs
   *          the file system to use
   * @param job
   *          a job conf object
   * @param name
   *          the name of the file over which a record writer object will be
   *          constructed
   * @param arg3
   *          a progressable object
   * @return A RecordWriter object over the given file
   * @throws IOException
   */
  abstract protected RecordWriter<K, V> getBaseRecordWriter(FileSystem fs,
      JobConf job, String name, Progressable arg3) throws IOException;
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.tez.mapreduce.examples;

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.apache.tez.client.TezClient;
import org.apache.tez.common.io.NonSyncByteArrayInputStream;
import org.apache.tez.common.io.NonSyncByteArrayOutputStream;
import org.apache.tez.common.io.NonSyncDataOutputStream;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.DataSinkDescriptor;
import org.apache.tez.dag.api.GroupInputEdge;
import org.apache.tez.dag.api.InputDescriptor;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.VertexGroup;
import org.apache.tez.examples.TezExampleBase;
import org.apache.tez.mapreduce.output.MROutput;
import org.apache.tez.mapreduce.processor.SimpleMRProcessor;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.library.api.KeyValueWriter;
import org.apache.tez.runtime.library.conf.OrderedPartitionedKVEdgeConfig;
import org.apache.tez.runtime.library.input.ConcatenatedMergedKeyValuesInput;
import org.apache.tez.runtime.library.partitioner.HashPartitioner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * v1 -> v3 <br/>
 * v2 -> v3 <br/>
 * (v1,v2) is connected to v3 as vertex group. <br/>
 * (v1,v2) have multiple shared outputs, each of them have its own multiple outputs.
 * And v3 also has multiple outputs. </br>
 */
public class MultipleCommitsExample extends TezExampleBase {

  private static final Logger LOG = LoggerFactory.getLogger(MultipleCommitsExample.class);

  // name prefixes under which the data sinks are registered on each vertex /
  // vertex group; the numeric suffix "_<i>" is appended per sink
  private static final String UV12OutputNamePrefix = "uv12Output";
  private static final String V1OutputNamePrefix = "v1Output";
  private static final String V2OutputNamePrefix = "v2Output";
  private static final String V3OutputNamePrefix = "v3Output";
  /** Optional trailing CLI argument enabling commit-on-vertex-success mode. */
  public static final String CommitOnVertexSuccessOption = "commitOnVertexSuccess";

  @Override
  protected void printUsage() {
    System.err.println("Usage: "
        + " multiplecommitsExample v1OutputPrefix v1OutputNum v2OutputPrefix v2OutputNum"
        + " uv12OutputPrefix uv12OutputNum v3OutputPrefix v3OutputNum"
        + " [" + CommitOnVertexSuccessOption + "]" + "(default false)");
  }

  @Override
  protected int validateArgs(String[] otherArgs) {
    // exactly 8 required args, plus an optional 9th that must be the
    // commit-on-vertex-success option
    if (otherArgs.length != 8 && otherArgs.length != 9) {
      return 2;
    }
    if (otherArgs.length == 9 && !otherArgs[8].equals(CommitOnVertexSuccessOption)) {
      return 2;
    }
    return 0;
  }

  /**
   * Processor that writes one dummy record to each of its configured
   * per-vertex outputs and each of its configured shared (vertex-group)
   * outputs.
   */
  public static class MultipleOutputProcessor extends SimpleMRProcessor {

    MultipleOutputProcessorConfig config;

    public MultipleOutputProcessor(ProcessorContext context) {
      super(context);
    }

    @Override
    public void initialize() throws Exception {
      super.initialize();
      // output counts/prefixes are carried in the user payload
      config = MultipleOutputProcessorConfig.fromUserPayload(getContext().getUserPayload());
    }

    @Override
    public void run() throws Exception {
      for (int i = 0; i < config.outputNum; ++i) {
        KeyValueWriter writer =
            (KeyValueWriter) getOutputs().get(config.outputNamePrefix + "_" + i).getWriter();
        writer.write(NullWritable.get(), new Text("dummy"));
      }
      for (int i = 0; i < config.sharedOutputNum; ++i) {
        KeyValueWriter writer =
            (KeyValueWriter) getOutputs().get(config.sharedOutputNamePrefix + "_" + i).getWriter();
        writer.write(NullWritable.get(), new Text("dummy"));
      }
    }

    /**
     * Writable configuration for {@link MultipleOutputProcessor}: the prefix
     * and count of the vertex's own outputs, plus an optional prefix/count for
     * shared vertex-group outputs.
     */
    public static class MultipleOutputProcessorConfig implements Writable {
      String outputNamePrefix;
      int outputNum;
      // null when the processor has no shared (vertex-group) outputs
      String sharedOutputNamePrefix = null;
      int sharedOutputNum;

      public MultipleOutputProcessorConfig() {
      }

      public MultipleOutputProcessorConfig(String outputNamePrefix, int outputNum) {
        this.outputNamePrefix = outputNamePrefix;
        this.outputNum = outputNum;
      }

      public MultipleOutputProcessorConfig(String outputNamePrefix, int outputNum,
          String sharedOutputNamePrefix, int sharedOutputNum) {
        this.outputNamePrefix = outputNamePrefix;
        this.outputNum = outputNum;
        this.sharedOutputNamePrefix = sharedOutputNamePrefix;
        this.sharedOutputNum = sharedOutputNum;
      }

      @Override
      public void write(DataOutput out) throws IOException {
        new Text(outputNamePrefix).write(out);
        out.writeInt(outputNum);
        // a boolean flag records whether the shared-output fields follow
        if (sharedOutputNamePrefix != null) {
          new BooleanWritable(true).write(out);
          new Text(sharedOutputNamePrefix).write(out);
          out.writeInt(sharedOutputNum);
        } else {
          new BooleanWritable(false).write(out);
        }
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        Text outputNameText = new Text();
        outputNameText.readFields(in);
        outputNamePrefix = outputNameText.toString();
        outputNum = in.readInt();
        BooleanWritable hasSharedOutputs = new BooleanWritable();
        hasSharedOutputs.readFields(in);
        if (hasSharedOutputs.get()) {
          Text sharedOutputNamePrefixText = new Text();
          sharedOutputNamePrefixText.readFields(in);
          sharedOutputNamePrefix = sharedOutputNamePrefixText.toString();
          sharedOutputNum = in.readInt();
        }
      }

      /** Serializes this config into a Tez {@link UserPayload}. */
      public UserPayload toUserPayload() throws IOException {
        NonSyncByteArrayOutputStream out = new NonSyncByteArrayOutputStream();
        this.write(new NonSyncDataOutputStream(out));
        return UserPayload.create(ByteBuffer.wrap(out.toByteArray()));
      }

      /** Deserializes a config from a Tez {@link UserPayload}. */
      public static MultipleOutputProcessorConfig fromUserPayload(UserPayload payload)
          throws IOException {
        MultipleOutputProcessorConfig config = new MultipleOutputProcessorConfig();
        config.readFields(new DataInputStream(
            new NonSyncByteArrayInputStream(payload.deepCopyAsArray())));
        return config;
      }
    }
  }

  @Override
  protected int runJob(String[] args, TezConfiguration tezConf,
      TezClient tezClient) throws Exception {
    // BUGFIX: the optional flag is the 9th argument (index 8) as enforced by
    // validateArgs/printUsage; the previous check (args.length == 5 &&
    // args[4].equals(...)) could never be true, so the option was ignored.
    boolean commitOnVertexSuccess =
        args.length == 9 && args[8].equals(CommitOnVertexSuccessOption);
    DAG dag = createDAG(tezConf,
        args[0], Integer.parseInt(args[1]),
        args[2], Integer.parseInt(args[3]),
        args[4], Integer.parseInt(args[5]),
        args[6], Integer.parseInt(args[7]),
        commitOnVertexSuccess);
    LOG.info("Running MultipleCommitsExample");
    return runDag(dag, false, LOG);
  }

  /**
   * Builds the example DAG: v1 and v2 each carry their own data sinks plus
   * shared vertex-group sinks, and feed v3 (which has its own sinks) through
   * a grouped ordered-partitioned edge.
   *
   * @param commitOnVertexSuccess when true, outputs are committed as each
   *          vertex succeeds instead of all-at-once on DAG success
   */
  private DAG createDAG(TezConfiguration tezConf, String v1OutputPathPrefix,
      int v1OutputNum, String v2OutputPathPrefix, int v2OutputNum,
      String uv12OutputPathPrefix, int uv12OutputNum,
      String v3OutputPathPrefix, int v3OutputNum,
      boolean commitOnVertexSuccess) throws IOException {
    DAG dag = DAG.create("multipleCommitsDAG");
    // commit-on-vertex-success is the inverse of committing on DAG success
    dag.setConf(TezConfiguration.TEZ_AM_COMMIT_ALL_OUTPUTS_ON_DAG_SUCCESS,
        !commitOnVertexSuccess + "");
    Vertex v1 = Vertex.create("v1",
        ProcessorDescriptor.create(MultipleOutputProcessor.class.getName())
          .setUserPayload(
              new MultipleOutputProcessor.MultipleOutputProcessorConfig(
                  V1OutputNamePrefix, v1OutputNum, UV12OutputNamePrefix, uv12OutputNum)
                .toUserPayload()), 2);
    Vertex v2 = Vertex.create("v2",
        ProcessorDescriptor.create(MultipleOutputProcessor.class.getName())
          .setUserPayload(
              new MultipleOutputProcessor.MultipleOutputProcessorConfig(
                  V2OutputNamePrefix, v2OutputNum, UV12OutputNamePrefix, uv12OutputNum)
                .toUserPayload()), 2);

    // add data sinks for v1
    for (int i = 0; i < v1OutputNum; ++i) {
      DataSinkDescriptor sink = MROutput.createConfigBuilder(
          new Configuration(tezConf), TextOutputFormat.class,
          v1OutputPathPrefix + "_" + i).build();
      v1.addDataSink(V1OutputNamePrefix + "_" + i, sink);
    }

    // add data sinks for v2
    for (int i = 0; i < v2OutputNum; ++i) {
      DataSinkDescriptor sink = MROutput.createConfigBuilder(
          new Configuration(tezConf), TextOutputFormat.class,
          v2OutputPathPrefix + "_" + i).build();
      v2.addDataSink(V2OutputNamePrefix + "_" + i, sink);
    }

    // add shared data sinks for the (v1,v2) vertex group
    VertexGroup uv12 = dag.createVertexGroup("uv12", v1, v2);
    for (int i = 0; i < uv12OutputNum; ++i) {
      DataSinkDescriptor sink = MROutput.createConfigBuilder(
          new Configuration(tezConf), TextOutputFormat.class,
          uv12OutputPathPrefix + "_" + i).build();
      uv12.addDataSink(UV12OutputNamePrefix + "_" + i, sink);
    }

    Vertex v3 = Vertex.create("v3",
        ProcessorDescriptor.create(MultipleOutputProcessor.class.getName())
          .setUserPayload(
              new MultipleOutputProcessor.MultipleOutputProcessorConfig(
                  V3OutputNamePrefix, v3OutputNum)
                .toUserPayload()), 2);

    // add data sinks for v3
    for (int i = 0; i < v3OutputNum; ++i) {
      DataSinkDescriptor sink = MROutput.createConfigBuilder(
          new Configuration(tezConf), TextOutputFormat.class,
          v3OutputPathPrefix + "_" + i).build();
      v3.addDataSink(V3OutputNamePrefix + "_" + i, sink);
    }

    OrderedPartitionedKVEdgeConfig edgeConfig = OrderedPartitionedKVEdgeConfig.newBuilder(
        NullWritable.class.getName(), Text.class.getName(),
        HashPartitioner.class.getName())
        .setFromConfiguration(tezConf)
        .build();

    GroupInputEdge edge = GroupInputEdge.create(uv12, v3,
        edgeConfig.createDefaultEdgeProperty(),
        InputDescriptor.create(ConcatenatedMergedKeyValuesInput.class.getName()));

    dag.addVertex(v1)
      .addVertex(v2)
      .addVertex(v3)
      .addEdge(edge);
    return dag;
  }

  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new MultipleCommitsExample(), args);
    System.exit(res);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.logging.log4j.web;

import javax.servlet.ServletContext;

import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.impl.ContextAnchor;
import org.easymock.Capture;
import org.easymock.IAnswer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.*;

/**
 * Unit tests for {@link Log4jWebInitializerImpl}, driven entirely through a strict
 * EasyMock {@link ServletContext}. Each test scripts the exact servlet-context
 * interactions it expects (init-parameter reads, attribute writes/removals), then
 * exercises the initializer's start / stop / setLoggerContext / clearLoggerContext
 * life cycle. The expectation sequences are order-sensitive (strict mock), so the
 * replay/verify/reset choreography in each test must not be reordered.
 */
public class Log4jWebInitializerImplTest {
    // Strict mock: every expected call must happen in the scripted order.
    private ServletContext servletContext;
    // The instance under test, captured from the SUPPORT_ATTRIBUTE write in setUp().
    private Log4jWebInitializerImpl initializer;

    /**
     * Obtains the shared {@link Log4jWebLifeCycle} via
     * {@link WebLoggerContextUtils#getWebLifeCycle}, capturing the instance that the
     * factory stores under {@code Log4jWebSupport.SUPPORT_ATTRIBUTE}, and asserts it
     * is a {@link Log4jWebInitializerImpl}. The mock is reset at the end so each test
     * can script its own expectations.
     */
    @Before
    public void setUp() {
        // NOTE(review): later EasyMock versions deprecate the Capture constructor in
        // favor of EasyMock.newCapture() — confirm the project's EasyMock version
        // before modernizing.
        final Capture<Log4jWebLifeCycle> initializerCapture = new Capture<>();

        this.servletContext = createStrictMock(ServletContext.class);
        // No previously stored support object → the factory must create and store one.
        expect(this.servletContext.getAttribute(Log4jWebSupport.SUPPORT_ATTRIBUTE)).andReturn(null);
        this.servletContext.setAttribute(eq(Log4jWebSupport.SUPPORT_ATTRIBUTE), capture(initializerCapture));
        expectLastCall();

        replay(this.servletContext);

        final Log4jWebLifeCycle initializer = WebLoggerContextUtils.getWebLifeCycle(this.servletContext);

        assertNotNull("The initializer should not be null.", initializer);
        assertSame("The capture is not correct.", initializer, initializerCapture.getValue());
        assertTrue("The initializer is not correct.", initializer instanceof Log4jWebInitializerImpl);

        verify(this.servletContext);
        reset(this.servletContext);

        this.initializer = (Log4jWebInitializerImpl)initializer;
    }

    /** Verifies whatever expectations the individual test left scripted on the mock. */
    @After
    public void tearDown() {
        verify(this.servletContext);
    }

    // stop() before start() must be rejected with IllegalStateException.
    @Test
    public void testDeinitializeBeforeInitialize() {
        replay(this.servletContext);

        try {
            this.initializer.stop();
            fail("Expected an IllegalStateException.");
        } catch (final IllegalStateException ignore) {
            // expected: stopping an initializer that was never started is illegal
        }
    }

    // setLoggerContext() before start() is a silent no-op: no thread-bound context appears.
    @Test
    public void testSetLoggerContextBeforeInitialize() {
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        assertNull("The context should still be null.", ContextAnchor.THREAD_CONTEXT.get());
    }

    // clearLoggerContext() before start() is likewise a harmless no-op.
    @Test
    public void testClearLoggerContextBeforeInitialize() {
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.clearLoggerContext();
        assertNull("The context should still be null.", ContextAnchor.THREAD_CONTEXT.get());
    }

    /**
     * Full happy-path life cycle with no init parameters configured: start() must
     * store a {@code LoggerContext} under CONTEXT_ATTRIBUTE, setLoggerContext() must
     * bind/unbind exactly that context on the current thread, and after stop() the
     * binding calls become no-ops again.
     */
    @Test
    public void testInitializeWithNoParametersThenSetLoggerContextThenDeinitialize() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        // All three init parameters absent → initializer falls back to the servlet context name.
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn(null);
        expect(this.servletContext.getServletContextName()).andReturn("helloWorld01");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNotNull("The context attribute should not be null.", loggerContextCapture.getValue());
        assertTrue("The context attribute is not correct.",
                loggerContextCapture.getValue() instanceof org.apache.logging.log4j.spi.LoggerContext);
        final org.apache.logging.log4j.spi.LoggerContext loggerContext =
                (org.apache.logging.log4j.spi.LoggerContext)loggerContextCapture.getValue();

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        // After start, setLoggerContext binds the captured context to this thread…
        assertNull("The context should still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        final LoggerContext context = ContextAnchor.THREAD_CONTEXT.get();
        assertNotNull("The context should not be null.", context);
        assertSame("The context is not correct.", loggerContext, context);
        // …and clearLoggerContext unbinds it.
        this.initializer.clearLoggerContext();
        assertNull("The context should be null again.", ContextAnchor.THREAD_CONTEXT.get());

        verify(this.servletContext);
        reset(this.servletContext);
        // stop() must remove the attribute it stored.
        this.servletContext.removeAttribute(Log4jWebSupport.CONTEXT_ATTRIBUTE);
        expectLastCall();
        replay(this.servletContext);

        this.initializer.stop();

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        // After stop, setLoggerContext is a no-op again.
        assertNull("The context should again still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        assertNull("The context should finally still be null.", ContextAnchor.THREAD_CONTEXT.get());
    }

    /**
     * Same life cycle as above, but with IS_LOG4J_CONTEXT_SELECTOR_NAMED = "false",
     * which makes the initializer additionally query the context class loader.
     */
    @Test
    public void testInitializeWithClassLoaderNoParametersThenSetLoggerContextThenDeinitialize() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn("false");
        expect(this.servletContext.getServletContextName()).andReturn("helloWorld02");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        // "false" selector → class-loader based context lookup.
        expect(this.servletContext.getClassLoader()).andReturn(this.getClass().getClassLoader());
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNotNull("The context attribute should not be null.", loggerContextCapture.getValue());
        assertTrue("The context attribute is not correct.",
                loggerContextCapture.getValue() instanceof org.apache.logging.log4j.spi.LoggerContext);
        final org.apache.logging.log4j.spi.LoggerContext loggerContext =
                (org.apache.logging.log4j.spi.LoggerContext)loggerContextCapture.getValue();

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        assertNull("The context should still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        final LoggerContext context = ContextAnchor.THREAD_CONTEXT.get();
        assertNotNull("The context should not be null.", context);
        assertSame("The context is not correct.", loggerContext, context);
        this.initializer.clearLoggerContext();
        assertNull("The context should be null again.", ContextAnchor.THREAD_CONTEXT.get());

        verify(this.servletContext);
        reset(this.servletContext);
        this.servletContext.removeAttribute(Log4jWebSupport.CONTEXT_ATTRIBUTE);
        expectLastCall();
        replay(this.servletContext);

        this.initializer.stop();

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        assertNull("The context should again still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        assertNull("The context should finally still be null.", ContextAnchor.THREAD_CONTEXT.get());
    }

    /**
     * Repeated start() calls after a successful start must not interact with the
     * servlet context again (no expectations are scripted for the extra calls).
     */
    @Test
    public void testInitializeIsIdempotent() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        // Any non-"true" value behaves like the class-loader path here.
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn("nothing");
        expect(this.servletContext.getServletContextName()).andReturn("helloWorld03");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        expect(this.servletContext.getClassLoader()).andReturn(this.getClass().getClassLoader());
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNotNull("The context attribute should not be null.", loggerContextCapture.getValue());
        assertTrue("The context attribute is not correct.",
                loggerContextCapture.getValue() instanceof org.apache.logging.log4j.spi.LoggerContext);
        verify(this.servletContext);
        reset(this.servletContext);

        // Empty expectation set: the extra starts must touch nothing on the mock.
        replay(this.servletContext);

        this.initializer.start();
        this.initializer.start();
        this.initializer.start();

        verify(this.servletContext);
        reset(this.servletContext);
        this.servletContext.removeAttribute(Log4jWebSupport.CONTEXT_ATTRIBUTE);
        expectLastCall();
        replay(this.servletContext);

        this.initializer.stop();
    }

    /** start() after stop() must fail: the initializer cannot be restarted. */
    @Test
    public void testInitializeFailsAfterDeinitialize() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn(null);
        expect(this.servletContext.getServletContextName()).andReturn("helloWorld04");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        expect(this.servletContext.getClassLoader()).andReturn(this.getClass().getClassLoader());
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNotNull("The context attribute should not be null.", loggerContextCapture.getValue());
        assertTrue("The context attribute is not correct.",
                loggerContextCapture.getValue() instanceof org.apache.logging.log4j.spi.LoggerContext);
        verify(this.servletContext);
        reset(this.servletContext);

        this.servletContext.removeAttribute(Log4jWebSupport.CONTEXT_ATTRIBUTE);
        expectLastCall();
        replay(this.servletContext);

        this.initializer.stop();
        try {
            this.initializer.start();
            fail("Expected an IllegalStateException.");
        } catch (final IllegalStateException ignore) {
            // expected: restart after stop is not supported
        }
    }

    /** Repeated stop() calls must remove the attribute only once, then do nothing. */
    @Test
    public void testDeinitializeIsIdempotent() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn(null);
        expect(this.servletContext.getServletContextName()).andReturn("helloWorld05");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        expect(this.servletContext.getClassLoader()).andReturn(this.getClass().getClassLoader());
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNotNull("The context attribute should not be null.", loggerContextCapture.getValue());
        assertTrue("The context attribute is not correct.",
                loggerContextCapture.getValue() instanceof org.apache.logging.log4j.spi.LoggerContext);
        verify(this.servletContext);
        reset(this.servletContext);

        // removeAttribute is expected exactly once even though stop() is called three times.
        this.servletContext.removeAttribute(Log4jWebSupport.CONTEXT_ATTRIBUTE);
        expectLastCall();
        replay(this.servletContext);

        this.initializer.stop();
        this.initializer.stop();
        this.initializer.stop();
    }

    /**
     * IS_LOG4J_CONTEXT_SELECTOR_NAMED = "true" without a configured JNDI selector
     * must abort start() with an IllegalStateException.
     */
    @Test
    public void testInitializeUsingJndiSelectorFails() throws Exception {
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn("true");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        try {
            this.initializer.start();
            fail("Expected an IllegalStateException.");
        } catch (final IllegalStateException ignore) {
            // ignore
        }
    }

    /**
     * Named JNDI selector path: start() succeeds but stores no LoggerContext
     * (the captured attribute value is null), so the thread-binding calls are no-ops.
     */
    @Test
    public void testInitializeUsingJndiSelector() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn("helloWorld6");
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn("true");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNull("The context attribute should be null.", loggerContextCapture.getValue());

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        assertNull("The context should still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        assertNull("The context should still be null because no named selector.",
                ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.clearLoggerContext();
        assertNull("The context should be null again.", ContextAnchor.THREAD_CONTEXT.get());

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        this.initializer.stop();

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        assertNull("The context should again still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        assertNull("The context should finally still be null.", ContextAnchor.THREAD_CONTEXT.get());
    }

    /**
     * wrapExecution(runnable) must bind the LoggerContext for the duration of
     * runnable.run() (verified inside the mock's IAnswer) and unbind it afterwards.
     */
    @Test
    public void testWrapExecutionWithNoParameters() throws Exception {
        final Capture<Object> loggerContextCapture = new Capture<>();
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONTEXT_NAME)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.LOG4J_CONFIG_LOCATION)).andReturn(null);
        expect(this.servletContext.getInitParameter(Log4jWebSupport.IS_LOG4J_CONTEXT_SELECTOR_NAMED))
                .andReturn(null);
        expect(this.servletContext.getServletContextName()).andReturn("helloWorld01");
        expect(this.servletContext.getResourcePaths("/WEB-INF/")).andReturn(null);
        this.servletContext.setAttribute(eq(Log4jWebSupport.CONTEXT_ATTRIBUTE), capture(loggerContextCapture));
        expectLastCall();
        replay(this.servletContext);

        assertNull("The context should be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.start();
        assertNotNull("The context attribute should not be null.", loggerContextCapture.getValue());
        assertTrue("The context attribute is not correct.",
                loggerContextCapture.getValue() instanceof org.apache.logging.log4j.spi.LoggerContext);
        final org.apache.logging.log4j.spi.LoggerContext loggerContext =
                (org.apache.logging.log4j.spi.LoggerContext)loggerContextCapture.getValue();

        verify(this.servletContext);
        reset(this.servletContext);

        assertNull("The context should still be null.", ContextAnchor.THREAD_CONTEXT.get());

        final Runnable runnable = createStrictMock(Runnable.class);
        runnable.run();
        expectLastCall().andAnswer(new IAnswer<Void>() {
            @Override
            public Void answer() {
                // The binding must be visible on this thread while run() executes.
                final LoggerContext context = ContextAnchor.THREAD_CONTEXT.get();
                assertNotNull("The context should not be null.", context);
                assertSame("The context is not correct.", loggerContext, context);
                return null;
            }
        });

        replay(this.servletContext, runnable);

        this.initializer.wrapExecution(runnable);
        assertNull("The context should be null again.", ContextAnchor.THREAD_CONTEXT.get());

        verify(this.servletContext, runnable);
        reset(this.servletContext);
        this.servletContext.removeAttribute(Log4jWebSupport.CONTEXT_ATTRIBUTE);
        expectLastCall();
        replay(this.servletContext);

        this.initializer.stop();

        verify(this.servletContext);
        reset(this.servletContext);
        replay(this.servletContext);

        assertNull("The context should again still be null.", ContextAnchor.THREAD_CONTEXT.get());
        this.initializer.setLoggerContext();
        assertNull("The context should finally still be null.", ContextAnchor.THREAD_CONTEXT.get());
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ccr.CcrStatsRequest;
import org.elasticsearch.client.ccr.CcrStatsResponse;
import org.elasticsearch.client.ccr.DeleteAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.FollowInfoRequest;
import org.elasticsearch.client.ccr.FollowInfoResponse;
import org.elasticsearch.client.ccr.FollowStatsRequest;
import org.elasticsearch.client.ccr.FollowStatsResponse;
import org.elasticsearch.client.ccr.ForgetFollowerRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternResponse;
import org.elasticsearch.client.ccr.IndicesFollowStats;
import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
import org.elasticsearch.client.ccr.PutFollowResponse;
import org.elasticsearch.client.ccr.ResumeFollowRequest;
import org.elasticsearch.client.ccr.UnfollowRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.core.BroadcastResponse;
import org.elasticsearch.client.indices.CloseIndexRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.seqno.ReplicationTracker;
import org.elasticsearch.test.rest.yaml.ObjectPath;
import org.junit.Before;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests for the high-level REST client's CCR (cross-cluster replication)
 * APIs, run against a live test cluster. The local cluster is registered with itself
 * as a remote cluster named {@code local_cluster} in {@link #setupRemoteClusterConfig()},
 * so leader and follower indices live on the same cluster. All calls are real REST
 * requests with external side effects; statement order matters.
 */
public class CCRIT extends ESRestHighLevelClientTestCase {

    /**
     * Registers the local cluster as remote cluster {@code local_cluster} by pointing
     * {@code cluster.remote.local_cluster.seeds} at the transport address of the first
     * node reported by {@code GET /_nodes}, then polls {@code GET /_remote/info} until
     * the connection is established.
     */
    @Before
    public void setupRemoteClusterConfig() throws Exception {
        // Configure local cluster as remote cluster:

        // TODO: replace with nodes info highlevel rest client code when it is available:
        final Request request = new Request("GET", "/_nodes");
        Map<?, ?> nodesResponse = (Map<?, ?>) toMap(client().performRequest(request)).get("nodes");
        // Select node info of first node (we don't know the node id):
        nodesResponse = (Map<?, ?>) nodesResponse.get(nodesResponse.keySet().iterator().next());
        String transportAddress = (String) nodesResponse.get("transport_address");

        ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
        updateSettingsRequest.transientSettings(
            Collections.singletonMap("cluster.remote.local_cluster.seeds", transportAddress));
        ClusterUpdateSettingsResponse updateSettingsResponse =
            highLevelClient().cluster().putSettings(updateSettingsRequest, RequestOptions.DEFAULT);
        assertThat(updateSettingsResponse.isAcknowledged(), is(true));

        // Wait until the remote-cluster connection is actually up before any test runs.
        assertBusy(() -> {
            Map<?, ?> localConnection = (Map<?, ?>) toMap(client()
                .performRequest(new Request("GET", "/_remote/info")))
                .get("local_cluster");
            assertThat(localConnection, notNullValue());
            assertThat(localConnection.get("connected"), is(true));
        });
    }

    /**
     * End-to-end follow life cycle: create a leader index, start following it,
     * verify documents replicate (via follow info/stats and searches on the
     * follower), then pause, resume, pause again, close the follower, and unfollow.
     */
    public void testIndexFollowing() throws Exception {
        CcrClient ccrClient = highLevelClient().ccr();

        // CCR requires soft deletes on the leader index.
        CreateIndexRequest createIndexRequest = new CreateIndexRequest("leader");
        createIndexRequest.settings(Collections.singletonMap("index.soft_deletes.enabled", true));
        CreateIndexResponse response = highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
        assertThat(response.isAcknowledged(), is(true));

        PutFollowRequest putFollowRequest = new PutFollowRequest("local_cluster", "leader", "follower", ActiveShardCount.ONE);
        PutFollowResponse putFollowResponse = execute(putFollowRequest, ccrClient::putFollow, ccrClient::putFollowAsync);
        assertThat(putFollowResponse.isFollowIndexCreated(), is(true));
        assertThat(putFollowResponse.isFollowIndexShardsAcked(), is(true));
        assertThat(putFollowResponse.isIndexFollowingStarted(), is(true));

        // Index one document into the leader and confirm it is searchable there.
        IndexRequest indexRequest = new IndexRequest("leader")
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
            .source("{}", XContentType.JSON);
        highLevelClient().index(indexRequest, RequestOptions.DEFAULT);

        SearchRequest leaderSearchRequest = new SearchRequest("leader");
        SearchResponse leaderSearchResponse = highLevelClient().search(leaderSearchRequest, RequestOptions.DEFAULT);
        assertThat(leaderSearchResponse.getHits().getTotalHits().value, equalTo(1L));

        try {
            // Poll until the follower has caught up (global checkpoint 0 = first doc replicated).
            assertBusy(() -> {
                FollowInfoRequest followInfoRequest = new FollowInfoRequest("follower");
                FollowInfoResponse followInfoResponse =
                    execute(followInfoRequest, ccrClient::getFollowInfo, ccrClient::getFollowInfoAsync);
                assertThat(followInfoResponse.getInfos().size(), equalTo(1));
                assertThat(followInfoResponse.getInfos().get(0).getFollowerIndex(), equalTo("follower"));
                assertThat(followInfoResponse.getInfos().get(0).getLeaderIndex(), equalTo("leader"));
                assertThat(followInfoResponse.getInfos().get(0).getRemoteCluster(), equalTo("local_cluster"));
                assertThat(followInfoResponse.getInfos().get(0).getStatus(), equalTo(FollowInfoResponse.Status.ACTIVE));

                FollowStatsRequest followStatsRequest = new FollowStatsRequest("follower");
                FollowStatsResponse followStatsResponse =
                    execute(followStatsRequest, ccrClient::getFollowStats, ccrClient::getFollowStatsAsync);
                List<ShardFollowStats> shardFollowStats =
                    followStatsResponse.getIndicesFollowStats().getShardFollowStats("follower");
                long followerGlobalCheckpoint = shardFollowStats.stream()
                    .mapToLong(ShardFollowStats::getFollowerGlobalCheckpoint)
                    .max()
                    .getAsLong();
                assertThat(followerGlobalCheckpoint, equalTo(0L));

                SearchRequest followerSearchRequest = new SearchRequest("follower");
                SearchResponse followerSearchResponse =
                    highLevelClient().search(followerSearchRequest, RequestOptions.DEFAULT);
                assertThat(followerSearchResponse.getHits().getTotalHits().value, equalTo(1L));
            });
        } catch (Exception e) {
            // On failure, surface any fatal shard-follow exceptions to aid debugging,
            // then rethrow nothing: the assertBusy failure already failed the test
            // path above only if uncaught — here the exception is swallowed after
            // logging (original behavior preserved).
            IndicesFollowStats followStats =
                ccrClient.getCcrStats(new CcrStatsRequest(), RequestOptions.DEFAULT).getIndicesFollowStats();
            for (Map.Entry<String, List<ShardFollowStats>> entry : followStats.getShardFollowStats().entrySet()) {
                for (ShardFollowStats shardFollowStats : entry.getValue()) {
                    if (shardFollowStats.getFatalException() != null) {
                        logger.warn(new ParameterizedMessage("fatal shard follow exception {}",
                            shardFollowStats.getShardId()), shardFollowStats.getFatalException());
                    }
                }
            }
        }

        PauseFollowRequest pauseFollowRequest = new PauseFollowRequest("follower");
        AcknowledgedResponse pauseFollowResponse =
            execute(pauseFollowRequest, ccrClient::pauseFollow, ccrClient::pauseFollowAsync);
        assertThat(pauseFollowResponse.isAcknowledged(), is(true));

        // Second document, indexed while following is paused.
        highLevelClient().index(indexRequest, RequestOptions.DEFAULT);

        ResumeFollowRequest resumeFollowRequest = new ResumeFollowRequest("follower");
        AcknowledgedResponse resumeFollowResponse =
            execute(resumeFollowRequest, ccrClient::resumeFollow, ccrClient::resumeFollowAsync);
        assertThat(resumeFollowResponse.isAcknowledged(), is(true));

        // After resume, the second doc must replicate (checkpoint 1, two hits).
        assertBusy(() -> {
            FollowStatsRequest followStatsRequest = new FollowStatsRequest("follower");
            FollowStatsResponse followStatsResponse =
                execute(followStatsRequest, ccrClient::getFollowStats, ccrClient::getFollowStatsAsync);
            List<ShardFollowStats> shardFollowStats =
                followStatsResponse.getIndicesFollowStats().getShardFollowStats("follower");
            long followerGlobalCheckpoint = shardFollowStats.stream()
                .mapToLong(ShardFollowStats::getFollowerGlobalCheckpoint)
                .max()
                .getAsLong();
            assertThat(followerGlobalCheckpoint, equalTo(1L));

            SearchRequest followerSearchRequest = new SearchRequest("follower");
            SearchResponse followerSearchResponse =
                highLevelClient().search(followerSearchRequest, RequestOptions.DEFAULT);
            assertThat(followerSearchResponse.getHits().getTotalHits().value, equalTo(2L));
        });

        // Need to pause prior to unfollowing it:
        pauseFollowRequest = new PauseFollowRequest("follower");
        pauseFollowResponse = execute(pauseFollowRequest, ccrClient::pauseFollow, ccrClient::pauseFollowAsync);
        assertThat(pauseFollowResponse.isAcknowledged(), is(true));

        assertBusy(() -> {
            FollowInfoRequest followInfoRequest = new FollowInfoRequest("follower");
            FollowInfoResponse followInfoResponse =
                execute(followInfoRequest, ccrClient::getFollowInfo, ccrClient::getFollowInfoAsync);
            assertThat(followInfoResponse.getInfos().size(), equalTo(1));
            assertThat(followInfoResponse.getInfos().get(0).getFollowerIndex(), equalTo("follower"));
            assertThat(followInfoResponse.getInfos().get(0).getLeaderIndex(), equalTo("leader"));
            assertThat(followInfoResponse.getInfos().get(0).getRemoteCluster(), equalTo("local_cluster"));
            assertThat(followInfoResponse.getInfos().get(0).getStatus(), equalTo(FollowInfoResponse.Status.PAUSED));
        });

        // Need to close index prior to unfollowing it:
        CloseIndexRequest closeIndexRequest = new CloseIndexRequest("follower");
        org.elasticsearch.action.support.master.AcknowledgedResponse closeIndexReponse =
            highLevelClient().indices().close(closeIndexRequest, RequestOptions.DEFAULT);
        assertThat(closeIndexReponse.isAcknowledged(), is(true));

        UnfollowRequest unfollowRequest = new UnfollowRequest("follower");
        AcknowledgedResponse unfollowResponse = execute(unfollowRequest, ccrClient::unfollow, ccrClient::unfollowAsync);
        assertThat(unfollowResponse.isAcknowledged(), is(true));
    }

    /**
     * Exercises the forget-follower API: after pausing a follower, tells the leader
     * to drop the follower's retention leases, then checks via the leader's shard
     * stats that only peer-recovery retention leases remain.
     */
    public void testForgetFollower() throws IOException {
        final CcrClient ccrClient = highLevelClient().ccr();

        final CreateIndexRequest createIndexRequest = new CreateIndexRequest("leader");
        final Map<String, String> settings = new HashMap<>(3);
        final int numberOfShards = randomIntBetween(1, 2);
        settings.put("index.number_of_replicas", "0");
        settings.put("index.number_of_shards", Integer.toString(numberOfShards));
        settings.put("index.soft_deletes.enabled", Boolean.TRUE.toString());
        createIndexRequest.settings(settings);
        final CreateIndexResponse response =
            highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
        assertThat(response.isAcknowledged(), is(true));

        final PutFollowRequest putFollowRequest =
            new PutFollowRequest("local_cluster", "leader", "follower", ActiveShardCount.ONE);
        final PutFollowResponse putFollowResponse =
            execute(putFollowRequest, ccrClient::putFollow, ccrClient::putFollowAsync);
        assertTrue(putFollowResponse.isFollowIndexCreated());
        assertTrue(putFollowResponse.isFollowIndexShardsAcked());
        assertTrue(putFollowResponse.isIndexFollowingStarted());

        final String clusterName = highLevelClient().info(RequestOptions.DEFAULT).getClusterName();

        // The forget-follower request identifies the follower by its index UUID,
        // fetched here from the low-level stats API.
        final Request statsRequest = new Request("GET", "/follower/_stats");
        final Response statsResponse = client().performRequest(statsRequest);
        final ObjectPath statsObjectPath = ObjectPath.createFromResponse(statsResponse);
        final String followerIndexUUID = statsObjectPath.evaluate("indices.follower.uuid");

        final PauseFollowRequest pauseFollowRequest = new PauseFollowRequest("follower");
        AcknowledgedResponse pauseFollowResponse =
            execute(pauseFollowRequest, ccrClient::pauseFollow, ccrClient::pauseFollowAsync);
        assertTrue(pauseFollowResponse.isAcknowledged());

        final ForgetFollowerRequest forgetFollowerRequest =
            new ForgetFollowerRequest(clusterName, "follower", followerIndexUUID, "local_cluster", "leader");
        final BroadcastResponse forgetFollowerResponse =
            execute(forgetFollowerRequest, ccrClient::forgetFollower, ccrClient::forgetFollowerAsync);
        assertThat(forgetFollowerResponse.shards().total(), equalTo(numberOfShards));
        assertThat(forgetFollowerResponse.shards().successful(), equalTo(numberOfShards));
        assertThat(forgetFollowerResponse.shards().skipped(), equalTo(0));
        assertThat(forgetFollowerResponse.shards().failed(), equalTo(0));
        assertThat(forgetFollowerResponse.shards().failures(), empty());

        // Every remaining retention lease on the leader shards must be a
        // peer-recovery lease — the follower's leases are gone.
        final Request retentionLeasesRequest = new Request("GET", "/leader/_stats");
        retentionLeasesRequest.addParameter("level", "shards");
        final Response retentionLeasesResponse = client().performRequest(retentionLeasesRequest);
        final Map<?, ?> shardsStats =
            ObjectPath.createFromResponse(retentionLeasesResponse).evaluate("indices.leader.shards");
        assertThat(shardsStats.keySet(), hasSize(numberOfShards));
        for (int i = 0; i < numberOfShards; i++) {
            final List<?> shardStats = (List<?>) shardsStats.get(Integer.toString(i));
            assertThat(shardStats, hasSize(1));
            final Map<?, ?> shardStatsAsMap = (Map<?, ?>) shardStats.get(0);
            final Map<?, ?> retentionLeasesStats = (Map<?, ?>) shardStatsAsMap.get("retention_leases");
            final List<?> leases = (List<?>) retentionLeasesStats.get("leases");
            for (final Object lease : leases) {
                assertThat(((Map<?, ?>) lease).get("source"),
                    equalTo(ReplicationTracker.PEER_RECOVERY_RETENTION_LEASE_SOURCE));
            }
        }
    }

    /**
     * Exercises the auto-follow pattern APIs: put a pattern matching {@code logs-*}
     * with a rename template, create a matching leader index, wait for the follower
     * {@code copy-logs-20200101} to be auto-created, read the pattern back, then
     * delete the pattern and pause the auto-created follower.
     */
    public void testAutoFollowing() throws Exception {
        CcrClient ccrClient = highLevelClient().ccr();
        PutAutoFollowPatternRequest putAutoFollowPatternRequest =
            new PutAutoFollowPatternRequest("pattern1", "local_cluster", Collections.singletonList("logs-*"));
        putAutoFollowPatternRequest.setFollowIndexNamePattern("copy-{{leader_index}}");
        AcknowledgedResponse putAutoFollowPatternResponse =
            execute(putAutoFollowPatternRequest, ccrClient::putAutoFollowPattern, ccrClient::putAutoFollowPatternAsync);
        assertThat(putAutoFollowPatternResponse.isAcknowledged(), is(true));

        CreateIndexRequest createIndexRequest = new CreateIndexRequest("logs-20200101");
        createIndexRequest.settings(Collections.singletonMap("index.soft_deletes.enabled", true));
        CreateIndexResponse response = highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
        assertThat(response.isAcknowledged(), is(true));

        // Wait for the auto-follow coordinator to pick up the new leader index.
        assertBusy(() -> {
            CcrStatsRequest ccrStatsRequest = new CcrStatsRequest();
            CcrStatsResponse ccrStatsResponse =
                execute(ccrStatsRequest, ccrClient::getCcrStats, ccrClient::getCcrStatsAsync);
            assertThat(ccrStatsResponse.getAutoFollowStats().getNumberOfSuccessfulFollowIndices(), equalTo(1L));
            assertThat(ccrStatsResponse.getIndicesFollowStats().getShardFollowStats("copy-logs-20200101"),
                notNullValue());
        });
        assertThat(indexExists("copy-logs-20200101"), is(true));

        // Both the named and the get-all variants must return the stored pattern.
        GetAutoFollowPatternRequest getAutoFollowPatternRequest =
            randomBoolean() ? new GetAutoFollowPatternRequest("pattern1") : new GetAutoFollowPatternRequest();
        GetAutoFollowPatternResponse getAutoFollowPatternResponse =
            execute(getAutoFollowPatternRequest, ccrClient::getAutoFollowPattern, ccrClient::getAutoFollowPatternAsync);
        assertThat(getAutoFollowPatternResponse.getPatterns().size(), equalTo(1));
        GetAutoFollowPatternResponse.Pattern pattern = getAutoFollowPatternResponse.getPatterns().get("pattern1");
        assertThat(pattern, notNullValue());
        assertThat(pattern.getRemoteCluster(), equalTo(putAutoFollowPatternRequest.getRemoteCluster()));
        assertThat(pattern.getLeaderIndexPatterns(), equalTo(putAutoFollowPatternRequest.getLeaderIndexPatterns()));
        assertThat(pattern.getFollowIndexNamePattern(),
            equalTo(putAutoFollowPatternRequest.getFollowIndexNamePattern()));

        // Cleanup:
        final DeleteAutoFollowPatternRequest deleteAutoFollowPatternRequest =
            new DeleteAutoFollowPatternRequest("pattern1");
        AcknowledgedResponse deleteAutoFollowPatternResponse = execute(deleteAutoFollowPatternRequest,
            ccrClient::deleteAutoFollowPattern, ccrClient::deleteAutoFollowPatternAsync);
        assertThat(deleteAutoFollowPatternResponse.isAcknowledged(), is(true));

        PauseFollowRequest pauseFollowRequest = new PauseFollowRequest("copy-logs-20200101");
        AcknowledgedResponse pauseFollowResponse = ccrClient.pauseFollow(pauseFollowRequest, RequestOptions.DEFAULT);
        assertThat(pauseFollowResponse.isAcknowledged(), is(true));
    }

    /** Parses a low-level REST {@link Response} body as a JSON map. */
    private static Map<String, Object> toMap(Response response) throws IOException {
        return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
    }
}
package com.laytonsmith.persistence;

import com.laytonsmith.PureUtilities.Common.StringUtils;
import com.laytonsmith.PureUtilities.DaemonManager;
import com.laytonsmith.PureUtilities.Web.WebUtility;
import com.laytonsmith.annotations.datasource;
import com.laytonsmith.core.MSVersion;
import com.laytonsmith.persistence.io.ConnectionMixinFactory;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A persistence data source backed by a single MySQL table. Each key is stored twice:
 * once as an MD5 hash (the BINARY(16) primary key, because MySQL cannot put a primary
 * key on an unbounded TEXT column) and once as plaintext for readability and full-text
 * lookup. Values are stored as MEDIUMTEXT and may be null.
 */
@datasource("mysql")
public class MySQLDataSource extends SQLDataSource {

	/* These values may not be changed without creating an upgrade routine */
	private static final String KEY_HASH_COLUMN = "key_hash";

	private String host;
	private int port;
	private String username;
	private String password;
	private String database;
	private String table;
	// Extra query-string parameters from the URI, passed through verbatim to JDBC.
	private Map<String, String> extraParameters = new HashMap<>();

	private MySQLDataSource() {
		super();
	}

	/**
	 * Constructs a MySQL data source from a URI of the form
	 * {@code mysql://[user[:password]@]host[:port]/database/table?extraParameters},
	 * connects, and creates the backing table if it does not yet exist.
	 *
	 * @param uri the connection URI; the path must contain exactly a database and a table
	 * @param options the connection mixin options
	 * @throws DataSourceException if the JDBC driver is missing, the URI is malformed, or
	 * connecting/creating the table fails
	 */
	public MySQLDataSource(URI uri, ConnectionMixinFactory.ConnectionMixinOptions options) throws DataSourceException {
		super(uri, options);
		try {
			// Loading the driver class by literal reference makes the Connector/J
			// dependency a hard compile-time requirement of this data source.
			Class.forName(com.mysql.cj.jdbc.Driver.class.getName());
		} catch (ClassNotFoundException ex) {
			throw new DataSourceException("Could not instantiate a MySQL data source, no driver appears to exist.", ex);
		}
		host = uri.getHost();
		if(host == null) {
			throw new DataSourceException("Invalid URI specified for data source \"" + uri.toString() + "\"");
		}
		port = uri.getPort();
		if(port < 0) {
			// No port in the URI; fall back to the MySQL default.
			port = 3306;
		}
		if(uri.getUserInfo() != null) {
			String[] split = uri.getUserInfo().split(":");
			username = split[0];
			if(split.length > 1) {
				password = split[1];
			}
		}
		if(uri.getPath().split("/").length != 3 || !uri.getPath().startsWith("/")) {
			// Fixed: the example path in this message previously lacked its closing quote.
			throw new DataSourceException("Invalid path information for mysql connection \"" + uri.toString() + "\"."
					+ " Path requires a database name and a table name, for instance \"/testDatabase/tableName\"");
		} else {
			String[] split = uri.getPath().split("/");
			//First one should be empty
			database = split[1];
			table = split[2];
		}
		//Escape any quotes in the table name, because we can't use prepared statements here
		table = table.replace("`", "``");
		extraParameters.putAll(WebUtility.getQueryMap(uri.getQuery()));
		try {
			connect();
			//Create the table if it doesn't exist
			//The columns in the table
			try(Statement statement = getConnection().createStatement()) {
				statement.executeUpdate(getTableCreationQuery(table));
			}
		} catch (IOException | SQLException ex) {
			// Redact the password from both the URI and the connection string before
			// surfacing them in the error message.
			throw new DataSourceException("Could not connect to MySQL data source \""
					+ (password != null ? uri.toString().replace(password, "<password>") : uri.toString()) + "\""
					+ " (using \"" + (password != null ? getConnectionString().replace(password, "<password>") : getConnectionString())
					+ "\" to connect): " + ex.getMessage(), ex);
		}
	}

	/**
	 * Returns the table creation query that should be used to create the table specified. This is public for
	 * documentation, but is used internally.
	 *
	 * @param table the (already backtick-escaped) table name
	 * @return the CREATE TABLE IF NOT EXISTS statement for that table
	 */
	public final String getTableCreationQuery(String table) {
		return "CREATE TABLE IF NOT EXISTS `" + table + "` (\n"
				+ "  -- This is an UNHEX(MD5('key')) binary hash of the unlimited\n"
				+ "  -- length key column, so the table may have a primary key.\n"
				+ "  `" + KEY_HASH_COLUMN + "` BINARY(16) PRIMARY KEY NOT NULL,\n"
				+ "  -- This is the key itself, stored for plaintext readability,\n"
				+ "  -- and for full text searches for getting values\n"
				+ "  `" + getKeyColumn() + "` TEXT NOT NULL,\n"
				+ "  -- The value itself, which may be null\n"
				+ "  `" + getValueColumn() + "` MEDIUMTEXT\n"
				+ ")\n"
				+ "  -- The engine is InnoDB, to support transactions\n"
				+ "ENGINE = InnoDB,\n"
				+ "  -- The charset is utf8, since all keys are utf8, and values are utf8 json\n"
				+ "CHARACTER SET = utf8,\n"
				+ "  -- The collation is case sensitive\n"
				+ "COLLATE = utf8_bin,\n"
				+ "  -- Table comment\n"
				+ "COMMENT = 'MethodScript storage table'\n"
				+ ";";
	}

	/**
	 * Builds the JDBC connection string, URL-encoding the credentials and appending any
	 * extra URI parameters verbatim.
	 */
	@Override
	protected String getConnectionString() {
		try {
			String s = "jdbc:mysql://" + host + ":" + port + "/" + database
					+ "?generateSimpleParameterMetadata=true"
					+ "&jdbcCompliantTruncation=false"
					+ (username == null ? "" : "&user=" + URLEncoder.encode(username, "UTF-8"))
					+ (password == null ? "" : "&password=" + URLEncoder.encode(password, "UTF-8"));
			if(!extraParameters.isEmpty()) {
				s += "&" + WebUtility.encodeParameters(extraParameters);
			}
			return s;
		} catch (UnsupportedEncodingException ex) {
			// UTF-8 is guaranteed by the JVM spec; this cannot happen.
			throw new Error(ex);
		}
	}

	/**
	 * Looks up a single value by key, matching on the MD5 hash of the dot-joined key.
	 *
	 * @return the stored value, or null if the key does not exist
	 */
	@Override
	public String get0(String[] key) throws DataSourceException {
		try {
			connect();
			String ret;
			try(PreparedStatement statement = getConnection().prepareStatement("SELECT `" + getValueColumn() + "` FROM `"
					+ getEscapedTable() + "` WHERE `" + KEY_HASH_COLUMN + "`=UNHEX(MD5(?))" + " LIMIT 1")) {
				String joinedKey = StringUtils.Join(key, ".");
				statement.setString(1, joinedKey);
				ret = null;
				try(ResultSet result = statement.executeQuery()) {
					if(result.next()) {
						ret = result.getString(getValueColumn());
					}
				}
			}
			updateLastConnected();
			return ret;
		} catch (SQLException | IOException ex) {
			throw new DataSourceException(ex.getMessage(), ex);
		}
	}

	/**
	 * Upserts a key/value pair; a null value is treated as a delete. Uses REPLACE INTO
	 * keyed on the hash column, storing both the hash and the plaintext key.
	 *
	 * @return true always (on success)
	 */
	@Override
	public boolean set0(DaemonManager dm, String[] key, String value) throws ReadOnlyException, DataSourceException, IOException {
		try {
			connect();
			if(value == null) {
				clearKey0(dm, key);
			} else {
				try(PreparedStatement statement = getConnection().prepareStatement("REPLACE INTO"
						+ " `" + getEscapedTable() + "`"
						+ " (`" + KEY_HASH_COLUMN + "`, `" + getKeyColumn() + "`, `" + getValueColumn() + "`)"
						+ " VALUES (UNHEX(MD5(?)), ?, ?)")) {
					String joinedKey = StringUtils.Join(key, ".");
					statement.setString(1, joinedKey);
					statement.setString(2, joinedKey);
					statement.setString(3, value);
					statement.executeUpdate();
				}
			}
			updateLastConnected();
			return true;
		} catch (SQLException ex) {
			throw new DataSourceException(ex.getMessage(), ex);
		}
	}

	/**
	 * Deletes a key if it exists, matching on the MD5 hash of the dot-joined key.
	 */
	@Override
	protected void clearKey0(DaemonManager dm, String[] key) throws ReadOnlyException, DataSourceException, IOException {
		if(hasKey(key)) {
			try {
				connect();
				try(PreparedStatement statement = getConnection().prepareStatement("DELETE FROM `" + getEscapedTable() + "`"
						+ " WHERE `" + KEY_HASH_COLUMN + "`=UNHEX(MD5(?))")) {
					String joinedKey = StringUtils.Join(key, ".");
					statement.setString(1, joinedKey);
					statement.executeUpdate();
				}
				updateLastConnected();
			} catch (Exception e) {
				// NOTE(review): this catch is intentionally broad but wraps any runtime
				// failure as a DataSourceException, matching the sibling methods.
				throw new DataSourceException(e.getMessage(), e);
			}
		}
	}

	@Override
	public String docs() {
		// Fixed typo: "may provided" -> "may be provided".
		return "MySQL {mysql://[user[:password]@]host[:port]/database/table?extraParameters}"
				+ " This type stores data in a MySQL database. Unlike the"
				+ " file based systems, this is extremely efficient, but"
				+ " requires a database connection already set up to work."
				+ " This also always allows for simultaneous connections"
				+ " from multiple data sink/sources at once, which is not"
				+ " possible without the potential for corruption in file"
				+ " based data sources, without risking either data corruption,"
				+ " or extremely low efficiency. The layout of the table"
				+ " in the database is required to be of a specific format: <%SYNTAX|sql|"
				+ getTableCreationQuery("testTable")
				+ "%>\n\n"
				+ "Extra parameters may be provided to the MySQL connection, and they are"
				+ " merged with the existing required parameters and sent through as"
				+ " is to the server. They should be in the format \"a=1&b=2\".";
	}

	@Override
	public MSVersion since() {
		return MSVersion.V3_3_1;
	}

	/**
	 * Begins a transaction with an explicit START TRANSACTION statement. Errors are
	 * logged rather than propagated.
	 */
	@Override
	protected void startTransaction0(DaemonManager dm) {
		try {
			try(Statement statement = getConnection().createStatement()) {
				statement.execute("START TRANSACTION");
			}
		} catch (SQLException ex) {
			Logger.getLogger(MySQLDataSource.class.getName()).log(Level.SEVERE, null, ex);
		}
	}

	/**
	 * Ends the current transaction with COMMIT, or ROLLBACK when {@code rollback} is set.
	 * Errors are logged rather than propagated.
	 */
	@Override
	protected void stopTransaction0(DaemonManager dm, boolean rollback) throws DataSourceException, IOException {
		try {
			if(rollback) {
				try(PreparedStatement statement = getConnection().prepareStatement("ROLLBACK")) {
					statement.execute();
				}
			} else {
				try(PreparedStatement statement = getConnection().prepareStatement("COMMIT")) {
					statement.execute();
				}
			}
			updateLastConnected();
		} catch (SQLException ex) {
			Logger.getLogger(MySQLDataSource.class.getName()).log(Level.SEVERE, null, ex);
		}
	}

	@Override
	protected String getTable() {
		return table;
	}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.cache.request;

import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.ObjectSet;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.CacheLoader;
import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.MemorySizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.QueryPhase;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;

/**
 * The indices request cache allows to cache a shard level request stage responses, helping with improving
 * similar requests that are potentially expensive (because of aggs for example). The cache is fully coherent
 * with the semantics of NRT (the index reader version is part of the cache key), and relies on size based
 * eviction to evict old reader associated cache entries as well as scheduler reaper to clean readers that
 * are no longer used or closed shards.
 * <p>
 * Currently, the cache is only enabled for count requests, and can only be opted in on an index
 * level setting that can be dynamically changed and defaults to false.
 * <p>
 * There are still several TODOs left in this class, some easily addressable, some more complex, but the support
 * is functional.
 */
public class IndicesRequestCache extends AbstractComponent implements RemovalListener<IndicesRequestCache.Key, IndicesRequestCache.Value> {

    /**
     * A setting to enable or disable request caching on an index level. Its dynamic by default
     * since we are checking on the cluster state IndexMetaData always.
     */
    public static final String INDEX_CACHE_REQUEST_ENABLED = "index.requests.cache.enable";
    public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval";

    public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size";
    public static final String INDICES_CACHE_QUERY_EXPIRE = "indices.requests.cache.expire";

    // Only these search types produce results that are safe to cache (DFS types depend on
    // cross-shard statistics; see canCache below).
    private static final Set<SearchType> CACHEABLE_SEARCH_TYPES = EnumSet.of(SearchType.QUERY_THEN_FETCH, SearchType.QUERY_AND_FETCH);

    private final ThreadPool threadPool;
    private final ClusterService clusterService;

    private final TimeValue cleanInterval;
    private final Reaper reaper;

    // Tracks which (shard, reader version) pairs already have a reader-close listener registered.
    final ConcurrentMap<CleanupKey, Boolean> registeredClosedListeners = ConcurrentCollections.newConcurrentMap();
    // Keys queued for cleanup; drained by the Reaper on its schedule.
    final Set<CleanupKey> keysToClean = ConcurrentCollections.newConcurrentSet();

    //TODO make these changes configurable on the cluster level
    private final String size;
    private final TimeValue expire;

    private volatile Cache<Key, Value> cache;

    @Inject
    public IndicesRequestCache(Settings settings, ClusterService clusterService, ThreadPool threadPool) {
        super(settings);
        this.clusterService = clusterService;
        this.threadPool = threadPool;
        this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60));

        this.size = settings.get(INDICES_CACHE_QUERY_SIZE, "1%");
        this.expire = settings.getAsTime(INDICES_CACHE_QUERY_EXPIRE, null);
        buildCache();

        this.reaper = new Reaper();
        threadPool.schedule(cleanInterval, ThreadPool.Names.SAME, reaper);
    }

    // Reads the per-index enable flag (the dynamic index-level opt-in).
    private boolean isCacheEnabled(Settings settings, boolean defaultEnable) {
        return settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, defaultEnable);
    }

    // (Re)builds the backing cache from the configured size/expiry; weight is the sum of
    // key and value RAM usage, and this instance is the removal listener.
    private void buildCache() {
        long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size, INDICES_CACHE_QUERY_SIZE).bytes();

        CacheBuilder<Key, Value> cacheBuilder = CacheBuilder.<Key, Value>builder()
                .setMaximumWeight(sizeInBytes).weigher((k, v) -> k.ramBytesUsed() + v.ramBytesUsed()).removalListener(this);
        // cacheBuilder.concurrencyLevel(concurrencyLevel);
        if (expire != null) {
            cacheBuilder.setExpireAfterAccess(TimeUnit.MILLISECONDS.toNanos(expire.millis()));
        }
        cache = cacheBuilder.build();
    }

    // Stops the reaper and drops every cached entry.
    public void close() {
        reaper.close();
        cache.invalidateAll();
    }

    /**
     * Queues a full cleanup for the given shard (readerVersion -1 means "everything for
     * this shard") and reaps immediately instead of waiting for the next scheduled run.
     */
    public void clear(IndexShard shard) {
        if (shard == null) {
            return;
        }
        keysToClean.add(new CleanupKey(shard, -1));
        logger.trace("{} explicit cache clear", shard.shardId());
        reaper.reap();
    }

    // Forwards evictions to the owning shard's request-cache stats.
    @Override
    public void onRemoval(RemovalNotification<Key, Value> notification) {
        notification.getKey().shard.requestCache().onRemoval(notification);
    }

    /**
     * Can the shard request be cached at all?
     */
    public boolean canCache(ShardSearchRequest request, SearchContext context) {
        if (request.template() != null) {
            return false;
        }

        // for now, only enable it for requests with no hits
        if (context.size() != 0) {
            return false;
        }

        // We cannot cache with DFS because results depend not only on the content of the index but also
        // on the overridden statistics. So if you ran two queries on the same index with different stats
        // (because an other shard was updated) you would get wrong results because of the scores
        // (think about top_hits aggs or scripts using the score)
        if (!CACHEABLE_SEARCH_TYPES.contains(context.searchType())) {
            return false;
        }

        IndexMetaData index = clusterService.state().getMetaData().index(request.index());
        if (index == null) { // in case we didn't yet have the cluster state, or it just got deleted
            return false;
        }
        // if not explicitly set in the request, use the index setting, if not, use the request
        if (request.requestCache() == null) {
            if (!isCacheEnabled(index.getSettings(), Boolean.FALSE)) {
                return false;
            }
        } else if (!request.requestCache()) {
            return false;
        }
        // if the reader is not a directory reader, we can't get the version from it
        if (!(context.searcher().getIndexReader() instanceof DirectoryReader)) {
            return false;
        }
        // if now in millis is used (or in the future, a more generic "isDeterministic" flag
        // then we can't cache based on "now" key within the search request, as it is not deterministic
        if (context.nowInMillisUsed()) {
            return false;
        }
        return true;
    }

    /**
     * Loads the cache result, computing it if needed by executing the query phase and otherwise deserializing the cached
     * value into the {@link SearchContext#queryResult() context's query result}. The combination of load + compute allows
     * to have a single load operation that will cause other requests with the same key to wait till its loaded an reuse
     * the same cache.
     */
    public void loadIntoContext(final ShardSearchRequest request, final SearchContext context, final QueryPhase queryPhase) throws Exception {
        assert canCache(request, context);
        Key key = buildKey(request, context);
        Loader loader = new Loader(queryPhase, context);
        Value value = cache.computeIfAbsent(key, loader);
        if (loader.isLoaded()) {
            key.shard.requestCache().onMiss();
            // see if its the first time we see this reader, and make sure to register a cleanup key
            // (putIfAbsent guarantees only the winning thread registers the close listener)
            CleanupKey cleanupKey = new CleanupKey(context.indexShard(), ((DirectoryReader) context.searcher().getIndexReader()).getVersion());
            if (!registeredClosedListeners.containsKey(cleanupKey)) {
                Boolean previous = registeredClosedListeners.putIfAbsent(cleanupKey, Boolean.TRUE);
                if (previous == null) {
                    ElasticsearchDirectoryReader.addReaderCloseListener(context.searcher().getDirectoryReader(), cleanupKey);
                }
            }
        } else {
            key.shard.requestCache().onHit();
            // restore the cached query result into the context
            final QuerySearchResult result = context.queryResult();
            result.readFromWithId(context.id(), value.reference.streamInput());
            result.shardTarget(context.shardTarget());
        }
    }

    // Cache loader that runs the query phase and serializes the result; isLoaded()
    // distinguishes a miss (we computed the value) from a hit (someone else did).
    private static class Loader implements CacheLoader<Key, Value> {

        private final QueryPhase queryPhase;
        private final SearchContext context;
        private boolean loaded;

        Loader(QueryPhase queryPhase, SearchContext context) {
            this.queryPhase = queryPhase;
            this.context = context;
        }

        public boolean isLoaded() {
            return this.loaded;
        }

        @Override
        public Value load(Key key) throws Exception {
            queryPhase.execute(context);

            /* BytesStreamOutput allows to pass the expected size but by default uses
             * BigArrays.PAGE_SIZE_IN_BYTES which is 16k. A common cached result ie.
             * a date histogram with 3 buckets is ~100byte so 16k might be very wasteful
             * since we don't shrink to the actual size once we are done serializing.
             * By passing 512 as the expected size we will resize the byte array in the stream
             * slowly until we hit the page size and don't waste too much memory for small query
             * results.*/
            final int expectedSizeInBytes = 512;
            try (BytesStreamOutput out = new BytesStreamOutput(expectedSizeInBytes)) {
                context.queryResult().writeToNoId(out);
                // for now, keep the paged data structure, which might have unused bytes to fill a page, but better to keep
                // the memory properly paged instead of having varied sized bytes
                final BytesReference reference = out.bytes();
                loaded = true;
                Value value = new Value(reference, out.ramBytesUsed());
                key.shard.requestCache().onCached(key, value);
                return value;
            }
        }
    }

    // A cached serialized query result plus its accounted RAM footprint.
    public static class Value implements Accountable {
        final BytesReference reference;
        final long ramBytesUsed;

        public Value(BytesReference reference, long ramBytesUsed) {
            this.reference = reference;
            this.ramBytesUsed = ramBytesUsed;
        }

        @Override
        public long ramBytesUsed() {
            return ramBytesUsed;
        }

        @Override
        public Collection<Accountable> getChildResources() {
            return Collections.emptyList();
        }
    }

    // Cache key: (shard identity, reader version, serialized request cache key).
    public static class Key implements Accountable {
        public final IndexShard shard; // use as identity equality
        public final long readerVersion; // use the reader version to now keep a reference to a "short" lived reader until its reaped
        public final BytesReference value;

        Key(IndexShard shard, long readerVersion, BytesReference value) {
            this.shard = shard;
            this.readerVersion = readerVersion;
            this.value = value;
        }

        @Override
        public long ramBytesUsed() {
            return RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG + value.length();
        }

        @Override
        public Collection<Accountable> getChildResources() {
            // TODO: more detailed ram usage?
            return Collections.emptyList();
        }

        // NOTE(review): no null/instanceof check before the cast — safe only while Key is
        // compared exclusively against other Keys inside this cache; verify before wider use.
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            Key key = (Key) o;
            if (readerVersion != key.readerVersion) return false;
            if (!shard.equals(key.shard)) return false;
            if (!value.equals(key.value)) return false;
            return true;
        }

        @Override
        public int hashCode() {
            int result = shard.hashCode();
            result = 31 * result + Long.hashCode(readerVersion);
            result = 31 * result + value.hashCode();
            return result;
        }
    }

    // Reader-close listener; when its reader closes, queues the (shard, version) pair for reaping.
    private class CleanupKey implements IndexReader.ReaderClosedListener {
        IndexShard indexShard;
        long readerVersion; // use the reader version to now keep a reference to a "short" lived reader until its reaped

        private CleanupKey(IndexShard indexShard, long readerVersion) {
            this.indexShard = indexShard;
            this.readerVersion = readerVersion;
        }

        @Override
        public void onClose(IndexReader reader) {
            Boolean remove = registeredClosedListeners.remove(this);
            if (remove != null) {
                keysToClean.add(this);
            }
        }

        // NOTE(review): like Key.equals, assumes o is a non-null CleanupKey.
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            CleanupKey that = (CleanupKey) o;
            if (readerVersion != that.readerVersion) return false;
            if (!indexShard.equals(that.indexShard)) return false;
            return true;
        }

        @Override
        public int hashCode() {
            int result = indexShard.hashCode();
            result = 31 * result + Long.hashCode(readerVersion);
            return result;
        }
    }

    // Periodic background task that drains keysToClean and evicts matching cache entries.
    private class Reaper implements Runnable {

        private final ObjectSet<CleanupKey> currentKeysToClean = new ObjectHashSet<>();
        private final ObjectSet<IndexShard> currentFullClean = new ObjectHashSet<>();

        private volatile boolean closed;

        void close() {
            closed = true;
        }

        @Override
        public void run() {
            if (closed) {
                return;
            }
            if (keysToClean.isEmpty()) {
                // Nothing to do; just reschedule.
                schedule();
                return;
            }
            try {
                // Run the actual reap on the generic pool so the scheduler thread is not blocked.
                threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() {
                    @Override
                    public void run() {
                        reap();
                        schedule();
                    }
                });
            } catch (EsRejectedExecutionException ex) {
                logger.debug("Can not run ReaderCleaner - execution rejected", ex);
            }
        }

        private void schedule() {
            try {
                threadPool.schedule(cleanInterval, ThreadPool.Names.SAME, this);
            } catch (EsRejectedExecutionException ex) {
                logger.debug("Can not schedule ReaderCleaner - execution rejected", ex);
            }
        }

        // Drains keysToClean into per-shard full cleans (version -1 or closed shard) and
        // targeted (shard, version) cleans, then removes matching cache entries.
        synchronized void reap() {
            currentKeysToClean.clear();
            currentFullClean.clear();
            for (Iterator<CleanupKey> iterator = keysToClean.iterator(); iterator.hasNext(); ) {
                CleanupKey cleanupKey = iterator.next();
                iterator.remove();
                if (cleanupKey.readerVersion == -1 || cleanupKey.indexShard.state() == IndexShardState.CLOSED) {
                    // -1 indicates full cleanup, as does a closed shard
                    currentFullClean.add(cleanupKey.indexShard);
                } else {
                    currentKeysToClean.add(cleanupKey);
                }
            }
            if (!currentKeysToClean.isEmpty() || !currentFullClean.isEmpty()) {
                // Reuse one mutable lookup key to avoid allocating per cache entry.
                CleanupKey lookupKey = new CleanupKey(null, -1);
                for (Iterator<Key> iterator = cache.keys().iterator(); iterator.hasNext(); ) {
                    Key key = iterator.next();
                    if (currentFullClean.contains(key.shard)) {
                        iterator.remove();
                    } else {
                        lookupKey.indexShard = key.shard;
                        lookupKey.readerVersion = key.readerVersion;
                        if (currentKeysToClean.contains(lookupKey)) {
                            iterator.remove();
                        }
                    }
                }
            }

            cache.refresh();
            currentKeysToClean.clear();
            currentFullClean.clear();
        }
    }

    private static Key buildKey(ShardSearchRequest request, SearchContext context) throws Exception {
        // TODO: for now, this will create different keys for different JSON order
        // TODO: tricky to get around this, need to parse and order all, which can be expensive
        return new Key(context.indexShard(), ((DirectoryReader) context.searcher().getIndexReader()).getVersion(), request.cacheKey());
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import 
org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueState; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger; import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger.AuditConstants; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeCleanContainerEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler; import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt.ContainersAndNMTokensAllocation; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ContainerExpiredSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import 
org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.utils.Lock;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;

import com.google.common.annotations.VisibleForTesting;

@LimitedPrivate("yarn")
@Evolving
@SuppressWarnings("unchecked")
public class FifoScheduler extends AbstractYarnScheduler implements Configurable {

  private static final Log LOG = LogFactory.getLog(FifoScheduler.class);

  private static final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);

  Configuration conf;

  // Live node map; concurrent because node updates arrive from dispatcher threads.
  protected Map<NodeId, FiCaSchedulerNode> nodes =
      new ConcurrentHashMap<NodeId, FiCaSchedulerNode>();

  private boolean initialized;
  private Resource minimumAllocation;
  private Resource maximumAllocation;
  private boolean usePortForNodeName;

  private ActiveUsersManager activeUsersManager;

  // FIFO exposes exactly one queue.
  private static final String DEFAULT_QUEUE_NAME = "default";
  private QueueMetrics metrics;

  private final ResourceCalculator resourceCalculator = new DefaultResourceCalculator();

  // The single queue this scheduler reports; all ACLs are wide open ("*").
  private final Queue DEFAULT_QUEUE = new Queue() {
    @Override
    public String getQueueName() {
      return DEFAULT_QUEUE_NAME;
    }

    @Override
    public QueueMetrics getMetrics() {
      return metrics;
    }

    @Override
    public QueueInfo getQueueInfo(
        boolean includeChildQueues, boolean recursive) {
      QueueInfo queueInfo = recordFactory.newRecordInstance(QueueInfo.class);
      queueInfo.setQueueName(DEFAULT_QUEUE.getQueueName());
      queueInfo.setCapacity(1.0f);
      // Current capacity is used/cluster memory; guard the divide-by-zero on an empty cluster.
      if (clusterResource.getMemory() == 0) {
        queueInfo.setCurrentCapacity(0.0f);
      } else {
        queueInfo.setCurrentCapacity((float) usedResource.getMemory()
            / clusterResource.getMemory());
      }
      queueInfo.setMaximumCapacity(1.0f);
      queueInfo.setChildQueues(new ArrayList<QueueInfo>());
      queueInfo.setQueueState(QueueState.RUNNING);
      return queueInfo;
    }

    // Every ACL grants everyone ("*") — FIFO does no per-user queue authorization.
    public Map<QueueACL, AccessControlList> getQueueAcls() {
      Map<QueueACL, AccessControlList>
null : app.getResourceUsageReport(); } private FiCaSchedulerNode getNode(NodeId nodeId) { return nodes.get(nodeId); } @VisibleForTesting public synchronized void addApplication(ApplicationId applicationId, String queue, String user) { SchedulerApplication application = new SchedulerApplication(DEFAULT_QUEUE, user); applications.put(applicationId, application); metrics.submitApp(user); LOG.info("Accepted application " + applicationId + " from user: " + user + ", currently num of applications: " + applications.size()); rmContext.getDispatcher().getEventHandler() .handle(new RMAppEvent(applicationId, RMAppEventType.APP_ACCEPTED)); } @VisibleForTesting public synchronized void addApplicationAttempt(ApplicationAttemptId appAttemptId, boolean transferStateFromPreviousAttempt) { SchedulerApplication application = applications.get(appAttemptId.getApplicationId()); String user = application.getUser(); // TODO: Fix store FiCaSchedulerApp schedulerApp = new FiCaSchedulerApp(appAttemptId, user, DEFAULT_QUEUE, activeUsersManager, this.rmContext); if (transferStateFromPreviousAttempt) { schedulerApp.transferStateFromPreviousAttempt(application .getCurrentAppAttempt()); } application.setCurrentAppAttempt(schedulerApp); metrics.submitAppAttempt(user); LOG.info("Added Application Attempt " + appAttemptId + " to scheduler from user " + application.getUser()); rmContext.getDispatcher().getEventHandler().handle( new RMAppAttemptEvent(appAttemptId, RMAppAttemptEventType.ATTEMPT_ADDED)); } private synchronized void doneApplication(ApplicationId applicationId, RMAppState finalState) { SchedulerApplication application = applications.get(applicationId); if (application == null){ LOG.warn("Couldn't find application " + applicationId); return; } // Inform the activeUsersManager activeUsersManager.deactivateApplication(application.getUser(), applicationId); application.stop(finalState); applications.remove(applicationId); } private synchronized void doneApplicationAttempt( 
ApplicationAttemptId applicationAttemptId, RMAppAttemptState rmAppAttemptFinalState, boolean keepContainers) throws IOException { FiCaSchedulerApp attempt = getApplicationAttempt(applicationAttemptId); SchedulerApplication application = applications.get(applicationAttemptId.getApplicationId()); if (application == null || attempt == null) { throw new IOException("Unknown application " + applicationAttemptId + " has completed!"); } // Kill all 'live' containers for (RMContainer container : attempt.getLiveContainers()) { if (keepContainers && container.getState().equals(RMContainerState.RUNNING)) { // do not kill the running container in the case of work-preserving AM // restart. LOG.info("Skip killing " + container.getContainerId()); continue; } containerCompleted(container, SchedulerUtils.createAbnormalContainerStatus( container.getContainerId(), SchedulerUtils.COMPLETED_APPLICATION), RMContainerEventType.KILL); } // Clean up pending requests, metrics etc. attempt.stop(rmAppAttemptFinalState); } /** * Heart of the scheduler... 
* * @param node node on which resources are available to be allocated */ private void assignContainers(FiCaSchedulerNode node) { LOG.debug("assignContainers:" + " node=" + node.getRMNode().getNodeAddress() + " #applications=" + applications.size()); // Try to assign containers to applications in fifo order for (Map.Entry<ApplicationId, SchedulerApplication> e : applications .entrySet()) { FiCaSchedulerApp application = (FiCaSchedulerApp) e.getValue().getCurrentAppAttempt(); if (application == null) { continue; } LOG.debug("pre-assignContainers"); application.showRequests(); synchronized (application) { // Check if this resource is on the blacklist if (SchedulerAppUtils.isBlacklisted(application, node, LOG)) { continue; } for (Priority priority : application.getPriorities()) { int maxContainers = getMaxAllocatableContainers(application, priority, node, NodeType.OFF_SWITCH); // Ensure the application needs containers of this priority if (maxContainers > 0) { int assignedContainers = assignContainersOnNode(node, application, priority); // Do not assign out of order w.r.t priorities if (assignedContainers == 0) { break; } } } } LOG.debug("post-assignContainers"); application.showRequests(); // Done if (Resources.lessThan(resourceCalculator, clusterResource, node.getAvailableResource(), minimumAllocation)) { break; } } // Update the applications' headroom to correctly take into // account the containers assigned in this update. 
for (SchedulerApplication application : applications.values()) { FiCaSchedulerApp attempt = (FiCaSchedulerApp) application.getCurrentAppAttempt(); if (attempt == null) { continue; } attempt.setHeadroom(Resources.subtract(clusterResource, usedResource)); } } private int getMaxAllocatableContainers(FiCaSchedulerApp application, Priority priority, FiCaSchedulerNode node, NodeType type) { int maxContainers = 0; ResourceRequest offSwitchRequest = application.getResourceRequest(priority, ResourceRequest.ANY); if (offSwitchRequest != null) { maxContainers = offSwitchRequest.getNumContainers(); } if (type == NodeType.OFF_SWITCH) { return maxContainers; } if (type == NodeType.RACK_LOCAL) { ResourceRequest rackLocalRequest = application.getResourceRequest(priority, node.getRMNode().getRackName()); if (rackLocalRequest == null) { return maxContainers; } maxContainers = Math.min(maxContainers, rackLocalRequest.getNumContainers()); } if (type == NodeType.NODE_LOCAL) { ResourceRequest nodeLocalRequest = application.getResourceRequest(priority, node.getRMNode().getNodeAddress()); if (nodeLocalRequest != null) { maxContainers = Math.min(maxContainers, nodeLocalRequest.getNumContainers()); } } return maxContainers; } private int assignContainersOnNode(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority ) { // Data-local int nodeLocalContainers = assignNodeLocalContainers(node, application, priority); // Rack-local int rackLocalContainers = assignRackLocalContainers(node, application, priority); // Off-switch int offSwitchContainers = assignOffSwitchContainers(node, application, priority); LOG.debug("assignContainersOnNode:" + " node=" + node.getRMNode().getNodeAddress() + " application=" + application.getApplicationId().getId() + " priority=" + priority.getPriority() + " #assigned=" + (nodeLocalContainers + rackLocalContainers + offSwitchContainers)); return (nodeLocalContainers + rackLocalContainers + offSwitchContainers); } private int 
assignNodeLocalContainers(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority) { int assignedContainers = 0; ResourceRequest request = application.getResourceRequest(priority, node.getNodeName()); if (request != null) { // Don't allocate on this node if we don't need containers on this rack ResourceRequest rackRequest = application.getResourceRequest(priority, node.getRMNode().getRackName()); if (rackRequest == null || rackRequest.getNumContainers() <= 0) { return 0; } int assignableContainers = Math.min( getMaxAllocatableContainers(application, priority, node, NodeType.NODE_LOCAL), request.getNumContainers()); assignedContainers = assignContainer(node, application, priority, assignableContainers, request, NodeType.NODE_LOCAL); } return assignedContainers; } private int assignRackLocalContainers(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority) { int assignedContainers = 0; ResourceRequest request = application.getResourceRequest(priority, node.getRMNode().getRackName()); if (request != null) { // Don't allocate on this rack if the application doens't need containers ResourceRequest offSwitchRequest = application.getResourceRequest(priority, ResourceRequest.ANY); if (offSwitchRequest.getNumContainers() <= 0) { return 0; } int assignableContainers = Math.min( getMaxAllocatableContainers(application, priority, node, NodeType.RACK_LOCAL), request.getNumContainers()); assignedContainers = assignContainer(node, application, priority, assignableContainers, request, NodeType.RACK_LOCAL); } return assignedContainers; } private int assignOffSwitchContainers(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority) { int assignedContainers = 0; ResourceRequest request = application.getResourceRequest(priority, ResourceRequest.ANY); if (request != null) { assignedContainers = assignContainer(node, application, priority, request.getNumContainers(), request, NodeType.OFF_SWITCH); } return assignedContainers; } private 
int assignContainer(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority, int assignableContainers, ResourceRequest request, NodeType type) { LOG.debug("assignContainers:" + " node=" + node.getRMNode().getNodeAddress() + " application=" + application.getApplicationId().getId() + " priority=" + priority.getPriority() + " assignableContainers=" + assignableContainers + " request=" + request + " type=" + type); Resource capability = request.getCapability(); int availableContainers = node.getAvailableResource().getMemory() / capability.getMemory(); // TODO: A buggy // application // with this // zero would // crash the // scheduler. int assignedContainers = Math.min(assignableContainers, availableContainers); if (assignedContainers > 0) { for (int i=0; i < assignedContainers; ++i) { NodeId nodeId = node.getRMNode().getNodeID(); ContainerId containerId = BuilderUtils.newContainerId(application .getApplicationAttemptId(), application.getNewContainerId()); // Create the container Container container = BuilderUtils.newContainer(containerId, nodeId, node.getRMNode() .getHttpAddress(), capability, priority, null); // Allocate! 
// Inform the application RMContainer rmContainer = application.allocate(type, node, priority, request, container); // Inform the node node.allocateContainer(application.getApplicationId(), rmContainer); // Update usage for this container Resources.addTo(usedResource, capability); } } return assignedContainers; } private synchronized void nodeUpdate(RMNode rmNode) { FiCaSchedulerNode node = getNode(rmNode.getNodeID()); // Update resource if any change SchedulerUtils.updateResourceIfChanged(node, rmNode, clusterResource, LOG); List<UpdatedContainerInfo> containerInfoList = rmNode.pullContainerUpdates(); List<ContainerStatus> newlyLaunchedContainers = new ArrayList<ContainerStatus>(); List<ContainerStatus> completedContainers = new ArrayList<ContainerStatus>(); for(UpdatedContainerInfo containerInfo : containerInfoList) { newlyLaunchedContainers.addAll(containerInfo.getNewlyLaunchedContainers()); completedContainers.addAll(containerInfo.getCompletedContainers()); } // Processing the newly launched containers for (ContainerStatus launchedContainer : newlyLaunchedContainers) { containerLaunchedOnNode(launchedContainer.getContainerId(), node); } // Process completed containers for (ContainerStatus completedContainer : completedContainers) { ContainerId containerId = completedContainer.getContainerId(); LOG.debug("Container FINISHED: " + containerId); containerCompleted(getRMContainer(containerId), completedContainer, RMContainerEventType.FINISHED); } if (Resources.greaterThanOrEqual(resourceCalculator, clusterResource, node.getAvailableResource(),minimumAllocation)) { LOG.debug("Node heartbeat " + rmNode.getNodeID() + " available resource = " + node.getAvailableResource()); assignContainers(node); LOG.debug("Node after allocation " + rmNode.getNodeID() + " resource = " + node.getAvailableResource()); } metrics.setAvailableResourcesToQueue( Resources.subtract(clusterResource, usedResource)); } @Override public void handle(SchedulerEvent event) { switch(event.getType()) { 
case NODE_ADDED: { NodeAddedSchedulerEvent nodeAddedEvent = (NodeAddedSchedulerEvent)event; addNode(nodeAddedEvent.getAddedRMNode()); } break; case NODE_REMOVED: { NodeRemovedSchedulerEvent nodeRemovedEvent = (NodeRemovedSchedulerEvent)event; removeNode(nodeRemovedEvent.getRemovedRMNode()); } break; case NODE_UPDATE: { NodeUpdateSchedulerEvent nodeUpdatedEvent = (NodeUpdateSchedulerEvent)event; nodeUpdate(nodeUpdatedEvent.getRMNode()); } break; case APP_ADDED: { AppAddedSchedulerEvent appAddedEvent = (AppAddedSchedulerEvent) event; addApplication(appAddedEvent.getApplicationId(), appAddedEvent.getQueue(), appAddedEvent.getUser()); } break; case APP_REMOVED: { AppRemovedSchedulerEvent appRemovedEvent = (AppRemovedSchedulerEvent)event; doneApplication(appRemovedEvent.getApplicationID(), appRemovedEvent.getFinalState()); } break; case APP_ATTEMPT_ADDED: { AppAttemptAddedSchedulerEvent appAttemptAddedEvent = (AppAttemptAddedSchedulerEvent) event; addApplicationAttempt(appAttemptAddedEvent.getApplicationAttemptId(), appAttemptAddedEvent.getTransferStateFromPreviousAttempt()); } break; case APP_ATTEMPT_REMOVED: { AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent = (AppAttemptRemovedSchedulerEvent) event; try { doneApplicationAttempt( appAttemptRemovedEvent.getApplicationAttemptID(), appAttemptRemovedEvent.getFinalAttemptState(), appAttemptRemovedEvent.getKeepContainersAcrossAppAttempts()); } catch(IOException ie) { LOG.error("Unable to remove application " + appAttemptRemovedEvent.getApplicationAttemptID(), ie); } } break; case CONTAINER_EXPIRED: { ContainerExpiredSchedulerEvent containerExpiredEvent = (ContainerExpiredSchedulerEvent) event; ContainerId containerid = containerExpiredEvent.getContainerId(); containerCompleted(getRMContainer(containerid), SchedulerUtils.createAbnormalContainerStatus( containerid, SchedulerUtils.EXPIRED_CONTAINER), RMContainerEventType.EXPIRE); } break; default: LOG.error("Invalid eventtype " + event.getType() + ". 
Ignoring!"); } } private void containerLaunchedOnNode(ContainerId containerId, FiCaSchedulerNode node) { // Get the application for the finished container FiCaSchedulerApp application = getCurrentAttemptForContainer(containerId); if (application == null) { LOG.info("Unknown application " + containerId.getApplicationAttemptId().getApplicationId() + " launched container " + containerId + " on node: " + node); // Some unknown container sneaked into the system. Kill it. this.rmContext.getDispatcher().getEventHandler() .handle(new RMNodeCleanContainerEvent(node.getNodeID(), containerId)); return; } application.containerLaunchedOnNode(containerId, node.getNodeID()); } @Lock(FifoScheduler.class) private synchronized void containerCompleted(RMContainer rmContainer, ContainerStatus containerStatus, RMContainerEventType event) { if (rmContainer == null) { LOG.info("Null container completed..."); return; } // Get the application for the finished container Container container = rmContainer.getContainer(); FiCaSchedulerApp application = getCurrentAttemptForContainer(container.getId()); ApplicationId appId = container.getId().getApplicationAttemptId().getApplicationId(); // Get the node on which the container was allocated FiCaSchedulerNode node = getNode(container.getNodeId()); if (application == null) { LOG.info("Unknown application: " + appId + " released container " + container.getId() + " on node: " + node + " with event: " + event); return; } // Inform the application application.containerCompleted(rmContainer, containerStatus, event); // Inform the node node.releaseContainer(container); // Update total usage Resources.subtractFrom(usedResource, container.getResource()); LOG.info("Application attempt " + application.getApplicationAttemptId() + " released container " + container.getId() + " on node: " + node + " with event: " + event); } private Resource clusterResource = recordFactory.newRecordInstance(Resource.class); private Resource usedResource = 
recordFactory.newRecordInstance(Resource.class); private synchronized void removeNode(RMNode nodeInfo) { FiCaSchedulerNode node = getNode(nodeInfo.getNodeID()); if (node == null) { return; } // Kill running containers for(RMContainer container : node.getRunningContainers()) { containerCompleted(container, SchedulerUtils.createAbnormalContainerStatus( container.getContainerId(), SchedulerUtils.LOST_CONTAINER), RMContainerEventType.KILL); } //Remove the node this.nodes.remove(nodeInfo.getNodeID()); // Update cluster metrics Resources.subtractFrom(clusterResource, node.getRMNode().getTotalCapability()); } @Override public QueueInfo getQueueInfo(String queueName, boolean includeChildQueues, boolean recursive) { return DEFAULT_QUEUE.getQueueInfo(false, false); } @Override public List<QueueUserACLInfo> getQueueUserAclInfo() { return DEFAULT_QUEUE.getQueueUserAclInfo(null); } private synchronized void addNode(RMNode nodeManager) { this.nodes.put(nodeManager.getNodeID(), new FiCaSchedulerNode(nodeManager, usePortForNodeName)); Resources.addTo(clusterResource, nodeManager.getTotalCapability()); } @Override public void recover(RMState state) { // NOT IMPLEMENTED } @Override public synchronized SchedulerNodeReport getNodeReport(NodeId nodeId) { FiCaSchedulerNode node = getNode(nodeId); return node == null ? null : new SchedulerNodeReport(node); } @Override public RMContainer getRMContainer(ContainerId containerId) { FiCaSchedulerApp attempt = getCurrentAttemptForContainer(containerId); return (attempt == null) ? 
null : attempt.getRMContainer(containerId); } private FiCaSchedulerApp getCurrentAttemptForContainer( ContainerId containerId) { SchedulerApplication app = applications.get(containerId.getApplicationAttemptId() .getApplicationId()); if (app != null) { return (FiCaSchedulerApp) app.getCurrentAppAttempt(); } return null; } @Override public QueueMetrics getRootQueueMetrics() { return DEFAULT_QUEUE.getMetrics(); } @Override public synchronized boolean checkAccess(UserGroupInformation callerUGI, QueueACL acl, String queueName) { return DEFAULT_QUEUE.hasAccess(acl, callerUGI); } @Override public synchronized List<ApplicationAttemptId> getAppsInQueue(String queueName) { if (queueName.equals(DEFAULT_QUEUE.getQueueName())) { List<ApplicationAttemptId> attempts = new ArrayList<ApplicationAttemptId>( applications.size()); for (SchedulerApplication app : applications.values()) { attempts.add(app.getCurrentAppAttempt().getApplicationAttemptId()); } return attempts; } else { return null; } } }
package org.frozenarc.zeframework.tag; import org.frozenarc.zeframework.datatypeconvert.DataTypeConvertor; import org.frozenarc.zeframework.util.FormatEvaluator; import org.frozenarc.zeframework.util.FormatInfo; import org.frozenarc.zeframework.util.ValueUtil; import org.frozenarc.zeframework.util.ViewValueUtil; import javax.servlet.jsp.JspException; import javax.servlet.jsp.JspWriter; import org.frozenarc.zeframework.applicationexception.ApplicationException; /** * Super classs for input tags. * @author Manan */ public abstract class InputTag extends BasicTag { protected String name; protected String value; protected String readonly; protected String disabled; protected String accesskey; protected String tabindex; protected String onblur; protected String onchange; protected String onfocus; protected String onselect; protected String onsubmit; protected String onload; protected String onunload; protected String valuefrom; protected String valueto; public abstract String getInputType(); @Override public int doEndTag() throws JspException { return super.doEndTag(); } @Override public int doStartTag() throws JspException { try { JspWriter out = pageContext.getOut(); StringBuilder builder = new StringBuilder(); builder.append("<input type=\""+getInputType()+"\" "); addAtributes(builder); handleValueFrom(builder); builder.append(" />"); handleValueTo(builder); out.print(builder.toString()); } catch(ApplicationException ex) { throw new JspException(ex.getMessage(), ex); } catch (Exception ex) { throw new JspException(ex); } return super.doStartTag(); } public void handleValueFrom(StringBuilder builder) throws Exception { if (valuefrom != null) { Object objFromValue = ViewValueUtil.handleValueFrom(pageContext, valuefrom); FormatInfo info = FormatEvaluator.getFormatInfo(valuefrom); String fromvalue = DataTypeConvertor.convertObject(objFromValue, info); builder.append(" value=\"" + fromvalue + "\""); } } public void handleValueTo(StringBuilder builder) throws 
ApplicationException { if(valueto!=null) { ViewValueUtil.handleValueTo(pageContext, builder, name, valueto, false); } } public String handleValue(String value) throws ApplicationException { Object objValue = ValueUtil.handleValue(pageContext, value); FormatInfo info = FormatEvaluator.getFormatInfo(value); return DataTypeConvertor.convertObject(objValue, info); } @Override public void addAtributes(StringBuilder builder) throws ApplicationException { super.addAtributes(builder); if(name!=null) { builder.append(" name=\""+name+"\""); } if(value!=null && valuefrom==null) { value=handleValue(value); builder.append(" value=\""+value+"\""); } if(readonly!=null) { builder.append(" readonly=\""+readonly+"\""); } if(disabled!=null) { builder.append(" disabled=\""+disabled+"\""); } if(accesskey!=null) { builder.append(" accesskey=\""+accesskey+"\""); } if(tabindex!=null) { builder.append(" tabindex=\""+tabindex+"\""); } if(onblur!=null) { builder.append(" onblur=\""+onblur+"\""); } if(onchange!=null) { builder.append(" onchange=\""+onchange+"\""); } if(onfocus!=null) { builder.append(" onfocus=\""+onfocus+"\""); } if(onselect!=null) { builder.append(" onselect=\""+onselect+"\""); } if(onsubmit!=null) { builder.append(" onsubmit=\""+onsubmit+"\""); } if(onload!=null) { builder.append(" onload=\""+onload+"\""); } if(onunload!=null) { builder.append(" onunload=\""+onunload+"\""); } } public String getReadonly() { return readonly; } public void setReadonly(String readonly) { this.readonly = readonly; } public String getAccesskey() { return accesskey; } public void setAccesskey(String accesskey) { this.accesskey = accesskey; } public String getDisabled() { return disabled; } public void setDisabled(String disabled) { this.disabled = disabled; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getOnblur() { return onblur; } public void setOnblur(String onblur) { this.onblur = onblur; } public String getOnchange() { return 
onchange; } public void setOnchange(String onchange) { this.onchange = onchange; } public String getOnfocus() { return onfocus; } public void setOnfocus(String onfocus) { this.onfocus = onfocus; } public String getOnselect() { return onselect; } public void setOnselect(String onselect) { this.onselect = onselect; } public String getTabindex() { return tabindex; } public void setTabindex(String tabindex) { this.tabindex = tabindex; } public String getValuefrom() { return valuefrom; } public void setValuefrom(String valuefrom) { this.valuefrom = valuefrom; } public String getValueto() { return valueto; } public void setValueto(String valueto) { this.valueto = valueto; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public String getOnload() { return onload; } public void setOnload(String onload) { this.onload = onload; } public String getOnsubmit() { return onsubmit; } public void setOnsubmit(String onsubmit) { this.onsubmit = onsubmit; } public String getOnunload() { return onunload; } public void setOnunload(String onunload) { this.onunload = onunload; } }
/* * Copyright (C) 2012 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkState; import com.google.common.base.Equivalence; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Map; import java.util.Queue; import java.util.Random; import java.util.Set; import java.util.SortedMap; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListMap; /** * Helper classes for various benchmarks. * * @author Christopher Swenson */ final class BenchmarkHelpers { /** So far, this is the best way to test various implementations of {@link Set} subclasses. 
*/
/**
 * Factory abstraction for building a {@code Collection} implementation from sample contents.
 * Implementing enums expose one constant per concrete collection type under test.
 */
public interface CollectionsImplEnum {
  /** Returns a new collection of this implementation type containing {@code contents}. */
  <E extends Comparable<E>> Collection<E> create(Collection<E> contents);

  /** Display name of the implementation (satisfied implicitly by {@code Enum.name()}). */
  String name();
}

/** Factory abstraction for building a {@code Map} implementation from sample contents. */
public interface MapsImplEnum {
  /** Returns a new map of this implementation type containing {@code contents}. */
  <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> contents);

  String name();
}

/** Factory abstraction for building an {@code Interner} pre-populated with sample contents. */
public interface InternerImplEnum {
  /** Returns a new interner of this implementation type with every element of {@code contents} interned. */
  <E> Interner<E> create(Collection<E> contents);

  String name();
}

/** One constant per {@code Set} implementation; each copies {@code contents} into that implementation. */
public enum SetImpl implements CollectionsImplEnum {
  HashSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return new HashSet<E>(contents);
    }
  },
  LinkedHashSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return new LinkedHashSet<E>(contents);
    }
  },
  TreeSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return new TreeSet<E>(contents);
    }
  },
  UnmodifiableSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return Collections.unmodifiableSet(new HashSet<E>(contents));
    }
  },
  SynchronizedSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return Collections.synchronizedSet(new HashSet<E>(contents));
    }
  },
  ImmutableSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return ImmutableSet.copyOf(contents);
    }
  },
  ImmutableSortedSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return ImmutableSortedSet.copyOf(contents);
    }
  },
  ContiguousSetImpl {
    @Override
    public <E extends Comparable<E>> Set<E> create(Collection<E> contents) {
      return ContiguousSet.copyOf(contents);
    }
  },
  ;
}

/** One constant per {@code ListMultimap} implementation. */
public enum ListMultimapImpl {
  ArrayListMultimapImpl {
    @Override
    <K, V> ListMultimap<K, V> create(Multimap<K, V> contents) {
      return ArrayListMultimap.create(contents);
    }
  },
  LinkedListMultimapImpl {
    @Override
    <K, V> ListMultimap<K, V> create(Multimap<K, V> contents) {
      return LinkedListMultimap.create(contents);
    }
  },
  ImmutableListMultimapImpl {
    @Override
    <K, V> ListMultimap<K, V> create(Multimap<K, V> contents) {
      return ImmutableListMultimap.copyOf(contents);
    }
  };

  /** Returns a new multimap of this implementation type containing {@code contents}. */
  abstract <K, V> ListMultimap<K, V> create(Multimap<K, V> contents);
}

/** One constant per {@code RangeSet} implementation. */
public enum RangeSetImpl {
  TreeRangeSetImpl {
    @Override
    <K extends Comparable<K>> RangeSet<K> create(RangeSet<K> contents) {
      return TreeRangeSet.create(contents);
    }
  },
  ImmutableRangeSetImpl {
    @Override
    <K extends Comparable<K>> RangeSet<K> create(RangeSet<K> contents) {
      return ImmutableRangeSet.copyOf(contents);
    }
  };

  /** Returns a new range set of this implementation type containing {@code contents}. */
  abstract <K extends Comparable<K>> RangeSet<K> create(RangeSet<K> contents);
}

/** One constant per {@code SetMultimap} implementation. */
public enum SetMultimapImpl {
  HashMultimapImpl {
    @Override
    <K extends Comparable<K>, V extends Comparable<V>> SetMultimap<K, V> create(
        Multimap<K, V> contents) {
      return HashMultimap.create(contents);
    }
  },
  LinkedHashMultimapImpl {
    @Override
    <K extends Comparable<K>, V extends Comparable<V>> SetMultimap<K, V> create(
        Multimap<K, V> contents) {
      return LinkedHashMultimap.create(contents);
    }
  },
  TreeMultimapImpl {
    @Override
    <K extends Comparable<K>, V extends Comparable<V>> SetMultimap<K, V> create(
        Multimap<K, V> contents) {
      return TreeMultimap.create(contents);
    }
  },
  ImmutableSetMultimapImpl {
    @Override
    <K extends Comparable<K>, V extends Comparable<V>> SetMultimap<K, V> create(
        Multimap<K, V> contents) {
      return ImmutableSetMultimap.copyOf(contents);
    }
  };

  /** Returns a new multimap of this implementation type containing {@code contents}. */
  abstract <K extends Comparable<K>, V extends Comparable<V>> SetMultimap<K, V> create(
      Multimap<K, V> contents);
}

/** One constant per {@code Map} implementation, including the MapMaker reference variants. */
public enum MapImpl implements MapsImplEnum {
  HashMapImpl {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      return Maps.newHashMap(map);
    }
  },
  LinkedHashMapImpl {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      return Maps.newLinkedHashMap(map);
    }
  },
  ConcurrentHashMapImpl {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      return new ConcurrentHashMap<>(map);
    }
  },
  ImmutableMapImpl {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      return ImmutableMap.copyOf(map);
    }
  },
  MapMakerStrongKeysStrongValues {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      // We use a "custom" equivalence to force MapMaker to make a MapMakerInternalMap.
      ConcurrentMap<K, V> newMap = new MapMaker().keyEquivalence(Equivalence.equals()).makeMap();
      checkState(newMap instanceof MapMakerInternalMap);
      newMap.putAll(map);
      return newMap;
    }
  },
  MapMakerStrongKeysWeakValues {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      ConcurrentMap<K, V> newMap = new MapMaker().weakValues().makeMap();
      checkState(newMap instanceof MapMakerInternalMap);
      newMap.putAll(map);
      return newMap;
    }
  },
  MapMakerWeakKeysStrongValues {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      ConcurrentMap<K, V> newMap = new MapMaker().weakKeys().makeMap();
      checkState(newMap instanceof MapMakerInternalMap);
      newMap.putAll(map);
      return newMap;
    }
  },
  MapMakerWeakKeysWeakValues {
    @Override
    public <K extends Comparable<K>, V> Map<K, V> create(Map<K, V> map) {
      ConcurrentMap<K, V> newMap = new MapMaker().weakKeys().weakValues().makeMap();
      checkState(newMap instanceof MapMakerInternalMap);
      newMap.putAll(map);
      return newMap;
    }
  };
}

/** One constant per {@code SortedMap} implementation. */
enum SortedMapImpl implements MapsImplEnum {
  TreeMapImpl {
    @Override
    public <K extends Comparable<K>, V> SortedMap<K, V> create(Map<K, V> map) {
      SortedMap<K, V> result = Maps.newTreeMap();
      result.putAll(map);
      return result;
    }
  },
  ConcurrentSkipListImpl {
    @Override
    public <K extends Comparable<K>, V> SortedMap<K, V> create(Map<K, V> map) {
      return new ConcurrentSkipListMap<>(map);
    }
  },
  ImmutableSortedMapImpl {
    @Override
    public <K extends Comparable<K>, V> SortedMap<K, V> create(Map<K, V> map) {
      return ImmutableSortedMap.copyOf(map);
    }
  };
}

/** One constant per {@code BiMap} implementation. */
enum BiMapImpl implements MapsImplEnum {
  HashBiMapImpl {
    @Override
    public <K extends Comparable<K>, V> BiMap<K, V> create(Map<K, V> map) {
      return HashBiMap.create(map);
    }
  },
  ImmutableBiMapImpl {
    @Override
    public <K extends Comparable<K>, V> BiMap<K, V> create(Map<K, V> map) {
      return ImmutableBiMap.copyOf(map);
    }
  };

  // Covariant refinement: narrows MapsImplEnum#create to return BiMap.
  @Override
  public abstract <K extends Comparable<K>, V> BiMap<K, V> create(Map<K, V> map);
}

/** One constant per {@code Multiset} implementation. */
enum MultisetImpl implements CollectionsImplEnum {
  HashMultisetImpl {
    @Override
    public <E extends Comparable<E>> Multiset<E> create(Collection<E> contents) {
      return HashMultiset.create(contents);
    }
  },
  LinkedHashMultisetImpl {
    @Override
    public <E extends Comparable<E>> Multiset<E> create(Collection<E> contents) {
      return LinkedHashMultiset.create(contents);
    }
  },
  ConcurrentHashMultisetImpl {
    @Override
    public <E extends Comparable<E>> Multiset<E> create(Collection<E> contents) {
      return ConcurrentHashMultiset.create(contents);
    }
  },
  ImmutableMultisetImpl {
    @Override
    public <E extends Comparable<E>> Multiset<E> create(Collection<E> contents) {
      return ImmutableMultiset.copyOf(contents);
    }
  };
}

/** One constant per {@code SortedMultiset} implementation. */
enum SortedMultisetImpl implements CollectionsImplEnum {
  TreeMultisetImpl {
    @Override
    public <E extends Comparable<E>> SortedMultiset<E> create(Collection<E> contents) {
      return TreeMultiset.create(contents);
    }
  },
  ImmutableSortedMultisetImpl {
    @Override
    public <E extends Comparable<E>> SortedMultiset<E> create(Collection<E> contents) {
      return ImmutableSortedMultiset.copyOf(contents);
    }
  };
}

/** One constant per {@code Queue} implementation. */
enum QueueImpl implements CollectionsImplEnum {
  MinMaxPriorityQueueImpl {
    @Override
    public <E extends Comparable<E>> Queue<E> create(Collection<E> contents) {
      return MinMaxPriorityQueue.create(contents);
    }
  };
}

/** One constant per {@code Table} implementation. */
enum TableImpl {
  HashBasedTableImpl {
    @Override
    <R extends Comparable<R>, C extends Comparable<C>, V> Table<R, C, V> create(
        Table<R, C, V> contents) {
      return HashBasedTable.create(contents);
    }
  },
  TreeBasedTableImpl {
    @Override
    <R extends Comparable<R>, C extends Comparable<C>, V> Table<R, C, V> create(
        Table<R, C, V> contents) {
      Table<R, C, V> table = TreeBasedTable.create();
      table.putAll(contents);
      return table;
    }
  },
  ArrayTableImpl {
    @Override
    <R extends Comparable<R>, C extends Comparable<C>, V> Table<R, C, V> create(
        Table<R, C, V> contents) {
      // Empty input is special-cased — presumably because ArrayTable cannot be built from an
      // empty table; TODO confirm against ArrayTable.create's contract.
      if (contents.isEmpty()) {
        return ImmutableTable.of();
      } else {
        return ArrayTable.create(contents);
      }
    }
  },
  ImmutableTableImpl {
    @Override
    <R extends Comparable<R>, C extends Comparable<C>, V> Table<R, C, V> create(
        Table<R, C, V> contents) {
      return ImmutableTable.copyOf(contents);
    }
  };

  /** Returns a new table of this implementation type containing {@code contents}. */
  abstract <R extends Comparable<R>, C extends Comparable<C>, V> Table<R, C, V> create(
      Table<R, C, V> contents);
}

/** One constant per {@code Interner} implementation; each interns every element of the input. */
public enum InternerImpl implements InternerImplEnum {
  WeakInternerImpl {
    @Override
    public <E> Interner<E> create(Collection<E> contents) {
      Interner<E> interner = Interners.newWeakInterner();
      for (E e : contents) {
        E unused = interner.intern(e);
      }
      return interner;
    }
  },
  StrongInternerImpl {
    @Override
    public <E> Interner<E> create(Collection<E> contents) {
      Interner<E> interner = Interners.newStrongInterner();
      for (E e : contents) {
        E unused = interner.intern(e);
      }
      return interner;
    }
  };
}

// NOTE(review): single-constant enum; presumably used where a trivial sample value type is
// needed — confirm at use sites.
public enum Value {
  INSTANCE;
}

/** Distribution from which sizes are drawn; bounds are inclusive on both ends. */
public enum ListSizeDistribution {
  UNIFORM_0_TO_2(0, 2),
  UNIFORM_0_TO_9(0, 9),
  ALWAYS_0(0, 0),
  ALWAYS_10(10, 10);

  // Inclusive lower bound of the size range.
  final int min;
  // Inclusive upper bound of the size range.
  final int max;

  private ListSizeDistribution(int min, int max) {
    this.min = min;
    this.max = max;
  }

  /** Samples a size uniformly from [min, max] using {@code random}. */
  public int chooseSize(Random random) {
    return random.nextInt(max - min + 1) + min;
  }
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.utils; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.LockSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class implements time simulation support. When time simulation is enabled, methods on this class will use fixed time. When time * simulation is disabled, methods will pass through to relevant java.lang.System/java.lang.Thread calls. Methods using units higher than * nanoseconds will pass through to System.currentTimeMillis(). Methods supporting nanoseconds will pass through to System.nanoTime(). 
*/
public class Time {
    private static final Logger LOG = LoggerFactory.getLogger(Time.class);
    // True while a SimulatedTime scope is active; all time/sleep methods then use simulated time.
    private static final AtomicBoolean SIMULATING = new AtomicBoolean(false);
    // When > 0, each simulated-sleep poll auto-advances simulated time by this many nanos.
    private static final AtomicLong AUTO_ADVANCE_NANOS_ON_SLEEP = new AtomicLong(0);
    // Wake-up deadline (in simulated nanos) for each thread currently in a simulated sleep.
    private static final Map<Thread, AtomicLong> THREAD_SLEEP_TIMES_NANOS = new ConcurrentHashMap<>();
    // Guards simulation on/off transitions coordinated with sleeper registration/advancement.
    private static final Object SLEEP_TIMES_LOCK = new Object();
    // Current simulated time in nanoseconds (meaningful only while SIMULATING is true).
    private static final AtomicLong SIMULATED_CURR_TIME_NANOS = new AtomicLong(0);

    /** Returns true while time simulation is active (i.e. inside a {@link SimulatedTime} scope). */
    public static boolean isSimulating() {
        return SIMULATING.get();
    }

    /** Sleeps until the given time in milliseconds (simulated when simulation is enabled). */
    public static void sleepUntil(long targetTimeMs) throws InterruptedException {
        if (SIMULATING.get()) {
            simulatedSleepUntilNanos(millisToNanos(targetTimeMs));
        } else {
            long sleepTimeMs = targetTimeMs - currentTimeMillis();
            if (sleepTimeMs > 0) {
                Thread.sleep(sleepTimeMs);
            }
        }
    }

    /** Sleeps until the given time in nanoseconds (simulated when simulation is enabled). */
    public static void sleepUntilNanos(long targetTimeNanos) throws InterruptedException {
        if (SIMULATING.get()) {
            simulatedSleepUntilNanos(targetTimeNanos);
        } else {
            long sleepTimeNanos = targetTimeNanos - nanoTime();
            long sleepTimeMs = nanosToMillis(sleepTimeNanos);
            // Thread.sleep takes millis plus a sub-millisecond nanosecond remainder.
            int sleepTimeNanosSansMs = (int) (sleepTimeNanos % 1_000_000);
            if (sleepTimeNanos > 0) {
                Thread.sleep(sleepTimeMs, sleepTimeNanosSansMs);
            }
        }
    }

    /**
     * Blocks the caller until simulated time reaches {@code targetTimeNanos}. The thread's
     * deadline is registered in THREAD_SLEEP_TIMES_NANOS (observable via isThreadWaiting), then
     * the thread polls every 10ms of real time. If simulation is turned off while waiting, the
     * wait aborts with InterruptedException. The registration is always removed on exit.
     */
    private static void simulatedSleepUntilNanos(long targetTimeNanos) throws InterruptedException {
        try {
            synchronized (SLEEP_TIMES_LOCK) {
                if (!SIMULATING.get()) {
                    // RuntimeException is only created to capture a stack trace in the debug log.
                    LOG.debug("{} is still sleeping after simulated time disabled.", Thread.currentThread(),
                        new RuntimeException("STACK TRACE"));
                    throw new InterruptedException();
                }
                THREAD_SLEEP_TIMES_NANOS.put(Thread.currentThread(), new AtomicLong(targetTimeNanos));
            }
            while (SIMULATED_CURR_TIME_NANOS.get() < targetTimeNanos) {
                synchronized (SLEEP_TIMES_LOCK) {
                    if (!SIMULATING.get()) {
                        LOG.debug("{} is still sleeping after simulated time disabled.", Thread.currentThread(),
                            new RuntimeException("STACK TRACE"));
                        throw new InterruptedException();
                    }
                    // Optionally advance simulated time automatically so sleepers make progress
                    // without an explicit advanceTime call from the test.
                    long autoAdvance = AUTO_ADVANCE_NANOS_ON_SLEEP.get();
                    if (autoAdvance > 0) {
                        advanceTimeNanos(autoAdvance);
                    }
                }
                Thread.sleep(10);
            }
        } finally {
            THREAD_SLEEP_TIMES_NANOS.remove(Thread.currentThread());
        }
    }

    /** Sleeps for {@code ms} milliseconds (simulated when simulation is enabled); no-op for ms &lt;= 0. */
    public static void sleep(long ms) throws InterruptedException {
        if (ms > 0) {
            if (SIMULATING.get()) {
                simulatedSleepUntilNanos(millisToNanos(currentTimeMillis() + ms));
            } else {
                Thread.sleep(ms);
            }
        }
    }

    /**
     * Parks for {@code nanos} nanoseconds. Under simulation this is a simulated sleep (and can
     * therefore throw InterruptedException); otherwise it delegates to LockSupport.parkNanos.
     */
    public static void parkNanos(long nanos) throws InterruptedException {
        if (nanos > 0) {
            if (SIMULATING.get()) {
                simulatedSleepUntilNanos(nanoTime() + nanos);
            } else {
                LockSupport.parkNanos(nanos);
            }
        }
    }

    /** Sleeps for {@code secs} seconds; no-op for secs &lt;= 0. */
    public static void sleepSecs(long secs) throws InterruptedException {
        if (secs > 0) {
            sleep(secs * 1000);
        }
    }

    /** Current time in nanoseconds: simulated time when simulating, else System.nanoTime(). */
    public static long nanoTime() {
        if (SIMULATING.get()) {
            return SIMULATED_CURR_TIME_NANOS.get();
        } else {
            return System.nanoTime();
        }
    }

    /** Current time in milliseconds: simulated when simulating, else System.currentTimeMillis(). */
    public static long currentTimeMillis() {
        if (SIMULATING.get()) {
            return nanosToMillis(SIMULATED_CURR_TIME_NANOS.get());
        } else {
            return System.currentTimeMillis();
        }
    }

    /** Converts nanoseconds to whole milliseconds, truncating. */
    public static long nanosToMillis(long nanos) {
        return nanos / 1_000_000;
    }

    /** Converts milliseconds to nanoseconds. */
    public static long millisToNanos(long millis) {
        return millis * 1_000_000;
    }

    /** Converts whole seconds to milliseconds. */
    public static long secsToMillis(int secs) {
        return 1000 * (long) secs;
    }

    /** Converts (possibly fractional) seconds to milliseconds, truncating toward zero. */
    public static long secsToMillisLong(double secs) {
        return (long) (1000 * secs);
    }

    /** Current time in whole seconds, truncating. */
    public static int currentTimeSecs() {
        return (int) (currentTimeMillis() / 1000);
    }

    /** Seconds elapsed since {@code timeInSeconds} (negative if that time is in the future). */
    public static int deltaSecs(int timeInSeconds) {
        return Time.currentTimeSecs() - timeInSeconds;
    }

    /** Milliseconds elapsed since {@code timeInMilliseconds} (negative if in the future). */
    public static long deltaMs(long timeInMilliseconds) {
        return Time.currentTimeMillis() - timeInMilliseconds;
    }

    /** Advances simulated time by {@code ms} milliseconds. Only legal while simulating. */
    public static void advanceTime(long ms) {
        advanceTimeNanos(millisToNanos(ms));
    }

    /**
     * Advances simulated time by {@code nanos} and drops the registration of any simulated
     * sleeper whose deadline has now been reached; those threads observe the new time on their
     * next poll and return from their sleep.
     *
     * @throws IllegalStateException if not in simulation mode
     * @throws IllegalArgumentException if {@code nanos} is negative (zero is accepted, despite
     *     the message's wording)
     */
    public static void advanceTimeNanos(long nanos) {
        if (!SIMULATING.get()) {
            throw new IllegalStateException("Cannot simulate time unless in simulation mode");
        }
        if (nanos < 0) {
            throw new IllegalArgumentException("advanceTime only accepts positive time as an argument");
        }
        synchronized (SLEEP_TIMES_LOCK) {
            long newTime = SIMULATED_CURR_TIME_NANOS.addAndGet(nanos);
            Iterator<AtomicLong> sleepTimesIter = THREAD_SLEEP_TIMES_NANOS.values().iterator();
            while (sleepTimesIter.hasNext()) {
                AtomicLong curr = sleepTimesIter.next();
                // Deadline has elapsed: unregister so isThreadWaiting stops reporting the thread.
                if (SIMULATED_CURR_TIME_NANOS.get() >= curr.get()) {
                    sleepTimesIter.remove();
                }
            }
            LOG.debug("Advanced simulated time to {}", newTime);
        }
    }

    /** Advances simulated time by {@code secs} seconds. Only legal while simulating. */
    public static void advanceTimeSecs(long secs) {
        advanceTime(secs * 1_000);
    }

    /**
     * Returns whether thread {@code t} is "waiting" from the simulation's point of view: either
     * it has terminated, or it is registered as sleeping with a deadline still in the simulated
     * future.
     *
     * @throws IllegalStateException if not in simulation mode
     */
    public static boolean isThreadWaiting(Thread t) {
        if (!SIMULATING.get()) {
            throw new IllegalStateException("Must be in simulation mode");
        }
        AtomicLong time = THREAD_SLEEP_TIMES_NANOS.get(t);
        return !t.isAlive() || time != null && nanoTime() < time.longValue();
    }

    /**
     * Scoped switch for time simulation: constructing an instance enables simulated time (reset
     * to 0, all sleeper registrations cleared) and {@link #close()} disables it. Intended for
     * try-with-resources in tests.
     */
    public static class SimulatedTime implements AutoCloseable {

        public SimulatedTime() {
            this(null);
        }

        /**
         * @param advanceTimeMs if non-null, simulated time auto-advances by this many millis on
         *     every simulated-sleep poll; if null, auto-advance is disabled.
         */
        public SimulatedTime(Number advanceTimeMs) {
            synchronized (Time.SLEEP_TIMES_LOCK) {
                Time.SIMULATING.set(true);
                Time.SIMULATED_CURR_TIME_NANOS.set(0);
                Time.THREAD_SLEEP_TIMES_NANOS.clear();
                if (advanceTimeMs != null) {
                    Time.AUTO_ADVANCE_NANOS_ON_SLEEP.set(millisToNanos(advanceTimeMs.longValue()));
                } else {
                    Time.AUTO_ADVANCE_NANOS_ON_SLEEP.set(0);
                }
                LOG.warn("AutoCloseable Simulated Time Starting...");
            }
        }

        @Override
        public void close() {
            synchronized (Time.SLEEP_TIMES_LOCK) {
                Time.SIMULATING.set(false);
                LOG.warn("AutoCloseable Simulated Time Ending...");
            }
        }
    }
}
package com.fsck.k9.mail.store.imap; import java.io.IOException; import java.net.UnknownHostException; import java.util.List; import android.app.Activity; import android.net.ConnectivityManager; import com.fsck.k9.mail.AuthType; import com.fsck.k9.mail.AuthenticationFailedException; import com.fsck.k9.mail.CertificateValidationException; import com.fsck.k9.mail.CertificateValidationException.Reason; import com.fsck.k9.mail.ConnectionSecurity; import com.fsck.k9.mail.K9LibRobolectricTestRunner; import com.fsck.k9.mail.K9MailLib; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.XOAuth2ChallengeParserTest; import com.fsck.k9.mail.helpers.TestTrustedSocketFactory; import com.fsck.k9.mail.oauth.OAuth2TokenProvider; import com.fsck.k9.mail.ssl.TrustedSocketFactory; import com.fsck.k9.mail.store.imap.mockserver.MockImapServer; import okio.ByteString; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.shadows.ShadowLog; import static org.hamcrest.core.StringContains.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; @RunWith(K9LibRobolectricTestRunner.class) public class ImapConnectionTest { private static final boolean DEBUGGING = false; private static final String USERNAME = "user"; private static final String PASSWORD = "123456"; private static final int SOCKET_CONNECT_TIMEOUT = 10000; private static final int SOCKET_READ_TIMEOUT = 10000; private static final String XOAUTH_TOKEN = "token"; private static final String XOAUTH_ANOTHER_TOKEN = "token2"; private static final String XOAUTH_STRING = ByteString.encodeUtf8( "user=" + USERNAME + "\001auth=Bearer " + XOAUTH_TOKEN + "\001\001").base64(); private static final String XOAUTH_STRING_RETRY = ByteString.encodeUtf8( "user=" + USERNAME + 
"\001auth=Bearer " + XOAUTH_ANOTHER_TOKEN + "\001\001").base64(); private TrustedSocketFactory socketFactory; private ConnectivityManager connectivityManager; private OAuth2TokenProvider oAuth2TokenProvider; private SimpleImapSettings settings; @Before public void setUp() throws Exception { connectivityManager = mock(ConnectivityManager.class); oAuth2TokenProvider = createOAuth2TokenProvider(); socketFactory = TestTrustedSocketFactory.newInstance(); settings = new SimpleImapSettings(); settings.setUsername(USERNAME); settings.setPassword(PASSWORD); if (DEBUGGING) { ShadowLog.stream = System.out; K9MailLib.setDebug(true); K9MailLib.setDebugSensitive(true); } } @Test public void open_withNoCapabilitiesInInitialResponse_shouldIssuePreAuthCapabilitiesCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); server.output("* OK example.org server"); server.expect("1 CAPABILITY"); server.output("* CAPABILITY IMAP4 IMAP4REV1 AUTH=PLAIN"); server.output("1 OK CAPABILITY Completed"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("2 OK Success"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_withCapabilitiesInInitialResponse_shouldNotIssuePreAuthCapabilitiesCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); server.output("* OK [CAPABILITY IMAP4 IMAP4REV1 AUTH=PLAIN]"); server.expect("1 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("1 OK Success"); postAuthenticationDialogRequestingCapabilities(server, 2); ImapConnection imapConnection = 
startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authPlain() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("2 OK Success"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_afterCloseWasCalled_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server); server.expect("2 LOGIN \"" + USERNAME + "\" \"" + PASSWORD + "\""); server.output("2 OK LOGIN completed"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); imapConnection.close(); try { imapConnection.open(); fail("Expected exception"); } catch (IllegalStateException e) { assertEquals("open() called after close(). 
Check wrapped exception to see where close() was called.", e.getMessage()); } } @Test public void open_authPlainWithLoginDisabled_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "LOGINDISABLED"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (MessagingException e) { assertEquals("Server doesn't support unencrypted passwords using AUTH=PLAIN and LOGIN is disabled.", e.getMessage()); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_authPlainWithAuthenticationFailure_shouldFallbackToLogin() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("2 NO Login Failure"); server.expect("3 LOGIN \"" + USERNAME + "\" \"" + PASSWORD + "\""); server.output("3 OK LOGIN completed"); postAuthenticationDialogRequestingCapabilities(server, 4); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authPlainAndLoginFallbackWithAuthenticationFailure_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("2 NO Login Failure"); server.expect("3 LOGIN \"" + USERNAME + "\" \"" + PASSWORD + "\""); server.output("3 NO Go away"); ImapConnection imapConnection = 
startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (AuthenticationFailedException e) { //FIXME: improve exception message assertThat(e.getMessage(), containsString("Go away")); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_authPlainFailureAndDisconnect_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("2 NO [UNAVAILABLE] Maximum number of connections from user+IP exceeded"); server.closeConnection(); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (NegativeImapResponseException e) { assertThat(e.getMessage(), containsString("Maximum number of connections from user+IP exceeded")); } assertFalse(imapConnection.isConnected()); server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_authPlainWithByeResponseAndConnectionClose_shouldThrowAuthenticationFailedException() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("* BYE Go away"); server.output("2 NO Login Failure"); server.closeConnection(); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (AuthenticationFailedException e) { //FIXME: improve exception message assertThat(e.getMessage(), containsString("Login Failure")); } server.verifyConnectionClosed(); 
server.verifyInteractionCompleted(); } @Test public void open_authPlainWithoutAuthPlainCapability_shouldUseLoginMethod() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server); server.expect("2 LOGIN \"" + USERNAME + "\" \"" + PASSWORD + "\""); server.output("2 OK LOGIN completed"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authCramMd5() throws Exception { settings.setAuthType(AuthType.CRAM_MD5); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=CRAM-MD5"); server.expect("2 AUTHENTICATE CRAM-MD5"); server.output("+ " + ByteString.encodeUtf8("<0000.000000000@example.org>").base64()); server.expect("dXNlciA2ZjdiOTcyYjk5YTI4NDk4OTRhN2YyMmE3MGRhZDg0OQ=="); server.output("2 OK Success"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authCramMd5WithAuthenticationFailure_shouldThrow() throws Exception { settings.setAuthType(AuthType.CRAM_MD5); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=CRAM-MD5"); server.expect("2 AUTHENTICATE CRAM-MD5"); server.output("+ " + ByteString.encodeUtf8("<0000.000000000@example.org>").base64()); server.expect("dXNlciA2ZjdiOTcyYjk5YTI4NDk4OTRhN2YyMmE3MGRhZDg0OQ=="); server.output("2 NO Who are you?"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (AuthenticationFailedException e) { //FIXME: improve exception message assertThat(e.getMessage(), containsString("Who are you?")); } 
server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_authCramMd5WithoutAuthCramMd5Capability_shouldThrow() throws Exception { settings.setAuthType(AuthType.CRAM_MD5); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (MessagingException e) { assertEquals("Server doesn't support encrypted passwords using CRAM-MD5.", e.getMessage()); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_authXoauthWithSaslIr() throws Exception { settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("2 OK Success"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authXoauthWithSaslIrThrowsExeptionOn401Response() throws Exception { settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("+ " + XOAuth2ChallengeParserTest.STATUS_401_RESPONSE); server.expect(""); server.output("2 NO SASL authentication failed"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail(); } catch (AuthenticationFailedException e) { assertEquals("Command: AUTHENTICATE XOAUTH2; response: #2# [NO, SASL authentication failed]", e.getMessage()); } } @Test public void open_authXoauthWithSaslIrInvalidatesAndRetriesNewTokenOn400Response() throws Exception { 
settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("+ " + XOAuth2ChallengeParserTest.STATUS_400_RESPONSE); server.expect(""); server.output("2 NO SASL authentication failed"); server.expect("3 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING_RETRY); server.output("3 OK Success"); postAuthenticationDialogRequestingCapabilities(server, 4); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authXoauthWithSaslIrInvalidatesAndRetriesNewTokenOnInvalidJsonResponse() throws Exception { settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("+ " + XOAuth2ChallengeParserTest.INVALID_RESPONSE); server.expect(""); server.output("2 NO SASL authentication failed"); server.expect("3 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING_RETRY); server.output("3 OK Success"); requestCapabilities(server, 4); simplePostAuthenticationDialog(server, 5); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authXoauthWithSaslIrInvalidatesAndRetriesNewTokenOnMissingStatusJsonResponse() throws Exception { settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("+ " + XOAuth2ChallengeParserTest.MISSING_STATUS_RESPONSE); server.expect(""); server.output("2 NO SASL authentication failed"); server.expect("3 AUTHENTICATE 
XOAUTH2 " + XOAUTH_STRING_RETRY); server.output("3 OK Success"); requestCapabilities(server, 4); simplePostAuthenticationDialog(server, 5); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authXoauthWithSaslIrWithOldTokenThrowsExceptionIfRetryFails() throws Exception { settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("+ r3j3krj3irj3oir3ojo"); server.expect(""); server.output("2 NO SASL authentication failed"); server.expect("3 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING_RETRY); server.output("+ 433ba3a3a"); server.expect(""); server.output("3 NO SASL authentication failed"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail(); } catch (AuthenticationFailedException e) { assertEquals("Command: AUTHENTICATE XOAUTH2; response: #3# [NO, SASL authentication failed]", e.getMessage()); } } @Test public void open_authXoauthWithSaslIrParsesCapabilities() throws Exception { settings.setAuthType(AuthType.XOAUTH2); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "SASL-IR AUTH=XOAUTH AUTH=XOAUTH2"); server.expect("2 AUTHENTICATE XOAUTH2 " + XOAUTH_STRING); server.output("2 OK [CAPABILITY IMAP4REV1 IDLE XM-GM-EXT-1]"); simplePostAuthenticationDialog(server, 3); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); assertTrue(imapConnection.hasCapability("XM-GM-EXT-1")); } @Test public void open_authExternal() throws Exception { settings.setAuthType(AuthType.EXTERNAL); MockImapServer server = new 
MockImapServer(); preAuthenticationDialog(server, "AUTH=EXTERNAL"); server.expect("2 AUTHENTICATE EXTERNAL " + ByteString.encodeUtf8(USERNAME).base64()); server.output("2 OK Success"); postAuthenticationDialogRequestingCapabilities(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_authExternalWithAuthenticationFailure_shouldThrow() throws Exception { settings.setAuthType(AuthType.EXTERNAL); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=EXTERNAL"); server.expect("2 AUTHENTICATE EXTERNAL " + ByteString.encodeUtf8(USERNAME).base64()); server.output("2 NO Bad certificate"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (CertificateValidationException e) { //FIXME: improve exception message assertThat(e.getMessage(), containsString("Bad certificate")); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_authExternalWithoutAuthExternalCapability_shouldThrow() throws Exception { settings.setAuthType(AuthType.EXTERNAL); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (CertificateValidationException e) { assertEquals(Reason.MissingCapability, e.getReason()); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_withNoPostAuthCapabilityResponse_shouldIssueCapabilityCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" 
+ USERNAME + "\000" + PASSWORD).base64()); server.output("2 OK Success"); server.expect("3 CAPABILITY"); server.output("* CAPABILITY IDLE"); server.output("3 OK CAPABILITY Completed"); simplePostAuthenticationDialog(server, 4); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); assertTrue(imapConnection.isIdleCapable()); } @Test public void open_withUntaggedPostAuthCapabilityResponse_shouldNotIssueCapabilityCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("* CAPABILITY IMAP4rev1 UNSELECT IDLE QUOTA ID XLIST CHILDREN X-GM-EXT-1 UIDPLUS " + "ENABLE MOVE CONDSTORE ESEARCH UTF8=ACCEPT LIST-EXTENDED LIST-STATUS LITERAL- SPECIAL-USE " + "APPENDLIMIT=35651584"); server.output("2 OK"); simplePostAuthenticationDialog(server, 3); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); assertTrue(imapConnection.isIdleCapable()); } @Test public void open_withPostAuthCapabilityResponse_shouldNotIssueCapabilityCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "AUTH=PLAIN"); server.expect("2 AUTHENTICATE PLAIN"); server.output("+"); server.expect(ByteString.encodeUtf8("\000" + USERNAME + "\000" + PASSWORD).base64()); server.output("2 OK [CAPABILITY IDLE]"); simplePostAuthenticationDialog(server, 3); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); 
assertTrue(imapConnection.isIdleCapable()); } @Test public void open_withNamespaceCapability_shouldIssueNamespaceCommand() throws Exception { MockImapServer server = new MockImapServer(); simplePreAuthAndLoginDialog(server, "NAMESPACE"); server.expect("3 NAMESPACE"); server.output("* NAMESPACE ((\"\" \"/\")) NIL NIL"); server.output("3 OK command completed"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_withConnectionError_shouldThrow() throws Exception { settings.setHost("127.1.2.3"); settings.setPort(143); ImapConnection imapConnection = createImapConnection( settings, socketFactory, connectivityManager, oAuth2TokenProvider); try { imapConnection.open(); fail("Expected exception"); } catch (MessagingException e) { assertEquals("Cannot connect to host", e.getMessage()); assertTrue(e.getCause() instanceof IOException); } } @Test public void open_withInvalidHostname_shouldThrow() throws Exception { settings.setHost("host name"); settings.setPort(143); ImapConnection imapConnection = createImapConnection( settings, socketFactory, connectivityManager, oAuth2TokenProvider); try { imapConnection.open(); fail("Expected exception"); } catch (UnknownHostException ignored) { } assertFalse(imapConnection.isConnected()); } @Test public void open_withStartTlsCapability_shouldIssueStartTlsCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "STARTTLS LOGINDISABLED"); server.expect("2 STARTTLS"); server.output("2 OK [CAPABILITY IMAP4REV1 NAMESPACE]"); server.startTls(); server.expect("3 CAPABILITY"); server.output("* CAPABILITY IMAP4 IMAP4REV1"); server.output("3 OK"); server.expect("4 LOGIN \"" + USERNAME + "\" \"" + PASSWORD + "\""); server.output("4 OK [CAPABILITY 
NAMESPACE] LOGIN completed"); server.expect("5 NAMESPACE"); server.output("* NAMESPACE ((\"\" \"/\")) NIL NIL"); server.output("5 OK command completed"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_withStartTlsButWithoutStartTlsCapability_shouldThrow() throws Exception { settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (CertificateValidationException e) { //FIXME: CertificateValidationException seems wrong assertEquals("STARTTLS connection security not available", e.getMessage()); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_withNegativeResponseToStartTlsCommand_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED); MockImapServer server = new MockImapServer(); preAuthenticationDialog(server, "STARTTLS"); server.expect("2 STARTTLS"); server.output("2 NO"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Expected exception"); } catch (NegativeImapResponseException e) { assertEquals(e.getMessage(), "Command: STARTTLS; response: #2# [NO]"); } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_withCompressDeflateCapability_shouldEnableCompression() throws Exception { settings.setUseCompression(true); MockImapServer server = new MockImapServer(); simplePreAuthAndLoginDialog(server, "COMPRESS=DEFLATE"); server.expect("3 COMPRESS DEFLATE"); server.output("3 OK"); server.enableCompression(); simplePostAuthenticationDialog(server, 4); 
ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_withNegativeResponseToCompressionCommand_shouldContinue() throws Exception { settings.setAuthType(AuthType.PLAIN); settings.setUseCompression(true); MockImapServer server = new MockImapServer(); simplePreAuthAndLoginDialog(server, "COMPRESS=DEFLATE"); server.expect("3 COMPRESS DEFLATE"); server.output("3 NO"); simplePostAuthenticationDialog(server, 4); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void open_withIoExceptionDuringCompressionCommand_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); settings.setUseCompression(true); MockImapServer server = new MockImapServer(); simplePreAuthAndLoginDialog(server, "COMPRESS=DEFLATE"); server.expect("3 COMPRESS DEFLATE"); server.closeConnection(); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Exception expected"); } catch (IOException ignored) { } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_withIoExceptionDuringListCommand_shouldThrow() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); simplePreAuthAndLoginDialog(server, ""); server.expect("3 LIST \"\" \"\""); server.output("* Now what?"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.open(); fail("Exception expected"); } catch (IOException ignored) { } server.verifyConnectionClosed(); server.verifyInteractionCompleted(); } @Test public void open_withNegativeResponseToListCommand() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); 
simplePreAuthAndLoginDialog(server, ""); server.expect("3 LIST \"\" \"\""); server.output("3 NO"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void isConnected_withoutPreviousOpen_shouldReturnFalse() throws Exception { ImapConnection imapConnection = createImapConnection( settings, socketFactory, connectivityManager, oAuth2TokenProvider); boolean result = imapConnection.isConnected(); assertFalse(result); } @Test public void isConnected_afterOpen_shouldReturnTrue() throws Exception { MockImapServer server = new MockImapServer(); ImapConnection imapConnection = simpleOpen(server); boolean result = imapConnection.isConnected(); assertTrue(result); server.verifyConnectionStillOpen(); server.shutdown(); } @Test public void isConnected_afterOpenAndClose_shouldReturnFalse() throws Exception { MockImapServer server = new MockImapServer(); ImapConnection imapConnection = simpleOpen(server); imapConnection.close(); boolean result = imapConnection.isConnected(); assertFalse(result); server.verifyConnectionClosed(); server.shutdown(); } @Test public void close_withoutOpen_shouldNotThrow() throws Exception { ImapConnection imapConnection = createImapConnection( settings, socketFactory, connectivityManager, oAuth2TokenProvider); imapConnection.close(); } @Test public void close_afterOpen_shouldCloseConnection() throws Exception { MockImapServer server = new MockImapServer(); ImapConnection imapConnection = simpleOpen(server); imapConnection.close(); server.verifyConnectionClosed(); server.shutdown(); } @Test public void isIdleCapable_withoutIdleCapability() throws Exception { MockImapServer server = new MockImapServer(); ImapConnection imapConnection = simpleOpen(server); boolean result = imapConnection.isIdleCapable(); assertFalse(result); server.shutdown(); } @Test public void isIdleCapable_withIdleCapability() throws Exception { 
MockImapServer server = new MockImapServer(); ImapConnection imapConnection = simpleOpenWithCapabilities(server, "IDLE"); boolean result = imapConnection.isIdleCapable(); assertTrue(result); server.shutdown(); } @Test public void sendContinuation() throws Exception { settings.setAuthType(AuthType.PLAIN); MockImapServer server = new MockImapServer(); simpleOpenDialog(server, "IDLE"); server.expect("4 IDLE"); server.output("+ idling"); server.expect("DONE"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); imapConnection.sendCommand("IDLE", false); imapConnection.readResponse(); imapConnection.sendContinuation("DONE"); server.waitForInteractionToComplete(); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void executeSingleCommand_withOkResponse_shouldReturnResult() throws Exception { MockImapServer server = new MockImapServer(); simpleOpenDialog(server, ""); server.expect("4 CREATE Folder"); server.output("4 OK Folder created"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); List<ImapResponse> result = imapConnection.executeSimpleCommand("CREATE Folder"); assertEquals(result.size(), 1); server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } @Test public void executeSingleCommand_withNoResponse_shouldThrowNegativeImapResponseException() throws Exception { MockImapServer server = new MockImapServer(); simpleOpenDialog(server, ""); server.expect("4 CREATE Folder"); server.output("4 NO Folder exists"); ImapConnection imapConnection = startServerAndCreateImapConnection(server); try { imapConnection.executeSimpleCommand("CREATE Folder"); fail("Expected exception"); } catch (NegativeImapResponseException e) { assertEquals("Folder exists", e.getLastResponse().getString(1)); } server.verifyConnectionStillOpen(); server.verifyInteractionCompleted(); } private ImapConnection createImapConnection(ImapSettings settings, TrustedSocketFactory 
socketFactory, ConnectivityManager connectivityManager, OAuth2TokenProvider oAuth2TokenProvider) { return new ImapConnection(settings, socketFactory, connectivityManager, oAuth2TokenProvider, SOCKET_CONNECT_TIMEOUT, SOCKET_READ_TIMEOUT); } private ImapConnection startServerAndCreateImapConnection(MockImapServer server) throws IOException { server.start(); settings.setHost(server.getHost()); settings.setPort(server.getPort()); return createImapConnection(settings, socketFactory, connectivityManager, oAuth2TokenProvider); } private ImapConnection simpleOpen(MockImapServer server) throws Exception { return simpleOpenWithCapabilities(server, ""); } private ImapConnection simpleOpenWithCapabilities(MockImapServer server, String postAuthCapabilities) throws Exception { simpleOpenDialog(server, postAuthCapabilities); ImapConnection imapConnection = startServerAndCreateImapConnection(server); imapConnection.open(); return imapConnection; } private void preAuthenticationDialog(MockImapServer server) { preAuthenticationDialog(server, ""); } private void preAuthenticationDialog(MockImapServer server, String capabilities) { server.output("* OK IMAP4rev1 Service Ready"); server.expect("1 CAPABILITY"); server.output("* CAPABILITY IMAP4 IMAP4REV1 " + capabilities); server.output("1 OK CAPABILITY"); } private void postAuthenticationDialogRequestingCapabilities(MockImapServer server) { postAuthenticationDialogRequestingCapabilities(server, 3); } private void postAuthenticationDialogRequestingCapabilities(MockImapServer server, int tag) { requestCapabilities(server, tag); simplePostAuthenticationDialog(server, tag + 1); } private void requestCapabilities(MockImapServer server, int tag) { server.expect(tag + " CAPABILITY"); server.output("* CAPABILITY IMAP4 IMAP4REV1 "); server.output(tag + " OK CAPABILITY"); } private void simplePostAuthenticationDialog(MockImapServer server, int tag) { server.expect(tag + " LIST \"\" \"\""); server.output("* LIST () \"/\" foo/bar"); 
server.output(tag + " OK"); } private void simpleOpenDialog(MockImapServer server, String postAuthCapabilities) { simplePreAuthAndLoginDialog(server, postAuthCapabilities); simplePostAuthenticationDialog(server, 3); } private void simplePreAuthAndLoginDialog(MockImapServer server, String postAuthCapabilities) { settings.setAuthType(AuthType.PLAIN); preAuthenticationDialog(server); server.expect("2 LOGIN \"" + USERNAME + "\" \"" + PASSWORD + "\""); server.output("2 OK [CAPABILITY " + postAuthCapabilities + "] LOGIN completed"); } private OAuth2TokenProvider createOAuth2TokenProvider() { return new OAuth2TokenProvider() { private int invalidationCount = 0; @Override public String getToken(String username, long timeoutMillis) throws AuthenticationFailedException { assertEquals(USERNAME, username); assertEquals(OAUTH2_TIMEOUT, timeoutMillis); switch (invalidationCount) { case 0: { return XOAUTH_TOKEN; } case 1: { return XOAUTH_ANOTHER_TOKEN; } default: { throw new AuthenticationFailedException("Ran out of auth tokens. invalidateToken() called too often?"); } } } @Override public void invalidateToken(String username) { assertEquals(USERNAME, username); invalidationCount++; } @Override public List<String> getAccounts() { throw new UnsupportedOperationException(); } @Override public void authorizeApi(String username, Activity activity, OAuth2TokenProviderAuthCallback callback) { throw new UnsupportedOperationException(); } }; } }
package net.krinsoft.chat;

import net.krinsoft.chat.api.Manager;
import net.krinsoft.chat.api.Target;
import net.krinsoft.chat.targets.Channel;
import net.krinsoft.chat.targets.ChatPlayer;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;

import java.io.File;
import java.util.*;

/**
 * Manages the plugin's chat channels: creation, membership, world-change
 * handling, persistence, and the backing 'channels.yml' configuration.
 *
 * @author krinsdeath
 */
public class ChannelManager implements Manager {
    private ChatCore plugin;
    // lower-cased channel name -> channel handle
    private HashMap<String, Channel> channels = new HashMap<String, Channel>();
    private boolean world_channels;
    private boolean allow_channels;
    private boolean channel_logging;
    private FileConfiguration configuration;
    private File config;

    public ChannelManager(ChatCore instance) {
        // clean() is a no-op here (the map is empty), but keeps state sane if
        // the manager is ever re-constructed over live channels
        clean();
        plugin = instance;
        registerConfiguration();
        registerChannels();
    }

    /**
     * Persists every known channel and clears the registry.
     */
    public void clean() {
        for (Channel channel : channels.values()) {
            channel.persist();
        }
        channels.clear();
    }

    @Override
    public FileConfiguration getConfig() {
        if (configuration == null) {
            configuration = YamlConfiguration.loadConfiguration(config);
            // NOTE(review): defaults are loaded from the same file here;
            // registerConfiguration() later installs the bundled resource as
            // defaults when the file is missing — confirm this is intentional
            configuration.setDefaults(YamlConfiguration.loadConfiguration(config));
        }
        return configuration;
    }

    @Override
    public void saveConfig() {
        try {
            getConfig().save(config);
        } catch (Exception e) {
            plugin.warn("An error occurred while trying to save 'channels.yml'");
        }
    }

    @Override
    public ChatCore getPlugin() {
        return plugin;
    }

    /**
     * Creates 'channels.yml' from the bundled defaults if it does not exist,
     * then caches the configuration flags this manager consults.
     */
    public void registerConfiguration() {
        config = new File(plugin.getDataFolder(), "channels.yml");
        if (!config.exists()) {
            getConfig().setDefaults(YamlConfiguration.loadConfiguration(plugin.getClass().getResourceAsStream("/defaults/channels.yml")));
            getConfig().options().copyDefaults(true);
            saveConfig();
        }
        world_channels = getConfig().getBoolean("world_channels");
        allow_channels = plugin.getConfig().getBoolean("plugin.allow_channels");
        channel_logging = getConfig().getBoolean("logging", true);
    }

    /**
     * Instantiates every channel listed under the 'channels' section of the
     * configuration.
     */
    public void registerChannels() {
        // renamed local: the original shadowed the 'channels' field
        Set<String> channelNames = getConfig().getConfigurationSection("channels").getKeys(false);
        for (String channel : channelNames) {
            createChannel(null, channel);
        }
        plugin.debug("Default Channel: " + getDefaultChannel());
    }

    /**
     * Logs a channel message to the plugin logger when channel logging is on.
     */
    public void log(String channel, String message) {
        if (channel_logging) {
            plugin.log("[" + channel + "] " + message);
        }
    }

    /**
     * Adds the specified player to the given channel, creating the channel on
     * demand.
     *
     * @param player  The player to add to the channel
     * @param channel The name of the channel we're adding the player to
     * @return The handle of the channel the player was added to
     */
    public Channel addPlayerToChannel(Player player, String channel) {
        Channel chan = channels.get(channel.toLowerCase());
        if (chan == null) {
            chan = new Channel(this, channel, player);
            plugin.debug("Channel '" + channel + "' created");
        }
        if (!chan.contains(player.getName())) {
            chan.join(player);
        }
        // BUGFIX: Map.put() returns the PREVIOUS mapping — null for a newly
        // created channel — but callers (e.g. playerWorldChange) expect the
        // channel the player was just added to.
        channels.put(channel.toLowerCase(), chan);
        return chan;
    }

    /**
     * Removes the specified player from the given channel; empty non-permanent
     * channels are discarded.
     *
     * @param player  The player we're removing from the channel
     * @param channel The channel we're removing the player from
     * @return The handle of the channel the player was removed from, or null
     *         if no such channel existed
     */
    public Channel removePlayerFromChannel(Player player, String channel) {
        Channel chan = channels.get(channel.toLowerCase());
        if (chan == null) {
            // no channel by that name existed, so we do nothing
            return null;
        }
        // only part if the player would still be in at least one channel
        if (chan.contains(player.getName()) && getPlayerChannelList(player).size() > 1) {
            chan.part(player);
            if (chan.getOccupants().size() < 1 && !chan.isPermanent()) {
                // channel is empty! let's get rid of it
                chan = channels.remove(channel.toLowerCase());
                plugin.debug("Channel '" + chan.getName() + "' is empty: removing...");
            }
        }
        return chan;
    }

    /**
     * Changes the player's stored world alias and updates their world channel.
     *
     * @param p    The player we're updating
     * @param from The name of the world the player is coming from
     * @param to   The name of the world the player is moving to
     */
    public void playerWorldChange(Player p, String from, String to) {
        if (plugin.getPlayerManager().isPlayerRegistered(p.getName())) {
            ChatPlayer player = plugin.getPlayerManager().getPlayer(p.getName());
            player.setWorld(plugin.getWorldManager().getAlias(to));
            if (world_channels && allow_channels) {
                removePlayerFromChannel(p, from);
                Target target = addPlayerToChannel(p, to);
                if (player.getTarget().getName().equals(from)) {
                    player.setTarget(target);
                }
            }
        }
    }

    /**
     * Removes the player from every channel they occupy; empty non-permanent
     * channels are discarded. Iterates a snapshot so removal is safe.
     */
    void removePlayerFromAllChannels(Player player) {
        for (Channel channel : new HashSet<Channel>(channels.values())) {
            if (channel.contains(player.getName())) {
                channel.part(player);
                if (channel.getOccupants().size() == 0 && !channel.isPermanent()) {
                    channels.remove(channel.getName().toLowerCase());
                    plugin.debug("Channel '" + channel.getName() + "' is empty: removing...");
                }
            }
        }
    }

    public Channel getChannel(String channel) {
        return channels.get(channel.toLowerCase());
    }

    public Channel getGlobalChannel() {
        return getChannel(getDefaultChannel());
    }

    public String getDefaultChannel() {
        return getConfig().getString("default", "Global");
    }

    /**
     * Returns the channel with the given name, creating and registering it if
     * necessary; the player (when non-null) joins it.
     */
    public Channel createChannel(Player player, String channel) {
        Channel chan = getChannel(channel);
        if (chan != null) {
            chan.join(player);
            return chan;
        }
        chan = new Channel(this, channel, player);
        if (player != null) {
            chan.join(player);
        }
        addChannel(chan);
        return chan;
    }

    public void addChannel(Channel channel) {
        if (channels.containsKey(channel.getName().toLowerCase())) {
            plugin.debug("A channel named '" + channel.getName() + "' already exists.");
        }
        channels.put(channel.getName().toLowerCase(), channel);
    }

    public boolean getAllowChannels() {
        return this.allow_channels;
    }

    /** @return a mutable snapshot of all registered channels */
    public List<Channel> getChannels() {
        // copy-constructor instead of the original hand-rolled add loop
        return new ArrayList<Channel>(channels.values());
    }

    /** @return every channel the given player currently occupies */
    public List<Channel> getPlayerChannelList(Player player) {
        List<Channel> list = new ArrayList<Channel>();
        for (Channel chan : channels.values()) {
            if (chan.contains(player.getName())) {
                list.add(chan);
            }
        }
        return list;
    }

    public void connect() {
        for (Channel chan : channels.values()) {
            chan.connect();
        }
    }
}
package org.ieeeguc.ieeeguc.controllers;

import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;

import com.google.gson.Gson;

import org.ieeeguc.ieeeguc.HTTPResponse;
import org.ieeeguc.ieeeguc.R;
import org.ieeeguc.ieeeguc.models.User;
import org.ieeeguc.ieeeguc.models.User.Gender;
import org.ieeeguc.ieeeguc.models.User.Type;
import org.json.JSONObject;

import java.text.SimpleDateFormat;
import java.util.Date;

import static org.ieeeguc.ieeeguc.models.User.login;

/**
 * A login screen that offers login via email/password.
 */
public class LoginActivity extends AppCompatActivity {

    private EditText email;
    private EditText password;
    private Button send;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        email = (EditText) findViewById(R.id.email);
        password = (EditText) findViewById(R.id.password);
        send = (Button) findViewById(R.id.email_sign_in_button);
        send.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                attemptLogin();
            }
        });
    }

    /**
     * Called when the user clicks the sign-in button; validates the form and
     * attempts to log in against the backend. On success the auth token and
     * the parsed {@link User} are persisted and {@link MainActivity} starts.
     */
    public void attemptLogin() {
        String emailText = email.getText().toString();
        String passwordText = password.getText().toString();
        if (emailText.length() != 0 && passwordText.length() != 0) {
            login(emailText, passwordText, new HTTPResponse() {
                @Override
                public void onSuccess(int statusCode, JSONObject body) {
                    try {
                        String token = body.getString("token");
                        SharedPreferences Sp = getApplicationContext().getSharedPreferences(
                                getString(R.string.shared_preferences_name), Context.MODE_PRIVATE);
                        SharedPreferences.Editor E = Sp.edit();
                        E.putString("token", token);
                        MainActivity.token = token;
                        User user = parseUser(body.getJSONObject("user"));
                        MainActivity.loggedInUser = user;
                        E.putString("user", new Gson().toJson(user));
                        // apply() persists asynchronously; commit() would block the UI thread
                        E.apply();
                        Intent intent = new Intent(LoginActivity.this, MainActivity.class);
                        startActivity(intent);
                    } catch (Exception e) {
                        // any parse failure is reported as a server problem,
                        // matching the original behavior
                        showError(R.string.error_server_down);
                    }
                }

                @Override
                public void onFailure(int statusCode, JSONObject body) {
                    if (statusCode == 401) {
                        showError(R.string.error_incorrect_credentials);
                    } else if (statusCode == 500) {
                        showError(R.string.error_server_down);
                    } else if (statusCode == -1) {
                        showError(R.string.error_connection);
                    }
                }
            });
        } else {
            // BUGFIX: these messages were swapped — a non-empty email means the
            // PASSWORD field is missing, and a non-empty password means the
            // EMAIL field is missing.
            if (emailText.length() != 0) {
                showError(R.string.error_empty_password);
            } else if (passwordText.length() != 0) {
                showError(R.string.error_invalid_email);
            } else {
                showError(R.string.error_empty_credentials);
            }
        }
    }

    /**
     * Builds a {@link User} from the server's JSON representation.
     *
     * @param jsonUser the "user" object from the login response
     * @return the parsed user
     * @throws Exception if a required field is missing or the birthdate cannot
     *         be parsed
     */
    private User parseUser(JSONObject jsonUser) throws Exception {
        int id = jsonUser.getInt("id");
        String stringType = jsonUser.getString("type");
        Type type;
        switch (stringType) {
            case "Admin":
                type = Type.ADMIN;
                break;
            case "Upper Board":
                type = Type.UPPER_BOARD;
                break;
            case "High Board":
                type = Type.HIGH_BOARD;
                break;
            default:
                type = Type.MEMBER;
                break;
        }
        String FN = jsonUser.getString("first_name");
        String LN = jsonUser.getString("last_name");
        String stringGender = jsonUser.getString("gender");
        Gender gender;
        switch (stringGender) {
            case "male":
                gender = Gender.MALE;
                break;
            default:
                gender = Gender.FEMALE;
        }
        String email = jsonUser.getString("email");
        String PN = jsonUser.getString("phone_number");
        String BDS = jsonUser.getString("birthdate");
        // BUGFIX: the pattern was "yyyy-mm-dd" — lowercase 'mm' is MINUTES in
        // SimpleDateFormat; month-of-year is 'MM'.
        SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd");
        Date BD = dateFormatter.parse(BDS.substring(0, 10));
        String IEEE_membership_ID = jsonUser.getString("IEEE_membership_ID");
        JSONObject settings = jsonUser.getJSONObject("settings");
        String committeeName;
        int committeeID;
        if (jsonUser.has("committee")) {
            JSONObject committee = jsonUser.getJSONObject("committee");
            committeeName = committee.getString("committeeName");
            committeeID = committee.getInt("committeeID");
        } else {
            committeeName = null;
            committeeID = 0;
        }
        return new User(id, type, FN, LN, gender, email, BD, IEEE_membership_ID,
                committeeID, committeeName, PN, settings);
    }

    /**
     * Shows an indefinite Snackbar anchored to the sign-in button with a
     * dismissing "Ok" action. Replaces seven copy-pasted Snackbar blocks.
     */
    private void showError(int messageId) {
        Snackbar.make(findViewById(R.id.email_sign_in_button), getString(messageId),
                Snackbar.LENGTH_INDEFINITE).setAction("Ok", new OnClickListener() {
            @Override
            public void onClick(View view) {
            }
        }).show();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.datetime.markup.html.basic;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.wicket.datetime.DateConverter;
import org.apache.wicket.datetime.PatternDateConverter;
import org.apache.wicket.datetime.StyleDateConverter;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.model.IModel;
import org.apache.wicket.util.convert.IConverter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;

/**
 * A label that is mapped to a <code>java.util.Date</code> object and that uses Joda time to
 * format values.
 * <p>
 * You can provide a date pattern in two of the constructors. When not provided,
 * {@link DateTimeFormat#shortDate()} will be used.
 * </p>
 * <p>
 * A special option is applyTimeZoneDifference which is an option that says whether to correct for
 * the difference between the client's time zone and server's time zone. This is true by default.
 * </p>
 *
 * @see DateTime
 * @see DateTimeFormat
 * @see DateTimeZone
 *
 * @author eelcohillenius
 */
public class DateLabel extends Label
{
	private static final long serialVersionUID = 1L;

	/**
	 * Creates a new DateLabel that formats its model value with the given
	 * {@link SimpleDateFormat}-style date pattern.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param model
	 *            The model
	 * @param datePattern
	 *            The pattern to use. Must be not null. See {@link SimpleDateFormat} for available
	 *            patterns.
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel forDatePattern(String id, IModel<Date> model, String datePattern)
	{
		return new DateLabel(id, model, new PatternDateConverter(datePattern, true));
	}

	/**
	 * Creates a new DateLabel (with a null model, to be resolved from the parent) that formats
	 * its value with the given date pattern.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param datePattern
	 *            The pattern to use. Must be not null. See {@link SimpleDateFormat} for available
	 *            patterns.
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel forDatePattern(String id, String datePattern)
	{
		return forDatePattern(id, null, datePattern);
	}

	/**
	 * Creates a new DateLabel that formats its model value with the given Joda style.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param model
	 *            The model
	 * @param dateStyle
	 *            style to use in case no pattern is provided. Must be two characters from the set
	 *            {"S", "M", "L", "F", "-"}. Must be not null. See
	 *            {@link DateTimeFormat#forStyle(String)} for options.
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel forDateStyle(String id, IModel<Date> model, String dateStyle)
	{
		return new DateLabel(id, model, new StyleDateConverter(dateStyle, true));
	}

	/**
	 * Creates a new DateLabel (with a null model, to be resolved from the parent) that formats
	 * its value with the given Joda style.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param dateStyle
	 *            style to use in case no pattern is provided. Must be two characters from the set
	 *            {"S", "M", "L", "F", "-"}. Must be not null. See
	 *            {@link DateTimeFormat#forStyle(String)} for options.
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel forDateStyle(String id, String dateStyle)
	{
		return forDateStyle(id, null, dateStyle);
	}

	/**
	 * Creates a new DateLabel defaulting to using a short date pattern.
	 * 
	 * @param id
	 *            The id of the text field
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel forShortStyle(String id)
	{
		return forShortStyle(id, null);
	}

	/**
	 * Creates a new DateLabel defaulting to using a short date pattern.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param model
	 *            The model
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel forShortStyle(String id, IModel<Date> model)
	{
		return new DateLabel(id, model, new StyleDateConverter(true));
	}

	/**
	 * Creates a new DateLabel (with a null model, to be resolved from the parent) using the
	 * provided converter.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param converter
	 *            the date converter
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel withConverter(String id, DateConverter converter)
	{
		return withConverter(id, null, converter);
	}

	/**
	 * Creates a new DateLabel using the provided converter.
	 * 
	 * @param id
	 *            The id of the text field
	 * @param model
	 *            The model
	 * @param converter
	 *            the date converter
	 * @return the new label
	 * 
	 * @see org.apache.wicket.markup.html.form.TextField
	 */
	public static DateLabel withConverter(String id, IModel<Date> model, DateConverter converter)
	{
		return new DateLabel(id, model, converter);
	}

	/** optionally append to label (the original comment had append/prepend swapped). */
	private String after;

	/** optionally prepend to label (the original comment had append/prepend swapped). */
	private String before;

	/**
	 * The converter for the Label
	 */
	private final DateConverter converter;

	/**
	 * Construct with a converter.
	 * 
	 * @param id
	 *            The component id
	 * @param converter
	 *            The converter to use
	 */
	public DateLabel(String id, DateConverter converter)
	{
		this(id, null, converter);
	}

	/**
	 * Construct with a converter.
	 * 
	 * @param id
	 *            The component id
	 * @param model
	 *            The model
	 * @param converter
	 *            The converter to use; must not be null
	 */
	public DateLabel(String id, IModel<Date> model, DateConverter converter)
	{
		super(id, model);
		if (converter == null)
		{
			throw new IllegalStateException("converter may not be null");
		}
		converter.setComponent(this);
		this.converter = converter;
	}

	/**
	 * @return the text appended to the label, or null
	 */
	public String getAfter()
	{
		return after;
	}

	/**
	 * @return the text prepended to the label, or null
	 */
	public String getBefore()
	{
		return before;
	}

	/**
	 * Returns the specialized converter, regardless of the requested class.
	 * 
	 * @return the converter passed at construction time
	 */
	@SuppressWarnings("unchecked")
	@Override
	public IConverter getConverter(Class clazz)
	{
		return converter;
	}

	/**
	 * @param after
	 *            append to label
	 */
	public void setAfter(String after)
	{
		this.after = after;
	}

	/**
	 * @param before
	 *            prepend to label
	 */
	public void setBefore(String before)
	{
		this.before = before;
	}

	@Override
	protected void onComponentTagBody(MarkupStream markupStream, ComponentTag openTag)
	{
		// render before + converted-model-text + after as the tag body
		String s = getDefaultModelObjectAsString();
		if (before != null)
		{
			s = before + s;
		}
		if (after != null)
		{
			s = s + after;
		}
		replaceComponentTagBody(markupStream, openTag, s);
	}
}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package org.wso2.developerstudio.eclipse.gmf.esb.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.APIResourceEndpoint;
import org.wso2.developerstudio.eclipse.gmf.esb.APIResourceEndpointInputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.APIResourceEndpointOutputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>API Resource Endpoint</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.APIResourceEndpointImpl#getInputConnector <em>Input Connector</em>}</li>
 *   <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.APIResourceEndpointImpl#getOutputConnector <em>Output Connector</em>}</li>
 * </ul>
 * <p>
 * NOTE(review): this class is EMF-generated; manual edits to members still
 * tagged {@code @generated} will be discarded on the next model regeneration.
 *
 * @generated
 */
public class APIResourceEndpointImpl extends AbstractEndPointImpl implements APIResourceEndpoint {
    /**
     * The cached value of the '{@link #getInputConnector() <em>Input Connector</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getInputConnector()
     * @generated
     * @ordered
     */
    protected APIResourceEndpointInputConnector inputConnector;

    /**
     * The cached value of the '{@link #getOutputConnector() <em>Output Connector</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOutputConnector()
     * @generated
     * @ordered
     */
    protected APIResourceEndpointOutputConnector outputConnector;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected APIResourceEndpointImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return EsbPackage.Literals.API_RESOURCE_ENDPOINT;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public APIResourceEndpointInputConnector getInputConnector() {
        return inputConnector;
    }

    /**
     * Sets the cached input connector and queues a SET notification.
     * Does NOT handle inverse-removal of the old value; callers use
     * {@link #setInputConnector} for that.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetInputConnector(APIResourceEndpointInputConnector newInputConnector, NotificationChain msgs) {
        APIResourceEndpointInputConnector oldInputConnector = inputConnector;
        inputConnector = newInputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR, oldInputConnector, newInputConnector);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Replaces the containment reference, detaching the previous connector and
     * attaching the new one, then dispatches all accumulated notifications.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setInputConnector(APIResourceEndpointInputConnector newInputConnector) {
        if (newInputConnector != inputConnector) {
            NotificationChain msgs = null;
            if (inputConnector != null)
                msgs = ((InternalEObject)inputConnector).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR, null, msgs);
            if (newInputConnector != null)
                msgs = ((InternalEObject)newInputConnector).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR, null, msgs);
            msgs = basicSetInputConnector(newInputConnector, msgs);
            if (msgs != null)
                msgs.dispatch();
        }
        else if (eNotificationRequired())
            // Touch notification: value unchanged, but observers are informed.
            eNotify(new ENotificationImpl(this, Notification.SET,
                    EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR, newInputConnector, newInputConnector));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public APIResourceEndpointOutputConnector getOutputConnector() {
        return outputConnector;
    }

    /**
     * Sets the cached output connector and queues a SET notification
     * (mirror of {@link #basicSetInputConnector}).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOutputConnector(APIResourceEndpointOutputConnector newOutputConnector, NotificationChain msgs) {
        APIResourceEndpointOutputConnector oldOutputConnector = outputConnector;
        outputConnector = newOutputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR, oldOutputConnector, newOutputConnector);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Replaces the containment reference (mirror of {@link #setInputConnector}).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOutputConnector(APIResourceEndpointOutputConnector newOutputConnector) {
        if (newOutputConnector != outputConnector) {
            NotificationChain msgs = null;
            if (outputConnector != null)
                msgs = ((InternalEObject)outputConnector).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR, null, msgs);
            if (newOutputConnector != null)
                msgs = ((InternalEObject)newOutputConnector).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR, null, msgs);
            msgs = basicSetOutputConnector(newOutputConnector, msgs);
            if (msgs != null)
                msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR, newOutputConnector, newOutputConnector));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR:
                return basicSetInputConnector(null, msgs);
            case EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR:
                return basicSetOutputConnector(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR:
                return getInputConnector();
            case EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR:
                return getOutputConnector();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR:
                setInputConnector((APIResourceEndpointInputConnector)newValue);
                return;
            case EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR:
                setOutputConnector((APIResourceEndpointOutputConnector)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR:
                setInputConnector((APIResourceEndpointInputConnector)null);
                return;
            case EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR:
                setOutputConnector((APIResourceEndpointOutputConnector)null);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case EsbPackage.API_RESOURCE_ENDPOINT__INPUT_CONNECTOR:
                return inputConnector != null;
            case EsbPackage.API_RESOURCE_ENDPOINT__OUTPUT_CONNECTOR:
                return outputConnector != null;
        }
        return super.eIsSet(featureID);
    }

} // APIResourceEndpointImpl
/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.analytics;

import static com.google.android.exoplayer2.robolectric.TestPlayerRunHelper.runUntilPendingCommandsAreFullyHandled;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;

import androidx.annotation.Nullable;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.robolectric.TestPlayerRunHelper;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.testutil.FakeMediaSource;
import com.google.android.exoplayer2.testutil.FakeTimeline;
import com.google.android.exoplayer2.testutil.TestExoPlayerBuilder;
import com.google.common.collect.ImmutableList;
import java.util.stream.Collectors;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.robolectric.shadows.ShadowLooper;

/** Unit test for {@link PlaybackStatsListener}. */
@RunWith(AndroidJUnit4.class)
public final class PlaybackStatsListenerTest {

  // Fresh player per test; released in tearDown so Robolectric loopers don't leak.
  private ExoPlayer player;

  @Before
  public void setUp() {
    player = new TestExoPlayerBuilder(ApplicationProvider.getApplicationContext()).build();
  }

  @After
  public void tearDown() {
    player.release();
  }

  @Test
  public void events_duringInitialIdleState_dontCreateNewPlaybackStats() throws Exception {
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, /* callback= */ null);
    player.addAnalyticsListener(playbackStatsListener);

    // Seek, speed change, and play while still IDLE with no media: none of these
    // should start a playback session.
    player.seekTo(/* positionMs= */ 1234);
    runUntilPendingCommandsAreFullyHandled(player);
    player.setPlaybackParameters(new PlaybackParameters(/* speed= */ 2f));
    runUntilPendingCommandsAreFullyHandled(player);
    player.play();
    runUntilPendingCommandsAreFullyHandled(player);

    assertThat(playbackStatsListener.getPlaybackStats()).isNull();
  }

  @Test
  public void stateChangeEvent_toEndedWithEmptyTimeline_doesNotCreateInitialPlaybackStats()
      throws Exception {
    PlaybackStatsListener.Callback callback = mock(PlaybackStatsListener.Callback.class);
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, callback);
    player.addAnalyticsListener(playbackStatsListener);

    // prepare() with no media transitions straight to ENDED on an empty timeline.
    player.prepare();
    runUntilPendingCommandsAreFullyHandled(player);

    assertThat(playbackStatsListener.getPlaybackStats()).isNull();
    verifyNoMoreInteractions(callback);
  }

  @Test
  public void timelineChangeEvent_toNonEmpty_createsInitialPlaybackStats() throws Exception {
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, /* callback= */ null);
    player.addAnalyticsListener(playbackStatsListener);

    player.setMediaItem(MediaItem.fromUri("http://test.org"));
    runUntilPendingCommandsAreFullyHandled(player);

    assertThat(playbackStatsListener.getPlaybackStats()).isNotNull();
  }

  @Test
  public void playback_withKeepHistory_updatesStats() throws Exception {
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, /* callback= */ null);
    player.addAnalyticsListener(playbackStatsListener);

    player.setMediaSource(new FakeMediaSource(new FakeTimeline(/* windowCount= */ 1)));
    player.prepare();
    player.play();
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
    runUntilPendingCommandsAreFullyHandled(player);

    @Nullable PlaybackStats playbackStats = playbackStatsListener.getPlaybackStats();
    assertThat(playbackStats).isNotNull();
    assertThat(playbackStats.endedCount).isEqualTo(1);
  }

  @Test
  public void playback_withoutKeepHistory_updatesStats() throws Exception {
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ false, /* callback= */ null);
    player.addAnalyticsListener(playbackStatsListener);

    player.setMediaSource(new FakeMediaSource(new FakeTimeline(/* windowCount= */ 1)));
    player.prepare();
    player.play();
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
    runUntilPendingCommandsAreFullyHandled(player);

    @Nullable PlaybackStats playbackStats = playbackStatsListener.getPlaybackStats();
    assertThat(playbackStats).isNotNull();
    assertThat(playbackStats.endedCount).isEqualTo(1);
  }

  @Test
  public void finishedSession_callsCallback() throws Exception {
    PlaybackStatsListener.Callback callback = mock(PlaybackStatsListener.Callback.class);
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, callback);
    player.addAnalyticsListener(playbackStatsListener);

    // Create session with some events and finish it by removing it from the playlist.
    player.setMediaSource(new FakeMediaSource(new FakeTimeline(/* windowCount= */ 1)));
    player.prepare();
    runUntilPendingCommandsAreFullyHandled(player);
    verify(callback, never()).onPlaybackStatsReady(any(), any());
    player.clearMediaItems();
    runUntilPendingCommandsAreFullyHandled(player);

    verify(callback).onPlaybackStatsReady(any(), any());
  }

  @Test
  public void playlistClear_callsAllPendingCallbacks() throws Exception {
    PlaybackStatsListener.Callback callback = mock(PlaybackStatsListener.Callback.class);
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, callback);
    player.addAnalyticsListener(playbackStatsListener);
    MediaSource mediaSource = new FakeMediaSource(new FakeTimeline(/* windowCount= */ 1));
    player.setMediaSources(ImmutableList.of(mediaSource, mediaSource));
    player.prepare();
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
    // Play close to the end of the first item to ensure the second session is already created, but
    // the first one isn't finished yet.
    TestPlayerRunHelper.playUntilPosition(
        player, /* windowIndex= */ 0, /* positionMs= */ player.getDuration());
    runUntilPendingCommandsAreFullyHandled(player);

    player.clearMediaItems();
    ShadowLooper.idleMainLooper();

    // Both sessions (window 0 and window 1) must be reported when the playlist is cleared.
    ArgumentCaptor<AnalyticsListener.EventTime> eventTimeCaptor =
        ArgumentCaptor.forClass(AnalyticsListener.EventTime.class);
    verify(callback, times(2)).onPlaybackStatsReady(eventTimeCaptor.capture(), any());
    assertThat(
            eventTimeCaptor.getAllValues().stream()
                .map(eventTime -> eventTime.windowIndex)
                .collect(Collectors.toList()))
        .containsExactly(0, 1);
  }

  @Test
  public void playerRelease_callsAllPendingCallbacks() throws Exception {
    PlaybackStatsListener.Callback callback = mock(PlaybackStatsListener.Callback.class);
    PlaybackStatsListener playbackStatsListener =
        new PlaybackStatsListener(/* keepHistory= */ true, callback);
    player.addAnalyticsListener(playbackStatsListener);
    MediaSource mediaSource = new FakeMediaSource(new FakeTimeline(/* windowCount= */ 1));
    player.setMediaSources(ImmutableList.of(mediaSource, mediaSource));
    player.prepare();
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
    // Play close to the end of the first item to ensure the second session is already created, but
    // the first one isn't finished yet.
    TestPlayerRunHelper.playUntilPosition(
        player, /* windowIndex= */ 0, /* positionMs= */ player.getDuration());
    runUntilPendingCommandsAreFullyHandled(player);

    player.release();
    ShadowLooper.idleMainLooper();

    // Releasing the player must flush both pending sessions, same as a playlist clear.
    ArgumentCaptor<AnalyticsListener.EventTime> eventTimeCaptor =
        ArgumentCaptor.forClass(AnalyticsListener.EventTime.class);
    verify(callback, times(2)).onPlaybackStatsReady(eventTimeCaptor.capture(), any());
    assertThat(
            eventTimeCaptor.getAllValues().stream()
                .map(eventTime -> eventTime.windowIndex)
                .collect(Collectors.toList()))
        .containsExactly(0, 1);
  }
}
/*
 * Ferox, a graphics and game library in Java
 *
 * Copyright (c) 2012, Michael Ludwig
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 * * Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.ferox.physics.collision.algorithm;

import com.ferox.math.Const;
import com.ferox.math.Vector3;

/**
 * Simplex is a data class that builds up a simplex for use by the GJK and EPA algorithms. Much of this class
 * was ported and restructured from the Simplex used in Bullet's GJK algorithms.
 *
 * @author Michael Ludwig
 */
public class Simplex {
    // A 3D simplex holds at most 4 vertices (a tetrahedron).
    public static final int MAX_RANK = 4;

    private final MinkowskiShape shape;

    // Parallel arrays, indexed 0..rank-1:
    //   inputs[i]   - normalized support direction used for vertex i
    //   vertices[i] - support point of the Minkowski shape in that direction
    //   weights[i]  - barycentric weight assigned by projectOrigin()
    private final Vector3[] inputs;
    private final Vector3[] vertices;
    private final double[] weights;
    private int rank;

    private boolean isIntersection;

    public Simplex(MinkowskiShape shape) {
        inputs = new Vector3[MAX_RANK];
        vertices = new Vector3[MAX_RANK];
        weights = new double[MAX_RANK];
        for (int i = 0; i < MAX_RANK; i++) {
            inputs[i] = new Vector3();
            vertices[i] = new Vector3();
            weights[i] = 0.0;
        }
        rank = 0;
        this.shape = shape;
        isIntersection = false;
    }

    public MinkowskiShape getShape() {
        return shape;
    }

    public boolean isIntersection() {
        return isIntersection;
    }

    public void setIntersection(boolean intersect) {
        isIntersection = intersect;
    }

    // Resets the simplex to empty; vertex storage is reused, not cleared.
    public void reset() {
        rank = 0;
    }

    public int getRank() {
        return rank;
    }

    public Vector3 getVertex(int i) {
        return vertices[i];
    }

    public Vector3 getInput(int i) {
        return inputs[i];
    }

    public double getWeight(int i) {
        return weights[i];
    }

    public void setWeight(int i, double weight) {
        weights[i] = weight;
    }

    public void setRank(int rank) {
        this.rank = rank;
    }

    /**
     * Adds the support point of the shape in direction {@code dir} (normalized first)
     * as the next simplex vertex and increments the rank. Returns the new vertex.
     */
    public Vector3 addVertex(@Const Vector3 dir) {
        weights[rank] = 0.0;
        inputs[rank].normalize(dir);
        shape.getSupport(inputs[rank], vertices[rank]);
        return vertices[rank++];
    }

    /**
     * Same as {@link #addVertex} but uses the negated direction {@code -dir}.
     */
    public Vector3 addNegatedVertex(@Const Vector3 dir) {
        weights[rank] = 0.0;
        inputs[rank].scale(dir, -1.0).normalize();
        shape.getSupport(inputs[rank], vertices[rank]);
        return vertices[rank++];
    }

    public void discardLastVertex() {
        rank--;
    }

    /**
     * Expands the simplex until it encloses the origin (rank 4), adding axis-aligned
     * or geometric support directions as needed. On success the tetrahedron is
     * re-oriented to have positive signed volume. Returns false if the origin cannot
     * be enclosed (degenerate shape / no intersection).
     */
    public boolean encloseOrigin() {
        if (encloseOriginImpl()) {
            // orient the simplex: swap vertices 0 and 1 if the signed volume is negative
            if (Util.tripleProduct(new Vector3().sub(vertices[0], vertices[3]),
                                   new Vector3().sub(vertices[1], vertices[3]),
                                   new Vector3().sub(vertices[2], vertices[3])) < 0.0) {
                Vector3 temp = new Vector3();
                temp.set(vertices[0]);
                vertices[0].set(vertices[1]);
                vertices[1].set(temp);

                temp.set(inputs[0]);
                inputs[0].set(inputs[1]);
                inputs[1].set(temp);

                double weight = weights[0];
                weights[0] = weights[1];
                weights[1] = weight;
            }
            return true;
        } else {
            return false;
        }
    }

    // Recursive expansion: at each rank, try candidate directions and recurse;
    // backtrack (discardLastVertex) when a candidate fails.
    private boolean encloseOriginImpl() {
        switch (rank) {
        case 1: {
            // try +/- each coordinate axis
            Vector3 axis = new Vector3();
            for (int i = 0; i < 3; i++) {
                axis.set(0, 0, 0).set(i, 1.0);
                addVertex(axis);
                if (encloseOriginImpl()) {
                    return true;
                }
                discardLastVertex();
                addNegatedVertex(axis);
                if (encloseOriginImpl()) {
                    return true;
                }
                discardLastVertex();
            }
            break;
        }
        case 2: {
            // try directions perpendicular to the current edge
            Vector3 d = new Vector3().sub(vertices[1], vertices[0]);
            Vector3 axis = new Vector3();
            for (int i = 0; i < 3; i++) {
                axis.set(0, 0, 0).set(i, 1.0);
                axis.cross(d, axis);
                if (axis.lengthSquared() > 0) {
                    addVertex(axis);
                    if (encloseOriginImpl()) {
                        return true;
                    }
                    discardLastVertex();
                    addNegatedVertex(axis);
                    if (encloseOriginImpl()) {
                        return true;
                    }
                    discardLastVertex();
                }
            }
            break;
        }
        case 3: {
            // try both sides of the triangle's normal
            Vector3 n = Util.normal(vertices[0], vertices[1], vertices[2], null);
            if (n.lengthSquared() > 0) {
                addVertex(n);
                if (encloseOriginImpl()) {
                    return true;
                }
                discardLastVertex();
                addNegatedVertex(n);
                if (encloseOriginImpl()) {
                    return true;
                }
                discardLastVertex();
            }
            break;
        }
        case 4: {
            // enclosed if the tetrahedron has non-zero volume
            if (Math.abs(Util.tripleProduct(new Vector3().sub(vertices[0], vertices[3]),
                                            new Vector3().sub(vertices[1], vertices[3]),
                                            new Vector3().sub(vertices[2], vertices[3]))) > 0.0) {
                return true;
            }
            break;
        }
        }
        // failed
        return false;
    }

    /**
     * Projects the origin onto the simplex and discards vertices not needed to
     * support the projection, compacting the arrays and recomputing the rank.
     * Returns false if the projection failed (degenerate simplex).
     */
    public boolean reduce() {
        int mask = projectOrigin();
        if (mask > 0) {
            // compact simplex arrays
            for (int i = 0; i < rank; i++) {
                if ((mask & (1 << i)) != 0) {
                    // find lowest empty vertex
                    for (int j = 0; j < i; j++) {
                        if ((mask & (1 << j)) == 0) {
                            // found it, now shift i to j
                            inputs[j].set(inputs[i]);
                            vertices[j].set(vertices[i]);
                            weights[j] = weights[i];

                            // mark j as used and i as unused
                            mask |= (1 << j);
                            mask &= ~(1 << i);
                            break;
                        }
                    }
                }
            }

            // compute new rank
            // NOTE(review): starting at i = MAX_RANK tests bit 4, which projectOrigin()
            // never sets (mask <= 15); benign, but i = MAX_RANK - 1 would be the exact
            // bound — confirm before changing.
            rank = 0;
            for (int i = MAX_RANK; i >= 0; i--) {
                if ((mask & (1 << i)) != 0) {
                    // all bits lower than this one will also be set
                    rank = i + 1;
                    break;
                }
            }

            // reduced successfully
            return true;
        } else {
            return false;
        }
    }

    // Dispatches to the rank-specific projection and copies the resulting
    // barycentric weights back; returns the used-vertex bitmask, or -1 on failure
    // (including rank 1, which has no projection case).
    private int projectOrigin() {
        Projection proj = null;
        switch (rank) {
        case 2:
            proj = projectOrigin2(vertices[0], vertices[1]);
            break;
        case 3:
            proj = projectOrigin3(vertices[0], vertices[1], vertices[2]);
            break;
        case 4:
            proj = projectOrigin4(vertices[0], vertices[1], vertices[2], vertices[3]);
            break;
        }

        if (proj != null) {
            for (int i = 0; i < rank; i++) {
                weights[i] = proj.weights[i];
            }
            return proj.mask;
        } else {
            return -1;
        }
    }

    // Projects the origin onto segment ab; clamps to the nearest endpoint when the
    // projection parameter t falls outside [0, 1]. Returns null for a degenerate
    // (zero-length) segment.
    private Projection projectOrigin2(@Const Vector3 a, @Const Vector3 b) {
        Vector3 d = new Vector3().sub(b, a);
        double l = d.lengthSquared();

        if (l > 0.0) {
            double t = -a.dot(d) / l;
            if (t >= 1.0) {
                return new Projection(b.lengthSquared(), new double[] { 0.0, 1.0 }, 2);
            } else if (t <= 0.0) {
                return new Projection(a.lengthSquared(), new double[] { 1.0, 0.0 }, 1);
            } else {
                return new Projection(d.scale(t).add(a).lengthSquared(), new double[] { 1 - t, t }, 3);
            }
        } else {
            return null;
        }
    }

    // Projects the origin onto triangle abc: tests each edge's exterior half-space
    // first, falling back to the triangle's interior plane projection. Returns null
    // for a degenerate triangle.
    private Projection projectOrigin3(@Const Vector3 a, @Const Vector3 b, @Const Vector3 c) {
        Vector3[] vs = new Vector3[] { a, b, c };
        Vector3[] ds = new Vector3[3];
        ds[0] = new Vector3().sub(a, b);
        ds[1] = new Vector3().sub(b, c);
        ds[2] = new Vector3().sub(c, a);

        Vector3 n = new Vector3().cross(ds[0], ds[1]);
        double l = n.lengthSquared();

        if (l > 0.0) {
            double minDist = -1.0;
            double[] weights = new double[3];
            int mask = 0;

            Vector3 p = new Vector3();
            for (int i = 0; i < 3; i++) {
                if (vs[i].dot(p.cross(ds[i], n)) > 0.0) {
                    // origin lies outside this edge; project onto the edge segment
                    int j = (i + 1) % 3;
                    Projection subProj = projectOrigin2(vs[i], vs[j]);
                    if (subProj != null && (minDist < 0.0 || subProj.distance < minDist)) {
                        minDist = subProj.distance;
                        mask = ((subProj.mask & 1) != 0 ? (1 << i) : 0) |
                               ((subProj.mask & 2) != 0 ? (1 << j) : 0);
                        weights[i] = subProj.weights[0];
                        weights[j] = subProj.weights[1];
                        weights[(j + 1) % 3] = 0.0;
                    }
                }
            }

            if (minDist < 0.0) {
                // interior: project onto the triangle plane and compute barycentric weights
                double d = a.dot(n);
                double s = Math.sqrt(l);
                n.scale(d / l);

                minDist = n.lengthSquared();
                mask = 7;
                weights[0] = new Vector3().cross(ds[1], new Vector3().sub(b, n)).length() / s;
                weights[1] = new Vector3().cross(ds[2], new Vector3().sub(c, n)).length() / s;
                weights[2] = 1 - weights[0] - weights[1];
            }

            return new Projection(minDist, weights, mask);
        } else {
            return null;
        }
    }

    // Projects the origin onto tetrahedron abcd: tests each face containing d,
    // falling back to the interior (distance 0, all four vertices). Returns null
    // for a degenerate tetrahedron.
    private Projection projectOrigin4(@Const Vector3 a, @Const Vector3 b, @Const Vector3 c,
                                      @Const Vector3 d) {
        Vector3[] vs = new Vector3[] { a, b, c, d };
        Vector3[] ds = new Vector3[3];
        ds[0] = new Vector3().sub(a, d);
        ds[1] = new Vector3().sub(b, d);
        ds[2] = new Vector3().sub(c, d);

        double vl = Util.tripleProduct(ds[0], ds[1], ds[2]);
        boolean ng = (vl * a.dot(new Vector3().sub(b, c).cross(new Vector3().sub(a, b)))) <= 0.0;

        if (ng && Math.abs(vl) > 0.0) {
            double minDist = -1.0;
            double[] weights = new double[4];
            int mask = 0;

            for (int i = 0; i < 3; i++) {
                int j = (i + 1) % 3;
                double s = vl * d.dot(new Vector3().cross(ds[i], ds[j]));
                if (s > 0.0) {
                    // origin is outside the face (vs[i], vs[j], d)
                    Projection subProj = projectOrigin3(vs[i], vs[j], d);
                    if (subProj != null && (minDist < 0.0 || subProj.distance < minDist)) {
                        minDist = subProj.distance;
                        mask = ((subProj.mask & 1) != 0 ? (1 << i) : 0) |
                               ((subProj.mask & 2) != 0 ? (1 << j) : 0) |
                               ((subProj.mask & 4) != 0 ? 8 : 0);
                        weights[i] = subProj.weights[0];
                        weights[j] = subProj.weights[1];
                        weights[(j + 1) % 3] = 0.0;
                        weights[3] = subProj.weights[2];
                    }
                }
            }

            if (minDist < 0.0) {
                // origin inside the tetrahedron: barycentric weights from sub-volumes
                minDist = 0.0;
                mask = 15;

                weights[0] = Util.tripleProduct(c, b, d) / vl;
                weights[1] = Util.tripleProduct(a, c, d) / vl;
                weights[2] = Util.tripleProduct(b, a, d) / vl;
                weights[3] = 1 - weights[0] - weights[1] - weights[2];
            }

            return new Projection(minDist, weights, mask);
        } else {
            return null;
        }
    }

    // FIXME work out a way to reduce the allocation of Projection objects
    // Immutable result of a projection: squared distance to the origin, barycentric
    // weights per vertex, and a bitmask of the vertices supporting the projection.
    private static class Projection {
        final double[] weights;
        final double distance;
        final int mask;

        public Projection(double distance, double[] weights, int mask) {
            this.weights = weights;
            this.distance = distance;
            this.mask = mask;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.replication.management; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.RandomAccessFile; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.AsynchronousCloseException; import java.nio.channels.FileChannel; import java.nio.channels.SocketChannel; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.asterix.common.cluster.ClusterPartition; import 
org.apache.asterix.common.config.AsterixReplicationProperties; import org.apache.asterix.common.config.IAsterixPropertiesProvider; import org.apache.asterix.common.dataflow.AsterixLSMIndexUtil; import org.apache.asterix.common.exceptions.ACIDException; import org.apache.asterix.common.replication.AsterixReplicationJob; import org.apache.asterix.common.replication.IReplicaResourcesManager; import org.apache.asterix.common.replication.IReplicationManager; import org.apache.asterix.common.replication.Replica; import org.apache.asterix.common.replication.Replica.ReplicaState; import org.apache.asterix.common.replication.ReplicaEvent; import org.apache.asterix.common.transactions.IAsterixAppRuntimeContextProvider; import org.apache.asterix.common.transactions.ILogManager; import org.apache.asterix.common.transactions.ILogRecord; import org.apache.asterix.common.transactions.LogRecord; import org.apache.asterix.common.transactions.LogType; import org.apache.asterix.event.schema.cluster.Node; import org.apache.asterix.replication.functions.ReplicaFilesRequest; import org.apache.asterix.replication.functions.ReplicaIndexFlushRequest; import org.apache.asterix.replication.functions.ReplicaLogsRequest; import org.apache.asterix.replication.functions.ReplicationProtocol; import org.apache.asterix.replication.functions.ReplicationProtocol.ReplicationRequestType; import org.apache.asterix.replication.logging.ReplicationLogBuffer; import org.apache.asterix.replication.logging.ReplicationLogFlusher; import org.apache.asterix.replication.storage.LSMComponentProperties; import org.apache.asterix.replication.storage.LSMIndexFileProperties; import org.apache.asterix.replication.storage.ReplicaResourcesManager; import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository; import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType; import org.apache.hyracks.api.exceptions.HyracksDataException; import 
org.apache.hyracks.api.replication.IReplicationJob; import org.apache.hyracks.api.replication.IReplicationJob.ReplicationExecutionType; import org.apache.hyracks.api.replication.IReplicationJob.ReplicationJobType; import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation; import org.apache.hyracks.storage.am.common.api.IMetaDataPageManager; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexReplicationJob; import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex; /** * This class is used to process replication jobs and maintain remote replicas states */ public class ReplicationManager implements IReplicationManager { private static final Logger LOGGER = Logger.getLogger(ReplicationManager.class.getName()); private final int INITIAL_REPLICATION_FACTOR = 1; private final String nodeId; private ExecutorService replicationListenerThreads; private final Map<Integer, Set<String>> jobCommitAcks; private final Map<Integer, ILogRecord> replicationJobsPendingAcks; private ByteBuffer dataBuffer; private final LinkedBlockingQueue<IReplicationJob> replicationJobsQ; private final LinkedBlockingQueue<ReplicaEvent> replicaEventsQ; private int replicationFactor = 1; private final ReplicaResourcesManager replicaResourcesManager; private final ILogManager logManager; private final IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider; private final AsterixReplicationProperties replicationProperties; private final Map<String, Replica> replicas; private final Map<String, Set<Integer>> replica2PartitionsMap; private final AtomicBoolean replicationSuspended; private AtomicBoolean terminateJobsReplication; private AtomicBoolean jobsReplicationSuspended; private final static int INITIAL_BUFFER_SIZE = 4000; //4KB private final Set<String> shuttingDownReplicaIds; //replication threads private ReplicationJobsProccessor replicationJobsProcessor; private final 
ReplicasEventsMonitor replicationMonitor;
//dummy job used to stop ReplicationJobsProccessor thread.
private static final IReplicationJob REPLICATION_JOB_POISON_PILL = new AsterixReplicationJob(
        ReplicationJobType.METADATA, ReplicationOperation.REPLICATE, ReplicationExecutionType.ASYNC, null);
//used to identify the correct IP address when the node has multiple network interfaces
private String hostIPAddressFirstOctet = null;

//pool of reusable txn-log buffers: empty ones ready for appending, filled ones pending flush
private LinkedBlockingQueue<ReplicationLogBuffer> emptyLogBuffersQ;
private LinkedBlockingQueue<ReplicationLogBuffer> pendingFlushLogBuffersQ;
protected ReplicationLogBuffer currentTxnLogBuffer;
private ReplicationLogFlusher txnlogsReplicator;
private Future<? extends Object> txnLogReplicatorTask;
//sockets used to ship txn logs to remote replicas; null while log replication is not established
private Map<String, SocketChannel> logsReplicaSockets = null;

//TODO this class needs to be refactored by moving its private classes to separate files
//and possibly using MessageBroker to send/receive remote replicas events.
public ReplicationManager(String nodeId, AsterixReplicationProperties replicationProperties,
        IReplicaResourcesManager remoteResoucesManager, ILogManager logManager,
        IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider) {
    this.nodeId = nodeId;
    this.replicationProperties = replicationProperties;
    this.replicaResourcesManager = (ReplicaResourcesManager) remoteResoucesManager;
    this.asterixAppRuntimeContextProvider = asterixAppRuntimeContextProvider;
    this.hostIPAddressFirstOctet = replicationProperties.getReplicaIPAddress(nodeId).substring(0, 3);
    this.logManager = logManager;
    replicationJobsQ = new LinkedBlockingQueue<IReplicationJob>();
    replicaEventsQ = new LinkedBlockingQueue<ReplicaEvent>();
    terminateJobsReplication = new AtomicBoolean(false);
    jobsReplicationSuspended = new AtomicBoolean(true);
    replicationSuspended = new AtomicBoolean(true);
    replicas = new HashMap<String, Replica>();
    jobCommitAcks = new ConcurrentHashMap<Integer, Set<String>>();
    replicationJobsPendingAcks = new ConcurrentHashMap<Integer, ILogRecord>();
    shuttingDownReplicaIds = new HashSet<String>();
    dataBuffer = ByteBuffer.allocate(INITIAL_BUFFER_SIZE);
    //Used as async listeners from replicas
    replicationListenerThreads = Executors.newCachedThreadPool();
    replicationJobsProcessor = new ReplicationJobsProccessor();
    replicationMonitor = new ReplicasEventsMonitor();
    Map<String, ClusterPartition[]> nodePartitions = ((IAsterixPropertiesProvider) asterixAppRuntimeContextProvider
            .getAppContext()).getMetadataProperties().getNodePartitions();
    //add list of replicas from configurations (To be read from another source e.g. Zookeeper)
    Set<Replica> replicaNodes = replicationProperties.getRemoteReplicas(nodeId);
    replica2PartitionsMap = new HashMap<>(replicaNodes.size());
    for (Replica replica : replicaNodes) {
        replicas.put(replica.getNode().getId(), replica);
        //for each remote replica, get the list of replication clients
        Set<String> nodeReplicationClients = replicationProperties.getNodeReplicationClients(replica.getId());
        //get the partitions of each client
        List<Integer> clientPartitions = new ArrayList<>();
        for (String clientId : nodeReplicationClients) {
            for (ClusterPartition clusterPartition : nodePartitions.get(clientId)) {
                clientPartitions.add(clusterPartition.getPartitionId());
            }
        }
        Set<Integer> clientPartitonsSet = new HashSet<>(clientPartitions.size());
        clientPartitonsSet.addAll(clientPartitions);
        replica2PartitionsMap.put(replica.getId(), clientPartitonsSet);
    }
    //pre-allocate one replication log buffer per log-manager page
    int numLogBuffers = logManager.getNumLogPages();
    emptyLogBuffersQ = new LinkedBlockingQueue<ReplicationLogBuffer>(numLogBuffers);
    pendingFlushLogBuffersQ = new LinkedBlockingQueue<ReplicationLogBuffer>(numLogBuffers);
    int logBufferSize = logManager.getLogPageSize();
    for (int i = 0; i < numLogBuffers; i++) {
        emptyLogBuffersQ.offer(new ReplicationLogBuffer(this, logBufferSize));
    }
}

/**
 * Submits a replication job. ASYNC jobs are queued for the jobs processor thread;
 * SYNC jobs are processed inline after replication is resumed.
 */
@Override
public void submitJob(IReplicationJob job) throws IOException {
    if (job.getExecutionType() == ReplicationExecutionType.ASYNC) {
        replicationJobsQ.offer(job);
    } else {
        //wait until replication is resumed
        while (replicationSuspended.get()) {
            synchronized (replicationSuspended) {
                try {
                    replicationSuspended.wait();
                } catch (InterruptedException e) {
                    //ignore
                }
            }
        }
        processJob(job, null, null);
    }
}

/**
 * Appends a log record to the current replication log buffer. For JOB_COMMIT/ABORT logs,
 * registers the job in {@code jobCommitAcks} so remote ACKs can be tracked.
 */
@Override
public void replicateLog(ILogRecord logRecord) {
    if (logRecord.getLogType() == LogType.JOB_COMMIT || logRecord.getLogType() == LogType.ABORT) {
        //if replication is suspended, wait until it is resumed.
        while (replicationSuspended.get()) {
            synchronized (replicationSuspended) {
                try {
                    replicationSuspended.wait();
                } catch (InterruptedException e) {
                    //ignore
                }
            }
        }
        //the local node counts as the first ACK
        Set<String> replicaIds = Collections.synchronizedSet(new HashSet<String>());
        replicaIds.add(nodeId);
        jobCommitAcks.put(logRecord.getJobId(), replicaIds);
    }
    appendToLogBuffer(logRecord);
}

/**
 * Takes an empty log buffer, initializes it with the current replica sockets,
 * and hands it to the flusher via the pending-flush queue.
 */
protected void getAndInitNewPage() {
    currentTxnLogBuffer = null;
    while (currentTxnLogBuffer == null) {
        try {
            currentTxnLogBuffer = emptyLogBuffersQ.take();
        } catch (InterruptedException e) {
            //ignore
        }
    }
    currentTxnLogBuffer.reset();
    currentTxnLogBuffer.setReplicationSockets(logsReplicaSockets);
    pendingFlushLogBuffersQ.offer(currentTxnLogBuffer);
}

private synchronized void appendToLogBuffer(ILogRecord logRecord) {
    //switch to a fresh buffer if the current one cannot hold this record
    if (!currentTxnLogBuffer.hasSpace(logRecord)) {
        currentTxnLogBuffer.isFull(true);
        getAndInitNewPage();
    }
    currentTxnLogBuffer.append(logRecord);
}

/**
 * Processes the replication job based on its specifications
 *
 * @param job
 *            The replication job
 * @param replicasSockets
 *            The remote replicas sockets to send the request to.
 * @param requestBuffer
 *            The buffer to use to send the request.
 * @throws IOException
 */
private void processJob(IReplicationJob job, Map<String, SocketChannel> replicasSockets, ByteBuffer requestBuffer)
        throws IOException {
    try {
        //all of the job's files belong to a single storage partition.
        //get any of them to determine the partition from the file path.
        String jobFile = job.getJobFiles().iterator().next();
        int jobPartitionId = PersistentLocalResourceRepository.getResourcePartition(jobFile);

        ByteBuffer responseBuffer = null;
        LSMIndexFileProperties asterixFileProperties = new LSMIndexFileProperties();
        if (requestBuffer == null) {
            requestBuffer = ByteBuffer.allocate(INITIAL_BUFFER_SIZE);
        }

        boolean isLSMComponentFile = job.getJobType() == ReplicationJobType.LSM_COMPONENT;
        try {
            //if there isn't already a connection, establish a new one
            if (replicasSockets == null) {
                replicasSockets = getActiveRemoteReplicasSockets();
            }

            int remainingFiles = job.getJobFiles().size();
            if (job.getOperation() == ReplicationOperation.REPLICATE) {
                //if the replication job is an LSM_COMPONENT, its properties are sent first, then its files.
                ILSMIndexReplicationJob LSMComponentJob = null;
                if (job.getJobType() == ReplicationJobType.LSM_COMPONENT) {
                    //send LSMComponent properties
                    LSMComponentJob = (ILSMIndexReplicationJob) job;
                    LSMComponentProperties lsmCompProp = new LSMComponentProperties(LSMComponentJob, nodeId);
                    requestBuffer = ReplicationProtocol.writeLSMComponentPropertiesRequest(lsmCompProp,
                            requestBuffer);
                    sendRequest(replicasSockets, requestBuffer);
                }

                for (String filePath : job.getJobFiles()) {
                    remainingFiles--;
                    Path path = Paths.get(filePath);
                    if (Files.notExists(path)) {
                        LOGGER.log(Level.SEVERE, "File deleted before replication: " + filePath);
                        continue;
                    }

                    LOGGER.log(Level.INFO, "Replicating file: " + filePath);
                    //open file for reading
                    try (RandomAccessFile fromFile = new RandomAccessFile(filePath, "r");
                            FileChannel fileChannel = fromFile.getChannel();) {

                        long fileSize = fileChannel.size();

                        if (LSMComponentJob != null) {
                            //since this is LSM_COMPONENT REPLICATE job, the job will contain
                            //only the component being replicated.
                            ILSMComponent diskComponent = LSMComponentJob.getLSMIndexOperationContext()
                                    .getComponentsToBeReplicated().get(0);
                            long LSNByteOffset = AsterixLSMIndexUtil.getComponentFileLSNOffset(
                                    (AbstractLSMIndex) LSMComponentJob.getLSMIndex(), diskComponent, filePath);
                            asterixFileProperties.initialize(filePath, fileSize, nodeId, isLSMComponentFile,
                                    LSNByteOffset, remainingFiles == 0);
                        } else {
                            asterixFileProperties.initialize(filePath, fileSize, nodeId, isLSMComponentFile,
                                    IMetaDataPageManager.INVALID_LSN_OFFSET, remainingFiles == 0);
                        }

                        requestBuffer = ReplicationProtocol.writeFileReplicationRequest(requestBuffer,
                                asterixFileProperties, ReplicationRequestType.REPLICATE_FILE);

                        Iterator<Map.Entry<String, SocketChannel>> iterator =
                                replicasSockets.entrySet().iterator();
                        while (iterator.hasNext()) {
                            Map.Entry<String, SocketChannel> entry = iterator.next();
                            //if the remote replica is not interested in this partition, skip it.
                            if (!replica2PartitionsMap.get(entry.getKey()).contains(jobPartitionId)) {
                                continue;
                            }
                            SocketChannel socketChannel = entry.getValue();
                            //transfer request header & file
                            try {
                                NetworkingUtil.transferBufferToChannel(socketChannel, requestBuffer);
                                NetworkingUtil.sendFile(fileChannel, socketChannel);
                                if (asterixFileProperties.requiresAck()) {
                                    ReplicationRequestType responseType = waitForResponse(socketChannel,
                                            responseBuffer);
                                    if (responseType != ReplicationRequestType.ACK) {
                                        throw new IOException(
                                                "Could not receive ACK from replica " + entry.getKey());
                                    }
                                }
                            } catch (IOException e) {
                                reportFailedReplica(entry.getKey());
                                iterator.remove();
                            } finally {
                                requestBuffer.position(0);
                            }
                        }
                    }
                }
            } else if (job.getOperation() == ReplicationOperation.DELETE) {
                for (String filePath : job.getJobFiles()) {
                    remainingFiles--;
                    asterixFileProperties.initialize(filePath, -1, nodeId, isLSMComponentFile,
                            IMetaDataPageManager.INVALID_LSN_OFFSET, remainingFiles == 0);
                    ReplicationProtocol.writeFileReplicationRequest(requestBuffer, asterixFileProperties,
                            ReplicationRequestType.DELETE_FILE);

                    Iterator<Map.Entry<String, SocketChannel>> iterator = replicasSockets.entrySet().iterator();
                    while (iterator.hasNext()) {
                        Map.Entry<String, SocketChannel> entry = iterator.next();
                        //if the remote replica is not interested in this partition, skip it.
                        if (!replica2PartitionsMap.get(entry.getKey()).contains(jobPartitionId)) {
                            continue;
                        }
                        SocketChannel socketChannel = entry.getValue();
                        try {
                            //BUGFIX: send only to this replica's socket. The previous code called
                            //sendRequest(replicasSockets, ...) here, which broadcast the delete to
                            //every replica on each loop iteration, defeating the partition filter
                            //above and duplicating requests. This now mirrors the REPLICATE branch.
                            NetworkingUtil.transferBufferToChannel(socketChannel, requestBuffer);
                            if (asterixFileProperties.requiresAck()) {
                                waitForResponse(socketChannel, responseBuffer);
                            }
                        } catch (IOException e) {
                            reportFailedReplica(entry.getKey());
                            iterator.remove();
                        } finally {
                            requestBuffer.position(0);
                        }
                    }
                }
            }
        } finally {
            //if sync, close sockets with replicas since they wont be reused
            if (job.getExecutionType() == ReplicationExecutionType.SYNC) {
                closeReplicaSockets(replicasSockets);
            }
        }
    } finally {
        exitReplicatedLSMComponent(job);
    }
}

private static void exitReplicatedLSMComponent(IReplicationJob job) throws HyracksDataException {
    if (job.getOperation() == ReplicationOperation.REPLICATE && job instanceof ILSMIndexReplicationJob) {
        //exit the replicated LSM components
        ILSMIndexReplicationJob aJob = (ILSMIndexReplicationJob) job;
        aJob.endReplication();
    }
}

/**
 * Waits and reads a response from a remote replica
 *
 * @param socketChannel
 *            The socket to read the response from
 * @param responseBuffer
 *            The response buffer to read the response to.
 * @return The response type.
 * @throws IOException
 */
private static ReplicationRequestType waitForResponse(SocketChannel socketChannel, ByteBuffer responseBuffer)
        throws IOException {
    if (responseBuffer == null) {
        responseBuffer = ByteBuffer.allocate(ReplicationProtocol.REPLICATION_REQUEST_TYPE_SIZE);
    } else {
        responseBuffer.clear();
    }

    //read response from remote replicas
    ReplicationRequestType responseFunction = ReplicationProtocol.getRequestType(socketChannel, responseBuffer);
    return responseFunction;
}

@Override
public boolean isReplicationEnabled() {
    return replicationProperties.isReplicationEnabled();
}

@Override
public synchronized void updateReplicaInfo(Replica replicaNode) {
    Replica replica = replicas.get(replicaNode.getNode().getId());
    //should not update the info of an active replica
    if (replica.getState() == ReplicaState.ACTIVE) {
        return;
    }
    replica.getNode().setClusterIp(replicaNode.getNode().getClusterIp());
}

/**
 * Suspends proccessing replication jobs.
 *
 * @param force
 *            a flag indicates if replication should be suspended right away or when the pending jobs are
 *            completed.
 */
private void suspendReplication(boolean force) {
    //suspend replication jobs processing
    if (replicationJobsProcessor != null && replicationJobsProcessor.isAlive()) {
        if (force) {
            terminateJobsReplication.set(true);
        }
        replicationJobsQ.offer(REPLICATION_JOB_POISON_PILL);

        //wait until the jobs are suspended
        synchronized (jobsReplicationSuspended) {
            while (!jobsReplicationSuspended.get()) {
                try {
                    jobsReplicationSuspended.wait();
                } catch (InterruptedException e) {
                    //ignore
                }
            }
        }
    }

    //suspend logs replication
    if (txnlogsReplicator != null) {
        terminateTxnLogsReplicator();
    }
}

/**
 * Opens a new connection with Active remote replicas and starts a listen thread per connection.
 */
private void establishTxnLogsReplicationConnection() {
    logsReplicaSockets = getActiveRemoteReplicasSockets();
    //start a listener thread per connection
    for (Entry<String, SocketChannel> entry : logsReplicaSockets.entrySet()) {
        replicationListenerThreads
                .execute(new TxnLogsReplicationResponseListener(entry.getKey(), entry.getValue()));
    }
}

/**
 * Stops ReplicationFlusherThread and closes the sockets used to replicate logs.
 */
private void terminateTxnLogsReplicator() {
    LOGGER.log(Level.INFO, "Terminating ReplicationLogFlusher thread ...");
    txnlogsReplicator.terminate();
    try {
        txnLogReplicatorTask.get();
    } catch (ExecutionException | InterruptedException e) {
        LOGGER.log(Level.WARNING, "RepicationLogFlusher thread terminated abnormally");
        e.printStackTrace();
    }
    LOGGER.log(Level.INFO, "LogFlusher thread is terminated.");

    if (logsReplicaSockets != null) {
        //wait for any ACK to arrive before closing sockets.
        synchronized (jobCommitAcks) {
            while (jobCommitAcks.size() != 0) {
                try {
                    jobCommitAcks.wait();
                } catch (InterruptedException e) {
                    //ignore
                }
            }
        }

        //close log replication sockets
        closeReplicaSockets(logsReplicaSockets);
        logsReplicaSockets = null;
    }
}

/**
 * Sends a shutdown event to remote replicas notifying them
 * no more logs/files will be sent from this local replica.
 *
 * @throws IOException
 */
private void sendShutdownNotifiction() throws IOException {
    Node node = new Node();
    node.setId(nodeId);
    node.setClusterIp(NetworkingUtil.getHostAddress(hostIPAddressFirstOctet));
    Replica replica = new Replica(node);
    ReplicaEvent event = new ReplicaEvent(replica, ClusterEventType.NODE_SHUTTING_DOWN);
    ByteBuffer buffer = ReplicationProtocol.writeReplicaEventRequest(event);
    Map<String, SocketChannel> replicaSockets = getActiveRemoteReplicasSockets();
    sendRequest(replicaSockets, buffer);
    closeReplicaSockets(replicaSockets);
}

/**
 * Sends a request to remote replicas
 *
 * @param replicaSockets
 *            The sockets to send the request to.
 * @param requestBuffer
 *            The buffer that contains the request.
 */
private void sendRequest(Map<String, SocketChannel> replicaSockets, ByteBuffer requestBuffer) {
    Iterator<Map.Entry<String, SocketChannel>> iterator = replicaSockets.entrySet().iterator();
    while (iterator.hasNext()) {
        Entry<String, SocketChannel> replicaSocket = iterator.next();
        SocketChannel clientSocket = replicaSocket.getValue();
        try {
            NetworkingUtil.transferBufferToChannel(clientSocket, requestBuffer);
        } catch (IOException e) {
            //on failure: close the socket, report the replica as failed, and drop it from the map
            if (clientSocket.isOpen()) {
                try {
                    clientSocket.close();
                } catch (IOException e2) {
                    e2.printStackTrace();
                }
            }
            reportFailedReplica(replicaSocket.getKey());
            iterator.remove();
        } finally {
            requestBuffer.position(0);
        }
    }
}

/**
 * Closes the passed replication sockets by sending GOODBYE request to remote replicas.
 *
 * @param replicaSockets
 */
private void closeReplicaSockets(Map<String, SocketChannel> replicaSockets) {
    //send goodbye
    ByteBuffer goodbyeBuffer = ReplicationProtocol.getGoodbyeBuffer();
    sendRequest(replicaSockets, goodbyeBuffer);

    Iterator<Map.Entry<String, SocketChannel>> iterator = replicaSockets.entrySet().iterator();
    while (iterator.hasNext()) {
        Entry<String, SocketChannel> replicaSocket = iterator.next();
        SocketChannel clientSocket = replicaSocket.getValue();
        if (clientSocket.isOpen()) {
            try {
                clientSocket.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

@Override
public void initializeReplicasState() {
    for (Replica replica : replicas.values()) {
        checkReplicaState(replica.getNode().getId(), false, false);
    }
}

/**
 * Checks the state of a remote replica by trying to ping it.
 *
 * @param replicaId
 *            The replica to check the state for.
 * @param async
 *            a flag indicating whether to wait for the result or not.
 * @param suspendReplication
 *            a flag indicating whether to suspend replication on replica state change or not.
 */
private void checkReplicaState(String replicaId, boolean async, boolean suspendReplication) {
    Replica replica = replicas.get(replicaId);

    ReplicaStateChecker connector = new ReplicaStateChecker(replica,
            replicationProperties.getReplicationTimeOut(), this, replicationProperties, suspendReplication);
    Future<? extends Object> ft = asterixAppRuntimeContextProvider.getThreadExecutor().submit(connector);

    if (!async) {
        //wait until task is done
        while (!ft.isDone()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}

/**
 * Updates the state of a remote replica.
 *
 * @param replicaId
 *            The replica id to update.
 * @param newState
 *            The new state of the replica.
 * @param suspendReplication
 *            a flag indicating whether to suspend replication on state change or not.
 */
public synchronized void updateReplicaState(String replicaId, ReplicaState newState,
        boolean suspendReplication) {
    Replica replica = replicas.get(replicaId);

    if (replica.getState() == newState) {
        return;
    }

    if (suspendReplication) {
        //prevent new jobs/logs from coming in
        replicationSuspended.set(true);

        if (newState == ReplicaState.DEAD) {
            //assume the dead replica ACK has been received for all pending jobs
            synchronized (jobCommitAcks) {
                for (Integer jobId : jobCommitAcks.keySet()) {
                    addAckToJob(jobId, replicaId);
                }
            }
        }

        //force replication threads to stop in order to change the replication factor
        suspendReplication(true);
    }

    replica.setState(newState);

    if (newState == ReplicaState.ACTIVE) {
        replicationFactor++;
    } else if (newState == ReplicaState.DEAD) {
        if (replicationFactor > INITIAL_REPLICATION_FACTOR) {
            replicationFactor--;
        }
    }

    LOGGER.log(Level.WARNING, "Replica " + replicaId + " state changed to: " + newState.name()
            + ". Replication factor changed to: " + replicationFactor);

    if (suspendReplication) {
        startReplicationThreads();
    }
}

/**
 * When an ACK for a JOB_COMMIT is received, it is added to the corresponding job.
 *
 * @param jobId
 * @param replicaId
 *            The remote replica id the ACK received from.
 */
private void addAckToJob(int jobId, String replicaId) {
    //add ACK to the job
    if (jobCommitAcks.containsKey(jobId)) {
        Set<String> replicaIds = jobCommitAcks.get(jobId);
        replicaIds.add(replicaId);
    } else {
        throw new IllegalStateException("Job ID not found in pending job commits " + jobId);
    }

    //if got ACKs from all remote replicas, notify pending jobs if any
    if (jobCommitAcks.get(jobId).size() == replicationFactor) {
        synchronized (replicationJobsPendingAcks) {
            if (replicationJobsPendingAcks.containsKey(jobId)) {
                ILogRecord pendingLog = replicationJobsPendingAcks.get(jobId);
                synchronized (pendingLog) {
                    pendingLog.notify();
                }
            }
        }
    }
}

/**
 * Checks whether all expected ACKs for the log's job have arrived. Returns {@code true}
 * (and cleans up tracking state) when fully replicated; otherwise parks the record in
 * {@code replicationJobsPendingAcks} and returns {@code false}.
 */
@Override
public boolean hasBeenReplicated(ILogRecord logRecord) {
    if (jobCommitAcks.containsKey(logRecord.getJobId())) {
        //check if all ACKs have been received
        if (jobCommitAcks.get(logRecord.getJobId()).size() == replicationFactor) {
            jobCommitAcks.remove(logRecord.getJobId());

            //BUGFIX: the map is keyed by the Integer job id, but the old code called
            //remove(logRecord) with the ILogRecord itself, which never matched a key
            //and leaked the pending-ACK entry.
            if (replicationJobsPendingAcks.containsKey(logRecord.getJobId())) {
                replicationJobsPendingAcks.remove(logRecord.getJobId());
            }

            //notify any threads waiting for all jobs to finish
            if (jobCommitAcks.size() == 0) {
                synchronized (jobCommitAcks) {
                    jobCommitAcks.notifyAll();
                }
            }
            return true;
        } else {
            replicationJobsPendingAcks.putIfAbsent(logRecord.getJobId(), logRecord);
            return false;
        }
    }
    //presume replicated
    return true;
}

private Map<String, SocketChannel> getActiveRemoteReplicasSockets() {
    Map<String, SocketChannel> replicaNodesSockets = new HashMap<String, SocketChannel>();
    for (Replica replica : replicas.values()) {
        if (replica.getState() == ReplicaState.ACTIVE) {
            try {
                SocketChannel sc = getReplicaSocket(replica.getId());
                replicaNodesSockets.put(replica.getId(), sc);
            } catch (IOException e) {
                reportFailedReplica(replica.getId());
            }
        }
    }
    return replicaNodesSockets;
}

/**
 * Establishes a connection with a remote replica.
 *
 * @param replicaId
 *            The replica to connect to.
 * @return The socket of the remote replica
 * @throws IOException
 */
private SocketChannel getReplicaSocket(String replicaId) throws IOException {
    Replica replica = replicas.get(replicaId);
    SocketChannel sc = SocketChannel.open();
    sc.configureBlocking(true);
    InetSocketAddress address = replica.getAddress(replicationProperties);
    sc.connect(new InetSocketAddress(address.getHostString(), address.getPort()));
    return sc;
}

@Override
public Set<String> getDeadReplicasIds() {
    Set<String> replicasIds = new HashSet<String>();
    for (Replica replica : replicas.values()) {
        if (replica.getState() == ReplicaState.DEAD) {
            replicasIds.add(replica.getNode().getId());
        }
    }
    return replicasIds;
}

@Override
public Set<String> getActiveReplicasIds() {
    Set<String> replicasIds = new HashSet<String>();
    for (Replica replica : replicas.values()) {
        if (replica.getState() == ReplicaState.ACTIVE) {
            replicasIds.add(replica.getNode().getId());
        }
    }
    return replicasIds;
}

@Override
public int getActiveReplicasCount() {
    return getActiveReplicasIds().size();
}

@Override
public void start() {
    //do nothing
}

@Override
public void dumpState(OutputStream os) throws IOException {
    //do nothing
}

/**
 * Called during NC shutdown to notify remote replicas about the shutdown
 * and wait for remote replicas shutdown notification then closes the local replication channel.
 */
@Override
public void stop(boolean dumpState, OutputStream outputStream) throws IOException {
    try {
        //stop replication thread afters all jobs/logs have been processed
        suspendReplication(false);
        //send shutdown event to remote replicas
        sendShutdownNotifiction();
        //wait until all shutdown events come from all remote replicas
        synchronized (shuttingDownReplicaIds) {
            while (!shuttingDownReplicaIds.containsAll(getActiveReplicasIds())) {
                try {
                    shuttingDownReplicaIds.wait(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
        LOGGER.log(Level.INFO, "Got shutdown notification from all remote replicas");
        //close replication channel
        asterixAppRuntimeContextProvider.getAppContext().getReplicationChannel().close();

        LOGGER.log(Level.INFO, "Replication manager stopped.");
    } catch (Exception e) {
        e.printStackTrace();
    }
}

@Override
public void reportReplicaEvent(ReplicaEvent event) {
    synchronized (replicaEventsQ) {
        replicaEventsQ.offer(event);
    }
}

/**
 * Suspends replications and sends a remote replica failure event to ReplicasEventsMonitor.
 *
 * @param replicaId
 *            the failed replica id.
 */
public void reportFailedReplica(String replicaId) {
    Replica replica = replicas.get(replicaId);
    if (replica.getState() == ReplicaState.DEAD) {
        return;
    }

    //need to stop processing any new logs or jobs
    terminateJobsReplication.set(true);

    ReplicaEvent event = new ReplicaEvent(replica, ClusterEventType.NODE_FAILURE);
    reportReplicaEvent(event);
}

@Override
public void startReplicationThreads() {
    replicationJobsProcessor = new ReplicationJobsProccessor();

    //start/continue processing jobs/logs
    if (logsReplicaSockets == null) {
        establishTxnLogsReplicationConnection();
        getAndInitNewPage();
        txnlogsReplicator = new ReplicationLogFlusher(emptyLogBuffersQ, pendingFlushLogBuffersQ);
        txnLogReplicatorTask = asterixAppRuntimeContextProvider.getThreadExecutor().submit(txnlogsReplicator);
    }

    replicationJobsProcessor.start();

    if (!replicationMonitor.isAlive()) {
        replicationMonitor.start();
    }

    //notify any waiting threads that replication has been resumed
    synchronized (replicationSuspended) {
        LOGGER.log(Level.INFO, "Replication started/resumed");
        replicationSuspended.set(false);
        replicationSuspended.notifyAll();
    }
}

@Override
public void requestFlushLaggingReplicaIndexes(long nonSharpCheckpointTargetLSN) throws IOException {
    long startLSN = logManager.getAppendLSN();
    Set<String> replicaIds = getActiveReplicasIds();
    ByteBuffer requestBuffer = ByteBuffer.allocate(INITIAL_BUFFER_SIZE);
    for (String replicaId : replicaIds) {
        //1. identify replica indexes with LSN less than nonSharpCheckpointTargetLSN.
        Map<Long, String> laggingIndexes = replicaResourcesManager.getLaggingReplicaIndexesId2PathMap(replicaId,
                nonSharpCheckpointTargetLSN);

        if (laggingIndexes.size() > 0) {
            //2. send request to remote replicas that have lagging indexes.
            ReplicaIndexFlushRequest laggingIndexesResponse = null;
            try (SocketChannel socketChannel = getReplicaSocket(replicaId)) {
                ReplicaIndexFlushRequest laggingIndexesRequest = new ReplicaIndexFlushRequest(
                        laggingIndexes.keySet());
                requestBuffer = ReplicationProtocol.writeGetReplicaIndexFlushRequest(requestBuffer,
                        laggingIndexesRequest);
                NetworkingUtil.transferBufferToChannel(socketChannel, requestBuffer);

                //3. remote replicas will respond with indexes that were not flushed.
                ReplicationRequestType responseFunction = waitForResponse(socketChannel, requestBuffer);

                if (responseFunction == ReplicationRequestType.FLUSH_INDEX) {
                    requestBuffer = ReplicationProtocol.readRequest(socketChannel, requestBuffer);
                    //returning the indexes that were not flushed
                    laggingIndexesResponse = ReplicationProtocol.readReplicaIndexFlushRequest(requestBuffer);
                }
                //send goodbye
                ReplicationProtocol.sendGoodbye(socketChannel);
            }

            //4. update the LSN_MAP for indexes that were not flushed
            //to the current append LSN to indicate no operations happend.
            if (laggingIndexesResponse != null) {
                for (Long resouceId : laggingIndexesResponse.getLaggingRescouresIds()) {
                    String indexPath = laggingIndexes.get(resouceId);
                    Map<Long, Long> indexLSNMap = replicaResourcesManager.getReplicaIndexLSNMap(indexPath);
                    indexLSNMap.put(ReplicaResourcesManager.REPLICA_INDEX_CREATION_LSN, startLSN);
                    replicaResourcesManager.updateReplicaIndexLSNMap(indexPath, indexLSNMap);
                }
            }
        }
    }
}

//Recovery Method
@Override
public long getMaxRemoteLSN(Set<String> remoteReplicas) throws IOException {
    long maxRemoteLSN = 0;

    ReplicationProtocol.writeGetReplicaMaxLSNRequest(dataBuffer);
    Map<String, SocketChannel> replicaSockets = new HashMap<String, SocketChannel>();
    try {
        for (String replicaId : remoteReplicas) {
            replicaSockets.put(replicaId, getReplicaSocket(replicaId));
        }

        //send request
        Iterator<Map.Entry<String, SocketChannel>> iterator = replicaSockets.entrySet().iterator();
        while (iterator.hasNext()) {
            Entry<String, SocketChannel> replicaSocket = iterator.next();
            SocketChannel clientSocket = replicaSocket.getValue();
            NetworkingUtil.transferBufferToChannel(clientSocket, dataBuffer);
            dataBuffer.position(0);
        }

        iterator = replicaSockets.entrySet().iterator();
        while (iterator.hasNext()) {
            Entry<String, SocketChannel> replicaSocket = iterator.next();
            SocketChannel clientSocket = replicaSocket.getValue();
            //read response
            NetworkingUtil.readBytes(clientSocket, dataBuffer, Long.BYTES);
            maxRemoteLSN = Math.max(maxRemoteLSN, dataBuffer.getLong());
        }
    } finally {
        closeReplicaSockets(replicaSockets);
    }

    return maxRemoteLSN;
}

//Recovery Method
@Override
public void requestReplicaFiles(String selectedReplicaId, Set<String> replicasDataToRecover,
        Set<String> existingFiles) throws IOException {
    ReplicaFilesRequest request = new ReplicaFilesRequest(replicasDataToRecover, existingFiles);
    dataBuffer = ReplicationProtocol.writeGetReplicaFilesRequest(dataBuffer, request);

    try (SocketChannel socketChannel = getReplicaSocket(selectedReplicaId)) {
        //transfer request
        NetworkingUtil.transferBufferToChannel(socketChannel, dataBuffer);

        String indexPath;
        String destFilePath;
        ReplicationRequestType responseFunction = ReplicationProtocol.getRequestType(socketChannel, dataBuffer);
        LSMIndexFileProperties fileProperties;
        while (responseFunction != ReplicationRequestType.GOODBYE) {
            dataBuffer = ReplicationProtocol.readRequest(socketChannel, dataBuffer);

            fileProperties = ReplicationProtocol.readFileReplicationRequest(dataBuffer);

            //get index path
            indexPath = replicaResourcesManager.getIndexPath(fileProperties);
            destFilePath = indexPath + File.separator + fileProperties.getFileName();

            //create file
            File destFile = new File(destFilePath);
            destFile.createNewFile();

            try (RandomAccessFile fileOutputStream = new RandomAccessFile(destFile, "rw");
                    FileChannel fileChannel = fileOutputStream.getChannel();) {
                fileOutputStream.setLength(fileProperties.getFileSize());
                NetworkingUtil.downloadFile(fileChannel, socketChannel);
                fileChannel.force(true);
            }

            //we need to create LSN map for .metadata files that belong to remote replicas
            if (!fileProperties.isLSMComponentFile() && !fileProperties.getNodeId().equals(nodeId)) {
                //replica index
                replicaResourcesManager.initializeReplicaIndexLSNMap(indexPath, logManager.getAppendLSN());
            }

            responseFunction = ReplicationProtocol.getRequestType(socketChannel, dataBuffer);
        }

        //send goodbye
        ReplicationProtocol.sendGoodbye(socketChannel);
    }
}

//Recovery Method
@Override
public long requestReplicaMinLSN(String selectedReplicaId) throws IOException {
    long minLSN = 0;
    ReplicationProtocol.writeMinLSNRequest(dataBuffer);
    try (SocketChannel socketChannel = getReplicaSocket(selectedReplicaId);) {
        //transfer request
        NetworkingUtil.transferBufferToChannel(socketChannel, dataBuffer);

        //read response
        NetworkingUtil.readBytes(socketChannel, dataBuffer, Long.BYTES);
        minLSN = dataBuffer.getLong();

        //send goodbye
        ReplicationProtocol.sendGoodbye(socketChannel);
    }

    return minLSN;
}

//Recovery Method
@Override
public void requestReplicaLogs(String remoteNode, Set<String> nodeIdsToRecoverFor, long fromLSN,
        File recoveryLogsFile) throws IOException, ACIDException {
    ReplicaLogsRequest request = new ReplicaLogsRequest(nodeIdsToRecoverFor, fromLSN);
    dataBuffer = ReplicationProtocol.writeGetReplicaLogsRequest(dataBuffer, request);
    try (SocketChannel socketChannel = getReplicaSocket(remoteNode)) {
        //transfer request
        NetworkingUtil.transferBufferToChannel(socketChannel, dataBuffer);

        //read response type
        ReplicationRequestType responseType = ReplicationProtocol.getRequestType(socketChannel, dataBuffer);

        ILogRecord logRecord = new LogRecord();
        Set<Integer> nodePartitions = ((PersistentLocalResourceRepository) asterixAppRuntimeContextProvider
                .getLocalResourceRepository()).getNodeOrignalPartitions();
        Set<Integer> nodePartitionsJobs = new HashSet<>();
        try (RandomAccessFile raf = new RandomAccessFile(recoveryLogsFile, "rw");
                FileChannel fileChannel = raf.getChannel();) {
            while (responseType != ReplicationRequestType.GOODBYE) {
                dataBuffer = ReplicationProtocol.readRequest(socketChannel, dataBuffer);
                logRecord.readRemoteLog(dataBuffer, true);
                switch (logRecord.getLogType()) {
                    case LogType.UPDATE:
                    case LogType.ENTITY_COMMIT:
                    case LogType.UPSERT_ENTITY_COMMIT:
                        if (nodePartitions.contains(logRecord.getResourcePartition())) {
                            nodePartitionsJobs.add(logRecord.getJobId());
                            dataBuffer.flip();
                            while (dataBuffer.hasRemaining()) {
                                //store log in temp file to replay it for recovery
                                fileChannel.write(dataBuffer);
                            }
                        } else {
                            //send log to log manager as a remote recovery log
                            logManager.log(logRecord);
                        }
                        break;
                    case LogType.JOB_COMMIT:
                        if (nodePartitionsJobs.contains(logRecord.getJobId())) {
                            nodePartitionsJobs.remove(logRecord.getJobId());
                            dataBuffer.flip();
                            while (dataBuffer.hasRemaining()) {
                                //store log in temp file to replay it for recovery
                                fileChannel.write(dataBuffer);
                            }
                            break;
                        }
                        logManager.log(logRecord);
                        break;
                    case LogType.ABORT:
                    case LogType.FLUSH:
                        break;
                    default:
                        throw new ACIDException("Unsupported LogType: " + logRecord.getLogType());
                }
                responseType = ReplicationProtocol.getRequestType(socketChannel, dataBuffer);
            }
        }

        //send goodbye
        ReplicationProtocol.sendGoodbye(socketChannel);
    }
}

//supporting classes
/**
 * This class is responsible for processing replica events.
 */
private class ReplicasEventsMonitor extends Thread {
    ReplicaEvent event;

    @Override
    public void run() {
        while (true) {
            try {
                event = replicaEventsQ.take();

                switch (event.getEventType()) {
                    case NODE_FAILURE:
                        handleReplicaFailure(event.getReplica().getId());
                        break;
                    case NODE_JOIN:
                        updateReplicaInfo(event.getReplica());
                        checkReplicaState(event.getReplica().getId(), false, true);
                        break;
                    case NODE_SHUTTING_DOWN:
                        handleShutdownEvent(event.getReplica().getId());
                        break;
                    default:
                        break;
                }
            } catch (InterruptedException e) {
                //ignore
            }
        }
    }

    public void handleReplicaFailure(String replicaId) {
        Replica replica = replicas.get(replicaId);

        if (replica.getState() == ReplicaState.DEAD) {
            return;
        }

        updateReplicaState(replicaId, ReplicaState.DEAD, true);

        //delete any invalid LSMComponents for this replica
        replicaResourcesManager.cleanInvalidLSMComponents(replicaId);
    }

    public void handleShutdownEvent(String replicaId) {
        synchronized (shuttingDownReplicaIds) {
            shuttingDownReplicaIds.add(replicaId);
            shuttingDownReplicaIds.notifyAll();
        }
    }
}

/**
 * This class is responsible for processing ASYNC replication jobs.
 */
private class ReplicationJobsProccessor extends Thread {
    Map<String, SocketChannel> replicaSockets;
    ByteBuffer reusableBuffer = ByteBuffer.allocate(INITIAL_BUFFER_SIZE);

    @Override
    public void run() {
        Thread.currentThread().setName("ReplicationJobsProccessor Thread");
        terminateJobsReplication.set(false);
        jobsReplicationSuspended.set(false);

        while (true) {
            try {
                if (terminateJobsReplication.get()) {
                    closeSockets();
                    break;
                }

                IReplicationJob job = replicationJobsQ.take();
                if (job == REPLICATION_JOB_POISON_PILL) {
                    terminateJobsReplication.set(true);
                    continue;
                }

                //if there isn't already a connection, establish a new one
                if (replicaSockets == null) {
                    replicaSockets = getActiveRemoteReplicasSockets();
                }
                processJob(job, replicaSockets, reusableBuffer);

                //if no more jobs to process, close sockets
                if (replicationJobsQ.size() == 0) {
                    LOGGER.log(Level.INFO, "No pending replication jobs. Closing connections to replicas");
                    closeSockets();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        synchronized (jobsReplicationSuspended) {
            jobsReplicationSuspended.set(true);
            jobsReplicationSuspended.notifyAll();
        }
        LOGGER.log(Level.INFO, "ReplicationJobsProccessor stopped. ");
    }

    private void closeSockets() {
        if (replicaSockets != null) {
            closeReplicaSockets(replicaSockets);
            replicaSockets.clear();
            replicaSockets = null;
        }
    }
}

/**
 * This class is responsible for listening on sockets that belong to TxnLogsReplicator.
 */
private class TxnLogsReplicationResponseListener implements Runnable {
    final SocketChannel replicaSocket;
    final String replicaId;

    public TxnLogsReplicationResponseListener(String replicaId, SocketChannel replicaSocket) {
        this.replicaId = replicaId;
        this.replicaSocket = replicaSocket;
    }

    @Override
    public void run() {
        Thread.currentThread().setName("TxnLogs Replication Listener Thread");
        LOGGER.log(Level.INFO,
                "Started listening on socket: " + replicaSocket.socket().getRemoteSocketAddress());

        try {
            BufferedReader incomingResponse = new BufferedReader(
                    new InputStreamReader(replicaSocket.socket().getInputStream()));
            String responseLine = "";
            while (true) {
                responseLine = incomingResponse.readLine();
                if (responseLine == null) {
                    break;
                }
                //read ACK for job commit log
                //(renamed from replicaId to avoid shadowing the field of the same name)
                String ackReplicaId = ReplicationProtocol.getNodeIdFromLogAckMessage(responseLine);
                int jobId = ReplicationProtocol.getJobIdFromLogAckMessage(responseLine);
                addAckToJob(jobId, ackReplicaId);
            }
        } catch (AsynchronousCloseException e1) {
            LOGGER.log(Level.INFO, "Replication listener stopped for remote replica: " + replicaId);
        } catch (IOException e2) {
            reportFailedReplica(replicaId);
        }
    }
}
}
/** * Created by Jagrut on 23-Jan-16. */ package dashit.uni.com.dashit.service; import android.app.Notification; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.graphics.PixelFormat; import android.hardware.Camera; import android.media.CamcorderProfile; import android.media.MediaRecorder; import android.os.Environment; import android.os.Handler; import android.os.IBinder; import android.support.annotation.Nullable; import android.text.format.DateFormat; import android.util.DisplayMetrics; import android.util.Log; import android.util.TypedValue; import android.view.Gravity; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.WindowManager; import android.widget.Toast; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Date; import dashit.uni.com.dashit.R; import dashit.uni.com.dashit.view.activity.MainActivity; /** * The main application logic. All background tasks are synchronized here. * That includes, Video Recording, File Saving, Hash Creation, Hash Transmission */ public class BackgroundService extends Service implements SurfaceHolder.Callback { private boolean recordingStatus; static boolean accidentStatus = false; boolean manualStopStatus = false; int accidentOnVideoIndex = 0; Handler handler; static String accidentLocation; private WindowManager windowManager; private SurfaceView surfaceView; private Camera camera = null; private MediaRecorder mediaRecorder = null; SurfaceHolder globalHolder; Thread thread = null; int height = 0; int width = 0; /** * Create a surface to hold the camera preview. This surface rests above all surface. 
* Create Notification to let the user know anytime that the application is running. */ @Override public void onCreate() { super.onCreate(); handler = new Handler(); if (isExternalStorageWritable()) { for (int i = 1; i < 4; i++) { File delPreviousFiles = new File(Environment.getExternalStorageDirectory().toString() + "/dashit" + i + ".mp4"); delPreviousFiles.delete(); } } else { Toast.makeText(BackgroundService.this, R.string.instruction_storage_device_problem, Toast.LENGTH_LONG).show(); onDestroy(); } //Intent to start when user taps on notification Intent notificationIntent = new Intent(getApplicationContext(), MainActivity.class); notificationIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); notificationIntent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP); PendingIntent contentIntent = PendingIntent.getActivity(getApplicationContext(), 0, notificationIntent, 0); // Start foreground service to avoid unexpected kill Notification notification = new Notification.Builder(this) .setContentTitle("DashIt") .setContentText("Video is recorded in background.") .setSmallIcon(R.drawable.ic_launcher) .setContentIntent(contentIntent) .build(); notification.flags = Notification.FLAG_AUTO_CANCEL | Notification.FLAG_ONGOING_EVENT; startForeground(Integer.MAX_VALUE, notification); // Create new SurfaceView, set its size to a small square relative to screen size and DPI windowManager = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE); surfaceView = new SurfaceView(this); DisplayMetrics displaymetrics = new DisplayMetrics(); windowManager.getDefaultDisplay().getMetrics(displaymetrics); height = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, displaymetrics.heightPixels, getResources().getDisplayMetrics()); width = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, displaymetrics.widthPixels, getResources().getDisplayMetrics()); WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams( (int) (width * 0.02), (int) (height * 0.012), 
WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY, WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH, PixelFormat.TRANSLUCENT ); // Move it to the top right corner and set this service as a callback layoutParams.gravity = Gravity.TOP | Gravity.END; layoutParams.screenOrientation = 90; windowManager.addView(surfaceView, layoutParams); surfaceView.getHolder().addCallback(this); } /** * When the surface is created, it will start recording the video in infinite loop. * The variable 'accidentStatus' changes in MyBroadcastReceiver when a collision is detected * to notify when to stop video recording * * @param holder The Surface holder on which to attach the camera preview */ @Override public void surfaceCreated(SurfaceHolder holder) { globalHolder = holder; thread = new Thread(new Runnable() { @Override public void run() { while (!accidentStatus) { int i = 1; while (i < 3) { if (!accidentStatus && !manualStopStatus) { accidentOnVideoIndex = i; startRecording("dashit" + i); try { Thread.sleep(20000); if (recordingStatus) stopRecording(); } catch (InterruptedException e) { e.printStackTrace(); } i++; } else { break; } } } if (!manualStopStatus) { startRecording("dashit3"); try { Thread.sleep(20000); stopRecording(); orderAndSaveVideos(); windowManager.removeView(surfaceView); } catch (InterruptedException e) { e.printStackTrace(); } } } }); thread.start(); } /** * Cleanup tasks when the background service exits: either manually by user or due to collision */ @Override public void onDestroy() { manualStopStatus = true; if (recordingStatus) { stopRecording(); } try { thread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } @Nullable @Override public IBinder onBind(Intent intent) { return null; } /** * Check to find if the device is in proper state to handle File write * * @return {boolean} writable or not */ public boolean isExternalStorageWritable() { return Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState()); } /** * Start recording 
the video * @param fileName store the following video recording under this name */ public void startRecording(String fileName) { recordingStatus = true; camera = Camera.open(); camera.setDisplayOrientation(90); Camera.Parameters parameters = camera.getParameters(); parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); parameters.set("cam_mode", 1); camera.setParameters(parameters); camera.unlock(); CamcorderProfile profile = null; if(CamcorderProfile.hasProfile(CamcorderProfile.QUALITY_720P)){ profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P); }else if(CamcorderProfile.hasProfile(CamcorderProfile.QUALITY_480P)){ profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P); }else{ profile = CamcorderProfile.get(CamcorderProfile.QUALITY_LOW); } mediaRecorder = new MediaRecorder(); mediaRecorder.setCamera(camera); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER); mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); mediaRecorder.setProfile(profile); mediaRecorder.setOrientationHint(90); mediaRecorder.setOutputFile(Environment.getExternalStorageDirectory().toString() + "/" + fileName + ".mp4"); mediaRecorder.setPreviewDisplay(globalHolder.getSurface()); try { mediaRecorder.prepare(); } catch (Exception e) { e.printStackTrace(); } mediaRecorder.start(); } /** * Stop recording video. Executed when called. */ public void stopRecording() { recordingStatus = false; mediaRecorder.stop(); mediaRecorder.reset(); mediaRecorder.release(); camera.lock(); camera.release(); } /** * When received a confirmation of collision, order the videos in correct order and save them. * Initiate PostCollisionTasksService to handle further tasks. 
*/ public void orderAndSaveVideos() { int[] orderOfVideo = new int[3]; if (accidentOnVideoIndex == 2) { orderOfVideo[0] = 1; orderOfVideo[1] = 2; } else { orderOfVideo[0] = 2; orderOfVideo[1] = 1; } orderOfVideo[2] = 3; File dir = new File(Environment.getExternalStorageDirectory().toString() + "/dashitHistory/" + DateFormat.format("dd-MM-yyyy HH:mm", new Date().getTime())); if (!dir.isDirectory()) dir.mkdirs(); for (int i = 0; i < 3; i++) { File fileFrom = new File(Environment.getExternalStorageDirectory().toString() + "/dashit" + orderOfVideo[i] + ".mp4"); if (fileFrom.exists() && !fileFrom.isDirectory()) { File fileTo = new File(dir.getPath() + "/" + (i + 1) + "accVideo" + orderOfVideo[i] + ".mp4"); if(fileFrom.renameTo(fileTo)) Log.i("Video Files: ", "Saved Successfully"); } } //Initate PostCollisionTasksService to handle other tasks. Intent postCollisionTasks = new Intent(getApplicationContext(), PostCollisionTasksService.class); postCollisionTasks.putExtra("directoryPath", dir.getAbsolutePath()); postCollisionTasks.putExtra("accidentLocation", accidentLocation); startService(postCollisionTasks); stopSelf(); } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { } @Override public void surfaceDestroyed(SurfaceHolder holder) { } /** * The class which listens to any collision event from SensorService */ public static class CollisionBroadcastReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { accidentLocation = (String) intent.getExtras().get("accidentLocation"); accidentStatus = true; } } }
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.tools.particleeditor;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.EventQueue;
import java.awt.Graphics;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;

import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;
import javax.swing.border.CompoundBorder;
import javax.swing.plaf.basic.BasicSplitPaneUI;

import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Files.FileType;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.backends.lwjgl.LwjglCanvas;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.ParticleEffect;
import com.badlogic.gdx.graphics.g2d.ParticleEmitter;
import com.badlogic.gdx.graphics.g2d.ParticleEmitter.GradientColorValue;
import com.badlogic.gdx.graphics.g2d.ParticleEmitter.NumericValue;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.utils.GdxRuntimeException;

/**
 * Swing application for editing libGDX {@link ParticleEffect}s. The frame hosts
 * an embedded LWJGL canvas ({@link Renderer}) for live preview on the left and
 * Swing property panels (editor + per-emitter rows) on the right.
 */
public class ParticleEditor extends JFrame {
	public static final String DEFAULT_PARTICLE = "particle.png";

	LwjglCanvas lwjglCanvas;
	JPanel rowsPanel;       // per-emitter property rows
	JPanel editRowsPanel;   // global editor property rows
	EffectPanel effectPanel;
	private JSplitPane splitPane;
	OrthographicCamera worldCamera;
	OrthographicCamera textCamera;
	NumericValue pixelsPerMeter;
	NumericValue zoomLevel;
	NumericValue deltaMultiplier;
	GradientColorValue backgroundColor;
	// Previous values, used by render() to detect camera-affecting changes.
	float pixelsPerMeterPrev;
	float zoomLevelPrev;
	ParticleEffect effect = new ParticleEffect();
	// Per-emitter UI state (icon, image path, enabled flag).
	final HashMap<ParticleEmitter, ParticleData> particleData = new HashMap();

	public ParticleEditor () {
		super("Particle Editor");

		lwjglCanvas = new LwjglCanvas(new Renderer(), false);
		addWindowListener(new WindowAdapter() {
			public void windowClosed (WindowEvent event) {
				System.exit(0);
				// Gdx.app.quit();
			}
		});

		initializeComponents();

		setSize(1000, 950);
		setLocationRelativeTo(null);
		setDefaultCloseOperation(DISPOSE_ON_CLOSE);
		setVisible(true);
	}

	/**
	 * Rebuilds both property panels for the currently selected emitter.
	 * Scheduled on the EDT because it mutates Swing components and may be
	 * called from the GL thread.
	 */
	void reloadRows () {
		EventQueue.invokeLater(new Runnable() {
			public void run () {
				// Global editor properties.
				editRowsPanel.removeAll();
				addEditorRow(new NumericPanel(pixelsPerMeter, "Pixels per meter", ""));
				addEditorRow(new NumericPanel(zoomLevel, "Zoom level", ""));
				addEditorRow(new NumericPanel(deltaMultiplier, "Delta multiplier", ""));
				addEditorRow(new GradientPanel(backgroundColor, "Background color", "", true));

				// Properties of the emitter currently selected in the effect panel.
				rowsPanel.removeAll();
				ParticleEmitter emitter = getEmitter();
				addRow(new ImagePanel(ParticleEditor.this, "Image", ""));
				addRow(new CountPanel(ParticleEditor.this, "Count",
					"Min number of particles at all times, max number of particles allowed."));
				addRow(new RangedNumericPanel(emitter.getDelay(), "Delay",
					"Time from beginning of effect to emission start, in milliseconds."));
				addRow(new RangedNumericPanel(emitter.getDuration(), "Duration", "Time particles will be emitted, in milliseconds."));
				addRow(new ScaledNumericPanel(emitter.getEmission(), "Duration", "Emission",
					"Number of particles emitted per second."));
				addRow(new ScaledNumericPanel(emitter.getLife(), "Duration", "Life", "Time particles will live, in milliseconds."));
				addRow(new ScaledNumericPanel(emitter.getLifeOffset(), "Duration", "Life Offset",
					"Particle starting life consumed, in milliseconds."));
				addRow(new RangedNumericPanel(emitter.getXOffsetValue(), "X Offset",
					"Amount to offset a particle's starting X location, in world units."));
				addRow(new RangedNumericPanel(emitter.getYOffsetValue(), "Y Offset",
					"Amount to offset a particle's starting Y location, in world units."));
				addRow(new SpawnPanel(ParticleEditor.this, emitter.getSpawnShape(), "Spawn", "Shape used to spawn particles."));
				addRow(new ScaledNumericPanel(emitter.getSpawnWidth(), "Duration", "Spawn Width",
					"Width of the spawn shape, in world units."));
				addRow(new ScaledNumericPanel(emitter.getSpawnHeight(), "Duration", "Spawn Height",
					"Height of the spawn shape, in world units."));
				addRow(new ScaledNumericPanel(emitter.getScale(), "Life", "Size", "Particle size, in world units."));
				addRow(new ScaledNumericPanel(emitter.getVelocity(), "Life", "Velocity",
					"Particle speed, in world units per second."));
				addRow(new ScaledNumericPanel(emitter.getAngle(), "Life", "Angle", "Particle emission angle, in degrees."));
				addRow(new ScaledNumericPanel(emitter.getRotation(), "Life", "Rotation", "Particle rotation, in degrees."));
				addRow(new ScaledNumericPanel(emitter.getWind(), "Life", "Wind", "Wind strength, in world units per second."));
				addRow(new ScaledNumericPanel(emitter.getGravity(), "Life", "Gravity",
					"Gravity strength, in world units per second."));
				addRow(new GradientPanel(emitter.getTint(), "Tint", "", false));
				addRow(new PercentagePanel(emitter.getTransparency(), "Life", "Transparency", ""));
				addRow(new OptionsPanel(ParticleEditor.this, "Options", ""));
				for (Component component : rowsPanel.getComponents())
					if (component instanceof EditorPanel) ((EditorPanel)component).update(ParticleEditor.this);
				rowsPanel.repaint();
			}
		});
	}

	/** Appends a row to the global editor-properties panel. */
	void addEditorRow (JPanel row) {
		row.setBorder(BorderFactory.createMatteBorder(0, 0, 1, 0, java.awt.Color.black));
		editRowsPanel.add(row, new GridBagConstraints(0, -1, 1, 1, 1, 0, GridBagConstraints.CENTER,
			GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
	}

	/** Appends a row to the per-emitter properties panel. */
	void addRow (JPanel row) {
		row.setBorder(BorderFactory.createMatteBorder(0, 0, 1, 0, java.awt.Color.black));
		rowsPanel.add(row, new GridBagConstraints(0, -1, 1, 1, 1, 0, GridBagConstraints.CENTER,
			GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
	}

	/** Shows or hides the named property row. */
	public void setVisible (String name, boolean visible) {
		for (Component component : rowsPanel.getComponents())
			if (component instanceof EditorPanel && ((EditorPanel)component).getName().equals(name)) component.setVisible(visible);
	}

	/** Returns the emitter currently selected for editing. */
	public ParticleEmitter getEmitter () {
		return effect.getEmitters().get(effectPanel.editIndex);
	}

	/**
	 * Returns (lazily loading) the icon for the emitter's image. Looks on disk
	 * first, then the classpath; returns null if neither exists.
	 */
	public ImageIcon getIcon (ParticleEmitter emitter) {
		ParticleData data = particleData.get(emitter);
		if (data == null) particleData.put(emitter, data = new ParticleData());
		String imagePath = emitter.getImagePath();
		if (data.icon == null && imagePath != null) {
			try {
				URL url;
				File file = new File(imagePath);
				if (file.exists())
					url = file.toURI().toURL();
				else {
					url = ParticleEditor.class.getResource(imagePath);
					if (url == null) return null;
				}
				data.icon = new ImageIcon(url);
			} catch (MalformedURLException ex) {
				ex.printStackTrace();
			}
		}
		return data.icon;
	}

	/** Caches the given icon for the emitter. */
	public void setIcon (ParticleEmitter emitters, ImageIcon icon) {
		ParticleData data = particleData.get(emitters);
		if (data == null) particleData.put(emitters, data = new ParticleData());
		data.icon = icon;
	}

	/** Enables/disables an emitter in the preview; resets it so it restarts cleanly. */
	public void setEnabled (ParticleEmitter emitter, boolean enabled) {
		ParticleData data = particleData.get(emitter);
		if (data == null) particleData.put(emitter, data = new ParticleData());
		data.enabled = enabled;
		emitter.reset();
	}

	/** An emitter with no recorded state defaults to enabled. */
	public boolean isEnabled (ParticleEmitter emitter) {
		ParticleData data = particleData.get(emitter);
		if (data == null) return true;
		return data.enabled;
	}

	/**
	 * Builds the frame layout: left split holds the GL canvas over the emitter
	 * list; right split holds editor properties over emitter properties.
	 * Split-pane dividers are painted flat via custom BasicSplitPaneUI.
	 */
	private void initializeComponents () {
// {
// JMenuBar menuBar = new JMenuBar();
// setJMenuBar(menuBar);
// JPopupMenu.setDefaultLightWeightPopupEnabled(false);
// JMenu fileMenu = new JMenu("File");
// menuBar.add(fileMenu);
// }
		splitPane = new JSplitPane();
		splitPane.setUI(new BasicSplitPaneUI() {
			public void paint (Graphics g, JComponent jc) {
			}
		});
		splitPane.setDividerSize(4);
		getContentPane().add(splitPane, BorderLayout.CENTER);
		{
			JSplitPane rightSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
			rightSplit.setUI(new BasicSplitPaneUI() {
				public void paint (Graphics g, JComponent jc) {
				}
			});
			rightSplit.setDividerSize(4);
			splitPane.add(rightSplit, JSplitPane.RIGHT);
			{
				JPanel propertiesPanel = new JPanel(new GridBagLayout());
				rightSplit.add(propertiesPanel, JSplitPane.TOP);
				propertiesPanel.setBorder(new CompoundBorder(BorderFactory.createEmptyBorder(3, 0, 6, 6), BorderFactory
					.createTitledBorder("Editor Properties")));
				{
					JScrollPane scroll = new JScrollPane();
					propertiesPanel.add(scroll, new GridBagConstraints(0, 0, 1, 1, 1, 1, GridBagConstraints.NORTH,
						GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
					scroll.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
					{
						editRowsPanel = new JPanel(new GridBagLayout());
						scroll.setViewportView(editRowsPanel);
						scroll.getVerticalScrollBar().setUnitIncrement(70);
					}
				}
			}
			{
				JPanel propertiesPanel = new JPanel(new GridBagLayout());
				rightSplit.add(propertiesPanel, JSplitPane.BOTTOM);
				propertiesPanel.setBorder(new CompoundBorder(BorderFactory.createEmptyBorder(3, 0, 6, 6), BorderFactory
					.createTitledBorder("Emitter Properties")));
				{
					JScrollPane scroll = new JScrollPane();
					propertiesPanel.add(scroll, new GridBagConstraints(0, 0, 1, 1, 1, 1, GridBagConstraints.NORTH,
						GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
					scroll.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
					{
						rowsPanel = new JPanel(new GridBagLayout());
						scroll.setViewportView(rowsPanel);
						scroll.getVerticalScrollBar().setUnitIncrement(70);
					}
				}
			}
			rightSplit.setDividerLocation(200);
		}
		{
			JSplitPane leftSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
			leftSplit.setUI(new BasicSplitPaneUI() {
				public void paint (Graphics g, JComponent jc) {
				}
			});
			leftSplit.setDividerSize(4);
			splitPane.add(leftSplit, JSplitPane.LEFT);
			{
				JPanel spacer = new JPanel(new BorderLayout());
				leftSplit.add(spacer, JSplitPane.TOP);
				spacer.add(lwjglCanvas.getCanvas());
				spacer.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 4));
			}
			{
				JPanel emittersPanel = new JPanel(new BorderLayout());
				leftSplit.add(emittersPanel, JSplitPane.BOTTOM);
				emittersPanel.setBorder(new CompoundBorder(BorderFactory.createEmptyBorder(0, 6, 6, 0), BorderFactory
					.createTitledBorder("Effect Emitters")));
				{
					effectPanel = new EffectPanel(this);
					emittersPanel.add(effectPanel);
				}
			}
			leftSplit.setDividerLocation(625);
		}
		splitPane.setDividerLocation(325);
	}

	/**
	 * libGDX side of the editor: renders the particle effect in the embedded
	 * canvas and forwards touch input to reposition the effect. Runs on the GL
	 * thread, not the EDT.
	 */
	class Renderer implements ApplicationListener, InputProcessor {
		private float maxActiveTimer;              // window timer for the "Max" particle counter
		private int maxActive, lastMaxActive;      // running and last-reported max active particles
		private boolean mouseDown;
		private int activeCount;
		private int mouseX, mouseY;
		private BitmapFont font;
		private SpriteBatch spriteBatch;
		private Sprite bgImage; // BOZO - Add setting background image to UI.

		public void create () {
			// Guard against re-entry: create() can be called again on context re-creation.
			if (spriteBatch != null) return;

			Texture.setEnforcePotImages(false);

			spriteBatch = new SpriteBatch();

			worldCamera = new OrthographicCamera();
			textCamera = new OrthographicCamera();

			pixelsPerMeter = new NumericValue();
			pixelsPerMeter.setValue(1.0f);
			pixelsPerMeter.setAlwaysActive(true);

			zoomLevel = new NumericValue();
			zoomLevel.setValue(1.0f);
			zoomLevel.setAlwaysActive(true);

			deltaMultiplier = new NumericValue();
			deltaMultiplier.setValue(1.0f);
			deltaMultiplier.setAlwaysActive(true);

			backgroundColor = new GradientColorValue();
			backgroundColor.setColors(new float[] { 0f, 0f, 0f});

			font = new BitmapFont(Gdx.files.getFileHandle("default.fnt", FileType.Internal), Gdx.files.getFileHandle("default.png",
				FileType.Internal), true);
			effectPanel.newExampleEmitter("Untitled", true);
			// if (resources.openFile("/editor-bg.png") != null) bgImage = new Image(gl, "/editor-bg.png");

			Gdx.input.setInputProcessor(this);
		}

		@Override
		public void resize (int width, int height) {
			Gdx.gl.glViewport(0, 0, width, height);

			if (pixelsPerMeter.getValue() <= 0) {
				pixelsPerMeter.setValue(1);
			}
			worldCamera.setToOrtho(false, width / pixelsPerMeter.getValue(), height / pixelsPerMeter.getValue());
			worldCamera.update();

			// Text camera is y-down so HUD coordinates read top-to-bottom.
			textCamera.setToOrtho(true, width, height);
			textCamera.update();

			effect.setPosition(worldCamera.viewportWidth / 2, worldCamera.viewportHeight / 2);
		}

		public void render () {
			int viewWidth = Gdx.graphics.getWidth();
			int viewHeight = Gdx.graphics.getHeight();

			float delta = Math.max(0, Gdx.graphics.getDeltaTime() * deltaMultiplier.getValue());

			float[] colors = backgroundColor.getColors();
			Gdx.gl.glClearColor(colors[0], colors[1], colors[2], 1.0f);
			Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT);

			// Rebuild the world camera when zoom or pixels-per-meter changed in the UI.
			if ((pixelsPerMeter.getValue() != pixelsPerMeterPrev) || (zoomLevel.getValue() != zoomLevelPrev)) {
				if (pixelsPerMeter.getValue() <= 0) {
					pixelsPerMeter.setValue(1);
				}

				worldCamera.setToOrtho(false, viewWidth / pixelsPerMeter.getValue(), viewHeight / pixelsPerMeter.getValue());
				worldCamera.zoom = zoomLevel.getValue();
				worldCamera.update();
				effect.setPosition(worldCamera.viewportWidth / 2, worldCamera.viewportHeight / 2);
				zoomLevelPrev = zoomLevel.getValue();
				pixelsPerMeterPrev = pixelsPerMeter.getValue();
			}

			spriteBatch.setProjectionMatrix(worldCamera.combined);

			spriteBatch.begin();
			spriteBatch.enableBlending();
			spriteBatch.setBlendFunction(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);

			if (bgImage != null) {
				bgImage.setPosition(viewWidth / 2 - bgImage.getWidth() / 2, viewHeight / 2 - bgImage.getHeight() / 2);
				bgImage.draw(spriteBatch);
			}

			// Draw all enabled emitters, lazily loading sprites; restart the
			// whole effect when every enabled emitter has completed.
			activeCount = 0;
			boolean complete = true;
			for (ParticleEmitter emitter : effect.getEmitters()) {
				if (emitter.getSprite() == null && emitter.getImagePath() != null) loadImage(emitter);
				boolean enabled = isEnabled(emitter);
				if (enabled) {
					if (emitter.getSprite() != null) emitter.draw(spriteBatch, delta);
					activeCount += emitter.getActiveCount();
					if (!emitter.isComplete()) complete = false;
				}
			}
			if (complete) effect.start();

			// Track the peak particle count over 3-second windows for the HUD.
			maxActive = Math.max(maxActive, activeCount);
			maxActiveTimer += delta;
			if (maxActiveTimer > 3) {
				maxActiveTimer = 0;
				lastMaxActive = maxActive;
				maxActive = 0;
			}

			if (mouseDown) {
				// gl.drawLine(mouseX - 6, mouseY, mouseX + 5, mouseY);
				// gl.drawLine(mouseX, mouseY - 5, mouseX, mouseY + 6);
			}

			spriteBatch.setProjectionMatrix(textCamera.combined);
			font.draw(spriteBatch, "FPS: " + Gdx.graphics.getFramesPerSecond(), 5, 15);
			font.draw(spriteBatch, "Count: " + activeCount, 5, 35);
			font.draw(spriteBatch, "Max: " + lastMaxActive, 5, 55);
			font.draw(spriteBatch, (int)(getEmitter().getPercentComplete() * 100) + "%", 5, 75);

			spriteBatch.end();

			// gl.drawLine((int)(viewWidth * getCurrentParticles().getPercentComplete()), viewHeight - 1, viewWidth, viewHeight -
			// 1);
		}

		/**
		 * Loads the emitter's sprite from its image path (classpath for the
		 * default particle, absolute path otherwise). On failure, reports the
		 * error on the EDT and clears the bad path so we don't retry every frame.
		 */
		private void loadImage (ParticleEmitter emitter) {
			final String imagePath = emitter.getImagePath();
			String imageName = new File(imagePath.replace('\\', '/')).getName();
			try {
				FileHandle file;
				if (imagePath.equals(ParticleEditor.DEFAULT_PARTICLE))
					file = Gdx.files.classpath(imagePath);
				else
					file = Gdx.files.absolute(imagePath);
				emitter.setSprite(new Sprite(new Texture(file)));
			} catch (GdxRuntimeException ex) {
				ex.printStackTrace();
				EventQueue.invokeLater(new Runnable() {
					public void run () {
						JOptionPane.showMessageDialog(ParticleEditor.this, "Error loading image:\n" + imagePath);
					}
				});
				emitter.setImagePath(null);
			}
		}

		public boolean keyDown (int keycode) {
			return false;
		}

		public boolean keyUp (int keycode) {
			return false;
		}

		public boolean keyTyped (char character) {
			return false;
		}

		// Touch-down moves the effect to the clicked world position.
		public boolean touchDown (int x, int y, int pointer, int newParam) {
			Vector3 touchPoint = new Vector3(x, y, 0);
			worldCamera.unproject(touchPoint);
			effect.setPosition(touchPoint.x, touchPoint.y);
			return false;
		}

		// Touch-up bounces focus back to the Swing frame so keyboard shortcuts keep working.
		public boolean touchUp (int x, int y, int pointer, int button) {
			ParticleEditor.this.dispatchEvent(new WindowEvent(ParticleEditor.this, WindowEvent.WINDOW_LOST_FOCUS));
			ParticleEditor.this.dispatchEvent(new WindowEvent(ParticleEditor.this, WindowEvent.WINDOW_GAINED_FOCUS));
			ParticleEditor.this.requestFocusInWindow();
			return false;
		}

		public boolean touchDragged (int x, int y, int pointer) {
			Vector3 touchPoint = new Vector3(x, y, 0);
			worldCamera.unproject(touchPoint);
			effect.setPosition(touchPoint.x, touchPoint.y);
			return false;
		}

		@Override
		public void dispose () {
		}

		@Override
		public void pause () {
		}

		@Override
		public void resume () {
		}

		@Override
		public boolean mouseMoved (int x, int y) {
			return false;
		}

		@Override
		public boolean scrolled (int amount) {
			return false;
		}
	}

	/** UI-side state tracked per emitter: cached icon, image path, enabled flag. */
	static class ParticleData {
		public ImageIcon icon;
		public String imagePath;
		public boolean enabled = true;
	}

	public static void main (String[] args) {
		// Prefer the Nimbus look-and-feel when available; fall back silently.
		for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
			if ("Nimbus".equals(info.getName())) {
				try {
					UIManager.setLookAndFeel(info.getClassName());
				} catch (Throwable ignored) {
				}
				break;
			}
		}
		EventQueue.invokeLater(new Runnable() {
			public void run () {
				new ParticleEditor();
			}
		});
	}
}
package org.ovirt.engine.ui.webadmin.section.main.view;

import javax.inject.Inject;

import org.ovirt.engine.ui.common.idhandler.ElementIdHandler;
import org.ovirt.engine.ui.common.idhandler.WithElementId;
import org.ovirt.engine.ui.common.system.ClientStorage;
import org.ovirt.engine.ui.common.view.AbstractView;
import org.ovirt.engine.ui.webadmin.ApplicationConstants;
import org.ovirt.engine.ui.webadmin.ApplicationMessages;
import org.ovirt.engine.ui.webadmin.ApplicationResources;
import org.ovirt.engine.ui.webadmin.ApplicationTemplates;
import org.ovirt.engine.ui.webadmin.section.main.presenter.MainSectionPresenter;
import org.ovirt.engine.ui.webadmin.section.main.presenter.MainTabBarOffsetUiHandlers;
import org.ovirt.engine.ui.webadmin.system.InternalConfiguration;
import org.ovirt.engine.ui.webadmin.uicommon.model.AlertFirstRowModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.AlertModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.BookmarkModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.EventFirstRowModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.EventModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.SystemTreeModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.TagModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.TaskFirstRowModelProvider;
import org.ovirt.engine.ui.webadmin.uicommon.model.TaskModelProvider;
import org.ovirt.engine.ui.webadmin.widget.bookmark.BookmarkList;
import org.ovirt.engine.ui.webadmin.widget.footer.AlertsEventsFooterView;
import org.ovirt.engine.ui.webadmin.widget.tags.TagList;
import org.ovirt.engine.ui.webadmin.widget.tree.SystemTree;

import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Style.Overflow;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.DockLayoutPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.LayoutPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.SplitLayoutPanel;
import com.google.gwt.user.client.ui.StackLayoutPanel;
import com.google.gwt.user.client.ui.Widget;

/**
 * View for the WebAdmin main section: header panel, west stack (system tree /
 * bookmarks / tags), main content area, and the alerts/events footer. Layout is
 * bound via UiBinder; element IDs are generated via ElementIdHandler.
 */
public class MainSectionView extends AbstractView implements MainSectionPresenter.ViewDef {

    interface ViewUiBinder extends UiBinder<Widget, MainSectionView> {
        ViewUiBinder uiBinder = GWT.create(ViewUiBinder.class);
    }

    interface ViewIdHandler extends ElementIdHandler<MainSectionView> {
        ViewIdHandler idHandler = GWT.create(ViewIdHandler.class);
    }

    // Notified of west-panel width changes so the main tab bar can be offset.
    private MainTabBarOffsetUiHandlers uiHandlers;

    @UiField
    DockLayoutPanel wrapperLayoutPanel;

    @UiField
    SimplePanel headerPanel;

    // provided = true: constructed manually (createWestStackPanel) before binding.
    @UiField(provided = true)
    final StackLayoutPanel westStackPanel;

    @UiField
    LayoutPanel mainContentPanel;

    @UiField
    SimplePanel alertEventFooterPanel;

    @UiField
    Label footerMessage;

    @UiField(provided = true)
    SplitLayoutPanel verticalSplitLayoutPanel;

    @UiField(provided = true)
    SplitLayoutPanel horizontalSplitLayoutPanel;

    @WithElementId
    Label treeHeader;

    @WithElementId
    Label bookmarksHeader;

    @WithElementId
    Label tagsHeader;

    @Inject
    public MainSectionView(SystemTreeModelProvider treeModelProvider,
            BookmarkModelProvider bookmarkModelProvider,
            TagModelProvider tagModelProvider,
            AlertModelProvider alertModelProvider,
            AlertFirstRowModelProvider alertFirstRowModelProvider,
            EventModelProvider eventModelProvider,
            EventFirstRowModelProvider eventFirstRowModelProvider,
            TaskModelProvider taskModelProvider,
            TaskFirstRowModelProvider taskFirstRowModelProvider,
            InternalConfiguration intConf,
            ApplicationConstants constants,
            ApplicationMessages messages,
            ApplicationResources resources,
            ApplicationTemplates templates,
            EventBus eventBus,
            ClientStorage clientStorage) {
        // Provided widgets must exist before createAndBindUi() runs.
        westStackPanel = createWestStackPanel(treeModelProvider, bookmarkModelProvider, tagModelProvider);
        verticalSplitLayoutPanel = new SplitLayoutPanel(2);
        horizontalSplitLayoutPanel = new SplitLayoutPanel(2);
        initWidget(ViewUiBinder.uiBinder.createAndBindUi(this));
        initHeaders();
        // IDs are generated after binding so all @WithElementId fields are set.
        ViewIdHandler.idHandler.generateAndSetIds(this);
        addContentToWestPanel(treeModelProvider, bookmarkModelProvider, tagModelProvider, westStackPanel, constants);

        initAlertEventFooterPanel(alertModelProvider, alertFirstRowModelProvider,
                eventModelProvider, eventFirstRowModelProvider,
                taskModelProvider, taskFirstRowModelProvider,
                resources, templates, eventBus, clientStorage);
        headerPanel.getElement().getParentElement().getStyle().setOverflow(Overflow.VISIBLE);

        if (!intConf.isCurrentBrowserSupported()) {
            // Browser is not supported
            footerMessage.setText(messages.browserNotSupportedVersion(
                    intConf.getCurrentBrowser(),
                    intConf.getCurrentBrowserVersion()));
        } else {
            // Remove footer message
            wrapperLayoutPanel.remove(footerMessage);
        }
    }

    // Header labels for the three west stack sections.
    private void initHeaders() {
        treeHeader = new Label("Tree");
        bookmarksHeader = new Label("Bookmarks");
        tagsHeader = new Label("Tags");
    }

    /**
     * Creates the west stack panel; its onResize reports the panel width to the
     * UI handlers so the main tab bar can adjust its offset.
     */
    StackLayoutPanel createWestStackPanel(SystemTreeModelProvider treeModelProvider,
            BookmarkModelProvider bookmarkModelProvider,
            TagModelProvider tagModelProvider) {
        final StackLayoutPanel panel = new StackLayoutPanel(Unit.PX) {
            @Override
            public void onResize() {
                super.onResize();

                if (uiHandlers != null) {
                    uiHandlers.setMainTabBarOffset(getOffsetWidth());
                }
            }
        };

        return panel;
    }

    // Populates the west stack: system tree, bookmarks, tags (26px headers each).
    private void addContentToWestPanel(SystemTreeModelProvider treeModelProvider,
            BookmarkModelProvider bookmarkModelProvider,
            TagModelProvider tagModelProvider,
            final StackLayoutPanel panel,
            ApplicationConstants constants) {
        panel.insert(new SystemTree(treeModelProvider, constants), treeHeader, 26, panel.getWidgetCount());
        panel.insert(new BookmarkList(bookmarkModelProvider), bookmarksHeader, 26, panel.getWidgetCount());
        panel.insert(new TagList(tagModelProvider), tagsHeader, 26, panel.getWidgetCount());
    }

    // Builds the alerts/events footer widget and attaches it to its panel.
    void initAlertEventFooterPanel(AlertModelProvider alertModelProvider,
            AlertFirstRowModelProvider alertFirstRowModelProvider,
            EventModelProvider eventModelProvider,
            EventFirstRowModelProvider eventFirstRowModelProvider,
            TaskModelProvider taskModelProvider,
            TaskFirstRowModelProvider taskFirstRowModelProvider,
            ApplicationResources resources,
            ApplicationTemplates templates,
            EventBus eventBus,
            ClientStorage clientStorage
            ) {
        alertEventFooterPanel.add(new AlertsEventsFooterView(
                alertModelProvider, alertFirstRowModelProvider,
                eventModelProvider, eventFirstRowModelProvider,
                taskModelProvider, taskFirstRowModelProvider,
                resources, templates, eventBus, clientStorage));
    }

    @Override
    public void setInSlot(Object slot, Widget content) {
        // Route header and main-content slots to their panels; defer others to the base view.
        if (slot == MainSectionPresenter.TYPE_SetHeader) {
            setPanelContent(headerPanel, content);
        } else if (slot == MainSectionPresenter.TYPE_SetMainContent) {
            setPanelContent(mainContentPanel, content);
        } else {
            super.setInSlot(slot, content);
        }
    }

    @Override
    public void setUiHandlers(MainTabBarOffsetUiHandlers uiHandlers) {
        this.uiHandlers = uiHandlers;
    }
}
// Copyright (C) 2016 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server;

import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.server.plugins.DelegatingClassLoader;
import com.google.gerrit.util.cli.CmdLineParser;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.Provider;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;

/** Helper class to define and parse options from plugins on ssh and RestAPI commands. */
public class DynamicOptions {
  /**
   * To provide additional options, bind a DynamicBean. For example:
   *
   * <pre>
   *   bind(com.google.gerrit.server.DynamicOptions.DynamicBean.class)
   *       .annotatedWith(Exports.named(com.google.gerrit.sshd.commands.Query.class))
   *       .to(MyOptions.class);
   * </pre>
   *
   * To define the additional options, implement this interface. For example:
   *
   * <pre>
   *   public class MyOptions implements DynamicOptions.DynamicBean {
   *     {@literal @}Option(name = "--verbose", aliases = {"-v"}
   *         usage = "Make the operation more talkative")
   *     public boolean verbose;
   *   }
   * </pre>
   *
   * <p>The option will be prefixed by the plugin name. In the example above, if the plugin name was
   * my-plugin, then the --verbose option as used by the caller would be --my-plugin--verbose.
   *
   * <p>Additional options can be annotated with @RequiresOption which will cause them to be ignored
   * unless the required option is present. For example:
   *
   * <pre>
   *   {@literal @}RequiresOptions("--help")
   *   {@literal @}Option(name = "--help-as-json",
   *       usage = "display help text in json format")
   *   public boolean displayHelpAsJson;
   * </pre>
   */
  public interface DynamicBean {}

  /**
   * To provide additional options to a command in another classloader, bind a ClassNameProvider
   * which provides the name of your DynamicBean in the other classLoader.
   *
   * <p>Do this by binding to just the name of the command you are going to bind to so that your
   * classLoader does not load the command's class which likely is not in your classpath. To ensure
   * that the command's class is not in your classpath, you can exclude it during your build.
   *
   * <p>For example:
   *
   * <pre>
   *   bind(com.google.gerrit.server.DynamicOptions.DynamicBean.class)
   *       .annotatedWith(Exports.named(
   *           "com.google.gerrit.plugins.otherplugin.command"))
   *       .to(MyOptionsClassNameProvider.class);
   *
   *   static class MyOptionsClassNameProvider implements DynamicOptions.ClassNameProvider {
   *     {@literal @}Override
   *     public String getClassName() {
   *       return "com.googlesource.gerrit.plugins.myplugin.CommandOptions";
   *     }
   *   }
   * </pre>
   */
  public interface ClassNameProvider extends DynamicBean {
    String getClassName();
  }

  /**
   * To provide additional Guice bindings for options to a command in another classloader, bind a
   * ModulesClassNamesProvider which provides the name of your Modules needed for your DynamicBean
   * in the other classLoader.
   *
   * <p>Do this by binding to the name of the command you are going to bind to and providing an
   * Iterable of Module names to instantiate and add to the Injector used to instantiate the
   * DynamicBean in the other classLoader. For example:
   *
   * <pre>
   *   bind(com.google.gerrit.server.DynamicOptions.DynamicBean.class)
   *       .annotatedWith(Exports.named(
   *           "com.google.gerrit.plugins.otherplugin.command"))
   *       .to(MyOptionsModulesClassNamesProvider.class);
   *
   *   static class MyOptionsModulesClassNamesProvider
   *       implements DynamicOptions.ModulesClassNamesProvider {
   *     {@literal @}Override
   *     public String getClassName() {
   *       return "com.googlesource.gerrit.plugins.myplugin.CommandOptions";
   *     }
   *     {@literal @}Override
   *     public Iterable<String> getModulesClassNames() {
   *       return Arrays.asList("com.googlesource.gerrit.plugins.myplugin.MyOptionsModule");
   *     }
   *   }
   * </pre>
   */
  public interface ModulesClassNamesProvider extends ClassNameProvider {
    Iterable<String> getModulesClassNames();
  }

  /**
   * Implement this if your DynamicBean needs an opportunity to act on the Bean directly before or
   * after argument parsing.
   */
  public interface BeanParseListener extends DynamicBean {
    void onBeanParseStart(String plugin, Object bean);

    void onBeanParseEnd(String plugin, Object bean);
  }

  /**
   * The entity which provided additional options may need a way to receive a reference to the
   * DynamicBean it provided. To do so, the existing class should implement BeanReceiver (a setter)
   * and then provide some way for the plugin to request its DynamicBean (a getter.) For example:
   *
   * <pre>
   *   public class Query extends SshCommand implements DynamicOptions.BeanReceiver {
   *       public void setDynamicBean(String plugin, DynamicOptions.DynamicBean dynamicBean) {
   *         dynamicBeans.put(plugin, dynamicBean);
   *       }
   *
   *       public DynamicOptions.DynamicBean getDynamicBean(String plugin) {
   *         return dynamicBeans.get(plugin);
   *       }
   *   ...
   *   }
   * }
   * </pre>
   */
  public interface BeanReceiver {
    void setDynamicBean(String plugin, DynamicBean dynamicBean);

    /**
     * Returns the class that should be used for looking up exported DynamicBean bindings from
     * plugins. Override when a particular REST/SSH endpoint should respect DynamicBeans bound on a
     * different endpoint. For example, {@code GetDetail} is just a synonym for a variant of {@code
     * GetChange}, and it should respect any DynamicBeans on {@code GetChange}. So it should return
     * {@code GetChange.class} from this method.
     */
    default Class<? extends BeanReceiver> getExportedBeanReceiver() {
      return getClass();
    }
  }

  public interface BeanProvider {
    DynamicBean getDynamicBean(String plugin);
  }

  /**
   * MergedClassloaders allow us to load classes from both plugin classloaders. Store the merged
   * classloaders in a Map to avoid creating a new classloader for each invocation. Use a
   * WeakHashMap to avoid leaking these MergedClassLoaders once either plugin is unloaded. Since the
   * WeakHashMap only takes care of ensuring the Keys can get garbage collected, use WeakReferences
   * to store the MergedClassloaders in the WeakHashMap.
   *
   * <p>Outer keys are the bean plugin's classloaders (the plugin being extended)
   *
   * <p>Inner keys are the dynamicBeans plugin's classloaders (the extending plugin)
   *
   * <p>The value is the MergedClassLoader representing the merging of the outer and inner key
   * classloaders.
   */
  protected static Map<ClassLoader, Map<ClassLoader, WeakReference<ClassLoader>>> mergedClByCls =
      Collections.synchronizedMap(new WeakHashMap<>());

  protected Object bean;
  protected Map<String, DynamicBean> beansByPlugin;
  protected Injector injector;

  /**
   * Internal: For Gerrit to include options from DynamicBeans, setup a DynamicMap and instantiate
   * this class so the following methods can be called if desired:
   *
   * <pre>
   *    DynamicOptions pluginOptions = new DynamicOptions(bean, injector, dynamicBeans);
   *    pluginOptions.parseDynamicBeans(clp);
   *    pluginOptions.setDynamicBeans();
   *    pluginOptions.onBeanParseStart();
   *
   *    // parse arguments here:  clp.parseArgument(argv);
   *
   *    pluginOptions.onBeanParseEnd();
   * </pre>
   */
  public DynamicOptions(Object bean, Injector injector, DynamicMap<DynamicBean> dynamicBeans) {
    this.bean = bean;
    this.injector = injector;
    beansByPlugin = new HashMap<>();
    Class<?> beanClass =
        (bean instanceof BeanReceiver)
            ? ((BeanReceiver) bean).getExportedBeanReceiver()
            // NOTE(review): getClass() here is DynamicOptions.class (or a subclass), not the
            // bean's class — confirm bean.getClass() was not intended for non-BeanReceiver beans.
            : getClass();
    for (String plugin : dynamicBeans.plugins()) {
      Provider<DynamicBean> provider =
          dynamicBeans.byPlugin(plugin).get(beanClass.getCanonicalName());
      if (provider != null) {
        beansByPlugin.put(plugin, getDynamicBean(bean, provider.get()));
      }
    }
  }

  /**
   * Resolves the DynamicBean to use for the given target bean, re-instantiating it in a merged
   * classloader (and with any extra Guice modules) when the bean and the DynamicBean live in
   * different plugins.
   */
  @SuppressWarnings("unchecked")
  public DynamicBean getDynamicBean(Object bean, DynamicBean dynamicBean) {
    ClassLoader coreCl = getClass().getClassLoader();
    ClassLoader beanCl = bean.getClass().getClassLoader();
    ClassLoader loader = beanCl;
    if (beanCl != coreCl) { // bean from a plugin?
      ClassLoader dynamicBeanCl = dynamicBean.getClass().getClassLoader();
      if (beanCl != dynamicBeanCl) { // in a different plugin?
        loader = getMergedClassLoader(beanCl, dynamicBeanCl);
      }
    }

    String className = null;
    if (dynamicBean instanceof ClassNameProvider) {
      className = ((ClassNameProvider) dynamicBean).getClassName();
    } else if (loader != beanCl) { // in a different plugin?
      className = dynamicBean.getClass().getCanonicalName();
    }

    if (className != null) {
      try {
        List<Module> modules = new ArrayList<>();
        Injector modulesInjector = injector;
        if (dynamicBean instanceof ModulesClassNamesProvider) {
          modulesInjector = injector.createChildInjector();
          for (String moduleName :
              ((ModulesClassNamesProvider) dynamicBean).getModulesClassNames()) {
            Class<Module> mClass = (Class<Module>) loader.loadClass(moduleName);
            modules.add(modulesInjector.getInstance(mClass));
          }
        }
        return modulesInjector
            .createChildInjector(modules)
            .getInstance((Class<DynamicOptions.DynamicBean>) loader.loadClass(className));
      } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
      }
    }
    return dynamicBean;
  }

  /**
   * Returns (creating and caching if needed) a classloader that delegates to both plugin
   * classloaders. Entries are weakly referenced so unloading either plugin frees them.
   */
  protected ClassLoader getMergedClassLoader(ClassLoader beanCl, ClassLoader dynamicBeanCl) {
    // computeIfAbsent is atomic on a Collections.synchronizedMap, unlike the previous
    // get-then-put sequence which could race and create duplicate inner maps.
    Map<ClassLoader, WeakReference<ClassLoader>> mergedClByCl =
        mergedClByCls.computeIfAbsent(
            beanCl, cl -> Collections.synchronizedMap(new WeakHashMap<>()));
    WeakReference<ClassLoader> mergedClRef = mergedClByCl.get(dynamicBeanCl);
    ClassLoader mergedCl = null;
    if (mergedClRef != null) {
      // The WeakReference may have been cleared even though the map entry survives.
      mergedCl = mergedClRef.get();
    }
    if (mergedCl == null) {
      mergedCl = new DelegatingClassLoader(beanCl, dynamicBeanCl);
      mergedClByCl.put(dynamicBeanCl, new WeakReference<>(mergedCl));
    }
    return mergedCl;
  }

  /** Parses each plugin's options out of the command line, prefixed with "--<plugin>". */
  public void parseDynamicBeans(CmdLineParser clp) {
    for (Map.Entry<String, DynamicBean> e : beansByPlugin.entrySet()) {
      clp.parseWithPrefix("--" + e.getKey(), e.getValue());
    }
    clp.drainOptionQueue();
  }

  /** Hands each plugin's DynamicBean back to the target bean, if it wants them. */
  public void setDynamicBeans() {
    if (bean instanceof BeanReceiver) {
      BeanReceiver receiver = (BeanReceiver) bean;
      for (Map.Entry<String, DynamicBean> e : beansByPlugin.entrySet()) {
        receiver.setDynamicBean(e.getKey(), e.getValue());
      }
    }
  }

  /** Notifies interested DynamicBeans that argument parsing is about to start. */
  public void onBeanParseStart() {
    for (Map.Entry<String, DynamicBean> e : beansByPlugin.entrySet()) {
      DynamicBean instance = e.getValue();
      if (instance instanceof BeanParseListener) {
        BeanParseListener listener = (BeanParseListener) instance;
        listener.onBeanParseStart(e.getKey(), bean);
      }
    }
  }

  /** Notifies interested DynamicBeans that argument parsing has finished. */
  public void onBeanParseEnd() {
    for (Map.Entry<String, DynamicBean> e : beansByPlugin.entrySet()) {
      DynamicBean instance = e.getValue();
      if (instance instanceof BeanParseListener) {
        BeanParseListener listener = (BeanParseListener) instance;
        listener.onBeanParseEnd(e.getKey(), bean);
      }
    }
  }
}
package ru.tr1al.util;

import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
 * Utility methods for building zip archives from folders, file lists and raw bytes, plus a
 * {@link ChunkedZipOutputStream} that splits an archive into size-limited part files.
 *
 * <p>The {@code compress*} convenience methods keep their historical best-effort contract:
 * IO errors are printed to stdout and swallowed rather than propagated.
 */
public class ZipUtil {

    /** Ad-hoc smoke-test entry point: compresses /tmp into /tmp.zip. */
    public static void main(String[] args) {
        try {
            compressFolder("/tmp", "/tmp.zip");
        } catch (Exception e) {
            System.out.print(e);
        }
    }

    /**
     * Compresses the regular files directly inside {@code folderPath} (non-recursive) into a
     * zip archive at {@code zipPath}. Subdirectories are skipped; previously they aborted the
     * whole archive because a FileInputStream cannot be opened on a directory.
     */
    public static void compressFolder(String folderPath, String zipPath) {
        byte[] buf = new byte[1024];
        // try-with-resources guarantees the archive is closed even if an entry fails.
        try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipPath))) {
            out.setLevel(Deflater.BEST_COMPRESSION);
            File dir = new File(folderPath);
            File[] children = dir.isDirectory() ? dir.listFiles() : null;
            if (children != null) {
                for (File file : children) {
                    if (!file.isFile()) {
                        continue; // skip subdirectories and special files
                    }
                    try (FileInputStream fis = new FileInputStream(file)) {
                        out.putNextEntry(new ZipEntry(file.getName()));
                        int len;
                        while ((len = fis.read(buf)) != -1) {
                            out.write(buf, 0, len);
                        }
                        out.closeEntry();
                    }
                }
            }
        } catch (IOException e) {
            System.out.print(e);
        }
    }

    /** Compresses the given files into {@code zip}; entry names are the plain file names. */
    public static void compressFiles(List<File> files, File zip) {
        byte[] buf = new byte[1024];
        try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zip))) {
            out.setLevel(Deflater.BEST_COMPRESSION);
            for (File file : files) {
                // Each input stream is closed as soon as its entry is written.
                try (FileInputStream fis = new FileInputStream(file)) {
                    out.putNextEntry(new ZipEntry(file.getName()));
                    int len;
                    while ((len = fis.read(buf)) != -1) {
                        out.write(buf, 0, len);
                    }
                    out.closeEntry();
                }
            }
        } catch (IOException e) {
            System.out.print(e);
        }
    }

    /** Compresses a single named blob into {@code zip}. */
    public static void compressData(byte[] data, String filename, File zip) {
        // singletonMap replaces the previous double-brace HashMap subclass (anti-pattern:
        // creates an anonymous class holding a reference to the enclosing scope).
        compressData(java.util.Collections.singletonMap(filename, data), zip);
    }

    /** Writes each map entry as one zip entry: key = entry name, value = entry content. */
    public static void compressData(Map<String, byte[]> bytes, File zip) {
        try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zip))) {
            out.setLevel(Deflater.BEST_COMPRESSION);
            for (Map.Entry<String, byte[]> e : bytes.entrySet()) {
                out.putNextEntry(new ZipEntry(e.getKey()));
                out.write(e.getValue());
                out.closeEntry();
            }
        } catch (IOException e) {
            System.out.print(e);
        }
    }

    /**
     * Recursively zips {@code srcDir} into one or more part files under {@code destPath}.
     *
     * @param partSize approximate maximum part size in bytes, or null for a single file
     * @param encoding charset used for string entry contents
     * @return the list of part files produced
     */
    public static List<File> zipDir(File srcDir, String destPath, String destName, Long partSize,
                                    String encoding) throws IOException {
        ChunkedZipOutputStream out = new ChunkedZipOutputStream(destPath, destName, partSize, encoding);
        zipDir(srcDir, "", out);
        out.closeStream();
        return out.getFiles();
    }

    private static void zipFile(File srcFile, String destPath, ChunkedZipOutputStream out)
            throws IOException {
        out.putNextEntry(srcFile, destPath);
    }

    /** Depth-first walk; directory names accumulate into the entry path. */
    private static void zipDir(File srcDir, String destPath, ChunkedZipOutputStream out)
            throws IOException {
        File[] children = srcDir.listFiles();
        if (children == null) {
            return; // not a directory, or an IO error listing it — nothing to add
        }
        for (File file : children) {
            if (file.isDirectory()) {
                zipDir(file, concatPathAndFilename(destPath, file.getName()), out);
            } else {
                zipFile(file, destPath, out);
            }
        }
    }

    /** Joins a path and file name with exactly one separator, tolerating null/blank path. */
    private static String concatPathAndFilename(String path, String filename) {
        if (path == null) {
            return filename;
        }
        String trimmedPath = path.trim();
        if (trimmedPath.isEmpty()) {
            return filename;
        }
        String trimmedFilename = filename.trim();
        if (trimmedPath.endsWith(File.separator)) {
            return trimmedPath + trimmedFilename;
        }
        return trimmedPath + File.separator + trimmedFilename;
    }

    /**
     * A zip writer that transparently rolls over to a new ".part.N.zip" file once the current
     * part would exceed {@code partSize}. With a null partSize a single ".zip" is written.
     * Created part files are registered with deleteOnExit().
     */
    public static class ChunkedZipOutputStream {
        private ZipOutputStream zipOutputStream;
        private String path;                       // destination directory
        private String name;                       // base archive name (without extension)
        private long currentSize;                  // bytes written to the current part
        private int currentChunkIndex;             // 0-based index of the next part to open
        private Long partSize = 1024 * 1024L;      // overwritten by the constructor argument
        private final String PART_POSTFIX = ".part.";
        private final String FILE_EXTENSION = ".zip";
        private ArrayList<File> files = new ArrayList<File>();
        private String encoding;                   // charset for String entry contents
        private ZipEntry customEntry = null;       // entry opened via startEntry()
        private int level = Deflater.NO_COMPRESSION;

        public ChunkedZipOutputStream(String path, String name, Long partSize, String encoding)
                throws IOException {
            this.path = path;
            this.name = name;
            this.partSize = partSize;
            this.encoding = encoding;
            constructNewStream();
        }

        /** Sets the compression level used for parts opened after this call. */
        public void setLevel(int level) {
            this.level = level;
        }

        /** Writes a complete string entry and rolls over to a new part if needed. */
        public void putNextEntry(String path, String content) throws IOException {
            ZipEntry entry = new ZipEntry(path);
            zipOutputStream.putNextEntry(entry);
            zipOutputStream.write(content.getBytes(encoding));
            zipOutputStream.closeEntry();
            checkSize(entry);
        }

        /** Opens an entry for incremental writing; finish it with {@link #closeEntry()}. */
        public void startEntry(String path) throws IOException {
            customEntry = new ZipEntry(path);
            zipOutputStream.putNextEntry(customEntry);
        }

        public void write(String content) throws IOException {
            zipOutputStream.write(content.getBytes(encoding));
        }

        public void write(byte[] content) throws IOException {
            zipOutputStream.write(content);
        }

        public void closeEntry() throws IOException {
            zipOutputStream.closeEntry();
            checkSize(customEntry);
        }

        /**
         * Rolls over to a new part when the current one would exceed partSize.
         * NOTE(review): ZipEntry.getCompressedSize() can return -1 for just-written entries
         * until the stream is finished — confirm roll-over accounting is accurate here.
         */
        private void checkSize(ZipEntry entry) throws IOException {
            if (partSize != null) {
                long entrySize = entry.getCompressedSize();
                if ((currentSize + entrySize) > partSize) {
                    closeStream();
                    constructNewStream();
                }
                currentSize += entrySize;
            }
        }

        /** Adds a file entry named after the source file, under {@code destPath}. */
        public void putNextEntry(File srcFile, String destPath) throws IOException {
            ZipEntry entry = new ZipEntry(concatPathAndFilename(destPath, srcFile.getName()));
            putEntry(entry, srcFile);
        }

        public void putNextEntry(String path, File file) throws IOException {
            ZipEntry entry = new ZipEntry(path);
            putEntry(entry, file);
        }

        public void putNextEntry(String path, byte[] bytes) throws IOException {
            ZipEntry entry = new ZipEntry(path);
            putEntry(entry, bytes);
        }

        private void putEntry(ZipEntry entry, File file) throws IOException {
            zipOutputStream.putNextEntry(entry);
            byte[] buf = new byte[1024];
            // try-with-resources: the input stream was previously leaked on write failure.
            try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
                int len;
                while ((len = in.read(buf)) > 0) {
                    zipOutputStream.write(buf, 0, len);
                }
            }
            zipOutputStream.closeEntry();
            checkSize(entry);
        }

        private void putEntry(ZipEntry entry, byte[] bytes) throws IOException {
            zipOutputStream.putNextEntry(entry);
            zipOutputStream.write(bytes);
            zipOutputStream.closeEntry();
            checkSize(entry);
        }

        /** Finishes and closes the current part file. */
        public void closeStream() throws IOException {
            zipOutputStream.close();
        }

        /** Opens the next part file and resets the size counter. */
        private void constructNewStream() throws IOException {
            File file = new File(path, constructCurrentPartName());
            if (!file.exists()) {
                file.createNewFile();
            }
            file.deleteOnExit();
            this.files.add(file);
            zipOutputStream = new ZipOutputStream(new FileOutputStream(file));
            zipOutputStream.setLevel(level);
            currentChunkIndex++;
            currentSize = 0;
        }

        /** "name.part.N.zip" when chunking, otherwise "name.zip". */
        private String constructCurrentPartName() {
            StringBuilder partNameBuilder = new StringBuilder(name);
            if (partSize != null) {
                partNameBuilder.append(PART_POSTFIX);
                partNameBuilder.append(currentChunkIndex);
            }
            partNameBuilder.append(FILE_EXTENSION);
            return partNameBuilder.toString();
        }

        public ArrayList<File> getFiles() {
            return files;
        }
    }

    /** Recursively compresses {@code folderPath} (including subdirectories) into {@code zipPath}. */
    public static void compressFolderWithSubdir(String folderPath, String zipPath)
            throws IOException {
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipPath))) {
            zos.setLevel(Deflater.BEST_COMPRESSION);
            addFolder(zos, folderPath, folderPath);
        }
    }

    /** Adds {@code folderName} (file or directory) with entry names relative to the base folder. */
    private static void addFolder(ZipOutputStream zos, String folderName, String baseFolderName)
            throws IOException {
        File f = new File(folderName);
        if (!f.exists()) {
            return;
        }
        if (f.isDirectory()) {
            File[] children = f.listFiles();
            if (children != null) {
                for (File f2 : children) {
                    addFolder(zos, f2.getAbsolutePath(), baseFolderName);
                }
            }
        } else {
            // Entry name is the path relative to the base folder (strip base + separator).
            String entryName = folderName.substring(baseFolderName.length() + 1);
            ZipEntry ze = new ZipEntry(entryName);
            zos.putNextEntry(ze);
            try (FileInputStream in = new FileInputStream(folderName)) {
                int len;
                byte[] buffer = new byte[1024];
                while ((len = in.read(buffer)) > 0) {
                    zos.write(buffer, 0, len);
                }
            }
            zos.closeEntry();
        }
    }
}
/**
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.waveprotocol.wave.crypto;

import org.apache.commons.codec.binary.Base64;
import org.waveprotocol.wave.federation.Proto.ProtocolSignature;

import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.Signature;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * A class capable of verifying signatures, by looking up certificate chains in
 * a store, and using a caching cert chain validator.
 */
public class WaveSignatureVerifier {

  // regexp that picks out a Common Name out of a X.500 Distinguished Name
  private static final Pattern CN_PATTERN = Pattern.compile("CN=([^,]+)");

  // 2 represents an AlternativeSubjectName of type DNS
  private static final Integer ALT_NAME_TYPE_DNS = Integer.valueOf(2);

  // The cert chain validator. This object can tell us whether a given cert
  // chain checks out ok.
  private final WaveCertPathValidator pathValidator;

  // The store that has the cert chains. This object maps from signer ids to
  // cert chains.
  private final CertPathStore pathStore;

  public WaveSignatureVerifier(WaveCertPathValidator validator,
      CertPathStore store) {
    this.pathValidator = validator;
    this.pathStore = store;
  }

  /**
   * Verifies the signature on some signed payload.
   *
   * @param signedPayload the payload on which we're verifying the signature.
   * @param signatureInfo the signature provided with the payload.
   * @param authority name of the authority that we expect the target
   *   certificate to be issued to.
   *
   * @throws SignatureException if the signature can't be verified, either
   *   because it simply didn't check out, or because of other reasons, like us
   *   not supporting the signature algorithm specified.
   * @throws UnknownSignerException if we can't find the cert chain in the local
   *   cert-path store.
   */
  public void verify(byte[] signedPayload, ProtocolSignature signatureInfo,
      String authority) throws SignatureException, UnknownSignerException {

    SignerInfo signer = pathStore.get(
        signatureInfo.getSignerId().toByteArray());

    if (signer == null) {
      // Render the signer id as base64 text; concatenating the raw byte[]
      // returned by Base64.encodeBase64 would print an identity hash instead.
      throw new UnknownSignerException("could not find information about signer "
          + new String(Base64.encodeBase64(signatureInfo.getSignerId().toByteArray()),
              StandardCharsets.US_ASCII));
    }

    verifySignerInfo(signer);

    Signature verifier;
    try {
      verifier = Signature.getInstance(AlgorithmUtil.getJceName(
          signatureInfo.getSignatureAlgorithm()));
    } catch (NoSuchAlgorithmException e) {
      throw new SignatureException("can't verify signatures of type "
          + signatureInfo.getSignatureAlgorithm().toString(), e);
    }

    X509Certificate cert = signer.getCertificates().get(0);

    try {
      verifier.initVerify(cert);
    } catch (InvalidKeyException e) {
      // Preserve the cause; it was previously dropped.
      throw new SignatureException("certificate of signer was not issued for "
          + "message signing", e);
    }

    try {
      verifier.update(signedPayload);
    } catch (java.security.SignatureException e) {
      // this is thrown if the verifier object is not properly initialized.
      // this shouldn't happen as we _just_ initialized it on the previous line.
      throw new IllegalStateException(e);
    }

    try {
      if (!verifier.verify(signatureInfo.getSignatureBytes().toByteArray())) {
        throw new SignatureException("signature did not verify");
      }
    } catch (java.security.SignatureException e) {
      throw new SignatureException(e);
    }

    verifyMatchingAuthority(authority, cert);
  }

  /**
   * Verifies that the {@link SignerInfo} (i.e., the certificate chain) checks
   * out, i.e., chains up to a trusted CA, and has certificates that aren't
   * expired.
   *
   * @throws SignatureException if the certificate chain in the
   *   {@link SignerInfo} doesn't verify.
   */
  public void verifySignerInfo(SignerInfo signer) throws SignatureException {
    pathValidator.validate(signer.getCertificates());
  }

  /**
   * Verifies that the given certificate was issued to the given authority.
   *
   * @param authority the authority to which the certificate was issued,
   *   e.g., a domain name.
   * @param certificate the {@link X509Certificate}
   * @throws SignatureException if the authority doesn't match the certificate.
   */
  private void verifyMatchingAuthority(String authority,
      X509Certificate certificate) throws SignatureException {

    String cn = getCommonNameFromDistinguishedName(
        certificate.getSubjectX500Principal().getName());

    if (cn == null) {
      throw new SignatureException("no common name found in signer "
          + "certificate " + certificate.getSubjectDN().toString());
    }

    if (cn.equals(authority)) {
      return;
    }

    if (authorityMatchesSubjectAlternativeNames(authority, certificate)) {
      return;
    }

    throw new SignatureException("expected " + authority
        + " as CN or alternative name in cert, but didn't find it");
  }

  /**
   * Returns true if the authority given matches any of the
   * SubjectAlternativeNames present in the certificate, false otherwise.
   */
  private boolean authorityMatchesSubjectAlternativeNames(String authority,
      X509Certificate certificate) {

    Collection<List<?>> subjAltNames = null;
    try {
      subjAltNames = certificate.getSubjectAlternativeNames();
    } catch (CertificateParsingException e) {
      // This is a bit strange - it means that the SubjectAlternativeNames
      // extension wasn't properly encoded in this cert. We'll leave
      // subjAltNames null.
    }

    if (subjAltNames == null) {
      return false;
    }

    for (List<?> altName : subjAltNames) {
      Integer nameType = (Integer) altName.get(0);

      // We're only interested in alternative names that denote domain names.
      if (!ALT_NAME_TYPE_DNS.equals(nameType)) {
        continue;
      }

      String dnsName = (String) altName.get(1);
      if (authority.equals(dnsName)) {
        return true;
      }
    }

    // None of the names matched.
    return false;
  }

  /** Extracts the CN component from an X.500 distinguished name, or null. */
  private String getCommonNameFromDistinguishedName(String dn) {
    Matcher m = CN_PATTERN.matcher(dn);
    if (m.find()) {
      return m.group(1);
    } else {
      return null;
    }
  }
}
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.javascript.rhino.FunctionTypeI; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.ObjectTypeI; import com.google.javascript.rhino.TypeI; import com.google.javascript.rhino.TypeIRegistry; import com.google.javascript.rhino.jstype.JSTypeNative; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.TreeSet; /** * A code generator that outputs type annotations for functions and * constructors. 
*/ class TypedCodeGenerator extends CodeGenerator { private final TypeIRegistry registry; private final JSDocInfoPrinter jsDocInfoPrinter; TypedCodeGenerator( CodeConsumer consumer, CompilerOptions options, TypeIRegistry registry) { super(consumer, options); checkNotNull(registry); this.registry = registry; this.jsDocInfoPrinter = new JSDocInfoPrinter(options.getUseOriginalNamesInOutput()); } @Override protected void add(Node n, Context context) { Node parent = n.getParent(); if (parent != null && (parent.isNormalBlock() || parent.isScript())) { if (n.isFunction()) { add(getFunctionAnnotation(n)); } else if (n.isExprResult() && n.getFirstChild().isAssign()) { Node assign = n.getFirstChild(); if (NodeUtil.isNamespaceDecl(assign.getFirstChild())) { add(jsDocInfoPrinter.print(assign.getJSDocInfo())); } else { Node rhs = assign.getLastChild(); add(getTypeAnnotation(rhs)); } } else if (n.isVar() && n.getFirstFirstChild() != null) { if (NodeUtil.isNamespaceDecl(n.getFirstChild())) { add(jsDocInfoPrinter.print(n.getJSDocInfo())); } else { add(getTypeAnnotation(n.getFirstFirstChild())); } } } super.add(n, context); } private String getTypeAnnotation(Node node) { // Only add annotations for things with JSDoc, or function literals. 
JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(node); if (jsdoc == null && !node.isFunction()) { return ""; } TypeI type = node.getTypeI(); if (type == null) { return ""; } else if (type.isFunctionType()) { return getFunctionAnnotation(node); } else if (type.isEnumObject()) { return "/** @enum {" + type.toMaybeObjectType().getEnumeratedTypeOfEnumObject().toNonNullAnnotationString() + "} */\n"; } else if (!type.isUnknownType() && !type.isBottom() && !type.isVoidType() && !type.isPrototypeObject()) { return "/** @type {" + node.getTypeI().toNonNullAnnotationString() + "} */\n"; } else { return ""; } } /** * @param fnNode A node for a function for which to generate a type annotation */ private String getFunctionAnnotation(Node fnNode) { TypeI type = fnNode.getTypeI(); checkState(fnNode.isFunction() || type.isFunctionType()); if (type == null || type.isUnknownType()) { return ""; } FunctionTypeI funType = type.toMaybeFunctionType(); if (type.equals(registry.getNativeType(JSTypeNative.FUNCTION_INSTANCE_TYPE))) { return "/** @type {!Function} */\n"; } StringBuilder sb = new StringBuilder("/**\n"); Node paramNode = null; // We need to use the child nodes of the function as the nodes for the // parameters of the function type do not have the real parameter names. // FUNCTION // NAME // PARAM_LIST // NAME param1 // NAME param2 if (fnNode != null && fnNode.isFunction()) { paramNode = NodeUtil.getFunctionParameters(fnNode).getFirstChild(); } // Param types int minArity = funType.getMinArity(); int maxArity = funType.getMaxArity(); List<TypeI> formals = ImmutableList.copyOf(funType.getParameterTypes()); for (int i = 0; i < formals.size(); i++) { sb.append(" * "); appendAnnotation(sb, "param", getParameterJSDocType(formals, i, minArity, maxArity)); sb.append(" ") .append(paramNode == null ? 
"p" + i : paramNode.getString()) .append("\n"); if (paramNode != null) { paramNode = paramNode.getNext(); } } // Return type TypeI retType = funType.getReturnType(); if (retType != null && !retType.isBottom() // There is no annotation for the empty type. && !funType.isInterface() // Interfaces never return a value. && !(funType.isConstructor() && retType.isVoidType())) { sb.append(" * "); appendAnnotation(sb, "return", retType.toNonNullAnnotationString()); sb.append("\n"); } if (funType.isConstructor()) { appendConstructorAnnotations(sb, funType); } else if (funType.isInterface()) { appendInterfaceAnnotations(sb, funType); } else { TypeI thisType = funType.getTypeOfThis(); if (thisType != null && !thisType.isUnknownType() && !thisType.isVoidType()) { if (fnNode == null || !thisType.equals(findMethodOwner(fnNode))) { sb.append(" * "); appendAnnotation(sb, "this", thisType.toNonNullAnnotationString()); sb.append("\n"); } } } Collection<String> typeParams = funType.getTypeParameters(); if (!typeParams.isEmpty()) { sb.append(" * @template "); Joiner.on(",").appendTo(sb, typeParams); sb.append("\n"); } sb.append(" */\n"); return sb.toString(); } // TODO(dimvar): it's awkward that we print @constructor after the extends/implements; // we should print it first, like users write it. Same for @interface and @record. 
/**
 * Appends the constructor-specific JSDoc lines: an {@code @extends} line for
 * the superclass (unless it is plain Object), one {@code @implements} line per
 * ancestor interface (sorted, de-duplicated), and finally {@code @constructor}.
 */
private void appendConstructorAnnotations(StringBuilder sb, FunctionTypeI funType) {
  FunctionTypeI superConstructor = funType.getInstanceType().getSuperClassConstructor();
  if (superConstructor != null) {
    ObjectTypeI superInstance = superConstructor.getInstanceType();
    // NOTE(review): relies on toString() equalling "Object" for the root type
    // — presumably a cheap identity check; verify against the type registry.
    if (!superInstance.toString().equals("Object")) {
      sb.append(" * ");
      appendAnnotation(sb, "extends", superInstance.toAnnotationString());
      sb.append("\n");
    }
  }
  // Avoid duplicates, add implemented type to a set first
  // (TreeSet also gives a deterministic, sorted output order).
  Set<String> interfaces = new TreeSet<>();
  for (ObjectTypeI interfaze : funType.getAncestorInterfaces()) {
    interfaces.add(interfaze.toAnnotationString());
  }
  for (String interfaze : interfaces) {
    sb.append(" * ");
    appendAnnotation(sb, "implements", interfaze);
    sb.append("\n");
  }
  sb.append(" * @constructor\n");
}

/**
 * Appends the interface-specific JSDoc lines: one {@code @extends} line per
 * ancestor interface (sorted, de-duplicated), then {@code @record} for
 * structural interfaces or {@code @interface} otherwise.
 */
private void appendInterfaceAnnotations(StringBuilder sb, FunctionTypeI funType) {
  Set<String> interfaces = new TreeSet<>();
  for (ObjectTypeI interfaceType : funType.getAncestorInterfaces()) {
    interfaces.add(interfaceType.toAnnotationString());
  }
  for (String interfaze : interfaces) {
    sb.append(" * ");
    appendAnnotation(sb, "extends", interfaze);
    sb.append("\n");
  }
  if (funType.isStructuralInterface()) {
    sb.append(" * @record\n");
  } else {
    sb.append(" * @interface\n");
  }
}

// TODO(sdh): This whole method could be deleted if we don't mind adding
// additional @this annotations where they're not actually necessary.
/**
 * Given a method definition node, returns the {@link ObjectTypeI} corresponding
 * to the class the method is defined on, or null if it is not a prototype method.
 */
private ObjectTypeI findMethodOwner(Node n) {
  if (n == null) {
    return null;
  }
  Node parent = n.getParent();
  FunctionTypeI ctor = null;
  if (parent.isAssign()) {
    // Handles the Foo.prototype.bar = function() {...} form: look up the
    // owner type by the qualified name to the left of ".prototype".
    Node target = parent.getFirstChild();
    if (NodeUtil.isPrototypeProperty(target)) {
      TypeI type = registry.getType(target.getFirstFirstChild().getQualifiedName());
      ctor = type != null ? ((ObjectTypeI) type).getConstructor() : null;
    }
  } else if (parent.isClass()) {
    // TODO(sdh): test this case once NTI understands ES6 classes
    ctor = parent.getTypeI().toMaybeFunctionType();
  }
  return ctor != null ? ctor.getInstanceType() : null;
}

/** Appends a single JSDoc clause of the form {@code @name {type}} to {@code sb}. */
private static void appendAnnotation(StringBuilder sb, String name, String type) {
  sb.append("@").append(name).append(" {").append(type).append("}");
}

/**
 * Creates a JSDoc-suitable String representation of the type of a parameter.
 *
 * @param types the formal parameter types of the function
 * @param index the index of the parameter being rendered
 * @param minArgs minimum arity of the function; parameters past this index
 *     are rendered as optional ({@code =} suffix)
 * @param maxArgs maximum arity; {@code Integer.MAX_VALUE} marks a varargs
 *     function whose last formal is rendered as a rest argument
 */
private String getParameterJSDocType(List<TypeI> types, int index, int minArgs, int maxArgs) {
  TypeI type = types.get(index);
  if (index < minArgs) {
    return type.toNonNullAnnotationString();
  }
  boolean isRestArgument = maxArgs == Integer.MAX_VALUE && index == types.size() - 1;
  if (isRestArgument) {
    return "..." + restrictByUndefined(type).toNonNullAnnotationString();
  }
  // Optional parameter: undefined is implied by the trailing "=", so it is
  // stripped from the printed union.
  return restrictByUndefined(type).toNonNullAnnotationString() + "=";
}

/** Removes undefined from a union type. */
private TypeI restrictByUndefined(TypeI type) {
  // If not voidable, there's nothing to do. If not nullable then the easiest
  // thing is to simply remove both null and undefined. If nullable, then add
  // null back into the union after removing null and undefined.
  if (!type.isVoidable()) {
    return type;
  }
  TypeI restricted = type.restrictByNotNullOrUndefined();
  if (type.isNullable()) {
    TypeI nullType = registry.getNativeType(JSTypeNative.NULL_TYPE);
    return registry.createUnionType(ImmutableList.of(restricted, nullType));
  }
  // The bottom type cannot appear in a jsdoc
  return restricted.isBottom() ? type : restricted;
}
}
/* * Copyright (c) 2007 BUSINESS OBJECTS SOFTWARE LIMITED * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of Business Objects nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /* * VaultRegistry.java * Creation date: Jul 29, 2004. * By: Edward Lam */ package org.openquark.cal.services; import java.util.HashSet; import java.util.Set; import org.openquark.cal.compiler.ModuleName; import org.openquark.cal.services.Vault.VaultProvider; /** * A VaultRegistry maintains a registry of known VaultProviders, thus providing a way to instantiate a vault * given its descriptor and location string. 
* @author Edward Lam */ public class VaultRegistry { /** The VaultProviders known to this registry. */ private final Set<VaultProvider> vaultProviders = new HashSet<VaultProvider>(); /** The vault authentication manager associated with this registry. */ private final VaultAuthenticationManager vaultAuthenticationManager = new VaultAuthenticationManager(); /** * Constructor for a VaultRegistry. */ public VaultRegistry() { // Register some standard vault providers. registerVaultProvider(StandardVault.getVaultClassProvider()); registerVaultProvider(SimpleCALFileVault.getVaultClassProvider()); registerVaultProvider(JarVault.getVaultClassProvider()); registerVaultProvider(NonExistentVault.getVaultClassProvider()); registerVaultProvider(SimpleCarFileVault.getVaultClassProvider()); } /** * Register a Vault Provider with this registry. * @param newProvider the provider to register. */ public void registerVaultProvider(VaultProvider newProvider) { vaultProviders.add(newProvider); } /** * Register a vault authenticator to use to authenticate access to vaults using this registry. * @param vaultAuthenticator the authenticator to register. */ public void registerVaultAuthenticator(VaultAuthenticator vaultAuthenticator) { vaultAuthenticationManager.registerAuthenticator(vaultAuthenticator); } /** * Get a Vault by descriptor and location. * @param vaultDescriptor a descriptor string which indicates the type of vault. * @param locationString a provider-specific location. * @return the corresponding vault, or null if the corresponding vault could not be found. */ public Vault getVault(String vaultDescriptor, String locationString) { // Iterate through the vault providers, checking for the one with the given descriptor. for (final VaultProvider vaultProvider : vaultProviders) { if (vaultProvider.getVaultDescriptor().equals(vaultDescriptor)) { // Return the vault provided by the provider. 
return vaultProvider.getVault(locationString, vaultAuthenticationManager); } } // Couldn't find a vault with the given descriptor. return null; } /** * Get a stored module given a vault descriptor and the arguments to the provider it describes. * @param vaultModuleInfo the vault info for the stored module element. * @param status the tracking status object. * @return the corresponding stored module, or null if a corresponding stored module could not be constructed. */ public StoredVaultElement.Module getStoredModule(VaultElementInfo vaultModuleInfo, Status status) { Vault vault = getVault(vaultModuleInfo.getVaultDescriptor(), vaultModuleInfo.getLocationString(), status); if (vault == null) { return null; } String elementName = vaultModuleInfo.getElementName(); int revisionNum = vaultModuleInfo.getRevision(); final ModuleName moduleName = ModuleName.maybeMake(elementName); if (moduleName == null) { status.add(new Status(Status.Severity.ERROR, "The name " + elementName + " is not a valid module name", null)); return null; } StoredVaultElement.Module storedElement = vault.getStoredModule(moduleName, revisionNum, status); if (storedElement == null) { logProblemRetrievingStoredElement("module", elementName, revisionNum, status); } return storedElement; } /** * Get a stored workspace declaration given a vault descriptor and the arguments to the provider it describes. * @param vaultDeclarationInfo the vault info for the stored workspace declaration element. * @param status the tracking status object. * @return the corresponding stored workspace declaration, or null if a corresponding stored workspace declaration could not be constructed. 
*/ public StoredVaultElement.WorkspaceDeclaration getStoredWorkspaceDeclaration(VaultElementInfo vaultDeclarationInfo, Status status) { Vault vault = getVault(vaultDeclarationInfo.getVaultDescriptor(), vaultDeclarationInfo.getLocationString(), status); if (vault == null) { return null; } String elementName = vaultDeclarationInfo.getElementName(); int revisionNum = vaultDeclarationInfo.getRevision(); StoredVaultElement.WorkspaceDeclaration storedElement = vault.getWorkspaceDeclaration(elementName, revisionNum, status); if (storedElement == null) { logProblemRetrievingStoredElement("workspace declaration", elementName, revisionNum, status); } return storedElement; } /** * Get a stored Car given a vault descriptor and the arguments to the provider it describes. * @param vaultCarInfo the vault info for the stored Car element. * @param status the tracking status object. * @return the corresponding stored Car, or null if a corresponding stored Car could not be constructed. */ public StoredVaultElement.Car getStoredCar(VaultElementInfo vaultCarInfo, Status status) { Vault vault = getVault(vaultCarInfo.getVaultDescriptor(), vaultCarInfo.getLocationString(), status); if (vault == null) { return null; } String elementName = vaultCarInfo.getElementName(); int revisionNum = vaultCarInfo.getRevision(); StoredVaultElement.Car storedElement = vault.getCar(elementName, revisionNum, status); if (storedElement == null) { logProblemRetrievingStoredElement("Car", elementName, revisionNum, status); } return storedElement; } /** * Get a Vault by descriptor and location, and the vault cannot be fetched, log the problem to the given status object. * @param vaultDescriptor a descriptor string which indicates the type of vault. * @param locationString a provider-specific location. * @param status the tracking static object. * @return the corresponding vault, or null if the corresponding vault could not be found. 
*/ private Vault getVault(String vaultDescriptor, String locationString, Status status) { Vault vault = getVault(vaultDescriptor, locationString); if (vault == null) { String errorString = "VaultRegistry: could not instantiate a vault with descriptor " + vaultDescriptor + " and location " + locationString; status.add(new Status(Status.Severity.ERROR, errorString)); } return vault; } /** * Logs a problem with retrieving a stored element to the given status object. * @param elementType the type of the element. * @param elementName the name of the element. * @param revisionNum the revision number of the element. * @param status the tracking status object, to which the problem is to be logged. */ private void logProblemRetrievingStoredElement(String elementType, String elementName, int revisionNum, Status status) { String errorString = "VaultRegistry: Could not retrieve " + elementType + " \"" + elementName + "\", revision " + revisionNum + " from the given vault."; status.add(new Status(Status.Severity.ERROR, errorString)); } }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.testsuite.transport.socket;

import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.DatagramChannel;
import io.netty.channel.socket.DatagramPacket;
import org.junit.Test;

import java.net.InetSocketAddress;
import java.nio.channels.NotYetConnectedException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.*;

/**
 * Verifies unicast datagram send/receive over every buffer flavour (heap,
 * direct, composite, mixed) and every wrap variant (plain, duplicate, slice,
 * read-only), both with and without binding/connecting the client channel.
 *
 * NOTE(review): the parameterless @Test methods call run(), which presumably
 * dispatches to the same-named (Bootstrap, Bootstrap) overload per transport
 * — confirm against AbstractDatagramTest.
 */
public class DatagramUnicastTest extends AbstractDatagramTest {

    // Payload sent in every test; the server handler verifies it byte-by-byte.
    private static final byte[] BYTES = {0, 1, 2, 3};

    // How the payload buffer is wrapped before being written.
    private enum WrapType {
        NONE, DUP, SLICE, READ_ONLY
    }

    @Test
    public void testSimpleSendDirectByteBuf() throws Throwable {
        run();
    }

    public void testSimpleSendDirectByteBuf(Bootstrap sb, Bootstrap cb) throws Throwable {
        testSimpleSend(sb, cb, Unpooled.directBuffer().writeBytes(BYTES), true, BYTES, 1);
        testSimpleSend(sb, cb, Unpooled.directBuffer().writeBytes(BYTES), true, BYTES, 4);
    }

    @Test
    public void testSimpleSendHeapByteBuf() throws Throwable {
        run();
    }

    public void testSimpleSendHeapByteBuf(Bootstrap sb, Bootstrap cb) throws Throwable {
        testSimpleSend(sb, cb, Unpooled.buffer().writeBytes(BYTES), true, BYTES, 1);
        testSimpleSend(sb, cb, Unpooled.buffer().writeBytes(BYTES), true, BYTES, 4);
    }

    @Test
    public void testSimpleSendCompositeDirectByteBuf() throws Throwable {
        run();
    }

    public void testSimpleSendCompositeDirectByteBuf(Bootstrap sb, Bootstrap cb) throws Throwable {
        // Payload split across two direct components.
        CompositeByteBuf buf = Unpooled.compositeBuffer();
        buf.addComponent(true, Unpooled.directBuffer().writeBytes(BYTES, 0, 2));
        buf.addComponent(true, Unpooled.directBuffer().writeBytes(BYTES, 2, 2));
        testSimpleSend(sb, cb, buf, true, BYTES, 1);

        CompositeByteBuf buf2 = Unpooled.compositeBuffer();
        buf2.addComponent(true, Unpooled.directBuffer().writeBytes(BYTES, 0, 2));
        buf2.addComponent(true, Unpooled.directBuffer().writeBytes(BYTES, 2, 2));
        testSimpleSend(sb, cb, buf2, true, BYTES, 4);
    }

    @Test
    public void testSimpleSendCompositeHeapByteBuf() throws Throwable {
        run();
    }

    public void testSimpleSendCompositeHeapByteBuf(Bootstrap sb, Bootstrap cb) throws Throwable {
        // Payload split across two heap components.
        CompositeByteBuf buf = Unpooled.compositeBuffer();
        buf.addComponent(true, Unpooled.buffer().writeBytes(BYTES, 0, 2));
        buf.addComponent(true, Unpooled.buffer().writeBytes(BYTES, 2, 2));
        testSimpleSend(sb, cb, buf, true, BYTES, 1);

        CompositeByteBuf buf2 = Unpooled.compositeBuffer();
        buf2.addComponent(true, Unpooled.buffer().writeBytes(BYTES, 0, 2));
        buf2.addComponent(true, Unpooled.buffer().writeBytes(BYTES, 2, 2));
        testSimpleSend(sb, cb, buf2, true, BYTES, 4);
    }

    @Test
    public void testSimpleSendCompositeMixedByteBuf() throws Throwable {
        run();
    }

    public void testSimpleSendCompositeMixedByteBuf(Bootstrap sb, Bootstrap cb) throws Throwable {
        // Payload split across one direct and one heap component.
        CompositeByteBuf buf = Unpooled.compositeBuffer();
        buf.addComponent(true, Unpooled.directBuffer().writeBytes(BYTES, 0, 2));
        buf.addComponent(true, Unpooled.buffer().writeBytes(BYTES, 2, 2));
        testSimpleSend(sb, cb, buf, true, BYTES, 1);

        CompositeByteBuf buf2 = Unpooled.compositeBuffer();
        buf2.addComponent(true, Unpooled.directBuffer().writeBytes(BYTES, 0, 2));
        buf2.addComponent(true, Unpooled.buffer().writeBytes(BYTES, 2, 2));
        testSimpleSend(sb, cb, buf2, true, BYTES, 4);
    }

    @Test
    public void testSimpleSendWithoutBind() throws Throwable {
        run();
    }

    public void testSimpleSendWithoutBind(Bootstrap sb, Bootstrap cb) throws Throwable {
        testSimpleSend(sb, cb, Unpooled.directBuffer().writeBytes(BYTES), false, BYTES, 1);
        testSimpleSend(sb, cb, Unpooled.directBuffer().writeBytes(BYTES), false, BYTES, 4);
    }

    // Runs testSimpleSend0 once per WrapType. Each iteration gets its own
    // retain() so the final release() here drops the caller's reference.
    private void testSimpleSend(Bootstrap sb, Bootstrap cb, ByteBuf buf, boolean bindClient,
                                final byte[] bytes, int count) throws Throwable {
        for (WrapType type: WrapType.values()) {
            testSimpleSend0(sb, cb, buf.retain(), bindClient, bytes, count, type);
        }
        assertTrue(buf.release());
    }

    @Test
    public void testSimpleSendWithConnect() throws Throwable {
        run();
    }

    public void testSimpleSendWithConnect(Bootstrap sb, Bootstrap cb) throws Throwable {
        testSimpleSendWithConnect(sb, cb, Unpooled.directBuffer().writeBytes(BYTES), BYTES, 1);
        testSimpleSendWithConnect(sb, cb, Unpooled.directBuffer().writeBytes(BYTES), BYTES, 4);
    }

    // Sends the payload `count` times to an unconnected client channel and
    // waits for the server to observe all of them.
    @SuppressWarnings("deprecation")
    private void testSimpleSend0(Bootstrap sb, Bootstrap cb, ByteBuf buf, boolean bindClient,
                                 final byte[] bytes, int count, WrapType wrapType)
            throws Throwable {
        cb.handler(new SimpleChannelInboundHandler<Object>() {
            @Override
            public void channelRead0(ChannelHandlerContext ctx, Object msgs) throws Exception {
                // Nothing will be sent.
            }
        });

        final CountDownLatch latch = new CountDownLatch(count);
        Channel sc = setupServerChannel(sb, bytes, latch);
        Channel cc;
        if (bindClient) {
            cc = cb.bind(newSocketAddress()).sync().channel();
        } else {
            // Not bound: activate on registration so writes are permitted.
            cb.option(ChannelOption.DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION, true);
            cc = cb.register().sync().channel();
        }
        InetSocketAddress addr = (InetSocketAddress) sc.localAddress();
        for (int i = 0; i < count; i++) {
            // Each write takes its own retain(); the view (duplicate/slice/
            // read-only wrapper) shares the extra reference.
            switch (wrapType) {
                case DUP:
                    cc.write(new DatagramPacket(buf.retain().duplicate(), addr));
                    break;
                case SLICE:
                    cc.write(new DatagramPacket(buf.retain().slice(), addr));
                    break;
                case READ_ONLY:
                    cc.write(new DatagramPacket(Unpooled.unmodifiableBuffer(buf.retain()), addr));
                    break;
                case NONE:
                    cc.write(new DatagramPacket(buf.retain(), addr));
                    break;
                default:
                    throw new Error("unknown wrap type: " + wrapType);
            }
        }
        // release as we used buf.retain() before
        buf.release();

        cc.flush();
        assertTrue(latch.await(10, TimeUnit.SECONDS));

        sc.close().sync();
        cc.close().sync();
    }

    // Runs testSimpleSendWithConnect0 once per WrapType, same reference
    // accounting as testSimpleSend above.
    private void testSimpleSendWithConnect(Bootstrap sb, Bootstrap cb, ByteBuf buf, final byte[] bytes, int count)
            throws Throwable {
        for (WrapType type: WrapType.values()) {
            testSimpleSendWithConnect0(sb, cb, buf.retain(), bytes, count, type);
        }
        assertTrue(buf.release());
    }

    // Sends via a connect()ed DatagramChannel, then verifies that a write
    // after disconnect() fails with NotYetConnectedException.
    private void testSimpleSendWithConnect0(Bootstrap sb, Bootstrap cb, ByteBuf buf, final byte[] bytes, int count,
                                            WrapType wrapType) throws Throwable {
        cb.handler(new SimpleChannelInboundHandler<Object>() {
            @Override
            public void channelRead0(ChannelHandlerContext ctx, Object msgs) throws Exception {
                // Nothing will be sent.
            }
        });

        final CountDownLatch latch = new CountDownLatch(count);
        Channel sc = setupServerChannel(sb, bytes, latch);
        DatagramChannel cc = null;
        try {
            cc = (DatagramChannel) cb.connect(sc.localAddress()).sync().channel();

            for (int i = 0; i < count; i++) {
                switch (wrapType) {
                    case DUP:
                        cc.write(buf.retain().duplicate());
                        break;
                    case SLICE:
                        cc.write(buf.retain().slice());
                        break;
                    case READ_ONLY:
                        cc.write(Unpooled.unmodifiableBuffer(buf.retain()));
                        break;
                    case NONE:
                        cc.write(buf.retain());
                        break;
                    default:
                        throw new Error("unknown wrap type: " + wrapType);
                }
            }
            cc.flush();
            assertTrue(latch.await(10, TimeUnit.SECONDS));
            assertTrue(cc.isConnected());

            // Test what happens when we call disconnect()
            cc.disconnect().syncUninterruptibly();
            assertFalse(cc.isConnected());

            ChannelFuture future = cc.writeAndFlush(
                    buf.retain().duplicate()).awaitUninterruptibly();
            assertTrue(future.cause() instanceof NotYetConnectedException);
        } finally {
            // release as we used buf.retain() before
            buf.release();

            sc.close().sync();
            if (cc != null) {
                cc.close().sync();
            }
        }
    }

    // Binds a server channel whose handler checks every received datagram
    // against `bytes` and counts down `latch` per packet.
    @SuppressWarnings("deprecation")
    private Channel setupServerChannel(Bootstrap sb, final byte[] bytes, final CountDownLatch latch)
            throws Throwable {
        sb.handler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ch.pipeline().addLast(new SimpleChannelInboundHandler<DatagramPacket>() {
                    @Override
                    public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
                        ByteBuf buf = msg.content();
                        assertEquals(bytes.length, buf.readableBytes());
                        for (byte b : bytes) {
                            assertEquals(b, buf.readByte());
                        }
                        latch.countDown();
                    }
                });
            }
        });
        return sb.bind(newSocketAddress()).sync().channel();
    }
}
/* * Copyright 2015 IBM Corp. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.ibm.watson.developer_cloud.speech_to_text.v1; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.junit.Assume; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import com.ibm.watson.developer_cloud.WatsonServiceTest; import com.ibm.watson.developer_cloud.http.HttpMediaType; import com.ibm.watson.developer_cloud.service.exception.NotFoundException; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.Corpus; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.Corpus.Status; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.Customization; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.KeywordsResult; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.RecognitionJob; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.RecognizeOptions; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.SpeechModel; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.SpeechResults; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.SpeechSession; 
import com.ibm.watson.developer_cloud.speech_to_text.v1.model.SpeechSessionStatus; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.SpeechWordAlternatives; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.Transcript; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.Word; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.Word.Type; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.WordData; import com.ibm.watson.developer_cloud.speech_to_text.v1.websocket.BaseRecognizeCallback; /** * Speech to text Integration tests. */ public class SpeechToTextIT extends WatsonServiceTest { private static final String EN_BROADBAND16K = "en-US_BroadbandModel"; private static final String SPEECH_RESOURCE = "src/test/resources/speech_to_text/%s"; private static final String SAMPLE_WAV = String.format(SPEECH_RESOURCE, "sample1.wav"); private static final String TWO_SPEAKERS_WAV = String.format(SPEECH_RESOURCE, "twospeakers.wav"); private CountDownLatch lock = new CountDownLatch(1); private SpeechToText service; private SpeechResults asyncResults; private String customizationId; /** The expected exception. */ @Rule public final ExpectedException expectedException = ExpectedException.none(); /* * (non-Javadoc) * * @see com.ibm.watson.developer_cloud.WatsonServiceTest#setUp() */ @Override @Before public void setUp() throws Exception { super.setUp(); this.customizationId = getProperty("speech_to_text.customization_id"); String username = getProperty("speech_to_text.username"); String password = getProperty("speech_to_text.password"); Assume.assumeFalse("config.properties doesn't have valid credentials.", username == null || username.equals(PLACEHOLDER)); service = new SpeechToText(); service.setUsernameAndPassword(username, password); service.setEndPoint(getProperty("speech_to_text.url")); service.setDefaultHeaders(getDefaultHeaders()); } /** * Test create session. 
*/ @Test public void testCreateSession() { SpeechSession session = service.createSession().execute(); try { assertNotNull(session); assertNotNull(session.getSessionId()); } finally { service.deleteSession(session).execute(); } } /** * Test create session speech model. */ @Test public void testCreateSessionSpeechModel() { SpeechSession session = service.createSession(SpeechModel.EN_US_BROADBANDMODEL).execute(); try { assertNotNull(session); assertNotNull(session.getSessionId()); } finally { service.deleteSession(session).execute(); } } /** * Test create session string. */ @Test public void testCreateSessionString() { SpeechSession session = service.createSession(EN_BROADBAND16K).execute(); try { assertNotNull(session); assertNotNull(session.getSessionId()); } finally { service.deleteSession(session).execute(); } } /** * Test get model. */ @Test public void testGetModel() { SpeechModel model = service.getModel(EN_BROADBAND16K).execute(); assertNotNull(model); assertNotNull(model.getName()); assertNotNull(model.getRate()); assertNotNull(model.getDescription()); } /** * Test get models. */ @Test public void testGetModels() { List<SpeechModel> models = service.getModels().execute(); assertNotNull(models); assertTrue(!models.isEmpty()); } /** * Test get recognize status. */ @Test public void testGetRecognizeStatus() { SpeechSession session = service.createSession(SpeechModel.EN_US_BROADBANDMODEL).execute(); SpeechSessionStatus status = service.getRecognizeStatus(session).execute(); try { assertNotNull(status); assertNotNull(status.getModel()); assertNotNull(status.getState()); } finally { service.deleteSession(session).execute(); } } /** * Test recognize audio file. */ @Test public void testRecognizeFileString() { File audio = new File(SAMPLE_WAV); SpeechResults results = service.recognize(audio).execute(); assertNotNull(results.getResults().get(0).getAlternatives().get(0).getTranscript()); } /** * Test recognize multiple speakers. 
*/ @Test public void testRecognizeMultipleSpeakers() { File audio = new File(TWO_SPEAKERS_WAV); RecognizeOptions options = new RecognizeOptions.Builder() .continuous(true) .interimResults(true) .speakerLabels(true) .model(SpeechModel.EN_US_NARROWBANDMODEL.getName()) .contentType(HttpMediaType.AUDIO_WAV) .build(); SpeechResults results = service.recognize(audio, options).execute(); assertNotNull(results.getSpeakerLabels()); assertTrue(results.getSpeakerLabels().size() > 0); } /** * Test recognize file string recognize options. */ @Test public void testRecognizeFileStringRecognizeOptions() { File audio = new File(SAMPLE_WAV); String contentType = HttpMediaType.AUDIO_WAV; RecognizeOptions options = new RecognizeOptions.Builder().continuous(true).timestamps(true).wordConfidence(true) .model(EN_BROADBAND16K).contentType(contentType).profanityFilter(false).build(); SpeechResults results = service.recognize(audio, options).execute(); assertNotNull(results.getResults().get(0).getAlternatives().get(0).getTranscript()); assertNotNull(results.getResults().get(0).getAlternatives().get(0).getTimestamps()); assertNotNull(results.getResults().get(0).getAlternatives().get(0).getWordConfidences()); } /** * Test keyword recognition. 
*/ @Test public void testRecognizeKeywords() { final String keyword1 = "rain"; final String keyword2 = "tornadoes"; final RecognizeOptions options = new RecognizeOptions.Builder().contentType("audio/wav").model(SpeechModel.EN_US_BROADBANDMODEL.getName()) .continuous(true).inactivityTimeout(500).keywords(keyword1, keyword2).keywordsThreshold(0.7).build(); final File audio = new File(SAMPLE_WAV); final SpeechResults results = service.recognize(audio, options).execute(); final Transcript transcript = results.getResults().get(0); assertEquals(2, transcript.getKeywordsResult().size()); assertTrue(transcript.getKeywordsResult().containsKey(keyword1)); assertTrue(transcript.getKeywordsResult().containsKey(keyword2)); assertEquals(1, transcript.getKeywordsResult().get(keyword1).size()); assertEquals(1, transcript.getKeywordsResult().get(keyword2).size()); final KeywordsResult result1 = transcript.getKeywordsResult().get(keyword1).get(0); assertEquals(keyword1, result1.getNormalizedText()); assertEquals(0.9, result1.getConfidence(), 0.1); assertEquals(5.58, result1.getStartTime(), 1.0); assertEquals(6.14, result1.getEndTime(), 1.0); final KeywordsResult result2 = transcript.getKeywordsResult().get(keyword2).get(0); assertEquals(keyword2, result2.getNormalizedText()); assertEquals(0.9, result2.getConfidence(), 0.1); assertEquals(4.42, result2.getStartTime(), 1.0); assertEquals(5.04, result2.getEndTime(), 1.0); } /** * Test recognize webSocket. 
* * @throws FileNotFoundException the file not found exception * @throws InterruptedException the interrupted exception */ @Test public void testRecognizeWebSocket() throws FileNotFoundException, InterruptedException { RecognizeOptions options = new RecognizeOptions.Builder().continuous(true).interimResults(true) .inactivityTimeout(40).timestamps(true).maxAlternatives(2).wordAlternativesThreshold(0.5).model(EN_BROADBAND16K) .contentType(HttpMediaType.AUDIO_WAV).build(); FileInputStream audio = new FileInputStream(SAMPLE_WAV); service.recognizeUsingWebSocket(audio, options, new BaseRecognizeCallback() { @Override public void onConnected() { System.out.println("onConnected()"); } @Override public void onDisconnected() { System.out.println("onDisconnected()"); lock.countDown(); } @Override public void onError(Exception e) { e.printStackTrace(); lock.countDown(); } @Override public void onTranscription(SpeechResults speechResults) { if (speechResults != null && speechResults.isFinal()) { asyncResults = speechResults; } } }); lock.await(2, TimeUnit.MINUTES); assertNotNull(asyncResults); List<SpeechWordAlternatives> wordAlternatives = asyncResults.getResults().get(asyncResults.getResultIndex()).getWordAlternatives(); assertTrue(wordAlternatives != null && !wordAlternatives.isEmpty()); assertNotNull(wordAlternatives.get(0).getAlternatives()); } /** * Test create recognition job. 
* * @throws InterruptedException the interrupted exception * @throws FileNotFoundException the file not found exception */ @Test public void testCreateRecognitionJob() throws InterruptedException, FileNotFoundException { File audio = new File(SAMPLE_WAV); RecognitionJob job = service.createRecognitionJob(audio, null, null).execute(); try { assertNotNull(job.getId()); for (int x = 0; x < 30 && job.getStatus() != RecognitionJob.Status.COMPLETED; x++) { Thread.sleep(3000); job = service.getRecognitionJob(job.getId()).execute(); } job = service.getRecognitionJob(job.getId()).execute(); assertEquals(RecognitionJob.Status.COMPLETED, job.getStatus()); assertNotNull(job.getResults()); } finally { service.deleteRecognitionJob(job.getId()); } } /** * Test get recognition job with wrong id. * * @throws InterruptedException the interrupted exception * @throws FileNotFoundException the file not found exception */ @Test public void testGetRecognitionJobWithWrongId() { expectedException.expect(NotFoundException.class); expectedException.expectMessage("job not found"); service.getRecognitionJob("foo").execute(); } /** * Test get recognition jobs. * * @throws InterruptedException the interrupted exception * @throws FileNotFoundException the file not found exception */ @Test public void testGetRecognitionJobs() { List<RecognitionJob> jobs = service.getRecognitionJobs().execute(); assertNotNull(jobs); } /** * Test get customizations. */ @Test public void testGetCustomizations() { List<Customization> customizations = service.getCustomizations(null).execute(); assertNotNull(customizations); assertTrue(!customizations.isEmpty()); } /** * Test get corpora. * */ @Test public void testGetCorpora() { List<Corpus> result = service.getCorpora(customizationId).execute(); assertNotNull(result); } /** * Test add text to corpus. 
* */ @Test(expected = IllegalArgumentException.class) public void testAddTextToCorpus() { service.addTextToCustomizationCorpus(customizationId, "foo3", null, null).execute(); } /** * Test get words. */ @Test public void testGetWords() { List<WordData> result = service.getWords(customizationId, Type.ALL).execute(); assertNotNull(result); assertTrue(!result.isEmpty()); } /** * Test get word. */ public void testGetWord() { Word result = service.getWord(customizationId, "string").execute(); assertNotNull(result); } /** * Test customization. * * @throws InterruptedException the interrupted exception */ @Test public void testCustomization() throws InterruptedException { // create customization Customization myCustomization = service.createCustomization("IEEE-java-sdk-permanent", SpeechModel.EN_US_BROADBANDMODEL, null).execute(); String id = myCustomization.getId(); try { // Add a corpus file to the model: service .addTextToCustomizationCorpus(id, "corpus-1", false, new File(String.format(SPEECH_RESOURCE, "corpus1.txt"))) .execute(); // Get corpora List<Corpus> corpora = service.getCorpora(id).execute(); assertNotNull(corpora); assertTrue(corpora.size() == 1); // There is only one corpus so far so choose it Corpus corpus = corpora.get(0); for (int x = 0; x < 30 && corpus.getStatus() != Status.ANALYZED; x++) { corpus = service.getCorpora(id).execute().get(0); Thread.sleep(5000); } assertTrue(corpus.getStatus() == Status.ANALYZED); // Now add some user words to the custom model service.addWord(id, new Word("IEEE", "IEEE", "I. triple E.")).execute(); service.addWord(id, new Word("hhonors", "IEEE", "H. honors", "Hilton honors")).execute(); // Display all words in the words resource (coming from OOVs from the corpus add and the new words just added) List<WordData> words = service.getWords(id, Word.Type.ALL).execute(); assertNotNull(words); } finally { service.deleteCustomization(id); } } }
/* ******************************************************************************* * L O G I T A G S * Software and Programming * Dr. Wolfgang Winter * Germany * * All rights reserved * ******************************************************************************* */ package com.logitags.cibet.actuator.archive; import java.io.Serializable; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.Map; import java.util.UUID; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Embeddable; import javax.persistence.Entity; import javax.persistence.EntityManager; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.NamedNativeQueries; import javax.persistence.NamedNativeQuery; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; import javax.persistence.OneToOne; import javax.persistence.PrePersist; import javax.persistence.PreUpdate; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Version; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.logitags.cibet.actuator.dc.ResourceApplyException; import com.logitags.cibet.config.Configuration; import com.logitags.cibet.context.CibetContext; import com.logitags.cibet.context.Context; import com.logitags.cibet.context.InternalRequestScope; import com.logitags.cibet.core.AnnotationNotFoundException; import com.logitags.cibet.core.AnnotationUtil; import com.logitags.cibet.core.CEntityManager; import com.logitags.cibet.core.CibetUtil; import com.logitags.cibet.core.ControlEvent; import com.logitags.cibet.core.ExecutionStatus; import 
com.logitags.cibet.resource.Resource; import com.logitags.cibet.security.SecurityProvider; import com.logitags.cibet.sensor.jpa.JpaResource; /** * archived entity or method invocation action. */ @Entity @Table(name = "CIB_ARCHIVE") @NamedQueries({ @NamedQuery(name = Archive.SEL_ALL, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_BY_GROUPID, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters WHERE r.groupId = :groupId ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_ALL_BY_TENANT, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters WHERE a.tenant LIKE :tenant ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_ALL_BY_CASEID, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters WHERE a.tenant LIKE :tenant AND a.caseId = :caseId ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_ALL_BY_CASEID_NO_TENANT, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters WHERE a.caseId = :caseId ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_ALL_BY_CLASS, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters WHERE a.tenant LIKE :tenant AND r.target = :targetType ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_BY_PRIMARYKEYID, query = "SELECT a, r FROM Archive a, JpaResource r LEFT JOIN FETCH r.parameters " + "WHERE a.resource = r AND r.target = :target AND r.primaryKeyId = :primaryKeyId ORDER BY a.createDate"), @NamedQuery(name = Archive.SEL_ALL_BY_CLASS_NO_TENANT, query = "SELECT a FROM Archive a LEFT JOIN FETCH a.resource r LEFT JOIN FETCH r.parameters WHERE r.target = :targetType ORDER BY a.createDate") }) @NamedNativeQueries({ @NamedNativeQuery(name = Archive.SEL_BY_METHODNAME, query = "SELECT " + Archive.ARCHIVE + " FROM CIB_ARCHIVE a, CIB_RESOURCE r WHERE 
a.RESOURCEID = r.RESOURCEID AND a.TENANT LIKE ?1 AND r.TARGET = ?2 AND r.METHOD = ?3 ORDER BY a.CREATEDATE", resultClass = Archive.class), @NamedNativeQuery(name = Archive.SEL_BY_METHODNAME_NO_TENANT, query = "SELECT " + Archive.ARCHIVE + " FROM CIB_ARCHIVE a, CIB_RESOURCE r WHERE a.RESOURCEID = r.RESOURCEID AND r.TARGET = ?1 AND r.METHOD = ?2 ORDER BY a.CREATEDATE", resultClass = Archive.class) }) public class Archive implements Serializable { private static final long serialVersionUID = 1L; private static Log log = LogFactory.getLog(Archive.class); public static final String ARCHIVE = "a.archiveid, a.remark, a.checksum, a.controlevent, a.createuser, a.createdate, a.tenant, a.caseid, " + "a.executionstatus, a.version, a.resourceid"; /** * named query */ public final static String SEL_ALL = "ARCHIVE_SEL_ALL"; /** * named query */ public final static String SEL_ALL_BY_TENANT = "ARCHIVE_SEL_ALL_BY_TENANT"; /** * named query */ public final static String SEL_ALL_BY_CLASS = "ARCHIVE_SEL_ALL_BY_CLASS"; public final static String SEL_ALL_BY_CLASS_NO_TENANT = "ARCHIVE_SEL_ALL_BY_CLASS_NO_TENANT"; /** * named query */ public final static String SEL_ALL_BY_CASEID = "ARCHIVE_SEL_ALL_BY_CASEID"; public final static String SEL_ALL_BY_CASEID_NO_TENANT = "ARCHIVE_SEL_ALL_BY_CASEID_NO_TENANT"; /** * named query */ public final static String SEL_BY_METHODNAME = "com.logitags.cibet.actuator.archive.Archive.SEL_BY_METHODNAME"; public final static String SEL_BY_METHODNAME_NO_TENANT = "com.logitags.cibet.actuator.archive.Archive.SEL_BY_METHODNAME_NO_TENANT"; /** * named query */ public static final String SEL_BY_PRIMARYKEYID = "com.logitags.cibet.actuator.archive.Archive.SEL_BY_PRIMARYKEYID";; public final static String SEL_BY_GROUPID = "com.logitags.cibet.actuator.archive.Archive.SEL_BY_GROUPID"; /** * unique ID * */ @Id private String archiveId; /** * optional comment by the user who is responsible for this Archive creation. (e.g. 
why a controlled object has been * rejected) */ private String remark; /** * message digest over the Archive data. */ private String checksum; /** * the type of event that is requested on the resource. */ @Column(length = 50) @Enumerated(EnumType.STRING) private ControlEvent controlEvent; /** * user id who initiated the control event */ @Column(length = 50) private String createUser; /** * Date when the user initiated the control event */ @Temporal(TemporalType.TIMESTAMP) private Date createDate = new Date(); /** * tenant */ private String tenant; /** * unique id that identifies the case. A case consists of related dual control data, INVOKE ... event and * RELEASE/REJECT events on the same object / method invocation. */ @Column(length = 60) private String caseId; /** * the execution status of the business case. */ @Enumerated(EnumType.STRING) @Column(length = 50) private ExecutionStatus executionStatus; @OneToOne(cascade = { CascadeType.DETACH, CascadeType.PERSIST, CascadeType.MERGE }) @JoinColumn(name = "RESOURCEID") private Resource resource; @Version private int version; @PrePersist public void prePersist() { if (resource != null) { resource.getUniqueId(); if (resource.getGroupId() == null) { resource.createGroupId(); } } archiveId = UUID.randomUUID().toString(); } @PreUpdate public void preMerge() { resource.createGroupId(); } public String toString() { StringBuffer b = new StringBuffer(); b.append(this.getClass().getName()); b.append("\nid = "); b.append(archiveId); b.append("\ncase id = "); b.append(caseId); b.append("\ncontrolEvent = "); b.append(controlEvent); b.append("\ncreateDate = "); b.append(createDate); b.append("\ncreateUser = "); b.append(createUser); b.append("\ntenant = "); b.append(tenant); b.append("\nexecutionStatus = "); b.append(executionStatus); b.append("\nRESOURCE: "); b.append(resource); return b.toString(); } public void decrypt() { if (getResource().isEncrypted()) { log.debug("decrypt Archive"); // OpenJPA workaround: 
getResource().getParameters().size(); Context.internalRequestScope().getOrCreateEntityManager(false).detach(this); getResource().decrypt(); } } /** * concatenates the Archive values for the checkSum. * * @return check sum String */ private String createCheckSumString() { SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss"); StringBuffer b = new StringBuffer(); b.append(getControlEvent()); b.append(getCreateDate() != null ? dateFormat.format(getCreateDate()) : "NULL"); b.append(getCreateUser()); b.append(getCaseId()); if (getTenant() != null) b.append(getTenant()); if (remark != null) b.append(remark); b.append(executionStatus == null ? "" : executionStatus); if (resource != null) { b.append(resource.createCheckSumString()); } log.debug("checkSumString = " + b.toString()); return b.toString(); } public void createChecksum() { SecurityProvider secProvider = Configuration.instance().getSecurityProvider(); String key = secProvider.getCurrentSecretKey(); getResource().setKeyReference(key); // createCheckSumString String checkSumString = createCheckSumString(); // calculateCheckSum String checksum = secProvider.createMessageDigest(checkSumString, key); setChecksum(checksum); } public boolean checkChecksum() { ArchiveActuator arch = (ArchiveActuator) Configuration.instance().getActuator(ArchiveActuator.DEFAULTNAME); if (arch.isIntegrityCheck()) { String checkSumString = createCheckSumString(); SecurityProvider secProvider = Configuration.instance().getSecurityProvider(); String checksum = secProvider.createMessageDigest(checkSumString, getResource().getKeyReference()); return checksum.equals(getChecksum()); } else { return true; } } /** * redo the invocation with same parameters. 
* * @param remark * optional comment * @return result or null * @throws ResourceApplyException * in case of error */ @CibetContext public Object redo(String remark) throws ResourceApplyException { ControlEvent originalControlEvent = (ControlEvent) Context.internalRequestScope() .getProperty(InternalRequestScope.CONTROLEVENT); String originalCaseId = Context.internalRequestScope().getCaseId(); String originalRemark = Context.internalRequestScope().getRemark(); try { Context.internalRequestScope().setCaseId(getCaseId()); if (remark != null) { Context.internalRequestScope().setRemark(remark); } Context.internalRequestScope().setProperty(InternalRequestScope.CONTROLEVENT, ControlEvent.REDO); return getResource().apply(ControlEvent.REDO); } finally { Context.internalRequestScope().setCaseId(originalCaseId); Context.internalRequestScope().setRemark(originalRemark); Context.internalRequestScope().setProperty(InternalRequestScope.CONTROLEVENT, originalControlEvent); } } /** * reverses the data modification action represented by this archive. If the event was an update, the old state is * restored. If the event has been removed, the object will be restored. * * @param entityManager * EntityManager for updating or inserting the restored object. * @param remark * optional remark * @return the restored object * */ @CibetContext public Object restore(EntityManager entityManager, String remark) { Object obj = getResource().getUnencodedTargetObject(); if (obj == null || !obj.getClass().isAnnotationPresent(Entity.class)) { String msg = "Failed to restore. 
Archive does not contain an archived JPA entity"; log.error(msg); throw new IllegalStateException(msg); } ControlEvent originalControlEvent = (ControlEvent) Context.internalRequestScope() .getProperty(InternalRequestScope.CONTROLEVENT); String originalCaseId = Context.internalRequestScope().getCaseId(); String originalRemark = Context.internalRequestScope().getRemark(); try { // check if removed EntityManager localEM = entityManager; if (entityManager instanceof CEntityManager) { localEM = ((CEntityManager) entityManager).getNativeEntityManager(); } Object objFromDb = localEM.find(obj.getClass(), ((JpaResource) getResource()).getPrimaryKeyObject()); // set after find(), otherwise case id is removed: Context.internalRequestScope().setCaseId(getCaseId()); if (remark != null) { Context.internalRequestScope().setRemark(remark); } if (objFromDb == null) { // object has been removed, must be persisted again resetAllIdAndVersion(obj); Context.internalRequestScope().setProperty(InternalRequestScope.CONTROLEVENT, ControlEvent.RESTORE_INSERT); entityManager.persist(obj); } else { // object exists, must be merged // set version to avoid optimistic locking try { Object version = AnnotationUtil.getValueOfAnnotatedFieldOrMethod(objFromDb, Version.class); AnnotationUtil.setValueToAnnotatedFieldOrSetter(obj, Version.class, version); } catch (AnnotationNotFoundException e) { // ignore if entity has no @Version annotation } Context.internalRequestScope().setProperty(InternalRequestScope.CONTROLEVENT, ControlEvent.RESTORE_UPDATE); obj = entityManager.merge(obj); } if (Context.internalRequestScope().getExecutedEventResult().getExecutionStatus() != ExecutionStatus.EXECUTED) { obj = null; } return obj; } finally { Context.internalRequestScope().setCaseId(originalCaseId); Context.requestScope().setRemark(originalRemark); Context.internalRequestScope().setProperty(InternalRequestScope.CONTROLEVENT, originalControlEvent); } } private void resetAllIdAndVersion(Object obj) { if (obj == null) 
return; resetIdAndVersion(obj); Class<?> intClass = obj.getClass(); while (intClass != null) { Field[] f = intClass.getDeclaredFields(); for (Field field : f) { Class<?> type = field.getType(); try { if (Collection.class.isAssignableFrom(type)) { field.setAccessible(true); Collection<Object> colField = (Collection<Object>) field.get(obj); if (colField == null) continue; Iterator<Object> it = colField.iterator(); while (it.hasNext()) { resetAllIdAndVersion(it.next()); } } else if (Map.class.isAssignableFrom(type)) { field.setAccessible(true); Map<Object, Object> map = (Map<Object, Object>) field.get(obj); Iterator<Object> it = map.keySet().iterator(); while (it.hasNext()) { resetAllIdAndVersion(it.next()); } it = map.values().iterator(); while (it.hasNext()) { resetAllIdAndVersion(it.next()); } } else if (type.isArray()) { Class<?> fieldClass = CibetUtil.arrayClassForName(type.getName()); if (!fieldClass.isPrimitive() && (fieldClass.isAnnotationPresent(Entity.class) || fieldClass.isAnnotationPresent(Embeddable.class))) { field.setAccessible(true); for (int i = 0; i < Array.getLength(field.get(obj)); i++) { resetAllIdAndVersion(Array.get(field.get(obj), i)); } } } else if (!type.isPrimitive() && (type.isAnnotationPresent(Entity.class) || type.isAnnotationPresent(Embeddable.class))) { field.setAccessible(true); resetAllIdAndVersion(field.get(obj)); } } catch (IllegalAccessException e) { String msg = "Failed to re-initialise ID attribute: " + e.getMessage(); log.error(msg, e); throw new RuntimeException(msg, e); } } intClass = intClass.getSuperclass(); } } private void resetIdAndVersion(Object obj) { boolean generatedId = AnnotationUtil.isFieldOrSetterAnnotationPresent(obj.getClass(), GeneratedValue.class); if (generatedId) { // set id == null or 0 try { AnnotationUtil.setValueToAnnotatedFieldOrSetter(obj, Id.class, null); } catch (IllegalArgumentException e) { AnnotationUtil.setValueToAnnotatedFieldOrSetter(obj, Id.class, 0); } } try { try { 
AnnotationUtil.setValueToAnnotatedFieldOrSetter(obj, Version.class, null); } catch (IllegalArgumentException e) { AnnotationUtil.setValueToAnnotatedFieldOrSetter(obj, Version.class, 0); } } catch (AnnotationNotFoundException e) { // ignore if entity has no @Version annotation } } /** * Set unique ID * * @param archiveId * the new value of */ public void setArchiveId(String archiveId) { this.archiveId = archiveId; } /** * Return unique ID * * @return String */ public String getArchiveId() { return this.archiveId; } /** * Set optional comment by the user who is responsible for this Archive creation. (e.g. why a controlled object has * been rejected) * * @param comment * the new value of optional comment */ public void setRemark(String comment) { this.remark = comment; } /** * Return optional comment by the user who is responsible for this Archive creation. (e.g. why a controlled object * has been rejected) * * @return String */ public String getRemark() { return this.remark; } /** * Return message digest over the Archive data. * * @return the checksum */ public String getChecksum() { return checksum; } /** * Set message digest over the Archive data. 
* * @param checksum * the checksum to set */ public void setChecksum(String checksum) { this.checksum = checksum; } /** * Set the type of action that is requested on the object * * @param type * the new value of the type of action that is requested on the object */ public void setControlEvent(ControlEvent type) { this.controlEvent = type; } /** * Return the type of action that is requested on the object * * @return enum */ public ControlEvent getControlEvent() { return this.controlEvent; } /** * Set user id who edited or initiated the control request * * @param userId * the new value of user id */ public void setCreateUser(String userId) { this.createUser = userId; } /** * Return user id who edited or initiated the control request * * @return String */ public String getCreateUser() { return this.createUser; } /** * Set date when the editing user requested the control action * * @param date * the new value of date */ public void setCreateDate(Date date) { this.createDate = date; } /** * Return date when the editing user requested the control action * * @return Date */ public Date getCreateDate() { return this.createDate; } /** * Return tenant * * @return the tenant */ public String getTenant() { return tenant; } /** * Set tenant * * @param tenant * the tenant to set */ public void setTenant(String tenant) { this.tenant = tenant; } /** * Returns a unique id that identifies the case. A case consists of related dual control data, INVOKE ... event and * RELEASE/REJECT events on the same object / method invocation. * * @return the caseId */ public String getCaseId() { return caseId; } /** * Set a unique id that identifies the case. A case consists of related dual control data, INVOKE ... event and * RELEASE/REJECT events on the same object / method invocation. 
* * @param incidentId * the incidentId to set */ public void setCaseId(String incidentId) { this.caseId = incidentId; } /** * @return the executionStatus */ public ExecutionStatus getExecutionStatus() { return executionStatus; } /** * @param executionStatus * the executionStatus to set */ public void setExecutionStatus(ExecutionStatus executionStatus) { this.executionStatus = executionStatus; } /** * @return the version */ public int getVersion() { return version; } /** * @param version * the version to set */ public void setVersion(int version) { this.version = version; } /** * @return the resource */ public Resource getResource() { return resource; } /** * @param resource * the resource to set */ public void setResource(Resource resource) { this.resource = resource; } /* * (non-Javadoc) * * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((archiveId == null) ? 0 : archiveId.hashCode()); return result; } /* * (non-Javadoc) * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Archive other = (Archive) obj; if (archiveId == null) { if (other.archiveId != null) return false; } else if (!archiveId.equals(other.archiveId)) return false; return true; } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

// NOTE(review): generated code — prefer regenerating from the service specification over
// hand-editing; manual changes will be lost on the next AutoRest run.

package com.azure.resourcemanager.recoveryservicesbackup.implementation;

import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Post;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.recoveryservicesbackup.fluent.ItemLevelRecoveryConnectionsClient;
import com.azure.resourcemanager.recoveryservicesbackup.models.IlrRequestResource;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in ItemLevelRecoveryConnectionsClient. */
public final class ItemLevelRecoveryConnectionsClientImpl implements ItemLevelRecoveryConnectionsClient {
    private final ClientLogger logger = new ClientLogger(ItemLevelRecoveryConnectionsClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final ItemLevelRecoveryConnectionsService service;

    /** The service client containing this operation class. */
    private final RecoveryServicesBackupClientImpl client;

    /**
     * Initializes an instance of ItemLevelRecoveryConnectionsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    ItemLevelRecoveryConnectionsClientImpl(RecoveryServicesBackupClientImpl client) {
        this.service =
            RestProxy
                .create(
                    ItemLevelRecoveryConnectionsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for RecoveryServicesBackupClientItemLevelRecoveryConnections to be used
     * by the proxy service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "RecoveryServicesBack")
    private interface ItemLevelRecoveryConnectionsService {
        @Headers({"Content-Type: application/json"})
        @Post(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices"
                + "/vaults/{vaultName}/backupFabrics/{fabricName}/protectionContainers/{containerName}/protectedItems"
                + "/{protectedItemName}/recoveryPoints/{recoveryPointId}/provisionInstantItemRecovery")
        @ExpectedResponses({202})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<Void>> provision(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("vaultName") String vaultName,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("fabricName") String fabricName,
            @PathParam("containerName") String containerName,
            @PathParam("protectedItemName") String protectedItemName,
            @PathParam("recoveryPointId") String recoveryPointId,
            @BodyParam("application/json") IlrRequestResource parameters,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Post(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices"
                + "/vaults/{vaultName}/backupFabrics/{fabricName}/protectionContainers/{containerName}/protectedItems"
                + "/{protectedItemName}/recoveryPoints/{recoveryPointId}/revokeInstantItemRecovery")
        @ExpectedResponses({202})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<Void>> revoke(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("vaultName") String vaultName,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("fabricName") String fabricName,
            @PathParam("containerName") String containerName,
            @PathParam("protectedItemName") String protectedItemName,
            @PathParam("recoveryPointId") String recoveryPointId,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Provisions a script which invokes an iSCSI connection to the backup data. Executing this script opens a file
     * explorer displaying all the recoverable files and folders. This is an asynchronous operation. To know the status
     * of provisioning, call GetProtectedItemOperationResult API.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName Fabric name associated with the backed up items.
     * @param containerName Container name associated with the backed up items.
     * @param protectedItemName Backed up item name whose files/folders are to be restored.
     * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be provisioned
     *     for this backed up data.
     * @param parameters resource ILR request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> provisionWithResponseAsync(
        String vaultName,
        String resourceGroupName,
        String fabricName,
        String containerName,
        String protectedItemName,
        String recoveryPointId,
        IlrRequestResource parameters) {
        // all path/body parameters are validated up-front so RestProxy never receives a null argument
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (vaultName == null) {
            return Mono.error(new IllegalArgumentException("Parameter vaultName is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (fabricName == null) {
            return Mono.error(new IllegalArgumentException("Parameter fabricName is required and cannot be null."));
        }
        if (containerName == null) {
            return Mono.error(new IllegalArgumentException("Parameter containerName is required and cannot be null."));
        }
        if (protectedItemName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter protectedItemName is required and cannot be null."));
        }
        if (recoveryPointId == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter recoveryPointId is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .provision(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            vaultName,
                            resourceGroupName,
                            this.client.getSubscriptionId(),
                            fabricName,
                            containerName,
                            protectedItemName,
                            recoveryPointId,
                            parameters,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Provisions a script which invokes an iSCSI connection to the backup data. Executing this script opens a file
     * explorer displaying all the recoverable files and folders. This is an asynchronous operation. To know the status
     * of provisioning, call GetProtectedItemOperationResult API.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName Fabric name associated with the backed up items.
     * @param containerName Container name associated with the backed up items.
     * @param protectedItemName Backed up item name whose files/folders are to be restored.
     * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be provisioned
     *     for this backed up data.
     * @param parameters resource ILR request.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> provisionWithResponseAsync(
        String vaultName,
        String resourceGroupName,
        String fabricName,
        String containerName,
        String protectedItemName,
        String recoveryPointId,
        IlrRequestResource parameters,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (vaultName == null) {
            return Mono.error(new IllegalArgumentException("Parameter vaultName is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (fabricName == null) {
            return Mono.error(new IllegalArgumentException("Parameter fabricName is required and cannot be null."));
        }
        if (containerName == null) {
            return Mono.error(new IllegalArgumentException("Parameter containerName is required and cannot be null."));
        }
        if (protectedItemName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter protectedItemName is required and cannot be null."));
        }
        if (recoveryPointId == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter recoveryPointId is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .provision(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                vaultName,
                resourceGroupName,
                this.client.getSubscriptionId(),
                fabricName,
                containerName,
                protectedItemName,
                recoveryPointId,
                parameters,
                accept,
                context);
    }

    /**
     * Provisions a script which invokes an iSCSI connection to the backup data. Executing this script opens a file
     * explorer displaying all the recoverable files and folders. This is an asynchronous operation. To know the status
     * of provisioning, call GetProtectedItemOperationResult API.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName Fabric name associated with the backed up items.
     * @param containerName Container name associated with the backed up items.
     * @param protectedItemName Backed up item name whose files/folders are to be restored.
     * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be provisioned
     *     for this backed up data.
     * @param parameters resource ILR request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return A {@link Mono} that completes when a successful response is received.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Void> provisionAsync(
        String vaultName,
        String resourceGroupName,
        String fabricName,
        String containerName,
        String protectedItemName,
        String recoveryPointId,
        IlrRequestResource parameters) {
        // discards the (empty) 202 response body and surfaces only completion/error
        return provisionWithResponseAsync(
                vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId, parameters)
            .flatMap((Response<Void> res) -> Mono.empty());
    }

    /**
     * Provisions a script which invokes an iSCSI connection to the backup data. Executing this script opens a file
     * explorer displaying all the recoverable files and folders. This is an asynchronous operation. To know the status
     * of provisioning, call GetProtectedItemOperationResult API.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName Fabric name associated with the backed up items.
     * @param containerName Container name associated with the backed up items.
     * @param protectedItemName Backed up item name whose files/folders are to be restored.
     * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be provisioned
     *     for this backed up data.
     * @param parameters resource ILR request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void provision(
        String vaultName,
        String resourceGroupName,
        String fabricName,
        String containerName,
        String protectedItemName,
        String recoveryPointId,
        IlrRequestResource parameters) {
        // synchronous facade over the async implementation
        provisionAsync(
                vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId, parameters)
            .block();
    }

    /**
     * Provisions a script which invokes an iSCSI connection to the backup data. Executing this script opens a file
     * explorer displaying all the recoverable files and folders. This is an asynchronous operation. To know the status
     * of provisioning, call GetProtectedItemOperationResult API.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName Fabric name associated with the backed up items.
     * @param containerName Container name associated with the backed up items.
     * @param protectedItemName Backed up item name whose files/folders are to be restored.
     * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be provisioned
     *     for this backed up data.
* @param parameters resource ILR request. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> provisionWithResponse( String vaultName, String resourceGroupName, String fabricName, String containerName, String protectedItemName, String recoveryPointId, IlrRequestResource parameters, Context context) { return provisionWithResponseAsync( vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId, parameters, context) .block(); } /** * Revokes an iSCSI connection which can be used to download a script. Executing this script opens a file explorer * displaying all recoverable files and folders. This is an asynchronous operation. * * @param vaultName The name of the recovery services vault. * @param resourceGroupName The name of the resource group where the recovery services vault is present. * @param fabricName Fabric name associated with the backed up items. * @param containerName Container name associated with the backed up items. * @param protectedItemName Backed up item name whose files/folders are to be restored. * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be revoked for * this backed up data. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. 
 */
// NOTE(review): generated service-client method — regenerating the SDK will overwrite manual edits.
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Void>> revokeWithResponseAsync(
    String vaultName,
    String resourceGroupName,
    String fabricName,
    String containerName,
    String protectedItemName,
    String recoveryPointId) {
    // Validate client-level and caller-supplied arguments up front, surfacing each
    // failure as a Mono.error rather than throwing synchronously.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (vaultName == null) {
        return Mono.error(new IllegalArgumentException("Parameter vaultName is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (fabricName == null) {
        return Mono.error(new IllegalArgumentException("Parameter fabricName is required and cannot be null."));
    }
    if (containerName == null) {
        return Mono.error(new IllegalArgumentException("Parameter containerName is required and cannot be null."));
    }
    if (protectedItemName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter protectedItemName is required and cannot be null."));
    }
    if (recoveryPointId == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter recoveryPointId is required and cannot be null."));
    }
    final String accept = "application/json";
    // No caller-supplied Context in this overload: capture the Reactor subscriber context
    // via FluxUtil.withContext and merge in the client's default context (read-only)
    // before the service call.
    return FluxUtil
        .withContext(
            context ->
                service
                    .revoke(
                        this.client.getEndpoint(),
                        this.client.getApiVersion(),
                        vaultName,
                        resourceGroupName,
                        this.client.getSubscriptionId(),
                        fabricName,
                        containerName,
                        protectedItemName,
                        recoveryPointId,
                        accept,
                        context))
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}

/**
 * Revokes an iSCSI connection which can be used to download a script. Executing this script opens a file explorer
 * displaying all recoverable files and folders.
This is an asynchronous operation.
 *
 * @param vaultName The name of the recovery services vault.
 * @param resourceGroupName The name of the resource group where the recovery services vault is present.
 * @param fabricName Fabric name associated with the backed up items.
 * @param containerName Container name associated with the backed up items.
 * @param protectedItemName Backed up item name whose files/folders are to be restored.
 * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be revoked for
 *     this backed up data.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link Response} on successful completion of {@link Mono}.
 */
// NOTE(review): generated service-client method — regenerating the SDK will overwrite manual edits.
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Void>> revokeWithResponseAsync(
    String vaultName,
    String resourceGroupName,
    String fabricName,
    String containerName,
    String protectedItemName,
    String recoveryPointId,
    Context context) {
    // Validate client-level and caller-supplied arguments up front, surfacing each
    // failure as a Mono.error rather than throwing synchronously.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (vaultName == null) {
        return Mono.error(new IllegalArgumentException("Parameter vaultName is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (fabricName == null) {
        return Mono.error(new IllegalArgumentException("Parameter fabricName is required and cannot be null."));
    }
    if (containerName == null) {
        return Mono.error(new IllegalArgumentException("Parameter containerName is required and cannot be null."));
    }
    if (protectedItemName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter protectedItemName is required and cannot be null."));
    }
    if (recoveryPointId == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter recoveryPointId is required and cannot be null."));
    }
    final String accept = "application/json";
    // Merge the caller's context with the client's defaults before the direct service call.
    context = this.client.mergeContext(context);
    return service
        .revoke(
            this.client.getEndpoint(),
            this.client.getApiVersion(),
            vaultName,
            resourceGroupName,
            this.client.getSubscriptionId(),
            fabricName,
            containerName,
            protectedItemName,
            recoveryPointId,
            accept,
            context);
}

/**
 * Revokes an iSCSI connection which can be used to download a script. Executing this script opens a file explorer
 * displaying all recoverable files and folders. This is an asynchronous operation.
 *
 * @param vaultName The name of the recovery services vault.
 * @param resourceGroupName The name of the resource group where the recovery services vault is present.
 * @param fabricName Fabric name associated with the backed up items.
 * @param containerName Container name associated with the backed up items.
 * @param protectedItemName Backed up item name whose files/folders are to be restored.
 * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be revoked for
 *     this backed up data.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return A {@link Mono} that completes when a successful response is received.
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Void> revokeAsync( String vaultName, String resourceGroupName, String fabricName, String containerName, String protectedItemName, String recoveryPointId) { return revokeWithResponseAsync( vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Revokes an iSCSI connection which can be used to download a script. Executing this script opens a file explorer * displaying all recoverable files and folders. This is an asynchronous operation. * * @param vaultName The name of the recovery services vault. * @param resourceGroupName The name of the resource group where the recovery services vault is present. * @param fabricName Fabric name associated with the backed up items. * @param containerName Container name associated with the backed up items. * @param protectedItemName Backed up item name whose files/folders are to be restored. * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be revoked for * this backed up data. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void revoke( String vaultName, String resourceGroupName, String fabricName, String containerName, String protectedItemName, String recoveryPointId) { revokeAsync(vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId) .block(); } /** * Revokes an iSCSI connection which can be used to download a script. Executing this script opens a file explorer * displaying all recoverable files and folders. This is an asynchronous operation. * * @param vaultName The name of the recovery services vault. 
* @param resourceGroupName The name of the resource group where the recovery services vault is present. * @param fabricName Fabric name associated with the backed up items. * @param containerName Container name associated with the backed up items. * @param protectedItemName Backed up item name whose files/folders are to be restored. * @param recoveryPointId Recovery point ID which represents backed up data. iSCSI connection will be revoked for * this backed up data. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> revokeWithResponse( String vaultName, String resourceGroupName, String fabricName, String containerName, String protectedItemName, String recoveryPointId, Context context) { return revokeWithResponseAsync( vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId, context) .block(); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.container.v1beta1.model;

/**
 * Kubernetes Engine service configuration.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Kubernetes Engine API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class ServerConfig extends com.google.api.client.json.GenericJson {

  // NOTE(review): generated data model — fields are bound to JSON keys via @Key and
  // fully-qualified type names are the generator's convention; kept as-is.

  /**
   * List of release channel configurations.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ReleaseChannelConfig> channels;

  static {
    // hack to force ProGuard to consider ReleaseChannelConfig used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(ReleaseChannelConfig.class);
  }

  /**
   * Version of Kubernetes the service deploys by default.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String defaultClusterVersion;

  /**
   * Default image type.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String defaultImageType;

  /**
   * List of valid image types.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> validImageTypes;

  /**
   * List of valid master versions, in descending order.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> validMasterVersions;

  /**
   * List of valid node upgrade target versions, in descending order.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> validNodeVersions;

  /**
   * Maps of Kubernetes version and supported Windows server versions.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, WindowsVersions> windowsVersionMaps;

  /**
   * List of release channel configurations.
   * @return value or {@code null} for none
   */
  public java.util.List<ReleaseChannelConfig> getChannels() {
    return channels;
  }

  /**
   * List of release channel configurations.
   * @param channels channels or {@code null} for none
   */
  public ServerConfig setChannels(java.util.List<ReleaseChannelConfig> channels) {
    this.channels = channels;
    return this;
  }

  /**
   * Version of Kubernetes the service deploys by default.
   * @return value or {@code null} for none
   */
  public java.lang.String getDefaultClusterVersion() {
    return defaultClusterVersion;
  }

  /**
   * Version of Kubernetes the service deploys by default.
   * @param defaultClusterVersion defaultClusterVersion or {@code null} for none
   */
  public ServerConfig setDefaultClusterVersion(java.lang.String defaultClusterVersion) {
    this.defaultClusterVersion = defaultClusterVersion;
    return this;
  }

  /**
   * Default image type.
   * @return value or {@code null} for none
   */
  public java.lang.String getDefaultImageType() {
    return defaultImageType;
  }

  /**
   * Default image type.
   * @param defaultImageType defaultImageType or {@code null} for none
   */
  public ServerConfig setDefaultImageType(java.lang.String defaultImageType) {
    this.defaultImageType = defaultImageType;
    return this;
  }

  /**
   * List of valid image types.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getValidImageTypes() {
    return validImageTypes;
  }

  /**
   * List of valid image types.
   * @param validImageTypes validImageTypes or {@code null} for none
   */
  public ServerConfig setValidImageTypes(java.util.List<java.lang.String> validImageTypes) {
    this.validImageTypes = validImageTypes;
    return this;
  }

  /**
   * List of valid master versions, in descending order.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getValidMasterVersions() {
    return validMasterVersions;
  }

  /**
   * List of valid master versions, in descending order.
   * @param validMasterVersions validMasterVersions or {@code null} for none
   */
  public ServerConfig setValidMasterVersions(java.util.List<java.lang.String> validMasterVersions) {
    this.validMasterVersions = validMasterVersions;
    return this;
  }

  /**
   * List of valid node upgrade target versions, in descending order.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getValidNodeVersions() {
    return validNodeVersions;
  }

  /**
   * List of valid node upgrade target versions, in descending order.
   * @param validNodeVersions validNodeVersions or {@code null} for none
   */
  public ServerConfig setValidNodeVersions(java.util.List<java.lang.String> validNodeVersions) {
    this.validNodeVersions = validNodeVersions;
    return this;
  }

  /**
   * Maps of Kubernetes version and supported Windows server versions.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, WindowsVersions> getWindowsVersionMaps() {
    return windowsVersionMaps;
  }

  /**
   * Maps of Kubernetes version and supported Windows server versions.
   * @param windowsVersionMaps windowsVersionMaps or {@code null} for none
   */
  public ServerConfig setWindowsVersionMaps(java.util.Map<String, WindowsVersions> windowsVersionMaps) {
    this.windowsVersionMaps = windowsVersionMaps;
    return this;
  }

  @Override
  public ServerConfig set(String fieldName, Object value) {
    return (ServerConfig) super.set(fieldName, value);
  }

  @Override
  public ServerConfig clone() {
    return (ServerConfig) super.clone();
  }
}
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.buffer;

import io.netty.util.Recycler;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;

/**
 * A pooled, direct (off-heap) buffer backed by a NIO {@link ByteBuffer}.
 * Instances are obtained from and returned to a {@link Recycler} rather than
 * allocated per use.
 *
 * <p>Throughout this class, {@code idx(index)} translates a buffer index into an
 * offset inside the pooled {@code memory} chunk (defined in the {@code PooledByteBuf}
 * superclass — not visible in this file). Read/write helpers take an {@code internal}
 * flag: when {@code true} they reuse the cached internal NIO view, when {@code false}
 * they operate on a fresh {@code memory.duplicate()} so the cached view's
 * position/limit are left undisturbed.
 */
final class PooledDirectByteBuf extends PooledByteBuf<ByteBuffer> {

    private static final Recycler<PooledDirectByteBuf> RECYCLER = new Recycler<PooledDirectByteBuf>() {
        @Override
        protected PooledDirectByteBuf newObject(Handle<PooledDirectByteBuf> handle) {
            return new PooledDirectByteBuf(handle, 0);
        }
    };

    /**
     * Fetches a recycled instance (or a new one) and re-initializes it for use
     * with the given maximum capacity.
     */
    static PooledDirectByteBuf newInstance(int maxCapacity) {
        PooledDirectByteBuf buf = RECYCLER.get();
        buf.reuse(maxCapacity);
        return buf;
    }

    private PooledDirectByteBuf(Recycler.Handle<PooledDirectByteBuf> recyclerHandle, int maxCapacity) {
        super(recyclerHandle, maxCapacity);
    }

    @Override
    protected ByteBuffer newInternalNioBuffer(ByteBuffer memory) {
        // An independent view over the same memory, so internal position/limit
        // changes never affect the pooled chunk's own buffer.
        return memory.duplicate();
    }

    @Override
    public boolean isDirect() {
        return true;
    }

    @Override
    protected byte _getByte(int index) {
        return memory.get(idx(index));
    }

    @Override
    protected short _getShort(int index) {
        return memory.getShort(idx(index));
    }

    @Override
    protected short _getShortLE(int index) {
        // Little-endian variant implemented by byte-swapping the big-endian read.
        return ByteBufUtil.swapShort(_getShort(index));
    }

    @Override
    protected int _getUnsignedMedium(int index) {
        // 24-bit big-endian read assembled from three single-byte reads.
        index = idx(index);
        return (memory.get(index) & 0xff) << 16 | (memory.get(index + 1) & 0xff) << 8 | memory.get(index + 2) & 0xff;
    }

    @Override
    protected int _getUnsignedMediumLE(int index) {
        // 24-bit little-endian read assembled from three single-byte reads.
        index = idx(index);
        return memory.get(index) & 0xff | (memory.get(index + 1) & 0xff) << 8 | (memory.get(index + 2) & 0xff) << 16;
    }

    @Override
    protected int _getInt(int index) {
        return memory.getInt(idx(index));
    }

    @Override
    protected int _getIntLE(int index) {
        return ByteBufUtil.swapInt(_getInt(index));
    }

    @Override
    protected long _getLong(int index) {
        return memory.getLong(idx(index));
    }

    @Override
    protected long _getLongLE(int index) {
        return ByteBufUtil.swapLong(_getLong(index));
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
        checkDstIndex(index, length, dstIndex, dst.capacity());
        if (dst.hasArray()) {
            // Destination is heap-backed: copy straight into its array.
            getBytes(index, dst.array(), dst.arrayOffset() + dstIndex, length);
        } else if (dst.nioBufferCount() > 0) {
            // Destination exposes NIO buffers: fill each view in turn.
            for (ByteBuffer bb: dst.nioBuffers(dstIndex, length)) {
                int bbLen = bb.remaining();
                getBytes(index, bb);
                index += bbLen;
            }
        } else {
            // Fall back to letting the destination pull from this buffer.
            dst.setBytes(dstIndex, this, index, length);
        }
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
        getBytes(index, dst, dstIndex, length, false);
        return this;
    }

    private void getBytes(int index, byte[] dst, int dstIndex, int length, boolean internal) {
        checkDstIndex(index, length, dstIndex, dst.length);
        ByteBuffer tmpBuf;
        if (internal) {
            tmpBuf = internalNioBuffer();
        } else {
            tmpBuf = memory.duplicate();
        }
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + length);
        tmpBuf.get(dst, dstIndex, length);
    }

    @Override
    public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
        checkReadableBytes(length);
        // readBytes owns the reader index, so it may safely reuse the internal view.
        getBytes(readerIndex, dst, dstIndex, length, true);
        readerIndex += length;
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuffer dst) {
        getBytes(index, dst, false);
        return this;
    }

    private void getBytes(int index, ByteBuffer dst, boolean internal) {
        checkIndex(index, dst.remaining());
        ByteBuffer tmpBuf;
        if (internal) {
            tmpBuf = internalNioBuffer();
        } else {
            tmpBuf = memory.duplicate();
        }
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + dst.remaining());
        dst.put(tmpBuf);
    }

    @Override
    public ByteBuf readBytes(ByteBuffer dst) {
        int length = dst.remaining();
        checkReadableBytes(length);
        getBytes(readerIndex, dst, true);
        readerIndex += length;
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
        getBytes(index, out, length, false);
        return this;
    }

    private void getBytes(int index, OutputStream out, int length, boolean internal) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return;
        }
        ByteBufUtil.readBytes(alloc(), internal ? internalNioBuffer() : memory.duplicate(), idx(index), length, out);
    }

    @Override
    public ByteBuf readBytes(OutputStream out, int length) throws IOException {
        checkReadableBytes(length);
        getBytes(readerIndex, out, length, true);
        readerIndex += length;
        return this;
    }

    @Override
    public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
        return getBytes(index, out, length, false);
    }

    private int getBytes(int index, GatheringByteChannel out, int length, boolean internal) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return 0;
        }

        ByteBuffer tmpBuf;
        if (internal) {
            tmpBuf = internalNioBuffer();
        } else {
            tmpBuf = memory.duplicate();
        }
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + length);
        return out.write(tmpBuf);
    }

    @Override
    public int getBytes(int index, FileChannel out, long position, int length) throws IOException {
        return getBytes(index, out, position, length, false);
    }

    private int getBytes(int index, FileChannel out, long position, int length, boolean internal) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return 0;
        }

        ByteBuffer tmpBuf = internal ? internalNioBuffer() : memory.duplicate();
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + length);
        return out.write(tmpBuf, position);
    }

    @Override
    public int readBytes(GatheringByteChannel out, int length) throws IOException {
        checkReadableBytes(length);
        int readBytes = getBytes(readerIndex, out, length, true);
        // Advance by what the channel actually accepted, which may be < length.
        readerIndex += readBytes;
        return readBytes;
    }

    @Override
    public int readBytes(FileChannel out, long position, int length) throws IOException {
        checkReadableBytes(length);
        int readBytes = getBytes(readerIndex, out, position, length, true);
        readerIndex += readBytes;
        return readBytes;
    }

    @Override
    protected void _setByte(int index, int value) {
        memory.put(idx(index), (byte) value);
    }

    @Override
    protected void _setShort(int index, int value) {
        memory.putShort(idx(index), (short) value);
    }

    @Override
    protected void _setShortLE(int index, int value) {
        _setShort(index, ByteBufUtil.swapShort((short) value));
    }

    @Override
    protected void _setMedium(int index, int value) {
        // 24-bit big-endian write, one byte at a time.
        index = idx(index);
        memory.put(index, (byte) (value >>> 16));
        memory.put(index + 1, (byte) (value >>> 8));
        memory.put(index + 2, (byte) value);
    }

    @Override
    protected void _setMediumLE(int index, int value) {
        // 24-bit little-endian write, one byte at a time.
        index = idx(index);
        memory.put(index, (byte) value);
        memory.put(index + 1, (byte) (value >>> 8));
        memory.put(index + 2, (byte) (value >>> 16));
    }

    @Override
    protected void _setInt(int index, int value) {
        memory.putInt(idx(index), value);
    }

    @Override
    protected void _setIntLE(int index, int value) {
        _setInt(index, ByteBufUtil.swapInt(value));
    }

    @Override
    protected void _setLong(int index, long value) {
        memory.putLong(idx(index), value);
    }

    @Override
    protected void _setLongLE(int index, long value) {
        _setLong(index, ByteBufUtil.swapLong(value));
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
        checkSrcIndex(index, length, srcIndex, src.capacity());
        if (src.hasArray()) {
            // Source is heap-backed: copy from its array directly.
            setBytes(index, src.array(), src.arrayOffset() + srcIndex, length);
        } else if (src.nioBufferCount() > 0) {
            // Source exposes NIO buffers: drain each view in turn.
            for (ByteBuffer bb: src.nioBuffers(srcIndex, length)) {
                int bbLen = bb.remaining();
                setBytes(index, bb);
                index += bbLen;
            }
        } else {
            // Fall back to letting the source push into this buffer.
            src.getBytes(srcIndex, this, index, length);
        }
        return this;
    }

    @Override
    public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
        checkSrcIndex(index, length, srcIndex, src.length);
        ByteBuffer tmpBuf = internalNioBuffer();
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + length);
        tmpBuf.put(src, srcIndex, length);
        return this;
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuffer src) {
        checkIndex(index, src.remaining());
        ByteBuffer tmpBuf = internalNioBuffer();
        if (src == tmpBuf) {
            // Guard against copying the shared internal view onto itself.
            src = src.duplicate();
        }

        index = idx(index);
        tmpBuf.clear().position(index).limit(index + src.remaining());
        tmpBuf.put(src);
        return this;
    }

    @Override
    public int setBytes(int index, InputStream in, int length) throws IOException {
        checkIndex(index, length);
        // Stage through a thread-local heap array: InputStream cannot write into a
        // direct ByteBuffer directly.
        byte[] tmp = ByteBufUtil.threadLocalTempArray(length);
        int readBytes = in.read(tmp, 0, length);
        if (readBytes <= 0) {
            return readBytes;
        }
        ByteBuffer tmpBuf = internalNioBuffer();
        tmpBuf.clear().position(idx(index));
        tmpBuf.put(tmp, 0, readBytes);
        return readBytes;
    }

    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
        checkIndex(index, length);
        ByteBuffer tmpBuf = internalNioBuffer();
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + length);
        try {
            return in.read(tmpBuf);
        } catch (ClosedChannelException ignored) {
            // Report end-of-stream instead of propagating when the channel is already closed.
            return -1;
        }
    }

    @Override
    public int setBytes(int index, FileChannel in, long position, int length) throws IOException {
        checkIndex(index, length);
        ByteBuffer tmpBuf = internalNioBuffer();
        index = idx(index);
        tmpBuf.clear().position(index).limit(index + length);
        try {
            return in.read(tmpBuf, position);
        } catch (ClosedChannelException ignored) {
            // Report end-of-stream instead of propagating when the channel is already closed.
            return -1;
        }
    }

    @Override
    public ByteBuf copy(int index, int length) {
        checkIndex(index, length);
        ByteBuf copy = alloc().directBuffer(length, maxCapacity());
        copy.writeBytes(this, index, length);
        return copy;
    }

    @Override
    public int nioBufferCount() {
        return 1;
    }

    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        checkIndex(index, length);
        index = idx(index);
        // slice() yields a view whose position 0 corresponds to `index`.
        return ((ByteBuffer) memory.duplicate().position(index).limit(index + length)).slice();
    }

    @Override
    public ByteBuffer[] nioBuffers(int index, int length) {
        return new ByteBuffer[] { nioBuffer(index, length) };
    }

    @Override
    public ByteBuffer internalNioBuffer(int index, int length) {
        checkIndex(index, length);
        index = idx(index);
        return (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
    }

    @Override
    public boolean hasArray() {
        // Direct buffers have no backing heap array.
        return false;
    }

    @Override
    public byte[] array() {
        throw new UnsupportedOperationException("direct buffer");
    }

    @Override
    public int arrayOffset() {
        throw new UnsupportedOperationException("direct buffer");
    }

    @Override
    public boolean hasMemoryAddress() {
        return false;
    }

    @Override
    public long memoryAddress() {
        throw new UnsupportedOperationException();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.filter;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;

import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.columniterator.ISSTableColumnIterator;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.columniterator.SSTableSliceIterator;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.io.util.FileDataInput;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.tracing.Tracing;

/**
 * A read filter that selects columns falling inside one or more {@link ColumnSlice}
 * ranges, in forward or reversed order, up to {@code count} results.
 */
public class SliceQueryFilter implements IDiskAtomFilter
{
    private static final Logger logger = LoggerFactory.getLogger(SliceQueryFilter.class);
    public static final Serializer serializer = new Serializer();

    public final ColumnSlice[] slices;
    public final boolean reversed;
    public volatile int count;
    private final int compositesToGroup;
    // This is a hack to allow rolling upgrade with pre-1.2 nodes
    private final int countMultiplierForCompatibility;

    // Not serialized, just a hack for range slices to find the number of live columns counted, even when we group
    private ColumnCounter columnCounter;

    public SliceQueryFilter(ByteBuffer start, ByteBuffer finish, boolean reversed, int count)
    {
        this(new ColumnSlice[] { new ColumnSlice(start, finish) }, reversed, count);
    }

    public SliceQueryFilter(ByteBuffer start, ByteBuffer finish, boolean reversed, int count, int compositesToGroup)
    {
        this(new ColumnSlice[] { new ColumnSlice(start, finish) }, reversed, count, compositesToGroup, 1);
    }

    /**
     * Constructor that accepts multiple slices. All slices are assumed to be in the same direction (forward or
     * reversed).
     */
    public SliceQueryFilter(ColumnSlice[] slices, boolean reversed, int count)
    {
        this(slices, reversed, count, -1, 1);
    }

    public SliceQueryFilter(ColumnSlice[] slices, boolean reversed, int count, int compositesToGroup, int countMultiplierForCompatibility)
    {
        this.slices = slices;
        this.reversed = reversed;
        this.count = count;
        this.compositesToGroup = compositesToGroup;
        this.countMultiplierForCompatibility = countMultiplierForCompatibility;
    }

    /** Returns a copy of this filter with a different result count; all other settings are preserved. */
    public SliceQueryFilter withUpdatedCount(int newCount)
    {
        return new SliceQueryFilter(slices, reversed, newCount, compositesToGroup, countMultiplierForCompatibility);
    }

    /** Returns a copy of this filter over a different set of slices; all other settings are preserved. */
    public SliceQueryFilter withUpdatedSlices(ColumnSlice[] newSlices)
    {
        return new SliceQueryFilter(newSlices, reversed, count, compositesToGroup, countMultiplierForCompatibility);
    }

    public OnDiskAtomIterator getMemtableColumnIterator(ColumnFamily cf, DecoratedKey key)
    {
        return Memtable.getSliceIterator(key, cf, this);
    }

    public ISSTableColumnIterator getSSTableColumnIterator(SSTableReader sstable, DecoratedKey key)
    {
        return new SSTableSliceIterator(sstable, key, slices, reversed);
    }

    public ISSTableColumnIterator getSSTableColumnIterator(SSTableReader sstable, FileDataInput file, DecoratedKey key, RowIndexEntry indexEntry)
    {
        return new SSTableSliceIterator(sstable, file, key, slices, reversed, indexEntry);
    }

    /**
     * Applies this filter to the subcolumns of {@code superColumn}, returning a shallow clone
     * containing only the subcolumns between start() and finish() that survive gc/tombstone rules.
     */
    public SuperColumn filterSuperColumn(SuperColumn superColumn, int gcBefore)
    {
        // we clone shallow, then add, under the theory that generally we're interested in a relatively small number of subcolumns.
        // this may be a poor assumption. on the other hand, we save copying the whole column map.
        SuperColumn scFiltered = superColumn.cloneMeShallow();
        final Iterator<IColumn> subcolumns;
        if (reversed)
        {
            List<IColumn> columnsAsList = new ArrayList<IColumn>(superColumn.getSubColumns());
            subcolumns = Lists.reverse(columnsAsList).iterator();
        }
        else
        {
            subcolumns = superColumn.getSubColumns().iterator();
        }
        final Comparator<ByteBuffer> comparator = reversed ? superColumn.getComparator().reverseComparator : superColumn.getComparator();
        Iterator<IColumn> results = new AbstractIterator<IColumn>()
        {
            protected IColumn computeNext()
            {
                while (subcolumns.hasNext())
                {
                    IColumn subcolumn = subcolumns.next();
                    // iterate until we get to the "real" start column
                    if (comparator.compare(subcolumn.name(), start()) < 0)
                        continue;
                    // exit loop when columns are out of the range.
                    if (finish().remaining() > 0 && comparator.compare(subcolumn.name(), finish()) > 0)
                        break;
                    return subcolumn;
                }
                return endOfData();
            }
        };

        // subcolumns is either empty now, or has been redefined in the loop above. either is ok.
        collectReducedColumns(scFiltered, results, gcBefore);
        return scFiltered;
    }

    public Comparator<IColumn> getColumnComparator(AbstractType<?> comparator)
    {
        return reversed ? comparator.columnReverseComparator : comparator.columnComparator;
    }

    /**
     * Drains {@code reducedColumns} into {@code container}, counting live columns (via
     * {@link #getColumnCounter}) and stopping once more than {@code count} live columns
     * have been seen. Tombstoned-but-relevant columns are still added, for read repair.
     */
    public void collectReducedColumns(IColumnContainer container, Iterator<IColumn> reducedColumns, int gcBefore)
    {
        columnCounter = getColumnCounter(container);

        while (reducedColumns.hasNext())
        {
            IColumn column = reducedColumns.next();
            if (logger.isTraceEnabled())
                logger.trace(String.format("collecting %s of %s: %s",
                                           columnCounter.live(), count, column.getString(container.getComparator())));

            columnCounter.count(column, container);

            if (columnCounter.live() > count)
                break;

            // but we need to add all non-gc-able columns to the result for read repair:
            if (QueryFilter.isRelevant(column, container, gcBefore))
                container.addColumn(column);
        }

        Tracing.trace("Read {} live and {} tombstoned cells", columnCounter.live(), columnCounter.ignored());
        if (columnCounter.ignored() > DatabaseDescriptor.getTombstoneDebugThreshold())
            logger.debug("Read {} live and {} tombstoned cells", columnCounter.live(), columnCounter.ignored());
    }

    /** Counts the live columns of {@code cf} using this filter's grouping rules. */
    public int getLiveCount(ColumnFamily cf)
    {
        ColumnCounter counter = getColumnCounter(cf);
        for (IColumn column : cf)
            counter.count(column, cf);
        return counter.live();
    }

    // Picks the counter implementation matching compositesToGroup: plain counting (< 0),
    // group-everything (== 0), or group by the first compositesToGroup composite components.
    private ColumnCounter getColumnCounter(IColumnContainer container)
    {
        AbstractType<?> comparator = container.getComparator();
        if (compositesToGroup < 0)
            return new ColumnCounter();
        else if (compositesToGroup == 0)
            return new ColumnCounter.GroupByPrefix(null, 0);
        else
            return new ColumnCounter.GroupByPrefix((CompositeType)comparator, compositesToGroup);
    }

    /**
     * Removes from {@code cf} every column past the first {@code trimTo} live ones
     * (in this filter's iteration order).
     */
    public void trim(ColumnFamily cf, int trimTo)
    {
        ColumnCounter counter = getColumnCounter(cf);

        Collection<ByteBuffer> toRemove = null;
        boolean trimRemaining = false;

        Collection<IColumn> columns = reversed ? cf.getReverseSortedColumns() : cf.getSortedColumns();
        for (IColumn column : columns)
        {
            if (trimRemaining)
            {
                toRemove.add(column.name());
                continue;
            }

            counter.count(column, cf);
            if (counter.live() > trimTo)
            {
                toRemove = new HashSet<ByteBuffer>();
                toRemove.add(column.name());
                trimRemaining = true;
            }
        }

        if (toRemove != null)
        {
            for (ByteBuffer columnName : toRemove)
                cf.remove(columnName);
        }
    }

    public ByteBuffer start()
    {
        return this.slices[0].start;
    }

    public ByteBuffer finish()
    {
        return this.slices[slices.length - 1].finish;
    }

    public void setStart(ByteBuffer start)
    {
        assert slices.length == 1;
        this.slices[0] = new ColumnSlice(start, this.slices[0].finish);
    }

    public int lastCounted()
    {
        return columnCounter == null ? 0 : columnCounter.live();
    }

    public int lastIgnored()
    {
        return columnCounter == null ? 0 : columnCounter.ignored();
    }

    public int lastLive()
    {
        return columnCounter == null ? 0 : columnCounter.live();
    }

    @Override
    public String toString()
    {
        return "SliceQueryFilter [reversed=" + reversed + ", slices=" + Arrays.toString(slices) + ", count=" + count + ", toGroup = " + compositesToGroup + "]";
    }

    public boolean isReversed()
    {
        return reversed;
    }

    public void updateColumnsLimit(int newLimit)
    {
        count = newLimit;
    }

    /** Returns true if {@code name} falls inside any of this filter's slices under {@code cmp}. */
    public boolean includes(Comparator<ByteBuffer> cmp, ByteBuffer name)
    {
        for (ColumnSlice slice : slices)
            if (slice.includes(cmp, name))
                return true;
        return false;
    }

    public static class Serializer implements IVersionedSerializer<SliceQueryFilter>
    {
        public void serialize(SliceQueryFilter f, DataOutput dos, int version) throws IOException
        {
            if (version < MessagingService.VERSION_12)
            {
                // It's kind of lame, but probably better than throwing an exception
                ColumnSlice slice = new ColumnSlice(f.start(), f.finish());
                ColumnSlice.serializer.serialize(slice, dos, version);
            }
            else
            {
                dos.writeInt(f.slices.length);
                for (ColumnSlice slice : f.slices)
                    ColumnSlice.serializer.serialize(slice, dos, version);
            }
            dos.writeBoolean(f.reversed);
            int count = f.count;
            // Pre-1.2 nodes don't know about composite grouping, so inflate the count for them.
            if (f.compositesToGroup > 0 && version < MessagingService.VERSION_12)
                count *= f.countMultiplierForCompatibility;
            dos.writeInt(count);
            if (version < MessagingService.VERSION_12)
                return;
            dos.writeInt(f.compositesToGroup);
        }

        public SliceQueryFilter deserialize(DataInput dis, int version) throws IOException
        {
            ColumnSlice[] slices;
            if (version < MessagingService.VERSION_12)
            {
                slices = new ColumnSlice[]{ ColumnSlice.serializer.deserialize(dis, version) };
            }
            else
            {
                slices = new ColumnSlice[dis.readInt()];
                for (int i = 0; i < slices.length; i++)
                    slices[i] = ColumnSlice.serializer.deserialize(dis, version);
            }
            boolean reversed = dis.readBoolean();
            int count = dis.readInt();
            int compositesToGroup = -1;
            if (version >= MessagingService.VERSION_12)
                compositesToGroup = dis.readInt();

            return new SliceQueryFilter(slices, reversed, count, compositesToGroup, 1);
        }

        public long serializedSize(SliceQueryFilter f, int version)
        {
            TypeSizes sizes = TypeSizes.NATIVE;

            int size = 0;
            if (version < MessagingService.VERSION_12)
            {
                size += ColumnSlice.serializer.serializedSize(new ColumnSlice(f.start(), f.finish()), version);
            }
            else
            {
                size += sizes.sizeof(f.slices.length);
                for (ColumnSlice slice : f.slices)
                    size += ColumnSlice.serializer.serializedSize(slice, version);
            }
            size += sizes.sizeof(f.reversed);
            size += sizes.sizeof(f.count);
            if (version >= MessagingService.VERSION_12)
                size += sizes.sizeof(f.compositesToGroup);
            return size;
        }
    }
}
package jadx.core.xmlgen;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Constants from the Android binary resource format (frameworks/base
 * ResourceTypes.h), shared by the binary XML / resource table parsers
 * that extend this class.
 */
public class ParserConstants {

	/**
	 * Chunk types
	 */
	protected static final int RES_NULL_TYPE = 0x0000;
	protected static final int RES_STRING_POOL_TYPE = 0x0001;
	protected static final int RES_TABLE_TYPE = 0x0002;
	protected static final int RES_XML_TYPE = 0x0003;

	protected static final int RES_XML_FIRST_CHUNK_TYPE = 0x0100;
	protected static final int RES_XML_START_NAMESPACE_TYPE = 0x0100;
	protected static final int RES_XML_END_NAMESPACE_TYPE = 0x0101;
	protected static final int RES_XML_START_ELEMENT_TYPE = 0x0102;
	protected static final int RES_XML_END_ELEMENT_TYPE = 0x0103;
	protected static final int RES_XML_CDATA_TYPE = 0x0104;
	protected static final int RES_XML_LAST_CHUNK_TYPE = 0x017f;
	protected static final int RES_XML_RESOURCE_MAP_TYPE = 0x0180;

	protected static final int RES_TABLE_PACKAGE_TYPE = 0x0200;
	protected static final int RES_TABLE_TYPE_TYPE = 0x0201;
	protected static final int RES_TABLE_TYPE_SPEC_TYPE = 0x0202;

	/**
	 * Type constants
	 */
	// Contains no data.
	protected static final int TYPE_NULL = 0x00;
	// The 'data' holds a ResTable_ref, a reference to another resource table entry.
	protected static final int TYPE_REFERENCE = 0x01;
	// The 'data' holds an attribute resource identifier.
	protected static final int TYPE_ATTRIBUTE = 0x02;
	// The 'data' holds an index into the containing resource table's global value string pool.
	protected static final int TYPE_STRING = 0x03;
	// The 'data' holds a single-precision floating point number.
	protected static final int TYPE_FLOAT = 0x04;
	// The 'data' holds a complex number encoding a dimension value, such as "100in".
	protected static final int TYPE_DIMENSION = 0x05;
	// The 'data' holds a complex number encoding a fraction of a container.
	protected static final int TYPE_FRACTION = 0x06;
	// Beginning of integer flavors...
	protected static final int TYPE_FIRST_INT = 0x10;
	// The 'data' is a raw integer value of the form n..n.
	protected static final int TYPE_INT_DEC = 0x10;
	// The 'data' is a raw integer value of the form 0xn..n.
	protected static final int TYPE_INT_HEX = 0x11;
	// The 'data' is either 0 or 1, for input "false" or "true" respectively.
	protected static final int TYPE_INT_BOOLEAN = 0x12;
	// Beginning of color integer flavors...
	protected static final int TYPE_FIRST_COLOR_INT = 0x1c;
	// The 'data' is a raw integer value of the form #aarrggbb.
	protected static final int TYPE_INT_COLOR_ARGB8 = 0x1c;
	// The 'data' is a raw integer value of the form #rrggbb.
	protected static final int TYPE_INT_COLOR_RGB8 = 0x1d;
	// The 'data' is a raw integer value of the form #argb.
	protected static final int TYPE_INT_COLOR_ARGB4 = 0x1e;
	// The 'data' is a raw integer value of the form #rgb.
	protected static final int TYPE_INT_COLOR_RGB4 = 0x1f;
	// ...end of color integer flavors.
	protected static final int TYPE_LAST_COLOR_INT = 0x1f;
	// ...end of integer flavors.
	protected static final int TYPE_LAST_INT = 0x1f;

	// Where the unit type information is.  This gives us 16 possible
	// types, as defined below.
	protected static final int COMPLEX_UNIT_SHIFT = 0;
	protected static final int COMPLEX_UNIT_MASK = 0xf;
	// TYPE_DIMENSION: Value is raw pixels.
	protected static final int COMPLEX_UNIT_PX = 0;
	// TYPE_DIMENSION: Value is Device Independent Pixels.
	protected static final int COMPLEX_UNIT_DIP = 1;
	// TYPE_DIMENSION: Value is a Scaled device independent Pixels.
	protected static final int COMPLEX_UNIT_SP = 2;
	// TYPE_DIMENSION: Value is in points.
	protected static final int COMPLEX_UNIT_PT = 3;
	// TYPE_DIMENSION: Value is in inches.
	protected static final int COMPLEX_UNIT_IN = 4;
	// TYPE_DIMENSION: Value is in millimeters.
	protected static final int COMPLEX_UNIT_MM = 5;
	// TYPE_FRACTION: A basic fraction of the overall size.
	protected static final int COMPLEX_UNIT_FRACTION = 0;
	// TYPE_FRACTION: A fraction of the parent size.
	protected static final int COMPLEX_UNIT_FRACTION_PARENT = 1;

	// Where the radix information is, telling where the decimal place
	// appears in the mantissa.  This give us 4 possible fixed point
	// representations as defined below.
	protected static final int COMPLEX_RADIX_SHIFT = 4;
	protected static final int COMPLEX_RADIX_MASK = 0x3;
	// The mantissa is an integral number -- i.e., 0xnnnnnn.0
	protected static final int COMPLEX_RADIX_23p0 = 0;
	// The mantissa magnitude is 16 bits -- i.e, 0xnnnn.nn
	protected static final int COMPLEX_RADIX_16p7 = 1;
	// The mantissa magnitude is 8 bits -- i.e, 0xnn.nnnn
	protected static final int COMPLEX_RADIX_8p15 = 2;
	// The mantissa magnitude is 0 bits -- i.e, 0x0.nnnnnn
	protected static final int COMPLEX_RADIX_0p23 = 3;

	// Where the actual value is.  This gives us 23 bits of
	// precision.  The top bit is the sign.
	protected static final int COMPLEX_MANTISSA_SHIFT = 8;
	protected static final int COMPLEX_MANTISSA_MASK = 0xffffff;

	protected static final double MANTISSA_MULT = 1.0f / (1 << COMPLEX_MANTISSA_SHIFT);
	protected static final double[] RADIX_MULTS = new double[]{
			1.0f * MANTISSA_MULT,
			1.0f / (1 << 7) * MANTISSA_MULT,
			1.0f / (1 << 15) * MANTISSA_MULT,
			1.0f / (1 << 23) * MANTISSA_MULT
	};

	/**
	 * String pool flags
	 */
	protected static final int SORTED_FLAG = 1;
	protected static final int UTF8_FLAG = 1 << 8;

	protected static final int NO_ENTRY = 0xFFFFFFFF;

	/**
	 * ResTable_entry
	 */
	// If set, this is a complex entry, holding a set of name/value mappings.
	// It is followed by an array of ResTable_map structures.
	protected static final int FLAG_COMPLEX = 0x0001;
	// If set, this resource has been declared public, so libraries are allowed to reference it.
	protected static final int FLAG_PUBLIC = 0x0002;

	/**
	 * ResTable_map
	 */
	protected static final int ATTR_TYPE = ResMakeInternal(0);
	// For integral attributes, this is the minimum value it can hold.
	protected static final int ATTR_MIN = ResMakeInternal(1);
	// For integral attributes, this is the maximum value it can hold.
	protected static final int ATTR_MAX = ResMakeInternal(2);
	// Localization of this resource is can be encouraged or required with an aapt flag if this is set
	protected static final int ATTR_L10N = ResMakeInternal(3);

	// for plural support, see android.content.res.PluralRules#attrForQuantity(int)
	protected static final int ATTR_OTHER = ResMakeInternal(4);
	protected static final int ATTR_ZERO = ResMakeInternal(5);
	protected static final int ATTR_ONE = ResMakeInternal(6);
	protected static final int ATTR_TWO = ResMakeInternal(7);
	protected static final int ATTR_FEW = ResMakeInternal(8);
	protected static final int ATTR_MANY = ResMakeInternal(9);

	// Maps the plural ATTR_* ids to their quantity names. Built with a static
	// initializer instead of double-brace initialization to avoid creating an
	// anonymous HashMap subclass; wrapped unmodifiable since it is a lookup table.
	protected static final Map<Integer, String> PLURALS_MAP;

	static {
		Map<Integer, String> map = new HashMap<Integer, String>();
		map.put(ATTR_OTHER, "other");
		map.put(ATTR_ZERO, "zero");
		map.put(ATTR_ONE, "one");
		map.put(ATTR_TWO, "two");
		map.put(ATTR_FEW, "few");
		map.put(ATTR_MANY, "many");
		PLURALS_MAP = Collections.unmodifiableMap(map);
	}

	// Tags an entry id as an "internal" attribute id (Res_MAKEINTERNAL in ResourceTypes.h).
	private static int ResMakeInternal(int entry) {
		return 0x01000000 | entry & 0xFFFF;
	}

	// True for resource ids in the internal attribute range (Res_INTERNALID).
	protected static boolean isResInternalId(int resid) {
		return (resid & 0xFFFF0000) != 0 && (resid & 0xFF0000) == 0;
	}

	// Bit mask of allowed types, for use with ATTR_TYPE.
	protected static final int ATTR_TYPE_ANY = 0x0000FFFF;
	// Attribute holds a references to another resource.
	protected static final int ATTR_TYPE_REFERENCE = 1;
	// Attribute holds a generic string.
	protected static final int ATTR_TYPE_STRING = 1 << 1;
	// Attribute holds an integer value.  ATTR_MIN and ATTR_MIN can
	// optionally specify a constrained range of possible integer values.
	protected static final int ATTR_TYPE_INTEGER = 1 << 2;
	// Attribute holds a boolean integer.
	protected static final int ATTR_TYPE_BOOLEAN = 1 << 3;
	// Attribute holds a color value.
	protected static final int ATTR_TYPE_COLOR = 1 << 4;
	// Attribute holds a floating point value.
	protected static final int ATTR_TYPE_FLOAT = 1 << 5;
	// Attribute holds a dimension value, such as "20px".
	protected static final int ATTR_TYPE_DIMENSION = 1 << 6;
	// Attribute holds a fraction value, such as "20%".
	protected static final int ATTR_TYPE_FRACTION = 1 << 7;
	// Attribute holds an enumeration.  The enumeration values are
	// supplied as additional entries in the map.
	protected static final int ATTR_TYPE_ENUM = 1 << 16;
	// Attribute holds a bitmaks of flags.  The flag bit values are
	// supplied as additional entries in the map.
	protected static final int ATTR_TYPE_FLAGS = 1 << 17;

	// Enum of localization modes, for use with ATTR_L10N
	protected static final int ATTR_L10N_NOT_REQUIRED = 0;
	protected static final int ATTR_L10N_SUGGESTED = 1;
}
// Copyright 2012 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.cloudera.impala.catalog;

import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.log4j.Logger;
import org.apache.thrift.TException;

import com.cloudera.impala.catalog.MetaStoreClientPool.MetaStoreClient;
import com.cloudera.impala.common.ImpalaException;
import com.cloudera.impala.thrift.TCatalogObject;
import com.cloudera.impala.thrift.TCatalogObjectType;
import com.cloudera.impala.thrift.TDataSource;
import com.cloudera.impala.thrift.TDatabase;
import com.cloudera.impala.thrift.TFunction;
import com.cloudera.impala.thrift.TPrivilege;
import com.cloudera.impala.thrift.TRole;
import com.cloudera.impala.thrift.TTable;
import com.cloudera.impala.thrift.TUniqueId;
import com.cloudera.impala.thrift.TUpdateCatalogCacheRequest;
import com.cloudera.impala.thrift.TUpdateCatalogCacheResponse;

/**
 * Thread safe Catalog for an Impalad. The Impalad catalog can be updated either via
 * a StateStore heartbeat or by directly applying the result of a catalog operation to
 * the CatalogCache. All updates are applied using the updateCatalog() function.
 * Table metadata is loaded lazily. The CatalogServer initially broadcasts (via the
 * statestore) the known table names (as IncompleteTables). These table names are added
 * to the Impalad catalog cache and when one of the tables is accessed, the impalad will
 * make an RPC to the CatalogServer to request loading the complete table metadata.
 * In both cases, we need to ensure that work from one update is not "undone" by another
 * update. To handle this the ImpaladCatalog does the following:
 * - Tracks the overall catalog version last received in a state store heartbeat, this
 *   version is maintained by the catalog server and it is always guaranteed that
 *   this impalad's catalog will never contain any objects < than this version
 *   (any updates with a lower version number are ignored).
 * - For updated/new objects, check if the object already exists in the
 *   catalog cache. If it does, only apply the update if the catalog version is > the
 *   existing object's catalog version. Also keep a log of all dropped catalog objects
 *   (and the version they were dropped in). Before updating any object, check if it was
 *   dropped in a later version. If so, ignore the update.
 * - Before dropping any catalog object, see if the object already exists in the catalog
 *   cache. If it does, only drop the object if the version of the drop is > that
 *   object's catalog version.
 * The CatalogServiceId is also tracked to detect if a different instance of the catalog
 * service has been started, in which case a full topic update is required.
 */
public class ImpaladCatalog extends Catalog {
  private static final Logger LOG = Logger.getLogger(ImpaladCatalog.class);
  private static final TUniqueId INITIAL_CATALOG_SERVICE_ID = new TUniqueId(0L, 0L);

  // The last known Catalog Service ID. If the ID changes, it indicates the CatalogServer
  // has restarted.
  private TUniqueId catalogServiceId_ = INITIAL_CATALOG_SERVICE_ID;

  // The catalog version received in the last StateStore heartbeat. It is guaranteed
  // all objects in the catalog have at a minimum, this version. Because updates may
  // be applied out of band of a StateStore heartbeat, it is possible the catalog
  // contains some objects > than this version.
  private long lastSyncedCatalogVersion_ = Catalog.INITIAL_CATALOG_VERSION;

  // Flag to determine if the Catalog is ready to accept user requests. See isReady().
  private final AtomicBoolean isReady_ = new AtomicBoolean(false);

  // Tracks modifications to this Impalad's catalog from direct updates to the cache.
  private final CatalogDeltaLog catalogDeltaLog_ = new CatalogDeltaLog();

  // Object that is used to synchronize on and signal when a catalog update is received.
  private final Object catalogUpdateEventNotifier_ = new Object();

  /**
   * C'tor used by tests that need to validate the ImpaladCatalog outside of the
   * CatalogServer.
   */
  public ImpaladCatalog() {
    super(false);
  }

  /**
   * Updates the internal Catalog based on the given TCatalogUpdateReq.
   * This method:
   * 1) Updates all databases in the Catalog
   * 2) Updates all tables, views, and functions in the Catalog
   * 3) Removes all dropped tables, views, and functions
   * 4) Removes all dropped databases
   *
   * This method is called once per statestore heartbeat and is guaranteed the same
   * object will not be in both the "updated" list and the "removed" list (it is
   * a detail handled by the statestore).
   * Catalog updates are ordered by the object type with the dependent objects coming
   * first. That is, database "foo" will always come before table "foo.bar".
   * Synchronized because updateCatalog() can be called by during a statestore update or
   * during a direct-DDL operation and catalogServiceId_ and lastSyncedCatalogVersion_
   * must be protected.
   */
  public synchronized TUpdateCatalogCacheResponse updateCatalog(
      TUpdateCatalogCacheRequest req) throws CatalogException {
    // Check for changes in the catalog service ID.
    if (!catalogServiceId_.equals(req.getCatalog_service_id())) {
      boolean firstRun = catalogServiceId_.equals(INITIAL_CATALOG_SERVICE_ID);
      catalogServiceId_ = req.getCatalog_service_id();
      if (!firstRun) {
        // Throw an exception which will trigger a full topic update request.
        throw new CatalogException("Detected catalog service ID change. Aborting " +
            "updateCatalog()");
      }
    }

    // First process all updates.
    long newCatalogVersion = lastSyncedCatalogVersion_;
    for (TCatalogObject catalogObject: req.getUpdated_objects()) {
      if (catalogObject.getType() == TCatalogObjectType.CATALOG) {
        newCatalogVersion = catalogObject.getCatalog_version();
      } else {
        try {
          addCatalogObject(catalogObject);
        } catch (Exception e) {
          // A failure to apply one object must not abort the rest of the update.
          LOG.error("Error adding catalog object: " + e.getMessage(), e);
        }
      }
    }

    // Now remove all objects from the catalog. Removing a database before removing
    // its child tables/functions is fine. If that happens, the removal of the child
    // object will be a no-op.
    for (TCatalogObject catalogObject: req.getRemoved_objects()) {
      removeCatalogObject(catalogObject, newCatalogVersion);
    }
    lastSyncedCatalogVersion_ = newCatalogVersion;
    // Cleanup old entries in the log.
    catalogDeltaLog_.garbageCollect(lastSyncedCatalogVersion_);
    isReady_.set(true);

    // Notify all the threads waiting on a catalog update.
    synchronized (catalogUpdateEventNotifier_) {
      catalogUpdateEventNotifier_.notifyAll();
    }

    return new TUpdateCatalogCacheResponse(catalogServiceId_);
  }

  /**
   * Causes the calling thread to wait until a catalog update notification has been sent
   * or the given timeout has been reached. A timeout value of 0 indicates an indefinite
   * wait. Does not protect against spurious wakeups, so this should be called in a loop.
   */
  public void waitForCatalogUpdate(long timeoutMs) {
    synchronized (catalogUpdateEventNotifier_) {
      try {
        catalogUpdateEventNotifier_.wait(timeoutMs);
      } catch (InterruptedException e) {
        // Deliberately ignored: this wait is best-effort and callers loop around it.
      }
    }
  }

  /**
   * Returns the Table object for the given dbName/tableName. Returns null
   * if the table does not exist. Will throw a TableLoadingException if the table's
   * metadata was not able to be loaded successfully and DatabaseNotFoundException
   * if the parent database does not exist.
   */
  @Override
  public Table getTable(String dbName, String tableName)
      throws CatalogException {
    Table table = super.getTable(dbName, tableName);
    if (table == null) return null;

    if (table.isLoaded() && table instanceof IncompleteTable) {
      // If there were problems loading this table's metadata, throw an exception
      // when it is accessed.
      ImpalaException cause = ((IncompleteTable) table).getCause();
      if (cause instanceof TableLoadingException) throw (TableLoadingException) cause;
      throw new TableLoadingException("Missing metadata for table: " + tableName, cause);
    }
    return table;
  }

  /**
   * Returns the HDFS path where the metastore would create the given table. If the table
   * has a "location" set, that will be returned. Otherwise the path will be resolved
   * based on the location of the parent database. The metastore folder hierarchy is:
   * <warehouse directory>/<db name>.db/<table name>
   * Except for items in the default database which will be:
   * <warehouse directory>/<table name>
   * This method handles both of these cases.
   */
  public Path getTablePath(org.apache.hadoop.hive.metastore.api.Table msTbl)
      throws NoSuchObjectException, MetaException, TException {
    MetaStoreClient client = getMetaStoreClient();
    try {
      // If the table did not have its path set, build the path based on the
      // location property of the parent database.
      if (msTbl.getSd().getLocation() == null || msTbl.getSd().getLocation().isEmpty()) {
        String dbLocation =
            client.getHiveClient().getDatabase(msTbl.getDbName()).getLocationUri();
        return new Path(dbLocation, msTbl.getTableName().toLowerCase());
      } else {
        return new Path(msTbl.getSd().getLocation());
      }
    } finally {
      client.release();
    }
  }

  /**
   * Adds the given TCatalogObject to the catalog cache. The update may be ignored
   * (considered out of date) if:
   * 1) An item exists in the catalog cache with a version > than the given
   *    TCatalogObject's version.
   * 2) The catalogDeltaLog_ contains an entry for this object with a version
   *    > than the given TCatalogObject's version.
   */
  private void addCatalogObject(TCatalogObject catalogObject)
      throws TableLoadingException, DatabaseNotFoundException {
    // This item is out of date and should not be applied to the catalog.
    if (catalogDeltaLog_.wasObjectRemovedAfter(catalogObject)) {
      LOG.debug(String.format("Skipping update because a matching object was removed " +
          "in a later catalog version: %s", catalogObject));
      return;
    }

    switch(catalogObject.getType()) {
      case DATABASE:
        addDb(catalogObject.getDb(), catalogObject.getCatalog_version());
        break;
      case TABLE:
      case VIEW:
        addTable(catalogObject.getTable(), catalogObject.getCatalog_version());
        break;
      case FUNCTION:
        addFunction(catalogObject.getFn(), catalogObject.getCatalog_version());
        break;
      case DATA_SOURCE:
        addDataSource(catalogObject.getData_source(), catalogObject.getCatalog_version());
        break;
      case ROLE:
        Role role = Role.fromThrift(catalogObject.getRole());
        role.setCatalogVersion(catalogObject.getCatalog_version());
        authPolicy_.addRole(role);
        break;
      case PRIVILEGE:
        RolePrivilege privilege =
            RolePrivilege.fromThrift(catalogObject.getPrivilege());
        privilege.setCatalogVersion(catalogObject.getCatalog_version());
        try {
          authPolicy_.addPrivilege(privilege);
        } catch (CatalogException e) {
          LOG.error("Error adding privilege: ", e);
        }
        break;
      case HDFS_CACHE_POOL:
        HdfsCachePool cachePool = new HdfsCachePool(catalogObject.getCache_pool());
        cachePool.setCatalogVersion(catalogObject.getCatalog_version());
        hdfsCachePools_.add(cachePool);
        break;
      default:
        throw new IllegalStateException(
            "Unexpected TCatalogObjectType: " + catalogObject.getType());
    }
  }

  /**
   * Removes the matching TCatalogObject from the catalog, if one exists and its
   * catalog version is < the catalog version of this drop operation.
   * Note that drop operations that come from statestore heartbeats always have a
   * version of 0. To determine the drop version for statestore updates,
   * the catalog version from the current update is used. This is okay because there
   * can never be a catalog update from the statestore that contains a drop
   * and an addition of the same object. For more details on how drop
   * versioning works, see CatalogServerCatalog.java
   */
  private void removeCatalogObject(TCatalogObject catalogObject,
      long currentCatalogUpdateVersion) {
    // The TCatalogObject associated with a drop operation from a state store
    // heartbeat will always have a version of zero. Because no update from
    // the state store can contain both a drop and an addition of the same object,
    // we can assume the drop version is the current catalog version of this update.
    // If the TCatalogObject contains a version that != 0, it indicates the drop
    // came from a direct update.
    long dropCatalogVersion = catalogObject.getCatalog_version() == 0 ?
        currentCatalogUpdateVersion : catalogObject.getCatalog_version();

    switch(catalogObject.getType()) {
      case DATABASE:
        removeDb(catalogObject.getDb(), dropCatalogVersion);
        break;
      case TABLE:
      case VIEW:
        removeTable(catalogObject.getTable(), dropCatalogVersion);
        break;
      case FUNCTION:
        removeFunction(catalogObject.getFn(), dropCatalogVersion);
        break;
      case DATA_SOURCE:
        removeDataSource(catalogObject.getData_source(), dropCatalogVersion);
        break;
      case ROLE:
        removeRole(catalogObject.getRole(), dropCatalogVersion);
        break;
      case PRIVILEGE:
        removePrivilege(catalogObject.getPrivilege(), dropCatalogVersion);
        break;
      case HDFS_CACHE_POOL:
        String poolName = catalogObject.getCache_pool().getPool_name();
        HdfsCachePool existingItem = hdfsCachePools_.get(poolName);
        // Guard against drops for pools that were never added (or already removed);
        // the previous code dereferenced the lookup result without a null check.
        // NOTE(review): unlike the other cases, this compares against the raw object
        // version rather than dropCatalogVersion, and with the opposite direction --
        // confirm this is intentional.
        if (existingItem != null &&
            existingItem.getCatalogVersion() > catalogObject.getCatalog_version()) {
          hdfsCachePools_.remove(poolName);
        }
        break;
      default:
        throw new IllegalStateException(
            "Unexpected TCatalogObjectType: " + catalogObject.getType());
    }

    if (catalogObject.getCatalog_version() > lastSyncedCatalogVersion_) {
      catalogDeltaLog_.addRemovedObject(catalogObject);
    }
  }

  // Adds a database, unless a same-or-newer version already exists in the cache.
  private void addDb(TDatabase thriftDb, long catalogVersion) {
    Db existingDb = getDb(thriftDb.getDb_name());
    if (existingDb == null ||
        existingDb.getCatalogVersion() < catalogVersion) {
      Db newDb = Db.fromTDatabase(thriftDb, this);
      newDb.setCatalogVersion(catalogVersion);
      addDb(newDb);
    }
  }

  // Adds a table to its parent db; a no-op (with a debug log) if the db is missing.
  private void addTable(TTable thriftTable, long catalogVersion)
      throws TableLoadingException {
    Db db = getDb(thriftTable.db_name);
    if (db == null) {
      LOG.debug("Parent database of table does not exist: " +
          thriftTable.db_name + "." + thriftTable.tbl_name);
      return;
    }

    Table newTable = Table.fromThrift(db, thriftTable);
    newTable.setCatalogVersion(catalogVersion);
    db.addTable(newTable);
  }

  // Adds a function to its parent db, unless a same-or-newer version already exists.
  private void addFunction(TFunction fn, long catalogVersion) {
    Function function = Function.fromThrift(fn);
    function.setCatalogVersion(catalogVersion);
    Db db = getDb(function.getFunctionName().getDb());
    if (db == null) {
      LOG.debug("Parent database of function does not exist: " + function.getName());
      return;
    }
    Function existingFn = db.getFunction(fn.getSignature());
    if (existingFn == null ||
        existingFn.getCatalogVersion() < catalogVersion) {
      db.addFunction(function);
    }
  }

  private void addDataSource(TDataSource thrift, long catalogVersion) {
    DataSource dataSource = DataSource.fromThrift(thrift);
    dataSource.setCatalogVersion(catalogVersion);
    addDataSource(dataSource);
  }

  private void removeDataSource(TDataSource thrift, long dropCatalogVersion) {
    removeDataSource(thrift.getName());
  }

  // Removes a database if it exists with a catalog version older than the drop.
  private void removeDb(TDatabase thriftDb, long dropCatalogVersion) {
    Db db = getDb(thriftDb.getDb_name());
    if (db != null && db.getCatalogVersion() < dropCatalogVersion) {
      removeDb(db.getName());
    }
  }

  // Removes a table if it exists with a catalog version older than the drop.
  private void removeTable(TTable thriftTable, long dropCatalogVersion) {
    Db db = getDb(thriftTable.db_name);
    // The parent database doesn't exist, nothing to do.
    if (db == null) return;

    Table table = db.getTable(thriftTable.getTbl_name());
    if (table != null && table.getCatalogVersion() < dropCatalogVersion) {
      db.removeTable(thriftTable.tbl_name);
    }
  }

  // Removes a function if it exists with a catalog version older than the drop.
  private void removeFunction(TFunction thriftFn, long dropCatalogVersion) {
    Db db = getDb(thriftFn.name.getDb_name());
    // The parent database doesn't exist, nothing to do.
    if (db == null) return;

    // If the function exists and it has a catalog version less than the
    // version of the drop, remove the function.
    Function fn = db.getFunction(thriftFn.getSignature());
    if (fn != null && fn.getCatalogVersion() < dropCatalogVersion) {
      db.removeFunction(thriftFn.getSignature());
    }
  }

  // Removes a role if it exists with a catalog version older than the drop.
  private void removeRole(TRole thriftRole, long dropCatalogVersion) {
    Role existingRole = authPolicy_.getRole(thriftRole.getRole_name());
    // If the role exists and it has a catalog version less than the
    // version of the drop, remove the role.
    if (existingRole != null &&
        existingRole.getCatalogVersion() < dropCatalogVersion) {
      authPolicy_.removeRole(thriftRole.getRole_name());
    }
  }

  // Removes a privilege if it exists with a catalog version older than the drop.
  private void removePrivilege(TPrivilege thriftPrivilege, long dropCatalogVersion) {
    Role role = authPolicy_.getRole(thriftPrivilege.getRole_id());
    if (role == null) return;
    RolePrivilege existingPrivilege =
        role.getPrivilege(thriftPrivilege.getPrivilege_name());
    // If the privilege exists and it has a catalog version less than the
    // version of the drop, remove the privilege.
    if (existingPrivilege != null &&
        existingPrivilege.getCatalogVersion() < dropCatalogVersion) {
      role.removePrivilege(thriftPrivilege.getPrivilege_name());
    }
  }

  /**
   * Returns true if the ImpaladCatalog is ready to accept requests (has
   * received and processed a valid catalog topic update from the StateStore),
   * false otherwise.
   */
  public boolean isReady() { return isReady_.get(); }

  // Only used for testing.
  public void setIsReady(boolean isReady) { isReady_.set(isReady); }
  public AuthorizationPolicy getAuthPolicy() { return authPolicy_; }
}
package org.jboss.resteasy.core;

import org.jboss.resteasy.core.interception.AbstractWriterInterceptorContext;
import org.jboss.resteasy.core.interception.ContainerResponseContextImpl;
import org.jboss.resteasy.core.interception.ResponseContainerRequestContext;
import org.jboss.resteasy.core.interception.ServerWriterInterceptorContext;
import org.jboss.resteasy.core.registry.SegmentNode;
import org.jboss.resteasy.resteasy_jaxrs.i18n.Messages;
import org.jboss.resteasy.specimpl.BuiltResponse;
import org.jboss.resteasy.spi.HttpRequest;
import org.jboss.resteasy.spi.HttpResponse;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.resteasy.util.CommitHeaderOutputStream;
import org.jboss.resteasy.util.HttpHeaderNames;

import javax.ws.rs.NotAcceptableException;
import javax.ws.rs.container.ContainerResponseFilter;
import javax.ws.rs.core.GenericEntity;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.NewCookie;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.WriterInterceptor;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Iterator;
import java.util.List;

/**
 * Writes a JAX-RS {@link BuiltResponse} to the underlying {@link HttpResponse}:
 * runs the container response filter chain, negotiates a concrete Content-Type
 * when none was set, verifies a {@link MessageBodyWriter} exists, and streams
 * the entity through the writer-interceptor chain.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class ServerResponseWriter
{
   /**
    * Writes the response without applying exception mapping.  Headers are
    * committed lazily: on the first write to the entity output stream, or
    * explicitly at the end if the stream was never used.
    *
    * @throws NoMessageBodyWriterFoundFailure if no writer matches the entity
    *         class / media type combination
    * @throws IOException on any failure while streaming the entity
    */
   public static void writeNomapResponse(BuiltResponse jaxrsResponse, final HttpRequest request,
         final HttpResponse response, final ResteasyProviderFactory providerFactory) throws IOException
   {
      ResourceMethodInvoker method =
            (ResourceMethodInvoker) request.getAttribute(ResourceMethodInvoker.class.getName());
      // Negotiate a Content-Type before the filters run so they can observe it.
      if (jaxrsResponse.getEntity() != null && jaxrsResponse.getMediaType() == null)
      {
         setDefaultContentType(request, jaxrsResponse, providerFactory, method);
      }
      executeFilters(jaxrsResponse, request, response, providerFactory, method);

      if (jaxrsResponse.getEntity() == null)
      {
         // No body: just send the status line and the headers.
         response.setStatus(jaxrsResponse.getStatus());
         commitHeaders(jaxrsResponse, response);
         return;
      }

      Class<?> type = jaxrsResponse.getEntityClass();
      Object ent = jaxrsResponse.getEntity();
      Type generic = jaxrsResponse.getGenericType();
      Annotation[] annotations = jaxrsResponse.getAnnotations();
      // Fail fast if no writer is registered; the interceptor chain would
      // otherwise fail after the status has already been sent.
      @SuppressWarnings(value = "unchecked")
      MessageBodyWriter writer = providerFactory.getMessageBodyWriter(
            type, generic, annotations, jaxrsResponse.getMediaType());
      if (writer == null)
      {
         throw new NoMessageBodyWriterFoundFailure(type, jaxrsResponse.getMediaType());
      }

      response.setStatus(jaxrsResponse.getStatus());
      final BuiltResponse built = jaxrsResponse;
      // Commit headers exactly once, on the first write to the entity stream.
      CommitHeaderOutputStream.CommitCallback callback = new CommitHeaderOutputStream.CommitCallback()
      {
         private boolean committed;

         @Override
         public void commit()
         {
            if (committed) return;
            committed = true;
            commitHeaders(built, response);
         }
      };
      OutputStream os = new CommitHeaderOutputStream(response.getOutputStream(), callback);

      // Use the interceptors bound to the matched method, or the global
      // post-match chain when there is no method (e.g. exception responses).
      WriterInterceptor[] writerInterceptors = null;
      if (method != null)
      {
         writerInterceptors = method.getWriterInterceptors();
      }
      else
      {
         writerInterceptors = providerFactory.getServerWriterInterceptorRegistry().postMatch(null, null);
      }

      AbstractWriterInterceptorContext writerContext = new ServerWriterInterceptorContext(writerInterceptors,
            providerFactory, ent, type, generic, annotations, jaxrsResponse.getMediaType(),
            jaxrsResponse.getMetadata(), os, request);
      writerContext.proceed();
      callback.commit(); // just in case the output stream is never used
   }

   /**
    * Runs the {@link ContainerResponseFilter} chain bound to the matched
    * resource method, or the global post-match chain when there is no method.
    */
   private static void executeFilters(BuiltResponse jaxrsResponse, HttpRequest request, HttpResponse response,
         ResteasyProviderFactory providerFactory, ResourceMethodInvoker method) throws IOException
   {
      ContainerResponseFilter[] responseFilters = null;
      if (method != null)
      {
         responseFilters = method.getResponseFilters();
      }
      else
      {
         responseFilters = providerFactory.getContainerResponseFilterRegistry().postMatch(null, null);
      }

      if (responseFilters != null)
      {
         ResponseContainerRequestContext requestContext = new ResponseContainerRequestContext(request);
         ContainerResponseContextImpl responseContext =
               new ContainerResponseContextImpl(request, response, jaxrsResponse);
         for (ContainerResponseFilter filter : responseFilters)
         {
            filter.filter(requestContext, responseContext);
         }
      }
   }

   /**
    * Chooses a concrete Content-Type for a response whose resource method did
    * not set one.  Preference order: the media type chosen during request
    * matching (if concrete), the most specific non-wildcard entry from the
    * method's produces list, a concrete type advertised by a registered
    * message body writer, and finally application/octet-stream.
    *
    * @throws NotAcceptableException if only a non-"application" wildcard
    *         subtype could be determined
    */
   protected static void setDefaultContentType(HttpRequest request, BuiltResponse jaxrsResponse,
         ResteasyProviderFactory providerFactory, ResourceMethodInvoker method)
   {
      // Start from the media type chosen during request matching, if concrete.
      MediaType chosen = (MediaType) request.getAttribute(SegmentNode.RESTEASY_CHOSEN_ACCEPT);
      if (chosen != null && chosen.isWildcardSubtype()) chosen = null;
      if (chosen == null && method != null)
      {
         // pick most specific
         for (MediaType produce : method.getProduces())
         {
            if (!produce.isWildcardType())
            {
               chosen = produce;
               if (!produce.isWildcardSubtype())
               {
                  break;
               }
            }
         }
      }
      if (chosen == null)
      {
         chosen = MediaType.WILDCARD_TYPE;
         Class<?> type = jaxrsResponse.getEntityClass();
         Type generic = jaxrsResponse.getGenericType();
         if (generic == null)
         {
            // For Response-returning methods the declared return type is not
            // the entity type; fall back to the entity class in that case.
            if (method != null && !Response.class.isAssignableFrom(method.getMethod().getReturnType()))
               generic = method.getGenericReturnType();
            else
               generic = type;
         }
         Annotation[] annotations = jaxrsResponse.getAnnotations();
         if (annotations == null && method != null)
         {
            annotations = method.getMethodAnnotations();
         }
         // Ask the registered writers whether any advertises a concrete type.
         MediaType mt = providerFactory.getConcreteMediaTypeFromMessageBodyWriters(
               type, generic, annotations, chosen);
         if (mt != null)
         {
            jaxrsResponse.getHeaders().putSingle(HttpHeaders.CONTENT_TYPE, mt);
            return;
         }
      }
      if (chosen.isWildcardType())
      {
         chosen = MediaType.APPLICATION_OCTET_STREAM_TYPE;
      }
      else if (chosen.isWildcardSubtype() && chosen.getSubtype().equals("application"))
      {
         chosen = MediaType.APPLICATION_OCTET_STREAM_TYPE;
      }
      else if (chosen.isWildcardSubtype())
      {
         throw new NotAcceptableException(Messages.MESSAGES.illegalResponseMediaType(chosen.toString()));
      }
      jaxrsResponse.getHeaders().putSingle(HttpHeaders.CONTENT_TYPE, chosen);
   }

   /**
    * Extracts the Content-Type from the response metadata, accepting either a
    * {@link MediaType} or a string value.  Defaults to the wildcard type when
    * no Content-Type is present.
    */
   public static MediaType resolveContentType(BuiltResponse response)
   {
      Object type = response.getMetadata().getFirst(HttpHeaderNames.CONTENT_TYPE);
      if (type == null)
      {
         return MediaType.WILDCARD_TYPE;
      }
      if (type instanceof MediaType)
      {
         return (MediaType) type;
      }
      return MediaType.valueOf(type.toString());
   }

   /**
    * Copies the response metadata onto the actual HTTP response.  Set-Cookie
    * values that are {@link NewCookie} instances are delivered through the
    * cookie API and removed from the header map so they are not emitted twice.
    */
   public static void commitHeaders(BuiltResponse jaxrsResponse, HttpResponse response)
   {
      if (jaxrsResponse.getMetadata() != null)
      {
         List<Object> cookies = jaxrsResponse.getMetadata().get(HttpHeaderNames.SET_COOKIE);
         if (cookies != null)
         {
            Iterator<Object> it = cookies.iterator();
            while (it.hasNext())
            {
               Object next = it.next();
               if (next instanceof NewCookie)
               {
                  NewCookie cookie = (NewCookie) next;
                  response.addNewCookie(cookie);
                  it.remove(); // sent via the cookie API; don't emit as header
               }
            }
            if (cookies.isEmpty())
               jaxrsResponse.getMetadata().remove(HttpHeaderNames.SET_COOKIE);
         }
      }
      if (jaxrsResponse.getMetadata() != null && jaxrsResponse.getMetadata().size() > 0)
      {
         response.getOutputHeaders().putAll(jaxrsResponse.getMetadata());
      }
   }
}
package com.netthreads.test.domain;

import java.util.List;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.netthreads.test.helper.CreateHelper;
import com.netthreads.test.system.PersistenceConfiguration;
import com.netthreads.trader.dao.RatingRepository;
import com.netthreads.trader.domain.Client;
import com.netthreads.trader.domain.Rating;
import com.netthreads.trader.domain.Task;
import com.netthreads.trader.domain.TaskState.State;
import com.netthreads.trader.domain.UserDetails;
import com.netthreads.trader.exception.DataLayerException;
import com.netthreads.trader.service.ClientService;
import com.netthreads.trader.service.TaskStateService;

/**
 * Tests CRUD behaviour of {@link Rating} entities attached to a {@link Client}.
 *
 * @author Alistair
 */
@RunWith(SpringJUnit4ClassRunner.class)
@Configuration
@ContextConfiguration(classes = { PersistenceConfiguration.class })
public class TestRating
{
	private static final Logger LOG = LoggerFactory.getLogger(TestRating.class);

	// Index used to derive deterministic test data (user name, task, rating).
	private static final int TEST_INDEX = 5;

	@Autowired
	private CreateHelper createHelper;

	@Autowired
	private ClientService clientService;

	@Autowired
	private TaskStateService taskStateService;

	@Autowired
	private RatingRepository ratingRepository;

	@Before
	public void before() throws DataLayerException
	{
		LOG.info("Build injector");

		populate();
	}

	@After
	public void after() throws DataLayerException
	{
		depopulate();
	}

	/**
	 * Ratings are created against Clients.
	 *
	 * @throws DataLayerException
	 */
	@Test
	public void testWrite() throws DataLayerException
	{
		// Client
		Client client = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		// Client associated task
		Task task = createHelper.createTask(TEST_INDEX);
		task.setTaskState(taskStateService.findByState(State.UNPUBLISHED));
		client.getTasks().add(task);
		task.setOwner(client);

		// Client associated rating
		Rating rating = createHelper.createRating(TEST_INDEX);
		rating.setTask(task);
		client.getRatings().add(rating);
		rating.setOwner(client);

		clientService.update(client);

		// Read back and check rating list size.
		Client readBackClient = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		org.junit.Assert.assertTrue(readBackClient.getRatings().size() == 1);
	}

	/**
	 * Read.
	 *
	 * @throws DataLayerException
	 */
	@Test
	public void testRead()
	{
		// Client
		Client client = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		for (Rating rating : client.getRatings())
		{
			Rating readBackRating = ratingRepository.findOne(rating.getId());

			org.junit.Assert.assertNotNull(readBackRating);
		}
	}

	/**
	 * Update.
	 *
	 * @throws DataLayerException
	 */
	@Test
	public void testUpdate() throws DataLayerException
	{
		// Client
		Client client = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		for (Rating rating : client.getRatings())
		{
			Rating readBackRating = ratingRepository.findOne(rating.getId());

			readBackRating.setScore(1000);

			ratingRepository.save(readBackRating);
		}

		Client updatedClient = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		for (Rating rating : updatedClient.getRatings())
		{
			Rating readBackRating = ratingRepository.findOne(rating.getId());

			// Was assertNotNull(boolean), which autoboxes and can never fail;
			// assert the updated score value instead.
			org.junit.Assert.assertTrue(readBackRating.getScore() == 1000);
		}
	}

	/**
	 * Delete.
	 *
	 * @throws DataLayerException
	 */
	@Test
	public void testDelete() throws DataLayerException
	{
		// Remove all ratings from the client and confirm none remain.
		Client client = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		client.getRatings().clear();

		clientService.update(client);

		List<Rating> ratings = (List<Rating>) ratingRepository.findAll();

		org.junit.Assert.assertTrue(ratings.isEmpty());
	}

	/**
	 * Populate test data.
	 *
	 * @throws DataLayerException
	 */
	private void populate() throws DataLayerException
	{
		// ---------------------------------------------------------------
		// User
		// ---------------------------------------------------------------
		UserDetails userDetails = createHelper.createUserDetails(TEST_INDEX);

		// ---------------------------------------------------------------
		// Client user
		// ---------------------------------------------------------------
		Client client = new Client();
		client.setUserDetails(userDetails);

		// Client service.
		clientService.create(client);
	}

	/**
	 * Depopulate test data.
	 *
	 * @throws DataLayerException
	 */
	public void depopulate() throws DataLayerException
	{
		Client client = clientService.findByName(createHelper.createUserName(TEST_INDEX));

		clientService.delete(client);
	}
}
package org.ciwise.blackhole.web.rest;

import org.ciwise.blackhole.BlackholeApp;
import org.ciwise.blackhole.domain.Authority;
import org.ciwise.blackhole.domain.User;
import org.ciwise.blackhole.repository.AuthorityRepository;
import org.ciwise.blackhole.repository.UserRepository;
import org.ciwise.blackhole.security.AuthoritiesConstants;
import org.ciwise.blackhole.service.MailService;
import org.ciwise.blackhole.service.UserService;
import org.ciwise.blackhole.web.rest.dto.ManagedUserDTO;
import org.ciwise.blackhole.web.rest.dto.UserDTO;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;

import javax.inject.Inject;
import javax.transaction.Transactional;

import java.util.*;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;

/**
 * Test class for the AccountResource REST controller.
 *
 * @see UserService
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = BlackholeApp.class)
public class AccountResourceIntTest {

    @Inject
    private UserRepository userRepository;

    @Inject
    private AuthorityRepository authorityRepository;

    @Inject
    private UserService userService;

    @Mock
    private UserService mockUserService;

    @Mock
    private MailService mockMailService;

    // MockMvc wired with the mocked UserService (account / authenticate tests).
    private MockMvc restUserMockMvc;

    // MockMvc wired with the real UserService (registration tests).
    private MockMvc restMvc;

    /**
     * Builds a registration DTO with the defaults shared by all of these
     * tests: null id, activated, "en" language key and null audit fields.
     */
    private static ManagedUserDTO newUserDTO(String login, String password, String firstName,
                                             String lastName, String email, String authority) {
        return new ManagedUserDTO(
            null,         // id
            login,
            password,
            firstName,
            lastName,
            email,
            true,         // activated
            "en",         // langKey
            new HashSet<>(Arrays.asList(authority)),
            null,         // createdDate
            null,         // lastModifiedBy
            null);        // lastModifiedDate
    }

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        doNothing().when(mockMailService).sendActivationEmail((User) anyObject(), anyString());

        AccountResource accountResource = new AccountResource();
        ReflectionTestUtils.setField(accountResource, "userRepository", userRepository);
        ReflectionTestUtils.setField(accountResource, "userService", userService);
        ReflectionTestUtils.setField(accountResource, "mailService", mockMailService);

        AccountResource accountUserMockResource = new AccountResource();
        ReflectionTestUtils.setField(accountUserMockResource, "userRepository", userRepository);
        ReflectionTestUtils.setField(accountUserMockResource, "userService", mockUserService);
        ReflectionTestUtils.setField(accountUserMockResource, "mailService", mockMailService);

        this.restMvc = MockMvcBuilders.standaloneSetup(accountResource).build();
        this.restUserMockMvc = MockMvcBuilders.standaloneSetup(accountUserMockResource).build();
    }

    @Test
    public void testNonAuthenticatedUser() throws Exception {
        restUserMockMvc.perform(get("/api/authenticate")
                .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(content().string(""));
    }

    @Test
    public void testAuthenticatedUser() throws Exception {
        restUserMockMvc.perform(get("/api/authenticate")
                .with(request -> {
                    request.setRemoteUser("test");
                    return request;
                })
                .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(content().string("test"));
    }

    @Test
    public void testGetExistingAccount() throws Exception {
        Set<Authority> authorities = new HashSet<>();
        Authority authority = new Authority();
        authority.setName(AuthoritiesConstants.ADMIN);
        authorities.add(authority);

        User user = new User();
        user.setLogin("test");
        user.setFirstName("john");
        user.setLastName("doe");
        user.setEmail("john.doe@jhipter.com");
        user.setAuthorities(authorities);
        when(mockUserService.getUserWithAuthorities()).thenReturn(user);

        restUserMockMvc.perform(get("/api/account")
                .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andExpect(jsonPath("$.login").value("test"))
            .andExpect(jsonPath("$.firstName").value("john"))
            .andExpect(jsonPath("$.lastName").value("doe"))
            .andExpect(jsonPath("$.email").value("john.doe@jhipter.com"))
            .andExpect(jsonPath("$.authorities").value(AuthoritiesConstants.ADMIN));
    }

    @Test
    public void testGetUnknownAccount() throws Exception {
        when(mockUserService.getUserWithAuthorities()).thenReturn(null);

        restUserMockMvc.perform(get("/api/account")
                .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isInternalServerError());
    }

    @Test
    @Transactional
    public void testRegisterValid() throws Exception {
        ManagedUserDTO validUser =
            newUserDTO("joe", "password", "Joe", "Shmoe", "joe@example.com", AuthoritiesConstants.USER);

        restMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(validUser)))
            .andExpect(status().isCreated());

        Optional<User> user = userRepository.findOneByLogin("joe");
        assertThat(user.isPresent()).isTrue();
    }

    @Test
    @Transactional
    public void testRegisterInvalidLogin() throws Exception {
        // "funky-log!n" <-- invalid login
        ManagedUserDTO invalidUser =
            newUserDTO("funky-log!n", "password", "Funky", "One", "funky@example.com", AuthoritiesConstants.USER);

        restUserMockMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
            .andExpect(status().isBadRequest());

        Optional<User> user = userRepository.findOneByEmail("funky@example.com");
        assertThat(user.isPresent()).isFalse();
    }

    @Test
    @Transactional
    public void testRegisterInvalidEmail() throws Exception {
        // "invalid" <-- invalid e-mail
        ManagedUserDTO invalidUser =
            newUserDTO("bob", "password", "Bob", "Green", "invalid", AuthoritiesConstants.USER);

        restUserMockMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
            .andExpect(status().isBadRequest());

        Optional<User> user = userRepository.findOneByLogin("bob");
        assertThat(user.isPresent()).isFalse();
    }

    @Test
    @Transactional
    public void testRegisterInvalidPassword() throws Exception {
        // "123" <-- password with only 3 characters
        ManagedUserDTO invalidUser =
            newUserDTO("bob", "123", "Bob", "Green", "bob@example.com", AuthoritiesConstants.USER);

        restUserMockMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
            .andExpect(status().isBadRequest());

        Optional<User> user = userRepository.findOneByLogin("bob");
        assertThat(user.isPresent()).isFalse();
    }

    @Test
    @Transactional
    public void testRegisterDuplicateLogin() throws Exception {
        // Good
        ManagedUserDTO validUser =
            newUserDTO("alice", "password", "Alice", "Something", "alice@example.com", AuthoritiesConstants.USER);

        // Duplicate login, different e-mail
        ManagedUserDTO duplicatedUser = new ManagedUserDTO(validUser.getId(), validUser.getLogin(),
            validUser.getPassword(), validUser.getLogin(), validUser.getLastName(),
            "alicejr@example.com", true, validUser.getLangKey(), validUser.getAuthorities(),
            validUser.getCreatedDate(), validUser.getLastModifiedBy(), validUser.getLastModifiedDate());

        // Good user
        restMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(validUser)))
            .andExpect(status().isCreated());

        // Duplicate login
        restMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(duplicatedUser)))
            .andExpect(status().is4xxClientError());

        Optional<User> userDup = userRepository.findOneByEmail("alicejr@example.com");
        assertThat(userDup.isPresent()).isFalse();
    }

    @Test
    @Transactional
    public void testRegisterDuplicateEmail() throws Exception {
        // Good
        ManagedUserDTO validUser =
            newUserDTO("john", "password", "John", "Doe", "john@example.com", AuthoritiesConstants.USER);

        // Duplicate e-mail, different login
        ManagedUserDTO duplicatedUser = new ManagedUserDTO(validUser.getId(), "johnjr",
            validUser.getPassword(), validUser.getLogin(), validUser.getLastName(),
            validUser.getEmail(), true, validUser.getLangKey(), validUser.getAuthorities(),
            validUser.getCreatedDate(), validUser.getLastModifiedBy(), validUser.getLastModifiedDate());

        // Good user
        restMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(validUser)))
            .andExpect(status().isCreated());

        // Duplicate e-mail
        restMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(duplicatedUser)))
            .andExpect(status().is4xxClientError());

        Optional<User> userDup = userRepository.findOneByLogin("johnjr");
        assertThat(userDup.isPresent()).isFalse();
    }

    @Test
    @Transactional
    public void testRegisterAdminIsIgnored() throws Exception {
        // Requested ADMIN authority must be downgraded to USER on registration.
        ManagedUserDTO validUser =
            newUserDTO("badguy", "password", "Bad", "Guy", "badguy@example.com", AuthoritiesConstants.ADMIN);

        restMvc.perform(
            post("/api/register")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(validUser)))
            .andExpect(status().isCreated());

        Optional<User> userDup = userRepository.findOneByLogin("badguy");
        assertThat(userDup.isPresent()).isTrue();
        assertThat(userDup.get().getAuthorities()).hasSize(1)
            .containsExactly(authorityRepository.findOne(AuthoritiesConstants.USER));
    }

    @Test
    @Transactional
    public void testSaveInvalidLogin() throws Exception {
        UserDTO invalidUser = new UserDTO(
            "funky-log!n",          // login <-- invalid
            "Funky",                // firstName
            "One",                  // lastName
            "funky@example.com",    // e-mail
            true,                   // activated
            "en",                   // langKey
            new HashSet<>(Arrays.asList(AuthoritiesConstants.USER))
        );

        restUserMockMvc.perform(
            post("/api/account")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
            .andExpect(status().isBadRequest());

        Optional<User> user = userRepository.findOneByEmail("funky@example.com");
        assertThat(user.isPresent()).isFalse();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.engine.impl.log; import java.text.CharacterIterator; import java.text.DecimalFormat; import java.text.SimpleDateFormat; import java.text.StringCharacterIterator; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.Enumeration; import java.util.List; import java.util.Locale; import javax.servlet.http.Cookie; import org.apache.sling.engine.impl.request.RequestData; /** * The <code>CustomLogFormat</code> class implements the support for log format * strings similar to the Apache httpd CustomLog configuration. * * @see <a * href="http://sling.apache.org/site/client-request-logging.html">Client * Request Logging</a> for documentation of supported formats. */ class CustomLogFormat { /* * NOTE: Documentation at * https://cwiki.apache.org/confluence/display/SLINGxSITE * /Client+Request+Logging should be kept in sync with this class ! */ /** * The parsed list of log format parts whose <code>print</code> method is * called when building the log message line. */ Parameter[] logParameters; /** * Creates a new instance from of this class parsing the log format pattern. * * @param pattern The pattern to be parsed. 
*/ CustomLogFormat(String pattern) { this.logParameters = this.parse(pattern); if (this.logParameters.length == 0) { this.logParameters = null; } } /** * Creates a log message from the given <code>request</code> and * <code>response</code> objects according to the log format from which this * instance has been created. * * @param request The {@link RequestLoggerRequest} used to extract values * for the log message. * @param response The {@link RequestLoggerResponse} used to extract values * for the log message. * @return The formatted log message or <code>null</code> if this log * formatter has not been initialized with a valid log format * pattern. */ String format(RequestLoggerRequest request, RequestLoggerResponse response) { if (this.logParameters != null) { StringBuilder buf = new StringBuilder(); for (int i = 0; i < this.logParameters.length; i++) { this.logParameters[i].print(buf, request, response); } return buf.toString(); } return null; } /** * Returns a string representation of this log format instance. The returned * String is actually rebuilt from the parsed format string and may be used * to create another instance of this class with the same format string. * * @return String representation of this instance. 
*/ public String toString() { StringBuilder buf = new StringBuilder(); for (int i = 0; this.logParameters != null && i < this.logParameters.length; i++) { buf.append(this.logParameters[i]); } return buf.toString(); } // ---------- Parsing the format pattern ----------------------------------- private Parameter[] parse(String pattern) { List<Parameter> parameterList = new ArrayList<Parameter>(); StringBuilder buf = new StringBuilder(); CharacterIterator sr = new StringCharacterIterator(pattern); for (int c = sr.first(); c != CharacterIterator.DONE; c = sr.next()) { if (c == '%') { int c1 = sr.next(); if (c1 != '%') { if (buf.length() > 0) { Parameter text = new PlainTextParameter(buf.toString()); parameterList.add(text); buf.setLength(0); } Parameter param = this.parseFormatString(sr, c1); if (param != null) { parameterList.add(param); } continue; } } buf.append((char) c); } // append any remaining plain text if (buf.length() > 0) { Parameter text = new PlainTextParameter(buf.toString()); parameterList.add(text); buf.setLength(0); } return parameterList.toArray(new Parameter[parameterList.size()]); } private Parameter parseFormatString(CharacterIterator sr, int c) { // read all modifiers boolean required = true; int[] statCodes = null; while (c != CharacterIterator.DONE) { if (c == '!') { required = false; } else if (c >= '0' && c <= '9') { statCodes = this.parseStatusCodes(sr, c); } else if (c == '>' || c == '<') { // ignore first/last modifiers } else { break; } c = sr.next(); } // read name String name; if (c == '{') { StringBuilder nameBuf = new StringBuilder(); for (c = sr.next(); c != CharacterIterator.DONE && c != '}'; c = sr.next()) { nameBuf.append((char) c); } name = (nameBuf.length() > 0) ? 
nameBuf.toString() : null; // get the format indicator c = sr.next(); } else { name = null; } Parameter param; switch (c) { case 'a': param = new RemoteIPParameter(); break; case 'A': param = new LocalIPParameter(); break; case 'b': case 'B': param = new ByteCountParameter(); break; case 'C': param = (name == null) ? null : new CookieParameter(name, true); break; case 'D': param = new DurationParameter(false); break; case 'f': // we assume the path to the content the request resolved to param = new ContentPathParameter(); break; case 'h': param = new RemoteHostParameter(); break; case 'H': param = new ProtocolParameter(); break; case 'i': param = (name == null) ? null : new HeaderParameter(name, true); break; case 'm': param = new MethodParameter(); break; case 'M': param = new ParamParameter(name); break; case 'o': param = (name == null) ? null : new HeaderParameter(name, false); break; case 'p': param = new LocalPortParameter(); break; case 'P': // %{format}P form is not currently supported param = new ThreadParameter(name); break; case 'q': param = new QueryParameter(); break; case 'r': param = new FirstRequestLineParameter(); break; case 'R': param = new IdParameter(); break; case 's': param = new StatusParameter(); break; case 't': // %{format}t form is not currently supported param = new TimeParameter(name); break; case 'T': param = new DurationParameter(true); break; case 'u': param = new UserParameter(); break; case 'U': param = new RequestParameter(); break; case 'v': case 'V': param = new ServerNameParameter(); break; case 'y': param = new AuthTypeParameter(); break; case 'X': // no supported fall through to default case 'I': // no supported fall through to default case 'O': // no supported fall through to default case 'n': // no supported fall through to default case 'l': // no supported fall through to default case 'e': // no supported fall through to default default: param = new NonImplementedParameter(name); break; } if (param instanceof 
BaseParameter) { BaseParameter baseParam = (BaseParameter) param; baseParam.setParName((char) c); baseParam.setRequired(required); baseParam.setStatusLimits(statCodes); } return param; } private int[] parseStatusCodes(CharacterIterator sr, int c) { StringBuilder buf = new StringBuilder(); buf.append((char) c); List<Integer> numbers = new ArrayList<Integer>(); for (c = sr.next(); c != CharacterIterator.DONE; c = sr.next()) { if (c == ',') { int num = 0; try { num = Integer.parseInt(buf.toString()); } catch (NumberFormatException nfe) { // don't care } if (num >= 100 && num <= 999) { numbers.add(num); } buf.setLength(0); } else if (c >= '0' && c <= '9') { buf.append((char) c); } else { // end of number list break; } } // reset to the last mark sr.previous(); // get the last number int num = 0; try { num = Integer.parseInt(buf.toString()); } catch (NumberFormatException nfe) { // don't care } if (num >= 100 && num <= 999) { numbers.add(new Integer(num)); } if (numbers.isEmpty()) { return null; } int[] statusCodes = new int[numbers.size()]; for (int i = 0; i < numbers.size(); i++) { statusCodes[i] = (numbers.get(i)).intValue(); } return statusCodes; } // ---------- Parameter support -------------------------------------------- static interface Parameter { void print(StringBuilder dest, RequestLoggerRequest request, RequestLoggerResponse response); } static class PlainTextParameter implements Parameter { private String value; PlainTextParameter(String value) { this.value = value; } public void print(StringBuilder dest, RequestLoggerRequest request, RequestLoggerResponse response) { dest.append(this.value); } public String toString() { return this.value; } } abstract static class BaseParameter implements Parameter { private int[] statusLimits; private boolean required; private char parName; private final String parParam; private final boolean isRequest; protected BaseParameter(String parParam, boolean isRequest) { this.parParam = parParam; this.isRequest = isRequest; } 
// NOTE(review): the members below are the body of BaseParameter; its class
// header and the fields (parName, statusLimits, required, parParam, isRequest)
// are declared just before this span in the file.

/** Stores the single-character format indicator (used by toString()). */
public void setParName(char parName) {
    this.parName = parName;
}

/** Restricts output to (or away from) the given HTTP status codes; see printOk(). */
public void setStatusLimits(int[] statusLimits) {
    this.statusLimits = statusLimits;
}

/**
 * Controls the meaning of the status limits: required=true prints only for the
 * listed statuses, required=false prints for all but the listed statuses.
 */
public void setRequired(boolean required) {
    this.required = required;
}

/** Extracts this parameter's value from the request (request-side parameters). */
protected abstract String getValue(RequestLoggerRequest request);

/** Extracts this parameter's value from the response (response-side parameters). */
protected abstract String getValue(RequestLoggerResponse response);

/**
 * Appends this parameter's value to the log line, substituting "-" for a null
 * value. Nothing is appended when the response status is filtered out by the
 * configured status limits (see printOk()).
 */
public final void print(StringBuilder dest, RequestLoggerRequest request,
        RequestLoggerResponse response) {
    if (this.printOk(response.getStatus())) {
        String value = this.isRequest ? this.getValue(request) : this.getValue(response);
        dest.append((value == null) ? "-" : value);
    }
}

/**
 * Decides whether a value should be printed for the given response status:
 * always true when no status limits are configured; otherwise returns
 * required for a listed status and !required for an unlisted one.
 */
protected boolean printOk(int status) {
    if (this.statusLimits == null) {
        return true;
    }
    for (int i = 0; i < this.statusLimits.length; i++) {
        if (status == this.statusLimits[i]) {
            return this.required;
        }
    }
    return !this.required;
}

/** The single-character format indicator set via setParName(). */
protected char getParName() {
    return this.parName;
}

/** The optional {...} argument given at construction time (may be null). */
protected String getParParam() {
    return this.parParam;
}

/** Reconstructs the %-format specification this parameter was parsed from. */
public String toString() {
    StringBuilder result = new StringBuilder("%");
    if (this.statusLimits != null) {
        if (!this.required) {
            result.append('!');
        }
        for (int i = 0; i < this.statusLimits.length; i++) {
            if (i > 0) {
                result.append(',');
            }
            result.append(this.statusLimits[i]);
        }
    }
    if (this.parParam != null) {
        result.append('{').append(this.parParam).append('}');
    }
    result.append(this.parName);
    return result.toString();
}

// --------- helper ----------------------------------------------------

/** Returns true for printable US-ASCII characters other than backslash and double quote. */
private static boolean isPrint(char c) {
    return c >= 0x20 && c < 0x7f && c != '\\' && c != '"';
}

/**
 * Escapes non-printable characters, double quotes and backslashes in the given
 * value using C-style escapes (\n, \r, \t, \f, \b, \", \\) and \\uXXXX for
 * everything else. Returns the value unchanged (same instance) when nothing
 * needs escaping, including for null or empty input.
 */
static String escape(String value) {
    // nothing to do for empty values
    if (value == null || value.length() == 0) {
        return value;
    }
    // find the first non-printable
    int i = 0;
    while (i < value.length() && isPrint(value.charAt(i))) {
        i++;
    }
    // if none has been found, just return the value
    if (i >= value.length()) {
        return value;
    }
    // otherwise copy the printable first part in a string buffer
    // and start encoding
    StringBuilder buf = new StringBuilder(value.substring(0, i));
    while (i < value.length()) {
        char c = value.charAt(i);
        if (isPrint(c)) {
            buf.append(c);
        } else if (c == '\n') {
            // LF
            buf.append("\\n");
        } else if (c == '\r') {
            // CR
            buf.append("\\r");
        } else if (c == '\t') {
            // HTAB
            buf.append("\\t");
        } else if (c == '\f') {
            // VTAB
            buf.append("\\f");
        } else if (c == '\b') {
            // BSP
            buf.append("\\b");
        } else if (c == '"') {
            // "
            buf.append("\\\"");
        } else if (c == '\\') {
            // \
            buf.append("\\\\");
        } else {
            // encode
            buf.append("\\u");
            if (c < 0x10) {
                buf.append('0'); // leading zero
            }
            if (c < 0x100) {
                buf.append('0'); // leading zero
            }
            if (c < 0x1000) {
                buf.append('0'); // leading zero
            }
            buf.append(Integer.toHexString(c));
        }
        i++;
    }
    // return the encoded string value
    return buf.toString();
}
}

/** Recognized but unimplemented placeholders (%X, %I, %O, %n, %l, %e, ...); value is always null, printed as "-". */
static class NonImplementedParameter extends BaseParameter {
    NonImplementedParameter(String parParam) {
        super(parParam, true);
    }
    protected String getValue(RequestLoggerRequest request) {
        return null;
    }
    protected String getValue(RequestLoggerResponse response) {
        return null;
    }
}

/** %P - name of the thread handling the request (the %{format}P form is not supported). */
static class ThreadParameter extends BaseParameter {
    public ThreadParameter(String parParam) {
        super(parParam, true);
    }
    protected String getValue(RequestLoggerRequest request) {
        return Thread.currentThread().getName();
    }
    protected String getValue(RequestLoggerResponse response) {
        return null;
    }
}

/** %{name}M - value of the named request parameter. */
static class ParamParameter extends BaseParameter {
    public ParamParameter(String parParam) {
        super(parParam, true);
    }
    protected String getValue(RequestLoggerRequest request) {
        return request.getParameter(this.getParParam());
    }
    protected String getValue(RequestLoggerResponse response) {
        return null;
    }
}

/** %R - the request id taken from the response. */
static class IdParameter extends BaseParameter {
    public IdParameter() {
        super(null, false);
    }
    protected String getValue(RequestLoggerRequest request) {
        return null;
    }
    protected String getValue(RequestLoggerResponse response) {
        return String.valueOf(response.getRequestId());
    }
}

/** %b / %B - number of bytes written with the response. */
static class ByteCountParameter extends BaseParameter {
public ByteCountParameter() { super(null, false); } protected String getValue(RequestLoggerRequest request) { return null; } protected String getValue(RequestLoggerResponse response) { int count = response.getCount(); if (count == 0) { return (this.getParName() == 'b') ? "-" : "0"; } return String.valueOf(count); } } static class TimeParameter extends BaseParameter { /** date format - see access logging in service() */ private static final SimpleDateFormat accessLogFmt = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss ", Locale.US); /** time format for GMT offset - see access logging in service() */ private static final DecimalFormat dfmt = new DecimalFormat("+0000;-0000"); /** the timezone for the timezone offset calculation */ private static final Calendar calendar = Calendar.getInstance(); /** last zone offset (cached by hours) */ private static String lastZoneOffset = ""; private static long lastZoneOffsetHour = -1; /** last formatted time (cached in seconds) */ private static String lastTimeFormatted = ""; private static long lastTimeFormattedSeconds = -1; private final boolean requestStart; public TimeParameter(String parParam) { super(parParam, false); this.requestStart = parParam == null || !parParam.equals("end"); } protected String getValue(RequestLoggerRequest request) { return null; } protected String getValue(RequestLoggerResponse response) { long time = this.requestStart ? 
response.getRequestStart() : response.getRequestEnd(); return timeFormatted(time); } // ---------- internal // ----------------------------------------------------- static String timeFormatted(long time) { if (time / 1000 != lastTimeFormattedSeconds) { lastTimeFormattedSeconds = time / 1000; Date date = new Date(time); StringBuilder buf = new StringBuilder(accessLogFmt.format(date)); if (time / 3600000 != lastZoneOffsetHour) { lastZoneOffsetHour = time / 3600000; calendar.setTime(date); int tzOffset = calendar.get(Calendar.ZONE_OFFSET) + calendar.get(Calendar.DST_OFFSET); tzOffset /= (60 * 1000); tzOffset = ((tzOffset / 60) * 100) + (tzOffset % 60); lastZoneOffset = dfmt.format(tzOffset); } buf.append(lastZoneOffset); lastTimeFormatted = buf.toString(); } return lastTimeFormatted; } } static class DurationParameter extends BaseParameter { private final boolean seconds; public DurationParameter(boolean seconds) { super(null, false); this.seconds = seconds; } protected String getValue(RequestLoggerRequest request) { return null; } protected String getValue(RequestLoggerResponse response) { long time = response.getRequestDuration(); if (this.seconds) { time /= 1000; } return String.valueOf(time); } } static class RemoteIPParameter extends BaseParameter { public RemoteIPParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getRemoteAddr(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class RemoteHostParameter extends BaseParameter { public RemoteHostParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getRemoteHost(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class LocalIPParameter extends BaseParameter { public LocalIPParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getLocalAddr(); } protected String 
getValue(RequestLoggerResponse response) { return null; } } static class LocalPortParameter extends BaseParameter { public LocalPortParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return String.valueOf(request.getServerPort()); } protected String getValue(RequestLoggerResponse response) { return null; } } static class ServerNameParameter extends BaseParameter { public ServerNameParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getServerName(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class ContentPathParameter extends BaseParameter { public ContentPathParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { final Object resourcePath = request.getAttribute(RequestData.REQUEST_RESOURCE_PATH_ATTR); if (resourcePath instanceof String) { return (String) resourcePath; } return null; } protected String getValue(RequestLoggerResponse response) { return null; } } static class FirstRequestLineParameter extends BaseParameter { public FirstRequestLineParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { String query = request.getQueryString(); query = (query == null || query.length() == 0) ? "" : "?" 
+ query; return request.getMethod() + " " + request.getRequestURI() + query + " " + request.getProtocol(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class ProtocolParameter extends BaseParameter { public ProtocolParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getProtocol(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class MethodParameter extends BaseParameter { public MethodParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getMethod(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class RequestParameter extends BaseParameter { public RequestParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { return request.getRequestURI(); } protected String getValue(RequestLoggerResponse response) { return null; } } static class QueryParameter extends BaseParameter { public QueryParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { String query = request.getQueryString(); return (query == null || query.length() == 0) ? "" : "?" + query; } protected String getValue(RequestLoggerResponse response) { return null; } } static class UserParameter extends BaseParameter { public UserParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { final String user = request.getRemoteUser(); return (user == null) ? null : escape(user); } protected String getValue(RequestLoggerResponse response) { return null; } } static class AuthTypeParameter extends BaseParameter { public AuthTypeParameter() { super(null, true); } protected String getValue(RequestLoggerRequest request) { final String authType = request.getAuthType(); return (authType == null) ? 
null : escape(authType); } protected String getValue(RequestLoggerResponse response) { return null; } } static class StatusParameter extends BaseParameter { public StatusParameter() { super(null, false); } protected String getValue(RequestLoggerRequest request) { return null; } protected String getValue(RequestLoggerResponse response) { return String.valueOf(response.getStatus()); } } static class CookieParameter extends BaseParameter { private String cookieName; CookieParameter(String cookieName, boolean isRequest) { super(cookieName, isRequest); this.cookieName = cookieName; } protected String getValue(RequestLoggerRequest request) { return getValue(request.getCookie(this.cookieName)); } protected String getValue(RequestLoggerResponse response) { return getValue(response.getCookie(this.cookieName)); } private String getValue(final Cookie cookie) { return (cookie == null) ? null : escape(cookie.getValue()); } } static class HeaderParameter extends BaseParameter { private String headerName; HeaderParameter(String headerName, boolean isRequest) { super(headerName, isRequest); this.headerName = headerName; } protected String getValue(RequestLoggerRequest request) { Enumeration<?> values = request.getHeaders(this.headerName); if (values == null || !values.hasMoreElements()) { return null; } String value = (String) values.nextElement(); while (values.hasMoreElements()) { value += "," + values.nextElement(); } return escape(value); } protected String getValue(RequestLoggerResponse response) { return escape(response.getHeadersString(this.headerName)); } } }
/* Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ValidatingXSRTests.detailed; import com.foo.sample.HeadingDocument; import org.apache.xmlbeans.SchemaType; import org.apache.xmlbeans.XmlBeans; import org.apache.xmlbeans.XmlError; import org.apache.xmlbeans.impl.validator.ValidatingXMLStreamReader; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.openuri.test.location.LocationDocument; import org.openuri.test.mixedContent.LetterDocument; import org.openuri.test.mixedContent.NoMixedDocument; import org.openuri.test.person.Name; import org.openuri.test.person.PersonDocument; import org.openuri.test.person.PersonType; import tools.util.JarUtil; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.events.XMLEvent; import javax.xml.stream.util.StreamReaderDelegate; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.Collection; import static org.junit.jupiter.api.Assertions.*; // Schema Imports public class ValidatingXmlStreamReaderTests { // Base variable static String casesLoc = "xbean/ValidatingStream/"; ////////////////////////////////////////////////////////////////////// // Tests // NOTE: Tests that use getCasesFile are reading files // from cases/qatest/files/xbean // Tests that use 
getResourceFromJar are getting the contents of // the file in the same location, but packaged into xmlcases.jar // SO, any change to the xml files for these tests will not be // reflected till they make it into xmlcases.jar. (ant build.xmlcases) @Test void testDocWithNoSchema() throws Exception { checkDocIsInvalid(getCasesFile(casesLoc + "po.xml"), null); } @Test void testValidLocationDoc() throws Exception { checkDocIsValid(getCasesFile(casesLoc + "location.xml"), null); } @Test void testInvalidLocationDoc() throws Exception { checkDocIsInvalid(getCasesFile(casesLoc + "location-inv.xml"), LocationDocument.type); } @Test void testValidPersonDoc() throws Exception { checkDocIsValid(getCasesFile(casesLoc + "person.xml"), PersonDocument.type); } @Test void testInvalidPersonDoc() throws Exception { checkDocIsInvalid(getCasesFile(casesLoc + "person-inv.xml"), PersonDocument.type); } @Test void testValidMixedContentDoc() throws Exception { checkDocIsValid(getCasesFile(casesLoc + "mixed-content.xml"), LetterDocument.type); } @Test void testInvalidNomixedContentDoc() throws Exception { checkDocIsInvalid(getCasesFile(casesLoc + "nomixed-content-inv.xml"), NoMixedDocument.type); } @Test void testInvalidMissingAttributeDoc() throws Exception { checkDocIsInvalid(getCasesFile(casesLoc + "foo-inv.xml"), HeadingDocument.type); } @Test void testContentName() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-frag.xml"); SchemaType type = Name.type; assertTrue(checkContent(sXml, type, true), "Xml-fragment is not valid:\n" + sXml); } // Same as testContentName.. 
expect the xml has no chars before the first // start element @Test void testContentName2() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-frag2.xml"); SchemaType type = Name.type; assertTrue(checkContent(sXml, type, true), "Xml-fragment is not valid:\n" + sXml); } @Test void testContentSibling() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-sibling.xml"); SchemaType type = PersonType.type; assertTrue(checkContent(sXml, type, true), "Xml-fragment is not valid:\n" + sXml); } @Test void testInvalidContentSibling() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-sibling-inv.xml"); SchemaType type = PersonType.type; assertFalse(checkContent(sXml, type, true), "Invalid Xml-fragment is getting validated:\n" + sXml); } @Test void testValidXsiType() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-justname.xml"); SchemaType type = Name.type; assertTrue(checkContent(sXml, type, true), "Xml-fragment is not valid:\n" + sXml); } @Test void testInvalidXsiType() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-justname-inv.xml"); SchemaType type = Name.type; assertFalse(checkContent(sXml, type, true), "Invalid Xml-fragment is getting validated:\n" + sXml); } @Test void testIncompatibleXsiType() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "person-xsi-inv.xml"); SchemaType type = Name.type; assertFalse(checkContent(sXml, type, true), "Invalid Xml-fragment is getting validated:\n" + sXml); } @Test void testValidMixedContent() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "mixed-content.xml"); SchemaType type = org.openuri.test.mixedContent.LetterType.type; assertTrue(checkContent(sXml, type, true), "Xml-fragment is not valid:\n" + sXml); } @Test @Disabled public void testGlobalAttribute() throws Exception { String sXml = JarUtil.getResourceFromJar(casesLoc + "global-attr.xml"); 
assertTrue(checkContent(sXml, null, true), "Global Attribute test failed:\n"); } // Tests for increasing code-coverage metrics @Test void testValXsrReuse() throws Exception { Collection<XmlError> errors = new ArrayList<>(); File[] xmls = new File[2]; xmls[0] = getCasesFile(casesLoc + "person.xml"); xmls[1] = getCasesFile(casesLoc + "person-inv.xml"); SchemaType type = PersonDocument.type; boolean[] ret = runValidator(xmls, type, errors); String common = "Test for ValidatingXmlStreamReader reuse failed"; assertEquals(2, ret.length, common + "\nReturn value has more than 2 elements"); assertTrue(ret[0] && !ret[1], common + "\nExpected: true & false. Actual: " + ret[0] + " & " + ret[1]); } // public void testIllegalEvent() throws Exception { // Will require writing another XSR wrapper.. albeit simple // } private boolean runValidator(File xml, SchemaType type, Collection<XmlError> errors) throws IllegalArgumentException, Exception { if (errors == null) { throw new IllegalArgumentException( "Collection object cannot be null"); } XMLStreamReader xsr = XMLInputFactory.newInstance(). createXMLStreamReader(new FileInputStream(xml)); ValidatingXMLStreamReader valXsr = new ValidatingXMLStreamReader(); valXsr.init(xsr, false, type, XmlBeans.typeLoaderForClassLoader(ValidatingXMLStreamReader.class.getClassLoader()), null, errors); // Walk through the xml while (valXsr.hasNext()) { valXsr.next(); } return valXsr.isValid(); //return true; } // This method is primarily for testing re-use of the ValXSR object. // but could come in handy later.. private boolean[] runValidator(File[] xml, SchemaType type, Collection<XmlError> errors) throws IllegalArgumentException, Exception { if (errors == null) { throw new IllegalArgumentException( "Collection object cannot be null"); } ValidatingXMLStreamReader valXsr = new ValidatingXMLStreamReader(); boolean[] retArray = new boolean[xml.length]; for (int i = 0; i < xml.length; i++) { XMLStreamReader xsr = XMLInputFactory.newInstance(). 
createXMLStreamReader(new FileInputStream(xml[i])); valXsr.init(xsr, false, type, XmlBeans.typeLoaderForClassLoader(ValidatingXMLStreamReader.class.getClassLoader()), null, errors); // Walk through the xml while (valXsr.hasNext()) { valXsr.next(); } retArray[i] = valXsr.isValid(); } return retArray; } protected void checkDocIsValid(File file, SchemaType type) throws Exception { Collection<XmlError> errors = new ArrayList<>(); boolean isValid = runValidator(file, type, errors); assertTrue(isValid, "File '" + file.getName() + "' is invalid."); } protected void checkDocIsInvalid(File file, SchemaType type) throws Exception { Collection<XmlError> errors = new ArrayList<>(); boolean isValid = runValidator(file, type, errors); assertFalse(isValid, "File '" + file.getName() + "' is valid, but was expecting invalid."); } public boolean checkContent(String fragment, SchemaType type, boolean printErrors) throws Exception { XMLStreamReader xsr = XMLInputFactory.newInstance(). createXMLStreamReader(new StringReader(fragment)); XmlContentTestXSR cxsr = new XmlContentTestXSR(xsr); Collection<XmlError> errors = new ArrayList<>(); ValidatingXMLStreamReader valXsr = new ValidatingXMLStreamReader(); valXsr.init(cxsr, false, type, XmlBeans.typeLoaderForClassLoader(ValidatingXMLStreamReader.class.getClassLoader()), null, errors); // Walk through the xml while (valXsr.hasNext()) { valXsr.next(); } return valXsr.isValid(); } private static File getCasesFile(String path) throws java.io.IOException { if (path.length() == 0) { throw new IOException("getCasesFile was called with path of len 0"); } return JarUtil.getResourceFromJarasFile(path); //return new File(casesRoot + path); } ///////////////////////////////////////////////////////////////////////// // XmlStreamReader extension for content Validation // will not work for Global Attribute private static class XmlContentTestXSR extends StreamReaderDelegate implements XMLStreamReader { private static final int TAGOPEN = 100; private 
static final int TAGCLOSE = 101; private static final int UNDEFINED = 99; private static final int ATTRIBUTE = 102; private static final int ENDCONTENT = 103; int state = -1; int depth = -1; boolean initialized = false; int attributeCount = -1; boolean hasAttributes = false; // Constructor Wrappers public XmlContentTestXSR(XMLStreamReader xsr) throws XMLStreamException { super(xsr); } public boolean hasNext() { if (state == UNDEFINED || state == ENDCONTENT) { return false; } if (!initialized) // next() has not been called yet { return true; } return true; } public int next() throws XMLStreamException { int _next; if (!initialized) { // First time next() is called.. // Scan for the first XMLEvent.START_ELEMENT _next = UNDEFINED; while ((super.hasNext()) && (_next != XMLEvent.START_ELEMENT)) { _next = super.next(); } if (_next != XMLEvent.START_ELEMENT) { throw new XMLStreamException( "Could not find a start element"); } initialized = true; // Now move past the first tag state = TAGOPEN; depth = 1; if ((attributeCount = super.getAttributeCount()) > 0) { // The first element has attributes.. this is part of // the content. So the first event should XMLEvent.ATTRIBUTE _next = XMLEvent.ATTRIBUTE; } else { // return super.next(); /* If content is <xml-fragment/> then we will have returned END_ELEMENT above, without ever generating a START_ELEMENT In this case probably we should detect this and return a END_DOCUMENT */ _next = super.next(); if (_next == XMLEvent.END_ELEMENT) { _next = XMLEvent.END_DOCUMENT; state = ENDCONTENT; } } return _next; } _next = super.next(); switch (_next) { case XMLEvent.START_ELEMENT: state = TAGOPEN; depth++; break; case XMLEvent.END_ELEMENT: --depth; if (depth < 0 && state == TAGOPEN) { throw new XMLStreamException( "Illegal XML Stream state"); } else if (depth == 0 && state == TAGOPEN) { state = ENDCONTENT; // at this point we will return ENDDOCUMENT _next = XMLEvent.END_DOCUMENT; } break; } return _next; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.service.modules.flow; import com.google.common.base.Splitter; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import com.google.common.base.Optional; import com.google.common.collect.Maps; import com.typesafe.config.Config; import com.typesafe.config.ConfigValueFactory; import org.apache.commons.lang3.StringUtils; import org.apache.gobblin.configuration.ConfigurationKeys; import org.apache.gobblin.runtime.spec_executorInstance.InMemorySpecExecutor; import org.apache.gobblin.service.modules.policy.ServicePolicy; import org.apache.gobblin.util.ClassAliasResolver; import org.apache.gobblin.util.ConfigUtils; import org.jgrapht.graph.DirectedWeightedMultigraph; import org.slf4j.Logger; import org.apache.gobblin.runtime.api.FlowEdge; import org.apache.gobblin.runtime.api.ServiceNode; import org.apache.gobblin.runtime.api.FlowSpec; import org.apache.gobblin.instrumented.Instrumented; import org.apache.gobblin.runtime.api.Spec; import org.apache.gobblin.runtime.api.TopologySpec; import 
org.apache.gobblin.service.ServiceConfigKeys; import org.apache.gobblin.runtime.spec_executorInstance.BaseServiceNodeImpl; import org.apache.gobblin.runtime.api.JobSpec; import org.apache.gobblin.runtime.api.JobTemplate; import org.apache.gobblin.runtime.api.SpecExecutor; import org.apache.gobblin.runtime.api.SpecNotFoundException; import org.apache.gobblin.runtime.job_spec.ResolvedJobSpec; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import static org.apache.gobblin.service.ServiceConfigKeys.*; import static org.apache.gobblin.service.modules.utils.FindPathUtils.*; // Users are capable to inject hints/prioritization into route selection, in two forms: // 1. PolicyBasedBlockedConnection: Define some undesired routes // 2. Specified a complete path. FlowCompiler is responsible to verify if the path given is valid. // TODO: Flow monitoring, injecting weight for flowEdge:ETL-6213 @Slf4j public class MultiHopsFlowToJobSpecCompiler extends BaseFlowToJobSpecCompiler { private static final Splitter SPLIT_BY_COMMA = Splitter.on(",").omitEmptyStrings().trimResults(); @Getter private DirectedWeightedMultigraph<ServiceNode, FlowEdge> weightedGraph = new DirectedWeightedMultigraph<>(LoadBasedFlowEdgeImpl.class); public ServicePolicy servicePolicy; // Contains user-specified complete path of how the data movement is executed from source to sink. private Optional<String> optionalUserSpecifiedPath; private FlowEdgeProps defaultFlowEdgeProps = new FlowEdgeProps(); public MultiHopsFlowToJobSpecCompiler(Config config) { this(config, Optional.absent(), true); } public MultiHopsFlowToJobSpecCompiler(Config config, Optional<Logger> log) { this(config, log, true); } public MultiHopsFlowToJobSpecCompiler(Config config, Optional<Logger> log, boolean instrumentationEnabled) { super(config, log, instrumentationEnabled); String policyClassName = config.hasPath(SERVICE_POLICY_NAME) ? 
config.getString(SERVICE_POLICY_NAME) : ServiceConfigKeys.DEFAULT_SERVICE_POLICY; ClassAliasResolver<ServicePolicy> classResolver = new ClassAliasResolver<>(ServicePolicy.class); try { servicePolicy = classResolver.resolveClass(policyClassName).newInstance(); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) { throw new RuntimeException("Error happen when resolving class for :" + policyClassName, e); } if (config.hasPath(ServiceConfigKeys.POLICY_BASED_BLOCKED_CONNECTION) && config.getStringList(ServiceConfigKeys.POLICY_BASED_BLOCKED_CONNECTION).size() > 0) { try { for (String sourceSinkPair : config.getStringList(ServiceConfigKeys.POLICY_BASED_BLOCKED_CONNECTION)) { BaseServiceNodeImpl source = new BaseServiceNodeImpl(sourceSinkPair.split(":")[0]); BaseServiceNodeImpl sink = new BaseServiceNodeImpl(sourceSinkPair.split(":")[1]); URI specExecutorURI = new URI(sourceSinkPair.split(":")[2]); servicePolicy.addFlowEdge( new LoadBasedFlowEdgeImpl(source, sink, InMemorySpecExecutor.createDummySpecExecutor(specExecutorURI))); } } catch (URISyntaxException e) { this.log.warn("Constructing of FlowEdge in ServicePolicy Failed"); } } if (config.hasPath(ServiceConfigKeys.POLICY_BASED_BLOCKED_NODES) && StringUtils.isNotBlank(config.getString(ServiceConfigKeys.POLICY_BASED_BLOCKED_NODES))) { for (String blacklistedNode : SPLIT_BY_COMMA.splitToList( config.getString(ServiceConfigKeys.POLICY_BASED_BLOCKED_NODES))) { servicePolicy.addServiceNode(new BaseServiceNodeImpl(blacklistedNode)); } } if (config.hasPath(ServiceConfigKeys.POLICY_BASED_DATA_MOVEMENT_PATH) && StringUtils.isNotBlank( config.getString(ServiceConfigKeys.POLICY_BASED_DATA_MOVEMENT_PATH))) { optionalUserSpecifiedPath = Optional.of(config.getString(ServiceConfigKeys.POLICY_BASED_DATA_MOVEMENT_PATH)); } else { optionalUserSpecifiedPath = Optional.absent(); } } @Override public Map<Spec, SpecExecutor> compileFlow(Spec spec) { // A Map from JobSpec to SpexExecutor, as the output of Flow 
Compiler. Map<Spec, SpecExecutor> specExecutorInstanceMap = Maps.newLinkedHashMap(); findPath(specExecutorInstanceMap, spec); return specExecutorInstanceMap; } /** * @return Transform a set of {@link TopologySpec} into a instance of {@link org.jgrapht.graph.WeightedMultigraph} * and filter out connections between blacklisted vertices that user specified. * The output of this function only stays in memory, so each time a logical flow is compiled, the multigraph will * be re-calculated. * */ private void inMemoryWeightGraphGenerator() { for (TopologySpec topologySpec : topologySpecMap.values()) { weightGraphGenerateHelper(topologySpec); } // Filter out connection appearing in servicePolicy. // This is where servicePolicy is enforced. servicePolicy.populateBlackListedEdges(this.weightedGraph); if (servicePolicy.getBlacklistedEdges().size() > 0) { for (FlowEdge toDeletedEdge : servicePolicy.getBlacklistedEdges()) { weightedGraph.removeEdge(toDeletedEdge); } } } // Basically a dijkstra path finding for connecting source and sink by multiple hops in between. // If there's any user-specified prioritization, conduct the DFS and see if the user-specified path is available. // there's no updates on TopologySpec, or user should be aware of the possibility // that a topologySpec not being reflected in findPath. private void findPath(Map<Spec, SpecExecutor> specExecutorInstanceMap, Spec spec) { inMemoryWeightGraphGenerator(); FlowSpec flowSpec = (FlowSpec) spec; if (optionalUserSpecifiedPath.isPresent()) { log.info("Starting to evaluate user's specified path ... 
"); if (userSpecifiedPathVerificator(specExecutorInstanceMap, flowSpec)) { log.info("User specified path[ " + optionalUserSpecifiedPath.get() + "] successfully verified."); return; } else { log.error("Will not execute user specified path[ " + optionalUserSpecifiedPath.get() + "]"); log.info("Start to execute FlowCompiler's algorithm for valid data movement path"); } } ServiceNode sourceNode = new BaseServiceNodeImpl(flowSpec.getConfig().getString(ServiceConfigKeys.FLOW_SOURCE_IDENTIFIER_KEY)); ServiceNode targetNode = new BaseServiceNodeImpl(flowSpec.getConfig().getString(ServiceConfigKeys.FLOW_DESTINATION_IDENTIFIER_KEY)); List<FlowEdge> resultEdgePath = dijkstraBasedPathFindingHelper(sourceNode, targetNode, this.weightedGraph); for (int i = 0; i < resultEdgePath.size() ; i++) { FlowEdge tmpFlowEdge = resultEdgePath.get(i); ServiceNode edgeSrcNode = ((LoadBasedFlowEdgeImpl) tmpFlowEdge).getSourceNode(); ServiceNode edgeTgtNode = ((LoadBasedFlowEdgeImpl) tmpFlowEdge).getTargetNode(); specExecutorInstanceMap.put(convertHopToJobSpec(edgeSrcNode, edgeTgtNode, flowSpec), ((LoadBasedFlowEdgeImpl) (resultEdgePath.get(i))).getSpecExecutorInstance()); } } /** * As the base implementation here, all templates will be considered for each edge. */ @Override protected void populateEdgeTemplateMap() { if (templateCatalog.isPresent()) { for (FlowEdge flowEdge : this.weightedGraph.edgeSet()) { edgeTemplateMap.put(flowEdge.getEdgeIdentity(), templateCatalog.get(). getAllTemplates(). stream().map(jobTemplate -> jobTemplate.getUri()).collect(Collectors.toList())); } } } // If path specified not existed, return false; // else return true. 
private boolean userSpecifiedPathVerificator(Map<Spec, SpecExecutor> specExecutorInstanceMap, FlowSpec flowSpec) {
    // Build into a temporary map first so specExecutorInstanceMap is only touched
    // when the whole user-specified path verifies.
    Map<Spec, SpecExecutor> tmpSpecExecutorInstanceMap = new HashMap<>();
    // NOTE(review): "userSpecfiedPath" is misspelled (missing 'i'); local-only, kept as-is here.
    List<String> userSpecfiedPath = Arrays.asList(optionalUserSpecifiedPath.get().split(","));
    for (int i = 0; i < userSpecfiedPath.size() - 1; i++) {
      ServiceNode sourceNode = new BaseServiceNodeImpl(userSpecfiedPath.get(i));
      ServiceNode targetNode = new BaseServiceNodeImpl(userSpecfiedPath.get(i + 1));
      // Each consecutive pair of names must be vertices connected by an edge in the graph.
      if (weightedGraph.containsVertex(sourceNode) && weightedGraph.containsVertex(targetNode)
          && weightedGraph.containsEdge(sourceNode, targetNode)) {
        tmpSpecExecutorInstanceMap.put(convertHopToJobSpec(sourceNode, targetNode, flowSpec),
            (((LoadBasedFlowEdgeImpl) weightedGraph.getEdge(sourceNode, targetNode)).getSpecExecutorInstance()));
      } else {
        log.error("User Specified Path is invalid");
        return false;
      }
    }
    specExecutorInstanceMap.putAll(tmpSpecExecutorInstanceMap);
    return true;
  }

  // Helper function for transforming the TopologySpecMap into a weighted directed graph.
  // Adds one vertex per capability endpoint and one LoadBasedFlowEdgeImpl per (src, dst) pair.
  private void weightGraphGenerateHelper(TopologySpec topologySpec) {
    try {
      Map<ServiceNode, ServiceNode> capabilities =
          topologySpec.getSpecExecutor().getCapabilities().get();
      for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) {
        BaseServiceNodeImpl sourceNode = new BaseServiceNodeImpl(capability.getKey().getNodeName());
        BaseServiceNodeImpl targetNode = new BaseServiceNodeImpl(capability.getValue().getNodeName());
        if (!weightedGraph.containsVertex(sourceNode)) {
          weightedGraph.addVertex(sourceNode);
        }
        if (!weightedGraph.containsVertex(targetNode)) {
          weightedGraph.addVertex(targetNode);
        }
        FlowEdge flowEdge = new LoadBasedFlowEdgeImpl(sourceNode, targetNode,
            defaultFlowEdgeProps, topologySpec.getSpecExecutor());
        // In a multigraph, if the flowEdge already exists, just skip it.
        if (!weightedGraph.containsEdge(flowEdge)) {
          weightedGraph.addEdge(sourceNode, targetNode, flowEdge);
        }
      }
    } catch (InterruptedException | ExecutionException e) {
      Instrumented.markMeter(this.flowCompilationFailedMeter);
      throw new RuntimeException("Cannot determine topology capabilities", e);
    }
  }

  /**
   * Generate a JobSpec based on the {@code templateURI} that the user specified.
   * The resulting config has the schedule key removed, job.name/job.group derived from the
   * flow name/group, and a fresh flow execution id (current wall-clock millis) stamped in.
   */
  private JobSpec buildJobSpec (ServiceNode sourceNode, ServiceNode targetNode, URI templateURI, FlowSpec flowSpec) {
    JobSpec jobSpec;
    JobSpec.Builder jobSpecBuilder = JobSpec.builder(jobSpecURIGenerator(flowSpec, sourceNode, targetNode))
        .withConfig(flowSpec.getConfig())
        .withDescription(flowSpec.getDescription())
        .withVersion(flowSpec.getVersion());
    if (templateURI != null) {
      jobSpecBuilder.withTemplate(templateURI);
      try {
        jobSpec = new ResolvedJobSpec(jobSpecBuilder.build(), templateCatalog.get());
        log.info("Resolved JobSpec properties are: " + jobSpec.getConfigAsProperties());
      } catch (SpecNotFoundException | JobTemplate.TemplateException e) {
        throw new RuntimeException("Could not resolve template in JobSpec from TemplateCatalog", e);
      }
    } else {
      jobSpec = jobSpecBuilder.build();
      log.info("Unresolved JobSpec properties are: " + jobSpec.getConfigAsProperties());
    }

    // Remove schedule
    jobSpec.setConfig(jobSpec.getConfig().withoutPath(ConfigurationKeys.JOB_SCHEDULE_KEY));

    // Add job.name and job.group
    if (flowSpec.getConfig().hasPath(ConfigurationKeys.FLOW_NAME_KEY)) {
      jobSpec.setConfig(jobSpec.getConfig()
          .withValue(ConfigurationKeys.JOB_NAME_KEY, ConfigValueFactory.fromAnyRef(
              flowSpec.getConfig().getValue(ConfigurationKeys.FLOW_NAME_KEY).unwrapped().toString()
                  + "-" + sourceNode.getNodeName()
                  + "-" + targetNode.getNodeName())));
    }
    if (flowSpec.getConfig().hasPath(ConfigurationKeys.FLOW_GROUP_KEY)) {
      jobSpec.setConfig(jobSpec.getConfig()
          .withValue(ConfigurationKeys.JOB_GROUP_KEY, flowSpec.getConfig().getValue(ConfigurationKeys.FLOW_GROUP_KEY)));
    }

    // Add flow execution id for this compilation
    long flowExecutionId = System.currentTimeMillis();
    jobSpec.setConfig(jobSpec.getConfig().withValue(ConfigurationKeys.FLOW_EXECUTION_ID_KEY,
        ConfigValueFactory.fromAnyRef(flowExecutionId)));

    // Reset properties in Spec from Config
    jobSpec.setConfigAsProperties(ConfigUtils.configToProperties(jobSpec.getConfig()));

    return jobSpec;
  }

  /**
   * A naive implementation of resolving templates in each JobSpec among a multi-hop FlowSpec.
   * Handles the case when the edge is not specified.
   * Always selects the first available template.
   */
  private JobSpec convertHopToJobSpec (ServiceNode sourceNode, ServiceNode targetNode, FlowSpec flowSpec) {
    // NOTE(review): assumes at least one edge exists between the pair; iterator().next()
    // would throw NoSuchElementException otherwise — callers only pass verified hops.
    FlowEdge flowEdge = weightedGraph.getAllEdges(sourceNode, targetNode).iterator().next();
    URI templateURI = getTemplateURI (sourceNode, targetNode, flowSpec, flowEdge);
    return buildJobSpec(sourceNode, targetNode, templateURI, flowSpec);
  }

  // Picks the first template mapped to this edge, falling back to the flow-level template URI.
  private URI getTemplateURI (ServiceNode sourceNode, ServiceNode targetNode, FlowSpec flowSpec, FlowEdge flowEdge) {
    URI firstTemplateURI =
        (edgeTemplateMap != null && edgeTemplateMap.containsKey(flowEdge.getEdgeIdentity())) ? edgeTemplateMap.get(
            flowEdge.getEdgeIdentity()).get(0) : jobSpecGenerator(flowSpec).getTemplateURI().orNull();
    return firstTemplateURI;
  }

  /**
   * A naive implementation of generating a jobSpec's URI within a multi-hop logical Flow:
   * the flow URI's path plus "/&lt;src&gt;-&lt;dst&gt;".
   */
  public static URI jobSpecURIGenerator(FlowSpec flowSpec, ServiceNode sourceNode, ServiceNode targetNode) {
    try {
      return new URI(JobSpec.Builder.DEFAULT_JOB_CATALOG_SCHEME, flowSpec.getUri().getAuthority(),
          StringUtils.appendIfMissing(StringUtils.prependIfMissing(flowSpec.getUri().getPath(), "/"),"/")
              + sourceNode.getNodeName() + "-" + targetNode.getNodeName(), null);
    } catch (URISyntaxException e) {
      log.error(
          "URI construction failed when jobSpec from " + sourceNode.getNodeName() + " to " + targetNode.getNodeName());
      // NOTE(review): the URISyntaxException cause is dropped here — consider
      // `throw new RuntimeException(e)` so the stack trace is preserved.
      throw new RuntimeException();
    }
  }
}
package water.rapids;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import water.*;
import water.fvec.Frame;
import water.fvec.NFSFileVec;
import water.fvec.Vec;
import water.parser.ParseDataset;
import water.parser.ParseSetup;
import water.util.ArrayUtils;

import java.io.File;
import java.util.Arrays;

/**
 * Tests for the Rapids expression language: each test builds an AST string
 * (Lisp-like syntax, "#" prefixes numbers) and runs it through {@link Exec#exec}
 * via {@link #checkTree}, which loads the iris dataset under the key "a.hex".
 */
public class RapidsTest extends TestUtil {
  @BeforeClass public static void setup() { stall_till_cloudsize(1); }

  @Test public void bigSlice() {
    // check that large slices do something sane
    String tree = "(rows a.hex [0:2147483647])";
    checkTree(tree);
  }

  @Test public void test1() {
    // Checking `hex + 5`
    String tree = "(+ a.hex #5)";
    checkTree(tree);
  }

  @Test public void test2() {
    // Checking `hex + 5 + 10`
    String tree = "(+ a.hex (+ #5 #10))";
    checkTree(tree);
  }

  @Test public void test3() {
    // Checking `hex + 5 - 1 * hex + 15 * (23 / hex)`
    String tree = "(+ (- (+ a.hex #5) (* #1 a.hex)) (* #15 (/ #23 a.hex)))";
    checkTree(tree);
  }

  @Test public void test4() {
    //Checking `hex == 5`, <=, >=, <, >, !=
    String tree = "(== a.hex #5)";
    checkTree(tree);
    tree = "(<= a.hex #5)";
    checkTree(tree);
    tree = "(>= a.hex #1.25132)";
    checkTree(tree);
    tree = "(< a.hex #112.341e-5)";
    checkTree(tree);
    tree = "(> a.hex #0.0123)";
    checkTree(tree);
    tree = "(!= a.hex #0)";
    checkTree(tree);
  }

  @Test public void test4_throws() {
    // Comparing a frame to a 2-column slice is expected to fail.
    String tree = "(== a.hex (cols a.hex [1 2]))";
    checkTree(tree,true);
  }

  @Test public void test5() {
    // Checking `hex && hex`, ||, &, |
    String tree = "(&& a.hex a.hex)";
    checkTree(tree);
    tree = "(|| a.hex a.hex)";
    checkTree(tree);
    tree = "(& a.hex a.hex)";
    checkTree(tree);
    tree = "(| a.hex a.hex)";
    checkTree(tree);
  }

  @Test public void test6() {
    // Checking `hex[,1]`
    String tree = "(cols a.hex [0])";
    checkTree(tree);
    // Checking `hex[1,5]`
    tree = "(rows (cols a.hex [0]) [5])";
    checkTree(tree);
    // Checking `hex[c(1:5,7,9),6]`
    tree = "(cols (rows a.hex [0:4 6 7]) [0])";
    checkTree(tree);
    // Checking `hex[c(8,1,1,7),1]`
    tree = "(rows a.hex [8 1 1 7])";
    checkTree(tree);
  }

  @Test public void testRowAssign() {
    String tree;
    // Assign column 3 over column 0
    tree = "(:= a.hex (cols a.hex [3]) 0 [0:150])";
    checkTree(tree);
    // Assign 17 over column 0
    tree = "(:= a.hex 17 [0] [0:150])";
    checkTree(tree);
    // Assign 17 over column 0, row 5
    tree = "(:= a.hex 17 [0] [5])";
    checkTree(tree);
    // Append 17
    tree = "(append a.hex 17 \"nnn\")";
    checkTree(tree);
  }

  @Test public void testFun() {
    // Compute 3*3; single variable defined in function body
    String tree = "({var1 . (* var1 var1)} 3)";
    checkTree(tree);
    // Unknown var2
    tree = "({var1 . (* var1 var2)} 3)";
    checkTree(tree,true);
    // Compute 3* a.hex[0,0]
    tree = "({var1 . (* var1 (rows a.hex [0]))} 3)";
    checkTree(tree);
    // Some more horrible functions.  Drop the passed function and return a 3
    tree = "({fun . 3} {y . (* y y)})";
    checkTree(tree);
    // Apply a 3 to the passed function
    tree = "({fun . (fun 3)} {y . (* y y)})";
    checkTree(tree);
    // Pass the squaring function thru the ID function
    tree = "({fun . fun} {y . (* y y)})";
    checkTree(tree);
    // Pass the squaring function thru the twice-apply-3 function
    tree = "({fun . (fun (fun 3))} {y . (* y y)})";
    checkTree(tree);
    // Pass the squaring function thru the twice-apply-x function
    tree = "({fun x . (fun (fun x))} {y . (* y y)} 3)";
    checkTree(tree);
    // Pass the squaring function thru the twice-apply function
    tree = " ({fun . {x . (fun (fun x))}} {y . (* y y)}) ";
    checkTree(tree);
    // Pass the squaring function thru the twice-apply function, and apply it
    tree = "(({fun . {x . (fun (fun x))}} {y . (* y y)}) 3)";
    checkTree(tree);
  }

  @Test public void testCBind() {
    String tree = "(cbind 1 2)";
    checkTree(tree);
    tree = "(cbind 1 a.hex 2)";
    checkTree(tree);
    tree = "(cbind a.hex (cols a.hex 0) 2)";
    checkTree(tree);
  }

  @Test public void testRBind() {
    String tree = "(rbind 1 2)";
    checkTree(tree);
    //tree = "(rbind a.hex 1 2)";
    //checkTree(tree);
  }

  @Test public void testApply() {
    // Sum, reduction.  1 row result
    String tree = "(apply a.hex 2 {x . (sum x)})";
    checkTree(tree);
    // Return ID column results.  Shared data result.
    tree = "(apply a.hex 2 {x . x})";
    checkTree(tree);
    // Return column results, new data result.
    tree = "(apply a.hex 2 abs)";
    checkTree(tree);
    // Return two results
    tree = "(apply a.hex 2 {x . (rbind (sumNA x) (sum x))})";
    checkTree(tree);
  }

  @Test public void testRowApply() {
    String tree = "(apply a.hex 1 sum)";
    checkTree(tree);
    tree = "(apply a.hex 1 max)";
    checkTree(tree);
    tree = "(apply a.hex 1 {x . (sum x)})";
    checkTree(tree);
    tree = "(apply a.hex 1 {x . (sum (* x x))})";
    checkTree(tree);
  }

  @Test public void testMath() {
    // Each unary math builtin applied to the whole frame.
    for( String s : new String[] {"abs", "cos", "sin", "acos", "ceiling", "floor", "cosh", "exp", "log", "sqrt", "tan", "tanh"} )
      checkTree("("+s+" a.hex)");
  }

  @Test public void testVariance() {
    // Checking variance: scalar
    String tree = "({x . (var x x \"everything\")} (rows a.hex [0]))";
    checkTree(tree);
    tree = "({x . (var x x \"everything\")} a.hex)";
    checkTree(tree);
    tree = "(table (trunc (cols a.hex 1)))";
    checkTree(tree);
    tree = "(table (cols a.hex 1))";
    checkTree(tree);
    tree = "(table (cols a.hex 1) (cols a.hex 2))";
    checkTree(tree);
  }

  private void checkTree(String tree) { checkTree(tree,false); }

  // Parses iris into "a.hex" (last column dropped), runs the Rapids expression, and
  // asserts it throws IllegalArgumentException iff expectThrow is set.
  private void checkTree(String tree, boolean expectThrow) {
    //Frame r = frame(new double[][]{{-1},{1},{2},{3},{4},{5},{6},{254}});
    //Key ahex = Key.make("a.hex");
    //Frame fr = new Frame(ahex, null, new Vec[]{r.remove(0)});
    //r.delete();
    //DKV.put(ahex, fr);
    Frame fr = parse_test_file(Key.make("a.hex"),"smalldata/iris/iris_wheader.csv");
    fr.remove(4).remove();
    try {
      Val val = Exec.exec(tree);
      Assert.assertFalse(expectThrow);
      System.out.println(val.toString());
      if( val instanceof ValFrame ) {
        Frame fr2= ((ValFrame)val)._fr;
        System.out.println(fr2.vec(0));
        fr2.remove();
      }
    } catch( IllegalArgumentException iae ) {
      if( !expectThrow ) throw iae;
    } finally {
      fr.delete();
    }
  }

  @Test public void testMerge() {
    Frame l=null,r=null,f=null;
    try {
      // Left frame: name -> age
      l = ArrayUtils.frame("name" ,vec(ar("Cliff","Arno","Tomas","Spencer"),ari(0,1,2,3)));
      l.    add("age"  ,vec(ar(">dirt" ,"middle","middle","young'n"),ari(0,1,2,3)));
      l = new Frame(l);
      DKV.put(l);
      System.out.println(l);
      // Right frame: name -> skill
      r = ArrayUtils.frame("name" ,vec(ar("Arno","Tomas","Michael","Cliff"),ari(0,1,2,3)));
      r.    add("skill",vec(ar("science","linearmath","sparkling","hacker"),ari(0,1,2,3)));
      r = new Frame(r);
      DKV.put(r);
      System.out.println(r);
      String x = String.format("(merge %s %s #1 #0 )",l._key,r._key);
      Val res = Exec.exec(x);
      f = res.getFrame();
      System.out.println(f);
      Vec names = f.vec(0);
      Assert.assertEquals(names.factor(names.at8(0)),"Cliff");
      Vec ages  = f.vec(1);
      Assert.assertEquals(ages .factor(ages .at8(0)),">dirt");
      Vec skilz = f.vec(2);
      Assert.assertEquals(skilz.factor(skilz.at8(0)),"hacker");
    } finally {
      if( f != null ) f.delete();
      if( r != null ) r.delete();
      if( l != null ) l.delete();
    }
  }

  @Test public void testQuantile() {
    Frame f = null;
    try {
      Frame fr = ArrayUtils.frame(ard(ard(1.223292e-02),
                                      ard(1.635312e-25),
                                      ard(1.601522e-11),
                                      ard(8.452298e-10),
                                      ard(2.643733e-10),
                                      ard(2.671520e-06),
                                      ard(1.165381e-06),
                                      ard(7.193265e-10),
                                      ard(3.383532e-04),
                                      ard(2.561221e-05)));
      double[] probs = new double[]{0.001, 0.005, .01, .02, .05, .10, .50, .8883, .90, .99};
      String x = String.format("(quantile %s %s \"interpolate\")", fr._key, Arrays.toString(probs));
      Val val = Exec.exec(x);
      fr.delete();
      f = val.getFrame();
      Assert.assertEquals(2,f.numCols());
      // Expected values computed as golden values from R's quantile call
      double[] exp = ard(1.4413698000016206E-13, 7.206849000001562E-13, 1.4413698000001489E-12, 2.882739600000134E-12,
                         7.20684900000009E-12, 1.4413698000000017E-11, 5.831131148999999E-07, 3.3669567275300000E-04,
                         0.00152780988 , 0.011162408988 );
      for( int i=0; i<exp.length; i++ )
        Assert.assertTrue( "expected "+exp[i]+" got "+f.vec(1).at(i),
                           water.util.MathUtils.compare(exp[i],f.vec(1).at(i),1e-6,1e-6) );
    } finally {
      if( f != null ) f.delete();
    }
  }

  // Executes one Rapids expression, prints the result, and runs the DKV sanity check.
  static void exec_str( String str ) {
    Val val = Exec.exec(str);
    switch( val.type() ) {
    case Val.FRM:
      Frame fr = val.getFrame();
      System.out.println(fr);
      checkSaneFrame();
      fr.delete();
      break;
    case Val.NUM:
      System.out.println("num= "+val.getNum());
      checkSaneFrame();
      break;
    case Val.STR:
      System.out.println("str= "+val.getStr());
      checkSaneFrame();
      break;
    default:
      throw water.H2O.fail();
    }
  }

  static void checkSaneFrame() {  assert checkSaneFrame_impl(); }

  // Verifies every Frame in the local DKV still has all of its Vecs present.
  static boolean checkSaneFrame_impl() {
    for( Key k : H2O.localKeySet() ) {
      Value val = Value.STORE_get(k);
      if( val != null && val.isFrame() ) {
        Frame fr = val.get();
        Vec vecs[] = fr.vecs();
        for( int i=0; i<vecs.length; i++ ) {
          Vec v = vecs[i];
          if( DKV.get(v._key) == null ) {
            System.err.println("Frame "+fr._key+" in the DKV, is missing Vec "+v._key+", name="+fr._names[i]);
            return false;
          }
        }
      }
    }
    return true;
  }

  // Replays a captured prefix of the Chicago crime demo as a regression test.
  @Test public void testChicago() {
    String oldtz = Exec.exec("(getTimeZone)").getStr();
    try {
      parse_test_file(Key.make("weather.hex"),"smalldata/chicago/chicagoAllWeather.csv");
      parse_test_file(Key.make( "crimes.hex"),"smalldata/chicago/chicagoCrimes10k.csv.zip");
      String fname = "smalldata/chicago/chicagoCensus.csv";
      File f = find_test_file(fname);
      assert f != null && f.exists():" file not found: " + fname;
      NFSFileVec nfs = NFSFileVec.make(f);
      ParseSetup ps = ParseSetup.guessSetup(new Key[]{nfs._key}, false, 1);
      ps.getColumnTypes()[1] = Vec.T_CAT;  // force 2nd column (community name) to categorical
      ParseDataset.parse(Key.make( "census.hex"), new Key[]{nfs._key}, true, ps);

      exec_str("(tmp= census.hex (colnames= census.hex [0 1 2 3 4 5 6 7 8] [\"Community.Area.Number\" \"COMMUNITY.AREA.NAME\" \"PERCENT.OF.HOUSING.CROWDED\" \"PERCENT.HOUSEHOLDS.BELOW.POVERTY\" \"PERCENT.AGED.16..UNEMPLOYED\" \"PERCENT.AGED.25..WITHOUT.HIGH.SCHOOL.DIPLOMA\" \"PERCENT.AGED.UNDER.18.OR.OVER.64\" \"PER.CAPITA.INCOME.\" \"HARDSHIP.INDEX\"]))");

      exec_str("(tmp= crimes.hex (colnames= crimes.hex [0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21] [\"ID\" \"Case.Number\" \"Date\" \"Block\" \"IUCR\" \"Primary.Type\" \"Description\" \"Location.Description\" \"Arrest\" \"Domestic\" \"Beat\" \"District\" \"Ward\" \"Community.Area\" \"FBI.Code\" \"X.Coordinate\" \"Y.Coordinate\" \"Year\" \"Updated.On\" \"Latitude\" \"Longitude\" \"Location\"]))");

      exec_str("(setTimeZone \"Etc/UTC\")");

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= unary_op_6 (day (tmp= nary_op_5 (as.Date (cols crimes.hex [2]) \"%m/%d/%Y %I:%M:%S %p\")))) \"Day\"))");

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= binary_op_31 (+ (tmp= unary_op_7 (month nary_op_5)) #1)) \"Month\"))");
      Keyed.remove(Key.make("nary_op_30"));

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= binary_op_32 (+ (tmp= binary_op_9 (- (tmp= unary_op_8 (year nary_op_5)) #1900)) #1900)) \"Year\"))");

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= unary_op_10 (week nary_op_5)) \"WeekNum\"))");
      Keyed.remove(Key.make("binary_op_32"));
      Keyed.remove(Key.make("binary_op_31"));
      Keyed.remove(Key.make("unary_op_8"));
      checkSaneFrame();

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= unary_op_11 (dayOfWeek nary_op_5)) \"WeekDay\"))");
      Keyed.remove(Key.make("nfs:\\C:\\Users\\cliffc\\Desktop\\h2o-3\\smalldata\\chicago\\chicagoCrimes10k.csv.zip"));

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= unary_op_12 (hour nary_op_5)) \"HourOfDay\"))");

      exec_str("(tmp= crimes.hex (append crimes.hex (tmp= nary_op_16 (ifelse (tmp= binary_op_15 (| (tmp= binary_op_13 (== unary_op_11 \"Sun\")) (tmp= binary_op_14 (== unary_op_11 \"Sat\")))) 1 0)) \"Weekend\"))");

      // Season is incorrectly assigned in the original chicago demo; picks up the Weekend flag
      exec_str("(tmp= crimes.hex (append crimes.hex nary_op_16 \"Season\"))");

      // Standard "head of 10 rows" pattern for printing
      exec_str("(tmp= subset_33 (rows crimes.hex [0:10]))");
      Keyed.remove(Key.make("subset_33"));

      Keyed.remove(Key.make("subset_33"));
      Keyed.remove(Key.make("unary_op_29"));
      Keyed.remove(Key.make("nary_op_28"));
      Keyed.remove(Key.make("nary_op_27"));
      Keyed.remove(Key.make("nary_op_26"));
      Keyed.remove(Key.make("binary_op_25"));
      Keyed.remove(Key.make("binary_op_24"));
      Keyed.remove(Key.make("binary_op_23"));
      Keyed.remove(Key.make("binary_op_22"));
      Keyed.remove(Key.make("binary_op_21"));
      Keyed.remove(Key.make("binary_op_20"));
      Keyed.remove(Key.make("binary_op_19"));
      Keyed.remove(Key.make("binary_op_18"));
      Keyed.remove(Key.make("binary_op_17"));
      Keyed.remove(Key.make("nary_op_16"));
      Keyed.remove(Key.make("binary_op_15"));
      Keyed.remove(Key.make("binary_op_14"));
      Keyed.remove(Key.make("binary_op_13"));
      Keyed.remove(Key.make("unary_op_12"));
      Keyed.remove(Key.make("unary_op_11"));
      Keyed.remove(Key.make("unary_op_10"));
      Keyed.remove(Key.make("binary_op_9"));
      Keyed.remove(Key.make("unary_op_8"));
      Keyed.remove(Key.make("unary_op_7"));
      Keyed.remove(Key.make("unary_op_6"));
      Keyed.remove(Key.make("nary_op_5"));
      checkSaneFrame();

      // Standard "head of 10 rows" pattern for printing
      exec_str("(tmp= subset_34 (rows crimes.hex [0:10]))");
      Keyed.remove(Key.make("subset_34"));

      exec_str("(tmp= census.hex (colnames= census.hex [0 1 2 3 4 5 6 7 8] [\"Community.Area\" \"COMMUNITY.AREA.NAME\" \"PERCENT.OF.HOUSING.CROWDED\" \"PERCENT.HOUSEHOLDS.BELOW.POVERTY\" \"PERCENT.AGED.16..UNEMPLOYED\" \"PERCENT.AGED.25..WITHOUT.HIGH.SCHOOL.DIPLOMA\" \"PERCENT.AGED.UNDER.18.OR.OVER.64\" \"PER.CAPITA.INCOME.\" \"HARDSHIP.INDEX\"]))");
      Keyed.remove(Key.make("subset_34"));

      exec_str("(tmp= subset_35 (cols crimes.hex [-3]))");
      exec_str("(tmp= subset_36 (cols weather.hex [-1]))");
      exec_str("(tmp= subset_36 (colnames= subset_36 [0 1 2 3 4 5] [\"Month\" \"Day\" \"Year\" \"maxTemp\" \"meanTemp\" \"minTemp\"]))");

      Keyed.remove(Key.make("crimes.hex"));
      Keyed.remove(Key.make("weather.hex"));

      // nary_op_37 = merge( X Y ); Vecs in X & nary_op_37 shared
      exec_str("(tmp= nary_op_37 (merge subset_35 census.hex TRUE FALSE))");

      // nary_op_38 = merge( nary_op_37 subset_36); Vecs in nary_op_38 and nary_pop_37 and X shared
      exec_str("(tmp= subset_41 (rows (tmp= nary_op_38 (merge nary_op_37 subset_36 TRUE FALSE)) (tmp= binary_op_40 (<= (tmp= nary_op_39 (h2o.runif nary_op_38 30792152736.5179)) #0.8))))");

      // Standard "head of 10 rows" pattern for printing
      exec_str("(tmp= subset_44 (rows subset_41 [0:10]))");
      Keyed.remove(Key.make("subset_44"));

      Keyed.remove(Key.make("subset_44"));
      Keyed.remove(Key.make("binary_op_40"));
      Keyed.remove(Key.make("nary_op_37"));

      exec_str("(tmp= subset_43 (rows nary_op_38 (tmp= binary_op_42 (> nary_op_39 #0.8))))");

      // Chicago demo continues on past, but this is all I've captured for now
      checkSaneFrame();

    } finally {
      Exec.exec("(setTimeZone \""+oldtz+"\")"); // Restore time zone (which is global, and will affect following tests)
      for( String s : new String[]{"weather.hex","crimes.hex","census.hex",
                                   "nary_op_5", "unary_op_6", "unary_op_7", "unary_op_8", "binary_op_9",
                                   "unary_op_10", "unary_op_11", "unary_op_12", "binary_op_13", "binary_op_14",
                                   "binary_op_15", "nary_op_16", "binary_op_17", "binary_op_18", "binary_op_19",
                                   "binary_op_20", "binary_op_21", "binary_op_22", "binary_op_23", "binary_op_24",
                                   "binary_op_25", "nary_op_26", "nary_op_27", "nary_op_28", "unary_op_29",
                                   "binary_op_30", "binary_op_31", "binary_op_32", "subset_33", "subset_34",
                                   "subset_35", "subset_36", "nary_op_37", "nary_op_38", "nary_op_39",
                                   "binary_op_40", "subset_41", "binary_op_42", "subset_43", "subset_44", } )
        Keyed.remove(Key.make(s));
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.highlight; import org.apache.lucene.analysis.CachingTokenFilter; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.search.highlight.*; import org.apache.lucene.search.highlight.Formatter; import org.apache.lucene.search.vectorhighlight.*; import org.apache.lucene.util.AttributeSource.State; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.HighlightParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.core.PluginInfo; import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrCore; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.DocIterator; import org.apache.solr.search.DocList; import org.apache.solr.search.SolrIndexSearcher; 
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.StringReader;
import java.util.*;

/**
 *
 * @since solr 1.3
 */
public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized
{

  // NOTE(review): public mutable static logger; conventionally this would be
  // private static final — left unchanged for compatibility.
  public static Logger log = LoggerFactory.getLogger(DefaultSolrHighlighter.class);

  private SolrCore solrCore;

  public DefaultSolrHighlighter() {
  }

  public DefaultSolrHighlighter(SolrCore solrCore) {
    this.solrCore = solrCore;
  }

  // Registers all configured highlighting plugins, installing defaults under both
  // the "" and null keys so lookups with no per-field override always resolve.
  @Override
  public void init(PluginInfo info) {
    formatters.clear();
    encoders.clear();
    fragmenters.clear();
    fragListBuilders.clear();
    fragmentsBuilders.clear();
    boundaryScanners.clear();

    // Load the fragmenters
    SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter") , fragmenters,SolrFragmenter.class,null);
    if (frag == null) frag = new GapFragmenter();
    fragmenters.put("", frag);
    fragmenters.put(null, frag);

    // Load the formatters
    SolrFormatter fmt = solrCore.initPlugins(info.getChildren("formatter"), formatters,SolrFormatter.class,null);
    if (fmt == null) fmt = new HtmlFormatter();
    formatters.put("", fmt);
    formatters.put(null, fmt);

    // Load the encoders (original comment said "formatters" — copy/paste slip)
    SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders,SolrEncoder.class,null);
    if (enc == null) enc = new DefaultEncoder();
    encoders.put("", enc);
    encoders.put(null, enc);

    // Load the FragListBuilders
    SolrFragListBuilder fragListBuilder = solrCore.initPlugins(info.getChildren("fragListBuilder"),
        fragListBuilders, SolrFragListBuilder.class, null );
    if( fragListBuilder == null ) fragListBuilder = new SimpleFragListBuilder();
    fragListBuilders.put( "", fragListBuilder );
    fragListBuilders.put( null, fragListBuilder );

    // Load the FragmentsBuilders
    SolrFragmentsBuilder fragsBuilder = solrCore.initPlugins(info.getChildren("fragmentsBuilder"),
        fragmentsBuilders, SolrFragmentsBuilder.class, null);
    if( fragsBuilder == null ) fragsBuilder = new ScoreOrderFragmentsBuilder();
    fragmentsBuilders.put( "", fragsBuilder );
    fragmentsBuilders.put( null, fragsBuilder );

    // Load the BoundaryScanners
    SolrBoundaryScanner boundaryScanner = solrCore.initPlugins(info.getChildren("boundaryScanner"),
        boundaryScanners, SolrBoundaryScanner.class, null);
    if(boundaryScanner == null) boundaryScanner = new SimpleBoundaryScanner();
    boundaryScanners.put("", boundaryScanner);
    boundaryScanners.put(null, boundaryScanner);

    initialized = true;
  }
  //just for back-compat with the deprecated method
  private boolean initialized = false;

  // Deprecated init path: installs only the hard-coded defaults (no configured plugins).
  @Override
  @Deprecated
  public void initalize( SolrConfig config) {
    if (initialized) return;
    SolrFragmenter frag = new GapFragmenter();
    fragmenters.put("", frag);
    fragmenters.put(null, frag);

    SolrFormatter fmt = new HtmlFormatter();
    formatters.put("", fmt);
    formatters.put(null, fmt);

    SolrEncoder enc = new DefaultEncoder();
    encoders.put("", enc);
    encoders.put(null, enc);

    SolrFragListBuilder fragListBuilder = new SimpleFragListBuilder();
    fragListBuilders.put( "", fragListBuilder );
    fragListBuilders.put( null, fragListBuilder );

    SolrFragmentsBuilder fragsBuilder = new ScoreOrderFragmentsBuilder();
    fragmentsBuilders.put( "", fragsBuilder );
    fragmentsBuilders.put( null, fragsBuilder );

    SolrBoundaryScanner boundaryScanner = new SimpleBoundaryScanner();
    boundaryScanners.put("", boundaryScanner);
    boundaryScanners.put(null, boundaryScanner);
  }

  /**
   * Return a phrase {@link org.apache.lucene.search.highlight.Highlighter} appropriate for this field.
   * @param query The current Query
   * @param fieldName The name of the field
   * @param request The current SolrQueryRequest
   * @param tokenStream document text CachingTokenStream
   * @throws IOException If there is a low-level I/O error.
   */
  protected Highlighter getPhraseHighlighter(Query query, String fieldName, SolrQueryRequest request, CachingTokenFilter tokenStream) throws IOException {
    SolrParams params = request.getParams();
    Highlighter highlighter = null;

    highlighter = new Highlighter(
        getFormatter(fieldName, params),
        getEncoder(fieldName, params),
        getSpanQueryScorer(query, fieldName, tokenStream, request));

    highlighter.setTextFragmenter(getFragmenter(fieldName, params));

    return highlighter;
  }

  /**
   * Return a {@link org.apache.lucene.search.highlight.Highlighter} appropriate for this field.
   * @param query The current Query
   * @param fieldName The name of the field
   * @param request The current SolrQueryRequest
   */
  protected Highlighter getHighlighter(Query query, String fieldName, SolrQueryRequest request) {
    SolrParams params = request.getParams();
    Highlighter highlighter = new Highlighter(
        getFormatter(fieldName, params),
        getEncoder(fieldName, params),
        getQueryScorer(query, fieldName, request));
    highlighter.setTextFragmenter(getFragmenter(fieldName, params));
    return highlighter;
  }

  /**
   * Return a {@link org.apache.lucene.search.highlight.QueryScorer} suitable for this Query and field.
   * @param query The current query
   * @param tokenStream document text CachingTokenStream
   * @param fieldName The name of the field
   * @param request The SolrQueryRequest
   */
  private QueryScorer getSpanQueryScorer(Query query, String fieldName, TokenStream tokenStream, SolrQueryRequest request) {
    boolean reqFieldMatch = request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false);
    Boolean highlightMultiTerm = request.getParams().getBool(HighlightParams.HIGHLIGHT_MULTI_TERM, true);
    // NOTE(review): this null check looks dead — getBool with a default is expected
    // to return a non-null value; kept as-is.
    if(highlightMultiTerm == null) {
      highlightMultiTerm = false;
    }
    QueryScorer scorer;
    if (reqFieldMatch) {
      scorer = new QueryScorer(query, fieldName);
    }
    else {
      scorer = new QueryScorer(query, null);
    }
    scorer.setExpandMultiTermQuery(highlightMultiTerm);
    return scorer;
  }

  /**
   * Return a {@link org.apache.lucene.search.highlight.Scorer} suitable for this Query and field.
   * @param query The current query
   * @param fieldName The name of the field
   * @param request The SolrQueryRequest
   */
  private Scorer getQueryScorer(Query query, String fieldName, SolrQueryRequest request) {
     boolean reqFieldMatch = request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false);
     if (reqFieldMatch) {
       return new QueryTermScorer(query, request.getSearcher().getIndexReader(), fieldName);
     }
     else {
       return new QueryTermScorer(query);
     }
  }

  /**
   * Return the max number of snippets for this field. If this has not
   * been configured for this field, fall back to the configured default
   * or the solr default.
   * @param fieldName The name of the field
   * @param params The params controlling Highlighting
   */
  protected int getMaxSnippets(String fieldName, SolrParams params) {
     return params.getFieldInt(fieldName, HighlightParams.SNIPPETS,1);
  }

  /**
   * Return whether adjacent fragments should be merged.
   * @param fieldName The name of the field
   * @param params The params controlling Highlighting
   */
  protected boolean isMergeContiguousFragments(String fieldName, SolrParams params){
    return params.getFieldBool(fieldName, HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, false);
  }

  /**
   * Return a {@link org.apache.lucene.search.highlight.Formatter} appropriate for this field. If a formatter
   * has not been configured for this field, fall back to the configured
   * default or the solr default ({@link org.apache.lucene.search.highlight.SimpleHTMLFormatter}).
   *
   * @param fieldName The name of the field
   * @param params The params controlling Highlighting
   * @return An appropriate {@link org.apache.lucene.search.highlight.Formatter}.
   */
  protected Formatter getFormatter(String fieldName, SolrParams params )
  {
    String str = params.getFieldParam( fieldName, HighlightParams.FORMATTER );
    SolrFormatter formatter = formatters.get( str );
    if( formatter == null ) {
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown formatter: "+str );
    }
    return formatter.getFormatter( fieldName, params );
  }

  /**
   * Return an {@link org.apache.lucene.search.highlight.Encoder} appropriate for this field. If an encoder
   * has not been configured for this field, fall back to the configured
   * default or the solr default ({@link org.apache.lucene.search.highlight.DefaultEncoder}).
   *
   * @param fieldName The name of the field
   * @param params The params controlling Highlighting
   * @return An appropriate {@link org.apache.lucene.search.highlight.Encoder}.
   */
  protected Encoder getEncoder(String fieldName, SolrParams params){
    String str = params.getFieldParam( fieldName, HighlightParams.ENCODER );
    SolrEncoder encoder = encoders.get( str );
    if( encoder == null ) {
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown encoder: "+str );
    }
    return encoder.getEncoder( fieldName, params );
  }

  /**
   * Return a {@link org.apache.lucene.search.highlight.Fragmenter} appropriate for this field. If a fragmenter
   * has not been configured for this field, fall back to the configured
   * default or the solr default ({@link GapFragmenter}).
   *
   * @param fieldName The name of the field
   * @param params The params controlling Highlighting
   * @return An appropriate {@link org.apache.lucene.search.highlight.Fragmenter}.
   */
  protected Fragmenter getFragmenter(String fieldName, SolrParams params)
  {
    String fmt = params.getFieldParam( fieldName, HighlightParams.FRAGMENTER );
    SolrFragmenter frag = fragmenters.get( fmt );
    if( frag == null ) {
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmenter: "+fmt );
    }
    return frag.getFragmenter( fieldName, params );
  }

  protected FragListBuilder getFragListBuilder( String fieldName, SolrParams params ){
    String flb = params.getFieldParam( fieldName, HighlightParams.FRAG_LIST_BUILDER );
    SolrFragListBuilder solrFlb = fragListBuilders.get( flb );
    if( solrFlb == null ){
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown fragListBuilder: " + flb );
    }
    return solrFlb.getFragListBuilder( params );
  }

  protected FragmentsBuilder getFragmentsBuilder( String fieldName, SolrParams params ){
    BoundaryScanner bs = getBoundaryScanner(fieldName, params);
    return getSolrFragmentsBuilder( fieldName, params ).getFragmentsBuilder( params, bs );
  }

  private SolrFragmentsBuilder getSolrFragmentsBuilder( String fieldName, SolrParams params ){
    String fb = params.getFieldParam( fieldName, HighlightParams.FRAGMENTS_BUILDER );
    SolrFragmentsBuilder solrFb = fragmentsBuilders.get( fb );
    if( solrFb == null ){
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmentsBuilder: " + fb );
    }
    return solrFb;
  }

  private BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params){
    String bs = params.getFieldParam(fieldName, HighlightParams.BOUNDARY_SCANNER);
    SolrBoundaryScanner solrBs = boundaryScanners.get(bs);
    if(solrBs == null){
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown boundaryScanner: " + bs);
    }
    return solrBs.getBoundaryScanner(fieldName, params);
  }

  /**
   * Generates a list of Highlighted query fragments for each item in a list
   * of documents, or returns null if highlighting is disabled.
   *
   * @param docs query results
   * @param query the query
   * @param req the current request
   * @param defaultFields default list of fields to summarize
   *
   * @return NamedList containing a NamedList for each document, which in
   * turns contains sets (field, summary) pairs.
   */
  @Override
  @SuppressWarnings("unchecked")
  public NamedList<Object> doHighlighting(DocList docs, Query query, SolrQueryRequest req, String[] defaultFields) throws IOException {
    SolrParams params = req.getParams();
    if (!isHighlightingEnabled(params))
        return null;

    SolrIndexSearcher searcher = req.getSearcher();
    IndexSchema schema = searcher.getSchema();
    NamedList fragments = new SimpleOrderedMap();
    String[] fieldNames = getHighlightFields(query, req, defaultFields);
    Set<String> fset = new HashSet<String>();

    {
      // pre-fetch documents using the Searcher's doc cache
      for(String f : fieldNames) { fset.add(f); }
      // fetch unique key if one exists.
      SchemaField keyField = schema.getUniqueKeyField();
      if(null != keyField)
        fset.add(keyField.getName());
    }

    // get FastVectorHighlighter instance out of the processing loop
    FastVectorHighlighter fvh = new FastVectorHighlighter(
        // FVH cannot process hl.usePhraseHighlighter parameter per-field basis
        params.getBool( HighlightParams.USE_PHRASE_HIGHLIGHTER, true ),
        // FVH cannot process hl.requireFieldMatch parameter per-field basis
        params.getBool( HighlightParams.FIELD_MATCH, false ) );
    fvh.setPhraseLimit(params.getInt(HighlightParams.PHRASE_LIMIT, Integer.MAX_VALUE));
    FieldQuery fieldQuery = fvh.getFieldQuery( query, searcher.getIndexReader() );

    // Highlight each document
    DocIterator iterator = docs.iterator();
    for (int i = 0; i < docs.size(); i++) {
      int docId = iterator.nextDoc();
      Document doc = searcher.doc(docId, fset);
      NamedList docSummaries = new SimpleOrderedMap();
      for (String fieldName : fieldNames) {
        fieldName = fieldName.trim();
        // Dispatch per field: term-vector-backed fields go through FVH, the rest
        // through the standard Highlighter.
        if( useFastVectorHighlighter( params, schema, fieldName ) )
          doHighlightingByFastVectorHighlighter( fvh, fieldQuery, req, docSummaries, docId, doc, fieldName );
        else
          doHighlightingByHighlighter( query, req, docSummaries, docId, doc, fieldName );
      }
      String printId = schema.printableUniqueKey(doc);
      fragments.add(printId == null ? null : printId, docSummaries);
    }
    return fragments;
  }

  /*
   * If fieldName is undefined, this method returns false, then
   * doHighlightingByHighlighter() will do nothing for the field.
*/ private boolean useFastVectorHighlighter( SolrParams params, IndexSchema schema, String fieldName ){ SchemaField schemaField = schema.getFieldOrNull( fieldName ); if( schemaField == null ) return false; boolean useFvhParam = params.getFieldBool( fieldName, HighlightParams.USE_FVH, false ); if( !useFvhParam ) return false; boolean termPosOff = schemaField.storeTermPositions() && schemaField.storeTermOffsets(); if( !termPosOff ) { log.warn( "Solr will use Highlighter instead of FastVectorHighlighter because {} field does not store TermPositions and TermOffsets.", fieldName ); } return termPosOff; } private void doHighlightingByHighlighter( Query query, SolrQueryRequest req, NamedList docSummaries, int docId, Document doc, String fieldName ) throws IOException { final SolrIndexSearcher searcher = req.getSearcher(); final IndexSchema schema = searcher.getSchema(); // TODO: Currently in trunk highlighting numeric fields is broken (Lucene) - // so we disable them until fixed (see LUCENE-3080)! // BEGIN: Hack final SchemaField schemaField = schema.getFieldOrNull(fieldName); if (schemaField != null && ( (schemaField.getType() instanceof org.apache.solr.schema.TrieField) || (schemaField.getType() instanceof org.apache.solr.schema.TrieDateField) )) return; // END: Hack SolrParams params = req.getParams(); IndexableField[] docFields = doc.getFields(fieldName); List<String> listFields = new ArrayList<String>(); for (IndexableField field : docFields) { listFields.add(field.stringValue()); } // preserve order of values in a multiValued list boolean preserveMulti = params.getFieldBool(fieldName, HighlightParams.PRESERVE_MULTI, false); String[] docTexts = (String[]) listFields.toArray(new String[listFields.size()]); // according to Document javadoc, doc.getValues() never returns null. 
check empty instead of null if (docTexts.length == 0) return; TokenStream tstream = null; int numFragments = getMaxSnippets(fieldName, params); boolean mergeContiguousFragments = isMergeContiguousFragments(fieldName, params); String[] summaries = null; List<TextFragment> frags = new ArrayList<TextFragment>(); TermOffsetsTokenStream tots = null; // to be non-null iff we're using TermOffsets optimization TokenStream tvStream = TokenSources.getTokenStreamWithOffsets(searcher.getIndexReader(), docId, fieldName); if (tvStream != null) { tots = new TermOffsetsTokenStream(tvStream); } for (int j = 0; j < docTexts.length; j++) { if( tots != null ) { // if we're using TermOffsets optimization, then get the next // field value's TokenStream (i.e. get field j's TokenStream) from tots: tstream = tots.getMultiValuedTokenStream( docTexts[j].length() ); } else { // fall back to analyzer tstream = createAnalyzerTStream(schema, fieldName, docTexts[j]); } int maxCharsToAnalyze = params.getFieldInt(fieldName, HighlightParams.MAX_CHARS, Highlighter.DEFAULT_MAX_CHARS_TO_ANALYZE); Highlighter highlighter; if (Boolean.valueOf(req.getParams().get(HighlightParams.USE_PHRASE_HIGHLIGHTER, "true"))) { if (maxCharsToAnalyze < 0) { tstream = new CachingTokenFilter(tstream); } else { tstream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze)); } // get highlighter highlighter = getPhraseHighlighter(query, fieldName, req, (CachingTokenFilter) tstream); // after highlighter initialization, reset tstream since construction of highlighter already used it tstream.reset(); } else { // use "the old way" highlighter = getHighlighter(query, fieldName, req); } if (maxCharsToAnalyze < 0) { highlighter.setMaxDocCharsToAnalyze(docTexts[j].length()); } else { highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze); } try { TextFragment[] bestTextFragments = highlighter.getBestTextFragments(tstream, docTexts[j], mergeContiguousFragments, numFragments); for (int k = 0; k < 
bestTextFragments.length; k++) { if (preserveMulti) { if (bestTextFragments[k] != null) { frags.add(bestTextFragments[k]); } } else { if ((bestTextFragments[k] != null) && (bestTextFragments[k].getScore() > 0)) { frags.add(bestTextFragments[k]); } } } } catch (InvalidTokenOffsetsException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } } // sort such that the fragments with the highest score come first if(!preserveMulti){ Collections.sort(frags, new Comparator<TextFragment>() { @Override public int compare(TextFragment arg0, TextFragment arg1) { return Math.round(arg1.getScore() - arg0.getScore()); } }); } // convert fragments back into text // TODO: we can include score and position information in output as snippet attributes if (frags.size() > 0) { ArrayList<String> fragTexts = new ArrayList<String>(); for (TextFragment fragment: frags) { if (preserveMulti) { if (fragment != null) { fragTexts.add(fragment.toString()); } } else { if ((fragment != null) && (fragment.getScore() > 0)) { fragTexts.add(fragment.toString()); } } if (fragTexts.size() >= numFragments && !preserveMulti) break; } summaries = fragTexts.toArray(new String[0]); if (summaries.length > 0) docSummaries.add(fieldName, summaries); } // no summeries made, copy text from alternate field if (summaries == null || summaries.length == 0) { alternateField( docSummaries, params, doc, fieldName ); } } private void doHighlightingByFastVectorHighlighter( FastVectorHighlighter highlighter, FieldQuery fieldQuery, SolrQueryRequest req, NamedList docSummaries, int docId, Document doc, String fieldName ) throws IOException { SolrParams params = req.getParams(); SolrFragmentsBuilder solrFb = getSolrFragmentsBuilder( fieldName, params ); String[] snippets = highlighter.getBestFragments( fieldQuery, req.getSearcher().getIndexReader(), docId, fieldName, params.getFieldInt( fieldName, HighlightParams.FRAGSIZE, 100 ), params.getFieldInt( fieldName, HighlightParams.SNIPPETS, 1 ), 
getFragListBuilder( fieldName, params ), getFragmentsBuilder( fieldName, params ), solrFb.getPreTags( params, fieldName ), solrFb.getPostTags( params, fieldName ), getEncoder( fieldName, params ) ); if( snippets != null && snippets.length > 0 ) docSummaries.add( fieldName, snippets ); else alternateField( docSummaries, params, doc, fieldName ); } private void alternateField( NamedList docSummaries, SolrParams params, Document doc, String fieldName ){ String alternateField = params.getFieldParam(fieldName, HighlightParams.ALTERNATE_FIELD); if (alternateField != null && alternateField.length() > 0) { IndexableField[] docFields = doc.getFields(alternateField); List<String> listFields = new ArrayList<String>(); for (IndexableField field : docFields) { if (field.binaryValue() == null) listFields.add(field.stringValue()); } String[] altTexts = listFields.toArray(new String[listFields.size()]); if (altTexts != null && altTexts.length > 0){ Encoder encoder = getEncoder(fieldName, params); int alternateFieldLen = params.getFieldInt(fieldName, HighlightParams.ALTERNATE_FIELD_LENGTH,0); List<String> altList = new ArrayList<String>(); int len = 0; for( String altText: altTexts ){ if( alternateFieldLen <= 0 ){ altList.add(encoder.encodeText(altText)); } else{ altList.add( len + altText.length() > alternateFieldLen ? encoder.encodeText(new String(altText.substring( 0, alternateFieldLen - len ))) : encoder.encodeText(altText) ); len += altText.length(); if( len >= alternateFieldLen ) break; } } docSummaries.add(fieldName, altList); } } } private TokenStream createAnalyzerTStream(IndexSchema schema, String fieldName, String docText) throws IOException { TokenStream tstream; TokenStream ts = schema.getAnalyzer().tokenStream(fieldName, new StringReader(docText)); ts.reset(); tstream = new TokenOrderingFilter(ts, 10); return tstream; } } /** Orders Tokens in a window first by their startOffset ascending. * endOffset is currently ignored. 
 * This is meant to work around fickleness in the highlighter only. It
 * can mess up token positions and should not be used for indexing or querying.
 */
final class TokenOrderingFilter extends TokenFilter {

  // Maximum number of tokens buffered while re-ordering by startOffset.
  private final int windowSize;
  // Buffered tokens; kept sorted by startOffset ascending via the insertion below.
  private final LinkedList<OrderedToken> queue = new LinkedList<OrderedToken>();
  // Set once the underlying stream is exhausted.
  private boolean done=false;
  private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);

  protected TokenOrderingFilter(TokenStream input, int windowSize) {
    super(input);
    this.windowSize = windowSize;
  }

  @Override
  public boolean incrementToken() throws IOException {
    // Fill the window, inserting each new token at its sorted position.
    while (!done && queue.size() < windowSize) {
      if (!input.incrementToken()) {
        done = true;
        break;
      }
      // reverse iterating for better efficiency since we know the
      // list is already sorted, and most token start offsets will be too.
      ListIterator<OrderedToken> iter = queue.listIterator(queue.size());
      while(iter.hasPrevious()) {
        if (offsetAtt.startOffset() >= iter.previous().startOffset) {
          // insertion will be before what next() would return (what
          // we just compared against), so move back one so the insertion
          // will be after.
          iter.next();
          break;
        }
      }
      OrderedToken ot = new OrderedToken();
      ot.state = captureState();
      ot.startOffset = offsetAtt.startOffset();
      iter.add(ot);
    }
    // Emit the lowest-offset buffered token, or signal end of stream.
    if (queue.isEmpty()) {
      return false;
    } else {
      restoreState(queue.removeFirst().state);
      return true;
    }
  }
}

// for TokenOrderingFilter, so it can easily sort by startOffset
class OrderedToken {
  State state;
  int startOffset;
}

// Slices one token stream (with offsets) into per-field-value streams; used by the
// TermOffsets optimization in doHighlightingByHighlighter.
class TermOffsetsTokenStream {

  TokenStream bufferedTokenStream = null;
  OffsetAttribute bufferedOffsetAtt;
  // One token read ahead of the current value boundary, or null.
  State bufferedToken;
  int bufferedStartOffset;
  int bufferedEndOffset;
  // Offset window [startOffset, endOffset] of the value currently being streamed.
  int startOffset;
  int endOffset;

  public TermOffsetsTokenStream( TokenStream tstream ){
    bufferedTokenStream = tstream;
    bufferedOffsetAtt = bufferedTokenStream.addAttribute(OffsetAttribute.class);
    startOffset = 0;
    bufferedToken = null;
  }

  public TokenStream getMultiValuedTokenStream( final int length ){
    endOffset = startOffset + length;
    return new MultiValuedStream(length);
  }

  final class MultiValuedStream extends TokenStream {
    private final int length;
    OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);

    MultiValuedStream(int length) {
      super(bufferedTokenStream.cloneAttributes());
      this.length = length;
    }

    @Override
    public boolean incrementToken() throws IOException {
      while( true ){
        if( bufferedToken == null ) {
          if (!bufferedTokenStream.incrementToken()) return false;
          bufferedToken = bufferedTokenStream.captureState();
          bufferedStartOffset = bufferedOffsetAtt.startOffset();
          bufferedEndOffset = bufferedOffsetAtt.endOffset();
        }
        if( startOffset <= bufferedStartOffset && bufferedEndOffset <= endOffset ){
          // Token lies inside the current value's window: emit it rebased to value-local offsets.
          restoreState(bufferedToken);
          bufferedToken = null;
          offsetAtt.setOffset( offsetAtt.startOffset() - startOffset, offsetAtt.endOffset() - startOffset );
          return true;
        } else if( bufferedEndOffset > endOffset ){
          // Token belongs to a later value: advance the window (+1 for the value separator)
          // and keep the token buffered for the next stream.
          startOffset += length + 1;
          return false;
        }
        // Token straddles/precedes the window: drop it and read the next one.
        bufferedToken = null;
      }
    }
  };
};
/* * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.identity.governance.internal.service.impl.notification; import static org.mockito.Mockito.when; import static org.powermock.api.mockito.PowerMockito.mockStatic; import static org.testng.Assert.assertTrue; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertEquals; import org.apache.commons.lang.StringUtils; import org.mockito.InjectMocks; import org.mockito.Matchers; import org.mockito.Mock; import org.powermock.core.classloader.annotations.PrepareForTest; import org.testng.IObjectFactory; import org.testng.annotations.ObjectFactory; import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; import org.testng.annotations.DataProvider; import org.wso2.carbon.identity.core.util.IdentityTenantUtil; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.governance.IdentityGovernanceUtil; import org.wso2.carbon.identity.governance.IdentityMgtConstants; import org.wso2.carbon.identity.governance.exceptions.notiification.NotificationChannelManagerException; import org.wso2.carbon.identity.governance.internal.IdentityMgtServiceDataHolder; import org.wso2.carbon.identity.governance.service.notification.NotificationChannels; import org.wso2.carbon.user.api.UserRealm; import org.wso2.carbon.user.core.UserStoreManager; import 
org.wso2.carbon.user.core.service.RealmService;

import java.util.HashMap;

/**
 * Class contains test cases for DefaultNotificationChannelManager.
 */
@PrepareForTest({ IdentityUtil.class, IdentityTenantUtil.class, IdentityGovernanceUtil.class,
        IdentityMgtServiceDataHolder.class })
public class DefaultNotificationChannelManagerTest {

    /**
     * DefaultNotificationChannelManager instance.
     */
    @InjectMocks
    private DefaultNotificationChannelManager defaultNotificationChannelManager;

    /**
     * Claims map with channel related attributes.
     */
    private HashMap<String, String> channelClaims;

    @Mock
    UserStoreManager userStoreManager;

    @Mock
    RealmService realmService;

    @Mock
    UserRealm userRealm;

    @Mock
    IdentityMgtServiceDataHolder identityMgtServiceDataHolder;

    @ObjectFactory
    public IObjectFactory getObjectFactory() {

        return new org.powermock.modules.testng.PowerMockObjectFactory();
    }

    // Scenario-type markers used by the data providers below.
    private static final String SUCCESSFUL_CHANNEL_RESOLVE = "Successful channel resolve";
    private static final String ERROR_IN_CHANNEL_RESOLVE = "Error while resolving the channel";
    private static final String CHANNEL_RESOLVING_NOT_ENABLED = "Channel resolving not enabled";

    /**
     * Initializing variables.
     */
    @BeforeTest
    public void setup() {

        defaultNotificationChannelManager = new DefaultNotificationChannelManager();
        // Get the claims map with the corresponding channel claims and values.
        channelClaims = getDefaultClaimsMap(
                new String[] {
                        NotificationChannels.EMAIL_CHANNEL.getClaimUri(),
                        NotificationChannels.SMS_CHANNEL.getClaimUri()
                },
                new String[] { "test@wso2.com", "1234567890" });
    }

    /**
     * Testing for supported notification channels
     */
    @Test
    public void testIsSupportedChannel() {

        // SMS notification channel.
        boolean isSupportedChannel1 = defaultNotificationChannelManager.isSupportedChannel("SMS");
        assertTrue(isSupportedChannel1);
        // EMAIL notification channel.
boolean isSupportedChannel2 = defaultNotificationChannelManager.isSupportedChannel("EMAIL"); assertTrue(isSupportedChannel2); // Unsupported channel notification channel. boolean isSupportedChannel3 = defaultNotificationChannelManager.isSupportedChannel("CALL"); assertFalse(isSupportedChannel3); // Case sensitivity test. boolean isSupportedChannel4 = defaultNotificationChannelManager.isSupportedChannel("email"); assertTrue(isSupportedChannel4); } /** * Test resolve notification channel for the user by resolving the channel claims in the request. * * @param channelClaims Channel claims * @param defaultNotificationChannel Default notification channel * @param expectedChannel Expected resolved channel * @param scenario Channel resolving scenario. * @param scenarioType Channel resolving scenario type * @param expectedErrorCode Expected error * @throws NotificationChannelManagerException Error while resolving the channel */ @Test(dataProvider = "channelClaimsForChannelResolveUsingUserClaimsMap") public void testResolveCommunicationChannel(HashMap<String, String> channelClaims, String defaultNotificationChannel, String expectedChannel, String scenario, String scenarioType, String expectedErrorCode) throws NotificationChannelManagerException { // Test Meta data. String testUser = "testUser"; String testTenantDomain = "testTenantDomain"; String testUserstoreDomain = "testTenantDomain"; // Convert the empty strings to null objects, since null objects cannot be passed as method arguments. if (StringUtils.isEmpty(expectedChannel)) { expectedChannel = null; } if (SUCCESSFUL_CHANNEL_RESOLVE.equals(scenarioType)) { // Mock configurations. 
mockSelfRegistrationConfigurations(defaultNotificationChannel, true); String resolvedChannel = defaultNotificationChannelManager .resolveCommunicationChannel(testUser, testTenantDomain, testUserstoreDomain, channelClaims); assertEquals(resolvedChannel, expectedChannel, scenario); } else if (ERROR_IN_CHANNEL_RESOLVE.equals(scenarioType)) { try { // Mock configurations. mockSelfRegistrationConfigurations(defaultNotificationChannel, true); String resolvedChannel = defaultNotificationChannelManager .resolveCommunicationChannel(testUser, testTenantDomain, testUserstoreDomain, channelClaims); assertEquals(resolvedChannel, expectedChannel, scenario); } catch (NotificationChannelManagerException e) { assertEquals(e.getErrorCode(), expectedErrorCode, scenario); } } else if (CHANNEL_RESOLVING_NOT_ENABLED.equals(scenarioType)) { // Mock configurations. mockSelfRegistrationConfigurations(defaultNotificationChannel, false); String resolvedChannel = defaultNotificationChannelManager .resolveCommunicationChannel(testUser, testTenantDomain, testUserstoreDomain, channelClaims); assertEquals(resolvedChannel, expectedChannel, scenario); } } /** * Test resolving notification channel using the user. * * @param channelClaims Channel claims * @param defaultNotificationChannel Default notification channel * @param expectedChannel Expected resolved channel * @param scenario Channel resolving scenario. 
* @param scenarioType Channel resolving scenario type * @param expectedErrorCode Expected error * @throws NotificationChannelManagerException Error while resolving the channel */ @Test(dataProvider = "channelClaimsForChannelResolveUsingUser") public void testResolveCommunicationChannelWithUsername(HashMap<String, String> channelClaims, String defaultNotificationChannel, String expectedChannel, String scenario, String scenarioType, String expectedErrorCode) throws Exception { // Meta Data String testUser = "testUser"; String testTenantDomain = "testTenantDomain"; String testUserstoreDomain = "testTenantDomain"; // Mock classes and configurations. mockUserstoreManager(channelClaims); mockSelfRegistrationConfigurations(defaultNotificationChannel, true); // Convert the empty strings to null objects, since null objects cannot be passed as method arguments. if (StringUtils.isEmpty(expectedChannel)) { expectedChannel = null; } if (SUCCESSFUL_CHANNEL_RESOLVE.equals(scenarioType)) { String resolvedChannel = defaultNotificationChannelManager .resolveCommunicationChannel(testUser, testTenantDomain, testUserstoreDomain); assertEquals(resolvedChannel, expectedChannel, scenario); } else if (ERROR_IN_CHANNEL_RESOLVE.equals(scenarioType)) { try { String resolvedChannel = defaultNotificationChannelManager .resolveCommunicationChannel(testUser, testTenantDomain, testUserstoreDomain); assertEquals(resolvedChannel, expectedChannel, scenario); } catch (NotificationChannelManagerException e) { assertEquals(e.getErrorCode(), expectedErrorCode, scenario); } } else if (CHANNEL_RESOLVING_NOT_ENABLED.equals(scenarioType)){ mockSelfRegistrationConfigurations(defaultNotificationChannel, false); String resolvedChannel = defaultNotificationChannelManager .resolveCommunicationChannel(testUser, testTenantDomain, testUserstoreDomain); assertEquals(resolvedChannel, expectedChannel, scenario); } } /** * Mock self registration configurations. 
* * @param defaultNotificationChannel Default notification channel * @param enableResolving Enable channel resolving */ private void mockSelfRegistrationConfigurations(String defaultNotificationChannel, boolean enableResolving) { mockStatic(IdentityUtil.class); when(IdentityUtil.getProperty(IdentityMgtConstants.NotificationChannelConstants.DEFAULT_NOTIFICATION_CHANNEL)) .thenReturn(defaultNotificationChannel); when(IdentityUtil.getProperty(IdentityMgtConstants.PropertyConfig.RESOLVE_NOTIFICATION_CHANNELS)) .thenReturn(Boolean.toString(enableResolving)); } /** * Mock userstore manager to get user claims. * * @param claimsMap User claims * @throws Exception Error while mocking Userstoremanager */ private void mockUserstoreManager(HashMap<String, String> claimsMap) throws Exception { mockStatic(IdentityTenantUtil.class); when(IdentityTenantUtil.getTenantId(Matchers.anyString())).thenReturn(-1234); mockStatic(IdentityMgtServiceDataHolder.class); when(IdentityMgtServiceDataHolder.getInstance()).thenReturn(identityMgtServiceDataHolder); when(identityMgtServiceDataHolder.getRealmService()).thenReturn(realmService); when(realmService.getTenantUserRealm(Matchers.anyInt())).thenReturn(userRealm); when(userRealm.getUserStoreManager()).thenReturn(userStoreManager); when(userStoreManager .getUserClaimValues(Matchers.anyString(), Matchers.any(String[].class), Matchers.anyString())) .thenReturn(claimsMap); } /** * Method to create a claims map. * NOTE: the length of both arguments needs to be the same. * * @param keys Key set * @param values Values * @return Map of claims */ private HashMap<String, String> getDefaultClaimsMap(String[] keys, String[] values) { HashMap<String, String> channelClaims = new HashMap<>(); for (int counter = 0; counter < keys.length; counter++) { channelClaims.put(keys[counter], values[counter]); } return channelClaims; } /** * Contains user data related to the channels to resolve the notification channel using user claims in the request. 
* * @return Object[][] */ @DataProvider(name = "channelClaimsForChannelResolveUsingUserClaimsMap") private Object[][] buildChannelClaimSet1() { // Preferred Channel EMAIL with email claim values. HashMap<String, String> channelClaimsMap1 = new HashMap<>(channelClaims); channelClaimsMap1.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM, NotificationChannels.EMAIL_CHANNEL.getChannelType()); String defaultChannel1 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String expectedChannel1 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String message1 = "SCENARIO: User specified the Preferred Channel as EMAIL and has email claim values : "; // Preferred Channel SMS with SMS claim values. HashMap<String, String> channelClaimsMap2 = new HashMap<>(channelClaims); channelClaimsMap2.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM, NotificationChannels.SMS_CHANNEL.getChannelType()); String defaultChannel2 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String expectedChannel2 = NotificationChannels.SMS_CHANNEL.getChannelType(); String message2 = "SCENARIO: User specified the Preferred Channel as SMS and has mobile claim values : "; // User has not specified a preferred channel, but have values for email claim. HashMap<String, String> channelClaimsMap3 = new HashMap<>(channelClaims); channelClaimsMap3.remove(NotificationChannels.SMS_CHANNEL.getClaimUri()); String defaultChannel3 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String expectedChannel3 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String message3 = "SCENARIO: User has no preferred channel, has email claims only : "; // User has not specified a preferred channel, but have values for mobile claim. 
HashMap<String, String> channelClaimsMap4 = new HashMap<>(channelClaims); channelClaimsMap4.remove(NotificationChannels.EMAIL_CHANNEL.getClaimUri()); String defaultChannel4 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String expectedChannel4 = NotificationChannels.SMS_CHANNEL.getChannelType(); String message4 = "SCENARIO: User has no preferred channel, has mobile claims only : "; // User has not specified a preferred channel, but provided both email and mobile values. // Default notification channel in EMAIL. HashMap<String, String> channelClaimsMap5 = new HashMap<>(channelClaims); String defaultChannel5 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String expectedChannel5 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String message5 = "SCENARIO: User has both email and mobile claims. Default channel EMAIL : "; // User has not specified a preferred channel, but provided both email and mobile values. // Default notification channel in SMS. HashMap<String, String> channelClaimsMap6 = new HashMap<>(channelClaims); String defaultChannel6 = NotificationChannels.SMS_CHANNEL.getChannelType(); String expectedChannel6 = NotificationChannels.SMS_CHANNEL.getChannelType(); String message6 = "SCENARIO: User has both email and mobile claims. Default channel SMS : "; // User has not specified a preferred channel or notification channels. HashMap<String, String> channelClaimsMap7 = new HashMap<>(); String defaultChannel7 = NotificationChannels.SMS_CHANNEL.getChannelType(); String message7 = "SCENARIO: User has not specified any notification channels : "; /* ERROR SCENARIOS */ // No claim values for the preferred channel. 
HashMap<String, String> channelClaimsMap8 = new HashMap<>(); channelClaimsMap8.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM, NotificationChannels.EMAIL_CHANNEL.getChannelType()); String defaultChannel8 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String message8 = "SCENARIO: User specified the Preferred Channel as EMAIL, but no email claim values : "; String expectedErrorCode8 = IdentityMgtConstants.ErrorMessages. ERROR_CODE_NO_CLAIM_MATCHED_FOR_PREFERRED_CHANNEL.getCode(); // Invalid notification channel as the preferred notification channel. HashMap<String, String> channelClaimsMap9 = new HashMap<>(channelClaims); channelClaimsMap9.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM, "new notification channel"); String defaultChannel9 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String message9 = "SCENARIO: User specified an invalid notification channel type : "; String expectedErrorCode9 = IdentityMgtConstants.ErrorMessages. ERROR_CODE_UNSUPPORTED_PREFERRED_CHANNEL.getCode(); /* CHANNEL RESOLVING CONFIG NOT ENABLED */ // Default notification channel in EMAIL. HashMap<String, String> channelClaimsMap10 = new HashMap<>(channelClaims); String defaultChannel10 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String expectedChannel10 = NotificationChannels.EMAIL_CHANNEL.getChannelType(); String message10 = "SCENARIO: Configs not enabled. Default Channel " + defaultChannel10 + " : "; // Default notification channel in SMS. HashMap<String, String> channelClaimsMap11 = new HashMap<>(channelClaims); String defaultChannel11 = NotificationChannels.SMS_CHANNEL.getChannelType(); String expectedChannel11 = NotificationChannels.SMS_CHANNEL.getChannelType(); String message11 = "SCENARIO: Configs not enabled. 
Default Channel " + defaultChannel11 + " : ";

        return new Object[][] {
                { channelClaimsMap1, defaultChannel1, expectedChannel1, message1, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap2, defaultChannel2, expectedChannel2, message2, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap3, defaultChannel3, expectedChannel3, message3, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap4, defaultChannel4, expectedChannel4, message4, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap5, defaultChannel5, expectedChannel5, message5, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap6, defaultChannel6, expectedChannel6, message6, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap7, defaultChannel7, StringUtils.EMPTY, message7, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap8, defaultChannel8, StringUtils.EMPTY, message8, ERROR_IN_CHANNEL_RESOLVE, expectedErrorCode8 },
                { channelClaimsMap9, defaultChannel9, StringUtils.EMPTY, message9, ERROR_IN_CHANNEL_RESOLVE, expectedErrorCode9 },
                { channelClaimsMap10, defaultChannel10, expectedChannel10, message10, CHANNEL_RESOLVING_NOT_ENABLED, null },
                { channelClaimsMap11, defaultChannel11, expectedChannel11, message11, CHANNEL_RESOLVING_NOT_ENABLED, null }
        };
    }

    /**
     * Contains user data related to the channels to resolve the notification channel using username in the request.
     *
     * <p>Each row has the shape: { claims map, default channel, expected resolved channel,
     * scenario description, expected resolution status, expected error code (or {@code null}) }.
     * NOTE(review): row shape inferred from the identically shaped rows in the sibling provider —
     * confirm against the consuming test method's signature.
     *
     * @return Object[][]
     */
    @DataProvider(name = "channelClaimsForChannelResolveUsingUser")
    private Object[][] buildChannelClaimSet2() {

        // Preferred Channel EMAIL with email claim values.
        HashMap<String, String> channelClaimsMap1 = new HashMap<>(channelClaims);
        channelClaimsMap1.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM,
                NotificationChannels.EMAIL_CHANNEL.getChannelType());
        String defaultChannel1 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String expectedChannel1 = defaultChannel1;
        String message1 = "SCENARIO: User specified the Preferred Channel as EMAIL and has email claim values : ";

        // Preferred Channel SMS with SMS claim values.
        HashMap<String, String> channelClaimsMap2 = new HashMap<>(channelClaims);
        channelClaimsMap2.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM,
                NotificationChannels.SMS_CHANNEL.getChannelType());
        String defaultChannel2 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String expectedChannel2 = NotificationChannels.SMS_CHANNEL.getChannelType();
        String message2 = "SCENARIO: User specified the Preferred Channel as SMS and has mobile claim values : ";

        // User has not specified a preferred channel, but have values for email claim.
        HashMap<String, String> channelClaimsMap3 = new HashMap<>(channelClaims);
        channelClaimsMap3.remove(NotificationChannels.SMS_CHANNEL.getClaimUri());
        String defaultChannel3 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String expectedChannel3 = defaultChannel3;
        String message3 = "SCENARIO: User has no preferred channel, has email claims only : ";

        // User has not specified a preferred channel, but have values for mobile claim.
        HashMap<String, String> channelClaimsMap4 = new HashMap<>(channelClaims);
        channelClaimsMap4.remove(NotificationChannels.EMAIL_CHANNEL.getClaimUri());
        String defaultChannel4 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String expectedChannel4 = NotificationChannels.SMS_CHANNEL.getChannelType();
        String message4 = "SCENARIO: User has no preferred channel, has mobile claims only : ";

        // User has not specified a preferred channel, but provided both email and mobile values.
        // Default notification channel in EMAIL.
        HashMap<String, String> channelClaimsMap5 = new HashMap<>(channelClaims);
        String defaultChannel5 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String expectedChannel5 = defaultChannel5;
        String message5 = "SCENARIO: User has both email and mobile claims. Default channel EMAIL : ";

        // User has not specified a preferred channel, but provided both email and mobile values.
        // Default notification channel in SMS.
        HashMap<String, String> channelClaimsMap6 = new HashMap<>(channelClaims);
        String defaultChannel6 = NotificationChannels.SMS_CHANNEL.getChannelType();
        String expectedChannel6 = defaultChannel6;
        String message6 = "SCENARIO: User has both email and mobile claims. Default channel SMS : ";

        /* ERROR SCENARIOS */
        // Preferred channel as EMAIL but no claims for email channel.
        HashMap<String, String> channelClaimsMap7 = new HashMap<>(channelClaims);
        channelClaimsMap7.remove(NotificationChannels.EMAIL_CHANNEL.getClaimUri());
        channelClaimsMap7.put(IdentityMgtConstants.Claim.PREFERED_CHANNEL_CLAIM,
                NotificationChannels.EMAIL_CHANNEL.getChannelType());
        String defaultChannel7 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String message7 = "SCENARIO: User specified the Preferred Channel as EMAIL, but no email claim values : ";
        String expectedErrorCode7 =
                IdentityMgtConstants.ErrorMessages.ERROR_CODE_NO_CLAIM_MATCHED_FOR_PREFERRED_CHANNEL.getCode();

        // User has no channel claims.
        HashMap<String, String> channelClaimsMap8 = new HashMap<>();
        String defaultChannel8 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String message8 = "SCENARIO: User no channel claims : ";
        String expectedErrorCode8 =
                IdentityMgtConstants.ErrorMessages.ERROR_CODE_NO_NOTIFICATION_CHANNELS.getCode();

        /* CHANNEL RESOLVING CONFIG NOT ENABLED */
        // Default notification channel in EMAIL.
        HashMap<String, String> channelClaimsMap9 = new HashMap<>(channelClaims);
        String defaultChannel9 = NotificationChannels.EMAIL_CHANNEL.getChannelType();
        String expectedChannel9 = defaultChannel9;
        String message9 = "SCENARIO: Configs not enabled. Default Channel " + defaultChannel9 + " : ";

        // Default notification channel in SMS.
        HashMap<String, String> channelClaimsMap10 = new HashMap<>(channelClaims);
        String defaultChannel10 = NotificationChannels.SMS_CHANNEL.getChannelType();
        String expectedChannel10 = defaultChannel10;
        String message10 = "SCENARIO: Configs not enabled. Default Channel " + defaultChannel10 + " : ";

        return new Object[][] {
                { channelClaimsMap1, defaultChannel1, expectedChannel1, message1, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap2, defaultChannel2, expectedChannel2, message2, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap3, defaultChannel3, expectedChannel3, message3, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap4, defaultChannel4, expectedChannel4, message4, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap5, defaultChannel5, expectedChannel5, message5, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap6, defaultChannel6, expectedChannel6, message6, SUCCESSFUL_CHANNEL_RESOLVE, null },
                { channelClaimsMap7, defaultChannel7, StringUtils.EMPTY, message7, ERROR_IN_CHANNEL_RESOLVE, expectedErrorCode7 },
                { channelClaimsMap8, defaultChannel8, StringUtils.EMPTY, message8, ERROR_IN_CHANNEL_RESOLVE, expectedErrorCode8 },
                { channelClaimsMap9, defaultChannel9, expectedChannel9, message9, CHANNEL_RESOLVING_NOT_ENABLED, null },
                { channelClaimsMap10, defaultChannel10, expectedChannel10, message10, CHANNEL_RESOLVING_NOT_ENABLED, null }
        };
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.persistence.snapshot;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeJobResultPolicy;
import org.apache.ignite.compute.ComputeTaskAdapter;
import org.apache.ignite.internal.GridComponent;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState;
import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStore;
import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager;
import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetaStorage;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.PagePartitionMetaIO;
import org.apache.ignite.internal.processors.cache.verify.PartitionHashRecordV2;
import org.apache.ignite.internal.processors.cache.verify.PartitionKeyV2;
import org.apache.ignite.internal.processors.cache.verify.VerifyBackupPartitionsTaskV2;
import org.apache.ignite.internal.processors.task.GridInternal;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.LoggerResource;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import static org.apache.ignite.internal.pagemem.PageIdAllocator.FLAG_DATA;
import static org.apache.ignite.internal.pagemem.PageIdAllocator.FLAG_IDX;
import static org.apache.ignite.internal.pagemem.PageIdAllocator.INDEX_PARTITION;
import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.OWNING;
import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.fromOrdinal;
import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.cacheGroupName;
import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.cachePartitionFiles;
import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.partId;
import static org.apache.ignite.internal.processors.cache.persistence.partstate.GroupPartitionId.getTypeByPartId;
import static org.apache.ignite.internal.processors.cache.verify.IdleVerifyUtility.calculatePartitionHash;
import static org.apache.ignite.internal.processors.cache.verify.IdleVerifyUtility.checkPartitionsPageCrcSum;
import static org.apache.ignite.internal.processors.cache.verify.VerifyBackupPartitionsTaskV2.reduce0;

/**
 * Task for checking snapshot partitions consistency the same way as {@link VerifyBackupPartitionsTaskV2} does.
 * Since snapshot partitions are already stored apart on disk, there is no requirement for upcoming
 * cluster updates to be held back while the check runs.
 */
@GridInternal
public class SnapshotPartitionsVerifyTask
    extends ComputeTaskAdapter<SnapshotPartitionsVerifyTaskArg, SnapshotPartitionsVerifyTaskResult> {
    /** Serial version uid. */
    private static final long serialVersionUID = 0L;

    /** Task argument. Populated in {@link #map} and merged into the result in {@link #reduce}. */
    private final Map<ClusterNode, List<SnapshotMetadata>> metas = new HashMap<>();

    /** Ignite instance. */
    @IgniteInstanceResource
    private IgniteEx ignite;

    /** {@inheritDoc} */
    @Override public @NotNull Map<? extends ComputeJob, ClusterNode> map(
        List<ClusterNode> subgrid,
        @Nullable SnapshotPartitionsVerifyTaskArg arg
    ) throws IgniteException {
        Map<ClusterNode, List<SnapshotMetadata>> clusterMetas = arg.clusterMetadata();

        // All nodes that supplied metadata must still be alive, otherwise the verification is aborted.
        if (!subgrid.containsAll(clusterMetas.keySet())) {
            throw new IgniteSnapshotVerifyException(F.asMap(ignite.localNode(),
                new IgniteException("Some of Ignite nodes left the cluster during the snapshot verification " +
                    "[curr=" + F.viewReadOnly(subgrid, F.node2id()) +
                    ", init=" + F.viewReadOnly(clusterMetas.keySet(), F.node2id()) + ']')));
        }

        Map<ComputeJob, ClusterNode> jobs = new HashMap<>();
        Set<SnapshotMetadata> allMetas = new HashSet<>();
        clusterMetas.values().forEach(allMetas::addAll);

        // Check that every baseline node recorded in the metadata is covered by some metadata file.
        Set<String> missed = null;

        for (SnapshotMetadata meta : allMetas) {
            if (missed == null)
                missed = new HashSet<>(meta.baselineNodes());

            missed.remove(meta.consistentId());

            if (missed.isEmpty())
                break;
        }

        if (!missed.isEmpty()) {
            throw new IgniteSnapshotVerifyException(F.asMap(ignite.localNode(),
                new IgniteException("Some metadata is missing from the snapshot: " + missed)));
        }

        metas.putAll(clusterMetas);

        // Spread the per-metadata verification jobs across the nodes: each pass over clusterMetas
        // assigns at most one remaining metadata unit per node until none are left.
        while (!allMetas.isEmpty()) {
            for (Map.Entry<ClusterNode, List<SnapshotMetadata>> e : clusterMetas.entrySet()) {
                // F.find with the allMetas::remove predicate both locates a not-yet-assigned
                // metadata unit on this node and removes it from the remaining set.
                SnapshotMetadata meta = F.find(e.getValue(), null, allMetas::remove);

                if (meta == null)
                    continue;

                jobs.put(new VisorVerifySnapshotPartitionsJob(meta.snapshotName(), meta.consistentId(),
                    arg.cacheGroupNames()), e.getKey());

                if (allMetas.isEmpty())
                    break;
            }
        }

        return jobs;
    }

    /** {@inheritDoc} */
    @Override public @Nullable SnapshotPartitionsVerifyTaskResult reduce(
        List<ComputeJobResult> results
    ) throws IgniteException {
        return new SnapshotPartitionsVerifyTaskResult(metas, reduce0(results));
    }

    /** {@inheritDoc} */
    @Override public ComputeJobResultPolicy result(ComputeJobResult res, List<ComputeJobResult> rcvd)
        throws IgniteException {
        // Handle all exceptions during the `reduce` operation.
        return ComputeJobResultPolicy.WAIT;
    }

    /** Job that collects update counters of snapshot partitions on the node it executes. */
    private static class VisorVerifySnapshotPartitionsJob extends ComputeJobAdapter {
        /** Serial version uid. */
        private static final long serialVersionUID = 0L;

        /** Ignite instance. */
        @IgniteInstanceResource
        private IgniteEx ignite;

        /** Injected logger. */
        @LoggerResource
        private IgniteLogger log;

        /** Snapshot name to validate. */
        private final String snpName;

        /** Consistent snapshot metadata file name. */
        private final String consId;

        /** Set of cache groups to be checked in the snapshot or {@code empty} to check everything. */
        private final Set<String> rqGrps;

        /**
         * @param snpName Snapshot name to validate.
         * @param consId Consistent snapshot metadata file name.
         * @param rqGrps Set of cache groups to be checked in the snapshot or {@code empty} to check everything.
         */
        public VisorVerifySnapshotPartitionsJob(String snpName, String consId, Collection<String> rqGrps) {
            this.snpName = snpName;
            this.consId = consId;
            this.rqGrps = rqGrps == null ? Collections.emptySet() : new HashSet<>(rqGrps);
        }

        /** {@inheritDoc} */
        @Override public Map<PartitionKeyV2, PartitionHashRecordV2> execute() throws IgniteException {
            IgniteSnapshotManager snpMgr = ignite.context().cache().context().snapshotMgr();

            if (log.isInfoEnabled()) {
                log.info("Verify snapshot partitions procedure has been initiated " +
                    "[snpName=" + snpName + ", consId=" + consId + ']');
            }

            SnapshotMetadata meta = snpMgr.readSnapshotMetadata(snpName, consId);

            // Cache group ids requested for the check (all groups in the snapshot when rqGrps is empty).
            Set<Integer> grps = rqGrps.isEmpty() ? new HashSet<>(meta.partitions().keySet()) :
                rqGrps.stream().map(CU::cacheId).collect(Collectors.toSet());
            Set<File> partFiles = new HashSet<>();

            // Collect partition files for the requested groups and verify none are missing on disk.
            for (File dir : snpMgr.snapshotCacheDirectories(snpName, meta.folderName())) {
                int grpId = CU.cacheId(cacheGroupName(dir));

                if (!grps.remove(grpId))
                    continue;

                Set<Integer> parts = new HashSet<>(meta.partitions().get(grpId));

                for (File part : cachePartitionFiles(dir)) {
                    int partId = partId(part.getName());

                    if (!parts.remove(partId))
                        continue;

                    partFiles.add(part);
                }

                if (!parts.isEmpty()) {
                    throw new IgniteException("Snapshot data doesn't contain required cache group partition " +
                        "[grpId=" + grpId + ", snpName=" + snpName + ", consId=" + consId +
                        ", missed=" + parts + ", meta=" + meta + ']');
                }
            }

            if (!grps.isEmpty()) {
                throw new IgniteException("Snapshot data doesn't contain required cache groups " +
                    "[grps=" + grps + ", snpName=" + snpName + ", consId=" + consId +
                    ", meta=" + meta + ']');
            }

            Map<PartitionKeyV2, PartitionHashRecordV2> res = new ConcurrentHashMap<>();

            // Per-thread direct buffer sized to one snapshot page; reused across partitions.
            ThreadLocal<ByteBuffer> buff = ThreadLocal.withInitial(() -> ByteBuffer.allocateDirect(meta.pageSize())
                .order(ByteOrder.nativeOrder()));

            try {
                GridKernalContext snpCtx = snpMgr.createStandaloneKernalContext(snpName, meta.folderName());

                for (GridComponent comp : snpCtx)
                    comp.start();

                try {
                    // Hash every collected partition file in parallel on the snapshot executor.
                    U.doInParallel(
                        snpMgr.snapshotExecutorService(),
                        partFiles,
                        part -> {
                            String grpName = cacheGroupName(part.getParentFile());
                            int grpId = CU.cacheId(grpName);
                            int partId = partId(part.getName());

                            FilePageStoreManager storeMgr =
                                (FilePageStoreManager)ignite.context().cache().context().pageStore();

                            try (FilePageStore pageStore =
                                     (FilePageStore)storeMgr.getPageStoreFactory(grpId, false)
                                         .createPageStore(getTypeByPartId(partId), part::toPath, val -> { })
                            ) {
                                // Index partition and metastorage: CRC check only, no row hashing.
                                if (partId == INDEX_PARTITION) {
                                    checkPartitionsPageCrcSum(() -> pageStore, INDEX_PARTITION, FLAG_IDX);

                                    return null;
                                }

                                if (grpId == MetaStorage.METASTORAGE_CACHE_ID) {
                                    checkPartitionsPageCrcSum(() -> pageStore, partId, FLAG_DATA);

                                    return null;
                                }

                                // Read the partition meta page (page 0) to get state, counter and size.
                                ByteBuffer pageBuff = buff.get();
                                pageBuff.clear();
                                pageStore.read(0, pageBuff, true);

                                long pageAddr = GridUnsafe.bufferAddress(pageBuff);
                                PagePartitionMetaIO io = PageIO.getPageIO(pageBuff);
                                GridDhtPartitionState partState = fromOrdinal(io.getPartitionState(pageAddr));

                                if (partState != OWNING) {
                                    throw new IgniteCheckedException("Snapshot partitions must be in the OWNING " +
                                        "state only: " + partState);
                                }

                                long updateCntr = io.getUpdateCounter(pageAddr);
                                long size = io.getSize(pageAddr);

                                if (log.isDebugEnabled()) {
                                    log.debug("Partition [grpId=" + grpId
                                        + ", id=" + partId
                                        + ", counter=" + updateCntr
                                        + ", size=" + size + "]");
                                }

                                // Snapshot partitions must always be in OWNING state.
                                // There is no `primary` partitions for snapshot.
                                PartitionKeyV2 key = new PartitionKeyV2(grpId, partId, grpName);

                                PartitionHashRecordV2 hash = calculatePartitionHash(key,
                                    updateCntr,
                                    consId,
                                    GridDhtPartitionState.OWNING,
                                    false,
                                    size,
                                    snpMgr.partitionRowIterator(snpCtx, grpName, partId, pageStore));

                                assert hash != null : "OWNING must have hash: " + key;

                                res.put(key, hash);
                            }
                            catch (IOException e) {
                                throw new IgniteCheckedException(e);
                            }

                            return null;
                        }
                    );
                }
                finally {
                    // Always stop the standalone context components, even on failure.
                    for (GridComponent comp : snpCtx)
                        comp.stop(true);
                }
            }
            catch (IgniteCheckedException e) {
                throw new IgniteException(e);
            }

            return res;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            VisorVerifySnapshotPartitionsJob job = (VisorVerifySnapshotPartitionsJob)o;

            return snpName.equals(job.snpName) && consId.equals(job.consId);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return Objects.hash(snpName, consId);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.pdx.internal; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.apache.geode.InternalGemFireError; import org.apache.geode.cache.client.Pool; import org.apache.geode.cache.client.ServerConnectivityException; import org.apache.geode.cache.client.internal.AddPDXEnumOp; import org.apache.geode.cache.client.internal.AddPDXTypeOp; import org.apache.geode.cache.client.internal.ExecutablePool; import org.apache.geode.cache.client.internal.GetPDXEnumByIdOp; import org.apache.geode.cache.client.internal.GetPDXEnumsOp; import org.apache.geode.cache.client.internal.GetPDXIdForEnumOp; import org.apache.geode.cache.client.internal.GetPDXIdForTypeOp; import org.apache.geode.cache.client.internal.GetPDXTypeByIdOp; import org.apache.geode.cache.client.internal.GetPDXTypesOp; import org.apache.geode.cache.client.internal.PoolImpl; import org.apache.geode.cache.wan.GatewaySender; import org.apache.geode.internal.cache.InternalCache; import 
org.apache.geode.internal.cache.PoolManagerImpl; import org.apache.geode.internal.logging.LogService; public class ClientTypeRegistration implements TypeRegistration { private static final Logger logger = LogService.getLogger(); private final InternalCache cache; public ClientTypeRegistration(InternalCache cache) { this.cache = cache; // See GEODE-5771: Even when set, PDX persistence is internally ignored. if (cache.getPdxPersistent() || StringUtils.isNotBlank(cache.getPdxDiskStore())) { logger.warn("PDX persistence is not supported on client side."); } } @Override public int defineType(PdxType newType) { Collection<Pool> pools = getAllPools(); ServerConnectivityException lastException = null; int newTypeId = -1; for (Pool pool : pools) { try { newTypeId = GetPDXIdForTypeOp.execute((ExecutablePool) pool, newType); newType.setTypeId(newTypeId); copyTypeToOtherPools(newType, newTypeId, pool); return newTypeId; } catch (ServerConnectivityException e) { // ignore, try the next pool. lastException = e; } } throw returnCorrectExceptionForFailure(pools, newTypeId, lastException); } /** * Send a type to all pools. This used to make sure that any types * used by this client make it to all clusters this client is connected to. 
*/ private void copyTypeToOtherPools(PdxType newType, int newTypeId, Pool exception) { Collection<Pool> pools = getAllPoolsExcept(exception); for (Pool pool : pools) { try { sendTypeToPool(newType, newTypeId, pool); } catch (ServerConnectivityException e) { logger.debug("Received an exception sending pdx type to pool {}, {}", pool, e.getMessage(), e); } } } private Collection<Pool> getAllPoolsExcept(Pool pool) { Collection<Pool> targetPools = new ArrayList<>(getAllPools()); targetPools.remove(pool); return targetPools; } private void sendTypeToPool(PdxType type, int id, Pool pool) { try { AddPDXTypeOp.execute((ExecutablePool) pool, id, type); } catch (ServerConnectivityException serverConnectivityException) { logger.debug("Received an exception sending pdx type to pool {}, {}", pool, serverConnectivityException.getMessage(), serverConnectivityException); throw serverConnectivityException; } } @Override public PdxType getType(int typeId) { Collection<Pool> pools = getAllPools(); ServerConnectivityException lastException = null; for (Pool pool : pools) { try { PdxType type = GetPDXTypeByIdOp.execute((ExecutablePool) pool, typeId); if (type != null) { return type; } } catch (ServerConnectivityException e) { logger.debug("Received an exception getting pdx type from pool {}, {}", pool, e.getMessage(), e); // ignore, try the next pool. 
lastException = e; } } if (lastException != null) { throw lastException; } else { throw returnCorrectExceptionForFailure(pools, typeId, lastException); } } private Collection<Pool> getAllPools() { Collection<Pool> pools = PoolManagerImpl.getPMI().getMap().values(); for (Iterator<Pool> itr = pools.iterator(); itr.hasNext();) { PoolImpl pool = (PoolImpl) itr.next(); if (pool.isUsedByGateway()) { itr.remove(); } } if (pools.isEmpty()) { if (this.cache.isClosed()) { throw cache.getCacheClosedException("PDX detected cache was closed"); } throw cache.getCacheClosedException( "Client pools have been closed so the PDX type registry is not available."); } return pools; } @Override public void addRemoteType(int typeId, PdxType type) { throw new UnsupportedOperationException("Clients will not be asked to add remote types"); } public int getLastAllocatedTypeId() { throw new UnsupportedOperationException("Clients does not keep track of last allocated id"); } @Override public void initialize() { // do nothing } @Override public void gatewaySenderStarted(GatewaySender gatewaySender) { // do nothing } @Override public void creatingPersistentRegion() { // do nothing } @Override public void creatingPool() { // do nothing } @Override public int getEnumId(Enum<?> v) { EnumInfo enumInfo = new EnumInfo(v); return processEnumInfoForEnumId(enumInfo); } private int processEnumInfoForEnumId(EnumInfo enumInfo) { Collection<Pool> pools = getAllPools(); ServerConnectivityException lastException = null; for (Pool pool : pools) { try { int result = GetPDXIdForEnumOp.execute((ExecutablePool) pool, enumInfo); copyEnumToOtherPools(enumInfo, result, pool); return result; } catch (ServerConnectivityException e) { // ignore, try the next pool. lastException = e; } } throw returnCorrectExceptionForFailure(pools, -1, lastException); } /** * Send an enum to all pools. This used to make sure that any enums * used by this client make it to all clusters this client is connected to. 
*/ private void copyEnumToOtherPools(EnumInfo enumInfo, int newTypeId, Pool exception) { Collection<Pool> pools = getAllPoolsExcept(exception); for (Pool pool : pools) { try { sendEnumIdToPool(enumInfo, newTypeId, pool); } catch (ServerConnectivityException e) { logger.debug("Received an exception sending pdx enum to pool {}, {}", pool, e.getMessage()); } } } private void sendEnumIdToPool(EnumInfo enumInfo, int id, Pool pool) { try { AddPDXEnumOp.execute((ExecutablePool) pool, id, enumInfo); } catch (ServerConnectivityException serverConnectivityException) { logger.debug("Received an exception sending pdx type to pool {}, {}", pool, serverConnectivityException.getMessage(), serverConnectivityException); throw serverConnectivityException; } } @Override public void addRemoteEnum(int enumId, EnumInfo newInfo) { throw new UnsupportedOperationException("Clients will not be asked to add remote enums"); } @Override public int defineEnum(EnumInfo newInfo) { return processEnumInfoForEnumId(newInfo); } @Override public EnumInfo getEnumById(int enumId) { Collection<Pool> pools = getAllPools(); ServerConnectivityException lastException = null; for (Pool pool : pools) { try { EnumInfo result = GetPDXEnumByIdOp.execute((ExecutablePool) pool, enumId); if (result != null) { return result; } } catch (ServerConnectivityException e) { logger.debug("Received an exception getting pdx type from pool {}, {}", pool, e.getMessage(), e); // ignore, try the next pool. 
lastException = e; } } throw returnCorrectExceptionForFailure(pools, enumId, lastException); } @SuppressWarnings({"unchecked", "serial"}) @Override public Map<Integer, PdxType> types() { Collection<Pool> pools = getAllPools(); Map<Integer, PdxType> types = new HashMap<>(); for (Pool p : pools) { try { types.putAll(GetPDXTypesOp.execute((ExecutablePool) p)); } catch (Exception e) { e.printStackTrace(); } } return types; } @SuppressWarnings({"unchecked", "serial"}) @Override public Map<Integer, EnumInfo> enums() { Collection<Pool> pools = getAllPools(); Map<Integer, EnumInfo> enums = new HashMap<>(); for (Pool p : pools) { enums.putAll(GetPDXEnumsOp.execute((ExecutablePool) p)); } return enums; } @Override public PdxType getPdxTypeForField(String fieldName, String className) { for (Object value : types().values()) { if (value instanceof PdxType) { PdxType pdxType = (PdxType) value; if (pdxType.getClassName().equals(className) && pdxType.getPdxField(fieldName) != null) { return pdxType; } } } return null; } @Override public Set<PdxType> getPdxTypesForClassName(String className) { Set<PdxType> result = new HashSet<>(); for (Object value : types().values()) { if (value instanceof PdxType) { PdxType pdxType = (PdxType) value; if (pdxType.getClassName().equals(className)) { result.add(pdxType); } } } return result; } @Override public boolean isClient() { return true; } /** * Add an type as part of an import. The type is sent to all pools in case * the pools are connected to different clusters, but if one pool fails * the import will fail. */ @Override public void addImportedType(int typeId, PdxType importedType) { Collection<Pool> pools = getAllPools(); for (Pool pool : pools) { try { sendTypeToPool(importedType, typeId, pool); } catch (ServerConnectivityException e) { throw returnCorrectExceptionForFailure(pools, typeId, e); } } } /** * Add an enum as part of an import. 
The enum is sent to all pools in case * the pools are connected to different clusters, but if one pool fails * the import will fail. */ @Override public void addImportedEnum(int enumId, EnumInfo importedInfo) { Collection<Pool> pools = getAllPools(); for (Pool pool : pools) { try { sendEnumIdToPool(importedInfo, enumId, pool); } catch (ServerConnectivityException e) { throw returnCorrectExceptionForFailure(pools, enumId, e); } } } private RuntimeException returnCorrectExceptionForFailure(final Collection<Pool> pools, final int typeId, final ServerConnectivityException lastException) { if (lastException != null) { throw lastException; } else { throw new InternalGemFireError("Unable to determine PDXType for id " + typeId); } } @Override public int getLocalSize() { return 0; } }
package me.jp.sticker.stickerview.view; import android.annotation.TargetApi; import android.content.Context; import android.content.res.Resources; import android.graphics.RectF; import android.graphics.Typeface; import android.os.Build; import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; import android.util.AttributeSet; import android.util.SparseIntArray; import android.util.TypedValue; import android.widget.EditText; /** * Created by varsovski on 29-Oct-15. */ public class AutoFitEditText extends EditText { private static final int NO_LINE_LIMIT = -1; private final RectF _availableSpaceRect = new RectF(); private final SparseIntArray _textCachedSizes = new SparseIntArray(); private final SizeTester _sizeTester; private float _maxTextSize; private float _spacingMult = 1.0f; private float _spacingAdd = 0.0f; private Float _minTextSize; private int _widthLimit; private int _maxLines; private boolean _enableSizeCache = true; private boolean _initiallized = false; private TextPaint paint; private interface SizeTester { /** * AutoFitEditText * * @param suggestedSize Size of text to be tested * @param availableSpace available space in which text must fit * @return an integer < 0 if after applying {@code suggestedSize} to * text, it takes less space than {@code availableSpace}, > 0 * otherwise */ public int onTestSize(int suggestedSize, RectF availableSpace); } public AutoFitEditText(final Context context) { this(context, null, 0); } public AutoFitEditText(final Context context, final AttributeSet attrs) { this(context, attrs, 0); } public AutoFitEditText(final Context context, final AttributeSet attrs, final int defStyle) { super(context, attrs, defStyle); // using the minimal recommended font size _minTextSize = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, 12, getResources().getDisplayMetrics()); _maxTextSize = getTextSize(); if (_maxLines == 0) // no value was assigned during construction _maxLines = NO_LINE_LIMIT; // 
prepare size tester: _sizeTester = new SizeTester() { final RectF textRect = new RectF(); @TargetApi(Build.VERSION_CODES.JELLY_BEAN) @Override public int onTestSize(final int suggestedSize, final RectF availableSPace) { paint.setTextSize(suggestedSize); final String text = getText().toString(); final boolean singleline = getMaxLines() == 1; if (singleline) { textRect.bottom = paint.getFontSpacing(); textRect.right = paint.measureText(text); } else { final StaticLayout layout = new StaticLayout(text, paint, _widthLimit, Layout.Alignment.ALIGN_NORMAL, _spacingMult, _spacingAdd, true); if (getMaxLines() != NO_LINE_LIMIT && layout.getLineCount() > getMaxLines()) return 1; textRect.bottom = layout.getHeight(); int maxWidth = -1; for (int i = 0; i < layout.getLineCount(); i++) if (maxWidth < layout.getLineWidth(i)) maxWidth = (int) layout.getLineWidth(i); textRect.right = maxWidth; } textRect.offsetTo(0, 0); if (availableSPace.contains(textRect)) // may be too small, don't worry we will find the best match return -1; // else, too big return 1; } }; _initiallized = true; } @Override public void setTypeface(final Typeface tf) { if (paint == null) paint = new TextPaint(getPaint()); paint.setTypeface(tf); super.setTypeface(tf); } @Override public void setTextSize(final float size) { _maxTextSize = size; _textCachedSizes.clear(); adjustTextSize(); } @Override public void setMaxLines(final int maxlines) { super.setMaxLines(maxlines); _maxLines = maxlines; reAdjust(); } @Override public int getMaxLines() { return _maxLines; } @Override public void setSingleLine() { super.setSingleLine(); _maxLines = 1; reAdjust(); } @Override public void setSingleLine(final boolean singleLine) { super.setSingleLine(singleLine); if (singleLine) _maxLines = 1; else _maxLines = NO_LINE_LIMIT; reAdjust(); } @Override public void setLines(final int lines) { super.setLines(lines); _maxLines = lines; reAdjust(); } @Override public void setTextSize(final int unit, final float size) { final Context c = 
getContext(); Resources r; if (c == null) r = Resources.getSystem(); else r = c.getResources(); _maxTextSize = TypedValue.applyDimension(unit, size, r.getDisplayMetrics()); _textCachedSizes.clear(); adjustTextSize(); } @Override public void setLineSpacing(final float add, final float mult) { super.setLineSpacing(add, mult); _spacingMult = mult; _spacingAdd = add; } /** * Set the lower text size limit and invalidate the view * * @param */ public void setMinTextSize(final Float minTextSize) { _minTextSize = minTextSize; reAdjust(); } public Float get_minTextSize() { return _minTextSize; } private void reAdjust() { adjustTextSize(); } private void adjustTextSize() { if (!_initiallized) return; final int startSize = Math.round(_minTextSize); final int heightLimit = getMeasuredHeight() - getCompoundPaddingBottom() - getCompoundPaddingTop(); _widthLimit = getMeasuredWidth() - getCompoundPaddingLeft() - getCompoundPaddingRight(); if (_widthLimit <= 0) return; _availableSpaceRect.right = _widthLimit; _availableSpaceRect.bottom = heightLimit; super.setTextSize( TypedValue.COMPLEX_UNIT_PX, efficientTextSizeSearch(startSize, (int) _maxTextSize, _sizeTester, _availableSpaceRect)); } /** * Enables or disables size caching, enabling it will improve performance * where you are animating a value inside TextView. This stores the font * size against getText().length() Be careful though while enabling it as 0 * takes more space than 1 on some fonts and so on. * * @param enable enable font size caching */ public void setEnableSizeCache(final boolean enable) { _enableSizeCache = enable; _textCachedSizes.clear(); adjustTextSize(); } private int efficientTextSizeSearch(final int start, final int end, final SizeTester sizeTester, final RectF availableSpace) { if (!_enableSizeCache) return binarySearch(start, end, sizeTester, availableSpace); final String text = getText().toString(); final int key = text == null ? 
0 : text.length(); int size = _textCachedSizes.get(key); if (size != 0) return size; size = binarySearch(start, end, sizeTester, availableSpace); _textCachedSizes.put(key, size); return size; } private int binarySearch(final int start, final int end, final SizeTester sizeTester, final RectF availableSpace) { int lastBest = start; int lo = start; int hi = end - 1; int mid = 0; while (lo <= hi) { mid = lo + hi >>> 1; final int midValCmp = sizeTester.onTestSize(mid, availableSpace); if (midValCmp < 0) { lastBest = lo; lo = mid + 1; } else if (midValCmp > 0) { hi = mid - 1; lastBest = hi; } else return mid; } // make sure to return last best // this is what should always be returned return lastBest; } @Override protected void onTextChanged(final CharSequence text, final int start, final int before, final int after) { super.onTextChanged(text, start, before, after); reAdjust(); } @Override protected void onSizeChanged(final int width, final int height, final int oldwidth, final int oldheight) { _textCachedSizes.clear(); super.onSizeChanged(width, height, oldwidth, oldheight); if (width != oldwidth || height != oldheight) reAdjust(); } }