gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2005 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.common;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.List;

import org.drools.RuleBaseConfiguration;
import org.drools.core.util.AbstractHashTable.FieldIndex;
import org.drools.core.util.LeftTupleIndexHashTable;
import org.drools.core.util.LeftTupleList;
import org.drools.core.util.LinkedList;
import org.drools.core.util.LinkedListEntry;
import org.drools.core.util.RightTupleIndexHashTable;
import org.drools.core.util.RightTupleList;
import org.drools.reteoo.BetaMemory;
import org.drools.reteoo.LeftTuple;
import org.drools.reteoo.LeftTupleMemory;
import org.drools.reteoo.RightTupleMemory;
import org.drools.rule.ContextEntry;
import org.drools.rule.IndexableConstraint;
import org.drools.rule.constraint.MvelConstraint;
import org.drools.spi.BetaNodeFieldConstraint;
import org.drools.spi.Constraint;

/**
 * Holds the ordered set of beta-node field constraints for a beta node,
 * keeping the indexable constraints at the front of the internal linked
 * list so they can back hash-indexed left/right tuple memories.
 *
 * Invariant maintained throughout: the first {@code indexed + 1} entries
 * of {@link #constraints} are the indexed constraints, in index order;
 * all remaining entries are evaluated one-by-one by the
 * {@code isAllowedCached*} methods.
 */
public class DefaultBetaConstraints
    implements
    BetaConstraints {

    private static final long serialVersionUID = 510l;

    // Ordered constraint list: indexed constraints first, then the rest.
    private LinkedList constraints;

    // Zero-based position of the LAST indexed constraint, or -1 when no
    // constraint is indexed (so the index count is always indexed + 1).
    private int        indexed;

    public DefaultBetaConstraints() {

    }

    public DefaultBetaConstraints(final BetaNodeFieldConstraint[] constraints,
                                  final RuleBaseConfiguration conf) {
        this( constraints,
              conf,
              false );
    }

    /**
     * Decides whether a composite (multi-field) index may be built over the
     * given constraints, and as a side effect reorders the array so that a
     * non-unification indexable constraint (if any) comes first.
     *
     * Rules encoded here:
     *  1) If there is 1 or more unification restrictions it cannot be composite.
     *  2) Ensures any non unification restrictions are first.
     *
     * @param constraints candidate constraints; MAY BE REORDERED in place
     * @return true when no unification constraint was found, i.e. a
     *         composite index is allowed
     */
    public static boolean compositeAllowed(BetaNodeFieldConstraint[] constraints) {
        int firstUnification = -1;
        int firstNonUnification = -1;
        for ( int i = 0, length = constraints.length; i < length; i++ ) {
            if ( DefaultBetaConstraints.isIndexable( constraints[i] ) ) {
                final boolean isUnification = ((IndexableConstraint) constraints[i]).isUnification();
                if ( isUnification && firstUnification == -1 ) {
                    firstUnification = i;
                } else if ( !isUnification && firstNonUnification == -1 ) {
                    firstNonUnification = i;
                }
            }
            // Both positions found: no need to scan further.
            if ( firstUnification != -1 && firstNonUnification != -1 ) {
                break;
            }
        }

        if ( firstNonUnification != -1 && firstNonUnification > 0 ) {
            // Make sure a non-unification indexable constraint is first.
            swap( constraints,
                  0,
                  firstNonUnification );
        }

        return (firstUnification == -1);
    }

    /** Swaps the elements at positions p1 and p2 in place. */
    public static void swap(final BetaNodeFieldConstraint[] constraints,
                            final int p1,
                            final int p2) {
        final BetaNodeFieldConstraint temp = constraints[p2];
        constraints[p2] = constraints[p1];
        constraints[p1] = temp;
    }

    /**
     * Builds the constraint list, moving indexable constraints to the front
     * (up to the configured composite key depth) and appending the rest.
     *
     * @param constraints     constraints to order; may be reordered by
     *                        {@link #compositeAllowed}
     * @param conf            supplies the composite key depth and the
     *                        left/right index-memory switches
     * @param disableIndexing when true, no constraint is treated as indexed
     */
    public DefaultBetaConstraints(final BetaNodeFieldConstraint[] constraints,
                                  final RuleBaseConfiguration conf,
                                  final boolean disableIndexing) {
        this.indexed = -1;
        this.constraints = new LinkedList();
        int depth = conf.getCompositeKeyDepth();
        if ( !compositeAllowed( constraints ) ) {
            // UnificationRestrictions cannot be allowed in composite indexes.
            // We also ensure that if there is a mixture the standard
            // restriction is first (compositeAllowed reordered the array).
            depth = 1;
        }

        // First create a LinkedList of constraints, with the indexed constraints first.
        for ( int i = 0, length = constraints.length; i < length; i++ ) {
            // Determine if this constraint is indexable; it only counts while
            // we still have room under the composite key depth.
            if ( (!disableIndexing) && conf.isIndexLeftBetaMemory() && conf.isIndexRightBetaMemory() && isIndexable( constraints[i] ) && (this.indexed < depth - 1) ) {
                if ( depth >= 1 && this.indexed == -1 ) {
                    // first index, so just add to the front
                    this.constraints.insertAfter( null,
                                                  new LinkedListEntry( constraints[i] ) );
                    this.indexed++;
                } else {
                    // insert this index after the previous index
                    this.constraints.insertAfter( findNode( this.indexed++ ),
                                                  new LinkedListEntry( constraints[i] ) );
                }
            } else {
                // not indexed, so just add to the end
                this.constraints.add( new LinkedListEntry( constraints[i] ) );
            }
        }
    }

    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        constraints = (LinkedList) in.readObject();
        indexed = in.readInt();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject( constraints );
        out.writeInt( indexed );
    }

    /**
     * Creates one ContextEntry per constraint, in the same order as the
     * constraint list (indexed entries first, matching the array layout the
     * isAllowedCached* methods rely on).
     */
    public ContextEntry[] createContext() {
        // Now create the ContextEntries in the same order as the constraints.
        ContextEntry[] contexts = new ContextEntry[this.constraints.size()];
        int i = 0;
        for ( LinkedListEntry entry = (LinkedListEntry) this.constraints.getFirst(); entry != null; entry = (LinkedListEntry) entry.getNext() ) {
            final BetaNodeFieldConstraint constraint = (BetaNodeFieldConstraint) entry.getObject();
            contexts[i++] = constraint.createContextEntry();
        }
        return contexts;
    }

    /**
     * Walks the list to the node at the given position; returns null when
     * pos equals the list size (i.e. one past the last node).
     */
    private LinkedListEntry findNode(final int pos) {
        LinkedListEntry current = (LinkedListEntry) this.constraints.getFirst();
        for ( int i = 0; i < pos; i++ ) {
            current = (LinkedListEntry) current.getNext();
        }
        return current;
    }

    /** A constraint is indexable when it is an IndexableConstraint that reports itself indexable. */
    public static boolean isIndexable(final BetaNodeFieldConstraint constraint) {
        return constraint instanceof IndexableConstraint && ((IndexableConstraint) constraint).isIndexable();
    }

    /* (non-Javadoc)
     * @see org.drools.common.BetaNodeConstraints#updateFromTuple(org.drools.reteoo.ReteTuple)
     */
    public void updateFromTuple(final ContextEntry[] context,
                                final InternalWorkingMemory workingMemory,
                                final LeftTuple tuple) {
        for ( ContextEntry aContext : context ) {
            aContext.updateFromTuple( workingMemory,
                                      tuple );
        }
    }

    /* (non-Javadoc)
     * @see org.drools.common.BetaNodeConstraints#updateFromFactHandle(org.drools.common.InternalFactHandle)
     */
    public void updateFromFactHandle(final ContextEntry[] context,
                                     final InternalWorkingMemory workingMemory,
                                     final InternalFactHandle handle) {
        for ( ContextEntry aContext : context ) {
            aContext.updateFromFactHandle( workingMemory,
                                           handle );
        }
    }

    public void resetTuple(final ContextEntry[] context) {
        for ( ContextEntry aContext : context ) {
            aContext.resetTuple();
        }
    }

    public void resetFactHandle(final ContextEntry[] context) {
        for ( ContextEntry aContext : context ) {
            aContext.resetFactHandle();
        }
    }

    /* (non-Javadoc)
     * @see org.drools.common.BetaNodeConstraints#isAllowedCachedLeft(java.lang.Object)
     */
    public boolean isAllowedCachedLeft(final ContextEntry[] context,
                                       final InternalFactHandle handle) {
        // skip the indexed constraints: findNode(indexed + 1) yields the
        // first non-indexed node (or null when all constraints are indexed)
        LinkedListEntry entry = findNode( this.indexed + 1 );

        // i starts at 1 so context[this.indexed + i] lines up with the
        // non-indexed portion of the context array.
        int i = 1;
        while ( entry != null ) {
            if ( !((BetaNodeFieldConstraint) entry.getObject()).isAllowedCachedLeft( context[this.indexed + i],
                                                                                     handle ) ) {
                return false;
            }
            entry = (LinkedListEntry) entry.getNext();
            i++;
        }
        return true;
    }

    /* (non-Javadoc)
     * @see org.drools.common.BetaNodeConstraints#isAllowedCachedRight(org.drools.reteoo.ReteTuple)
     */
    public boolean isAllowedCachedRight(final ContextEntry[] context,
                                        final LeftTuple tuple) {
        // skip the indexed constraints (same offset scheme as isAllowedCachedLeft)
        LinkedListEntry entry = findNode( this.indexed + 1 );
        int i = 1;
        while ( entry != null ) {
            if ( !((BetaNodeFieldConstraint) entry.getObject()).isAllowedCachedRight( tuple,
                                                                                      context[this.indexed + i] ) ) {
                return false;
            }
            entry = (LinkedListEntry) entry.getNext();
            i++;
        }
        return true;
    }

    public boolean isIndexed() {
        // false if -1
        return this.indexed >= 0;
    }

    public int getIndexCount() {
        return this.indexed + 1;
    }

    public boolean isEmpty() {
        return false;
    }

    /**
     * Creates the beta memory for this node. When at least one constraint is
     * indexed, the left/right tuple memories are hash tables keyed on the
     * collected FieldIndexes (subject to the per-side config switches);
     * otherwise plain lists are used. In sequential mode the left tuple
     * memory is null.
     */
    public BetaMemory createBetaMemory(final RuleBaseConfiguration config,
                                       final short nodeType) {
        BetaMemory memory;
        if ( this.indexed >= 0 ) {
            // Collect the FieldIndex of each indexed constraint, which are
            // the first (indexed + 1) entries of the list by construction.
            LinkedListEntry entry = (LinkedListEntry) this.constraints.getFirst();
            final List<FieldIndex> list = new ArrayList<FieldIndex>();

            for ( int pos = 0; pos <= this.indexed; pos++ ) {
                final Constraint constraint = (Constraint) entry.getObject();
                final IndexableConstraint indexableConstraint = (IndexableConstraint) constraint;
                final FieldIndex index = indexableConstraint.getFieldIndex();
                list.add( index );
                entry = (LinkedListEntry) entry.getNext();
            }

            final FieldIndex[] indexes = list.toArray( new FieldIndex[list.size()] );

            LeftTupleMemory tupleMemory;
            if ( config.isIndexLeftBetaMemory() ) {
                tupleMemory = new LeftTupleIndexHashTable( indexes );
            } else {
                tupleMemory = new LeftTupleList();
            }

            RightTupleMemory factHandleMemory;
            if ( config.isIndexRightBetaMemory() ) {
                factHandleMemory = new RightTupleIndexHashTable( indexes );
            } else {
                factHandleMemory = new RightTupleList();
            }

            memory = new BetaMemory( config.isSequential() ? null : tupleMemory,
                                     factHandleMemory,
                                     this.createContext(),
                                     nodeType );
        } else {
            memory = new BetaMemory( config.isSequential() ? null : new LeftTupleList(),
                                     new RightTupleList(),
                                     this.createContext(),
                                     nodeType );
        }

        return memory;
    }

    public int hashCode() {
        return this.constraints.hashCode();
    }

    /* (non-Javadoc)
     * @see org.drools.common.BetaNodeConstraints#getConstraints()
     */
    public LinkedList getConstraints() {
        return this.constraints;
    }

    /**
     * Determine if another object is equal to this.
     *
     * @param object
     *            The object to test.
     *
     * @return <code>true</code> if <code>object</code> is equal to this,
     *         otherwise <code>false</code>.
     */
    public boolean equals(final Object object) {
        if ( this == object ) {
            return true;
        }

        if ( object == null || !(object instanceof DefaultBetaConstraints) ) {
            return false;
        }

        final DefaultBetaConstraints other = (DefaultBetaConstraints) object;

        if ( this.constraints == other.constraints ) {
            return true;
        }

        if ( this.constraints.size() != other.constraints.size() ) {
            return false;
        }

        return this.constraints.equals( other.constraints );
    }

    public BetaConstraints getOriginalConstraint() {
        throw new UnsupportedOperationException();
    }

    /**
     * Ors together the property-reactivity masks of all constraints; returns
     * Long.MAX_VALUE (listen to everything) as soon as any constraint is not
     * an MvelConstraint, since its mask cannot be computed.
     */
    public long getListenedPropertyMask(List<String> settableProperties) {
        long mask = 0L;
        LinkedListEntry entry = (LinkedListEntry) constraints.getFirst();
        while ( entry != null ) {
            final Constraint constraint = (Constraint) entry.getObject();
            entry = (LinkedListEntry) entry.getNext();
            if ( constraint instanceof MvelConstraint ) {
                mask |= ((MvelConstraint) constraint).getListenedPropertyMask( settableProperties );
            } else {
                return Long.MAX_VALUE;
            }
        }
        return mask;
    }
}
// Generated from Model.g4 by ANTLR 4.5
// NOTE: This file is machine-generated by the ANTLR tool.
// DO NOT EDIT BY HAND — regenerate from Model.g4 instead; any manual change
// will be lost on the next generation and may desynchronize the serialized
// ATN below from the token tables.
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;

@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class ModelLexer extends Lexer {
	static { RuntimeMetaData.checkVersion("4.5", RuntimeMetaData.VERSION); }

	protected static final DFA[] _decisionToDFA;
	protected static final PredictionContextCache _sharedContextCache =
		new PredictionContextCache();
	// Token type constants, one per lexer rule/literal (indexes into the
	// vocabulary tables below).
	public static final int
		T__0=1, T__1=2, T__2=3, T__3=4, T__4=5, T__5=6, T__6=7, T__7=8, T__8=9, 
		Encoding=10, UnitsDoc=11, Group=12, Sketch=13, Star=14, Div=15, Plus=16, 
		Minus=17, Less=18, LessEqual=19, Greater=20, GreaterEqual=21, Equal=22, 
		TwoEqual=23, NotEqual=24, Exclamation=25, Id=26, Const=27, StringLiteral=28, 
		Keyword=29, Whitespace=30;
	public static String[] modeNames = {
		"DEFAULT_MODE"
	};

	public static final String[] ruleNames = {
		"T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "T__7", "T__8", 
		"Encoding", "UnitsDoc", "Group", "Sketch", "Star", "Div", "Plus", "Minus", 
		"Less", "LessEqual", "Greater", "GreaterEqual", "Equal", "TwoEqual", "NotEqual", 
		"Exclamation", "Id", "IdChar", "Nondigit", "Digit", "Const", "IntegerConst", 
		"NonzeroDigit", "FloatingConst", "FractionalConstant", "ExponentPart", 
		"Sign", "DigitSeq", "StringLiteral", "SCharSequence", "SChar", "EscapeSequence", 
		"Keyword", "Whitespace"
	};

	private static final String[] _LITERAL_NAMES = {
		null, "'['", "','", "']'", "'('", "')'", "':NOT:'", "'^'", "':AND:'", 
		"':OR:'", null, null, null, null, "'*'", "'/'", "'+'", "'-'", "'<'", "'<='", 
		"'>'", "'>='", "'='", "'=='", "'<>'", "'!'", null, null, null, "':NA:'"
	};
	private static final String[] _SYMBOLIC_NAMES = {
		null, null, null, null, null, null, null, null, null, null, "Encoding", 
		"UnitsDoc", "Group", "Sketch", "Star", "Div", "Plus", "Minus", "Less", 
		"LessEqual", "Greater", "GreaterEqual", "Equal", "TwoEqual", "NotEqual", 
		"Exclamation", "Id", "Const", "StringLiteral", "Keyword", "Whitespace"
	};
	public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);

	/**
	 * @deprecated Use {@link #VOCABULARY} instead.
	 */
	@Deprecated
	public static final String[] tokenNames;
	static {
		// Legacy token-name table, kept for the deprecated getTokenNames() API:
		// prefer the literal name, fall back to the symbolic name, else mark invalid.
		tokenNames = new String[_SYMBOLIC_NAMES.length];
		for (int i = 0; i < tokenNames.length; i++) {
			tokenNames[i] = VOCABULARY.getLiteralName(i);
			if (tokenNames[i] == null) {
				tokenNames[i] = VOCABULARY.getSymbolicName(i);
			}

			if (tokenNames[i] == null) {
				tokenNames[i] = "<INVALID>";
			}
		}
	}

	@Override
	@Deprecated
	public String[] getTokenNames() {
		return tokenNames;
	}

	@Override
	public Vocabulary getVocabulary() {
		return VOCABULARY;
	}

	public ModelLexer(CharStream input) {
		super(input);
		_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
	}

	@Override
	public String getGrammarFileName() { return "Model.g4"; }

	@Override
	public String[] getRuleNames() { return ruleNames; }

	@Override
	public String getSerializedATN() { return _serializedATN; }

	@Override
	public String[] getModeNames() { return modeNames; }

	@Override
	public ATN getATN() { return _ATN; }

	// Serialized augmented transition network produced by the ANTLR tool;
	// opaque data — deserialized once below into _ATN. Do not modify.
	public static final String _serializedATN =
		"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2 \u012c\b\1\4\2\t"+
		"\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
		"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
		"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
		"\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+
		"\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+
		",\t,\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3"+
		"\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\13\3\13\6\13y\n\13"+
		"\r\13\16\13z\3\13\3\13\3\13\3\13\3\f\3\f\7\f\u0083\n\f\f\f\16\f\u0086"+
		"\13\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\7\r\u0092\n\r\f\r\16\r\u0095"+
		"\13\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\7\16\u00a0\n\16\f\16\16"+
		"\16\u00a3\13\16\3\16\3\16\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23"+
		"\3\23\3\24\3\24\3\24\3\25\3\25\3\26\3\26\3\26\3\27\3\27\3\30\3\30\3\30"+
		"\3\31\3\31\3\31\3\32\3\32\3\33\3\33\7\33\u00c5\n\33\f\33\16\33\u00c8\13"+
		"\33\3\33\3\33\3\33\7\33\u00cd\n\33\f\33\16\33\u00d0\13\33\3\33\3\33\3"+
		"\33\5\33\u00d5\n\33\3\33\5\33\u00d8\n\33\3\34\3\34\3\35\3\35\3\36\3\36"+
		"\3\37\3\37\5\37\u00e2\n\37\3 \6 \u00e5\n \r \16 \u00e6\3!\3!\3\"\3\"\5"+
		"\"\u00ed\n\"\3\"\3\"\3\"\5\"\u00f2\n\"\3#\5#\u00f5\n#\3#\3#\3#\3#\3#\5"+
		"#\u00fc\n#\3$\3$\5$\u0100\n$\3$\3$\3$\5$\u0105\n$\3$\5$\u0108\n$\3%\3"+
		"%\3&\6&\u010d\n&\r&\16&\u010e\3\'\3\'\5\'\u0113\n\'\3\'\3\'\3(\6(\u0118"+
		"\n(\r(\16(\u0119\3)\3)\3*\3*\3*\3+\3+\3+\3+\3+\3,\6,\u0127\n,\r,\16,\u0128"+
		"\3,\3,\4\u0084\u0093\2-\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f"+
		"\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63"+
		"\33\65\34\67\29\2;\2=\35?\2A\2C\2E\2G\2I\2K\2M\36O\2Q\2S\2U\37W \3\2\13"+
		"\6\2//\62;C\\c|\6\2\62;C\\aac|\5\2C\\aac|\3\2\62;\3\2\63;\4\2--//\6\2"+
		"\f\f\17\17$$^^\f\2$$))AA^^cdhhppttvvxx\5\2\13\f\17\17\"\"\u0135\2\3\3"+
		"\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2"+
		"\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3"+
		"\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2"+
		"%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61"+
		"\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2=\3\2\2\2\2M\3\2\2\2\2U\3\2\2\2\2"+
		"W\3\2\2\2\3Y\3\2\2\2\5[\3\2\2\2\7]\3\2\2\2\t_\3\2\2\2\13a\3\2\2\2\rc\3"+
		"\2\2\2\17i\3\2\2\2\21k\3\2\2\2\23q\3\2\2\2\25v\3\2\2\2\27\u0080\3\2\2"+
		"\2\31\u008b\3\2\2\2\33\u009a\3\2\2\2\35\u00a6\3\2\2\2\37\u00a8\3\2\2\2"+
		"!\u00aa\3\2\2\2#\u00ac\3\2\2\2%\u00ae\3\2\2\2\'\u00b0\3\2\2\2)\u00b3\3"+
		"\2\2\2+\u00b5\3\2\2\2-\u00b8\3\2\2\2/\u00ba\3\2\2\2\61\u00bd\3\2\2\2\63"+
		"\u00c0\3\2\2\2\65\u00d4\3\2\2\2\67\u00d9\3\2\2\29\u00db\3\2\2\2;\u00dd"+
		"\3\2\2\2=\u00e1\3\2\2\2?\u00e4\3\2\2\2A\u00e8\3\2\2\2C\u00f1\3\2\2\2E"+
		"\u00fb\3\2\2\2G\u0107\3\2\2\2I\u0109\3\2\2\2K\u010c\3\2\2\2M\u0110\3\2"+
		"\2\2O\u0117\3\2\2\2Q\u011b\3\2\2\2S\u011d\3\2\2\2U\u0120\3\2\2\2W\u0126"+
		"\3\2\2\2YZ\7]\2\2Z\4\3\2\2\2[\\\7.\2\2\\\6\3\2\2\2]^\7_\2\2^\b\3\2\2\2"+
		"_`\7*\2\2`\n\3\2\2\2ab\7+\2\2b\f\3\2\2\2cd\7<\2\2de\7P\2\2ef\7Q\2\2fg"+
		"\7V\2\2gh\7<\2\2h\16\3\2\2\2ij\7`\2\2j\20\3\2\2\2kl\7<\2\2lm\7C\2\2mn"+
		"\7P\2\2no\7F\2\2op\7<\2\2p\22\3\2\2\2qr\7<\2\2rs\7Q\2\2st\7T\2\2tu\7<"+
		"\2\2u\24\3\2\2\2vx\7}\2\2wy\t\2\2\2xw\3\2\2\2yz\3\2\2\2zx\3\2\2\2z{\3"+
		"\2\2\2{|\3\2\2\2|}\7\177\2\2}~\3\2\2\2~\177\b\13\2\2\177\26\3\2\2\2\u0080"+
		"\u0084\7\u0080\2\2\u0081\u0083\13\2\2\2\u0082\u0081\3\2\2\2\u0083\u0086"+
		"\3\2\2\2\u0084\u0085\3\2\2\2\u0084\u0082\3\2\2\2\u0085\u0087\3\2\2\2\u0086"+
		"\u0084\3\2\2\2\u0087\u0088\7~\2\2\u0088\u0089\3\2\2\2\u0089\u008a\b\f"+
		"\2\2\u008a\30\3\2\2\2\u008b\u008c\7,\2\2\u008c\u008d\7,\2\2\u008d\u008e"+
		"\7,\2\2\u008e\u008f\7,\2\2\u008f\u0093\3\2\2\2\u0090\u0092\13\2\2\2\u0091"+
		"\u0090\3\2\2\2\u0092\u0095\3\2\2\2\u0093\u0094\3\2\2\2\u0093\u0091\3\2"+
		"\2\2\u0094\u0096\3\2\2\2\u0095\u0093\3\2\2\2\u0096\u0097\7~\2\2\u0097"+
		"\u0098\3\2\2\2\u0098\u0099\b\r\2\2\u0099\32\3\2\2\2\u009a\u009b\7^\2\2"+
		"\u009b\u009c\7^\2\2\u009c\u009d\7^\2\2\u009d\u00a1\3\2\2\2\u009e\u00a0"+
		"\13\2\2\2\u009f\u009e\3\2\2\2\u00a0\u00a3\3\2\2\2\u00a1\u009f\3\2\2\2"+
		"\u00a1\u00a2\3\2\2\2\u00a2\u00a4\3\2\2\2\u00a3\u00a1\3\2\2\2\u00a4\u00a5"+
		"\b\16\2\2\u00a5\34\3\2\2\2\u00a6\u00a7\7,\2\2\u00a7\36\3\2\2\2\u00a8\u00a9"+
		"\7\61\2\2\u00a9 \3\2\2\2\u00aa\u00ab\7-\2\2\u00ab\"\3\2\2\2\u00ac\u00ad"+
		"\7/\2\2\u00ad$\3\2\2\2\u00ae\u00af\7>\2\2\u00af&\3\2\2\2\u00b0\u00b1\7"+
		">\2\2\u00b1\u00b2\7?\2\2\u00b2(\3\2\2\2\u00b3\u00b4\7@\2\2\u00b4*\3\2"+
		"\2\2\u00b5\u00b6\7@\2\2\u00b6\u00b7\7?\2\2\u00b7,\3\2\2\2\u00b8\u00b9"+
		"\7?\2\2\u00b9.\3\2\2\2\u00ba\u00bb\7?\2\2\u00bb\u00bc\7?\2\2\u00bc\60"+
		"\3\2\2\2\u00bd\u00be\7>\2\2\u00be\u00bf\7@\2\2\u00bf\62\3\2\2\2\u00c0"+
		"\u00c1\7#\2\2\u00c1\64\3\2\2\2\u00c2\u00c6\59\35\2\u00c3\u00c5\5\67\34"+
		"\2\u00c4\u00c3\3\2\2\2\u00c5\u00c8\3\2\2\2\u00c6\u00c4\3\2\2\2\u00c6\u00c7"+
		"\3\2\2\2\u00c7\u00d5\3\2\2\2\u00c8\u00c6\3\2\2\2\u00c9\u00ce\59\35\2\u00ca"+
		"\u00cd\5\67\34\2\u00cb\u00cd\7\"\2\2\u00cc\u00ca\3\2\2\2\u00cc\u00cb\3"+
		"\2\2\2\u00cd\u00d0\3\2\2\2\u00ce\u00cc\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf"+
		"\u00d1\3\2\2\2\u00d0\u00ce\3\2\2\2\u00d1\u00d2\5\67\34\2\u00d2\u00d5\3"+
		"\2\2\2\u00d3\u00d5\5M\'\2\u00d4\u00c2\3\2\2\2\u00d4\u00c9\3\2\2\2\u00d4"+
		"\u00d3\3\2\2\2\u00d5\u00d7\3\2\2\2\u00d6\u00d8\5\63\32\2\u00d7\u00d6\3"+
		"\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\66\3\2\2\2\u00d9\u00da\t\3\2\2\u00da"+
		"8\3\2\2\2\u00db\u00dc\t\4\2\2\u00dc:\3\2\2\2\u00dd\u00de\t\5\2\2\u00de"+
		"<\3\2\2\2\u00df\u00e2\5? \2\u00e0\u00e2\5C\"\2\u00e1\u00df\3\2\2\2\u00e1"+
		"\u00e0\3\2\2\2\u00e2>\3\2\2\2\u00e3\u00e5\5;\36\2\u00e4\u00e3\3\2\2\2"+
		"\u00e5\u00e6\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e6\u00e7\3\2\2\2\u00e7@\3"+
		"\2\2\2\u00e8\u00e9\t\6\2\2\u00e9B\3\2\2\2\u00ea\u00ec\5E#\2\u00eb\u00ed"+
		"\5G$\2\u00ec\u00eb\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed\u00f2\3\2\2\2\u00ee"+
		"\u00ef\5K&\2\u00ef\u00f0\5G$\2\u00f0\u00f2\3\2\2\2\u00f1\u00ea\3\2\2\2"+
		"\u00f1\u00ee\3\2\2\2\u00f2D\3\2\2\2\u00f3\u00f5\5K&\2\u00f4\u00f3\3\2"+
		"\2\2\u00f4\u00f5\3\2\2\2\u00f5\u00f6\3\2\2\2\u00f6\u00f7\7\60\2\2\u00f7"+
		"\u00fc\5K&\2\u00f8\u00f9\5K&\2\u00f9\u00fa\7\60\2\2\u00fa\u00fc\3\2\2"+
		"\2\u00fb\u00f4\3\2\2\2\u00fb\u00f8\3\2\2\2\u00fcF\3\2\2\2\u00fd\u00ff"+
		"\7g\2\2\u00fe\u0100\5I%\2\u00ff\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100"+
		"\u0101\3\2\2\2\u0101\u0108\5K&\2\u0102\u0104\7G\2\2\u0103\u0105\5I%\2"+
		"\u0104\u0103\3\2\2\2\u0104\u0105\3\2\2\2\u0105\u0106\3\2\2\2\u0106\u0108"+
		"\5K&\2\u0107\u00fd\3\2\2\2\u0107\u0102\3\2\2\2\u0108H\3\2\2\2\u0109\u010a"+
		"\t\7\2\2\u010aJ\3\2\2\2\u010b\u010d\5;\36\2\u010c\u010b\3\2\2\2\u010d"+
		"\u010e\3\2\2\2\u010e\u010c\3\2\2\2\u010e\u010f\3\2\2\2\u010fL\3\2\2\2"+
		"\u0110\u0112\7$\2\2\u0111\u0113\5O(\2\u0112\u0111\3\2\2\2\u0112\u0113"+
		"\3\2\2\2\u0113\u0114\3\2\2\2\u0114\u0115\7$\2\2\u0115N\3\2\2\2\u0116\u0118"+
		"\5Q)\2\u0117\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119\u0117\3\2\2\2\u0119"+
		"\u011a\3\2\2\2\u011aP\3\2\2\2\u011b\u011c\n\b\2\2\u011cR\3\2\2\2\u011d"+
		"\u011e\7^\2\2\u011e\u011f\t\t\2\2\u011fT\3\2\2\2\u0120\u0121\7<\2\2\u0121"+
		"\u0122\7P\2\2\u0122\u0123\7C\2\2\u0123\u0124\7<\2\2\u0124V\3\2\2\2\u0125"+
		"\u0127\t\n\2\2\u0126\u0125\3\2\2\2\u0127\u0128\3\2\2\2\u0128\u0126\3\2"+
		"\2\2\u0128\u0129\3\2\2\2\u0129\u012a\3\2\2\2\u012a\u012b\b,\2\2\u012b"+
		"X\3\2\2\2\31\2z\u0084\u0093\u00a1\u00c6\u00cc\u00ce\u00d4\u00d7\u00e1"+
		"\u00e6\u00ec\u00f1\u00f4\u00fb\u00ff\u0104\u0107\u010e\u0112\u0119\u0128"+
		"\3\b\2\2";
	public static final ATN _ATN =
		new ATNDeserializer().deserialize(_serializedATN.toCharArray());
	static {
		// One DFA per ATN decision, lazily filled in by the simulator at runtime.
		_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
		for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
			_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
		}
	}
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2016 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.pscanrules;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;

import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import org.junit.jupiter.api.Test;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.OptionsParam;
import org.parosproxy.paros.network.HttpMalformedHeaderException;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.addon.commonlib.http.HttpDateUtils;
import org.zaproxy.zap.extension.ruleconfig.RuleConfigParam;
import org.zaproxy.zap.utils.ZapXmlConfiguration;

/**
 * Unit tests for the Cookie Secure Flag passive scan rule: the rule should
 * alert on Set-Cookie headers lacking the Secure attribute over HTTPS, and
 * stay silent for HTTP responses, secured cookies, ignored cookie names,
 * and cookie-deletion (epoch expiry) headers.
 */
class CookieSecureFlagScanRuleUnitTest extends PassiveScannerTest<CookieSecureFlagScanRule> {

    private Model model;

    @Override
    protected CookieSecureFlagScanRule createScanner() {
        rule = new CookieSecureFlagScanRule();
        // Mock the model and options so per-rule config (e.g. the cookie
        // ignore list) can be set by individual tests.
        model = mock(Model.class, withSettings().lenient());
        OptionsParam options = new OptionsParam();
        ZapXmlConfiguration conf = new ZapXmlConfiguration();
        options.load(conf);
        when(model.getOptionsParam()).thenReturn(options);
        rule.setModel(model);
        return rule;
    }

    /** Plain-HTTP responses never need the Secure flag, so no alert. */
    @Test
    void httpNoSecureFlag() throws HttpMalformedHeaderException {
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET http://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=123; Path=/;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(0));
    }

    /** HTTPS response with an unsecured cookie must raise exactly one alert. */
    @Test
    void httpsNoSecureFlag() throws HttpMalformedHeaderException {
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=123; Path=/;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(1));
        assertThat(alertsRaised.get(0).getParam(), equalTo("test"));
        assertThat(alertsRaised.get(0).getEvidence(), equalTo("Set-Cookie: test"));
    }

    /** Cookie already carries Secure over HTTPS: no alert. */
    @Test
    void httpsSecureFlag() throws HttpMalformedHeaderException {
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=123; Path=/; Secure;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(0));
    }

    /** Only the second (unsecured) cookie should be reported. */
    @Test
    void secondCookieNoSecureFlag() throws HttpMalformedHeaderException {
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: hasatt=test123; Path=/; Secure; HttpOnly\r\n"
                        + "Set-Cookie: test=123; Path=/;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(1));
        assertThat(alertsRaised.get(0).getParam(), equalTo("test"));
        assertThat(alertsRaised.get(0).getEvidence(), equalTo("Set-Cookie: test"));
    }

    /** Cookies named in the rule's ignore-list config are skipped. */
    @Test
    void cookieOnIgnoreList() throws HttpMalformedHeaderException {
        model.getOptionsParam()
                .getConfig()
                .setProperty(RuleConfigParam.RULE_COOKIE_IGNORE_LIST, "aaaa,test,bbb");

        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=123; Path=/;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(0));
    }

    /** An ignore list that does NOT contain the cookie must not suppress the alert. */
    @Test
    void cookieNotOnIgnoreList() throws HttpMalformedHeaderException {
        model.getOptionsParam()
                .getConfig()
                .setProperty(RuleConfigParam.RULE_COOKIE_IGNORE_LIST, "aaaa,bbb,ccc");

        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=123; Path=/;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(1));
        assertThat(alertsRaised.get(0).getParam(), equalTo("test"));
        assertThat(alertsRaised.get(0).getEvidence(), equalTo("Set-Cookie: test"));
    }

    @Test
    void shouldNotAlertOnDelete() throws HttpMalformedHeaderException {
        // Given - value empty and epoch start date (cookie deletion, not a real cookie)
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");
        // When
        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=\"\"; expires=Thu, 01 Jan 1970 00:00:00 GMT; Path=/; HttpOnly\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised.size(), equalTo(0));
    }

    @Test
    void shouldNotAlertOnDeleteHyphenatedDate() throws HttpMalformedHeaderException {
        // Given - value empty and epoch start date, in the legacy dd-MMM-yyyy format
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");
        // When
        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=\"\"; expires=Thu, 01-Jan-1970 00:00:00 GMT; Path=/; HttpOnly\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised.size(), equalTo(0));
    }

    @Test
    void shouldAlertWhenFutureExpiry() throws HttpMalformedHeaderException {
        // Given - value empty but a future expiry (so NOT a deletion)
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");
        // When
        msg.setResponseBody("<html></html>");
        String expiry =
                HttpDateUtils.format(LocalDateTime.now().plusYears(1).toInstant(ZoneOffset.UTC));
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=\"\"; expires="
                        + expiry
                        + "; Path=/; HttpOnly\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised.size(), equalTo(1));
        assertThat(alertsRaised.get(0).getParam(), equalTo("test"));
        assertThat(alertsRaised.get(0).getEvidence(), equalTo("Set-Cookie: test"));
    }

    @Test
    void shouldAlertWhenFutureExpiryHyphenatedDate() throws HttpMalformedHeaderException {
        // Given - value empty but a future expiry in the hyphenated date format
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");
        // When
        msg.setResponseBody("<html></html>");
        DateTimeFormatter df =
                DateTimeFormatter.ofPattern("EEE, dd-MMM-yyyy HH:mm:ss zzz")
                        .withZone(ZoneOffset.UTC);
        LocalDateTime dateTime = LocalDateTime.now().plusYears(1);
        String expiry = dateTime.format(df);
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: test=\"\"; expires="
                        + expiry
                        + "; Path=/; HttpOnly\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised.size(), equalTo(1));
        assertThat(alertsRaised.get(0).getParam(), equalTo("test"));
        assertThat(alertsRaised.get(0).getEvidence(), equalTo("Set-Cookie: test"));
    }

    /** First cookie is an expired deletion; only the second, live cookie alerts. */
    @Test
    void secondCookieNoSecureAttributeFirstExpired() throws HttpMalformedHeaderException {
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET https://www.example.com/test/ HTTP/1.1");

        msg.setResponseBody("<html></html>");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Server: Apache-Coyote/1.1\r\n"
                        + "Set-Cookie: hasatt=test123; expires=Thu, 01-Jan-1970 00:00:00 GMT; Path=/; secure\r\n"
                        + "Set-Cookie: test=123; Path=/;\r\n"
                        + "Content-Type: text/html;charset=ISO-8859-1\r\n"
                        + "Content-Length: "
                        + msg.getResponseBody().length()
                        + "\r\n");
        scanHttpResponseReceive(msg);

        assertThat(alertsRaised.size(), equalTo(1));
        assertThat(alertsRaised.get(0).getParam(), equalTo("test"));
        assertThat(alertsRaised.get(0).getEvidence(), equalTo("Set-Cookie: test"));
    }
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 *
 */
package org.apache.directory.server.core.operations.lookup;


import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.server.core.annotations.ApplyLdifs;
import org.apache.directory.server.core.annotations.CreateDS;
import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
import org.apache.directory.server.core.integ.FrameworkRunner;
import org.apache.directory.server.core.integ.IntegrationUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;


/**
 * Test the lookup operation against an embedded directory server.
 * <p>
 * A single entry (cn=test,ou=system) is injected via {@link ApplyLdifs}; each
 * test exercises a different attribute-selection form of
 * {@code LdapConnection.lookup()} ("*", "+", explicit names, "1.1", none).
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@RunWith(FrameworkRunner.class)
@CreateDS(name = "LookupIT")
@ApplyLdifs(
    {
        // Entry # 1
        "dn: cn=test,ou=system",
        "objectClass: person",
        "cn: test",
        "sn: sn_test" })
public class LookupIT extends AbstractLdapTestUnit
{
    /** The ldap connection, opened as admin before each test and closed after. */
    private LdapConnection connection;


    /** Open an admin connection to the framework-managed service. */
    @Before
    public void setup() throws Exception
    {
        connection = IntegrationUtils.getAdminConnection( getService() );
    }


    /** Release the connection opened in {@link #setup()}. */
    @After
    public void shutdown() throws Exception
    {
        connection.close();
    }


    /**
     * Test a lookup( Dn, "*") operation
     */
    @Test
    public void testLookupStar() throws Exception
    {
        Entry entry = connection.lookup( "cn=test,ou=system", "*" );
        assertNotNull( entry );

        // Check that we don't have any operational attributes :
        // We should have only 3 attributes : objectClass, cn and sn
        assertEquals( 3, entry.size() );

        // Check that all the user attributes are present
        assertEquals( "test", entry.get( "cn" ).getString() );
        assertEquals( "sn_test", entry.get( "sn" ).getString() );
        assertTrue( entry.contains( "objectClass", "top", "person" ) );
    }


    /**
     * Test a lookup( Dn, "+") operation
     */
    @Test
    public void testLookupPlus() throws Exception
    {
        getService().setDenormalizeOpAttrsEnabled( true );
        Entry entry = connection.lookup( "cn=test,ou=system", "+" );
        assertNotNull( entry );

        // We should have 6 attributes
        assertEquals( 6, entry.size() );

        // Check that all the user attributes are absent
        assertNull( entry.get( "cn" ) );
        assertNull( entry.get( "sn" ) );
        assertNull( entry.get( "objectClass" ) );
        /*
        assertEquals( "test", entry.get( "cn" ).getString() );
        assertEquals( "sn_test", entry.get( "sn" ).getString() );
        assertTrue( entry.contains( "objectClass", "top", "person" ) );
        */

        // Check that we have all the operational attributes :
        // We should have 6 operational attributes : createTime, createUser, entryCSN, entryDn, entryParentId and entryUUID
        assertNotNull( entry.get( "createTimestamp" ).getString() );
        assertNotNull( entry.get( "creatorsName" ) );
        assertEquals( "uid=admin,ou=system", entry.get( "creatorsName" ).getString() );
        assertNotNull( entry.get( "entryCSN" ).getString() );
        assertNotNull( entry.get( "entryUUID" ).getString() );
        assertNotNull( entry.get( "entryParentId" ).getString() );
        assertNotNull( entry.get( "entryDn" ) );
        assertEquals( "cn=test,ou=system", entry.get( "entryDn" ).getString() );
    }


    /**
     * Test a lookup( Dn, []) operation
     */
    // NOTE(review): despite the name, this passes a null String[] (no attribute
    // list), not an empty array — confirm that both spellings are meant to be
    // equivalent to "all user attributes" before renaming either.
    @Test
    public void testLookupEmptyAtrid() throws Exception
    {
        Entry entry = connection.lookup( "cn=test,ou=system", ( String[] ) null );
        assertNotNull( entry );

        // We should have 3 attributes
        assertEquals( 3, entry.size() );

        // Check that all the user attributes are present
        assertEquals( "test", entry.get( "cn" ).getString() );
        assertEquals( "sn_test", entry.get( "sn" ).getString() );
        assertTrue( entry.contains( "objectClass", "top", "person" ) );
    }


    /**
     * Test a lookup( Dn ) operation
     */
    @Test
    public void testLookup() throws Exception
    {
        Entry entry = connection.lookup( "cn=test,ou=system" );
        assertNotNull( entry );

        // We should have 3 attributes
        assertEquals( 3, entry.size() );

        // Check that all the user attributes are present
        assertEquals( "test", entry.get( "cn" ).getString() );
        assertEquals( "sn_test", entry.get( "sn" ).getString() );
        assertTrue( entry.contains( "objectClass", "top", "person" ) );
    }


    /**
     * Test a lookup( Dn ) operation on the subschema subentry
     */
    @Test
    public void testLookupSubSchemaSubEntryOpAttrs() throws Exception
    {
        Entry entry = connection.lookup( "cn=schema", "+" );
        assertNotNull( entry );

        // We should have 12 attributes
        assertEquals( 12, entry.size() );

        // Check that all the operational attributes are present
        assertTrue( entry.containsAttribute( "attributeTypes", "comparators", "createTimeStamp", "creatorsName",
            "modifiersName", "modifyTimeStamp", "ldapSyntaxes", "matchingRules", "normalizers", "objectClasses",
            "syntaxCheckers", "subtreeSpecification" ) );
    }


    /**
     * Test a lookup( Dn ) operation on the subschema subentry
     */
    @Test
    public void testLookupSubSchemaSubEntryUserAttrs() throws Exception
    {
        Entry entry = connection.lookup( "cn=schema", "*" );
        assertNotNull( entry );

        // We should have 2 attributes
        assertEquals( 2, entry.size() );

        // Check that all the operational attributes are present
        assertTrue( entry.containsAttribute( "cn", "objectClass" ) );
    }


    /**
     * Test a lookup( Dn ) operation with a list of attributes
     */
    @Test
    public void testLookupWithAttrs() throws Exception
    {
        // "name" is a collective attribute matcher covering cn and sn
        Entry entry = connection.lookup( "cn=test,ou=system", "name" );
        assertNotNull( entry );

        // We should have 2 attributes
        assertEquals( 2, entry.size() );

        // Check that all the user attributes are present
        assertEquals( "test", entry.get( "cn" ).getString() );
        assertEquals( "sn_test", entry.get( "sn" ).getString() );
        assertFalse( entry.containsAttribute( "objectClass" ) );
    }


    /**
     * Test a lookup( Dn ) operation with no attributes
     */
    @Test
    public void testLookupWithNoAttrs() throws Exception
    {
        // "1.1" is the RFC 4511 convention for requesting no attributes at all
        Entry entry = connection.lookup( "cn=test,ou=system", "1.1" );
        assertNotNull( entry );

        // We should have 0 attributes
        assertEquals( 0, entry.size() );
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.master.procedure;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.executor.EventType;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyState;
import org.apache.hadoop.security.UserGroupInformation;

/**
 * The procedure to modify a column family from an existing table.
 * <p>
 * Runs as a {@link StateMachineProcedure}: PREPARE → PRE_OPERATION (coprocessor)
 * → UPDATE_TABLE_DESCRIPTOR → POST_OPERATION (coprocessor) → REOPEN_ALL_REGIONS.
 * State is persisted via protobuf so the procedure can resume on another master.
 */
@InterfaceAudience.Private
public class ModifyColumnFamilyProcedure
    extends StateMachineProcedure<MasterProcedureEnv, ModifyColumnFamilyState>
    implements TableProcedureInterface {
  private static final Log LOG = LogFactory.getLog(ModifyColumnFamilyProcedure.class);

  // Set by abort(); checked in setNextState() so the abort takes effect on the
  // next state transition rather than interrupting the current step.
  private final AtomicBoolean aborted = new AtomicBoolean(false);

  private TableName tableName;
  // Snapshot of the descriptor before modification, used for rollback.
  private HTableDescriptor unmodifiedHTableDescriptor;
  private HColumnDescriptor cfDescriptor;
  private UserGroupInformation user;

  // Lazily resolved trace flag; Boolean (not boolean) so a deserialized
  // procedure on another master re-evaluates it — see isTraceEnabled().
  private Boolean traceEnabled;

  /** No-arg constructor required for procedure deserialization. */
  public ModifyColumnFamilyProcedure() {
    this.unmodifiedHTableDescriptor = null;
    this.traceEnabled = null;
  }

  /**
   * @param env the master procedure environment
   * @param tableName table whose family is being modified
   * @param cfDescriptor the new column family descriptor
   */
  public ModifyColumnFamilyProcedure(
      final MasterProcedureEnv env,
      final TableName tableName,
      final HColumnDescriptor cfDescriptor) throws IOException {
    this.tableName = tableName;
    this.cfDescriptor = cfDescriptor;
    this.user = env.getRequestUser().getUGI();
    this.unmodifiedHTableDescriptor = null;
    this.traceEnabled = null;
  }

  @Override
  protected Flow executeFromState(final MasterProcedureEnv env,
      final ModifyColumnFamilyState state) {
    if (isTraceEnabled()) {
      LOG.trace(this + " execute state=" + state);
    }

    try {
      switch (state) {
      case MODIFY_COLUMN_FAMILY_PREPARE:
        prepareModify(env);
        setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_PRE_OPERATION);
        break;
      case MODIFY_COLUMN_FAMILY_PRE_OPERATION:
        preModify(env, state);
        setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR);
        break;
      case MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
        updateTableDescriptor(env);
        setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_POST_OPERATION);
        break;
      case MODIFY_COLUMN_FAMILY_POST_OPERATION:
        postModify(env, state);
        setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS);
        break;
      case MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
        reOpenAllRegionsIfTableIsOnline(env);
        return Flow.NO_MORE_STATE;
      default:
        throw new UnsupportedOperationException(this + " unhandled state=" + state);
      }
    } catch (InterruptedException|IOException e) {
      // Any failure marks the whole procedure failed; the framework will then
      // drive rollbackState() for the states already executed.
      LOG.warn("Error trying to modify the column family " + getColumnFamilyName()
          + " of the table " + tableName + "(in state=" + state + ")", e);

      setFailure("master-modify-columnfamily", e);
    }
    return Flow.HAS_MORE_STATE;
  }

  @Override
  protected void rollbackState(final MasterProcedureEnv env,
      final ModifyColumnFamilyState state) throws IOException {
    if (isTraceEnabled()) {
      LOG.trace(this + " rollback state=" + state);
    }
    try {
      switch (state) {
      case MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
        break; // Nothing to undo.
      case MODIFY_COLUMN_FAMILY_POST_OPERATION:
        // TODO-MAYBE: call the coprocessor event to undo?
        break;
      case MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
        // Put the saved pre-modification descriptor back in place.
        restoreTableDescriptor(env);
        break;
      case MODIFY_COLUMN_FAMILY_PRE_OPERATION:
        // TODO-MAYBE: call the coprocessor event to undo?
        break;
      case MODIFY_COLUMN_FAMILY_PREPARE:
        break; // nothing to do
      default:
        throw new UnsupportedOperationException(this + " unhandled state=" + state);
      }
    } catch (IOException e) {
      // This will be retried. Unless there is a bug in the code,
      // this should be just a "temporary error" (e.g. network down)
      // NOTE(review): message says "adding the column family" — looks
      // copy-pasted from AddColumnFamilyProcedure; confirm before changing.
      LOG.warn("Failed rollback attempt step " + state + " for adding the column family"
          + getColumnFamilyName() + " to the table " + tableName, e);
      throw e;
    }
  }

  @Override
  protected ModifyColumnFamilyState getState(final int stateId) {
    return ModifyColumnFamilyState.valueOf(stateId);
  }

  @Override
  protected int getStateId(final ModifyColumnFamilyState state) {
    return state.getNumber();
  }

  @Override
  protected ModifyColumnFamilyState getInitialState() {
    return ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_PREPARE;
  }

  @Override
  protected void setNextState(ModifyColumnFamilyState state) {
    // Honor a pending abort() at the state boundary instead of mid-step.
    if (aborted.get()) {
      setAbortFailure("modify-columnfamily", "abort requested");
    } else {
      super.setNextState(state);
    }
  }

  @Override
  public boolean abort(final MasterProcedureEnv env) {
    aborted.set(true);
    return true;
  }

  @Override
  protected boolean acquireLock(final MasterProcedureEnv env) {
    // Don't take the table write lock until the master is fully initialized.
    if (!env.isInitialized()) return false;
    return env.getProcedureQueue().tryAcquireTableWrite(
      tableName,
      EventType.C_M_MODIFY_FAMILY.toString());
  }

  @Override
  protected void releaseLock(final MasterProcedureEnv env) {
    env.getProcedureQueue().releaseTableWrite(tableName);
  }

  @Override
  public void serializeStateData(final OutputStream stream) throws IOException {
    super.serializeStateData(stream);

    MasterProcedureProtos.ModifyColumnFamilyStateData.Builder modifyCFMsg =
        MasterProcedureProtos.ModifyColumnFamilyStateData.newBuilder()
            .setUserInfo(MasterProcedureUtil.toProtoUserInfo(user))
            .setTableName(ProtobufUtil.toProtoTableName(tableName))
            .setColumnfamilySchema(cfDescriptor.convert());
    // Only present once prepareModify() has captured the old descriptor.
    if (unmodifiedHTableDescriptor != null) {
      modifyCFMsg.setUnmodifiedTableSchema(unmodifiedHTableDescriptor.convert());
    }

    modifyCFMsg.build().writeDelimitedTo(stream);
  }

  @Override
  public void deserializeStateData(final InputStream stream) throws IOException {
    super.deserializeStateData(stream);

    MasterProcedureProtos.ModifyColumnFamilyStateData modifyCFMsg =
        MasterProcedureProtos.ModifyColumnFamilyStateData.parseDelimitedFrom(stream);
    user = MasterProcedureUtil.toUserInfo(modifyCFMsg.getUserInfo());
    tableName = ProtobufUtil.toTableName(modifyCFMsg.getTableName());
    cfDescriptor = HColumnDescriptor.convert(modifyCFMsg.getColumnfamilySchema());
    if (modifyCFMsg.hasUnmodifiedTableSchema()) {
      unmodifiedHTableDescriptor = HTableDescriptor.convert(modifyCFMsg.getUnmodifiedTableSchema());
    }
  }

  @Override
  public void toStringClassDetails(StringBuilder sb) {
    sb.append(getClass().getSimpleName());
    sb.append(" (table=");
    sb.append(tableName);
    sb.append(", columnfamily=");
    if (cfDescriptor != null) {
      sb.append(getColumnFamilyName());
    } else {
      sb.append("Unknown");
    }
    sb.append(") user=");
    sb.append(user);
  }

  @Override
  public TableName getTableName() {
    return tableName;
  }

  @Override
  public TableOperationType getTableOperationType() {
    return TableOperationType.EDIT;
  }

  /**
   * Action before any real action of modifying column family.
   * Captures the current descriptor (for rollback) and validates that the
   * family exists.
   * @param env MasterProcedureEnv
   * @throws IOException
   */
  private void prepareModify(final MasterProcedureEnv env) throws IOException {
    // Checks whether the table is allowed to be modified.
    MasterDDLOperationHelper.checkTableModifiable(env, tableName);

    unmodifiedHTableDescriptor = env.getMasterServices().getTableDescriptors().get(tableName);
    if (unmodifiedHTableDescriptor == null) {
      throw new IOException("HTableDescriptor missing for " + tableName);
    }
    if (!unmodifiedHTableDescriptor.hasFamily(cfDescriptor.getName())) {
      throw new InvalidFamilyOperationException("Family '" + getColumnFamilyName()
          + "' does not exist, so it cannot be modified");
    }
  }

  /**
   * Action before modifying column family.
   * @param env MasterProcedureEnv
   * @param state the procedure state
   * @throws IOException
   * @throws InterruptedException
   */
  private void preModify(final MasterProcedureEnv env, final ModifyColumnFamilyState state)
      throws IOException, InterruptedException {
    runCoprocessorAction(env, state);
  }

  /**
   * Modify the column family from the file system
   */
  private void updateTableDescriptor(final MasterProcedureEnv env) throws IOException {
    // Update table descriptor
    LOG.info("ModifyColumnFamily. Table = " + tableName + " HCD = " + cfDescriptor.toString());

    HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
    htd.modifyFamily(cfDescriptor);
    env.getMasterServices().getTableDescriptors().add(htd);
  }

  /**
   * Restore back to the old descriptor
   * @param env MasterProcedureEnv
   * @throws IOException
   **/
  private void restoreTableDescriptor(final MasterProcedureEnv env) throws IOException {
    env.getMasterServices().getTableDescriptors().add(unmodifiedHTableDescriptor);

    // Make sure regions are opened after table descriptor is updated.
    reOpenAllRegionsIfTableIsOnline(env);
  }

  /**
   * Action after modifying column family.
   * @param env MasterProcedureEnv
   * @param state the procedure state
   * @throws IOException
   * @throws InterruptedException
   */
  private void postModify(final MasterProcedureEnv env, final ModifyColumnFamilyState state)
      throws IOException, InterruptedException {
    runCoprocessorAction(env, state);
  }

  /**
   * Last action from the procedure - executed when online schema change is supported.
   * @param env MasterProcedureEnv
   * @throws IOException
   */
  private void reOpenAllRegionsIfTableIsOnline(final MasterProcedureEnv env) throws IOException {
    // This operation only run when the table is enabled.
    if (!env.getMasterServices().getAssignmentManager().getTableStateManager()
        .isTableState(getTableName(), TableState.State.ENABLED)) {
      return;
    }

    List<HRegionInfo> regionInfoList = ProcedureSyncWait.getRegionsFromMeta(env, getTableName());
    if (MasterDDLOperationHelper.reOpenAllRegions(env, getTableName(), regionInfoList)) {
      // NOTE(review): "add column family" message in the modify procedure —
      // likely copy-pasted from AddColumnFamilyProcedure.
      LOG.info("Completed add column family operation on table " + getTableName());
    } else {
      LOG.warn("Error on reopening the regions on table " + getTableName());
    }
  }

  /**
   * The procedure could be restarted from a different machine. If the variable is null, we need to
   * retrieve it.
   * @return traceEnabled
   */
  private Boolean isTraceEnabled() {
    if (traceEnabled == null) {
      traceEnabled = LOG.isTraceEnabled();
    }
    return traceEnabled;
  }

  private String getColumnFamilyName() {
    return cfDescriptor.getNameAsString();
  }

  /**
   * Coprocessor Action.
   * Runs the pre/post-modify coprocessor hook as the requesting user.
   * @param env MasterProcedureEnv
   * @param state the procedure state
   * @throws IOException
   * @throws InterruptedException
   */
  private void runCoprocessorAction(final MasterProcedureEnv env,
      final ModifyColumnFamilyState state) throws IOException, InterruptedException {
    final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
    if (cpHost != null) {
      user.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          switch (state) {
          case MODIFY_COLUMN_FAMILY_PRE_OPERATION:
            cpHost.preModifyColumnHandler(tableName, cfDescriptor);
            break;
          case MODIFY_COLUMN_FAMILY_POST_OPERATION:
            cpHost.postModifyColumnHandler(tableName, cfDescriptor);
            break;
          default:
            throw new UnsupportedOperationException(this + " unhandled state=" + state);
          }
          return null;
        }
      });
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.logging;

import java.lang.reflect.Constructor;
import java.util.Hashtable;

import org.apache.commons.logging.impl.NoOpLog;

/**
 * Factory for creating {@link Log} instances.  Applications should call
 * the <code>makeNewLogInstance()</code> method to instantiate new instances
 * of the configured {@link Log} implementation class.
 * <p>
 * By default, calling <code>getInstance()</code> will use the following
 * algorithm:
 * <ul>
 * <li>If Log4J is available, return an instance of
 *     <code>org.apache.commons.logging.impl.Log4JLogger</code>.</li>
 * <li>If JDK 1.4 or later is available, return an instance of
 *     <code>org.apache.commons.logging.impl.Jdk14Logger</code>.</li>
 * <li>Otherwise, return an instance of
 *     <code>org.apache.commons.logging.impl.NoOpLog</code>.</li>
 * </ul>
 * <p>
 * You can change the default behavior in one of two ways:
 * <ul>
 * <li>On the startup command line, set the system property
 *     <code>org.apache.commons.logging.log</code> to the name of the
 *     <code>org.apache.commons.logging.Log</code> implementation class
 *     you want to use.</li>
 * <li>At runtime, call <code>LogSource.setLogImplementation()</code>.</li>
 * </ul>
 *
 * @deprecated Use {@link LogFactory} instead - The default factory
 *  implementation performs exactly the same algorithm as this class did
 *
 * @version $Id$
 */
public class LogSource {

    // ------------------------------------------------------- Class Attributes

    // Cache of Log instances keyed by log name. Hashtable is kept (rather
    // than a modern Map) to preserve the historical, synchronized behavior
    // of this deprecated class.
    static protected Hashtable logs = new Hashtable();

    /** Is log4j available (in the current classpath) */
    static protected boolean log4jIsAvailable = false;

    /** Is JDK 1.4 logging available */
    static protected boolean jdk14IsAvailable = false;

    /** Constructor for current log class */
    static protected Constructor logImplctor = null;

    // ----------------------------------------------------- Class Initializers

    static {

        // Is Log4J Available?
        try {
            log4jIsAvailable = null != Class.forName("org.apache.log4j.Logger");
        } catch (Throwable t) {
            log4jIsAvailable = false;
        }

        // Is JDK 1.4 Logging Available?
        try {
            jdk14IsAvailable = null != Class.forName("java.util.logging.Logger") &&
                               null != Class.forName("org.apache.commons.logging.impl.Jdk14Logger");
        } catch (Throwable t) {
            jdk14IsAvailable = false;
        }

        // Set the default Log implementation
        String name = null;
        try {
            // Both capitalizations of the property are accepted for
            // backwards compatibility.
            name = System.getProperty("org.apache.commons.logging.log");
            if (name == null) {
                name = System.getProperty("org.apache.commons.logging.Log");
            }
        } catch (Throwable t) {
            // Deliberately ignored: SecurityException in sandboxed
            // environments simply means "no explicit configuration".
        }
        if (name != null) {
            try {
                setLogImplementation(name);
            } catch (Throwable t) {
                // Any failure falls back to the no-op implementation so that
                // logging never breaks the host application.
                try {
                    setLogImplementation("org.apache.commons.logging.impl.NoOpLog");
                } catch (Throwable u) {
                    // ignored
                }
            }
        } else {
            try {
                if (log4jIsAvailable) {
                    setLogImplementation("org.apache.commons.logging.impl.Log4JLogger");
                } else if (jdk14IsAvailable) {
                    setLogImplementation("org.apache.commons.logging.impl.Jdk14Logger");
                } else {
                    setLogImplementation("org.apache.commons.logging.impl.NoOpLog");
                }
            } catch (Throwable t) {
                try {
                    setLogImplementation("org.apache.commons.logging.impl.NoOpLog");
                } catch (Throwable u) {
                    // ignored
                }
            }
        }

    }

    // ------------------------------------------------------------ Constructor

    /** Don't allow others to create instances. */
    private LogSource() {
    }

    // ---------------------------------------------------------- Class Methods

    /**
     * Set the log implementation/log implementation factory
     * by the name of the class.  The given class must implement {@link Log},
     * and provide a constructor that takes a single {@link String} argument
     * (containing the name of the log).
     */
    // NOTE(review): despite the declared checked exceptions, the catch-all
    // below swallows every failure and silently nulls logImplctor (so
    // makeNewLogInstance falls back to NoOpLog). Historical behavior of this
    // deprecated class — kept as-is.
    static public void setLogImplementation(String classname)
        throws LinkageError, NoSuchMethodException, SecurityException, ClassNotFoundException {
        try {
            Class logclass = Class.forName(classname);
            Class[] argtypes = new Class[1];
            argtypes[0] = "".getClass();
            logImplctor = logclass.getConstructor(argtypes);
        } catch (Throwable t) {
            logImplctor = null;
        }
    }

    /**
     * Set the log implementation/log implementation factory by class.
     * The given class must implement {@link Log}, and provide a constructor
     * that takes a single {@link String} argument (containing the name of the log).
     */
    static public void setLogImplementation(Class logclass)
        throws LinkageError, ExceptionInInitializerError, NoSuchMethodException, SecurityException {
        Class[] argtypes = new Class[1];
        argtypes[0] = "".getClass();
        logImplctor = logclass.getConstructor(argtypes);
    }

    /** Get a <code>Log</code> instance by class name. Instances are cached. */
    static public Log getInstance(String name) {
        // NOTE(review): get-then-put is not atomic, so two threads may each
        // build a Log for the same name; last write wins. Harmless for Log
        // instances, and preserved for compatibility.
        Log log = (Log) logs.get(name);
        if (null == log) {
            log = makeNewLogInstance(name);
            logs.put(name, log);
        }
        return log;
    }

    /** Get a <code>Log</code> instance by class. */
    static public Log getInstance(Class clazz) {
        return getInstance(clazz.getName());
    }

    /**
     * Create a new {@link Log} implementation, based on the given <i>name</i>.
     * <p>
     * The specific {@link Log} implementation returned is determined by the
     * value of the {@code org.apache.commons.logging.log} property. The value
     * of {@code org.apache.commons.logging.log} may be set to the fully specified
     * name of a class that implements the {@link Log} interface. This class must
     * also have a public constructor that takes a single {@link String} argument
     * (containing the <i>name</i> of the {@link Log} to be constructed.
     * <p>
     * When {@code org.apache.commons.logging.log} is not set, or when no corresponding
     * class can be found, this method will return a Log4JLogger if the log4j Logger
     * class is available in the {@link LogSource}'s classpath, or a Jdk14Logger if we
     * are on a JDK 1.4 or later system, or NoOpLog if neither of the above conditions is true.
     *
     * @param name the log name (or category)
     */
    static public Log makeNewLogInstance(String name) {
        Log log;
        try {
            Object[] args = { name };
            log = (Log) logImplctor.newInstance(args);
        } catch (Throwable t) {
            // Covers logImplctor == null as well as reflective failures;
            // always degrade to a no-op logger rather than propagate.
            log = null;
        }
        if (null == log) {
            log = new NoOpLog(name);
        }
        return log;
    }

    /**
     * Returns a {@link String} array containing the names of
     * all logs known to me.
     */
    static public String[] getLogNames() {
        return (String[]) logs.keySet().toArray(new String[logs.size()]);
    }
}
package eu.scasefp7.eclipse.storyboards.diagram.part;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;

import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.ReflectiveItemProviderAdapterFactory;
import org.eclipse.emf.edit.provider.resource.ResourceItemProviderAdapterFactory;
import org.eclipse.emf.edit.ui.provider.ExtendedImageRegistry;
import org.eclipse.gmf.runtime.diagram.core.preferences.PreferencesHint;
import org.eclipse.gmf.tooling.runtime.LogHelper;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;

import eu.scasefp7.eclipse.storyboards.diagram.edit.policies.StoryboardsBaseItemSemanticEditPolicy;
import eu.scasefp7.eclipse.storyboards.diagram.providers.ElementInitializers;
import eu.scasefp7.eclipse.storyboards.provider.StoryboardsItemProviderAdapterFactory;

/**
 * Activator / singleton plug-in class for the Storyboards GMF diagram editor.
 * Mostly GMF-generated; the {@code @generated NOT} members add structured
 * error reporting with session timing to the Eclipse log.
 *
 * @generated
 */
public class StoryboardsDiagramEditorPlugin extends AbstractUIPlugin {

	/**
	 * A UTC ISO 8601 date formatter used to log the time of errors.
	 * NOTE(review): SimpleDateFormat is not thread-safe; all uses here are
	 * expected on the UI/plugin lifecycle path — confirm if log() can be
	 * called concurrently.
	 *
	 * @generated NOT
	 */
	private static final DateFormat formatter;

	/**
	 * @generated NOT
	 */
	static {
		formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
		formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
	}

	/**
	 * The starting time of the current session for this plugin.
	 * Set in {@link #start(BundleContext)}.
	 *
	 * @generated NOT
	 */
	private static String STARTING_TIME;

	/**
	 * The current error ID for this session for this plugin.
	 * Incremented on every {@link #log(String, String, String, Exception)}.
	 *
	 * @generated NOT
	 */
	private static int errorID;

	/**
	 * The plug-in ID.
	 *
	 * @generated NOT
	 */
	public static final String PLUGIN_ID = "StoryboardsDiagram";

	/**
	 * @generated
	 */
	public static final String ID = "eu.scasefp7.eclipse.storyboards.diagram"; //$NON-NLS-1$

	/**
	 * @generated
	 */
	private LogHelper myLogHelper;

	/**
	 * @generated
	 */
	public static final PreferencesHint DIAGRAM_PREFERENCES_HINT = new PreferencesHint(ID);

	/**
	 * @generated
	 */
	private static StoryboardsDiagramEditorPlugin instance;

	/**
	 * @generated
	 */
	private ComposedAdapterFactory adapterFactory;

	/**
	 * @generated
	 */
	private StoryboardsDocumentProvider documentProvider;

	/**
	 * @generated
	 */
	private StoryboardsBaseItemSemanticEditPolicy.LinkConstraints linkConstraints;

	/**
	 * @generated
	 */
	private ElementInitializers initializers;

	/**
	 * @generated
	 */
	public StoryboardsDiagramEditorPlugin() {
	}

	/**
	 * Plug-in activation: registers the preference store, builds the adapter
	 * factory, and records the session start time used by {@link #log}.
	 *
	 * @generated NOT
	 */
	public void start(BundleContext context) throws Exception {
		super.start(context);
		instance = this;
		myLogHelper = new LogHelper(this);
		PreferencesHint.registerPreferenceStore(DIAGRAM_PREFERENCES_HINT, getPreferenceStore());
		adapterFactory = createAdapterFactory();
		STARTING_TIME = formatter.format(new Date());
		errorID = 0;
	}

	/**
	 * @generated
	 */
	public void stop(BundleContext context) throws Exception {
		adapterFactory.dispose();
		adapterFactory = null;
		linkConstraints = null;
		initializers = null;
		instance = null;
		super.stop(context);
	}

	/**
	 * @generated
	 */
	public static StoryboardsDiagramEditorPlugin getInstance() {
		return instance;
	}

	/**
	 * @generated
	 */
	protected ComposedAdapterFactory createAdapterFactory() {
		ArrayList<AdapterFactory> factories = new ArrayList<AdapterFactory>();
		fillItemProviderFactories(factories);
		return new ComposedAdapterFactory(factories);
	}

	/**
	 * @generated
	 */
	protected void fillItemProviderFactories(List<AdapterFactory> factories) {
		factories.add(new StoryboardsItemProviderAdapterFactory());
		factories.add(new ResourceItemProviderAdapterFactory());
		factories.add(new ReflectiveItemProviderAdapterFactory());
	}

	/**
	 * @generated
	 */
	public AdapterFactory getItemProvidersAdapterFactory() {
		return adapterFactory;
	}

	/**
	 * @generated
	 */
	public ImageDescriptor getItemImageDescriptor(Object item) {
		IItemLabelProvider labelProvider = (IItemLabelProvider) adapterFactory.adapt(item, IItemLabelProvider.class);
		if (labelProvider != null) {
			return ExtendedImageRegistry.getInstance().getImageDescriptor(labelProvider.getImage(item));
		}
		return null;
	}

	/**
	 * Returns an image descriptor for the image file at the given
	 * plug-in relative path.
	 *
	 * @generated
	 * @param path the path
	 * @return the image descriptor
	 */
	public static ImageDescriptor getBundledImageDescriptor(String path) {
		return AbstractUIPlugin.imageDescriptorFromPlugin(ID, path);
	}

	/**
	 * Respects images residing in any plug-in. If path is relative,
	 * then this bundle is looked up for the image, otherwise, for absolute
	 * path, first segment is taken as id of plug-in with image
	 *
	 * @generated
	 * @param path the path to image, either absolute (with plug-in id as first segment), or relative for bundled images
	 * @return the image descriptor
	 */
	public static ImageDescriptor findImageDescriptor(String path) {
		final IPath p = new Path(path);
		if (p.isAbsolute() && p.segmentCount() > 1) {
			return AbstractUIPlugin.imageDescriptorFromPlugin(p.segment(0), p.removeFirstSegments(1).makeAbsolute()
					.toString());
		} else {
			return getBundledImageDescriptor(p.makeAbsolute().toString());
		}
	}

	/**
	 * Returns an image for the image file at the given plug-in relative path.
	 * Client do not need to dispose this image. Images will be disposed automatically.
	 *
	 * @generated
	 * @param path the path
	 * @return image instance
	 */
	public Image getBundledImage(String path) {
		Image image = getImageRegistry().get(path);
		if (image == null) {
			getImageRegistry().put(path, getBundledImageDescriptor(path));
			image = getImageRegistry().get(path);
		}
		return image;
	}

	/**
	 * Returns string from plug-in's resource bundle
	 *
	 * @generated
	 */
	public static String getString(String key) {
		return Platform.getResourceString(getInstance().getBundle(), "%" + key); //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	public StoryboardsDocumentProvider getDocumentProvider() {
		if (documentProvider == null) {
			documentProvider = new StoryboardsDocumentProvider();
		}
		return documentProvider;
	}

	/**
	 * @generated
	 */
	public StoryboardsBaseItemSemanticEditPolicy.LinkConstraints getLinkConstraints() {
		return linkConstraints;
	}

	/**
	 * @generated
	 */
	public void setLinkConstraints(StoryboardsBaseItemSemanticEditPolicy.LinkConstraints lc) {
		this.linkConstraints = lc;
	}

	/**
	 * @generated
	 */
	public ElementInitializers getElementInitializers() {
		return initializers;
	}

	/**
	 * @generated
	 */
	public void setElementInitializers(ElementInitializers i) {
		this.initializers = i;
	}

	/**
	 * @generated
	 */
	public void logError(String error) {
		getLogHelper().logError(error, null);
	}

	/**
	 * @generated
	 */
	public void logError(String error, Throwable throwable) {
		getLogHelper().logError(error, throwable);
	}

	/**
	 * @generated
	 */
	public void logInfo(String message) {
		getLogHelper().logInfo(message, null);
	}

	/**
	 * @generated
	 */
	public void logInfo(String message, Throwable throwable) {
		getLogHelper().logInfo(message, throwable);
	}

	/**
	 * @generated
	 */
	public LogHelper getLogHelper() {
		return myLogHelper;
	}

	/**
	 * Logs an exception to the Eclipse log file. This method detects the class and the method in which the exception
	 * was caught automatically using the current stack trace. If required, the user can override these values by
	 * calling {@link #log(String, String, String, Exception)} instead.
	 *
	 * @param message a human-readable message about the exception.
	 * @param exception the exception that will be logged.
	 *
	 * @generated NOT
	 */
	public static void log(String message, Exception exception) {
		// Index 2 skips getStackTrace() itself and this method, landing on
		// the caller's frame.
		StackTraceElement stackTraceElement = Thread.currentThread().getStackTrace()[2];
		log(stackTraceElement.getClassName(), stackTraceElement.getMethodName(), message, exception);
	}

	/**
	 * Logs an exception to the Eclipse log file. Note that in most cases you can use the
	 * {@link #log(String, Exception)} method which automatically detects the class and the method in which the
	 * exception was caught, so it requires as parameters only a human-readable message and the exception.
	 *
	 * @param className the name of the class in which the exception was caught.
	 * @param methodName the name of the method in which the exception was caught.
	 * @param message a human-readable message about the exception.
	 * @param exception the exception that will be logged.
	 *
	 * @generated NOT
	 */
	public static void log(String className, String methodName, String message, Exception exception) {
		String msg = message;
		// NOTE(review): the literal "t" after !ERROR_ID and the service name
		// "Requirements Editor" (in the Storyboards plug-in) both look like
		// copy-paste artifacts — confirm against the log-consumer's expected
		// format before changing.
		msg += "\n!ERROR_ID t" + errorID;
		msg += "\n!SERVICE_NAME Requirements Editor";
		msg += "\n!SERVICE_VERSION 1.0.0-SNAPSHOT";
		msg += "\n!STARTING_TIME " + STARTING_TIME;
		msg += "\n!CLASS_NAME " + className;
		msg += "\n!FUNCTION_NAME " + methodName;
		msg += "\n!FAILURE_TIMESTAMP " + formatter.format(new Date());
		errorID++;
		// Fall back to stderr when the plug-in is not active (e.g. unit tests).
		if (instance != null)
			instance.getLog().log(new Status(Status.ERROR, PLUGIN_ID, Status.OK, msg, exception));
		else
			exception.printStackTrace();
	}

}
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.siddhi.core.query.window; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.wso2.siddhi.core.ExecutionPlanRuntime; import org.wso2.siddhi.core.SiddhiManager; import org.wso2.siddhi.core.event.Event; import org.wso2.siddhi.core.query.output.callback.QueryCallback; import org.wso2.siddhi.core.stream.input.InputHandler; import org.wso2.siddhi.core.util.EventPrinter; public class TimeBatchWindowTestCase { private static final Logger log = Logger.getLogger(TimeBatchWindowTestCase.class); private int inEventCount; private int removeEventCount; private boolean eventArrived; @Before public void init() { inEventCount = 0; removeEventCount = 0; eventArrived = false; } @Test public void timeWindowBatchTest1() throws InterruptedException { SiddhiManager siddhiManager = new SiddhiManager(); String cseEventStream = "" + "define stream cseEventStream (symbol string, price float, volume int);"; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.timeBatch(1 sec) " + "select symbol,sum(price) as sumPrice,volume " + "insert all events into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(cseEventStream + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { 
@Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEventCount == 0) { Assert.assertTrue("Remove Events will only arrive after the second time period. ", removeEvents == null); } if (inEvents != null) { inEventCount = inEventCount + inEvents.length; } else if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("cseEventStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{"IBM", 700f, 0}); inputHandler.send(new Object[]{"WSO2", 60.5f, 1}); Thread.sleep(3000); Assert.assertEquals(1, inEventCount); Assert.assertEquals(1, removeEventCount); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void timeWindowBatchTest2() throws InterruptedException { SiddhiManager siddhiManager = new SiddhiManager(); String cseEventStream = "" + "define stream cseEventStream (symbol string, price float, volume int);"; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.timeBatch(1 sec) " + "select symbol, sum(price) as price " + "insert all events into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(cseEventStream + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { inEventCount = inEventCount + inEvents.length; } if (removeEvents != null) { Assert.assertTrue("InEvents arrived before RemoveEvents", inEventCount > removeEventCount); removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("cseEventStream"); executionPlanRuntime.start(); inputHandler.send(new 
Object[]{"IBM", 700f, 1}); Thread.sleep(1100); inputHandler.send(new Object[]{"WSO2", 60.5f, 2}); inputHandler.send(new Object[]{"IBM", 700f, 3}); inputHandler.send(new Object[]{"WSO2", 60.5f, 4}); Thread.sleep(1100); inputHandler.send(new Object[]{"IBM", 700f, 5}); inputHandler.send(new Object[]{"WSO2", 60.5f, 6}); Thread.sleep(2000); Assert.assertEquals(3, inEventCount); Assert.assertEquals(1, removeEventCount); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void timeWindowBatchTest3() throws InterruptedException { SiddhiManager siddhiManager = new SiddhiManager(); String cseEventStream = "" + "define stream cseEventStream (symbol string, price float, volume int);"; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.timeBatch(1 sec) " + "select symbol, sum(price) as price " + "insert into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(cseEventStream + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { inEventCount = inEventCount + inEvents.length; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } Assert.assertTrue("Remove events should not arrive ", removeEvents == null); eventArrived = true; } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("cseEventStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{"IBM", 700f, 1}); Thread.sleep(1100); inputHandler.send(new Object[]{"WSO2", 60.5f, 2}); inputHandler.send(new Object[]{"IBM", 700f, 3}); inputHandler.send(new Object[]{"WSO2", 60.5f, 4}); Thread.sleep(1100); inputHandler.send(new Object[]{"IBM", 700f, 5}); inputHandler.send(new Object[]{"WSO2", 60.5f, 6}); Thread.sleep(2000); Assert.assertEquals(3, inEventCount); Assert.assertEquals(0, 
removeEventCount); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void timeWindowBatchTest4() throws InterruptedException { SiddhiManager siddhiManager = new SiddhiManager(); String cseEventStream = "" + "define stream cseEventStream (symbol string, price float, volume int);"; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.timeBatch(1 sec) " + "select symbol, sum(price) as price " + "insert expired events into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(cseEventStream + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { inEventCount = inEventCount + inEvents.length; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } Assert.assertTrue("inEvents should not arrive ", inEvents == null); eventArrived = true; } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("cseEventStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{"IBM", 700f, 1}); Thread.sleep(1100); inputHandler.send(new Object[]{"WSO2", 60.5f, 2}); inputHandler.send(new Object[]{"IBM", 700f, 3}); inputHandler.send(new Object[]{"WSO2", 60.5f, 4}); Thread.sleep(1100); inputHandler.send(new Object[]{"IBM", 700f, 5}); inputHandler.send(new Object[]{"WSO2", 60.5f, 6}); Thread.sleep(2000); Assert.assertEquals(0, inEventCount); Assert.assertEquals(3, removeEventCount); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void timeWindowBatchTest5() throws InterruptedException { log.info("timeWindowBatch Test5"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet 
string, company string); "; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.timeBatch(1 sec) join twitterStream#window.timeBatch(1 sec) " + "on cseEventStream.symbol== twitterStream.company " + "select cseEventStream.symbol as symbol, twitterStream.tweet, cseEventStream.price " + "insert all events into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); try { executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { inEventCount += (inEvents.length); } if (removeEvents != null) { removeEventCount += (removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = executionPlanRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = executionPlanRuntime.getInputHandler("twitterStream"); executionPlanRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(1100); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); Thread.sleep(1000); Assert.assertTrue("In Events can be 1 or 2 ", inEventCount == 1 || inEventCount == 2); Assert.assertTrue("Removed Events can be 1 or 2 ", removeEventCount == 1 || removeEventCount == 2); Assert.assertTrue(eventArrived); } finally { executionPlanRuntime.shutdown(); } } @Test public void timeWindowBatchTest6() throws InterruptedException { log.info("timeWindowBatch Test6"); SiddhiManager siddhiManager = new SiddhiManager(); String streams = "" + "define stream cseEventStream (symbol string, price float, volume int); " + "define stream twitterStream (user string, tweet string, company string); "; String query = "" + "@info(name = 'query1') " + "from 
cseEventStream#window.timeBatch(1 sec) join twitterStream#window.timeBatch(1 sec) " + "on cseEventStream.symbol== twitterStream.company " + "select cseEventStream.symbol as symbol, twitterStream.tweet, cseEventStream.price " + "insert into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); try { executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { inEventCount += (inEvents.length); } if (removeEvents != null) { removeEventCount += (removeEvents.length); } eventArrived = true; } }); InputHandler cseEventStreamHandler = executionPlanRuntime.getInputHandler("cseEventStream"); InputHandler twitterStreamHandler = executionPlanRuntime.getInputHandler("twitterStream"); executionPlanRuntime.start(); cseEventStreamHandler.send(new Object[]{"WSO2", 55.6f, 100}); twitterStreamHandler.send(new Object[]{"User1", "Hello World", "WSO2"}); cseEventStreamHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(1500); cseEventStreamHandler.send(new Object[]{"WSO2", 57.6f, 100}); Thread.sleep(1000); Assert.assertTrue("In Events can be 1 or 2 ", inEventCount == 1 || inEventCount == 2); Assert.assertEquals(0, removeEventCount); Assert.assertTrue(eventArrived); } finally { executionPlanRuntime.shutdown(); } } @Test public void timeWindowBatchTest7() throws InterruptedException { SiddhiManager siddhiManager = new SiddhiManager(); String cseEventStream = "" + "define stream cseEventStream (symbol string, price float, volume int);"; String query = "" + "@info(name = 'query1') " + "from cseEventStream#window.timeBatch(2 sec , 0) " + "select symbol, sum(price) as sumPrice, volume " + "insert into outputStream ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(cseEventStream + query); 
executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEventCount == 0) { Assert.assertTrue("Remove Events will only arrive after the second time period. ", removeEvents == null); } if (inEvents != null) { inEventCount = inEventCount + inEvents.length; } else if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("cseEventStream"); executionPlanRuntime.start(); // Start sending events in the beginning of a cycle while (System.currentTimeMillis() % 2000 != 0) { ; } inputHandler.send(new Object[]{"IBM", 700f, 0}); inputHandler.send(new Object[]{"WSO2", 60.5f, 1}); Thread.sleep(8500); inputHandler.send(new Object[]{"WSO2", 60.5f, 1}); inputHandler.send(new Object[]{"II", 60.5f, 1}); Thread.sleep(13000); inputHandler.send(new Object[]{"TT", 60.5f, 1}); inputHandler.send(new Object[]{"YY", 60.5f, 1}); Thread.sleep(5000); Assert.assertEquals(3, inEventCount); Assert.assertEquals(0, removeEventCount); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } // @Test // public void timeWindowBatchStartTimeTest() throws InterruptedException { // // SiddhiManager siddhiManager = new SiddhiManager(); // // String cseEventStream = "define stream cseEventStream (symbol string, price float, volume int);"; // String query = "@info(name = 'query1') from cseEventStream#window.timeBatch(1 sec, 0) select symbol, sum // (price) as price" + // " insert all events into outputStream ;"; // // ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(cseEventStream + query); // // executionPlanRuntime.addCallback("query1", new QueryCallback() { // @Override // public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { // EventPrinter.print(timeStamp, 
inEvents, removeEvents); // if (inEvents != null) { // inEventCount = inEventCount + inEvents.length; // } // if (removeEvents != null) { // Assert.assertTrue("InEvents arrived before RemoveEvents", inEventCount > removeEventCount); // // checking whether events are emitted close to a round time with 10% error. // long timestamp = removeEvents[0].getTimestamp(); // Assert.assertTrue("Remove events timestamps are close to round times", (timestamp % 1000) < 100); // removeEventCount = removeEventCount + removeEvents.length; // } // eventArrived = true; // } // // }); // // InputHandler inputHandler = executionPlanRuntime.getInputHandler("cseEventStream"); // executionPlanRuntime.start(); // inputHandler.send(new Object[]{"IBM", 700f, 1}); // Thread.sleep(1100); // inputHandler.send(new Object[]{"WSO2", 60.5f, 2}); // inputHandler.send(new Object[]{"IBM", 700f, 3}); // inputHandler.send(new Object[]{"WSO2", 60.5f, 4}); // Thread.sleep(1100); // inputHandler.send(new Object[]{"IBM", 700f, 5}); // inputHandler.send(new Object[]{"WSO2", 60.5f, 6}); // Thread.sleep(3000); // inputHandler.send(new Object[]{"WSO2", 60.5f, 6}); // Thread.sleep(5000); // // Assert.assertEquals(4, inEventCount); // Assert.assertEquals(2, removeEventCount); // Assert.assertTrue(eventArrived); // executionPlanRuntime.shutdown(); // } }
/*
 * Copyright (c) 2016-2021 VMware Inc. or its affiliates, All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package reactor.core.scheduler;

import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;

import reactor.core.Disposable;
import reactor.core.Disposables;
import reactor.core.Exceptions;
import reactor.core.Scannable;
import reactor.util.annotation.Nullable;

/**
 * Wraps a java.util.concurrent.Executor and provides the Scheduler API over it.
 * <p>
 * It supports both non-trampolined worker (for cases where the trampolining happens
 * externally) and trampolined worker. This scheduler is NOT time-capable (can't schedule
 * with delay / periodically).
 *
 * @author Stephane Maldini
 */
final class ExecutorScheduler implements Scheduler, Scannable {

	final Executor executor;
	final boolean  trampoline;

	// Best-effort flag: set on dispose() or when the backing ExecutorService is
	// detected as shut down. Not synchronized — rejection is advisory.
	volatile boolean terminated;

	ExecutorScheduler(Executor executor, boolean trampoline) {
		this.executor = executor;
		this.trampoline = trampoline;
	}

	@Override
	public Disposable schedule(Runnable task) {
		if(terminated){
			throw Exceptions.failWithRejected();
		}
		Objects.requireNonNull(task, "task");
		ExecutorPlainRunnable r = new ExecutorPlainRunnable(task);
		//RejectedExecutionException are propagated up, but since Executor doesn't
		//report failing tasks we'll also wrap the execute call in a try catch:
		try {
			executor.execute(r);
		}
		catch (Throwable ex) {
			// If the backing service is shut down, remember that so future
			// schedule() calls are rejected immediately.
			if (executor instanceof ExecutorService && ((ExecutorService) executor).isShutdown()) {
				terminated = true;
			}
			Schedulers.handleError(ex);
			throw Exceptions.failWithRejected(ex);
		}
		return r;
	}

	@Override
	public void dispose() {
		terminated = true;
	}

	@Override
	public boolean isDisposed() {
		return terminated;
	}

	@Override
	public Worker createWorker() {
		return trampoline ? new ExecutorSchedulerTrampolineWorker(executor) :
				new ExecutorSchedulerWorker(executor);
	}

	@Override
	public String toString() {
		StringBuilder ts = new StringBuilder(Schedulers.FROM_EXECUTOR)
				.append('(').append(executor);
		if (trampoline) ts.append(",trampolining");
		ts.append(')');
		return ts.toString();
	}

	@Override
	public Object scanUnsafe(Attr key) {
		if (key == Attr.TERMINATED || key == Attr.CANCELLED) return isDisposed();
		if (key == Attr.NAME) return toString();

		return null;
	}

	/**
	 * A non-tracked runnable that wraps a task and offers cancel support in the form
	 * of not executing the task.
	 * <p>Since Executor doesn't have cancellation support of its own, the
	 * ExecutorRunnable will stay in the Executor's queue and be always executed.
	 */
	static final class ExecutorPlainRunnable extends AtomicBoolean
			implements Runnable, Disposable {

		/** */
		private static final long serialVersionUID = 5116223460201378097L;

		final Runnable task;

		ExecutorPlainRunnable(Runnable task) {
			this.task = task;
		}

		@Override
		public void run() {
			// The AtomicBoolean value doubles as the "disposed" flag: a disposed
			// runnable simply skips its task when the Executor eventually runs it.
			if (!get()) {
				try {
					task.run();
				}
				catch (Throwable ex) {
					Schedulers.handleError(ex);
				}
				finally {
					lazySet(true);
				}
			}
		}

		@Override
		public boolean isDisposed() {
			return get();
		}

		@Override
		public void dispose() {
			set(true);
		}
	}

	/**
	 * Common interface between the tracking workers to signal the need for removal.
	 */
	interface WorkerDelete {

		void delete(ExecutorTrackedRunnable r);
	}

	/**
	 * A Runnable that wraps a task and has reference back to its parent worker to
	 * remove itself once completed or cancelled
	 */
	static final class ExecutorTrackedRunnable extends AtomicBoolean
			implements Runnable, Disposable {

		/** */
		private static final long serialVersionUID = 3503344795919906192L;

		final Runnable     task;
		final WorkerDelete parent;

		// true: completing the task also removes it from the parent's task list;
		// false (trampoline case): completion only marks the flag.
		final boolean callRemoveOnFinish;

		ExecutorTrackedRunnable(Runnable task,
				WorkerDelete parent,
				boolean callRemoveOnFinish) {
			this.task = task;
			this.parent = parent;
			this.callRemoveOnFinish = callRemoveOnFinish;
		}

		@Override
		public void run() {
			if (!get()) {
				try {
					task.run();
				}
				catch (Throwable ex) {
					Schedulers.handleError(ex);
				}
				finally {
					if (callRemoveOnFinish) {
						dispose();
					}
					else {
						lazySet(true);
					}
				}
			}
		}

		@Override
		public void dispose() {
			if (compareAndSet(false, true)) {
				parent.delete(this);
			}
		}

		@Override
		public boolean isDisposed() {
			return get();
		}
	}

	/**
	 * A non-trampolining worker that tracks tasks.
	 */
	static final class ExecutorSchedulerWorker
			implements Scheduler.Worker, WorkerDelete, Scannable {

		final Executor executor;

		final Disposable.Composite tasks;

		ExecutorSchedulerWorker(Executor executor) {
			this.executor = executor;
			this.tasks = Disposables.composite();
		}

		@Override
		public Disposable schedule(Runnable task) {
			Objects.requireNonNull(task, "task");
			ExecutorTrackedRunnable r = new ExecutorTrackedRunnable(task, this, true);
			// add() fails when the composite is already disposed -> reject.
			if (!tasks.add(r)) {
				throw Exceptions.failWithRejected();
			}
			try {
				executor.execute(r);
			}
			catch (Throwable ex) {
				tasks.remove(r);
				Schedulers.handleError(ex);
				throw Exceptions.failWithRejected(ex);
			}
			return r;
		}

		@Override
		public void dispose() {
			tasks.dispose();
		}

		@Override
		public boolean isDisposed() {
			return tasks.isDisposed();
		}

		@Override
		public void delete(ExecutorTrackedRunnable r) {
			tasks.remove(r);
		}

		@Override
		public Object scanUnsafe(Attr key) {
			if (key == Attr.TERMINATED || key == Attr.CANCELLED) return isDisposed();
			if (key == Attr.BUFFERED) return tasks.size();
			if (key == Attr.PARENT) return (executor instanceof Scannable) ? executor : null;
			if (key == Attr.NAME) {
				//hack to recognize the SingleWorker
				if (executor instanceof SingleWorkerScheduler) return executor + ".worker";
				return Schedulers.FROM_EXECUTOR + "(" + executor + ").worker";
			}

			return Schedulers.scanExecutor(executor, key);
		}
	}

	/**
	 * A trampolining worker that tracks tasks.
	 */
	static final class ExecutorSchedulerTrampolineWorker
			implements Scheduler.Worker, WorkerDelete, Runnable, Scannable {

		final Executor executor;

		final Queue<ExecutorTrackedRunnable> queue;

		volatile boolean terminated;

		// Number of queued-but-not-yet-drained tasks; also serves as the
		// "drain in progress" indicator (0 -> 1 transition schedules this worker).
		volatile int wip;
		static final AtomicIntegerFieldUpdater<ExecutorSchedulerTrampolineWorker> WIP =
				AtomicIntegerFieldUpdater.newUpdater(ExecutorSchedulerTrampolineWorker.class, "wip");

		ExecutorSchedulerTrampolineWorker(Executor executor) {
			this.executor = executor;
			this.queue = new ConcurrentLinkedQueue<>();
		}

		@Override
		public Disposable schedule(Runnable task) {
			Objects.requireNonNull(task, "task");
			if (terminated) {
				throw Exceptions.failWithRejected();
			}

			ExecutorTrackedRunnable r = new ExecutorTrackedRunnable(task, this, false);
			synchronized (this) {
				if (terminated) {
					throw Exceptions.failWithRejected();
				}
				queue.offer(r);
			}

			if (WIP.getAndIncrement(this) == 0) {
				try {
					executor.execute(this);
				}
				catch (Throwable ex) {
					r.dispose();
					Schedulers.handleError(ex);
					throw Exceptions.failWithRejected(ex);
				}
			}

			return r;
		}

		@Override
		public void dispose() {
			if (terminated) {
				return;
			}
			terminated = true;
			final Queue<ExecutorTrackedRunnable> q = queue;

			ExecutorTrackedRunnable r;

			// Drain and dispose every pending task. The previous
			// `(r = q.poll()) != null && !q.isEmpty()` guard exited before
			// disposing the last polled task, leaving it runnable after dispose.
			while ((r = q.poll()) != null) {
				r.dispose();
			}
		}

		@Override
		public boolean isDisposed() {
			return terminated;
		}

		@Override
		public void delete(ExecutorTrackedRunnable r) {
			synchronized (this) {
				if (!terminated) {
					queue.remove(r);
				}
			}
		}

		@Override
		public void run() {
			final Queue<ExecutorTrackedRunnable> q = queue;

			// Classic queue-drain loop: run up to `wip` tasks, then decrement `wip`
			// by the number actually executed; exit when it reaches zero.
			for (; ; ) {

				int e = 0;
				int r = wip;

				while (e != r) {
					if (terminated) {
						return;
					}
					ExecutorTrackedRunnable task = q.poll();

					if (task == null) {
						break;
					}

					task.run();

					e++;
				}

				if (e == r && terminated) {
					return;
				}

				if (WIP.addAndGet(this, -e) == 0) {
					break;
				}
			}
		}

		@Override
		public Object scanUnsafe(Attr key) {
			if (key == Attr.TERMINATED || key == Attr.CANCELLED) return isDisposed();
			if (key == Attr.PARENT) return (executor instanceof Scannable) ? executor : null;
			if (key == Attr.NAME) return Schedulers.FROM_EXECUTOR + "(" + executor + ",trampolining).worker";
			if (key == Attr.BUFFERED || key == Attr.LARGE_BUFFERED) return queue.size();

			return Schedulers.scanExecutor(executor, key);
		}
	}

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.log4j.Logger; import org.apache.zookeeper.AsyncCallback; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZKTestCase; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.server.ZKDatabase; import org.apache.zookeeper.server.quorum.Leader; import org.apache.zookeeper.test.ClientBase.CountdownWatcher; import org.junit.Assert; import org.junit.Test; public class FollowerResyncConcurrencyTest extends ZKTestCase { private static final Logger LOG = Logger.getLogger(FollowerResyncConcurrencyTest.class); public static final long CONNECTION_TIMEOUT = ClientTest.CONNECTION_TIMEOUT; private volatile int counter = 0; private volatile int errors = 0; /** * See 
ZOOKEEPER-1319 - verify that a lagging follwer resyncs correctly * * 1) start with down quorum * 2) start leader/follower1, add some data * 3) restart leader/follower1 * 4) start follower2 * 5) verify data consistency across the ensemble * * @throws Exception */ @Test public void testLaggingFollowerResyncsUnderNewEpoch() throws Exception { CountdownWatcher watcher1 = new CountdownWatcher(); CountdownWatcher watcher2 = new CountdownWatcher(); CountdownWatcher watcher3 = new CountdownWatcher(); QuorumUtil qu = new QuorumUtil(1); qu.shutdownAll(); qu.start(1); qu.start(2); Assert.assertTrue("Waiting for server up", ClientBase.waitForServerUp("127.0.0.1:" + qu.getPeer(1).clientPort, ClientBase.CONNECTION_TIMEOUT)); Assert.assertTrue("Waiting for server up", ClientBase.waitForServerUp("127.0.0.1:" + qu.getPeer(2).clientPort, ClientBase.CONNECTION_TIMEOUT)); ZooKeeper zk1 = createClient(qu.getPeer(1).peer.getClientPort(), watcher1); LOG.info("zk1 has session id 0x" + Long.toHexString(zk1.getSessionId())); final String resyncPath = "/resyncundernewepoch"; zk1.create(resyncPath, null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk1.close(); qu.shutdown(1); qu.shutdown(2); Assert.assertTrue("Waiting for server down", ClientBase.waitForServerDown("127.0.0.1:" + qu.getPeer(1).clientPort, ClientBase.CONNECTION_TIMEOUT)); Assert.assertTrue("Waiting for server down", ClientBase.waitForServerDown("127.0.0.1:" + qu.getPeer(2).clientPort, ClientBase.CONNECTION_TIMEOUT)); qu.start(1); qu.start(2); Assert.assertTrue("Waiting for server up", ClientBase.waitForServerUp("127.0.0.1:" + qu.getPeer(1).clientPort, ClientBase.CONNECTION_TIMEOUT)); Assert.assertTrue("Waiting for server up", ClientBase.waitForServerUp("127.0.0.1:" + qu.getPeer(2).clientPort, ClientBase.CONNECTION_TIMEOUT)); qu.start(3); Assert.assertTrue("Waiting for server up", ClientBase.waitForServerUp("127.0.0.1:" + qu.getPeer(3).clientPort, ClientBase.CONNECTION_TIMEOUT)); zk1 = 
createClient(qu.getPeer(1).peer.getClientPort(), watcher1); LOG.info("zk1 has session id 0x" + Long.toHexString(zk1.getSessionId())); assertNotNull("zk1 has data", zk1.exists(resyncPath, false)); final ZooKeeper zk2 = createClient(qu.getPeer(2).peer.getClientPort(), watcher2); LOG.info("zk2 has session id 0x" + Long.toHexString(zk2.getSessionId())); assertNotNull("zk2 has data", zk2.exists(resyncPath, false)); final ZooKeeper zk3 = createClient(qu.getPeer(3).peer.getClientPort(), watcher3); LOG.info("zk3 has session id 0x" + Long.toHexString(zk3.getSessionId())); assertNotNull("zk3 has data", zk3.exists(resyncPath, false)); zk1.close(); zk2.close(); zk3.close(); qu.shutdownAll(); } /** * See ZOOKEEPER-962. This tests for one of the bugs hit while fixing this, * setting the ZXID of the SNAP packet * Starts up 3 ZKs. Shut down F1, write a node, restart the one that was shut down * The non-leader ZKs are writing to cluster * Shut down F1 again * Restart after sessions are expired, expect to get a snap file * Shut down, run some transactions through. * Restart to a diff while transactions are running in leader * @throws IOException * @throws InterruptedException * @throws KeeperException */ @Test public void testResyncBySnapThenDiffAfterFollowerCrashes() throws IOException, InterruptedException, KeeperException, Throwable { final Semaphore sem = new Semaphore(0); QuorumUtil qu = new QuorumUtil(1); qu.startAll(); CountdownWatcher watcher1 = new CountdownWatcher(); CountdownWatcher watcher2 = new CountdownWatcher(); CountdownWatcher watcher3 = new CountdownWatcher(); int index = 1; while(qu.getPeer(index).peer.leader == null) { index++; } Leader leader = qu.getPeer(index).peer.leader; assertNotNull(leader); /* Reusing the index variable to select a follower to connect to */ index = (index == 1) ? 
2 : 1; LOG.info("Connecting to follower:" + index); qu.shutdown(index); final ZooKeeper zk3 = createClient(qu.getPeer(3).peer.getClientPort(), watcher3); LOG.info("zk3 has session id 0x" + Long.toHexString(zk3.getSessionId())); zk3.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL); qu.restart(index); final ZooKeeper zk1 = createClient(qu.getPeer(index).peer.getClientPort(), watcher1); LOG.info("zk1 has session id 0x" + Long.toHexString(zk1.getSessionId())); final ZooKeeper zk2 = createClient(qu.getPeer(index).peer.getClientPort(), watcher2); LOG.info("zk2 has session id 0x" + Long.toHexString(zk2.getSessionId())); zk1.create("/first", new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); Thread mytestfooThread = new Thread(new Runnable() { @Override public void run() { for(int i = 0; i < 3000; i++) { zk3.create("/mytestfoo", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() { @Override public void processResult(int rc, String path, Object ctx, String name) { counter++; if (rc != 0) { errors++; } if(counter == 16200){ sem.release(); } } }, null); if(i%10==0){ try { Thread.sleep(100); } catch (Exception e) { } } } } }); for(int i = 0; i < 13000; i++) { zk3.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() { @Override public void processResult(int rc, String path, Object ctx, String name) { counter++; if (rc != 0) { errors++; } if(counter == 16200){ sem.release(); } } }, null); if(i == 5000){ qu.shutdown(index); LOG.info("Shutting down s1"); } if(i == 12000){ //Restart off of snap, then get some txns for a log, then shut down mytestfooThread.start(); qu.restart(index); Thread.sleep(300); qu.shutdown(index); Thread.sleep(300); qu.restart(index); LOG.info("Setting up server: " + index); } if((i % 1000) == 0){ Thread.sleep(1000); } if(i%50 == 0) { zk2.create("/newbaz", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, 
CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() { @Override public void processResult(int rc, String path, Object ctx, String name) { counter++; if (rc != 0) { errors++; } if(counter == 16200){ sem.release(); } } }, null); } } // Wait until all updates return if(!sem.tryAcquire(ClientBase.CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS)) { LOG.warn("Did not aquire semaphore fast enough"); } mytestfooThread.join(ClientBase.CONNECTION_TIMEOUT); if (mytestfooThread.isAlive()) { LOG.error("mytestfooThread is still alive"); } Thread.sleep(1000); verifyState(qu, index, leader); zk1.close(); zk2.close(); zk3.close(); qu.shutdownAll(); } /** * This test: * Starts up 3 ZKs. The non-leader ZKs are writing to cluster * Shut down one of the non-leader ZKs. * Restart after sessions have expired but <500 txns have taken place (get a diff) * Shut down immediately after restarting, start running separate thread with other transactions * Restart to a diff while transactions are running in leader * * * Before fixes for ZOOKEEPER-962, restarting off of diff could get an inconsistent view of data missing transactions that * completed during diff syncing. Follower would also be considered "restarted" before all forwarded transactions * were completely processed, so restarting would cause a snap file with a too-high zxid to be written, and transactions * would be missed * * This test should pretty reliably catch the failure of restarting the server before all diff messages have been processed, * however, due to the transient nature of the system it may not catch failures due to concurrent processing of transactions * during the leader's diff forwarding. 
 *
 * @throws IOException
 * @throws InterruptedException
 * @throws KeeperException
 * @throws Throwable
 */
@Test
public void testResyncByDiffAfterFollowerCrashes()
    throws IOException, InterruptedException, KeeperException, Throwable {
    // Released by the async-create callbacks once enough operations have
    // completed; the main thread blocks on it near the end of the test.
    final Semaphore sem = new Semaphore(0);

    QuorumUtil qu = new QuorumUtil(1);
    qu.startAll();
    CountdownWatcher watcher1 = new CountdownWatcher();
    CountdownWatcher watcher2 = new CountdownWatcher();
    CountdownWatcher watcher3 = new CountdownWatcher();

    // Find the peer index of the current leader.
    int index = 1;
    while (qu.getPeer(index).peer.leader == null) {
        index++;
    }

    Leader leader = qu.getPeer(index).peer.leader;
    assertNotNull(leader);

    /* Reusing the index variable to select a follower to connect to */
    index = (index == 1) ? 2 : 1;
    LOG.info("Connecting to follower:" + index);

    // zk1/zk2 connect to the follower that will be crashed and resynced;
    // zk3 connects to peer 3 and keeps writing while that follower is down.
    final ZooKeeper zk1 = createClient(qu.getPeer(index).peer.getClientPort(), watcher1);
    LOG.info("zk1 has session id 0x" + Long.toHexString(zk1.getSessionId()));

    final ZooKeeper zk2 = createClient(qu.getPeer(index).peer.getClientPort(), watcher2);
    LOG.info("zk2 has session id 0x" + Long.toHexString(zk2.getSessionId()));

    final ZooKeeper zk3 = createClient(qu.getPeer(3).peer.getClientPort(), watcher3);
    LOG.info("zk3 has session id 0x" + Long.toHexString(zk3.getSessionId()));

    zk1.create("/first", new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
    zk2.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);

    // Gate for the background writer thread: flipped to true right when the
    // follower is restarted, so its writes race the leader's diff sync.
    final AtomicBoolean runNow = new AtomicBoolean(false);
    Thread mytestfooThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int inSyncCounter = 0;
            while (inSyncCounter < 400) {
                if (runNow.get()) {
                    zk3.create("/mytestfoo", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                            CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                        @Override
                        public void processResult(int rc, String path, Object ctx, String name) {
                            // NOTE(review): counter/errors appear to be fields of the
                            // enclosing test class, declared outside this chunk — verify.
                            counter++;
                            if (rc != 0) {
                                errors++;
                            }
                            // Unblock the main thread once enough async ops completed.
                            if (counter > 7300) {
                                sem.release();
                            }
                        }
                    }, null);
                    try {
                        Thread.sleep(10);
                    } catch (Exception e) {
                        // best-effort pacing; interruption intentionally ignored
                    }
                    inSyncCounter++;
                } else {
                    Thread.yield();
                }
            }
        }
    });
    mytestfooThread.start();

    // Main write loop. Milestones: crash the follower at i==1000; do a
    // start-then-stop and the final restart at i==1200 while both the loop
    // and the background thread keep writing (forcing a DIFF sync under load).
    for (int i = 0; i < 5000; i++) {
        zk2.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
            @Override
            public void processResult(int rc, String path, Object ctx, String name) {
                counter++;
                if (rc != 0) {
                    errors++;
                }
                if (counter > 7300) {
                    sem.release();
                }
            }
        }, null);
        if (i == 1000) {
            qu.shutdown(index);
            Thread.sleep(1100);
            LOG.info("Shutting down s1");
        }
        // Pauses keep the number of txns small enough for a diff-based resync.
        if (i == 1100 || i == 1150 || i == 1200) {
            Thread.sleep(1000);
        }
        if (i == 1200) {
            qu.startThenShutdown(index);
            runNow.set(true);
            qu.restart(index);
            LOG.info("Setting up server: " + index);
        }
        // After the crash, generate additional traffic from zk3 every other
        // iteration so the leader has plenty of in-flight txns to forward.
        if (i >= 1000 && i % 2 == 0) {
            zk3.create("/newbaz", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                    CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                @Override
                public void processResult(int rc, String path, Object ctx, String name) {
                    counter++;
                    if (rc != 0) {
                        errors++;
                    }
                    if (counter > 7300) {
                        sem.release();
                    }
                }
            }, null);
        }
        if (i == 1050 || i == 1100 || i == 1150) {
            Thread.sleep(1000);
        }
    }

    // Wait until all updates return
    if (!sem.tryAcquire(ClientBase.CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS)) {
        LOG.warn("Did not aquire semaphore fast enough");
    }
    mytestfooThread.join(ClientBase.CONNECTION_TIMEOUT);
    if (mytestfooThread.isAlive()) {
        LOG.error("mytestfooThread is still alive");
    }

    Thread.sleep(1000);

    // Verify that server is following and has the same epoch as the leader
    verifyState(qu, index, leader);

    zk1.close();
    zk2.close();
    zk3.close();
    qu.shutdownAll();
}

/**
 * Connects a {@link DisconnectableZooKeeper} client to 127.0.0.1:{@code port}
 * and blocks until the session is established.
 *
 * @param port    client port of the peer to connect to
 * @param watcher watcher used to wait for the connected event
 * @return a connected client
 */
private static DisconnectableZooKeeper createClient(int port, CountdownWatcher watcher)
    throws IOException, TimeoutException, InterruptedException {
    DisconnectableZooKeeper zk = new DisconnectableZooKeeper(
            "127.0.0.1:" + port, ClientBase.CONNECTION_TIMEOUT, watcher);
    watcher.waitForConnected(CONNECTION_TIMEOUT);
    return zk;
}

/**
 * Asserts that the restarted peer at {@code index} is a follower in the same
 * epoch as the leader, and that sessions and ephemeral nodes agree between
 * the restarted follower, the untouched follower (peer 3) and the leader.
 */
private void verifyState(QuorumUtil qu, int index, Leader leader) {
    assertTrue("Not following", qu.getPeer(index).peer.follower != null);
    // The epoch is the high 32 bits of the zxid.
    long epochF = (qu.getPeer(index).peer.getActiveServer().getZxid() >> 32L);
    long epochL = (leader.getEpoch() >> 32L);
    assertTrue("Zxid: " + qu.getPeer(index).peer.getActiveServer().getZKDatabase()
            .getDataTreeLastProcessedZxid()
            + "Current epoch: " + epochF, epochF == epochL);

    int leaderIndex = (index == 1) ? 2 : 1;
    Collection<Long> sessionsRestarted =
            qu.getPeer(index).peer.getActiveServer().getZKDatabase().getSessions();
    Collection<Long> sessionsNotRestarted =
            qu.getPeer(leaderIndex).peer.getActiveServer().getZKDatabase().getSessions();

    for (Long l : sessionsRestarted) {
        assertTrue("Should have same set of sessions in both servers, did not expect: " + l,
                sessionsNotRestarted.contains(l));
    }
    assertEquals("Should have same number of sessions",
            sessionsNotRestarted.size(), sessionsRestarted.size());

    ZKDatabase restarted = qu.getPeer(index).peer.getActiveServer().getZKDatabase();
    ZKDatabase clean = qu.getPeer(3).peer.getActiveServer().getZKDatabase();
    ZKDatabase lead = qu.getPeer(leaderIndex).peer.getActiveServer().getZKDatabase();
    for (Long l : sessionsRestarted) {
        // NOTE(review): this repeats the session-membership assertion from the
        // loop above; kept as-is to preserve behavior.
        assertTrue("Should have same set of sessions in both servers, did not expect: " + l,
                sessionsNotRestarted.contains(l));
        HashSet ephemerals = restarted.getEphemerals(l);
        HashSet cleanEphemerals = clean.getEphemerals(l);
        // Mismatches between the two followers are logged for diagnosis, then
        // asserted on by size below.
        for (Object o : cleanEphemerals) {
            if (!ephemerals.contains(o)) {
                LOG.info("Restarted follower doesn't contain ephemeral " + o);
            }
        }
        HashSet leadEphemerals = lead.getEphemerals(l);
        for (Object o : leadEphemerals) {
            if (!cleanEphemerals.contains(o)) {
                LOG.info("Follower doesn't contain ephemeral from leader " + o);
            }
        }
        assertEquals("Should have same number of ephemerals in both followers",
                ephemerals.size(), cleanEphemerals.size());
        assertEquals("Leader should equal follower",
                lead.getEphemerals(l).size(), cleanEphemerals.size());
    }
}
}
/*
 * Copyright (c) 2005-2010 Grameen Foundation USA
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * See also http://www.apache.org/licenses/LICENSE-2.0.html for an
 * explanation of the license and how it is applied.
 */

package org.mifos.application.questionnaire.migration.mappers;

import static java.lang.String.format;
import static org.mifos.platform.questionnaire.QuestionnaireConstants.DEFAULT_EVENT_FOR_SURVEYS;
import static org.mifos.platform.questionnaire.QuestionnaireConstants.DEFAULT_ORDER;
import static org.mifos.platform.questionnaire.QuestionnaireConstants.DEFAULT_VERSION;
import static org.mifos.platform.questionnaire.QuestionnaireConstants.MULTI_SELECT_DELIMITER;
import static org.mifos.platform.util.CollectionUtils.asMap;
import static org.mifos.platform.util.MapEntry.makeEntry;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.mifos.customers.surveys.business.Question;
import org.mifos.customers.surveys.business.QuestionChoice;
import org.mifos.customers.surveys.business.Survey;
import org.mifos.customers.surveys.business.SurveyInstance;
import org.mifos.customers.surveys.business.SurveyQuestion;
import org.mifos.customers.surveys.business.SurveyResponse;
import org.mifos.customers.surveys.helpers.AnswerType;
import org.mifos.customers.surveys.helpers.SurveyType;
import org.mifos.platform.questionnaire.QuestionnaireConstants;
import org.mifos.platform.questionnaire.service.QuestionType;
import org.mifos.platform.questionnaire.service.QuestionnaireServiceFacade;
import org.mifos.platform.questionnaire.service.dtos.ChoiceDto;
import org.mifos.platform.questionnaire.service.dtos.EventSourceDto;
import org.mifos.platform.questionnaire.service.dtos.QuestionDto;
import org.mifos.platform.questionnaire.service.dtos.QuestionGroupDto;
import org.mifos.platform.questionnaire.service.dtos.QuestionGroupInstanceDto;
import org.mifos.platform.questionnaire.service.dtos.QuestionGroupResponseDto;
import org.mifos.platform.questionnaire.service.dtos.SectionDto;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Maps legacy survey entities ({@link Survey}, {@link SurveyInstance},
 * {@link SurveyResponse}) onto questionnaire DTOs for the PPI-survey
 * migration. All surveys are migrated into a single hard-coded section
 * named {@value #DEFAULT_SECTION_NAME}.
 */
public class QuestionnaireMigrationMapperImpl implements QuestionnaireMigrationMapper {

    private static final String DEFAULT_SECTION_NAME = "PPI India 2009";
    private static final String DEFAULT_SURVEY_NAME = "PPI India 2009";

    // Lookup tables built once at construction time.
    private Map<SurveyType, String> surveyTypeToSourceMap;
    private Map<AnswerType, QuestionType> answerToQuestionType;

    @Autowired
    private QuestionnaireServiceFacade questionnaireServiceFacade;

    public QuestionnaireMigrationMapperImpl() {
        populateSurveyTypeToSourceMappings();
        populateAnswerToQuestionTypeMappings();
    }

    // Intended to be used from unit tests for injecting mocks.
    // FIX: delegate to this() so the type-mapping tables are also populated on
    // this path; previously they were left null, and any call to map(Survey)
    // or mapEventSourceForSurvey on such an instance threw a NullPointerException.
    public QuestionnaireMigrationMapperImpl(QuestionnaireServiceFacade questionnaireServiceFacade) {
        this();
        this.questionnaireServiceFacade = questionnaireServiceFacade;
    }

    /**
     * Maps a legacy survey to a question-group definition. The group is marked
     * as a non-editable PPI group and is active iff the survey state is 1.
     */
    @Override
    public QuestionGroupDto map(Survey survey) {
        QuestionGroupDto questionGroupDto = new QuestionGroupDto();
        questionGroupDto.setTitle(DEFAULT_SURVEY_NAME);
        questionGroupDto.setEditable(false);
        questionGroupDto.setPpi(true);
        questionGroupDto.setActive(survey.getState() == 1);
        questionGroupDto.setEventSourceDtos(Arrays.asList(mapEventSourceForSurvey(survey)));
        questionGroupDto.addSection(mapToSectionForSurvey(survey.getQuestions()));
        return questionGroupDto;
    }

    /**
     * Maps one filled-in survey instance to a question-group instance,
     * including all of its individual responses.
     *
     * @param surveyInstance  the legacy instance to migrate
     * @param questionGroupId id of the already-migrated question group
     * @param eventSourceId   id of the event source the group is attached to
     */
    @Override
    public QuestionGroupInstanceDto map(SurveyInstance surveyInstance, Integer questionGroupId,
                                        Integer eventSourceId) {
        QuestionGroupInstanceDto questionGroupInstanceDto = new QuestionGroupInstanceDto();
        questionGroupInstanceDto.setDateConducted(surveyInstance.getDateConducted());
        questionGroupInstanceDto.setCompleted(surveyInstance.getCompletedStatus());
        questionGroupInstanceDto.setCreatorId(surveyInstance.getCreator());
        questionGroupInstanceDto.setEventSourceId(eventSourceId);
        questionGroupInstanceDto.setEntityId(mapToEntityId(surveyInstance));
        questionGroupInstanceDto.setQuestionGroupId(questionGroupId);
        questionGroupInstanceDto.setVersion(DEFAULT_VERSION);
        questionGroupInstanceDto.setQuestionGroupResponseDtos(
                mapToQuestionGroupResponseDtos(surveyInstance, questionGroupId));
        return questionGroupInstanceDto;
    }

    /** Maps every response of an instance; multi-selects fan out into one DTO per chosen option. */
    private List<QuestionGroupResponseDto> mapToQuestionGroupResponseDtos(SurveyInstance surveyInstance,
                                                                          Integer questionGroupId) {
        List<QuestionGroupResponseDto> questionGroupResponseDtos = new ArrayList<QuestionGroupResponseDto>();
        for (SurveyResponse surveyResponse : surveyInstance.getSurveyResponses()) {
            if (surveyResponse.getQuestion().getAnswerTypeAsEnum() == AnswerType.MULTISELECT) {
                questionGroupResponseDtos
                        .addAll(mapToMultiSelectQuestionGroupResponses(questionGroupId, surveyResponse));
            } else {
                questionGroupResponseDtos.add(mapToQuestionGroupResponse(questionGroupId, surveyResponse));
            }
        }
        return questionGroupResponseDtos;
    }

    /**
     * A multi-select answer is stored as a delimiter-separated bitmap of
     * selections; position i corresponds to the choice with order i. One
     * response DTO is emitted per selected choice.
     */
    private List<QuestionGroupResponseDto> mapToMultiSelectQuestionGroupResponses(Integer questionGroupId,
                                                                                  SurveyResponse surveyResponse) {
        List<QuestionGroupResponseDto> questionGroupResponseDtos = new ArrayList<QuestionGroupResponseDto>();
        String multiSelectValue = surveyResponse.getMultiSelectValue();
        if (StringUtils.isNotEmpty(multiSelectValue)) {
            Map<Integer, QuestionChoice> choiceLookup = getChoiceLookup(surveyResponse);
            Integer questionId = surveyResponse.getQuestion().getQuestionId();
            Integer sectionQuestionId = getSectionQuestionId(questionGroupId, questionId);
            String[] answers = StringUtils.split(multiSelectValue, MULTI_SELECT_DELIMITER);
            for (int ansIndex = 0; ansIndex < answers.length; ansIndex++) {
                if (isChoiceSelected(answers[ansIndex])) {
                    // FIX: guard against a selection index with no matching
                    // choice order (inconsistent legacy data); previously this
                    // dereferenced a null lookup result and threw an NPE.
                    QuestionChoice choice = choiceLookup.get(ansIndex);
                    if (choice != null) {
                        questionGroupResponseDtos.add(
                                mapToQuestionGroupResponse(sectionQuestionId, choice.getChoiceText()));
                    }
                }
            }
        }
        return questionGroupResponseDtos;
    }

    /** True when the stored token marks the choice at that position as selected. */
    private boolean isChoiceSelected(String answer) {
        return StringUtils.isNotEmpty(answer) && QuestionnaireConstants.CHOICE_SELECTED.equals(answer);
    }

    /** Builds a choice-order -> choice lookup for the response's question. */
    private Map<Integer, QuestionChoice> getChoiceLookup(SurveyResponse surveyResponse) {
        Map<Integer, QuestionChoice> questionChoiceLookup = new HashMap<Integer, QuestionChoice>();
        for (QuestionChoice questionChoice : surveyResponse.getSurveyQuestion().getQuestion().getChoices()) {
            questionChoiceLookup.put(questionChoice.getChoiceOrder(), questionChoice);
        }
        return questionChoiceLookup;
    }

    private QuestionGroupResponseDto mapToQuestionGroupResponse(Integer sectionQuestionId, String answer) {
        QuestionGroupResponseDto questionGroupResponseDto = new QuestionGroupResponseDto();
        questionGroupResponseDto.setResponse(answer);
        questionGroupResponseDto.setSectionQuestionId(sectionQuestionId);
        return questionGroupResponseDto;
    }

    // NOTE(review): relies on SurveyResponse.toString() yielding the answer
    // text — confirm against SurveyResponse's implementation.
    private QuestionGroupResponseDto mapToQuestionGroupResponse(Integer questionGroupId,
                                                                SurveyResponse surveyResponse) {
        Integer questionId = surveyResponse.getQuestion().getQuestionId();
        Integer sectionQuestionId = getSectionQuestionId(questionGroupId, questionId);
        return mapToQuestionGroupResponse(sectionQuestionId, surveyResponse.toString());
    }

    private Integer getSectionQuestionId(Integer questionGroupId, Integer questionId) {
        return questionnaireServiceFacade.getSectionQuestionId(DEFAULT_SECTION_NAME, questionId, questionGroupId);
    }

    /** Entity id is the customer id or account id, depending on what the instance targets; 0 otherwise. */
    private Integer mapToEntityId(SurveyInstance surveyInstance) {
        Integer result = 0;
        if (surveyInstance.isForCustomer()) {
            result = surveyInstance.getCustomer();
        } else if (surveyInstance.isForAccount()) {
            result = surveyInstance.getAccount();
        }
        return result;
    }

    /** Puts all survey questions into the single default section, plus a synthetic survey-date question. */
    private SectionDto mapToSectionForSurvey(List<SurveyQuestion> questions) {
        SectionDto sectionDto = getDefaultSection();
        for (SurveyQuestion question : questions) {
            sectionDto.addQuestion(mapToQuestionDto(question));
        }
        addDateSurveyConductedQuestion(sectionDto);
        return sectionDto;
    }

    /** Adds a mandatory "date taken" question at order 0, ahead of the migrated questions (orders start at 1). */
    private void addDateSurveyConductedQuestion(SectionDto sectionDto) {
        QuestionDto questionDto = new QuestionDto();
        questionDto.setText("Date Survey Was Taken");
        questionDto.setMandatory(true);
        questionDto.setActive(true);
        questionDto.setNickname("ppi_india_2009_survey_date");
        questionDto.setOrder(0);
        questionDto.setType(answerToQuestionType.get(AnswerType.DATE));
        sectionDto.addQuestion(questionDto);
    }

    private QuestionDto mapToQuestionDto(SurveyQuestion surveyQuestion) {
        QuestionDto questionDto = new QuestionDto();
        Question question = surveyQuestion.getQuestion();
        questionDto.setText(question.getQuestionText());
        questionDto.setNickname(question.getNickname());
        questionDto.setMandatory(surveyQuestion.getMandatory() == 1);
        questionDto.setActive(question.getQuestionState() == 1);
        // Shift by one so the synthetic date question can occupy order 0.
        questionDto.setOrder(surveyQuestion.getOrder() + 1);
        AnswerType answerType = question.getAnswerTypeAsEnum();
        questionDto.setType(answerToQuestionType.get(answerType));
        if (answerType == AnswerType.NUMBER) {
            mapNumberQuestion(questionDto, question);
        } else if (answerType == AnswerType.SINGLESELECT || answerType == AnswerType.MULTISELECT
                || answerType == AnswerType.CHOICE) {
            mapChoiceBasedQuestion(questionDto, question.getChoices());
        }
        return questionDto;
    }

    private void mapChoiceBasedQuestion(QuestionDto questionDto, List<QuestionChoice> questionChoices) {
        List<ChoiceDto> choices = new ArrayList<ChoiceDto>(questionChoices.size());
        for (int i = 0, choicesSize = questionChoices.size(); i < choicesSize; i++) {
            QuestionChoice questionChoice = questionChoices.get(i);
            choices.add(mapToChoiceDto(i, questionChoice));
        }
        questionDto.setChoices(choices);
    }

    private ChoiceDto mapToChoiceDto(int i, QuestionChoice questionChoice) {
        ChoiceDto choiceDto = new ChoiceDto();
        choiceDto.setOrder(i);
        choiceDto.setValue(questionChoice.getChoiceText());
        return choiceDto;
    }

    private void mapNumberQuestion(QuestionDto questionDto, Question question) {
        questionDto.setMinValue(question.getNumericMin());
        questionDto.setMaxValue(question.getNumericMax());
    }

    private EventSourceDto mapEventSourceForSurvey(Survey survey) {
        SurveyType surveyType = survey.getAppliesToAsEnum();
        String event = DEFAULT_EVENT_FOR_SURVEYS;
        String source = surveyTypeToSourceMap.get(surveyType);
        return new EventSourceDto(event, source, getEventSourceDescription(event, source));
    }

    private String getEventSourceDescription(String event, String source) {
        return format("%s %s", event, source);
    }

    private SectionDto getDefaultSection() {
        SectionDto sectionDto = new SectionDto();
        sectionDto.setName(DEFAULT_SECTION_NAME);
        sectionDto.setOrder(DEFAULT_ORDER);
        return sectionDto;
    }

    @SuppressWarnings("unchecked")
    private void populateSurveyTypeToSourceMappings() {
        surveyTypeToSourceMap = asMap(
                makeEntry(SurveyType.CLIENT, "Client"),
                makeEntry(SurveyType.GROUP, "Group"),
                makeEntry(SurveyType.CENTER, "Center"),
                makeEntry(SurveyType.LOAN, "Loan"),
                makeEntry(SurveyType.SAVINGS, "Savings"),
                makeEntry(SurveyType.ALL, "All"));
    }

    @SuppressWarnings("unchecked")
    private void populateAnswerToQuestionTypeMappings() {
        answerToQuestionType = asMap(
                makeEntry(AnswerType.INVALID, QuestionType.INVALID),
                makeEntry(AnswerType.FREETEXT, QuestionType.FREETEXT),
                makeEntry(AnswerType.DATE, QuestionType.DATE),
                makeEntry(AnswerType.NUMBER, QuestionType.NUMERIC),
                makeEntry(AnswerType.SINGLESELECT, QuestionType.SINGLE_SELECT),
                makeEntry(AnswerType.CHOICE, QuestionType.SINGLE_SELECT),
                makeEntry(AnswerType.MULTISELECT, QuestionType.MULTI_SELECT));
    }
}
/*
 * Copyright (c) 2013-2015 by appPlant UG. All rights reserved.
 *
 * @APPPLANT_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apache License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://opensource.org/licenses/Apache-2.0/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPPLANT_LICENSE_HEADER_END@
 */

package de.appplant.cordova.plugin.notification;

import android.app.AlarmManager;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.support.v4.app.NotificationCompat;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.Date;

/**
 * Wrapper class around OS notification class. Handles basic operations
 * like show, delete, cancel for a single local notification instance.
 */
public class Notification {

    // Used to differ notifications by their life cycle state
    public enum Type {
        ALL, SCHEDULED, TRIGGERED
    }

    // Default receiver to handle the trigger event
    private static Class<?> defaultReceiver = TriggerReceiver.class;

    // Key for private preferences
    static final String PREF_KEY = "LocalNotification";

    // Application context passed by constructor
    private final Context context;

    // Notification options passed by JS
    private final Options options;

    // Builder with full configuration
    private final NotificationCompat.Builder builder;

    // Receiver to handle the trigger event
    private Class<?> receiver = defaultReceiver;

    /**
     * Constructor
     *
     * @param context
     *      Application context
     * @param options
     *      Parsed notification options
     * @param builder
     *      Pre-configured notification builder
     * @param receiver
     *      Receiver for the trigger event; falls back to the class-wide
     *      default when null
     */
    protected Notification (Context context, Options options,
                            NotificationCompat.Builder builder, Class<?> receiver) {
        this.context  = context;
        this.options  = options;
        this.builder  = builder;
        this.receiver = receiver != null ? receiver : defaultReceiver;
    }

    /**
     * Get application context.
     */
    public Context getContext () {
        return context;
    }

    /**
     * Get notification options.
     */
    public Options getOptions () {
        return options;
    }

    /**
     * Get notification ID.
     */
    public int getId () {
        return options.getId();
    }

    /**
     * If it's a repeating notification.
     */
    public boolean isRepeating () {
        return getOptions().getRepeatInterval() > 0;
    }

    /**
     * If the notification's trigger date lies in the past.
     */
    public boolean wasInThePast () {
        return new Date().after(options.getTriggerDate());
    }

    /**
     * If the notification is scheduled (repeating, or not yet fired).
     */
    public boolean isScheduled () {
        return isRepeating() || !wasInThePast();
    }

    /**
     * If the notification is triggered.
     */
    public boolean isTriggered () {
        return wasInThePast();
    }

    /**
     * If the notification is an update.
     *
     * @param keepFlag
     *      Set to false to remove the flag from the option map
     */
    protected boolean isUpdate (boolean keepFlag) {
        boolean updated = options.getDict().optBoolean("updated", false);

        if (!keepFlag) {
            options.getDict().remove("updated");
        }

        return updated;
    }

    /**
     * Notification type can be one of pending or scheduled.
     */
    public Type getType () {
        return isScheduled() ? Type.SCHEDULED : Type.TRIGGERED;
    }

    /**
     * Schedule the local notification: persist it, then register an alarm
     * that fires a broadcast to {@link #receiver} at the trigger time.
     */
    public void schedule() {
        long triggerTime = options.getTriggerTime();

        persist();

        // Intent gets called when the Notification gets fired
        Intent intent = new Intent(context, receiver)
                .setAction(options.getIdStr())
                .putExtra(Options.EXTRA, options.toString());

        PendingIntent pi = PendingIntent.getBroadcast(
                context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);

        if (isRepeating()) {
            // A repeating alarm whose first trigger is already gone fires
            // immediately; anchor it at "now" instead.
            if (wasInThePast()) {
                triggerTime = System.currentTimeMillis();
            }
            getAlarmMgr().setRepeating(AlarmManager.RTC_WAKEUP, triggerTime,
                    options.getRepeatInterval(), pi);
        } else {
            getAlarmMgr().set(AlarmManager.RTC_WAKEUP, triggerTime, pi);
        }
    }

    /**
     * Clear the local notification without canceling repeating alarms.
     */
    public void clear () {
        if (!isRepeating() && wasInThePast())
            unpersist();

        if (!isRepeating())
            getNotMgr().cancel(getId());
    }

    /**
     * Cancel the local notification.
     *
     * Create an intent that looks similar, to the one that was registered
     * using schedule. Making sure the notification id in the action is the
     * same. Now we can search for such an intent using the 'getService'
     * method and cancel it.
     */
    public void cancel() {
        Intent intent = new Intent(context, receiver)
                .setAction(options.getIdStr());

        PendingIntent pi = PendingIntent.getBroadcast(context, 0, intent, 0);

        getAlarmMgr().cancel(pi);
        getNotMgr().cancel(options.getId());

        unpersist();
    }

    /**
     * Present the local notification to user.
     */
    public void show () {
        // TODO Show dialog when in foreground
        showNotification();
    }

    /**
     * Show as local notification when in background.
     */
    @SuppressWarnings("deprecation")
    private void showNotification () {
        int id = getOptions().getId();

        if (Build.VERSION.SDK_INT <= 15) {
            // Notification for HoneyComb to ICS
            getNotMgr().notify(id, builder.getNotification());
        } else {
            // Notification for Jellybean and above
            getNotMgr().notify(id, builder.build());
        }
    }

    /**
     * Count of triggers since schedule.
     * Returns 0 before the first trigger, 1 for a fired one-shot, and the
     * number of elapsed intervals for a repeating notification.
     */
    public int getTriggerCountSinceSchedule() {
        long now = System.currentTimeMillis();
        long triggerTime = options.getTriggerTime();

        if (!wasInThePast())
            return 0;

        if (!isRepeating())
            return 1;

        return (int) ((now - triggerTime) / options.getRepeatInterval());
    }

    /**
     * Encode options to JSON, dropping transient keys that must not be
     * handed back to JS.
     */
    @Override
    public String toString() {
        JSONObject dict = options.getDict();
        JSONObject json = new JSONObject();

        try {
            json = new JSONObject(dict.toString());
        } catch (JSONException e) {
            // NOTE(review): swallowed on purpose — an empty JSON object is
            // returned when the dict cannot be copied.
            e.printStackTrace();
        }

        json.remove("firstAt");
        json.remove("updated");
        json.remove("soundUri");
        json.remove("iconUri");

        return json.toString();
    }

    /**
     * Persist the information of this notification to the Android Shared
     * Preferences. This will allow the application to restore the notification
     * upon device reboot, app restart, retrieve notifications, aso.
     */
    private void persist () {
        SharedPreferences.Editor editor = getPrefs().edit();

        editor.putString(options.getIdStr(), options.toString());

        saveEditor(editor);
    }

    /**
     * Remove the notification from the Android shared Preferences.
     */
    private void unpersist () {
        SharedPreferences.Editor editor = getPrefs().edit();

        editor.remove(options.getIdStr());

        saveEditor(editor);
    }

    /**
     * Save pending preference changes. Uses the synchronous commit() on
     * pre-Gingerbread devices where apply() is unavailable, the async
     * apply() otherwise. (Extracted from persist()/unpersist() which
     * duplicated this SDK check.)
     */
    private static void saveEditor (SharedPreferences.Editor editor) {
        if (Build.VERSION.SDK_INT < 9) {
            editor.commit();
        } else {
            editor.apply();
        }
    }

    /**
     * Shared private preferences for the application.
     */
    private SharedPreferences getPrefs () {
        return context.getSharedPreferences(PREF_KEY, Context.MODE_PRIVATE);
    }

    /**
     * Notification manager for the application.
     */
    private NotificationManager getNotMgr () {
        return (NotificationManager) context
                .getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Alarm manager for the application.
     */
    private AlarmManager getAlarmMgr () {
        return (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
    }

    /**
     * Set default receiver to handle the trigger event.
     *
     * @param receiver
     *      broadcast receiver
     */
    public static void setDefaultTriggerReceiver (Class<?> receiver) {
        defaultReceiver = receiver;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.hbase.index.covered;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;

import com.google.common.collect.Lists;
import com.google.common.primitives.Longs;

import org.apache.phoenix.hbase.index.builder.BaseIndexBuilder;
import org.apache.phoenix.hbase.index.covered.data.LocalHBaseState;
import org.apache.phoenix.hbase.index.covered.data.LocalTable;
import org.apache.phoenix.hbase.index.covered.update.ColumnTracker;
import org.apache.phoenix.hbase.index.covered.update.IndexUpdateManager;
import org.apache.phoenix.hbase.index.covered.update.IndexedColumnGroup;

/**
 * Build covered indexes for phoenix updates.
 * <p>
 * Before any call to prePut/preDelete, the row has already been locked. This ensures that we don't
 * need to do any extra synchronization in the IndexBuilder.
 * <p>
 * NOTE: This implementation doesn't cleanup the index when we remove a key-value on compaction or
 * flush, leading to a bloated index that needs to be cleaned up by a background process.
 */
public class CoveredColumnsIndexBuilder extends BaseIndexBuilder {

  private static final Log LOG = LogFactory.getLog(CoveredColumnsIndexBuilder.class);
  // Configuration key naming the IndexCodec implementation to instantiate in setup().
  public static final String CODEC_CLASS_NAME_KEY = "org.apache.hadoop.hbase.index.codec.class";

  protected RegionCoprocessorEnvironment env;
  protected IndexCodec codec;
  protected LocalHBaseState localTable;

  /**
   * Initializes the builder: reflectively instantiates the configured
   * {@link IndexCodec} (no-arg constructor, made accessible) and wires up the
   * local-state view of the region.
   *
   * @throws IOException if codec construction or initialization fails
   */
  @Override
  public void setup(RegionCoprocessorEnvironment env) throws IOException {
    this.env = env;
    // setup the phoenix codec. Generally, this will just be in standard one, but abstracting here
    // so we can use it later when generalizing covered indexes
    Configuration conf = env.getConfiguration();
    // NOTE(review): getClass() returns null when the key is unset (default is
    // null), which would NPE below — confirm the key is always configured.
    Class<? extends IndexCodec> codecClass =
        conf.getClass(CODEC_CLASS_NAME_KEY, null, IndexCodec.class);
    try {
      Constructor<? extends IndexCodec> meth = codecClass.getDeclaredConstructor(new Class[0]);
      meth.setAccessible(true);
      this.codec = meth.newInstance();
      this.codec.initialize(env);
    } catch (IOException e) {
      // preserve IOExceptions as-is; wrap everything else below
      throw e;
    } catch (Exception e) {
      throw new IOException(e);
    }

    this.localTable = new LocalTable(env);
  }

  /**
   * Computes the full set of index updates implied by the given primary-table
   * mutation, as (index mutation, index table name) pairs.
   */
  @Override
  public Collection<Pair<Mutation, byte[]>> getIndexUpdate(Mutation mutation) throws IOException {
    // build the index updates for each group
    IndexUpdateManager updateMap = new IndexUpdateManager();

    batchMutationAndAddUpdates(updateMap, mutation);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Found index updates for Mutation: " + mutation + "\n" + updateMap);
    }

    return updateMap.toMap();
  }

  /**
   * Split the mutation into batches based on the timestamps of each keyvalue. We need to check each
   * key-value in the update to see if it matches the others. Generally, this will be the case, but
   * you can add kvs to a mutation that don't all have the timestamp, so we need to manage
   * everything in batches based on timestamp.
   * <p>
   * Adds all the updates in the {@link Mutation} to the state, as a side-effect.
   * @param manager index updates into which to add new updates. Modified as a side-effect.
   * @param m mutation to batch
   * @throws IOException on failure
   */
  private void batchMutationAndAddUpdates(IndexUpdateManager manager, Mutation m) throws IOException {
    // split the mutation into timestamp-based batches
    Collection<Batch> batches = createTimestampBatchesFromMutation(m);

    // create a state manager, so we can manage each batch
    LocalTableState state = new LocalTableState(env, localTable, m);

    // go through each batch of keyvalues and build separate index entries for each
    boolean cleanupCurrentState = true;
    for (Batch batch : batches) {
      /*
       * We have to split the work between the cleanup and the update for each group because when we
       * update the current state of the row for the current batch (appending the mutations for the
       * current batch) the next group will see that as the current state, which will can cause the
       * a delete and a put to be created for the next group.
       */
      if (addMutationsForBatch(manager, batch, state, cleanupCurrentState)) {
        // once one batch has done the back-in-time cleanup, later batches can skip it
        cleanupCurrentState = false;
      }
    }
  }

  /**
   * Batch all the {@link KeyValue}s in a {@link Mutation} by timestamp. Updates any
   * {@link KeyValue} with a timestamp == {@link HConstants#LATEST_TIMESTAMP} to the timestamp at
   * the time the method is called.
   * @param m {@link Mutation} from which to extract the {@link KeyValue}s
   * @return the mutation, broken into batches and sorted in ascending order (smallest first)
   */
  protected Collection<Batch> createTimestampBatchesFromMutation(Mutation m) {
    Map<Long, Batch> batches = new HashMap<Long, Batch>();
    for (List<Cell> family : m.getFamilyCellMap().values()) {
      List<KeyValue> familyKVs = KeyValueUtil.ensureKeyValues(family);
      createTimestampBatchesFromKeyValues(familyKVs, batches);
    }
    // sort the batches by timestamp, ascending
    List<Batch> sorted = new ArrayList<Batch>(batches.values());
    Collections.sort(sorted, new Comparator<Batch>() {
      @Override
      public int compare(Batch o1, Batch o2) {
        return Longs.compare(o1.getTimestamp(), o2.getTimestamp());
      }
    });

    return sorted;
  }

  /**
   * Batch all the {@link KeyValue}s in a collection of kvs by timestamp. Updates any
   * {@link KeyValue} with a timestamp == {@link HConstants#LATEST_TIMESTAMP} to the timestamp at
   * the time the method is called.
   * @param kvs {@link KeyValue}s to break into batches
   * @param batches to update with the given kvs
   */
  protected void createTimestampBatchesFromKeyValues(Collection<KeyValue> kvs,
      Map<Long, Batch> batches) {
    long now = EnvironmentEdgeManager.currentTimeMillis();
    byte[] nowBytes = Bytes.toBytes(now);

    // batch kvs by timestamp
    for (KeyValue kv : kvs) {
      long ts = kv.getTimestamp();
      // override the timestamp to the current time, so the index and primary tables match
      // all the keys with LATEST_TIMESTAMP will then be put into the same batch
      if (kv.updateLatestStamp(nowBytes)) {
        ts = now;
      }
      Batch batch = batches.get(ts);
      if (batch == null) {
        batch = new Batch(ts);
        batches.put(ts, batch);
      }
      batch.add(kv);
    }
  }

  /**
   * For a single batch, get all the index updates and add them to the updateMap.
   * <p>
   * This method manages cleaning up the entire history of the row from the given timestamp forward
   * for out-of-order (e.g. 'back in time') updates.
   * <p>
   * If things arrive out of order (client is using custom timestamps) we should still see the index
   * in the correct order (assuming we scan after the out-of-order update in finished). Therefore,
   * we when we aren't the most recent update to the index, we need to delete the state at the
   * current timestamp (similar to above), but also issue a delete for the added index updates at
   * the next newest timestamp of any of the columns in the update; we need to cleanup the insert so
   * it looks like it was also deleted at that next newest timestamp. However, its not enough to
   * just update the one in front of us - that column will likely be applied to index entries up the
   * entire history in front of us, which also needs to be fixed up.
   * <p>
   * However, the current update usually will be the most recent thing to be added. In that case,
   * all we need to is issue a delete for the previous index row (the state of the row, without the
   * update applied) at the current timestamp. This gets rid of anything currently in the index for
   * the current state of the row (at the timestamp). Then we can just follow that by applying the
   * pending update and building the index update based on the new row state.
   * @param updateMap map to update with new index elements
   * @param batch timestamp-based batch of edits
   * @param state local state to update and pass to the codec
   * @param requireCurrentStateCleanup <tt>true</tt> if we should should attempt to cleanup the
   *          current state of the table, in the event of a 'back in time' batch. <tt>false</tt>
   *          indicates we should not attempt the cleanup, e.g. an earlier batch already did the
   *          cleanup.
   * @return <tt>true</tt> if we cleaned up the current state forward (had a back-in-time put),
   *         <tt>false</tt> otherwise
   * @throws IOException on failure
   */
  private boolean addMutationsForBatch(IndexUpdateManager updateMap, Batch batch,
      LocalTableState state, boolean requireCurrentStateCleanup) throws IOException {

    // A. Get the correct values for the pending state in the batch
    // A.1 start by cleaning up the current state - as long as there are key-values in the batch
    // that are indexed, we need to change the current state of the index. Its up to the codec to
    // determine if we need to make any cleanup given the pending update.
    long batchTs = batch.getTimestamp();
    state.setPendingUpdates(batch.getKvs());
    addCleanupForCurrentBatch(updateMap, batchTs, state);

    // A.2 do a single pass first for the updates to the current state
    state.applyPendingUpdates();
    long minTs = addUpdateForGivenTimestamp(batchTs, state, updateMap);
    // if all the updates are the latest thing in the index, we are done - don't go and fix history
    if (ColumnTracker.isNewestTime(minTs)) {
      return false;
    }

    // A.3 otherwise, we need to roll up through the current state and get the 'correct' view of the
    // index. after this, we have the correct view of the index, from the batch up to the index
    while (!ColumnTracker.isNewestTime(minTs)) {
      minTs = addUpdateForGivenTimestamp(minTs, state, updateMap);
    }

    // B. only cleanup the current state if we need to - its a huge waste of effort otherwise.
    if (requireCurrentStateCleanup) {
      // roll back the pending update. This is needed so we can remove all the 'old' index entries.
      // We don't need to do the puts here, but just the deletes at the given timestamps since we
      // just want to completely hide the incorrect entries.
      state.rollback(batch.getKvs());
      // setup state
      state.setPendingUpdates(batch.getKvs());

      // cleanup the pending batch. If anything in the correct history is covered by Deletes used to
      // 'fix' history (same row key and ts), we just drop the delete (we don't want to drop both
      // because the update may have a different set of columns or value based on the update).
      cleanupIndexStateFromBatchOnward(updateMap, batchTs, state);

      // have to roll the state forward again, so the current state is correct
      state.applyPendingUpdates();
      return true;
    }
    return false;
  }

  /**
   * Positions the state at {@code ts} and collects the index mutations for
   * that point in time.
   * @return the minimum timestamp of the columns still needing updates (see
   *         {@link ColumnTracker#isNewestTime(long)})
   */
  private long addUpdateForGivenTimestamp(long ts, LocalTableState state,
      IndexUpdateManager updateMap) throws IOException {
    state.setCurrentTimestamp(ts);
    ts = addCurrentStateMutationsForBatch(updateMap, state);
    return ts;
  }

  /**
   * Issues the deletes that remove the pre-batch index state at the batch's
   * timestamp, then discards the column tracking the delete pass accumulated.
   */
  private void addCleanupForCurrentBatch(IndexUpdateManager updateMap, long batchTs,
      LocalTableState state) throws IOException {
    // get the cleanup for the current state
    state.setCurrentTimestamp(batchTs);
    addDeleteUpdatesToMap(updateMap, state, batchTs);
    // ignore any index tracking from the delete
    state.resetTrackedColumns();
  }

  /**
   * Add the necessary mutations for the pending batch on the local state. Handles rolling up
   * through history to determine the index changes after applying the batch (for the case where the
   * batch is back in time).
   * @param updateMap to update with index mutations
   * @param state current state of the table, with the batch applied as pending updates
   * @return the minimum timestamp across all index columns requested. If
   *         {@link ColumnTracker#isNewestTime(long)} returns <tt>true</tt> on the returned
   *         timestamp, we know that this <i>was not a back-in-time update</i>.
* @throws IOException */ private long addCurrentStateMutationsForBatch(IndexUpdateManager updateMap, LocalTableState state) throws IOException { // get the index updates for this current batch Iterable<IndexUpdate> upserts = codec.getIndexUpserts(state); state.resetTrackedColumns(); /* * go through all the pending updates. If we are sure that all the entries are the latest * timestamp, we can just add the index updates and move on. However, if there are columns that * we skip past (based on the timestamp of the batch), we need to roll back up the history. * Regardless of whether or not they are the latest timestamp, the entries here are going to be * correct for the current batch timestamp, so we add them to the updates. The only thing we * really care about it if we need to roll up the history and fix it as we go. */ // timestamp of the next update we need to track long minTs = ColumnTracker.NO_NEWER_PRIMARY_TABLE_ENTRY_TIMESTAMP; List<IndexedColumnGroup> columnHints = new ArrayList<IndexedColumnGroup>(); for (IndexUpdate update : upserts) { // this is the one bit where we check the timestamps final ColumnTracker tracker = update.getIndexedColumns(); long trackerTs = tracker.getTS(); // update the next min TS we need to track if (trackerTs < minTs) { minTs = tracker.getTS(); } // track index hints for the next round. Hint if we need an update for that column for the // next timestamp. These columns clearly won't need to update as we go through time as they // already match the most recent possible thing. boolean needsCleanup = false; if (tracker.hasNewerTimestamps()) { columnHints.add(tracker); // this update also needs to be cleaned up at the next timestamp because it not the latest. 
needsCleanup = true; } // only make the put if the index update has been setup if (update.isValid()) { byte[] table = update.getTableName(); Mutation mutation = update.getUpdate(); updateMap.addIndexUpdate(table, mutation); // only make the cleanup if we made a put and need cleanup if (needsCleanup) { // there is a TS for the interested columns that is greater than the columns in the // put. Therefore, we need to issue a delete at the same timestamp Delete d = new Delete(mutation.getRow()); d.setTimestamp(tracker.getTS()); updateMap.addIndexUpdate(table, d); } } } return minTs; } /** * Cleanup the index based on the current state from the given batch. Iterates over each timestamp * (for the indexed rows) for the current state of the table and cleans up all the existing * entries generated by the codec. * <p> * Adds all pending updates to the updateMap * @param updateMap updated with the pending index updates from the codec * @param batchTs timestamp from which we should cleanup * @param state current state of the primary table. Should already by setup to the correct state * from which we want to cleanup. * @throws IOException */ private void cleanupIndexStateFromBatchOnward(IndexUpdateManager updateMap, long batchTs, LocalTableState state) throws IOException { // get the cleanup for the current state state.setCurrentTimestamp(batchTs); addDeleteUpdatesToMap(updateMap, state, batchTs); Set<ColumnTracker> trackers = state.getTrackedColumns(); long minTs = ColumnTracker.NO_NEWER_PRIMARY_TABLE_ENTRY_TIMESTAMP; for (ColumnTracker tracker : trackers) { if (tracker.getTS() < minTs) { minTs = tracker.getTS(); } } state.resetTrackedColumns(); if (!ColumnTracker.isNewestTime(minTs)) { state.setHints(Lists.newArrayList(trackers)); cleanupIndexStateFromBatchOnward(updateMap, minTs, state); } } /** * Get the index deletes from the codec {@link IndexCodec#getIndexDeletes(TableState)} and then * add them to the update map. 
* <p> * Expects the {@link LocalTableState} to already be correctly setup (correct timestamp, updates * applied, etc). * @throws IOException */ protected void addDeleteUpdatesToMap(IndexUpdateManager updateMap, LocalTableState state, long ts) throws IOException { Iterable<IndexUpdate> cleanup = codec.getIndexDeletes(state); if (cleanup != null) { for (IndexUpdate d : cleanup) { if (!d.isValid()) { continue; } // override the timestamps in the delete to match the current batch. Delete remove = (Delete)d.getUpdate(); remove.setTimestamp(ts); updateMap.addIndexUpdate(d.getTableName(), remove); } } } @Override public Collection<Pair<Mutation, byte[]>> getIndexUpdate(Delete d) throws IOException { // stores all the return values IndexUpdateManager updateMap = new IndexUpdateManager(); // We have to figure out which kind of delete it is, since we need to do different things if its // a general (row) delete, versus a delete of just a single column or family Map<byte[], List<Cell>> families = d.getFamilyCellMap(); /* * Option 1: its a row delete marker, so we just need to delete the most recent state for each * group, as of the specified timestamp in the delete. This can happen if we have a single row * update and it is part of a batch mutation (prepare doesn't happen until later... maybe a * bug?). In a single delete, this delete gets all the column families appended, so the family * map won't be empty by the time it gets here. 
*/ if (families.size() == 0) { LocalTableState state = new LocalTableState(env, localTable, d); // get a consistent view of name long now = d.getTimeStamp(); if (now == HConstants.LATEST_TIMESTAMP) { now = EnvironmentEdgeManager.currentTimeMillis(); // update the delete's idea of 'now' to be consistent with the index d.setTimestamp(now); } // get deletes from the codec // we only need to get deletes and not add puts because this delete covers all columns addDeleteUpdatesToMap(updateMap, state, now); /* * Update the current state for all the kvs in the delete. Generally, we would just iterate * the family map, but since we go here, the family map is empty! Therefore, we need to fake a * bunch of family deletes (just like hos HRegion#prepareDelete works). This is just needed * for current version of HBase that has an issue where the batch update doesn't update the * deletes before calling the hook. */ byte[] deleteRow = d.getRow(); for (byte[] family : this.env.getRegion().getTableDesc().getFamiliesKeys()) { state.addPendingUpdates(new KeyValue(deleteRow, family, null, now, KeyValue.Type.DeleteFamily)); } } else { // Option 2: Its actually a bunch single updates, which can have different timestamps. // Therefore, we need to do something similar to the put case and batch by timestamp batchMutationAndAddUpdates(updateMap, d); } if (LOG.isDebugEnabled()) { LOG.debug("Found index updates for Delete: " + d + "\n" + updateMap); } return updateMap.toMap(); } @Override public Collection<Pair<Mutation, byte[]>> getIndexUpdateForFilteredRows( Collection<KeyValue> filtered) throws IOException { // TODO Implement IndexBuilder.getIndexUpdateForFilteredRows return null; } /** * Exposed for testing! 
* @param codec codec to use for this instance of the builder */ public void setIndexCodecForTesting(IndexCodec codec) { this.codec = codec; } @Override public boolean isEnabled(Mutation m) throws IOException { // ask the codec to see if we should even attempt indexing return this.codec.isEnabled(m); } }
package org.bouncycastle.util;

import java.math.BigInteger;
import java.util.NoSuchElementException;

/**
 * General array utilities: null-safe equality, constant-time comparison, hashing,
 * cloning, copying, concatenation and reversal helpers for primitive and object arrays.
 */
public final class Arrays {
    private Arrays() {
        // static class, hide constructor
    }

    /**
     * Element-wise comparison; returns true when both refer to the same array
     * (including both null), false when exactly one is null or lengths differ.
     */
    public static boolean areEqual(boolean[] a, boolean[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        for (int i = 0; i != a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    public static boolean areEqual(char[] a, char[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        for (int i = 0; i != a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    public static boolean areEqual(byte[] a, byte[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        for (int i = 0; i != a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    /**
     * A constant time equals comparison - does not terminate early if
     * test will fail.
     *
     * @param a first array
     * @param b second array
     * @return true if arrays equal, false otherwise.
     */
    public static boolean constantTimeAreEqual(byte[] a, byte[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        // accumulate all differences; no data-dependent branch inside the loop
        int nonEqual = 0;
        for (int i = 0; i != a.length; i++) {
            nonEqual |= (a[i] ^ b[i]);
        }
        return nonEqual == 0;
    }

    public static boolean areEqual(int[] a, int[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        for (int i = 0; i != a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    public static boolean areEqual(long[] a, long[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        for (int i = 0; i != a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    /** Element-wise comparison using equals(); null elements compare equal to null only. */
    public static boolean areEqual(Object[] a, Object[] b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        if (a.length != b.length) {
            return false;
        }
        for (int i = 0; i != a.length; i++) {
            Object objA = a[i], objB = b[i];
            if (objA == null) {
                if (objB != null) {
                    return false;
                }
            } else if (!objA.equals(objB)) {
                return false;
            }
        }
        return true;
    }

    /** @return true if n occurs in a. Note: a must be non-null. */
    public static boolean contains(short[] a, short n) {
        for (int i = 0; i < a.length; ++i) {
            if (a[i] == n) {
                return true;
            }
        }
        return false;
    }

    public static boolean contains(int[] a, int n) {
        for (int i = 0; i < a.length; ++i) {
            if (a[i] == n) {
                return true;
            }
        }
        return false;
    }

    /** Set every element of array to value. */
    public static void fill(byte[] array, byte value) {
        for (int i = 0; i < array.length; i++) {
            array[i] = value;
        }
    }

    public static void fill(char[] array, char value) {
        for (int i = 0; i < array.length; i++) {
            array[i] = value;
        }
    }

    public static void fill(long[] array, long value) {
        for (int i = 0; i < array.length; i++) {
            array[i] = value;
        }
    }

    public static void fill(short[] array, short value) {
        for (int i = 0; i < array.length; i++) {
            array[i] = value;
        }
    }

    public static void fill(int[] array, int value) {
        for (int i = 0; i < array.length; i++) {
            array[i] = value;
        }
    }

    /**
     * Order-sensitive hash over the data (multiplier 257); null data hashes to 0.
     * Note: distinct from java.util.Arrays.hashCode - values are not interchangeable.
     */
    public static int hashCode(byte[] data) {
        if (data == null) {
            return 0;
        }
        int i = data.length;
        int hc = i + 1;
        while (--i >= 0) {
            hc *= 257;
            hc ^= data[i];
        }
        return hc;
    }

    public static int hashCode(byte[] data, int off, int len) {
        if (data == null) {
            return 0;
        }
        int i = len;
        int hc = i + 1;
        while (--i >= 0) {
            hc *= 257;
            hc ^= data[off + i];
        }
        return hc;
    }

    public static int hashCode(char[] data) {
        if (data == null) {
            return 0;
        }
        int i = data.length;
        int hc = i + 1;
        while (--i >= 0) {
            hc *= 257;
            hc ^= data[i];
        }
        return hc;
    }

    public static int hashCode(int[][] ints) {
        int hc = 0;
        for (int i = 0; i != ints.length; i++) {
            hc = hc * 257 + hashCode(ints[i]);
        }
        return hc;
    }

    public static int hashCode(int[] data) {
        if (data == null) {
            return 0;
        }
        int i = data.length;
        int hc = i + 1;
        while (--i >= 0) {
            hc *= 257;
            hc ^= data[i];
        }
        return hc;
    }

    public static int hashCode(int[] data, int off, int len) {
        if (data == null) {
            return 0;
        }
        int i = len;
        int hc = i + 1;
        while (--i >= 0) {
            hc *= 257;
            hc ^= data[off + i];
        }
        return hc;
    }

    public static int hashCode(long[] data) {
        if (data == null) {
            return 0;
        }
        int i = data.length;
        int hc = i + 1;
        while (--i >= 0) {
            long di = data[i];
            hc *= 257;
            hc ^= (int)di;
            hc *= 257;
            hc ^= (int)(di >>> 32);
        }
        return hc;
    }

    public static int hashCode(long[] data, int off, int len) {
        if (data == null) {
            return 0;
        }
        int i = len;
        int hc = i + 1;
        while (--i >= 0) {
            long di = data[off + i];
            hc *= 257;
            hc ^= (int)di;
            hc *= 257;
            hc ^= (int)(di >>> 32);
        }
        return hc;
    }

    public static int hashCode(short[][][] shorts) {
        int hc = 0;
        for (int i = 0; i != shorts.length; i++) {
            hc = hc * 257 + hashCode(shorts[i]);
        }
        return hc;
    }

    public static int hashCode(short[][] shorts) {
        int hc = 0;
        for (int i = 0; i != shorts.length; i++) {
            hc = hc * 257 + hashCode(shorts[i]);
        }
        return hc;
    }

    public static int hashCode(short[] data) {
        if (data == null) {
            return 0;
        }
        int i = data.length;
        int hc = i + 1;
        while (--i >= 0) {
            // deliberately masks to the low 8 bits - preserved for hash compatibility
            hc *= 257;
            hc ^= (data[i] & 0xff);
        }
        return hc;
    }

    public static int hashCode(Object[] data) {
        if (data == null) {
            return 0;
        }
        int i = data.length;
        int hc = i + 1;
        while (--i >= 0) {
            // NOTE: throws NullPointerException on null elements (historic behavior)
            hc *= 257;
            hc ^= data[i].hashCode();
        }
        return hc;
    }

    /** Null-safe copy: returns null for null input, otherwise a fresh array. */
    public static byte[] clone(byte[] data) {
        if (data == null) {
            return null;
        }
        byte[] copy = new byte[data.length];
        System.arraycopy(data, 0, copy, 0, data.length);
        return copy;
    }

    public static char[] clone(char[] data) {
        if (data == null) {
            return null;
        }
        char[] copy = new char[data.length];
        System.arraycopy(data, 0, copy, 0, data.length);
        return copy;
    }

    /**
     * Copy data into existing when it is the right size, otherwise allocate a
     * fresh copy. Returns null for null data.
     */
    public static byte[] clone(byte[] data, byte[] existing) {
        if (data == null) {
            return null;
        }
        if ((existing == null) || (existing.length != data.length)) {
            return clone(data);
        }
        System.arraycopy(data, 0, existing, 0, existing.length);
        return existing;
    }

    /** Deep copy of a 2-D array (rows are cloned too). */
    public static byte[][] clone(byte[][] data) {
        if (data == null) {
            return null;
        }
        byte[][] copy = new byte[data.length][];
        for (int i = 0; i != copy.length; i++) {
            copy[i] = clone(data[i]);
        }
        return copy;
    }

    /** Deep copy of a 3-D array. */
    public static byte[][][] clone(byte[][][] data) {
        if (data == null) {
            return null;
        }
        byte[][][] copy = new byte[data.length][][];
        for (int i = 0; i != copy.length; i++) {
            copy[i] = clone(data[i]);
        }
        return copy;
    }

    public static int[] clone(int[] data) {
        if (data == null) {
            return null;
        }
        int[] copy = new int[data.length];
        System.arraycopy(data, 0, copy, 0, data.length);
        return copy;
    }

    public static long[] clone(long[] data) {
        if (data == null) {
            return null;
        }
        long[] copy = new long[data.length];
        System.arraycopy(data, 0, copy, 0, data.length);
        return copy;
    }

    public static long[] clone(long[] data, long[] existing) {
        if (data == null) {
            return null;
        }
        if ((existing == null) || (existing.length != data.length)) {
            return clone(data);
        }
        System.arraycopy(data, 0, existing, 0, existing.length);
        return existing;
    }

    public static short[] clone(short[] data) {
        if (data == null) {
            return null;
        }
        short[] copy = new short[data.length];
        System.arraycopy(data, 0, copy, 0, data.length);
        return copy;
    }

    public static BigInteger[] clone(BigInteger[] data) {
        if (data == null) {
            return null;
        }
        BigInteger[] copy = new BigInteger[data.length];
        System.arraycopy(data, 0, copy, 0, data.length);
        return copy;
    }

    /**
     * Copy data into a new array of length newLength, truncating or
     * zero-padding as needed. Note: data must be non-null.
     */
    public static byte[] copyOf(byte[] data, int newLength) {
        byte[] tmp = new byte[newLength];
        System.arraycopy(data, 0, tmp, 0, Math.min(newLength, data.length));
        return tmp;
    }

    public static char[] copyOf(char[] data, int newLength) {
        char[] tmp = new char[newLength];
        System.arraycopy(data, 0, tmp, 0, Math.min(newLength, data.length));
        return tmp;
    }

    public static int[] copyOf(int[] data, int newLength) {
        int[] tmp = new int[newLength];
        System.arraycopy(data, 0, tmp, 0, Math.min(newLength, data.length));
        return tmp;
    }

    public static long[] copyOf(long[] data, int newLength) {
        long[] tmp = new long[newLength];
        System.arraycopy(data, 0, tmp, 0, Math.min(newLength, data.length));
        return tmp;
    }

    public static BigInteger[] copyOf(BigInteger[] data, int newLength) {
        BigInteger[] tmp = new BigInteger[newLength];
        System.arraycopy(data, 0, tmp, 0, Math.min(newLength, data.length));
        return tmp;
    }

    /**
     * Make a copy of a range of bytes from the passed in data array. The range can
     * extend beyond the end of the input array, in which case the return array will
     * be padded with zeroes.
     *
     * @param data the array from which the data is to be copied.
     * @param from the start index at which the copying should take place.
     * @param to the final index of the range (exclusive).
     *
     * @return a new byte array containing the range given.
     */
    public static byte[] copyOfRange(byte[] data, int from, int to) {
        int newLength = getLength(from, to);
        byte[] tmp = new byte[newLength];
        System.arraycopy(data, from, tmp, 0, Math.min(newLength, data.length - from));
        return tmp;
    }

    public static int[] copyOfRange(int[] data, int from, int to) {
        int newLength = getLength(from, to);
        int[] tmp = new int[newLength];
        System.arraycopy(data, from, tmp, 0, Math.min(newLength, data.length - from));
        return tmp;
    }

    public static long[] copyOfRange(long[] data, int from, int to) {
        int newLength = getLength(from, to);
        long[] tmp = new long[newLength];
        System.arraycopy(data, from, tmp, 0, Math.min(newLength, data.length - from));
        return tmp;
    }

    public static BigInteger[] copyOfRange(BigInteger[] data, int from, int to) {
        int newLength = getLength(from, to);
        BigInteger[] tmp = new BigInteger[newLength];
        System.arraycopy(data, from, tmp, 0, Math.min(newLength, data.length - from));
        return tmp;
    }

    /**
     * Validate a [from, to) range and return its length.
     * @throws IllegalArgumentException if from &gt; to.
     */
    private static int getLength(int from, int to) {
        int newLength = to - from;
        if (newLength < 0) {
            // BUG FIX: previously used new StringBuffer(from), which is the int-CAPACITY
            // constructor - 'from' was silently dropped from the message, and a negative
            // 'from' raised NegativeArraySizeException instead of IllegalArgumentException.
            throw new IllegalArgumentException(from + " > " + to);
        }
        return newLength;
    }

    /** Return a new array consisting of a followed by b; a null a yields {b}. */
    public static byte[] append(byte[] a, byte b) {
        if (a == null) {
            return new byte[]{ b };
        }
        int length = a.length;
        byte[] result = new byte[length + 1];
        System.arraycopy(a, 0, result, 0, length);
        result[length] = b;
        return result;
    }

    public static short[] append(short[] a, short b) {
        if (a == null) {
            return new short[]{ b };
        }
        int length = a.length;
        short[] result = new short[length + 1];
        System.arraycopy(a, 0, result, 0, length);
        result[length] = b;
        return result;
    }

    public static int[] append(int[] a, int b) {
        if (a == null) {
            return new int[]{ b };
        }
        int length = a.length;
        int[] result = new int[length + 1];
        System.arraycopy(a, 0, result, 0, length);
        result[length] = b;
        return result;
    }

    /** Concatenate two arrays; a null argument is treated as absent (clone of the other). */
    public static byte[] concatenate(byte[] a, byte[] b) {
        if (a != null && b != null) {
            byte[] rv = new byte[a.length + b.length];
            System.arraycopy(a, 0, rv, 0, a.length);
            System.arraycopy(b, 0, rv, a.length, b.length);
            return rv;
        } else if (b != null) {
            return clone(b);
        } else {
            return clone(a);
        }
    }

    public static byte[] concatenate(byte[] a, byte[] b, byte[] c) {
        if (a != null && b != null && c != null) {
            byte[] rv = new byte[a.length + b.length + c.length];
            System.arraycopy(a, 0, rv, 0, a.length);
            System.arraycopy(b, 0, rv, a.length, b.length);
            System.arraycopy(c, 0, rv, a.length + b.length, c.length);
            return rv;
        } else if (a == null) {
            return concatenate(b, c);
        } else if (b == null) {
            return concatenate(a, c);
        } else {
            return concatenate(a, b);
        }
    }

    public static byte[] concatenate(byte[] a, byte[] b, byte[] c, byte[] d) {
        if (a != null && b != null && c != null && d != null) {
            byte[] rv = new byte[a.length + b.length + c.length + d.length];
            System.arraycopy(a, 0, rv, 0, a.length);
            System.arraycopy(b, 0, rv, a.length, b.length);
            System.arraycopy(c, 0, rv, a.length + b.length, c.length);
            System.arraycopy(d, 0, rv, a.length + b.length + c.length, d.length);
            return rv;
        } else if (d == null) {
            return concatenate(a, b, c);
        } else if (c == null) {
            return concatenate(a, b, d);
        } else if (b == null) {
            return concatenate(a, c, d);
        } else {
            return concatenate(b, c, d);
        }
    }

    public static int[] concatenate(int[] a, int[] b) {
        if (a == null) {
            return clone(b);
        }
        if (b == null) {
            return clone(a);
        }
        int[] c = new int[a.length + b.length];
        System.arraycopy(a, 0, c, 0, a.length);
        System.arraycopy(b, 0, c, a.length, b.length);
        return c;
    }

    /** Return a new array consisting of b followed by a; a null a yields {b}. */
    public static byte[] prepend(byte[] a, byte b) {
        if (a == null) {
            return new byte[]{ b };
        }
        int length = a.length;
        byte[] result = new byte[length + 1];
        System.arraycopy(a, 0, result, 1, length);
        result[0] = b;
        return result;
    }

    public static short[] prepend(short[] a, short b) {
        if (a == null) {
            return new short[]{ b };
        }
        int length = a.length;
        short[] result = new short[length + 1];
        System.arraycopy(a, 0, result, 1, length);
        result[0] = b;
        return result;
    }

    public static int[] prepend(int[] a, int b) {
        if (a == null) {
            return new int[]{ b };
        }
        int length = a.length;
        int[] result = new int[length + 1];
        System.arraycopy(a, 0, result, 1, length);
        result[0] = b;
        return result;
    }

    /** Return a reversed copy of a (null in, null out). */
    public static byte[] reverse(byte[] a) {
        if (a == null) {
            return null;
        }
        int p1 = 0, p2 = a.length;
        byte[] result = new byte[p2];
        while (--p2 >= 0) {
            result[p2] = a[p1++];
        }
        return result;
    }

    public static int[] reverse(int[] a) {
        if (a == null) {
            return null;
        }
        int p1 = 0, p2 = a.length;
        int[] result = new int[p2];
        while (--p2 >= 0) {
            result[p2] = a[p1++];
        }
        return result;
    }

    /**
     * Iterator backed by a specific array.
     */
    public static class Iterator<T> implements java.util.Iterator<T> {
        private final T[] dataArray;
        private int position = 0;

        /**
         * Base constructor.
         * <p>
         * Note: the array is not cloned, changes to it will affect the values returned by next().
         * </p>
         *
         * @param dataArray array backing the iterator.
         */
        public Iterator(T[] dataArray) {
            this.dataArray = dataArray;
        }

        public boolean hasNext() {
            return position < dataArray.length;
        }

        public T next() {
            if (position == dataArray.length) {
                throw new NoSuchElementException("Out of elements: " + position);
            }
            return dataArray[position++];
        }

        public void remove() {
            throw new UnsupportedOperationException("Cannot remove element from an Array.");
        }
    }
}
package controllers; import com.google.api.services.gmail.model.Message; import com.jfoenix.controls.JFXButton; import com.jfoenix.controls.JFXDialog; import com.jfoenix.controls.JFXDialogLayout; import com.jfoenix.controls.JFXSnackbar; import dialogActivities.ComposeActivity; import dialogActivities.ZoomInMailView; import gmailServices.FormattedMessage; import gmailServices.GmailMessages; import gmailServices.GmailOperations; import javafx.application.Platform; import javafx.concurrent.Task; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.geometry.Pos; import javafx.scene.control.Label; import javafx.scene.control.TextArea; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.layout.*; import javafx.scene.paint.Paint; import javafx.scene.text.Font; import javafx.scene.text.Text; import javafx.scene.web.WebEngine; import javafx.scene.web.WebView; import sun.awt.PlatformFont; import utilClasses.NotifyUser; import java.io.File; import java.io.IOException; import java.util.List; /** * Created by Ashok on 4/18/2017. 
*/ public class ScreenComponent extends GridPane { private static ScreenComponent screenComponent; private static StackPane componentParent; private static BorderPane inboxComponent; private static BorderPane draftComponent; private static BorderPane sentComponent; private static BorderPane trashComponent; private Label subjectLabel; private Label toFromLabel; private Label dateLabel; private WebView messageDisplay; private WebEngine messageEngine; private JFXButton attachments; private TextArea messageTextArea; private JFXButton replyButton; private JFXButton editDraft; private JFXButton forwardSent; private JFXButton restore; private JFXButton deleteInboxMessage; private JFXButton deleteSentMessage; private JFXButton zoomMail; private JFXButton forwardInbox; private String folderLabel; private Message message = null; private FormattedMessage formattedMessage = null; int index; public ScreenComponent(){ initComponent(); } private void setActions(){ attachments.setOnAction(new EventHandler<ActionEvent>() { @Override public void handle(ActionEvent event) { JFXSnackbar snackbar = new JFXSnackbar(MainUI3Controller.getScreenParent()); snackbar.show("Downloading", 2000); Task<Void> downloadAttachments = new Task<Void>() { @Override protected Void call() throws Exception { try { List<File> attachmentsList = GmailOperations.downloadAttachments(message, System.getProperty("user.home")); if (attachmentsList != null && !attachmentsList.isEmpty()) { Platform.runLater(new Runnable() { @Override public void run() { NotifyUser.getNotification("Attachments Downloaded", "" + attachmentsList.size() + " Attachments successfully downloaded to" + System.getProperty("user.home")).showInformation(); } }); } else { Platform.runLater(new Runnable() { @Override public void run() { JFXSnackbar snackbar = new JFXSnackbar(MainUI3Controller.getScreenParent()); snackbar.show("No attachments with this mail", 5000); } }); } }catch (IOException e) { e.printStackTrace(); Platform.runLater(new 
Runnable() { @Override public void run() { NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation(); } }); } return null; } }; Thread startDownload = new Thread(downloadAttachments); startDownload.setDaemon(true); startDownload.start(); //int count; /* JFXSnackbar snackbar; try { List<File> attachmentsList = GmailOperations.downloadAttachments(message, System.getProperty("user.home")); if(attachmentsList != null && !attachmentsList.isEmpty()) NotifyUser.getNotification("Attachments Downloaded", ""+attachmentsList.size()+" Attachments successfully downloaded to"+System.getProperty("user.home")).showInformation(); else { snackbar = new JFXSnackbar(MainUI3Controller.getScreenParent()); //snackbar.getStylesheets().add(getClass().getResource("/sneckbar.css").toExternalForm()); snackbar.show("No attachments with this mail", 5000); } } catch (IOException e) { e.printStackTrace(); NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation(); } */ } }); replyButton.setOnAction(new EventHandler<ActionEvent>() { @Override public void handle(ActionEvent event) { try { GmailOperations.sendMessage(formattedMessage.getFromEmailId(), GmailMessages.USERS_EMAIL_ADDRESS, "Reply for "+formattedMessage.getSubject(), messageTextArea.getText(), false, null ); messageTextArea.setText(""); } catch (Exception e) { e.printStackTrace(); System.out.println("Change it with sneckbar"); NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation(); } } }); editDraft.setOnAction(new EventHandler<ActionEvent>() { @Override public void handle(ActionEvent event) { ComposeActivity composeActivity = new ComposeActivity(formattedMessage, message, true, false); composeActivity.setStage(AmailMain.getStage()); JFXDialogLayout content = new JFXDialogLayout(); content.setHeading(new Text("Compose")); 
// --- Tail of ScreenComponent.setActions(): the opening of setActions() and the
// class header are above this chunk. The fragment below finishes the handler
// that opens a compose dialog for the currently selected message.
content.setBody(composeActivity.getContent());
JFXDialog dialog = new JFXDialog(MainUI3Controller.getScreenParent(), content, JFXDialog.DialogTransition.CENTER);
composeActivity.setAction(dialog);
dialog.show();
}
});
// Restore a trashed message via the Gmail API; on IOException only a
// "connection lost" notification is shown (the failure is otherwise swallowed).
restore.setOnAction(new EventHandler<ActionEvent>() {
    @Override
    public void handle(ActionEvent event) {
        try {
            GmailOperations.untrashMessage(formattedMessage.getMessageId());
        } catch (IOException e) {
            NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation();
        }
    }
});
// Open the current message body in a zoomed, non-dismissable dialog.
// NOTE(review): the heading says "Compose" even though this is a read-only zoom view.
zoomMail.setOnAction(new EventHandler<ActionEvent>() {
    @Override
    public void handle(ActionEvent event) {
        ZoomInMailView zoomInMailView = new ZoomInMailView();
        JFXDialogLayout content = new JFXDialogLayout();
        content.setHeading(new Text("Compose"));
        content.setBody(zoomInMailView.getContainer());
        JFXDialog dialog = new JFXDialog(MainUI3Controller.getScreenParent(), content, JFXDialog.DialogTransition.CENTER);
        zoomInMailView.setInfo(formattedMessage.getBodyText(), dialog);
        dialog.show();
        dialog.setOverlayClose(false); // user must close explicitly; clicking the overlay does nothing
    }
});
// Forward a message from the Sent folder: opens a compose dialog pre-filled
// from the current message (the boolean flags presumably select forward mode
// — TODO confirm against ComposeActivity's constructor).
forwardSent.setOnAction(new EventHandler<ActionEvent>() {
    @Override
    public void handle(ActionEvent event) {
        ComposeActivity composeActivity = new ComposeActivity(formattedMessage, message, false, true);
        composeActivity.setStage(AmailMain.getStage());
        JFXDialogLayout content = new JFXDialogLayout();
        content.setHeading(new Text("Compose"));
        content.setBody(composeActivity.getContent());
        JFXDialog dialog = new JFXDialog(MainUI3Controller.getScreenParent(), content, JFXDialog.DialogTransition.CENTER);
        composeActivity.setAction(dialog);
        dialog.show();
    }
});
// Forward a message from the Inbox folder — identical flow to forwardSent above.
forwardInbox.setOnAction(new EventHandler<ActionEvent>() {
    @Override
    public void handle(ActionEvent event) {
        ComposeActivity composeActivity = new ComposeActivity(formattedMessage, message, false, true);
        composeActivity.setStage(AmailMain.getStage());
        JFXDialogLayout content = new JFXDialogLayout();
        content.setHeading(new Text("Compose"));
        content.setBody(composeActivity.getContent());
        JFXDialog dialog = new JFXDialog(MainUI3Controller.getScreenParent(), content, JFXDialog.DialogTransition.CENTER);
        composeActivity.setAction(dialog);
        dialog.show();
    }
});
}

/**
 * Builds the overall grid layout: header row (subject/from/date), message row
 * (web view + zoom bar), and the folder-specific action row, then constructs
 * the per-folder sub-panes and wires all button handlers.
 */
private void initComponent(){
    initVariables();
    //this.getStylesheets().add(controllers.ScreenComponent.class.getResource("/uiComponents.css").toExternalForm());
    ColumnConstraints column0 = new ColumnConstraints(10, 100, USE_COMPUTED_SIZE);
    ColumnConstraints column1 = new ColumnConstraints(10, 100, USE_COMPUTED_SIZE);
    //column0.setHgrow(Priority.ALWAYS);
    //column1.setHgrow(Priority.ALWAYS);
    RowConstraints row0 = new RowConstraints(10, 110, 195);   // header
    RowConstraints row1 = new RowConstraints(10, 360, 800);   // message body (grows)
    RowConstraints row2 = new RowConstraints(10, 140, 250);   // folder action area
    row1.setVgrow(Priority.ALWAYS);
    this.getColumnConstraints().add(column0);
    this.getColumnConstraints().add(column1);
    this.getRowConstraints().add(row0);
    this.getRowConstraints().add(row1);
    this.getRowConstraints().add(row2);
    // Each row spans both columns (colspan 2, rowspan 1).
    this.add(setRow0(), 0, 0, 2, 1);
    this.add(setRow1(), 0, 1, 2, 1);
    this.add(componentParent, 0, 2, 2, 1);
    inboxComponent = setInboxComponent();
    draftComponent = setDraftComponent();
    sentComponent = setSentComponent();
    trashComponent = setTrashComponent();
    setScreenComponent("INBOX"); // start in the inbox view
    //screenComponent.setGridLinesVisible(true);
    setActions();
}

/**
 * Header row: subject label on top, from/to and date labels beneath it.
 * The "Hello"/"hello" strings are placeholders overwritten by setInfo().
 */
private BorderPane setRow0(){
    BorderPane rowParent = new BorderPane();
    BorderPane childParent = new BorderPane();
    HBox childContainer = new HBox();
    childContainer.setAlignment(Pos.CENTER);
    Region region = new Region(); // left spacer
    region.setPrefHeight(90);
    region.setPrefWidth(30);
    //subjectLabel = new Label();
    subjectLabel.setPrefHeight(20);
    subjectLabel.setPrefWidth(430);
    subjectLabel.setFont(new Font(24));
    subjectLabel.setText("Hello");
    childContainer.getChildren().add(region);
    childContainer.getChildren().add(subjectLabel);
    childParent.setLeft(childContainer);
    HBox childContainer2 = new HBox();
    childContainer2.setAlignment(Pos.CENTER_LEFT);
    Region region1 = new Region(); // left spacer for the from/date line
    region1.setPrefWidth(30);
    region1.setPrefHeight(40);
    //toFromLabel = new Label();
    toFromLabel.setText("Hello");
    //dateLabel = new Label();
    dateLabel.setText("hello");
    childContainer2.getChildren().add(region1);
    childContainer2.getChildren().add(toFromLabel);
    childContainer2.getChildren().add(dateLabel);
    childContainer2.setMargin(dateLabel, new Insets(0, 0, 0, 20));
    rowParent.setCenter(childParent);
    rowParent.setBottom(childContainer2);
    return rowParent;
}

/**
 * Message row: the WebView showing the mail body plus a bottom bar with
 * the Attachments and Zoom buttons.
 */
private VBox setRow1(){
    VBox parentContainer = new VBox();
    // messageDisplay = new WebView();
    //messageEngine = messageDisplay.getEngine();
    messageDisplay.setPrefHeight(320);
    parentContainer.getChildren().add(messageDisplay);
    VBox.setVgrow(messageDisplay, Priority.ALWAYS);
    HBox childContainer = new HBox();
    childContainer.setPrefHeight(50);
    childContainer.setAlignment(Pos.CENTER_LEFT);
    //attachments.getStyleClass().add("button-raised");
    setButtonStyle(attachments);
    childContainer.getChildren().add(attachments);
    Region region = new Region(); // flexible spacer pushing zoomMail to the right
    childContainer.getChildren().add(region);
    childContainer.getChildren().add(zoomMail);
    childContainer.setMargin(attachments, new Insets(0, 0, 0, 20));
    childContainer.setMargin(zoomMail, new Insets(0, 20, 0, 0));
    HBox.setHgrow(region, Priority.ALWAYS);
    parentContainer.getChildren().add(childContainer);
    return parentContainer;
}

/**
 * Inbox action pane: reply text area plus Forward/Send buttons, and the
 * (currently not displayed) delete handler that trashes the message and
 * removes it from the cached inbox list.
 */
private BorderPane setInboxComponent(){
    BorderPane container = new BorderPane();
    HBox childContainer = new HBox();
    childContainer.setPrefHeight(93);
    //childContainer.setAlignment(Pos.CENTER_RIGHT);
    //messageTextArea = new TextArea();
    messageTextArea.setPromptText("Reply");
    messageTextArea.setPrefHeight(200);
    childContainer.getChildren().add(messageTextArea);
    HBox.setHgrow(messageTextArea, Priority.ALWAYS);
    HBox.setMargin(messageTextArea, new Insets(0, 0, 0, 5));
    Region region = new Region(); // right-hand spacer
    region.setPrefHeight(90);
    region.setPrefWidth(45);
    childContainer.getChildren().add(region);
    container.setCenter(childContainer);
    HBox childContainer1 = new HBox();
    childContainer1.setPrefHeight(45);
    childContainer1.setAlignment(Pos.CENTER_RIGHT);
    //replyButton = new JFXButton("Send");
    setButtonStyle(replyButton);
    setButtonStyle(deleteInboxMessage);
    //childContainer1.getChildren().add(deleteInboxMessage);
    childContainer1.getChildren().add(forwardInbox);
    childContainer1.getChildren().add(replyButton);
    HBox.setMargin(replyButton, new Insets(0, 20, 0, 20));
    container.setBottom(childContainer1);
    // NOTE(review): handler is wired even though the button is never added
    // to the scene (see commented add above).
    deleteInboxMessage.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent event) {
            try{
                GmailOperations.trashMessage(formattedMessage.getMessageId());
                GmailMessages.inboxMessages.remove(index);
            } catch (IOException e) {
                e.printStackTrace();
                NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation();
            }
        }
    });
    return container;
}

/** Draft action pane: only an Edit button in the bottom-right corner. */
private BorderPane setDraftComponent(){
    BorderPane container = new BorderPane();
    HBox childContainer = new HBox();
    childContainer.setPrefHeight(93);
    //childContainer.setAlignment(Pos.CENTER_RIGHT);
    //messageTextArea = new TextArea();
    container.setCenter(childContainer);
    HBox childContainer1 = new HBox();
    childContainer1.setPrefHeight(45);
    childContainer1.setAlignment(Pos.CENTER_RIGHT);
    //replyButton = new JFXButton("Send");
    setButtonStyle(editDraft);
    childContainer1.getChildren().add(editDraft);
    HBox.setMargin(editDraft, new Insets(0, 20, 0, 0));
    container.setBottom(childContainer1);
    return container;
}

/**
 * Sent action pane: Forward button plus a delete handler mirroring the inbox
 * one. NOTE(review): the delete handler removes from GmailMessages.inboxMessages,
 * not a "sent" list — looks like a copy/paste slip; confirm against GmailMessages.
 */
private BorderPane setSentComponent(){
    BorderPane container = new BorderPane();
    HBox childContainer = new HBox();
    childContainer.setPrefHeight(93);
    //childContainer.setAlignment(Pos.CENTER_RIGHT);
    //messageTextArea = new TextArea();
    container.setCenter(childContainer);
    HBox childContainer1 = new HBox();
    childContainer1.setPrefHeight(45);
    childContainer1.setAlignment(Pos.CENTER_RIGHT);
    setButtonStyle(forwardSent);
    setButtonStyle(deleteSentMessage);
    //childContainer1.getChildren().add(deleteSentMessage);
    childContainer1.getChildren().add(forwardSent);
    HBox.setMargin(forwardSent, new Insets(0, 20, 0, 20));
    container.setBottom(childContainer1);
    deleteSentMessage.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent event) {
            try{
                GmailOperations.trashMessage(formattedMessage.getMessageId());
                GmailMessages.inboxMessages.remove(index);
            } catch (IOException e) {
                e.printStackTrace();
                NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation();
            }
        }
    });
    return container;
}

/** Trash action pane: only a Restore button in the bottom-right corner. */
private BorderPane setTrashComponent(){
    BorderPane container = new BorderPane();
    HBox childContainer = new HBox();
    childContainer.setPrefHeight(93);
    //childContainer.setAlignment(Pos.CENTER_RIGHT);
    //messageTextArea = new TextArea();
    container.setCenter(childContainer);
    HBox childContainer1 = new HBox();
    childContainer1.setPrefHeight(45);
    childContainer1.setAlignment(Pos.CENTER_RIGHT);
    setButtonStyle(restore);
    childContainer1.getChildren().add(restore);
    HBox.setMargin(restore, new Insets(0, 20, 0, 0));
    container.setBottom(childContainer1);
    return container;
}

/** Constructs every control used by the screen (labels, buttons, web view). */
private void initVariables(){
    componentParent = new StackPane();
    subjectLabel = new Label();
    toFromLabel = new Label();
    dateLabel = new Label();
    messageDisplay = new WebView();
    messageEngine = messageDisplay.getEngine();
    messageEngine.setJavaScriptEnabled(true);
    attachments = new JFXButton("Attachments");
    messageTextArea = new TextArea();
    replyButton = new JFXButton("Send");
    editDraft = new JFXButton("Edit");
    // NOTE(review): forwardSent is labeled "Edit" but acts as a Forward button
    // (see its handler in setActions) — likely a wrong caption.
    forwardSent = new JFXButton("Edit");
    restore = new JFXButton("Restore");
    deleteInboxMessage = new JFXButton();
    deleteSentMessage = new JFXButton();
    // Same ImageView instance is set on both delete buttons — NOTE(review):
    // a JavaFX node can only live in one scene graph location at a time.
    ImageView deleteImage = new ImageView(new Image(getClass().getResourceAsStream("/delete.png")));
    deleteImage.setFitWidth(20);
    deleteImage.setFitHeight(20);
    deleteInboxMessage.setGraphic(deleteImage);
    deleteSentMessage.setGraphic(deleteImage);
    zoomMail = new JFXButton("Zoom");
    setButtonStyle(zoomMail);
    forwardInbox = new JFXButton("Forward");
    setButtonStyle(forwardInbox);
}

/**
 * Swaps in the action pane matching the given folder name and remembers the
 * folder in folderLabel. Unknown names fall through silently.
 */
public void setScreenComponent(String folderName){
    folderLabel = folderName;
    switch(folderName){
        case "INBOX": setComponent(inboxComponent); break;
        case "DRAFT": setComponent(draftComponent); break;
        case "SENT": setComponent(sentComponent); break;
        case "TRASH": setComponent(trashComponent);
    }
}

/**
 * Overload that also records the list index of the message being shown,
 * used by the delete handlers to remove the right cached entry.
 */
public void setScreenComponent(String folderName, int index){
    this.index = index;
    folderLabel = folderName;
    switch(folderName){
        case "INBOX": setComponent(inboxComponent); break;
        case "DRAFT": setComponent(draftComponent); break;
        case "SENT": setComponent(sentComponent); break;
        case "TRASH": setComponent(trashComponent);
    }
}

/** Replaces the single child of the component holder with the given pane. */
private void setComponent(BorderPane component){
    if(componentParent.getChildren().isEmpty()) componentParent.getChildren().add(0, component);
    else{
        componentParent.getChildren().remove(0);
        componentParent.getChildren().add(0, component);
    }
}

/**
 * Loads a message into the UI. If the body is not cached yet it is fetched
 * via GmailOperations (may throw IOException on network failure, in which
 * case a one-shot "connection lost" notification is shown).
 */
public void setInfo(FormattedMessage formattedMessage){
    this.formattedMessage = formattedMessage;
    try {
        if(formattedMessage.getBodyText() == null || formattedMessage.getBodyText().equals("")) {
            message = GmailOperations.getMessage(formattedMessage.getMessageId());
            formattedMessage.setBodyText(GmailOperations.getMessageBody(message));
            messageEngine.loadContent(formattedMessage.getBodyText());
        }
        else messageEngine.loadContent(formattedMessage.getBodyText());
    } catch (IOException e) {
        e.printStackTrace();
        if(AmailMain.isInternetUp){
            AmailMain.isInternetUp = false; // suppress repeated notifications
            NotifyUser.getNotification("Internet connection has lost", "Please check your internet connection").showInformation();
        }
    }
    subjectLabel.setText(formattedMessage.getSubject());
    dateLabel.setText(formattedMessage.getDate());
    // NOTE(review): '==' compares String references, not content; this only
    // works because folderLabel is assigned from the same literals in
    // setScreenComponent — should be equals().
    if(folderLabel == "INBOX" || folderLabel == "TRASH"){
        toFromLabel.setText("From " + formattedMessage.getFrom() + " to you");
    }
    else toFromLabel.setText("From you to " + formattedMessage.getTo());
}

/** Applies the shared raised-blue visual style to a JFoenix button. */
public void setButtonStyle(JFXButton button){
    button.setStyle("-fx-background-color: #0091EA;");
    button.setButtonType(JFXButton.ButtonType.RAISED);
    button.setTextFill(Paint.valueOf("WHITE"));
}

/**
 * Lazily-initialized singleton accessor.
 * NOTE(review): not thread-safe; fine if only ever called from the FX thread.
 */
public static ScreenComponent getInstance(){
    if(screenComponent == null) screenComponent = new ScreenComponent();
    return screenComponent;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.util; import org.apache.commons.math3.distribution.IntegerDistribution; import org.apache.commons.math3.distribution.UniformIntegerDistribution; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; /** * This class contains test cases for the ResizableDoubleArray. 
 * */
public class ResizableDoubleArrayTest extends DoubleArrayAbstractTest {

    // da/ra are fixture fields inherited from DoubleArrayAbstractTest;
    // recreated before and nulled after every test.
    @After
    public void tearDown() throws Exception {
        da = null;
        ra = null;
    }

    @Before
    public void setUp() throws Exception {
        da = new ResizableDoubleArray();
        ra = new ResizableDoubleArray();
    }

    /** Exercises every constructor overload, including invalid-argument cases. */
    @Test
    public void testConstructors() {
        float defaultExpansionFactor = 2.0f;
        double defaultContractionCriteria = 2.5;
        int defaultMode = ResizableDoubleArray.MULTIPLICATIVE_MODE;
        ResizableDoubleArray testDa = new ResizableDoubleArray(2);
        Assert.assertEquals(0, testDa.getNumElements());
        Assert.assertEquals(2, testDa.getCapacity());
        Assert.assertEquals(defaultExpansionFactor, testDa.getExpansionFactor(), 0);
        Assert.assertEquals(defaultContractionCriteria, testDa.getContractionCriterion(), 0);
        Assert.assertEquals(defaultMode, testDa.getExpansionMode());
        try {
            da = new ResizableDoubleArray(-1); // negative initial capacity is rejected
            Assert.fail("Expecting IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        testDa = new ResizableDoubleArray((double[]) null); // null array -> empty instance
        Assert.assertEquals(0, testDa.getNumElements());
        double[] initialArray = new double[] { 0, 1, 2 };
        testDa = new ResizableDoubleArray(initialArray);
        Assert.assertEquals(3, testDa.getNumElements());
        testDa = new ResizableDoubleArray(2, 2.0);
        Assert.assertEquals(0, testDa.getNumElements());
        Assert.assertEquals(2, testDa.getCapacity());
        Assert.assertEquals(defaultExpansionFactor, testDa.getExpansionFactor(), 0);
        Assert.assertEquals(defaultContractionCriteria, testDa.getContractionCriterion(), 0);
        Assert.assertEquals(defaultMode, testDa.getExpansionMode());
        try {
            da = new ResizableDoubleArray(2, 0.5); // expansion factor < 1 is rejected
            Assert.fail("Expecting IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        testDa = new ResizableDoubleArray(2, 3.0);
        Assert.assertEquals(3.0f, testDa.getExpansionFactor(), 0);
        // Contraction criterion defaults to expansionFactor + 0.5.
        Assert.assertEquals(3.5f, testDa.getContractionCriterion(), 0);
        testDa = new ResizableDoubleArray(2, 2.0, 3.0);
        Assert.assertEquals(0, testDa.getNumElements());
        Assert.assertEquals(2, testDa.getCapacity());
        Assert.assertEquals(defaultExpansionFactor, testDa.getExpansionFactor(), 0);
        Assert.assertEquals(3.0f, testDa.getContractionCriterion(), 0);
        Assert.assertEquals(defaultMode, testDa.getExpansionMode());
        try {
            da = new ResizableDoubleArray(2, 2.0, 1.5); // contraction < expansion is rejected
            Assert.fail("Expecting IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        testDa = new ResizableDoubleArray(2, 2.0, 3.0, ResizableDoubleArray.ExpansionMode.ADDITIVE);
        Assert.assertEquals(0, testDa.getNumElements());
        Assert.assertEquals(2, testDa.getCapacity());
        Assert.assertEquals(defaultExpansionFactor, testDa.getExpansionFactor(), 0);
        Assert.assertEquals(3.0f, testDa.getContractionCriterion(), 0);
        Assert.assertEquals(ResizableDoubleArray.ADDITIVE_MODE, testDa.getExpansionMode());
        try {
            da = new ResizableDoubleArray(2, 2.0f, 2.5f, -1); // invalid (deprecated int) mode
            Assert.fail("Expecting IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        // Copy constructor
        testDa = new ResizableDoubleArray(2, 2.0, 3.0,
                                          ResizableDoubleArray.ExpansionMode.ADDITIVE);
        testDa.addElement(2.0);
        testDa.addElement(3.2);
        ResizableDoubleArray copyDa = new ResizableDoubleArray(testDa);
        Assert.assertEquals(copyDa, testDa);
        Assert.assertEquals(testDa, copyDa);
    }

    /** setElement far beyond the end must expand and zero-fill the gap. */
    @Test
    public void testSetElementArbitraryExpansion1() {
        // MULTIPLICATIVE_MODE
        da.addElement(2.0);
        da.addElement(4.0);
        da.addElement(6.0);
        da.setElement(1, 3.0);
        // Expand the array arbitrarily to 1000 items
        da.setElement(1000, 3.4);
        Assert.assertEquals("The number of elements should now be 1001, it isn't",
                da.getNumElements(), 1001);
        // NOTE(review): message says index 766 but the assertion reads index 760.
        Assert.assertEquals("Uninitialized Elements are default value of 0.0, index 766 wasn't",
                0.0, da.getElement( 760 ), Double.MIN_VALUE );
        Assert.assertEquals("The 1000th index should be 3.4, it isn't",
                3.4, da.getElement(1000), Double.MIN_VALUE );
        Assert.assertEquals("The 0th index should be 2.0, it isn't",
                2.0, da.getElement(0), Double.MIN_VALUE);
    }

    /** Capacity/numElements bookkeeping around the expansion boundary. */
    @Test
    public void testSetElementArbitraryExpansion2() {
        // Make sure numElements and expansion work correctly for expansion boundary cases
        da.addElement(2.0);
        da.addElement(4.0);
        da.addElement(6.0);
        Assert.assertEquals(16, ((ResizableDoubleArray) da).getCapacity());
        Assert.assertEquals(3, da.getNumElements());
        da.setElement(3, 7.0);
        Assert.assertEquals(16, ((ResizableDoubleArray) da).getCapacity());
        Assert.assertEquals(4, da.getNumElements());
        da.setElement(10, 10.0);
        Assert.assertEquals(16, ((ResizableDoubleArray) da).getCapacity());
        Assert.assertEquals(11, da.getNumElements());
        da.setElement(9, 10.0);
        Assert.assertEquals(16, ((ResizableDoubleArray) da).getCapacity());
        Assert.assertEquals(11, da.getNumElements());
        try {
            da.setElement(-2, 3);
            Assert.fail("Expecting ArrayIndexOutOfBoundsException for negative index");
        } catch (ArrayIndexOutOfBoundsException ex) {
            // expected
        }
        // ADDITIVE_MODE
        ResizableDoubleArray testDa = new ResizableDoubleArray(2, 2.0, 3.0,
                ResizableDoubleArray.ExpansionMode.ADDITIVE);
        Assert.assertEquals(2, testDa.getCapacity());
        testDa.addElement(1d);
        testDa.addElement(1d);
        Assert.assertEquals(2, testDa.getCapacity());
        testDa.addElement(1d);
        Assert.assertEquals(4, testDa.getCapacity());
    }

    /** After 1000 adds the backing store should have doubled up to 1024. */
    @Override
    @Test
    public void testAdd1000() {
        super.testAdd1000();
        Assert.assertEquals("Internal Storage length should be 1024 if we started out with initial capacity of " +
                "16 and an expansion factor of 2.0",
                1024, ((ResizableDoubleArray) da).getCapacity());
    }

    /** Bulk addElements in both expansion modes. */
    @Test
    public void testAddElements() {
        ResizableDoubleArray testDa = new ResizableDoubleArray();
        // MULTIPLICATIVE_MODE
        testDa.addElements(new double[] {4, 5, 6});
        Assert.assertEquals(3, testDa.getNumElements(), 0);
        Assert.assertEquals(4, testDa.getElement(0), 0);
        Assert.assertEquals(5, testDa.getElement(1), 0);
        Assert.assertEquals(6, testDa.getElement(2), 0);
        testDa.addElements(new double[] {4, 5, 6});
        Assert.assertEquals(6, testDa.getNumElements());
        // ADDITIVE_MODE  (x's are occupied storage locations, 0's are open)
        testDa = new ResizableDoubleArray(2, 2.0, 2.5,
                ResizableDoubleArray.ExpansionMode.ADDITIVE);
        Assert.assertEquals(2, testDa.getCapacity());
        testDa.addElements(new double[] { 1d }); // x,0
        testDa.addElements(new double[] { 2d }); // x,x
        testDa.addElements(new double[] { 3d }); // x,x,x,0 -- expanded
        Assert.assertEquals(1d, testDa.getElement(0), 0);
        Assert.assertEquals(2d, testDa.getElement(1), 0);
        Assert.assertEquals(3d, testDa.getElement(2), 0);
        Assert.assertEquals(4, testDa.getCapacity());  // x,x,x,0
        Assert.assertEquals(3, testDa.getNumElements());
    }

    /** Rolling adds: oldest element is dropped; also covers contraction. */
    @Override
    @Test
    public void testAddElementRolling() {
        super.testAddElementRolling();
        // MULTIPLICATIVE_MODE
        da.clear();
        da.addElement(1);
        da.addElement(2);
        da.addElementRolling(3);
        Assert.assertEquals(3, da.getElement(1), 0);
        da.addElementRolling(4);
        Assert.assertEquals(3, da.getElement(0), 0);
        Assert.assertEquals(4, da.getElement(1), 0);
        da.addElement(5);
        Assert.assertEquals(5, da.getElement(2), 0);
        da.addElementRolling(6);
        Assert.assertEquals(4, da.getElement(0), 0);
        Assert.assertEquals(5, da.getElement(1), 0);
        Assert.assertEquals(6, da.getElement(2), 0);
        // ADDITIVE_MODE  (x's are occupied storage locations, 0's are open)
        ResizableDoubleArray testDa = new ResizableDoubleArray(2, 2.0, 2.5,
                ResizableDoubleArray.ExpansionMode.ADDITIVE);
        Assert.assertEquals(2, testDa.getCapacity());
        testDa.addElement(1d); // x,0
        testDa.addElement(2d); // x,x
        testDa.addElement(3d); // x,x,x,0 -- expanded
        Assert.assertEquals(1d, testDa.getElement(0), 0);
        Assert.assertEquals(2d, testDa.getElement(1), 0);
        Assert.assertEquals(3d, testDa.getElement(2), 0);
        Assert.assertEquals(4, testDa.getCapacity());  // x,x,x,0
        Assert.assertEquals(3, testDa.getNumElements());
        testDa.addElementRolling(4d);
        Assert.assertEquals(2d, testDa.getElement(0), 0);
        Assert.assertEquals(3d, testDa.getElement(1), 0);
        Assert.assertEquals(4d, testDa.getElement(2), 0);
        Assert.assertEquals(4, testDa.getCapacity());  // 0,x,x,x
        Assert.assertEquals(3, testDa.getNumElements());
        testDa.addElementRolling(5d);   // 0,0,x,x,x,0 -- time to contract
        Assert.assertEquals(3d, testDa.getElement(0), 0);
        Assert.assertEquals(4d, testDa.getElement(1), 0);
        Assert.assertEquals(5d, testDa.getElement(2), 0);
        Assert.assertEquals(4, testDa.getCapacity());  // contracted -- x,x,x,0
        Assert.assertEquals(3, testDa.getNumElements());
        try {
            testDa.getElement(4);
            Assert.fail("Expecting ArrayIndexOutOfBoundsException");
        } catch (ArrayIndexOutOfBoundsException ex) {
            // expected
        }
        try {
            testDa.getElement(-1);
            Assert.fail("Expecting ArrayIndexOutOfBoundsException");
        } catch (ArrayIndexOutOfBoundsException ex) {
            // expected
        }
    }

    /** setNumElements may shrink or grow (zero-filling); negative is rejected. */
    @Test
    public void testSetNumberOfElements() {
        da.addElement( 1.0 );
        da.addElement( 1.0 );
        da.addElement( 1.0 );
        da.addElement( 1.0 );
        da.addElement( 1.0 );
        da.addElement( 1.0 );
        Assert.assertEquals( "Number of elements should equal 6", da.getNumElements(), 6);
        ((ResizableDoubleArray) da).setNumElements( 3 );
        Assert.assertEquals( "Number of elements should equal 3", da.getNumElements(), 3);
        try {
            ((ResizableDoubleArray) da).setNumElements( -3 );
            Assert.fail( "Setting number of elements to negative should've thrown an exception");
        } catch( IllegalArgumentException iae ) {
            // expected
        }
        ((ResizableDoubleArray) da).setNumElements(1024);
        Assert.assertEquals( "Number of elements should now be 1024", da.getNumElements(), 1024);
        Assert.assertEquals( "Element 453 should be a default double", da.getElement( 453 ), 0.0, Double.MIN_VALUE);
    }

    /** Random-length fill starting from a tiny initial capacity. */
    @Test
    public void testWithInitialCapacity() {
        ResizableDoubleArray eDA2 = new ResizableDoubleArray(2);
        Assert.assertEquals("Initial number of elements should be 0", 0, eDA2.getNumElements());
        final IntegerDistribution randomData = new UniformIntegerDistribution(100, 1000);
        final int iterations = randomData.sample();
        for( int i = 0; i < iterations; i++) {
            eDA2.addElement( i );
        }
        Assert.assertEquals("Number of elements should be equal to " + iterations, iterations, eDA2.getNumElements());
        eDA2.addElement( 2.0 );
        Assert.assertEquals("Number of elements should be equals to " + (iterations +1),
                iterations + 1 , eDA2.getNumElements() );
    }

    /** Same as above but with a non-default expansion factor. */
    @Test
    public void testWithInitialCapacityAndExpansionFactor() {
        ResizableDoubleArray eDA3 = new ResizableDoubleArray(3, 3.0, 3.5);
        Assert.assertEquals("Initial number of elements should be 0", 0, eDA3.getNumElements() );
        final IntegerDistribution randomData = new UniformIntegerDistribution(100, 3000);
        final int iterations = randomData.sample();
        for( int i = 0; i < iterations; i++) {
            eDA3.addElement( i );
        }
        Assert.assertEquals("Number of elements should be equal to " + iterations, iterations,eDA3.getNumElements());
        eDA3.addElement( 2.0 );
        Assert.assertEquals("Number of elements should be equals to " + (iterations +1),
                iterations +1, eDA3.getNumElements() );
        Assert.assertEquals("Expansion factor should equal 3.0", 3.0f, eDA3.getExpansionFactor(), Double.MIN_VALUE);
    }

    /** discardFrontElements / discardMostRecentElements, including bad args. */
    @Test
    public void testDiscard() {
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        Assert.assertEquals( "Number of elements should be 11", 11, da.getNumElements());
        ((ResizableDoubleArray)da).discardFrontElements(5);
        Assert.assertEquals( "Number of elements should be 6", 6, da.getNumElements());
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        Assert.assertEquals( "Number of elements should be 10", 10, da.getNumElements());
        ((ResizableDoubleArray)da).discardMostRecentElements(2);
        Assert.assertEquals( "Number of elements should be 8", 8, da.getNumElements());
        try {
            ((ResizableDoubleArray)da).discardFrontElements(-1);
            Assert.fail( "Trying to discard a negative number of element is not allowed");
        } catch( Exception e ){
            // expected
        }
        try {
            ((ResizableDoubleArray)da).discardMostRecentElements(-1);
            Assert.fail( "Trying to discard a negative number of element is not allowed");
        } catch( Exception e ){
            // expected
        }
        try {
            ((ResizableDoubleArray)da).discardFrontElements( 10000 );
            Assert.fail( "You can't discard more elements than the array contains");
        } catch( Exception e ){
            // expected
        }
        try {
            ((ResizableDoubleArray)da).discardMostRecentElements( 10000 );
            Assert.fail( "You can't discard more elements than the array contains");
        } catch( Exception e ){
            // expected
        }
    }

    /** substituteMostRecentElement replaces in place without changing the count. */
    @Test
    public void testSubstitute() {
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        da.addElement(2.0);
        Assert.assertEquals( "Number of elements should be 11", 11, da.getNumElements());
        ((ResizableDoubleArray)da).substituteMostRecentElement(24);
        Assert.assertEquals( "Number of elements should be 11", 11, da.getNumElements());
        try {
            ((ResizableDoubleArray)da).discardMostRecentElements(10);
        } catch( Exception e ){
            Assert.fail( "Trying to discard a negative number of element is not allowed");
        }
        ((ResizableDoubleArray)da).substituteMostRecentElement(24);
        Assert.assertEquals( "Number of elements should be 1", 1, da.getNumElements());
    }

    /** Deprecated mutators still enforce their invariants. */
    @Test
    public void testMutators() {
        ((ResizableDoubleArray)da).setContractionCriteria(10f);
        Assert.assertEquals(10f, ((ResizableDoubleArray)da).getContractionCriterion(), 0);
        ((ResizableDoubleArray)da).setExpansionFactor(8f);
        Assert.assertEquals(8f, ((ResizableDoubleArray)da).getExpansionFactor(), 0);
        try {
            ((ResizableDoubleArray)da).setExpansionFactor(11f);  // greater than contractionCriteria
            Assert.fail("Expecting IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        ((ResizableDoubleArray)da).setExpansionMode(
                ResizableDoubleArray.ADDITIVE_MODE);
        Assert.assertEquals(ResizableDoubleArray.ADDITIVE_MODE,
                ((ResizableDoubleArray)da).getExpansionMode());
        try {
            ((ResizableDoubleArray)da).setExpansionMode(-1);
            Assert.fail("Expecting IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    /** equals/hashCode contract across construction, mutation, and copying. */
    @Test
    public void testEqualsAndHashCode() throws Exception {
        // Wrong type
        ResizableDoubleArray first = new ResizableDoubleArray();
        Double other = new Double(2);
        Assert.assertFalse(first.equals(other));
        // Null
        other = null;
        Assert.assertFalse(first.equals(other));
        // Reflexive
        Assert.assertTrue(first.equals(first));
        // Argumentless constructor
        ResizableDoubleArray second = new ResizableDoubleArray();
        verifyEquality(first, second);
        // Equals iff same data, same properties
        ResizableDoubleArray third = new ResizableDoubleArray(3, 2.0, 2.0);
        verifyInequality(third, first);
        ResizableDoubleArray fourth = new ResizableDoubleArray(3, 2.0, 2.0);
        ResizableDoubleArray fifth = new ResizableDoubleArray(2, 2.0, 2.0);
        verifyEquality(third, fourth);
        verifyInequality(third, fifth);
        third.addElement(4.1);
        third.addElement(4.2);
        third.addElement(4.3);
        fourth.addElement(4.1);
        fourth.addElement(4.2);
        fourth.addElement(4.3);
        verifyEquality(third, fourth);
        // expand
        fourth.addElement(4.4);
        verifyInequality(third, fourth);
        third.addElement(4.4);
        verifyEquality(third, fourth);
        fourth.addElement(4.4);
        verifyInequality(third, fourth);
        third.addElement(4.4);
        verifyEquality(third, fourth);
        fourth.addElementRolling(4.5);
        third.addElementRolling(4.5);
        verifyEquality(third, fourth);
        // discard
        third.discardFrontElements(1);
        verifyInequality(third, fourth);
        fourth.discardFrontElements(1);
        verifyEquality(third, fourth);
        // discard recent
        third.discardMostRecentElements(2);
        fourth.discardMostRecentElements(2);
        verifyEquality(third, fourth);
        // wrong order
        third.addElement(18);
        fourth.addElement(17);
        third.addElement(17);
        fourth.addElement(18);
        verifyInequality(third, fourth);
        // copy
        ResizableDoubleArray.copy(fourth, fifth);
        verifyEquality(fourth, fifth);
        // Copy constructor
        verifyEquality(fourth, new ResizableDoubleArray(fourth));
        // Instance copy
        verifyEquality(fourth, fourth.copy());
    }

    /** getArrayRef exposes the live backing array (offset by getStartIndex). */
    @Test
    public void testGetArrayRef() {
        final ResizableDoubleArray a = new ResizableDoubleArray();
        // Modify "a" through the public API.
        final int index = 20;
        final double v1 = 1.2;
        a.setElement(index, v1);
        // Modify the internal storage through the protected API.
        final double v2 = v1 + 3.4;
        final double[] aInternalArray = a.getArrayRef();
        aInternalArray[a.getStartIndex() + index] = v2;
        Assert.assertEquals(v2, a.getElement(index), 0d);
    }

    /** compute(...) applies a visitor to the stored range; here a plain sum. */
    @Test
    public void testCompute() {
        final ResizableDoubleArray a = new ResizableDoubleArray();
        final int max = 20;
        for (int i = 1; i <= max; i++) {
            a.setElement(i, i);
        }
        final MathArrays.Function add = new MathArrays.Function() {
            public double evaluate(double[] a, int index, int num) {
                double sum = 0;
                final int max = index + num;
                for (int i = index; i < max; i++) {
                    sum += a[i];
                }
                return sum;
            }
            public double evaluate(double[] a) {
                return evaluate(a, 0, a.length);
            }
        };
        final double sum = a.compute(add);
        // Sum of 0..max equals max*(max+1)/2.
        Assert.assertEquals(0.5 * max * (max + 1), sum, 0);
    }

    // Asserts symmetric equality and matching hash codes.
    private void verifyEquality(ResizableDoubleArray a, ResizableDoubleArray b) {
        Assert.assertTrue(b.equals(a));
        Assert.assertTrue(a.equals(b));
        Assert.assertEquals(a.hashCode(), b.hashCode());
    }

    // Asserts symmetric inequality and differing hash codes.
    private void verifyInequality(ResizableDoubleArray a, ResizableDoubleArray b) {
        Assert.assertFalse(b.equals(a));
        Assert.assertFalse(a.equals(b));
        Assert.assertFalse(a.hashCode() == b.hashCode());
    }
}
package hex.naivebayes;

import hex.*;
import hex.schemas.ModelBuilderSchema;
import hex.schemas.NaiveBayesV3;
import hex.naivebayes.NaiveBayesModel.NaiveBayesOutput;
import hex.naivebayes.NaiveBayesModel.NaiveBayesParameters;
import water.*;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.ArrayUtils;
import water.util.Log;
import water.util.PrettyPrint;
import water.util.TwoDimTable;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Naive Bayes
 * This is an algorithm for computing the conditional a-posterior probabilities of a categorical
 * response from independent predictors using Bayes rule.
 * <a href = "http://en.wikipedia.org/wiki/Naive_Bayes_classifier">Naive Bayes on Wikipedia</a>
 * <a href = "http://cs229.stanford.edu/notes/cs229-notes2.pdf">Lecture Notes by Andrew Ng</a>
 * @author anqi_fu
 *
 */
public class NaiveBayes extends ModelBuilder<NaiveBayesModel,NaiveBayesParameters,NaiveBayesOutput> {
    @Override public ModelBuilderSchema schema() { return new NaiveBayesV3(); }

    public boolean isSupervised(){return true;}

    // Kicks off the background driver job that builds the model.
    @Override protected Job<NaiveBayesModel> trainModelImpl(long work, boolean restartTimer) {
        return start(new NaiveBayesDriver(), work, restartTimer);
    }

    // Matches the six update(1, ...) progress ticks in the driver below.
    @Override public long progressUnits() { return 6; }

    @Override public ModelCategory[] can_build() {
        return new ModelCategory[]{ ModelCategory.Unknown };
    }

    /**
     * Estimates the size of the conditional-probability (pcond) matrix —
     * (#predictors x #response levels x #levels-per-predictor) doubles —
     * and aborts the build if it exceeds the driver node's free memory.
     */
    @Override protected void checkMemoryFootPrint() {
        // compute memory usage for pcond matrix
        long mem_usage = (_train.numCols() - 1) * _train.lastVec().cardinality();
        String[][] domains = _train.domains();
        long count = 0;
        for (int i = 0; i < _train.numCols() - 1; i++) {
            // Numeric columns (null domain) store 2 values (mean and std dev).
            count += domains[i] == null ? 2 : domains[i].length;
        }
        mem_usage *= count;
        mem_usage *= 8; //doubles
        long max_mem = H2O.SELF._heartbeat.get_free_mem();
        if (mem_usage > max_mem) {
            String msg = "Conditional probabilities won't fit in the driver node's memory ("
                    + PrettyPrint.bytes(mem_usage) + " > " + PrettyPrint.bytes(max_mem)
                    + ") - try reducing the number of columns, the number of response classes or the number of categorical factors of the predictors.";
            error("_train", msg);
            cancel(msg);
        }
    }

    // Called from an http request
    public NaiveBayes(NaiveBayesModel.NaiveBayesParameters parms) {
        super("NaiveBayes", parms);
        init(false);
    }

    /**
     * Validates parameters; when {@code expensive} is true and no errors were
     * recorded, also runs the memory footprint check.
     */
    @Override public void init(boolean expensive) {
        super.init(expensive);
        if (_response != null) {
            if (!_response.isCategorical()) error("_response", "Response must be a categorical column");
            else if (_response.isConst()) error("_response", "Response must have at least two unique categorical levels");
        }
        if (_parms._laplace < 0) error("_laplace", "Laplace smoothing must be an integer >= 0");
        if (_parms._min_sdev < 1e-10) error("_min_sdev", "Min. standard deviation must be at least 1e-10");
        if (_parms._eps_sdev < 0) error("_eps_sdev", "Threshold for standard deviation must be positive");
        if (_parms._min_prob < 1e-10) error("_min_prob", "Min. probability must be at least 1e-10");
        if (_parms._eps_prob < 0) error("_eps_prob", "Threshold for probability must be positive");
        hide("_balance_classes", "Balance classes is not applicable to NaiveBayes.");
        hide("_class_sampling_factors", "Class sampling factors is not applicable to NaiveBayes.");
        hide("_max_after_balance_size", "Max after balance size is not applicable to NaiveBayes.");
        if (expensive && error_count() == 0) checkMemoryFootPrint();
    }

    // True for an integer column whose values span exactly {min, min+1}.
    // NOTE(review): unused within this chunk — confirm callers elsewhere.
    private static boolean couldBeBool(Vec v) { return v != null && v.isInt() && v.min()+1==v.max(); }

    /** Background job that runs the MR task and fills in the model output. */
    class NaiveBayesDriver extends H2O.H2OCountedCompleter<NaiveBayesDriver> {
        protected NaiveBayesDriver() { super(true); } // bump driver priority

        /**
         * Turns the counts/sums gathered by {@code tsk} into a-priori and
         * conditional probability tables, stores them on the model, and scores
         * training/validation frames. Returns false if the job was cancelled
         * (checked via isRunning(_key) between phases).
         */
        public boolean computeStatsFillModel(NaiveBayesModel model, DataInfo dinfo, NBTask tsk) {
            model._output._levels = _response.domain();
            model._output._rescnt = tsk._rescnt;
            model._output._ncats = dinfo._cats;
            if(!isRunning(_key)) return false;
            update(1, "Initializing arrays for model statistics");
            // String[][] domains = dinfo._adaptedFrame.domains();
            String[][] domains = model._output._domains;
            double[] apriori = new double[tsk._nrescat];
            double[][][] pcond = new double[tsk._npreds][][];
            for(int i = 0; i < pcond.length; i++) {
                // Numeric predictor (null domain) holds 2 slots: mean and std dev.
                int ncnt = domains[i] == null ? 2 : domains[i].length;
                pcond[i] = new double[tsk._nrescat][ncnt];
            }
            if(!isRunning(_key)) return false;
            update(1, "Computing probabilities for categorical cols");
            // A-priori probability of response y
            for(int i = 0; i < apriori.length; i++)
                apriori[i] = ((double)tsk._rescnt[i] + _parms._laplace)/(tsk._nobs + tsk._nrescat * _parms._laplace);
                // apriori[i] = tsk._rescnt[i]/tsk._nobs;   // Note: R doesn't apply laplace smoothing to priors, even though this is textbook definition
            // Probability of categorical predictor x_j conditional on response y
            for(int col = 0; col < dinfo._cats; col++) {
                assert pcond[col].length == tsk._nrescat;
                for(int i = 0; i < pcond[col].length; i++) {
                    for(int j = 0; j < pcond[col][i].length; j++)
                        pcond[col][i][j] = ((double)tsk._jntcnt[col][i][j] + _parms._laplace)/((double)tsk._rescnt[i] + domains[col].length * _parms._laplace);
                }
            }
            if(!isRunning(_key)) return false;
            update(1, "Computing mean and standard deviation for numeric cols");
            // Mean and standard deviation of numeric predictor x_j for every level of response y
            for(int col = 0; col < dinfo._nums; col++) {
                for(int i = 0; i < pcond[0].length; i++) {
                    int cidx = dinfo._cats + col;
                    double num = tsk._rescnt[i];
                    double pmean = tsk._jntsum[col][i][0]/num;
                    pcond[cidx][i][0] = pmean;
                    // double pvar = tsk._jntsum[col][i][1]/num - pmean * pmean;
                    // Bessel-corrected (n-1) sample variance.
                    double pvar = tsk._jntsum[col][i][1]/(num - 1) - pmean * pmean * num/(num - 1);
                    pcond[cidx][i][1] = Math.sqrt(pvar);
                }
            }
            model._output._apriori_raw = apriori;
            model._output._pcond_raw = pcond;
            // Create table of conditional probabilities for every predictor
            model._output._pcond = new TwoDimTable[pcond.length];
            String[] rowNames = _response.domain();
            for(int col = 0; col < dinfo._cats; col++) {
                String[] colNames = _train.vec(col).domain();
                String[] colTypes = new String[colNames.length];
                String[] colFormats = new String[colNames.length];
                Arrays.fill(colTypes, "double");
                Arrays.fill(colFormats, "%5f");
                model._output._pcond[col] = new TwoDimTable(_train.name(col), null, rowNames, colNames,
                        colTypes, colFormats, "Y_by_" + _train.name(col),
                        new String[rowNames.length][], pcond[col]);
            }
            for(int col = 0; col < dinfo._nums; col++) {
                int cidx = dinfo._cats + col;
                model._output._pcond[cidx] = new TwoDimTable(_train.name(cidx), null, rowNames,
                        new String[] {"Mean", "Std_Dev"}, new String[] {"double", "double"},
                        new String[] {"%5f", "%5f"}, "Y_by_" + _train.name(cidx),
                        new String[rowNames.length][], pcond[cidx]);
            }
            // Create table of a-priori probabilities for the response
            String[] colTypes = new String[_response.cardinality()];
            String[] colFormats = new String[_response.cardinality()];
            Arrays.fill(colTypes, "double");
            Arrays.fill(colFormats, "%5f");
            model._output._apriori = new TwoDimTable("A Priori Response Probabilities", null,
                    new String[1], _response.domain(), colTypes, colFormats, "",
                    new String[1][], new double[][] {apriori});
            model._output._model_summary = createModelSummaryTable(model._output);
            if(!isRunning(_key)) return false;
            update(1, "Scoring and computing metrics on training data");
            if (_parms._compute_metrics) {
                model.score(_parms.train()).delete();           // This scores on the training data and appends a ModelMetrics
                ModelMetrics mm = ModelMetrics.getFromDKV(model,_parms.train());
                model._output._training_metrics = mm;
            }
            // At the end: validation scoring (no need to gather scoring history)
            if(!isRunning(_key)) return false;
            update(1, "Scoring and computing metrics on validation data");
            if (_valid != null) {
                model.score(_parms.valid()).delete();           //this appends a ModelMetrics on the validation set
                model._output._validation_metrics = ModelMetrics.getFromDKV(model,_parms.valid());
            }
            return true;
        }

        /**
         * Driver entry point: locks frames, runs the distributed NBTask, fills
         * the model, and guarantees unlock/cleanup in the finally block.
         */
        @Override protected void compute2() {
            NaiveBayesModel model = null;
            DataInfo dinfo = null;
            try {
                Scope.enter();
                init(true);                              // Initialize parameters
                _parms.read_lock_frames(NaiveBayes.this); // Fetch & read-lock input frames
                if (error_count() > 0) throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(NaiveBayes.this);
                dinfo = new DataInfo(Key.make(), _train, _valid, 1, false,
                        DataInfo.TransformType.NONE, DataInfo.TransformType.NONE,
                        true, false, false, false, false, false);
                // The model to be built
                model = new NaiveBayesModel(dest(), _parms, new NaiveBayesOutput(NaiveBayes.this));
                model.delete_and_lock(_key);
                _train.read_lock(_key);
                update(1, "Begin distributed Naive Bayes calculation");
                NBTask tsk = new NBTask(_key, dinfo, _response.cardinality()).doAll(dinfo._adaptedFrame);
                if (computeStatsFillModel(model, dinfo, tsk)) model.update(_key);
                done();
            } catch (Throwable t) {
                Job thisJob = DKV.getGet(_key);
                if (thisJob._state == JobState.CANCELLED) {
                    Log.info("Job cancelled by user.");
                } else {
                    t.printStackTrace();
                    failed(t);
                    throw t;
                }
            } finally {
                updateModelOutput();
                _train.unlock(_key);
                if (model != null) model.unlock(_key);
                if (dinfo != null) dinfo.remove();
                _parms.read_unlock_frames(NaiveBayes.this);
                Scope.exit();
            }
            tryComplete();
        }
    }

    /**
     * Builds the one-row "Model Summary" table: number of response levels and
     * the min/max a-priori probabilities.
     * NOTE(review): this chunk of the file is truncated inside this method.
     */
    private TwoDimTable createModelSummaryTable(NaiveBayesOutput output) {
        List<String> colHeaders = new ArrayList<>();
        List<String> colTypes = new ArrayList<>();
        List<String> colFormat = new ArrayList<>();
        colHeaders.add("Number of Response Levels"); colTypes.add("long"); colFormat.add("%d");
        colHeaders.add("Min Apriori Probability"); colTypes.add("double"); colFormat.add("%.5f");
        colHeaders.add("Max Apriori Probability"); colTypes.add("double"); colFormat.add("%.5f");
        // Single pass to find the extreme prior probabilities.
        double apriori_min = output._apriori_raw[0];
        double apriori_max = output._apriori_raw[0];
        for(int i = 1; i < output._apriori_raw.length; i++) {
            if(output._apriori_raw[i] < apriori_min) apriori_min = output._apriori_raw[i];
            else if(output._apriori_raw[i] > apriori_max) apriori_max = output._apriori_raw[i];
        }
        final int rows = 1;
        TwoDimTable table = new TwoDimTable(
                "Model Summary", null,
                new String[rows],
                colHeaders.toArray(new String[0]),
                colTypes.toArray(new String[0]),
                colFormat.toArray(new String[0]),
                "");
        int
row = 0; int col = 0; table.set(row, col++, output._apriori_raw.length); table.set(row, col++, apriori_min); table.set(row, col++, apriori_max); return table; } // Note: NA handling differs from R for efficiency purposes // R's method: For each predictor x_j, skip counting that row for p(x_j|y) calculation if x_j = NA. // If response y = NA, skip counting row entirely in all calculations // H2O's method: Just skip all rows where any x_j = NA or y = NA. Should be more memory-efficient, but results incomparable with R. private static class NBTask extends MRTask<NBTask> { final protected Key _jobKey; final DataInfo _dinfo; final String[][] _domains; // Domains of the training frame final int _nrescat; // Number of levels for the response y final int _npreds; // Number of predictors in the training frame public int _nobs; // Number of rows counted in calculation public int[/*nrescat*/] _rescnt; // Count of each level in the response public int[/*npreds*/][/*nrescat*/][] _jntcnt; // For each categorical predictor, joint count of response and predictor levels public double[/*npreds*/][/*nrescat*/][] _jntsum; // For each numeric predictor, sum and squared sum of entries for every response level public NBTask(Key jobKey, DataInfo dinfo, int nres) { _jobKey = jobKey; _dinfo = dinfo; _nrescat = nres; _domains = dinfo._adaptedFrame.domains(); _npreds = dinfo._adaptedFrame.numCols()-1; assert _npreds == dinfo._nums + dinfo._cats; assert _nrescat == _domains[_npreds].length; // Response in last vec of adapted frame } @Override public void map(Chunk[] chks) { if(_jobKey != null && !isRunning(_jobKey)) { throw new JobCancelledException(); } _nobs = 0; _rescnt = new int[_nrescat]; if(_dinfo._cats > 0) { _jntcnt = new int[_dinfo._cats][][]; for (int i = 0; i < _dinfo._cats; i++) { _jntcnt[i] = new int[_nrescat][_domains[i].length]; } } if(_dinfo._nums > 0) { _jntsum = new double[_dinfo._nums][][]; for (int i = 0; i < _dinfo._nums; i++) { _jntsum[i] = new double[_nrescat][2]; } } 
Chunk res = chks[_npreds]; // Response at the end OUTER: for(int row = 0; row < chks[0]._len; row++) { // Skip row if any entries in it are NA for(int col = 0; col < chks.length; col++) { if(Double.isNaN(chks[col].atd(row))) continue OUTER; } // Record joint counts of categorical predictors and response int rlevel = (int)res.atd(row); for(int col = 0; col < _dinfo._cats; col++) { int plevel = (int)chks[col].atd(row); _jntcnt[col][rlevel][plevel]++; } // Record sum for each pair of numerical predictors and response for(int col = 0; col < _dinfo._nums; col++) { int cidx = _dinfo._cats + col; double x = chks[cidx].atd(row); _jntsum[col][rlevel][0] += x; _jntsum[col][rlevel][1] += x*x; } _rescnt[rlevel]++; _nobs++; } } @Override public void reduce(NBTask nt) { _nobs += nt._nobs; ArrayUtils.add(_rescnt, nt._rescnt); if(null != _jntcnt) { for (int col = 0; col < _jntcnt.length; col++) ArrayUtils.add(_jntcnt[col], nt._jntcnt[col]); } if(null != _jntsum) { for (int col = 0; col < _jntsum.length; col++) ArrayUtils.add(_jntsum[col], nt._jntsum[col]); } } } }
/* * Copyright (c) 2008-2016 Haulmont. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.haulmont.cuba.core.global; import com.haulmont.bali.util.Preconditions; import com.haulmont.bali.util.StringHelper; import com.haulmont.chile.core.model.MetaClass; import com.haulmont.cuba.core.entity.Entity; import com.haulmont.cuba.core.global.queryconditions.Condition; import javax.annotation.Nullable; import javax.persistence.TemporalType; import java.io.Serializable; import java.util.*; import java.util.stream.Collectors; /** * Class that defines parameters for loading entities from the database via {@link DataManager}. * <p>Typical usage: * <pre> LoadContext&lt;User&gt; context = LoadContext.create(User.class).setQuery( LoadContext.createQuery("select u from sec$User u where u.login like :login") .setParameter("login", "a%") .setMaxResults(10)) .setView("user.browse"); List&lt;User&gt; users = dataManager.loadList(context); * </pre> * <p> * Instead of using this class directly, consider fluent interface with the entry point in {@link DataManager#load(Class)}. 
*/ public class LoadContext<E extends Entity> implements DataLoadContext, Serializable { private static final long serialVersionUID = -8808320502197308698L; protected String metaClass; protected Query query; protected View view; protected Object id; protected List<Object> idList = new ArrayList<>(0); protected boolean softDeletion = true; protected List<Query> prevQueries = new ArrayList<>(0); protected int queryKey; protected boolean loadDynamicAttributes; protected boolean loadPartialEntities = true; protected boolean authorizationRequired; protected boolean joinTransaction; protected Map<String, Object> hints; // lazy initialized map /** * Factory method to create a LoadContext instance. * * @param entityClass class of the loaded entities */ public static <E extends Entity> LoadContext<E> create(Class<E> entityClass) { return new LoadContext<>(entityClass); } /** * Factory method to create a LoadContext.Query instance for passing into {@link #setQuery(Query)} method. * * @param queryString JPQL query string. Only named parameters are supported. 
*/ public static LoadContext.Query createQuery(String queryString) { return new LoadContext.Query(queryString); } /** * @param metaClass metaclass of the loaded entities */ public LoadContext(MetaClass metaClass) { Preconditions.checkNotNullArgument(metaClass, "metaClass is null"); this.metaClass = AppBeans.get(Metadata.class).getExtendedEntities().getEffectiveMetaClass(metaClass).getName(); } /** * @param javaClass class of the loaded entities */ public LoadContext(Class<E> javaClass) { Preconditions.checkNotNullArgument(javaClass, "javaClass is null"); this.metaClass = AppBeans.get(Metadata.class).getExtendedEntities().getEffectiveMetaClass(javaClass).getName(); } protected LoadContext() { } /** * @return name of metaclass of the loaded entities */ public String getMetaClass() { return metaClass; } /** * @return name of metaclass of the loaded entities */ public String getEntityMetaClass() { return metaClass; } /** * @return query definition */ public Query getQuery() { return query; } /** * @param query query definition * @return this instance for chaining */ public LoadContext<E> setQuery(@Nullable Query query) { this.query = query; return this; } /** * @param queryString JPQL query string. Only named parameters are supported. 
* @return query definition object */ @Override public Query setQueryString(String queryString) { final Query query = new Query(queryString); setQuery(query); return query; } /** * @return view that is used for loading entities */ public View getView() { return view; } /** * @param view view that is used for loading entities * @return this instance for chaining */ public LoadContext<E> setView(View view) { this.view = view; return this; } /** * @param viewName view that is used for loading entities * @return this instance for chaining */ public LoadContext<E> setView(String viewName) { Metadata metadata = AppBeans.get(Metadata.NAME); this.view = metadata.getViewRepository().getView(metadata.getSession().getClassNN(metaClass), viewName); return this; } /** * @return id of an entity to be loaded */ public Object getId() { return id; } /** * @param id id of an entity to be loaded * @return this instance for chaining */ public LoadContext<E> setId(Object id) { this.id = id instanceof Entity ? ((Entity) id).getId() : id; // for compatibility with legacy code relying on implicit conversions return this; } /** * @return identifiers of entities to be loaded */ public List<?> getIds() { return idList; } /** * * @param ids identifiers of entities to be loaded * @return this instance for chaining */ public LoadContext<E> setIds(Collection<?> ids) { this.idList.clear(); this.idList.addAll(ids); return this; } /** * @return whether to use soft deletion when loading entities */ public boolean isSoftDeletion() { return softDeletion; } /** * @param softDeletion whether to use soft deletion when loading entities */ public LoadContext<E> setSoftDeletion(boolean softDeletion) { this.softDeletion = softDeletion; return this; } /** * Allows to execute query on a previous query result. 
* @return editable list of previous queries */ public List<Query> getPrevQueries() { return prevQueries; } /** * @return key of the current stack of sequential queries, which is unique for the current user session */ public int getQueryKey() { return queryKey; } /** * @param queryKey key of the current stack of sequential queries, which is unique for the current user session */ public LoadContext<E> setQueryKey(int queryKey) { this.queryKey = queryKey; return this; } /** * @return custom hints which are used by the query */ public Map<String, Object> getHints() { return hints == null ? Collections.emptyMap() : Collections.unmodifiableMap(hints); } /** * Sets custom hint that should be used by the query. * @see com.haulmont.cuba.core.global.QueryHints */ public LoadContext<E> setHint(String hintName, Object value) { if (hints == null) { hints = new HashMap<>(); } hints.put(hintName, value); return this; } /** * @return whether to load dynamic attributes */ public boolean isLoadDynamicAttributes() { return loadDynamicAttributes; } /** * @param loadDynamicAttributes whether to load dynamic attributes */ public LoadContext<E> setLoadDynamicAttributes(boolean loadDynamicAttributes) { this.loadDynamicAttributes = loadDynamicAttributes; return this; } /** * @return whether to load partial entities. When true (which is by default), some local attributes can be unfetched * according to {@link #setView(View)}. * <p>The state of {@link View#loadPartialEntities()} is ignored when the view is passed to {@link DataManager}. */ public boolean isLoadPartialEntities() { return loadPartialEntities; } /** * Whether to load partial entities. When true (which is by default), some local attributes can be unfetched * according to {@link #setView(View)}. * <p>The state of {@link View#loadPartialEntities()} is ignored when the view is passed to {@link DataManager}. 
*/ public LoadContext<E> setLoadPartialEntities(boolean loadPartialEntities) { this.loadPartialEntities = loadPartialEntities; return this; } public boolean isAuthorizationRequired() { return authorizationRequired; } public LoadContext<E> setAuthorizationRequired(boolean authorizationRequired) { this.authorizationRequired = authorizationRequired; return this; } public boolean isJoinTransaction() { return joinTransaction; } public LoadContext<E> setJoinTransaction(boolean joinTransaction) { this.joinTransaction = joinTransaction; return this; } /** * Creates a copy of this LoadContext instance. */ public LoadContext<?> copy() { LoadContext<?> ctx; try { ctx = getClass().newInstance(); } catch (InstantiationException | IllegalAccessException e) { throw new RuntimeException("Error copying LoadContext", e); } ctx.metaClass = metaClass; ctx.setQuery(query != null ? query.copy() : null); ctx.view = view; ctx.id = id; ctx.idList.addAll(idList); ctx.softDeletion = softDeletion; ctx.prevQueries.addAll(prevQueries.stream().map(Query::copy).collect(Collectors.toList())); ctx.queryKey = queryKey; if (hints != null) { ctx.getHints().putAll(hints); } ctx.loadDynamicAttributes = loadDynamicAttributes; ctx.authorizationRequired = authorizationRequired; ctx.joinTransaction = joinTransaction; return ctx; } @Override public String toString() { return String.format( "LoadContext{metaClass=%s, query=%s, view=%s, id=%s, softDeletion=%s, partialEntities=%s, dynamicAttributes=%s}", metaClass, query, view, id, softDeletion, loadPartialEntities, loadDynamicAttributes ); } /** * Class that defines a query to be executed for data loading. 
*/ public static class Query implements DataLoadContextQuery, Serializable { private static final long serialVersionUID = 3819951144050635838L; private Map<String, Object> parameters = new HashMap<>(); private String[] noConversionParams; private String queryString; private int firstResult; private int maxResults; private boolean cacheable; private Condition condition; private Sort sort; /** * @param queryString JPQL query string. Only named parameters are supported. */ public Query(String queryString) { this.queryString = queryString; } /** * @return JPQL query string */ public String getQueryString() { return queryString; } /** * @param queryString JPQL query string. Only named parameters are supported. */ public Query setQueryString(String queryString) { this.queryString = queryString; return this; } /** * Set value for a query parameter. * @param name parameter name * @param value parameter value * @return this query instance for chaining */ public Query setParameter(String name, Object value) { parameters.put(name, value); return this; } /** * Set value for a query parameter. * @deprecated implicit conversions are deprecated, do not use this feature * @param name parameter name * @param value parameter value * @param implicitConversions whether to do parameter value conversions, e.g. convert an entity to its ID * @return this query instance for chaining */ @Deprecated public Query setParameter(String name, Object value, boolean implicitConversions) { parameters.put(name, value); if (!implicitConversions) { // this is a rare case, so let's save some memory by using an array instead of a list if (noConversionParams == null) noConversionParams = new String[0]; noConversionParams = Arrays.copyOfRange(noConversionParams, 0, noConversionParams.length + 1); noConversionParams[noConversionParams.length - 1] = name; } return this; } /** * Set value for a parameter of java.util.Date type. 
* @param name parameter name * @param value date value * @param temporalType temporal type * @return this query instance for chaining */ public Query setParameter(String name, Date value, TemporalType temporalType) { parameters.put(name, new TemporalValue(value, temporalType)); return this; } /** * @return editable map of the query parameters */ public Map<String, Object> getParameters() { return parameters; } /** * @param parameters map of the query parameters */ public Query setParameters(Map<String, Object> parameters) { this.parameters.putAll(parameters); return this; } /** * @param firstResult results offset * @return this query instance for chaining */ public Query setFirstResult(int firstResult) { this.firstResult = firstResult; return this; } /** * @param maxResults results limit * @return this query instance for chaining */ public Query setMaxResults(int maxResults) { this.maxResults = maxResults; return this; } /** * @return root query condition */ public Condition getCondition() { return condition; } /** * @param condition root query condition * @return this query instance for chaining */ public Query setCondition(Condition condition) { this.condition = condition; return this; } /** * @return query sort */ public Sort getSort() { return sort; } /** * @param sort query sort * @return this query instance for chaining */ public Query setSort(Sort sort) { this.sort = sort; return this; } /** * Indicates that the query results should be cached. * @return the same query instance */ public Query setCacheable(boolean cacheable) { this.cacheable = cacheable; return this; } /** * @return results offset */ public int getFirstResult() { return firstResult; } /** * @return results limit */ public int getMaxResults() { return maxResults; } public boolean isCacheable() { return cacheable; } @Nullable public String[] getNoConversionParams() { return noConversionParams; } /** * Creates a copy of this Query instance. 
*/ public Query copy() { Query query = new Query(queryString); query.parameters.putAll(parameters); query.firstResult = firstResult; query.maxResults = maxResults; query.cacheable = cacheable; query.condition = condition == null ? null : condition.copy(); query.sort = sort; return query; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Query query = (Query) o; if (firstResult != query.firstResult) return false; if (maxResults != query.maxResults) return false; if (!parameters.equals(query.parameters)) return false; return queryString.equals(query.queryString); } @Override public int hashCode() { int result = parameters.hashCode(); result = 31 * result + queryString.hashCode(); result = 31 * result + firstResult; result = 31 * result + maxResults; return result; } @Override public String toString() { String stringResult = "Query{" + "queryString='" + queryString + '\'' + ", condition=" + condition + ", sort=" + sort + ", firstResult=" + firstResult + ", maxResults=" + maxResults + "}"; return StringHelper.removeExtraSpaces(stringResult.replace('\n', ' ')); } } }
/* * Copyright 2010 The Kuali Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.module.endow.batch.service.impl; import java.sql.Date; import java.util.List; import org.kuali.kfs.module.endow.batch.service.RollFrequencyDatesService; import org.kuali.kfs.module.endow.businessobject.AutomatedCashInvestmentModel; import org.kuali.kfs.module.endow.businessobject.CashSweepModel; import org.kuali.kfs.module.endow.businessobject.EndowmentRecurringCashTransfer; import org.kuali.kfs.module.endow.businessobject.FeeMethod; import org.kuali.kfs.module.endow.businessobject.Security; import org.kuali.kfs.module.endow.businessobject.Tickler; import org.kuali.kfs.module.endow.dataaccess.AutomatedCashInvestmentModelDao; import org.kuali.kfs.module.endow.dataaccess.CashSweepModelDao; import org.kuali.kfs.module.endow.dataaccess.FeeMethodDao; import org.kuali.kfs.module.endow.dataaccess.RecurringCashTransferDao; import org.kuali.kfs.module.endow.dataaccess.SecurityDao; import org.kuali.kfs.module.endow.dataaccess.TicklerDao; import org.kuali.kfs.module.endow.document.service.FrequencyDatesService; import org.kuali.kfs.module.endow.document.service.KEMService; import org.kuali.kfs.sys.service.ReportWriterService; import org.kuali.rice.krad.bo.PersistableBusinessObject; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.util.ObjectUtils; import org.springframework.transaction.annotation.Transactional; /** * This class 
implements the RollFrequencyDatesService batch job. */ @Transactional public class RollFrequencyDatesServiceImpl implements RollFrequencyDatesService { protected static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(RollFrequencyDatesServiceImpl.class); protected BusinessObjectService businessObjectService; protected KEMService kemService; protected FrequencyDatesService frequencyDatesService; protected SecurityDao securityDao; protected FeeMethodDao feeMethodDao; protected TicklerDao ticklerDao; protected RecurringCashTransferDao recurringCashTransferDao; protected AutomatedCashInvestmentModelDao automatedCashInvestmentModelDao; protected CashSweepModelDao cashSweepModelDao; protected ReportWriterService rollFrequencyDatesTotalReportWriterService; protected ReportWriterService rollFrequencyDatesExceptionReportWriterService; /** * Updates some date fields based on the frequency for the activity * * @return true if the fields are updated successfully; false otherwise */ public boolean updateFrequencyDate() { LOG.info("Begin the batch Roll Frequncy Dates ..."); // update Security Income Next Pay Dates updateSecurityIncomeNextPayDates(); // update Tickler Next Due Dates updateTicklerNextDueDates(); // update Fee Method Next Process Dates updateFeeMethodProcessDates(); // update Recurring Cash Transfer Next Process Dates updateRecurringCashTransferProcessDates(); // update Cash Sweep Model Next Due Dates updateCashSweepModelNextDueDates(); // update Cash Investment Model Next Due Dates updateAutomatedCashInvestmentModelNextDueDates(); LOG.info("The batch Roll Frequncy Dates was finished."); return true; } /** * This method updates the income next pay dates in Security */ protected boolean updateSecurityIncomeNextPayDates() { boolean success = true; int counter = 0; // get all the active security records whose next income pay date is equal to the current date List<Security> securityRecords = 
securityDao.getSecuritiesWithNextPayDateEqualToCurrentDate(kemService.getCurrentDate()); if (securityRecords != null) { for (Security security : securityRecords) { Date incomeNextPayDate = security.getIncomeNextPayDate(); // if maturity date is equals to income next pay date, do nothing Date maturityDate = security.getMaturityDate(); if (ObjectUtils.isNotNull(maturityDate) && ObjectUtils.isNotNull(incomeNextPayDate)) { if (maturityDate.compareTo(incomeNextPayDate) == 0) { continue; } } // replace income next date // first, with the next date calculated based on the frequency code // if it is invalid, with the dividend pay date String frequencyCode = security.getIncomePayFrequency(); Date nextDate = frequencyDatesService.calculateNextDueDate(frequencyCode, kemService.getCurrentDate()); if (nextDate == null) { nextDate = security.getDividendPayDate(); if (ObjectUtils.isNull(nextDate) || (ObjectUtils.isNotNull(incomeNextPayDate) && nextDate.compareTo(incomeNextPayDate) == 0)) { // we don't need to update income next pay date continue; } } // update income next pay date security.setIncomeNextPayDate(nextDate); if (updateBusinessObject(security)) { counter++; generateTotalReport("END_SEC_T", counter); } else { LOG.error("Failed to update Security " + security.getId()); generateExceptionReport("END_SEC_T", security.getId()); success = false; } } } LOG.info("Total Security Income Next Pay Dates updated in END_SEC_T: " + counter); return success; } /** * This method updates the next due dates in Tickler */ protected boolean updateTicklerNextDueDates() { boolean success = true; int counter = 0; List<Tickler> ticklerRecords = ticklerDao.getTicklerWithNextPayDateEqualToCurrentDate(kemService.getCurrentDate()); if (ticklerRecords != null) { for (Tickler tickler : ticklerRecords) { String frequencyCode = tickler.getFrequencyCode(); Date nextDate = frequencyDatesService.calculateNextDueDate(frequencyCode, kemService.getCurrentDate()); if (nextDate != null) { 
tickler.setNextDueDate(nextDate); if (updateBusinessObject(tickler)) { counter++; generateTotalReport("END_TKLR_T", counter); } else { LOG.error("Failed to update Tickler " + tickler.getNumber()); generateExceptionReport("END_TKLR_T", tickler.getNumber()); success = false; } } } } LOG.info("Total Tickler Next Due Dates updated in END_TKLR_T: " + counter); return success; } /** * This method updates the next process dates in FeeMethod */ protected boolean updateFeeMethodProcessDates() { boolean success = true; int counter = 0; List<FeeMethod> feeMethodRecords = feeMethodDao.getFeeMethodWithNextPayDateEqualToCurrentDate(kemService.getCurrentDate()); if (feeMethodRecords != null) { for (FeeMethod feeMethod : feeMethodRecords) { String frequencyCode = feeMethod.getFeeFrequencyCode(); Date nextDate = frequencyDatesService.calculateNextDueDate(frequencyCode, kemService.getCurrentDate()); if (nextDate != null) { feeMethod.setFeeLastProcessDate(feeMethod.getFeeNextProcessDate()); feeMethod.setFeeNextProcessDate(nextDate); if (updateBusinessObject(feeMethod)) { counter++; generateTotalReport("END_FEE_MTHD_T", counter); } else { LOG.error("Failed to update FeeMethod " + feeMethod.getCode()); generateExceptionReport("END_FEE_MTHD_T", feeMethod.getCode()); success = false; } } } } LOG.info("Total Fee Next Process Dates and Fee Last Process Dates updated in END_FEE_MTHD_T: " + counter); return success; } /** * This method updates the next process dates in EndowmentRecurringCashTransfer */ protected boolean updateRecurringCashTransferProcessDates() { boolean success = true; int counter = 0; List<EndowmentRecurringCashTransfer> recurringCashTransferRecords = recurringCashTransferDao.getRecurringCashTransferWithNextPayDateEqualToCurrentDate(kemService.getCurrentDate()); if (recurringCashTransferRecords != null) { for (EndowmentRecurringCashTransfer recurringCashTransfer : recurringCashTransferRecords) { String frequencyCode = recurringCashTransfer.getFrequencyCode(); Date nextDate 
= frequencyDatesService.calculateNextDueDate(frequencyCode, kemService.getCurrentDate()); if (nextDate != null) { recurringCashTransfer.setLastProcessDate(recurringCashTransfer.getNextProcessDate()); recurringCashTransfer.setNextProcessDate(nextDate); if (updateBusinessObject(recurringCashTransfer)) { counter++; generateTotalReport("END_REC_CSH_XFR_T", counter); } else { LOG.error("Failed to update EndowmentRecurringCashTransfer " + recurringCashTransfer.getTransferNumber()); generateExceptionReport("END_REC_CSH_XFR_T", recurringCashTransfer.getTransferNumber()); success = false; } } } } LOG.info("Total Next Process Dates and Last Process Dates updated in END_REC_CSH_XFR_T: " + counter); return success; } protected boolean updateCashSweepModelNextDueDates() { boolean success = true; int counter = 0; List<CashSweepModel> csmRecords = cashSweepModelDao.getCashSweepModelWithNextPayDateEqualToCurrentDate(kemService.getCurrentDate()); if (csmRecords != null) { for (CashSweepModel csm : csmRecords) { String frequencyCode = csm.getCashSweepFrequencyCode(); Date nextDate = frequencyDatesService.calculateNextDueDate(frequencyCode, kemService.getCurrentDate()); if (nextDate != null) { csm.setCashSweepNextDueDate(nextDate); if (updateBusinessObject(csm)) { counter++; generateTotalReport("END_CSH_SWEEP_MDL_T", counter); } else { LOG.error("Failed to update FeeMethod " + csm.getCashSweepModelID()); generateExceptionReport("END_CSH_SWEEP_MDL_T", csm.getCashSweepModelID().toString()); success = false; } } } } LOG.info("Total Cash Sweep Model Next Due Dates updated in END_CSH_SWEEP_MDL_T: " + counter); return success; } protected boolean updateAutomatedCashInvestmentModelNextDueDates() { boolean success = true; int counter = 0; List<AutomatedCashInvestmentModel> aciRecords = automatedCashInvestmentModelDao.getAutomatedCashInvestmentModelWithNextPayDateEqualToCurrentDate(kemService.getCurrentDate()); if (aciRecords != null) { for (AutomatedCashInvestmentModel aci : aciRecords) { 
String frequencyCode = aci.getAciFrequencyCode(); Date nextDate = frequencyDatesService.calculateNextDueDate(frequencyCode, kemService.getCurrentDate()); if (nextDate != null) { aci.setAciNextDueDate(nextDate); if (updateBusinessObject(aci)) { counter++; generateTotalReport("END_AUTO_CSH_INVEST_MDL_T", counter); } else { LOG.error("Failed to update FeeMethod " + aci.getAciModelID()); generateExceptionReport("END_AUTO_CSH_INVEST_MDL_T", aci.getAciModelID().toString()); success = false; } } } } LOG.info("Total ACI Next Due Dates updated in END_AUTO_CSH_INVEST_MDL_T: " + counter); return success; } /** * Generates the statistic report for updated tables * * @param tableName * @param counter */ protected void generateTotalReport(String tableName, int counter) { try { rollFrequencyDatesTotalReportWriterService.writeFormattedMessageLine(tableName + ": %s", counter); } catch (Exception e) { LOG.error("Failed to generate the statistic report: " + e.getMessage()); rollFrequencyDatesExceptionReportWriterService.writeFormattedMessageLine("Failed to generate the total report: " + e.getMessage()); } } /** * Generates the exception report * * @param tableName * @param counter */ protected void generateExceptionReport(String tableName, String errorMessage) { try { rollFrequencyDatesExceptionReportWriterService.writeFormattedMessageLine(tableName + ": %s", errorMessage); } catch (Exception e) { LOG.error("Failed to generate the exception report.",e); } } protected void initializeReports() { rollFrequencyDatesTotalReportWriterService.writeSubTitle("<rollFrequencyDatesJob> Number of Records Updated"); rollFrequencyDatesTotalReportWriterService.writeNewLines(1); rollFrequencyDatesExceptionReportWriterService.writeSubTitle("<rollFrequencyDatesJob> Records Failed for update"); rollFrequencyDatesExceptionReportWriterService.writeNewLines(1); } /** * Updates business object * * @param businessObject * @return boolean */ protected boolean updateBusinessObject(PersistableBusinessObject 
businessObject) { boolean result = true; try { businessObjectService.save(businessObject); } catch (Exception e) { // such as IllegalArgumentException LOG.error("Unable to save " + businessObject, e); result = false; } return result; } /** * Sets the businessObjectService attribute value. * * @param businessObjectService The businessObjectService to set. */ public void setBusinessObjectService(BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } /** * Sets the kemService attribute value. * * @param kemService The kemService to set. */ public void setKemService(KEMService kemService) { this.kemService = kemService; } /** * Sets the securityDao attribute value. * * @param securityDao The securityDao to set. */ public void setSecurityDao(SecurityDao securityDao) { this.securityDao = securityDao; } /** * Sets the feeMethodDao attribute value. * * @param feeMethodDao The feeMethodDao to set. */ public void setFeeMethodDao(FeeMethodDao feeMethodDao) { this.feeMethodDao = feeMethodDao; } /** * Sets the ticklerDao attribute value. * * @param ticklerDao The ticklerDao to set. */ public void setTicklerDao(TicklerDao ticklerDao) { this.ticklerDao = ticklerDao; } /** * Sets the recurringCashTransferDao attribute value. * * @param recurringCashTransferDao The recurringCashTransferDao to set. */ public void setRecurringCashTransferDao(RecurringCashTransferDao recurringCashTransferDao) { this.recurringCashTransferDao = recurringCashTransferDao; } /** * Sets the rollFrequencyDatesTotalReportWriterService attribute value. * * @param rollFrequencyDatesTotalReportWriterService The rollFrequencyDatesTotalReportWriterService to set. */ public void setRollFrequencyDatesTotalReportWriterService(ReportWriterService rollFrequencyDatesTotalReportWriterService) { this.rollFrequencyDatesTotalReportWriterService = rollFrequencyDatesTotalReportWriterService; } /** * Sets the rollFrequencyDatesExceptionReportWriterService attribute value. 
* * @param rollFrequencyDatesExceptionReportWriterService The rollFrequencyDatesExceptionReportWriterService to set. */ public void setRollFrequencyDatesExceptionReportWriterService(ReportWriterService rollFrequencyDatesExceptionReportWriterService) { this.rollFrequencyDatesExceptionReportWriterService = rollFrequencyDatesExceptionReportWriterService; } /** * Sets the automatedCashInvestmentModelDao attribute value. * * @param automatedCashInvestmentModelDao The automatedCashInvestmentModelDao to set. */ public void setAutomatedCashInvestmentModelDao(AutomatedCashInvestmentModelDao automatedCashInvestmentModelDao) { this.automatedCashInvestmentModelDao = automatedCashInvestmentModelDao; } /** * Sets the cashSweepModelDao attribute value. * * @param cashSweepModelDao The cashSweepModelDao to set. */ public void setCashSweepModelDao(CashSweepModelDao cashSweepModelDao) { this.cashSweepModelDao = cashSweepModelDao; } /** * Gets the frequencyDatesService attribute. * * @return Returns the frequencyDatesService. */ protected FrequencyDatesService getFrequencyDatesService() { return frequencyDatesService; } /** * Sets the frequencyDatesService attribute value. * * @param frequencyDatesService The frequencyDatesService to set. */ public void setFrequencyDatesService(FrequencyDatesService frequencyDatesService) { this.frequencyDatesService = frequencyDatesService; } }
package datastructures;

import seq.SeqType;

import java.util.*;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;

/**
 * Created by devinmcgloin on 1/25/16.
 *
 * <p>A {@link List} whose backing store is chosen at construction time and can
 * be switched at runtime between an {@link ArrayList} and a {@link LinkedList}
 * via {@link #convert(SeqType)}. All {@link List} operations delegate to the
 * current backing list, so the usual {@code List} contracts (nullability,
 * optional-operation exceptions, fail-fast iteration) are those of the
 * delegate.
 */
public class DynamicList<E> implements List<E>, ISeq<E> {

    // Which backing implementation currently holds the data; kept in sync
    // with the runtime class of `data` by the constructors and convert().
    private SeqType type;

    // Delegate list holding all elements; never null after construction.
    private List<E> data;

    /** Creates an empty list backed by an {@link ArrayList}. */
    public DynamicList() {
        type = SeqType.ARRAYLIST;
        data = new ArrayList<E>();
    }

    /**
     * Creates an empty list backed by the requested implementation.
     *
     * @param type backing implementation to use
     */
    public DynamicList(SeqType type) {
        this.type = type;
        switch (type) {
            case LINKEDLIST:
                data = new LinkedList<E>();
                break;
            case ARRAYLIST:
                data = new ArrayList<E>();
                break;
        }
    }

    /**
     * Creates an {@link ArrayList}-backed list containing the elements of
     * {@code arr} in order.
     *
     * @param arr elements to copy into the new list
     */
    public DynamicList(E[] arr) {
        this();
        for (E item : arr)
            add(item);
    }

    /**
     * Creates a list with the requested backing implementation containing the
     * elements of {@code arr} in order.
     *
     * @param arr elements to copy into the new list
     * @param t   backing implementation to use
     */
    public DynamicList(E[] arr, SeqType t) {
        this(t);
        for (E item : arr)
            add(item);
    }

    @Override
    public String toString() {
        return "DynamicList{" +
                "type=" + type +
                ", data=" + data +
                '}';
    }

    /**
     * Switches the backing store to the requested implementation, preserving
     * element order. A no-op when the list is already backed by {@code t}.
     *
     * <p>Fixes two defects in the previous implementation: the target types
     * were swapped (converting to {@code ARRAYLIST} produced a
     * {@code LinkedList} and vice versa), and {@code type} was never updated,
     * so every later call compared against a stale value.
     *
     * @param t backing implementation to convert to
     */
    public void convert(SeqType t) {
        if (this.type == t) {
            return;
        }
        switch (t) {
            case ARRAYLIST:
                data = new ArrayList<E>(data);
                break;
            case LINKEDLIST:
                data = new LinkedList<E>(data);
                break;
        }
        this.type = t;
    }

    /**
     * Performs the given action on each element in iteration order.
     *
     * @param action the action to perform; must not be null
     */
    @Override
    public void forEach(Consumer<? super E> action) {
        data.forEach(action);
    }

    /** @return the number of elements in this list */
    @Override
    public int size() {
        return data.size();
    }

    /** @return {@code true} if this list contains no elements */
    @Override
    public boolean isEmpty() {
        return data.isEmpty();
    }

    /**
     * @param o element whose presence is to be tested
     * @return {@code true} if this list contains {@code o}
     */
    @Override
    public boolean contains(Object o) {
        return data.contains(o);
    }

    /** @return an iterator over the elements in proper sequence */
    @Override
    public Iterator<E> iterator() {
        return data.iterator();
    }

    /** @return a freshly allocated array of all elements in proper sequence */
    @Override
    public Object[] toArray() {
        return data.toArray();
    }

    /**
     * @param a destination array, reused if large enough
     * @return an array of all elements with the runtime type of {@code a}
     */
    @Override
    public <T> T[] toArray(T[] a) {
        return data.toArray(a);
    }

    /**
     * Appends {@code e} to the end of this list.
     *
     * @param e element to append
     * @return {@code true} (as specified by {@link Collection#add})
     */
    @Override
    public boolean add(E e) {
        return data.add(e);
    }

    /**
     * Removes the first occurrence of {@code o}, if present.
     *
     * @param o element to remove
     * @return {@code true} if this list contained {@code o}
     */
    @Override
    public boolean remove(Object o) {
        return data.remove(o);
    }

    /**
     * @param c collection to test for containment
     * @return {@code true} if this list contains every element of {@code c}
     */
    @Override
    public boolean containsAll(Collection<?> c) {
        return data.containsAll(c);
    }

    /**
     * Appends all elements of {@code c} in its iteration order.
     *
     * @param c elements to append
     * @return {@code true} if this list changed
     */
    @Override
    public boolean addAll(Collection<? extends E> c) {
        return data.addAll(c);
    }

    /**
     * Inserts all elements of {@code c} at {@code index}, shifting subsequent
     * elements right.
     *
     * @param index position for the first inserted element
     * @param c     elements to insert
     * @return {@code true} if this list changed
     */
    @Override
    public boolean addAll(int index, Collection<? extends E> c) {
        return data.addAll(index, c);
    }

    /**
     * Removes every element that is also contained in {@code c}.
     *
     * @param c elements to remove
     * @return {@code true} if this list changed
     */
    @Override
    public boolean removeAll(Collection<?> c) {
        return data.removeAll(c);
    }

    /**
     * Retains only the elements that are also contained in {@code c}.
     *
     * @param c elements to retain
     * @return {@code true} if this list changed
     */
    @Override
    public boolean retainAll(Collection<?> c) {
        return data.retainAll(c);
    }

    /** Removes all elements; the list is empty afterwards. */
    @Override
    public void clear() {
        data.clear();
    }

    /**
     * @param index position of the element to return
     * @return the element at {@code index}
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    @Override
    public E get(int index) {
        return data.get(index);
    }

    /**
     * Replaces the element at {@code index} with {@code element}.
     *
     * @param index   position of the element to replace
     * @param element new value
     * @return the element previously at {@code index}
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    @Override
    public E set(int index, E element) {
        return data.set(index, element);
    }

    /**
     * Inserts {@code element} at {@code index}, shifting subsequent elements
     * right.
     *
     * <p>Fixes a defect: the previous code special-cased LinkedList ends with
     * {@code addFirst}/{@code addLast} and then ALSO fell through to
     * {@code data.add(index, element)}, inserting the element twice — and
     * {@code addLast} was wrong for index {@code size() - 1}, which must
     * insert <em>before</em> the last element. {@link LinkedList#add(int,
     * Object)} already traverses from the nearer end, so plain delegation is
     * both correct and efficient.
     *
     * @param index   position for the new element
     * @param element element to insert
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    @Override
    public void add(int index, E element) {
        data.add(index, element);
    }

    /**
     * Removes and returns the element at {@code index}, shifting subsequent
     * elements left.
     *
     * <p>Fixes a defect: the previous LinkedList shortcut called
     * {@code removeFirst()} when {@code index == size() - 1}, deleting the
     * wrong element. {@link LinkedList#remove(int)} already traverses from
     * the nearer end, so plain delegation is both correct and efficient.
     *
     * @param index position of the element to remove
     * @return the element previously at {@code index}
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    @Override
    public E remove(int index) {
        return data.remove(index);
    }

    /**
     * @param o element to search for
     * @return index of the first occurrence of {@code o}, or -1 if absent
     */
    @Override
    public int indexOf(Object o) {
        return data.indexOf(o);
    }

    /**
     * @param o element to search for
     * @return index of the last occurrence of {@code o}, or -1 if absent
     */
    @Override
    public int lastIndexOf(Object o) {
        return data.lastIndexOf(o);
    }

    /** @return a list iterator over the elements in proper sequence */
    @Override
    public ListIterator<E> listIterator() {
        return data.listIterator();
    }

    /**
     * @param index index of the first element to be returned by {@code next()}
     * @return a list iterator starting at {@code index}
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    @Override
    public ListIterator<E> listIterator(int index) {
        return data.listIterator(index);
    }

    /**
     * Returns a view of this list between {@code fromIndex} (inclusive) and
     * {@code toIndex} (exclusive), backed by this list.
     *
     * @param fromIndex low endpoint (inclusive)
     * @param toIndex   high endpoint (exclusive)
     * @return the sublist view
     * @throws IndexOutOfBoundsException for an illegal endpoint index
     */
    @Override
    public List<E> subList(int fromIndex, int toIndex) {
        return data.subList(fromIndex, toIndex);
    }

    /**
     * Replaces each element with the result of applying {@code operator}.
     *
     * @param operator the operator to apply to each element
     */
    @Override
    public void replaceAll(UnaryOperator<E> operator) {
        data.replaceAll(operator);
    }

    /**
     * Sorts this list by {@code c}; a null comparator means natural ordering.
     *
     * @param c comparator used to compare elements, or null
     */
    @Override
    public void sort(Comparator<? super E> c) {
        data.sort(c);
    }

    /** @return a late-binding {@link Spliterator} over the elements */
    @Override
    public Spliterator<E> spliterator() {
        return data.spliterator();
    }

    /**
     * Removes all elements satisfying {@code filter}.
     *
     * @param filter predicate selecting elements to remove
     * @return {@code true} if any elements were removed
     */
    @Override
    public boolean removeIf(Predicate<? super E> filter) {
        return data.removeIf(filter);
    }

    /** @return a sequential {@link Stream} over the elements */
    @Override
    public Stream<E> stream() {
        return data.stream();
    }

    /** @return a possibly parallel {@link Stream} over the elements */
    @Override
    public Stream<E> parallelStream() {
        return data.parallelStream();
    }
}
/*
 * Copyright (c) 2018, James Swindle <wilingua@gmail.com>
 * Copyright (c) 2018, Adam <Adam@sigterm.info>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package net.runelite.client.plugins.npchighlight;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.inject.Provides;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.Client;
import net.runelite.api.GameState;
import net.runelite.api.GraphicID;
import net.runelite.api.GraphicsObject;
import net.runelite.api.MenuAction;
import net.runelite.api.MenuEntry;
import net.runelite.api.NPC;
import net.runelite.api.coords.WorldPoint;
import net.runelite.api.events.ConfigChanged;
import net.runelite.api.events.FocusChanged;
import net.runelite.api.events.GameStateChanged;
import net.runelite.api.events.GameTick;
import net.runelite.api.events.GraphicsObjectCreated;
import net.runelite.api.events.MenuEntryAdded;
import net.runelite.api.events.MenuOptionClicked;
import net.runelite.api.events.NpcDespawned;
import net.runelite.api.events.NpcSpawned;
import net.runelite.client.callback.ClientThread;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.eventbus.Subscribe;
import net.runelite.client.input.KeyManager;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;
import net.runelite.client.ui.overlay.OverlayManager;
import net.runelite.client.util.Text;
import net.runelite.client.util.WildcardMatcher;

/**
 * Highlights configured/tagged NPCs on-screen and on the minimap, and tracks
 * where and when tagged NPCs respawn after death so a respawn timer can be
 * drawn by the overlays.
 */
@PluginDescriptor(
	name = "NPC Indicators",
	description = "Highlight NPCs on-screen and/or on the minimap",
	tags = {"highlight", "minimap", "npcs", "overlay", "respawn", "tags"}
)
@Slf4j
public class NpcIndicatorsPlugin extends Plugin
{
	private static final int MAX_ACTOR_VIEW_RANGE = 15;

	// Option added to NPC menu
	private static final String TAG = "Tag";

	private static final List<MenuAction> NPC_MENU_ACTIONS = ImmutableList.of(MenuAction.NPC_FIRST_OPTION,
		MenuAction.NPC_SECOND_OPTION, MenuAction.NPC_THIRD_OPTION, MenuAction.NPC_FOURTH_OPTION,
		MenuAction.NPC_FIFTH_OPTION);

	@Inject
	private Client client;

	@Inject
	private NpcIndicatorsConfig config;

	@Inject
	private OverlayManager overlayManager;

	@Inject
	private NpcSceneOverlay npcSceneOverlay;

	@Inject
	private NpcMinimapOverlay npcMinimapOverlay;

	@Inject
	private NpcIndicatorsInput inputListener;

	@Inject
	private KeyManager keyManager;

	@Inject
	private ClientThread clientThread;

	@Setter(AccessLevel.PACKAGE)
	private boolean hotKeyPressed = false;

	/**
	 * NPCs to highlight
	 */
	@Getter(AccessLevel.PACKAGE)
	private final Set<NPC> highlightedNpcs = new HashSet<>();

	/**
	 * Dead NPCs that should be displayed with a respawn indicator if the config is on.
	 */
	@Getter(AccessLevel.PACKAGE)
	private final Map<Integer, MemorizedNpc> deadNpcsToDisplay = new HashMap<>();

	/**
	 * The time when the last game tick event ran.
	 */
	@Getter(AccessLevel.PACKAGE)
	private Instant lastTickUpdate;

	/**
	 * Tagged NPCs that have died at some point, which are memorized to
	 * remember when and where they will respawn
	 */
	private final Map<Integer, MemorizedNpc> memorizedNpcs = new HashMap<>();

	/**
	 * Highlight strings from the configuration
	 */
	private List<String> highlights = new ArrayList<>();

	/**
	 * NPC ids marked with the Tag option
	 */
	private final Set<Integer> npcTags = new HashSet<>();

	/**
	 * Tagged NPCs that spawned this tick, which need to be verified that
	 * they actually spawned and didn't just walk into view range.
	 */
	private final List<NPC> spawnedNpcsThisTick = new ArrayList<>();

	/**
	 * Tagged NPCs that despawned this tick, which need to be verified that
	 * they actually despawned and didn't just walk out of view range.
	 */
	private final List<NPC> despawnedNpcsThisTick = new ArrayList<>();

	/**
	 * World locations of graphics object which indicate that an
	 * NPC teleported that were played this tick.
	 */
	private final Set<WorldPoint> teleportGraphicsObjectSpawnedThisTick = new HashSet<>();

	/**
	 * The players location on the last game tick.
	 */
	private WorldPoint lastPlayerLocation;

	/**
	 * When hopping worlds, NPCs can spawn without them actually respawning,
	 * so we would not want to mark it as a real spawn in those cases.
	 */
	private boolean skipNextSpawnCheck = false;

	@Provides
	NpcIndicatorsConfig provideConfig(ConfigManager configManager)
	{
		return configManager.getConfig(NpcIndicatorsConfig.class);
	}

	@Override
	protected void startUp() throws Exception
	{
		overlayManager.add(npcSceneOverlay);
		overlayManager.add(npcMinimapOverlay);
		keyManager.registerKeyListener(inputListener);
		highlights = getHighlights();
		clientThread.invoke(() ->
		{
			// Highlights are rebuilt on the client thread; suppress the spawn
			// check once since existing NPCs are not真 "spawns".
			skipNextSpawnCheck = true;
			rebuildAllNpcs();
		});
	}

	@Override
	protected void shutDown() throws Exception
	{
		overlayManager.remove(npcSceneOverlay);
		overlayManager.remove(npcMinimapOverlay);
		deadNpcsToDisplay.clear();
		memorizedNpcs.clear();
		spawnedNpcsThisTick.clear();
		despawnedNpcsThisTick.clear();
		teleportGraphicsObjectSpawnedThisTick.clear();
		npcTags.clear();
		highlightedNpcs.clear();
		keyManager.unregisterKeyListener(inputListener);
	}

	@Subscribe
	public void onGameStateChanged(GameStateChanged event)
	{
		if (event.getGameState() == GameState.LOGIN_SCREEN ||
			event.getGameState() == GameState.HOPPING)
		{
			highlightedNpcs.clear();
			deadNpcsToDisplay.clear();
			// Forget pending deaths; hopping/logging out invalidates them
			memorizedNpcs.forEach((id, npc) -> npc.setDiedOnTick(-1));
			lastPlayerLocation = null;
			skipNextSpawnCheck = true;
		}
	}

	@Subscribe
	public void onConfigChanged(ConfigChanged configChanged)
	{
		if (!configChanged.getGroup().equals("npcindicators"))
		{
			return;
		}

		highlights = getHighlights();
		rebuildAllNpcs();
	}

	@Subscribe
	public void onFocusChanged(FocusChanged focusChanged)
	{
		// Losing focus releases the hotkey; otherwise it could stick "down"
		if (!focusChanged.isFocused())
		{
			hotKeyPressed = false;
		}
	}

	@Subscribe
	public void onMenuEntryAdded(MenuEntryAdded event)
	{
		// Only append the "Tag" option while the hotkey is held, on Examine entries
		if (!hotKeyPressed || event.getType() != MenuAction.EXAMINE_NPC.getId())
		{
			return;
		}

		MenuEntry[] menuEntries = client.getMenuEntries();
		menuEntries = Arrays.copyOf(menuEntries, menuEntries.length + 1);
		MenuEntry menuEntry = menuEntries[menuEntries.length - 1] = new MenuEntry();

		menuEntry.setOption(TAG);
		menuEntry.setTarget(event.getTarget());
		menuEntry.setParam0(event.getActionParam0());
		menuEntry.setParam1(event.getActionParam1());
		menuEntry.setIdentifier(event.getIdentifier());
		menuEntry.setType(MenuAction.RUNELITE.getId());

		client.setMenuEntries(menuEntries);
	}

	@Subscribe
	public void onMenuOptionClicked(MenuOptionClicked click)
	{
		if (click.getMenuAction() != MenuAction.RUNELITE ||
			!click.getMenuOption().equals(TAG))
		{
			return;
		}

		final int id = click.getId();
		final boolean removed = npcTags.remove(id);
		final NPC[] cachedNPCs = client.getCachedNPCs();
		final NPC npc = cachedNPCs[id];

		if (npc == null || npc.getName() == null)
		{
			return;
		}

		if (removed)
		{
			highlightedNpcs.remove(npc);
			memorizedNpcs.remove(npc.getIndex());
		}
		else
		{
			memorizeNpc(npc);
			npcTags.add(id);
			highlightedNpcs.add(npc);
		}

		click.consume();
	}

	@Subscribe
	public void onNpcSpawned(NpcSpawned npcSpawned)
	{
		final NPC npc = npcSpawned.getNpc();
		final String npcName = npc.getName();

		if (npcName == null)
		{
			return;
		}

		if (npcTags.contains(npc.getIndex()))
		{
			memorizeNpc(npc);
			highlightedNpcs.add(npc);
			spawnedNpcsThisTick.add(npc);
			return;
		}

		for (String highlight : highlights)
		{
			if (WildcardMatcher.matches(highlight, npcName))
			{
				memorizeNpc(npc);
				highlightedNpcs.add(npc);
				spawnedNpcsThisTick.add(npc);
				break;
			}
		}
	}

	@Subscribe
	public void onNpcDespawned(NpcDespawned npcDespawned)
	{
		final NPC npc = npcDespawned.getNpc();

		if (memorizedNpcs.containsKey(npc.getIndex()))
		{
			despawnedNpcsThisTick.add(npc);
		}

		highlightedNpcs.remove(npc);
	}

	@Subscribe
	public void onGraphicsObjectCreated(GraphicsObjectCreated event)
	{
		final GraphicsObject go = event.getGraphicsObject();

		// The grey teleport bubble marks a tile an NPC teleported from/to
		if (go.getId() == GraphicID.GREY_BUBBLE_TELEPORT)
		{
			teleportGraphicsObjectSpawnedThisTick.add(WorldPoint.fromLocal(client, go.getLocation()));
		}
	}

	@Subscribe
	public void onGameTick(GameTick event)
	{
		removeOldHighlightedRespawns();
		validateSpawnedNpcs();
		lastTickUpdate = Instant.now();
		lastPlayerLocation = client.getLocalPlayer().getWorldLocation();
	}

	private static boolean isInViewRange(WorldPoint wp1, WorldPoint wp2)
	{
		int distance = wp1.distanceTo(wp2);
		return distance < MAX_ACTOR_VIEW_RANGE;
	}

	/**
	 * Returns the tile directly behind the NPC relative to its facing
	 * direction (orientation / 256 yields one of 8 compass directions).
	 */
	private static WorldPoint getWorldLocationBehind(NPC npc)
	{
		final int orientation = npc.getOrientation() / 256;
		int dx = 0, dy = 0;

		switch (orientation)
		{
			case 0: // South
				dy = -1;
				break;
			case 1: // Southwest
				dx = -1;
				dy = -1;
				break;
			case 2: // West
				dx = -1;
				break;
			case 3: // Northwest
				dx = -1;
				dy = 1;
				break;
			case 4: // North
				dy = 1;
				break;
			case 5: // Northeast
				dx = 1;
				dy = 1;
				break;
			case 6: // East
				dx = 1;
				break;
			case 7: // Southeast
				dx = 1;
				dy = -1;
				break;
		}

		final WorldPoint currWP = npc.getWorldLocation();
		return new WorldPoint(currWP.getX() - dx, currWP.getY() - dy, currWP.getPlane());
	}

	private void memorizeNpc(NPC npc)
	{
		final int npcIndex = npc.getIndex();
		memorizedNpcs.putIfAbsent(npcIndex, new MemorizedNpc(npc));
	}

	private void removeOldHighlightedRespawns()
	{
		deadNpcsToDisplay.values().removeIf(x -> x.getDiedOnTick() + x.getRespawnTime() <= client.getTickCount() + 1);
	}

	@VisibleForTesting
	List<String> getHighlights()
	{
		final String configNpcs = config.getNpcToHighlight().toLowerCase();

		if (configNpcs.isEmpty())
		{
			return Collections.emptyList();
		}

		return Text.fromCSV(configNpcs);
	}

	private void rebuildAllNpcs()
	{
		highlightedNpcs.clear();

		if (client.getGameState() != GameState.LOGGED_IN &&
			client.getGameState() != GameState.LOADING)
		{
			// NPCs are still in the client after logging out,
			// but we don't want to highlight those.
			return;
		}

		outer:
		for (NPC npc : client.getNpcs())
		{
			final String npcName = npc.getName();

			if (npcName == null)
			{
				continue;
			}

			if (npcTags.contains(npc.getIndex()))
			{
				highlightedNpcs.add(npc);
				continue;
			}

			for (String highlight : highlights)
			{
				if (WildcardMatcher.matches(highlight, npcName))
				{
					memorizeNpc(npc);
					highlightedNpcs.add(npc);
					continue outer;
				}
			}

			// NPC is not highlighted
			memorizedNpcs.remove(npc.getIndex());
		}
	}

	/**
	 * Reconciles this tick's spawns/despawns with the memorized respawn data:
	 * despawns in view range start a respawn countdown, spawns in range of the
	 * player's previous location record the respawn time and location.
	 */
	private void validateSpawnedNpcs()
	{
		if (skipNextSpawnCheck)
		{
			skipNextSpawnCheck = false;
		}
		else
		{
			for (NPC npc : despawnedNpcsThisTick)
			{
				if (!teleportGraphicsObjectSpawnedThisTick.isEmpty())
				{
					if (teleportGraphicsObjectSpawnedThisTick.contains(npc.getWorldLocation()))
					{
						// NPC teleported away, so we don't want to add the respawn timer
						continue;
					}
				}

				if (isInViewRange(client.getLocalPlayer().getWorldLocation(), npc.getWorldLocation()))
				{
					final MemorizedNpc mn = memorizedNpcs.get(npc.getIndex());

					if (mn != null)
					{
						mn.setDiedOnTick(client.getTickCount() + 1); // This runs before tickCounter updates, so we add 1

						if (!mn.getPossibleRespawnLocations().isEmpty())
						{
							log.debug("Starting {} tick countdown for {}", mn.getRespawnTime(), mn.getNpcName());
							deadNpcsToDisplay.put(mn.getNpcIndex(), mn);
						}
					}
				}
			}

			for (NPC npc : spawnedNpcsThisTick)
			{
				if (!teleportGraphicsObjectSpawnedThisTick.isEmpty())
				{
					if (teleportGraphicsObjectSpawnedThisTick.contains(npc.getWorldLocation()) ||
						teleportGraphicsObjectSpawnedThisTick.contains(getWorldLocationBehind(npc)))
					{
						// NPC teleported here, so we don't want to update the respawn timer
						continue;
					}
				}

				if (lastPlayerLocation != null && isInViewRange(lastPlayerLocation, npc.getWorldLocation()))
				{
					final MemorizedNpc mn = memorizedNpcs.get(npc.getIndex());

					if (mn == null)
					{
						// A config change (rebuildAllNpcs) in the same tick can evict
						// the memorization after the NPC was queued here; previously
						// this dereferenced mn unconditionally and could NPE.
						continue;
					}

					if (mn.getDiedOnTick() != -1)
					{
						mn.setRespawnTime(client.getTickCount() + 1 - mn.getDiedOnTick());
						mn.setDiedOnTick(-1);
					}

					final WorldPoint npcLocation = npc.getWorldLocation();

					// An NPC can move in the same tick as it spawns, so we also have
					// to consider whatever tile is behind the npc
					final WorldPoint possibleOtherNpcLocation = getWorldLocationBehind(npc);

					mn.getPossibleRespawnLocations().removeIf(x ->
						x.distanceTo(npcLocation) != 0 && x.distanceTo(possibleOtherNpcLocation) != 0);

					if (mn.getPossibleRespawnLocations().isEmpty())
					{
						mn.getPossibleRespawnLocations().add(npcLocation);
						mn.getPossibleRespawnLocations().add(possibleOtherNpcLocation);
					}
				}
			}
		}

		spawnedNpcsThisTick.clear();
		despawnedNpcsThisTick.clear();
		teleportGraphicsObjectSpawnedThisTick.clear();
	}
}
/**
 * Licensed to Cloudera, Inc. under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Cloudera, Inc. licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.util;

import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.cloudera.flume.agent.FlumeNode;
import com.cloudera.flume.agent.LivenessManager;
import com.cloudera.flume.agent.LogicalNodeManager;
import com.cloudera.flume.agent.MockMasterRPC;
import com.cloudera.flume.agent.diskfailover.DiskFailoverManager;
import com.cloudera.flume.agent.diskfailover.NaiveFileFailoverManager;
import com.cloudera.flume.agent.durability.NaiveFileWALManager;
import com.cloudera.flume.agent.durability.WALManager;
import com.cloudera.flume.conf.Context;
import com.cloudera.flume.conf.FlumeBuilder;
import com.cloudera.flume.conf.FlumeConfiguration;
import com.cloudera.flume.conf.FlumeSpecException;
import com.cloudera.flume.core.Attributes;
import com.cloudera.flume.core.EventSink;
import com.cloudera.flume.core.EventSource;
import com.cloudera.flume.core.EventUtil;
import com.cloudera.flume.handlers.debug.AttrSynthSource;
import com.cloudera.flume.handlers.debug.BenchmarkReportDecorator;
import com.cloudera.flume.handlers.debug.MemorySinkSource;
import com.cloudera.flume.handlers.debug.NoNlASCIISynthSource;
import com.cloudera.flume.handlers.endtoend.CollectorAckListener;
import com.cloudera.flume.reporter.ReportEvent;
import com.cloudera.flume.reporter.ReportManager;
import com.cloudera.flume.reporter.Reportable;

/**
 * This sets up a battery of synthetic datasets for testing against different
 * decorators and sinks. Generally, each test requires ~2GB mem. ~1GB for
 * keeping a data set in memory and the other GB for some gc headroom.
 */
@SuppressWarnings("serial")
public class BenchmarkHarness {
  static final Logger LOG = LoggerFactory.getLogger(BenchmarkHarness.class);

  // These are setup to point to new default logging dir for each test.
  public static FlumeNode node;
  public static MockMasterRPC mock;
  public static File tmpdir;

  /**
   * This sets the log dir in the FlumeConfiguration and then instantiates a
   * mock master and node that use that configuration
   */
  public static void setupLocalWriteDir() {
    try {
      tmpdir = FileUtil.mktempdir();
    } catch (Exception e) {
      Assert.fail("mk temp dir failed");
    }
    FlumeConfiguration conf = FlumeConfiguration.get();
    conf.clear(); // reset all back to defaults.
    conf.set(FlumeConfiguration.AGENT_LOG_DIR_NEW, tmpdir.getAbsolutePath());
    mock = new MockMasterRPC();
    node = new FlumeNode(mock, false /* starthttp */, false /* oneshot */);
    ReportManager.get().clear();
  }

  /**
   * This version allows a particular test case to replace the default
   * xxxManager with one that is reasonable for the test.
   *
   * Any args that are null will default to the "normal" version.
   */
  public static void setupFlumeNode(LogicalNodeManager nodesMan,
      WALManager walMan, DiskFailoverManager dfMan, CollectorAckListener colAck,
      LivenessManager liveman) {
    try {
      tmpdir = FileUtil.mktempdir();
    } catch (Exception e) {
      Assert.fail("mk temp dir failed");
    }
    FlumeConfiguration conf = FlumeConfiguration.get();
    conf.set(FlumeConfiguration.AGENT_LOG_DIR_NEW, tmpdir.getAbsolutePath());
    mock = new MockMasterRPC();

    // Each null argument falls back to the production default implementation.
    nodesMan = (nodesMan != null) ? nodesMan : new LogicalNodeManager(NetUtils
        .localhost());
    walMan = (walMan != null) ? walMan : new NaiveFileWALManager(new File(conf
        .getAgentLogsDir()));
    dfMan = (dfMan != null) ? dfMan : new NaiveFileFailoverManager(new File(
        conf.getAgentLogsDir()));
    colAck = (colAck != null) ? colAck : new CollectorAckListener(mock);
    liveman = (liveman != null) ? liveman : new LivenessManager(nodesMan, mock,
        walMan);
    node = new FlumeNode(NetUtils.localhost(), mock, nodesMan, walMan, dfMan,
        colAck, liveman);
  }

  /**
   * Cleanup the temp dir after the test is run.
   */
  public static void cleanupLocalWriteDir() throws IOException {
    FileUtil.rmr(tmpdir);
  }

  // This is a tiny test set, suitable for step-through debugging
  public static Map<String, EventSource> tiny = new HashMap<String, EventSource>() {
    {
      // datasets with fields, x attributes, 10 byte long attr names, 10
      // byte values.
      put("10,10,5,5,8", new AttrSynthSource(10, 5, 5, 8, 1337));
    }
  };

  // This is a data set that varies the size of the body of an event.
  public static Map<String, EventSource> varyMsgBytes = new HashMap<String, EventSource>() {
    {
      // 1337 is the rand seed.
      // this is *really* slow
      put("100000,10,0,0,0", new NoNlASCIISynthSource(100000, 10, 1337));
      put("100000,100,0,0,0", new NoNlASCIISynthSource(100000, 100, 1337));
      put("100000,1000,0,0,0", new NoNlASCIISynthSource(100000, 1000, 1337));
      put("100000,3000,0,0,0", new NoNlASCIISynthSource(100000, 3000, 1337));
      put("100000,10000,0,0,0", new NoNlASCIISynthSource(100000, 10000, 1337));
    }
  };

  // This dataset varies the # of attributes an event has. The latter two
  // entries send fewer messages because the size of the messages are memory
  // prohibitive
  public static Map<String, EventSource> varyNumAttrs = new HashMap<String, EventSource>() {
    {
      // datasets with fields, x attributes, 10 byte long attr names, 10
      // byte values.
      put("100000,0,10,10,10", new AttrSynthSource(100000, 10, 10, 10, 1337));
      put("100000,0,100,10,10", new AttrSynthSource(100000, 100, 10, 10, 1337));
      put("10000,0,1000,10,10", new AttrSynthSource(10000, 1000, 10, 10, 1337));
      put("1000,0,10000,10,10", new AttrSynthSource(1000, 10000, 10, 10, 1337));
    }
  };

  // This dataset varies the size of the values associated with an attribute.
  public static Map<String, EventSource> varyValSize = new HashMap<String, EventSource>() {
    {
      // datasets with fields, 10 attributes, 10 byte long attr names, xx
      // byte values.
      put("100000,0,10,10,10", new AttrSynthSource(100000, 10, 10, 10, 1337));
      put("100000,0,10,10,100", new AttrSynthSource(100000, 10, 10, 100, 1337));
      put("100000,0,10,10,1000",
          new AttrSynthSource(100000, 10, 10, 1000, 1337));
      // NOTE(review): key says 1000 events but the source generates 10000 —
      // the label and the AttrSynthSource count disagree; confirm which is
      // intended before relying on the label in reports.
      put("1000,0,10,10,10000", new AttrSynthSource(10000, 10, 10, 10000, 1337));
    }
  };

  /**
   * This takes what ever data set comes in and multiplies it by 10x volume.
   */
  public static EventSink createDecoratorBenchmarkSink(String name, String deco)
      throws FlumeSpecException {
    String spec = "let benchsink := { benchreport(\"" + name
        + "\") => null } in { mult(10) => { benchinject => { " + deco
        + " => benchsink } } }";
    return FlumeBuilder.buildSink(new Context(), spec);
  }

  /**
   * Builds a sink spec that injects benchmark events and reports under the
   * given name before forwarding to the supplied sink spec.
   */
  public static EventSink createSinkBenchmark(String name, String sink)
      throws FlumeSpecException {
    String spec = "{benchinject => {benchreport(\"" + name + "\") => " + sink
        + " } }";
    return FlumeBuilder.buildSink(new Context(), spec);
  }

  /**
   * This takes a single decorator, and then applies all of the datasets through
   * the decorator. Each source is bufferized -- the given number of messages
   * are stored in memory so that they can be blasted through any deco.
   */
  public static void doDecoBenchmark(String deco, Map<String, EventSource> sets)
      throws FlumeSpecException, IOException {
    for (Map.Entry<String, EventSource> ent : sets.entrySet()) {
      setupLocalWriteDir();
      ReportManager.get().clear();

      // copy all events into memory
      EventSource src = MemorySinkSource.bufferize(ent.getValue());
      EventSink snk = createDecoratorBenchmarkSink(ent.getKey() + "," + deco,
          deco);
      src.open();
      snk.open();
      EventUtil.dumpAll(src, snk);
      src.close();
      snk.close();
      dumpReports();
      cleanupLocalWriteDir();
    }
  }

  /**
   * This gets reports and outputs them to std err in csv format.
   */
  public static void dumpReports() {
    ReportManager rman = ReportManager.get();
    SortedMap<String, Reportable> sorted = new TreeMap<String, Reportable>(rman
        .getReportables());
    for (Map.Entry<String, Reportable> ent : sorted.entrySet()) {
      String params = ent.getKey();
      ReportEvent r = ent.getValue().getReport();
      // was: new String(r.toString()) — the extra String copy was redundant
      System.out.println(r.toString());
      System.err.print(new Date(r.getTimestamp()) + ",");
      System.err.print(params + ",");
      System.err.print(Attributes.readString(r,
          BenchmarkReportDecorator.A_BENCHMARK_CSV));
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jp.tonyu.soytext2.js; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import java.util.Set; import jp.tonyu.db.NotInWriteTransactionException; import jp.tonyu.debug.Log; import jp.tonyu.js.AllPropAction; import jp.tonyu.js.BlankScriptableObject; import jp.tonyu.js.BuiltinFunc; import jp.tonyu.js.Scriptables; import jp.tonyu.js.StringPropAction; import jp.tonyu.soytext.Origin; import jp.tonyu.soytext2.document.DocumentRecord; import jp.tonyu.soytext2.document.DocumentSet; import jp.tonyu.soytext2.document.IndexRecord; import jp.tonyu.soytext2.document.PairSet; import jp.tonyu.soytext2.file.AttachedBinData; import jp.tonyu.soytext2.file.BinData; import jp.tonyu.soytext2.servlet.HttpContext; import jp.tonyu.util.SFile; import jp.tonyu.util.SPrintf; import net.arnx.jsonic.JSON; import org.mozilla.javascript.Context; import org.mozilla.javascript.Function; import org.mozilla.javascript.NativeJavaObject; import org.mozilla.javascript.Scriptable; import org.mozilla.javascript.ScriptableObject; import org.mozilla.javascript.UniqueTag; import org.omg.CosNaming.NamingContextPackage.NotFound; public class DocumentScriptable implements Function { 
public static final String IS_INSTANCE_ON_MEMORY = "isInstanceOnMemory"; public static final String CALLSUPER="callSuper"; public static final String ONAPPLY="onApply",APPLY="apply",CALL="call"; public static final String ONUPDATEINDEX = "onUpdateIndex"; private static final String UPDATE_INDEX = "updateIndex"; private static final String DOLLAR="$"; private static final Object SETCONTENTANDSAVE = "setContentAndSave"; private static final Object GETCONTENT = "getContent"; private static final String ON_GENERATE_CONTENT = "onGenerateContent"; DocumentRecord _d; final String _id; public final DocumentLoader loader; Map<Object, Object>_binds=new HashMap<Object, Object>(); Scriptable scope=null; boolean scopeLoaded=false; public static boolean lazyLoad=true; boolean contentLoaded=!lazyLoad; // true iff loaded or loading private synchronized void loadContent() { if (contentLoaded) return; contentLoaded=true; reloadFromContent(); } public synchronized Scriptable getScope() { if (scopeLoaded) return scope; scopeLoaded=true; String ss=getDocument().scope; if (ss==null || ss.length()==0) { return null; } else { try { Map<Object, Object> m=(Map)JSON.decode(ss); Scriptable res=new BlankScriptableObject(); for (Map.Entry e:m.entrySet()) { ScriptableObject.putProperty(res, e.getKey()+"", loader.byId(e.getValue()+"")); } scope=res; } catch (Exception e) { e.printStackTrace(); scope=null; } } return scope; } public void setScope(Scriptable s) { scope=s; binds().remove(DocumentRecord.ATTR_SCOPE); scopeLoaded=true; } public synchronized void setScopeRaw(String r) { getDocument().scope=r; scopeLoaded=false; getScope(); } public DocumentRecord getDocument() { if (_d!=null) return _d; _d=loader.recordById(_id); if (_d==null) Log.die("Document "+_id+" is not exist"); return _d; } private Map<Object, Object> binds() { loadContent(); return _binds; } public DocumentScriptable(final DocumentLoader loader,String id) { this.loader=loader; _id=id; } public DocumentScriptable(final 
DocumentLoader loader, DocumentRecord rec) { this.loader=loader; _d=rec; if (_d.content==null) contentLoaded=true; // When new document _id=rec.id; } BuiltinFunc saveFunc =new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { save(); return DocumentScriptable.this; } }; BuiltinFunc saveRawFunc =new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { saveRaw((Scriptable)args[0]); return DocumentScriptable.this; } }; BuiltinFunc reloadFromContentFunc =new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { reloadFromContent(); return DocumentScriptable.this; } }; BuiltinFunc updateIndexFunc =new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { if (args.length==0) { updateIndex(); } else if (args[1] instanceof Scriptable){ updateIndex((Scriptable)args[1]); } return DocumentScriptable.this; } }; BuiltinFunc setContentAndSaveFunc = new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { setContentAndSave(args[0]+""); return DocumentScriptable.this; } }; BuiltinFunc getContentFunc = new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { return getDocument().content; } }; BuiltinFunc hasOwnPropFunc= new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { if (args.length==0) return false; return binds().containsKey(args[0]); } }; int callsuperlim=0; BuiltinFunc callSuperFunc =new BuiltinFunc() { @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { loadContent(); if (args.length>0) { int c=0; String name=args[0]+""; for (Scriptable p=DocumentScriptable.this;p!=null ; p=p.getPrototype()) { Object fo=p.get(name, p); 
if (fo instanceof Function) { c++; if (c==2) { Function f = (Function) fo; Object[] argShift=new Object[args.length-1]; for (int i=0 ; i<argShift.length ; i++) { argShift[i]=args[i+1]; } Log.d(this, "Calling superclass function "+cx.decompileFunction(f,0)); return f.call(cx, scope, thisObj, argShift); } } } } return null; } }; public Object get(Object key) { //Log.d(this, "get - "+_id+"."+key); if ("id".equals(key)) return _id; // Document Funcs (will be move into Document - 66646.2.2011.tonyu.jp) if ("save".equals(key)) return saveFunc; if (UPDATE_INDEX.equals(key) || ("_"+UPDATE_INDEX).equals(key)) return updateIndexFunc; if (SETCONTENTANDSAVE.equals(key)) return setContentAndSaveFunc; if (GETCONTENT.equals(key)) return getContentFunc; // --- the followings need not be moved if (CALLSUPER.equals(key)) return callSuperFunc; if ("identityHashCode".equals(key)) return System.identityHashCode(this); if ("hasOwnProperty".equals(key)) return hasOwnPropFunc; if ("_reloadFromContent".equals(key)) return reloadFromContentFunc; if ("_saveRaw".equals(key)) return saveRawFunc; DocumentRecord d=getDocument(); if ("_id".equals(key)) return d.id; if (DocumentRecord.LASTUPDATE.equals(key)||("_"+DocumentRecord.LASTUPDATE).equals(key)) return d.lastUpdate; if (DocumentRecord.OWNER.equals(key)||("_"+DocumentRecord.OWNER).equals(key)) return d.owner; if ("summary".equals(key)||"_summary".equals(key)) return d.summary; if ("_version".equals(key)) return d.version; if ("_content".equals(key)) return d.content; if (("_"+DocumentRecord.ATTR_CONSTRUCTOR).equals(key) || DocumentRecord.ATTR_CONSTRUCTOR.equals(key)) { return getConstructor(); } if (("_"+DocumentRecord.ATTR_SCOPE).equals(key) || DocumentRecord.ATTR_SCOPE.equals(key)) { return getScope(); } if ("_scopeRaw".equals(key)) { return getDocument().scope; } Object res = binds().get(key); if (res!=null) return res; return UniqueTag.NOT_FOUND; } public Object put(Object key,Object value) { if 
(("_"+DocumentRecord.ATTR_SCOPE).equals(key) || DocumentRecord.ATTR_SCOPE.equals(key)) { setScope((Scriptable)value); return value; } if (("_"+DocumentRecord.ATTR_CONSTRUCTOR).equals(key) || DocumentRecord.ATTR_CONSTRUCTOR.equals(key)) { setConstructor((Scriptable)value); return value; } if ("_scopeRaw".equals(key)) { setScopeRaw(value+""); return value; } if ("_summary".equals(key)) { getDocument().summary=value+""; return value; } if ("_content".equals(key)) { getDocument().content=value+""; //reloadFromContent(); (would be better comment out for import records..) return value; } if (key instanceof String || key instanceof Number) { binds().put(key, value); } else if (value==null){ binds().remove(key); } else { Log.die("Cannot put "+key); } return value; } public Set<Object> keySet() { return binds().keySet(); } @Override public void delete(String name) { binds().remove(name); } @Override public void delete(int index) { binds().remove(index); } @Override public Object get(String name, Scriptable start) { return get(name); } @Override public Object get(int index, Scriptable start) { return get(index); } @Override public String getClassName() { // TODO Auto-generated method stub return null; } @Override public Object getDefaultValue(Class<?> hint) { return toString(); } @Override public Object[] getIds() { Set<Object> keys=binds().keySet(); Object[] res=new Object[keys.size()]; int i=0; for (Object key:keys) { if (key instanceof String || key instanceof Number) { res[i]=key; } else { Log.die("Wrong key! 
"+key); } i++; } return res; } void trace(Object msg) { if (id().equals("13892.1.2010.tonyu.jp")) { Log.d("loadconst", msg); } } @Override public Scriptable getParentScope() { // TODO Auto-generated method stub return null; } public Scriptable getConstructor() { String c=getDocument().constructor; if (c!=null && c.length()>0) { if ("Function".equals(c)) { return loader.jsSession().funcFactory; } else { return loader.byId(c); } } return null; } public void setConstructor(Scriptable s) { trace("Set const s="+s); if (s instanceof DocumentScriptable) { DocumentScriptable ds = (DocumentScriptable) s; getDocument().constructor=ds._id; } else if (s instanceof Function) { getDocument().constructor="Function"; } else { Log.die(s+" cannot be a constructor"); } binds().remove(DocumentRecord.ATTR_CONSTRUCTOR); } @Override public Scriptable getPrototype() { Scriptable s=getConstructor(); trace("Get const s="+s); if (s==null) return null; Object res=s.get(Scriptables.PROTOTYPE,s); if (res instanceof Scriptable) { Scriptable ss = (Scriptable) res; return ss; } return null; } @Override public boolean has(String name, Scriptable start) { return binds().containsKey(name); } @Override public boolean has(int index, Scriptable start) { return binds().containsKey(index); } /* <p> * The JavaScript code "lhs instanceof rhs" causes rhs.hasInstance(lhs) to * be called. 
*/ @Override public boolean hasInstance(Scriptable instance) { for (int i=0 ;i<100 ;i++) { Object c=ScriptableObject.getProperty(instance, Scriptables.CONSTRUCTOR); if (equals(c)) return true; if (c instanceof Scriptable) { Scriptable cs = (Scriptable) c; Object p=ScriptableObject.getProperty(cs, Scriptables.PROTOTYPE); if (p instanceof Scriptable) { instance = (Scriptable) p; continue; } } return false; } return false; } @Override public void put(String name, Scriptable start, Object value) { //if (name.equals("contentEquals")) Log.die("Who set it?"); trace("put "+name +" = "+value); put(name,value); } @Override public void put(int index, Scriptable start, Object value) { put(index,value); } @Override public void setParentScope(Scriptable parent) { // TODO Auto-generated method stub } @Override public void setPrototype(Scriptable prototype) { //Log.d(this, "__proto__"+prototype); //this.__proto__= prototype; } public void save() { refreshRecordAttrsToRecord(); refreshContentToRecord(); Log.d(this, "save() content changed to "+getDocument().content); PairSet<String,String> updatingIndex = indexUpdateMap(); loader.save(getDocument(), updatingIndex); } public void saveRaw(Scriptable updatingIndex) { loader.save(getDocument(), convUpdIdx(updatingIndex)); } private void updateIndex(Scriptable updatingIndex) { PairSet<String, String> updatingIndexp = convUpdIdx(updatingIndex); updateIndex(updatingIndexp); } private PairSet<String, String> convUpdIdx(Scriptable updatingIndex) { // upd=[[k,v],[k,v]...] 
PairSet<String,String> updatingIndexp = new PairSet<String, String>(); Object []idxs=Scriptables.toArray(updatingIndex); for (Object idx:idxs) { Object[] idxa=Scriptables.toArray(idx); if (idxa.length==2) { updatingIndexp.put(idxa[0]+"", idxa[1]+""); } } return updatingIndexp; } private void updateIndex(PairSet<String,String> updatingIndex) { loader.updateIndex(getDocument(), updatingIndex); } private void refreshRecordAttrsToRecord() { refreshSummaryToRecord(); refreshScopeToRecord(); //refreshConstructor(); } /*private void refreshConstructor() { Scriptable con = getConstructor(); if (con instanceof Scriptable) { setConstructor(con); } }*/ private void refreshScopeToRecord() { final Map r=new HashMap(); Scriptable s=getScope(); if (s==null) { getDocument().scope=null; return; } Scriptables.each(scope, new StringPropAction() { @Override public void run(String key, Object value) { if (value instanceof DocumentScriptable) { DocumentScriptable ds=(DocumentScriptable) value; r.put(key, ds.id()); } else { Log.d(this,"Cannot :"+DocumentScriptable.this+"["+key+"]="+value+";"); } } }); getDocument().scope=JSON.encode(r); Log.d(this, "Scope changed to - "+getDocument().scope); binds().remove(DocumentRecord.ATTR_SCOPE); } public void updateIndex() { PairSet<String,String> updatingIndex = indexUpdateMap(); loader.updateIndex(getDocument(), updatingIndex); } private PairSet<String,String> indexUpdateMap() { PairSet<String,String> updatingIndex=new PairSet<String,String>(); mkIndex(updatingIndex); Log.d(UPDATE_INDEX, "save() - index set to "+updatingIndex); return updatingIndex; } private void mkIndex(PairSet<String,String> idx) { String name = Scriptables.getAsString(this, "name", null); if (name!=null) idx.put("name", name); mkClassIndex(idx); mkBackLinkIndex(this , idx); Object ouio = ScriptableObject.getProperty(this, ONUPDATEINDEX); if (ouio instanceof Function) { Function oui=(Function)ouio; loader.jsSession().call(oui, this, new Object[]{ new 
IndexUpdateContext(this,idx) } ); } } private void mkClassIndex( PairSet<String, String> idx) { int depth=0; for (Function klass=Scriptables.getClass(this); klass!=null; klass=Scriptables.getSuperclass(klass) ) { if (klass instanceof DocumentScriptable) { DocumentScriptable d = (DocumentScriptable) klass; idx.put(IndexRecord.INDEX_INSTANCEOF, d.id()); } else { break; } if (depth++>16) Log.die("Depth too many"); } } public String id() { return _id; } private static void mkBackLinkIndex(final Scriptable s, final PairSet<String,String> idx) { if (s instanceof NativeJavaObject) return; if (s instanceof DocumentScriptable) { DocumentScriptable ds = (DocumentScriptable) s; Scriptable scope=ds.getScope(); if (scope!=null) { mkBackLinkIndex(scope, idx); } } Scriptables.each(s, new AllPropAction() { @Override public void run(Object key, Object value) { //Log.d("updateIndex", key+"="+value); if (value instanceof DocumentScriptable) { Log.d(UPDATE_INDEX, s+"put "+key+"="+value); DocumentScriptable d = (DocumentScriptable) value; idx.put(IndexRecord.INDEX_REFERS, d.getDocument().id); } else if (value instanceof Scriptable) { Scriptable scr = (Scriptable) value; mkBackLinkIndex(scr,idx); } } }); } private void refreshContentToRecord() { final StringBuilder b=new StringBuilder(); Object gen=ScriptableObject.getProperty( this, ON_GENERATE_CONTENT); if (gen instanceof Function) { Function func = (Function) gen; b.append( loader.jsSession().call(func, this, new Object[]{this} ) ); } else { b.append(HashLiteralConv.toHashLiteral(this)); } getDocument().content=b+""; } public void setContentAndSave(String content) { DocumentRecord d=getDocument(); d.content=content; if (d.content==null) Log.die("Content of "+d.id+" is null!"); String c=d.content; if (c.length()>10000) c=c.substring(0,10000); Log.d(System.identityHashCode(this), "setContentAndSave() content changed to "+c); loader.loadFromContent(content, this); refreshRecordAttrsToRecord(); PairSet<String,String> idx = 
indexUpdateMap(); loader.save(d, idx); //loader.getDocumentSet().save(d, idx);//d.save(); } public void reloadFromContent() { DocumentRecord d=getDocument(); assert d.content!=null; if (d.content==null) return; //Log.die("Content of "+d.id+" is null!"); trace("Reading content - "+d.content); loader.loadFromContent(d.content, this); refreshRecordAttrsToRecord(); } @Override public String toString() { return "(Docscr "+id()+")"; } public void clear() { binds().clear(); } public void refreshSummaryToRecord() { DocumentRecord d=getDocument(); d.summary=genSummary(); Log.d(this, "Sumamry changed to "+d.summary); } public String genSummary() { Object res; res=get("name"); String ress = res+""; if (res!=null && res!=UniqueTag.NOT_FOUND && ress.length()>0) return ress; res=get("title");ress = res+""; if (res!=null && res!=UniqueTag.NOT_FOUND && ress.length()>0) return ress; res=get(HttpContext.ATTR_BODY);ress = res+""; if (res!=null && res!=UniqueTag.NOT_FOUND && ress.length()>0) return ress.substring(0,Math.min(ress.length(), 20)); return id(); } @Override public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) { Object r; r=ScriptableObject.getProperty(this,DOLLAR); if (r instanceof Function) { Function f = (Function) r; return f.call(cx, scope, thisObj , args); } r=ScriptableObject.getProperty(this,ONAPPLY); if (r instanceof Function) { Function f = (Function) r; Object[] args2=new Object[] { thisObj ,args }; return f.call(cx, scope, this, args2); } r=ScriptableObject.getProperty(this,APPLY); if (r instanceof Function) { Function f = (Function) r; Object[] args2=new Object[] { thisObj ,args }; return f.call(cx, scope, this, args2); } r=ScriptableObject.getProperty(this,CALL); if (r instanceof Function) { Function f = (Function) r; Object[] args2=new Object[args.length+1]; args2[0]=thisObj; for (int i=1 ; i<args2.length ;i++){ args2[i]=args[i-1]; } return f.call(cx, scope, this , args2); } Log.die(this+" is not function-callable."); return 
null; } public boolean isInstanceOnMemory() { Object r=get(IS_INSTANCE_ON_MEMORY); if (r instanceof Boolean) { return (Boolean)r; } return false; } @Override public Scriptable construct(Context cx, Scriptable scope, Object[] args) { Scriptable d; // generate id if (isInstanceOnMemory()) { d=new BlessedScriptable(this); /* new BlankScriptableObject(); Object prot = get(Scriptables.PROTOTYPE); if (prot instanceof Scriptables) { d.setPrototype( (Scriptable)prot ); }*/ } else { d=loader.newDocument(); // generate id } //Scriptable cons = getConstructor(); ScriptableObject.putProperty(d,Scriptables.CONSTRUCTOR, this); //cons); String name=Scriptables.getAsString(this, "name", null); if (name!=null) { Scriptable scope2=new BlankScriptableObject(); ScriptableObject.putProperty(scope2, name, this); ScriptableObject.putProperty(d, DocumentRecord.ATTR_SCOPE, scope2); } /*Scriptable p=getPrototype(); if (p!=null) {*/ Object init=ScriptableObject.getProperty(d,"initialize"); Log.d(this, " initialize = "+init); if (init instanceof Function) { Log.d(this, " initialize called!"); Function f = (Function) init; f.call(cx, scope, d, args); } else { Log.d(this, " initialize did not called"); if (init!=null) { Log.d(this, "init="+init.getClass().getSuperclass()); } } //} return d; } public void refreshIndex() throws NotInWriteTransactionException { PairSet<String,String> h = indexUpdateMap(); loader.getDocumentSet().updateIndex(getDocument(), h); } public boolean isRecordLoaded() { return _d!=null; } public void loadRecord(DocumentRecord d) { Log.d(this , "Loaded ! "+d); _d=d; } public void clearScope() { scopeLoaded=false; } }
/* * Copyright 1997-2022 Optimatika * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package org.ojalgo.matrix.decomposition; import static org.ojalgo.function.constant.PrimitiveMath.*; import java.util.List; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.ojalgo.RecoverableCondition; import org.ojalgo.TestUtils; import org.ojalgo.function.UnaryFunction; import org.ojalgo.matrix.decomposition.HermitianEvD.Primitive; import org.ojalgo.matrix.decomposition.MatrixDecomposition.RankRevealing; import org.ojalgo.matrix.store.GenericStore; import org.ojalgo.matrix.store.MatrixStore; import org.ojalgo.matrix.store.Primitive64Store; import org.ojalgo.matrix.task.InverterTask; import org.ojalgo.matrix.task.SolverTask; import org.ojalgo.netio.BasicLogger; import org.ojalgo.random.Uniform; import org.ojalgo.scalar.ComplexNumber; import org.ojalgo.scalar.RationalNumber; import org.ojalgo.type.context.NumberContext; /** * @author apete */ @Disabled public class ExtremeElementsCase extends MatrixDecompositionTests { /** * 146 = (308/2) - (16/2) */ static final NumberContext ACCURACY = NumberContext.of(12, 146); private static void performInvertTest(final Primitive64Store original, final InverterTask<Double> task, final NumberContext context) { String clazz = task.getClass().toString(); try { MatrixStore<Double> tmpInverse = task.invert(original); MatrixStore<Double> tmpExpected = Primitive64Store.FACTORY.makeIdentity(original.countRows()); MatrixStore<Double> tmpActual = original.multiply(tmpInverse); TestUtils.assertEquals(clazz, tmpExpected, tmpActual, context); } catch (RecoverableCondition cause) { TestUtils.fail(clazz + " " + cause.toString()); } } private static void performSolveTest(final Primitive64Store body, final Primitive64Store rhs, final SolverTask<Double> task, final NumberContext accuracy) { String clazz = task.getClass().toString(); try { MatrixStore<Double> solution = task.solve(body, rhs); MatrixStore<Double> expected = rhs; MatrixStore<Double> actual = 
body.multiply(solution); TestUtils.assertEquals(clazz, expected, actual, accuracy); } catch (RecoverableCondition cause) { TestUtils.fail(clazz + " " + cause.toString()); } } static void doTestInvert(final boolean large) { for (int precision = 1; precision <= 16; precision++) { NumberContext accuracy = ACCURACY.withoutScale().withPrecision(precision); for (int dim = 1; dim <= 10; dim++) { // exp = 308 could potentially create numbers that are 2E308 which is larger than Double.MAX_VALUE for (int exp = 0; exp < 300; exp++) { double scale = POWER.invoke(TEN, large ? exp : -exp); Primitive64Store original = Primitive64Store.FACTORY.makeSPD(dim); if (DEBUG) { BasicLogger.debug("Scale exp={} => factor={} and context={}", exp, scale, accuracy); BasicLogger.debug("Original (unscaled) {}", original.toString()); } original.modifyAll(MULTIPLY.by(scale)); ExtremeElementsCase.performInvertTest(original, InverterTask.PRIMITIVE.make(original), accuracy); List<MatrixDecomposition<Double>> allDecomps = MatrixDecompositionTests.getPrimitiveAll(); for (MatrixDecomposition<Double> decomp : allDecomps) { if (DEBUG) { BasicLogger.debug("{} at dim={} for scale={}", decomp.getClass(), dim, scale); } if (decomp instanceof MatrixDecomposition.Solver) { ExtremeElementsCase.performInvertTest(original, (InverterTask<Double>) decomp, accuracy); } } } } } } static void doTestSVD(final boolean large) { for (int precision = 1; precision < 16; precision++) { NumberContext accuracy = ACCURACY.withPrecision(precision); for (int dim = 2; dim <= 10; dim++) { // exp = 308 could potentially create numbers that are 2E308 which is larger than Double.MAX_VALUE for (int exp = 0; exp < 308; exp++) { double scale = POWER.invoke(TEN, large ? 
exp : -exp); Primitive64Store original = Primitive64Store.FACTORY.makeSPD(dim); if (DEBUG) { BasicLogger.debug(); BasicLogger.debug("Scale exp={} => factor={} and context={}", exp, scale, accuracy); BasicLogger.debug("Original (unscaled) {}", original.toString()); } original.modifyAll(MULTIPLY.by(scale)); ExtremeElementsCase.performInvertTest(original, InverterTask.PRIMITIVE.make(original), accuracy); SingularValue<Double>[] allDecomps = MatrixDecompositionTests.getPrimitiveSingularValue(); for (SingularValue<Double> decomp : allDecomps) { if (DEBUG) { BasicLogger.debug("{} at precision= {}, dim={}, exp={} and scale={}", decomp.getClass(), precision, dim, exp, scale); } decomp.decompose(original); if (precision == 2 && dim == 2 && exp == 0) { BasicLogger.debug(); } decomp.decompose(original); TestUtils.assertEquals(original, decomp, accuracy); } } } } } static void doTestRank(final boolean large) { for (int dim = 1; dim <= 10; dim++) { // exp = 308 could potentially create numbers that are 2E308 which is larger than Double.MAX_VALUE for (int exp = 0; exp < 308; exp++) { double scale = POWER.invoke(TEN, large ? 
exp : -exp); Primitive64Store matrix = Primitive64Store.FACTORY.makeSPD(dim); matrix.modifyAll(MULTIPLY.by(scale)); SingularValue<Double> reference = SingularValue.PRIMITIVE.make(matrix); reference.decompose(matrix); if (DEBUG) { BasicLogger.debug(); BasicLogger.debug("Reference at dim={} for scale={} => rank={} {}", dim, scale, reference.getRank(), reference.isFullRank()); BasicLogger.debug("Singular Values: {}", reference.getSingularValues()); BasicLogger.debug("Matrix (unscaled) {}", matrix.toString()); } List<RankRevealing<Double>> decomps = MatrixDecompositionTests.getPrimitiveMatrixDecompositionRankRevealing(); for (RankRevealing<Double> revealer : decomps) { revealer.decompose(matrix); if (DEBUG) { BasicLogger.debug("{} at dim={} for scale={} => rank={} {}", revealer.getClass(), dim, scale, revealer.getRank(), revealer.isFullRank()); } TestUtils.assertEquals(reference.getRank(), revealer.getRank()); TestUtils.assertEquals(reference.isFullRank(), revealer.isFullRank()); } } } } static void doTestSolve(final boolean large) { for (int precision = 1; precision < 16; precision++) { final int precision1 = precision; NumberContext tmpContext = NumberContext.of(precision1, Integer.MIN_VALUE); for (int dim = 2; dim <= 10; dim++) { // exp = 308 could potentially create numbers that are 2E308 which is larger than Double.MAX_VALUE for (int exp = 0; exp < 300; exp++) { double scale = POWER.invoke(TEN, large ? 
exp : -exp); Primitive64Store tmpBody = Primitive64Store.FACTORY.makeSPD(dim); Primitive64Store tmpRHS = Primitive64Store.FACTORY.makeFilled(dim, 1, new Uniform()); if (DEBUG) { BasicLogger.debug("Scale exp={} => factor={} and context={}", exp, scale, tmpContext); BasicLogger.debug("Body (unscaled) {}", tmpBody.toString()); BasicLogger.debug("RHS (unscaled) {}", tmpRHS.toString()); } UnaryFunction<Double> tmpModifier = MULTIPLY.second(scale); tmpBody.modifyAll(tmpModifier); tmpRHS.modifyAll(tmpModifier); ExtremeElementsCase.performSolveTest(tmpBody, tmpRHS, SolverTask.PRIMITIVE.make(tmpBody, tmpRHS), tmpContext); List<MatrixDecomposition<Double>> tmpAllDecomps = MatrixDecompositionTests.getPrimitiveAll(); for (MatrixDecomposition<Double> decomp : tmpAllDecomps) { if (decomp instanceof MatrixDecomposition.Solver) { if (DEBUG) { BasicLogger.debug("{} at precision= {}, dim={}, exp={} and scale={}", decomp.getClass(), precision, dim, exp, scale); } ExtremeElementsCase.performSolveTest(tmpBody, tmpRHS, (SolverTask<Double>) decomp, tmpContext); } } } } } } static MatrixStore<Double> getVerySmall() { long dim = 5L; Primitive64Store rndm = Primitive64Store.FACTORY.make(dim, dim); for (long j = 0L; j < dim; j++) { for (long i = 0L; i < dim; i++) { rndm.set(i, j, Uniform.randomInteger(4)); } } return rndm.transpose().multiply(rndm).multiply(1E-150); } @Override @BeforeEach public void minimiseAllBranchLimits() { TestUtils.minimiseAllBranchLimits(); } @Test public void testEvD() { MatrixStore<Double> tmpProblematic = ExtremeElementsCase.getVerySmall(); Eigenvalue<RationalNumber> tmpBig = Eigenvalue.RATIONAL.make(true); Eigenvalue<ComplexNumber> tmpComplex = Eigenvalue.COMPLEX.make(true); Eigenvalue<Double> tmpPrimitive = Eigenvalue.PRIMITIVE.make(); Eigenvalue<Double> tmpJama = new RawEigenvalue.Dynamic(); TestUtils.assertTrue("Rational.compute()", tmpBig.decompose(GenericStore.RATIONAL.makeWrapper(tmpProblematic))); TestUtils.assertTrue("Complex.compute()", 
tmpComplex.decompose(GenericStore.COMPLEX.makeWrapper(tmpProblematic))); TestUtils.assertTrue("Primitive.compute()", tmpPrimitive.decompose(tmpProblematic)); TestUtils.assertTrue("Jama.compute()", tmpJama.decompose(tmpProblematic)); if (MatrixDecompositionTests.DEBUG) { BasicLogger.debug("Rational: {}", tmpBig.getEigenvalues()); BasicLogger.debug("Complex: {}", tmpComplex.getEigenvalues()); BasicLogger.debug("Primitive: {}", tmpPrimitive.getEigenvalues()); BasicLogger.debug("Jama: {}", tmpJama.getEigenvalues()); } // TestUtils.assertEquals("QR.Q Rational vs Complex", tmpBig.getQ(), tmpComplex.getQ()); // TestUtils.assertEquals("QR.Q Complex vs Primitive", tmpComplex.getQ(), tmpPrimitive.getQ()); // TestUtils.assertEquals("QR.Q Primitive vs Jama", tmpPrimitive.getQ(), tmpJama.getQ()); TestUtils.assertEquals("EvD Rational vs Complex", tmpBig.getEigenvalues().get(0), tmpComplex.getEigenvalues().get(0), ACCURACY); TestUtils.assertEquals("EvD Complex vs Primitive", tmpComplex.getEigenvalues().get(0), tmpPrimitive.getEigenvalues().get(0), ACCURACY); TestUtils.assertEquals("EvD Primitive vs Jama", tmpPrimitive.getEigenvalues().get(0), tmpJama.getEigenvalues().get(0), ACCURACY); // TODO TestUtils.assertEquals("Rational.reconstruct()", tmpProblematic, tmpBig.reconstruct(), PRECISION); TestUtils.assertEquals("Complex.reconstruct()", tmpProblematic, tmpComplex.reconstruct(), ACCURACY); TestUtils.assertEquals("Primitive.reconstruct()", tmpProblematic, tmpPrimitive.reconstruct(), ACCURACY); TestUtils.assertEquals("Jama.reconstruct()", tmpProblematic, tmpJama.reconstruct(), ACCURACY); // TODO TestUtils.assertEquals("trace() Rational vs Complex", tmpBig.getTrace(), tmpComplex.getTrace(), PRECISION); TestUtils.assertEquals("trace() Complex vs Primitive", tmpComplex.getTrace(), tmpPrimitive.getTrace(), ACCURACY); TestUtils.assertEquals("trace() Primitive vs Jama", tmpPrimitive.getTrace(), tmpJama.getTrace(), ACCURACY); TestUtils.assertEquals("det() Rational vs Complex", 
tmpBig.getDeterminant(), tmpComplex.getDeterminant(), ACCURACY); TestUtils.assertEquals("det() Complex vs Primitive", tmpComplex.getDeterminant(), tmpPrimitive.getDeterminant(), ACCURACY); TestUtils.assertEquals("det() Primitive vs Jama", tmpPrimitive.getDeterminant(), tmpJama.getDeterminant(), ACCURACY); } @Test public void testInvertEvD_10_307_1() { Primitive64Store tmpOriginal = Primitive64Store.FACTORY.rows(new double[][] { { 1.488828119167862, 0.42210916029401624, 0.3090339419657017, 0.31968488522727556, 0.32307269871880584, 0.46899580731023627, 0.12091920407255509, 0.03795763520492966, 0.17470282114825963, 0.3946701200769135 }, { 0.42210916029401624, 1.8635124366670595, 0.545906918558408, 0.5647217567560566, 0.570706312407284, 0.8284787565954789, 0.21360317145069477, 0.06705197344564522, 0.3086116630097931, 0.6971828004646068 }, { 0.3090339419657017, 0.545906918558408, 1.632193464017115, 0.41344326780911667, 0.417824671952357, 0.6065446573280001, 0.1563828419260192, 0.04908999287306165, 0.22594032001124298, 0.5104204536764679 }, { 0.31968488522727556, 0.5647217567560566, 0.41344326780911667, 1.6539821927009415, 0.43222511886101456, 0.6274493925480824, 0.16177262133291218, 0.05078189352797441, 0.23372741780909156, 0.528012240705021 }, { 0.32307269871880584, 0.570706312407284, 0.417824671952357, 0.43222511886101456, 1.660912672676802, 0.6340986950817811, 0.1634869828633994, 0.051320047166039655, 0.23620430969852588, 0.5336077726660703 }, { 0.46899580731023627, 0.8284787565954789, 0.6065446573280001, 0.6274493925480824, 0.6340986950817811, 1.959428864502749, 0.23732958500300408, 0.07449990991899043, 0.34289134104035285, 0.7746238203382216 }, { 0.12091920407255509, 0.21360317145069477, 0.1563828419260192, 0.16177262133291218, 0.1634869828633994, 0.23732958500300408, 1.2473654835536, 0.019207996469193075, 0.08840622324485663, 0.19971798116519177 }, { 0.03795763520492966, 0.06705197344564522, 0.04908999287306165, 0.05078189352797441, 0.051320047166039655, 
0.07449990991899043, 0.019207996469193075, 1.0776502695252994, 0.027751515547194034, 0.06269328624082444 }, { 0.17470282114825963, 0.3086116630097931, 0.22594032001124298, 0.23372741780909156, 0.23620430969852588, 0.34289134104035285, 0.08840622324485663, 0.027751515547194034, 1.3573911039439759, 0.2885504830370714 }, { 0.3946701200769135, 0.6971828004646068, 0.5104204536764679, 0.528012240705021, 0.5336077726660703, 0.7746238203382216, 0.19971798116519177, 0.06269328624082444, 0.2885504830370714, 1.8073801497932753 } }); tmpOriginal.modifyAll(MULTIPLY.second(POWER.invoke(TEN, 307))); RawEigenvalue.Symmetric tmpAlgorithm = new RawEigenvalue.Symmetric(); ExtremeElementsCase.performInvertTest(tmpOriginal, tmpAlgorithm, ACCURACY.withoutScale().withPrecision(1)); } @Test public void testInvertEvD_3_155_1() { Primitive64Store tmpOriginal = Primitive64Store.FACTORY.rows(new double[][] { { 1.509726074514643, 0.6439543946598099, 1.2096354379603502 }, { 0.6439543946598099, 1.134228320145167, 0.8341376835908743 }, { 1.2096354379603502, 0.8341376835908743, 1.6999093634457072 } }); tmpOriginal.modifyAll(MULTIPLY.second(POWER.invoke(TEN, 155))); Primitive tmpAlgorithm = new HermitianEvD.Primitive(); ExtremeElementsCase.performInvertTest(tmpOriginal, tmpAlgorithm, ACCURACY.withoutScale().withPrecision(1)); } @Test public void testInvertSVD_6_307_2() { Primitive64Store tmpOriginal = Primitive64Store.FACTORY.rows( new double[][] { { 1.7951923814808213, 0.659451350679988, 0.7107146253894259, 0.5763579411022435, 0.7199441830503458, 0.6356947473097578 }, { 0.659451350679988, 1.829297873115869, 0.7411968989569697, 0.6010777087922337, 0.7508223087524556, 0.6629594475153139 }, { 0.7107146253894259, 0.7411968989569697, 1.8937643794649044, 0.6478032355134435, 0.8091884190528792, 0.7144954285155056 }, { 0.5763579411022435, 0.6010777087922337, 0.6478032355134435, 1.7248031476721892, 0.6562158066095086, 0.5794240042274624 }, { 0.7199441830503458, 0.7508223087524556, 0.8091884190528792, 
0.6562158066095086, 1.905371077260138, 0.7237740848430495 }, { 0.6356947473097578, 0.6629594475153139, 0.7144954285155056, 0.5794240042274624, 0.7237740848430495, 1.7994225826534653 } }); tmpOriginal.modifyAll(MULTIPLY.second(POWER.invoke(TEN, 307))); RawSingularValue tmpAlgorithm = new RawSingularValue(); ExtremeElementsCase.performInvertTest(tmpOriginal, tmpAlgorithm, ACCURACY.withoutScale().withPrecision(2)); } @Test public void testInvertSVD_7_307_1() { Primitive64Store tmpOriginal = Primitive64Store.FACTORY.rows(new double[][] { { 1.6630365629391541, 0.5725332799439422, 0.6293312306387542, 0.3255116741968718, 0.16197060952553563, 0.38338065513999414, 0.45947212690705896 }, { 0.5725332799439422, 1.8635018216883505, 0.8196058776803916, 0.42392824070490653, 0.2109414837777316, 0.4992935723573937, 0.5983908592318098 }, { 0.6293312306387542, 0.8196058776803916, 1.949165198143842, 0.46598388385643336, 0.23186785507316293, 0.5488258051522601, 0.6577540014446122 }, { 0.3255116741968718, 0.42392824070490653, 0.46598388385643336, 1.4909407601202584, 0.11992999873960987, 0.283871509914158, 0.3402129050589385 }, { 0.16197060952553563, 0.2109414837777316, 0.23186785507316293, 0.11992999873960987, 1.2442860900574488, 0.14125097541024584, 0.16928576136879764 }, { 0.38338065513999414, 0.4992935723573937, 0.5488258051522601, 0.283871509914158, 0.14125097541024584, 1.5782194777321448, 0.4006954489432253 }, { 0.45947212690705896, 0.5983908592318098, 0.6577540014446122, 0.3402129050589385, 0.16928576136879764, 0.4006954489432253, 1.6929815829013701 } }); tmpOriginal.modifyAll(MULTIPLY.second(POWER.invoke(TEN, 307))); SingularValueDecomposition.Primitive tmpAlgorithm = new SingularValueDecomposition.Primitive(); ExtremeElementsCase.performInvertTest(tmpOriginal, tmpAlgorithm, ACCURACY.withoutScale().withPrecision(1)); } @Test public void testInvertTask_2_155_1() { Primitive64Store tmpOriginal = Primitive64Store.FACTORY .rows(new double[][] { { 1.7755876870972727, 
0.5243083105843722 }, { 0.5243083105843722, 1.6760142267686806 } }); tmpOriginal.modifyAll(MULTIPLY.second(POWER.invoke(TEN, 155))); InverterTask<Double> tmpAlgorithm = InverterTask.PRIMITIVE.make(tmpOriginal); ExtremeElementsCase.performInvertTest(tmpOriginal, tmpAlgorithm, ACCURACY.withoutScale().withPrecision(1)); } @Test public void testLU() { MatrixStore<Double> tmpProblematic = ExtremeElementsCase.getVerySmall(); LU<RationalNumber> tmpRational = LU.RATIONAL.make(); LU<ComplexNumber> tmpComplex = LU.COMPLEX.make(); LU<Double> tmpPrimitive = LU.PRIMITIVE.make(); LU<Double> tmpRaw = new RawLU(); TestUtils.assertTrue("Rational.compute()", tmpRational.decompose(GenericStore.RATIONAL.makeWrapper(tmpProblematic))); TestUtils.assertTrue("Complex.compute()", tmpComplex.decompose(GenericStore.COMPLEX.makeWrapper(tmpProblematic))); TestUtils.assertTrue("Primitive.compute()", tmpPrimitive.decompose(tmpProblematic)); TestUtils.assertTrue("Jama.compute()", tmpRaw.decompose(tmpProblematic)); if (DEBUG) { BasicLogger.debug("Rational.L", tmpRational.getL()); BasicLogger.debug("Complex.L", tmpComplex.getL()); BasicLogger.debug("Primitive.L", tmpPrimitive.getL()); BasicLogger.debug("Jama.L", tmpRaw.getL()); } TestUtils.assertEquals("L Rational vs Complex", tmpRational.getL(), tmpComplex.getL(), ACCURACY); TestUtils.assertEquals("L Complex vs Primitive", tmpComplex.getL(), tmpPrimitive.getL(), ACCURACY); TestUtils.assertEquals("L Primitive vs Jama", tmpPrimitive.getL(), tmpRaw.getL(), ACCURACY); TestUtils.assertEquals("U Rational vs Complex", tmpRational.getU(), tmpComplex.getU(), ACCURACY); TestUtils.assertEquals("U Complex vs Primitive", tmpComplex.getU(), tmpPrimitive.getU(), ACCURACY); TestUtils.assertEquals("U Primitive vs Jama", tmpPrimitive.getU(), tmpRaw.getU(), ACCURACY); TestUtils.assertEquals("Rational.reconstruct()", tmpProblematic, tmpRational.reconstruct(), ACCURACY); TestUtils.assertEquals("Complex.reconstruct()", tmpProblematic, tmpComplex.reconstruct(), 
ACCURACY); TestUtils.assertEquals("Primitive.reconstruct()", tmpProblematic, tmpPrimitive.reconstruct(), ACCURACY); TestUtils.assertEquals("Jama.reconstruct()", tmpProblematic, tmpRaw.reconstruct(), ACCURACY); SingularValue<Double> tmpSVD = new RawSingularValue(); tmpSVD.decompose(tmpProblematic); TestUtils.assertEquals("rank() SVD vs Rational", tmpSVD.getRank(), tmpRational.getRank()); TestUtils.assertEquals("rank() SVD vs Complex", tmpSVD.getRank(), tmpComplex.getRank()); TestUtils.assertEquals("rank() SVD vs Primitive", tmpSVD.getRank(), tmpPrimitive.getRank()); TestUtils.assertEquals("rank() SVD vs Jama", tmpSVD.getRank(), tmpRaw.getRank()); } @Test public void testOverflowInvert() { ExtremeElementsCase.doTestInvert(true); } @Test public void testOverflowRank() { ExtremeElementsCase.doTestRank(true); } @Test public void testOverflowSolve() { ExtremeElementsCase.doTestSolve(true); } @Test public void testQR() { MatrixStore<Double> tmpProblematic = ExtremeElementsCase.getVerySmall(); QR<RationalNumber> tmpBig = QR.RATIONAL.make(); QR<ComplexNumber> tmpComplex = QR.COMPLEX.make(); QR<Double> tmpPrimitive = QR.PRIMITIVE.make(); QR<Double> tmpJama = new RawQR(); TestUtils.assertTrue("Rational.compute()", tmpBig.decompose(GenericStore.RATIONAL.makeWrapper(tmpProblematic))); TestUtils.assertTrue("Complex.compute()", tmpComplex.decompose(GenericStore.COMPLEX.makeWrapper(tmpProblematic))); TestUtils.assertTrue("Primitive.compute()", tmpPrimitive.decompose(tmpProblematic)); TestUtils.assertTrue("Jama.compute()", tmpJama.decompose(tmpProblematic)); if (MatrixDecompositionTests.DEBUG) { BasicLogger.debug("Rational Q", tmpBig.getQ()); BasicLogger.debug("Complex Q", tmpComplex.getQ()); BasicLogger.debug("Primitive Q", tmpPrimitive.getQ()); BasicLogger.debug("Jama Q", tmpJama.getQ()); } TestUtils.assertEquals("QR.reconstruct() Rational", tmpProblematic, tmpBig.reconstruct(), ACCURACY); TestUtils.assertEquals("QR.reconstruct() Complex", tmpProblematic, 
tmpComplex.reconstruct(), ACCURACY); TestUtils.assertEquals("QR.reconstruct() Primitive", tmpProblematic, tmpPrimitive.reconstruct(), ACCURACY); TestUtils.assertEquals("QR.reconstruct() Jama", tmpProblematic, tmpJama.reconstruct(), ACCURACY); SingularValue<Double> tmpSVD = new RawSingularValue(); tmpSVD.decompose(tmpProblematic); TestUtils.assertEquals("rank() SVD vs Rational", tmpSVD.getRank(), tmpBig.getRank()); TestUtils.assertEquals("rank() SVD vs Complex", tmpSVD.getRank(), tmpComplex.getRank()); TestUtils.assertEquals("rank() SVD vs Primitive", tmpSVD.getRank(), tmpPrimitive.getRank()); TestUtils.assertEquals("rank() SVD vs Jama", tmpSVD.getRank(), tmpJama.getRank()); } @Test public void testSolveLU_1_16_1() { Primitive64Store tmpBody = Primitive64Store.FACTORY.rows(new double[][] { { 1.7259687987824925 } }); Primitive64Store tmpRHS = Primitive64Store.FACTORY.rows(new double[][] { { 0.6533251061005759 } }); UnaryFunction<Double> tmpSecond = MULTIPLY.second(POWER.invoke(TEN, -16)); tmpBody.modifyAll(tmpSecond); tmpRHS.modifyAll(tmpSecond); SolverTask<Double> tmpAlgorithm = new LUDecomposition.Primitive(); ExtremeElementsCase.performSolveTest(tmpBody, tmpRHS, tmpAlgorithm, ACCURACY.withoutScale().withPrecision(1)); } @Test public void testUnderflowInvert() { ExtremeElementsCase.doTestInvert(true); } @Test public void testUnderflowRank() { ExtremeElementsCase.doTestRank(false); } @Test public void testUnderflowSolve() { ExtremeElementsCase.doTestSolve(false); } @Test public void testUnderflowSVD() { ExtremeElementsCase.doTestSVD(false); } @Test public void testOverflowSVD() { ExtremeElementsCase.doTestSVD(true); } }
/* * Copyright (c) 2017 - 2019, SmartDeviceLink Consortium, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following * disclaimer in the documentation and/or other materials provided with the * distribution. * * Neither the name of the SmartDeviceLink Consortium, Inc. nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.smartdevicelink.proxy.rpc; import androidx.annotation.NonNull; import com.smartdevicelink.protocol.enums.FunctionID; import com.smartdevicelink.proxy.RPCRequest; import com.smartdevicelink.proxy.rpc.enums.FileType; import java.util.Hashtable; import java.util.zip.CRC32; /** * Used to push a binary data onto the SDL module from a mobile device, such as * icons and album art. 
* * <p><b> Parameter List</b></p> * * <table border="1" rules="all"> * <tr> * <th>Name</th> * <th>Type</th> * <th>Description</th> * <th> Req.</th> * <th>Notes</th> * <th>Version Available</th> * </tr> * <tr> * <td>FileName</td> * <td>String</td> * <td>File reference name.</td> * <td>Y</td> * <td>Maxlength=500</td> * <td>SmartDeviceLink 2.0</td> * </tr> * <tr> * <td>fileType</td> * <td>FileType</td> * <td>Selected file type.</td> * <td>Y</td> * <td></td> * <td>SmartDeviceLink 2.0</td> * </tr> * <tr> * <td>persistentFile</td> * <td>Boolean</td> * <td>Indicates if the file is meant to persist between sessions / ignition cycles. If set to TRUE,then the system will aim to persist this file through session / cycles. While files with this designation will have priority over others,they are subject to deletion by the system at any time.In the event of automatic deletion by the system, the app will receive a rejection and have to resend the file. If omitted, the value will be set to false.</td> * <td>N</td> * <td></td> * <td>SmartDeviceLink 2.0</td> * </tr> * <tr> * <td>systemFile</td> * <td>Boolean</td> * <td>Indicates if the file is meant to be passed thru core to elsewhere on the system. If set to TRUE, then the system will instead pass the data thru as it arrives to a predetermined area outside of core. If omitted, the value will be set to false.</td> * <td>N</td> * <td></td> * <td>SmartDeviceLink 2.3.2</td> * </tr> * <tr> * <td>offset</td> * <td>Float</td> * <td>Optional offset in bytes for resuming partial data chunks</td> * <td>N</td> * <td>Minvalue=0; Maxvalue=100000000000</td> * <td>SmartDeviceLink 2.3.2</td> * </tr> * <tr> * <td>length</td> * <td>Float</td> * <td>Optional length in bytes for resuming partial data chunks. 
If offset is set to 0, then length is the total length of the file to be downloaded</td> * <td>N</td> * <td>Minvalue=0; Maxvalue=100000000000</td> * <td>SmartDeviceLink 2.3.2</td> * </tr> * <tr> * <td>crc</td> * <td>Long</td> * <td>Additional CRC32 checksum to protect data integrity up to 512 Mbits .</td> * <td>N</td> * <td>minvalue="0" maxvalue="4294967295"</td> * <td>SmartDeviceLink 2.3.2</td> * </tr> * </table> * <p> <b>Note: </b></p> * When using PutFiles you may want to check for memory * * <p><b>Response</b> </p> * Response is sent, when the file data was copied (success case). Or when an error occurred. Not supported on First generation SDL modules. * * <p><b> Non-default Result Codes:</b></p> * <p> SUCCESS</p> * <p> INVALID_DATA</p> * <p> OUT_OF_MEMORY</p> * <p> TOO_MANY_PENDING_REQUESTS</p> * <p> APPLICATION_NOT_REGISTERED</p> * <p> GENERIC_ERROR</p> * <p>REJECTED</p> * * <p><table border="1" rules="all"></p> * <tr> * <th>Name</th> * <th>Type</th> * <th>Description</th> * <th> Req.</th> * <th>Notes</th> * <th>Version Available</th> * </tr> * <tr> * <td>spaceAvailable</td> * <td>Integer</td> * <td>Provides the total local space available on SDL for the registered app.</td> * <td></td> * <td>Minvalue=0; Maxvalue=2000000000</td> * <td>SmartDeviceLink 2.0</td> * </tr> * * </table> * * @see DeleteFile * @see ListFiles * @since SmartDeviceLink 2.0 */ public class PutFile extends RPCRequest { public static final String KEY_PERSISTENT_FILE = "persistentFile"; public static final String KEY_SYSTEM_FILE = "systemFile"; public static final String KEY_FILE_TYPE = "fileType"; public static final String KEY_SDL_FILE_NAME = "syncFileName"; public static final String KEY_OFFSET = "offset"; public static final String KEY_LENGTH = "length"; public static final String KEY_CRC = "crc"; /** * Constructs a new PutFile object */ public PutFile() { super(FunctionID.PUT_FILE.toString()); } /** * Constructs a new PutFile object indicated by the Hashtable parameter * * @param hash 
The Hashtable to use */ public PutFile(Hashtable<String, Object> hash) { super(hash); } /** * Constructs a new PutFile object * * @param syncFileName a String value representing a file reference name * <b>Notes: </b>Maxlength=500, however the max file name length may vary based on remote filesystem limitations * @param fileType a FileType value representing a selected file type */ public PutFile(@NonNull String syncFileName, @NonNull FileType fileType) { this(); setSdlFileName(syncFileName); setFileType(fileType); } /** * Sets a file reference name * * @param sdlFileName a String value representing a file reference name * <p></p> * <b>Notes: </b>Maxlength=500, however the max file name length may vary based on remote filesystem limitations */ public PutFile setSdlFileName(@NonNull String sdlFileName) { setParameters(KEY_SDL_FILE_NAME, sdlFileName); return this; } /** * Gets a file reference name * * @return String - a String value representing a file reference name */ public String getSdlFileName() { return getString(KEY_SDL_FILE_NAME); } /** * Sets file type * * @param fileType a FileType value representing a selected file type */ public PutFile setFileType(@NonNull FileType fileType) { setParameters(KEY_FILE_TYPE, fileType); return this; } /** * Gets a file type * * @return FileType -a FileType value representing a selected file type */ public FileType getFileType() { return (FileType) getObject(FileType.class, KEY_FILE_TYPE); } /** * Sets a value to indicates if the file is meant to persist between * sessions / ignition cycles. If set to TRUE, then the system will aim to * persist this file through session / cycles. While files with this * designation will have priority over others, they are subject to deletion * by the system at any time. In the event of automatic deletion by the * system, the app will receive a rejection and have to resend the file. 
If * omitted, the value will be set to false * <p></p> * * @param persistentFile a Boolean value */ public PutFile setPersistentFile(Boolean persistentFile) { setParameters(KEY_PERSISTENT_FILE, persistentFile); return this; } /** * Gets a value to Indicates if the file is meant to persist between * sessions / ignition cycles * * @return Boolean -a Boolean value to indicates if the file is meant to * persist between sessions / ignition cycles */ public Boolean getPersistentFile() { return getBoolean(KEY_PERSISTENT_FILE); } public PutFile setFileData(byte[] fileData) { setBulkData(fileData); return this; } public byte[] getFileData() { return getBulkData(); } /** * @param offset Optional offset in bytes for resuming partial data chunks * @deprecated as of SmartDeviceLink 4.0. Use {@link #setOffset(Long)} instead. */ public PutFile setOffset(Integer offset) { if (offset == null) { setOffset((Long) null); } else { setOffset(offset.longValue()); } return this; } /** * @param offset Optional offset in bytes for resuming partial data chunks */ public PutFile setOffset(Long offset) { setParameters(KEY_OFFSET, offset); return this; } public Long getOffset() { final Object o = getParameters(KEY_OFFSET); if (o == null) { return null; } if (o instanceof Integer) { return ((Integer) o).longValue(); } else if (o instanceof Long) { return (Long) o; } return null; } /** * @param length Optional length in bytes for resuming partial data chunks. If offset is set to 0, then length is * the total length of the file to be downloaded * @deprecated as of SmartDeviceLink 4.0. Use {@link #setLength(Long)} instead. */ public PutFile setLength(Integer length) { if (length == null) { setLength((Long) null); } else { setLength(length.longValue()); } return this; } /** * @param length Optional length in bytes for resuming partial data chunks. 
If offset is set to 0, then length is * the total length of the file to be downloaded */ public PutFile setLength(Long length) { setParameters(KEY_LENGTH, length); return this; } public Long getLength() { final Object o = getParameters(KEY_LENGTH); if (o == null) { return null; } if (o instanceof Integer) { return ((Integer) o).longValue(); } else if (o instanceof Long) { return (Long) o; } return null; } public PutFile setSystemFile(Boolean systemFile) { setParameters(KEY_SYSTEM_FILE, systemFile); return this; } public Boolean getSystemFile() { final Object o = getParameters(KEY_SYSTEM_FILE); if (o instanceof Boolean) { return (Boolean) o; } else return null; } /** * This takes the file data as an array of bytes and calculates the * CRC32 for it. * * @param fileData - the file as a byte array */ public PutFile setCRC(byte[] fileData) { if (fileData != null) { CRC32 crc = new CRC32(); crc.update(fileData); parameters.put(KEY_CRC, crc.getValue()); } else { parameters.remove(KEY_CRC); } return this; } /** * This assumes you have created your own CRC32 and are setting it with the file * <STRONG>Please avoid using your own calculations for this, and use the method * included in java.util</STRONG> * * @param crc - the CRC32 of the file being set */ public PutFile setCRC(Long crc) { if (crc != null) { parameters.put(KEY_CRC, crc); } else { parameters.remove(KEY_CRC); } return this; } /** * This returns the CRC, if it has been set, for the file object * * @return - a CRC32 Long */ public Long getCRC() { final Object o = parameters.get(KEY_CRC); if (o == null) { return null; } if (o instanceof Integer) { return ((Integer) o).longValue(); } else if (o instanceof Long) { return (Long) o; } return null; } }
package com.devicehive.websockets.handlers; import com.devicehive.exceptions.HiveException; import com.devicehive.json.GsonFactory; import com.devicehive.json.strategies.JsonPolicyApply; import com.devicehive.json.strategies.JsonPolicyDef; import com.devicehive.model.ErrorResponse; import com.devicehive.util.ThreadLocalVariablesKeeper; import com.devicehive.websockets.converters.JsonMessageBuilder; import com.devicehive.websockets.converters.WebSocketResponse; import com.devicehive.websockets.handlers.annotations.Action; import com.devicehive.websockets.handlers.annotations.WsParam; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.Maps; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.enterprise.inject.spi.Bean; import javax.enterprise.inject.spi.BeanManager; import javax.inject.Inject; import javax.inject.Singleton; import javax.persistence.OptimisticLockException; import javax.persistence.PersistenceException; import javax.servlet.http.HttpServletResponse; import javax.validation.ConstraintViolationException; import javax.websocket.Session; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentMap; @Singleton public class WebsocketExecutor { private static final Logger logger = LoggerFactory.getLogger(WebsocketExecutor.class); private static Set<Class<WebsocketHandlers>> HANDLERS_SET = new HashSet() { { add(CommonHandlers.class); add(CommandHandlers.class); add(NotificationHandlers.class); add(DeviceHandlers.class); } private static final 
long serialVersionUID = -7417770184838061839L; }; @Inject private BeanManager beanManager; private ConcurrentMap<String, Pair<Class<WebsocketHandlers>, Method>> methodsCache = Maps.newConcurrentMap(); private ConcurrentMap<Method, List<WebsocketParameterDescriptor>> parametersCache = Maps.newConcurrentMap(); public JsonObject execute(JsonObject request, Session session) { JsonObject response = null; try { logger.debug("[execute] "); ThreadLocalVariablesKeeper.setRequest(request); ThreadLocalVariablesKeeper.setSession(session); response = tryExecute(request, session); logger.debug("[execute] building final response"); } catch (HiveException ex) { response = JsonMessageBuilder.createError(ex).build(); } catch (ConstraintViolationException ex) { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_BAD_REQUEST, ex.getMessage()).build(); } catch (org.hibernate.exception.ConstraintViolationException ex) { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_CONFLICT, ex.getMessage()).build(); } catch (JsonParseException ex) { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_BAD_REQUEST, ErrorResponse.JSON_SYNTAX_ERROR_MESSAGE).build(); } catch (OptimisticLockException ex) { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_CONFLICT, ErrorResponse.CONFLICT_MESSAGE).build(); } catch (PersistenceException ex) { if (ex.getCause() instanceof org.hibernate.exception.ConstraintViolationException) { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_CONFLICT, ex.getMessage()).build(); } else { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getMessage()).build(); } } catch (Exception ex) { response = JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getMessage()).build(); } finally { ThreadLocalVariablesKeeper.setRequest(null); 
ThreadLocalVariablesKeeper.setSession(null); } logger.debug("[execute] building final response"); return new JsonMessageBuilder() .addAction(request.get(JsonMessageBuilder.ACTION).getAsString()) .addRequestId(request.get(JsonMessageBuilder.REQUEST_ID)) .include(response) .build(); } public JsonObject tryExecute(JsonObject request, Session session) { Pair<Class<WebsocketHandlers>, Method> methodPair = getMethod(request); List<Object> args = prepareArgumentValues(methodPair.getRight(), request, session); WebSocketResponse response = null; try { response = (WebSocketResponse) methodPair.getRight().invoke(getBean(methodPair.getLeft()), args.toArray()); } catch (InvocationTargetException ex) { Throwable target = ex.getTargetException(); Throwables.propagateIfPossible(target); throw new HiveException(target.getMessage(), target); } catch (IllegalAccessException ex) { throw HiveException.fatal(); } if (response == null) { logger.error("[tryExecute] response is null "); return JsonMessageBuilder.createErrorResponseBuilder(HttpServletResponse.SC_INTERNAL_SERVER_ERROR).build(); } else { return response.getResponseAsJson(); } } private WebsocketHandlers getBean(Class<WebsocketHandlers> clazz) { Bean bean = beanManager.getBeans(clazz).iterator().next(); return (WebsocketHandlers) beanManager.getReference(bean, bean.getBeanClass(), beanManager.createCreationalContext(bean)); } private Pair<Class<WebsocketHandlers>, Method> getMethod(JsonObject request) { String action = request.getAsJsonPrimitive("action").getAsString(); Pair<Class<WebsocketHandlers>, Method> methodPair = methodsCache.get(action); if (methodPair != null) { return methodPair; } for (Class<WebsocketHandlers> currentClass : HANDLERS_SET) { boolean found = false; for (final Method method : currentClass.getMethods()) { if (method.isAnnotationPresent(Action.class)) { if (method.getAnnotation(Action.class).value().equals(action)) { Preconditions.checkState(method.getReturnType().equals(WebSocketResponse.class), 
"Method should have %s return type", WebSocketResponse.class); methodPair = ImmutablePair.of(currentClass, method); found = true; break; } } } if (found) { break; } } if (methodPair == null) { throw new HiveException("Unknown action requested: " + action, HttpServletResponse.SC_BAD_REQUEST); } methodsCache.put(action, methodPair); return methodPair; } private List<WebsocketParameterDescriptor> getArguments(Method executedMethod) { List<WebsocketParameterDescriptor> descriptors = parametersCache.get(executedMethod); if (descriptors != null) { return descriptors; } Type[] parameterTypes = executedMethod.getGenericParameterTypes(); Annotation[][] allAnnotations = executedMethod.getParameterAnnotations(); descriptors = new ArrayList<>(parameterTypes.length); for (int i = 0; i < parameterTypes.length; i++) { Type type = parameterTypes[i]; String name = null; JsonPolicyDef.Policy jsonPolicy = null; for (Annotation currentParamAnnotation : allAnnotations[i]) { if (currentParamAnnotation instanceof WsParam) { name = ((WsParam) currentParamAnnotation).value(); } if (currentParamAnnotation instanceof JsonPolicyApply) { jsonPolicy = ((JsonPolicyApply) currentParamAnnotation).value(); } } descriptors.add(new WebsocketParameterDescriptor(name, type, jsonPolicy)); } parametersCache.put(executedMethod, descriptors); return descriptors; } private List<Object> prepareArgumentValues(Method executedMethod, JsonObject request, Session session) { List<WebsocketParameterDescriptor> descriptors = getArguments(executedMethod); List<Object> values = new ArrayList<>(descriptors.size()); for (WebsocketParameterDescriptor descriptor : descriptors) { Type type = descriptor.getType(); if (Session.class.equals(type)) { values.add(session); } else { String name = descriptor.getName(); if (JsonObject.class.equals(type)) { values.add(name != null ? request.getAsJsonObject(name) : request); } else { Preconditions.checkNotNull(name); Gson gson = descriptor.getPolicy() == null ? 
GsonFactory.createGson() : GsonFactory.createGson(descriptor.getPolicy()); values.add(gson.fromJson(request.get(name), type)); } } } return values; } private static class WebsocketParameterDescriptor { private String name; private Type type; private JsonPolicyDef.Policy policy; public WebsocketParameterDescriptor(String name, Type type, JsonPolicyDef.Policy policy) { this.name = name; this.type = type; this.policy = policy; } private String getName() { return name; } private Type getType() { return type; } private JsonPolicyDef.Policy getPolicy() { return policy; } } }
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package io.trivium.dep.com.google.common.collect; import static io.trivium.dep.com.google.common.base.Preconditions.checkArgument; import static io.trivium.dep.com.google.common.base.Preconditions.checkNotNull; import io.trivium.dep.com.google.common.annotations.Beta; import io.trivium.dep.com.google.common.annotations.GwtIncompatible; import java.io.Serializable; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; /** * A {@link SortedMultiset} whose contents will never change, with many other important properties * detailed at {@link ImmutableCollection}. * * <p><b>Warning:</b> as with any sorted collection, you are strongly advised not to use a {@link * Comparator} or {@link Comparable} type whose comparison behavior is <i>inconsistent with * equals</i>. That is, {@code a.compareTo(b)} or {@code comparator.compare(a, b)} should equal zero * <i>if and only if</i> {@code a.equals(b)}. If this advice is not followed, the resulting * collection will not correctly obey its specification. * * <p>See the Guava User Guide article on <a href= * "https://github.com/google/guava/wiki/ImmutableCollectionsExplained"> * immutable collections</a>. 
* * @author Louis Wasserman * @since 12.0 */ @Beta @GwtIncompatible("hasn't been tested yet") public abstract class ImmutableSortedMultiset<E> extends ImmutableSortedMultisetFauxverideShim<E> implements SortedMultiset<E> { // TODO(lowasser): GWT compatibility private static final Comparator<Comparable> NATURAL_ORDER = Ordering.natural(); private static final ImmutableSortedMultiset<Comparable> NATURAL_EMPTY_MULTISET = new RegularImmutableSortedMultiset<Comparable>(NATURAL_ORDER); /** * Returns the empty immutable sorted multiset. */ @SuppressWarnings("unchecked") public static <E> ImmutableSortedMultiset<E> of() { return (ImmutableSortedMultiset) NATURAL_EMPTY_MULTISET; } /** * Returns an immutable sorted multiset containing a single element. */ public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(E element) { RegularImmutableSortedSet<E> elementSet = (RegularImmutableSortedSet<E>) ImmutableSortedSet.of(element); long[] cumulativeCounts = {0, 1}; return new RegularImmutableSortedMultiset<E>(elementSet, cumulativeCounts, 0, 1); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. * * @throws NullPointerException if any element is null */ @SuppressWarnings("unchecked") public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(E e1, E e2) { return copyOf(Ordering.natural(), Arrays.asList(e1, e2)); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. * * @throws NullPointerException if any element is null */ @SuppressWarnings("unchecked") public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(E e1, E e2, E e3) { return copyOf(Ordering.natural(), Arrays.asList(e1, e2, e3)); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. 
* * @throws NullPointerException if any element is null */ @SuppressWarnings("unchecked") public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of( E e1, E e2, E e3, E e4) { return copyOf(Ordering.natural(), Arrays.asList(e1, e2, e3, e4)); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. * * @throws NullPointerException if any element is null */ @SuppressWarnings("unchecked") public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of( E e1, E e2, E e3, E e4, E e5) { return copyOf(Ordering.natural(), Arrays.asList(e1, e2, e3, e4, e5)); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. * * @throws NullPointerException if any element is null */ @SuppressWarnings("unchecked") public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of( E e1, E e2, E e3, E e4, E e5, E e6, E... remaining) { int size = remaining.length + 6; List<E> all = Lists.newArrayListWithCapacity(size); Collections.addAll(all, e1, e2, e3, e4, e5, e6); Collections.addAll(all, remaining); return copyOf(Ordering.natural(), all); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. * * @throws NullPointerException if any of {@code elements} is null */ public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> copyOf(E[] elements) { return copyOf(Ordering.natural(), Arrays.asList(elements)); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. To create a copy of a {@code SortedMultiset} that preserves the * comparator, call {@link #copyOfSorted} instead. This method iterates over {@code elements} at * most once. 
* * <p>Note that if {@code s} is a {@code multiset<String>}, then {@code * ImmutableSortedMultiset.copyOf(s)} returns an {@code ImmutableSortedMultiset<String>} * containing each of the strings in {@code s}, while {@code ImmutableSortedMultiset.of(s)} * returns an {@code ImmutableSortedMultiset<multiset<String>>} containing one element (the given * multiset itself). * * <p>Despite the method name, this method attempts to avoid actually copying the data when it is * safe to do so. The exact circumstances under which a copy will or will not be performed are * undocumented and subject to change. * * <p>This method is not type-safe, as it may be called on elements that are not mutually * comparable. * * @throws ClassCastException if the elements are not mutually comparable * @throws NullPointerException if any of {@code elements} is null */ public static <E> ImmutableSortedMultiset<E> copyOf(Iterable<? extends E> elements) { // Hack around E not being a subtype of Comparable. // Unsafe, see ImmutableSortedMultisetFauxverideShim. @SuppressWarnings("unchecked") Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable>natural(); return copyOf(naturalOrder, elements); } /** * Returns an immutable sorted multiset containing the given elements sorted by their natural * ordering. * * <p>This method is not type-safe, as it may be called on elements that are not mutually * comparable. * * @throws ClassCastException if the elements are not mutually comparable * @throws NullPointerException if any of {@code elements} is null */ public static <E> ImmutableSortedMultiset<E> copyOf(Iterator<? extends E> elements) { // Hack around E not being a subtype of Comparable. // Unsafe, see ImmutableSortedMultisetFauxverideShim. @SuppressWarnings("unchecked") Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable>natural(); return copyOf(naturalOrder, elements); } /** * Returns an immutable sorted multiset containing the given elements sorted by the given {@code * Comparator}. 
* * @throws NullPointerException if {@code comparator} or any of {@code elements} is null */ public static <E> ImmutableSortedMultiset<E> copyOf( Comparator<? super E> comparator, Iterator<? extends E> elements) { checkNotNull(comparator); return new Builder<E>(comparator).addAll(elements).build(); } /** * Returns an immutable sorted multiset containing the given elements sorted by the given {@code * Comparator}. This method iterates over {@code elements} at most once. * * <p>Despite the method name, this method attempts to avoid actually copying the data when it is * safe to do so. The exact circumstances under which a copy will or will not be performed are * undocumented and subject to change. * * @throws NullPointerException if {@code comparator} or any of {@code elements} is null */ public static <E> ImmutableSortedMultiset<E> copyOf( Comparator<? super E> comparator, Iterable<? extends E> elements) { if (elements instanceof ImmutableSortedMultiset) { @SuppressWarnings("unchecked") // immutable collections are always safe for covariant casts ImmutableSortedMultiset<E> multiset = (ImmutableSortedMultiset<E>) elements; if (comparator.equals(multiset.comparator())) { if (multiset.isPartialView()) { return copyOfSortedEntries(comparator, multiset.entrySet().asList()); } else { return multiset; } } } elements = Lists.newArrayList(elements); // defensive copy TreeMultiset<E> sortedCopy = TreeMultiset.create(checkNotNull(comparator)); Iterables.addAll(sortedCopy, elements); return copyOfSortedEntries(comparator, sortedCopy.entrySet()); } /** * Returns an immutable sorted multiset containing the elements of a sorted multiset, sorted by * the same {@code Comparator}. That behavior differs from {@link #copyOf(Iterable)}, which * always uses the natural ordering of the elements. * * <p>Despite the method name, this method attempts to avoid actually copying the data when it is * safe to do so. 
The exact circumstances under which a copy will or will not be performed are * undocumented and subject to change. * * <p>This method is safe to use even when {@code sortedMultiset} is a synchronized or concurrent * collection that is currently being modified by another thread. * * @throws NullPointerException if {@code sortedMultiset} or any of its elements is null */ public static <E> ImmutableSortedMultiset<E> copyOfSorted(SortedMultiset<E> sortedMultiset) { return copyOfSortedEntries( sortedMultiset.comparator(), Lists.newArrayList(sortedMultiset.entrySet())); } private static <E> ImmutableSortedMultiset<E> copyOfSortedEntries( Comparator<? super E> comparator, Collection<Entry<E>> entries) { if (entries.isEmpty()) { return emptyMultiset(comparator); } ImmutableList.Builder<E> elementsBuilder = new ImmutableList.Builder<E>(entries.size()); long[] cumulativeCounts = new long[entries.size() + 1]; int i = 0; for (Entry<E> entry : entries) { elementsBuilder.add(entry.getElement()); cumulativeCounts[i + 1] = cumulativeCounts[i] + entry.getCount(); i++; } return new RegularImmutableSortedMultiset<E>( new RegularImmutableSortedSet<E>(elementsBuilder.build(), comparator), cumulativeCounts, 0, entries.size()); } @SuppressWarnings("unchecked") static <E> ImmutableSortedMultiset<E> emptyMultiset(Comparator<? super E> comparator) { if (NATURAL_ORDER.equals(comparator)) { return (ImmutableSortedMultiset<E>) NATURAL_EMPTY_MULTISET; } else { return new RegularImmutableSortedMultiset<E>(comparator); } } ImmutableSortedMultiset() {} @Override public final Comparator<? super E> comparator() { return elementSet().comparator(); } @Override public abstract ImmutableSortedSet<E> elementSet(); transient ImmutableSortedMultiset<E> descendingMultiset; @Override public ImmutableSortedMultiset<E> descendingMultiset() { ImmutableSortedMultiset<E> result = descendingMultiset; if (result == null) { return descendingMultiset = this.isEmpty() ? 
emptyMultiset(Ordering.from(comparator()).reverse()) : new DescendingImmutableSortedMultiset<E>(this); } return result; } /** * {@inheritDoc} * * <p>This implementation is guaranteed to throw an {@link UnsupportedOperationException}. * * @throws UnsupportedOperationException always * @deprecated Unsupported operation. */ @Deprecated @Override public final Entry<E> pollFirstEntry() { throw new UnsupportedOperationException(); } /** * {@inheritDoc} * * <p>This implementation is guaranteed to throw an {@link UnsupportedOperationException}. * * @throws UnsupportedOperationException always * @deprecated Unsupported operation. */ @Deprecated @Override public final Entry<E> pollLastEntry() { throw new UnsupportedOperationException(); } @Override public abstract ImmutableSortedMultiset<E> headMultiset(E upperBound, BoundType boundType); @Override public ImmutableSortedMultiset<E> subMultiset( E lowerBound, BoundType lowerBoundType, E upperBound, BoundType upperBoundType) { checkArgument( comparator().compare(lowerBound, upperBound) <= 0, "Expected lowerBound <= upperBound but %s > %s", lowerBound, upperBound); return tailMultiset(lowerBound, lowerBoundType).headMultiset(upperBound, upperBoundType); } @Override public abstract ImmutableSortedMultiset<E> tailMultiset(E lowerBound, BoundType boundType); /** * Returns a builder that creates immutable sorted multisets with an explicit comparator. If the * comparator has a more general type than the set being generated, such as creating a {@code * SortedMultiset<Integer>} with a {@code Comparator<Number>}, use the {@link Builder} * constructor instead. * * @throws NullPointerException if {@code comparator} is null */ public static <E> Builder<E> orderedBy(Comparator<E> comparator) { return new Builder<E>(comparator); } /** * Returns a builder that creates immutable sorted multisets whose elements are ordered by the * reverse of their natural ordering. 
* * <p>Note: the type parameter {@code E} extends {@code Comparable<?>} rather than {@code * Comparable<? super E>} as a workaround for javac <a * href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6468354">bug 6468354</a>. */ public static <E extends Comparable<?>> Builder<E> reverseOrder() { return new Builder<E>(Ordering.natural().reverse()); } /** * Returns a builder that creates immutable sorted multisets whose elements are ordered by their * natural ordering. The sorted multisets use {@link Ordering#natural()} as the comparator. This * method provides more type-safety than {@link #builder}, as it can be called only for classes * that implement {@link Comparable}. * * <p>Note: the type parameter {@code E} extends {@code Comparable<?>} rather than {@code * Comparable<? super E>} as a workaround for javac <a * href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6468354">bug 6468354</a>. */ public static <E extends Comparable<?>> Builder<E> naturalOrder() { return new Builder<E>(Ordering.natural()); } /** * A builder for creating immutable multiset instances, especially {@code public static final} * multisets ("constant multisets"). Example: * * <pre> {@code * * public static final ImmutableSortedMultiset<Bean> BEANS = * new ImmutableSortedMultiset.Builder<Bean>() * .addCopies(Bean.COCOA, 4) * .addCopies(Bean.GARDEN, 6) * .addCopies(Bean.RED, 8) * .addCopies(Bean.BLACK_EYED, 10) * .build();}</pre> * * <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build * multiple multisets in series. * * @since 12.0 */ public static class Builder<E> extends ImmutableMultiset.Builder<E> { /** * Creates a new builder. The returned builder is equivalent to the builder generated by * {@link ImmutableSortedMultiset#orderedBy(Comparator)}. */ public Builder(Comparator<? super E> comparator) { super(TreeMultiset.<E>create(checkNotNull(comparator))); } /** * Adds {@code element} to the {@code ImmutableSortedMultiset}. 
* * @param element the element to add * @return this {@code Builder} object * @throws NullPointerException if {@code element} is null */ @Override public Builder<E> add(E element) { super.add(element); return this; } /** * Adds a number of occurrences of an element to this {@code ImmutableSortedMultiset}. * * @param element the element to add * @param occurrences the number of occurrences of the element to add. May be zero, in which * case no change will be made. * @return this {@code Builder} object * @throws NullPointerException if {@code element} is null * @throws IllegalArgumentException if {@code occurrences} is negative, or if this operation * would result in more than {@link Integer#MAX_VALUE} occurrences of the element */ @Override public Builder<E> addCopies(E element, int occurrences) { super.addCopies(element, occurrences); return this; } /** * Adds or removes the necessary occurrences of an element such that the element attains the * desired count. * * @param element the element to add or remove occurrences of * @param count the desired count of the element in this multiset * @return this {@code Builder} object * @throws NullPointerException if {@code element} is null * @throws IllegalArgumentException if {@code count} is negative */ @Override public Builder<E> setCount(E element, int count) { super.setCount(element, count); return this; } /** * Adds each element of {@code elements} to the {@code ImmutableSortedMultiset}. * * @param elements the elements to add * @return this {@code Builder} object * @throws NullPointerException if {@code elements} is null or contains a null element */ @Override public Builder<E> add(E... elements) { super.add(elements); return this; } /** * Adds each element of {@code elements} to the {@code ImmutableSortedMultiset}. 
* * @param elements the {@code Iterable} to add to the {@code ImmutableSortedMultiset} * @return this {@code Builder} object * @throws NullPointerException if {@code elements} is null or contains a null element */ @Override public Builder<E> addAll(Iterable<? extends E> elements) { super.addAll(elements); return this; } /** * Adds each element of {@code elements} to the {@code ImmutableSortedMultiset}. * * @param elements the elements to add to the {@code ImmutableSortedMultiset} * @return this {@code Builder} object * @throws NullPointerException if {@code elements} is null or contains a null element */ @Override public Builder<E> addAll(Iterator<? extends E> elements) { super.addAll(elements); return this; } /** * Returns a newly-created {@code ImmutableSortedMultiset} based on the contents of the {@code * Builder}. */ @Override public ImmutableSortedMultiset<E> build() { return copyOfSorted((SortedMultiset<E>) contents); } } private static final class SerializedForm<E> implements Serializable { Comparator<? super E> comparator; E[] elements; int[] counts; @SuppressWarnings("unchecked") SerializedForm(SortedMultiset<E> multiset) { this.comparator = multiset.comparator(); int n = multiset.entrySet().size(); elements = (E[]) new Object[n]; counts = new int[n]; int i = 0; for (Entry<E> entry : multiset.entrySet()) { elements[i] = entry.getElement(); counts[i] = entry.getCount(); i++; } } Object readResolve() { int n = elements.length; Builder<E> builder = new Builder<E>(comparator); for (int i = 0; i < n; i++) { builder.addCopies(elements[i], counts[i]); } return builder.build(); } } @Override Object writeReplace() { return new SerializedForm<E>(this); } }
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.common.hash;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.annotations.Beta;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.base.Predicate;
import com.google.common.hash.BloomFilterStrategies.BitArray;
import com.google.common.primitives.SignedBytes;
import com.google.common.primitives.UnsignedBytes;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;

import javax.annotation.Nullable;

/**
 * A Bloom filter for instances of {@code T}. A Bloom filter offers an approximate containment test
 * with one-sided error: if it claims that an element is contained in it, this might be in error,
 * but if it claims that an element is <i>not</i> contained in it, then this is definitely true.
 *
 * <p>If you are unfamiliar with Bloom filters, this nice
 * <a href="http://llimllib.github.com/bloomfilter-tutorial/">tutorial</a> may help you understand
 * how they work.
 *
 * <p>The false positive probability ({@code FPP}) of a bloom filter is defined as the probability
 * that {@linkplain #mightContain(Object)} will erroneously return {@code true} for an object that
 * has not actually been put in the {@code BloomFilter}.
 *
 * <p>Bloom filters are serializable. They also support a more compact serial representation via
 * the {@link #writeTo} and {@link #readFrom} methods. Both serialized forms will continue to be
 * supported by future versions of this library. However, serial forms generated by newer versions
 * of the code may not be readable by older versions of the code (e.g., a serialized bloom filter
 * generated today may <i>not</i> be readable by a binary that was compiled 6 months ago).
 *
 * @param <T> the type of instances that the {@code BloomFilter} accepts
 * @author Dimitris Andreou
 * @author Kevin Bourrillion
 * @since 11.0
 */
@Beta
public final class BloomFilter<T> implements Predicate<T>, Serializable {
  /**
   * A strategy to translate T instances, to {@code numHashFunctions} bit indexes.
   *
   * <p>Implementations should be collections of pure functions (i.e. stateless).
   */
  interface Strategy extends java.io.Serializable {

    /**
     * Sets {@code numHashFunctions} bits of the given bit array, by hashing a user element.
     *
     * <p>Returns whether any bits changed as a result of this operation.
     */
    <T> boolean put(T object, Funnel<? super T> funnel, int numHashFunctions, BitArray bits);

    /**
     * Queries {@code numHashFunctions} bits of the given bit array, by hashing a user element;
     * returns {@code true} if and only if all selected bits are set.
     */
    <T> boolean mightContain(
        T object, Funnel<? super T> funnel, int numHashFunctions, BitArray bits);

    /**
     * Identifier used to encode this strategy, when marshalled as part of a BloomFilter.
     * Only values in the [-128, 127] range are valid for the compact serial form.
     * Non-negative values are reserved for enums defined in BloomFilterStrategies;
     * negative values are reserved for any custom, stateful strategy we may define
     * (e.g. any kind of strategy that would depend on user input).
     */
    int ordinal();
  }

  /** The bit set of the BloomFilter (not necessarily power of 2!) */
  private final BitArray bits;

  /** Number of hashes per element */
  private final int numHashFunctions;

  /** The funnel to translate Ts to bytes */
  private final Funnel<? super T> funnel;

  /**
   * The strategy we employ to map an element T to {@code numHashFunctions} bit indexes.
   */
  private final Strategy strategy;

  /**
   * Creates a BloomFilter.
   */
  private BloomFilter(BitArray bits, int numHashFunctions, Funnel<? super T> funnel,
      Strategy strategy) {
    // The compact serial form encodes numHashFunctions in one unsigned byte, hence the 255 cap.
    checkArgument(numHashFunctions > 0,
        "numHashFunctions (%s) must be > 0", numHashFunctions);
    checkArgument(numHashFunctions <= 255,
        "numHashFunctions (%s) must be <= 255", numHashFunctions);
    this.bits = checkNotNull(bits);
    this.numHashFunctions = numHashFunctions;
    this.funnel = checkNotNull(funnel);
    this.strategy = checkNotNull(strategy);
  }

  /**
   * Creates a new {@code BloomFilter} that's a copy of this instance. The new instance is equal to
   * this instance but shares no mutable state.
   *
   * @since 12.0
   */
  public BloomFilter<T> copy() {
    return new BloomFilter<T>(bits.copy(), numHashFunctions, funnel, strategy);
  }

  /**
   * Returns {@code true} if the element <i>might</i> have been put in this Bloom filter,
   * {@code false} if this is <i>definitely</i> not the case.
   */
  public boolean mightContain(T object) {
    return strategy.mightContain(object, funnel, numHashFunctions, bits);
  }

  /**
   * @deprecated Provided only to satisfy the {@link Predicate} interface; use {@link #mightContain}
   *     instead.
   */
  @Deprecated
  @Override
  public boolean apply(T input) {
    return mightContain(input);
  }

  /**
   * Puts an element into this {@code BloomFilter}. Ensures that subsequent invocations of
   * {@link #mightContain(Object)} with the same element will always return {@code true}.
   *
   * @return true if the bloom filter's bits changed as a result of this operation. If the bits
   *     changed, this is <i>definitely</i> the first time {@code object} has been added to the
   *     filter. If the bits haven't changed, this <i>might</i> be the first time {@code object}
   *     has been added to the filter. Note that {@code put(t)} always returns the
   *     <i>opposite</i> result to what {@code mightContain(t)} would have returned at the time
   *     it is called.
   * @since 12.0 (present in 11.0 with {@code void} return type)
   */
  public boolean put(T object) {
    return strategy.put(object, funnel, numHashFunctions, bits);
  }

  /**
   * Returns the probability that {@linkplain #mightContain(Object)} will erroneously return
   * {@code true} for an object that has not actually been put in the {@code BloomFilter}.
   *
   * <p>Ideally, this number should be close to the {@code fpp} parameter
   * passed in {@linkplain #create(Funnel, int, double)}, or smaller. If it is
   * significantly higher, it is usually the case that too many elements (more than
   * expected) have been put in the {@code BloomFilter}, degenerating it.
   *
   * @since 14.0 (since 11.0 as expectedFalsePositiveProbability())
   */
  public double expectedFpp() {
    // You down with FPP? (Yeah you know me!) Who's down with FPP? (Every last homie!)
    return Math.pow((double) bits.bitCount() / bitSize(), numHashFunctions);
  }

  /**
   * Returns the number of bits in the underlying bit array.
   */
  @VisibleForTesting
  long bitSize() {
    return bits.bitSize();
  }

  /**
   * Determines whether a given bloom filter is compatible with this bloom filter. For two
   * bloom filters to be compatible, they must:
   *
   * <ul>
   * <li>not be the same instance
   * <li>have the same number of hash functions
   * <li>have the same bit size
   * <li>have the same strategy
   * <li>have equal funnels
   * </ul>
   *
   * @param that The bloom filter to check for compatibility.
   * @since 15.0
   */
  public boolean isCompatible(BloomFilter<T> that) {
    checkNotNull(that);
    return (this != that)
        && (this.numHashFunctions == that.numHashFunctions)
        && (this.bitSize() == that.bitSize())
        && (this.strategy.equals(that.strategy))
        && (this.funnel.equals(that.funnel));
  }

  /**
   * Combines this bloom filter with another bloom filter by performing a bitwise OR of the
   * underlying data. The mutations happen to <b>this</b> instance. Callers must ensure the
   * bloom filters are appropriately sized to avoid saturating them.
   *
   * @param that The bloom filter to combine this bloom filter with. It is not mutated.
   * @throws IllegalArgumentException if {@code isCompatible(that) == false}
   *
   * @since 15.0
   */
  public void putAll(BloomFilter<T> that) {
    checkNotNull(that);
    checkArgument(this != that, "Cannot combine a BloomFilter with itself.");
    checkArgument(this.numHashFunctions == that.numHashFunctions,
        "BloomFilters must have the same number of hash functions (%s != %s)",
        this.numHashFunctions, that.numHashFunctions);
    checkArgument(this.bitSize() == that.bitSize(),
        "BloomFilters must have the same size underlying bit arrays (%s != %s)",
        this.bitSize(), that.bitSize());
    checkArgument(this.strategy.equals(that.strategy),
        "BloomFilters must have equal strategies (%s != %s)",
        this.strategy, that.strategy);
    checkArgument(this.funnel.equals(that.funnel),
        "BloomFilters must have equal funnels (%s != %s)",
        this.funnel, that.funnel);
    this.bits.putAll(that.bits);
  }

  @Override
  public boolean equals(@Nullable Object object) {
    if (object == this) {
      return true;
    }
    if (object instanceof BloomFilter) {
      BloomFilter<?> that = (BloomFilter<?>) object;
      return this.numHashFunctions == that.numHashFunctions
          && this.funnel.equals(that.funnel)
          && this.bits.equals(that.bits)
          && this.strategy.equals(that.strategy);
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(numHashFunctions, funnel, strategy, bits);
  }

  private static final Strategy DEFAULT_STRATEGY = BloomFilterStrategies.MURMUR128_MITZ_64;

  /**
   * Creates a {@link BloomFilter BloomFilter<T>} with the expected number of
   * insertions and expected false positive probability.
   *
   * <p>Note that overflowing a {@code BloomFilter} with significantly more elements
   * than specified, will result in its saturation, and a sharp deterioration of its
   * false positive probability.
   *
   * <p>The constructed {@code BloomFilter<T>} will be serializable if the provided
   * {@code Funnel<T>} is.
   *
   * <p>It is recommended that the funnel be implemented as a Java enum. This has the
   * benefit of ensuring proper serialization and deserialization, which is important
   * since {@link #equals} also relies on object identity of funnels.
   *
   * @param funnel the funnel of T's that the constructed {@code BloomFilter<T>} will use
   * @param expectedInsertions the number of expected insertions to the constructed
   *     {@code BloomFilter<T>}; must be positive
   * @param fpp the desired false positive probability (must be positive and less than 1.0)
   * @return a {@code BloomFilter}
   */
  public static <T> BloomFilter<T> create(
      Funnel<? super T> funnel, int expectedInsertions /* n */, double fpp) {
    return create(funnel, expectedInsertions, fpp, DEFAULT_STRATEGY);
  }

  @VisibleForTesting
  static <T> BloomFilter<T> create(
      Funnel<? super T> funnel, int expectedInsertions /* n */, double fpp, Strategy strategy) {
    checkNotNull(funnel);
    checkArgument(expectedInsertions >= 0,
        "Expected insertions (%s) must be >= 0", expectedInsertions);
    checkArgument(fpp > 0.0, "False positive probability (%s) must be > 0.0", fpp);
    checkArgument(fpp < 1.0, "False positive probability (%s) must be < 1.0", fpp);
    checkNotNull(strategy);

    if (expectedInsertions == 0) {
      // Degenerate case: size the filter as if one insertion were expected, so the math below
      // never divides by zero.
      expectedInsertions = 1;
    }
    /*
     * TODO(user): Put a warning in the javadoc about tiny fpp values,
     * since the resulting size is proportional to -log(p), but there is not
     * much of a point after all, e.g. optimalM(1000, 0.0000000000000001) = 76680
     * which is less than 10kb. Who cares!
     */
    long numBits = optimalNumOfBits(expectedInsertions, fpp);
    int numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, numBits);
    try {
      return new BloomFilter<T>(new BitArray(numBits), numHashFunctions, funnel, strategy);
    } catch (IllegalArgumentException e) {
      throw new IllegalArgumentException("Could not create BloomFilter of " + numBits + " bits", e);
    }
  }

  /**
   * Creates a {@link BloomFilter BloomFilter<T>} with the expected number of
   * insertions and a default expected false positive probability of 3%.
   *
   * <p>Note that overflowing a {@code BloomFilter} with significantly more elements
   * than specified, will result in its saturation, and a sharp deterioration of its
   * false positive probability.
   *
   * <p>The constructed {@code BloomFilter<T>} will be serializable if the provided
   * {@code Funnel<T>} is.
   *
   * @param funnel the funnel of T's that the constructed {@code BloomFilter<T>} will use
   * @param expectedInsertions the number of expected insertions to the constructed
   *     {@code BloomFilter<T>}; must be positive
   * @return a {@code BloomFilter}
   */
  public static <T> BloomFilter<T> create(
      Funnel<? super T> funnel, int expectedInsertions /* n */) {
    return create(funnel, expectedInsertions, 0.03); // FYI, for 3%, we always get 5 hash functions
  }

  /*
   * Cheat sheet:
   *
   * m: total bits
   * n: expected insertions
   * b: m/n, bits per insertion
   * p: expected false positive probability
   *
   * 1) Optimal k = b * ln2
   * 2) p = (1 - e ^ (-kn/m))^k
   * 3) For optimal k: p = 2 ^ (-k) ~= 0.6185^b
   * 4) For optimal k: m = -nlnp / ((ln2) ^ 2)
   */

  /**
   * Computes the optimal k (number of hashes per element inserted in Bloom filter), given the
   * expected insertions and total number of bits in the Bloom filter.
   *
   * See http://en.wikipedia.org/wiki/File:Bloom_filter_fp_probability.svg for the formula.
   *
   * @param n expected insertions (must be positive)
   * @param m total number of bits in Bloom filter (must be positive)
   */
  @VisibleForTesting
  static int optimalNumOfHashFunctions(long n, long m) {
    // (m / n) * log(2), but avoid truncation due to division!
    return Math.max(1, (int) Math.round((double) m / n * Math.log(2)));
  }

  /**
   * Computes m (total bits of Bloom filter) which is expected to achieve, for the specified
   * expected insertions, the required false positive probability.
   *
   * See http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives for the formula.
   *
   * @param n expected insertions (must be positive)
   * @param p false positive rate (must be 0 < p < 1)
   */
  @VisibleForTesting
  static long optimalNumOfBits(long n, double p) {
    if (p == 0) {
      // log(0) is -infinity; clamp to the smallest representable positive double instead.
      p = Double.MIN_VALUE;
    }
    return (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2)));
  }

  private Object writeReplace() {
    return new SerialForm<T>(this);
  }

  /** Java-serialization proxy; reconstituted via {@link #readResolve()}. */
  private static class SerialForm<T> implements Serializable {
    final long[] data;
    final int numHashFunctions;
    final Funnel<? super T> funnel;
    final Strategy strategy;

    SerialForm(BloomFilter<T> bf) {
      this.data = bf.bits.data;
      this.numHashFunctions = bf.numHashFunctions;
      this.funnel = bf.funnel;
      this.strategy = bf.strategy;
    }
    Object readResolve() {
      return new BloomFilter<T>(new BitArray(data), numHashFunctions, funnel, strategy);
    }
    private static final long serialVersionUID = 1;
  }

  /**
   * Writes this {@code BloomFilter} to an output stream, with a custom format (not Java
   * serialization). This has been measured to save at least 400 bytes compared to regular
   * serialization.
   *
   * <p>Use {@linkplain #readFrom(InputStream, Funnel)} to reconstruct the written BloomFilter.
   */
  public void writeTo(OutputStream out) throws IOException {
    /*
     * Serial form:
     * 1 signed byte for the strategy
     * 1 unsigned byte for the number of hash functions
     * 1 big endian int, the number of longs in our bitset
     * N big endian longs of our bitset
     */
    DataOutputStream dout = new DataOutputStream(out);
    dout.writeByte(SignedBytes.checkedCast(strategy.ordinal()));
    dout.writeByte(UnsignedBytes.checkedCast(numHashFunctions)); // note: checked at the c'tor
    dout.writeInt(bits.data.length);
    for (long value : bits.data) {
      dout.writeLong(value);
    }
  }

  /**
   * Reads a byte stream, which was written by {@linkplain #writeTo(OutputStream)}, into
   * a {@code BloomFilter<T>}.
   *
   * The {@code Funnel} to be used is not encoded in the stream, so it must be provided here.
   * <b>Warning:</b> the funnel provided <b>must</b> behave identically to the one used to
   * populate the original Bloom filter!
   *
   * @throws IOException if the InputStream throws an {@code IOException}, or if its data does
   *     not appear to be a BloomFilter serialized using the
   *     {@linkplain #writeTo(OutputStream)} method.
   */
  public static <T> BloomFilter<T> readFrom(InputStream in, Funnel<T> funnel) throws IOException {
    checkNotNull(in, "InputStream");
    checkNotNull(funnel, "Funnel");
    // Defaults of -1 so that a failure before a field is read is distinguishable in the message.
    int strategyOrdinal = -1;
    int numHashFunctions = -1;
    int dataLength = -1;
    try {
      DataInputStream din = new DataInputStream(in);
      // currently this assumes there is no negative ordinal; will have to be updated if we
      // add non-stateless strategies (for which we've reserved negative ordinals; see
      // Strategy.ordinal()).
      strategyOrdinal = din.readByte();
      numHashFunctions = UnsignedBytes.toInt(din.readByte());
      dataLength = din.readInt();

      Strategy strategy = BloomFilterStrategies.values()[strategyOrdinal];
      long[] data = new long[dataLength];
      for (int i = 0; i < data.length; i++) {
        data[i] = din.readLong();
      }
      return new BloomFilter<T>(new BitArray(data), numHashFunctions, funnel, strategy);
    } catch (RuntimeException e) {
      // Use the (String, Throwable) constructor rather than initCause() so the cause can never
      // be lost; the message carries whatever fields were read before the failure.
      throw new IOException(
          "Unable to deserialize BloomFilter from InputStream."
              + " strategyOrdinal: " + strategyOrdinal
              + " numHashFunctions: " + numHashFunctions
              + " dataLength: " + dataLength,
          e);
    }
  }
}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2012 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.httppanel.component; import java.awt.CardLayout; import java.awt.Component; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.ListCellRenderer; import javax.swing.MutableComboBoxModel; import org.apache.commons.configuration.FileConfiguration; import org.apache.log4j.Logger; import org.zaproxy.zap.extension.httppanel.HttpPanel; import org.zaproxy.zap.extension.httppanel.Message; import org.zaproxy.zap.extension.httppanel.view.HttpPanelDefaultViewSelector; import org.zaproxy.zap.extension.httppanel.view.HttpPanelView; import org.zaproxy.zap.extension.search.SearchMatch; import org.zaproxy.zap.extension.search.SearchableHttpPanelView; import org.zaproxy.zap.model.MessageLocation; import org.zaproxy.zap.utils.SortedComboBoxModel; 
import org.zaproxy.zap.view.messagelocation.MessageLocationHighlight; import org.zaproxy.zap.view.messagelocation.MessageLocationHighlighter; public class HttpPanelComponentViewsManager implements ItemListener, MessageLocationHighlighter { private static final Logger logger = Logger.getLogger(HttpPanelComponentViewsManager.class); private static final String VIEWS_KEY = "views"; private static final String DEFAULT_VIEW_KEY = "defaultview"; private static DefaultViewSelectorComparator defaultViewSelectorComparator; private Message message; private JPanel panelViews; private JComboBox<ViewItem> comboBoxSelectView; private MutableComboBoxModel<ViewItem> comboBoxModel; private HttpPanelView currentView; private List<ViewItem> enabledViews; private Map<String, ViewItem> viewItems; private Map<String, HttpPanelView> views; private List<HttpPanelDefaultViewSelector> defaultViewsSelectors; private String savedSelectedViewName; private String configurationKey; private String viewsConfigurationKey; private boolean isEditable; private Object changingComboBoxLocker; private boolean changingComboBox; private HttpPanel owner; public HttpPanelComponentViewsManager(String configurationKey) { enabledViews = new ArrayList<>(); viewItems = new HashMap<>(); views = new HashMap<>(); defaultViewsSelectors = new ArrayList<>(); isEditable = false; this.configurationKey = configurationKey; this.viewsConfigurationKey = ""; changingComboBoxLocker = new Object(); changingComboBox = false; savedSelectedViewName = null; comboBoxModel = new SortedComboBoxModel<>(); comboBoxSelectView = new JComboBox<>(comboBoxModel); comboBoxSelectView.addItemListener(this); panelViews = new JPanel(new CardLayout()); } public HttpPanelComponentViewsManager(String configurationKey, String label) { this(configurationKey); comboBoxSelectView.setRenderer( new CustomDelegateListCellRenderer(comboBoxSelectView, label)); } public HttpPanelComponentViewsManager(HttpPanel owner, String configurationKey) { 
this(configurationKey); this.owner = owner; } public HttpPanelComponentViewsManager(HttpPanel owner, String configurationKey, String label) { this(configurationKey, label); this.owner = owner; } public JComponent getSelectableViewsComponent() { return comboBoxSelectView; } public JPanel getViewsPanel() { return panelViews; } public void setSelected(boolean selected) { if (currentView != null) { currentView.setSelected(selected); } } private void switchView(final String name) { if (this.currentView != null && this.currentView.getCaptionName().equals(name)) { currentView.setSelected(true); if (owner != null) { owner.fireMessageViewChangedEvent(currentView, currentView); } return; } HttpPanelView view = views.get(name); if (view == null) { logger.info("No view found with name: " + name); return; } HttpPanelView previousView = currentView; if (this.currentView != null) { this.currentView.setSelected(false); this.currentView.getModel().clear(); } this.currentView = view; comboBoxModel.setSelectedItem(viewItems.get(name)); this.currentView.getModel().setMessage(message); ((CardLayout) panelViews.getLayout()).show(panelViews, name); this.currentView.setSelected(true); if (owner != null) { owner.fireMessageViewChangedEvent(previousView, currentView); } } public void setMessage(Message aMessage) { this.message = aMessage; enableViews(); String defaultViewName = getDefaultEnabledViewName(); if (defaultViewName != null) { if (defaultViewName.equals(currentView.getName())) { currentView.getModel().setMessage(message); } else { switchView(defaultViewName); } } else if (!enabledViews.contains(viewItems.get(currentView.getName()))) { switchView(enabledViews.get(0).getConfigName()); } else { currentView.getModel().setMessage(message); } } private void enableViews() { Iterator<Entry<String, HttpPanelView>> it = views.entrySet().iterator(); while (it.hasNext()) { HttpPanelView view = it.next().getValue(); ViewItem viewItem = viewItems.get(view.getName()); if 
(!view.isEnabled(message)) { if (enabledViews.contains(viewItem)) { disableView(viewItem); } } else if (!enabledViews.contains(viewItem)) { enableView(viewItem); } } } private String getDefaultEnabledViewName() { String defaultViewName = null; Iterator<HttpPanelDefaultViewSelector> itD = defaultViewsSelectors.iterator(); while (itD.hasNext()) { HttpPanelDefaultViewSelector defaultView = itD.next(); if (defaultView.matchToDefaultView(message)) { if (enabledViews.contains(viewItems.get(defaultView.getViewName()))) { defaultViewName = defaultView.getViewName(); break; } } } return defaultViewName; } @Override public void itemStateChanged(ItemEvent e) { synchronized (changingComboBoxLocker) { if (changingComboBox) { return; } } if (e.getStateChange() == ItemEvent.SELECTED) { if (currentView == null) { return; } ViewItem item = (ViewItem) comboBoxModel.getSelectedItem(); if (item == null || item.getConfigName().equals(currentView.getName())) { return; } save(); switchView(item.getConfigName()); } } public void save() { if (message == null || currentView == null) { return; } if (isEditable) { if (currentView.hasChanged()) { currentView.save(); } } } public void addView(HttpPanelView view) { final String targetViewName = view.getTargetViewName(); if (!"".equals(targetViewName) && views.containsKey(targetViewName)) { removeView(targetViewName); } final String viewConfigName = view.getName(); views.put(viewConfigName, view); ViewItem viewItem = new ViewItem(viewConfigName, view.getCaptionName(), view.getPosition()); viewItems.put(viewConfigName, viewItem); panelViews.add(view.getPane(), viewConfigName); view.setEditable(isEditable); view.setParentConfigurationKey(viewsConfigurationKey); if (view.isEnabled(message)) { enableView(viewItem); boolean switchView = false; if (currentView == null) { switchView = true; } else if (savedSelectedViewName != null) { if (savedSelectedViewName.equals(viewConfigName)) { switchView = true; } else if 
(!savedSelectedViewName.equals(currentView.getName()) && currentView.getPosition() > view.getPosition()) { switchView = true; } } else if (currentView.getPosition() > view.getPosition()) { switchView = true; } if (switchView) { switchView(viewConfigName); } } } private void enableView(ViewItem viewItem) { enabledViews.add(viewItem); Collections.sort(enabledViews); synchronized (changingComboBoxLocker) { changingComboBox = true; comboBoxModel.addElement(viewItem); changingComboBox = false; } } private void disableView(ViewItem viewItem) { enabledViews.remove(viewItem); synchronized (changingComboBoxLocker) { changingComboBox = true; comboBoxModel.removeElement(viewItem); changingComboBox = false; } } public void addView(HttpPanelView view, FileConfiguration fileConfiguration) { addView(view); view.loadConfiguration(fileConfiguration); } public void removeView(String viewName) { HttpPanelView view = views.get(viewName); if (view == null) { return; } views.remove(viewName); panelViews.remove(view.getPane()); ViewItem viewItem = viewItems.get(viewName); if (enabledViews.contains(viewItem)) { disableView(viewItem); } viewItems.remove(view.getName()); if (viewName.equals(currentView.getName())) { if (enabledViews.size() > 0) { switchView(enabledViews.get(0).getConfigName()); } else { currentView = null; } } } public void clearView() { if (currentView != null) { currentView.getModel().clear(); setMessage(null); } } public void clearView(boolean enableViewSelect) { clearView(); setEnableViewSelect(enableViewSelect); } public void setEnableViewSelect(boolean enableViewSelect) { comboBoxSelectView.setEnabled(enableViewSelect); } public void addDefaultViewSelector(HttpPanelDefaultViewSelector defaultViewSelector) { defaultViewsSelectors.add(defaultViewSelector); Collections.sort(defaultViewsSelectors, getDefaultViewSelectorComparator()); } public void removeDefaultViewSelector(String defaultViewSelectorName) { Iterator<HttpPanelDefaultViewSelector> itD = 
defaultViewsSelectors.iterator(); while (itD.hasNext()) { HttpPanelDefaultViewSelector defaultView = itD.next(); if (defaultView.getName().equals(defaultViewSelectorName)) { defaultViewsSelectors.remove(defaultView); break; } } } private static Comparator<HttpPanelDefaultViewSelector> getDefaultViewSelectorComparator() { if (defaultViewSelectorComparator == null) { createDefaultViewSelectorComparator(); } return defaultViewSelectorComparator; } private static synchronized void createDefaultViewSelectorComparator() { if (defaultViewSelectorComparator == null) { defaultViewSelectorComparator = new DefaultViewSelectorComparator(); } } public void setConfigurationKey(String parentKey) { configurationKey = parentKey + configurationKey + "."; viewsConfigurationKey = configurationKey + VIEWS_KEY + "."; Iterator<HttpPanelView> it = views.values().iterator(); while (it.hasNext()) { it.next().setParentConfigurationKey(viewsConfigurationKey); } } public void loadConfig(FileConfiguration fileConfiguration) { savedSelectedViewName = fileConfiguration.getString(configurationKey + DEFAULT_VIEW_KEY); Iterator<HttpPanelView> it = views.values().iterator(); while (it.hasNext()) { it.next().loadConfiguration(fileConfiguration); } } public void saveConfig(FileConfiguration fileConfiguration) { if (currentView != null) { fileConfiguration.setProperty( configurationKey + DEFAULT_VIEW_KEY, currentView.getName()); } Iterator<HttpPanelView> it = views.values().iterator(); while (it.hasNext()) { it.next().saveConfiguration(fileConfiguration); } } public void setEditable(boolean editable) { if (isEditable != editable) { isEditable = editable; Iterator<HttpPanelView> it = views.values().iterator(); while (it.hasNext()) { it.next().setEditable(editable); } } } public void highlight(SearchMatch sm) { if (currentView instanceof SearchableHttpPanelView) { ((SearchableHttpPanelView) currentView).highlight(sm); } else { SearchableHttpPanelView searchableView = findSearchableView(); if (currentView 
!= null) { switchView(((HttpPanelView) searchableView).getName()); searchableView.highlight(sm); } } } public void search(Pattern p, List<SearchMatch> matches) { if (currentView instanceof SearchableHttpPanelView) { ((SearchableHttpPanelView) currentView).search(p, matches); } else { SearchableHttpPanelView searchableView = findSearchableView(); if (searchableView != null) { searchableView.search(p, matches); } } } private SearchableHttpPanelView findSearchableView() { SearchableHttpPanelView searchableView = null; Iterator<HttpPanelView> it = views.values().iterator(); while (it.hasNext()) { HttpPanelView view = it.next(); if (view.isEnabled(message)) { if (view instanceof SearchableHttpPanelView) { searchableView = (SearchableHttpPanelView) view; break; } } } return searchableView; } private static final class ViewItem implements Comparable<ViewItem> { private final String configName; private String name; private final int position; public ViewItem(String configName, String name, int position) { this.configName = configName; this.name = name; this.position = position; } public String getConfigName() { return configName; } @Override public int compareTo(ViewItem o) { if (position < o.position) { return -1; } else if (position > o.position) { return 1; } return 0; } @Override public int hashCode() { return 31 * configName.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } ViewItem other = (ViewItem) obj; if (!configName.equals(other.configName)) { return false; } return true; } @Override public String toString() { return name; } } private static final class CustomDelegateListCellRenderer implements ListCellRenderer<ViewItem> { private ListCellRenderer<? 
super ViewItem> delegateRenderer; private JComboBox<ViewItem> comboBox; private String label; private ViewItem viewItem; public CustomDelegateListCellRenderer(JComboBox<ViewItem> aComboBox, String label) { this.delegateRenderer = aComboBox.getRenderer(); this.comboBox = aComboBox; this.label = label; this.viewItem = new ViewItem("", "", -1); this.comboBox.addPropertyChangeListener( "UI", new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { delegateRenderer = new JComboBox<ViewItem>().getRenderer(); } }); } @Override public Component getListCellRendererComponent( JList<? extends ViewItem> list, ViewItem value, int index, boolean isSelected, boolean cellHasFocus) { if (index != -1) { return delegateRenderer.getListCellRendererComponent( list, value, index, isSelected, cellHasFocus); } viewItem.name = label + value.name; return delegateRenderer.getListCellRendererComponent( list, viewItem, index, isSelected, cellHasFocus); } } private static final class DefaultViewSelectorComparator implements Comparator<HttpPanelDefaultViewSelector>, Serializable { private static final long serialVersionUID = -1380844848294384189L; @Override public int compare(HttpPanelDefaultViewSelector o1, HttpPanelDefaultViewSelector o2) { final int order1 = o1.getOrder(); final int order2 = o2.getOrder(); if (order1 < order2) { return -1; } else if (order1 > order2) { return 1; } return 0; } } @Override public boolean supports(MessageLocation location) { for (ViewItem item : enabledViews) { HttpPanelView view = views.get(item.getConfigName()); if (view instanceof MessageLocationHighlighter) { MessageLocationHighlighter highlighter = (MessageLocationHighlighter) view; if (highlighter.supports(location)) { return true; } } } return false; } @Override public boolean supports(Class<? 
extends MessageLocation> classLocation) { for (ViewItem item : enabledViews) { HttpPanelView view = views.get(item.getConfigName()); if (view instanceof MessageLocationHighlighter) { MessageLocationHighlighter highlighter = (MessageLocationHighlighter) view; if (highlighter.supports(classLocation)) { return true; } } } return false; } @Override public MessageLocationHighlight highlight(MessageLocation location) { if (currentView instanceof MessageLocationHighlighter) { MessageLocationHighlighter highlighter = (MessageLocationHighlighter) currentView; return highlighter.highlight(location); } return null; } @Override public MessageLocationHighlight highlight( MessageLocation location, MessageLocationHighlight highlight) { if (currentView instanceof MessageLocationHighlighter) { MessageLocationHighlighter highlighter = (MessageLocationHighlighter) currentView; return highlighter.highlight(location, highlight); } return null; } @Override public void removeHighlight( MessageLocation location, MessageLocationHighlight highlightReference) { if (currentView instanceof MessageLocationHighlighter) { MessageLocationHighlighter highlighter = (MessageLocationHighlighter) currentView; highlighter.removeHighlight(location, highlightReference); } } public HttpPanelView setSelectedView(String viewName) { for (ViewItem item : enabledViews) { if (viewName.equals(item.getConfigName())) { switchView(viewName); return currentView; } } return null; } }
//
//  PersistentStore.java
//  xal
//
//  Created by Pelaia II, Tom on 10/10/06.
//  Copyright 2006 Oak Ridge National Lab. All rights reserved.
//

package xal.service.pvlogger;

import java.net.URL;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ArrayList;
import java.util.Set;
import java.sql.*;

import xal.tools.data.*;
import xal.tools.database.*;


/** contains information about the persistent storage */
class PersistentStore {
	/** proxy to a database table of snapshot group channels */
	final private SnapshotGroupChannelTable SNAPSHOT_GROUP_CHANNEL_TABLE;

	/** proxy to a database table of snapshot groups */
	final protected SnapshotGroupTable SNAPSHOT_GROUP_TABLE;

	/** channel snapshot tables keyed by service ID */
	final protected Map<String,ChannelSnapshotTable> CHANNEL_SNAPSHOT_TABLES;

	/** machine snapshot table */
	final protected MachineSnapshotTable MACHINE_SNAPSHOT_TABLE;

	/** map of channel groups keyed by group ID (lazily populated; guarded by synchronizing on the map itself) */
	final protected Map<String,ChannelGroup> CHANNEL_GROUPS;


	/**
	 * Constructor
	 * @param storeAdaptor adaptor supplying the database table configurations ("dbtable" and "service" child nodes)
	 */
	public PersistentStore( final DataAdaptor storeAdaptor ) {
		final Map<String,DBTableConfiguration> tableConfigurations = loadTableConfigurations( storeAdaptor );

		SNAPSHOT_GROUP_CHANNEL_TABLE = new SnapshotGroupChannelTable( tableConfigurations.get( "SnapshotGroupChannel" ) );
		SNAPSHOT_GROUP_TABLE = new SnapshotGroupTable( tableConfigurations.get( "SnapshotGroup" ), SNAPSHOT_GROUP_CHANNEL_TABLE );
		MACHINE_SNAPSHOT_TABLE = new MachineSnapshotTable( tableConfigurations.get( "MachineSnapshot" ) );
		CHANNEL_SNAPSHOT_TABLES = loadChannelSnapshotTables( storeAdaptor );

		CHANNEL_GROUPS = new HashMap<String,ChannelGroup>();
	}


	/**
	 * Get the table configurations from the configuration, keyed by entity name.
	 * @param storeAdaptor adaptor whose "dbtable" children describe the tables
	 * @return map of table configurations keyed by the "entity" attribute
	 */
	static private Map<String,DBTableConfiguration> loadTableConfigurations( final DataAdaptor storeAdaptor ) {
		final List<DataAdaptor> tableAdaptors = storeAdaptor.childAdaptors( "dbtable" );
		final Map<String,DBTableConfiguration> tableConfigurations = new HashMap<String,DBTableConfiguration>(2);

		for ( final DataAdaptor tableAdaptor : tableAdaptors ) {
			final String entity = tableAdaptor.stringValue( "entity" );
			tableConfigurations.put( entity, new DBTableConfiguration( tableAdaptor ) );
		}

		return tableConfigurations;
	}


	/**
	 * Load the channel snapshot tables from the configuration.
	 * @param storeAdaptor adaptor whose "service" children each supply a "dbtable" child
	 * @return map of channel snapshot tables keyed by service ID (the service's "name" attribute)
	 */
	static private Map<String,ChannelSnapshotTable> loadChannelSnapshotTables( final DataAdaptor storeAdaptor ) {
		final Map<String,ChannelSnapshotTable> channelSnapshotTables = new HashMap<String,ChannelSnapshotTable>();

		final List<DataAdaptor> serviceAdaptors = storeAdaptor.childAdaptors( "service" );
		for ( final DataAdaptor serviceAdaptor : serviceAdaptors ) {
			final String serviceID = serviceAdaptor.stringValue( "name" );
			final DataAdaptor tableAdaptor = serviceAdaptor.childAdaptor( "dbtable" );
			final ChannelSnapshotTable channelSnapshotTable = new ChannelSnapshotTable( new DBTableConfiguration( tableAdaptor ) );
			channelSnapshotTables.put( serviceID, channelSnapshotTable );
		}

		return channelSnapshotTables;
	}


	/**
	 * Get a new connection using the specified connection dictionary. Auto-commit is disabled so callers control transactions.
	 * @param dictionary connection dictionary supplying the database adaptor and credentials
	 * @return a new database connection with auto-commit off
	 * @throws SQLException if the connection cannot be established or configured
	 */
	static public Connection connectionInstance( final ConnectionDictionary dictionary ) throws SQLException {
		final DatabaseAdaptor databaseAdaptor = dictionary.getDatabaseAdaptor();
		final Connection connection = databaseAdaptor.getConnection( dictionary );
		connection.setAutoCommit( false );
		return connection;
	}


	/**
	 * Fetch the machine snapshot corresponding to the specified snapshot ID, including its channel snapshots.
	 * @param connection database connection
	 * @param snapshotID machine snapshot ID
	 * @return machine snapshot corresponding to the specified ID
	 * @throws SQLException if the database fetch fails
	 */
	public MachineSnapshot fetchMachineSnapshot( final Connection connection, final long snapshotID ) throws SQLException {
		final MachineSnapshot machineSnapshot = MACHINE_SNAPSHOT_TABLE.fetchMachineSnapshot( connection, snapshotID );
		final ChannelSnapshotTable channelSnapshotTable = getChannelSnapshotTable( connection, machineSnapshot );
		MACHINE_SNAPSHOT_TABLE.loadChannelSnapshotsInto( connection, channelSnapshotTable, machineSnapshot );
		return machineSnapshot;
	}


	/**
	 * Fetch the machine snapshots within the specified time range. If the type is not null, then restrict the machine snapshots to those of the specified type.
	 * The machine snapshots do not include the channel snapshots. A complete snapshot can be obtained using the fetchMachineSnapshot(id) method.
	 * @param connection database connection
	 * @param type The type of machine snapshots to fetch or null for no restriction
	 * @param startTime The start time of the time range
	 * @param endTime The end time of the time range
	 * @return An array of machine snapshots meeting the specified criteria
	 * @throws SQLException if the database fetch fails
	 */
	public MachineSnapshot[] fetchMachineSnapshotsInRange( final Connection connection, final String type, final java.util.Date startTime, final java.util.Date endTime ) throws SQLException {
		return MACHINE_SNAPSHOT_TABLE.fetchMachineSnapshotsInRange( connection, type, startTime, endTime );
	}


	/**
	 * Fetch the channel snapshots from the data source and populate the machine snapshot
	 * @param connection database connection
	 * @param machineSnapshot The machine snapshot for which to fetch the channel snapshots and load them
	 * @return the machineSnapshot which is the same as the parameter returned for convenience
	 * @throws SQLException if the database fetch fails
	 */
	public MachineSnapshot loadChannelSnapshotsInto( final Connection connection, final MachineSnapshot machineSnapshot ) throws SQLException {
		final ChannelSnapshotTable channelSnapshotTable = getChannelSnapshotTable( connection, machineSnapshot );
		return MACHINE_SNAPSHOT_TABLE.loadChannelSnapshotsInto( connection, channelSnapshotTable, machineSnapshot );
	}


	/**
	 * Fetch channel groups as an array of types
	 * @param connection database connection
	 * @return array of types corresponding to all of the channel groups
	 * @throws SQLException if the database fetch fails
	 */
	public String[] fetchTypes( final Connection connection ) throws SQLException {
		return SNAPSHOT_GROUP_TABLE.fetchTypes( connection );
	}


	/**
	 * Fetch the channel groups associated with the service ID as an array of types
	 * @param connection database connection
	 * @param serviceID service ID of groups to fetch
	 * @return array of types corresponding to channel groups with the specified service ID
	 * @throws SQLException if the database fetch fails
	 */
	public String[] fetchTypes( final Connection connection, final String serviceID ) throws SQLException {
		return SNAPSHOT_GROUP_TABLE.fetchTypes( connection, serviceID );
	}


	/**
	 * Get the channel group corresponding to the specified type, fetching it from the database on first request and caching it thereafter.
	 * @param connection database connection
	 * @param type channel group type
	 * @return the cached or freshly fetched channel group for the specified type
	 * @throws SQLException if a database fetch is required and fails
	 */
	public ChannelGroup getChannelGroup( final Connection connection, final String type ) throws SQLException {
		synchronized( CHANNEL_GROUPS ) {
			if ( !CHANNEL_GROUPS.containsKey( type ) ) {
				fetchChannelGroup( connection, type );
			}
			return CHANNEL_GROUPS.get( type );
		}
	}


	/**
	 * Fetch the channel group for the specified type from the database and cache it.
	 * @param connection database connection
	 * @param type channel group type
	 * @return the channel group fetched for the specified type
	 * @throws SQLException if the database fetch fails
	 */
	protected ChannelGroup fetchChannelGroup( final Connection connection, final String type ) throws SQLException {
		final ChannelGroup channelGroup = SNAPSHOT_GROUP_TABLE.fetchChannelGroup( connection, type );
		CHANNEL_GROUPS.put( type, channelGroup );
		return channelGroup;
	}


	/**
	 * Insert the channels into the specified snapshot group.
	 * @param connection database connection
	 * @param channelNames PVs to insert
	 * @param groupID Channel Group ID
	 * @throws SQLException if the database insert fails
	 */
	public void insertChannels( final Connection connection, final List<String> channelNames, final String groupID ) throws SQLException {
		SNAPSHOT_GROUP_CHANNEL_TABLE.insertChannels( connection, channelNames, groupID );
	}


	/**
	 * Publish the channel group edits
	 * @param connection database connection
	 * @param records channel group records whose edits should be published
	 * @throws SQLException if the database update fails
	 */
	public void publishGroupEdits( final Connection connection, final Set<ChannelGroupRecord> records ) throws SQLException {
		SNAPSHOT_GROUP_TABLE.publishGroupEdits( connection, records );
	}


	/**
	 * Get the channel snapshot table for the specified machine snapshot. The table is looked up by the
	 * service ID of the channel group matching the snapshot's type.
	 * @param connection database connection
	 * @param machineSnapshot machine snapshot whose channel snapshot table is requested
	 * @return the channel snapshot table for the snapshot's service, or null if no table is configured for that service ID
	 * @throws SQLException if the channel group fetch fails
	 */
	protected ChannelSnapshotTable getChannelSnapshotTable( final Connection connection, final MachineSnapshot machineSnapshot ) throws SQLException {
		final String groupID = machineSnapshot.getType();
		final ChannelGroup group = getChannelGroup( connection, groupID );
		final String serviceID = group.getServiceID();
		return CHANNEL_SNAPSHOT_TABLES.get( serviceID );
	}


	/**
	 * Publish the machine snapshots to the database
	 * @param connection database connection
	 * @param databaseAdaptor database adaptor used for the inserts
	 * @param machineSnapshots machine snapshots to publish to the database
	 * @return machine snapshots successfully published to the database; null if the input list is empty (historical contract preserved for existing callers)
	 */
	public List<MachineSnapshot> publish( final Connection connection, final DatabaseAdaptor databaseAdaptor, final List<MachineSnapshot> machineSnapshots ) {
		if ( machineSnapshots.size() == 0 )  return null;

		final List<MachineSnapshot> successfulSnapshots = new ArrayList<MachineSnapshot>( machineSnapshots.size() );
		for ( final MachineSnapshot machineSnapshot : machineSnapshots ) {
			if ( publish( connection, databaseAdaptor, machineSnapshot ) ) {
				successfulSnapshots.add( machineSnapshot );
			}
		}

		return successfulSnapshots;
	}


	/**
	 * Publish the specified machine snapshot. Failures are reported on stderr rather than propagated so a
	 * single bad snapshot does not abort a batch publish.
	 * @param connection database connection
	 * @param databaseAdaptor database adaptor used for the insert
	 * @param machineSnapshot machine snapshot to publish
	 * @return true if the snapshot was inserted; false if the insert raised a SQLException
	 */
	protected boolean publish( final Connection connection, final DatabaseAdaptor databaseAdaptor, final MachineSnapshot machineSnapshot ) {
		try {
			final ChannelSnapshotTable channelSnapshotTable = getChannelSnapshotTable( connection, machineSnapshot );
			MACHINE_SNAPSHOT_TABLE.insert( connection, databaseAdaptor, channelSnapshotTable, machineSnapshot );
			return true;
		}
		catch( SQLException exception ) {
			// best-effort publish: log the failure and report it via the return value
			exception.printStackTrace();
			return false;
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.CharArrayWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.management.LockInfo; import java.lang.management.ManagementFactory; import java.lang.management.MonitorInfo; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.Vector; import java.util.zip.GZIPOutputStream; import org.apache.logging.log4j.Logger; import org.apache.geode.SystemFailure; import org.apache.geode.distributed.internal.DistributionConfig; import org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.io.TeePrintStream; import org.apache.geode.internal.logging.LogService; import org.apache.geode.internal.logging.LoggingThreadGroup; /** * Used to interact with operating system processes. Use <code>exec</code> to create a new process * by executing a command. 
Use <code>kill</code> to kill a process. * */ // TODO: In the next major release, we should remove the variables and logic related to the system // properties used to determine whether output redirection is allowed or not // (DISABLE_OUTPUT_REDIRECTION_PROPERTY, ENABLE_OUTPUT_REDIRECTION_PROPERTY, // DISABLE_REDIRECTION_CONFIGURATION_PROPERTY, ENABLE_OUTPUT_REDIRECTION, DISABLE_OUTPUT_REDIRECTION // and DISABLE_REDIRECTION_CONFIGURATION). GFSH should always use the new redirect-output flag. public class OSProcess { private static final Logger logger = LogService.getLogger(); /** * @deprecated use GFSH redirect-output flag instead. */ @Deprecated public static final String DISABLE_OUTPUT_REDIRECTION_PROPERTY = DistributionConfig.GEMFIRE_PREFIX + "OSProcess.DISABLE_OUTPUT_REDIRECTION"; /** * @deprecated use GFSH redirect-output flag instead. */ @Deprecated public static final String ENABLE_OUTPUT_REDIRECTION_PROPERTY = DistributionConfig.GEMFIRE_PREFIX + "OSProcess.ENABLE_OUTPUT_REDIRECTION"; /** * @deprecated use GFSH redirect-output flag instead. */ @Deprecated public static final String DISABLE_REDIRECTION_CONFIGURATION_PROPERTY = DistributionConfig.GEMFIRE_PREFIX + "OSProcess.DISABLE_REDIRECTION_CONFIGURATION"; /** * @deprecated use GFSH redirect-output flag instead. */ @Deprecated private static final boolean ENABLE_OUTPUT_REDIRECTION = Boolean.getBoolean(ENABLE_OUTPUT_REDIRECTION_PROPERTY); /** * @deprecated use GFSH redirect-output flag instead. */ @Deprecated private static final boolean DISABLE_OUTPUT_REDIRECTION = Boolean.getBoolean(DISABLE_OUTPUT_REDIRECTION_PROPERTY); /** * @deprecated use GFSH redirect-output flag instead. 
*/ @Deprecated private static final boolean DISABLE_REDIRECTION_CONFIGURATION = Boolean.getBoolean(DISABLE_REDIRECTION_CONFIGURATION_PROPERTY); static final boolean pureMode = PureJavaMode.isPure(); static { if (!pureMode) { registerSigQuitHandler(); } } /** * Starts a background command writing its stdout and stderr to the specified log file. * * @param cmdarray An array of strings that specify the command to run. The first element must be * the executable. Each additional command line argument should have its own entry in the * array. * @param workdir the current directory of the created process * @param logfile the file the created process will write stdout and stderr to. * @param inheritLogfile can be set to false if the child process is willing to create its own log * file. Setting to false can help on Windows because it keeps the child process from * inheriting handles from the parent. * @return the process id of the created process; -1 on failure * @exception IOException if a child process could not be created. */ private static native int bgexecInternal(String[] cmdarray, String workdir, String logfile, boolean inheritLogfile) throws IOException; /** * Starts execution of the specified command and arguments in a separate detached process in the * specified working directory writing output to the specified log file. * <p> * If there is a security manager, its <code>checkExec</code> method is called with the first * component of the array <code>cmdarray</code> as its argument. This may result in a security * exception. * <p> * Given an array of strings <code>cmdarray</code>, representing the tokens of a command line, * this method creates a new process in which to execute the specified command. * * @param cmdarray array containing the command to call and its arguments. * @param workdir the current directory of the created process; null causes working directory to * default to the current directory. 
* @param logfile the file the created process will write stdout and stderr to; null causes a * default log file name to be used. * @param inheritLogfile can be set to false if the child process is willing to create its own log * file. Setting to false can help on Windows because it keeps the child process from * inheriting handles from the parent. * @param env any extra environment variables as key,value map; these will be in addition to those * inherited from the parent process and will overwrite same keys * @return the process id of the created process; -1 on failure * @exception SecurityException if the current thread cannot create a subprocess. * @see java.lang.SecurityException * @see java.lang.SecurityManager#checkExec(java.lang.String) */ public static int bgexec(String cmdarray[], File workdir, File logfile, boolean inheritLogfile, Map<String, String> env) throws IOException { String commandShell = System.getProperty(DistributionConfig.GEMFIRE_PREFIX + "commandShell", "bash"); if (cmdarray.length == 0) { throw new java.lang.IndexOutOfBoundsException(); } boolean isWindows = false; String os = System.getProperty("os.name"); if (os != null) { if (os.indexOf("Windows") != -1) { isWindows = true; } } for (int i = 0; i < cmdarray.length; i++) { if (cmdarray[i] == null) { throw new NullPointerException(); } if (isWindows) { if (i == 0) { // do the following before quotes get added. File cmd = new File(cmdarray[0]); if (!cmd.exists()) { cmd = new File(cmdarray[0] + ".exe"); if (cmd.exists()) { cmdarray[0] = cmd.getPath(); } } } String s = cmdarray[i]; if (i != 0) { if (s.length() == 0) { cmdarray[i] = "\"\""; // fix for bug 22207 } else if ((s.indexOf(' ') >= 0 || s.indexOf('\t') >= 0)) { String unquotedS = s; if (s.indexOf('\"') != -1) { // Note that Windows provides no way to embed a double // quote in a double quoted string so need to remove // any internal quotes and let the outer quotes // preserve the whitespace. 
StringBuffer b = new StringBuffer(s); int quoteIdx = s.lastIndexOf('\"'); while (quoteIdx != -1) { b.deleteCharAt(quoteIdx); quoteIdx = s.lastIndexOf('\"', quoteIdx - 1); } unquotedS = b.toString(); } // It has whitespace and its not quoted cmdarray[i] = '"' + unquotedS + '"'; } } } } File cmd = new File(cmdarray[0]); if (!cmd.exists()) { throw new IOException(LocalizedStrings.OSProcess_THE_EXECUTABLE_0_DOES_NOT_EXIST .toLocalizedString(cmd.getPath())); } SecurityManager security = System.getSecurityManager(); if (security != null) { security.checkExec(cmdarray[0]); } if (workdir != null && !workdir.isDirectory()) { String curDir = new File("").getAbsolutePath(); System.out.println( LocalizedStrings.OSProcess_WARNING_0_IS_NOT_A_DIRECTORY_DEFAULTING_TO_CURRENT_DIRECTORY_1 .toLocalizedString(new Object[] {workdir, curDir})); workdir = null; } if (workdir == null) { workdir = new File("").getAbsoluteFile(); } if (logfile == null) { logfile = File.createTempFile("bgexec", ".log", workdir); } if (!logfile.isAbsolute()) { // put it in the working directory logfile = new File(workdir, logfile.getPath()); } // fix for bug 24575 if (logfile.exists()) { // it already exists so make sure its a file and can be written if (!logfile.isFile()) { throw new IOException(LocalizedStrings.OSProcess_THE_LOG_FILE_0_WAS_NOT_A_NORMAL_FILE .toLocalizedString(logfile.getPath())); } if (!logfile.canWrite()) { throw new IOException(LocalizedStrings.OSProcess_NEED_WRITE_ACCESS_FOR_THE_LOG_FILE_0 .toLocalizedString(logfile.getPath())); } } else { try { logfile.createNewFile(); } catch (IOException io) { throw new IOException(LocalizedStrings.OSProcess_COULD_NOT_CREATE_LOG_FILE_0_BECAUSE_1 .toLocalizedString(new Object[] {logfile.getPath(), io.getMessage()})); } } String trace = System.getProperty("org.apache.geode.internal.OSProcess.trace"); if (trace != null && trace.length() > 0) { for (int i = 0; i < cmdarray.length; i++) { System.out.println("cmdarray[" + i + "] = " + cmdarray[i]); } 
System.out.println("workdir=" + workdir.getPath()); System.out.println("logfile=" + logfile.getPath()); } int result = 0; if (pureMode || (env != null && env.size() > 0)) { StringBuffer sb = new StringBuffer(); Vector cmdVec = new Vector(); // Add shell code to spawn a process silently if (isWindows) { cmdVec.add("cmd.exe"); cmdVec.add("/c"); sb.append("start /b \"\" "); } else { // to address issue with users that don't have bash shell installed if (commandShell.equals("bash")) { cmdVec.add("bash"); cmdVec.add("--norc"); cmdVec.add("-c"); } else { cmdVec.add(commandShell); } } // Add the actual command for (int i = 0; i < cmdarray.length; i++) { if (i != 0) sb.append(" "); if (cmdarray[i].length() != 0 && cmdarray[i].charAt(0) == '\"') { // The token has already been quoted, see bug 40835 sb.append(cmdarray[i]); } else { sb.append("\""); sb.append(cmdarray[i]); sb.append("\""); } } // Add the IO redirction code, this prevents hangs and IO blocking sb.append(" >> "); sb.append(logfile.getPath()); sb.append(" 2>&1"); if (isWindows) { sb.append(" <NUL"); } else { sb.append(" </dev/null &"); } cmdVec.add(sb.toString()); String[] cmdStrings = (String[]) cmdVec.toArray(new String[0]); if (trace != null && trace.length() > 0) { for (int i = 0; i < cmdStrings.length; i++) { System.out.println("cmdStrings[" + i + "] = " + cmdStrings[i]); } System.out.println("workdir=" + workdir.getPath()); System.out.println("logfile=" + logfile.getPath()); } final ProcessBuilder procBuilder = new ProcessBuilder(cmdStrings); if (env != null && env.size() > 0) { // adjust the environment variables inheriting from parent procBuilder.environment().putAll(env); } procBuilder.directory(workdir); final Process process = procBuilder.start(); try { process.getInputStream().close(); } catch (IOException ignore) { } try { process.getOutputStream().close(); } catch (IOException ignore) { } try { process.getErrorStream().close(); } catch (IOException ignore) { } try { // short count = 1000; boolean 
processIsStillRunning = true; while (processIsStillRunning) { Thread.sleep(10); try { process.exitValue(); processIsStillRunning = false; } catch (IllegalThreadStateException itse) { // Ignore this, we are polling the exitStatus // instead of using the blocking Process#waitFor() } } } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } else { result = bgexecInternal(cmdarray, workdir.getPath(), logfile.getPath(), inheritLogfile); if (result != -1) { if (pids != null) { pids.add(Integer.valueOf(result)); if (trace != null && trace.length() > 0) { System.out.println("bgexec child pid is: " + result); } } } } return result; // Always 0 for pureJava } /** * Checks to make sure that we are operating on a valid process id. Sending signals to processes * with <code>pid</code> 0 or -1 can have unintended consequences. * * @throws IllegalArgumentException If <code>pid</code> is not positive * * @since GemFire 4.0 */ private static void checkPid(int pid) { if (pid <= 0) { throw new IllegalArgumentException( LocalizedStrings.OSProcess_SHOULD_NOT_SEND_A_SIGNAL_TO_PID_0 .toLocalizedString(Integer.valueOf(pid))); } } /** * Ask a process to shut itself down. The process may catch and ignore this shutdown request. * * @param pid the id of the process to shutdown * @return true if the request was sent to the process; false if the process does not exist or can * not be asked to shutdown. */ public static boolean shutdown(int pid) { if (pureMode) { throw new RuntimeException( LocalizedStrings.OSProcess_SHUTDOWN_NOT_ALLOWED_IN_PURE_JAVA_MODE.toLocalizedString()); } else { checkPid(pid); return _shutdown(pid); } } private static native boolean _shutdown(int pid); /** * Terminate a process without warning and without a chance of an orderly shutdown. This method * should only be used as a last resort. The {@link #shutdown(int)} method should be used in most * cases. 
* * @param pid the id of the process to kill * @return true if the process was killed; false if it does not exist or can not be killed. */ public static boolean kill(int pid) { if (pureMode) { throw new RuntimeException( LocalizedStrings.OSProcess_KILL_NOT_ALLOWED_IN_PURE_JAVA_MODE.toLocalizedString()); } else { checkPid(pid); return _kill(pid); } } private static native boolean _kill(int pid); /** * Tells a process to print its stacks to its standard output * * @param pid the id of the process that will print its stacks, or zero for the current process * @return true if the process was told; false if it does not exist or can not be told. */ public static boolean printStacks(int pid) { return printStacks(pid, false); } /** * Tells a process to print its stacks to its standard output or the given log writer * * @param pid the id of the process that will print its stacks, or zero for the current process * @param useNative if true we attempt to use native code, which goes to stdout * @return true if the process was told; false if it does not exist or can not be told. 
*/ public static boolean printStacks(int pid, boolean useNative) { if (pureMode || !useNative) { if (pid > 0 && pid != myPid[0]) { return false; } CharArrayWriter cw = new CharArrayWriter(50000); PrintWriter sb = new PrintWriter(cw, true); sb.append("\n******** full thread dump ********\n"); ThreadMXBean bean = ManagementFactory.getThreadMXBean(); long[] threadIds = bean.getAllThreadIds(); ThreadInfo[] infos = bean.getThreadInfo(threadIds, true, true); long thisThread = Thread.currentThread().getId(); for (int i = 0; i < infos.length; i++) { if (i != thisThread && infos[i] != null) { formatThreadInfo(infos[i], sb); } } sb.flush(); logger.warn(cw.toString()); return true; } else { if (pid < 0) checkPid(pid); return _printStacks(pid); } } /** dumps this vm's stacks and returns gzipped result */ public static byte[] zipStacks() throws IOException { ThreadMXBean bean = ManagementFactory.getThreadMXBean(); long[] threadIds = bean.getAllThreadIds(); ThreadInfo[] infos = bean.getThreadInfo(threadIds, true, true); long thisThread = Thread.currentThread().getId(); ByteArrayOutputStream baos = new ByteArrayOutputStream(10000); GZIPOutputStream zipOut = new GZIPOutputStream(baos, 10000); PrintWriter pw = new PrintWriter(zipOut, true); for (int i = 0; i < infos.length; i++) { if (i != thisThread && infos[i] != null) { formatThreadInfo(infos[i], pw); } } pw.flush(); zipOut.close(); byte[] result = baos.toByteArray(); return result; } private static native boolean _printStacks(int pid); static final int MAX_STACK_FRAMES = 75; private static void formatThreadInfo(ThreadInfo t, PrintWriter pw) { // this is largely copied from the JDK's ThreadInfo.java, but it limits the // stacks to 8 elements pw.append("\"" + t.getThreadName() + "\"" + " tid=0x" + Long.toHexString(t.getThreadId())); // this is in the stack trace elements so we don't need to add it // if (t.getLockName() != null) { // pw.append(" "); // pw.append(StringUtils.toLowerCase(t.getThreadState().toString())); // 
pw.append(" on " + t.getLockName()); // } // priority is not known // daemon status is not known if (t.isSuspended()) { pw.append(" (suspended)"); } if (t.isInNative()) { pw.append(" (in native)"); } if (t.getLockOwnerName() != null) { pw.append(" owned by \"" + t.getLockOwnerName() + "\" tid=0x" + Long.toHexString(t.getLockOwnerId())); } pw.append('\n'); pw.append(" java.lang.Thread.State: " + t.getThreadState() + "\n"); int i = 0; StackTraceElement[] stackTrace = t.getStackTrace(); for (; i < stackTrace.length && i < MAX_STACK_FRAMES; i++) { StackTraceElement ste = stackTrace[i]; pw.append("\tat " + ste.toString()); pw.append('\n'); if (i == 0 && t.getLockInfo() != null) { Thread.State ts = t.getThreadState(); switch (ts) { case BLOCKED: pw.append("\t- blocked on " + t.getLockInfo()); pw.append('\n'); break; case WAITING: pw.append("\t- waiting on " + t.getLockInfo()); pw.append('\n'); break; case TIMED_WAITING: pw.append("\t- waiting on " + t.getLockInfo()); pw.append('\n'); break; default: } } for (MonitorInfo mi : t.getLockedMonitors()) { if (mi.getLockedStackDepth() == i) { pw.append("\t- locked " + mi); pw.append('\n'); } } } if (i < stackTrace.length) { pw.append("\t..."); pw.append('\n'); } LockInfo[] locks = t.getLockedSynchronizers(); if (locks.length > 0) { pw.append("\n\tNumber of locked synchronizers = " + locks.length); pw.append('\n'); for (LockInfo li : locks) { pw.append("\t- " + li); pw.append('\n'); } } pw.append('\n'); } /** * Find out if a process exists. * * @param pid the id of the process to check for * @return true if the process exists; false if it does not. 
   */
  public static boolean exists(int pid) {
    if (pureMode) {
      throw new RuntimeException(
          LocalizedStrings.OSProcess_EXISTS_NOT_ALLOWED_IN_PURE_JAVA_MODE.toLocalizedString());
    }
    checkPid(pid);
    if (reapPid(pid)) {
      // The pid belonged to one of our children and has already exited; drop it
      // from the tracked-child set (best effort — removal failure is harmless).
      try {
        pids.remove(Integer.valueOf(pid));
      } catch (Exception ignore) {
      }
      String trace = System.getProperty("org.apache.geode.internal.OSProcess.trace");
      if (trace != null && trace.length() > 0) {
        System.out.println("reaped pid: " + pid);
      }
    }
    return nativeExists(pid);
  }

  /** Native existence check for an arbitrary OS process. */
  private static native boolean nativeExists(int pid);

  // Private stuff

  /**
   * Waits for a child process to die and reaps it.
   */
  private static native void waitForPid(int pid);

  /**
   * Waits until the identified process exits. If the process does not exist then returns
   * immediately. Not allowed in pure-Java mode.
   */
  public static void waitForPidToExit(int pid) {
    if (pureMode) {
      throw new RuntimeException(
          LocalizedStrings.OSProcess_WAITFORPIDTOEXIT_NOT_ALLOWED_IN_PURE_JAVA_MODE
              .toLocalizedString());
    }
    checkPid(pid);
    waitForPid(pid);
  }

  /**
   * Sets the current directory of this process. Not allowed in pure-Java mode.
   *
   * @return true if current directory was set; false if not.
   */
  public static boolean setCurrentDirectory(File curDir) {
    if (pureMode) {
      throw new RuntimeException(
          LocalizedStrings.OSProcess_SETCURRENTDIRECTORY_NOT_ALLOWED_IN_PURE_JAVA_MODE
              .toLocalizedString());
    }
    return jniSetCurDir(curDir.getAbsolutePath());
  }

  /**
   * Returns true on success. Returns false and current directory is unchanged on failure.
   */
  private static native boolean jniSetCurDir(String dir);

  /**
   * Reaps a child process if it has died. Does not wait for the child.
   *
   * @param pid the id of the process to reap
   * @return true if it was reaped or lost (someone else reaped it); false if the child still
   *         exists. HACK: If pid is -1 then returns true if this platform needs reaping.
   */
  protected static native boolean reapPid(int pid);

  // Daemon thread that periodically reaps exited children; see the static initializer.
  private static Thread reaperThread;

  // Pids of child processes we have started and not yet reaped (platforms that need reaping).
  protected static Set pids = null;

  // myPid caches result of getProcessId .
  // To provide a stable processId
  // on Linux, where processId may differ per thread, we cache the
  // processId of the reaper thread .
  static final int[] myPid = new int[1]; // cache of my processId; also used as the lock guarding it
  static boolean reaperStarted = false; // true if cache is valid

  /**
   * On Linux, getProcessId returns the processId of the calling thread
   */
  static native int getProcessId();

  // One-time setup of the pid cache and (when the platform needs it) the reaper thread.
  static {
    if (pureMode) {
      // just initialize the pid cache
      synchronized (myPid) {
        int pid = 0;
        // Windows checks have been disabled as the ManagementFactory hack
        // to find the PID has been seen to work on Windows 7. Add checks
        // for more specific versions of Windows if this fails on them
        // Parse the pid out of the RuntimeMXBean name, conventionally "pid@host".
        String name = java.lang.management.ManagementFactory.getRuntimeMXBean().getName();
        int idx = name.indexOf('@');
        try {
          pid = Integer.parseInt(name.substring(0, idx));
        } catch (NumberFormatException nfe) {
          // something changed in the RuntimeMXBean name; leave pid == 0
        }
        myPid[0] = pid;
        reaperStarted = true;
      }
    } else {
      if (reapPid(-1)) {
        // Platform needs child reaping: track pids and start a daemon reaper.
        pids = Collections.synchronizedSet(new HashSet());
        ThreadGroup group = LoggingThreadGroup
            .createThreadGroup(LocalizedStrings.OSProcess_REAPER_THREAD.toLocalizedString());
        reaperThread = new Thread(group, new Runnable() {
          public void run() {
            // Cache this thread's process id first so getId() can unblock.
            synchronized (myPid) {
              myPid[0] = getProcessId();
              reaperStarted = true;
            }
            String trace = System.getProperty("org.apache.geode.internal.OSProcess.trace");
            int secondsToSleep = (1000 * 60) * 1; // one minute
            if (trace != null && trace.length() > 0) {
              secondsToSleep = 1000; // every second
            }
            // reap all the pids we have every once in a while
            while (true) {
              SystemFailure.checkFailure();
              try {
                Iterator it = pids.iterator();
                while (it.hasNext()) {
                  Object o = it.next();
                  int pid = ((Integer) o).intValue();
                  if (reapPid(pid)) {
                    try {
                      it.remove();
                      if (trace != null && trace.length() > 0) {
                        System.out.println("reaped pid: " + pid);
                      }
                    } catch (Exception e) {
                      // make sure and remove it since it was reaped;
                      // the rethrown e is swallowed by the outer catch below.
                      pids.remove(o);
                      if (trace != null && trace.length() > 0) {
                        System.out.println("reaped pid: " + pid);
                      }
                      throw e;
                    }
                  }
                }
                Thread.sleep(secondsToSleep);
              } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break; // shut the reaper down on interrupt
              } catch (Exception e) {
                // e.printStackTrace(); // DEBUG
                // ignore and keep reaping
              }
            }
          }
        }, "osprocess reaper");
        reaperThread.setDaemon(true);
        reaperThread.start();
      } else {
        // platform does not need a reaper thread,
        // just initialize the pid cache
        synchronized (myPid) {
          myPid[0] = getProcessId();
          reaperStarted = true;
        }
      }
    }
  }

  /**
   * Get the vm's process id. On Linux, this returns the processId of the reaper thread. If we are
   * in {@linkplain PureJavaMode#isPure pure Java mode}, then <code>0</code> is returned.
   *
   * @return the vm's process id.
   */
  public static int getId() {
    boolean done = false;
    int result = -1;
    // Spin (with a short sleep) until the static initializer / reaper thread
    // has populated the pid cache.
    for (;;) {
      synchronized (myPid) {
        done = reaperStarted;
        result = myPid[0];
      }
      if (done)
        break; // wait for reaper thread to initialize myPid
      try {
        Thread.sleep(100);
      } catch (InterruptedException ignore) {
        Thread.currentThread().interrupt();
      }
    }
    return result;
  }

  /** Redirects stdout/stderr to {@code newOutput}; equivalent to redirectOutput(newOutput, true). */
  public static PrintStream redirectOutput(File newOutput) throws IOException {
    return redirectOutput(newOutput, true);
  }

  /**
   * Opens {@code newOutput} for append and, when redirection is enabled, points
   * System.out/System.err (and in native mode the C-level stdout) at it.
   *
   * @param newOutput file to append output to
   * @param setOut if false, the stream is created but System.out/err are left untouched
   * @return the new PrintStream over {@code newOutput}
   * @throws IOException if the file cannot be opened
   */
  public static PrintStream redirectOutput(File newOutput, boolean setOut) throws IOException {
    FileOutputStream newFileStream = null;
    try {
      newFileStream = new FileOutputStream(newOutput, true);
    } catch (FileNotFoundException e) {
      throw new IOException("File not found: " + newOutput, e);
    }
    final PrintStream newPrintStream =
        new PrintStream(new BufferedOutputStream(newFileStream, 128), true);
    // NOTE(review): gating on DISABLE_REDIRECTION_CONFIGURATION to *enable* the
    // redirect looks inverted — presumably that flag means "ignore the
    // configured redirect and always apply this one"; confirm against its declaration.
    if (((DISABLE_REDIRECTION_CONFIGURATION)
        || (ENABLE_OUTPUT_REDIRECTION && !DISABLE_OUTPUT_REDIRECTION)) && setOut) {
      System.setOut(newPrintStream);
      if (System.err instanceof TeePrintStream) {
        // Keep the tee: only swap its branch stream instead of replacing System.err.
        ((TeePrintStream) System.err).getTeeOutputStream()
            .setBranchOutputStream(new BufferedOutputStream(newFileStream, 128));
      } else {
        System.setErr(newPrintStream);
      }
      if (!pureMode) {
        redirectCOutput(newOutput.getPath());
      }
    }
    assert newPrintStream != null;
    return newPrintStream;
  }

  /** Native redirect of the C-level stdout/stderr to {@code file}. */
  private static native void redirectCOutput(String file);

  /**
   * Registers a signal handler for SIGQUIT on UNIX platforms.
   */
  private static native void registerSigQuitHandler();
}
package org.spongycastle.crypto.engines;

import org.spongycastle.crypto.BlockCipher;
import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.DataLengthException;
import org.spongycastle.crypto.OutputLengthException;
import org.spongycastle.crypto.params.KeyParameter;

/**
 * A class that provides a basic International Data Encryption Algorithm (IDEA) engine.
 * <p>
 * This implementation is based on the "HOWTO: INTERNATIONAL DATA ENCRYPTION ALGORITHM"
 * implementation summary by Fauzan Mirza (F.U.Mirza@sheffield.ac.uk). (barring 1 typo at the
 * end of the mulinv function!).
 * <p>
 * It can be found at ftp://ftp.funet.fi/pub/crypt/cryptography/symmetric/idea/
 * <p>
 * Note: This algorithm was patented in the USA, Japan and Europe. These patents expired in
 * 2011/2012.
 */
public class IDEAEngine
    implements BlockCipher
{
    // IDEA is a 64-bit (8 byte) block cipher.
    protected static final int BLOCK_SIZE = 8;

    // The 52 16-bit subkeys (encryption or decryption schedule); null until init().
    private int[]   workingKey = null;

    /**
     * standard constructor.
     */
    public IDEAEngine()
    {
    }

    /**
     * initialise an IDEA cipher.
     *
     * @param forEncryption whether or not we are for encryption.
     * @param params the parameters required to set up the cipher.
     * @exception IllegalArgumentException if the params argument is
     * inappropriate (anything other than a KeyParameter).
     */
    public void init(
        boolean           forEncryption,
        CipherParameters  params)
    {
        if (params instanceof KeyParameter)
        {
            // Expand (and for decryption, invert) the user key into 52 subkeys.
            workingKey = generateWorkingKey(forEncryption,
                                  ((KeyParameter)params).getKey());
            return;
        }

        throw new IllegalArgumentException("invalid parameter passed to IDEA init - "
            + params.getClass().getName());
    }

    public String getAlgorithmName()
    {
        return "IDEA";
    }

    public int getBlockSize()
    {
        return BLOCK_SIZE;
    }

    /**
     * Process one 8-byte block.
     *
     * @throws IllegalStateException if init() has not been called.
     * @throws DataLengthException if fewer than 8 bytes remain at inOff.
     * @throws OutputLengthException if fewer than 8 bytes remain at outOff.
     * @return the number of bytes processed (always BLOCK_SIZE).
     */
    public int processBlock(
        byte[] in,
        int inOff,
        byte[] out,
        int outOff)
    {
        if (workingKey == null)
        {
            throw new IllegalStateException("IDEA engine not initialised");
        }

        if ((inOff + BLOCK_SIZE) > in.length)
        {
            throw new DataLengthException("input buffer too short");
        }

        if ((outOff + BLOCK_SIZE) > out.length)
        {
            throw new OutputLengthException("output buffer too short");
        }

        ideaFunc(workingKey, in, inOff, out, outOff);

        return BLOCK_SIZE;
    }

    // IDEA keeps no per-block state, so there is nothing to reset.
    public void reset()
    {
    }

    private static final int    MASK = 0xffff;
    private static final int    BASE = 0x10001;

    // Read a big-endian 16-bit word from in at inOff.
    private int bytesToWord(
        byte[]  in,
        int     inOff)
    {
        return ((in[inOff] << 8) & 0xff00) + (in[inOff + 1] & 0xff);
    }

    // Write word as big-endian 16 bits into out at outOff.
    private void wordToBytes(
        int     word,
        byte[]  out,
        int     outOff)
    {
        out[outOff] = (byte)(word >>> 8);
        out[outOff + 1] = (byte)word;
    }

    /**
     * return x = x * y where the multiplication is done modulo
     * 65537 (0x10001) (as defined in the IDEA specification) and
     * a zero input is taken to be 65536 (0x10000).
     *
     * @param x the x value
     * @param y the y value
     * @return x = x * y
     */
    private int mul(
        int x,
        int y)
    {
        if (x == 0)
        {
            // 0 represents 65536 == -1 (mod 65537), so 0*y == BASE - y.
            x = (BASE - y);
        }
        else if (y == 0)
        {
            x = (BASE - x);
        }
        else
        {
            // Low-high decomposition trick: reduce p mod 65537 without division.
            int     p = x * y;

            y = p & MASK;
            x = p >>> 16;
            x = y - x + ((y < x) ? 1 : 0);
        }

        return x & MASK;
    }

    // One full IDEA transform (8 rounds + output transformation) over a block.
    // The statement order within each round is significant.
    private void ideaFunc(
        int[]   workingKey,
        byte[]  in,
        int     inOff,
        byte[]  out,
        int     outOff)
    {
        int     x0, x1, x2, x3, t0, t1;
        int     keyOff = 0;

        x0 = bytesToWord(in, inOff);
        x1 = bytesToWord(in, inOff + 2);
        x2 = bytesToWord(in, inOff + 4);
        x3 = bytesToWord(in, inOff + 6);

        for (int round = 0; round < 8; round++)
        {
            x0 = mul(x0, workingKey[keyOff++]);
            x1 += workingKey[keyOff++];
            x1 &= MASK;
            x2 += workingKey[keyOff++];
            x2 &= MASK;
            x3 = mul(x3, workingKey[keyOff++]);

            t0 = x1;
            t1 = x2;
            x2 ^= x0;
            x1 ^= x3;

            x2 = mul(x2, workingKey[keyOff++]);
            x1 += x2;
            x1 &= MASK;

            x1 = mul(x1, workingKey[keyOff++]);
            x2 += x1;
            x2 &= MASK;

            x0 ^= x1;
            x3 ^= x2;
            x1 ^= t1;
            x2 ^= t0;
        }

        wordToBytes(mul(x0, workingKey[keyOff++]), out, outOff);
        wordToBytes(x2 + workingKey[keyOff++], out, outOff + 2);  /* NB: Order */
        wordToBytes(x1 + workingKey[keyOff++], out, outOff + 4);
        wordToBytes(mul(x3, workingKey[keyOff]), out, outOff + 6);
    }

    /**
     * The following function is used to expand the user key to the encryption
     * subkey. The first 16 bytes are the user key, and the rest of the subkey
     * is calculated by rotating the previous 16 bytes by 25 bits to the left,
     * and so on until the subkey is completed.
     */
    private int[] expandKey(
        byte[]  uKey)
    {
        int[]   key = new int[52];

        if (uKey.length < 16)
        {
            // Short keys are left-padded with zero bytes to 16 bytes.
            byte[]  tmp = new byte[16];

            System.arraycopy(uKey, 0, tmp, tmp.length - uKey.length, uKey.length);

            uKey = tmp;
        }

        for (int i = 0; i < 8; i++)
        {
            key[i] = bytesToWord(uKey, i * 2);
        }

        // Each subsequent subkey is built from two earlier ones, implementing
        // the 25-bit left rotation of the 128-bit key described above.
        for (int i = 8; i < 52; i++)
        {
            if ((i & 7) < 6)
            {
                key[i] = ((key[i - 7] & 127) << 9 | key[i - 6] >> 7) & MASK;
            }
            else if ((i & 7) == 6)
            {
                key[i] = ((key[i - 7] & 127) << 9 | key[i - 14] >> 7) & MASK;
            }
            else
            {
                key[i] = ((key[i - 15] & 127) << 9 | key[i - 14] >> 7) & MASK;
            }
        }

        return key;
    }

    /**
     * This function computes multiplicative inverse using Euclid's Greatest
     * Common Divisor algorithm. Zero and one are self inverse.
     * <p>
     * i.e. x * mulInv(x) == 1 (modulo BASE)
     */
    private int mulInv(
        int x)
    {
        int t0, t1, q, y;

        if (x < 2)
        {
            return x;
        }

        t0 = 1;
        t1 = BASE / x;
        y  = BASE % x;

        while (y != 1)
        {
            q = x / y;
            x = x % y;
            t0 = (t0 + (t1 * q)) & MASK;
            if (x == 1)
            {
                return t0;
            }
            q = y / x;
            y = y % x;
            t1 = (t1 + (t0 * q)) & MASK;
        }

        return (1 - t1) & MASK;
    }

    /**
     * Return the additive inverse of x.
     * <p>
     * i.e. x + addInv(x) == 0 (modulo 2^16)
     */
    int addInv(
        int x)
    {
        return (0 - x) & MASK;
    }

    /**
     * The function to invert the encryption subkey to the decryption subkey.
     * It also involves the multiplicative inverse and the additive inverse functions.
     */
    private int[] invertKey(
        int[] inKey)
    {
        int     t1, t2, t3, t4;
        int     p = 52;                 /* We work backwards */
        int[]   key = new int[52];
        int     inOff = 0;

        // Output transformation becomes the first decryption round's keys.
        t1 = mulInv(inKey[inOff++]);
        t2 = addInv(inKey[inOff++]);
        t3 = addInv(inKey[inOff++]);
        t4 = mulInv(inKey[inOff++]);
        key[--p] = t4;
        key[--p] = t3;
        key[--p] = t2;
        key[--p] = t1;

        for (int round = 1; round < 8; round++)
        {
            t1 = inKey[inOff++];
            t2 = inKey[inOff++];
            key[--p] = t2;
            key[--p] = t1;

            t1 = mulInv(inKey[inOff++]);
            t2 = addInv(inKey[inOff++]);
            t3 = addInv(inKey[inOff++]);
            t4 = mulInv(inKey[inOff++]);
            key[--p] = t4;
            key[--p] = t2; /* NB: Order */
            key[--p] = t3;
            key[--p] = t1;
        }

        t1 = inKey[inOff++];
        t2 = inKey[inOff++];
        key[--p] = t2;
        key[--p] = t1;

        t1 = mulInv(inKey[inOff++]);
        t2 = addInv(inKey[inOff++]);
        t3 = addInv(inKey[inOff++]);
        t4 = mulInv(inKey[inOff]);
        key[--p] = t4;
        key[--p] = t3;
        key[--p] = t2;
        key[--p] = t1;

        return key;
    }

    // Build the subkey schedule: plain expansion for encryption,
    // inverted expansion for decryption.
    private int[] generateWorkingKey(
        boolean forEncryption,
        byte[]  userKey)
    {
        if (forEncryption)
        {
            return expandKey(userKey);
        }
        else
        {
            return invertKey(expandKey(userKey));
        }
    }
}
/*
 * Licensed to GraphHopper and Peter Karich under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * GraphHopper licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.graphhopper.routing.ch;

import com.graphhopper.routing.*;
import com.graphhopper.routing.ch.PrepareContractionHierarchies.Shortcut;
import com.graphhopper.routing.util.*;
import com.graphhopper.storage.*;
import com.graphhopper.util.*;
import gnu.trove.list.TIntList;

import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

import static org.junit.Assert.*;

import org.junit.Before;
import org.junit.Test;

/**
 * Tests for contraction-hierarchy preparation: shortcut discovery/counting,
 * directed graphs, shortcut unpacking and multi-profile preparation.
 *
 * @author Peter Karich
 */
public class PrepareContractionHierarchiesTest
{
    // All tests use a single car profile with shortest-path weighting,
    // node-based traversal.
    private final CarFlagEncoder carEncoder = new CarFlagEncoder();
    private final EncodingManager encodingManager = new EncodingManager(carEncoder);
    private final Weighting weighting = new ShortestWeighting(carEncoder);
    private final TraversalMode tMode = TraversalMode.NODE_BASED;
    private Directory dir;

    // Fresh storage with one CH graph for the default weighting.
    GraphHopperStorage createGHStorage()
    {
        return new GraphBuilder(encodingManager).setCHGraph(weighting).create();
    }

    GraphHopperStorage createExampleGraph()
    {
        GraphHopperStorage g = createGHStorage();

        //5-1-----2
        // \ __/|
        // 0 |
        // / |
        // 4-----3
        //
        g.edge(0, 1, 1, true);
        g.edge(0, 2, 1, true);
        g.edge(0, 4, 3, true);
        g.edge(1, 2, 2, true);
        g.edge(2, 3, 1, true);
        g.edge(4, 3, 2, true);
        g.edge(5, 1, 2, true);
        return g;
    }

    @Before
    public void setUp()
    {
        dir = new GHDirectory("", DAType.RAM_INT);
    }

    // A witness search that ignores the contracted node must find a longer
    // (or no) path than the plain shortest path.
    @Test
    public void testShortestPathSkipNode()
    {
        GraphHopperStorage g = createExampleGraph();
        double normalDist = new Dijkstra(g, carEncoder, weighting, tMode).calcPath(4, 2).getDistance();
        DijkstraOneToMany algo = new DijkstraOneToMany(g, carEncoder, weighting, tMode);
        CHGraph lg = g.getGraph(CHGraph.class);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.initFromGraph().prepareNodes();
        algo.setEdgeFilter(new PrepareContractionHierarchies.IgnoreNodeFilter(lg, g.getNodes() + 1).setAvoidNode(3));
        algo.setWeightLimit(100);
        int nodeEntry = algo.findEndNode(4, 2);
        assertTrue(algo.getWeight(nodeEntry) > normalDist);

        algo.clear();
        // With only one visited node allowed the search cannot reach the target.
        nodeEntry = algo.setLimitVisitedNodes(1).findEndNode(4, 2);
        assertEquals(-1, nodeEntry);
    }

    @Test
    public void testShortestPathSkipNode2()
    {
        GraphHopperStorage g = createExampleGraph();
        CHGraph lg = g.getGraph(CHGraph.class);
        double normalDist = new Dijkstra(g, carEncoder, weighting, tMode).calcPath(4, 2).getDistance();
        assertEquals(3, normalDist, 1e-5);
        DijkstraOneToMany algo = new DijkstraOneToMany(g, carEncoder, weighting, tMode);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.initFromGraph().prepareNodes();
        algo.setEdgeFilter(new PrepareContractionHierarchies.IgnoreNodeFilter(lg, g.getNodes() + 1).setAvoidNode(3));
        algo.setWeightLimit(10);
        int nodeEntry = algo.findEndNode(4, 2);
        assertEquals(4, algo.getWeight(nodeEntry), 1e-5);

        nodeEntry = algo.findEndNode(4, 1);
        assertEquals(4, algo.getWeight(nodeEntry), 1e-5);
    }

    // A tight weight limit must stop the witness search before the target.
    @Test
    public void testShortestPathLimit()
    {
        GraphHopperStorage g = createExampleGraph();
        CHGraph lg = g.getGraph(CHGraph.class);

        DijkstraOneToMany algo = new DijkstraOneToMany(g, carEncoder, weighting, tMode);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.initFromGraph().prepareNodes();
        algo.setEdgeFilter(new PrepareContractionHierarchies.IgnoreNodeFilter(lg, g.getNodes() + 1).setAvoidNode(0));
        algo.setWeightLimit(2);
        int endNode = algo.findEndNode(4, 1);
        // did not reach endNode
        assertNotEquals(1, endNode);
    }

    @Test
    public void testAddShortcuts()
    {
        GraphHopperStorage g = createExampleGraph();
        CHGraph lg = g.getGraph(CHGraph.class);
        int old = lg.getAllEdges().getMaxId();
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        // the example graph needs exactly one shortcut
        assertEquals(old + 1, lg.getAllEdges().getMaxId());
    }

    @Test
    public void testMoreComplexGraph()
    {
        GraphHopperStorage g = createGHStorage();
        CHGraph lg = g.getGraph(CHGraph.class);
        initShortcutsGraph(lg);
        int oldCount = g.getAllEdges().getMaxId();
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        // base graph unchanged, CH graph gains 7 shortcuts
        assertEquals(oldCount, g.getAllEdges().getMaxId());
        assertEquals(oldCount + 7, lg.getAllEdges().getMaxId());
    }

    @Test
    public void testDirectedGraph()
    {
        GraphHopperStorage g = createGHStorage();
        CHGraph lg = g.getGraph(CHGraph.class);
        g.edge(5, 4, 3, false);
        g.edge(4, 5, 10, false);
        g.edge(2, 4, 1, false);
        g.edge(5, 2, 1, false);
        g.edge(3, 5, 1, false);
        g.edge(4, 3, 1, false);
        g.freeze();
        int oldCount = GHUtility.count(lg.getAllEdges());
        assertEquals(6, oldCount);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        assertEquals(2, prepare.getShortcuts());
        assertEquals(oldCount + 2, GHUtility.count(lg.getAllEdges()));
        RoutingAlgorithm algo = prepare.createAlgo(lg,
            new AlgorithmOptions(AlgorithmOptions.DIJKSTRA_BI, carEncoder, weighting, tMode));
        Path p = algo.calcPath(4, 2);
        assertEquals(3, p.getDistance(), 1e-6);
        assertEquals(Helper.createTList(4, 3, 5, 2), p.calcNodes());
    }

    @Test
    public void testDirectedGraph2()
    {
        GraphHopperStorage g = createGHStorage();
        CHGraph lg = g.getGraph(CHGraph.class);
        initDirected2(g);
        int oldCount = GHUtility.count(g.getAllEdges());
        assertEquals(19, oldCount);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        // PrepareTowerNodesShortcutsTest.printEdges(g);
        assertEquals(oldCount, g.getAllEdges().getMaxId());
        assertEquals(oldCount, GHUtility.count(g.getAllEdges()));

        assertEquals(9, prepare.getShortcuts());
        assertEquals(oldCount + 9, lg.getAllEdges().getMaxId());
        assertEquals(oldCount + 9, GHUtility.count(lg.getAllEdges()));
        RoutingAlgorithm algo = prepare.createAlgo(lg,
            new AlgorithmOptions(AlgorithmOptions.DIJKSTRA_BI, carEncoder, weighting, tMode));
        Path p = algo.calcPath(0, 10);
        assertEquals(10, p.getDistance(), 1e-6);
        assertEquals(Helper.createTList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), p.calcNodes());
    }

    @Test
    public void testDirectedGraph3()
    {
        GraphHopperStorage g = createGHStorage();
        CHGraphImpl lg = (CHGraphImpl) g.getGraph(CHGraph.class);
        //5 6 7
        // \|/
        //4-3_1<-\ 10
        // \_|/
        // 0___2_11

        g.edge(0, 2, 2, true);
        g.edge(10, 2, 2, true);
        g.edge(11, 2, 2, true);
        // create a longer one directional edge => no longish one-dir shortcut should be created
        g.edge(2, 1, 2, true);
        g.edge(2, 1, 10, false);

        g.edge(1, 3, 2, true);
        g.edge(3, 4, 2, true);
        g.edge(3, 5, 2, true);
        g.edge(3, 6, 2, true);
        g.edge(3, 7, 2, true);

        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.initFromGraph();
        prepare.prepareNodes();

        // find all shortcuts if we contract node 1
        Collection<Shortcut> scs = prepare.testFindShortcuts(1);
        assertEquals(2, scs.size());
        Iterator<Shortcut> iter = scs.iterator();
        Shortcut sc1 = iter.next();
        Shortcut sc2 = iter.next();
        // normalize iteration order so sc1 is the cheaper shortcut
        if (sc1.weight > sc2.weight)
        {
            Shortcut tmp = sc1;
            sc1 = sc2;
            sc2 = tmp;
        }

        // both dirs
        assertTrue(sc1.toString(), sc1.from == 3 && sc1.to == 2);
        assertTrue(sc1.toString(), carEncoder.isForward(sc1.flags) && carEncoder.isBackward(sc1.flags));

        // directed
        assertTrue(sc2.toString(), sc2.from == 2 && sc2.to == 3);
        assertTrue(sc2.toString(), carEncoder.isForward(sc2.flags));

        assertEquals(sc1.toString(), 4, sc1.weight, 1e-4);
        assertEquals(sc2.toString(), 12, sc2.weight, 1e-4);
    }

    void initRoundaboutGraph( Graph g )
    {
        // roundabout:
        //16-0-9-10--11 12<-13
        // \ \ / \
        // 17 \| 7-8-..
        // -15-1--2--3--4 / /
        // / \-5->6/ /
        // -14 \________/
        g.edge(16, 0, 1, true);
        g.edge(0, 9, 1, true);
        g.edge(0, 17, 1, true);
        g.edge(9, 10, 1, true);
        g.edge(10, 11, 1, true);
        g.edge(11, 28, 1, true);
        g.edge(28, 29, 1, true);
        g.edge(29, 30, 1, true);
        g.edge(30, 31, 1, true);
        g.edge(31, 4, 1, true);

        g.edge(17, 1, 1, true);
        g.edge(15, 1, 1, true);
        g.edge(14, 1, 1, true);
        g.edge(14, 18, 1, true);
        g.edge(18, 19, 1, true);
        g.edge(19, 20, 1, true);
        g.edge(20, 15, 1, true);
        g.edge(19, 21, 1, true);
        g.edge(21, 16, 1, true);
        g.edge(1, 2, 1, true);
        g.edge(2, 3, 1, true);
        g.edge(3, 4, 1, true);

        // one-way roundabout 4->5->6->7->13->12->4
        g.edge(4, 5, 1, false);
        g.edge(5, 6, 1, false);
        g.edge(6, 7, 1, false);
        g.edge(7, 13, 1, false);
        g.edge(13, 12, 1, false);
        g.edge(12, 4, 1, false);

        g.edge(7, 8, 1, true);
        g.edge(8, 22, 1, true);
        g.edge(22, 23, 1, true);
        g.edge(23, 24, 1, true);
        g.edge(24, 25, 1, true);
        g.edge(25, 27, 1, true);
        g.edge(27, 5, 1, true);
        g.edge(25, 26, 1, false);
        g.edge(26, 25, 1, false);
    }

    @Test
    public void testRoundaboutUnpacking()
    {
        GraphHopperStorage g = createGHStorage();
        CHGraph lg = g.getGraph(CHGraph.class);
        initRoundaboutGraph(g);
        int oldCount = g.getAllEdges().getMaxId();
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        assertEquals(oldCount, g.getAllEdges().getMaxId());
        assertEquals(oldCount + 23, lg.getAllEdges().getMaxId());
        RoutingAlgorithm algo = prepare.createAlgo(lg,
            new AlgorithmOptions(AlgorithmOptions.DIJKSTRA_BI, carEncoder, weighting, tMode));
        Path p = algo.calcPath(4, 7);
        assertEquals(Helper.createTList(4, 5, 6, 7), p.calcNodes());
    }

    // Builds a graph with existing (hand-made) shortcuts around node 4 and
    // checks that contracting 4 yields one shortcut per direction.
    @Test
    public void testFindShortcuts_Roundabout()
    {
        GraphHopperStorage ghStorage = createGHStorage();
        CHGraph lg = ghStorage.getGraph(CHGraph.class);
        EdgeIteratorState iter1_3 = ghStorage.edge(1, 3, 1, true);
        EdgeIteratorState iter3_4 = ghStorage.edge(3, 4, 1, true);
        EdgeIteratorState iter4_5 = ghStorage.edge(4, 5, 1, false);
        EdgeIteratorState iter5_6 = ghStorage.edge(5, 6, 1, false);
        EdgeIteratorState iter6_8 = ghStorage.edge(6, 8, 2, false);
        EdgeIteratorState iter8_4 = ghStorage.edge(8, 4, 1, false);
        ghStorage.edge(6, 7, 1, true);
        ghStorage.freeze();

        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, ghStorage, lg, carEncoder, weighting, tMode);
        // bidirectional shortcut 1-4 skipping edges 1-3, 3-4
        CHEdgeIteratorState tmp = lg.shortcut(1, 4);
        tmp.setFlags(PrepareEncoder.getScDirMask());
        tmp.setWeight(2);
        tmp.setSkippedEdges(iter1_3.getEdge(), iter3_4.getEdge());
        long f = PrepareEncoder.getScFwdDir();
        // forward-only shortcuts 4->6 and 6->4
        tmp = lg.shortcut(4, 6);
        tmp.setFlags(f);
        tmp.setWeight(2);
        tmp.setSkippedEdges(iter4_5.getEdge(), iter5_6.getEdge());
        tmp = lg.shortcut(6, 4);
        tmp.setFlags(f);
        tmp.setWeight(3);
        tmp.setSkippedEdges(iter6_8.getEdge(), iter8_4.getEdge());

        prepare.initFromGraph();
        prepare.prepareNodes();
        // pretend the neighbours were already contracted
        lg.setLevel(3, 3);
        lg.setLevel(5, 5);
        lg.setLevel(7, 7);
        lg.setLevel(8, 8);

        // there should be two different shortcuts for both directions!
        Collection<Shortcut> sc = prepare.testFindShortcuts(4);
        assertEquals(2, sc.size());

        Iterator<Shortcut> iter = sc.iterator();
        Shortcut sc1 = iter.next();
        Shortcut sc2 = iter.next();
        if (sc1.from > sc2.from)
        {
            Shortcut tmpSc = sc1;
            sc1 = sc2;
            sc2 = tmpSc;
        }

        assertEquals("1->6, weight:4.0 (7,8)", sc1.toString());
        assertEquals("6->1, weight:5.0 (9,7)", sc2.toString());
    }

    // Linear chain 10-0-1-...-6 with stacked one-way shortcuts 0->2, 0->3, ... 0->6,
    // each skipping the previous shortcut plus one base edge.
    void initUnpackingGraph( GraphHopperStorage ghStorage, CHGraph g, Weighting w )
    {
        final long flags = carEncoder.setProperties(30, true, false);
        double dist = 1;
        g.edge(10, 0).setDistance(dist).setFlags(flags);
        EdgeIteratorState edgeState01 = g.edge(0, 1);
        edgeState01.setDistance(dist).setFlags(flags);
        EdgeIteratorState edgeState12 = g.edge(1, 2).setDistance(dist).setFlags(flags);
        EdgeIteratorState edgeState23 = g.edge(2, 3).setDistance(dist).setFlags(flags);
        EdgeIteratorState edgeState34 = g.edge(3, 4).setDistance(dist).setFlags(flags);
        EdgeIteratorState edgeState45 = g.edge(4, 5).setDistance(dist).setFlags(flags);
        EdgeIteratorState edgeState56 = g.edge(5, 6).setDistance(dist).setFlags(flags);
        long oneDirFlags = PrepareEncoder.getScFwdDir();

        int tmpEdgeId = edgeState01.getEdge();
        ghStorage.freeze();
        CHEdgeIteratorState sc0_2 = g.shortcut(0, 2);
        int x = EdgeIterator.NO_EDGE;
        sc0_2.setWeight(w.calcWeight(edgeState01, false, x) + w.calcWeight(edgeState12, false, x)).setDistance(2 * dist).setFlags(oneDirFlags);
        sc0_2.setSkippedEdges(tmpEdgeId, edgeState12.getEdge());
        tmpEdgeId = sc0_2.getEdge();
        CHEdgeIteratorState sc0_3 = g.shortcut(0, 3);
        sc0_3.setWeight(sc0_2.getWeight() + w.calcWeight(edgeState23, false, x)).setDistance(3 * dist).setFlags(oneDirFlags);
        sc0_3.setSkippedEdges(tmpEdgeId, edgeState23.getEdge());
        tmpEdgeId = sc0_3.getEdge();
        CHEdgeIteratorState sc0_4 = g.shortcut(0, 4);
        sc0_4.setWeight(sc0_3.getWeight() + w.calcWeight(edgeState34, false, x)).setDistance(4).setFlags(oneDirFlags);
        sc0_4.setSkippedEdges(tmpEdgeId, edgeState34.getEdge());
        tmpEdgeId = sc0_4.getEdge();
        CHEdgeIteratorState sc0_5 = g.shortcut(0, 5);
        sc0_5.setWeight(sc0_4.getWeight() + w.calcWeight(edgeState45, false, x)).setDistance(5).setFlags(oneDirFlags);
        sc0_5.setSkippedEdges(tmpEdgeId, edgeState45.getEdge());
        tmpEdgeId = sc0_5.getEdge();
        CHEdgeIteratorState sc0_6 = g.shortcut(0, 6);
        sc0_6.setWeight(sc0_5.getWeight() + w.calcWeight(edgeState56, false, x)).setDistance(6).setFlags(oneDirFlags);
        sc0_6.setSkippedEdges(tmpEdgeId, edgeState56.getEdge());
        // strictly decreasing levels along the chain so unpacking recurses correctly
        g.setLevel(0, 10);
        g.setLevel(6, 9);
        g.setLevel(5, 8);
        g.setLevel(4, 7);
        g.setLevel(3, 6);
        g.setLevel(2, 5);
        g.setLevel(1, 4);
        g.setLevel(10, 3);
    }

    @Test
    public void testUnpackingOrder()
    {
        GraphHopperStorage ghStorage = createGHStorage();
        CHGraph lg = ghStorage.getGraph(CHGraph.class);
        initUnpackingGraph(ghStorage, lg, weighting);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, ghStorage, lg, carEncoder, weighting, tMode);
        RoutingAlgorithm algo = prepare.createAlgo(lg,
            new AlgorithmOptions(AlgorithmOptions.DIJKSTRA_BI, carEncoder, weighting, tMode));
        Path p = algo.calcPath(10, 6);
        assertEquals(7, p.getDistance(), 1e-5);
        assertEquals(Helper.createTList(10, 0, 1, 2, 3, 4, 5, 6), p.calcNodes());
    }

    @Test
    public void testUnpackingOrder_Fastest()
    {
        GraphHopperStorage ghStorage = createGHStorage();
        CHGraph lg = ghStorage.getGraph(CHGraph.class);
        Weighting w = new FastestWeighting(carEncoder);
        initUnpackingGraph(ghStorage, lg, w);

        // NOTE(review): the preparation and algo below use the field "weighting"
        // (shortest), not the local fastest weighting "w" the graph was built
        // with — presumably intentional for this legacy test, but confirm.
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, ghStorage, lg, carEncoder, weighting, tMode);
        RoutingAlgorithm algo = prepare.createAlgo(lg,
            new AlgorithmOptions(AlgorithmOptions.DIJKSTRA_BI, carEncoder, weighting, tMode));
        Path p = algo.calcPath(10, 6);
        assertEquals(7, p.getDistance(), 1e-1);
        assertEquals(Helper.createTList(10, 0, 1, 2, 3, 4, 5, 6), p.calcNodes());
    }

    @Test
    public void testCircleBug()
    {
        GraphHopperStorage g = createGHStorage();
        CHGraph lg = g.getGraph(CHGraph.class);
        // /--1
        // -0--/
        // |
        g.edge(0, 1, 10, true);
        g.edge(0, 1, 4, true);
        g.edge(0, 2, 10, true);
        g.edge(0, 3, 10, true);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        // parallel edges must not produce shortcuts
        assertEquals(0, prepare.getShortcuts());
    }

    @Test
    public void testBug178()
    {
        // 5--------6__
        // | | \
        // 0-1->-2--3--4
        // \-<-/
        //
        GraphHopperStorage g = createGHStorage();
        CHGraph lg = g.getGraph(CHGraph.class);
        g.edge(1, 2, 1, false);
        g.edge(2, 1, 1, false);

        g.edge(5, 0, 1, true);
        g.edge(5, 6, 1, true);
        g.edge(0, 1, 1, true);
        g.edge(2, 3, 1, true);
        g.edge(3, 4, 1, true);
        g.edge(6, 3, 1, true);

        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, g, lg, carEncoder, weighting, tMode);
        prepare.doWork();
        assertEquals(2, prepare.getShortcuts());
    }

    // 0-1-2-3-4
    // | / |
    // | 8 |
    // \ / /
    // 7-6-5-/
    void initBiGraph( Graph graph )
    {
        graph.edge(0, 1, 100, true);
        graph.edge(1, 2, 1, true);
        graph.edge(2, 3, 1, true);
        graph.edge(3, 4, 1, true);
        graph.edge(4, 5, 25, true);
        graph.edge(5, 6, 25, true);
        graph.edge(6, 7, 5, true);
        graph.edge(7, 0, 5, true);
        graph.edge(3, 8, 20, true);
        graph.edge(8, 6, 20, true);
    }

    // 0-1-.....-9-10
    // | ^ \
    // | | |
    // 17-16-...-11<-/
    public static void initDirected2( Graph g )
    {
        g.edge(0, 1, 1, true);
        g.edge(1, 2, 1, true);
        g.edge(2, 3, 1, true);
        g.edge(3, 4, 1, true);
        g.edge(4, 5, 1, true);
        g.edge(5, 6, 1, true);
        g.edge(6, 7, 1, true);
        g.edge(7, 8, 1, true);
        g.edge(8, 9, 1, true);
        g.edge(9, 10, 1, true);
        g.edge(10, 11, 1, false);
        g.edge(11, 12, 1, true);
        g.edge(11, 9, 3, false);
        g.edge(12, 13, 1, true);
        g.edge(13, 14, 1, true);
        g.edge(14, 15, 1, true);
        g.edge(15, 16, 1, true);
        g.edge(16, 17, 1, true);
        g.edge(17, 0, 1, true);
    }

    // 8
    // |
    // 6->0->1->3->7
    // | |
    // | v
    //10<-2---4<---5
    // 9
    public static void initDirected1( Graph g )
    {
        g.edge(0, 8, 1, true);
        g.edge(0, 1, 1, false);
        g.edge(1, 3, 1, false);
        g.edge(3, 7, 1, false);
        g.edge(3, 5, 1, false);
        g.edge(5, 4, 1, false);
        g.edge(4, 2, 1, true);
        g.edge(2, 9, 1, false);
        g.edge(2, 10, 1, false);
        g.edge(2, 6, 1, true);
        g.edge(6, 0, 1, false);
    }

    // prepare-routing.svg
    public static Graph initShortcutsGraph( Graph g )
    {
        g.edge(0, 1, 1, true);
        g.edge(0, 2, 1, true);
        g.edge(1, 2, 1, true);
        g.edge(2, 3, 1.5, true);
        g.edge(1, 4, 1, true);
        g.edge(2, 9, 1, true);
        g.edge(9, 3, 1, true);
        g.edge(10, 3, 1, true);
        g.edge(4, 5, 1, true);
        g.edge(5, 6, 1, true);
        g.edge(6, 7, 1, true);
        g.edge(7, 8, 1, true);
        g.edge(8, 9, 1, true);
        g.edge(4, 11, 1, true);
        g.edge(9, 14, 1, true);
        g.edge(10, 14, 1, true);
        g.edge(11, 12, 1, true);
        g.edge(12, 15, 1, true);
        g.edge(12, 13, 1, true);
        g.edge(13, 16, 1, true);
        g.edge(15, 16, 2, true);
        g.edge(14, 16, 1, true);
        return g;
    }

    // public static void printEdges(CHGraph g) {
    // RawEdgeIterator iter = g.getAllEdges();
    // while (iter.next()) {
    // EdgeSkipIterator single = g.getEdgeProps(iter.edge(), iter.nodeB());
    // System.out.println(iter.nodeA() + "<->" + iter.nodeB() + " \\"
    // + single.skippedEdge1() + "," + single.skippedEdge2() + " (" + iter.edge() + ")"
    // + ", dist: " + (float) iter.weight()
    // + ", level:" + g.getLevel(iter.nodeA()) + "<->" + g.getLevel(iter.nodeB())
    // + ", bothDir:" + CarFlagEncoder.isBoth(iter.setProperties()));
    // }
    // System.out.println("---");
    // }

    // Sanity check that packing two node ids into one long round-trips bitwise.
    @Test
    public void testBits()
    {
        int fromNode = Integer.MAX_VALUE / 3 * 2;
        int endNode = Integer.MAX_VALUE / 37 * 17;

        long edgeId = (long) fromNode << 32 | endNode;
        assertEquals((BitUtil.BIG.toBitString(edgeId)),
                BitUtil.BIG.toLastBitString(fromNode, 32) + BitUtil.BIG.toLastBitString(endNode, 32));
    }

    @Test
    public void testMultiplePreparationsIdenticalView()
    {
        CarFlagEncoder tmpCarEncoder = new CarFlagEncoder();
        BikeFlagEncoder tmpBikeEncoder = new BikeFlagEncoder();
        EncodingManager tmpEncodingManager = new EncodingManager(tmpCarEncoder, tmpBikeEncoder);

        // FastestWeighting would lead to different shortcuts due to different default speeds for bike and car
        Weighting carWeighting = new ShortestWeighting(tmpCarEncoder);
        Weighting bikeWeighting = new ShortestWeighting(tmpBikeEncoder);

        List<Weighting> chWeightings = Arrays.asList(carWeighting, bikeWeighting);
        GraphHopperStorage ghStorage = new GraphHopperStorage(chWeightings, dir, tmpEncodingManager,
                false, new GraphExtension.NoOpExtension()).create(1000);
        initShortcutsGraph(ghStorage);
        ghStorage.freeze();

        // identical access/weights => identical preparation for both profiles
        for (Weighting w : chWeightings)
        {
            checkPath(ghStorage, w, 7, 5, Helper.createTList(3, 9, 14, 16, 13, 12));
        }
    }

    @Test
    public void testMultiplePreparationsDifferentView()
    {
        CarFlagEncoder tmpCarEncoder = new CarFlagEncoder();
        BikeFlagEncoder tmpBikeEncoder = new BikeFlagEncoder();
        EncodingManager tmpEncodingManager = new EncodingManager(tmpCarEncoder, tmpBikeEncoder);

        Weighting carWeighting = new FastestWeighting(tmpCarEncoder);
        Weighting bikeWeighting = new FastestWeighting(tmpBikeEncoder);

        List<Weighting> chWeightings = Arrays.asList(carWeighting, bikeWeighting);
        GraphHopperStorage ghStorage = new GraphHopperStorage(chWeightings, dir, tmpEncodingManager,
                false, new GraphExtension.NoOpExtension()).create(1000);
        initShortcutsGraph(ghStorage);
        // block edge 9-14 for bikes only => bike preparation must differ
        EdgeIteratorState edge = GHUtility.getEdge(ghStorage, 9, 14);
        edge.setFlags(tmpBikeEncoder.setAccess(edge.getFlags(), false, false));
        ghStorage.freeze();

        checkPath(ghStorage, carWeighting, 7, 5, Helper.createTList(3, 9, 14, 16, 13, 12));
        // detour around blocked 9,14
        checkPath(ghStorage, bikeWeighting, 9, 5, Helper.createTList(3, 10, 14, 16, 13, 12));
    }

    // Runs a full preparation for weighting w and checks shortcut count plus
    // the unpacked 3->12 path against expectations.
    void checkPath( GraphHopperStorage ghStorage, Weighting w, int expShortcuts, double expDistance, TIntList expNodes )
    {
        CHGraph lg = ghStorage.getGraph(CHGraph.class, w);
        PrepareContractionHierarchies prepare =
            new PrepareContractionHierarchies(dir, ghStorage, lg, w.getFlagEncoder(), w, tMode);
        prepare.doWork();
        assertEquals(w.toString(), expShortcuts, prepare.getShortcuts());
        RoutingAlgorithm algo = prepare.createAlgo(lg,
            new AlgorithmOptions(AlgorithmOptions.DIJKSTRA_BI, w.getFlagEncoder(), w, tMode));
        Path p = algo.calcPath(3, 12);
        assertEquals(w.toString(), expDistance,
p.getDistance(), 1e-5); assertEquals(w.toString(), expNodes, p.calcNodes()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.xerces.impl.xs.traversers;

import org.apache.xerces.impl.xs.SchemaGrammar;
import org.apache.xerces.impl.xs.SchemaSymbols;
import org.apache.xerces.impl.xs.XSAnnotationImpl;
import org.apache.xerces.impl.xs.XSAttributeGroupDecl;
import org.apache.xerces.impl.xs.util.XSObjectListImpl;
import org.apache.xerces.util.DOMUtil;
import org.apache.xerces.util.XMLSymbols;
import org.apache.xerces.xni.QName;
import org.apache.xerces.xs.XSObjectList;
import org.w3c.dom.Element;

/**
 * The attribute group definition schema component traverser.
 *
 * &lt;attributeGroup
 *   id = ID
 *   name = NCName
 *   ref = QName
 *   {any attributes with non-schema namespace . . .}&gt;
 *   Content: (annotation?, ((attribute | attributeGroup)*, anyAttribute?))
 * &lt;/attributeGroup&gt;
 *
 * @xerces.internal
 *
 * @author Rahul Srivastava, Sun Microsystems Inc.
 * @author Sandy Gao, IBM
 *
 * @version $Id$
 */
class XSDAttributeGroupTraverser extends XSDAbstractTraverser {

    XSDAttributeGroupTraverser (XSDHandler handler,
            XSAttributeChecker gAttrCheck) {
        super(handler, gAttrCheck);
    }

    /**
     * Traverses a local &lt;attributeGroup&gt; reference: a "ref" attribute is
     * required, and the only permitted child is an optional &lt;annotation&gt;.
     * Returns the referenced global declaration, or null when "ref" is missing
     * (an s4s-att-must-appear error is reported in that case).
     */
    XSAttributeGroupDecl traverseLocal(Element elmNode,
            XSDocumentInfo schemaDoc,
            SchemaGrammar grammar) {

        // General Attribute Checking for elmNode declared locally
        Object[] attrValues = fAttrChecker.checkAttributes(elmNode, false, schemaDoc);

        // get attribute
        QName refAttr = (QName) attrValues[XSAttributeChecker.ATTIDX_REF];

        XSAttributeGroupDecl attrGrp = null;

        // ref should be here.
        if (refAttr == null) {
            reportSchemaError("s4s-att-must-appear", new Object[]{"attributeGroup (local)", "ref"}, elmNode);
            // return the attrValues array to the checker's pool before bailing out
            fAttrChecker.returnAttrArray(attrValues, schemaDoc);
            return null;
        }

        // get global decl
        // NOTE(review): getGlobalDecl may itself return null (unresolved ref);
        // callers appear to tolerate a null result — confirm against XSDHandler.
        attrGrp = (XSAttributeGroupDecl)fSchemaHandler.getGlobalDecl(schemaDoc, XSDHandler.ATTRIBUTEGROUP_TYPE, refAttr, elmNode);

        // no children are allowed here except annotation, which is optional.
        Element child = DOMUtil.getFirstChildElement(elmNode);
        if (child != null) {
            String childName = DOMUtil.getLocalName(child);
            if (childName.equals(SchemaSymbols.ELT_ANNOTATION)) {
                traverseAnnotationDecl(child, attrValues, false, schemaDoc);
                child = DOMUtil.getNextSiblingElement(child);
            } else {
                // no explicit <annotation>: look for a synthetic annotation
                // (non-schema attributes promoted to an annotation)
                String text = DOMUtil.getSyntheticAnnotation(child);
                if (text != null) {
                    traverseSyntheticAnnotation(child, text, attrValues, false, schemaDoc);
                }
            }

            // anything left after the (optional) annotation is a content-model error
            if (child != null) {
                Object[] args = new Object [] {refAttr.rawname, "(annotation?)", DOMUtil.getLocalName(child)};
                reportSchemaError("s4s-elt-must-match.1", args, child);
            }
        } // if

        fAttrChecker.returnAttrArray(attrValues, schemaDoc);

        return attrGrp;

    } // traverseLocal

    /**
     * Traverses a global &lt;attributeGroup&gt; definition: builds a new
     * XSAttributeGroupDecl, collects its attribute uses and annotation,
     * validates restricted redefinition, and registers the declaration with
     * the grammar. Returns null when the (required) "name" attribute is
     * missing; otherwise returns the registered declaration.
     */
    XSAttributeGroupDecl traverseGlobal(Element elmNode,
            XSDocumentInfo schemaDoc,
            SchemaGrammar grammar) {

        XSAttributeGroupDecl attrGrp = new XSAttributeGroupDecl();

        // General Attribute Checking for elmNode declared globally
        Object[] attrValues = fAttrChecker.checkAttributes(elmNode, true, schemaDoc);

        String  nameAttr   = (String) attrValues[XSAttributeChecker.ATTIDX_NAME];

        // global declaration must have a name
        if (nameAttr == null) {
            reportSchemaError("s4s-att-must-appear", new Object[]{"attributeGroup (global)", "name"}, elmNode);
            // continue traversing under a placeholder name so nested errors are
            // still reported; the declaration is dropped before registration
            nameAttr = NO_NAME;
        }

        attrGrp.fName = nameAttr;
        attrGrp.fTargetNamespace = schemaDoc.fTargetNamespace;

        // check the content
        Element child = DOMUtil.getFirstChildElement(elmNode);
        XSAnnotationImpl annotation = null;

        if (child!=null && DOMUtil.getLocalName(child).equals(SchemaSymbols.ELT_ANNOTATION)) {
            annotation = traverseAnnotationDecl(child, attrValues, false, schemaDoc);
            child = DOMUtil.getNextSiblingElement(child);
        } else {
            // no explicit <annotation>: check for a synthetic one on elmNode itself
            String text = DOMUtil.getSyntheticAnnotation(elmNode);
            if (text != null) {
                annotation = traverseSyntheticAnnotation(elmNode, text, attrValues, false, schemaDoc);
            }
        }

        // Traverse the attribute and attribute group elements and fill in the
        // attributeGroup structure
        Element nextNode = traverseAttrsAndAttrGrps(child, attrGrp, schemaDoc, grammar, attrGrp);
        if (nextNode!=null) {
            // An invalid element was found...
            Object[] args = new Object [] {nameAttr, "(annotation?, ((attribute | attributeGroup)*, anyAttribute?))", DOMUtil.getLocalName(nextNode)};
            reportSchemaError("s4s-elt-must-match.1", args, nextNode);
        }

        // if a global group doesn't have a name, then don't add it.
        if (nameAttr.equals(NO_NAME)) {
            fAttrChecker.returnAttrArray(attrValues, schemaDoc);
            return null;
        }

        // Remove prohibited attributes from the set
        attrGrp.removeProhibitedAttrs();

        // check for restricted redefine:
        XSAttributeGroupDecl redefinedAttrGrp = (XSAttributeGroupDecl)fSchemaHandler.getGrpOrAttrGrpRedefinedByRestriction(
                XSDHandler.ATTRIBUTEGROUP_TYPE,
                new QName(XMLSymbols.EMPTY_STRING, nameAttr, nameAttr, schemaDoc.fTargetNamespace),
                schemaDoc, elmNode);
        if(redefinedAttrGrp != null) {
            Object[] errArgs = attrGrp.validRestrictionOf(nameAttr, redefinedAttrGrp, fSchemaHandler.fXSConstraints);
            if (errArgs != null) {
                // errArgs[last] carries the specific error key; report it,
                // then the generic src-redefine.7.2.2 wrapper
                reportSchemaError((String)errArgs[errArgs.length-1], errArgs, child);
                reportSchemaError("src-redefine.7.2.2", new Object [] {nameAttr, errArgs[errArgs.length-1]}, child);
            }
        }

        XSObjectList annotations;
        if (annotation != null) {
            annotations = new XSObjectListImpl();
            ((XSObjectListImpl)annotations).addXSObject (annotation);
        } else {
            annotations = XSObjectListImpl.EMPTY_LIST;
        }
        attrGrp.fAnnotations = annotations;

        // make an entry in global declarations.
        // first-come wins, except inside <redefine>, where the redefining
        // declaration replaces the original
        if (grammar.getGlobalAttributeGroupDecl(attrGrp.fName) == null
                || DOMUtil.getLocalName(DOMUtil.getParent(elmNode)).equals(SchemaSymbols.ELT_REDEFINE)) {
            grammar.addGlobalAttributeGroupDecl(attrGrp);
        }

        // also add it to extended map (keyed by the schema document's system id)
        final String loc = fSchemaHandler.schemaDocument2SystemId(schemaDoc);
        final XSAttributeGroupDecl attrGrp2 = grammar.getGlobalAttributeGroupDecl(attrGrp.fName, loc);
        if (attrGrp2 == null) {
            grammar.addGlobalAttributeGroupDecl(attrGrp, loc);
        }

        // handle duplicates: when tolerated, the previously registered
        // declaration wins and is re-reported to the handler
        if (fSchemaHandler.fTolerateDuplicates) {
            if (attrGrp2 != null) {
                attrGrp = attrGrp2;
            }
            fSchemaHandler.addGlobalAttributeGroupDecl(attrGrp);
        }

        fAttrChecker.returnAttrArray(attrValues, schemaDoc);

        return attrGrp;

    } // traverseGlobal

} // XSDAttributeGroupTraverser
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.service;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import javax.management.NotificationEmitter;
import javax.management.openmbean.TabularData;

/**
 * JMX management interface for node-level operations (ring membership, snapshots,
 * compaction, repair, draining, etc.). Extends {@link NotificationEmitter} so
 * clients can subscribe to progress notifications (e.g. repair status).
 */
public interface StorageServiceMBean extends NotificationEmitter
{
    /**
     * Retrieve the list of live nodes in the cluster, where "liveness" is
     * determined by the failure detector of the node being queried.
     *
     * @return set of IP addresses, as Strings
     */
    public List<String> getLiveNodes();

    /**
     * Retrieve the list of unreachable nodes in the cluster, as determined
     * by this node's failure detector.
     *
     * @return set of IP addresses, as Strings
     */
    public List<String> getUnreachableNodes();

    /**
     * Retrieve the list of nodes currently bootstrapping into the ring.
     *
     * @return set of IP addresses, as Strings
     */
    public List<String> getJoiningNodes();

    /**
     * Retrieve the list of nodes currently leaving the ring.
     *
     * @return set of IP addresses, as Strings
     */
    public List<String> getLeavingNodes();

    /**
     * Retrieve the list of nodes currently moving in the ring.
     *
     * @return set of IP addresses, as Strings
     */
    public List<String> getMovingNodes();

    /**
     * Fetch string representations of the tokens for this node.
     *
     * @return a collection of tokens formatted as strings
     */
    public List<String> getTokens();

    /**
     * Fetch string representations of the tokens for a specified node.
     *
     * @param endpoint string representation of an node
     * @return a collection of tokens formatted as strings
     */
    public List<String> getTokens(String endpoint) throws UnknownHostException;

    /**
     * Fetch a string representation of the Cassandra version.
     * @return A string representation of the Cassandra version.
     */
    public String getReleaseVersion();

    /**
     * Fetch a string representation of the current Schema version.
     * @return A string representation of the Schema version.
     */
    public String getSchemaVersion();

    /**
     * Get the list of all data file locations from conf
     * @return String array of all locations
     */
    public String[] getAllDataFileLocations();

    /**
     * Get location of the commit log
     * @return a string path
     */
    public String getCommitLogLocation();

    /**
     * Get location of the saved caches dir
     * @return a string path
     */
    public String getSavedCachesLocation();

    /**
     * Retrieve a map of range to end points that describe the ring topology
     * of a Cassandra cluster.
     *
     * @return mapping of ranges to end points
     */
    public Map<List<String>, List<String>> getRangeToEndpointMap(String keyspace);

    /**
     * Retrieve a map of range to rpc addresses that describe the ring topology
     * of a Cassandra cluster.
     *
     * @return mapping of ranges to rpc addresses
     */
    public Map<List<String>, List<String>> getRangeToRpcaddressMap(String keyspace);

    /**
     * The same as {@code describeRing(String)} but converts TokenRange to the String for JMX compatibility
     *
     * @param keyspace The keyspace to fetch information about
     *
     * @return a List of TokenRange(s) converted to String for the given keyspace
     */
    public List <String> describeRingJMX(String keyspace) throws IOException;

    /**
     * Retrieve a map of pending ranges to endpoints that describe the ring topology
     * @param keyspace the keyspace to get the pending range map for.
     * @return a map of pending ranges to endpoints
     */
    public Map<List<String>, List<String>> getPendingRangeToEndpointMap(String keyspace);

    /**
     * Retrieve a map of tokens to endpoints, including the bootstrapping
     * ones.
     *
     * @return a map of tokens to endpoints in ascending order
     */
    public Map<String, String> getTokenToEndpointMap();

    /** Retrieve this hosts unique ID */
    public String getLocalHostId();

    /** Retrieve the mapping of endpoint to host ID */
    public Map<String, String> getHostIdMap();

    /**
     * Numeric load value.
     * @see org.apache.cassandra.metrics.StorageMetrics#load
     */
    @Deprecated
    public double getLoad();

    /** Human-readable load value */
    public String getLoadString();

    /** Human-readable load value.  Keys are IP addresses. */
    public Map<String, String> getLoadMap();

    /**
     * Return the generation value for this node.
     *
     * @return generation number
     */
    public int getCurrentGenerationNumber();

    /**
     * This method returns the N endpoints that are responsible for storing the
     * specified key i.e for replication.
     *
     * @param keyspaceName keyspace name
     * @param cf Column family name
     * @param key - key for which we need to find the endpoint return value -
     * the endpoint responsible for this key
     */
    public List<InetAddress> getNaturalEndpoints(String keyspaceName, String cf, String key);

    /** Variant of the above that takes the already-serialized partition key. */
    public List<InetAddress> getNaturalEndpoints(String keyspaceName, ByteBuffer key);

    /**
     * Takes the snapshot for the given keyspaces. A snapshot name must be specified.
     *
     * @param tag the tag given to the snapshot; may not be null or empty
     * @param keyspaceNames the name of the keyspaces to snapshot; empty means "all."
     */
    public void takeSnapshot(String tag, String... keyspaceNames) throws IOException;

    /**
     * Takes the snapshot of a specific column family. A snapshot name must be specified.
     *
     * @param keyspaceName the keyspace which holds the specified column family
     * @param columnFamilyName the column family to snapshot
     * @param tag the tag given to the snapshot; may not be null or empty
     */
    public void takeColumnFamilySnapshot(String keyspaceName, String columnFamilyName, String tag) throws IOException;

    /**
     * Remove the snapshot with the given name from the given keyspaces.
     * If no tag is specified we will remove all snapshots.
     */
    public void clearSnapshot(String tag, String... keyspaceNames) throws IOException;

    /**
     * Get the details of all the snapshot
     * @return A map of snapshotName to all its details in Tabular form.
     */
    public Map<String, TabularData> getSnapshotDetails();

    /**
     * Get the true size taken by all snapshots across all keyspaces.
     * @return True size taken by all the snapshots.
     */
    public long trueSnapshotsSize();

    /**
     * Forces major compaction of a single keyspace
     */
    public void forceKeyspaceCompaction(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;

    /**
     * Trigger a cleanup of keys on a single keyspace
     */
    public void forceKeyspaceCleanup(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;

    /**
     * Scrub (deserialize + reserialize at the latest version, skipping bad rows if any) the given keyspace.
     * If columnFamilies array is empty, all CFs are scrubbed.
     *
     * Scrubbed CFs will be snapshotted first, if disableSnapshot is false
     */
    public void scrub(boolean disableSnapshot, boolean skipCorrupted, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;

    /**
     * Rewrite all sstables to the latest version.
     * Unlike scrub, it doesn't skip bad rows and do not snapshot sstables first.
     */
    public void upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;

    /**
     * Flush all memtables for the given column families, or all columnfamilies for the given keyspace
     * if none are explicitly listed.
     * @param keyspaceName
     * @param columnFamilies
     * @throws IOException
     */
    public void forceKeyspaceFlush(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;

    /**
     * Invoke repair asynchronously.
     * You can track repair progress by subscribing JMX notification sent from this StorageServiceMBean.
     * Notification format is:
     *   type: "repair"
     *   userObject: int array of length 2, [0]=command number, [1]=ordinal of AntiEntropyService.Status
     *
     * @return Repair command number, or 0 if nothing to repair
     */
    public int forceRepairAsync(String keyspace, boolean isSequential, Collection<String> dataCenters, boolean primaryRange, String... columnFamilies);

    /**
     * Same as forceRepairAsync, but handles a specified range
     */
    public int forceRepairRangeAsync(String beginToken, String endToken, final String keyspaceName, boolean isSequential, Collection<String> dataCenters, final String... columnFamilies);

    /**
     * Invoke repair asynchronously.
     * You can track repair progress by subscribing JMX notification sent from this StorageServiceMBean.
     * Notification format is:
     *   type: "repair"
     *   userObject: int array of length 2, [0]=command number, [1]=ordinal of AntiEntropyService.Status
     *
     * @return Repair command number, or 0 if nothing to repair
     * @see #forceKeyspaceRepair(String, boolean, boolean, String...)
     */
    public int forceRepairAsync(String keyspace, boolean isSequential, boolean isLocal, boolean primaryRange, String... columnFamilies);

    /**
     * Same as forceRepairAsync, but handles a specified range
     */
    public int forceRepairRangeAsync(String beginToken, String endToken, final String keyspaceName, boolean isSequential, boolean isLocal, final String... columnFamilies);

    /**
     * Triggers proactive repair for given column families, or all columnfamilies for the given keyspace
     * if none are explicitly listed.
     * @param keyspaceName
     * @param columnFamilies
     * @throws IOException
     */
    public void forceKeyspaceRepair(String keyspaceName, boolean isSequential, boolean isLocal, String... columnFamilies) throws IOException;

    /**
     * Triggers proactive repair but only for the node primary range.
     */
    public void forceKeyspaceRepairPrimaryRange(String keyspaceName, boolean isSequential, boolean isLocal, String... columnFamilies) throws IOException;

    /**
     * Perform repair of a specific range.
     *
     * This allows incremental repair to be performed by having an external controller submitting repair jobs.
     * Note that the provided range much be a subset of one of the node local range.
     */
    public void forceKeyspaceRepairRange(String beginToken, String endToken, String keyspaceName, boolean isSequential, boolean isLocal, String... columnFamilies) throws IOException;

    /** Abort all in-flight repair sessions on this node. */
    public void forceTerminateAllRepairSessions();

    /**
     * transfer this node's data to other machines and remove it from service.
     */
    public void decommission() throws InterruptedException;

    /**
     * @param newToken token to move this node to.
     * This node will unload its data onto its neighbors, and bootstrap to the new token.
     */
    public void move(String newToken) throws IOException;

    /**
     * @param srcTokens tokens to move to this node
     */
    public void relocate(Collection<String> srcTokens) throws IOException;

    /**
     * removeToken removes token (and all data associated with
     * enpoint that had it) from the ring
     */
    public void removeNode(String token);

    /**
     * Get the status of a token removal.
     */
    public String getRemovalStatus();

    /**
     * Force a remove operation to finish.
     */
    public void forceRemoveCompletion();

    /** set the logging level at runtime */
    public void setLoggingLevel(String classQualifier, String level);

    /** get the operational mode (leaving, joining, normal, decommissioned, client) **/
    public String getOperationMode();

    /** get the progress of a drain operation */
    public String getDrainProgress();

    /** makes node unavailable for writes, flushes memtables and replays commitlog. */
    public void drain() throws IOException, InterruptedException, ExecutionException;

    /**
     * Truncates (deletes) the given columnFamily from the provided keyspace.
     * Calling truncate results in actual deletion of all data in the cluster
     * under the given columnFamily and it will fail unless all hosts are up.
     * All data in the given column family will be deleted, but its definition
     * will not be affected.
     *
     * @param keyspace The keyspace to delete from
     * @param columnFamily The column family to delete data from.
     */
    public void truncate(String keyspace, String columnFamily)throws TimeoutException, IOException;

    /**
     * given a list of tokens (representing the nodes in the cluster), returns
     * a mapping from "token -> %age of cluster owned by that token"
     */
    public Map<InetAddress, Float> getOwnership();

    /**
     * Effective ownership is % of the data each node owns given the keyspace
     * we calculate the percentage using replication factor.
     * If Keyspace == null, this method will try to verify if all the keyspaces
     * in the cluster have the same replication strategies and if yes then we will
     * use the first else a empty Map is returned.
     */
    public Map<InetAddress, Float> effectiveOwnership(String keyspace) throws IllegalStateException;

    /** List the names of all keyspaces known to this node. */
    public List<String> getKeyspaces();

    /**
     * Change endpointsnitch class and dynamic-ness (and dynamic attributes) at runtime
     * @param epSnitchClassName        the canonical path name for a class implementing IEndpointSnitch
     * @param dynamic                  boolean that decides whether dynamicsnitch is used or not
     * @param dynamicUpdateInterval    integer, in ms (default 100)
     * @param dynamicResetInterval     integer, in ms (default 600,000)
     * @param dynamicBadnessThreshold  double, (default 0.0)
     */
    public void updateSnitch(String epSnitchClassName, Boolean dynamic, Integer dynamicUpdateInterval, Integer dynamicResetInterval, Double dynamicBadnessThreshold) throws ClassNotFoundException;

    // allows a user to forcibly 'kill' a sick node
    public void stopGossiping();

    // allows a user to recover a forcibly 'killed' node
    public void startGossiping();

    // allows a user to forcibly completely stop cassandra
    public void stopDaemon();

    // to determine if gossip is disabled
    public boolean isInitialized();

    // allows a user to disable thrift
    public void stopRPCServer();

    // allows a user to reenable thrift
    public void startRPCServer();

    // to determine if thrift is running
    public boolean isRPCServerRunning();

    // native (CQL binary) transport lifecycle controls
    public void stopNativeTransport();
    public void startNativeTransport();
    public boolean isNativeTransportRunning();

    // allows a node that have been started without joining the ring to join it
    public void joinRing() throws IOException;
    public boolean isJoined();

    @Deprecated
    public int getExceptionCount();

    // streaming throughput throttle (MB/s)
    public void setStreamThroughputMbPerSec(int value);
    public int getStreamThroughputMbPerSec();

    // compaction throughput throttle (MB/s)
    public int getCompactionThroughputMbPerSec();
    public void setCompactionThroughputMbPerSec(int value);

    // incremental backup toggle
    public boolean isIncrementalBackupsEnabled();
    public void setIncrementalBackupsEnabled(boolean value);

    /**
     * Initiate a process of streaming data for which we are responsible from other nodes. It is similar to bootstrap
     * except meant to be used on a node which is already in the cluster (typically containing no data) as an
     * alternative to running repair.
     *
     * @param sourceDc Name of DC from which to select sources for streaming or null to pick any node
     */
    public void rebuild(String sourceDc);

    /** Starts a bulk load of the sstables found in the given directory and blocks until it completes. */
    public void bulkLoad(String directory);

    /** Retry sstable deletions that previously failed (e.g. on Windows file locking). */
    public void rescheduleFailedDeletions();

    /**
     * Load new SSTables to the given keyspace/columnFamily
     *
     * @param ksName The parent keyspace name
     * @param cfName The ColumnFamily name where SSTables belong
     */
    public void loadNewSSTables(String ksName, String cfName);

    /**
     * Return a List of Tokens representing a sample of keys across all ColumnFamilyStores.
     *
     * Note: this should be left as an operation, not an attribute (methods starting with "get")
     * to avoid sending potentially multiple MB of data when accessing this mbean by default.  See CASSANDRA-4452.
     *
     * @return set of Tokens as Strings
     */
    public List<String> sampleKeyRange();

    /**
     * rebuild the specified indexes
     */
    public void rebuildSecondaryIndex(String ksName, String cfName, String... idxNames);

    /** Drop and re-announce this node's local schema (resynced from other nodes). */
    public void resetLocalSchema() throws IOException;

    /**
     * Enables/Disables tracing for the whole system. Only thrift requests can start tracing currently.
     *
     * @param probability
     *            ]0,1[ will enable tracing on a partial number of requests with the provided probability. 0 will
     *            disable tracing and 1 will enable tracing for all requests (which mich severely cripple the system)
     */
    public void setTraceProbability(double probability);

    /**
     * Returns the configured tracing probability.
     */
    public double getTracingProbability();

    /** Begin processing of queued range transfers. */
    public void enableScheduledRangeXfers();

    /** Disable processing of queued range transfers. */
    public void disableScheduledRangeXfers();

    // per-keyspace (optionally per-CF) auto-compaction toggles
    void disableAutoCompaction(String ks, String ... columnFamilies) throws IOException;
    void enableAutoCompaction(String ks, String ... columnFamilies) throws IOException;

    /** Deliver any stored hints to the given endpoint. */
    public void deliverHints(String host) throws UnknownHostException;

    /** Returns the name of the cluster */
    public String getClusterName();
    /** Returns the cluster partitioner */
    public String getPartitionerName();

    /** Returns the threshold for warning of queries with many tombstones */
    public int getTombstoneWarnThreshold();
    /** Sets the threshold for warning queries with many tombstones */
    public void setTombstoneWarnThreshold(int tombstoneDebugThreshold);

    /** Returns the threshold for abandoning queries with many tombstones */
    public int getTombstoneFailureThreshold();
    /** Sets the threshold for abandoning queries with many tombstones */
    public void setTombstoneFailureThreshold(int tombstoneDebugThreshold);
}
package com.samsung.dtl.patterntracker;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import javax.microedition.khronos.opengles.GL10;

import org.opencv.core.Mat;

import android.app.Activity;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;

import com.samsung.dtl.patterntracker.camera.CameraManager;
import com.samsung.dtl.patterntracker.camera.ShaderManager;
import com.samsung.dtl.bluetoothlibrary.profile.BtPosition6f;

// TODO: Auto-generated Javadoc
/**
 * GLSurfaceView renderer that captures camera frames into a GL texture,
 * runs the color-grid pattern tracker on each new frame, and (optionally)
 * sends the tracked 6-DoF origin over Bluetooth.
 *
 * NOTE(review): all Renderer callbacks run on the GL thread, not the UI
 * thread — confirm that BtPosition6f.sendData is safe to call from there.
 */
public class CustomGLRenderer implements GLSurfaceView.Renderer{
    // tracker
    public ColorGridTracker mCgTrack; /*!< The tracker instance. */

    // bluetooth
    private BtPosition6f mPositionComm; /*!< The bluetooth class that communicates the position of pattern's origin. */
    public boolean data_sent=false; /*!< Whether data has been sent (unused within this class's visible code). */
    private final boolean sendBT = true; /*!< Compile-time switch allowing data to be sent via bluetooth. */

    // timing
    public long processedCaptureTime; /*!< The timestamp of the last camera capture that was processed. */

    // fps
    private int frames = 0; /*!< Frame counter for the current one-second FPS window. */
    private long lastFPScomputeTime = System.nanoTime(); /*!< The time since last FPS was computed. */

    // camera
    CameraManager mCameraManager; /*!< The camera manager. */
    private SurfaceTexture mSTexture; /*!< The camera surface texture. */
    private Point camera_res; /*!< The camera resolution. */

    // graphics
    ShaderManager mShaderManager; /*!< The shader manager. */
    private Point display_dim; /*!< The dimensions of the Android display. OpenGL viewport should be set to this. */
    EGL10 mEgl; /*!< The egl instance. */
    EGLDisplay mEglDisplay; /*!< The egl display instance. */
    EGLSurface mEglSurface; /*!< The egl surface instance. */
    EGLConfig mEglConfig; /*!< The egl config instance. */
    EGLContext mEglContext; /*!< The egl context instance. */

    // logging
    private final String TAG = "cgt"; /*!< The log tag. */

    /**
     * Instantiates a new glRenderer.
     *
     * Camera resolution is fixed here at 1920x1080; camera/GL resources are
     * not created until onSurfaceCreated.
     *
     * @param view the view
     */
    CustomGLRenderer (CustomGLSurfaceView view) {
        mCameraManager = new CameraManager();
        mShaderManager = new ShaderManager();
        camera_res = new Point(1920, 1080);
        processedCaptureTime=0;
        mCgTrack = new ColorGridTracker();
        mShaderManager.initializeCoords();
        data_sent = false;
    }

    /* (non-Javadoc)
     * Creates the camera texture, opens the camera, and initializes the
     * OpenCL side of the tracker with the two GL texture ids.
     * @see android.opengl.GLSurfaceView.Renderer#onSurfaceCreated(javax.microedition.khronos.opengles.GL10, javax.microedition.khronos.egl.EGLConfig)
     */
    public void onSurfaceCreated (GL10 unused, EGLConfig config) {
        mSTexture = mShaderManager.initTex(camera_res);
        mCameraManager.initializeCamera(camera_res, mSTexture);
        ColorGridTracker.initCL(camera_res.x, camera_res.y, mShaderManager.glTextures[0], mShaderManager.glTextures[1]);
    }

    /* (non-Javadoc)
     * Updates the GL viewport to the new surface size and starts the camera preview.
     * @see android.opengl.GLSurfaceView.Renderer#onSurfaceChanged(javax.microedition.khronos.opengles.GL10, int, int)
     */
    public void onSurfaceChanged ( GL10 unused, int width, int height ) {
        GLES20.glViewport( 0, 0, width, height);
        mCameraManager.mCamera.startPreview();
    }

    /**
     * Capture frame.
     *
     * Copies the latest camera frame into the GL texture. Returns 0 when there
     * is no new frame (same timestamp as the last processed one, or no frame
     * yet); otherwise returns the capture timestamp and bumps the frame
     * counter / FPS bookkeeping.
     *
     * @return the capture timestamp, or 0 if no new frame was available
     */
    public long captureFrame(){
        long captureTime = mShaderManager.cameraToTexture(mSTexture, camera_res);
        if(processedCaptureTime == captureTime || captureTime==0)return 0;
        processedCaptureTime = captureTime;
        mCameraManager.frameNo++;
        measureFPS();
        return captureTime;
    }

    /* (non-Javadoc)
     * Per-frame pipeline: capture -> track -> send over BT -> update camera
     * params -> optional debug render.
     * NOTE(review): mPositionComm is only assigned in initBT(); if onDrawFrame
     * runs before initBT() the sendData call below would NPE — confirm the
     * activity always calls initBT() before rendering starts.
     * @see android.opengl.GLSurfaceView.Renderer#onDrawFrame(javax.microedition.khronos.opengles.GL10)
     */
    public void onDrawFrame ( GL10 unused ) {
        // capture
        long captureTime = captureFrame();
        if(captureTime==0)return;

        // track
        Mat origin = mCgTrack.trackGrid(camera_res.y, camera_res.x,frameIdForTracker(), captureTime);

        // send the 6 components of the tracked origin (empty Mat means no detection)
        if(origin.rows()!=0 && sendBT){
            mPositionComm.sendData((float)origin.get(0,0)[0],(float)origin.get(1,0)[0],(float)origin.get(2,0)[0],(float)origin.get(3,0)[0],(float)origin.get(4,0)[0],(float)origin.get(5,0)[0]);
            Log.e("Track", "x:"+(float)origin.get(0,0)[0]+" y:"+(float)origin.get(1,0)[0]+" z:"+(float)origin.get(2,0)[0]);
        }

        // update
        mCameraManager.updateCameraParams(mCgTrack,origin, camera_res);

        // debug
        if(mCgTrack.mDebugLevel==1)mShaderManager.renderFromTexture(mShaderManager.glTextures[1], display_dim);
    }

    /**
     * modifies the Frame id for opencl tracker for debugging.
     *
     * Returns -1 when debugging is off, so the tracker ignores the frame id;
     * otherwise returns the real camera frame number.
     *
     * @return the frame id to hand to the tracker
     */
    private int frameIdForTracker(){
        if(mCgTrack.mDebugLevel==0)return -1;
        return mCameraManager.frameNo;
    }

    /**
     * Closes camera.
     *
     * Releases the surface texture, stops and releases the camera, tears down
     * the OpenCL tracker state and deletes the GL textures.
     */
    public void close() {
        Log.e("camera", "stopping");
        mSTexture.release();
        mCameraManager.mCamera.stopPreview();
        mCameraManager.mCamera.release();
        ColorGridTracker.destroyCL();
        mShaderManager.deleteTex();
    }

    /**
     * Measure frames per second.
     *
     * Logs the frame count once per wall-clock second and resets the window.
     */
    public void measureFPS() {
        frames++;
        if(System.nanoTime() - lastFPScomputeTime >= 1000000000) {
            Log.d(TAG, "FPS: " + frames);
            frames = 0;
            lastFPScomputeTime = System.nanoTime();
        }
    }

    /**
     * Sets the display dimension.
     *
     * @param displayDim the new display dimension
     */
    public void setDisplayDim(Point displayDim) {
        display_dim = displayDim;
    }

    /**
     * Initializes the bluetooth.
     *
     * Must be called before rendering if Bluetooth output is enabled
     * (see NOTE on onDrawFrame).
     *
     * @param activity the current activity
     */
    public void initBT(Activity activity){
        if(sendBT){
            mPositionComm = BtPosition6f.getInstance(activity);
            mPositionComm.start();
        }
    }

    /**
     * On destroy.
     *
     * Stops the Bluetooth channel and tears down OpenCL state.
     * NOTE(review): destroyCL() is also called from close(); calling both
     * presumably must be safe/idempotent — confirm.
     */
    public void onDestroy() {
        if(sendBT){
            mPositionComm.stop();
        }
        ColorGridTracker.destroyCL();
    }
}
/*
 * Licensed to ObjectStyle LLC under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ObjectStyle LLC licenses
 * this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.bootique.test.junit;

import io.bootique.BQCoreModule;
import io.bootique.BQModuleOverrideBuilder;
import io.bootique.BQModuleProvider;
import io.bootique.Bootique;
import io.bootique.di.BQModule;
import io.bootique.log.BootLogger;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * A fluent builder assembling a Bootique runtime for a JUnit 4 test.
 *
 * @param <T> a concrete subclass type; parameterization enables covariant return types in subclasses.
 * @deprecated since 3.0.M1, as we are we phasing out JUnit 4 support in favor of JUnit 5
 */
@Deprecated
public abstract class BQTestRuntimeBuilder<T extends BQTestRuntimeBuilder<T>> {

    // These two properties are used to make the test stack independent of the shell environment.
    // The properties are used for configuring of "io.bootique.env.Environment" inside BQCoreModule.
    private static final String EXCLUDE_SYSTEM_VARIABLES = "bq.core.excludeSystemVariables";
    private static final String EXCLUDE_SYSTEM_PROPERTIES = "bq.core.excludeSystemProperties";

    protected Bootique bootique;
    protected Map<String, String> properties;

    protected BQTestRuntimeBuilder(String[] args) {
        this.properties = new HashMap<>();
        this.bootique = Bootique.app(args).moduleProvider(createPropertiesProvider());

        // isolate the test runtime from real system properties and env variables
        property(EXCLUDE_SYSTEM_PROPERTIES, "true");
        property(EXCLUDE_SYSTEM_VARIABLES, "true");
    }

    // The single unchecked cast shared by all fluent methods below; safe because
    // T is constrained to the concrete subclass type.
    @SuppressWarnings("unchecked")
    private T self() {
        return (T) this;
    }

    /**
     * Creates a provider exposing this builder's mutable properties map to BQCoreModule.
     */
    protected BQModuleProvider createPropertiesProvider() {
        return new BQModuleProvider() {

            @Override
            public BQModule module() {
                return binder -> BQCoreModule.extend(binder).setProperties(properties);
            }

            @Override
            public String name() {
                return "BQTestRuntimeBuilder:properties";
            }
        };
    }

    /**
     * Appends extra values to the test CLI arguments.
     *
     * @param args extra args to pass to Bootique.
     * @return this instance of test runtime builder.
     */
    public T args(String... args) {
        bootique.args(args);
        return self();
    }

    /**
     * Appends extra values to the test CLI arguments.
     *
     * @param args extra args to pass to Bootique.
     * @return this instance of test runtime builder.
     */
    public T args(Collection<String> args) {
        bootique.args(args);
        return self();
    }

    /**
     * Instructs Bootique to load any modules available on classpath that expose
     * {@link io.bootique.BQModuleProvider} provider. Auto-loaded modules will be used in default
     * configuration. Factories within modules will of course be configured dynamically from YAML.
     *
     * @return this instance of test runtime builder.
     */
    public T autoLoadModules() {
        bootique.autoLoadModules();
        return self();
    }

    /**
     * @param bootLogger custom BootLogger to use for a given runtime.
     * @return this instance of test runtime builder.
     */
    public T bootLogger(BootLogger bootLogger) {
        bootique.bootLogger(bootLogger);
        return self();
    }

    /**
     * @param moduleType custom Module class to add to Bootique DI runtime.
     * @return this instance of test runtime builder.
     * @see #autoLoadModules()
     */
    public T module(Class<? extends BQModule> moduleType) {
        bootique.module(moduleType);
        return self();
    }

    /**
     * Adds an array of Module types to the Bootique DI runtime. Each type will
     * be instantiated by Bootique and added to the DI container.
     *
     * @param moduleTypes custom Module classes to add to Bootique DI runtime.
     * @return this instance of test runtime builder.
     * @see #autoLoadModules()
     */
    @SafeVarargs
    public final T modules(Class<? extends BQModule>... moduleTypes) {
        bootique.modules(moduleTypes);
        return self();
    }

    /**
     * @param m a module to add to the test runtime.
     * @return this instance of test runtime builder.
     */
    public T module(BQModule m) {
        bootique.module(m);
        return self();
    }

    /**
     * Adds an array of Modules to the Bootique DI runtime.
     *
     * @param modules an array of modules to add to Bootiqie DI runtime.
     * @return this instance of test runtime builder.
     */
    public T modules(BQModule... modules) {
        bootique.modules(modules);
        return self();
    }

    /**
     * Adds a Module generated by the provider. Provider may optionally specify
     * that the Module overrides services in some other Module.
     *
     * @param moduleProvider a provider of Module and override spec.
     * @return this instance of test runtime builder.
     * @since 2.0
     */
    public T moduleProvider(BQModuleProvider moduleProvider) {
        bootique.moduleProvider(moduleProvider);
        return self();
    }

    /**
     * Starts an API call chain to override an array of Modules.
     *
     * @param overriddenTypes an array of modules whose bindings should be overridden.
     * @return {@link BQModuleOverrideBuilder} object to specify a Module overriding other modules.
     */
    public BQModuleOverrideBuilder<T> override(Class<? extends BQModule>... overriddenTypes) {
        // delegate the actual override bookkeeping to the wrapped Bootique builder
        BQModuleOverrideBuilder<Bootique> delegate = bootique.override(overriddenTypes);
        return new BQModuleOverrideBuilder<T>() {

            @Override
            public T with(Class<? extends BQModule> moduleType) {
                delegate.with(moduleType);
                return BQTestRuntimeBuilder.this.self();
            }

            @Override
            public T with(BQModule module) {
                delegate.with(module);
                return BQTestRuntimeBuilder.this.self();
            }
        };
    }

    /**
     * Sets a Bootique property to be passed to the test runtime.
     *
     * @param key   property name.
     * @param value property value.
     * @return this instance of test runtime builder.
     */
    public T property(String key, String value) {
        properties.put(key, value);
        return self();
    }
}
/**
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jbpm.services.task.impl.model;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Version;

import org.jbpm.services.task.utils.CollectionUtils;
import org.kie.api.task.model.I18NText;
import org.kie.api.task.model.PeopleAssignments;
import org.kie.api.task.model.TaskData;
import org.kie.internal.task.api.model.Deadlines;
import org.kie.internal.task.api.model.Delegation;
import org.kie.internal.task.api.model.InternalTask;
import org.kie.internal.task.api.model.SubTasksStrategy;

/**
 * JPA entity representing a human task. Supports custom externalization via
 * {@link #writeExternal(ObjectOutput)} / {@link #readExternal(ObjectInput)}:
 * nullable strings are written as "" and embedded components are preceded by
 * a presence flag.
 */
@Entity
@Table(name="Task")
@SequenceGenerator(name="taskIdSeq", sequenceName="TASK_ID_SEQ", allocationSize=1)
public class TaskImpl implements InternalTask {

    /**
     * WSHT uses a name for the unique identifier, for now we use a generated ID which is also the key, which can be
     * mapped to the name or a unique name field added later.
     */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO, generator="taskIdSeq")
    @Column(name = "id")
    private Long id = 0L;

    @Version
    @Column(name = "OPTLOCK")
    private int version;

    /**
     * While WSHT says this is an expression, it always resolves to an integer, so resolve before setting
     * default value is 0.
     */
    private int priority;

    private String name;
    private String subject;
    private String description;

    @OneToMany(cascade = CascadeType.ALL, targetEntity=I18NTextImpl.class)
    @JoinColumn(name = "Task_Names_Id", nullable = true)
    private List<I18NText> names = Collections.emptyList();

    @OneToMany(cascade = CascadeType.ALL, targetEntity=I18NTextImpl.class)
    @JoinColumn(name = "Task_Subjects_Id", nullable = true)
    private List<I18NText> subjects = Collections.emptyList();

    @OneToMany(cascade = CascadeType.ALL, targetEntity=I18NTextImpl.class)
    @JoinColumn(name = "Task_Descriptions_Id", nullable = true)
    private List<I18NText> descriptions = Collections.emptyList();

    @Embedded
    private PeopleAssignmentsImpl peopleAssignments;

    @Embedded
    private DelegationImpl delegation;

    @Embedded
    private TaskDataImpl taskData;

    @Embedded
    private DeadlinesImpl deadlines;

    @Enumerated(EnumType.STRING)
    // Default Behaviour
    private SubTasksStrategy subTaskStrategy = SubTasksStrategy.NoAction;

    private String taskType;

    private String formName;

    // 0 = not archived, 1 = archived; stored as Short so it can be null
    @Basic
    private Short archived = 0;

    public TaskImpl() {
    }

    /**
     * Writes a possibly-null string as a non-null UTF value ("" stands in for null),
     * matching the symmetric read in {@link #readExternal(ObjectInput)}.
     */
    private static void writeUTFString(ObjectOutput out, String value) throws IOException {
        out.writeUTF(value != null ? value : "");
    }

    /**
     * Serializes this task. Embedded components are each preceded by a boolean
     * presence flag; string fields are written as "" when null.
     *
     * NOTE(review): writing assumes {@code archived} is non-null (its field
     * default is 0), as in the original implementation; a task explicitly
     * archived with null would fail here.
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeLong( id );
        out.writeInt( priority );
        out.writeShort( archived );
        writeUTFString( out, taskType );
        writeUTFString( out, formName );
        writeUTFString( out, name );
        writeUTFString( out, subject );
        writeUTFString( out, description );
        CollectionUtils.writeI18NTextList( names, out );
        CollectionUtils.writeI18NTextList( subjects, out );
        CollectionUtils.writeI18NTextList( descriptions, out );
        if (subTaskStrategy != null) {
            out.writeBoolean(true);
            out.writeUTF(subTaskStrategy.toString());
        } else {
            out.writeBoolean(false);
        }
        if ( peopleAssignments != null ) {
            out.writeBoolean( true );
            peopleAssignments.writeExternal( out );
        } else {
            out.writeBoolean( false );
        }
        if ( delegation != null ) {
            out.writeBoolean( true );
            delegation.writeExternal( out );
        } else {
            out.writeBoolean( false );
        }
        if ( taskData != null ) {
            out.writeBoolean( true );
            taskData.writeExternal( out );
        } else {
            out.writeBoolean( false );
        }
        if ( deadlines != null ) {
            out.writeBoolean( true );
            deadlines.writeExternal( out );
        } else {
            out.writeBoolean( false );
        }
    }

    /**
     * Restores this task from a stream produced by {@link #writeExternal(ObjectOutput)}.
     * Fields are read in exactly the order they were written.
     */
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        id = in.readLong();
        priority = in.readInt();
        archived = in.readShort();
        taskType = in.readUTF();
        formName = in.readUTF();
        name = in.readUTF();
        subject = in.readUTF();
        description = in.readUTF();
        names = CollectionUtils.readI18NTextList( in );
        subjects = CollectionUtils.readI18NTextList( in );
        descriptions = CollectionUtils.readI18NTextList( in );
        if (in.readBoolean()) {
            subTaskStrategy = SubTasksStrategy.valueOf(in.readUTF());
        }
        if ( in.readBoolean() ) {
            peopleAssignments = new PeopleAssignmentsImpl();
            peopleAssignments.readExternal( in );
        }
        if ( in.readBoolean() ) {
            delegation = new DelegationImpl();
            delegation.readExternal( in );
        }
        if ( in.readBoolean() ) {
            taskData = new TaskDataImpl();
            taskData.readExternal( in );
        }
        if ( in.readBoolean() ) {
            deadlines = new DeadlinesImpl();
            deadlines.readExternal( in );
        }
    }

    public Long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    /**
     * @return {@code Boolean.TRUE}/{@code Boolean.FALSE} for an archived flag of 1/0,
     *         or null when the flag itself is null.
     */
    public Boolean isArchived() {
        if (archived == null) {
            return null;
        }
        return (archived == 1) ? Boolean.TRUE : Boolean.FALSE;
    }

    public void setArchived(Boolean archived) {
        if (archived == null) {
            this.archived = null;
        } else {
            // Short.valueOf replaces the deprecated new Short(String) constructor
            this.archived = archived ? Short.valueOf((short) 1) : Short.valueOf((short) 0);
        }
    }

    public int getVersion() {
        return this.version;
    }

    public int getPriority() {
        return priority;
    }

    public void setPriority(int priority) {
        this.priority = priority;
    }

    public List<I18NText> getNames() {
        return names;
    }

    public void setNames(List<I18NText> names) {
        this.names = names;
    }

    public List<I18NText> getSubjects() {
        return subjects;
    }

    public void setSubjects(List<I18NText> subjects) {
        this.subjects = subjects;
    }

    public List<I18NText> getDescriptions() {
        return descriptions;
    }

    public void setDescriptions(List<I18NText> descriptions) {
        this.descriptions = descriptions;
    }

    public PeopleAssignments getPeopleAssignments() {
        return peopleAssignments;
    }

    public void setPeopleAssignments(PeopleAssignments peopleAssignments) {
        this.peopleAssignments = (PeopleAssignmentsImpl) peopleAssignments;
    }

    public Delegation getDelegation() {
        return delegation;
    }

    public void setDelegation(Delegation delegation) {
        this.delegation = (DelegationImpl) delegation;
    }

    public TaskData getTaskData() {
        return taskData;
    }

    public void setTaskData(TaskData taskData) {
        this.taskData = (TaskDataImpl) taskData;
    }

    public Deadlines getDeadlines() {
        return deadlines;
    }

    public void setDeadlines(Deadlines deadlines) {
        this.deadlines = (DeadlinesImpl) deadlines;
    }

    public String getTaskType() {
        return taskType;
    }

    public void setTaskType(String taskType) {
        this.taskType = taskType;
    }

    public String getFormName() {
        return formName;
    }

    public void setFormName(String formName) {
        this.formName = formName;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + version;
        result = prime * result + priority;
        // BUGFIX: archived may be null after setArchived(null); the original
        // called archived.hashCode() unguarded and threw NPE.
        result = prime * result + ((archived == null) ? 0 : archived.hashCode());
        result = prime * result + ((taskType == null) ? 0 : taskType.hashCode());
        result = prime * result + CollectionUtils.hashCode( descriptions );
        result = prime * result + CollectionUtils.hashCode( names );
        result = prime * result + CollectionUtils.hashCode( subjects );
        result = prime * result + ((peopleAssignments == null) ? 0 : peopleAssignments.hashCode());
        result = prime * result + ((delegation == null) ? 0 : delegation.hashCode());
        result = prime * result + ((taskData == null) ? 0 : taskData.hashCode());
        result = prime * result + ((deadlines == null) ? 0 : deadlines.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj ) return true;
        if ( obj == null ) return false;
        if ( !(obj instanceof TaskImpl) ) return false;
        TaskImpl other = (TaskImpl) obj;
        if ( this.version != other.version ) {
            return false;
        }
        // BUGFIX: compare boxed Short values, not references. The original used
        // "archived != other.archived", which is identity comparison and only
        // happened to work for values inside the Short cache.
        if ( !Objects.equals( this.archived, other.archived ) ) {
            return false;
        }
        if (taskType == null) {
            if (other.taskType != null) return false;
        } else if (!taskType.equals(other.taskType)) return false;
        if ( deadlines == null ) {
            // BUGFIX: the original left this branch empty, so a task with null
            // deadlines compared equal to one with non-null deadlines.
            if ( other.deadlines != null ) return false;
        } else if ( !deadlines.equals( other.deadlines ) ) return false;
        if ( delegation == null ) {
            if ( other.delegation != null ) return false;
        } else if ( !delegation.equals( other.delegation ) ) return false;
        if ( peopleAssignments == null ) {
            if ( other.peopleAssignments != null ) return false;
        } else if ( !peopleAssignments.equals( other.peopleAssignments ) ) return false;
        if ( priority != other.priority ) return false;
        if ( taskData == null ) {
            if ( other.taskData != null ) return false;
        } else if ( !taskData.equals( other.taskData ) ) return false;
        return ( CollectionUtils.equals( descriptions, other.descriptions )
                 && CollectionUtils.equals( names, other.names )
                 && CollectionUtils.equals( subjects, other.subjects ));
    }

    public SubTasksStrategy getSubTaskStrategy() {
        return subTaskStrategy;
    }

    public void setSubTaskStrategy(SubTasksStrategy subTaskStrategy) {
        this.subTaskStrategy = subTaskStrategy;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getSubject() {
        return subject;
    }

    public void setSubject(String subject) {
        this.subject = subject;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.pkgcache; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Maps; import com.google.common.collect.MoreCollectors; import com.google.devtools.build.lib.actions.ActionKeyContext; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.BuildView; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.util.AnalysisMock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.cmdline.TargetParsingException; import com.google.devtools.build.lib.events.Event; import 
com.google.devtools.build.lib.events.EventKind; import com.google.devtools.build.lib.events.ExtendedEventHandler.Postable; import com.google.devtools.build.lib.events.StoredEventHandler; import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException; import com.google.devtools.build.lib.packages.ConstantRuleVisibility; import com.google.devtools.build.lib.packages.PackageFactory; import com.google.devtools.build.lib.packages.StarlarkSemanticsOptions; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.packages.util.MockToolsConfig; import com.google.devtools.build.lib.rules.repository.RepositoryDelegatorFunction; import com.google.devtools.build.lib.server.FailureDetails.PackageLoading; import com.google.devtools.build.lib.skyframe.BazelSkyframeExecutorConstants; import com.google.devtools.build.lib.skyframe.PatternExpandingError; import com.google.devtools.build.lib.skyframe.PrecomputedValue; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; import com.google.devtools.build.lib.skyframe.TargetPatternPhaseValue; import com.google.devtools.build.lib.testutil.ManualClock; import com.google.devtools.build.lib.testutil.MoreAsserts; import com.google.devtools.build.lib.testutil.SkyframeExecutorTestHelper; import com.google.devtools.build.lib.util.DetailedExitCode; import com.google.devtools.build.lib.util.ExitCode; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.ModifiedFileSet; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import com.google.devtools.common.options.Options; import com.google.devtools.common.options.OptionsParser; import com.google.devtools.common.options.OptionsParsingException; import 
java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link SkyframeExecutor#loadTargetPatternsWithFilters}. */ @RunWith(JUnit4.class) public class LoadingPhaseRunnerTest { private static final ImmutableList<Logger> loggers = ImmutableList.of( Logger.getLogger(BuildView.class.getName())); static { for (Logger logger : loggers) { logger.setLevel(Level.OFF); } } private LoadingPhaseTester tester; @Before public final void createLoadingPhaseTester() throws Exception { tester = new LoadingPhaseTester(); } private List<Label> getLabels(String... labels) throws Exception { List<Label> result = new ArrayList<>(); for (String label : labels) { result.add(Label.parseAbsoluteUnchecked(label)); } return result; } private void assertCircularSymlinksDuringTargetParsing(String targetPattern) throws Exception { assertThrows(TargetParsingException.class, () -> tester.load(targetPattern)); tester.assertContainsError("circular symlinks detected"); TargetPatternPhaseValue result = tester.loadKeepGoing(targetPattern); assertThat(result.hasError()).isTrue(); } private TargetPatternPhaseValue assertNoErrors(TargetPatternPhaseValue loadingResult) { assertThat(loadingResult.hasError()).isFalse(); assertThat(loadingResult.hasPostExpansionError()).isFalse(); tester.assertNoEvents(); return loadingResult; } @Test public void testSmoke() throws Exception { tester.addFile("base/BUILD", "filegroup(name = 'hello', srcs = ['foo.txt'])"); TargetPatternPhaseValue loadingResult = assertNoErrors(tester.load("//base:hello")); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn(getLabels("//base:hello")); assertThat(loadingResult.getTestsToRunLabels()).isNull(); } @Test public 
void testNonExistentPackage() throws Exception { TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("//base:missing"); assertThat(loadingResult.hasError()).isTrue(); assertThat(loadingResult.hasPostExpansionError()).isFalse(); assertThat(loadingResult.getTargetLabels()).isEmpty(); assertThat(loadingResult.getTestsToRunLabels()).isNull(); tester.assertContainsError("Skipping '//base:missing': no such package 'base'"); tester.assertContainsWarning("Target pattern parsing failed."); PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class); assertThat(err.getPattern()).containsExactly("//base:missing"); } @Test public void testNonExistentPackageWithoutKeepGoing() throws Exception { assertThrows(TargetParsingException.class, () -> tester.load("//does/not/exist")); PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class); assertThat(err.getPattern()).containsExactly("//does/not/exist"); } @Test public void testNonExistentTarget() throws Exception { tester.addFile("base/BUILD"); TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("//base:missing"); assertThat(loadingResult.hasError()).isTrue(); assertThat(loadingResult.hasPostExpansionError()).isFalse(); assertThat(loadingResult.getTargetLabels()).isEmpty(); assertThat(loadingResult.getTestsToRunLabels()).isNull(); tester.assertContainsError("Skipping '//base:missing': no such target '//base:missing'"); tester.assertContainsWarning("Target pattern parsing failed."); PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class); assertThat(err.getPattern()).containsExactly("//base:missing"); } @Test public void testExistingAndNonExistentTargetsWithKeepGoing() throws Exception { tester.addFile("base/BUILD", "filegroup(name = 'hello', srcs = ['foo.txt'])"); tester.loadKeepGoing("//base:hello", "//base:missing"); PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class); assertThat(err.getPattern()).containsExactly("//base:missing"); 
TargetParsingCompleteEvent event = tester.findPostOnce(TargetParsingCompleteEvent.class); assertThat(event.getOriginalTargetPattern()).containsExactly("//base:hello", "//base:missing"); assertThat(event.getFailedTargetPatterns()).containsExactly("//base:missing"); } @Test public void testRecursiveAllRules() throws Exception { tester.addFile("base/BUILD", "filegroup(name = 'base', srcs = ['base.txt'])"); tester.addFile("base/foo/BUILD", "filegroup(name = 'foo', srcs = ['foo.txt'])"); tester.addFile("base/bar/BUILD", "filegroup(name = 'bar', srcs = ['bar.txt'])"); TargetPatternPhaseValue loadingResult = tester.load("//base/..."); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn(getLabels("//base", "//base/foo", "//base/bar")); loadingResult = tester.load("//base/bar/..."); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn(getLabels("//base/bar")); } @Test public void testRecursiveAllTargets() throws Exception { tester.addFile("base/BUILD", "filegroup(name = 'base', srcs = ['base.txt'])"); tester.addFile("base/foo/BUILD", "filegroup(name = 'foo', srcs = ['foo.txt'])"); tester.addFile("base/bar/BUILD", "filegroup(name = 'bar', srcs = ['bar.txt'])"); TargetPatternPhaseValue loadingResult = tester.load("//base/...:*"); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn( getLabels( "//base:BUILD", "//base:base", "//base:base.txt", "//base/foo:BUILD", "//base/foo:foo", "//base/foo:foo.txt", "//base/bar:BUILD", "//base/bar:bar", "//base/bar:bar.txt")); loadingResult = tester.load("//base/...:all-targets"); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn( getLabels( "//base:BUILD", "//base:base", "//base:base.txt", "//base/foo:BUILD", "//base/foo:foo", "//base/foo:foo.txt", "//base/bar:BUILD", "//base/bar:bar", "//base/bar:bar.txt")); } @Test public void testNonExistentRecursive() throws Exception { TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("//base/..."); 
assertThat(loadingResult.hasError()).isTrue(); assertThat(loadingResult.hasPostExpansionError()).isFalse(); assertThat(loadingResult.getTargetLabels()).isEmpty(); assertThat(loadingResult.getTestsToRunLabels()).isNull(); tester.assertContainsError("Skipping '//base/...': no targets found beneath 'base'"); tester.assertContainsWarning("Target pattern parsing failed."); PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class); assertThat(err.getPattern()).containsExactly("//base/..."); } @Test public void testMistypedTarget() throws Exception { TargetParsingException e = assertThrows(TargetParsingException.class, () -> tester.load("foo//bar:missing")); assertThat(e) .hasMessageThat() .contains( "invalid target format 'foo//bar:missing': " + "invalid package name 'foo//bar': " + "package names may not contain '//' path separators"); ParsingFailedEvent err = tester.findPostOnce(ParsingFailedEvent.class); assertThat(err.getPattern()).isEqualTo("foo//bar:missing"); } @Test public void testEmptyTarget() throws Exception { TargetParsingException e = assertThrows(TargetParsingException.class, () -> tester.load("")); assertThat(e).hasMessageThat().contains("the empty string is not a valid target"); } @Test public void testMistypedTargetKeepGoing() throws Exception { TargetPatternPhaseValue result = tester.loadKeepGoing("foo//bar:missing"); assertThat(result.hasError()).isTrue(); tester.assertContainsError( "invalid target format 'foo//bar:missing': " + "invalid package name 'foo//bar': " + "package names may not contain '//' path separators"); ParsingFailedEvent err = tester.findPostOnce(ParsingFailedEvent.class); assertThat(err.getPattern()).isEqualTo("foo//bar:missing"); } @Test public void testBadTargetPatternWithTest() throws Exception { tester.addFile("base/BUILD"); TargetPatternPhaseValue loadingResult = tester.loadTestsKeepGoing("//base:missing"); assertThat(loadingResult.hasError()).isTrue(); 
assertThat(loadingResult.hasPostExpansionError()).isFalse(); assertThat(loadingResult.getTargetLabels()).isEmpty(); assertThat(loadingResult.getTestsToRunLabels()).isEmpty(); tester.assertContainsError("Skipping '//base:missing': no such target '//base:missing'"); tester.assertContainsWarning("Target pattern parsing failed."); } @Test public void testManualTarget() throws Exception { AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig); tester.addFile("cc/BUILD", "cc_library(name = 'my_lib', srcs = ['lib.cc'], tags = ['manual'])"); TargetPatternPhaseValue loadingResult = assertNoErrors(tester.load("//cc:all")); assertThat(loadingResult.getTargetLabels()).containsExactlyElementsIn(getLabels()); // Explicitly specified on the command line. loadingResult = assertNoErrors(tester.load("//cc:my_lib")); assertThat(loadingResult.getTargetLabels()).containsExactlyElementsIn(getLabels("//cc:my_lib")); } @Test public void testConfigSettingTarget() throws Exception { AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig); tester.addFile("config/BUILD", "cc_library(name = 'somelib', srcs = [ 'somelib.cc' ], hdrs = [ 'somelib.h' ])", "config_setting(name = 'configa', values = { 'define': 'foo=a' })", "config_setting(name = 'configb', values = { 'define': 'foo=b' })"); TargetPatternPhaseValue result = assertNoErrors(tester.load("//config:all")); assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//config:somelib")); // Explicitly specified on the command line. 
result = assertNoErrors(tester.load("//config:configa")); assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//config:configa")); } @Test public void testNegativeTestDoesNotShowUpAtAll() throws Exception { tester.addFile("my_test/BUILD", "sh_test(name = 'my_test', srcs = ['test.cc'])"); assertNoErrors(tester.loadTests("-//my_test")); assertThat(tester.getFilteredTargets()).isEmpty(); assertThat(tester.getTestFilteredTargets()).isEmpty(); } @Test public void testNegativeTargetDoesNotShowUpAtAll() throws Exception { tester.addFile("my_library/BUILD", "cc_library(name = 'my_library', srcs = ['test.cc'])"); assertNoErrors(tester.loadTests("-//my_library")); assertThat(tester.getFilteredTargets()).isEmpty(); assertThat(tester.getTestFilteredTargets()).isEmpty(); } @Test public void testTestMinusAllTests() throws Exception { tester.addFile( "test/BUILD", "cc_library(name = 'bar1')", "cc_test(name = 'test', deps = [':bar1'], tags = ['manual'])"); TargetPatternPhaseValue result = tester.loadTests("//test:test", "-//test:all"); assertThat(result.hasError()).isFalse(); assertThat(result.hasPostExpansionError()).isFalse(); tester.assertContainsWarning("All specified test targets were excluded by filters"); assertThat(tester.getFilteredTargets()).containsExactlyElementsIn(getLabels("//test:test")); assertThat(result.getTargetLabels()).isEmpty(); } @Test public void testFindLongestPrefix() throws Exception { tester.addFile("base/BUILD", "exports_files(['bar', 'bar/bar', 'bar/baz'])"); TargetPatternPhaseValue result = assertNoErrors(tester.load("base/bar/baz")); assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//base:bar/baz")); result = assertNoErrors(tester.load("base/bar")); assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//base:bar")); } @Test public void testMultiSegmentLabel() throws Exception { tester.addFile("base/foo/BUILD", "exports_files(['bar/baz'])"); TargetPatternPhaseValue value = 
assertNoErrors(tester.load("base/foo:bar/baz")); assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels("//base/foo:bar/baz")); } @Test public void testMultiSegmentLabelRelative() throws Exception { tester.addFile("base/foo/BUILD", "exports_files(['bar/baz'])"); tester.setRelativeWorkingDirectory("base"); TargetPatternPhaseValue value = assertNoErrors(tester.load("foo:bar/baz")); assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels("//base/foo:bar/baz")); } @Test public void testDeletedPackage() throws Exception { tester.addFile("base/BUILD", "exports_files(['base'])"); tester.setDeletedPackages(PackageIdentifier.createInMainRepo("base")); TargetPatternPhaseValue result = tester.loadKeepGoing("//base"); assertThat(result.hasError()).isTrue(); tester.assertContainsError( "no such package 'base': Package is considered deleted due to --deleted_packages"); ParsingFailedEvent err = tester.findPostOnce(ParsingFailedEvent.class); assertThat(err.getPattern()).isEqualTo("//base"); } private void writeBuildFilesForTestFiltering() throws Exception { tester.addFile("tests/BUILD", "sh_test(name = 't1', srcs = ['pass.sh'], size= 'small', local=1)", "sh_test(name = 't2', srcs = ['pass.sh'], size = 'medium')", "sh_test(name = 't3', srcs = ['pass.sh'], tags = ['manual', 'local'])"); } @Test public void testTestFiltering() throws Exception { writeBuildFilesForTestFiltering(); TargetPatternPhaseValue loadingResult = assertNoErrors(tester.loadTests("//tests:all")); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2")); assertThat(loadingResult.getTestsToRunLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2")); assertThat(tester.getFilteredTargets()).isEmpty(); assertThat(tester.getTestFilteredTargets()).isEmpty(); } @Test public void testTestFilteringIncludingManual() throws Exception { writeBuildFilesForTestFiltering(); tester.useLoadingOptions("--build_manual_tests"); 
TargetPatternPhaseValue loadingResult = assertNoErrors(tester.loadTests("//tests:all")); assertThat(loadingResult.getTargetLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2", "//tests:t3")); assertThat(loadingResult.getTestsToRunLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2")); assertThat(tester.getFilteredTargets()).isEmpty(); assertThat(tester.getTestFilteredTargets()).isEmpty(); } @Test public void testTestFilteringBuildTestsOnly() throws Exception { writeBuildFilesForTestFiltering(); tester.useLoadingOptions("--build_tests_only"); TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//tests:all")); assertThat(result.getTargetLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2")); assertThat(result.getTestsToRunLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2")); assertThat(tester.getFilteredTargets()).isEmpty(); assertThat(tester.getTestFilteredTargets()).isEmpty(); } @Test public void testTestFilteringSize() throws Exception { writeBuildFilesForTestFiltering(); tester.useLoadingOptions("--test_size_filters=small"); TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//tests:all")); assertThat(result.getTargetLabels()) .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t2")); assertThat(result.getTestsToRunLabels()).containsExactlyElementsIn(getLabels("//tests:t1")); assertThat(tester.getFilteredTargets()).isEmpty(); assertThat(tester.getTestFilteredTargets()).isEmpty(); } @Test public void testTestFilteringSizeAndBuildTestsOnly() throws Exception { writeBuildFilesForTestFiltering(); tester.useLoadingOptions("--test_size_filters=small", "--build_tests_only"); TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//tests:all")); assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//tests:t1")); assertThat(result.getTestsToRunLabels()).containsExactlyElementsIn(getLabels("//tests:t1")); 
assertThat(tester.getFilteredTargets()).isEmpty();
    // t2 is filtered out by size; it is reported as a test-filtered target.
    assertThat(tester.getTestFilteredTargets()).containsExactlyElementsIn(getLabels("//tests:t2"));
  }

  /** Explicitly requested targets override tag filtering under --build_tests_only. */
  @Test
  public void testTestFilteringLocalAndBuildTestsOnly() throws Exception {
    writeBuildFilesForTestFiltering();
    tester.useLoadingOptions("--test_tag_filters=local", "--build_tests_only");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//tests:all", "//tests:t3"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t3"));
    assertThat(result.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//tests:t1", "//tests:t3"));
    assertThat(tester.getFilteredTargets()).isEmpty();
    assertThat(tester.getTestFilteredTargets()).containsExactlyElementsIn(getLabels("//tests:t2"));
  }

  /** A test_suite expands to its member tests. */
  @Test
  public void testTestSuiteExpansion() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'my_test', srcs = ['test.cc'])",
        "test_suite(name = 'tests', tests = [':my_test'])");
    TargetPatternPhaseValue loadingResult = assertNoErrors(tester.loadTests("//cc:tests"));
    assertThat(loadingResult.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_test"));
    assertThat(loadingResult.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_test"));
    assertThat(tester.getOriginalTargets())
        .containsExactlyElementsIn(getLabels("//cc:tests", "//cc:my_test"));
    assertThat(tester.getTestSuiteTargets())
        .containsExactly(Label.parseAbsoluteUnchecked("//cc:tests"));
  }

  /** A suite referencing a nonexistent package fails during pattern expansion. */
  @Test
  public void testTestSuiteExpansionFails() throws Exception {
    tester.addFile("ts/BUILD", "test_suite(name = 'tests', tests = ['//nonexistent:my_test'])");
    tester.useLoadingOptions("--build_tests_only");
    TargetPatternPhaseValue loadingResult = tester.loadTestsKeepGoing("//ts:tests");
    assertThat(loadingResult.hasError()).isTrue();
    assertThat(loadingResult.hasPostExpansionError()).isFalse();
    tester.assertContainsError("no such package 'nonexistent'");
  }

  /** Building (not testing) a broken suite surfaces a post-expansion error. */
  @Test
  public void testTestSuiteExpansionFailsForBuild() throws Exception {
    tester.addFile("ts/BUILD", "test_suite(name = 'tests', tests = [':nonexistent_test'])");
    TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("//ts:tests");
    assertThat(loadingResult.hasError()).isFalse();
    assertThat(loadingResult.hasPostExpansionError()).isTrue();
    tester.assertContainsError(
        "expecting a test or a test_suite rule but '//ts:nonexistent_test' is not one");
  }

  /** Keep-going: an error while loading suite members is a post-expansion error. */
  @Test
  public void failureWhileLoadingTestsForTestSuiteKeepGoing() throws Exception {
    tester.addFile("ts/BUILD", "test_suite(name = 'tests', tests = ['//pkg:tests'])");
    tester.addFile("pkg/BUILD", "test_suite(name = 'tests')", "test_suite()");
    TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("//ts:tests");
    assertThat(loadingResult.hasError()).isFalse();
    assertThat(loadingResult.hasPostExpansionError()).isTrue();
    tester.assertContainsError("test_suite rule has no 'name' attribute");
  }

  /** No keep-going: the same failure is raised as a TargetParsingException. */
  @Test
  public void failureWhileLoadingTestsForTestSuiteNoKeepGoing() throws Exception {
    tester.addFile("ts/BUILD", "test_suite(name = 'tests', tests = ['//pkg:tests'])");
    tester.addFile("pkg/BUILD", "test_suite(name = 'tests')", "test_suite()");
    TargetParsingException e =
        assertThrows(TargetParsingException.class, () -> tester.load("//ts:tests"));
    assertThat(e)
        .hasMessageThat()
        .isEqualTo("error loading package 'pkg': Package 'pkg' contains errors");
    tester.assertContainsError("test_suite rule has no 'name' attribute");
  }

  /** A suite pointing at a missing target in an existing package fails post-expansion. */
  @Test
  public void testTestSuiteExpansionFailsMissingTarget() throws Exception {
    tester.addFile("other/BUILD", "");
    tester.addFile("ts/BUILD", "test_suite(name = 'tests', tests = ['//other:no_such_test'])");
    TargetPatternPhaseValue result = tester.loadTestsKeepGoing("//ts:tests");
    assertThat(result.hasError()).isTrue();
    assertThat(result.hasPostExpansionError()).isTrue();
    tester.assertContainsError("no such target '//other:no_such_test'");
  }

  @Test
  public void
testTestSuiteExpansionFailsMultipleSuites() throws Exception {
    // Multiple suites matched by one wildcard: a failure in one still fails the load.
    tester.addFile("other/BUILD", "");
    tester.addFile("ts/BUILD",
        "test_suite(name = 'a', tests = ['//other:no_such_test'])",
        "test_suite(name = 'b', tests = [])");
    TargetPatternPhaseValue result = tester.loadTestsKeepGoing("//ts:all");
    assertThat(result.hasError()).isTrue();
    assertThat(result.hasPostExpansionError()).isTrue();
    tester.assertContainsError("no such target '//other:no_such_test'");
  }

  /** Suite membership overrides the 'manual' tag under --build_tests_only. */
  @Test
  public void testTestSuiteOverridesManualWithBuildTestsOnly() throws Exception {
    tester.addFile("foo/BUILD",
        "sh_test(name = 'foo', srcs = ['foo.sh'], tags = ['manual'])",
        "sh_test(name = 'bar', srcs = ['bar.sh'], tags = ['manual'])",
        "sh_test(name = 'baz', srcs = ['baz.sh'])",
        "test_suite(name = 'foo_suite', tests = [':foo', ':baz'])");
    tester.useLoadingOptions("--build_tests_only");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//foo:all"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//foo:foo", "//foo:baz"));
    assertThat(result.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//foo:foo", "//foo:baz"));
    assertThat(tester.getFilteredTargets()).isEmpty();
    assertThat(tester.getTestFilteredTargets())
        .containsExactlyElementsIn(getLabels("//foo:foo_suite"));
  }

  /** Regression test for bug: "subtracting tests from test doesn't work" */
  @Test
  public void testFilterNegativeTestFromTestSuite() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'my_test', srcs = ['test.cc'])",
        "cc_test(name = 'my_other_test', srcs = ['other_test.cc'])",
        "test_suite(name = 'tests', tests = [':my_test', ':my_other_test'])");
    TargetPatternPhaseValue result =
        assertNoErrors(tester.loadTests("//cc:tests", "-//cc:my_test"));
    // NOTE: the excluded test is still in the build set (it is a suite member),
    // but it is excluded from the tests to run.
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_other_test", "//cc:my_test"));
    assertThat(result.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_other_test"));
  }

  /** Regression test for bug: "blaze doesn't seem to respect target subtractions" */
  @Test
  public void testNegativeTestSuiteExpanded() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'my_test', srcs = ['test.cc'])",
        "cc_test(name = 'my_other_test', srcs = ['other_test.cc'])",
        "test_suite(name = 'tests', tests = [':my_test'])",
        "test_suite(name = 'all_tests', tests = ['my_other_test'])");
    TargetPatternPhaseValue result =
        assertNoErrors(tester.loadTests("//cc:all_tests", "-//cc:tests"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_other_test"));
    assertThat(result.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_other_test"));
  }

  @Test
  public void testTestSuiteIsSubtracted() throws Exception {
    // Test suites are expanded for each target pattern in sequence, not the whole set of target
    // patterns after all the inclusions and exclusions are processed.
AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'my_test', srcs = ['test.cc'])",
        "cc_test(name = 'my_other_test', srcs = ['other_test.cc'])",
        "test_suite(name = 'tests', tests = [':my_test'])");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//cc:all", "-//cc:tests"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_test", "//cc:my_other_test"));
    assertThat(result.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_other_test"));
  }

  /** Regression test for bug: "blaze test "no targets found" warning now fatal" */
  @Test
  public void testNoTestsInRecursivePattern() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("foo/BUILD", "cc_library(name = 'foo', srcs = ['foo.cc'])");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//foo/..."));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//foo"));
    assertThat(result.getTestsToRunLabels()).isEmpty();
  }

  /** Nested and empty suites are flattened transitively. */
  @Test
  public void testComplexTestSuite() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'test1', srcs = ['test.cc'])",
        "cc_test(name = 'test2', srcs = ['test.cc'])",
        "test_suite(name = 'empty', tags = ['impossible'], tests = [])",
        "test_suite(name = 'suite1', tests = ['empty', 'test1'])",
        "test_suite(name = 'suite2', tests = ['test2'])",
        "test_suite(name = 'all_tests', tests = ['suite1', 'suite2'])");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//cc:all_tests"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//cc:test1", "//cc:test2"));
  }

  /** :all skips manual tests unless they are explicitly requested. */
  @Test
  public void testAllExcludesManualTest() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'my_test', srcs = ['test.cc'])",
        "cc_test(name = 'my_other_test', srcs = ['other_test.cc'], tags = ['manual'])");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//cc:all"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_test"));
    assertThat(result.getTestsToRunLabels())
        .containsExactlyElementsIn(getLabels("//cc:my_test"));
  }

  /** --build_tag_filters applies to non-test targets but never to tests. */
  @Test
  public void testBuildFilterDoesNotApplyToTests() throws Exception {
    tester.addFile(
        "foo/BUILD",
        "sh_test(name = 'foo', srcs = ['foo.sh'])",
        "sh_library(name = 'lib', srcs = ['lib.sh'])",
        "sh_library(name = 'nofoo', srcs = ['nofoo.sh'], tags = ['nofoo'])");
    tester.useLoadingOptions("--build_tag_filters=nofoo");
    TargetPatternPhaseValue result = assertNoErrors(tester.loadTests("//foo:all"));
    assertThat(result.getTargetLabels())
        .containsExactlyElementsIn(getLabels("//foo:foo", "//foo:nofoo"));
    assertThat(result.getTestsToRunLabels()).containsExactlyElementsIn(getLabels("//foo:foo"));
  }

  /**
   * Regression test for bug: "blaze is lying to me about what tests exist (have been specified)"
   */
  @Test
  public void testTotalNegationEmitsWarning() throws Exception {
    AnalysisMock.get().ccSupport().setup(tester.mockToolsConfig);
    tester.addFile("cc/BUILD",
        "cc_test(name = 'my_test', srcs = ['test.cc'])",
        "test_suite(name = 'tests', tests = [':my_test'])");
    TargetPatternPhaseValue result = tester.loadTests("//cc:tests", "-//cc:my_test");
    tester.assertContainsWarning("All specified test targets were excluded by filters");
    assertThat(result.getTestsToRunLabels()).containsExactlyElementsIn(getLabels());
  }

  /** Loading the same pattern twice yields identical results. */
  @Test
  public void testRepeatedSameLoad() throws Exception {
    tester.addFile("base/BUILD", "filegroup(name = 'hello', srcs = ['foo.txt'])");
    TargetPatternPhaseValue firstResult = assertNoErrors(tester.load("//base:hello"));
    TargetPatternPhaseValue secondResult = assertNoErrors(tester.load("//base:hello"));
    assertThat(secondResult.getTargetLabels()).isEqualTo(firstResult.getTargetLabels());
assertThat(secondResult.getTestsToRunLabels()).isEqualTo(firstResult.getTestsToRunLabels());
  }

  /**
   * Tests whether globs can update correctly when a new file is added.
   *
   * <p>The usage of {@link LoadingPhaseTester#sync()} triggers this via
   * {@link SkyframeExecutor#invalidateFilesUnderPathForTesting}.
   */
  @Test
  public void testGlobPicksUpNewFile() throws Exception {
    tester.addFile("foo/BUILD", "filegroup(name='x', srcs=glob(['*.y']))");
    tester.addFile("foo/a.y");
    Label label =
        Iterables.getOnlyElement(assertNoErrors(tester.load("//foo:x")).getTargetLabels());
    Target result = tester.getTarget(label.toString());
    assertThat(
            Iterables.transform(result.getAssociatedRule().getLabels(),
                Functions.toStringFunction()))
        .containsExactly("//foo:a.y");
    // A new file matching the glob appears after a sync; the glob must pick it up.
    tester.addFile("foo/b.y");
    tester.sync();
    label = Iterables.getOnlyElement(assertNoErrors(tester.load("//foo:x")).getTargetLabels());
    result = tester.getTarget(label.toString());
    assertThat(
            Iterables.transform(result.getAssociatedRule().getLabels(),
                Functions.toStringFunction()))
        .containsExactly("//foo:a.y", "//foo:b.y");
  }

  /** Regression test: handle symlink cycles gracefully. */
  @Test
  public void testCycleReporting_symlinkCycleDuringTargetParsing() throws Exception {
    tester.addFile("hello/BUILD", "cc_library(name = 'a', srcs = glob(['*.cc']))");
    Path buildFilePath = tester.getWorkspace().getRelative("hello/BUILD");
    Path dirPath = buildFilePath.getParentDirectory();
    Path fooFilePath = dirPath.getRelative("foo.cc");
    Path barFilePath = dirPath.getRelative("bar.cc");
    Path bazFilePath = dirPath.getRelative("baz.cc");
    // foo -> bar -> baz -> foo forms a three-link symlink cycle inside the glob.
    fooFilePath.createSymbolicLink(barFilePath);
    barFilePath.createSymbolicLink(bazFilePath);
    bazFilePath.createSymbolicLink(fooFilePath);
    assertCircularSymlinksDuringTargetParsing("//hello:a");
  }

  @Test
  public void testRecursivePatternWithCircularSymlink() throws Exception {
    tester.getWorkspace().getChild("broken").createDirectory();

    // Create a circular symlink.
    tester.getWorkspace().getRelative(PathFragment.create("broken/BUILD"))
        .createSymbolicLink(PathFragment.create("BUILD"));

    assertCircularSymlinksDuringTargetParsing("//broken/...");
  }

  @Test
  public void testRecursivePatternWithTwoCircularSymlinks() throws Exception {
    tester.getWorkspace().getChild("broken").createDirectory();

    // Create a circular symlink.
    tester.getWorkspace().getRelative(PathFragment.create("broken/BUILD"))
        .createSymbolicLink(PathFragment.create("x"));
    tester.getWorkspace().getRelative(PathFragment.create("broken/x"))
        .createSymbolicLink(PathFragment.create("BUILD"));

    assertCircularSymlinksDuringTargetParsing("//broken/...");
  }

  /** Suites nested in suites expand transitively. */
  @Test
  public void testSuiteInSuite() throws Exception {
    tester.addFile("suite/BUILD",
        "test_suite(name = 'a', tests = [':b'])",
        "test_suite(name = 'b', tests = [':c'])",
        "sh_test(name = 'c', srcs = ['test.cc'])");
    TargetPatternPhaseValue result = assertNoErrors(tester.load("//suite:a"));
    assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//suite:c"));
  }

  @Test
  public void testTopLevelTargetErrorsPrintedExactlyOnce_noKeepGoing() throws Exception {
    tester.addFile("bad/BUILD",
        "sh_binary(name = 'bad', srcs = ['bad.sh'])",
        "fail('some error')");
    assertThrows(TargetParsingException.class, () -> tester.load("//bad"));
    tester.assertContainsEventWithFrequency("some error", 1);
    PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class);
    assertThat(err.getPattern()).containsExactly("//bad");
  }

  @Test
  public void testTopLevelTargetErrorsPrintedExactlyOnce_keepGoing() throws Exception {
    tester.addFile("bad/BUILD",
        "sh_binary(name = 'bad', srcs = ['bad.sh'])",
        "fail('some error')");
    TargetPatternPhaseValue result = tester.loadKeepGoing("//bad");
    assertThat(result.hasError()).isTrue();
    tester.assertContainsEventWithFrequency("some error", 1);
  }

  @Test
  public void testCompileOneDependency() throws Exception {
    tester.addFile("base/BUILD", "cc_library(name = 'hello', srcs = ['hello.cc'])");
tester.useLoadingOptions("--compile_one_dependency");
    // --compile_one_dependency maps a source file to a rule that consumes it.
    TargetPatternPhaseValue result = assertNoErrors(tester.load("base/hello.cc"));
    assertThat(result.getTargetLabels()).containsExactlyElementsIn(getLabels("//base:hello"));
  }

  @Test
  public void testCompileOneDependencyNonExistentSource() throws Exception {
    tester.addFile("base/BUILD", "cc_library(name = 'hello', srcs = ['hello.cc', '//bad:bad.cc'])");
    tester.useLoadingOptions("--compile_one_dependency");
    try {
      TargetPatternPhaseValue loadingResult = tester.load("base/hello.cc");
      assertThat(loadingResult.hasPostExpansionError()).isFalse();
    } catch (LoadingFailedException expected) {
      tester.assertContainsError("no such package 'bad'");
    }
  }

  @Test
  public void testCompileOneDependencyNonExistentSourceKeepGoing() throws Exception {
    tester.addFile("base/BUILD", "cc_library(name = 'hello', srcs = ['hello.cc', '//bad:bad.cc'])");
    tester.useLoadingOptions("--compile_one_dependency");
    TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("base/hello.cc");
    assertThat(loadingResult.hasPostExpansionError()).isFalse();
  }

  @Test
  public void testCompileOneDependencyReferencesFile() throws Exception {
    tester.addFile("base/BUILD", "cc_library(name = 'hello', srcs = ['hello.cc', '//bad:bad.cc'])");
    tester.useLoadingOptions("--compile_one_dependency");
    TargetParsingException e =
        assertThrows(TargetParsingException.class, () -> tester.load("//base:hello"));
    assertThat(e)
        .hasMessageThat()
        .contains("--compile_one_dependency target '//base:hello' must be a file");
  }

  @Test
  public void testParsingFailureReported() throws Exception {
    TargetPatternPhaseValue loadingResult = tester.loadKeepGoing("//does_not_exist");
    assertThat(loadingResult.hasError()).isTrue();
    ParsingFailedEvent event = tester.findPostOnce(ParsingFailedEvent.class);
    assertThat(event.getPattern()).isEqualTo("//does_not_exist");
    assertThat(event.getMessage()).contains("BUILD file not found");
  }

  @Test
  public void testCyclesKeepGoing() throws Exception {
    tester.addFile("test/BUILD", "load(':cycle1.bzl', 'make_cycle')");
    tester.addFile("test/cycle1.bzl", "load(':cycle2.bzl', 'make_cycle')");
    tester.addFile("test/cycle2.bzl", "load(':cycle1.bzl', 'make_cycle')");
    // The skyframe target pattern evaluator isn't able to provide partial results in the presence
    // of cycles, so it simply raises an exception rather than returning an empty result.
    TargetParsingException e =
        assertThrows(TargetParsingException.class, () -> tester.load("//test:cycle1"));
    assertThat(e).hasMessageThat().contains("cycles detected");
    tester.assertContainsEventWithFrequency("cycle detected in extension", 1);
    PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class);
    assertThat(err.getPattern()).containsExactly("//test:cycle1");
  }

  @Test
  public void testCyclesNoKeepGoing() throws Exception {
    tester.addFile("test/BUILD", "load(':cycle1.bzl', 'make_cycle')");
    tester.addFile("test/cycle1.bzl", "load(':cycle2.bzl', 'make_cycle')");
    tester.addFile("test/cycle2.bzl", "load(':cycle1.bzl', 'make_cycle')");
    TargetParsingException e =
        assertThrows(TargetParsingException.class, () -> tester.load("//test:cycle1"));
    assertThat(e).hasMessageThat().contains("cycles detected");
    tester.assertContainsEventWithFrequency("cycle detected in extension", 1);
    PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class);
    assertThat(err.getPattern()).containsExactly("//test:cycle1");
  }

  /** Each original pattern is mapped to every label it expanded to. */
  @Test
  public void mapsOriginalPatternsToLabels() throws Exception {
    tester.addFile("test/a/BUILD", "cc_library(name = 'a_lib', srcs = ['a.cc'])");
    tester.addFile("test/b/BUILD", "cc_library(name = 'b_lib', srcs = ['b.cc'])");

    tester.load("test/a:all", "test/b:all", "test/...");

    assertThat(tester.getOriginalPatternsToLabels())
        .containsExactly(
            "test/a:all",
            Label.parseAbsoluteUnchecked("//test/a:a_lib"),
            "test/b:all",
            Label.parseAbsoluteUnchecked("//test/b:b_lib"),
            "test/...",
            Label.parseAbsoluteUnchecked("//test/a:a_lib"),
            "test/...",
Label.parseAbsoluteUnchecked("//test/b:b_lib"));
  }

  /** A test_suite cycle is reported as a cycle, not a crash. */
  @Test
  public void testSuiteCycle() throws Exception {
    tester.addFile(
        "BUILD", "test_suite(name = 'a', tests = [':b']); test_suite(name = 'b', tests = [':a'])");
    assertThat(
            assertThrows(TargetParsingException.class, () -> tester.loadKeepGoing("//:a", "//:b")))
        .hasMessageThat()
        .contains("cycles detected");
    assertThat(tester.assertContainsError("cycle in dependency graph").toString())
        .containsMatch("in test_suite rule //:.: cycle in dependency graph");
    PatternExpandingError err = tester.findPostOnce(PatternExpandingError.class);
    assertThat(err.getPattern()).containsExactly("//:a", "//:b");
  }

  @Test
  public void mapsOriginalPatternsToLabels_omitsExcludedTargets() throws Exception {
    tester.addFile("test/a/BUILD", "cc_library(name = 'a_lib', srcs = ['a.cc'])");

    tester.load("test/...", "-test/a:a_lib");

    assertThat(tester.getOriginalPatternsToLabels()).isEmpty();
  }

  /** :all-targets and :* match every target in the package, including source files. */
  @Test
  public void testWildcard() throws Exception {
    tester.addFile("foo/lib/BUILD", "sh_library(name = 'lib2', srcs = ['foo.cc'])");
    TargetPatternPhaseValue value = assertNoErrors(tester.load("//foo/lib:all-targets"));
    assertThat(value.getTargetLabels())
        .containsExactlyElementsIn(
            getLabels("//foo/lib:BUILD", "//foo/lib:lib2", "//foo/lib:foo.cc"));
    value = assertNoErrors(tester.load("//foo/lib:*"));
    assertThat(value.getTargetLabels())
        .containsExactlyElementsIn(
            getLabels("//foo/lib:BUILD", "//foo/lib:lib2", "//foo/lib:foo.cc"));
  }

  /** A rule literally named 'all' or 'all-targets' wins over the wildcard, with a warning. */
  @Test
  public void testWildcardConflict() throws Exception {
    tester.addFile("foo/lib/BUILD",
        "cc_library(name = 'lib1')",
        "cc_library(name = 'lib2')",
        "cc_library(name = 'all-targets')",
        "cc_library(name = 'all')");
    assertWildcardConflict("//foo/lib:all", ":all");
    assertWildcardConflict("//foo/lib:all-targets", ":all-targets");
  }

  // Asserts that the pattern resolves to the literally-named rule and that the
  // ambiguity warning is emitted.
  private void assertWildcardConflict(String label, String suffix) throws Exception {
    TargetPatternPhaseValue value = tester.load(label);
    assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels(label));
    tester.assertContainsWarning(String.format("The target pattern '%s' is ambiguous: '%s' is both "
        + "a wildcard, and the name of an existing cc_library rule; "
        + "using the latter interpretation", label, suffix));
  }

  @Test
  public void testAbsolutePatternEndsWithSlashAll() throws Exception {
    tester.addFile("foo/all/BUILD", "cc_library(name = 'all')");
    TargetPatternPhaseValue value = tester.load("//foo/all");
    assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels("//foo/all:all"));
  }

  @Test
  public void testRelativeLabel() throws Exception {
    tester.addFile("base/BUILD", "filegroup(name = 'hello', srcs = ['foo.txt'])");
    TargetPatternPhaseValue value = assertNoErrors(tester.load("base:hello"));
    assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels("//base:hello"));
  }

  @Test
  public void testAbsoluteLabelWithOffset() throws Exception {
    tester.addFile("base/BUILD", "filegroup(name = 'hello', srcs = ['foo.txt'])");
    tester.setRelativeWorkingDirectory("base");
    TargetPatternPhaseValue value = assertNoErrors(tester.load("//base:hello"));
    assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels("//base:hello"));
  }

  @Test
  public void testRelativeLabelWithOffset() throws Exception {
    tester.addFile("base/BUILD", "filegroup(name = 'hello', srcs = ['foo.txt'])");
    tester.setRelativeWorkingDirectory("base");
    TargetPatternPhaseValue value = assertNoErrors(tester.load(":hello"));
    assertThat(value.getTargetLabels()).containsExactlyElementsIn(getLabels("//base:hello"));
  }

  // Asserts that loading the pattern fails with a message containing the given text.
  private void expectError(String pattern, String message) throws Exception {
    TargetParsingException e =
        assertThrows(TargetParsingException.class, () -> tester.load(pattern));
    assertThat(e).hasMessageThat().contains(message);
  }

  @Test
  public void testPatternWithSingleSlashIsError() throws Exception {
    expectError(
        "/single/slash",
        "not a valid absolute pattern (absolute target patterns must start with exactly "
            + "two slashes): '/single/slash'");
  }

  @Test
  public void testPatternWithSingleSlashIsErrorAndOffset() throws Exception {
    tester.setRelativeWorkingDirectory("base");
    expectError(
        "/single/slash",
        "not a valid absolute pattern (absolute target patterns must start with exactly "
            + "two slashes): '/single/slash'");
  }

  @Test
  public void testPatternWithTripleSlashIsError() throws Exception {
    expectError(
        "///triple/slash",
        "not a valid absolute pattern (absolute target patterns must start with exactly "
            + "two slashes): '///triple/slash'");
  }

  @Test
  public void testPatternEndingWithSingleSlashIsError() throws Exception {
    expectError(
        "foo/",
        "The package part of 'foo/' should not end in a slash");
  }

  @Test
  public void testPatternStartingWithDotDotSlash() throws Exception {
    expectError(
        "../foo",
        "Bad target pattern '../foo': package name component contains only '.' characters");
  }

  // Shared driver: a BUILD file with a name error fails loading. Keep-going
  // degrades to a warning plus an error event; no-keep-going throws.
  private void runTestPackageLoadingError(boolean keepGoing, String... patterns) throws Exception {
    tester.addFile("bad/BUILD", "nope");
    if (keepGoing) {
      TargetPatternPhaseValue value = tester.loadKeepGoing(patterns);
      assertThat(value.hasError()).isTrue();
      tester.assertContainsWarning("Target pattern parsing failed");
    } else {
      TargetParsingException exn =
          assertThrows(TargetParsingException.class, () -> tester.load(patterns));
      assertThat(exn).hasCauseThat().isInstanceOf(BuildFileContainsErrorsException.class);
      assertThat(exn).hasCauseThat().hasMessageThat().contains("Package 'bad' contains errors");
    }
    tester.assertContainsError("/workspace/bad/BUILD:1:1: name 'nope' is not defined");
  }

  @Test
  public void testPackageLoadingError_keepGoing_explicitTarget() throws Exception {
    runTestPackageLoadingError(/*keepGoing=*/ true, "//bad:BUILD");
  }

  @Test
  public void testPackageLoadingError_noKeepGoing_explicitTarget() throws Exception {
    runTestPackageLoadingError(/*keepGoing=*/ false, "//bad:BUILD");
  }

  @Test
  public void testPackageLoadingError_keepGoing_targetsInPackage() throws Exception {
runTestPackageLoadingError(/*keepGoing=*/ true, "//bad:all"); } @Test public void testPackageLoadingError_noKeepGoing_targetsInPackage() throws Exception { runTestPackageLoadingError(/*keepGoing=*/ false, "//bad:all"); } @Test public void testPackageLoadingError_keepGoing_targetsBeneathDirectory() throws Exception { runTestPackageLoadingError(/*keepGoing=*/ true, "//bad/..."); } @Test public void testPackageLoadingError_noKeepGoing_targetsBeneathDirectory() throws Exception { runTestPackageLoadingError(/*keepGoing=*/ false, "//bad/..."); } @Test public void testPackageLoadingError_keepGoing_someGoodTargetsBeneathDirectory() throws Exception { tester.addFile("good/BUILD", "sh_library(name = 't')\n"); runTestPackageLoadingError(/*keepGoing=*/ true, "//..."); } @Test public void testPackageLoadingError_noKeepGoing_someGoodTargetsBeneathDirectory() throws Exception { tester.addFile("good/BUILD", "sh_library(name = 't')\n"); runTestPackageLoadingError(/*keepGoing=*/ false, "//..."); } private void runTestPackageFileInconsistencyError(boolean keepGoing, String... 
patterns) throws Exception { tester.addFile("bad/BUILD", "sh_library(name = 't')\n"); IOException ioExn = new IOException("nope"); tester.throwExceptionOnGetInputStream(tester.getWorkspace().getRelative("bad/BUILD"), ioExn); if (keepGoing) { TargetPatternPhaseValue value = tester.loadKeepGoing(patterns); assertThat(value.hasError()).isTrue(); tester.assertContainsWarning("Target pattern parsing failed"); tester.assertContainsError("error loading package 'bad': nope"); } else { TargetParsingException exn = assertThrows(TargetParsingException.class, () -> tester.load(patterns)); assertThat(exn).hasCauseThat().isInstanceOf(BuildFileContainsErrorsException.class); assertThat(exn).hasCauseThat().hasMessageThat().contains("error loading package 'bad': nope"); } } @Test public void testPackageFileInconsistencyError_keepGoing_explicitTarget() throws Exception { runTestPackageFileInconsistencyError(true, "//bad:BUILD"); } @Test public void testPackageFileInconsistencyError_noKeepGoing_explicitTarget() throws Exception { runTestPackageFileInconsistencyError(false, "//bad:BUILD"); } @Test public void testPackageFileInconsistencyError_keepGoing_targetsInPackage() throws Exception { runTestPackageFileInconsistencyError(true, "//bad:all"); } @Test public void testPackageFileInconsistencyError_noKeepGoing_targetsInPackage() throws Exception { runTestPackageFileInconsistencyError(false, "//bad:all"); } @Test public void testPackageFileInconsistencyError_keepGoing_argetsBeneathDirectory() throws Exception { runTestPackageFileInconsistencyError(true, "//bad/..."); } @Test public void testPackageFileInconsistencyError_noKeepGoing_targetsBeneathDirectory() throws Exception { runTestPackageFileInconsistencyError(false, "//bad/..."); } @Test public void testPackageFileInconsistencyError_keepGoing_someGoodTargetsBeneathDirectory() throws Exception { tester.addFile("good/BUILD", "sh_library(name = 't')\n"); runTestPackageFileInconsistencyError(true, "//..."); } @Test public void 
testPackageFileInconsistencyError_noKeepGoing_someGoodTargetsBeneathDirectory() throws Exception { tester.addFile("good/BUILD", "sh_library(name = 't')\n"); runTestPackageFileInconsistencyError(false, "//..."); } private void runTestExtensionLoadingError(boolean keepGoing, String... patterns) throws Exception { tester.addFile("bad/f1.bzl", "nope"); tester.addFile("bad/BUILD", "load(\":f1.bzl\", \"not_a_symbol\")"); if (keepGoing) { TargetPatternPhaseValue value = tester.loadKeepGoing(patterns); assertThat(value.hasError()).isTrue(); tester.assertContainsWarning("Target pattern parsing failed"); } else { TargetParsingException exn = assertThrows(TargetParsingException.class, () -> tester.load(patterns)); assertThat(exn).hasCauseThat().isInstanceOf(BuildFileContainsErrorsException.class); assertThat(exn).hasCauseThat().hasMessageThat().contains("Extension 'bad/f1.bzl' has errors"); DetailedExitCode detailedExitCode = exn.getDetailedExitCode(); assertThat(detailedExitCode.getExitCode()).isEqualTo(ExitCode.BUILD_FAILURE); assertThat(detailedExitCode.getFailureDetail().getPackageLoading().getCode()) .isEqualTo(PackageLoading.Code.IMPORT_STARLARK_FILE_ERROR); } tester.assertContainsError("/workspace/bad/f1.bzl:1:1: name 'nope' is not defined"); } @Test public void testExtensionLoadingError_keepGoing_explicitTarget() throws Exception { runTestExtensionLoadingError(/*keepGoing=*/ true, "//bad:BUILD"); } @Test public void testExtensionLoadingError_noKeepGoing_explicitTarget() throws Exception { runTestExtensionLoadingError(/*keepGoing=*/ false, "//bad:BUILD"); } @Test public void testExtensionLoadingError_keepGoing_targetsInPackage() throws Exception { runTestExtensionLoadingError(/*keepGoing=*/ true, "//bad:all"); } @Test public void testExtensionLoadingError_noKeepGoing_targetsInPackage() throws Exception { runTestExtensionLoadingError(/*keepGoing=*/ false, "//bad:all"); } @Test public void testExtensionLoadingError_keepGoing_targetsBeneathDirectory() throws Exception { 
runTestExtensionLoadingError(/*keepGoing=*/ true, "//bad/...");
  }

  @Test
  public void testExtensionLoadingError_noKeepGoing_targetsBeneathDirectory() throws Exception {
    runTestExtensionLoadingError(/*keepGoing=*/ false, "//bad/...");
  }

  @Test
  public void testExtensionLoadingError_keepGoing_someGoodTargetsBeneathDirectory()
      throws Exception {
    tester.addFile("good/BUILD", "sh_library(name = 't')\n");
    runTestExtensionLoadingError(/*keepGoing=*/ true, "//...");
  }

  @Test
  public void testExtensionLoadingError_noKeepGoing_someGoodTargetsBeneathDirectory()
      throws Exception {
    tester.addFile("good/BUILD", "sh_library(name = 't')\n");
    runTestExtensionLoadingError(/*keepGoing=*/ false, "//...");
  }

  /**
   * Test fixture: an in-memory workspace plus a SkyframeExecutor wired up for
   * target-pattern loading, with helpers to load patterns and inspect posted events.
   */
  private static class LoadingPhaseTester {
    private final ManualClock clock = new ManualClock();
    private final CustomInMemoryFs fs = new CustomInMemoryFs(clock);
    private final Path workspace;

    private final AnalysisMock analysisMock;
    private final SkyframeExecutor skyframeExecutor;

    // Paths touched since the last sync(); used to invalidate skyframe file state.
    private final List<Path> changes = new ArrayList<>();
    private final BlazeDirectories directories;
    private final ActionKeyContext actionKeyContext = new ActionKeyContext();

    private LoadingOptions options;
    private final StoredEventHandler storedErrors;
    private PathFragment relativeWorkingDirectory = PathFragment.EMPTY_FRAGMENT;

    // Events captured by the most recent loadWithFlags() call.
    private TargetParsingCompleteEvent targetParsingCompleteEvent;
    private LoadingPhaseCompleteEvent loadingPhaseCompleteEvent;

    private MockToolsConfig mockToolsConfig;

    public LoadingPhaseTester() throws IOException {
      this.workspace = fs.getPath("/workspace");
      workspace.createDirectory();
      mockToolsConfig = new MockToolsConfig(workspace);
      analysisMock = AnalysisMock.get();
      analysisMock.setupMockClient(mockToolsConfig);
      directories =
          new BlazeDirectories(
              new ServerDirectories(
                  fs.getPath("/install"), fs.getPath("/output"), fs.getPath("/userRoot")),
              workspace,
              /* defaultSystemJavabase= */ null,
              analysisMock.getProductName());
      // Tests create "base" themselves; start from a clean slate.
      workspace.getRelative("base").deleteTree();

      ConfiguredRuleClassProvider ruleClassProvider = analysisMock.createRuleClassProvider();
      PackageFactory pkgFactory =
          analysisMock.getPackageFactoryBuilderForTesting(directories).build(ruleClassProvider, fs);
      PackageOptions options = Options.getDefaults(PackageOptions.class);
      storedErrors = new StoredEventHandler();
      BuildOptions defaultBuildOptions;
      try {
        defaultBuildOptions = BuildOptions.of(ImmutableList.of());
      } catch (OptionsParsingException e) {
        throw new RuntimeException(e);
      }
      skyframeExecutor =
          BazelSkyframeExecutorConstants.newBazelSkyframeExecutorBuilder()
              .setPkgFactory(pkgFactory)
              .setFileSystem(fs)
              .setDirectories(directories)
              .setActionKeyContext(actionKeyContext)
              .setDefaultBuildOptions(defaultBuildOptions)
              .setExtraSkyFunctions(analysisMock.getSkyFunctions(directories))
              .build();
      SkyframeExecutorTestHelper.process(skyframeExecutor);
      PathPackageLocator pkgLocator =
          PathPackageLocator.create(
              null,
              options.packagePath,
              storedErrors,
              workspace,
              workspace,
              BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY);
      PackageOptions packageOptions = Options.getDefaults(PackageOptions.class);
      packageOptions.defaultVisibility = ConstantRuleVisibility.PRIVATE;
      packageOptions.showLoadingProgress = true;
      packageOptions.globbingThreads = 7;
      skyframeExecutor.injectExtraPrecomputedValues(
          ImmutableList.of(
              PrecomputedValue.injected(
                  RepositoryDelegatorFunction.RESOLVED_FILE_INSTEAD_OF_WORKSPACE,
                  Optional.empty())));
      skyframeExecutor.preparePackageLoading(
          pkgLocator,
          packageOptions,
          Options.getDefaults(StarlarkSemanticsOptions.class),
          UUID.randomUUID(),
          ImmutableMap.<String, String>of(),
          new TimestampGranularityMonitor(clock));
      skyframeExecutor.setActionEnv(ImmutableMap.<String, String>of());
      this.options = Options.getDefaults(LoadingOptions.class);
    }

    // Parses the given command-line flags into this tester's LoadingOptions.
    public void useLoadingOptions(String... options) throws OptionsParsingException {
      OptionsParser parser = OptionsParser.builder().optionsClasses(LoadingOptions.class).build();
      parser.parse(ImmutableList.copyOf(options));
      this.options = parser.getOptions(LoadingOptions.class);
    }

    public void setRelativeWorkingDirectory(String relativeWorkingDirectory) {
      this.relativeWorkingDirectory = PathFragment.create(relativeWorkingDirectory);
    }

    public void setDeletedPackages(PackageIdentifier... packages) {
      skyframeExecutor.setDeletedPackages(ImmutableList.copyOf(packages));
    }

    public TargetPatternPhaseValue load(String... patterns) throws Exception {
      return loadWithFlags(/*keepGoing=*/false, /*determineTests=*/false, patterns);
    }

    public TargetPatternPhaseValue loadKeepGoing(String... patterns) throws Exception {
      return loadWithFlags(/*keepGoing=*/true, /*determineTests=*/false, patterns);
    }

    public TargetPatternPhaseValue loadTests(String... patterns) throws Exception {
      return loadWithFlags(/*keepGoing=*/false, /*determineTests=*/true, patterns);
    }

    public TargetPatternPhaseValue loadTestsKeepGoing(String... patterns) throws Exception {
      return loadWithFlags(/*keepGoing=*/true, /*determineTests=*/true, patterns);
    }

    // Core load path: syncs pending file changes, clears stored events, runs the
    // target-pattern phase, then captures the completion events for inspection.
    public TargetPatternPhaseValue loadWithFlags(
        boolean keepGoing, boolean determineTests, String... patterns) throws Exception {
      sync();
      storedErrors.clear();
      TargetPatternPhaseValue result =
          skyframeExecutor.loadTargetPatternsWithFilters(
              storedErrors,
              ImmutableList.copyOf(patterns),
              relativeWorkingDirectory,
              options,
              // We load very few packages, and everything is in memory; two should be plenty.
              /* threadCount= */ 2,
              keepGoing,
              determineTests);
      this.targetParsingCompleteEvent = findPost(TargetParsingCompleteEvent.class);
      this.loadingPhaseCompleteEvent = findPost(LoadingPhaseCompleteEvent.class);
      if (!keepGoing) {
        assertThat(storedErrors.hasErrors()).isFalse();
      }
      return result;
    }

    public Path getWorkspace() {
      return workspace;
    }

    // Writes a new file (must not already exist) and records every path created
    // so the next sync() invalidates them in skyframe.
    public void addFile(String fileName, String... content) throws IOException {
      Path buildFile = workspace.getRelative(fileName);
      Preconditions.checkState(!buildFile.exists());
      Path currentPath = buildFile;

      // Add the new file and all the directories that will be created by
      // createDirectoryAndParents()
      while (!currentPath.exists()) {
        changes.add(currentPath);
        currentPath = currentPath.getParentDirectory();
      }
      buildFile.getParentDirectory().createDirectoryAndParents();
      FileSystemUtils.writeContentAsLatin1(buildFile, Joiner.on('\n').join(content));
    }

    // Advances the clock and tells skyframe which workspace files changed.
    private void sync() throws InterruptedException {
      clock.advanceMillis(1);
      ModifiedFileSet.Builder builder = ModifiedFileSet.builder();
      for (Path path : changes) {
        if (!path.startsWith(workspace)) {
          continue;
        }
        PathFragment workspacePath = path.relativeTo(workspace);
        builder.modify(workspacePath);
      }
      ModifiedFileSet modified = builder.build();
      skyframeExecutor.invalidateFilesUnderPathForTesting(
          storedErrors, modified, Root.fromPath(workspace));
      changes.clear();
    }

    public Target getTarget(String targetName) throws Exception {
      StoredEventHandler eventHandler = new StoredEventHandler();
      Target target =
          getPkgManager().getTarget(
              eventHandler, Label.parseAbsoluteUnchecked(targetName));
      assertThat(eventHandler.hasErrors()).isFalse();
      return target;
    }

    private PackageManager getPkgManager() {
      return skyframeExecutor.getPackageManager();
    }

    public ImmutableSet<Label> getFilteredTargets() {
      return ImmutableSet.copyOf(targetParsingCompleteEvent.getFilteredLabels());
    }

    public ImmutableSet<Label> getTestFilteredTargets() {
      return ImmutableSet.copyOf(targetParsingCompleteEvent.getTestFilteredLabels());
    }

    public ImmutableSet<Label> getOriginalTargets() {
      return ImmutableSet.copyOf(targetParsingCompleteEvent.getLabels());
    }

    public ImmutableSetMultimap<String, Label> getOriginalPatternsToLabels() {
      return targetParsingCompleteEvent.getOriginalPatternsToLabels();
    }

    public ImmutableSet<Label> getTestSuiteTargets() {
      return loadingPhaseCompleteEvent.getFilteredLabels();
    }

    void
throwExceptionOnGetInputStream(Path path, IOException exn) { fs.throwExceptionOnGetInputStream(path, exn); } private Iterable<Event> filteredEvents() { return Iterables.filter(storedErrors.getEvents(), new Predicate<Event>() { @Override public boolean apply(Event event) { return event.getKind() != EventKind.PROGRESS; } }); } public void assertNoEvents() { MoreAsserts.assertNoEvents(filteredEvents()); } public Event assertContainsWarning(String expectedMessage) { return MoreAsserts.assertContainsEvent(filteredEvents(), expectedMessage, EventKind.WARNING); } public Event assertContainsError(String expectedMessage) { return MoreAsserts.assertContainsEvent(filteredEvents(), expectedMessage, EventKind.ERRORS); } public void assertContainsEventWithFrequency(String expectedMessage, int expectedFrequency) { MoreAsserts.assertContainsEventWithFrequency( filteredEvents(), expectedMessage, expectedFrequency); } public <T extends Postable> T findPost(Class<T> clazz) { return Iterators.getNext( storedErrors.getPosts().stream().filter(clazz::isInstance).map(clazz::cast).iterator(), null); } public <T extends Postable> T findPostOnce(Class<T> clazz) { return storedErrors .getPosts() .stream() .filter(clazz::isInstance) .map(clazz::cast) .collect(MoreCollectors.onlyElement()); } } /** * Custom {@link InMemoryFileSystem} that can be pre-configured per-file to throw a supplied * IOException instead of the usual behavior. 
*/ private static class CustomInMemoryFs extends InMemoryFileSystem { private final Map<Path, IOException> pathsToErrorOnGetInputStream = Maps.newHashMap(); CustomInMemoryFs(ManualClock manualClock) { super(manualClock); } synchronized void throwExceptionOnGetInputStream(Path path, IOException exn) { pathsToErrorOnGetInputStream.put(path, exn); } @Override protected synchronized InputStream getInputStream(Path path) throws IOException { IOException exnToThrow = pathsToErrorOnGetInputStream.get(path); if (exnToThrow != null) { throw exnToThrow; } return super.getInputStream(path); } } }
package org.linkedin.json; import static java.lang.String.format; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.*; /* Copyright (c) 2002 JSON.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software shall be used for Good, not Evil. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /** * A JSON Pointer is a simple query language defined for JSON documents by * <a href="https://tools.ietf.org/html/rfc6901">RFC 6901</a>. * * In a nutshell, JSONPointer allows the user to navigate into a JSON document * using strings, and retrieve targeted objects, like a simple form of XPATH. * Path segments are separated by the '/' char, which signifies the root of * the document when it appears as the first char of the string. Array * elements are navigated using ordinals, counting from 0. JSONPointer strings * may be extended to any arbitrary number of segments. If the navigation * is successful, the matched item is returned. A matched item may be a * JSONObject, a JSONArray, or a JSON value. 
If the JSONPointer string building
 * fails, an appropriate exception is thrown. If the navigation fails to find
 * a match, a JSONPointerException is thrown.
 *
 * @author JSON.org
 * @version 2016-05-14
 */
public class JSONPointer {

    // used for URL encoding and decoding
    private static final String ENCODING = "utf-8";

    /**
     * This class allows the user to build a JSONPointer in steps, using
     * exactly one segment in each step.
     */
    public static class Builder {

        // Segments for the eventual JSONPointer string; stored unescaped.
        private final List<String> refTokens = new ArrayList<String>();

        /**
         * Creates a {@code JSONPointer} instance using the tokens previously set using the
         * {@link #append(String)} method calls.
         */
        public JSONPointer build() {
            return new JSONPointer(refTokens);
        }

        /**
         * Adds an arbitrary token to the list of reference tokens. It can be any non-null value.
         *
         * Unlike in the case of JSON string or URI fragment representation of JSON pointers, the
         * argument of this method MUST NOT be escaped. If you want to query the property called
         * {@code "a~b"} then you should simply pass the {@code "a~b"} string as-is, there is no
         * need to escape it as {@code "a~0b"}.
         *
         * @param token the new token to be appended to the list
         * @return {@code this}
         * @throws NullPointerException if {@code token} is null
         */
        public Builder append(String token) {
            if (token == null) {
                throw new NullPointerException("token cannot be null");
            }
            refTokens.add(token);
            return this;
        }

        /**
         * Adds an integer to the reference token list. Although not necessarily, mostly this token will
         * denote an array index.
         *
         * @param arrayIndex the array index to be added to the token list
         * @return {@code this}
         */
        public Builder append(int arrayIndex) {
            refTokens.add(String.valueOf(arrayIndex));
            return this;
        }
    }

    /**
     * Static factory method for {@link Builder}. Example usage:
     *
     * <pre><code>
     * JSONPointer pointer = JSONPointer.builder()
     *       .append("obj")
     *       .append("other~key").append("another/key")
     *       .append("\"")
     *       .append(0)
     *       .build();
     * </code></pre>
     *
     * @return a builder instance which can be used to construct a {@code JSONPointer} instance by chained
     * {@link Builder#append(String)} calls.
     */
    public static Builder builder() {
        return new Builder();
    }

    // Segments for the JSONPointer string. Invariant: tokens are stored UNESCAPED —
    // the String constructor unescapes them, and Builder#append takes raw tokens.
    private final List<String> refTokens;

    /**
     * Pre-parses and initializes a new {@code JSONPointer} instance. If you want to
     * evaluate the same JSON Pointer on different JSON documents then it is recommended
     * to keep the {@code JSONPointer} instances due to performance considerations.
     *
     * @param pointer the JSON String or URI Fragment representation of the JSON pointer.
     * @throws IllegalArgumentException if {@code pointer} is not a valid JSON pointer
     * @throws NullPointerException if {@code pointer} is null
     */
    public JSONPointer(String pointer) {
        if (pointer == null) {
            throw new NullPointerException("pointer cannot be null");
        }
        // "" and "#" both denote the whole document.
        if (pointer.isEmpty() || pointer.equals("#")) {
            refTokens = Collections.emptyList();
            return;
        }
        if (pointer.startsWith("#/")) {
            // URI fragment form: strip "#/" and URL-decode before token splitting.
            pointer = pointer.substring(2);
            try {
                pointer = URLDecoder.decode(pointer, ENCODING);
            } catch (UnsupportedEncodingException e) {
                // utf-8 is guaranteed by the JVM; this cannot normally happen.
                throw new RuntimeException(e);
            }
        } else if (pointer.startsWith("/")) {
            pointer = pointer.substring(1);
        } else {
            throw new IllegalArgumentException("a JSON pointer should start with '/' or '#/'");
        }
        refTokens = new ArrayList<String>();
        for (String token : pointer.split("/")) {
            refTokens.add(unescape(token));
        }
    }

    public JSONPointer(List<String> refTokens) {
        this.refTokens = new ArrayList<String>(refTokens);
    }

    /**
     * Reverses the RFC 6901 token escaping: {@code ~1} -> {@code /} then {@code ~0} -> {@code ~}
     * (this order is significant so that {@code ~01} decodes to {@code ~1}, not {@code /}).
     * NOTE(review): the backslash/quote unescaping is not part of RFC 6901 — it mirrors the
     * non-standard escaping applied by {@link #escape(String)}.
     */
    private String unescape(String token) {
        return token.replace("~1", "/").replace("~0", "~")
                .replace("\\\"", "\"")
                .replace("\\\\", "\\");
    }

    /**
     * Evaluates this JSON Pointer on the given {@code document}. The {@code document}
     * is usually a {@link JSONObject} or a {@link JSONArray} instance, but the empty
     * JSON Pointer ({@code ""}) can be evaluated on any JSON values and in such case the
     * returned value will be {@code document} itself.
     *
     * @param document the JSON document which should be the subject of querying.
     * @return the result of the evaluation
     * @throws JSONPointerException if an error occurs during evaluation
     */
    public Object queryFrom(Object document) {
        if (refTokens.isEmpty()) {
            return document;
        }
        Object current = document;
        for (String token : refTokens) {
            if (current instanceof JSONObject) {
                // BUG FIX: refTokens already holds unescaped tokens (see the constructors and
                // Builder#append), so the previous extra unescape(token) call here corrupted
                // lookups for keys that legitimately contain "~0" or "~1" sequences.
                current = ((JSONObject) current).opt(token);
            } else if (current instanceof JSONArray) {
                current = readByIndexToken(current, token);
            } else {
                throw new JSONPointerException(format(
                        "value [%s] is not an array or object therefore its key %s cannot be resolved", current,
                        token));
            }
        }
        return current;
    }

    /**
     * Matches a JSONArray element by ordinal position
     * @param current the JSONArray to be evaluated
     * @param indexToken the array index in string form
     * @return the matched object. If no matching item is found a
     * JSONPointerException is thrown
     * @throws JSONPointerException if the token is not a number or the index is out of bounds
     */
    private Object readByIndexToken(Object current, String indexToken) {
        try {
            int index = Integer.parseInt(indexToken);
            JSONArray currentArr = (JSONArray) current;
            if (index >= currentArr.length()) {
                throw new JSONPointerException(format("index %d is out of bounds - the array has %d elements", index,
                        currentArr.length()));
            }
            return currentArr.get(index);
        } catch (NumberFormatException e) {
            throw new JSONPointerException(format("%s is not an array index", indexToken), e);
        }
    }

    /**
     * Returns a string representing the JSONPointer path value using string
     * representation
     */
    @Override
    public String toString() {
        StringBuilder rval = new StringBuilder();
        for (String token: refTokens) {
            rval.append('/').append(escape(token));
        }
        return rval.toString();
    }

    /**
     * Escapes path segment values to an unambiguous form.
     * The escape char to be inserted is '~'. The chars to be escaped
     * are ~, which maps to ~0, and /, which maps to ~1. Backslashes
     * and double quote chars are also escaped.
     * @param token the JSONPointer segment value to be escaped
     * @return the escaped value for the token
     */
    private String escape(String token) {
        return token.replace("~", "~0")
                .replace("/", "~1")
                .replace("\\", "\\\\")
                .replace("\"", "\\\"");
    }

    /**
     * Returns a string representing the JSONPointer path value using URI
     * fragment identifier representation
     */
    public String toURIFragment() {
        try {
            StringBuilder rval = new StringBuilder("#");
            for (String token : refTokens) {
                rval.append('/').append(URLEncoder.encode(token, ENCODING));
            }
            return rval.toString();
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.accumulo.index; import com.facebook.presto.accumulo.Types; import com.facebook.presto.accumulo.iterators.MaxByteArrayCombiner; import com.facebook.presto.accumulo.iterators.MinByteArrayCombiner; import com.facebook.presto.accumulo.metadata.AccumuloTable; import com.facebook.presto.accumulo.model.AccumuloColumnHandle; import com.facebook.presto.accumulo.serializers.AccumuloRowSerializer; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.type.Type; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.common.primitives.UnsignedBytes; import org.apache.accumulo.core.client.BatchWriter; import org.apache.accumulo.core.client.BatchWriterConfig; import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.MutationsRejectedException; import org.apache.accumulo.core.client.Scanner; import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.data.ColumnUpdate; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.core.data.Range; import 
org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.iterators.LongCombiner; import org.apache.accumulo.core.iterators.TypedValueCombiner; import org.apache.accumulo.core.iterators.user.SummingCombiner; import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.core.security.ColumnVisibility; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.io.Text; import javax.annotation.concurrent.NotThreadSafe; import java.io.Closeable; import java.nio.ByteBuffer; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import static com.facebook.presto.accumulo.AccumuloErrorCode.ACCUMULO_TABLE_DNE; import static com.facebook.presto.accumulo.AccumuloErrorCode.UNEXPECTED_ACCUMULO_ERROR; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.google.common.base.MoreObjects.toStringHelper; import static java.nio.ByteBuffer.wrap; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Objects.requireNonNull; /** * This utility class assists the Presto connector, and external applications, * in populating the index table and metrics table for Accumulo-backed Presto tables. * <p> * This class is totally not thread safe. * <p> * When creating a table, if it contains indexed columns, users will have to create the index table * and the index metrics table, the names of which can be retrieved using the static functions in * this class. Additionally, users MUST add iterators to the index metrics table (also available via * static function), and, while not required, recommended to add the locality groups to the index * table to improve index lookup times. 
 * <p>
 * Sample usage of an indexer:
 * <p>
 * <pre>
 * <code>
 * Indexer indexer = new Indexer(connector, userAuths, table, writerConf);
 * for (Mutation m : mutationsToNormalTable) {
 *     indexer.index(m);
 * }
 *
 * // can flush indexer w/regular BatchWriter
 * indexer.flush()
 *
 * // finished adding new mutations, close the indexer
 * indexer.close();
 * </code>
 * </pre>
 */
@NotThreadSafe
public class Indexer
        implements Closeable
{
    // Well-known row/column identifiers used in the metrics table.
    public static final ByteBuffer METRICS_TABLE_ROW_ID = wrap("___METRICS_TABLE___".getBytes(UTF_8));
    public static final ByteBuffer METRICS_TABLE_ROWS_CF = wrap("___rows___".getBytes(UTF_8));
    public static final MetricsKey METRICS_TABLE_ROW_COUNT = new MetricsKey(METRICS_TABLE_ROW_ID, METRICS_TABLE_ROWS_CF);
    public static final ByteBuffer METRICS_TABLE_FIRST_ROW_CQ = wrap("___first_row___".getBytes(UTF_8));
    public static final ByteBuffer METRICS_TABLE_LAST_ROW_CQ = wrap("___last_row___".getBytes(UTF_8));
    public static final byte[] CARDINALITY_CQ = "___card___".getBytes(UTF_8);
    public static final Text CARDINALITY_CQ_AS_TEXT = new Text(CARDINALITY_CQ);
    public static final Text METRICS_TABLE_ROWS_CF_AS_TEXT = new Text(METRICS_TABLE_ROWS_CF.array());
    public static final Text METRICS_TABLE_ROWID_AS_TEXT = new Text(METRICS_TABLE_ROW_ID.array());

    private static final byte[] EMPTY_BYTES = new byte[0];
    // Separator between column family and qualifier in index-table family names.
    private static final byte UNDERSCORE = '_';
    // Encodes cardinality counts as string-encoded longs for the SummingCombiner.
    private static final TypedValueCombiner.Encoder<Long> ENCODER = new LongCombiner.StringEncoder();

    private final AccumuloTable table;
    private final BatchWriter indexWriter;
    private final BatchWriterConfig writerConfig;
    private final Connector connector;
    // In-memory cardinality counters, written to the metrics table on flush()/close().
    private final Map<MetricsKey, AtomicLong> metrics = new HashMap<>();
    // family -> qualifiers that are indexed for that family.
    private final Multimap<ByteBuffer, ByteBuffer> indexColumns;
    // family -> (qualifier -> Presto type) for indexed columns.
    private final Map<ByteBuffer, Map<ByteBuffer, Type>> indexColumnTypes;
    private final AccumuloRowSerializer serializer;
    private final Comparator<byte[]> byteArrayComparator = UnsignedBytes.lexicographicalComparator();

    // Smallest/largest row IDs seen so far (lexicographic byte order); null until known.
    private byte[] firstRow = null;
    private byte[] lastRow = null;

    /**
     * Creates a new indexer for the given table, opening a BatchWriter against the table's
     * index table and seeding first/last row bounds from the existing metrics table.
     *
     * @param connector Accumulo connector
     * @param auths authorizations used to scan the metrics table
     * @param table table metadata; must declare at least one indexed column
     * @param writerConfig configuration for the index and metrics BatchWriters
     * @throws TableNotFoundException if the index table does not exist
     */
    public Indexer(
            Connector connector,
            Authorizations auths,
            AccumuloTable table,
            BatchWriterConfig writerConfig)
            throws TableNotFoundException
    {
        this.connector = requireNonNull(connector, "connector is null");
        this.table = requireNonNull(table, "table is null");
        this.writerConfig = requireNonNull(writerConfig, "writerConfig is null");
        requireNonNull(auths, "auths is null");

        this.serializer = table.getSerializerInstance();

        // Create our batch writer
        indexWriter = connector.createBatchWriter(table.getIndexTableName(), writerConfig);

        ImmutableMultimap.Builder<ByteBuffer, ByteBuffer> indexColumnsBuilder = ImmutableMultimap.builder();
        Map<ByteBuffer, Map<ByteBuffer, Type>> indexColumnTypesBuilder = new HashMap<>();

        // Initialize metadata
        table.getColumns().forEach(columnHandle -> {
            if (columnHandle.isIndexed()) {
                // Wrap the column family and qualifier for this column and add it to
                // collection of indexed columns
                ByteBuffer family = wrap(columnHandle.getFamily().get().getBytes(UTF_8));
                ByteBuffer qualifier = wrap(columnHandle.getQualifier().get().getBytes(UTF_8));
                indexColumnsBuilder.put(family, qualifier);

                // Create a mapping for this column's Presto type, again creating a new one for the
                // family if necessary
                Map<ByteBuffer, Type> types = indexColumnTypesBuilder.get(family);
                if (types == null) {
                    types = new HashMap<>();
                    indexColumnTypesBuilder.put(family, types);
                }
                types.put(qualifier, columnHandle.getType());
            }
        });

        indexColumns = indexColumnsBuilder.build();
        indexColumnTypes = ImmutableMap.copyOf(indexColumnTypesBuilder);

        // If there are no indexed columns, throw an exception
        if (indexColumns.isEmpty()) {
            throw new PrestoException(NOT_SUPPORTED, "No indexed columns in table metadata. Refusing to index a table with no indexed columns");
        }

        // Initialize metrics map
        // This metrics map is for column cardinality
        metrics.put(METRICS_TABLE_ROW_COUNT, new AtomicLong(0));

        // Scan the metrics table for existing first row and last row
        Pair<byte[], byte[]> minmax = getMinMaxRowIds(connector, table, auths);
        firstRow = minmax.getLeft();
        lastRow = minmax.getRight();
    }

    /**
     * Index the given mutation, adding mutations to the index and metrics table
     * <p>
     * Like typical use of a BatchWriter, this method does not flush mutations to the underlying index table.
     * For higher throughput the modifications to the metrics table are tracked in memory and added to the metrics table when the indexer is flushed or closed.
     *
     * @param mutation Mutation to index
     */
    public void index(Mutation mutation)
    {
        // Increment the cardinality for the number of rows in the table
        metrics.get(METRICS_TABLE_ROW_COUNT).incrementAndGet();

        // Set the first and last row values of the table based on existing row IDs
        if (firstRow == null || byteArrayComparator.compare(mutation.getRow(), firstRow) < 0) {
            firstRow = mutation.getRow();
        }

        if (lastRow == null || byteArrayComparator.compare(mutation.getRow(), lastRow) > 0) {
            lastRow = mutation.getRow();
        }

        // For each column update in this mutation
        for (ColumnUpdate columnUpdate : mutation.getUpdates()) {
            // Get the column qualifiers we want to index for this column family (if any)
            ByteBuffer family = wrap(columnUpdate.getColumnFamily());
            Collection<ByteBuffer> indexQualifiers = indexColumns.get(family);

            // If we have column qualifiers we want to index for this column family
            if (indexQualifiers != null) {
                // Check if we want to index this particular qualifier
                ByteBuffer qualifier = wrap(columnUpdate.getColumnQualifier());
                if (indexQualifiers.contains(qualifier)) {
                    // If so, create a mutation using the following mapping:
                    // Row ID = column value
                    // Column Family = columnfamily_columnqualifier (see getIndexColumnFamily)
                    // Column Qualifier = row ID
                    // Value = empty
                    ByteBuffer indexFamily = getIndexColumnFamily(columnUpdate.getColumnFamily(), columnUpdate.getColumnQualifier());
                    Type type = indexColumnTypes.get(family).get(qualifier);
                    ColumnVisibility visibility = new ColumnVisibility(columnUpdate.getColumnVisibility());

                    // If this is an array type, then index each individual element in the array
                    if (Types.isArrayType(type)) {
                        Type elementType = Types.getElementType(type);
                        List<?> elements = serializer.decode(type, columnUpdate.getValue());
                        for (Object element : elements) {
                            addIndexMutation(wrap(serializer.encode(elementType, element)), indexFamily, visibility, mutation.getRow());
                        }
                    }
                    else {
                        addIndexMutation(wrap(columnUpdate.getValue()), indexFamily, visibility, mutation.getRow());
                    }
                }
            }
        }
    }

    /** Indexes each mutation in the given iterable; see {@link #index(Mutation)}. */
    public void index(Iterable<Mutation> mutations)
    {
        for (Mutation mutation : mutations) {
            index(mutation);
        }
    }

    /**
     * Writes one index-table entry (row = value, family = indexed column, qualifier = data row ID)
     * and bumps the in-memory cardinality counter for that (value, column, visibility) triple.
     */
    private void addIndexMutation(ByteBuffer row, ByteBuffer family, ColumnVisibility visibility, byte[] qualifier)
    {
        // Create the mutation and add it to the batch writer
        Mutation indexMutation = new Mutation(row.array());
        indexMutation.put(family.array(), qualifier, visibility, EMPTY_BYTES);
        try {
            indexWriter.addMutation(indexMutation);
        }
        catch (MutationsRejectedException e) {
            throw new PrestoException(UNEXPECTED_ACCUMULO_ERROR, "Index mutation rejected by server", e);
        }

        // Increment the cardinality metrics for this value of index
        // metrics is a mapping of row ID to column family
        MetricsKey key = new MetricsKey(row, family, visibility);
        AtomicLong count = metrics.get(key);
        if (count == null) {
            count = new AtomicLong(0);
            metrics.put(key, count);
        }
        count.incrementAndGet();
    }

    /**
     * Flushes all Mutations in the index writer, and all metric mutations to the metrics table.
     * Note that the metrics table is not updated until this method is explicitly called (or implicitly via close).
 */
    public void flush()
    {
        try {
            // Flush index writer
            indexWriter.flush();

            // Write out metrics mutations
            // NOTE(review): if addMutations throws, this writer is not closed — verify whether
            // this Accumulo version's BatchWriter supports try-with-resources.
            BatchWriter metricsWriter = connector.createBatchWriter(table.getMetricsTableName(), writerConfig);
            metricsWriter.addMutations(getMetricsMutations());
            metricsWriter.close();

            // Re-initialize the metrics
            metrics.clear();
            metrics.put(METRICS_TABLE_ROW_COUNT, new AtomicLong(0));
        }
        catch (MutationsRejectedException e) {
            throw new PrestoException(UNEXPECTED_ACCUMULO_ERROR, "Index mutation was rejected by server on flush", e);
        }
        catch (TableNotFoundException e) {
            throw new PrestoException(ACCUMULO_TABLE_DNE, "Accumulo table does not exist", e);
        }
    }

    /**
     * Flushes all remaining mutations via {@link Indexer#flush} and closes the index writer.
     */
    @Override
    public void close()
    {
        try {
            flush();
            indexWriter.close();
        }
        catch (MutationsRejectedException e) {
            throw new PrestoException(UNEXPECTED_ACCUMULO_ERROR, "Mutation was rejected by server on close", e);
        }
    }

    /**
     * Converts the in-memory metrics map into Mutations for the metrics table:
     * one cardinality entry per (value, column, visibility), plus the first/last row entries
     * when known.
     */
    private Collection<Mutation> getMetricsMutations()
    {
        ImmutableList.Builder<Mutation> mutationBuilder = ImmutableList.builder();
        // Mapping of column value to column to number of row IDs that contain that value
        for (Entry<MetricsKey, AtomicLong> entry : metrics.entrySet()) {
            // Row ID: Column value
            // Family: columnfamily_columnqualifier
            // Qualifier: CARDINALITY_CQ
            // Visibility: Inherited from indexed Mutation
            // Value: Cardinality
            Mutation mut = new Mutation(entry.getKey().row.array());
            mut.put(entry.getKey().family.array(), CARDINALITY_CQ, entry.getKey().visibility, ENCODER.encode(entry.getValue().get()));

            // Add to our list of mutations
            mutationBuilder.add(mut);
        }

        // Skip the first/last row entries when either is unknown,
        // which would really be for a brand new table that has zero rows and no indexed elements
        if (firstRow != null && lastRow != null) {
            // Add some columns to the special metrics table row ID for the first/last row.
            // Note that if the values on the server side are greater/lesser,
            // the configured iterator will take care of this at scan/compaction time
            Mutation firstLastMutation = new Mutation(METRICS_TABLE_ROW_ID.array());
            firstLastMutation.put(METRICS_TABLE_ROWS_CF.array(), METRICS_TABLE_FIRST_ROW_CQ.array(), firstRow);
            firstLastMutation.put(METRICS_TABLE_ROWS_CF.array(), METRICS_TABLE_LAST_ROW_CQ.array(), lastRow);
            mutationBuilder.add(firstLastMutation);
        }

        return mutationBuilder.build();
    }

    /**
     * Gets a collection of iterator settings that should be added to the metric table for the
     * given Accumulo table: a SummingCombiner over all cardinality columns and Min/Max combiners
     * over the first/last row entries. These MUST be attached to the metrics table.
     *
     * @param table Table for retrieving metrics iterators, see AccumuloClient#getTable
     * @return Collection of iterator settings
     */
    public static Collection<IteratorSetting> getMetricIterators(AccumuloTable table)
    {
        // NOTE(review): new String(byte[]) uses the platform default charset here; the bytes were
        // produced with UTF_8 — confirm whether these should pass UTF_8 explicitly.
        String cardQualifier = new String(CARDINALITY_CQ);
        String rowsFamily = new String(METRICS_TABLE_ROWS_CF.array());

        // Build a string for all columns where the summing combiner should be applied,
        // i.e. all indexed columns
        StringBuilder cardBuilder = new StringBuilder(rowsFamily + ":" + cardQualifier + ",");
        for (String s : getLocalityGroups(table).keySet()) {
            cardBuilder.append(s).append(":").append(cardQualifier).append(',');
        }
        // Drop the trailing comma from the column list
        cardBuilder.deleteCharAt(cardBuilder.length() - 1);

        // Configuration rows for the Min/Max combiners
        String firstRowColumn = rowsFamily + ":" + new String(METRICS_TABLE_FIRST_ROW_CQ.array());
        String lastRowColumn = rowsFamily + ":" + new String(METRICS_TABLE_LAST_ROW_CQ.array());

        // Summing combiner for cardinality columns
        IteratorSetting s1 = new IteratorSetting(1, SummingCombiner.class, ImmutableMap.of("columns", cardBuilder.toString(), "type", "STRING"));

        // Min/Max combiner for the first/last rows of the table
        IteratorSetting s2 = new IteratorSetting(2, MinByteArrayCombiner.class, ImmutableMap.of("columns", firstRowColumn));
        IteratorSetting s3 = new IteratorSetting(3, MaxByteArrayCombiner.class, ImmutableMap.of("columns", lastRowColumn));

        return ImmutableList.of(s1, s2, s3);
    }

    /**
     * Gets the column family of the index table based on the given column family and qualifier,
     * i.e. {@code columnFamily + '_' + columnQualifier}.
     *
     * @param columnFamily Presto column family
     * @param columnQualifier Presto column qualifier
     * @return ByteBuffer of the given index column family
     */
    public static ByteBuffer getIndexColumnFamily(byte[] columnFamily, byte[] columnQualifier)
    {
        return wrap(ArrayUtils.addAll(ArrayUtils.add(columnFamily, UNDERSCORE), columnQualifier));
    }

    /**
     * Gets a set of locality groups that should be added to the index table (not the metrics table).
* * @param table Table for the locality groups, see AccumuloClient#getTable * @return Mapping of locality group to column families in the locality group, 1:1 mapping in * this case */ public static Map<String, Set<Text>> getLocalityGroups(AccumuloTable table) { Map<String, Set<Text>> groups = new HashMap<>(); // For each indexed column for (AccumuloColumnHandle columnHandle : table.getColumns().stream().filter(AccumuloColumnHandle::isIndexed).collect(Collectors.toList())) { // Create a Text version of the index column family Text indexColumnFamily = new Text(getIndexColumnFamily(columnHandle.getFamily().get().getBytes(UTF_8), columnHandle.getQualifier().get().getBytes(UTF_8)).array()); // Add this to the locality groups, // it is a 1:1 mapping of locality group to column families groups.put(indexColumnFamily.toString(), ImmutableSet.of(indexColumnFamily)); } return groups; } /** * Gets the fully-qualified index table name for the given table. * * @param schema Schema name * @param table Table name * @return Qualified index table name */ public static String getIndexTableName(String schema, String table) { return schema.equals("default") ? table + "_idx" : schema + '.' + table + "_idx"; } /** * Gets the fully-qualified index table name for the given table. * * @param tableName Schema table name * @return Qualified index table name */ public static String getIndexTableName(SchemaTableName tableName) { return getIndexTableName(tableName.getSchemaName(), tableName.getTableName()); } /** * Gets the fully-qualified index metrics table name for the given table. * * @param schema Schema name * @param table Table name * @return Qualified index metrics table name */ public static String getMetricsTableName(String schema, String table) { return schema.equals("default") ? table + "_idx_metrics" : schema + '.' + table + "_idx_metrics"; } /** * Gets the fully-qualified index metrics table name for the given table. 
* * @param tableName Schema table name * @return Qualified index metrics table name */ public static String getMetricsTableName(SchemaTableName tableName) { return getMetricsTableName(tableName.getSchemaName(), tableName.getTableName()); } public static Pair<byte[], byte[]> getMinMaxRowIds(Connector connector, AccumuloTable table, Authorizations auths) throws TableNotFoundException { Scanner scanner = connector.createScanner(table.getMetricsTableName(), auths); scanner.setRange(new Range(new Text(Indexer.METRICS_TABLE_ROW_ID.array()))); Text family = new Text(Indexer.METRICS_TABLE_ROWS_CF.array()); Text firstRowQualifier = new Text(Indexer.METRICS_TABLE_FIRST_ROW_CQ.array()); Text lastRowQualifier = new Text(Indexer.METRICS_TABLE_LAST_ROW_CQ.array()); scanner.fetchColumn(family, firstRowQualifier); scanner.fetchColumn(family, lastRowQualifier); byte[] firstRow = null; byte[] lastRow = null; for (Entry<Key, Value> entry : scanner) { if (entry.getKey().compareColumnQualifier(firstRowQualifier) == 0) { firstRow = entry.getValue().get(); } if (entry.getKey().compareColumnQualifier(lastRowQualifier) == 0) { lastRow = entry.getValue().get(); } } scanner.close(); return Pair.of(firstRow, lastRow); } /** * Class containing the key for aggregating the local metrics counter. 
*/ private static class MetricsKey { private static final ColumnVisibility EMPTY_VISIBILITY = new ColumnVisibility(); public final ByteBuffer row; public final ByteBuffer family; public final ColumnVisibility visibility; public MetricsKey(ByteBuffer row, ByteBuffer family) { requireNonNull(row, "row is null"); requireNonNull(family, "family is null"); this.row = row; this.family = family; this.visibility = EMPTY_VISIBILITY; } public MetricsKey(ByteBuffer row, ByteBuffer family, ColumnVisibility visibility) { requireNonNull(row, "row is null"); requireNonNull(family, "family is null"); requireNonNull(visibility, "visibility is null"); this.row = row; this.family = family; this.visibility = visibility.getExpression() != null ? visibility : EMPTY_VISIBILITY; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if ((obj == null) || (getClass() != obj.getClass())) { return false; } MetricsKey other = (MetricsKey) obj; return Objects.equals(this.row, other.row) && Objects.equals(this.family, other.family) && Objects.equals(this.visibility, other.visibility); } @Override public int hashCode() { return Objects.hash(row, family, visibility); } @Override public String toString() { return toStringHelper(this) .add("row", new String(row.array(), UTF_8)) .add("family", new String(row.array(), UTF_8)) .add("visibility", visibility.toString()) .toString(); } } }
/* * Copyright 2017 RedRoma, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.aroma.data.cassandra; import java.util.*; import java.util.function.Function; import javax.inject.Inject; import com.datastax.driver.core.*; import com.datastax.driver.core.querybuilder.Insert; import com.datastax.driver.core.querybuilder.QueryBuilder; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import tech.aroma.data.ActivityRepository; import tech.aroma.data.cassandra.Tables.Activity; import tech.aroma.thrift.LengthOfTime; import tech.aroma.thrift.User; import tech.aroma.thrift.events.Event; import tech.aroma.thrift.exceptions.*; import tech.aroma.thrift.functions.TimeFunctions; import tech.sirwellington.alchemy.thrift.ThriftObjects; import static com.datastax.driver.core.querybuilder.QueryBuilder.eq; import static com.datastax.driver.core.querybuilder.QueryBuilder.ttl; import static java.util.stream.Collectors.toList; import static tech.aroma.data.assertions.RequestAssertions.validUserId; import static tech.sirwellington.alchemy.arguments.Arguments.*; import static tech.sirwellington.alchemy.arguments.assertions.Assertions.notNull; import static tech.sirwellington.alchemy.arguments.assertions.BooleanAssertions.trueStatement; import static tech.sirwellington.alchemy.arguments.assertions.NumberAssertions.greaterThan; import static tech.sirwellington.alchemy.arguments.assertions.StringAssertions.*; /** * * @author SirWellington */ 
final class CassandraActivityRepository implements ActivityRepository
{
    private final static Logger LOG = LoggerFactory.getLogger(CassandraActivityRepository.class);

    // Cassandra session used for all statement execution
    private final Session session;
    // Maps a Cassandra row to a thrift Event; may return null for unmappable rows
    private final Function<Row, Event> eventMapper;

    @Inject
    CassandraActivityRepository(Session session, Function<Row, Event> eventMapper)
    {
        checkThat(session, eventMapper)
                .are(notNull());

        this.session = session;
        this.eventMapper = eventMapper;
    }

    /**
     * Persists the event for the given user with a TTL derived from {@code lifetime}.
     */
    @Override
    public void saveEvent(Event event, User forUser, LengthOfTime lifetime) throws TException
    {
        checkEvent(event);
        checkUser(forUser);
        checkLifetime(lifetime);

        User user = forUser;
        Statement insertStatement = createStatementToSaveEventForUser(event, user, lifetime);

        tryToExecute(insertStatement, "saveEvent");
    }

    /**
     * Returns true if an event with the given ID exists for the user,
     * determined by a COUNT query on the (user, event) primary key.
     */
    @Override
    public boolean containsEvent(String eventId, User user) throws TException
    {
        checkEventId(eventId);
        checkUser(user);

        Statement query = createQueryToCheckIfEventExists(eventId, user);

        ResultSet results = tryToExecute(query, "containsEvent");
        Row row = results.one();

        checkThat(row)
                .throwing(OperationFailedException.class)
                .usingMessage("Failed to query for event with ID " + eventId)
                .is(notNull());

        // First column of the COUNT query is the number of matching rows
        return row.getLong(0) > 0L;
    }

    /**
     * Loads one event by ID for the user.
     *
     * @throws DoesNotExistException If no such event exists
     */
    @Override
    public Event getEvent(String eventId, User user) throws TException
    {
        checkEventId(eventId);
        checkUser(user);

        Statement query = createQueryToGetEvent(eventId, user);
        ResultSet results = tryToExecute(query, "getEvent");
        Row row = results.one();
        checkThat(row)
                .throwing(DoesNotExistException.class)
                .usingMessage("No such event with ID " + eventId + " for user " + user)
                .is(notNull());

        return mapRowToEvent(row);
    }

    /**
     * Loads all events for the user; rows the mapper cannot convert (null) are dropped.
     */
    @Override
    public List<Event> getAllEventsFor(User user) throws TException
    {
        checkUser(user);

        Statement query = createQueryToGetAllEventsForUser(user);
        ResultSet results = tryToExecute(query, "getAllEvents");

        // NOTE(review): parallelStream over a freshly materialized list is likely
        // unnecessary overhead for typical result sizes — consider stream()
        return results.all().parallelStream()
                .map(eventMapper::apply)
                .filter(Objects::nonNull)
                .collect(toList());
    }

    /**
     * Deletes a single event for the user. Deleting a non-existent event is a no-op.
     */
    @Override
    public void deleteEvent(String eventId, User user) throws TException
    {
        checkEventId(eventId);
        checkUser(user);

        Statement deleteStatement = createStatementToDelete(eventId, user);
        tryToExecute(deleteStatement, "deleteEvent");
    }

    /**
     * Deletes every event stored for the user.
     */
    @Override
    public void deleteAllEventsFor(User user) throws TException
    {
        checkUser(user);

        Statement deleteStatement = createStatementToDeleteAllEventsFor(user);
        tryToExecute(deleteStatement, "deleteAllEvents");
    }

    // Validates that the user is non-null and carries a well-formed user ID
    private void checkUser(User user) throws InvalidArgumentException
    {
        checkThat(user)
                .usingMessage("user cannot be null")
                .throwing(InvalidArgumentException.class)
                .is(notNull());

        checkThat(user.userId)
                .throwing(InvalidArgumentException.class)
                .is(validUserId());
    }

    // Validates event non-null, eventId is a UUID, and the event type union is set.
    // NOTE(review): event.eventType.isSet() would NPE if eventType itself is null — confirm
    // callers always populate it, or add a null check
    private void checkEvent(Event event) throws InvalidArgumentException
    {
        checkThat(event)
                .throwing(InvalidArgumentException.class)
                .usingMessage("Event cannot be null")
                .is(notNull());

        checkThat(event.eventId)
                .usingMessage("eventId must be a valid UUID")
                .throwing(InvalidArgumentException.class)
                .is(validUUID());

        checkThat(event.eventType.isSet())
                .throwing(InvalidArgumentException.class)
                .usingMessage("EventType must be set")
                .is(trueStatement());
    }

    /**
     * Builds the INSERT for the event. Optional columns (app ID, actor ID, timestamp)
     * are only written when the corresponding thrift field is set. The statement
     * carries a TTL so activity entries expire after {@code lifetime}.
     */
    private Statement createStatementToSaveEventForUser(Event event, User user, LengthOfTime lifetime) throws TException
    {
        UUID eventId = UUID.fromString(event.eventId);
        UUID userId = UUID.fromString(user.userId);

        // Events are stored as their JSON-serialized thrift form
        String serializedEvent = ThriftObjects.toJson(event);

        Insert statement = QueryBuilder
                .insertInto(Activity.TABLE_NAME)
                .value(Activity.USER_ID, userId)
                .value(Activity.EVENT_ID, eventId)
                .value(Activity.SERIALIZED_EVENT, serializedEvent);

        UUID appId;
        UUID actorId;
        Date timeOfEvent;

        if (event.isSetApplicationId())
        {
            appId = UUID.fromString(event.applicationId);
            statement = statement.value(Activity.APP_ID, appId);
        }

        if (event.isSetUserIdOfActor())
        {
            actorId = UUID.fromString(event.userIdOfActor);
            statement = statement.value(Activity.ACTOR_ID, actorId);
        }

        if (event.isSetTimestamp())
        {
            timeOfEvent = new Date(event.timestamp);
            statement = statement.value(Activity.TIME_OF_EVENT, timeOfEvent);
        }

        int ttl = (int) TimeFunctions.toSeconds(lifetime);
        return statement.using(ttl(ttl));
    }

    // Executes the statement, translating any driver failure into OperationFailedException.
    // The original exception is logged; only its message is propagated.
    private ResultSet tryToExecute(Statement statement, String operationName) throws OperationFailedException
    {
        try
        {
            return session.execute(statement);
        }
        catch (Exception ex)
        {
            LOG.error("Failed to execute Cassandra Statement: {}", operationName, ex);
            throw new OperationFailedException("Could not perform operation: " + ex.getMessage());
        }
    }

    // Validates the event ID is a non-empty, well-formed UUID string
    private void checkEventId(String eventId) throws InvalidArgumentException
    {
        checkThat(eventId)
                .throwing(InvalidArgumentException.class)
                .usingMessage("eventId missing")
                .is(nonEmptyString())
                .usingMessage("eventId must be a valid uuid")
                .is(validUUID());
    }

    // SELECT COUNT(*) keyed by (user, event)
    private Statement createQueryToCheckIfEventExists(String eventId, User user)
    {
        UUID eventUuid = UUID.fromString(eventId);
        UUID userUuid = UUID.fromString(user.userId);

        return QueryBuilder
                .select()
                .countAll()
                .from(Activity.TABLE_NAME)
                .where(eq(Activity.USER_ID, userUuid))
                .and(eq(Activity.EVENT_ID, eventUuid));
    }

    // SELECT * keyed by (user, event)
    private Statement createQueryToGetEvent(String eventId, User user)
    {
        UUID eventUuid = UUID.fromString(eventId);
        UUID userUuid = UUID.fromString(user.userId);

        return QueryBuilder
                .select()
                .all()
                .from(Activity.TABLE_NAME)
                .where(eq(Activity.USER_ID, userUuid))
                .and(eq(Activity.EVENT_ID, eventUuid));
    }

    // SELECT * for all of a user's events
    private Statement createQueryToGetAllEventsForUser(User user)
    {
        UUID userUuid = UUID.fromString(user.userId);

        return QueryBuilder
                .select()
                .all()
                .from(Activity.TABLE_NAME)
                .where(eq(Activity.USER_ID, userUuid));
    }

    // DELETE of a single (user, event) row
    private Statement createStatementToDelete(String eventId, User user)
    {
        UUID eventUuid = UUID.fromString(eventId);
        UUID userUuid = UUID.fromString(user.userId);

        return QueryBuilder
                .delete()
                .all()
                .from(Activity.TABLE_NAME)
                .where(eq(Activity.USER_ID, userUuid))
                .and(eq(Activity.EVENT_ID, eventUuid));
    }

    // DELETE of all rows belonging to the user
    private Statement createStatementToDeleteAllEventsFor(User user)
    {
        UUID userUuid = UUID.fromString(user.userId);

        return QueryBuilder
                .delete()
                .all()
                .from(Activity.TABLE_NAME)
                .where(eq(Activity.USER_ID, userUuid));
    }

    // Maps a row to an Event; a null row yields an empty Event (callers in this class
    // already guard against null rows before reaching here), and a mapper result of
    // null is reported as DoesNotExistException
    private Event mapRowToEvent(Row row) throws DoesNotExistException
    {
        if (row == null)
        {
            return new Event();
        }

        Event event = eventMapper.apply(row);
        checkThat(event)
                .usingMessage("event does not exist")
                .throwing(DoesNotExistException.class)
                .is(notNull());

        return event;
    }

    // Validates the lifetime is present with a strictly positive duration
    private void checkLifetime(LengthOfTime lifetime) throws InvalidArgumentException
    {
        checkThat(lifetime)
                .throwing(InvalidArgumentException.class)
                .usingMessage("lifetime missing")
                .is(notNull());

        checkThat(lifetime.value)
                .throwing(InvalidArgumentException.class)
                .usingMessage("Lifetime duration must be > 0")
                .is(greaterThan(0L));
    }
}
//========================================================================
//Copyright 2007-2011 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================

package io.protostuff.runtime;

import java.io.IOException;
import java.util.Collection;
import java.util.EnumSet;

import io.protostuff.CollectionSchema.MessageFactory;
import io.protostuff.GraphInput;
import io.protostuff.Input;
import io.protostuff.Message;
import io.protostuff.Morph;
import io.protostuff.Output;
import io.protostuff.Pipe;
import io.protostuff.Schema;
import io.protostuff.Tag;
import io.protostuff.WireFormat.FieldType;

/**
 * Static utility for creating runtime repeated (list/collection) fields.
 *
 * @author David Yu
 * @created Jan 23, 2011
 */
final class RuntimeRepeatedFieldFactory
{

    // Utility class: not instantiable
    private RuntimeRepeatedFieldFactory()
    {
    }

    /**
     * For lazy initialization called by {@link RuntimeFieldFactory}.
     */
    static RuntimeFieldFactory<Collection<?>> getFactory()
    {
        return REPEATED;
    }

    /**
     * Creates a repeated field whose element type is an "inline" (scalar-like) type
     * handled by the given delegate. Each element is written as its own repeated entry;
     * on merge, elements are appended to the existing collection or a new one from
     * {@code messageFactory}.
     */
    private static <T> Field<T> createCollectionInlineV(int number,
            String name, final java.lang.reflect.Field f,
            final MessageFactory messageFactory, final Delegate<Object> inline)
    {
        return new Field<T>(inline.getFieldType(), number, name, true,
                f.getAnnotation(Tag.class))
        {
            {
                // allow reflective access to the (possibly private) field
                f.setAccessible(true);
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void mergeFrom(Input input, T message) throws IOException
            {
                final Object value = inline.readFrom(input);
                try
                {
                    final Collection<Object> existing = (Collection<Object>) f
                            .get(message);
                    if (existing == null)
                    {
                        // first element: create the collection lazily
                        final Collection<Object> collection = messageFactory
                                .newMessage();
                        collection.add(value);
                        f.set(message, collection);
                    }
                    else
                        existing.add(value);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void writeTo(Output output, T message) throws IOException
            {
                final Collection<Object> collection;
                try
                {
                    collection = (Collection<Object>) f.get(message);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }

                if (collection != null && !collection.isEmpty())
                {
                    // null elements are skipped (not serializable on the wire)
                    for (Object o : collection)
                    {
                        if (o != null)
                            inline.writeTo(output, number, o, true);
                    }
                }
            }

            @Override
            protected void transfer(Pipe pipe, Input input, Output output,
                    boolean repeated) throws IOException
            {
                inline.transfer(pipe, input, output, number, repeated);
            }
        };
    }

    /**
     * Creates a repeated field whose element type is an enum, serialized through the
     * strategy's {@link EnumIO} for that enum class.
     */
    private static <T> Field<T> createCollectionEnumV(int number, String name,
            final java.lang.reflect.Field f,
            final MessageFactory messageFactory, final Class<Object> genericType,
            IdStrategy strategy)
    {
        final EnumIO<?> eio = strategy.getEnumIO(genericType);
        return new Field<T>(FieldType.ENUM, number, name, true,
                f.getAnnotation(Tag.class))
        {
            {
                f.setAccessible(true);
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void mergeFrom(Input input, T message) throws IOException
            {
                final Enum<?> value = eio.readFrom(input);
                try
                {
                    final Collection<Enum<?>> existing = (Collection<Enum<?>>) f
                            .get(message);
                    if (existing == null)
                    {
                        // first element: create the collection lazily
                        final Collection<Enum<?>> collection = messageFactory
                                .newMessage();
                        collection.add(value);
                        f.set(message, collection);
                    }
                    else
                        existing.add(value);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void writeTo(Output output, T message) throws IOException
            {
                final Collection<Enum<?>> collection;
                try
                {
                    collection = (Collection<Enum<?>>) f.get(message);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }

                if (collection != null && !collection.isEmpty())
                {
                    for (Enum<?> en : collection)
                        eio.writeTo(output, number, true, en);
                }
            }

            @Override
            protected void transfer(Pipe pipe, Input input, Output output,
                    boolean repeated) throws IOException
            {
                EnumIO.transfer(pipe, input, output, number, repeated);
            }
        };
    }

    /**
     * Creates a repeated field whose element type is a concrete POJO with a known
     * schema (resolved through the strategy's schema wrapper).
     */
    private static <T> Field<T> createCollectionPojoV(int number, String name,
            final java.lang.reflect.Field f,
            final MessageFactory messageFactory, final Class<Object> genericType,
            IdStrategy strategy)
    {
        return new RuntimeMessageField<T, Object>(genericType,
                strategy.getSchemaWrapper(genericType, true), FieldType.MESSAGE,
                number, name, true, f.getAnnotation(Tag.class))
        {
            {
                f.setAccessible(true);
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void mergeFrom(Input input, T message) throws IOException
            {
                final Object value = input.mergeObject(null, getSchema());
                try
                {
                    final Collection<Object> existing = (Collection<Object>) f
                            .get(message);
                    if (existing == null)
                    {
                        // first element: create the collection lazily
                        final Collection<Object> collection = messageFactory
                                .newMessage();
                        collection.add(value);
                        f.set(message, collection);
                    }
                    else
                        existing.add(value);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void writeTo(Output output, T message) throws IOException
            {
                final Collection<Object> collection;
                try
                {
                    collection = (Collection<Object>) f.get(message);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }

                if (collection != null && !collection.isEmpty())
                {
                    final Schema<Object> schema = getSchema();
                    for (Object o : collection)
                    {
                        if (o != null)
                            output.writeObject(number, o, schema, true);
                    }
                }
            }

            @Override
            protected void transfer(Pipe pipe, Input input, Output output,
                    boolean repeated) throws IOException
            {
                output.writeObject(number, pipe, getPipeSchema(), repeated);
            }
        };
    }

    /**
     * Creates a repeated field whose element type is polymorphic (an abstract class or
     * a base class with subtypes). Elements are resolved via a derivative schema;
     * {@code doMergeFrom} performs the actual per-subtype merge.
     */
    private static <T> Field<T> createCollectionPolymorphicV(int number,
            String name, final java.lang.reflect.Field f,
            final MessageFactory messageFactory, final Class<Object> genericType,
            IdStrategy strategy)
    {
        return new RuntimeDerivativeField<T>(genericType, FieldType.MESSAGE,
                number, name, true, f.getAnnotation(Tag.class), strategy)
        {
            {
                f.setAccessible(true);
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void mergeFrom(Input input, T message) throws IOException
            {
                final Object value = input.mergeObject(message, schema);
                if (input instanceof GraphInput
                        && ((GraphInput) input).isCurrentMessageReference())
                {
                    // a reference from polymorphic+cyclic graph deser
                    try
                    {
                        final Collection<Object> existing = (Collection<Object>) f
                                .get(message);
                        if (existing == null)
                        {
                            final Collection<Object> collection = messageFactory
                                    .newMessage();
                            collection.add(value);
                            f.set(message, collection);
                        }
                        else
                            existing.add(value);
                    }
                    catch (IllegalArgumentException | IllegalAccessException e)
                    {
                        throw new RuntimeException(e);
                    }
                }
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void writeTo(Output output, T message) throws IOException
            {
                final Collection<Object> existing;
                try
                {
                    existing = (Collection<Object>) f.get(message);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }

                if (existing != null && !existing.isEmpty())
                {
                    for (Object o : existing)
                    {
                        if (o != null)
                            output.writeObject(number, o, schema, true);
                    }
                }
            }

            @Override
            protected void transfer(Pipe pipe, Input input, Output output,
                    boolean repeated) throws IOException
            {
                output.writeObject(number, pipe, schema.pipeSchema, repeated);
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void doMergeFrom(Input input, Schema<Object> schema,
                    Object message) throws IOException
            {
                final Object value = schema.newMessage();
                if (input instanceof GraphInput)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(value, message);
                }

                schema.mergeFrom(input, value);
                try
                {
                    final Collection<Object> existing = (Collection<Object>) f
                            .get(message);
                    if (existing == null)
                    {
                        final Collection<Object> collection = messageFactory
                                .newMessage();
                        collection.add(value);
                        f.set(message, collection);
                    }
                    else
                        existing.add(value);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }
            }
        };
    }

    /**
     * Creates a repeated field whose element type is dynamic (e.g. Object or an
     * interface), dispatched at runtime through a polymorphic schema from the
     * given factory. {@code setValue} is the schema's callback for delivering a
     * deserialized element.
     */
    private static <T> Field<T> createCollectionObjectV(int number,
            String name, final java.lang.reflect.Field f,
            final MessageFactory messageFactory, Class<Object> genericType,
            PolymorphicSchema.Factory factory, IdStrategy strategy)
    {
        return new RuntimeObjectField<T>(genericType, FieldType.MESSAGE, number,
                name, true, f.getAnnotation(Tag.class), factory, strategy)
        {
            {
                f.setAccessible(true);
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void mergeFrom(Input input, T message) throws IOException
            {
                final Object value = input.mergeObject(message, schema);
                if (input instanceof GraphInput
                        && ((GraphInput) input).isCurrentMessageReference())
                {
                    // a reference from polymorphic+cyclic graph deser
                    try
                    {
                        final Collection<Object> existing = (Collection<Object>) f
                                .get(message);
                        if (existing == null)
                        {
                            final Collection<Object> collection = messageFactory
                                    .newMessage();
                            collection.add(value);
                            f.set(message, collection);
                        }
                        else
                            existing.add(value);
                    }
                    catch (IllegalArgumentException | IllegalAccessException e)
                    {
                        throw new RuntimeException(e);
                    }
                }
            }

            @Override
            @SuppressWarnings("unchecked")
            protected void writeTo(Output output, T message) throws IOException
            {
                final Collection<Object> existing;
                try
                {
                    existing = (Collection<Object>) f.get(message);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }

                if (existing != null && !existing.isEmpty())
                {
                    for (Object o : existing)
                    {
                        if (o != null)
                            output.writeObject(number, o, schema, true);
                    }
                }
            }

            @Override
            protected void transfer(Pipe pipe, Input input, Output output,
                    boolean repeated) throws IOException
            {
                output.writeObject(number, pipe, schema.getPipeSchema(),
                        repeated);
            }

            @Override
            @SuppressWarnings("unchecked")
            public void setValue(Object value, Object message)
            {
                try
                {
                    final Collection<Object> existing = (Collection<Object>) f
                            .get(message);
                    if (existing == null)
                    {
                        final Collection<Object> collection = messageFactory
                                .newMessage();
                        collection.add(value);
                        f.set(message, collection);
                    }
                    else
                        existing.add(value);
                }
                catch (IllegalArgumentException | IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }
            }
        };
    }

    // Singleton factory: inspects a reflective field and dispatches to the
    // appropriate createCollection*V above based on its generic element type.
    private static final RuntimeFieldFactory<Collection<?>> REPEATED = new RuntimeFieldFactory<Collection<?>>(
            RuntimeFieldFactory.ID_COLLECTION)
    {

        @Override
        @SuppressWarnings("unchecked")
        public <T> Field<T> create(int number, String name,
                final java.lang.reflect.Field f, IdStrategy strategy)
        {
            if (null != f.getAnnotation(Morph.class))
            {
                // can be used to override the configured system property:
                // RuntimeEnv.COLLECTION_SCHEMA_ON_REPEATED_FIELDS
                // In this context, Morph annotation will force using a
                // collection
                // schema only for this particular field.
                return RuntimeCollectionFieldFactory.getFactory().create(
                        number, name, f, strategy);
            }

            if (EnumSet.class.isAssignableFrom(f.getType()))
            {
                final Class<Object> enumType = (Class<Object>) getGenericType(
                        f, 0);
                if (enumType == null)
                {
                    // still handle the serialization of EnumSets even without
                    // generics
                    return RuntimeFieldFactory.OBJECT.create(number, name, f,
                            strategy);
                }

                return createCollectionEnumV(number, name, f, strategy
                        .getEnumIO(enumType).getEnumSetFactory(), enumType,
                        strategy);
            }

            final MessageFactory messageFactory = strategy
                    .getCollectionFactory(f.getType());

            final Class<Object> genericType = (Class<Object>) getGenericType(f,
                    0);
            if (genericType == null)
            {
                // the value is not a simple parameterized type.
                return createCollectionObjectV(number, name, f, messageFactory,
                        genericType, PolymorphicSchemaFactories.OBJECT,
                        strategy);
            }

            // scalar-like element types first
            final Delegate<Object> inline = getDelegateOrInline(genericType,
                    strategy);
            if (inline != null)
                return createCollectionInlineV(number, name, f, messageFactory,
                        inline);

            if (Message.class.isAssignableFrom(genericType))
                return createCollectionPojoV(number, name, f, messageFactory,
                        genericType, strategy);

            if (genericType.isEnum())
                return createCollectionEnumV(number, name, f, messageFactory,
                        genericType, strategy);

            final PolymorphicSchema.Factory factory = PolymorphicSchemaFactories
                    .getFactoryFromRepeatedValueGenericType(genericType);
            if (factory != null)
            {
                return createCollectionObjectV(number, name, f, messageFactory,
                        genericType, factory, strategy);
            }

            if (pojo(genericType, f.getAnnotation(Morph.class), strategy))
                return createCollectionPojoV(number, name, f, messageFactory,
                        genericType, strategy);

            if (genericType.isInterface())
            {
                return createCollectionObjectV(number, name, f, messageFactory,
                        genericType, PolymorphicSchemaFactories.OBJECT,
                        strategy);
            }

            // fallback: element type is a concrete class with possible subtypes
            return createCollectionPolymorphicV(number, name, f,
                    messageFactory, genericType, strategy);
        }

        // The factory itself is only a dispatcher; it never directly
        // serializes values, so the Delegate-style methods are unsupported.
        @Override
        public void transfer(Pipe pipe, Input input, Output output, int number,
                boolean repeated) throws IOException
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public Collection<?> readFrom(Input input) throws IOException
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public void writeTo(Output output, int number, Collection<?> value,
                boolean repeated) throws IOException
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public FieldType getFieldType()
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public Class<?> typeClass()
        {
            throw new UnsupportedOperationException();
        }
    };

}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.transitions.everywhere; import android.animation.Animator; import android.animation.TimeInterpolator; import android.annotation.TargetApi; import android.content.Context; import android.content.res.TypedArray; import android.os.Build; import android.transitions.everywhere.utils.ViewUtils; import android.util.AttributeSet; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.view.animation.AccelerateInterpolator; import android.view.animation.DecelerateInterpolator; /** * This transition tracks changes to the visibility of target views in the * start and end scenes and moves views in or out from one of the edges of the * scene. Visibility is determined by both the * {@link View#setVisibility(int)} state of the view as well as whether it * is parented in the current view hierarchy. Disappearing Views are * limited as described in {@link Visibility#onDisappear(android.view.ViewGroup, * TransitionValues, int, TransitionValues, int)}. 
 */
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public class Slide extends Visibility {

    protected static final TimeInterpolator sDecelerate = new DecelerateInterpolator();
    protected static final TimeInterpolator sAccelerate = new AccelerateInterpolator();

    // Strategy that computes the off-screen ("gone") position; defaults to sliding from the bottom.
    protected CalculateSlide mSlideCalculator = sCalculateBottom;
    private int mSlideEdge = Gravity.BOTTOM;

    /** Computes where a view is translated to when it is outside the scene. */
    protected interface CalculateSlide {

        /** Returns the x translation value for view when it goes out of the scene */
        float getGoneX(ViewGroup sceneRoot, View view);

        /** Returns the y translation value for view when it goes out of the scene */
        float getGoneY(ViewGroup sceneRoot, View view);
    }

    /** Base for horizontal slides: the y translation is left unchanged. */
    protected static abstract class CalculateSlideHorizontal implements CalculateSlide {

        @Override
        public float getGoneY(ViewGroup sceneRoot, View view) {
            return view.getTranslationY();
        }
    }

    /** Base for vertical slides: the x translation is left unchanged. */
    protected static abstract class CalculateSlideVertical implements CalculateSlide {

        @Override
        public float getGoneX(ViewGroup sceneRoot, View view) {
            return view.getTranslationX();
        }
    }

    // Gone position is one scene-root width to the left.
    private static final CalculateSlide sCalculateLeft = new CalculateSlideHorizontal() {
        @Override
        public float getGoneX(ViewGroup sceneRoot, View view) {
            return view.getTranslationX() - sceneRoot.getWidth();
        }
    };

    // START resolves to left in LTR and right in RTL layouts.
    private static final CalculateSlide sCalculateStart = new CalculateSlideHorizontal() {
        @Override
        public float getGoneX(ViewGroup sceneRoot, View view) {
            final boolean isRtl = ViewUtils.isRtl(sceneRoot);
            final float x;
            if (isRtl) {
                x = view.getTranslationX() + sceneRoot.getWidth();
            } else {
                x = view.getTranslationX() - sceneRoot.getWidth();
            }
            return x;
        }
    };

    // Gone position is one scene-root height above.
    private static final CalculateSlide sCalculateTop = new CalculateSlideVertical() {
        @Override
        public float getGoneY(ViewGroup sceneRoot, View view) {
            return view.getTranslationY() - sceneRoot.getHeight();
        }
    };

    // Gone position is one scene-root width to the right.
    private static final CalculateSlide sCalculateRight = new CalculateSlideHorizontal() {
        @Override
        public float getGoneX(ViewGroup sceneRoot, View view) {
            return view.getTranslationX() + sceneRoot.getWidth();
        }
    };

    // END resolves to right in LTR and left in RTL layouts.
    private static final CalculateSlide sCalculateEnd = new CalculateSlideHorizontal() {
        @Override
        public float getGoneX(ViewGroup sceneRoot, View view) {
            final boolean isRtl = ViewUtils.isRtl(sceneRoot);
            final float x;
            if (isRtl) {
                x = view.getTranslationX() - sceneRoot.getWidth();
            } else {
                x = view.getTranslationX() + sceneRoot.getWidth();
            }
            return x;
        }
    };

    // Gone position is one scene-root height below.
    private static final CalculateSlide sCalculateBottom = new CalculateSlideVertical() {
        @Override
        public float getGoneY(ViewGroup sceneRoot, View view) {
            return view.getTranslationY() + sceneRoot.getHeight();
        }
    };

    /**
     * Constructor using the default {@link Gravity#BOTTOM}
     * slide edge direction.
     */
    public Slide() {
        setSlideEdge(Gravity.BOTTOM);
    }

    /**
     * Constructor using the provided slide edge direction.
     */
    public Slide(int slideEdge) {
        setSlideEdge(slideEdge);
    }

    /**
     * Constructor used when the transition is inflated from XML; reads the
     * {@code slideEdge} styled attribute (defaulting to {@link Gravity#BOTTOM}).
     */
    public Slide(Context context, AttributeSet attrs) {
        super(context, attrs);
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.Slide);
        int edge = a.getInt(R.styleable.Slide_slideEdge, Gravity.BOTTOM);
        a.recycle();
        setSlideEdge(edge);
    }

    /**
     * Change the edge that Views appear and disappear from.
     *
     * @param slideEdge The edge of the scene to use for Views appearing and disappearing. One of
     *                  {@link android.view.Gravity#LEFT}, {@link android.view.Gravity#TOP},
     *                  {@link android.view.Gravity#RIGHT}, {@link android.view.Gravity#BOTTOM},
     *                  {@link android.view.Gravity#START}, {@link android.view.Gravity#END}.
     * @attr ref android.R.styleable#Slide_slideEdge
     */
    public void setSlideEdge(int slideEdge) {
        switch (slideEdge) {
            case Gravity.LEFT:
                mSlideCalculator = sCalculateLeft;
                break;
            case Gravity.TOP:
                mSlideCalculator = sCalculateTop;
                break;
            case Gravity.RIGHT:
                mSlideCalculator = sCalculateRight;
                break;
            case Gravity.BOTTOM:
                mSlideCalculator = sCalculateBottom;
                break;
            case Gravity.START:
                mSlideCalculator = sCalculateStart;
                break;
            case Gravity.END:
                mSlideCalculator = sCalculateEnd;
                break;
            default:
                throw new IllegalArgumentException("Invalid slide direction");
        }
        mSlideEdge = slideEdge;
        // Propagate the transition outward from the chosen edge.
        SidePropagation propagation = new SidePropagation();
        propagation.setSide(slideEdge);
        setPropagation(propagation);
    }

    /**
     * Returns the edge that Views appear and disappear from.
     *
     * @return the edge of the scene to use for Views appearing and disappearing. One of
     *         {@link android.view.Gravity#LEFT}, {@link android.view.Gravity#TOP},
     *         {@link android.view.Gravity#RIGHT}, {@link android.view.Gravity#BOTTOM},
     *         {@link android.view.Gravity#START}, {@link android.view.Gravity#END}.
     * @attr ref android.R.styleable#Slide_slideEdge
     */
    public int getSlideEdge() {
        return mSlideEdge;
    }

    // Animates the view from its off-screen position to its in-scene translation,
    // decelerating as it arrives.
    @Override
    public Animator onAppear(ViewGroup sceneRoot, View view, TransitionValues startValues,
            TransitionValues endValues) {
        if (endValues == null) {
            return null;
        }
        int[] position = (int[]) endValues.values.get(PROPNAME_SCREEN_LOCATION);
        float endX = view.getTranslationX();
        float endY = view.getTranslationY();
        float startX = mSlideCalculator.getGoneX(sceneRoot, view);
        float startY = mSlideCalculator.getGoneY(sceneRoot, view);
        return TranslationAnimationCreator
                .createAnimation(view, endValues, position[0], position[1],
                        startX, startY, endX, endY, sDecelerate, this);
    }

    // Animates the view from its in-scene translation to its off-screen position,
    // accelerating as it leaves.
    @Override
    public Animator onDisappear(ViewGroup sceneRoot, View view, TransitionValues startValues,
            TransitionValues endValues) {
        if (startValues == null) {
            return null;
        }
        int[] position = (int[]) startValues.values.get(PROPNAME_SCREEN_LOCATION);
        float startX = view.getTranslationX();
        float startY = view.getTranslationY();
        float endX = mSlideCalculator.getGoneX(sceneRoot, view);
        float endY = mSlideCalculator.getGoneY(sceneRoot, view);
        return TranslationAnimationCreator
                .createAnimation(view, startValues, position[0], position[1],
                        startX, startY, endX, endY, sAccelerate, this);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.test; import org.apache.calcite.adapter.enumerable.EnumerableRules; import org.apache.calcite.plan.ConventionTraitDef; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.volcano.VolcanoPlanner; import org.apache.calcite.rel.RelCollationTraitDef; import org.apache.calcite.rel.rules.CoreRules; import org.apache.calcite.rel.rules.JoinPushThroughJoinRule; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.junit.jupiter.api.Test; import java.util.List; /** * Unit test for top-down optimization. * * <p>As input, the test supplies a SQL statement and rules; the SQL is * translated into relational algebra and then fed into a * {@link VolcanoPlanner}. The plan before and after "optimization" is * diffed against a .ref file using {@link DiffRepository}. * * <p>Procedure for adding a new test case: * * <ol> * <li>Add a new public test method for your rule, following the existing * examples. You'll have to come up with an SQL statement to which your rule * will apply in a meaningful way. 
See * {@link org.apache.calcite.test.catalog.MockCatalogReaderSimple} class * for details on the schema. * * <li>Run the test. It should fail. Inspect the output in * {@code build/resources/test/.../TopDownOptTest_actual.xml}. * * <li>Verify that the "planBefore" is the correct * translation of your SQL, and that it contains the pattern on which your rule * is supposed to fire. If all is well, replace * {@code src/test/resources/.../TopDownOptTest.xml} and * with the new {@code build/resources/test/.../TopDownOptTest_actual.xml}. * * <li>Run the test again. It should fail again, but this time it should contain * a "planAfter" entry for your rule. Verify that your rule applied its * transformation correctly, and then update the * {@code src/test/resources/.../TopDownOptTest.xml} file again. * * <li>Run the test one last time; this time it should pass. * </ol> */ class TopDownOptTest extends RelOptTestBase { @Test void testValuesTraitRequest() { final String sql = "SELECT * from (values (1, 1), (2, 1), (1, 2), (2, 2))\n" + "as t(a, b) order by b, a"; Query.create(sql).check(); } @Test void testValuesTraitRequestNeg() { final String sql = "SELECT * from (values (1, 1), (2, 1), (3, 2), (2, 2))\n" + "as t(a, b) order by b, a"; Query.create(sql).check(); } @Test void testSortAgg() { final String sql = "select mgr, count(*) from sales.emp\n" + "group by mgr order by mgr desc nulls last limit 5"; Query.create(sql).check(); } @Test void testSortAggPartialKey() { final String sql = "select mgr,deptno,comm,count(*) from sales.emp\n" + "group by mgr,deptno,comm\n" + "order by comm desc nulls last, deptno nulls first"; Query.create(sql).check(); } @Test void testSortMergeJoin() { final String sql = "select * from\n" + "sales.emp r join sales.bonus s on r.ename=s.ename and r.job=s.job\n" + "order by r.job desc nulls last, r.ename nulls first"; Query.create(sql).check(); } @Test void testSortMergeJoinRight() { final String sql = "select * from\n" + "sales.emp r join sales.bonus s 
on r.ename=s.ename and r.job=s.job\n" + "order by s.job desc nulls last, s.ename nulls first"; Query.create(sql).check(); } @Test void testMergeJoinDeriveLeft1() { final String sql = "select * from\n" + "(select ename, job, max(sal) from sales.emp group by ename, job) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } @Test void testMergeJoinDeriveLeft2() { final String sql = "select * from\n" + "(select ename, job, mgr, max(sal) from sales.emp group by ename, job, mgr) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } @Test void testMergeJoinDeriveRight1() { final String sql = "select * from sales.bonus s join\n" + "(select ename, job, max(sal) from sales.emp group by ename, job) r\n" + "on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } @Test void testMergeJoinDeriveRight2() { final String sql = "select * from sales.bonus s join\n" + "(select ename, job, mgr, max(sal) from sales.emp group by ename, job, mgr) r\n" + "on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // Order by left field(s): push down sort to left input. @Test void testCorrelateInnerJoinDeriveLeft() { final String sql = "select * from emp e\n" + "join dept d on e.deptno=d.deptno\n" + "order by e.ename"; Query.create(sql) .addRule(CoreRules.JOIN_TO_CORRELATE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Order by contains right field: sort cannot be pushed down. 
@Test void testCorrelateInnerJoinNoDerive() { final String sql = "select * from emp e\n" + "join dept d on e.deptno=d.deptno\n" + "order by e.ename, d.name"; Query.create(sql) .addRule(CoreRules.JOIN_TO_CORRELATE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Order by left field(s): push down sort to left input. @Test void testCorrelateLeftJoinDeriveLeft() { final String sql = "select * from emp e\n" + "left join dept d on e.deptno=d.deptno\n" + "order by e.ename"; Query.create(sql) .addRule(CoreRules.JOIN_TO_CORRELATE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Order by contains right field: sort cannot be pushed down. @Test void testCorrelateLeftJoinNoDerive() { final String sql = "select * from emp e\n" + "left join dept d on e.deptno=d.deptno\n" + "order by e.ename, d.name"; Query.create(sql) .addRule(CoreRules.JOIN_TO_CORRELATE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Order by left field(s): push down sort to left input. @Test void testCorrelateSemiJoinDeriveLeft() { final String sql = "select * from dept d\n" + "where exists (select 1 from emp e where e.deptno=d.deptno)\n" + "order by d.name"; Query.create(sql) .addRule(CoreRules.JOIN_TO_CORRELATE) .addRule(CoreRules.JOIN_TO_SEMI_JOIN) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test if "order by mgr desc nulls last" can be pushed through the projection ("select mgr"). 
@Test void testSortProject() { final String sql = "select mgr from sales.emp order by mgr desc nulls last"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test that Sort cannot push through projection because of non-trival call // (e.g. RexCall(sal * -1)). In this example, the reason is that "sal * -1" // creates opposite ordering if Sort is pushed down. @Test void testSortProjectOnRexCall() { final String sql = "select ename, sal * -1 as sal, mgr from\n" + "sales.emp order by ename desc, sal desc, mgr desc nulls last"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test that Sort can push through projection when cast is monotonic. @Test void testSortProjectWhenCastLeadingToMonotonic() { final String sql = "select deptno from sales.emp order by cast(deptno as float) desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test that Sort cannot push through projection when cast is not monotonic. @Test void testSortProjectWhenCastLeadingToNonMonotonic() { final String sql = "select deptno from sales.emp order by cast(deptno as varchar) desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // No sort on left join input. @Test void testSortProjectDeriveWhenCastLeadingToMonotonic() { final String sql = "select * from\n" + "(select ename, cast(job as varchar) as job, max_sal + 1 from\n" + "(select ename, job, max(sal) as max_sal from sales.emp group by ename, job) t) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // need sort on left join input. 
@Test void testSortProjectDeriveOnRexCall() { final String sql = "select * from\n" + "(select ename, sal * -1 as sal, max_job from\n" + "(select ename, sal, max(job) as max_job from sales.emp group by ename, sal) t) r\n" + "join sales.bonus s on r.sal=s.sal and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // need sort on left join input. @Test void testSortProjectDeriveWhenCastLeadingToNonMonotonic() { final String sql = "select * from\n" + "(select ename, cast(job as numeric) as job, max_sal + 1 from\n" + "(select ename, job, max(sal) as max_sal from sales.emp group by ename, job) t) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // no Sort need for left join input. @Test void testSortProjectDerive3() { final String sql = "select * from\n" + "(select ename, cast(job as varchar) as job, sal + 1 from\n" + "(select ename, job, sal from sales.emp limit 100) t) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // need Sort on left join input. @Test void testSortProjectDerive4() { final String sql = "select * from\n" + "(select ename, cast(job as bigint) as job, sal + 1 from\n" + "(select ename, job, sal from sales.emp limit 100) t) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // test if top projection can enforce sort when inner sort cannot produce satisfying ordering. 
@Test void testSortProjectDerive5() { final String sql = "select ename, empno*-1, job from\n" + "(select * from sales.emp order by ename, empno, job limit 10) order by ename, job"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } @Test void testSortProjectDerive() { final String sql = "select * from\n" + "(select ename, job, max_sal + 1 from\n" + "(select ename, job, max(sal) as max_sal from sales.emp group by ename, job) t) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // need Sort on projection. @Test void testSortProjectDerive2() { final String sql = "select distinct ename, sal*-2, mgr\n" + "from (select ename, mgr, sal from sales.emp order by ename, mgr, sal limit 100) t"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } @Test void testSortProjectDerive6() { final String sql = "select comm, deptno, slacker from\n" + "(select * from sales.emp order by comm, deptno, slacker limit 10) t\n" + "order by comm, slacker"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test traits push through filter. @Test void testSortFilter() { final String sql = "select ename, job, mgr, max_sal from\n" + "(select ename, job, mgr, max(sal) as max_sal from sales.emp group by ename, job, mgr) as t\n" + "where max_sal > 1000\n" + "order by mgr desc, ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test traits derivation in filter. 
@Test void testSortFilterDerive() { final String sql = "select * from\n" + "(select ename, job, max_sal from\n" + "(select ename, job, max(sal) as max_sal from sales.emp group by ename, job) t where job > 1000) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // Not push down sort for hash join in full outer join case. @Test void testHashJoinFullOuterJoinNotPushDownSort() { final String sql = "select * from\n" + "sales.emp r full outer join sales.bonus s on r.ename=s.ename and r.job=s.job\n" + "order by r.job desc nulls last, r.ename nulls first"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .check(); } // Push down sort to left input. @Test void testHashJoinLeftOuterJoinPushDownSort() { final String sql = "select * from\n" + "(select contactno, email from customer.contact_peek) r left outer join\n" + "(select acctno, type from customer.account) s\n" + "on r.contactno=s.acctno and r.email=s.type\n" + "order by r.contactno desc, r.email desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Push down sort to left input. @Test void testHashJoinLeftOuterJoinPushDownSort2() { final String sql = "select * from\n" + "customer.contact_peek r left outer join\n" + "customer.account s\n" + "on r.contactno=s.acctno and r.email=s.type\n" + "order by r.fname desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Push down sort to left input. 
@Test void testHashJoinInnerJoinPushDownSort() { final String sql = "select * from\n" + "(select contactno, email from customer.contact_peek) r inner join\n" + "(select acctno, type from customer.account) s\n" + "on r.contactno=s.acctno and r.email=s.type\n" + "order by r.contactno desc, r.email desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // do not push down sort. @Test void testHashJoinRightOuterJoinPushDownSort() { final String sql = "select * from\n" + "(select contactno, email from customer.contact_peek) r right outer join\n" + "(select acctno, type from customer.account) s\n" + "on r.contactno=s.acctno and r.email=s.type\n" + "order by s.acctno desc, s.type desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // push sort to left input @Test void testNestedLoopJoinLeftOuterJoinPushDownSort() { final String sql = "select * from\n" + " customer.contact_peek r left outer join\n" + "customer.account s\n" + "on r.contactno>s.acctno and r.email<s.type\n" + "order by r.contactno desc, r.email desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // push sort to left input @Test void testNestedLoopJoinLeftOuterJoinPushDownSort2() { final String sql = "select * from\n" + " customer.contact_peek r left outer join\n" + "customer.account s\n" + "on r.contactno>s.acctno and r.email<s.type\n" + "order by r.fname desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // do not push sort to left input cause sort keys are on right input. 
@Test void testNestedLoopJoinLeftOuterJoinSortKeyOnRightInput() { final String sql = "select * from\n" + " customer.contact_peek r left outer join\n" + "customer.account s\n" + "on r.contactno>s.acctno and r.email<s.type\n" + "order by s.acctno desc, s.type desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // do not push down sort to right input because traits propagation does not work // for right/full outer join. @Test void testNestedLoopJoinRightOuterJoinSortPushDown() { final String sql = "select r.contactno, r.email, s.acctno, s.type from\n" + " customer.contact_peek r right outer join\n" + "customer.account s\n" + "on r.contactno>s.acctno and r.email<s.type\n" + "order by s.acctno desc, s.type desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Collation can be derived from left input so that top Sort is removed. @Test void testHashJoinTraitDerivation() { final String sql = "select * from\n" + "(select ename, job, mgr from sales.emp order by ename desc, job desc, mgr limit 10) r\n" + "join sales.bonus s on r.ename=s.ename and r.job=s.job\n" + "order by r.ename desc, r.job desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Collation can be derived from left input so that top Sort is removed. @Test void testHashJoinTraitDerivation2() { final String sql = "select * from\n" + "(select ename, job, mgr from sales.emp order by mgr desc limit 10) r\n" + "join sales.bonus s on r.ename=s.ename and r.job=s.job\n" + "order by r.mgr desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Collation derived from left input is not what the top Sort needs. 
@Test void testHashJoinTraitDerivationNegativeCase() { final String sql = "select * from\n" + "(select ename, job, mgr from sales.emp order by mgr desc limit 10) r\n" + "join sales.bonus s on r.ename=s.ename and r.job=s.job\n" + "order by r.mgr"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Collation can be derived from left input so that top Sort is removed. @Test void testNestedLoopJoinTraitDerivation() { final String sql = "select * from\n" + "(select ename, job, mgr from sales.emp order by ename desc, job desc, mgr limit 10) r\n" + "join sales.bonus s on r.ename>s.ename and r.job<s.job\n" + "order by r.ename desc, r.job desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Collation can be derived from left input so that top Sort is removed. @Test void testNestedLoopJoinTraitDerivation2() { final String sql = "select * from\n" + "(select ename, job, mgr from sales.emp order by mgr limit 10) r\n" + "join sales.bonus s on r.ename>s.ename and r.job<s.job\n" + "order by r.mgr"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // Collation derived from left input is not what the top Sort needs. @Test void testNestedLoopJoinTraitDerivationNegativeCase() { final String sql = "select * from\n" + "(select ename, job, mgr from sales.emp order by mgr limit 10) r\n" + "join sales.bonus s on r.ename>s.ename and r.job<s.job\n" + "order by r.mgr desc"; Query.create(sql) .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .check(); } // test if "order by mgr desc nulls last" can be pushed through the calc ("select mgr"). 
@Test void testSortCalc() { final String sql = "select mgr from sales.emp order by mgr desc nulls last"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .check(); } // test that Sort cannot push through calc because of non-trival call // (e.g. RexCall(sal * -1)). In this example, the reason is that "sal * -1" // creates opposite ordering if Sort is pushed down. @Test void testSortCalcOnRexCall() { final String sql = "select ename, sal * -1 as sal, mgr from\n" + "sales.emp order by ename desc, sal desc, mgr desc nulls last"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .check(); } // test that Sort can push through calc when cast is monotonic. @Test void testSortCalcWhenCastLeadingToMonotonic() { final String sql = "select cast(deptno as float) from sales.emp order by deptno desc"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .check(); } // test that Sort cannot push through calc when cast is not monotonic. @Test void testSortCalcWhenCastLeadingToNonMonotonic() { final String sql = "select deptno from sales.emp order by cast(deptno as varchar) desc"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .check(); } // test traits push through calc with filter. 
@Test void testSortCalcWithFilter() { final String sql = "select ename, job, mgr, max_sal from\n" + "(select ename, job, mgr, max(sal) as max_sal from sales.emp group by ename, job, mgr) as t\n" + "where max_sal > 1000\n" + "order by mgr desc, ename"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(CoreRules.FILTER_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .removeRule(EnumerableRules.ENUMERABLE_FILTER_RULE) .check(); } // Do not need Sort for calc. @Test void testSortCalcDerive1() { final String sql = "select * from\n" + "(select ename, job, max_sal + 1 from\n" + "(select ename, job, max(sal) as max_sal from sales.emp " + "group by ename, job) t) r\n" + "join sales.bonus s on r.job=s.job and r.ename=s.ename"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE) .check(); } // Need Sort for calc. @Test void testSortCalcDerive2() { final String sql = "select distinct ename, sal*-2, mgr\n" + "from (select ename, mgr, sal from sales.emp order by ename, mgr, sal limit 100) t"; Query.create(sql) .addRule(CoreRules.PROJECT_TO_CALC) .addRule(EnumerableRules.ENUMERABLE_CALC_RULE) .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE) .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE) .check(); } // Do not need Sort for left join input. 
// Collation survives a Calc whose only projections are an identity column,
// a monotonic cast and a simple arithmetic expression.
@Test void testSortCalcDerive3() {
    final String sql = "select * from\n"
        + "(select ename, cast(job as varchar) as job, sal + 1 from\n"
        + "(select ename, job, sal from sales.emp limit 100) t) r\n"
        + "join sales.bonus s on r.job=s.job and r.ename=s.ename";
    Query.create(sql)
        .addRule(CoreRules.PROJECT_TO_CALC)
        .addRule(EnumerableRules.ENUMERABLE_CALC_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE)
        .check();
}

// push sort to left input
@Test void testBatchNestedLoopJoinLeftOuterJoinPushDownSort() {
    final String sql = "select * from\n"
        + " customer.contact_peek r left outer join\n"
        + "customer.account s\n"
        + "on r.contactno>s.acctno and r.email<s.type\n"
        + "order by r.contactno desc, r.email desc";
    Query.create(sql)
        .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE)
        .addRule(EnumerableRules.ENUMERABLE_BATCH_NESTED_LOOP_JOIN_RULE)
        .check();
}

// Collation can be derived from left input so that top Sort is removed.
@Test void testBatchNestedLoopJoinTraitDerivation() {
    final String sql = "select * from\n"
        + "(select ename, job, mgr from sales.emp order by ename desc, job desc, mgr limit 10) r\n"
        + "join sales.bonus s on r.ename>s.ename and r.job<s.job\n"
        + "order by r.ename desc, r.job desc";
    Query.create(sql)
        .removeRule(EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE)
        .removeRule(EnumerableRules.ENUMERABLE_SORT_RULE)
        .addRule(EnumerableRules.ENUMERABLE_BATCH_NESTED_LOOP_JOIN_RULE)
        .check();
}
}

/**
 * A helper class that creates Volcano planner with top-down optimization enabled. This class
 * allows easy-to-add and easy-to-remove rules from the planner.
*/ class Query extends RelOptTestBase { protected DiffRepository getDiffRepos() { return DiffRepository.lookup(TopDownOptTest.class); } private String sql; private VolcanoPlanner planner; private Query(String sql) { this.sql = sql; planner = new VolcanoPlanner(); // Always use top-down optimization planner.setTopDownOpt(true); planner.addRelTraitDef(ConventionTraitDef.INSTANCE); planner.addRelTraitDef(RelCollationTraitDef.INSTANCE); RelOptUtil.registerDefaultRules(planner, false, false); // Remove to Keep deterministic join order. planner.removeRule(CoreRules.JOIN_COMMUTE); planner.removeRule(JoinPushThroughJoinRule.LEFT); planner.removeRule(JoinPushThroughJoinRule.RIGHT); // Always use sorted agg. planner.addRule(EnumerableRules.ENUMERABLE_SORTED_AGGREGATE_RULE); planner.removeRule(EnumerableRules.ENUMERABLE_AGGREGATE_RULE); // pushing down sort should be handled by top-down optimization. planner.removeRule(CoreRules.SORT_PROJECT_TRANSPOSE); // Sort will only be pushed down by traits propagation. planner.removeRule(CoreRules.SORT_JOIN_TRANSPOSE); planner.removeRule(CoreRules.SORT_JOIN_COPY); } public static Query create(String sql) { return new Query(sql); } public Query addRule(RelOptRule ruleToAdd) { planner.addRule(ruleToAdd); return this; } public Query addRules(List<RelOptRule> rulesToAdd) { for (RelOptRule ruleToAdd : rulesToAdd) { planner.addRule(ruleToAdd); } return this; } public Query removeRule(RelOptRule ruleToRemove) { planner.removeRule(ruleToRemove); return this; } public Query removeRules(List<RelOptRule> rulesToRemove) { for (RelOptRule ruleToRemove : rulesToRemove) { planner.removeRule(ruleToRemove); } return this; } public void check() { SqlToRelTestBase.Tester tester = createTester().withDecorrelation(true) .withClusterFactory(cluster -> RelOptCluster.create(planner, cluster.getRexBuilder())); new Sql(tester, sql, null, planner, ImmutableMap.of(), ImmutableList.of()).check(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.client.impl.schema.generic; import com.google.common.collect.Lists; import lombok.extern.slf4j.Slf4j; import org.apache.pulsar.client.api.Schema; import org.apache.pulsar.client.api.schema.GenericRecord; import org.apache.pulsar.client.api.schema.GenericSchema; import org.apache.pulsar.client.impl.schema.AutoConsumeSchema; import org.apache.pulsar.client.impl.schema.KeyValueSchemaImpl; import org.apache.pulsar.client.impl.schema.KeyValueSchemaInfo; import org.apache.pulsar.client.impl.schema.SchemaTestUtils.Bar; import org.apache.pulsar.client.impl.schema.SchemaTestUtils.Foo; import org.apache.pulsar.common.schema.KeyValue; import org.apache.pulsar.common.schema.KeyValueEncodingType; import org.apache.pulsar.common.schema.LongSchemaVersion; import org.testng.annotations.Test; import java.util.List; import java.util.concurrent.CompletableFuture; import static java.nio.charset.StandardCharsets.UTF_8; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; /** * Unit testing generic schemas. 
 * this test is duplicated with GenericSchemaImplTest independent of GenericSchemaImpl
 */
@Slf4j
public class GenericSchemaTest {

    // Encode with a typed Avro schema, decode with a GenericSchema built from the same SchemaInfo.
    @Test
    public void testGenericAvroSchema() {
        Schema<Foo> encodeSchema = Schema.AVRO(Foo.class);
        GenericSchema decodeSchema = GenericAvroSchema.of(encodeSchema.getSchemaInfo());
        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }

    // Same round trip as above, but with the JSON flavor of the generic schema.
    @Test
    public void testGenericJsonSchema() {
        Schema<Foo> encodeSchema = Schema.JSON(Foo.class);
        GenericSchema decodeSchema = GenericJsonSchema.of(encodeSchema.getSchemaInfo());
        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }

    // AUTO_CONSUME path: the decoder resolves the writer schema via the schema info provider.
    @Test
    public void testAutoAvroSchema() {
        // configure encode schema
        Schema<Foo> encodeSchema = Schema.AVRO(Foo.class);

        // configure the schema info provider
        MultiVersionSchemaInfoProvider multiVersionGenericSchemaProvider = mock(MultiVersionSchemaInfoProvider.class);
        when(multiVersionGenericSchemaProvider.getSchemaByVersion(any(byte[].class)))
            .thenReturn(CompletableFuture.completedFuture(encodeSchema.getSchemaInfo()));

        // configure decode schema
        AutoConsumeSchema decodeSchema = new AutoConsumeSchema();
        decodeSchema.configureSchemaInfo("test-topic", "topic", encodeSchema.getSchemaInfo());
        decodeSchema.setSchemaInfoProvider(multiVersionGenericSchemaProvider);

        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }

    // AUTO_CONSUME path for JSON payloads; the provider hands back a generic Avro view of the JSON schema.
    @Test
    public void testAutoJsonSchema() {
        // configure the schema info provider
        MultiVersionSchemaInfoProvider multiVersionSchemaInfoProvider = mock(MultiVersionSchemaInfoProvider.class);
        GenericSchema genericAvroSchema = GenericAvroSchema.of(Schema.JSON(Foo.class).getSchemaInfo());
        when(multiVersionSchemaInfoProvider.getSchemaByVersion(any(byte[].class)))
            .thenReturn(CompletableFuture.completedFuture(genericAvroSchema.getSchemaInfo()));

        // configure encode schema
        Schema<Foo> encodeSchema = Schema.JSON(Foo.class);

        // configure decode schema
        AutoConsumeSchema decodeSchema = new AutoConsumeSchema();
        decodeSchema.configureSchemaInfo("test-topic",
            "topic", encodeSchema.getSchemaInfo());
        decodeSchema.setSchemaInfoProvider(multiVersionSchemaInfoProvider);

        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }

    // Shared round-trip driver: encode Foo instances, decode them as GenericRecord,
    // and verify every field. AutoConsumeSchema needs an explicit schema version for decode.
    private void testEncodeAndDecodeGenericRecord(Schema<Foo> encodeSchema,
                                                  Schema<GenericRecord> decodeSchema) {
        int numRecords = 10;
        for (int i = 0; i < numRecords; i++) {
            Foo foo = newFoo(i);
            byte[] data = encodeSchema.encode(foo);

            log.info("Decoding : {}", new String(data, UTF_8));

            GenericRecord record;
            if (decodeSchema instanceof AutoConsumeSchema) {
                record = decodeSchema.decode(data, new LongSchemaVersion(0L).bytes());
            } else {
                record = decodeSchema.decode(data);
            }
            verifyFooRecord(record, i);
        }
    }

    // Exercises every JSON/AVRO combination of key and value schema through
    // an AUTO_CONSUME KeyValue decoder backed by a mocked schema info provider.
    @Test
    public void testKeyValueSchema() {
        // configure the schema info provider
        MultiVersionSchemaInfoProvider multiVersionSchemaInfoProvider = mock(MultiVersionSchemaInfoProvider.class);

        List<Schema<Foo>> encodeSchemas = Lists.newArrayList(
            Schema.JSON(Foo.class),
            Schema.AVRO(Foo.class)
        );

        for (Schema<Foo> keySchema : encodeSchemas) {
            for (Schema<Foo> valueSchema : encodeSchemas) {
                // configure encode schema
                Schema<KeyValue<Foo, Foo>> kvSchema = KeyValueSchemaImpl.of(
                    keySchema, valueSchema
                );

                // configure decode schema
                Schema<KeyValue<GenericRecord, GenericRecord>> decodeSchema = KeyValueSchemaImpl.of(
                    Schema.AUTO_CONSUME(), Schema.AUTO_CONSUME()
                );
                decodeSchema.configureSchemaInfo(
                    "test-topic", "topic", kvSchema.getSchemaInfo()
                );
                when(multiVersionSchemaInfoProvider.getSchemaByVersion(any(byte[].class)))
                    .thenReturn(CompletableFuture.completedFuture(
                        KeyValueSchemaInfo.encodeKeyValueSchemaInfo(
                            keySchema,
                            valueSchema,
                            KeyValueEncodingType.INLINE
                        )
                    ));
                decodeSchema.setSchemaInfoProvider(multiVersionSchemaInfoProvider);

                testEncodeAndDecodeKeyValues(kvSchema, decodeSchema);
            }
        }
    }

    // Round-trips KeyValue pairs through the encode/decode schema combination above.
    private void testEncodeAndDecodeKeyValues(Schema<KeyValue<Foo, Foo>> encodeSchema,
                                              Schema<KeyValue<GenericRecord, GenericRecord>> decodeSchema) {
        int numRecords = 10;
        for (int i = 0; i < numRecords; i++) {
            Foo foo = newFoo(i);
byte[] data = encodeSchema.encode(new KeyValue<>(foo, foo)); KeyValue<GenericRecord, GenericRecord> kv = decodeSchema.decode(data, new LongSchemaVersion(1L).bytes()); verifyFooRecord(kv.getKey(), i); verifyFooRecord(kv.getValue(), i); } } private static Foo newFoo(int i) { Foo foo = new Foo(); foo.setField1("field-1-" + i); foo.setField2("field-2-" + i); foo.setField3(i); Bar bar = new Bar(); bar.setField1(i % 2 == 0); foo.setField4(bar); foo.setFieldUnableNull("fieldUnableNull-1-" + i); return foo; } private static void verifyFooRecord(GenericRecord record, int i) { Object field1 = record.getField("field1"); assertEquals("field-1-" + i, field1, "Field 1 is " + field1.getClass()); Object field2 = record.getField("field2"); assertEquals("field-2-" + i, field2, "Field 2 is " + field2.getClass()); Object field3 = record.getField("field3"); assertEquals(i, field3, "Field 3 is " + field3.getClass()); Object field4 = record.getField("field4"); assertTrue(field4 instanceof GenericRecord); GenericRecord field4Record = (GenericRecord) field4; assertEquals(i % 2 == 0, field4Record.getField("field1")); Object fieldUnableNull = record.getField("fieldUnableNull"); assertEquals("fieldUnableNull-1-" + i, fieldUnableNull, "fieldUnableNull 1 is " + fieldUnableNull.getClass()); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.common.predicate.Domain; import com.facebook.presto.hive.HiveBucketing.HiveBucketFilter; import com.facebook.presto.hive.HiveSplit.BucketConversion; import com.facebook.presto.hive.filesystem.ExtendedFileSystem; import com.facebook.presto.hive.metastore.Column; import com.facebook.presto.hive.metastore.Partition; import com.facebook.presto.hive.metastore.Storage; import com.facebook.presto.hive.metastore.Table; import com.facebook.presto.hive.util.HiveFileIterator.NestedDirectoryNotAllowedException; import com.facebook.presto.hive.util.InternalHiveSplitFactory; import com.facebook.presto.hive.util.ResumableTask; import com.facebook.presto.hive.util.ResumableTasks; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaTableName; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import com.google.common.io.CharStreams; import com.google.common.util.concurrent.ListenableFuture; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileSplit; import 
org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hudi.hadoop.HoodieParquetInputFormat; import org.apache.hudi.hadoop.HoodieROTablePathFilter; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.lang.annotation.Annotation; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.Properties; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.Executor; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.IntPredicate; import java.util.function.Supplier; import static com.facebook.presto.hive.HiveBucketing.getVirtualBucketNumber; import static com.facebook.presto.hive.HiveColumnHandle.pathColumnHandle; import static com.facebook.presto.hive.HiveErrorCode.HIVE_BAD_DATA; import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES; import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_METADATA; import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE; import static com.facebook.presto.hive.HiveErrorCode.HIVE_UNKNOWN_ERROR; import static com.facebook.presto.hive.HiveSessionProperties.getNodeSelectionStrategy; import static com.facebook.presto.hive.HiveSessionProperties.isUseListDirectoryCache; import static com.facebook.presto.hive.HiveUtil.getFooterCount; import static com.facebook.presto.hive.HiveUtil.getHeaderCount; import static com.facebook.presto.hive.HiveUtil.getInputFormat; import static com.facebook.presto.hive.NestedDirectoryPolicy.FAIL; import static com.facebook.presto.hive.NestedDirectoryPolicy.IGNORED; import static 
com.facebook.presto.hive.NestedDirectoryPolicy.RECURSE;
import static com.facebook.presto.hive.S3SelectPushdown.shouldEnablePushdownForTable;
import static com.facebook.presto.hive.metastore.MetastoreUtil.checkCondition;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getHiveSchema;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getPartitionLocation;
import static com.facebook.presto.hive.util.ConfigurationUtils.toJobConf;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Streams.stream;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static java.lang.Math.max;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.hive.common.FileUtils.HIDDEN_FILES_PATH_FILTER;

/**
 * Discovers the files of a Hive table's partitions in the background and feeds them as
 * {@code InternalHiveSplit}s into a {@code HiveSplitSource}, running {@code loaderConcurrency}
 * resumable tasks on the supplied executor. See the comment on {@code taskExecutionLock}
 * below for the (intentionally inverted) read/write-lock discipline.
 */
public class BackgroundHiveSplitLoader
        implements HiveSplitLoader
{
    private static final ListenableFuture<?> COMPLETED_FUTURE = immediateFuture(null);

    private final Table table;
    private final Optional<Domain> pathDomain;
    private final Optional<BucketSplitInfo> tableBucketInfo;
    private final HdfsEnvironment hdfsEnvironment;
    private final HdfsContext hdfsContext;
    private final NamenodeStats namenodeStats;
    private final DirectoryLister directoryLister;
    private final int loaderConcurrency;
    private final boolean recursiveDirWalkerEnabled;
    private final Executor executor;
    private final ConnectorSession session;
    // Queue of partitions still waiting to have their files listed.
    private final ConcurrentLazyQueue<HivePartitionMetadata> partitions;
    // Per-partition split iterators that have been started but not fully drained.
    private final Deque<Iterator<InternalHiveSplit>> fileIterators = new ConcurrentLinkedDeque<>();
    private final boolean schedulerUsesHostAddresses;
    // Lazily created once and shared; used to filter Hudi read-optimized table paths.
    private final Supplier<HoodieROTablePathFilter> hoodiePathFilterSupplier;

    // Purpose of this lock:
    // * Write lock: when you need a consistent view across partitions, fileIterators, and hiveSplitSource.
    // * Read lock: when you need to modify any of the above.
    // Make sure the lock is held throughout the period during which they may not be consistent with each other.
    // Details:
    // * When write lock is acquired, except the holder, no one can do any of the following:
    // ** poll from (or check empty) partitions
    // ** poll from (or check empty) or push to fileIterators
    // ** push to hiveSplitSource
    // * When any of the above three operations is carried out, either a read lock or a write lock must be held.
    // * When a series of operations involving two or more of the above three operations are carried out, the lock
    //   must be continuously held throughout the series of operations.
    // Implications:
    // * if you hold a read lock but not a write lock, you can do any of the above three operations, but you may
    //   see a series of operations involving two or more of the operations carried out half way.
    private final ReentrantReadWriteLock taskExecutionLock = new ReentrantReadWriteLock();

    private HiveSplitSource hiveSplitSource;
    private volatile boolean stopped;

    public BackgroundHiveSplitLoader(
            Table table,
            Iterable<HivePartitionMetadata> partitions,
            Optional<Domain> pathDomain,
            Optional<BucketSplitInfo> tableBucketInfo,
            ConnectorSession session,
            HdfsEnvironment hdfsEnvironment,
            NamenodeStats namenodeStats,
            DirectoryLister directoryLister,
            Executor executor,
            int loaderConcurrency,
            boolean recursiveDirWalkerEnabled,
            boolean schedulerUsesHostAddresses)
    {
        this.table = requireNonNull(table, "table is null");
        this.pathDomain = requireNonNull(pathDomain, "pathDomain is null");
        this.tableBucketInfo = requireNonNull(tableBucketInfo, "tableBucketInfo is null");
        this.loaderConcurrency = loaderConcurrency;
        this.session = requireNonNull(session, "session is null");
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.namenodeStats = requireNonNull(namenodeStats,
                "namenodeStats is null");
        this.directoryLister = requireNonNull(directoryLister, "directoryLister is null");
        this.recursiveDirWalkerEnabled = recursiveDirWalkerEnabled;
        this.executor = requireNonNull(executor, "executor is null");
        this.partitions = new ConcurrentLazyQueue<>(requireNonNull(partitions, "partitions is null"));
        this.hdfsContext = new HdfsContext(session, table.getDatabaseName(), table.getTableName());
        this.schedulerUsesHostAddresses = schedulerUsesHostAddresses;
        this.hoodiePathFilterSupplier = Suppliers.memoize(HoodieROTablePathFilter::new)::get;
    }

    // Kicks off loaderConcurrency background tasks that cooperatively drain partitions/fileIterators.
    @Override
    public void start(HiveSplitSource splitSource)
    {
        this.hiveSplitSource = splitSource;
        for (int i = 0; i < loaderConcurrency; i++) {
            ResumableTasks.submit(executor, new HiveSplitLoaderTask());
        }
    }

    // Cooperative shutdown flag; running tasks observe it and finish.
    @Override
    public void stop()
    {
        stopped = true;
    }

    // One resumable unit of background work: repeatedly load splits until there is
    // nothing left, the loader is stopped, or the split source applies backpressure.
    private class HiveSplitLoaderTask
            implements ResumableTask
    {
        @Override
        public TaskStatus process()
        {
            while (true) {
                if (stopped) {
                    return TaskStatus.finished();
                }
                ListenableFuture<?> future;
                taskExecutionLock.readLock().lock();
                try {
                    future = loadSplits();
                }
                catch (Exception e) {
                    // Wrap raw I/O and unexpected failures into PrestoExceptions.
                    if (e instanceof IOException) {
                        e = new PrestoException(HIVE_FILESYSTEM_ERROR, e);
                    }
                    else if (!(e instanceof PrestoException)) {
                        e = new PrestoException(HIVE_UNKNOWN_ERROR, e);
                    }
                    // Fail the split source before releasing the execution lock
                    // Otherwise, a race could occur where the split source is completed before we fail it.
                    hiveSplitSource.fail(e);
                    checkState(stopped);
                    return TaskStatus.finished();
                }
                finally {
                    taskExecutionLock.readLock().unlock();
                }
                invokeNoMoreSplitsIfNecessary();
                if (!future.isDone()) {
                    // Backpressure from the split source: suspend and resume when it has capacity.
                    return TaskStatus.continueOn(future);
                }
            }
        }
    }

    // Signals noMoreSplits exactly when both work queues are empty; uses the write lock
    // to get a consistent view of partitions + fileIterators + hiveSplitSource.
    private void invokeNoMoreSplitsIfNecessary()
    {
        taskExecutionLock.readLock().lock();
        try {
            // This is an opportunistic check to avoid getting the write lock unnecessarily
            if (!partitions.isEmpty() || !fileIterators.isEmpty()) {
                return;
            }
        }
        catch (Exception e) {
            hiveSplitSource.fail(e);
            checkState(stopped, "Task is not marked as stopped even though it failed");
            return;
        }
        finally {
            taskExecutionLock.readLock().unlock();
        }

        taskExecutionLock.writeLock().lock();
        try {
            // the write lock guarantees that no one is operating on the partitions, fileIterators, or hiveSplitSource, or half way through doing so.
            if (partitions.isEmpty() && fileIterators.isEmpty()) {
                // It is legal to call `noMoreSplits` multiple times or after `stop` was called.
                // Nothing bad will happen if `noMoreSplits` implementation calls methods that will try to obtain a read lock because the lock is re-entrant.
                hiveSplitSource.noMoreSplits();
            }
        }
        catch (Exception e) {
            hiveSplitSource.fail(e);
            checkState(stopped, "Task is not marked as stopped even though it failed");
        }
        finally {
            taskExecutionLock.writeLock().unlock();
        }
    }

    // Drains one unit of work: either continues an in-progress file iterator, or starts
    // loading the next partition. Returns a future for backpressure, or COMPLETED_FUTURE.
    private ListenableFuture<?> loadSplits()
            throws IOException
    {
        Iterator<InternalHiveSplit> splits = fileIterators.poll();
        if (splits == null) {
            HivePartitionMetadata partition = partitions.poll();
            if (partition == null) {
                return COMPLETED_FUTURE;
            }
            return loadPartition(partition);
        }

        while (splits.hasNext() && !stopped) {
            ListenableFuture<?> future = hiveSplitSource.addToQueue(splits.next());
            if (!future.isDone()) {
                // Split source is full: park the iterator at the FRONT so it is resumed first.
                fileIterators.addFirst(splits);
                return future;
            }
        }

        // No need to put the iterator back, since it's either empty or we've stopped
        return COMPLETED_FUTURE;
    }

    // Lists one partition and enqueues its splits. Symlink tables and input formats that
    // produce their own splits are handled eagerly; bucketed partitions are fully listed;
    // everything else becomes a lazy iterator pushed onto fileIterators.
    private ListenableFuture<?> loadPartition(HivePartitionMetadata partition)
            throws IOException
    {
        String partitionName = partition.getHivePartition().getPartitionId();
        Storage storage = partition.getPartition().map(Partition::getStorage).orElse(table.getStorage());
        String inputFormatName = storage.getStorageFormat().getInputFormat();
        int partitionDataColumnCount = partition.getPartition()
                .map(p -> p.getColumns().size())
                .orElse(table.getDataColumns().size());
        List<HivePartitionKey> partitionKeys = getPartitionKeys(table, partition.getPartition());
        Path path = new Path(getPartitionLocation(table, partition.getPartition()));
        Configuration configuration = hdfsEnvironment.getConfiguration(hdfsContext, path);
        InputFormat<?, ?> inputFormat = getInputFormat(configuration, inputFormatName, false);
        ExtendedFileSystem fs = hdfsEnvironment.getFileSystem(hdfsContext, path);
        boolean s3SelectPushdownEnabled = shouldEnablePushdownForTable(session, table, path.toString(), partition.getPartition());

        if (inputFormat instanceof SymlinkTextInputFormat) {
            if (tableBucketInfo.isPresent()) {
                throw new PrestoException(NOT_SUPPORTED, "Bucketed table in SymlinkTextInputFormat is not yet supported");
            }

            // TODO: This should use an iterator like the HiveFileIterator
            ListenableFuture<?> lastResult = COMPLETED_FUTURE;
            for (Path targetPath : getTargetPathsFromSymlink(fs, path)) {
                // The input should be in TextInputFormat.
                TextInputFormat targetInputFormat = new TextInputFormat();
                // the splits must be generated using the file system for the target path
                // get the configuration for the target path -- it may be a different hdfs instance
                ExtendedFileSystem targetFilesystem = hdfsEnvironment.getFileSystem(hdfsContext, targetPath);
                JobConf targetJob = toJobConf(targetFilesystem.getConf());
                targetJob.setInputFormat(TextInputFormat.class);
                targetInputFormat.configure(targetJob);
                FileInputFormat.setInputPaths(targetJob, targetPath);
                InputSplit[] targetSplits = targetInputFormat.getSplits(targetJob, 0);

                InternalHiveSplitFactory splitFactory = new InternalHiveSplitFactory(
                        targetFilesystem,
                        inputFormat,
                        pathDomain,
                        getNodeSelectionStrategy(session),
                        s3SelectPushdownEnabled,
                        new HiveSplitPartitionInfo(storage, path.toUri(), partitionKeys, partitionName, partitionDataColumnCount, partition.getPartitionSchemaDifference(), Optional.empty()),
                        schedulerUsesHostAddresses,
                        partition.getEncryptionInformation());
                lastResult = addSplitsToSource(targetSplits, splitFactory);
                if (stopped) {
                    return COMPLETED_FUTURE;
                }
            }
            return lastResult;
        }

        Optional<BucketConversion> bucketConversion = Optional.empty();
        boolean bucketConversionRequiresWorkerParticipation = false;
        if (partition.getPartition().isPresent()) {
            Optional<HiveBucketProperty> partitionBucketProperty = partition.getPartition().get().getStorage().getBucketProperty();
            if (tableBucketInfo.isPresent() && partitionBucketProperty.isPresent()) {
                int tableBucketCount = tableBucketInfo.get().getTableBucketCount();
                int partitionBucketCount = partitionBucketProperty.get().getBucketCount();
                // Validation was done in HiveSplitManager#getPartitionMetadata.
                // Here, it's just trying to see if it needs the BucketConversion.
                if (tableBucketCount != partitionBucketCount) {
                    bucketConversion = Optional.of(new BucketConversion(tableBucketCount, partitionBucketCount, tableBucketInfo.get().getBucketColumns()));
                    if (tableBucketCount > partitionBucketCount) {
                        // Only scaling UP (table buckets > partition buckets) needs work on the worker side.
                        bucketConversionRequiresWorkerParticipation = true;
                    }
                }
            }
        }
        InternalHiveSplitFactory splitFactory = new InternalHiveSplitFactory(
                fs,
                inputFormat,
                pathDomain,
                getNodeSelectionStrategy(session),
                s3SelectPushdownEnabled,
                new HiveSplitPartitionInfo(
                        storage,
                        path.toUri(),
                        partitionKeys,
                        partitionName,
                        partitionDataColumnCount,
                        partition.getPartitionSchemaDifference(),
                        bucketConversionRequiresWorkerParticipation ? bucketConversion : Optional.empty()),
                schedulerUsesHostAddresses,
                partition.getEncryptionInformation());

        if (!isHudiInputFormat(inputFormat) && shouldUseFileSplitsFromInputFormat(inputFormat)) {
            if (tableBucketInfo.isPresent()) {
                throw new PrestoException(NOT_SUPPORTED, "Presto cannot read bucketed partition in an input format with UseFileSplitsFromInputFormat annotation: " + inputFormat.getClass().getSimpleName());
            }
            // Delegate split generation entirely to the input format.
            JobConf jobConf = toJobConf(configuration);
            FileInputFormat.setInputPaths(jobConf, path);
            InputSplit[] splits = inputFormat.getSplits(jobConf, 0);
            return addSplitsToSource(splits, splitFactory);
        }

        PathFilter pathFilter = isHudiInputFormat(inputFormat) ? hoodiePathFilterSupplier.get() : path1 -> true;

        // S3 Select pushdown works at the granularity of individual S3 objects,
        // therefore we must not split files when it is enabled.
        Properties schema = getHiveSchema(storage.getSerdeParameters(), table.getParameters());
        boolean splittable = getHeaderCount(schema) == 0 && getFooterCount(schema) == 0 && !s3SelectPushdownEnabled;

        // Bucketed partitions are fully loaded immediately since all files must be loaded to determine the file to bucket mapping
        if (tableBucketInfo.isPresent()) {
            if (tableBucketInfo.get().isVirtuallyBucketed()) {
                // For virtual bucket, bucket conversion must not be present because there is no physical partition bucket count
                checkState(!bucketConversion.isPresent(), "Virtually bucketed table must not have partitions that are physically bucketed");
                checkState(
                        tableBucketInfo.get().getTableBucketCount() == tableBucketInfo.get().getReadBucketCount(),
                        "Table and read bucket count should be the same for virtual bucket");
                return hiveSplitSource.addToQueue(getVirtuallyBucketedSplits(path, fs, splitFactory, tableBucketInfo.get().getReadBucketCount(), splittable, pathFilter));
            }
            return hiveSplitSource.addToQueue(getBucketedSplits(path, fs, splitFactory, tableBucketInfo.get(), bucketConversion, partitionName, splittable, pathFilter));
        }

        // Non-bucketed partition: queue a lazy iterator at the BACK so partitions are processed in order.
        fileIterators.addLast(createInternalHiveSplitIterator(path, fs, splitFactory, splittable, pathFilter));

        return COMPLETED_FUTURE;
    }

    // Converts pre-generated InputSplits into internal splits and queues them,
    // returning the last (possibly not-done) enqueue future for backpressure.
    private ListenableFuture<?> addSplitsToSource(InputSplit[] targetSplits, InternalHiveSplitFactory splitFactory)
            throws IOException
    {
        ListenableFuture<?> lastResult = COMPLETED_FUTURE;
        for (InputSplit inputSplit : targetSplits) {
            Optional<InternalHiveSplit> internalHiveSplit = splitFactory.createInternalHiveSplit((FileSplit) inputSplit);
            if (internalHiveSplit.isPresent()) {
                lastResult = hiveSplitSource.addToQueue(internalHiveSplit.get());
            }
            if (stopped) {
                return COMPLETED_FUTURE;
            }
        }
        return lastResult;
    }

    private static boolean isHudiInputFormat(InputFormat<?, ?> inputFormat)
    {
        return inputFormat instanceof HoodieParquetInputFormat;
    }

    // True when the input format carries the UseFileSplitsFromInputFormat marker annotation
    // (matched by simple name so shaded copies of the annotation also match).
    private static boolean shouldUseFileSplitsFromInputFormat(InputFormat<?, ?> inputFormat)
    {
        return Arrays.stream(inputFormat.getClass().getAnnotations())
                .map(Annotation::annotationType)
                .map(Class::getSimpleName)
                .anyMatch(name -> name.equals("UseFileSplitsFromInputFormat"));
    }

    // Lazily lists a directory (optionally recursively) and turns each eligible file into a split.
    private Iterator<InternalHiveSplit> createInternalHiveSplitIterator(Path path, ExtendedFileSystem fileSystem, InternalHiveSplitFactory splitFactory, boolean splittable, PathFilter pathFilter)
    {
        HiveDirectoryContext hiveDirectoryContext = new HiveDirectoryContext(recursiveDirWalkerEnabled ? RECURSE : IGNORED, isUseListDirectoryCache(session));
        return stream(directoryLister.list(fileSystem, table, path, namenodeStats, pathFilter, hiveDirectoryContext))
                .map(status -> splitFactory.createInternalHiveSplit(status, splittable))
                .filter(Optional::isPresent)
                .map(Optional::get)
                .iterator();
    }

    // Eagerly lists a physically bucketed partition (exactly one file per bucket is required)
    // and emits splits mapping each physical file to its logical read bucket and table bucket.
    private List<InternalHiveSplit> getBucketedSplits(
            Path path,
            ExtendedFileSystem fileSystem,
            InternalHiveSplitFactory splitFactory,
            BucketSplitInfo bucketSplitInfo,
            Optional<BucketConversion> bucketConversion,
            String partitionName,
            boolean splittable,
            PathFilter pathFilter)
    {
        int readBucketCount = bucketSplitInfo.getReadBucketCount();
        int tableBucketCount = bucketSplitInfo.getTableBucketCount();
        int partitionBucketCount = bucketConversion.map(BucketConversion::getPartitionBucketCount).orElse(tableBucketCount);

        checkState(readBucketCount <= tableBucketCount, "readBucketCount(%s) should be less than or equal to tableBucketCount(%s)", readBucketCount, tableBucketCount);

        // list all files in the partition
        List<HiveFileInfo> fileInfos = new ArrayList<>(partitionBucketCount);
        try {
            Iterators.addAll(fileInfos, directoryLister.list(fileSystem, table, path, namenodeStats, pathFilter, new HiveDirectoryContext(FAIL, isUseListDirectoryCache(session))));
        }
        catch (NestedDirectoryNotAllowedException e) {
            // Fail here to be on the safe side. This seems to be the same as what Hive does
            throw new PrestoException(
                    HIVE_INVALID_BUCKET_FILES,
                    format("Hive table '%s' is corrupt. Found sub-directory in bucket directory for partition: %s",
                            new SchemaTableName(table.getDatabaseName(), table.getTableName()),
                            partitionName));
        }

        // verify we found one file per bucket
        if (fileInfos.size() != partitionBucketCount) {
            throw new PrestoException(
                    HIVE_INVALID_BUCKET_FILES,
                    format("Hive table '%s' is corrupt. The number of files in the directory (%s) does not match the declared bucket count (%s) for partition: %s",
                            new SchemaTableName(table.getDatabaseName(), table.getTableName()),
                            fileInfos.size(),
                            partitionBucketCount,
                            partitionName));
        }

        // Sort FileStatus objects (instead of, e.g., fileStatus.getPath().toString). This matches org.apache.hadoop.hive.ql.metadata.Table.getSortedPaths
        fileInfos.sort(null);

        // convert files internal splits
        List<InternalHiveSplit> splitList = new ArrayList<>();
        for (int bucketNumber = 0; bucketNumber < max(readBucketCount, partitionBucketCount); bucketNumber++) {
            // Physical bucket #. This determine file name. It also determines the order of splits in the result.
            int partitionBucketNumber = bucketNumber % partitionBucketCount;
            // Logical bucket #. Each logical bucket corresponds to a "bucket" from engine's perspective.
            int readBucketNumber = bucketNumber % readBucketCount;

            boolean containsIneligibleTableBucket = false;
            List<Integer> eligibleTableBucketNumbers = new ArrayList<>();
            for (int tableBucketNumber = bucketNumber % tableBucketCount; tableBucketNumber < tableBucketCount; tableBucketNumber += max(readBucketCount, partitionBucketCount)) {
                // table bucket number: this is used for evaluating "$bucket" filters.
                if (bucketSplitInfo.isTableBucketEnabled(tableBucketNumber)) {
                    eligibleTableBucketNumbers.add(tableBucketNumber);
                }
                else {
                    containsIneligibleTableBucket = true;
                }
            }

            if (!eligibleTableBucketNumbers.isEmpty() && containsIneligibleTableBucket) {
                throw new PrestoException(
                        NOT_SUPPORTED,
                        "The bucket filter cannot be satisfied. There are restrictions on the bucket filter when all the following is true: " +
                                "1. a table has a different buckets count as at least one of its partitions that is read in this query; " +
                                "2. the table has a different but compatible bucket number with another table in the query; " +
                                "3. some buckets of the table is filtered out from the query, most likely using a filter on \"$bucket\". " +
                                "(table name: " + table.getTableName() + ", table bucket count: " + tableBucketCount + ", " +
                                "partition bucket count: " + partitionBucketCount + ", effective reading bucket count: " + readBucketCount + ")");
            }
            if (!eligibleTableBucketNumbers.isEmpty()) {
                HiveFileInfo fileInfo = fileInfos.get(partitionBucketNumber);
                eligibleTableBucketNumbers.stream()
                        .map(tableBucketNumber -> splitFactory.createInternalHiveSplit(fileInfo, readBucketNumber, tableBucketNumber, splittable))
                        .forEach(optionalSplit -> optionalSplit.ifPresent(splitList::add));
            }
        }
        return splitList;
    }

    private List<InternalHiveSplit> getVirtuallyBucketedSplits(Path path, ExtendedFileSystem fileSystem, InternalHiveSplitFactory splitFactory, int bucketCount, boolean splittable, PathFilter pathFilter)
    {
        // List all files recursively in the partition and assign virtual bucket number to each of them
        HiveDirectoryContext hiveDirectoryContext = new HiveDirectoryContext(recursiveDirWalkerEnabled ?
RECURSE : IGNORED, isUseListDirectoryCache(session)); return stream(directoryLister.list(fileSystem, table, path, namenodeStats, pathFilter, hiveDirectoryContext)) .map(fileInfo -> { int virtualBucketNumber = getVirtualBucketNumber(bucketCount, fileInfo.getPath()); return splitFactory.createInternalHiveSplit(fileInfo, virtualBucketNumber, virtualBucketNumber, splittable); }) .filter(Optional::isPresent) .map(Optional::get) .collect(toImmutableList()); } private static List<Path> getTargetPathsFromSymlink(ExtendedFileSystem fileSystem, Path symlinkDir) { try { FileStatus[] symlinks = fileSystem.listStatus(symlinkDir, HIDDEN_FILES_PATH_FILTER); List<Path> targets = new ArrayList<>(); for (FileStatus symlink : symlinks) { try (BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(symlink.getPath()), StandardCharsets.UTF_8))) { CharStreams.readLines(reader).stream() .map(Path::new) .forEach(targets::add); } } return targets; } catch (IOException e) { throw new PrestoException(HIVE_BAD_DATA, "Error parsing symlinks from: " + symlinkDir, e); } } private static List<HivePartitionKey> getPartitionKeys(Table table, Optional<Partition> partition) { if (!partition.isPresent()) { return ImmutableList.of(); } ImmutableList.Builder<HivePartitionKey> partitionKeys = ImmutableList.builder(); List<Column> keys = table.getPartitionColumns(); List<String> values = partition.get().getValues(); checkCondition(keys.size() == values.size(), HIVE_INVALID_METADATA, "Expected %s partition key values, but got %s", keys.size(), values.size()); for (int i = 0; i < keys.size(); i++) { String name = keys.get(i).getName(); HiveType hiveType = keys.get(i).getType(); if (!hiveType.isSupportedType()) { throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName())); } String value = values.get(i); checkCondition(value != null, HIVE_INVALID_PARTITION_VALUE, 
"partition key value cannot be null for field: %s", name); partitionKeys.add(new HivePartitionKey(name, value)); } return partitionKeys.build(); } public static class BucketSplitInfo { private final List<HiveColumnHandle> bucketColumns; private final int tableBucketCount; private final int readBucketCount; private final IntPredicate bucketFilter; public static Optional<BucketSplitInfo> createBucketSplitInfo(Optional<HiveBucketHandle> bucketHandle, Optional<HiveBucketFilter> bucketFilter) { requireNonNull(bucketHandle, "bucketHandle is null"); requireNonNull(bucketFilter, "buckets is null"); if (!bucketHandle.isPresent()) { checkArgument(!bucketFilter.isPresent(), "bucketHandle must be present if bucketFilter is present"); return Optional.empty(); } int tableBucketCount = bucketHandle.get().getTableBucketCount(); int readBucketCount = bucketHandle.get().getReadBucketCount(); List<HiveColumnHandle> bucketColumns = bucketHandle.get().getColumns(); IntPredicate predicate = bucketFilter .<IntPredicate>map(filter -> filter.getBucketsToKeep()::contains) .orElse(bucket -> true); return Optional.of(new BucketSplitInfo(bucketColumns, tableBucketCount, readBucketCount, predicate)); } private BucketSplitInfo(List<HiveColumnHandle> bucketColumns, int tableBucketCount, int readBucketCount, IntPredicate bucketFilter) { this.bucketColumns = ImmutableList.copyOf(requireNonNull(bucketColumns, "bucketColumns is null")); this.tableBucketCount = tableBucketCount; this.readBucketCount = readBucketCount; this.bucketFilter = requireNonNull(bucketFilter, "bucketFilter is null"); } public List<HiveColumnHandle> getBucketColumns() { return bucketColumns; } public int getTableBucketCount() { return tableBucketCount; } public int getReadBucketCount() { return readBucketCount; } public boolean isVirtuallyBucketed() { return bucketColumns.size() == 1 && bucketColumns.get(0).equals(pathColumnHandle()); } /** * Evaluates whether the provided table bucket number passes the bucket predicate. 
* A bucket predicate can be present in two cases: * <ul> * <li>Filter on "$bucket" column. e.g. {@code "$bucket" between 0 and 100} * <li>Single-value equality filter on all bucket columns. e.g. for a table with two bucketing columns, * {@code bucketCol1 = 'a' AND bucketCol2 = 123} * </ul> */ public boolean isTableBucketEnabled(int tableBucketNumber) { return bucketFilter.test(tableBucketNumber); } } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.keymap.impl; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.components.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.keymap.KeyMapBundle; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapManagerListener; import com.intellij.openapi.keymap.ex.KeymapManagerEx; import com.intellij.openapi.options.Scheme; import com.intellij.openapi.options.SchemeProcessor; import com.intellij.openapi.options.SchemesManager; import com.intellij.openapi.options.SchemesManagerFactory; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.WriteExternalException; import com.intellij.util.containers.ContainerUtil; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.util.*; @State( name = "KeymapManager", roamingType = RoamingType.PER_PLATFORM, storages = { @Storage( id="keymap", file = "$APP_CONFIG$/keymap.xml" )} ) public class KeymapManagerImpl extends KeymapManagerEx implements PersistentStateComponent<Element>, ExportableApplicationComponent { private static final Logger LOG = 
Logger.getInstance("#com.intellij.keymap.KeymapManager"); private final List<KeymapManagerListener> myListeners = ContainerUtil.createEmptyCOWList(); private String myActiveKeymapName; private final Map<String, String> myBoundShortcuts = new HashMap<String, String>(); @NonNls private static final String KEYMAP = "keymap"; @NonNls private static final String KEYMAPS = "keymaps"; @NonNls private static final String ACTIVE_KEYMAP = "active_keymap"; @NonNls private static final String NAME_ATTRIBUTE = "name"; private final SchemesManager<Keymap, KeymapImpl> mySchemesManager; public static boolean ourKeymapManagerInitialized = false; KeymapManagerImpl(DefaultKeymap defaultKeymap, SchemesManagerFactory factory) { mySchemesManager = factory.createSchemesManager( "$ROOT_CONFIG$/keymaps", new SchemeProcessor<KeymapImpl>(){ public KeymapImpl readScheme(final Document schemeContent) throws InvalidDataException, IOException, JDOMException { return readKeymap(schemeContent); } public Document writeScheme(final KeymapImpl scheme) throws WriteExternalException { return new Document(scheme.writeExternal()); } public boolean shouldBeSaved(final KeymapImpl scheme) { return scheme.canModify(); } public void initScheme(final KeymapImpl scheme) { } public void onSchemeAdded(final KeymapImpl scheme) { } public void onSchemeDeleted(final KeymapImpl scheme) { } public void onCurrentSchemeChanged(final Scheme newCurrentScheme) { } }, RoamingType.PER_USER); Keymap[] keymaps = defaultKeymap.getKeymaps(); for (Keymap keymap : keymaps) { addKeymap(keymap); String systemDefaultKeymap = SystemInfo.isMac ? 
MAC_OS_X_KEYMAP : DEFAULT_IDEA_KEYMAP; if (systemDefaultKeymap.equals(keymap.getName())) { setActiveKeymap(keymap); } } load(); ourKeymapManagerInitialized = true; } @NotNull public File[] getExportFiles() { return new File[]{new File(PathManager.getOptionsPath()+File.separatorChar+"keymap.xml"),getKeymapDirectory(true)}; } @NotNull public String getPresentableName() { return KeyMapBundle.message("key.maps.name"); } public Keymap[] getAllKeymaps() { Collection<Keymap> keymaps = mySchemesManager.getAllSchemes(); return keymaps.toArray(new Keymap[keymaps.size()]); } @Nullable public Keymap getKeymap(String name) { return mySchemesManager.findSchemeByName( name); } public Keymap getActiveKeymap() { return mySchemesManager.getCurrentScheme(); } public void setActiveKeymap(Keymap activeKeymap) { mySchemesManager.setCurrentSchemeName(activeKeymap == null ? null : activeKeymap.getName()); fireActiveKeymapChanged(); } public void bindShortcuts(String sourceActionId, String targetActionId) { myBoundShortcuts.put(targetActionId, sourceActionId); } public Set<String> getBoundActions() { return myBoundShortcuts.keySet(); } public String getActionBinding(String actionId) { return myBoundShortcuts.get(actionId); } public SchemesManager<Keymap, KeymapImpl> getSchemesManager() { return mySchemesManager; } public void addKeymap(Keymap keymap) { mySchemesManager.addNewScheme(keymap, true); } public void removeAllKeymapsExceptUnmodifiable() { for (Keymap keymap : mySchemesManager.getAllSchemes()) { if (keymap.canModify()) { mySchemesManager.removeScheme(keymap); } } mySchemesManager.setCurrentSchemeName(null); Collection<Keymap> keymaps = mySchemesManager.getAllSchemes(); if (keymaps.size() > 0) { mySchemesManager.setCurrentSchemeName(keymaps.iterator().next().getName()); } } public String getExternalFileName() { return "keymap"; } public Element getState() { Element result = new Element("component"); try { writeExternal(result); } catch (WriteExternalException e) { LOG.error(e); } 
return result; } public void loadState(final Element state) { try { readExternal(state); } catch (InvalidDataException e) { LOG.error(e); } } public void readExternal(Element element) throws InvalidDataException{ Element child = element.getChild(ACTIVE_KEYMAP); if (child != null) { myActiveKeymapName = child.getAttributeValue(NAME_ATTRIBUTE); } if (myActiveKeymapName != null) { Keymap keymap = getKeymap(myActiveKeymapName); if (keymap != null) { setActiveKeymap(keymap); } } } public void writeExternal(Element element) throws WriteExternalException{ if (mySchemesManager.getCurrentScheme() != null) { Element e = new Element(ACTIVE_KEYMAP); Keymap currentScheme = mySchemesManager.getCurrentScheme(); if (currentScheme != null) { e.setAttribute(NAME_ATTRIBUTE, currentScheme.getName()); } element.addContent(e); } } private void load(){ mySchemesManager.loadSchemes(); } private KeymapImpl readKeymap(Document document) throws JDOMException,InvalidDataException, IOException{ if (document == null) throw new InvalidDataException(); Element root = document.getRootElement(); if (root == null || !KEYMAP.equals(root.getName())) { throw new InvalidDataException(); } KeymapImpl keymap = new KeymapImpl(); keymap.readExternal(root, getAllKeymaps()); return keymap; } @Nullable private static File getKeymapDirectory(boolean toCreate) { String directoryPath = PathManager.getConfigPath() + File.separator + KEYMAPS; File directory = new File(directoryPath); if (!directory.exists()) { if (!toCreate) return null; if (!directory.mkdir()) { LOG.error("Cannot create directory: " + directory.getAbsolutePath()); return null; } } return directory; } private void fireActiveKeymapChanged() { for (KeymapManagerListener listener : myListeners) { listener.activeKeymapChanged(mySchemesManager.getCurrentScheme()); } } public void addKeymapManagerListener(KeymapManagerListener listener) { myListeners.add(listener); } public void removeKeymapManagerListener(KeymapManagerListener listener) { 
myListeners.remove(listener); } @NotNull public String getComponentName() { return "KeymapManager"; } public void initComponent() {} public void disposeComponent() {} }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.content.impl;

import com.intellij.ide.DataManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.content.*;
import com.intellij.util.EventDispatcher;
import com.intellij.util.SmartList;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.List;
import java.util.*;

/**
 * Default {@link ContentManager} implementation: maintains the ordered list of contents,
 * the current (multi-)selection, a most-recently-selected history used when a removed
 * content must be replaced, and dispatches {@link ContentManagerListener} events.
 * All mutating operations assert they run on the EDT.
 *
 * @author Anton Katilin
 * @author Vladimir Kondratyev
 */
public class ContentManagerImpl implements ContentManager, PropertyChangeListener, Disposable.Parent {
  private static final Logger LOG = Logger.getInstance(ContentManagerImpl.class);

  private ContentUI myUI;
  private final List<Content> myContents = new ArrayList<>();
  private final EventDispatcher<ContentManagerListener> myDispatcher = EventDispatcher.create(ContentManagerListener.class);
  // Selected contents; in single-selection UIs this holds at most one element.
  private final List<Content> mySelection = new ArrayList<>();
  private final boolean myCanCloseContents;

  private MyNonOpaquePanel myComponent;
  // Contents whose component changed since last selection check; forces re-processing a re-select.
  private final Set<Content> myContentWithChangedComponent = new HashSet<>();

  private boolean myDisposed;
  private final Project myProject;

  private final List<DataProvider> dataProviders = new SmartList<>();
  // Most-recently-selected first; consulted when choosing a replacement after removal.
  private final ArrayList<Content> mySelectionHistory = new ArrayList<>();

  /**
   * WARNING: as this class adds listener to the ProjectManager which is removed on projectClosed event, all instances of this class
   * must be created on already OPENED projects, otherwise there will be memory leak!
   */
  public ContentManagerImpl(@NotNull ContentUI contentUI, boolean canCloseContents, @NotNull Project project) {
    myProject = project;
    myCanCloseContents = canCloseContents;
    myUI = contentUI;
    myUI.setManager(this);

    // register on FileManager because before Content disposal the UsageView is disposed before which virtual file pointers should be externalized for which they need to be restored for which com.intellij.psi.impl.smartPointers.SelfElementInfo.restoreFileFromVirtual() must be able to work for which the findFile() must access filemanager for which it must be alive
    Disposer.register(((PsiManagerEx)PsiManager.getInstance(project)).getFileManager(), this);
    Disposer.register(this, contentUI);
  }

  @Override
  public boolean canCloseContents() {
    return myCanCloseContents;
  }

  // Lazily builds the Swing component wrapping the ContentUI's component.
  @NotNull
  @Override
  public JComponent getComponent() {
    if (myComponent == null) {
      myComponent = new MyNonOpaquePanel();

      NonOpaquePanel contentComponent = new NonOpaquePanel();
      contentComponent.setContent(myUI.getComponent());

      myComponent.add(contentComponent, BorderLayout.CENTER);
    }
    return myComponent;
  }

  @NotNull
  @Override
  public ActionCallback getReady(@NotNull Object requestor) {
    Content selected = getSelectedContent();
    if (selected == null) return ActionCallback.DONE;
    BusyObject busyObject = selected.getBusyObject();
    return busyObject != null ? busyObject.getReady(requestor) : ActionCallback.DONE;
  }

  private class MyNonOpaquePanel extends NonOpaquePanel implements DataProvider {
    MyNonOpaquePanel() {
      super(new BorderLayout());
    }

    @Override
    @Nullable
    public Object getData(@NotNull @NonNls String dataId) {
      // NOTE: && binds tighter than || — the count check applies only to NONEMPTY_CONTENT_MANAGER.
      if (PlatformDataKeys.CONTENT_MANAGER.is(dataId) || PlatformDataKeys.NONEMPTY_CONTENT_MANAGER.is(dataId) && getContentCount() > 1) {
        return ContentManagerImpl.this;
      }

      // Registered providers take precedence over the ContentUI itself.
      for (DataProvider dataProvider : dataProviders) {
        Object data = dataProvider.getData(dataId);
        if (data != null) {
          return data;
        }
      }

      if (myUI instanceof DataProvider) {
        return ((DataProvider)myUI).getData(dataId);
      }

      DataProvider provider = DataManager.getDataProvider(this);
      return provider == null ? null : provider.getData(dataId);
    }
  }

  @Override
  public void addContent(@NotNull Content content, final int order) {
    doAddContent(content, order);
  }

  @Override
  public void addContent(@NotNull Content content) {
    // -1 means "append at the end".
    doAddContent(content, -1);
  }

  private void doAddContent(@NotNull final Content content, final int index) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    // Re-adding an existing content only moves it to the requested position — no events fired.
    if (myContents.contains(content)) {
      myContents.remove(content);
      myContents.add(index == -1 ? myContents.size() : index, content);
      return;
    }

    ((ContentImpl)content).setManager(this);
    final int insertIndex = index == -1 ? myContents.size() : index;
    myContents.add(insertIndex, content);
    content.addPropertyChangeListener(this);
    fireContentAdded(content, insertIndex);
    // Select the new content when the UI asks for it, or when nothing is selected and empty selection is not allowed.
    if (myUI.isToSelectAddedContent() || mySelection.isEmpty() && !myUI.canBeEmptySelection()) {
      if (myUI.isSingleSelection()) {
        setSelectedContent(content);
      }
      else {
        addSelectedContent(content);
      }
    }

    Disposer.register(this, content);
  }

  @Override
  public boolean removeContent(@NotNull Content content, final boolean dispose) {
    boolean wasFocused = content.getComponent() != null && UIUtil.isFocusAncestor(content.getComponent());
    return removeContent(content, dispose, wasFocused, false).isDone();
  }

  @NotNull
  @Override
  public ActionCallback removeContent(@NotNull Content content, boolean dispose, final boolean requestFocus, final boolean forcedFocus) {
    final ActionCallback result = new ActionCallback();
    doRemoveContent(content, dispose).doWhenDone(() -> {
      if (requestFocus) {
        Content current = getSelectedContent();
        if (current != null) {
          setSelectedContent(current, true, true, !forcedFocus).notify(result);
        }
        else {
          result.setDone();
        }
      }
      else {
        result.setDone();
      }
    });

    return result;
  }

  @NotNull
  private ActionCallback doRemoveContent(@NotNull Content content, boolean dispose) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    int indexToBeRemoved = getIndexOfContent(content);
    if (indexToBeRemoved == -1) return ActionCallback.REJECTED;

    try {
      // Remember the latest selection's index before firing the veto query.
      Content selection = mySelection.isEmpty() ? null : mySelection.get(mySelection.size() - 1);
      int selectedIndex = selection != null ? myContents.indexOf(selection) : -1;

      // Listeners may veto the removal (or invalidate the content) via contentRemoveQuery.
      if (!fireContentRemoveQuery(content, indexToBeRemoved)) {
        return ActionCallback.REJECTED;
      }
      if (!content.isValid()) {
        return ActionCallback.REJECTED;
      }

      boolean wasSelected = isSelected(content);
      if (wasSelected) {
        removeFromSelection(content);
      }

      // Pick the index to select afterwards: the previous neighbor if the removed one was
      // selected, otherwise shift the remembered selection index left when needed.
      int indexToSelect = -1;
      if (wasSelected) {
        int i = indexToBeRemoved - 1;
        if (i >= 0) {
          indexToSelect = i;
        }
        else if (getContentCount() > 1) {
          indexToSelect = 0;
        }
      }
      else if (selectedIndex > indexToBeRemoved) {
        indexToSelect = selectedIndex - 1;
      }

      mySelectionHistory.remove(content);
      myContents.remove(content);
      content.removePropertyChangeListener(this);

      fireContentRemoved(content, indexToBeRemoved);
      ((ContentImpl)content).setManager(null);

      if (dispose) {
        Disposer.dispose(content);
      }

      int newSize = myContents.size();
      if (newSize > 0) {
        if (indexToSelect > -1) {
          // Prefer the most recently selected content over the positional neighbor.
          final Content toSelect = !mySelectionHistory.isEmpty() ? mySelectionHistory.get(0) : myContents.get(indexToSelect);
          if (!isSelected(toSelect)) {
            if (myUI.isSingleSelection()) {
              ActionCallback result = new ActionCallback();
              setSelectedContentCB(toSelect).notify(result);
              return result;
            }
            else {
              addSelectedContent(toSelect);
            }
          }
        }
      }
      else {
        mySelection.clear();
      }
      return ActionCallback.DONE;
    }
    finally {
      if (ApplicationManager.getApplication().isDispatchThread()) {
        if (!myDisposed) {
          myUI.getComponent().updateUI(); //cleanup visibleComponent from Alloy...TabbedPaneUI
        }
      }
    }
  }

  @Override
  public void removeAllContents(final boolean dispose) {
    Content[] contents = getContents();
    for (Content content : contents) {
      removeContent(content, dispose);
    }
  }

  @Override
  public int getContentCount() {
    return myContents.size();
  }

  @Override
  @NotNull
  public Content[] getContents() {
    return myContents.toArray(new Content[0]);
  }

  //TODO[anton,vova] is this method needed?
  @Override
  public Content findContent(String displayName) {
    for (Content content : myContents) {
      if (content.getDisplayName().equals(displayName)) {
        return content;
      }
    }
    return null;
  }

  @Override
  public Content getContent(int index) {
    return index >= 0 && index < myContents.size() ? myContents.get(index) : null;
  }

  @Override
  public Content getContent(@NotNull JComponent component) {
    Content[] contents = getContents();
    for (Content content : contents) {
      if (Comparing.equal(component, content.getComponent())) {
        return content;
      }
    }
    return null;
  }

  @Override
  public int getIndexOfContent(@NotNull Content content) {
    return myContents.indexOf(content);
  }

  @NotNull
  @Override
  public String getCloseActionName() {
    return myUI.getCloseActionName();
  }

  @NotNull
  @Override
  public String getCloseAllButThisActionName() {
    return myUI.getCloseAllButThisActionName();
  }

  @NotNull
  @Override
  public String getPreviousContentActionName() {
    return myUI.getPreviousContentActionName();
  }

  @NotNull
  @Override
  public String getNextContentActionName() {
    return myUI.getNextContentActionName();
  }

  @NotNull
  @Override
  public List<AnAction> getAdditionalPopupActions(@NotNull final Content content) {
    return Collections.emptyList();
  }

  @Override
  public boolean canCloseAllContents() {
    if (!canCloseContents()) {
      return false;
    }
    for (Content content : myContents) {
      if (content.isCloseable()) {
        return true;
      }
    }
    return false;
  }

  @Override
  public void addSelectedContent(@NotNull final Content content) {
    if (!checkSelectionChangeShouldBeProcessed(content, false)) return;

    if (getIndexOfContent(content) == -1) {
      throw new IllegalArgumentException("content not found: " + content);
    }
    if (!isSelected(content)) {
      mySelection.add(content);
      fireSelectionChanged(content, ContentManagerEvent.ContentOperation.add);
    }
  }

  // Re-selecting an already-selected content is a no-op unless its component changed meanwhile.
  private boolean checkSelectionChangeShouldBeProcessed(Content content, boolean implicit) {
    if (!myUI.canChangeSelectionTo(content, implicit)) {
      return false;
    }

    final boolean result = !isSelected(content) || myContentWithChangedComponent.contains(content);
    myContentWithChangedComponent.remove(content);

    return result;
  }

  @Override
  public void removeFromSelection(@NotNull Content content) {
    if (!isSelected(content)) return;
    mySelection.remove(content);
    fireSelectionChanged(content, ContentManagerEvent.ContentOperation.remove);
  }

  @Override
  public boolean isSelected(@NotNull Content content) {
    return mySelection.contains(content);
  }

  @Override
  @NotNull
  public Content[] getSelectedContents() {
    return mySelection.toArray(new Content[0]);
  }

  @Override
  @Nullable
  public Content getSelectedContent() {
    return mySelection.isEmpty() ? null : mySelection.get(0);
  }

  @Override
  public void setSelectedContent(@NotNull Content content, boolean requestFocus) {
    setSelectedContentCB(content, requestFocus);
  }

  @NotNull
  @Override
  public ActionCallback setSelectedContentCB(@NotNull final Content content, final boolean requestFocus) {
    return setSelectedContentCB(content, requestFocus, true);
  }

  @Override
  public void setSelectedContent(@NotNull Content content, boolean requestFocus, boolean forcedFocus) {
    setSelectedContentCB(content, requestFocus, forcedFocus);
  }

  @NotNull
  @Override
  public ActionCallback setSelectedContentCB(@NotNull final Content content, final boolean requestFocus, final boolean forcedFocus) {
    return setSelectedContent(content, requestFocus, forcedFocus, false);
  }

  /**
   * Core selection routine: updates the MRU history, deselects the previous selection and
   * selects {@code content}, optionally transferring focus. When the selection currently
   * holds focus (or focus was requested) and focus transfer is enabled, the switch is
   * deferred until the manager's component has received focus.
   */
  @NotNull
  @Override
  public ActionCallback setSelectedContent(@NotNull final Content content, final boolean requestFocus, final boolean forcedFocus, boolean implicit) {
    mySelectionHistory.remove(content);
    mySelectionHistory.add(0, content);
    if (isSelected(content) && requestFocus) {
      return requestFocus(content, forcedFocus);
    }

    if (!checkSelectionChangeShouldBeProcessed(content, implicit)) {
      return ActionCallback.REJECTED;
    }
    if (!myContents.contains(content)) {
      throw new IllegalArgumentException("Cannot find content:" + content.getDisplayName());
    }

    final boolean focused = isSelectionHoldsFocus();
    final Content[] old = getSelectedContents();

    final ActiveRunnable selection = new ActiveRunnable() {
      @NotNull
      @Override
      public ActionCallback run() {
        // The content may have been removed (or the manager disposed) while focus was moving.
        if (myDisposed || getIndexOfContent(content) == -1) return ActionCallback.REJECTED;

        for (Content each : old) {
          removeFromSelection(each);
        }

        addSelectedContent(content);

        if (requestFocus) {
          requestFocus(content, forcedFocus);
        }
        return ActionCallback.DONE;
      }
    };

    final ActionCallback result = new ActionCallback();
    boolean enabledFocus = getFocusManager().isFocusTransferEnabled();
    if (focused || requestFocus) {
      if (enabledFocus) {
        return getFocusManager().requestFocus(getComponent(), true).doWhenProcessed(() -> selection.run().notify(result));
      }
    }
    return selection.run().notify(result);
  }

  private boolean isSelectionHoldsFocus() {
    boolean focused = false;
    final Content[] selection = getSelectedContents();
    for (Content each : selection) {
      if (UIUtil.isFocusAncestor(each.getComponent())) {
        focused = true;
        break;
      }
    }
    return focused;
  }

  @NotNull
  @Override
  public ActionCallback setSelectedContentCB(@NotNull Content content) {
    return setSelectedContentCB(content, false);
  }

  @Override
  public void setSelectedContent(@NotNull final Content content) {
    setSelectedContentCB(content);
  }

  @Override
  public ActionCallback selectPreviousContent() {
    int contentCount = getContentCount();
    LOG.assertTrue(contentCount > 1);
    Content selectedContent = getSelectedContent();
    int index = selectedContent == null ? -1 : getIndexOfContent(selectedContent);
    // Wrap around to the last content when at the first.
    index = (index - 1 + contentCount) % contentCount;
    final Content content = getContent(index);
    if (content == null) {
      return null;
    }
    return setSelectedContentCB(content, true);
  }

  @Override
  public ActionCallback selectNextContent() {
    int contentCount = getContentCount();
    LOG.assertTrue(contentCount > 1);
    Content selectedContent = getSelectedContent();
    int index = selectedContent == null ? -1 : getIndexOfContent(selectedContent);
    // Wrap around to the first content when at the last.
    index = (index + 1) % contentCount;
    final Content content = getContent(index);
    if (content == null) {
      return null;
    }
    return setSelectedContentCB(content, true);
  }

  @Override
  public void addContentManagerListener(@NotNull ContentManagerListener l) {
    // Newest listener first: it sees events (and may consume remove queries) before older ones.
    myDispatcher.getListeners().add(0, l);
  }

  @Override
  public void removeContentManagerListener(@NotNull ContentManagerListener l) {
    myDispatcher.removeListener(l);
  }

  private void fireContentAdded(@NotNull Content content, int newIndex) {
    ContentManagerEvent e = new ContentManagerEvent(this, content, newIndex, ContentManagerEvent.ContentOperation.add);
    myDispatcher.getMulticaster().contentAdded(e);
  }

  private void fireContentRemoved(@NotNull Content content, int oldIndex) {
    ContentManagerEvent e = new ContentManagerEvent(this, content, oldIndex, ContentManagerEvent.ContentOperation.remove);
    myDispatcher.getMulticaster().contentRemoved(e);
  }

  private void fireSelectionChanged(@NotNull Content content, ContentManagerEvent.ContentOperation operation) {
    ContentManagerEvent e = new ContentManagerEvent(this, content, getIndexOfContent(content), operation);
    myDispatcher.getMulticaster().selectionChanged(e);
  }

  // Returns false when any listener consumes the event, i.e. vetoes the removal.
  private boolean fireContentRemoveQuery(@NotNull Content content, int oldIndex) {
    ContentManagerEvent event = new ContentManagerEvent(this, content, oldIndex, ContentManagerEvent.ContentOperation.undefined);
    for (ContentManagerListener listener : myDispatcher.getListeners()) {
      listener.contentRemoveQuery(event);
      if (event.isConsumed()) {
        return false;
      }
    }
    return true;
  }

  @NotNull
  @Override
  public ActionCallback requestFocus(final Content content, final boolean forced) {
    // null content means "focus the current selection".
    final Content toSelect = content == null ? getSelectedContent() : content;
    if (toSelect == null) return ActionCallback.REJECTED;
    assert myContents.contains(toSelect);

    JComponent preferredFocusableComponent = toSelect.getPreferredFocusableComponent();
    return preferredFocusableComponent != null ? getFocusManager().requestFocusInProject(preferredFocusableComponent, myProject) : ActionCallback.REJECTED;
  }

  private IdeFocusManager getFocusManager() {
    return IdeFocusManager.getInstance(myProject);
  }

  @Override
  public void addDataProvider(@NotNull final DataProvider provider) {
    dataProviders.add(provider);
  }

  @Override
  public void propertyChange(@NotNull PropertyChangeEvent event) {
    if (Content.PROP_COMPONENT.equals(event.getPropertyName())) {
      myContentWithChangedComponent.add((Content)event.getSource());
    }
  }

  @Override
  @NotNull
  public ContentFactory getFactory() {
    return ServiceManager.getService(ContentFactory.class);
  }

  @Override
  public void beforeTreeDispose() {
    if (myDisposed) return;
    myUI.beforeDispose();
  }

  @Override
  public void dispose() {
    if (myDisposed) return;
    myDisposed = true;

    myContents.clear();
    mySelection.clear();
    myContentWithChangedComponent.clear();
    myUI = null;

    myDispatcher.getListeners().clear();
    dataProviders.clear();
    myComponent = null;
  }

  @Override
  public boolean isDisposed() {
    return myDisposed;
  }

  @Override
  public boolean isSingleSelection() {
    return myUI.isSingleSelection();
  }
}
/*
 * Copyright 2005 Ryan Bloom
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.rkbloom.logdriver;

import org.apache.log4j.Logger;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;

/**
 * LogStatement is a wrapper class around the JDBC Statement. It will log the
 * SQL statements being executed, then forward the calls to the embedded
 * JDBC Statement.
 * <p>
 * All logging calls are guarded with {@link Logger#isDebugEnabled()} so the
 * SQL strings are only concatenated when DEBUG logging is actually on.
 * @version $Rev$
 */
public class LogStatement implements Statement {

    /** The real JDBC statement every call is forwarded to. */
    private final Statement embedded;

    /**
     * The (wrapping) connection that created this statement; returned from
     * {@link #getConnection()} instead of {@code embedded.getConnection()}
     * so callers keep seeing the logging wrapper.
     */
    private final Connection conn;

    private static final Logger log = Logger.getLogger(LogStatement.class);

    /**
     * Wraps the given statement.
     *
     * @param stmt the statement to delegate to
     * @param c    the connection that created this statement
     */
    LogStatement(Statement stmt, Connection c) {
        embedded = stmt;
        conn = c;
    }

    /** {@inheritDoc} */
    public int getFetchDirection() throws SQLException {
        return embedded.getFetchDirection();
    }

    /** {@inheritDoc} */
    public int getFetchSize() throws SQLException {
        return embedded.getFetchSize();
    }

    /** {@inheritDoc} */
    public int getMaxFieldSize() throws SQLException {
        return embedded.getMaxFieldSize();
    }

    /** {@inheritDoc} */
    public int getMaxRows() throws SQLException {
        return embedded.getMaxRows();
    }

    /** {@inheritDoc} */
    public int getQueryTimeout() throws SQLException {
        return embedded.getQueryTimeout();
    }

    /** {@inheritDoc} */
    public int getResultSetConcurrency() throws SQLException {
        return embedded.getResultSetConcurrency();
    }

    /** {@inheritDoc} */
    public int getResultSetHoldability() throws SQLException {
        return embedded.getResultSetHoldability();
    }

    /** {@inheritDoc} */
    public int getResultSetType() throws SQLException {
        return embedded.getResultSetType();
    }

    /** {@inheritDoc} */
    public int getUpdateCount() throws SQLException {
        return embedded.getUpdateCount();
    }

    /** {@inheritDoc} */
    public void cancel() throws SQLException {
        embedded.cancel();
    }

    /** {@inheritDoc} */
    public void clearBatch() throws SQLException {
        embedded.clearBatch();
    }

    /** {@inheritDoc} */
    public void clearWarnings() throws SQLException {
        embedded.clearWarnings();
    }

    /** {@inheritDoc} */
    public void close() throws SQLException {
        embedded.close();
    }

    /** {@inheritDoc} */
    public boolean getMoreResults() throws SQLException {
        return embedded.getMoreResults();
    }

    /** {@inheritDoc} */
    public int[] executeBatch() throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing the entire batch");
        }
        return embedded.executeBatch();
    }

    /** {@inheritDoc} */
    public void setFetchDirection(int direction) throws SQLException {
        embedded.setFetchDirection(direction);
    }

    /** {@inheritDoc} */
    public void setFetchSize(int rows) throws SQLException {
        embedded.setFetchSize(rows);
    }

    /** {@inheritDoc} */
    public void setMaxFieldSize(int max) throws SQLException {
        embedded.setMaxFieldSize(max);
    }

    /** {@inheritDoc} */
    public void setMaxRows(int max) throws SQLException {
        embedded.setMaxRows(max);
    }

    /** {@inheritDoc} */
    public void setQueryTimeout(int seconds) throws SQLException {
        embedded.setQueryTimeout(seconds);
    }

    /** {@inheritDoc} */
    public boolean getMoreResults(int current) throws SQLException {
        return embedded.getMoreResults(current);
    }

    /** {@inheritDoc} */
    public void setEscapeProcessing(boolean enable) throws SQLException {
        embedded.setEscapeProcessing(enable);
    }

    /** {@inheritDoc} */
    public int executeUpdate(String sql) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing Update: " + sql);
        }
        return embedded.executeUpdate(sql);
    }

    /** {@inheritDoc} */
    public void addBatch(String sql) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Adding '" + sql + "' to the batch");
        }
        embedded.addBatch(sql);
    }

    /** {@inheritDoc} */
    public void setCursorName(String name) throws SQLException {
        embedded.setCursorName(name);
    }

    /** {@inheritDoc} */
    public boolean execute(String sql) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing: " + sql);
        }
        return embedded.execute(sql);
    }

    /** {@inheritDoc} */
    public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing Update: " + sql);
        }
        return embedded.executeUpdate(sql, autoGeneratedKeys);
    }

    /** {@inheritDoc} */
    public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing: " + sql);
        }
        return embedded.execute(sql, autoGeneratedKeys);
    }

    /** {@inheritDoc} */
    public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing Update: " + sql);
        }
        return embedded.executeUpdate(sql, columnIndexes);
    }

    /** {@inheritDoc} */
    public boolean execute(String sql, int[] columnIndexes) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing: " + sql);
        }
        return embedded.execute(sql, columnIndexes);
    }

    /** {@inheritDoc} */
    public Connection getConnection() throws SQLException {
        // Deliberately returns the wrapping connection, not embedded.getConnection(),
        // so the logging layer stays in place for anything obtained from it.
        return conn;
    }

    /** {@inheritDoc} */
    public ResultSet getGeneratedKeys() throws SQLException {
        return embedded.getGeneratedKeys();
    }

    /** {@inheritDoc} */
    public ResultSet getResultSet() throws SQLException {
        return embedded.getResultSet();
    }

    /** {@inheritDoc} */
    public SQLWarning getWarnings() throws SQLException {
        return embedded.getWarnings();
    }

    /** {@inheritDoc} */
    public int executeUpdate(String sql, String[] columnNames) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing Update: " + sql);
        }
        return embedded.executeUpdate(sql, columnNames);
    }

    /** {@inheritDoc} */
    public boolean execute(String sql, String[] columnNames) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing: " + sql);
        }
        return embedded.execute(sql, columnNames);
    }

    /** {@inheritDoc} */
    public ResultSet executeQuery(String sql) throws SQLException {
        if (log.isDebugEnabled()) {
            log.debug("Executing Query: " + sql);
        }
        return embedded.executeQuery(sql);
    }

    /** {@inheritDoc} */
    public <T> T unwrap(Class<T> iface) throws SQLException {
        return embedded.unwrap(iface);
    }

    /** {@inheritDoc} */
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        return embedded.isWrapperFor(iface);
    }

    /** {@inheritDoc} */
    public boolean isClosed() throws SQLException {
        return embedded.isClosed();
    }

    /** {@inheritDoc} */
    public void setPoolable(boolean poolable) throws SQLException {
        embedded.setPoolable(poolable);
    }

    /** {@inheritDoc} */
    public boolean isPoolable() throws SQLException {
        return embedded.isPoolable();
    }

    /** {@inheritDoc} */
    public void closeOnCompletion() throws SQLException {
        embedded.closeOnCompletion();
    }

    /** {@inheritDoc} */
    public boolean isCloseOnCompletion() throws SQLException {
        return embedded.isCloseOnCompletion();
    }
}
package org.andengine.entity.sprite;

import java.util.Arrays;

import org.andengine.util.math.MathUtils;
import org.andengine.util.modifier.IModifier.DeepCopyNotSupportedException;
import org.andengine.util.time.TimeConstants;

/**
 * Holds the frame indices, per-frame durations and loop count of a sprite
 * animation, plus a precomputed table of cumulative frame end times (in
 * nanoseconds) used to resolve the current frame from an elapsed time.
 *
 * (c) Zynga 2012
 *
 * @author Nicolas Gramlich <ngramlich@zynga.com>
 * @since 11:43:01 - 04.05.2012
 */
public class AnimationData implements IAnimationData {
	// ===========================================================
	// Constants
	// ===========================================================

	// ===========================================================
	// Fields
	// ===========================================================

	// Number of frames in the animation.
	private int mFrameCount;
	// Explicit frame indices to animate, or null when animating a
	// contiguous range starting at mFirstFrameIndex.
	private int[] mFrames;
	// Per-frame durations (milliseconds), one entry per frame.
	private long[] mFrameDurations;
	private int mFirstFrameIndex;
	// Number of loops, or IAnimationData.LOOP_CONTINUOUS.
	private int mLoopCount;
	// Cumulative frame end times in nanoseconds; lazily (re)allocated and
	// possibly longer than mFrameCount, so only the first mFrameCount
	// entries are valid.
	private long[] mFrameEndsInNanoseconds;
	// Total duration of one pass of the animation, in nanoseconds.
	private long mAnimationDuration;

	// ===========================================================
	// Constructors
	// ===========================================================

	@Deprecated
	public AnimationData() {

	}

	public AnimationData(final long pFrameDurationEach, final int pFrameCount) {
		this.set(pFrameDurationEach, pFrameCount);
	}

	public AnimationData(final long pFrameDurationEach, final int pFrameCount, final boolean pLoop) {
		this.set(pFrameDurationEach, pFrameCount, pLoop);
	}

	public AnimationData(final long pFrameDurationEach, final int pFrameCount, final int pLoopCount) {
		this.set(pFrameDurationEach, pFrameCount, pLoopCount);
	}

	public AnimationData(final long[] pFrameDurations) {
		this.set(pFrameDurations);
	}

	public AnimationData(final long[] pFrameDurations, final boolean pLoop) {
		this.set(pFrameDurations, pLoop);
	}

	public AnimationData(final long[] pFrameDurations, final int pLoopCount) {
		this.set(pFrameDurations, pLoopCount);
	}

	public AnimationData(final long[] pFrameDurations, final int pFirstFrameIndex, final int pLastFrameIndex, final boolean pLoop) {
		this.set(pFrameDurations, pFirstFrameIndex, pLastFrameIndex, pLoop);
	}

	/**
	 * Animate specifics frames.
	 *
	 * @param pFrameDurations
	 *            must have the same length as pFrames.
	 * @param pFrames
	 *            indices of the frames to animate.
	 * @param pLoopCount
	 */
	public AnimationData(final long[] pFrameDurations, final int[] pFrames, final int pLoopCount) {
		this.set(pFrameDurations, pFrames, pLoopCount);
	}

	/**
	 * @param pFrameDurations
	 *            must have the same length as pFirstFrameIndex to
	 *            pLastFrameIndex.
	 * @param pFirstFrameIndex
	 * @param pLastFrameIndex
	 * @param pLoopCount
	 */
	public AnimationData(final long[] pFrameDurations, final int pFirstFrameIndex, final int pLastFrameIndex, final int pLoopCount) {
		this.set(pFrameDurations, pFirstFrameIndex, pLastFrameIndex, pLoopCount);
	}

	public AnimationData(final IAnimationData pAnimationData) {
		this.set(pAnimationData);
	}

	@Override
	public IAnimationData deepCopy() throws DeepCopyNotSupportedException {
		// NOTE(review): the copy shares the frame/duration arrays with the
		// source (set(IAnimationData) copies references, not contents).
		return new AnimationData(this);
	}

	// ===========================================================
	// Getter & Setter
	// ===========================================================

	// ===========================================================
	// Methods for/from SuperClass/Interfaces
	// ===========================================================

	@Override
	public int[] getFrames() {
		return this.mFrames;
	}

	@Override
	public long[] getFrameDurations() {
		return this.mFrameDurations;
	}

	@Override
	public int getLoopCount() {
		return this.mLoopCount;
	}

	@Override
	public int getFrameCount() {
		return this.mFrameCount;
	}

	@Override
	public int getFirstFrameIndex() {
		return this.mFirstFrameIndex;
	}

	@Override
	public long getAnimationDuration() {
		return this.mAnimationDuration;
	}

	/**
	 * Resolves which frame is active after the given elapsed time.
	 *
	 * @param pAnimationProgress
	 *            elapsed time within the current loop. The frame-end table
	 *            this is compared against is built in nanoseconds (see
	 *            {@link TimeConstants#NANOSECONDS_PER_MILLISECOND} in the
	 *            private set(...)), so this is expected in nanoseconds —
	 *            the original javadoc said "milliseconds"; confirm callers.
	 * @return the index of the first frame whose end time has not yet been
	 *         reached, or the last frame if the progress is past the end.
	 */
	@Override
	public int calculateCurrentFrameIndex(final long pAnimationProgress) {
		final long[] frameEnds = this.mFrameEndsInNanoseconds;
		final int frameCount = this.mFrameCount;
		for (int i = 0; i < frameCount; i++) {
			if (frameEnds[i] > pAnimationProgress) {
				return i;
			}
		}
		return frameCount - 1;
	}

	@Override
	public void set(final long pFrameDurationEach, final int pFrameCount) {
		this.set(pFrameDurationEach, pFrameCount, true);
	}

	@Override
	public void set(final long pFrameDurationEach, final int pFrameCount, final boolean pLoop) {
		this.set(pFrameDurationEach, pFrameCount, (pLoop) ? IAnimationData.LOOP_CONTINUOUS : 0);
	}

	@Override
	public void set(final long pFrameDurationEach, final int pFrameCount, final int pLoopCount) {
		this.set(AnimationData.fillFrameDurations(pFrameDurationEach, pFrameCount), pLoopCount);
	}

	@Override
	public void set(final long[] pFrameDurations) {
		this.set(pFrameDurations, true);
	}

	@Override
	public void set(final long[] pFrameDurations, final boolean pLoop) {
		this.set(pFrameDurations, (pLoop) ? IAnimationData.LOOP_CONTINUOUS : 0);
	}

	@Override
	public void set(final long[] pFrameDurations, final int pLoopCount) {
		this.set(pFrameDurations, 0, pFrameDurations.length - 1, pLoopCount);
	}

	@Override
	public void set(final long[] pFrameDurations, final int pFirstFrameIndex, final int pLastFrameIndex) {
		this.set(pFrameDurations, pFirstFrameIndex, pLastFrameIndex, true);
	}

	@Override
	public void set(final long[] pFrameDurations, final int pFirstFrameIndex, final int pLastFrameIndex, final boolean pLoop) {
		this.set(pFrameDurations, pFirstFrameIndex, pLastFrameIndex, (pLoop) ? IAnimationData.LOOP_CONTINUOUS : 0);
	}

	/**
	 * @param pFrameDurations
	 *            must have the same length as pFirstFrameIndex to
	 *            pLastFrameIndex.
	 * @param pFirstFrameIndex
	 * @param pLastFrameIndex
	 * @param pLoopCount
	 * @throws IllegalArgumentException if the range covers fewer than two frames.
	 */
	@Override
	public void set(final long[] pFrameDurations, final int pFirstFrameIndex, final int pLastFrameIndex, final int pLoopCount) {
		// BUGFIX: validate BEFORE mutating any state. The original called
		// this.set(...) first and threw afterwards, leaving the object
		// half-initialized when the frame range was invalid.
		if ((pFirstFrameIndex + 1) > pLastFrameIndex) {
			throw new IllegalArgumentException("An animation needs at least two tiles to animate between.");
		}

		this.set(pFrameDurations, (pLastFrameIndex - pFirstFrameIndex) + 1, null, pFirstFrameIndex, pLoopCount);
	}

	/**
	 * Animate specifics frames.
	 *
	 * @param pFrameDurations
	 *            must have the same length as pFrames.
	 * @param pFrames
	 *            indices of the frames to animate.
	 */
	@Override
	public void set(final long[] pFrameDurations, final int[] pFrames) {
		this.set(pFrameDurations, pFrames, true);
	}

	/**
	 * Animate specifics frames.
	 *
	 * @param pFrameDurations
	 *            must have the same length as pFrames.
	 * @param pFrames
	 *            indices of the frames to animate.
	 * @param pLoop
	 */
	@Override
	public void set(final long[] pFrameDurations, final int[] pFrames, final boolean pLoop) {
		this.set(pFrameDurations, pFrames, (pLoop) ? IAnimationData.LOOP_CONTINUOUS : 0);
	}

	/**
	 * Animate specifics frames.
	 *
	 * @param pFrameDurations
	 *            must have the same length as pFrames.
	 * @param pFrames
	 *            indices of the frames to animate.
	 * @param pLoopCount
	 */
	@Override
	public void set(final long[] pFrameDurations, final int[] pFrames, final int pLoopCount) {
		this.set(pFrameDurations, pFrames.length, pFrames, 0, pLoopCount);
	}

	@Override
	public void set(final IAnimationData pAnimationData) {
		this.set(pAnimationData.getFrameDurations(), pAnimationData.getFrameCount(), pAnimationData.getFrames(), pAnimationData.getFirstFrameIndex(), pAnimationData.getLoopCount());
	}

	/**
	 * Central setter every overload funnels into: stores the configuration
	 * and rebuilds the cumulative frame-end table.
	 *
	 * @param pFrameDurations per-frame durations (milliseconds); length must equal pFrameCount.
	 * @param pFrameCount number of frames.
	 * @param pFrames explicit frame indices, or null for a contiguous range.
	 * @param pFirstFrameIndex first frame of a contiguous range (ignored when pFrames is non-null).
	 * @param pLoopCount loop count or IAnimationData.LOOP_CONTINUOUS.
	 * @throws IllegalArgumentException if pFrameDurations.length != pFrameCount.
	 */
	private void set(final long[] pFrameDurations, final int pFrameCount, final int[] pFrames, final int pFirstFrameIndex, final int pLoopCount) {
		if (pFrameDurations.length != pFrameCount) {
			throw new IllegalArgumentException("pFrameDurations does not equal pFrameCount!");
		}

		this.mFrameDurations = pFrameDurations;
		this.mFrameCount = pFrameCount;
		this.mFrames = pFrames;
		this.mFirstFrameIndex = pFirstFrameIndex;
		this.mLoopCount = pLoopCount;

		// Reuse the existing buffer when it is big enough; only the first
		// mFrameCount entries are considered valid.
		if ((this.mFrameEndsInNanoseconds == null) || (this.mFrameCount > this.mFrameEndsInNanoseconds.length)) {
			this.mFrameEndsInNanoseconds = new long[this.mFrameCount];
		}

		final long[] frameEndsInNanoseconds = this.mFrameEndsInNanoseconds;
		// Prefix-sum of the millisecond durations, scaled to nanoseconds.
		MathUtils.arraySumInto(this.mFrameDurations, frameEndsInNanoseconds, TimeConstants.NANOSECONDS_PER_MILLISECOND);

		final long lastFrameEnd = frameEndsInNanoseconds[this.mFrameCount - 1];
		this.mAnimationDuration = lastFrameEnd;
	}

	// ===========================================================
	// Methods
	// ===========================================================

	/** Builds a duration array of pFrameCount entries, all pFrameDurationEach. */
	private static long[] fillFrameDurations(final long pFrameDurationEach, final int pFrameCount) {
		final long[] frameDurations = new long[pFrameCount];
		Arrays.fill(frameDurations, pFrameDurationEach);
		return frameDurations;
	}

	// ===========================================================
	// Inner and Anonymous Classes
	// ===========================================================
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/tasks/v2beta3/queue.proto package com.google.cloud.tasks.v2beta3; /** * * * <pre> * Statistics for a queue. * </pre> * * Protobuf type {@code google.cloud.tasks.v2beta3.QueueStats} */ public final class QueueStats extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta3.QueueStats) QueueStatsOrBuilder { private static final long serialVersionUID = 0L; // Use QueueStats.newBuilder() to construct. 
private QueueStats(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private QueueStats() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new QueueStats(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private QueueStats( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { tasksCount_ = input.readInt64(); break; } case 18: { com.google.protobuf.Timestamp.Builder subBuilder = null; if (oldestEstimatedArrivalTime_ != null) { subBuilder = oldestEstimatedArrivalTime_.toBuilder(); } oldestEstimatedArrivalTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(oldestEstimatedArrivalTime_); oldestEstimatedArrivalTime_ = subBuilder.buildPartial(); } break; } case 24: { executedLastMinuteCount_ = input.readInt64(); break; } case 32: { concurrentDispatchesCount_ = input.readInt64(); break; } case 41: { effectiveExecutionRate_ = input.readDouble(); break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static 
final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2beta3.QueueProto .internal_static_google_cloud_tasks_v2beta3_QueueStats_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2beta3.QueueProto .internal_static_google_cloud_tasks_v2beta3_QueueStats_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2beta3.QueueStats.class, com.google.cloud.tasks.v2beta3.QueueStats.Builder.class); } public static final int TASKS_COUNT_FIELD_NUMBER = 1; private long tasksCount_; /** * * * <pre> * Output only. An estimation of the number of tasks in the queue, that is, the tasks in * the queue that haven't been executed, the tasks in the queue which the * queue has dispatched but has not yet received a reply for, and the failed * tasks that the queue is retrying. * </pre> * * <code>int64 tasks_count = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The tasksCount. */ @java.lang.Override public long getTasksCount() { return tasksCount_; } public static final int OLDEST_ESTIMATED_ARRIVAL_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp oldestEstimatedArrivalTime_; /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the oldestEstimatedArrivalTime field is set. */ @java.lang.Override public boolean hasOldestEstimatedArrivalTime() { return oldestEstimatedArrivalTime_ != null; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. 
* </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The oldestEstimatedArrivalTime. */ @java.lang.Override public com.google.protobuf.Timestamp getOldestEstimatedArrivalTime() { return oldestEstimatedArrivalTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : oldestEstimatedArrivalTime_; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getOldestEstimatedArrivalTimeOrBuilder() { return getOldestEstimatedArrivalTime(); } public static final int EXECUTED_LAST_MINUTE_COUNT_FIELD_NUMBER = 3; private long executedLastMinuteCount_; /** * * * <pre> * Output only. The number of tasks that the queue has dispatched and received a reply for * during the last minute. This variable counts both successful and * non-successful executions. * </pre> * * <code>int64 executed_last_minute_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The executedLastMinuteCount. */ @java.lang.Override public long getExecutedLastMinuteCount() { return executedLastMinuteCount_; } public static final int CONCURRENT_DISPATCHES_COUNT_FIELD_NUMBER = 4; private long concurrentDispatchesCount_; /** * * * <pre> * Output only. The number of requests that the queue has dispatched but has not received * a reply for yet. * </pre> * * <code>int64 concurrent_dispatches_count = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The concurrentDispatchesCount. 
*/ @java.lang.Override public long getConcurrentDispatchesCount() { return concurrentDispatchesCount_; } public static final int EFFECTIVE_EXECUTION_RATE_FIELD_NUMBER = 5; private double effectiveExecutionRate_; /** * * * <pre> * Output only. The current maximum number of tasks per second executed by the queue. * The maximum value of this variable is controlled by the RateLimits of the * Queue. However, this value could be less to avoid overloading the endpoints * tasks in the queue are targeting. * </pre> * * <code>double effective_execution_rate = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The effectiveExecutionRate. */ @java.lang.Override public double getEffectiveExecutionRate() { return effectiveExecutionRate_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (tasksCount_ != 0L) { output.writeInt64(1, tasksCount_); } if (oldestEstimatedArrivalTime_ != null) { output.writeMessage(2, getOldestEstimatedArrivalTime()); } if (executedLastMinuteCount_ != 0L) { output.writeInt64(3, executedLastMinuteCount_); } if (concurrentDispatchesCount_ != 0L) { output.writeInt64(4, concurrentDispatchesCount_); } if (effectiveExecutionRate_ != 0D) { output.writeDouble(5, effectiveExecutionRate_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (tasksCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, tasksCount_); } if (oldestEstimatedArrivalTime_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, getOldestEstimatedArrivalTime()); } if (executedLastMinuteCount_ != 0L) 
{ size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, executedLastMinuteCount_); } if (concurrentDispatchesCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, concurrentDispatchesCount_); } if (effectiveExecutionRate_ != 0D) { size += com.google.protobuf.CodedOutputStream.computeDoubleSize(5, effectiveExecutionRate_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.tasks.v2beta3.QueueStats)) { return super.equals(obj); } com.google.cloud.tasks.v2beta3.QueueStats other = (com.google.cloud.tasks.v2beta3.QueueStats) obj; if (getTasksCount() != other.getTasksCount()) return false; if (hasOldestEstimatedArrivalTime() != other.hasOldestEstimatedArrivalTime()) return false; if (hasOldestEstimatedArrivalTime()) { if (!getOldestEstimatedArrivalTime().equals(other.getOldestEstimatedArrivalTime())) return false; } if (getExecutedLastMinuteCount() != other.getExecutedLastMinuteCount()) return false; if (getConcurrentDispatchesCount() != other.getConcurrentDispatchesCount()) return false; if (java.lang.Double.doubleToLongBits(getEffectiveExecutionRate()) != java.lang.Double.doubleToLongBits(other.getEffectiveExecutionRate())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TASKS_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getTasksCount()); if (hasOldestEstimatedArrivalTime()) { hash = (37 * hash) + OLDEST_ESTIMATED_ARRIVAL_TIME_FIELD_NUMBER; hash = (53 * hash) + getOldestEstimatedArrivalTime().hashCode(); } hash = (37 * hash) + EXECUTED_LAST_MINUTE_COUNT_FIELD_NUMBER; hash = (53 * hash) + 
com.google.protobuf.Internal.hashLong(getExecutedLastMinuteCount()); hash = (37 * hash) + CONCURRENT_DISPATCHES_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getConcurrentDispatchesCount()); hash = (37 * hash) + EFFECTIVE_EXECUTION_RATE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( java.lang.Double.doubleToLongBits(getEffectiveExecutionRate())); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.QueueStats parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta3.QueueStats parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta3.QueueStats parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.tasks.v2beta3.QueueStats prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Statistics for a queue. * </pre> * * Protobuf type {@code google.cloud.tasks.v2beta3.QueueStats} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta3.QueueStats) com.google.cloud.tasks.v2beta3.QueueStatsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2beta3.QueueProto .internal_static_google_cloud_tasks_v2beta3_QueueStats_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2beta3.QueueProto .internal_static_google_cloud_tasks_v2beta3_QueueStats_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2beta3.QueueStats.class, com.google.cloud.tasks.v2beta3.QueueStats.Builder.class); } // Construct using com.google.cloud.tasks.v2beta3.QueueStats.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); tasksCount_ = 0L; if (oldestEstimatedArrivalTimeBuilder_ == null) { oldestEstimatedArrivalTime_ = null; } else { oldestEstimatedArrivalTime_ = null; oldestEstimatedArrivalTimeBuilder_ = null; } executedLastMinuteCount_ = 0L; concurrentDispatchesCount_ = 0L; effectiveExecutionRate_ = 0D; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.tasks.v2beta3.QueueProto .internal_static_google_cloud_tasks_v2beta3_QueueStats_descriptor; } @java.lang.Override public com.google.cloud.tasks.v2beta3.QueueStats getDefaultInstanceForType() { return com.google.cloud.tasks.v2beta3.QueueStats.getDefaultInstance(); } @java.lang.Override public com.google.cloud.tasks.v2beta3.QueueStats build() { com.google.cloud.tasks.v2beta3.QueueStats result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.tasks.v2beta3.QueueStats buildPartial() { com.google.cloud.tasks.v2beta3.QueueStats result = new com.google.cloud.tasks.v2beta3.QueueStats(this); result.tasksCount_ = tasksCount_; if (oldestEstimatedArrivalTimeBuilder_ == null) { result.oldestEstimatedArrivalTime_ = oldestEstimatedArrivalTime_; } else { result.oldestEstimatedArrivalTime_ = oldestEstimatedArrivalTimeBuilder_.build(); } result.executedLastMinuteCount_ = executedLastMinuteCount_; result.concurrentDispatchesCount_ = concurrentDispatchesCount_; result.effectiveExecutionRate_ = effectiveExecutionRate_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.tasks.v2beta3.QueueStats) { return mergeFrom((com.google.cloud.tasks.v2beta3.QueueStats) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.tasks.v2beta3.QueueStats other) { if (other == com.google.cloud.tasks.v2beta3.QueueStats.getDefaultInstance()) return this; if (other.getTasksCount() != 0L) { setTasksCount(other.getTasksCount()); } if (other.hasOldestEstimatedArrivalTime()) { mergeOldestEstimatedArrivalTime(other.getOldestEstimatedArrivalTime()); } if (other.getExecutedLastMinuteCount() != 0L) { setExecutedLastMinuteCount(other.getExecutedLastMinuteCount()); } if (other.getConcurrentDispatchesCount() != 0L) { setConcurrentDispatchesCount(other.getConcurrentDispatchesCount()); } if (other.getEffectiveExecutionRate() != 0D) { setEffectiveExecutionRate(other.getEffectiveExecutionRate()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.tasks.v2beta3.QueueStats parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.tasks.v2beta3.QueueStats) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private long tasksCount_; /** * * * <pre> * Output only. 
An estimation of the number of tasks in the queue, that is, the tasks in * the queue that haven't been executed, the tasks in the queue which the * queue has dispatched but has not yet received a reply for, and the failed * tasks that the queue is retrying. * </pre> * * <code>int64 tasks_count = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The tasksCount. */ @java.lang.Override public long getTasksCount() { return tasksCount_; } /** * * * <pre> * Output only. An estimation of the number of tasks in the queue, that is, the tasks in * the queue that haven't been executed, the tasks in the queue which the * queue has dispatched but has not yet received a reply for, and the failed * tasks that the queue is retrying. * </pre> * * <code>int64 tasks_count = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The tasksCount to set. * @return This builder for chaining. */ public Builder setTasksCount(long value) { tasksCount_ = value; onChanged(); return this; } /** * * * <pre> * Output only. An estimation of the number of tasks in the queue, that is, the tasks in * the queue that haven't been executed, the tasks in the queue which the * queue has dispatched but has not yet received a reply for, and the failed * tasks that the queue is retrying. * </pre> * * <code>int64 tasks_count = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearTasksCount() { tasksCount_ = 0L; onChanged(); return this; } private com.google.protobuf.Timestamp oldestEstimatedArrivalTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> oldestEstimatedArrivalTimeBuilder_; /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. 
* </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the oldestEstimatedArrivalTime field is set. */ public boolean hasOldestEstimatedArrivalTime() { return oldestEstimatedArrivalTimeBuilder_ != null || oldestEstimatedArrivalTime_ != null; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The oldestEstimatedArrivalTime. */ public com.google.protobuf.Timestamp getOldestEstimatedArrivalTime() { if (oldestEstimatedArrivalTimeBuilder_ == null) { return oldestEstimatedArrivalTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : oldestEstimatedArrivalTime_; } else { return oldestEstimatedArrivalTimeBuilder_.getMessage(); } } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setOldestEstimatedArrivalTime(com.google.protobuf.Timestamp value) { if (oldestEstimatedArrivalTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } oldestEstimatedArrivalTime_ = value; onChanged(); } else { oldestEstimatedArrivalTimeBuilder_.setMessage(value); } return this; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. 
* </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setOldestEstimatedArrivalTime( com.google.protobuf.Timestamp.Builder builderForValue) { if (oldestEstimatedArrivalTimeBuilder_ == null) { oldestEstimatedArrivalTime_ = builderForValue.build(); onChanged(); } else { oldestEstimatedArrivalTimeBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeOldestEstimatedArrivalTime(com.google.protobuf.Timestamp value) { if (oldestEstimatedArrivalTimeBuilder_ == null) { if (oldestEstimatedArrivalTime_ != null) { oldestEstimatedArrivalTime_ = com.google.protobuf.Timestamp.newBuilder(oldestEstimatedArrivalTime_) .mergeFrom(value) .buildPartial(); } else { oldestEstimatedArrivalTime_ = value; } onChanged(); } else { oldestEstimatedArrivalTimeBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearOldestEstimatedArrivalTime() { if (oldestEstimatedArrivalTimeBuilder_ == null) { oldestEstimatedArrivalTime_ = null; onChanged(); } else { oldestEstimatedArrivalTime_ = null; oldestEstimatedArrivalTimeBuilder_ = null; } return this; } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. 
* </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.Timestamp.Builder getOldestEstimatedArrivalTimeBuilder() { onChanged(); return getOldestEstimatedArrivalTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.TimestampOrBuilder getOldestEstimatedArrivalTimeOrBuilder() { if (oldestEstimatedArrivalTimeBuilder_ != null) { return oldestEstimatedArrivalTimeBuilder_.getMessageOrBuilder(); } else { return oldestEstimatedArrivalTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : oldestEstimatedArrivalTime_; } } /** * * * <pre> * Output only. An estimation of the nearest time in the future where a task in the queue * is scheduled to be executed. * </pre> * * <code> * .google.protobuf.Timestamp oldest_estimated_arrival_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getOldestEstimatedArrivalTimeFieldBuilder() { if (oldestEstimatedArrivalTimeBuilder_ == null) { oldestEstimatedArrivalTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getOldestEstimatedArrivalTime(), getParentForChildren(), isClean()); oldestEstimatedArrivalTime_ = null; } return oldestEstimatedArrivalTimeBuilder_; } private long executedLastMinuteCount_; /** * * * <pre> * Output only. The number of tasks that the queue has dispatched and received a reply for * during the last minute. 
This variable counts both successful and * non-successful executions. * </pre> * * <code>int64 executed_last_minute_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The executedLastMinuteCount. */ @java.lang.Override public long getExecutedLastMinuteCount() { return executedLastMinuteCount_; } /** * * * <pre> * Output only. The number of tasks that the queue has dispatched and received a reply for * during the last minute. This variable counts both successful and * non-successful executions. * </pre> * * <code>int64 executed_last_minute_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The executedLastMinuteCount to set. * @return This builder for chaining. */ public Builder setExecutedLastMinuteCount(long value) { executedLastMinuteCount_ = value; onChanged(); return this; } /** * * * <pre> * Output only. The number of tasks that the queue has dispatched and received a reply for * during the last minute. This variable counts both successful and * non-successful executions. * </pre> * * <code>int64 executed_last_minute_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearExecutedLastMinuteCount() { executedLastMinuteCount_ = 0L; onChanged(); return this; } private long concurrentDispatchesCount_; /** * * * <pre> * Output only. The number of requests that the queue has dispatched but has not received * a reply for yet. * </pre> * * <code>int64 concurrent_dispatches_count = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The concurrentDispatchesCount. */ @java.lang.Override public long getConcurrentDispatchesCount() { return concurrentDispatchesCount_; } /** * * * <pre> * Output only. The number of requests that the queue has dispatched but has not received * a reply for yet. 
* </pre> * * <code>int64 concurrent_dispatches_count = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The concurrentDispatchesCount to set. * @return This builder for chaining. */ public Builder setConcurrentDispatchesCount(long value) { concurrentDispatchesCount_ = value; onChanged(); return this; } /** * * * <pre> * Output only. The number of requests that the queue has dispatched but has not received * a reply for yet. * </pre> * * <code>int64 concurrent_dispatches_count = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearConcurrentDispatchesCount() { concurrentDispatchesCount_ = 0L; onChanged(); return this; } private double effectiveExecutionRate_; /** * * * <pre> * Output only. The current maximum number of tasks per second executed by the queue. * The maximum value of this variable is controlled by the RateLimits of the * Queue. However, this value could be less to avoid overloading the endpoints * tasks in the queue are targeting. * </pre> * * <code>double effective_execution_rate = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The effectiveExecutionRate. */ @java.lang.Override public double getEffectiveExecutionRate() { return effectiveExecutionRate_; } /** * * * <pre> * Output only. The current maximum number of tasks per second executed by the queue. * The maximum value of this variable is controlled by the RateLimits of the * Queue. However, this value could be less to avoid overloading the endpoints * tasks in the queue are targeting. * </pre> * * <code>double effective_execution_rate = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The effectiveExecutionRate to set. * @return This builder for chaining. */ public Builder setEffectiveExecutionRate(double value) { effectiveExecutionRate_ = value; onChanged(); return this; } /** * * * <pre> * Output only. 
The current maximum number of tasks per second executed by the queue. * The maximum value of this variable is controlled by the RateLimits of the * Queue. However, this value could be less to avoid overloading the endpoints * tasks in the queue are targeting. * </pre> * * <code>double effective_execution_rate = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearEffectiveExecutionRate() { effectiveExecutionRate_ = 0D; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta3.QueueStats) } // @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.QueueStats) private static final com.google.cloud.tasks.v2beta3.QueueStats DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta3.QueueStats(); } public static com.google.cloud.tasks.v2beta3.QueueStats getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<QueueStats> PARSER = new com.google.protobuf.AbstractParser<QueueStats>() { @java.lang.Override public QueueStats parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new QueueStats(input, extensionRegistry); } }; public static com.google.protobuf.Parser<QueueStats> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<QueueStats> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.tasks.v2beta3.QueueStats getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.runtime.yarn.driver;

import com.google.protobuf.ByteString;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.yarn.api.records.*;
import org.apache.hadoop.yarn.client.api.AMRMClient;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.reef.annotations.audience.DriverSide;
import org.apache.reef.annotations.audience.Private;
import org.apache.reef.driver.ProgressProvider;
import org.apache.reef.proto.ReefServiceProtos;
import org.apache.reef.runtime.common.driver.DriverStatusManager;
import org.apache.reef.runtime.common.driver.evaluator.pojos.State;
import org.apache.reef.runtime.common.driver.resourcemanager.NodeDescriptorEventImpl;
import org.apache.reef.runtime.common.driver.resourcemanager.ResourceEventImpl;
import org.apache.reef.runtime.common.driver.resourcemanager.ResourceStatusEventImpl;
import org.apache.reef.runtime.common.driver.resourcemanager.RuntimeStatusEventImpl;
import org.apache.reef.runtime.common.files.REEFFileNames;
import org.apache.reef.runtime.yarn.client.unmanaged.YarnProxyUser;
import org.apache.reef.runtime.yarn.driver.parameters.JobSubmissionDirectory;
import org.apache.reef.runtime.yarn.driver.parameters.YarnHeartbeatPeriod;
import org.apache.reef.tang.InjectionFuture;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.util.Optional;
import org.apache.reef.wake.remote.address.LocalAddressProvider;
import org.apache.reef.wake.remote.impl.ObjectSerializableCodec;

import javax.inject.Inject;
import java.io.*;
import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Bridges the REEF driver to YARN: registers the Application Master, requests
 * and releases containers via the async AM-RM client, launches evaluator
 * processes via the async NM client, and forwards all RM/NM callbacks to the
 * REEF event handlers.
 *
 * <p>Thread-safety: callbacks arrive on the AMRM/NM client threads. The two
 * request queues are {@link ConcurrentLinkedQueue}s and the request-matching
 * paths ({@link #onContainerRequest}, {@link #handleNewContainer},
 * {@link #doHomogeneousRequests}) additionally synchronize on {@code this} to
 * keep the counter and the queues consistent with each other.
 */
@Private
@DriverSide
final class YarnContainerManager implements AMRMClientAsync.CallbackHandler, NMClientAsync.CallbackHandler {

  private static final Logger LOG = Logger.getLogger(YarnContainerManager.class.getName());

  private static final String RUNTIME_NAME = "YARN";

  /** Default port number to provide in the Application Master registration. */
  private static final int AM_REGISTRATION_PORT = -1;

  /** Requests accepted via onContainerRequest() but not yet handed to the RM. */
  private final Queue<AMRMClient.ContainerRequest> requestsBeforeSentToRM = new ConcurrentLinkedQueue<>();

  /** Requests already handed to the RM and awaiting allocation; matched FIFO in handleNewContainer(). */
  private final Queue<AMRMClient.ContainerRequest> requestsAfterSentToRM = new ConcurrentLinkedQueue<>();

  /** Node ID string -> rack name, maintained from onNodesUpdated() and read for rack matching. */
  private final Map<String, String> nodeIdToRackName = new ConcurrentHashMap<>();

  private final YarnConfiguration yarnConf;
  private final AMRMClientAsync<AMRMClient.ContainerRequest> resourceManager;
  private final YarnProxyUser yarnProxyUser;
  private final NMClientAsync nodeManager;
  private final REEFEventHandlers reefEventHandlers;
  private final Containers containers;
  private final ApplicationMasterRegistration registration;
  private final ContainerRequestCounter containerRequestCounter;
  private final DriverStatusManager driverStatusManager;
  private final String trackingUrl;
  private final String amRegistrationHost;
  private final String jobSubmissionDirectory;
  private final REEFFileNames reefFileNames;
  private final RackNameFormatter rackNameFormatter;
  private final InjectionFuture<ProgressProvider> progressProvider;

  @Inject
  private YarnContainerManager(
      @Parameter(YarnHeartbeatPeriod.class) final int yarnRMHeartbeatPeriod,
      @Parameter(JobSubmissionDirectory.class) final String jobSubmissionDirectory,
      final YarnConfiguration yarnConf,
      final YarnProxyUser yarnProxyUser,
      final REEFEventHandlers reefEventHandlers,
      final Containers containers,
      final ApplicationMasterRegistration registration,
      final ContainerRequestCounter containerRequestCounter,
      final DriverStatusManager driverStatusManager,
      final REEFFileNames reefFileNames,
      final TrackingURLProvider trackingURLProvider,
      final LocalAddressProvider addressProvider,
      final RackNameFormatter rackNameFormatter,
      final InjectionFuture<ProgressProvider> progressProvider) throws IOException {

    this.reefEventHandlers = reefEventHandlers;
    this.driverStatusManager = driverStatusManager;
    this.containers = containers;
    this.registration = registration;
    this.containerRequestCounter = containerRequestCounter;
    this.yarnConf = yarnConf;
    this.yarnProxyUser = yarnProxyUser;
    this.rackNameFormatter = rackNameFormatter;

    this.trackingUrl = trackingURLProvider.getTrackingUrl();
    this.amRegistrationHost = addressProvider.getLocalAddress();

    // Both async clients call back into this object; they are init()ed/started in onStart().
    this.resourceManager = AMRMClientAsync.createAMRMClientAsync(yarnRMHeartbeatPeriod, this);
    this.nodeManager = new NMClientAsyncImpl(this);
    this.jobSubmissionDirectory = jobSubmissionDirectory;
    this.reefFileNames = reefFileNames;
    this.progressProvider = progressProvider;

    LOG.log(Level.FINEST, "Instantiated YarnContainerManager: {0} {1}",
        new Object[] {this.registration, this.yarnProxyUser});
  }

  /**
   * RM Callback: RM reports some completed containers. Update status of each container in the list.
   * @param completedContainers list of completed containers.
   */
  @Override
  public void onContainersCompleted(final List<ContainerStatus> completedContainers) {
    for (final ContainerStatus containerStatus : completedContainers) {
      this.onContainerStatus(containerStatus);
    }
  }

  /**
   * RM Callback: RM reports that some containers have been allocated.
   * @param allocatedContainers list of containers newly allocated by RM.
   */
  @Override
  public void onContainersAllocated(final List<Container> allocatedContainers) {

    String id = null; // ID is used for logging only

    if (LOG.isLoggable(Level.FINE)) {
      id = String.format("%s:%d",
          Thread.currentThread().getName().replace(' ', '_'), System.currentTimeMillis());
      LOG.log(Level.FINE, "TIME: Allocated Containers {0} {1} of {2}",
          new Object[] {id, allocatedContainers.size(), this.containerRequestCounter.get()});
    }

    for (final Container container : allocatedContainers) {
      this.handleNewContainer(container);
    }

    LOG.log(Level.FINE, "TIME: Processed Containers {0}", id);
  }

  /**
   * RM Callback: RM requests application shutdown.
   * Reports DONE runtime status and routes the shutdown through the driver's error path.
   */
  @Override
  public void onShutdownRequest() {
    this.reefEventHandlers.onRuntimeStatus(RuntimeStatusEventImpl.newBuilder()
        .setName(RUNTIME_NAME).setState(State.DONE).build());
    this.driverStatusManager.onError(new Exception("Shutdown requested by YARN."));
  }

  /**
   * RM Callback: RM reports status change of some nodes.
   * Caches each node's rack name for later rack-locality matching and forwards the report.
   * @param nodeReports list of nodes with changed status.
   */
  @Override
  public void onNodesUpdated(final List<NodeReport> nodeReports) {
    for (final NodeReport nodeReport : nodeReports) {
      // FIX: NodeReport.getRackName() is not guaranteed to be non-null (e.g. when rack
      // resolution is unavailable), and ConcurrentHashMap.put() throws NullPointerException
      // for null values, which would kill the AMRM callback thread. Guard the insert.
      final String rackName = nodeReport.getRackName();
      if (rackName != null) {
        this.nodeIdToRackName.put(nodeReport.getNodeId().toString(), rackName);
      } else {
        LOG.log(Level.FINE, "Node report for {0} carries no rack name; skipping rack cache update",
            nodeReport.getNodeId());
      }
      this.onNodeReport(nodeReport);
    }
  }

  /**
   * RM Callback: Report application progress to RM.
   * Progress is a floating point number between 0 and 1.
   * @return a floating point number between 0 and 1.
   */
  @Override
  public float getProgress() {
    try {
      // Clamp the user-provided progress into [0, 1] as required by YARN.
      return Math.max(Math.min(1, progressProvider.get().getProgress()), 0);
    } catch (final Exception e) {
      // An Exception must be caught and logged here because YARN swallows the Exception and fails the job.
      LOG.log(Level.WARNING, "Cannot get the application progress. Will return 0.", e);
      return 0;
    }
  }

  /**
   * RM Callback: RM reports an error.
   * @param throwable An exception thrown from RM.
   */
  @Override
  public void onError(final Throwable throwable) {
    this.onRuntimeError(throwable);
  }

  /**
   * NM Callback: NM accepts the starting container request.
   * @param containerId ID of a new container being started.
   * @param stringByteBufferMap a Map between the auxiliary service names and their outputs. Not used.
   */
  @Override
  public void onContainerStarted(final ContainerId containerId, final Map<String, ByteBuffer> stringByteBufferMap) {
    final Optional<Container> container = this.containers.getOptional(containerId.toString());
    if (container.isPresent()) {
      this.nodeManager.getContainerStatusAsync(containerId, container.get().getNodeId());
    }
  }

  /**
   * NM Callback: NM reports container status.
   * @param containerId ID of a container with the status being reported.
   * @param containerStatus YARN container status.
   */
  @Override
  public void onContainerStatusReceived(final ContainerId containerId, final ContainerStatus containerStatus) {
    onContainerStatus(containerStatus);
  }

  /**
   * NM Callback: NM reports stop of a container.
   * @param containerId ID of a container stopped.
   */
  @Override
  public void onContainerStopped(final ContainerId containerId) {
    final boolean hasContainer = this.containers.hasContainer(containerId.toString());
    if (hasContainer) {
      this.reefEventHandlers.onResourceStatus(
          ResourceStatusEventImpl.newBuilder()
              .setIdentifier(containerId.toString())
              .setState(State.DONE)
              .build());
    }
  }

  /**
   * NM Callback: NM reports failure on container start.
   * @param containerId ID of a container that has failed to start.
   * @param throwable An error that caused container to fail.
   */
  @Override
  public void onStartContainerError(final ContainerId containerId, final Throwable throwable) {
    this.handleContainerError(containerId, throwable);
  }

  /**
   * NM Callback: NM can not obtain status of the container.
   * @param containerId ID of a container that failed to report its status.
   * @param throwable An error that occured when querying status of a container.
   */
  @Override
  public void onGetContainerStatusError(final ContainerId containerId, final Throwable throwable) {
    this.handleContainerError(containerId, throwable);
  }

  /**
   * NM Callback: NM fails to stop the container.
   * @param containerId ID of the container that failed to stop.
   * @param throwable An error that occurred when trying to stop the container.
   */
  @Override
  public void onStopContainerError(final ContainerId containerId, final Throwable throwable) {
    handleContainerError(containerId, throwable);
  }

  /**
   * Called by {@link YarnDriverRuntimeRestartManager} to record recovered containers
   * such that containers can be released properly on unrecoverable containers.
   */
  public void onContainersRecovered(final Set<Container> recoveredContainers) {
    for (final Container container : recoveredContainers) {
      containers.add(container);
    }
  }

  /**
   * Submit the given launchContext to the given container.
   */
  void submit(final Container container, final ContainerLaunchContext launchContext) {
    this.nodeManager.startContainerAsync(container, launchContext);
  }

  /**
   * Release the given container back to the RM and report updated runtime status.
   */
  void release(final String containerId) {
    LOG.log(Level.FINE, "Release container: {0}", containerId);
    // NOTE(review): if containerId is unknown, removeAndGet() presumably returns null or
    // throws; a null return would NPE on getId() below — confirm Containers' contract.
    final Container container = this.containers.removeAndGet(containerId);
    this.resourceManager.releaseAssignedContainer(container.getId());
    updateRuntimeStatus();
  }

  /**
   * Start the YARN container manager.
   * This method is called from DriverRuntimeStartHandler via YARNRuntimeStartHandler.
   * Starts the NM and RM async clients, registers the AM, and publishes the
   * driver HTTP endpoint to the job submission directory.
   */
  void onStart() {

    LOG.log(Level.FINEST, "YARN registration: begin");

    this.nodeManager.init(this.yarnConf);
    this.nodeManager.start();

    try {

      // The RM client must be started as the proxy user so that an unmanaged AM
      // can authenticate with the correct credentials.
      this.yarnProxyUser.doAs(
          new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
              resourceManager.init(yarnConf);
              resourceManager.start();
              return null;
            }
          });

      LOG.log(Level.FINE, "YARN registration: register AM at \"{0}:{1}\" tracking URL \"{2}\"",
          new Object[] {amRegistrationHost, AM_REGISTRATION_PORT, this.trackingUrl});

      this.registration.setRegistration(this.resourceManager.registerApplicationMaster(
          amRegistrationHost, AM_REGISTRATION_PORT, this.trackingUrl));

      LOG.log(Level.FINE, "YARN registration: AM registered: {0}", this.registration);

      // Write the tracking URL into the job submission directory so clients can find the driver.
      // The FileSystem instance is intentionally not closed: Hadoop caches and shares it.
      final FileSystem fs = FileSystem.get(this.yarnConf);
      final Path outputFileName = new Path(this.jobSubmissionDirectory, this.reefFileNames.getDriverHttpEndpoint());
      try (final FSDataOutputStream out = fs.create(outputFileName)) {
        out.writeBytes(this.trackingUrl + '\n');
      }

    } catch (final Exception e) {
      LOG.log(Level.WARNING, "Unable to register application master.", e);
      onRuntimeError(e);
    }

    LOG.log(Level.FINEST, "YARN registration: done: {0}", this.registration);
  }

  /**
   * Shut down YARN container manager.
   * This method is called from DriverRuntimeStopHandler via YARNRuntimeStopHandler.
   * @param exception Exception that caused driver to stop. Can be null if there was no error.
   */
  void onStop(final Throwable exception) {

    LOG.log(Level.FINE, "Stop Runtime: RM status {0}", this.resourceManager.getServiceState());

    if (this.resourceManager.getServiceState() == Service.STATE.STARTED) {
      // invariant: if RM is still running then we declare success.
      try {

        this.reefEventHandlers.close();

        if (exception == null) {
          this.resourceManager.unregisterApplicationMaster(
              FinalApplicationStatus.SUCCEEDED, "Success!", this.trackingUrl);
        } else {
          // Note: We don't allow RM to restart our applications if it's an application level failure.
          // If applications are to be long-running, they should catch Exceptions before the REEF level
          // instead of relying on the RM restart mechanism.
          // For this case, we make a strong assumption that REEF does not allow its own unhandled Exceptions
          // to leak to this stage.
          final String failureMsg = String.format("Application failed due to:%n%s%n" +
              "With stack trace:%n%s", exception.getMessage(), ExceptionUtils.getStackTrace(exception));

          this.resourceManager.unregisterApplicationMaster(
              FinalApplicationStatus.FAILED, failureMsg, this.trackingUrl);
        }

        this.resourceManager.close();
        LOG.log(Level.FINEST, "Container ResourceManager stopped successfully");

      } catch (final Exception e) {
        LOG.log(Level.WARNING, "Error shutting down YARN application", e);
      }
    }

    if (this.nodeManager.getServiceState() == Service.STATE.STARTED) {
      try {
        this.nodeManager.close();
        LOG.log(Level.FINEST, "Container NodeManager stopped successfully");
      } catch (final IOException e) {
        LOG.log(Level.WARNING, "Error closing YARN Node Manager", e);
      }
    }
  }

  /////////////////////////////////////////////////////////////
  // HELPER METHODS

  /**
   * Forwards a YARN node report to the REEF event handlers as a node descriptor event.
   */
  private void onNodeReport(final NodeReport nodeReport) {
    LOG.log(Level.FINE, "Send node descriptor: {0}", nodeReport);
    // NOTE(review): nodeReport.getRackName() may be null here as well — confirm that
    // NodeDescriptorEventImpl.Builder tolerates a null rack name.
    this.reefEventHandlers.onNodeDescriptor(NodeDescriptorEventImpl.newBuilder()
        .setIdentifier(nodeReport.getNodeId().toString())
        .setHostName(nodeReport.getNodeId().getHost())
        .setPort(nodeReport.getNodeId().getPort())
        .setMemorySize(nodeReport.getCapability().getMemory())
        .setRackName(nodeReport.getRackName())
        .build());
  }

  /**
   * Reports a FAILED resource status (exit code 1) for the given container.
   */
  private void handleContainerError(final ContainerId containerId, final Throwable throwable) {
    this.reefEventHandlers.onResourceStatus(ResourceStatusEventImpl.newBuilder()
        .setIdentifier(containerId.toString())
        .setState(State.FAILED)
        .setExitCode(1)
        .setDiagnostics(throwable.getMessage())
        .build());
  }

  /**
   * Handles container status reports. Calls come from YARN.
   * Maps YARN container state/exit code onto REEF resource states:
   * COMPLETE with exit 0 -> DONE, 143 (SIGTERM) -> KILLED, otherwise FAILED;
   * any non-COMPLETE state -> RUNNING.
   * @param value containing the container status.
   */
  private void onContainerStatus(final ContainerStatus value) {

    final String containerId = value.getContainerId().toString();
    final boolean hasContainer = this.containers.hasContainer(containerId);

    if (hasContainer) {
      LOG.log(Level.FINE, "Received container status: {0}", containerId);

      final ResourceStatusEventImpl.Builder status =
          ResourceStatusEventImpl.newBuilder().setIdentifier(containerId);

      switch (value.getState()) {
      case COMPLETE:
        LOG.log(Level.FINE, "Container completed: status {0}", value.getExitStatus());
        switch (value.getExitStatus()) {
        case 0:
          status.setState(State.DONE);
          break;
        case 143:
          status.setState(State.KILLED);
          break;
        default:
          status.setState(State.FAILED);
        }
        status.setExitCode(value.getExitStatus());
        break;
      default:
        LOG.info("Container running");
        status.setState(State.RUNNING);
      }

      if (value.getDiagnostics() != null) {
        LOG.log(Level.FINE, "Container diagnostics: {0}", value.getDiagnostics());
        status.setDiagnostics(value.getDiagnostics());
      }

      // ResourceStatusHandler should close and release the Evaluator for us if the state is a terminal state.
      this.reefEventHandlers.onResourceStatus(status.build());
    }
  }

  /**
   * Accepts new container requests from the driver and forwards any homogeneous
   * prefix of the pending queue to the RM.
   */
  void onContainerRequest(final AMRMClient.ContainerRequest... containerRequests) {

    synchronized (this) {
      this.containerRequestCounter.incrementBy(containerRequests.length);
      this.requestsBeforeSentToRM.addAll(Arrays.asList(containerRequests));
      this.doHomogeneousRequests();
    }

    this.updateRuntimeStatus();
  }

  /**
   * Handles new container allocations. Calls come from YARN.
   * @param container newly allocated YARN container.
   */
  private void handleNewContainer(final Container container) {

    LOG.log(Level.FINE, "allocated container: id[ {0} ]", container.getId());

    synchronized (this) {

      if (!matchContainerWithPendingRequest(container)) {
        LOG.log(Level.WARNING,
            "Got an extra container {0} that doesn't match, releasing...", container.getId());
        this.resourceManager.releaseAssignedContainer(container.getId());
        return;
      }

      final AMRMClient.ContainerRequest matchedRequest = this.requestsAfterSentToRM.peek();

      this.containerRequestCounter.decrement();
      this.containers.add(container);

      LOG.log(Level.FINEST, "{0} matched with {1}", new Object[] {container, matchedRequest});

      // Due to the bug YARN-314 and the workings of AMRMCClientAsync, when x-priority m-capacity zero-container
      // request and x-priority n-capacity nonzero-container request are sent together, where m > n, RM ignores
      // the latter.
      // Therefore it is necessary avoid sending zero-container request, even if it means getting extra containers.
      // It is okay to send nonzero m-capacity and n-capacity request together since bigger containers
      // can be matched.
      // TODO[JIRA REEF-42, REEF-942]: revisit this when implementing locality-strictness.
      // (i.e. a specific rack request can be ignored)
      if (this.requestsAfterSentToRM.size() > 1) {
        try {
          this.resourceManager.removeContainerRequest(matchedRequest);
        } catch (final Exception e) {
          LOG.log(Level.WARNING, "Error removing request from Async AMRM client queue: " + matchedRequest, e);
        }
      }

      this.requestsAfterSentToRM.remove();
      this.doHomogeneousRequests();

      LOG.log(Level.FINEST, "Allocated Container: memory = {0}, core number = {1}",
          new Object[] {container.getResource().getMemory(), container.getResource().getVirtualCores()});

      this.reefEventHandlers.onResourceAllocation(ResourceEventImpl.newAllocationBuilder()
          .setIdentifier(container.getId().toString())
          .setNodeId(container.getNodeId().toString())
          .setResourceMemory(container.getResource().getMemory())
          .setVirtualCores(container.getResource().getVirtualCores())
          .setRackName(rackNameFormatter.getRackName(container))
          .setRuntimeName(RuntimeIdentifier.RUNTIME_NAME)
          .build());

      this.updateRuntimeStatus();
    }
  }

  /**
   * Moves the longest prefix of identical ("homogeneous") pending requests from
   * the before-RM queue to the RM, but only while no request is outstanding at
   * the RM (see the YARN-314 workaround notes in handleNewContainer()).
   */
  private synchronized void doHomogeneousRequests() {
    if (this.requestsAfterSentToRM.isEmpty()) {
      final AMRMClient.ContainerRequest firstRequest = this.requestsBeforeSentToRM.peek();

      while (!this.requestsBeforeSentToRM.isEmpty() &&
             isSameKindOfRequest(firstRequest, this.requestsBeforeSentToRM.peek())) {
        final AMRMClient.ContainerRequest homogeneousRequest = this.requestsBeforeSentToRM.remove();
        this.resourceManager.addContainerRequest(homogeneousRequest);
        this.requestsAfterSentToRM.add(homogeneousRequest);
      }
    }
  }

  /**
   * Two requests are "the same kind" when priority, capability, locality
   * relaxation, node list, and rack list all match.
   */
  private boolean isSameKindOfRequest(final AMRMClient.ContainerRequest r1, final AMRMClient.ContainerRequest r2) {
    return r1.getPriority().compareTo(r2.getPriority()) == 0 &&
           r1.getCapability().compareTo(r2.getCapability()) == 0 &&
           r1.getRelaxLocality() == r2.getRelaxLocality() &&
           ListUtils.isEqualList(r1.getNodes(), r2.getNodes()) &&
           ListUtils.isEqualList(r1.getRacks(), r2.getRacks());
  }

  /**
   * Match to see whether the container satisfies the request.
   * We take into consideration that RM has some freedom in rounding
   * up the allocation and in placing containers on other machines.
   */
  private boolean matchContainerWithPendingRequest(final Container container) {
    if (this.requestsAfterSentToRM.isEmpty()) {
      return false;
    }

    final AMRMClient.ContainerRequest request = this.requestsAfterSentToRM.peek();
    final boolean resourceCondition = container.getResource().getMemory() >= request.getCapability().getMemory();
    // TODO[JIRA REEF-35]: check vcores once YARN-2380 is resolved
    final boolean nodeCondition = request.getNodes() == null
        || request.getNodes().contains(container.getNodeId().getHost());
    final boolean rackCondition = request.getRacks() == null
        || request.getRacks().contains(this.nodeIdToRackName.get(container.getNodeId().toString()));

    return resourceCondition && (request.getRelaxLocality() || rackCondition && nodeCondition);
  }

  /**
   * Update the driver with my current status.
   */
  private void updateRuntimeStatus() {

    final RuntimeStatusEventImpl.Builder builder =
        RuntimeStatusEventImpl.newBuilder()
            .setName(RUNTIME_NAME)
            .setState(State.RUNNING)
            .setOutstandingContainerRequests(this.containerRequestCounter.get());

    for (final String allocatedContainerId : this.containers.getContainerIds()) {
      builder.addContainerAllocation(allocatedContainerId);
    }

    this.reefEventHandlers.onRuntimeStatus(builder.build());
  }

  /**
   * Unregisters the AM as FAILED, stops the RM client, and reports a FAILED
   * runtime status carrying the serialized Throwable.
   */
  private void onRuntimeError(final Throwable throwable) {

    // SHUTDOWN YARN
    try {
      this.reefEventHandlers.close();
      this.resourceManager.unregisterApplicationMaster(
          FinalApplicationStatus.FAILED, throwable.getMessage(), this.trackingUrl);
    } catch (final Exception e) {
      LOG.log(Level.WARNING, "Error shutting down YARN application", e);
    } finally {
      this.resourceManager.stop();
    }

    final ReefServiceProtos.RuntimeErrorProto runtimeError =
        ReefServiceProtos.RuntimeErrorProto.newBuilder()
            .setName(RUNTIME_NAME)
            .setMessage(throwable.getMessage())
            .setException(ByteString.copyFrom(new ObjectSerializableCodec<>().encode(throwable)))
            .build();

    this.reefEventHandlers.onRuntimeStatus(
        RuntimeStatusEventImpl.newBuilder()
            .setState(State.FAILED)
            .setName(RUNTIME_NAME)
            .setError(runtimeError)
            .build());
  }
}
package com.timeanddate.services.tests; import static org.junit.Assert.*; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.TimeZone; import org.junit.Before; import org.junit.Test; import com.timeanddate.services.ConvertTimeService; import com.timeanddate.services.ConvertedTimes; import com.timeanddate.services.common.AuthenticationException; import com.timeanddate.services.common.ServerSideException; import com.timeanddate.services.dataTypes.places.Coordinates; import com.timeanddate.services.dataTypes.places.Location; import com.timeanddate.services.dataTypes.places.LocationId; import com.timeanddate.services.dataTypes.time.TADDateTime; public class ConvertTimeServiceTests { public static TimeZone UsTimezone = TimeZone.getTimeZone("America/Alaska"); public static Calendar UsTimestamp = Calendar.getInstance(UsTimezone); public static TimeZone ArticTimezone = TimeZone .getTimeZone("Antarctica/Troll"); public static Calendar ArticTimestamp = Calendar.getInstance(TimeZone .getTimeZone("UTC")); public final String fromCountry = "Norway"; public final String fromCity = "Oslo"; public static LocationId fromCoords; public String fromFormat = "norway/oslo"; public static LocationId fromId; public final String toUsState = "Alaska"; public final String toUsCountry = "USA"; public final String toUsCity = "Anchorage"; public String toUsFormat = "usa/anchorage"; public LocationId toUsId; public final String toArticCountry = "Antarctica"; public final String toArticCity = "Troll"; public String toArticFormat = "antarctica/troll"; public LocationId toArticId; public final TADDateTime timeToConvert = new TADDateTime(2015, 05, 14, 11, 1, 54); public final TADDateTime timeToConvertInUTC = new TADDateTime(2015, 05, 14, 9, 1, 54); public final TADDateTime expectedConvertedTimeInAnchorage = new TADDateTime( 2015, 05, 14, 1, 1, 54); public final TADDateTime 
expectedConvertedTimeInTroll = new TADDateTime( 2015, 05, 14, 11, 1, 54); public static DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); @Before public void Setup() { fromCoords = new LocationId(new Coordinates(59.913d, 10.752d)); fromId = new LocationId(fromFormat); toUsId = new LocationId(toUsFormat); ArticTimestamp.setTimeInMillis(Calendar.getInstance(ArticTimezone) .getTimeInMillis()); toArticId = new LocationId(toArticFormat); } @Test public void Calling_ConvertTimeService_WithNoId_And_WithDateTime_Should_ReturnCorrectConvertedTime() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); ConvertedTimes result = service.convertTime(fromId, timeToConvert); Location oslo = getLocationById(result, "187"); // Assert assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); HasCorrectUtc(timeToConvertInUTC, result.Utc.getDateTime()); } @Test public void Calling_ConvertTimeService_WithOneToId_And_WithDateTime_Should_ReturnCorrectConvertedTime() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); ConvertedTimes result = service .convertTime(fromId, timeToConvert, toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(oslo.getGeography().getName(), fromCity); assertEquals(oslo.getGeography().getCountry().getName(), fromCountry); HasCorrectUtc(timeToConvertInUTC, result.Utc.getDateTime()); HasCorrectLocation(expectedConvertedTimeInAnchorage, anchorage); } @Test 
public void Calling_ConvertTimeService_WithMultipleToIds_And_WithDateTime_Should_ReturnCorrectConvertedTime() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); toId.add(toArticId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); ConvertedTimes result = service .convertTime(fromId, timeToConvert, toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); Location troll = getLocationById(result, "4365"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(troll.getGeography().getCountry().getName(), toArticCountry); assertEquals(troll.getGeography().getName(), toArticCity + " Station"); assertEquals(oslo.getGeography().getName(), fromCity); assertEquals(oslo.getGeography().getCountry().getName(), fromCountry); HasCorrectLocation(expectedConvertedTimeInAnchorage, anchorage); HasCorrectLocation(expectedConvertedTimeInTroll, troll); HasCorrectUtc(timeToConvertInUTC, result.Utc.getDateTime()); } @Test public void Calling_ConvertTimeService_WithMultipleToIds_And_WithISO_Should_ReturnCorrectConvertedTime() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); toId.add(toArticId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); Location troll = getLocationById(result, "4365"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(toArticCountry, 
troll.getGeography().getCountry().getName()); assertEquals(toArticCity + " Station", troll.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); HasCorrectLocation(expectedConvertedTimeInAnchorage, anchorage); HasCorrectLocation(expectedConvertedTimeInTroll, troll); HasCorrectUtc(timeToConvertInUTC, result.Utc.getDateTime()); } @Test public void Calling_ConvertTimeService_WithNoId_And_WithISO_Should_ReturnCorrectConvertedTime() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString()); Location oslo = getLocationById(result, "187"); // Assert assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); HasCorrectUtc(timeToConvertInUTC, result.Utc.getDateTime()); } @Test public void Calling_ConvertTimeService_WithOneToId_And_WithISO_Should_ReturnCorrectConvertedTime() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); HasCorrectLocation(expectedConvertedTimeInAnchorage, anchorage); HasCorrectUtc(timeToConvertInUTC, result.Utc.getDateTime()); } @Test public void 
Calling_ConvertTimeService_WithoutTimeChanges_Should_NotReturnTimeChanges() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); service.setIncludeTimeChanges(false); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); for (Location loc : result.Locations) { assertEquals(0, loc.getTimeChanges().size()); } } @Test public void Calling_ConvertTimeService_WithTimeChanges_Should_ReturnTimeChanges() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); service.setIncludeTimeChanges(true); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); for (Location loc : result.Locations) { assertNotNull(loc.getTimeChanges()); } } @Test public void Calling_ConvertTimeService_WithoutTimezone_Should_NotReturnTZInformation() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange 
List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); service.setIncludeTimezoneInformation(false); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); for (Location loc : result.Locations) { assertNull(loc.getTime().getTimezone()); } } @Test public void Calling_ConvertTimeService_WithTimezone_Should_ReturnTZInformation() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, Config.SecretKey); service.setIncludeTimezoneInformation(true); ConvertedTimes result = service.convertTime(fromId, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); for (Location loc : result.Locations) { assertNotNull(loc.getTime().getTimezone()); } } @Test public void Calling_ConvertTimeService_WithRadius_Should_ReturnCorrectLocation() throws AuthenticationException, IllegalArgumentException, ServerSideException { // Arrange List<LocationId> toId = new ArrayList<LocationId>(); toId.add(toUsId); // Act ConvertTimeService service = new ConvertTimeService(Config.AccessKey, 
Config.SecretKey); service.setRadius(50); ConvertedTimes result = service.convertTime(fromCoords, timeToConvert.toString(), toId); Location anchorage = getLocationById(result, "18"); Location oslo = getLocationById(result, "187"); // Assert assertEquals(toUsState, anchorage.getGeography().getState()); assertEquals(toUsCity, anchorage.getGeography().getName()); assertEquals(fromCity, oslo.getGeography().getName()); assertEquals(fromCountry, oslo.getGeography().getCountry().getName()); for (Location loc : result.Locations) { assertNotNull(loc.getTime().getTimezone()); } } public void HasCorrectLocation(TADDateTime date, Location location) { assertEquals(date.getYear(), location.getTime().getDateTime().getYear()); assertEquals(date.getMonth(), location.getTime().getDateTime().getMonth()); assertEquals(date.getDayOfMonth(), location.getTime().getDateTime().getDayOfMonth()); assertEquals(date.getHour(), location.getTime().getDateTime().getHour()); assertEquals(date.getMinute(), location.getTime().getDateTime().getMinute()); } public void HasCorrectUtc(TADDateTime utc, TADDateTime date) { assertEquals(utc.getYear(), date.getYear()); assertEquals(utc.getMonth(), date.getMonth()); assertEquals(utc.getDayOfMonth(), date.getDayOfMonth()); assertEquals(utc.getHour(), date.getHour()); assertEquals(utc.getMinute(), date.getMinute()); assertEquals(utc.getSecond(), date.getSecond()); } private Location getLocationById(ConvertedTimes ct, String id) { for (Location loc : ct.Locations) { if (loc.getId().equals(id)) return loc; } return null; } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2013.07.27 at 08:11:57 PM EEST // package eu.datex2.schema._1_0._1_0; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlEnumValue; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for PoorEnvironmentTypeEnum. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="PoorEnvironmentTypeEnum"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="abnormalTemperature"/> * &lt;enumeration value="badWeather"/> * &lt;enumeration value="blizzard"/> * &lt;enumeration value="blowingDust"/> * &lt;enumeration value="blowingSnow"/> * &lt;enumeration value="crosswinds"/> * &lt;enumeration value="damagingHail"/> * &lt;enumeration value="denseFog"/> * &lt;enumeration value="eclipse"/> * &lt;enumeration value="extremeCold"/> * &lt;enumeration value="extremeHeat"/> * &lt;enumeration value="fog"/> * &lt;enumeration value="freezingFog"/> * &lt;enumeration value="frost"/> * &lt;enumeration value="gales"/> * &lt;enumeration value="gustyWinds"/> * &lt;enumeration value="hail"/> * &lt;enumeration value="heavyFrost"/> * &lt;enumeration value="heavyRain"/> * &lt;enumeration value="heavySnowfall"/> * &lt;enumeration value="hurricaneForceWinds"/> * &lt;enumeration value="lowSunGlare"/> * &lt;enumeration value="moderateFog"/> * &lt;enumeration value="ozonePollution"/> * &lt;enumeration value="patchyFog"/> * &lt;enumeration value="precipitationInTheArea"/> * &lt;enumeration value="rain"/> * &lt;enumeration value="rainChangingToSnow"/> * &lt;enumeration value="sandStorms"/> * &lt;enumeration value="severeExhaustPollution"/> * 
&lt;enumeration value="severeSmog"/> * &lt;enumeration value="showers"/> * &lt;enumeration value="sleet"/> * &lt;enumeration value="smogAlert"/> * &lt;enumeration value="smokeHazard"/> * &lt;enumeration value="snowChangingToRain"/> * &lt;enumeration value="snowfall"/> * &lt;enumeration value="sprayHazard"/> * &lt;enumeration value="stormForceWinds"/> * &lt;enumeration value="strongGustsOfWind"/> * &lt;enumeration value="strongWinds"/> * &lt;enumeration value="swarmsOfInsects"/> * &lt;enumeration value="temperatureFalling"/> * &lt;enumeration value="thunderstorms"/> * &lt;enumeration value="tornadoes"/> * &lt;enumeration value="veryStrongGustsOfWind"/> * &lt;enumeration value="visibilityReduced"/> * &lt;enumeration value="whiteOut"/> * &lt;enumeration value="winterStorm"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "PoorEnvironmentTypeEnum") @XmlEnum public enum PoorEnvironmentTypeEnum { /** * The temperature is outside the normally expected range. * */ @XmlEnumValue("abnormalTemperature") ABNORMAL_TEMPERATURE("abnormalTemperature"), /** * Adverse weather conditions are affecting driving conditions. * */ @XmlEnumValue("badWeather") BAD_WEATHER("badWeather"), /** * Heavy snowfall in combination with strong winds, limiting visibility to 50m or less. * */ @XmlEnumValue("blizzard") BLIZZARD("blizzard"), /** * Dust blowing across the roadway causing significantly reduced visibility. * */ @XmlEnumValue("blowingDust") BLOWING_DUST("blowingDust"), /** * Fallen snow moving due to the forces of wind. * */ @XmlEnumValue("blowingSnow") BLOWING_SNOW("blowingSnow"), /** * Strong cross winds across the direction of the roadway (e.g. on a ridge or bridge). * */ @XmlEnumValue("crosswinds") CROSSWINDS("crosswinds"), /** * Large falling ice pellets or frozen rain capable of causing injury or damage to property. * */ @XmlEnumValue("damagingHail") DAMAGING_HAIL("damagingHail"), /** * Dense fog, limiting visibility to 50m or less. 
* */ @XmlEnumValue("denseFog") DENSE_FOG("denseFog"), /** * Eclipse, either partial or full, of the sun causing low light levels during normal daylight period. * */ @XmlEnumValue("eclipse") ECLIPSE("eclipse"), /** * Abnormally low temperatures. * */ @XmlEnumValue("extremeCold") EXTREME_COLD("extremeCold"), /** * Abnormally high expected maximum temperature. * */ @XmlEnumValue("extremeHeat") EXTREME_HEAT("extremeHeat"), /** * Fog, visibility more than 50m. * */ @XmlEnumValue("fog") FOG("fog"), /** * Fog, in conjunction with sub-zero air temperatures causing possible freezing of road surface. * */ @XmlEnumValue("freezingFog") FREEZING_FOG("freezingFog"), /** * Frost can be expected. * */ @XmlEnumValue("frost") FROST("frost"), /** * Winds between 60 km/h and 90 km/h. * */ @XmlEnumValue("gales") GALES("gales"), /** * Constantly varying winds, significant at times. * */ @XmlEnumValue("gustyWinds") GUSTY_WINDS("gustyWinds"), /** * Falling ice pellets or frozen rain. * */ @XmlEnumValue("hail") HAIL("hail"), /** * A thick coating of frost can be expected. * */ @XmlEnumValue("heavyFrost") HEAVY_FROST("heavyFrost"), /** * Heavy rainfall, limiting visibility to 50m or less. * */ @XmlEnumValue("heavyRain") HEAVY_RAIN("heavyRain"), /** * Dense falling snow, limiting visibility to 50m or less. * */ @XmlEnumValue("heavySnowfall") HEAVY_SNOWFALL("heavySnowfall"), /** * Winds over 120 km/h. * */ @XmlEnumValue("hurricaneForceWinds") HURRICANE_FORCE_WINDS("hurricaneForceWinds"), /** * Difficult visibility conditions created by low elevation sunlight. * */ @XmlEnumValue("lowSunGlare") LOW_SUN_GLARE("lowSunGlare"), /** * Misty conditions impairing vision over 100m. * */ @XmlEnumValue("moderateFog") MODERATE_FOG("moderateFog"), /** * High concentrations of ozone are present. * */ @XmlEnumValue("ozonePollution") OZONE_POLLUTION("ozonePollution"), /** * Fog, in which intermittent areas of dense fog may be encountered. 
* */ @XmlEnumValue("patchyFog") PATCHY_FOG("patchyFog"), /** * Unspecified precipitation is falling on the area. * */ @XmlEnumValue("precipitationInTheArea") PRECIPITATION_IN_THE_AREA("precipitationInTheArea"), /** * Rain, visibility more than 50m. * */ @XmlEnumValue("rain") RAIN("rain"), /** * Falling rain is changing to snow. * */ @XmlEnumValue("rainChangingToSnow") RAIN_CHANGING_TO_SNOW("rainChangingToSnow"), /** * Sand blowing across the roadway causing significantly reduced visibility. * */ @XmlEnumValue("sandStorms") SAND_STORMS("sandStorms"), /** * Pollution from exhaust fumes has reached a level sufficient to cause concern. * */ @XmlEnumValue("severeExhaustPollution") SEVERE_EXHAUST_POLLUTION("severeExhaustPollution"), /** * Environmental warning of very poor air quality resulting from smog. * */ @XmlEnumValue("severeSmog") SEVERE_SMOG("severeSmog"), /** * Light rain or intermittent rain. * */ @XmlEnumValue("showers") SHOWERS("showers"), /** * Rain mingled with snow or hail. * */ @XmlEnumValue("sleet") SLEET("sleet"), /** * Environmental warning of poor air quality resulting from smog. * */ @XmlEnumValue("smogAlert") SMOG_ALERT("smogAlert"), /** * Smoke drifting across the roadway causing significantly reduced visibility. * */ @XmlEnumValue("smokeHazard") SMOKE_HAZARD("smokeHazard"), /** * Falling snow is changing to rain. * */ @XmlEnumValue("snowChangingToRain") SNOW_CHANGING_TO_RAIN("snowChangingToRain"), /** * Falling snow, visibility more than 50m. * */ @XmlEnumValue("snowfall") SNOWFALL("snowfall"), /** * Reduced visibility resulting from spray created by moving vehicles on a wet roadway. * */ @XmlEnumValue("sprayHazard") SPRAY_HAZARD("sprayHazard"), /** * Winds between 90 km/h and 120 km/h. * */ @XmlEnumValue("stormForceWinds") STORM_FORCE_WINDS("stormForceWinds"), /** * Constantly varying winds, strong at times. * */ @XmlEnumValue("strongGustsOfWind") STRONG_GUSTS_OF_WIND("strongGustsOfWind"), /** * Winds between 40 km/h and 60 km/h. 
* */ @XmlEnumValue("strongWinds") STRONG_WINDS("strongWinds"), /** * Large numbers of insects which create a hazard for road users through reduced visibility. * */ @XmlEnumValue("swarmsOfInsects") SWARMS_OF_INSECTS("swarmsOfInsects"), /** * The temperature is falling significantly. * */ @XmlEnumValue("temperatureFalling") TEMPERATURE_FALLING("temperatureFalling"), /** * Electrical storms, generally with heavy rain. * */ @XmlEnumValue("thunderstorms") THUNDERSTORMS("thunderstorms"), /** * Very violent, whirling windstorms affecting narrow strips of country. * */ @XmlEnumValue("tornadoes") TORNADOES("tornadoes"), /** * Constantly varying winds, very strong at times. * */ @XmlEnumValue("veryStrongGustsOfWind") VERY_STRONG_GUSTS_OF_WIND("veryStrongGustsOfWind"), /** * Environmental conditions causing reduced visibility. * */ @XmlEnumValue("visibilityReduced") VISIBILITY_REDUCED("visibilityReduced"), /** * Falling snow in blizzard conditions resulting in very reduced visibility. * */ @XmlEnumValue("whiteOut") WHITE_OUT("whiteOut"), /** * Heavy rain, sleet, hail and/or snow in combination with strong winds, limiting visibility to 50m or less. * */ @XmlEnumValue("winterStorm") WINTER_STORM("winterStorm"); private final String value; PoorEnvironmentTypeEnum(String v) { value = v; } public String value() { return value; } public static PoorEnvironmentTypeEnum fromValue(String v) { for (PoorEnvironmentTypeEnum c: PoorEnvironmentTypeEnum.values()) { if (c.value.equals(v)) { return c; } } throw new IllegalArgumentException(v); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataproc/v1beta2/workflow_templates.proto package com.google.cloud.dataproc.v1beta2; /** * * * <pre> * A request to instantiate a workflow template. * </pre> * * Protobuf type {@code google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest} */ public final class InstantiateWorkflowTemplateRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) InstantiateWorkflowTemplateRequestOrBuilder { private static final long serialVersionUID = 0L; // Use InstantiateWorkflowTemplateRequest.newBuilder() to construct. private InstantiateWorkflowTemplateRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InstantiateWorkflowTemplateRequest() { name_ = ""; version_ = 0; instanceId_ = ""; requestId_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private InstantiateWorkflowTemplateRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } case 16: { version_ = input.readInt32(); break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); instanceId_ = s; break; } case 34: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { parameters_ = com.google.protobuf.MapField.newMapField( 
ParametersDefaultEntryHolder.defaultEntry); mutable_bitField0_ |= 0x00000010; } com.google.protobuf.MapEntry<java.lang.String, java.lang.String> parameters__ = input.readMessage( ParametersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); parameters_.getMutableMap().put(parameters__.getKey(), parameters__.getValue()); break; } case 42: { java.lang.String s = input.readStringRequireUtf8(); requestId_ = s; break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto .internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override protected com.google.protobuf.MapField internalGetMapField(int number) { switch (number) { case 4: return internalGetParameters(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto .internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * * * <pre> * Required. 
The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VERSION_FIELD_NUMBER = 2; private int version_; /** * * * <pre> * Optional. The version of workflow template to instantiate. If specified, * the workflow will be instantiated only if the current version of * the workflow template has the supplied version. * This option cannot be used to instantiate a previous version of * workflow template. * </pre> * * <code>int32 version = 2;</code> */ public int getVersion() { return version_; } public static final int INSTANCE_ID_FIELD_NUMBER = 3; private volatile java.lang.Object instanceId_; /** * * * <pre> * Deprecated. Please use `request_id` field instead. 
* NOTE(review): protoc-generated code — do not hand-edit logic; regenerate from the
* workflow_templates.proto definition instead. Lines below are review annotations only.
* This region: accessors for instance_id (field 3, deprecated), request_id (field 5)
* and the parameters map (field 4). String fields are stored as Object and lazily
* decoded from ByteString to String (result cached back into the field).
* </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * * * <pre> * Deprecated. Please use `request_id` field instead. * </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 5; private volatile java.lang.Object requestId_; /** * * * <pre> * Optional. A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries. * It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters. * </pre> * * <code>string request_id = 5;</code> */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries.
* NOTE(review): request_id javadoc continues; next line: getRequestIdBytes plus the
* parameters MapField plumbing (ParametersDefaultEntryHolder defines the string/string
* MapEntry prototype; internalGetParameters falls back to an empty MapField when unset).
* It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters. * </pre> * * <code>string request_id = 5;</code> */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PARAMETERS_FIELD_NUMBER = 4; private static final class ParametersDefaultEntryHolder { static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry = com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance( com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto .internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor, com.google.protobuf.WireFormat.FieldType.STRING, "", com.google.protobuf.WireFormat.FieldType.STRING, ""); } private com.google.protobuf.MapField<java.lang.String, java.lang.String> parameters_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetParameters() { if (parameters_ == null) { return com.google.protobuf.MapField.emptyMapField(ParametersDefaultEntryHolder.defaultEntry); } return parameters_; } public int getParametersCount() { return internalGetParameters().getMap().size(); } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters.
* NOTE(review): parameters javadoc continues; next line: containsParameters,
* the deprecated getParameters alias, getParametersMap and getParametersOrDefault —
* all null-hostile on the key (explicit NullPointerException).
* </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public boolean containsParameters(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } return internalGetParameters().getMap().containsKey(key); } /** Use {@link #getParametersMap()} instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getParameters() { return getParametersMap(); } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public java.util.Map<java.lang.String, java.lang.String> getParametersMap() { return internalGetParameters().getMap(); } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public java.lang.String getParametersOrDefault( java.lang.String key, java.lang.String defaultValue) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetParameters().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters.
* NOTE(review): protoc-generated serialization/equality plumbing — do not hand-edit.
* This region: getParametersOrThrow, isInitialized (memoized, trivially true: no required
* fields), writeTo (fields emitted in tag order 1,2,3,4,5; proto3 default values skipped),
* getSerializedSize (memoized in memoizedSize), equals/hashCode, and static parseFrom
* overloads delegating to PARSER.
* </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public java.lang.String getParametersOrThrow(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetParameters().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (version_ != 0) { output.writeInt32(2, version_); } if (!getInstanceIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, instanceId_); } com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetParameters(), ParametersDefaultEntryHolder.defaultEntry, 4); if (!getRequestIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, requestId_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (version_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, version_); } if (!getInstanceIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, instanceId_); } for (java.util.Map.Entry<java.lang.String, java.lang.String> entry : internalGetParameters().getMap().entrySet()) { com.google.protobuf.MapEntry<java.lang.String, java.lang.String> parameters__ =
// NOTE(review): getSerializedSize continues — each map entry is sized as a nested
// MapEntry message under field 4; then equals (field-by-field, proto3 style) and
// hashCode (memoized in memoizedHashCode; map contributes only when non-empty).
ParametersDefaultEntryHolder.defaultEntry .newBuilderForType() .setKey(entry.getKey()) .setValue(entry.getValue()) .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, parameters__); } if (!getRequestIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, requestId_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest)) { return super.equals(obj); } com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) obj; boolean result = true; result = result && getName().equals(other.getName()); result = result && (getVersion() == other.getVersion()); result = result && getInstanceId().equals(other.getInstanceId()); result = result && getRequestId().equals(other.getRequestId()); result = result && internalGetParameters().equals(other.internalGetParameters()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion(); hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); if (!internalGetParameters().getMap().isEmpty()) { hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + internalGetParameters().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static
// NOTE(review): static parseFrom overloads (ByteBuffer / ByteString / byte[] /
// InputStream), each with and without an ExtensionRegistryLite, delegating to PARSER.
com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return
// NOTE(review): parseDelimitedFrom + CodedInputStream overloads; then the
// newBuilder/toBuilder factory methods and the opening of the Builder javadoc.
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request to instantiate a workflow template.
* NOTE(review): generated Builder for InstantiateWorkflowTemplateRequest — the mutable
* counterpart of the message. clear()/build()/buildPartial()/mergeFrom() plus per-field
* getters/setters mirror the message accessors above. Do not hand-edit; regenerate.
* </pre> * * Protobuf type {@code google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto .internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor; } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMapField(int number) { switch (number) { case 4: return internalGetParameters(); default: throw new RuntimeException("Invalid map field number: " + number); } } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMutableMapField(int number) { switch (number) { case 4: return internalGetMutableParameters(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto .internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.Builder.class); } // Construct using // com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if
// NOTE(review): maybeForceBuilderInitialization is a no-op unless alwaysUseFieldBuilders;
// then clear(), buildPartial() (copies scalar fields, freezes the parameters map via
// makeImmutable), clone() and the generic setField/clearField delegates.
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); name_ = ""; version_ = 0; instanceId_ = ""; requestId_ = ""; internalGetMutableParameters().clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto .internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor; } @java.lang.Override public com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest getDefaultInstanceForType() { return com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest build() { com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest buildPartial() { com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; result.name_ = name_; result.version_ = version_; result.instanceId_ = instanceId_; result.requestId_ = requestId_; result.parameters_ = internalGetParameters(); result.parameters_.makeImmutable(); result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder)
// NOTE(review): generic field mutators delegate to GeneratedMessageV3.Builder;
// mergeFrom(other) copies only non-default fields (proto3 semantics) and merges the
// parameters map; mergeFrom(CodedInputStream) parses via PARSER.
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) { return mergeFrom( (com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest other) { if (other == com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } if (other.getVersion() != 0) { setVersion(other.getVersion()); } if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; onChanged(); } internalGetMutableParameters().mergeFrom(other.internalGetParameters()); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch
// NOTE(review): catch clause of mergeFrom(CodedInputStream) — keeps any partially parsed
// message via getUnfinishedMessage before rethrowing; then Builder state fields and the
// name (field 1) getters/setter.
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * * * <pre> * Required.
* NOTE(review): name javadoc continues — clearName/setNameBytes, then version (field 2)
* accessors and mutators.
The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * * * <pre> * Required. The "resource name" of the workflow template, as described * in https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * </pre> * * <code>string name = 1;</code> */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } private int version_; /** * * * <pre> * Optional. The version of workflow template to instantiate. If specified, * the workflow will be instantiated only if the current version of * the workflow template has the supplied version. * This option cannot be used to instantiate a previous version of * workflow template. * </pre> * * <code>int32 version = 2;</code> */ public int getVersion() { return version_; } /** * * * <pre> * Optional. The version of workflow template to instantiate. If specified, * the workflow will be instantiated only if the current version of * the workflow template has the supplied version. * This option cannot be used to instantiate a previous version of * workflow template. * </pre> * * <code>int32 version = 2;</code> */ public Builder setVersion(int value) { version_ = value; onChanged(); return this; } /** * * * <pre> * Optional. The version of workflow template to instantiate. If specified, * the workflow will be instantiated only if the current version of * the workflow template has the supplied version. * This option cannot be used to instantiate a previous version of * workflow template.
* NOTE(review): version javadoc continues — clearVersion, then the deprecated
* instance_id (field 3) accessors and mutators.
* </pre> * * <code>int32 version = 2;</code> */ public Builder clearVersion() { version_ = 0; onChanged(); return this; } private java.lang.Object instanceId_ = ""; /** * * * <pre> * Deprecated. Please use `request_id` field instead. * </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Deprecated. Please use `request_id` field instead. * </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Deprecated. Please use `request_id` field instead. * </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setInstanceId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; onChanged(); return this; } /** * * * <pre> * Deprecated. Please use `request_id` field instead. * </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public Builder clearInstanceId() { instanceId_ = getDefaultInstance().getInstanceId(); onChanged(); return this; } /** * * * <pre> * Deprecated. Please use `request_id` field instead.
* NOTE(review): instance_id javadoc continues — setInstanceIdBytes, then request_id
* (field 5) Builder accessors.
* </pre> * * <code>string instance_id = 3 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceId_ = value; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries. * It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters. * </pre> * * <code>string request_id = 5;</code> */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries. * It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters. * </pre> * * <code>string request_id = 5;</code> */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional.
* NOTE(review): request_id javadoc continues — setRequestId/clearRequestId mutators.
A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries. * It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters. * </pre> * * <code>string request_id = 5;</code> */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; onChanged(); return this; } /** * * * <pre> * Optional. A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries. * It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters. * </pre> * * <code>string request_id = 5;</code> */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); onChanged(); return this; } /** * * * <pre> * Optional. A tag that prevents multiple concurrent workflow * instances with the same tag from running. This mitigates risk of * concurrent instances started due to retries. * It is recommended to always set this value to a * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). * The tag must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). The maximum length is 40 characters.
* NOTE(review): request_id javadoc continues — setRequestIdBytes; then the Builder-side
* lazily-created mutable parameters MapField (copy-on-write via isMutable/copy) and its
* read accessors.
* </pre> * * <code>string request_id = 5;</code> */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; onChanged(); return this; } private com.google.protobuf.MapField<java.lang.String, java.lang.String> parameters_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetParameters() { if (parameters_ == null) { return com.google.protobuf.MapField.emptyMapField( ParametersDefaultEntryHolder.defaultEntry); } return parameters_; } private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMutableParameters() { onChanged(); ; if (parameters_ == null) { parameters_ = com.google.protobuf.MapField.newMapField(ParametersDefaultEntryHolder.defaultEntry); } if (!parameters_.isMutable()) { parameters_ = parameters_.copy(); } return parameters_; } public int getParametersCount() { return internalGetParameters().getMap().size(); } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public boolean containsParameters(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } return internalGetParameters().getMap().containsKey(key); } /** Use {@link #getParametersMap()} instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getParameters() { return getParametersMap(); } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public java.util.Map<java.lang.String, java.lang.String> getParametersMap() { return internalGetParameters().getMap(); } /** * * * <pre> * Optional.
* NOTE(review): parameters javadoc continues — Builder-side getParametersOrDefault /
* getParametersOrThrow and the clearParameters/removeParameters/getMutableParameters
* mutators.
Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public java.lang.String getParametersOrDefault( java.lang.String key, java.lang.String defaultValue) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetParameters().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public java.lang.String getParametersOrThrow(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetParameters().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public Builder clearParameters() { internalGetMutableParameters().getMutableMap().clear(); return this; } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public Builder removeParameters(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } internalGetMutableParameters().getMutableMap().remove(key); return this; } /** Use alternate mutation accessors instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getMutableParameters() { return internalGetMutableParameters().getMutableMap(); } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters.
* NOTE(review): parameters javadoc continues — putParameters/putAllParameters,
* unknown-fields hooks, end of Builder; then the DEFAULT_INSTANCE singleton and the
* anonymous PARSER begin.
* </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public Builder putParameters(java.lang.String key, java.lang.String value) { if (key == null) { throw new java.lang.NullPointerException(); } if (value == null) { throw new java.lang.NullPointerException(); } internalGetMutableParameters().getMutableMap().put(key, value); return this; } /** * * * <pre> * Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * </pre> * * <code>map&lt;string, string&gt; parameters = 4;</code> */ public Builder putAllParameters(java.util.Map<java.lang.String, java.lang.String> values) { internalGetMutableParameters().getMutableMap().putAll(values); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) private static final com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest(); } public static com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InstantiateWorkflowTemplateRequest> PARSER = new com.google.protobuf.AbstractParser<InstantiateWorkflowTemplateRequest>() { @java.lang.Override public InstantiateWorkflowTemplateRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite
// NOTE(review): tail of the anonymous AbstractParser — parsePartialFrom delegates to the
// parsing constructor; then parser()/getParserForType()/getDefaultInstanceForType and the
// closing brace of the generated message class. Do not hand-edit; regenerate instead.
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new InstantiateWorkflowTemplateRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<InstantiateWorkflowTemplateRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<InstantiateWorkflowTemplateRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.cep.operator;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.cep.Event;
import org.apache.flink.cep.SubEvent;
import org.apache.flink.cep.nfa.NFA;
import org.apache.flink.cep.nfa.compiler.NFACompiler;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.runtime.state.KeyGroupRangeAssignment;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;

import org.junit.Test;

import java.util.List;
import java.util.Map;
import java.util.Queue;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Tests that the keyed CEP operator's state (partial NFA matches plus the timestamps of
 * buffered elements) survives rescaling: a snapshot taken at one parallelism is restored
 * at a different parallelism and the pattern "start -> middle (SubEvent, volume > 5) ->
 * end within 10ms" still fires on the correct sub-task.
 *
 * <p>The event ids used in the tests are hand-picked so that
 * {@link KeyGroupRangeAssignment} routes each key to a known key group / operator index
 * at each parallelism (the {@code assertEquals} calls on the key groups document and
 * guard those choices).
 */
public class CEPRescalingTest {

    /**
     * Scale up from parallelism 1 to 2: snapshot a single task that holds two in-flight
     * partial matches (keys 7 and 10), restore the snapshot into two sub-tasks, and
     * verify each sub-task completes only the match whose key group it now owns.
     */
    @Test
    public void testCEPFunctionScalingUp() throws Exception {
        int maxParallelism = 10;

        KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
            private static final long serialVersionUID = -4873366487571254798L;

            @Override
            public Integer getKey(Event value) throws Exception {
                return value.getId();
            }
        };

        // valid pattern events belong to different keygroups
        // that will be shipped to different tasks when changing parallelism.
        Event startEvent1 = new Event(7, "start", 1.0);
        SubEvent middleEvent1 = new SubEvent(7, "foo", 1.0, 10.0);
        Event endEvent1 = new Event(7, "end", 1.0);

        // pin down the key-group routing assumed by the rest of the test:
        // key 7 -> key group 1 -> operator index 0 at parallelism 2
        int keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent1), maxParallelism);
        assertEquals(1, keygroup);
        assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));

        Event startEvent2 = new Event(10, "start", 1.0); // this will go to task index 1
        SubEvent middleEvent2 = new SubEvent(10, "foo", 1.0, 10.0);
        Event endEvent2 = new Event(10, "end", 1.0);

        // key 10 -> key group 9 -> operator index 1 at parallelism 2
        keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent2), maxParallelism);
        assertEquals(9, keygroup);
        assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));

        // now we start the test, we go from parallelism 1 to 2.
        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = null;
        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness1 = null;
        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness2 = null;

        try {
            harness = getTestHarness(maxParallelism, 1, 0);
            harness.open();

            harness.processElement(new StreamRecord<>(startEvent1, 1)); // valid element
            harness.processElement(new StreamRecord<>(new Event(7, "foobar", 1.0), 2));
            harness.processElement(new StreamRecord<>(startEvent2, 3)); // valid element
            harness.processElement(new StreamRecord<Event>(middleEvent2, 4)); // valid element

            // take a snapshot with some elements in internal sorting queue
            OperatorStateHandles snapshot = harness.snapshot(0, 0);
            harness.close();

            // initialize two sub-tasks with the previously snapshotted state to simulate scaling up

            // we know that the valid element will go to index 0,
            // so we initialize the two tasks and we put the rest of
            // the valid elements for the pattern on task 0.
            harness1 = getTestHarness(maxParallelism, 2, 0);
            harness1.setup();
            harness1.initializeState(snapshot);
            harness1.open();

            // if element timestamps are not correctly checkpointed/restored this will lead to
            // a pruning time underflow exception in NFA
            harness1.processWatermark(new Watermark(2));

            harness1.processElement(new StreamRecord<Event>(middleEvent1, 3)); // valid element
            harness1.processElement(new StreamRecord<>(endEvent1, 5)); // valid element

            harness1.processWatermark(new Watermark(Long.MAX_VALUE));

            // watermarks and the result: watermark(2), the match for key 7, watermark(MAX)
            assertEquals(3, harness1.getOutput().size());
            verifyWatermark(harness1.getOutput().poll(), 2);
            verifyPattern(harness1.getOutput().poll(), startEvent1, middleEvent1, endEvent1);

            harness2 = getTestHarness(maxParallelism, 2, 1);
            harness2.setup();
            harness2.initializeState(snapshot);
            harness2.open();

            // now we move to the second parallel task
            harness2.processWatermark(new Watermark(2));

            harness2.processElement(new StreamRecord<>(endEvent2, 5));
            // key 42 starts a new (never completed) match; it must not disturb key 10's match
            harness2.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4));

            harness2.processWatermark(new Watermark(Long.MAX_VALUE));

            assertEquals(3, harness2.getOutput().size());
            verifyWatermark(harness2.getOutput().poll(), 2);
            verifyPattern(harness2.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
        } finally {
            closeSilently(harness);
            closeSilently(harness1);
            closeSilently(harness2);
        }
    }

    /** Best-effort close used in cleanup paths; ignores any failure on purpose. */
    private static void closeSilently(OneInputStreamOperatorTestHarness<?, ?> harness) {
        if (harness != null) {
            try {
                harness.close();
            } catch (Throwable ignored) {
                // cleanup only -- a close failure must not mask the real test outcome
            }
        }
    }

    /**
     * Scale down from parallelism 3 to 2: snapshot three tasks, repackage their state as
     * if it came from a single operator, restore it into two sub-tasks, and verify that
     * every partial match completes on the sub-task that owns its key group afterwards.
     */
    @Test
    public void testCEPFunctionScalingDown() throws Exception {
        int maxParallelism = 10;

        KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
            private static final long serialVersionUID = -4873366487571254798L;

            @Override
            public Integer getKey(Event value) throws Exception {
                return value.getId();
            }
        };

        // create some valid pattern events on predetermined key groups and task indices

        Event startEvent1 = new Event(7, "start", 1.0); // this will go to task index 0
        SubEvent middleEvent1 = new SubEvent(7, "foo", 1.0, 10.0);
        Event endEvent1 = new Event(7, "end", 1.0);

        // verification of the key choice: key 7 -> group 1 -> index 0 at both parallelisms
        int keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent1), maxParallelism);
        assertEquals(1, keygroup);
        assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
        assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));

        Event startEvent2 = new Event(45, "start", 1.0); // this will go to task index 1
        SubEvent middleEvent2 = new SubEvent(45, "foo", 1.0, 10.0);
        Event endEvent2 = new Event(45, "end", 1.0);

        // key 45 -> group 6 -> index 1 at both parallelisms
        keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent2), maxParallelism);
        assertEquals(6, keygroup);
        assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
        assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));

        Event startEvent3 = new Event(90, "start", 1.0); // this will go to task index 0
        SubEvent middleEvent3 = new SubEvent(90, "foo", 1.0, 10.0);
        Event endEvent3 = new Event(90, "end", 1.0);

        // key 90 -> group 2 -> index 0 at both parallelisms
        keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent3), maxParallelism);
        assertEquals(2, keygroup);
        assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
        assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));

        Event startEvent4 = new Event(10, "start", 1.0); // this will go to task index 2
        SubEvent middleEvent4 = new SubEvent(10, "foo", 1.0, 10.0);
        Event endEvent4 = new Event(10, "end", 1.0);

        // key 10 -> group 9 -> index 2 at parallelism 3, index 1 at parallelism 2
        keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent4), maxParallelism);
        assertEquals(9, keygroup);
        assertEquals(2, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
        assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));

        // starting the test, we will go from parallelism of 3 to parallelism of 2

        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness1 =
            getTestHarness(maxParallelism, 3, 0);
        harness1.open();

        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness2 =
            getTestHarness(maxParallelism, 3, 1);
        harness2.open();

        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness3 =
            getTestHarness(maxParallelism, 3, 2);
        harness3.open();

        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness4 = null;
        OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness5 = null;

        try {
            harness1.processWatermark(Long.MIN_VALUE);
            harness2.processWatermark(Long.MIN_VALUE);
            harness3.processWatermark(Long.MIN_VALUE);

            harness1.processElement(new StreamRecord<>(startEvent1, 1)); // valid element
            harness1.processElement(new StreamRecord<>(new Event(7, "foobar", 1.0), 2));
            harness1.processElement(new StreamRecord<Event>(middleEvent1, 3)); // valid element
            harness1.processElement(new StreamRecord<>(endEvent1, 5)); // valid element

            // till here we have a valid sequence, so after creating the
            // new instance and sending it a watermark, we expect it to fire,
            // even with no new elements.

            harness1.processElement(new StreamRecord<>(startEvent3, 10));
            harness1.processElement(new StreamRecord<>(startEvent1, 10));

            harness2.processElement(new StreamRecord<>(startEvent2, 7));
            harness2.processElement(new StreamRecord<Event>(middleEvent2, 8));

            harness3.processElement(new StreamRecord<>(startEvent4, 15));
            harness3.processElement(new StreamRecord<Event>(middleEvent4, 16));
            harness3.processElement(new StreamRecord<>(endEvent4, 17));

            // so far we only have the initial watermark
            assertEquals(1, harness1.getOutput().size());
            verifyWatermark(harness1.getOutput().poll(), Long.MIN_VALUE);

            assertEquals(1, harness2.getOutput().size());
            verifyWatermark(harness2.getOutput().poll(), Long.MIN_VALUE);

            assertEquals(1, harness3.getOutput().size());
            verifyWatermark(harness3.getOutput().poll(), Long.MIN_VALUE);

            // we take a snapshot and make it look as a single operator
            // this will be the initial state of all downstream tasks.
            OperatorStateHandles snapshot = AbstractStreamOperatorTestHarness.repackageState(
                harness2.snapshot(0, 0),
                harness1.snapshot(0, 0),
                harness3.snapshot(0, 0)
            );

            harness4 = getTestHarness(maxParallelism, 2, 0);
            harness4.setup();
            harness4.initializeState(snapshot);
            harness4.open();

            harness5 = getTestHarness(maxParallelism, 2, 1);
            harness5.setup();
            harness5.initializeState(snapshot);
            harness5.open();

            harness5.processElement(new StreamRecord<>(endEvent2, 11));
            harness5.processWatermark(new Watermark(12));

            verifyPattern(harness5.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
            verifyWatermark(harness5.getOutput().poll(), 12);

            // if element timestamps are not correctly checkpointed/restored this will lead to
            // a pruning time underflow exception in NFA
            harness4.processWatermark(new Watermark(12));

            assertEquals(2, harness4.getOutput().size());
            verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
            verifyWatermark(harness4.getOutput().poll(), 12);

            // complete the pending matches for keys 90 and 7 restored on task 0
            harness4.processElement(new StreamRecord<Event>(middleEvent3, 15)); // valid element
            harness4.processElement(new StreamRecord<>(endEvent3, 16)); // valid element

            harness4.processElement(new StreamRecord<Event>(middleEvent1, 15)); // valid element
            harness4.processElement(new StreamRecord<>(endEvent1, 16)); // valid element

            harness4.processWatermark(new Watermark(Long.MAX_VALUE));
            harness5.processWatermark(new Watermark(Long.MAX_VALUE));

            // verify result
            assertEquals(3, harness4.getOutput().size());

            // check the order of the events in the output; which of the two matches
            // comes first is not fixed, so peek at the head to decide.
            Queue<Object> output = harness4.getOutput();
            StreamRecord<?> resultRecord = (StreamRecord<?>) output.peek();
            assertTrue(resultRecord.getValue() instanceof Map);

            @SuppressWarnings("unchecked")
            Map<String, List<Event>> patternMap = (Map<String, List<Event>>) resultRecord.getValue();
            if (patternMap.get("start").get(0).getId() == 7) {
                verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
                verifyPattern(harness4.getOutput().poll(), startEvent3, middleEvent3, endEvent3);
            } else {
                verifyPattern(harness4.getOutput().poll(), startEvent3, middleEvent3, endEvent3);
                verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
            }

            // after scaling down this should end up here
            assertEquals(2, harness5.getOutput().size());
            verifyPattern(harness5.getOutput().poll(), startEvent4, middleEvent4, endEvent4);
        } finally {
            closeSilently(harness1);
            closeSilently(harness2);
            closeSilently(harness3);
            closeSilently(harness4);
            closeSilently(harness5);
        }
    }

    /** Asserts that {@code outputObject} is a {@link Watermark} with the given timestamp. */
    private void verifyWatermark(Object outputObject, long timestamp) {
        assertTrue(outputObject instanceof Watermark);
        assertEquals(timestamp, ((Watermark) outputObject).getTimestamp());
    }

    /**
     * Asserts that {@code outputObject} is a {@link StreamRecord} holding a completed
     * match map with exactly the given start/middle/end events in first position.
     */
    private void verifyPattern(Object outputObject, Event start, SubEvent middle, Event end) {
        assertTrue(outputObject instanceof StreamRecord);

        StreamRecord<?> resultRecord = (StreamRecord<?>) outputObject;
        assertTrue(resultRecord.getValue() instanceof Map);

        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap = (Map<String, List<Event>>) resultRecord.getValue();
        assertEquals(start, patternMap.get("start").get(0));
        assertEquals(middle, patternMap.get("middle").get(0));
        assertEquals(end, patternMap.get("end").get(0));
    }

    /**
     * Builds a keyed test harness around a {@link KeyedCEPPatternOperator} running the
     * NFA from {@link NFAFactory}, keyed by event id.
     *
     * @param maxParallelism   max parallelism (number of key groups)
     * @param taskParallelism  parallelism of the simulated job
     * @param subtaskIdx       index of the simulated sub-task
     */
    private KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> getTestHarness(
            int maxParallelism,
            int taskParallelism,
            int subtaskIdx) throws Exception {
        KeySelector<Event, Integer> keySelector = new TestKeySelector();
        return new KeyedOneInputStreamOperatorTestHarness<>(
                new KeyedCEPPatternOperator<>(
                        Event.createTypeSerializer(),
                        false,
                        BasicTypeInfo.INT_TYPE_INFO.createSerializer(new ExecutionConfig()),
                        new NFAFactory(),
                        true),
                keySelector,
                BasicTypeInfo.INT_TYPE_INFO,
                maxParallelism,
                taskParallelism,
                subtaskIdx);
    }

    /**
     * Compiles the pattern used by all tests:
     * "start" followed-by "middle" (a {@link SubEvent} with volume &gt; 5.0)
     * followed-by "end", all within 10 milliseconds.
     */
    private static class NFAFactory implements NFACompiler.NFAFactory<Event> {

        private static final long serialVersionUID = 1173020762472766713L;

        private final boolean handleTimeout;

        private NFAFactory() {
            this(false);
        }

        private NFAFactory(boolean handleTimeout) {
            this.handleTimeout = handleTimeout;
        }

        @Override
        public NFA<Event> createNFA() {
            Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {
                        private static final long serialVersionUID = 5726188262756267490L;

                        @Override
                        public boolean filter(Event value) throws Exception {
                            return value.getName().equals("start");
                        }
                    })
                    .followedBy("middle").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {
                        private static final long serialVersionUID = 6215754202506583964L;

                        @Override
                        public boolean filter(SubEvent value) throws Exception {
                            return value.getVolume() > 5.0;
                        }
                    })
                    .followedBy("end").where(new SimpleCondition<Event>() {
                        private static final long serialVersionUID = 7056763917392056548L;

                        @Override
                        public boolean filter(Event value) throws Exception {
                            return value.getName().equals("end");
                        }
                    })
                    // add a window timeout to test whether timestamps of elements in the
                    // priority queue in CEP operator are correctly checkpointed/restored
                    .within(Time.milliseconds(10L));

            return NFACompiler.compile(pattern, Event.createTypeSerializer(), handleTimeout);
        }
    }

    /**
     * A simple {@link KeySelector} that returns as key the id of the {@link Event}
     * provided as argument in the {@link #getKey(Event)}.
     * */
    private static class TestKeySelector implements KeySelector<Event, Integer> {

        private static final long serialVersionUID = -4873366487571254798L;

        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    }
}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0, (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.tle.web.api.item.resource.impl;

import com.dytech.edge.common.FileInfo;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;
import com.tle.common.Check;
import com.tle.common.PathUtils;
import com.tle.common.filesystem.FileEntry;
import com.tle.common.filesystem.handle.FileHandle;
import com.tle.common.filesystem.handle.StagingFile;
import com.tle.common.usermanagement.user.CurrentUser;
import com.tle.core.filesystem.staging.service.StagingService;
import com.tle.core.guice.Bind;
import com.tle.core.mimetypes.MimeTypeService;
import com.tle.core.services.FileSystemService;
import com.tle.exceptions.AccessDeniedException;
import com.tle.web.api.interfaces.beans.BlobBean;
import com.tle.web.api.staging.interfaces.StagingResource;
import com.tle.web.api.staging.interfaces.beans.MultipartBean;
import com.tle.web.api.staging.interfaces.beans.MultipartCompleteBean;
import com.tle.web.api.staging.interfaces.beans.PartBean;
import com.tle.web.api.staging.interfaces.beans.StagingBean;
import com.tle.web.remoting.rest.service.UrlLinkService;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.StreamingOutput;
import javax.ws.rs.core.UriInfo;
import org.apache.log4j.Logger;
import org.jboss.resteasy.util.DateUtil;

/**
 * REST implementation of {@link StagingResource}: create/inspect/delete staging areas and
 * upload, download, copy and multipart-assemble files inside them.
 *
 * <p>All entry points first call {@link #checkPermissions()}, which only rejects guest
 * sessions. File ETags are quoted MD5 checksums computed by {@link FileSystemService}.
 */
@SuppressWarnings("nls")
@Bind(StagingResource.class)
@Singleton
public class StagingResourceImpl implements StagingResource {
  private static final Logger LOGGER = Logger.getLogger(StagingResourceImpl.class);

  // Injected collaborators: MIME lookup, staging lifecycle, raw file I/O, REST link building.
  @Inject private MimeTypeService mimeService;
  @Inject private StagingService stagingService;
  @Inject private FileSystemService fileSystemService;
  @Inject private UrlLinkService urlLinkService;

  /**
   * Creates a fresh staging area and returns 201 with its URI.
   *
   * @return 201 Created; the staging UUID is echoed in the {@code x-eps-stagingid} header.
   */
  @Override
  public Response createStaging() {
    checkPermissions();
    final StagingFile stagingFile = stagingService.createStagingArea();
    // Need compatibility with EPS endpoint :(
    return Response.created(stagingUri(stagingFile.getUuid()))
        .header("x-eps-stagingid", stagingFile.getUuid())
        .build();
  }

  /**
   * Lists the contents of a staging area as a {@link StagingBean} whose blobs are sorted
   * case-insensitively by path.
   *
   * @throws WebApplicationException 404 if the staging area cannot be enumerated
   */
  @Override
  public StagingBean getStaging(UriInfo uriInfo, String stagingUuid) {
    checkPermissions();
    StagingFile stagingFile = getStagingFile(stagingUuid);
    try {
      FileEntry base = fileSystemService.enumerateTree(stagingFile, null, null);
      List<BlobBean> blobs = Lists.newArrayList();
      for (FileEntry fileEntry : base.getFiles()) {
        buildBlobBeans(stagingFile, stagingUuid, blobs, fileEntry, "");
      }
      Collections.sort(
          blobs,
          new Comparator<BlobBean>() {
            @Override
            public int compare(BlobBean o1, BlobBean o2) {
              return o1.getName().compareToIgnoreCase(o2.getName());
            }
          });
      URI directUrl = stagingUri(stagingUuid);
      StagingBean stagingBean = new StagingBean();
      stagingBean.setFiles(blobs);
      stagingBean.setUuid(stagingUuid);
      stagingBean.setDirectUrl(directUrl.toString());
      Map<String, URI> links = Maps.newLinkedHashMap();
      links.put("self", directUrl);
      stagingBean.set("links", links);
      return stagingBean;
    } catch (IOException e) {
      throw new WebApplicationException(Status.NOT_FOUND);
    }
  }

  /**
   * Recursively flattens a {@link FileEntry} tree into {@code blobs}. Folders themselves
   * are not listed -- only their files, with names joined onto {@code currentPath}.
   */
  private void buildBlobBeans(
      FileHandle fileHandle,
      String stagingUuid,
      List<BlobBean> blobs,
      FileEntry entry,
      String currentPath) {
    // Folders are not listed
    if (entry.isFolder()) {
      for (FileEntry subEntry : entry.getFiles()) {
        buildBlobBeans(
            fileHandle, stagingUuid, blobs, subEntry, PathUtils.filePath(currentPath, entry.getName()));
      }
    } else {
      final BlobBean blobBean = new BlobBean();
      final String filename = entry.getName();
      final String filePath = PathUtils.filePath(currentPath, filename);
      try {
        String md5CheckSum = fileSystemService.getMD5Checksum(fileHandle, filePath);
        blobBean.setEtag("\"" + md5CheckSum + "\"");
      } catch (IOException e) {
        // Whatever -- listing proceeds without an ETag for this blob
      }
      blobBean.setName(filePath);
      blobBean.setSize(entry.getLength());
      blobBean.setContentType(mimeService.getMimeTypeForFilename(filename));
      final Map<String, URI> links = new HashMap<>();
      links.put(
          "self",
          urlLinkService
              .getMethodUriBuilder(StagingResource.class, "getFile")
              .build(stagingUuid, filePath));
      blobBean.set("links", links);
      blobs.add(blobBean);
    }
  }

  /**
   * HEAD for a staged file: same headers as {@link #getFile} but no body.
   *
   * @return 200 with length/type/ETag headers, 404 if missing, 500 on I/O failure
   */
  @Override
  public Response headFile(String uuid, String filepath) {
    checkPermissions();
    try {
      ensureFileExists(getStagingFile(uuid), filepath);
      FileInfo fileInfo = fileSystemService.getFileInfo(new StagingFile(uuid), filepath);
      if (fileInfo == null) {
        return Response.status(Status.NOT_FOUND).build();
      }
      return makeResponseHeaders(uuid, filepath).build();
    } catch (IOException io) {
      LOGGER.error("Error getting HEAD for file", io);
      return Response.serverError().build();
    }
  }

  /**
   * Streams a staged file back to the client, honouring conditional-GET headers:
   * {@code If-None-Match} against the quoted MD5 ETag and {@code If-Modified-Since}
   * against the file's last-modified date (string comparison of formatted dates).
   */
  @Override
  public Response getFile(HttpHeaders headers, String uuid, String filepath) {
    checkPermissions();
    final StagingFile stagingFile = getStagingFile(uuid);
    ensureFileExists(stagingFile, filepath);
    try {
      final String etag = headers.getHeaderString(HttpHeaders.IF_NONE_MATCH);
      if (etag != null) {
        String md5Checksum = fileSystemService.getMD5Checksum(stagingFile, filepath);
        String quotedChecksum = "\"" + md5Checksum + "\"";
        if (Objects.equals(etag, quotedChecksum)) {
          return Response.notModified(quotedChecksum).build();
        }
      }
      final String modifiedSince = headers.getHeaderString(HttpHeaders.IF_MODIFIED_SINCE);
      if (modifiedSince != null) {
        final Date lastModified = new Date(fileSystemService.lastModified(stagingFile, filepath));
        if (Objects.equals(modifiedSince, DateUtil.formatDate(lastModified))) {
          return Response.notModified().build();
        }
      }

      final InputStream input = fileSystemService.read(stagingFile, filepath);
      final ResponseBuilder responseBuilder = makeResponseHeaders(uuid, filepath);
      return responseBuilder
          .entity(
              new StreamingOutput() {
                @Override
                public void write(OutputStream output) throws IOException, WebApplicationException {
                  try {
                    ByteStreams.copy(input, output);
                  } finally {
                    // close the source even if the client aborts mid-stream
                    Closeables.close(input, false);
                  }
                }
              })
          .build();
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }

  /**
   * Deletes a single staged file.
   *
   * <p>NOTE(review): {@code uploadId} is accepted but unused here -- presumably part of
   * the shared resource signature; confirm against {@link StagingResource}.
   *
   * @return 204 on success
   * @throws WebApplicationException 404 if absent, 500 if the removal fails
   */
  @Override
  public Response deleteFile(String stagingUuid, String filepath, String uploadId)
      throws IOException {
    checkPermissions();
    final StagingFile stagingFile = getStagingFile(stagingUuid);
    ensureFileExists(stagingFile, filepath);
    boolean removed = fileSystemService.removeFile(stagingFile, filepath);
    if (!removed) {
      throw new WebApplicationException(Status.INTERNAL_SERVER_ERROR);
    }
    return Response.status(Status.NO_CONTENT).build();
  }

  /** Removes an entire staging area. Returns 204; 404 if the area does not exist. */
  @Override
  public Response deleteStaging(String uuid) throws IOException {
    checkPermissions();
    StagingFile stagingFile = getStagingFile(uuid);
    stagingService.removeStagingArea(stagingFile, true);
    return Response.status(Status.NO_CONTENT).build();
  }

  /**
   * Finishes a multipart upload: appends each listed part (stored under
   * {@code multipart/<uploadId>/<partNumber>}) onto {@code filepath} in part-number order
   * as given by the client, then deletes the part folder.
   *
   * <p>NOTE(review): the parts' ETags are collected into {@code etags} but never
   * verified against the stored parts -- confirm whether verification was intended.
   *
   * @throws BadRequestException if no upload with {@code uploadId} exists
   */
  @Override
  public Response completeMultipart(
      String uuid, String filepath, String uploadId, MultipartCompleteBean completion)
      throws IOException {
    checkPermissions();
    StagingFile stagingFile = getStagingFile(uuid);
    List<PartBean> parts = completion.getParts();
    int[] partNumbers = new int[parts.size()];
    String[] etags = new String[parts.size()];
    int i = 0;
    for (PartBean partBean : parts) {
      partNumbers[i] = partBean.getPartNumber();
      etags[i++] = partBean.getEtag();
    }
    String folderPath = "multipart/" + uploadId;
    if (!fileSystemService.fileExists(stagingFile, folderPath)) {
      throw new BadRequestException("Multipart upload doesn't exist: " + uploadId);
    }
    File folder = fileSystemService.getExternalFile(stagingFile, folderPath);
    for (int partNumber : partNumbers) {
      // append == true: each part is concatenated onto the target file
      fileSystemService.write(
          stagingFile,
          filepath,
          fileSystemService.read(stagingFile, folder + "/" + Integer.toString(partNumber)),
          true);
    }
    fileSystemService.removeFile(stagingFile, folderPath);
    ResponseBuilder resp = Response.ok();
    return resp.build();
  }

  /**
   * Starts a multipart upload by creating {@code multipart/<random-uuid>} in the staging
   * area and returning the upload id.
   *
   * @param uploads marker query parameter; must be present (non-null) for this endpoint
   * @throws BadRequestException if {@code uploads} is null (plain PUT should be used)
   */
  @Override
  public MultipartBean startMultipart(String uuid, String filepath, Boolean uploads) {
    checkPermissions();
    if (uploads == null) {
      throw new BadRequestException("Must use PUT for uploading files");
    }
    StagingFile stagingFile = getStagingFile(uuid);
    String uploadId = UUID.randomUUID().toString();
    String folderPath = "multipart/" + uploadId;
    ensureMultipartDir(stagingFile);
    try {
      fileSystemService.mkdir(stagingFile, folderPath);
      return new MultipartBean(uploadId);
    } catch (Exception e) {
      throw new WebApplicationException(Status.INTERNAL_SERVER_ERROR);
    }
  }

  /** Ensures the top-level {@code multipart} folder exists in the staging area. */
  private void ensureMultipartDir(StagingFile handle) {
    try {
      if (!fileSystemService.fileExists(handle, "multipart")) {
        fileSystemService.mkdir(handle, "multipart");
      }
    } catch (Exception e) {
      throw new WebApplicationException(Status.INTERNAL_SERVER_ERROR);
    }
  }

  /**
   * Uploads a file into the staging area, either by server-side copy ({@code copySource}
   * set) or by writing the request body; optionally unzips the result into
   * {@code unzipTo}. Refuses to overwrite an existing path.
   *
   * <p>NOTE(review): {@code partNumber}, {@code uploadId} and {@code size} are unused in
   * this implementation -- presumably handled by other endpoints; verify against the
   * interface contract.
   *
   * @return 200 with the quoted MD5 ETag and a Location header for the new file
   * @throws WebApplicationException 400 if the target already exists
   */
  @Override
  public Response putFile(
      String uuid,
      String filepath,
      InputStream data,
      String unzipTo,
      String copySource,
      int partNumber,
      String uploadId,
      long size,
      String contentType)
      throws IOException {
    checkPermissions();
    final StagingFile stagingFile = getStagingFile(uuid);
    if (fileSystemService.fileExists(stagingFile, filepath)) {
      throw new WebApplicationException(Status.BAD_REQUEST);
    }
    if (!Strings.isNullOrEmpty(copySource)) {
      // server-side copy within the same staging area; request body is ignored
      fileSystemService.copy(stagingFile, copySource, stagingFile, filepath);
      String md5 = fileSystemService.getMD5Checksum(stagingFile, filepath);
      return Response.ok()
          .header(HttpHeaders.ETAG, "\"" + md5 + "\"")
          .location(stagingUri(uuid, filepath))
          .build();
    }
    try (InputStream bd = data) {
      checkValidContentType(contentType);
      FileInfo info = fileSystemService.write(stagingFile, filepath, bd, false, true);
      if (!Check.isEmpty(unzipTo)) {
        fileSystemService.mkdir(stagingFile, unzipTo);
        info = fileSystemService.unzipFile(stagingFile, filepath, unzipTo);
      }
      return Response.ok()
          .header(HttpHeaders.ETAG, "\"" + info.getMd5CheckSum() + "\"")
          .location(stagingUri(uuid, filepath))
          .build();
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }

  /** Throws a 404 {@link WebApplicationException} if {@code filepath} is absent. */
  private void ensureFileExists(StagingFile staging, String filepath) {
    if (!fileSystemService.fileExists(staging, filepath)) {
      throw new WebApplicationException(Status.NOT_FOUND);
    }
  }

  /** Rejects multipart/form-data bodies -- files must be uploaded as raw bytes. */
  private void checkValidContentType(String contentType) {
    if (contentType != null && contentType.startsWith("multipart/form-data")) {
      throw new BadRequestException(
          "Don't use multipart encoding to upload files, upload the file directly");
    }
  }

  /** Resolves a staging UUID to a handle, 404-ing if the area does not exist. */
  private StagingFile getStagingFile(String stagingUuid) {
    ensureStagingExists(stagingUuid);
    return new StagingFile(stagingUuid);
  }

  /**
   * Builds a 200 response pre-populated with Last-Modified, Content-Length, Content-Type
   * and the quoted-MD5 ETag headers for the given staged file.
   */
  private ResponseBuilder makeResponseHeaders(String uuid, String filepath) throws IOException {
    ResponseBuilder builder = Response.ok();
    StagingFile handle = new StagingFile(uuid);
    FileInfo fileInfo = fileSystemService.getFileInfo(handle, filepath);
    builder.lastModified(new Date(fileSystemService.lastModified(handle, filepath)));
    builder.header(HttpHeaders.CONTENT_LENGTH, fileInfo.getLength());
    builder.header(
        HttpHeaders.CONTENT_TYPE, mimeService.getMimeTypeForFilename(fileInfo.getFilename()));
    builder.header(
        HttpHeaders.ETAG, "\"" + fileSystemService.getMD5Checksum(handle, filepath) + "\"");
    return builder;
  }

  /** Throws 404 unless the staging area is known to the service AND exists on disk. */
  private void ensureStagingExists(String stagingUuid) {
    if (!stagingService.stagingExists(stagingUuid)
        || !fileSystemService.fileExists(new StagingFile(stagingUuid), null)) {
      throw new WebApplicationException(Status.NOT_FOUND);
    }
  }

  /** URI of the staging-area listing endpoint for {@code stagingUuid}. */
  private URI stagingUri(String stagingUuid) {
    return urlLinkService
        .getMethodUriBuilder(StagingResource.class, "getStaging")
        .build(stagingUuid);
  }

  /** URI of the file download endpoint for {@code filepath} within the staging area. */
  private URI stagingUri(String stagingUuid, String filepath) {
    return urlLinkService
        .getMethodUriBuilder(StagingResource.class, "getFile")
        .build(stagingUuid, filepath);
  }

  /** Guests may not touch staging areas; any logged-in user may. */
  private void checkPermissions() {
    if (CurrentUser.isGuest()) {
      throw new AccessDeniedException("You need to be logged in to use a staging area.");
    }
  }
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v9.resources; import com.google.api.pathtemplate.PathTemplate; import com.google.api.resourcenames.ResourceName; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. @Generated("by gapic-generator-java") public class SmartCampaignSearchTermViewName implements ResourceName { private static final PathTemplate CUSTOMER_ID_CAMPAIGN_ID_QUERY = PathTemplate.createWithoutUrlEncoding( "customers/{customer_id}/smartCampaignSearchTermViews/{campaign_id}~{query}"); private volatile Map<String, String> fieldValuesMap; private final String customerId; private final String campaignId; private final String query; @Deprecated protected SmartCampaignSearchTermViewName() { customerId = null; campaignId = null; query = null; } private SmartCampaignSearchTermViewName(Builder builder) { customerId = Preconditions.checkNotNull(builder.getCustomerId()); campaignId = Preconditions.checkNotNull(builder.getCampaignId()); query = Preconditions.checkNotNull(builder.getQuery()); } public String getCustomerId() { return customerId; } public String getCampaignId() { return campaignId; } public String getQuery() { return query; } public static Builder newBuilder() { return new 
Builder(); } public Builder toBuilder() { return new Builder(this); } public static SmartCampaignSearchTermViewName of( String customerId, String campaignId, String query) { return newBuilder().setCustomerId(customerId).setCampaignId(campaignId).setQuery(query).build(); } public static String format(String customerId, String campaignId, String query) { return newBuilder() .setCustomerId(customerId) .setCampaignId(campaignId) .setQuery(query) .build() .toString(); } public static SmartCampaignSearchTermViewName parse(String formattedString) { if (formattedString.isEmpty()) { return null; } Map<String, String> matchMap = CUSTOMER_ID_CAMPAIGN_ID_QUERY.validatedMatch( formattedString, "SmartCampaignSearchTermViewName.parse: formattedString not in valid format"); return of(matchMap.get("customer_id"), matchMap.get("campaign_id"), matchMap.get("query")); } public static List<SmartCampaignSearchTermViewName> parseList(List<String> formattedStrings) { List<SmartCampaignSearchTermViewName> list = new ArrayList<>(formattedStrings.size()); for (String formattedString : formattedStrings) { list.add(parse(formattedString)); } return list; } public static List<String> toStringList(List<SmartCampaignSearchTermViewName> values) { List<String> list = new ArrayList<>(values.size()); for (SmartCampaignSearchTermViewName value : values) { if (value == null) { list.add(""); } else { list.add(value.toString()); } } return list; } public static boolean isParsableFrom(String formattedString) { return CUSTOMER_ID_CAMPAIGN_ID_QUERY.matches(formattedString); } @Override public Map<String, String> getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder(); if (customerId != null) { fieldMapBuilder.put("customer_id", customerId); } if (campaignId != null) { fieldMapBuilder.put("campaign_id", campaignId); } if (query != null) { fieldMapBuilder.put("query", query); } 
fieldValuesMap = fieldMapBuilder.build(); } } } return fieldValuesMap; } public String getFieldValue(String fieldName) { return getFieldValuesMap().get(fieldName); } @Override public String toString() { return CUSTOMER_ID_CAMPAIGN_ID_QUERY.instantiate( "customer_id", customerId, "campaign_id", campaignId, "query", query); } @Override public boolean equals(Object o) { if (o == this) { return true; } if (o != null || getClass() == o.getClass()) { SmartCampaignSearchTermViewName that = ((SmartCampaignSearchTermViewName) o); return Objects.equals(this.customerId, that.customerId) && Objects.equals(this.campaignId, that.campaignId) && Objects.equals(this.query, that.query); } return false; } @Override public int hashCode() { int h = 1; h *= 1000003; h ^= Objects.hashCode(customerId); h *= 1000003; h ^= Objects.hashCode(campaignId); h *= 1000003; h ^= Objects.hashCode(query); return h; } /** Builder for customers/{customer_id}/smartCampaignSearchTermViews/{campaign_id}~{query}. */ public static class Builder { private String customerId; private String campaignId; private String query; protected Builder() {} public String getCustomerId() { return customerId; } public String getCampaignId() { return campaignId; } public String getQuery() { return query; } public Builder setCustomerId(String customerId) { this.customerId = customerId; return this; } public Builder setCampaignId(String campaignId) { this.campaignId = campaignId; return this; } public Builder setQuery(String query) { this.query = query; return this; } private Builder(SmartCampaignSearchTermViewName smartCampaignSearchTermViewName) { this.customerId = smartCampaignSearchTermViewName.customerId; this.campaignId = smartCampaignSearchTermViewName.campaignId; this.query = smartCampaignSearchTermViewName.query; } public SmartCampaignSearchTermViewName build() { return new SmartCampaignSearchTermViewName(this); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.metastore;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.apache.hadoop.hive.metastore.api.LockRequest;
import org.apache.hadoop.hive.metastore.api.LockResponse;
import org.apache.hadoop.hive.metastore.api.LockState;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.MetastoreException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.utils.FileUtils;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
import org.apache.hadoop.hive.metastore.utils.ObjectPair;
import org.apache.hadoop.hive.metastore.utils.RetryUtilities;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;

/**
 * Msck repairs table metadata specifically related to partition information to be in-sync with directories in table
 * location.
 */
public class Msck {
  public static final Logger LOG = LoggerFactory.getLogger(Msck.class);
  public static final int separator = 9; // tabCode
  private static final int terminator = 10; // newLineCode
  // Both flags are fixed at construction time.
  private final boolean acquireLock;
  private final boolean deleteData;

  private Configuration conf;
  private IMetaStoreClient msc;

  /**
   * @param acquireLock whether to take an exclusive lock on transactional tables before repairing
   * @param deleteData  whether dropping stale partitions should also delete their data
   */
  public Msck(boolean acquireLock, boolean deleteData) {
    this.acquireLock = acquireLock;
    this.deleteData = deleteData;
  }

  public Configuration getConf() {
    return conf;
  }

  public void setConf(final Configuration conf) {
    this.conf = conf;
  }

  /**
   * Lazily creates the metastore client. A fresh Configuration is derived from the given one so
   * that EXPRESSION_PROXY_CLASS can be overridden without touching the caller's conf.
   */
  public void init(Configuration conf) throws MetaException {
    if (msc == null) {
      // the only reason we are using new conf here is to override EXPRESSION_PROXY_CLASS
      Configuration metastoreConf = MetastoreConf.newMetastoreConf(new Configuration(conf));
      metastoreConf.set(MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS.getVarname(),
        MsckPartitionExpressionProxy.class.getCanonicalName());
      setConf(metastoreConf);
      this.msc = new HiveMetaStoreClient(metastoreConf);
    }
  }

  /**
   * MetastoreCheck, see if the data in the metastore matches what is on the
   * dfs. Current version checks for tables and partitions that are either
   * missing on disk on in the metastore.
   *
   * @param msckInfo Information about the tables and partitions we want to check for.
   * @return Returns 0 when execution succeeds and above 0 if it fails.
   */
  public int repair(MsckInfo msckInfo) {
    CheckResult result = new CheckResult();
    List<String> repairOutput = new ArrayList<>();
    String qualifiedTableName = null;
    boolean success = false;
    long txnId = -1;
    int ret = 0;
    try {
      Table table = getMsc().getTable(msckInfo.getCatalogName(), msckInfo.getDbName(), msckInfo.getTableName());
      qualifiedTableName = Warehouse.getCatalogQualifiedTableName(table);
      if (getConf().getBoolean(MetastoreConf.ConfVars.MSCK_REPAIR_ENABLE_PARTITION_RETENTION.getHiveName(), false)) {
        msckInfo.setPartitionExpirySeconds(PartitionManagementTask.getRetentionPeriodInSeconds(table));
        LOG.info("{} - Retention period ({}s) for partition is enabled for MSCK REPAIR..",
          qualifiedTableName, msckInfo.getPartitionExpirySeconds());
      }
      HiveMetaStoreChecker checker = new HiveMetaStoreChecker(getMsc(), getConf(), msckInfo.getPartitionExpirySeconds());
      // checkMetastore call will fill in result with partitions that are present in filesystem
      // and missing in metastore - accessed through getPartitionsNotInMs
      // And partitions that are not present in filesystem and metadata exists in metastore -
      // accessed through getPartitionNotOnFS
      checker.checkMetastore(msckInfo.getCatalogName(), msckInfo.getDbName(), msckInfo.getTableName(),
        msckInfo.getPartSpecs(), result);
      Set<CheckResult.PartitionResult> partsNotInMs = result.getPartitionsNotInMs();
      Set<CheckResult.PartitionResult> partsNotInFs = result.getPartitionsNotOnFs();
      Set<CheckResult.PartitionResult> expiredPartitions = result.getExpiredPartitions();
      int totalPartsToFix = partsNotInMs.size() + partsNotInFs.size() + expiredPartitions.size();
      // if nothing changed to partitions and if we are not repairing (add or drop) don't acquire for lock unnecessarily
      boolean lockRequired = totalPartsToFix > 0 &&
        msckInfo.isRepairPartitions() &&
        (msckInfo.isAddPartitions() || msckInfo.isDropPartitions());
      LOG.info("{} - #partsNotInMs: {} #partsNotInFs: {} #expiredPartitions: {} lockRequired: {} (R: {} A: {} D: {})",
        qualifiedTableName, partsNotInMs.size(), partsNotInFs.size(), expiredPartitions.size(), lockRequired,
        msckInfo.isRepairPartitions(), msckInfo.isAddPartitions(), msckInfo.isDropPartitions());

      if (msckInfo.isRepairPartitions()) {
        // Repair metadata in HMS
        long lockId;
        if (acquireLock && lockRequired && table.getParameters() != null &&
          MetaStoreServerUtils.isTransactionalTable(table.getParameters())) {
          // Running MSCK from beeline/cli will make DDL task acquire X lock when repair is enabled, since we are directly
          // invoking msck.repair() without SQL statement, we need to do the same and acquire X lock (repair is default)
          LockRequest lockRequest = createLockRequest(msckInfo.getDbName(), msckInfo.getTableName());
          txnId = lockRequest.getTxnid();
          try {
            LockResponse res = getMsc().lock(lockRequest);
            if (res.getState() != LockState.ACQUIRED) {
              throw new MetastoreException("Unable to acquire lock(X) on " + qualifiedTableName);
            }
            lockId = res.getLockid();
          } catch (TException e) {
            throw new MetastoreException("Unable to acquire lock(X) on " + qualifiedTableName, e);
          }
          LOG.info("Acquired lock(X) on {}. LockId: {}", qualifiedTableName, lockId);
        }
        int maxRetries = MetastoreConf.getIntVar(getConf(), MetastoreConf.ConfVars.MSCK_REPAIR_BATCH_MAX_RETRIES);
        int decayingFactor = 2;

        if (msckInfo.isAddPartitions() && !partsNotInMs.isEmpty()) {
          // MSCK called to add missing paritions into metastore and there are
          // missing partitions.
          int batchSize = MetastoreConf.getIntVar(getConf(), MetastoreConf.ConfVars.MSCK_REPAIR_BATCH_SIZE);
          if (batchSize == 0) {
            // batching is not enabled. Try to add all the partitions in one call
            batchSize = partsNotInMs.size();
          }

          AbstractList<String> vals = null;
          String settingStr = MetastoreConf.getVar(getConf(), MetastoreConf.ConfVars.MSCK_PATH_VALIDATION);
          boolean doValidate = !("ignore".equals(settingStr));
          boolean doSkip = doValidate && "skip".equals(settingStr);
          // The default setting is "throw"; assume doValidate && !doSkip means throw.
          if (doValidate) {
            // Validate that we can add partition without escaping. Escaping was originally intended
            // to avoid creating invalid HDFS paths; however, if we escape the HDFS path (that we
            // deem invalid but HDFS actually supports - it is possible to create HDFS paths with
            // unprintable characters like ASCII 7), metastore will create another directory instead
            // of the one we are trying to "repair" here.
            Iterator<CheckResult.PartitionResult> iter = partsNotInMs.iterator();
            while (iter.hasNext()) {
              CheckResult.PartitionResult part = iter.next();
              try {
                vals = Warehouse.makeValsFromName(part.getPartitionName(), vals);
              } catch (MetaException ex) {
                throw new MetastoreException(ex);
              }
              for (String val : vals) {
                String escapedPath = FileUtils.escapePathName(val);
                assert escapedPath != null;
                if (escapedPath.equals(val)) {
                  continue;
                }
                String errorMsg = "Repair: Cannot add partition " + msckInfo.getTableName() + ':' +
                  part.getPartitionName() + " due to invalid characters in the name";
                if (doSkip) {
                  repairOutput.add(errorMsg);
                  iter.remove();
                  // BUG FIX: stop scanning this partition's remaining values. Without the break,
                  // a second invalid value would call iter.remove() again on an already-removed
                  // element, throwing IllegalStateException and failing the whole repair.
                  break;
                } else {
                  throw new MetastoreException(errorMsg);
                }
              }
            }
          }
          try {
            createPartitionsInBatches(getMsc(), repairOutput, partsNotInMs, table, batchSize,
              decayingFactor, maxRetries);
          } catch (Exception e) {
            throw new MetastoreException(e);
          }
        }

        if (msckInfo.isDropPartitions() && (!partsNotInFs.isEmpty() || !expiredPartitions.isEmpty())) {
          // MSCK called to drop stale paritions from metastore and there are
          // stale partitions.
          int batchSize = MetastoreConf.getIntVar(getConf(), MetastoreConf.ConfVars.MSCK_REPAIR_BATCH_SIZE);
          if (batchSize == 0) {
            // batching is not enabled. Try to drop all the partitions in one call
            batchSize = partsNotInFs.size() + expiredPartitions.size();
          }
          try {
            dropPartitionsInBatches(getMsc(), repairOutput, partsNotInFs, expiredPartitions, table,
              batchSize, decayingFactor, maxRetries);
          } catch (Exception e) {
            throw new MetastoreException(e);
          }
        }
      }
      success = true;
    } catch (Exception e) {
      LOG.warn("Failed to run metacheck: ", e);
      success = false;
      ret = 1;
    } finally {
      // Always: write the result file if requested, log the summary, then commit/abort the txn
      // (depending on success) and close the client.
      if (msckInfo.getResFile() != null) {
        BufferedWriter resultOut = null;
        try {
          Path resFile = new Path(msckInfo.getResFile());
          FileSystem fs = resFile.getFileSystem(getConf());
          // FIX: write the result file with an explicit charset instead of the platform default
          // (StandardCharsets was already imported but unused for this writer).
          resultOut = new BufferedWriter(new OutputStreamWriter(fs.create(resFile), StandardCharsets.UTF_8));

          boolean firstWritten = false;
          firstWritten |= writeMsckResult(result.getTablesNotInMs(),
            "Tables not in metastore:", resultOut, firstWritten);
          firstWritten |= writeMsckResult(result.getTablesNotOnFs(),
            "Tables missing on filesystem:", resultOut, firstWritten);
          firstWritten |= writeMsckResult(result.getPartitionsNotInMs(),
            "Partitions not in metastore:", resultOut, firstWritten);
          firstWritten |= writeMsckResult(result.getPartitionsNotOnFs(),
            "Partitions missing from filesystem:", resultOut, firstWritten);
          firstWritten |= writeMsckResult(result.getExpiredPartitions(),
            "Expired partitions (retention period: " + msckInfo.getPartitionExpirySeconds() + "s) :",
            resultOut, firstWritten);
          // sorting to stabilize qfile output (msck_repair_drop.q)
          Collections.sort(repairOutput);
          for (String rout : repairOutput) {
            if (firstWritten) {
              resultOut.write(terminator);
            } else {
              firstWritten = true;
            }
            resultOut.write(rout);
          }
        } catch (IOException e) {
          LOG.warn("Failed to save metacheck output: ", e);
          ret = 1;
        } finally {
          if (resultOut != null) {
            try {
              resultOut.close();
            } catch (IOException e) {
              LOG.warn("Failed to close output file: ", e);
              ret = 1;
            }
          }
        }
      }

      LOG.info("Tables not in metastore: {}", result.getTablesNotInMs());
      LOG.info("Tables missing on filesystem: {}", result.getTablesNotOnFs());
      LOG.info("Partitions not in metastore: {}", result.getPartitionsNotInMs());
      LOG.info("Partitions missing from filesystem: {}", result.getPartitionsNotOnFs());
      LOG.info("Expired partitions: {}", result.getExpiredPartitions());

      if (acquireLock && txnId > 0) {
        // Committing or aborting the transaction also releases the X lock taken above.
        if (success) {
          try {
            LOG.info("txnId: {} succeeded. Committing..", txnId);
            getMsc().commitTxn(txnId);
          } catch (Exception e) {
            LOG.warn("Error while committing txnId: {} for table: {}", txnId, qualifiedTableName, e);
            ret = 1;
          }
        } else {
          try {
            LOG.info("txnId: {} failed. Aborting..", txnId);
            getMsc().abortTxns(Lists.newArrayList(txnId));
          } catch (Exception e) {
            LOG.warn("Error while aborting txnId: {} for table: {}", txnId, qualifiedTableName, e);
            ret = 1;
          }
        }
      }
      if (getMsc() != null) {
        getMsc().close();
        msc = null;
      }
    }
    return ret;
  }

  /**
   * Opens a new transaction for the current user and builds an exclusive lock request on the
   * given table within that transaction. The returned request carries the txn id.
   */
  private LockRequest createLockRequest(final String dbName, final String tableName) throws TException {
    UserGroupInformation loggedInUser = null;
    String username;
    try {
      loggedInUser = UserGroupInformation.getLoginUser();
    } catch (IOException e) {
      LOG.warn("Unable to get logged in user via UGI. err: {}", e.getMessage());
    }
    if (loggedInUser == null) {
      // Fall back to the JVM-level user when UGI lookup fails.
      username = System.getProperty("user.name");
    } else {
      username = loggedInUser.getShortUserName();
    }
    long txnId = getMsc().openTxn(username);
    String agentInfo = Thread.currentThread().getName();

    LockRequestBuilder requestBuilder = new LockRequestBuilder(agentInfo);
    requestBuilder.setUser(username);
    requestBuilder.setTransactionId(txnId);

    LockComponentBuilder lockCompBuilder = new LockComponentBuilder()
      .setDbName(dbName)
      .setTableName(tableName)
      .setIsTransactional(true)
      .setExclusive()
      // WriteType is DDL_EXCLUSIVE for MSCK REPAIR so we need NO_TXN. Refer AcidUtils.makeLockComponents
      .setOperationType(DataOperationType.NO_TXN);
    requestBuilder.addLockComponent(lockCompBuilder.build());

    LOG.info("Created lock(X) request with info - user: {} txnId: {} agentInfo: {} dbName: {} tableName: {}",
      username, txnId, agentInfo, dbName, tableName);
    return requestBuilder.build();
  }

  public IMetaStoreClient getMsc() {
    return msc;
  }

  /**
   * Adds the given missing partitions to the metastore in batches of at most {@code batchSize},
   * retrying with exponentially smaller batches (divided by {@code decayingFactor}) up to
   * {@code maxRetries} times on failure. Successful additions are appended to {@code repairOutput}.
   */
  @VisibleForTesting
  public void createPartitionsInBatches(final IMetaStoreClient metastoreClient, List<String> repairOutput,
    Set<CheckResult.PartitionResult> partsNotInMs, Table table, int batchSize, int decayingFactor, int maxRetries)
    throws Exception {
    String addMsgFormat = "Repair: Added partition to metastore " + table.getTableName() + ":%s";
    // Work on a copy so the caller's set is not mutated while batching.
    Set<CheckResult.PartitionResult> batchWork = new HashSet<>(partsNotInMs);
    new RetryUtilities.ExponentiallyDecayingBatchWork<Void>(batchSize, decayingFactor, maxRetries) {
      @Override
      public Void execute(int size) throws MetastoreException {
        try {
          while (!batchWork.isEmpty()) {
            List<Partition> partsToAdd = new ArrayList<>();
            // get the current batch size
            int currentBatchSize = size;
            // store the partitions temporarily until processed
            List<CheckResult.PartitionResult> lastBatch = new ArrayList<>(currentBatchSize);
            List<String> addMsgs = new ArrayList<>(currentBatchSize);
            // add the number of partitions given by the current batchsize
            for (CheckResult.PartitionResult part : batchWork) {
              if (currentBatchSize == 0) {
                break;
              }
              Path tablePath = MetaStoreServerUtils.getPath(table);
              if (tablePath == null) {
                continue;
              }
              Map<String, String> partSpec = Warehouse.makeSpecFromName(part.getPartitionName());
              Path location = new Path(tablePath, Warehouse.makePartPath(partSpec));
              Partition partition = MetaStoreServerUtils.createMetaPartitionObject(table, partSpec, location);
              partition.setWriteId(table.getWriteId());
              partsToAdd.add(partition);
              lastBatch.add(part);
              addMsgs.add(String.format(addMsgFormat, part.getPartitionName()));
              currentBatchSize--;
            }
            metastoreClient.add_partitions(partsToAdd, true, false);
            // if last batch is successful remove it from partsNotInMs
            batchWork.removeAll(lastBatch);
            repairOutput.addAll(addMsgs);
          }
          return null;
        } catch (TException e) {
          throw new MetastoreException(e);
        }
      }
    }.run();
  }

  /**
   * Builds a partition filter expression of the form k1='v1' AND k2='v2' from a partition spec,
   * escaping keys and values as path names.
   *
   * @throws MetaException when any value in the spec is null or empty
   */
  private static String makePartExpr(Map<String, String> spec)
    throws MetaException {
    StringBuilder suffixBuf = new StringBuilder();
    int i = 0;
    for (Map.Entry<String, String> e : spec.entrySet()) {
      if (e.getValue() == null || e.getValue().length() == 0) {
        throw new MetaException("Partition spec is incorrect. " + spec);
      }
      if (i > 0) {
        suffixBuf.append(" AND ");
      }
      suffixBuf.append(Warehouse.escapePathName(e.getKey()));
      suffixBuf.append('=');
      suffixBuf.append("'").append(Warehouse.escapePathName(e.getValue())).append("'");
      i++;
    }
    return suffixBuf.toString();
  }

  // Drops partitions in batches. partNotInFs is split into batches based on batchSize
  // and dropped. The dropping will be through RetryUtilities which will retry when there is a
  // failure after reducing the batchSize by decayingFactor. Retrying will cease when maxRetries
  // limit is reached or batchSize reduces to 0, whichever comes earlier.
  @VisibleForTesting
  public void dropPartitionsInBatches(final IMetaStoreClient metastoreClient, List<String> repairOutput,
    Set<CheckResult.PartitionResult> partsNotInFs, Set<CheckResult.PartitionResult> expiredPartitions,
    Table table, int batchSize, int decayingFactor, int maxRetries) throws Exception {
    String dropMsgFormat =
      "Repair: Dropped partition from metastore " + Warehouse.getCatalogQualifiedTableName(table) + ":%s";
    // Copy of partitions that will be split into batches
    Set<CheckResult.PartitionResult> batchWork = new HashSet<>(partsNotInFs);
    if (expiredPartitions != null && !expiredPartitions.isEmpty()) {
      batchWork.addAll(expiredPartitions);
    }
    PartitionDropOptions dropOptions = new PartitionDropOptions().deleteData(deleteData).ifExists(true);
    new RetryUtilities.ExponentiallyDecayingBatchWork<Void>(batchSize, decayingFactor, maxRetries) {
      @Override
      public Void execute(int size) throws MetastoreException {
        try {
          while (!batchWork.isEmpty()) {
            int currentBatchSize = size;

            // to store the partitions that are currently being processed
            List<CheckResult.PartitionResult> lastBatch = new ArrayList<>(currentBatchSize);

            // drop messages for the dropped partitions
            List<String> dropMsgs = new ArrayList<>(currentBatchSize);

            // Partitions to be dropped
            List<String> dropParts = new ArrayList<>(currentBatchSize);

            for (CheckResult.PartitionResult part : batchWork) {
              // This batch is full: break out of for loop to execute
              if (currentBatchSize == 0) {
                break;
              }

              dropParts.add(part.getPartitionName());

              // Add the part to lastBatch to track the parition being dropped
              lastBatch.add(part);

              // Update messages
              dropMsgs.add(String.format(dropMsgFormat, part.getPartitionName()));

              // Decrement batch size. When this gets to 0, the batch will be executed
              currentBatchSize--;
            }

            // this call is deleting partitions that are already missing from filesystem
            // so 3rd parameter (deleteData) is set to false
            // msck is doing a clean up of hms. if for some reason the partition is already
            // deleted, then it is good. So, the last parameter ifexists is set to true
            List<ObjectPair<Integer, byte[]>> partExprs = getPartitionExpr(dropParts);
            metastoreClient.dropPartitions(table.getCatName(), table.getDbName(), table.getTableName(),
              partExprs, dropOptions);

            // if last batch is successful remove it from partsNotInFs
            batchWork.removeAll(lastBatch);
            repairOutput.addAll(dropMsgs);
          }
          return null;
        } catch (TException e) {
          throw new MetastoreException(e);
        }
      }

      // Converts each partition name into an (index, serialized filter expression) pair
      // consumable by dropPartitions.
      private List<ObjectPair<Integer, byte[]>> getPartitionExpr(final List<String> parts) throws MetaException {
        List<ObjectPair<Integer, byte[]>> expr = new ArrayList<>(parts.size());
        for (int i = 0; i < parts.size(); i++) {
          String partName = parts.get(i);
          Map<String, String> partSpec = Warehouse.makeSpecFromName(partName);
          String partExpr = makePartExpr(partSpec);
          if (LOG.isDebugEnabled()) {
            LOG.debug("Generated partExpr: {} for partName: {}", partExpr, partName);
          }
          expr.add(new ObjectPair<>(i, partExpr.getBytes(StandardCharsets.UTF_8)));
        }
        return expr;
      }
    }.run();
  }

  /**
   * Write the result of msck to a writer.
   *
   * @param result The result we're going to write
   * @param msg    Message to write.
   * @param out    Writer to write to
   * @param wrote  if any previous call wrote data
   * @return true if something was written
   * @throws IOException In case the writing fails
   */
  private boolean writeMsckResult(Set<?> result, String msg, Writer out, boolean wrote) throws IOException {
    if (!result.isEmpty()) {
      if (wrote) {
        out.write(terminator);
      }
      out.write(msg);
      for (Object entry : result) {
        out.write(separator);
        out.write(entry.toString());
      }
      return true;
    }
    return false;
  }
}
/*******************************************************************************
 * Copyright (c) 2013, 2016 Pivotal Software Inc. and IBM Corporation
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Apache License v2.0 which accompanies this distribution.
 *
 * The Eclipse Public License is available at
 *
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * and the Apache License v2.0 is available at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * You may elect to redistribute this code under either of these licenses.
 *
 * Contributors:
 *     Pivotal Software, Inc. - initial API and implementation
 *     IBM Corporation - Add additional async invocation method
 ********************************************************************************/
package org.eclipse.cft.server.ui.internal.wizards;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.eclipse.cft.server.core.internal.CloudFoundryPlugin;
import org.eclipse.cft.server.ui.internal.CFUiUtil;
import org.eclipse.cft.server.ui.internal.ICoreRunnable;
import org.eclipse.cft.server.ui.internal.IEventSource;
import org.eclipse.cft.server.ui.internal.IPartChangeListener;
import org.eclipse.cft.server.ui.internal.Messages;
import org.eclipse.cft.server.ui.internal.PartChangeEvent;
import org.eclipse.cft.server.ui.internal.WizardPartChangeEvent;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.dialogs.DialogPage;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.progress.UIJob;

/**
 * Wizard page that manages multiple UI parts, and handles errors from each
 * part. In terms of errors, UI parts are each treated as atomic units, meaning
 * that any error generated by any part is considered to be from the part as a
 * whole, rather from individual controls in that part.
 *
 * <p/>
 * In order for the page to manage errors from parts, the page MUST be added as
 * a listener to each UI Part that is created.
 *
 */
public abstract class PartsWizardPage extends WizardPage implements IPartChangeListener {

	// Tracks, per event source (UI part), the latest non-OK status it reported.
	// handleChange removes an entry as soon as the part reports OK again.
	protected Map<IEventSource<?>, IStatus> partStatus = new HashMap<IEventSource<?>, IStatus>();

	protected PartsWizardPage(String pageName, String title, ImageDescriptor titleImage) {
		super(pageName, title, titleImage);
	}

	/**
	 * Returns the first unresolved (non-OK) status of any tracked part, or
	 * {@code null} if every part is OK.
	 *
	 * <p/>
	 * FIX: the previous implementation returned the <i>last</i> entry's status
	 * even when that status was OK, relying on the unstated invariant that the
	 * map only ever holds non-OK entries. This version enforces the method's
	 * contract explicitly: only a non-OK status is ever returned.
	 */
	protected IStatus getNextNonOKStatus() {
		for (Entry<IEventSource<?>, IStatus> entry : partStatus.entrySet()) {
			IStatus status = entry.getValue();
			if (status != null && !status.isOK()) {
				return status;
			}
		}
		return null;
	}

	public void handleChange(PartChangeEvent event) {
		IStatus status = event.getStatus();
		if (status == null) {
			status = Status.OK_STATUS;
		}

		// If the part indicates its OK, remove it from the list of tracked
		// parts, as any error it would have previously
		// generated has now been fixed.
		if (status.isOK()) {
			partStatus.remove(event.getSource());

			// Check if there are other errors that haven't yet been resolved;
			// if so, surface one of them instead of the OK status.
			IStatus pending = getNextNonOKStatus();
			if (pending != null) {
				status = pending;
			}
		}
		else if (event.getSource() != null) {
			partStatus.put(event.getSource(), status);
		}

		boolean updateButtons = !(event instanceof WizardPartChangeEvent)
				|| ((WizardPartChangeEvent) event).updateWizardButtons();

		update(updateButtons, status);
	}

	@Override
	public boolean isPageComplete() {
		// Complete only when no part has an unresolved non-OK status.
		return getNextNonOKStatus() == null;
	}

	/**
	 * This should be the ONLY way to notify the wizard page whether the page is
	 * complete or not, as well as display any error or warning messages.
	 *
	 * <p/>
	 *
	 * The wizard page will only be complete if it receives an OK status.
	 *
	 * <p/>
	 *
	 * It is up to the caller to correctly set the OK state of the page in case
	 * it sets a non-OK status, and the non-OK status gets resolved.
	 *
	 * @param updateButtons true if force the wizard button states to be
	 * refreshed. NOTE that if true, it is up to the caller to ensure that the
	 * wizard page has been added to the wizard , and the wizard page is
	 * visible.
	 * @param status if status is OK, the wizard can complete. False otherwise.
	 */
	protected void update(boolean updateButtons, IStatus status) {
		if (status == null) {
			status = Status.OK_STATUS;
		}

		if (status.isOK()) {
			setErrorMessage(null);
		}
		else if (status.getSeverity() == IStatus.ERROR) {
			setErrorMessage(status.getMessage() != null ? status.getMessage() : Messages.PartsWizardPage_ERROR_UNKNOWN);
		}
		else if (status.getSeverity() == IStatus.INFO) {
			setMessage(status.getMessage(), DialogPage.INFORMATION);
		}
		else if (status.getSeverity() == IStatus.WARNING) {
			setMessage(status.getMessage(), DialogPage.WARNING);
		}

		// Container or page may not be available when update request is received
		if (updateButtons && getWizard() != null && getWizard().getContainer() != null
				&& getWizard().getContainer().getCurrentPage() != null) {
			getWizard().getContainer().updateButtons();
		}
	}

	@Override
	public void setVisible(boolean visible) {
		super.setVisible(visible);
		if (visible) {
			if (getPreviousPage() == null) {
				// NOTE(review): the comment claims "delay until dialog is actually
				// visible" but the call below runs immediately when the control is
				// not disposed — confirm whether a Display.asyncExec was intended.
				if (!getControl().isDisposed()) {
					performWhenPageVisible();
				}
			}
			else {
				performWhenPageVisible();
			}

			// Re-surface any unresolved part error when the page is shown.
			IStatus status = getNextNonOKStatus();
			if (status != null) {
				update(true, status);
			}
		}
	}

	protected void performWhenPageVisible() {
		// Do nothing by default;
	}

	/** Runs the specific runnable without using the wizard container progress context */
	protected void runAsync(final ICoreRunnable runnable, String operationLabel) {
		if (runnable == null) {
			return;
		}
		if (operationLabel == null) {
			operationLabel = ""; //$NON-NLS-1$
		}

		Job job = new Job(operationLabel) {

			@Override
			protected IStatus run(IProgressMonitor monitor) {
				CoreException cex = null;
				try {
					runnable.run(monitor);
				}
				catch (OperationCanceledException e) {
					// Not an error. User can still enter manual values
				}
				catch (CoreException ce) {
					cex = ce;
				}
				// Do not update the wizard with an error, as users can still
				// complete the wizard with manual values.
				if (cex != null) {
					CloudFoundryPlugin.logError(cex);
				}
				return Status.OK_STATUS;
			}

		};
		job.setSystem(false);
		job.schedule();
	}

	/**
	 * Runs the specified runnable asynchronously in a worker thread. Caller is
	 * responsible for ensuring that any UI behaviour in the runnable is
	 * executed in the UI thread, either synchronously (synch exec through
	 * {@link Display} or asynch through {@link Display} or {@link UIJob}).
	 * @param runnable
	 * @param operationLabel
	 */
	protected void runAsynchWithWizardProgress(final ICoreRunnable runnable, String operationLabel) {
		if (runnable == null) {
			return;
		}
		if (operationLabel == null) {
			operationLabel = ""; //$NON-NLS-1$
		}

		// Asynch launch as a UI job, as the wizard messages get updated before
		// and after the forked operation
		UIJob job = new UIJob(operationLabel) {

			@Override
			public IStatus runInUIThread(IProgressMonitor monitor) {
				CoreException cex = null;
				try {
					// Fork in a worker thread.
					CFUiUtil.runForked(runnable, getWizard().getContainer());
				}
				catch (OperationCanceledException e) {
					// Not an error. User can still enter manual values
				}
				catch (CoreException ce) {
					cex = ce;
				}
				// Do not update the wizard with an error, as users can still
				// complete the wizard with manual values.
				if (cex != null) {
					CloudFoundryPlugin.logError(cex);
				}
				return Status.OK_STATUS;
			}

		};
		job.setSystem(true);
		job.schedule();
	}
}
/** * Copyright 2009-2014 Ibrahim Chaehoi * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package net.jawr.web.resource.bundle.factory; import static net.jawr.web.resource.bundle.factory.PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_DEBUG_URL; import static net.jawr.web.resource.bundle.factory.PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_MAPPINGS; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import net.jawr.web.resource.bundle.DebugInclusion; import net.jawr.web.resource.bundle.InclusionPattern; import net.jawr.web.resource.bundle.JoinableResourceBundle; import net.jawr.web.resource.bundle.JoinableResourceBundleImpl; import net.jawr.web.resource.bundle.factory.postprocessor.PostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.util.PropertiesConfigHelper; import net.jawr.web.resource.bundle.generator.GeneratorRegistry; import net.jawr.web.resource.bundle.variant.VariantSet; import net.jawr.web.resource.handler.reader.ResourceReaderHandler; import net.jawr.web.util.StringUtils; /** * This factory is used to build JoinableResourceBundle from the generated properties mapping file, * which contains all calculated information about the bundle. 
* * @author Ibrahim Chaehoi * */ public class FullMappingPropertiesBasedBundlesHandlerFactory { /** The post processor chain factory */ private PostProcessorChainFactory chainFactory; /** The resource type */ private String resourceType; /** The resource handler */ private ResourceReaderHandler rsReaderHandler; /** The generator registry */ private GeneratorRegistry generatorRegistry; /** * Create a PropertiesBasedBundlesHandlerFactory using the specified properties. * * @param resourceType js or css * @param rsHandler ResourceHandler to access files. * @param generatorRegistry the generator registry * @param chainFactory the post processor chain factory */ public FullMappingPropertiesBasedBundlesHandlerFactory(String resourceType, ResourceReaderHandler rsHandler, GeneratorRegistry generatorRegistry, PostProcessorChainFactory chainFactory) { this.resourceType = resourceType; this.chainFactory = chainFactory; this.rsReaderHandler = rsHandler; this.generatorRegistry = generatorRegistry; } /** * Returns the list of joinable resource bundle * * @return the list of joinable resource bundle */ public List<JoinableResourceBundle> getResourceBundles(Properties properties) { PropertiesConfigHelper props = new PropertiesConfigHelper(properties, resourceType); String fileExtension = "." 
+ resourceType; // Initialize custom bundles List<JoinableResourceBundle> customBundles = new ArrayList<JoinableResourceBundle>(); // Check if we should use the bundle names property or // find the bundle name using the bundle id declaration : // jawr.<type>.bundle.<name>.id if (null != props .getProperty(PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_NAMES)) { StringTokenizer tk = new StringTokenizer( props .getProperty(PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_NAMES), ","); while (tk.hasMoreTokens()) { customBundles.add(buildJoinableResourceBundle(props, tk.nextToken() .trim(), fileExtension, rsReaderHandler)); } } else { Iterator<String> bundleNames = props.getPropertyBundleNameSet().iterator(); while (bundleNames.hasNext()) { customBundles.add(buildJoinableResourceBundle(props, bundleNames.next(), fileExtension, rsReaderHandler)); } } // Initialize the bundles dependencies Iterator<String> bundleNames = props.getPropertyBundleNameSet().iterator(); while (bundleNames.hasNext()) { String bundleName = (String) bundleNames.next(); List<String> bundleNameDependencies = props.getCustomBundlePropertyAsList(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_DEPENDENCIES); if(!bundleNameDependencies.isEmpty()){ JoinableResourceBundle bundle = getBundleFromName(bundleName, customBundles); List<JoinableResourceBundle> bundleDependencies = getBundlesFromName(bundleNameDependencies, customBundles); bundle.setDependencies(bundleDependencies); } } return customBundles; } /** * Returns a bundle using the bundle name from a list of bundles * @param bundleName the bundle name * @param bundles the list of bundle * @return a bundle */ private JoinableResourceBundle getBundleFromName(String bundleName, List<JoinableResourceBundle> bundles) { JoinableResourceBundle bundle = null; List<String> names = new ArrayList<String>(); names.add(bundleName); List<JoinableResourceBundle> result = getBundlesFromName(names, bundles); if(!result.isEmpty()){ bundle = result.get(0); } 
return bundle; } /** * Returns a list of bundles using the bundle names from a list of bundles * @param names the list of bundle name * @param bundles the list of bundle * @return a list of bundles */ private List<JoinableResourceBundle> getBundlesFromName(List<String> names, List<JoinableResourceBundle> bundles) { List<JoinableResourceBundle> resultBundles = new ArrayList<JoinableResourceBundle>(); for (Iterator<String> iterator = names.iterator(); iterator.hasNext();) { String name = iterator.next(); for (Iterator<JoinableResourceBundle> itBundle = bundles.iterator(); itBundle.hasNext();) { JoinableResourceBundle bundle = itBundle.next(); if(bundle.getName().equals(name)){ resultBundles.add(bundle); } } } return resultBundles; } /** * Create a JoinableResourceBundle based on the properties file. * * @param props the properties config helper * @param bundleName the bundle name * @param rsHandler the resource handler * @return the Resource Bundle */ private JoinableResourceBundle buildJoinableResourceBundle( PropertiesConfigHelper props, String bundleName, String fileExtension, ResourceReaderHandler rsHandler) { // Id for the bundle String bundleId = props.getCustomBundleProperty(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_ID); String bundlePrefix = props.getCustomBundleProperty(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_BUNDLE_PREFIX); InclusionPattern inclusionPattern = getInclusionPattern(props, bundleName); JoinableResourceBundleImpl bundle = new JoinableResourceBundleImpl( bundleId, bundleName, bundlePrefix, fileExtension, inclusionPattern, rsHandler, generatorRegistry); // Override bundle postprocessor String bundlePostProcessors = props.getCustomBundleProperty(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_POSTPROCESSOR); if (StringUtils.isNotEmpty(bundlePostProcessors)) { bundle.setBundlePostProcessor(chainFactory .buildPostProcessorChain(bundlePostProcessors)); } // Override unitary postprocessor String 
unitaryPostProcessors = props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_FILE_POSTPROCESSOR); if (StringUtils.isNotEmpty(unitaryPostProcessors)) { bundle.setUnitaryPostProcessor(chainFactory .buildPostProcessorChain(unitaryPostProcessors)); } // Set conditional comment for IE, in case one is specified String explorerConditionalCondition = props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_IE_CONDITIONAL_EXPRESSION); if (StringUtils.isNotEmpty(explorerConditionalCondition)) { bundle .setExplorerConditionalExpression(explorerConditionalCondition); } // Sets the alternate URL for production mode. String alternateProductionURL = props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_PRODUCTION_ALT_URL); if (StringUtils.isNotEmpty(alternateProductionURL)) { bundle .setAlternateProductionURL(props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_PRODUCTION_ALT_URL)); } boolean hasDebugURL = false; // Sets the debug URL for debug mode. if (null != props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_DEBUG_URL)){ bundle.setDebugURL(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_DEBUG_URL)); hasDebugURL = true; if(StringUtils.isEmpty(bundle.getAlternateProductionURL())){ throw new IllegalArgumentException( "The bundle '"+bundleName+"', which use a static external resource in debug mode, must use an external resource in Production mode.\n" + "Please check your configuration. "); } if(StringUtils.isNotEmpty(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_MAPPINGS))){ throw new IllegalArgumentException( "The bundle '"+bundleName+"', which use a static external resource in debug mode, can't have a bundle mapping.\n" + "Please check your configuration. "); } } // Sets the licence path lists. 
Set<String> licencePathList = props .getCustomBundlePropertyAsSet( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_LICENCE_PATH_LIST); if (!licencePathList.isEmpty()) { bundle.setLicensesPathList(licencePathList); } List<String> mappings = props.getCustomBundlePropertyAsList(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_MAPPINGS); if (!hasDebugURL && mappings.isEmpty()) { throw new IllegalArgumentException( "No mappings were defined for the bundle with name:" + bundleName + ". Please specify at least one in configuration. "); } if (!hasDebugURL){ // Add the mappings bundle.setMappings(mappings); Map<String, VariantSet> variants = props.getCustomBundleVariantSets(bundleName); bundle.setVariants(variants); for (Iterator<String> iterator = bundle.getVariantKeys().iterator(); iterator.hasNext();) { String variantKey = iterator.next(); if(StringUtils.isNotEmpty(variantKey)){ String hashcode = props.getCustomBundleProperty(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_HASHCODE_VARIANT+variantKey); bundle.setBundleDataHashCode(variantKey, hashcode); } } String hashcode = props.getCustomBundleProperty(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_HASHCODE); bundle.setBundleDataHashCode(null, hashcode); } return bundle; } /** * Returns the inclusion pattern for a bundle * * @param props the properties helper * @param bundleName the bundle name * @return the inclusion pattern for a bundle */ private InclusionPattern getInclusionPattern(PropertiesConfigHelper props, String bundleName) { // Wether it's global or not boolean isGlobal = Boolean .valueOf( props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_GLOBAL_FLAG, "false")).booleanValue(); // Set order if its a global bundle int order = 0; if (isGlobal) { order = Integer.parseInt(props.getCustomBundleProperty(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_ORDER, "0")); } // Use only with debug mode on boolean isDebugOnly = 
Boolean .valueOf( props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_DEBUGONLY, "false")).booleanValue(); // Use only with debug mode off boolean isDebugNever = Boolean .valueOf( props .getCustomBundleProperty( bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_DEBUGNEVER, "false")).booleanValue(); return new InclusionPattern(isGlobal, order, DebugInclusion.get(isDebugOnly, isDebugNever)); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.aries.subsystem.core.internal;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.apache.aries.subsystem.core.archive.AriesSubsystemParentsHeader;
import org.apache.aries.subsystem.core.archive.Attribute;
import org.apache.aries.subsystem.core.archive.DeployedContentHeader;
import org.apache.aries.subsystem.core.archive.DeploymentManifest;
import org.apache.aries.subsystem.core.archive.Header;
import org.apache.aries.subsystem.core.archive.ImportPackageHeader;
import org.apache.aries.subsystem.core.archive.ImportPackageRequirement;
import org.apache.aries.subsystem.core.archive.ProvisionResourceHeader;
import org.apache.aries.subsystem.core.archive.RequireBundleHeader;
import org.apache.aries.subsystem.core.archive.RequireBundleRequirement;
import org.apache.aries.subsystem.core.archive.RequireCapabilityHeader;
import org.apache.aries.subsystem.core.archive.RequireCapabilityRequirement;
import org.apache.aries.subsystem.core.archive.SubsystemContentHeader;
import org.apache.aries.subsystem.core.archive.SubsystemExportServiceHeader;
import org.apache.aries.subsystem.core.archive.SubsystemImportServiceHeader;
import org.apache.aries.subsystem.core.archive.SubsystemImportServiceRequirement;
import org.apache.aries.subsystem.core.archive.SubsystemManifest;
import org.apache.aries.util.filesystem.FileSystem;
import org.apache.aries.util.filesystem.IDirectory;
import org.eclipse.equinox.region.Region;
import org.eclipse.equinox.region.RegionDigraph;
import org.eclipse.equinox.region.RegionFilter;
import org.eclipse.equinox.region.RegionFilterBuilder;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleException;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.namespace.ExecutionEnvironmentNamespace;
import org.osgi.framework.namespace.IdentityNamespace;
import org.osgi.framework.namespace.NativeNamespace;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.namespace.service.ServiceNamespace;
import org.osgi.resource.Capability;
import org.osgi.resource.Namespace;
import org.osgi.resource.Requirement;
import org.osgi.resource.Resource;
import org.osgi.resource.Wire;
import org.osgi.service.coordinator.Coordination;
import org.osgi.service.coordinator.Participant;
import org.osgi.service.resolver.ResolutionException;
import org.osgi.service.resolver.ResolveContext;
import org.osgi.service.subsystem.Subsystem;
import org.osgi.service.subsystem.SubsystemConstants;
import org.osgi.service.subsystem.SubsystemException;

/**
 * A deployable view of a subsystem archive. Wraps a {@link RawSubsystemResource}
 * and, at construction time, computes the content resources, capabilities,
 * dependencies (when they are to be provisioned at install time) and the
 * deployment manifest. Also owns the Equinox {@link Region} associated with the
 * subsystem and its import-isolation (sharing) policy.
 */
public class SubsystemResource implements Resource {
	/** Lazily created in {@link #getRegion()}; guarded by {@code synchronized}. */
	private Region region;

	private final List<Capability> capabilities;
	private final DeploymentManifest deploymentManifest;
	/** Content resources that must be installed into this subsystem. */
	private final Collection<Resource> installableContent = new HashSet<Resource>();
	/** Dependencies that must be installed (i.e. not already shared). */
	private final Collection<Resource> installableDependencies = new HashSet<Resource>();
	private final Collection<Resource> mandatoryResources = new HashSet<Resource>();
	/** Deployed-Content clauses for which no resource could be found. */
	private final Collection<DeployedContentHeader.Clause> missingResources = new HashSet<DeployedContentHeader.Clause>();
	private final Collection<Resource> optionalResources = new HashSet<Resource>();
	private final BasicSubsystem parent;
	private final RawSubsystemResource resource;
	/** Content resources already present in an accessible region. */
	private final Collection<Resource> sharedContent = new HashSet<Resource>();
	/** Dependencies already present in an accessible region. */
	private final Collection<Resource> sharedDependencies = new HashSet<Resource>();

	public SubsystemResource(String location, IDirectory content, BasicSubsystem parent) throws URISyntaxException, IOException, ResolutionException, BundleException, InvalidSyntaxException {
		this(new RawSubsystemResource(location, content, parent), parent);
	}

	public SubsystemResource(RawSubsystemResource resource, BasicSubsystem parent) throws IOException, BundleException, InvalidSyntaxException, URISyntaxException {
		this.parent = parent;
		this.resource = resource;
		computeContentResources(resource.getDeploymentManifest());
		capabilities = computeCapabilities();
		if (this.getSubsystemManifest().getSubsystemTypeHeader().getAriesProvisionDependenciesDirective().isInstall()) {
			/* compute dependencies now only if we intend to provision them during install */
			computeDependencies(resource.getDeploymentManifest());
		}
		deploymentManifest = computeDeploymentManifest();
	}

	public SubsystemResource(File file) throws IOException, URISyntaxException, ResolutionException, BundleException, InvalidSyntaxException {
		this(null, FileSystem.getFSRoot(file));
	}

	public SubsystemResource(BasicSubsystem subsystem, IDirectory directory) throws IOException, URISyntaxException, ResolutionException, BundleException, InvalidSyntaxException {
		if (subsystem == null) {
			// This is intended to only support the case where the root subsystem
			// is being initialized from a non-persistent state.
			parent = null;
		}
		else {
			parent = Utils.findScopedSubsystemInRegion(subsystem);
		}
		resource = new RawSubsystemResource(directory, parent);
		deploymentManifest = resource.getDeploymentManifest();
		computeContentResources(deploymentManifest);
		capabilities = computeCapabilities();
		if (getSubsystemManifest().getSubsystemTypeHeader().getAriesProvisionDependenciesDirective().isInstall()) {
			/* compute dependencies if we intend to provision them during install */
			computeDependencies(resource.getDeploymentManifest());
		}
	}

	/** Equality is based solely on the subsystem location. */
	@Override
	public boolean equals(Object o) {
		if (o == this)
			return true;
		if (!(o instanceof SubsystemResource))
			return false;
		SubsystemResource that = (SubsystemResource)o;
		return getLocation().equals(that.getLocation());
	}

	// NOTE(review): the namespace parameter is ignored and ALL capabilities are
	// returned, whereas the Resource.getCapabilities contract says a non-null
	// namespace should filter the result. Callers may rely on the current
	// behavior — confirm before changing.
	@Override
	public List<Capability> getCapabilities(String namespace) {
		return Collections.unmodifiableList(capabilities);
	}

	/** Computes the capability list once, at construction time. */
	private List<Capability> computeCapabilities() throws InvalidSyntaxException {
		List<Capability> capabilities = new ArrayList<Capability>();
		if (isScoped())
			computeScopedCapabilities(capabilities);
		else
			computeUnscopedCapabilities(capabilities);
		return capabilities;
	}

	// An unscoped (feature) subsystem exposes its own capabilities plus those of
	// all of its content resources.
	private void computeUnscopedCapabilities(List<Capability> capabilities) {
		capabilities.addAll(resource.getCapabilities(null));
		for (Resource r : getContentResources())
			capabilities.addAll(r.getCapabilities(null));
	}

	// A scoped (application/composite) subsystem exposes its own capabilities
	// plus only the services exported via Subsystem-ExportService.
	private void computeScopedCapabilities(List<Capability> capabilities) throws InvalidSyntaxException {
		capabilities.addAll(resource.getCapabilities(null));
		computeOsgiServiceCapabilities(capabilities);
	}

	private void computeOsgiServiceCapabilities(List<Capability> capabilities) throws InvalidSyntaxException {
		SubsystemExportServiceHeader header = getSubsystemManifest().getSubsystemExportServiceHeader();
		if (header == null)
			return;
		for (Resource resource : getContentResources())
			capabilities.addAll(header.toCapabilities(resource));
	}

	public DeploymentManifest getDeploymentManifest() {
		return deploymentManifest;
	}

	public long getId() {
		return resource.getId();
	}

	public Collection<Resource> getInstallableContent() {
		return installableContent;
	}

	public Collection<Resource> getInstallableDependencies() {
		return installableDependencies;
	}

	public org.apache.aries.subsystem.core.repository.Repository getLocalRepository() {
		return resource.getLocalRepository();
	}

	public String getLocation() {
		return resource.getLocation().getValue();
	}

	Collection<Resource> getMandatoryResources() {
		return mandatoryResources;
	}

	public Collection<DeployedContentHeader.Clause> getMissingResources() {
		return missingResources;
	}

	Collection<Resource> getOptionalResources() {
		return optionalResources;
	}

	/**
	 * Returns the parent subsystems. When this resource has no direct parent
	 * (persisted state), the parents are recovered from the
	 * AriesSubsystem-Parents deployment header.
	 */
	public Collection<BasicSubsystem> getParents() {
		if (parent == null) {
			AriesSubsystemParentsHeader header = getDeploymentManifest().getAriesSubsystemParentsHeader();
			if (header == null)
				return Collections.emptyList();
			Collection<AriesSubsystemParentsHeader.Clause> clauses = header.getClauses();
			Collection<BasicSubsystem> result = new ArrayList<BasicSubsystem>(clauses.size());
			Subsystems subsystems = Activator.getInstance().getSubsystems();
			for (AriesSubsystemParentsHeader.Clause clause : clauses) {
				result.add(subsystems.getSubsystemById(clause.getId()));
			}
			return result;
		}
		return Collections.singleton(parent);
	}

	/**
	 * Returns (lazily creating) the region for this subsystem. Region creation
	 * is registered with the current coordination so a newly created region is
	 * removed again if the surrounding operation fails.
	 */
	public synchronized Region getRegion() throws BundleException, IOException, InvalidSyntaxException, URISyntaxException {
		if (region == null) {
			region = createRegion(getId());
			Coordination coordination = Activator.getInstance().getCoordinator().peek();
			coordination.addParticipant(new Participant() {
				@Override
				public void ended(Coordination arg0) throws Exception {
					// Nothing.
				}

				@Override
				public void failed(Coordination arg0) throws Exception {
					// Only scoped subsystems own their region; unscoped ones share
					// the parent's region, which must not be removed here.
					if (isScoped())
						region.getRegionDigraph().removeRegion(region);
				}
			});
			if (!isApplication()) {
				setImportIsolationPolicy();
			}
		}
		return region;
	}

	@Override
	public List<Requirement> getRequirements(String namespace) {
		if (isScoped())
			return resource.getRequirements(namespace);
		else {
			// Unscoped subsystems also surface the requirements of their content.
			ArrayList<Requirement> result = new ArrayList<Requirement>();
			result.addAll(resource.getRequirements(namespace));
			for (Resource r : getContentResources())
				result.addAll(r.getRequirements(namespace));
			result.trimToSize();
			return result;
		}
	}

	public Collection<Resource> getSharedContent() {
		return sharedContent;
	}

	public Collection<Resource> getSharedDependencies() {
		return sharedDependencies;
	}

	public SubsystemManifest getSubsystemManifest() {
		return resource.getSubsystemManifest();
	}

	public Collection<TranslationFile> getTranslations() {
		return resource.getTranslations();
	}

	/** Consistent with {@link #equals(Object)}: based on the location only. */
	@Override
	public int hashCode() {
		int result = 17;
		result = 31 * result + getLocation().hashCode();
		return result;
	}

	// Classifies a found content resource as mandatory/optional and
	// installable/shared.
	private void addContentResource(Resource resource) {
		if (resource == null)
			return;
		if (isMandatory(resource))
			mandatoryResources.add(resource);
		else
			optionalResources.add(resource);
		if (isInstallable(resource))
			installableContent.add(resource);
		else
			sharedContent.add(resource);
	}

	private void addDependency(Resource resource) {
		if (resource == null)
			return;
		if (isInstallable(resource))
			installableDependencies.add(resource);
		else
			sharedDependencies.add(resource);
	}

	private void addMissingResource(DeployedContentHeader.Clause resource) {
		missingResources.add(resource);
	}

	// Allows this region to see the Subsystem service registered for this
	// subsystem (filtered by the region-scoping service property).
	private void addSubsystemServiceImportToSharingPolicy(
			RegionFilterBuilder builder) throws InvalidSyntaxException, BundleException, IOException, URISyntaxException {
		builder.allow(
				RegionFilter.VISIBLE_SERVICE_NAMESPACE,
				new StringBuilder("(&(")
						.append(org.osgi.framework.Constants.OBJECTCLASS)
						.append('=').append(Subsystem.class.getName())
						.append(")(")
						.append(Constants.SubsystemServicePropertyRegions)
						.append('=').append(getRegion().getName())
						.append("))").toString());
	}

	// When the target region is not the root region, the subsystem service
	// import is wired directly to the root region instead.
	private void addSubsystemServiceImportToSharingPolicy(RegionFilterBuilder builder, Region to)
			throws InvalidSyntaxException, BundleException, IOException, URISyntaxException {
		Region root = Activator.getInstance().getSubsystems().getRootSubsystem().getRegion();
		if (to.getName().equals(root.getName()))
			addSubsystemServiceImportToSharingPolicy(builder);
		else {
			to = root;
			builder = to.getRegionDigraph().createRegionFilterBuilder();
			addSubsystemServiceImportToSharingPolicy(builder);
			RegionFilter regionFilter = builder.build();
			getRegion().connectRegion(to, regionFilter);
		}
	}

	/**
	 * Resolves the content resources from the Deployed-Content header of a
	 * persisted deployment manifest, falling back to the subsystem manifest when
	 * no deployment manifest exists. Unresolvable clauses are recorded as missing.
	 */
	private void computeContentResources(DeploymentManifest manifest) throws BundleException, IOException, InvalidSyntaxException, URISyntaxException {
		if (manifest == null)
			computeContentResources(getSubsystemManifest());
		else {
			DeployedContentHeader header = manifest.getDeployedContentHeader();
			if (header == null)
				return;
			for (DeployedContentHeader.Clause clause : header.getClauses()) {
				Resource resource = findContent(clause);
				if (resource == null)
					addMissingResource(clause);
				else
					addContentResource(resource);
			}
		}
	}

	/**
	 * Resolves the content resources from the Subsystem-Content header. A
	 * missing mandatory resource is an error; missing optional resources are
	 * silently skipped.
	 */
	private void computeContentResources(SubsystemManifest manifest) throws BundleException, IOException, InvalidSyntaxException, URISyntaxException {
		SubsystemContentHeader contentHeader = manifest.getSubsystemContentHeader();
		if (contentHeader == null)
			return;
		for (SubsystemContentHeader.Clause clause : contentHeader.getClauses()) {
			Requirement requirement = clause.toRequirement(this);
			Resource resource = findContent(requirement);
			if (resource == null) {
				if (clause.isMandatory())
					throw new SubsystemException("A required content resource could not be found. This means the resource was either missing or not recognized as a supported resource format due to, for example, an invalid bundle manifest or blueprint XML file. Turn on debug logging for more information. The resource was: " + requirement);
				continue;
			}
			addContentResource(resource);
		}
	}

	/**
	 * Resolves the dependencies from the Provision-Resource header of a
	 * persisted deployment manifest, falling back to a fresh resolution against
	 * the subsystem manifest when no deployment manifest exists.
	 */
	void computeDependencies(DeploymentManifest manifest) {
		if (manifest == null) {
			computeDependencies(getSubsystemManifest());
		}
		else {
			ProvisionResourceHeader header = manifest.getProvisionResourceHeader();
			if (header == null)
				return;
			for (ProvisionResourceHeader.Clause clause : header.getClauses()) {
				Resource resource = findDependency(clause);
				if (resource == null)
					// FIX: the message previously concatenated 'resource', which is
					// provably null inside this branch, so it always printed
					// "The resource was: null". Report the unresolved clause instead.
					throw new SubsystemException("A required dependency could not be found. This means the resource was either missing or not recognized as a supported resource format due to, for example, an invalid bundle manifest or blueprint XML file. Turn on debug logging for more information. The resource was: " + clause);
				addDependency(resource);
			}
		}
	}

	/**
	 * Performs a full resolution and records every non-synthetic, non-content
	 * resource pulled in (both resolved roots and wire providers) as a
	 * dependency. Also derives the import isolation policy from the resolution.
	 */
	private void computeDependencies(SubsystemManifest manifest) {
		SubsystemContentHeader contentHeader = manifest.getSubsystemContentHeader();
		try {
			Map<Resource, List<Wire>> resolution = Activator.getInstance().getResolver().resolve(createResolveContext());
			setImportIsolationPolicy(resolution);
			for (Map.Entry<Resource, List<Wire>> entry : resolution.entrySet()) {
				Resource key = entry.getKey();
				String type = ResourceHelper.getTypeAttribute(key);
				// Do not include synthetic resources in the dependencies.
				if (!Constants.ResourceTypeSynthesized.equals(type)
						&& !contentHeader.contains(key)) {
					addDependency(key);
				}
				for (Wire wire : entry.getValue()) {
					Resource provider = wire.getProvider();
					type = ResourceHelper.getTypeAttribute(provider);
					// Do not include synthetic resources in the dependencies.
					if (!Constants.ResourceTypeSynthesized.equals(type)
							&& !contentHeader.contains(provider)) {
						addDependency(provider);
					}
				}
			}
		}
		catch (ResolutionException e) {
			throw new SubsystemException(e);
		}
		catch (Exception e) {
			// Rethrow exception types callers may specifically handle; wrap the rest.
			if (e instanceof SubsystemException) {
				throw (SubsystemException)e;
			}
			if (e instanceof SecurityException) {
				throw (SecurityException)e;
			}
			throw new SubsystemException(e);
		}
	}

	private DeployedContentHeader computeDeployedContentHeader() {
		Collection<Resource> content = getContentResources();
		if (content.isEmpty())
			return null;
		return DeployedContentHeader.newInstance(content);
	}

	// Reuses a persisted deployment manifest when one exists; otherwise builds
	// one from the subsystem manifest plus the computed content/dependencies.
	private DeploymentManifest computeDeploymentManifest() throws IOException {
		DeploymentManifest result = computeExistingDeploymentManifest();
		if (result != null)
			return result;
		result = new DeploymentManifest.Builder().manifest(resource.getSubsystemManifest())
				.header(computeDeployedContentHeader())
				.header(computeProvisionResourceHeader()).build();
		return result;
	}

	private DeploymentManifest computeExistingDeploymentManifest() throws IOException {
		return resource.getDeploymentManifest();
	}

	ProvisionResourceHeader computeProvisionResourceHeader() {
		Collection<Resource> dependencies = getDependencies();
		if (dependencies.isEmpty())
			return null;
		return ProvisionResourceHeader.newInstance(dependencies);
	}

	/**
	 * Creates or looks up the region for this subsystem. Unscoped subsystems
	 * share their parent's region; the root subsystem uses the region of the
	 * implementation bundle.
	 */
	private Region createRegion(long id) throws BundleException {
		if (!isScoped())
			return getParents().iterator().next().getRegion();
		Activator activator = Activator.getInstance();
		RegionDigraph digraph = activator.getRegionDigraph();
		if (getParents().isEmpty())
			// This is the root subsystem. Associate it with the region in which
			// the subsystems implementation bundle was installed.
			return digraph.getRegion(activator.getBundleContext().getBundle());
		// Region name: <symbolic-name>;<version>;<type>;<id>
		String name = getSubsystemManifest()
				.getSubsystemSymbolicNameHeader().getSymbolicName()
				+ ';'
				+ getSubsystemManifest().getSubsystemVersionHeader()
						.getVersion()
				+ ';'
				+ getSubsystemManifest().getSubsystemTypeHeader()
						.getType() + ';' + Long.toString(id);
		Region region = digraph.getRegion(name);
		// TODO New regions need to be cleaned up if this subsystem fails to
		// install, but there's no access to the coordination here.
		if (region == null)
			return digraph.createRegion(name);
		return region;
	}

	private ResolveContext createResolveContext() {
		return new org.apache.aries.subsystem.core.internal.ResolveContext(this);
	}

	/**
	 * Finds a content resource satisfying the given requirement. For unscoped
	 * subsystems the system repository is consulted first (restricted to this
	 * subsystem's region), then the local repository, then the registered
	 * repository services. Non-fragment providers are preferred (ARIES-1425).
	 */
	private Resource findContent(Requirement requirement) throws BundleException, IOException, InvalidSyntaxException, URISyntaxException {
		Map<Requirement, Collection<Capability>> map;
		// TODO System repository for scoped subsystems should be searched in
		// the case of a persisted subsystem.
		if (isUnscoped()) {
			map = Activator.getInstance().getSystemRepository().findProviders(Collections.singleton(requirement));
			if (map.containsKey(requirement)) {
				Collection<Capability> capabilities = map.get(requirement);
				for (Capability capability : capabilities) {
					Resource provider = capability.getResource();
					if (provider instanceof BundleRevision) {
						if (getRegion().contains(((BundleRevision)provider).getBundle())) {
							return provider;
						}
					}
					else if (provider instanceof BasicSubsystem) {
						if (getRegion().equals(((BasicSubsystem)provider).getRegion())) {
							return provider;
						}
					}
				}
			}
		}
		// First search the local repository.
		map = resource.getLocalRepository().findProviders(Collections.singleton(requirement));
		Collection<Capability> capabilities = map.get(requirement);
		if (capabilities.isEmpty()) {
			// Nothing found in the local repository so search the repository services.
			capabilities = new RepositoryServiceRepository().findProviders(requirement);
		}
		if (capabilities.isEmpty()) {
			// Nothing found period.
			return null;
		}
		for (Capability capability : capabilities) {
			if (!IdentityNamespace.TYPE_FRAGMENT.equals(
					capability.getAttributes().get(IdentityNamespace.CAPABILITY_TYPE_ATTRIBUTE))) {
				// Favor the first resource that is not a fragment bundle.
				// See ARIES-1425.
				return capability.getResource();
			}
		}
		// Nothing here but fragment bundles. Return the first one.
		return capabilities.iterator().next().getResource();
	}

	/**
	 * Finds the content resource for a Deployed-Content clause. When the clause
	 * carries a resource id, the resource is looked up directly (bundle or
	 * subsystem); otherwise a requirement-based search is performed.
	 */
	private Resource findContent(DeployedContentHeader.Clause clause) throws BundleException, IOException, InvalidSyntaxException, URISyntaxException {
		Attribute attribute = clause.getAttribute(DeployedContentHeader.Clause.ATTRIBUTE_RESOURCEID);
		long resourceId = attribute == null ? -1 : Long.parseLong(String.valueOf(attribute.getValue()));
		if (resourceId != -1) {
			String type = clause.getType();
			if (IdentityNamespace.TYPE_BUNDLE.equals(type) || IdentityNamespace.TYPE_FRAGMENT.equals(type)) {
				Bundle resource = Activator.getInstance().getBundleContext().getBundle(0).getBundleContext().getBundle(resourceId);
				if (resource == null)
					return null;
				return resource.adapt(BundleRevision.class);
			}
			else
				return Activator.getInstance().getSubsystems().getSubsystemById(resourceId);
		}
		return findContent(clause.toRequirement(this));
	}

	/**
	 * Finds the dependency resource for a Provision-Resource clause.
	 * NOTE(review): this reads DeployedContentHeader.Clause.ATTRIBUTE_RESOURCEID
	 * from a ProvisionResourceHeader.Clause — presumably the attribute name is
	 * shared between the two headers; confirm. Unlike findContent, a missing
	 * bundle for a known resource id will NPE on the adapt call — confirm
	 * whether that can occur here.
	 */
	private Resource findDependency(ProvisionResourceHeader.Clause clause) {
		Attribute attribute = clause.getAttribute(DeployedContentHeader.Clause.ATTRIBUTE_RESOURCEID);
		long resourceId = attribute == null ? -1 : Long.parseLong(String.valueOf(attribute.getValue()));
		if (resourceId != -1) {
			String type = clause.getType();
			if (IdentityNamespace.TYPE_BUNDLE.equals(type) || IdentityNamespace.TYPE_FRAGMENT.equals(type))
				return Activator.getInstance().getBundleContext().getBundle(0).getBundleContext().getBundle(resourceId).adapt(BundleRevision.class);
			else
				return Activator.getInstance().getSubsystems().getSubsystemById(resourceId);
		}
		OsgiIdentityRequirement requirement = new OsgiIdentityRequirement(
				clause.getPath(), clause.getDeployedVersion(), clause.getType(), true);
		List<Capability> capabilities = createResolveContext().findProviders(requirement);
		if (capabilities.isEmpty())
			return null;
		return capabilities.get(0).getResource();
	}

	private Collection<Resource> getContentResources() {
		Collection<Resource> result = new ArrayList<Resource>(installableContent.size() + sharedContent.size());
		result.addAll(installableContent);
		result.addAll(sharedContent);
		return result;
	}

	private Collection<Resource> getDependencies() {
		Collection<Resource> result = new ArrayList<Resource>(installableDependencies.size() + sharedDependencies.size());
		result.addAll(installableDependencies);
		result.addAll(sharedDependencies);
		return result;
	}

	boolean isApplication() {
		String type = resource.getSubsystemManifest().getSubsystemTypeHeader().getType();
		return SubsystemConstants.SUBSYSTEM_TYPE_APPLICATION.equals(type);
	}

	boolean isComposite() {
		String type = resource.getSubsystemManifest().getSubsystemTypeHeader().getType();
		return SubsystemConstants.SUBSYSTEM_TYPE_COMPOSITE.equals(type);
	}

	boolean isContent(Resource resource) {
		if (installableContent.contains(resource) || sharedContent.contains(resource)) {
			return true;
		}
		// Allow for implicit subsystem installations. An implicit installation
		// occurs when a subsystem containing other subsystems as content is
		// installed. When identifying the region to be used for validation
		// purposes during resolution, resources that are content of children
		// must be treated as content of this subsystem. See ResolveContext.isValid().
		for (Resource installableResource : installableContent) {
			if (installableResource instanceof RawSubsystemResource) {
				if (((RawSubsystemResource)installableResource).getSubsystemManifest().getSubsystemContentHeader().contains(resource)) {
					return true;
				}
			}
		}
		return false;
	}

	private boolean isInstallable(Resource resource) {
		return !isShared(resource);
	}

	private boolean isMandatory(Resource resource) {
		SubsystemContentHeader header = this.resource.getSubsystemManifest().getSubsystemContentHeader();
		if (header == null)
			return false;
		return header.isMandatory(resource);
	}

	boolean isRoot() {
		return BasicSubsystem.ROOT_LOCATION.equals(getLocation());
	}

	private boolean isShared(Resource resource) {
		return Utils.isSharedResource(resource);
	}

	private boolean isScoped() {
		return isApplication() || isComposite();
	}

	private boolean isUnscoped() {
		return !isScoped();
	}

	/**
	 * Derives the application's import sharing policy from a resolution result:
	 * every wire from a content resource to a non-content provider contributes
	 * its requirement to the region filter toward the parent region.
	 */
	private void setImportIsolationPolicy(Map<Resource, List<Wire>> resolution) throws Exception {
		if (!isApplication()) {
			return;
		}
		SubsystemContentHeader contentHeader = getSubsystemManifest().getSubsystemContentHeader();
		// Prepare the regions and filter builder to set the sharing policy.
		Region from = getRegion();
		Region to = ((BasicSubsystem)getParents().iterator().next()).getRegion();
		RegionFilterBuilder builder = from.getRegionDigraph().createRegionFilterBuilder();
		// Always provide visibility to this subsystem's service registration.
		addSubsystemServiceImportToSharingPolicy(builder, to);
		for (Resource resource : resolution.keySet()) {
			// If the resource is content but the wire provider is not,
			// the sharing policy must be updated.
			List<Wire> wires = resolution.get(resource);
			for (Wire wire : wires) {
				Resource provider = wire.getProvider();
				// First check: If the provider is content there is no need to
				// update the sharing policy because the capability is already
				// visible.
				if (contentHeader.contains(provider)) {
					continue;
				}
				// Second check: If the provider is synthesized but not offering
				// a MissingCapability, then the resource is acting as a
				// placeholder as part of the Application-ImportService header
				// functionality, and the sharing policy does not need to be
				// updated.
				// Do not exclude resources providing a MissingCapability
				// even though they are synthesized. These are added by the
				// resolve context to ensure that unsatisfied optional
				// requirements become part of the sharing policy.
				if (!(wire.getCapability() instanceof DependencyCalculator.MissingCapability)
						&& Constants.ResourceTypeSynthesized.equals(ResourceHelper.getTypeAttribute(provider))) {
					continue;
				}
				// The requirement must be added to the sharing policy.
				Requirement requirement = wire.getRequirement();
				List<String> namespaces = new ArrayList<String>(2);
				namespaces.add(requirement.getNamespace());
				if (ServiceNamespace.SERVICE_NAMESPACE.equals(namespaces.get(0))) {
					// Both service capabilities and services must be visible.
					namespaces.add(RegionFilter.VISIBLE_SERVICE_NAMESPACE);
				}
				String filter = requirement.getDirectives().get(Namespace.REQUIREMENT_FILTER_DIRECTIVE);
				if (filter == null) {
					for (String namespace : namespaces)
						builder.allowAll(namespace);
				}
				else {
					for (String namespace : namespaces)
						builder.allow(namespace, filter);
				}
			}
		}
		// Always add access to osgi.ee and osgi.native namespaces
		setImplicitAccessToNativeAndEECapabilities(builder);
		// Now set the sharing policy, if the regions are different.
		RegionFilter regionFilter = builder.build();
		from.connectRegion(to, regionFilter);
	}

	/**
	 * Derives the import sharing policy from the subsystem manifest headers
	 * (Import-Package, Require-Capability, Subsystem-ImportService,
	 * Require-Bundle) for scoped subsystems. No-op for the root subsystem and
	 * unscoped subsystems.
	 */
	private void setImportIsolationPolicy() throws BundleException, IOException, InvalidSyntaxException, URISyntaxException {
		if (isRoot() || !isScoped())
			return;
		Region region = getRegion();
		Region from = region;
		RegionFilterBuilder builder = from.getRegionDigraph().createRegionFilterBuilder();
		Region to = getParents().iterator().next().getRegion();
		addSubsystemServiceImportToSharingPolicy(builder, to);
		// TODO Is this check really necessary? Looks like it was done at the beginning of this method.
		if (isScoped()) {
			// Both applications and composites have Import-Package headers that require processing.
			// In the case of applications, the header is generated.
			Header<?> header = getSubsystemManifest().getImportPackageHeader();
			setImportIsolationPolicy(builder, (ImportPackageHeader)header);
			// Both applications and composites have Require-Capability headers that require processing.
			// In the case of applications, the header is generated.
			header = getSubsystemManifest().getRequireCapabilityHeader();
			setImportIsolationPolicy(builder, (RequireCapabilityHeader)header);
			// Both applications and composites have Subsystem-ImportService headers that require processing.
			// In the case of applications, the header is generated.
			header = getSubsystemManifest().getSubsystemImportServiceHeader();
			setImportIsolationPolicy(builder, (SubsystemImportServiceHeader)header);
			header = getSubsystemManifest().getRequireBundleHeader();
			setImportIsolationPolicy(builder, (RequireBundleHeader)header);
			// Always add access to osgi.ee and osgi.native namespaces
			setImplicitAccessToNativeAndEECapabilities(builder);
		}
		RegionFilter regionFilter = builder.build();
		from.connectRegion(to, regionFilter);
	}

	private void setImportIsolationPolicy(RegionFilterBuilder builder, ImportPackageHeader header) throws InvalidSyntaxException {
		String policy = RegionFilter.VISIBLE_PACKAGE_NAMESPACE;
		if (header == null)
			return;
		for (ImportPackageHeader.Clause clause : header.getClauses()) {
			ImportPackageRequirement requirement = new ImportPackageRequirement(clause, this);
			String filter = requirement.getDirectives().get(ImportPackageRequirement.DIRECTIVE_FILTER);
			builder.allow(policy, filter);
		}
	}

	private void setImportIsolationPolicy(RegionFilterBuilder builder, RequireBundleHeader header) throws InvalidSyntaxException {
		if (header == null)
			return;
		for (RequireBundleHeader.Clause clause : header.getClauses()) {
			RequireBundleRequirement requirement = new RequireBundleRequirement(clause, this);
			String policy = RegionFilter.VISIBLE_REQUIRE_NAMESPACE;
			String filter = requirement.getDirectives().get(RequireBundleRequirement.DIRECTIVE_FILTER);
			builder.allow(policy, filter);
		}
	}

	private void setImportIsolationPolicy(RegionFilterBuilder builder, RequireCapabilityHeader header) throws InvalidSyntaxException {
		if (header == null)
			return;
		for (RequireCapabilityHeader.Clause clause : header.getClauses()) {
			RequireCapabilityRequirement requirement = new RequireCapabilityRequirement(clause, this);
			String policy = requirement.getNamespace();
			String filter = requirement.getDirectives().get(RequireCapabilityRequirement.DIRECTIVE_FILTER);
			if (filter == null)
				// A null filter directive means the requirement matches any
				// capability from the same namespace.
				builder.allowAll(policy);
			else
				// Otherwise, the capabilities must be filtered accordingly.
				builder.allow(policy, filter);
		}
	}

	// Every scoped subsystem implicitly sees osgi.ee and osgi.native capabilities.
	private void setImplicitAccessToNativeAndEECapabilities(RegionFilterBuilder builder) {
		builder.allowAll(ExecutionEnvironmentNamespace.EXECUTION_ENVIRONMENT_NAMESPACE);
		builder.allowAll(NativeNamespace.NATIVE_NAMESPACE);
	}

	private void setImportIsolationPolicy(RegionFilterBuilder builder, SubsystemImportServiceHeader header) throws InvalidSyntaxException {
		if (header == null)
			return;
		for (SubsystemImportServiceHeader.Clause clause : header.getClauses()) {
			SubsystemImportServiceRequirement requirement = new SubsystemImportServiceRequirement(clause, this);
			String policy = RegionFilter.VISIBLE_SERVICE_NAMESPACE;
			String filter = requirement.getDirectives().get(SubsystemImportServiceRequirement.DIRECTIVE_FILTER);
			builder.allow(policy, filter);
		}
	}
}
/*
 * Copyright (c) 2016 HERE Europe B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.here.account.http.java;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import com.here.account.http.HttpConstants;
import com.here.account.http.HttpException;
import com.here.account.http.HttpProvider;

/**
 * A pure-Java implementation of the HttpProvider interface using
 * {@link HttpURLConnection}.
 *
 * @author kmccrack
 *
 */
public class JavaHttpProvider implements HttpProvider {

    public static class Builder {

        private Builder() {
        }

        /**
         * Build using builders, builders, and more builders.
         *
         * @return the built HttpProvider implementation based on Java
         *     {@link HttpURLConnection}.
         */
        public HttpProvider build() {
            // uses Java's default connection pooling by default
            return new JavaHttpProvider();
        }
    }

    /**
     * Immutable response holder: status code, content length, body stream,
     * and response headers as returned by {@link HttpURLConnection}.
     */
    static class JavaHttpResponse implements HttpResponse {

        private final int statusCode;
        private final long contentLength;
        private final InputStream responseBody;
        private final Map<String, List<String>> headers;

        public JavaHttpResponse(int statusCode, long contentLength, InputStream responseBody,
                Map<String, List<String>> headers) {
            this.statusCode = statusCode;
            this.contentLength = contentLength;
            this.responseBody = responseBody;
            this.headers = headers;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public int getStatusCode() {
            return statusCode;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public long getContentLength() {
            return contentLength;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public InputStream getResponseBody() throws IOException {
            return responseBody;
        }

        /**
         * {@inheritDoc}
         */
        public Map<String, List<String>> getHeaders() {
            return headers;
        }
    }

    /**
     * Request holder. The three constructors cover body-less requests,
     * JSON-body requests, and form-urlencoded requests; content type and
     * length are fixed at construction time.
     */
    private static class JavaHttpRequest implements HttpRequest {

        private final String method;
        private final String url;
        private String authorizationHeader;
        private Map<String, String> additionalHeaders;
        private byte[] body;
        private final String contentType;
        private final String contentLength;

        private JavaHttpRequest(String method, String url) {
            this.method = method;
            this.url = url;
            this.additionalHeaders = new HashMap<String, String>();
            contentType = null;
            body = null;
            contentLength = null;
        }

        private JavaHttpRequest(String method, String url, String requestBodyJson) {
            this.method = method;
            this.url = url;
            this.additionalHeaders = new HashMap<String, String>();
            contentType = HttpConstants.CONTENT_TYPE_JSON;
            body = requestBodyJson.getBytes(HttpConstants.ENCODING_CHARSET);
            contentLength = String.valueOf(body.length);
        }

        private JavaHttpRequest(String method, String url, Map<String, List<String>> formParams) {
            this.method = method;
            this.url = url;
            this.additionalHeaders = new HashMap<String, String>();
            try {
                contentType = HttpConstants.CONTENT_TYPE_FORM_URLENCODED;
                body = getFormBody(formParams);
                contentLength = String.valueOf(body.length);
            } catch (UnsupportedEncodingException e) {
                // Charset constant is invalid for this JVM; surface as a programming error.
                throw new IllegalArgumentException(e);
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void addAuthorizationHeader(String value) {
            this.authorizationHeader = value;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void addHeader(String name, String value) {
            this.additionalHeaders.put(name, value);
        }

        /**
         * Get the HTTP Request method value.
         */
        public String getMethod() {
            return method;
        }

        public String getUrl() {
            return url;
        }

        public String getAuthorizationHeader() {
            return authorizationHeader;
        }

        public Map<String, String> getAdditionalHeaders() {
            return additionalHeaders;
        }

        public byte[] getBody() {
            return body;
        }

        public String getContentType() {
            return contentType;
        }

        public String getContentLength() {
            return contentLength;
        }
    }

    /**
     * Only the Builder can construct a JavaHttpProvider.
     */
    private JavaHttpProvider() {
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void close() throws IOException {
        // nothing to do
    }

    @Override
    public HttpRequest getRequest(HttpRequestAuthorizer httpRequestAuthorizer, String method, String url,
            String requestBodyJson) {
        HttpRequest httpRequest;
        if (null == requestBodyJson) {
            httpRequest = new JavaHttpRequest(method, url);
        } else {
            httpRequest = new JavaHttpRequest(method, url, requestBodyJson);
        }
        httpRequestAuthorizer.authorize(httpRequest, method, url, null);
        return httpRequest;
    }

    @Override
    public HttpRequest getRequest(HttpRequestAuthorizer httpRequestAuthorizer, String method, String url,
            Map<String, List<String>> formParams) {
        HttpRequest httpRequest;
        if (null == formParams) {
            httpRequest = new JavaHttpRequest(method, url);
        } else {
            httpRequest = new JavaHttpRequest(method, url, formParams);
        }
        httpRequestAuthorizer.authorize(httpRequest, method, url, formParams);
        return httpRequest;
    }

    /**
     * Returns the response Content-Length, or -1 when the header is absent
     * (e.g. chunked transfer encoding) or unparseable.
     *
     * FIX: the previous implementation called Long.parseLong on the raw
     * header value, throwing NullPointerException for responses without a
     * Content-Length header and NumberFormatException for malformed ones.
     *
     * @param connection the connected HttpURLConnection
     * @return the content length in bytes, or -1 if unknown
     */
    protected long getContentLength(HttpURLConnection connection) {
        String contentLengthString = connection.getHeaderField(HttpConstants.CONTENT_LENGTH_HEADER);
        if (null == contentLengthString) {
            return -1L;
        }
        try {
            return Long.parseLong(contentLengthString.trim());
        } catch (NumberFormatException e) {
            return -1L;
        }
    }

    protected HttpURLConnection getHttpUrlConnection(String urlString) throws IOException {
        URL url = new URL(urlString);
        return (HttpURLConnection) url.openConnection();
    }

    @Override
    public HttpResponse execute(HttpRequest httpRequest) throws HttpException, IOException {
        if (!(httpRequest instanceof JavaHttpRequest)) {
            throw new IllegalArgumentException("httpRequest is not of expected type; use " + getClass()
                    + ".getRequest(..) to get a request of the expected type");
        }
        JavaHttpRequest javaHttpRequest = (JavaHttpRequest) httpRequest;
        HttpURLConnection connection = getHttpUrlConnection(javaHttpRequest.getUrl());
        // NOTE(review): setDoOutput(true) is applied even to body-less requests;
        // HttpURLConnection may coerce such a GET into a POST — confirm intended.
        connection.setDoOutput(true);
        connection.setRequestMethod(javaHttpRequest.getMethod());
        byte[] body = javaHttpRequest.getBody();
        if (null != body) {
            connection.setRequestProperty(HttpConstants.CONTENT_TYPE_HEADER, javaHttpRequest.getContentType());
            connection.setRequestProperty(HttpConstants.CONTENT_LENGTH_HEADER, javaHttpRequest.getContentLength());
        }
        String authorizationHeader = javaHttpRequest.getAuthorizationHeader();
        if (null != authorizationHeader) {
            connection.setRequestProperty(HttpConstants.AUTHORIZATION_HEADER, authorizationHeader);
        }
        Map<String, String> additionalHeaders = javaHttpRequest.getAdditionalHeaders();
        if (null != additionalHeaders) {
            for (Entry<String, String> additionalHeader : additionalHeaders.entrySet()) {
                String key = additionalHeader.getKey();
                String value = additionalHeader.getValue();
                connection.setRequestProperty(key, value);
            }
        }

        // Write data
        if (null != body) {
            try (OutputStream outputStream = connection.getOutputStream()) {
                outputStream.write(body);
                outputStream.flush();
            }
        }

        // Read response
        int statusCode = connection.getResponseCode();
        long responseContentLength = getContentLength(connection);
        Map<String, List<String>> headers = connection.getHeaderFields();
        InputStream inputStream;
        if (statusCode < HttpURLConnection.HTTP_BAD_REQUEST) {
            inputStream = connection.getInputStream();
        } else {
            /* error from server */
            inputStream = connection.getErrorStream();
        }
        return new JavaHttpResponse(statusCode, responseContentLength, inputStream, headers);
    }

    /**
     * URL-encodes the form parameters into an application/x-www-form-urlencoded
     * byte array. Keys with a null or empty value list are emitted bare
     * (key only, no '=').
     *
     * @param formParams parameter name to value-list map
     * @return the encoded request body bytes
     * @throws UnsupportedEncodingException if the configured charset is unsupported
     */
    protected static byte[] getFormBody(Map<String, List<String>> formParams) throws UnsupportedEncodingException {
        StringBuilder formBuf = new StringBuilder();
        boolean first = true;
        Set<Entry<String, List<String>>> formEntrySet = formParams.entrySet();
        for (Entry<String, List<String>> formEntry : formEntrySet) {
            String key = formEntry.getKey();
            List<String> values = formEntry.getValue();
            String encodedKey = URLEncoder.encode(key, HttpConstants.CHARSET_STRING);
            if (null != values && !values.isEmpty()) {
                for (String value : values) {
                    if (first) {
                        first = false;
                    } else {
                        formBuf.append('&');
                    }
                    formBuf
                        .append(encodedKey)
                        .append('=')
                        .append(URLEncoder.encode(value, HttpConstants.CHARSET_STRING));
                }
            } else {
                if (first) {
                    first = false;
                } else {
                    formBuf.append('&');
                }
                formBuf.append(encodedKey);
            }
        }
        return formBuf.toString().getBytes(HttpConstants.ENCODING_CHARSET);
    }

    public static Builder builder() {
        return new Builder();
    }
}
/*
 * Copyright 2011-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticmapreduce.util;

import java.util.ArrayList;
import java.util.List;

import com.amazonaws.services.elasticmapreduce.model.BootstrapActionConfig;
import com.amazonaws.services.elasticmapreduce.model.ScriptBootstrapActionConfig;

/**
 * Class that provides helper methods for constructing predefined bootstrap actions.
 *
 * <pre>
 * AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
 * AmazonElasticMapReduce emr = new AmazonElasticMapReduceClient(credentials);
 *
 * BootstrapActions bootstrapActions = new BootstrapActions();
 *
 * RunJobFlowRequest request = new RunJobFlowRequest()
 *       .withName("Job Flow With Bootstrap Actions")
 *       .withBootstrapActions(
 *           bootstrapActions.newRunIf(
 *               "instance.isMaster=true",
 *               bootstrapActions.newConfigureDaemons()
 *                   .withHeapSize(Daemon.JobTracker, 2048)
 *                   .build()))
 *       .withLogUri("s3://log-bucket/")
 *       .withInstances(new JobFlowInstancesConfig()
 *           .withEc2KeyName("keypair")
 *           .withHadoopVersion("0.20")
 *           .withInstanceCount(5)
 *           .withKeepJobFlowAliveWhenNoSteps(true)
 *           .withMasterInstanceType("m1.small")
 *           .withSlaveInstanceType("m1.small"));
 *
 * RunJobFlowResult result = emr.runJobFlow(request);
 * </pre>
 */
public class BootstrapActions {
    // S3 bucket hosting the predefined bootstrap action scripts.
    private final String bucket;

    /**
     * Creates a new default BootstrapActions for use in us-east-1.
     */
    public BootstrapActions() {
        this("us-east-1.elasticmapreduce");
    }

    /**
     * Creates a new BootstrapActions.
     * @param bucket the bucket from which to download the bootstrap actions.
     */
    public BootstrapActions(String bucket) {
        this.bucket = bucket;
    }

    /**
     * Create a new run-if bootstrap action which lets you conditionally run bootstrap actions.
     * @param condition The condition to evaluate, if true the bootstrap action executes.
     * @param config The bootstrap action to execute in case of successful evaluation.
     * @return A BootstrapActionConfig to be provided when running a job flow.
     */
    public BootstrapActionConfig newRunIf(String condition, BootstrapActionConfig config) {
        // FIX: copy the args rather than inserting into the caller's list.
        // The previous implementation mutated config's own args list in
        // place (and would throw if that list was unmodifiable), corrupting
        // the wrapped BootstrapActionConfig passed in by the caller.
        List<String> args = new ArrayList<String>(config.getScriptBootstrapAction().getArgs());
        args.add(0, condition);
        args.add(1, config.getScriptBootstrapAction().getPath());

        return new BootstrapActionConfig()
            .withName("Run If, " + config.getName())
            .withScriptBootstrapAction(new ScriptBootstrapActionConfig()
                .withPath("s3://" + bucket + "/bootstrap-actions/run-if")
                .withArgs(args));
    }

    /**
     * Enum specifying all valid config files.
     */
    public enum ConfigFile {
        Site,
        Default,
        Core,
        Hdfs,
        Mapred
    }

    /**
     * Create a new bootstrap action which lets you configure Hadoop's XML files.
     */
    public ConfigureHadoop newConfigureHadoop() {
        return new ConfigureHadoop();
    }

    public class ConfigureHadoop {
        // Accumulated command-line arguments for the configure-hadoop script.
        List<String> args = new ArrayList<String>();

        private ConfigureHadoop() {
        }

        /**
         * Specify an XML file in S3 to merge with Hadoop's default configuration.
         * @param file The config file to merge with.
         * @param xmlPath The path in S3 of the XML file.
         * @return A reference to this updated object so that method calls can be chained
         *         together.
         */
        public ConfigureHadoop withXml(ConfigFile file, String xmlPath) {
            String arg = "";
            // Each config file maps to an uppercase single-letter flag.
            switch (file) {
            case Site:
                arg = "-S";
                break;
            case Default:
                arg = "-D";
                break;
            case Core:
                arg = "-C";
                break;
            case Hdfs:
                arg = "-H";
                break;
            case Mapred:
                arg = "-M";
                break;
            }
            args.add(arg);
            args.add(xmlPath);
            return this;
        }

        /**
         * Specify a key-value pair to merge with Hadoop's default configuration.
         * @param file The config file to merge with.
         * @param key The config key.
         * @param value The config value.
         * @return A reference to this updated object so that method calls can be chained
         *         together.
         */
        public ConfigureHadoop withKeyValue(ConfigFile file, String key, String value) {
            String arg = "";
            // Key-value pairs use the lowercase variant of the same flags.
            switch (file) {
            case Site:
                arg = "-s";
                break;
            case Default:
                arg = "-d";
                break;
            case Core:
                arg = "-c";
                break;
            case Hdfs:
                arg = "-h";
                break;
            case Mapred:
                arg = "-m";
                break;
            }
            args.add(arg);
            args.add(key + "=" + value);
            return this;
        }

        /**
         * Returns an object which can be used in a RunJobflow call.
         * @return an object which can be used in a RunJobflow call.
         */
        public BootstrapActionConfig build() {
            return new BootstrapActionConfig()
                .withName("Configure Hadoop")
                .withScriptBootstrapAction(new ScriptBootstrapActionConfig()
                    .withPath("s3://" + bucket + "/bootstrap-actions/configure-hadoop")
                    .withArgs(args));
        }
    }

    /**
     * List of Hadoop daemons which can be configured.
     */
    public enum Daemon {
        NameNode,
        DataNode,
        JobTracker,
        TaskTracker,
        Client;
    }

    /**
     * Create a new bootstrap action which lets you configure Hadoop's daemons. The options
     * are written to the hadoop-user-env.sh file.
     */
    public ConfigureDaemons newConfigureDaemons() {
        return new ConfigureDaemons();
    }

    public class ConfigureDaemons {
        // Accumulated command-line arguments for the configure-daemons script.
        List<String> args = new ArrayList<String>();
        // Whether to replace the existing hadoop-user-env.sh instead of appending.
        boolean replace = false;

        private ConfigureDaemons() {
        }

        /**
         * Set the heap size of a daemon.
         * @param daemon The daemon to configure.
         * @param megabytes The requested heap size of the daemon.
         * @return A reference to this updated object so that method calls can be chained
         *         together.
         */
        public ConfigureDaemons withHeapSize(Daemon daemon, int megabytes) {
            args.add("--" + daemon.name().toLowerCase() + "-heap-size=" + megabytes);
            return this;
        }

        /**
         * Specify additional Java opts to be included when the daemon starts.
         * @param daemon The daemon to add opts to.
         * @param opts Additional Java command line arguments.
         * @return A reference to this updated object so that method calls can be chained
         *         together.
         */
        public ConfigureDaemons withOpts(Daemon daemon, String opts) {
            args.add("--" + daemon.name().toLowerCase() + "-opts=\"" + opts + "\"");
            return this;
        }

        /**
         * Replace the existing hadoop-user-env.sh file if it exists.
         * @param replace whether the file should be replaced.
         * @return A reference to this updated object so that method calls can be chained
         *         together.
         */
        public ConfigureDaemons withReplace(boolean replace) {
            this.replace = replace;
            return this;
        }

        /**
         * Returns an object which can be used in a RunJobflow call.
         * @return an object which can be used in a RunJobflow call.
         */
        public BootstrapActionConfig build() {
            if (replace) {
                args.add("--replace");
            }
            return new BootstrapActionConfig()
                .withName("Configure Daemons")
                .withScriptBootstrapAction(new ScriptBootstrapActionConfig()
                    .withPath("s3://" + bucket + "/bootstrap-actions/configure-daemons")
                    .withArgs(args));
        }
    }
}
package com.schnettler.AdvancedLayersThemeInstaller.activities; import android.app.Activity; import android.app.AlertDialog; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.widget.AdapterView; import android.widget.ListView; import android.widget.TextView; import android.widget.Toolbar; import com.schnettler.AdvancedLayersThemeInstaller.R; import com.schnettler.AdvancedLayersThemeInstaller.adapters.CustomListAdapter; public class AboutActivity extends Activity { ListView list; ListView list2; ListView list3; ListView list4; ListView list5; String community = null; String LinkToYourProfile = null; String[] web1 = { "Niklas Schnettler", } ; String[] web2 = { "Bitsyko Development Team", } ; String[] web4 = { "Mailson Campos", } ; String[] web5 = { "Stefano Trevisani", } ; Integer[] imageId1 = { R.drawable.niklas, }; Integer[] imageId2 = { R.drawable.bitsyko, }; Integer[] imageId3 = { R.drawable.themedeveloper, }; Integer[] imageId4 = { R.drawable.mailson, }; Integer[] imageId5 = { R.drawable.stefano, }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_about); String test = this.getString(R.string.NameOfThemeDeveloper); community = this.getString(R.string.community); LinkToYourProfile = this.getString(R.string.LinkToYourProfile); String[] web3 = { test, }; Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); toolbar.setNavigationIcon(R.drawable.abc_ic_ab_back_mtrl_am_alpha); setActionBar(toolbar); //List 2 CustomListAdapter adapter = new CustomListAdapter(AboutActivity.this, web1, imageId1); list=(ListView)findViewById(R.id.listView7); list.setAdapter(adapter); list.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, 
int position, long id) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://plus.google.com/+NiklasSchnettler/posts"))); } }); //List 1 CustomListAdapter adapter3 = new CustomListAdapter(AboutActivity.this, web3, imageId3); list3=(ListView)findViewById(R.id.listView6); list3.setAdapter(adapter3); list3.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(LinkToYourProfile))); } }); //List3 CustomListAdapter adapter2 = new CustomListAdapter(AboutActivity.this, web2, imageId2); list2=(ListView)findViewById(R.id.listView2); list2.setAdapter(adapter2); list2.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://plus.google.com/u/0/communities/102261717366580091389"))); } }); //List4 CustomListAdapter adapter4 = new CustomListAdapter(AboutActivity.this, web4, imageId4); list4=(ListView)findViewById(R.id.listView4); list4.setAdapter(adapter4); list4.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://plus.google.com/u/0/+MailsonCampos/posts"))); } }); //List5 CustomListAdapter adapter5 = new CustomListAdapter(AboutActivity.this, web5, imageId5); list5=(ListView)findViewById(R.id.listView5); list5.setAdapter(adapter5); list5.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://plus.google.com/+StefanoTrevisani/posts"))); } }); TextView tv_version = (TextView) findViewById(R.id.textView_version); try { String 
versionName = AboutActivity.this.getPackageManager() .getPackageInfo(AboutActivity.this.getPackageName(), 0).versionName; tv_version.setText("Version " + versionName); } catch (PackageManager.NameNotFoundException e) { e.printStackTrace(); } list2.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(community))); } }); } public void License1(View view) { // Do something in response to button //Intent intent = new Intent(this, About.class); new AlertDialog.Builder(this) .setTitle("Snackbar") .setView(LayoutInflater.from(this).inflate(R.layout.about_license1dialog, null)) .setPositiveButton(R.string.VisitGithub, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/nispok/snackbar")); startActivity(browserIntent); } }) .setNegativeButton(R.string.Close, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void License3(View view) { // Do something in response to button //Intent intent = new Intent(this, About.class); new AlertDialog.Builder(this) .setTitle("ObservableScrollView") .setView(LayoutInflater.from(this).inflate(R.layout.about_license2dialog, null)) .setPositiveButton(R.string.VisitGithub, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/ksoichiro/Android-ObservableScrollView")); startActivity(browserIntent); } }) .setNegativeButton(R.string.Close, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void License4(View view) { // Do something in response to button //Intent intent = new Intent(this, About.class); new 
AlertDialog.Builder(this) .setTitle("Floating Action Button") .setView(LayoutInflater.from(this).inflate(R.layout.about_license3dialog, null)) .setPositiveButton(R.string.VisitGithub, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/makovkastar/FloatingActionButton")); startActivity(browserIntent); } }) .setNegativeButton(R.string.Close, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void License5(View view) { // Do something in response to button //Intent intent = new Intent(this, About.class); new AlertDialog.Builder(this) .setTitle("NineOldAndroids") .setView(LayoutInflater.from(this).inflate(R.layout.about_license4dialog, null)) .setPositiveButton(R.string.VisitGithub, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/JakeWharton/NineOldAndroids/")); startActivity(browserIntent); } }) .setNegativeButton(R.string.Close, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void License6(View view) { // Do something in response to button //Intent intent = new Intent(this, About.class); new AlertDialog.Builder(this) .setTitle("Root Tools") .setView(LayoutInflater.from(this).inflate(R.layout.about_license5dialog, null)) .setPositiveButton(R.string.VisitGithub, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/Stericson/RootTools")); startActivity(browserIntent); } }) .setNegativeButton(R.string.Close, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void License9(View 
view) { // Do something in response to button //Intent intent = new Intent(this, About.class); new AlertDialog.Builder(this) .setTitle("ViewPagerIndicator") .setView(LayoutInflater.from(this).inflate(R.layout.about_license6dialog, null)) .setPositiveButton(R.string.VisitGithub, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/JakeWharton/ViewPagerIndicator")); startActivity(browserIntent); } }) .setNegativeButton(R.string.Close, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void openCommunity(View view) { // Do something in response to button startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(community))); } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi;

import com.intellij.lang.jvm.JvmClass;
import com.intellij.lang.jvm.JvmClassKind;
import com.intellij.lang.jvm.JvmMethod;
import com.intellij.lang.jvm.types.JvmReferenceType;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.Pair;
import com.intellij.pom.PomRenameableTarget;
import com.intellij.util.ArrayFactory;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;
import java.util.List;

/**
 * Represents a Java class or interface.
 *
 * @see PsiJavaFile#getClasses()
 */
public interface PsiClass extends PsiNameIdentifierOwner, PsiModifierListOwner, PsiDocCommentOwner, PsiTypeParameterListOwner, PsiQualifiedNamedElement, PsiTarget, PomRenameableTarget<PsiElement>, JvmClass {
  /**
   * The empty array of PSI classes which can be reused to avoid unnecessary allocations.
   */
  PsiClass @NotNull [] EMPTY_ARRAY = new PsiClass[0];

  /** Shared factory producing {@code PsiClass} arrays; returns {@link #EMPTY_ARRAY} for zero-length requests. */
  ArrayFactory<PsiClass> ARRAY_FACTORY = count -> count == 0 ? EMPTY_ARRAY : new PsiClass[count];

  /**
   * Returns the fully qualified name of the class.
   *
   * @return the qualified name of the class, or null for anonymous and local classes, and for type parameters
   */
  @Override
  @Nullable
  @NlsSafe String getQualifiedName();

  /**
   * Checks if the class is an interface.
   *
   * @return true if the class is an interface, false otherwise.
   */
  boolean isInterface();

  /**
   * Checks if the class is an annotation type.
   *
   * @return true if the class is an annotation type, false otherwise
   */
  boolean isAnnotationType();

  /**
   * Checks if the class is an enumeration.
   *
   * @return true if the class is an enumeration, false otherwise.
   */
  boolean isEnum();

  /**
   * Checks if the class is a record.
   *
   * @return true if the class is a record, false otherwise.
   */
  default boolean isRecord() {
    return false;
  }

  /**
   * Returns the list of classes that this class or interface extends.
   *
   * @return the extends list, or null for anonymous classes.
   */
  @Nullable
  PsiReferenceList getExtendsList();

  /**
   * Returns the list of interfaces that this class implements.
   *
   * @return the implements list, or null for anonymous classes
   */
  @Nullable
  PsiReferenceList getImplementsList();

  /**
   * Returns the list of class types for the classes that this class or interface extends.
   *
   * @return the list of extended class types, or an empty list for anonymous classes.
   */
  PsiClassType @NotNull [] getExtendsListTypes();

  /**
   * Returns the list of class types for the interfaces that this class implements.
   *
   * @return the list of extended class types, or an empty list for anonymous classes,
   * enums and annotation types
   */
  PsiClassType @NotNull [] getImplementsListTypes();

  /**
   * Returns the list of classes that this class or interface permits.
   *
   * @return the permits list.
   */
  @Nullable
  @ApiStatus.Experimental
  default PsiReferenceList getPermitsList() {
    return null;
  }

  /**
   * Returns the list of class types that this class or interface explicitly permits.
   *
   * @return the list of explicitly permitted classes.
   */
  @ApiStatus.Experimental
  default PsiClassType @NotNull [] getPermitsListTypes() {
    PsiReferenceList permitsList = getPermitsList();
    if (permitsList != null) {
      return permitsList.getReferencedTypes();
    }
    return PsiClassType.EMPTY_ARRAY;
  }

  /**
   * Returns the base class of this class.
   *
   * @return the base class. May return null when jdk is not configured, so no java.lang.Object is found,
   * or for java.lang.Object itself
   */
  @Nullable
  PsiClass getSuperClass();

  /**
   * Returns the list of interfaces implemented by the class, or extended by the interface.
   *
   * @return the list of interfaces.
   */
  PsiClass @NotNull [] getInterfaces();

  /**
   * Returns the list of classes and interfaces extended or implemented by the class.
   *
   * @return the list of classes or interfaces. May return zero elements when jdk is
   * not configured, so no java.lang.Object is found
   */
  PsiClass @NotNull [] getSupers();

  /**
   * Returns the list of class types for the classes and interfaces extended or
   * implemented by the class.
   *
   * @return the list of class types for the classes or interfaces.
   * For the class with no explicit extends list, the returned list always contains at least one element for the java.lang.Object type.
   * If psiClass is java.lang.Object, returned list is empty.
   */
  PsiClassType @NotNull [] getSuperTypes();

  /**
   * Returns the list of fields in the class.
   *
   * @return the list of fields.
   */
  @Override
  PsiField @NotNull [] getFields();

  /**
   * Returns the list of methods in the class.
   *
   * @return the list of methods.
   */
  @Override
  PsiMethod @NotNull [] getMethods();

  /**
   * Returns the list of constructors for the class.
   *
   * @return the list of constructors.
   */
  PsiMethod @NotNull [] getConstructors();

  /**
   * Returns the list of inner classes for the class.
   *
   * @return the list of inner classes.
   */
  @Override
  PsiClass @NotNull [] getInnerClasses();

  /**
   * Returns the list of class initializers for the class.
   *
   * @return the list of class initializers.
   */
  PsiClassInitializer @NotNull [] getInitializers();

  /**
   * Returns the list of fields in the class and all its superclasses.
   *
   * @return the list of fields.
   */
  PsiField @NotNull [] getAllFields();

  /**
   * Returns the list of methods in the class and all its superclasses.
   *
   * @return the list of methods.
   */
  PsiMethod @NotNull [] getAllMethods();

  /**
   * Returns the list of inner classes for the class and all its superclasses.
   *
   * @return the list of inner classes.
   */
  PsiClass @NotNull [] getAllInnerClasses();

  /**
   * Searches the class (and optionally its superclasses) for the field with the specified name.
   *
   * @param name       the name of the field to find.
   * @param checkBases if true, the field is also searched in the base classes of the class.
   * @return the field instance, or null if the field cannot be found.
   */
  @Nullable
  PsiField findFieldByName(@NonNls String name, boolean checkBases);

  /**
   * Searches the class (and optionally its superclasses) for the method with
   * the signature matching the signature of the specified method.
   *
   * @param patternMethod the method used as a pattern for the search.
   * @param checkBases    if true, the method is also searched in the base classes of the class.
   * @return the method instance, or null if the method cannot be found.
   */
  @Nullable
  PsiMethod findMethodBySignature(@NotNull PsiMethod patternMethod, boolean checkBases);

  /**
   * Searches the class (and optionally its superclasses) for the methods with the signature
   * matching the signature of the specified method. If the superclasses are not searched,
   * the method returns multiple results only in case of a syntax error (duplicate method).
   *
   * @param patternMethod the method used as a pattern for the search.
   * @param checkBases    if true, the method is also searched in the base classes of the class.
   * @return the found methods, or an empty array if no methods are found.
   */
  PsiMethod @NotNull [] findMethodsBySignature(@NotNull PsiMethod patternMethod, boolean checkBases);

  /** Convenience overload: searches only this class (no base classes) for methods with the given name. */
  @Override
  default JvmMethod @NotNull [] findMethodsByName(@NotNull String methodName) {
    return findMethodsByName(methodName, false);
  }

  /**
   * Searches the class (and optionally its superclasses) for the methods with the specified name.
   *
   * @param name       the name of the methods to find.
   * @param checkBases if true, the methods are also searched in the base classes of the class.
   * @return the found methods, or an empty array if no methods are found.
   */
  PsiMethod @NotNull [] findMethodsByName(@NonNls String name, boolean checkBases);

  /**
   * Searches the class (and optionally its superclasses) for the methods with the specified name
   * and returns the methods along with their substitutors.
   *
   * @param name       the name of the methods to find.
   * @param checkBases if true, the methods are also searched in the base classes of the class.
   * @return the found methods and their substitutors, or an empty list if no methods are found.
   */
  @NotNull
  List<Pair<PsiMethod, PsiSubstitutor>> findMethodsAndTheirSubstitutorsByName(@NonNls @NotNull String name, boolean checkBases);

  /**
   * Returns the list of methods in the class and all its superclasses, along with their
   * substitutors.
   *
   * @return the list of methods and their substitutors
   */
  @NotNull
  List<Pair<PsiMethod, PsiSubstitutor>> getAllMethodsAndTheirSubstitutors();

  /**
   * Searches the class (and optionally its superclasses) for the inner class with the specified name.
   *
   * @param name       the name of the inner class to find.
   * @param checkBases if true, the inner class is also searched in the base classes of the class.
   * @return the inner class instance, or null if the inner class cannot be found.
   */
  @Nullable
  PsiClass findInnerClassByName(@NonNls String name, boolean checkBases);

  /**
   * Returns the token representing the opening curly brace of the class.
   *
   * @return the token instance, or null if the token is missing in the source code file.
   */
  @Nullable
  PsiElement getLBrace();

  /**
   * Returns the token representing the closing curly brace of the class.
   *
   * @return the token instance, or null if the token is missing in the source code file.
   */
  @Nullable
  PsiElement getRBrace();

  /**
   * Returns the name identifier of the class.
   *
   * @return the name identifier, or null if the class is anonymous or synthetic jsp class
   */
  @Override
  @Nullable
  PsiIdentifier getNameIdentifier();

  /**
   * Returns the PSI member in which the class has been declared (for example,
   * the method containing the anonymous inner class, or the file containing a regular
   * class, or the class owning a type parameter).
   *
   * @return the member in which the class has been declared.
   */
  PsiElement getScope();

  /**
   * Checks if this class is an inheritor of the specified base class.
   * Only java inheritance rules are considered.
   * Note that {@link com.intellij.psi.search.searches.ClassInheritorsSearch}
   * may return classes that are inheritors in broader, e.g. in ejb sense, but not in java sense.
   *
   * @param baseClass the base class to check the inheritance.
   * @param checkDeep if false, only direct inheritance is checked; if true, the base class is
   *                  searched in the entire inheritance chain
   * @return true if the class is an inheritor, false otherwise
   */
  boolean isInheritor(@NotNull PsiClass baseClass, boolean checkDeep);

  /**
   * Checks if this class is a deep inheritor of the specified base class possibly bypassing a class
   * when checking inheritance chain.
   * Only java inheritance rules are considered.
   * Note that {@link com.intellij.psi.search.searches.ClassInheritorsSearch}
   * may return classes that are inheritors in broader, e.g. in ejb sense, but not in java sense.
   * The base class is searched in the entire inheritance chain.
   *
   * @param baseClass     the base class to check the inheritance.
   * @param classToByPass class to bypass the inheritance check for
   * @return true if the class is an inheritor, false otherwise
   */
  boolean isInheritorDeep(@NotNull PsiClass baseClass, @Nullable PsiClass classToByPass);

  /**
   * For an inner class, returns its containing class.
   *
   * @return the containing class, or null if the class is not an inner class.
   */
  @Override
  @Nullable
  PsiClass getContainingClass();

  /**
   * Returns the hierarchical signatures for all methods in the specified class and
   * its superclasses and superinterfaces.
   *
   * @return the collection of signatures.
   */
  @NotNull
  Collection<HierarchicalMethodSignature> getVisibleSignatures();

  @Override
  PsiElement setName(@NonNls @NotNull String name) throws IncorrectOperationException;

  @NotNull
  @Override
  default JvmClassKind getClassKind() {
    return PsiJvmConversionHelper.getJvmClassKind(this);
  }

  @Nullable
  @Override
  default JvmReferenceType getSuperClassType() {
    return PsiJvmConversionHelper.getClassSuperType(this);
  }

  @Override
  default JvmReferenceType @NotNull [] getInterfaceTypes() {
    return PsiJvmConversionHelper.getClassInterfaces(this);
  }

  /** Record components of this class; empty for non-record classes. */
  default PsiRecordComponent @NotNull [] getRecordComponents() {
    return PsiRecordComponent.EMPTY_ARRAY;
  }

  /** Record header (the parenthesized component list) of this class, or null for non-record classes. */
  @Nullable
  default PsiRecordHeader getRecordHeader() {
    return null;
  }
}
package com.nicholasgot.citypulse.androidapp;

import java.util.ArrayList;
import java.util.List;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.FragmentManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.graphics.Color;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;

import citypulse.commons.contextual_filtering.city_event_ontology.CityEvent;
import citypulse.commons.contextual_filtering.city_event_ontology.CriticalEventResults;
import citypulse.commons.contextual_filtering.contextual_event_request.ContextualEventRequest;
import citypulse.commons.contextual_filtering.contextual_event_request.Place;
import citypulse.commons.contextual_filtering.contextual_event_request.PlaceAdapter;
import citypulse.commons.contextual_filtering.contextual_event_request.Route;
import citypulse.commons.data.Coordinate;
import citypulse.commons.event_request.DataFederationRequest;
import citypulse.commons.event_request.DataFederationRequest.DataFederationPropertyType;
import citypulse.commons.event_request.DataFederationResult;
import citypulse.commons.event_request.QosVector;
import citypulse.commons.event_request.WeightVector;

import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.PolylineOptions;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.nicholasgot.citypulse.androidapp.common.DefaultValues;
import com.nicholasgot.citypulse.androidapp.common.MessageConverters;

/**
 * Activity that runs a planned trip: it launches the parking/route/travel-status
 * notification services, draws the route, the user position and the points of
 * interest on a Google map, and reacts to broadcast events (critical city events,
 * data-federation status updates and errors) received from those services.
 */
public class Execution extends Activity implements LocationListener {

	// Keys of the extras exchanged with the launching activity and the
	// background services. These are part of the public contract of this
	// activity — do not rename the values.
	public static final String EXECUTION_DETAILS = "execution details";
	public static final String STARTING_POINT = "starting point";
	public static final String DESTINATION_POINT = "destination point";
	public static final String INTEREST_POINT = "interest point";
	public static final String PARKING_CONTEXTUAL_EVENT_REQUESTS = "parking contextual event request";
	public static final String ROUTE_CONTEXTUAL_EVENT_REQUESTS = "route contextual event request";
	public static final String DECISION_SUPPORT_PARKING_PLANNER_RESONSE = "decision support parking planner response";
	public static final String DECISION_SUPPORT_PARKING_PLANNER_REQUEST = "decision support parking planner request";
	public static final String DECISION_SUPPORT_TRAVEL_PLANNER_RESONSE = "decision support travel planner response";
	public static final String DECISION_SUPPORT_TRAVEL_PLANNER_REQUEST = "decision support travel planner request";
	public static final String TRAVEL_STATUS_REQUEST = "travel status request";
	public static final String TRAVEL_STATUS_EVENT = "travel status event";
	public static final String TRAVEL_STATUS_EVENT_PAYLOAD = "travel status event payload";

	// Raw JSON request strings received through the launch intent.
	private String parkingContextualEventRequestString = null;
	private String routeContextualEventRequestString = null;

	// Intents of the background services we started; kept so they can be
	// stopped again in onDestroy().
	private Intent parkingServiceIntent = null;
	private Intent routeServiceIntent = null;
	private Intent travelStatusServiceIntent = null;

	private Coordinate startingPoint = null;
	private Coordinate destinationPoint = null;
	private Coordinate interestPoint = null;

	private GoogleMap map;
	// false until the first GPS fix arrives; the map overlays are drawn once,
	// on the first fix, and only the user marker is moved afterwards.
	private boolean locationAvailable;
	private Marker userPositionMarker;

	private ContextualEventRequest parkingContextualEventRequest = null;
	private ContextualEventRequest routeContextualEventRequest = null;

	private LocationManager locationManager;
	private Activity currentActivity = this;

	private BroadcastReceiver alertsBroadcastReceiver;
	private BroadcastReceiver statusBroadcastReceiver;
	private BroadcastReceiver errorBroadcastReceiver;

	private String travelStatusEventRequest = null;

	private TextView trafficStatusTextView;
	private TextView pollutionStatusTextView;

	// Open alert dialogs; cancelled when a new alert arrives or in onDestroy().
	private List<AlertDialog> alertDialogList = new ArrayList<AlertDialog>();

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_travel_planner_execution);

		trafficStatusTextView = (TextView) findViewById(R.id.executionTrafficStatus);
		pollutionStatusTextView = (TextView) findViewById(R.id.executionPollutionStatus);

		System.out.println("Starting execution with the following details:");

		// NOTE(review): the bundle is assumed to always be present in the
		// launch intent — confirm with the caller; a missing bundle would NPE.
		Intent intent = getIntent();
		Bundle bundle = intent.getBundleExtra(EXECUTION_DETAILS);

		if (bundle.containsKey(PARKING_CONTEXTUAL_EVENT_REQUESTS)) {
			parkingContextualEventRequestString = bundle
					.getString(PARKING_CONTEXTUAL_EVENT_REQUESTS);
			System.out.println("PARKING_CONTEXTUAL_EVENT_REQUESTS: "
					+ parkingContextualEventRequestString);
		}
		if (bundle.containsKey(ROUTE_CONTEXTUAL_EVENT_REQUESTS)) {
			routeContextualEventRequestString = bundle
					.getString(ROUTE_CONTEXTUAL_EVENT_REQUESTS);
			System.out.println("ROUTE_CONTEXTUAL_EVENT_REQUESTS: "
					+ routeContextualEventRequestString);
		}
		if (bundle.containsKey(STARTING_POINT)) {
			startingPoint = new Gson().fromJson(
					bundle.getString(STARTING_POINT), Coordinate.class);
			System.out.println("STARTING_POINT: " + startingPoint);
		}
		if (bundle.containsKey(DESTINATION_POINT)) {
			destinationPoint = new Gson().fromJson(
					bundle.getString(DESTINATION_POINT), Coordinate.class);
			System.out.println("DESTINATION_POINT: " + destinationPoint);
		}
		if (bundle.containsKey(INTEREST_POINT)) {
			interestPoint = new Gson().fromJson(
					bundle.getString(INTEREST_POINT), Coordinate.class);
			System.out.println("INTEREST_POINT: " + interestPoint);
		}

		// Launch the notification services for the requests we received.
		if (parkingContextualEventRequestString != null) {
			parkingServiceIntent = new Intent(this,
					ParkingNotificationService.class);
			parkingServiceIntent.putExtra(PARKING_CONTEXTUAL_EVENT_REQUESTS,
					parkingContextualEventRequestString);
			startService(parkingServiceIntent);
			// TODO(review): conversion flagged "Dan please check" in the original
			parkingContextualEventRequest = MessageConverters
					.contextualEventRequestFromJSON(parkingContextualEventRequestString);
		}
		if (routeContextualEventRequestString != null) {
			routeServiceIntent = new Intent(this,
					TravelNotificationService.class);
			routeServiceIntent.putExtra(ROUTE_CONTEXTUAL_EVENT_REQUESTS,
					routeContextualEventRequestString);
			startService(routeServiceIntent);
			// TODO(review): conversion flagged "Dan please check" in the original
			routeContextualEventRequest = MessageConverters
					.contextualEventRequestFromJSON(routeContextualEventRequestString);
			launchTravelStatusService();
		}

		FragmentManager myFragmentManager = getFragmentManager();
		MapFragment myMapFragment = (MapFragment) myFragmentManager
				.findFragmentById(R.id.travelPlannerSelectionMap);
		map = myMapFragment.getMap();

		locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
		locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
				1000, 10, this);

		// Receives critical city events; shows a dialog offering to replan
		// parking or route, or to continue.
		alertsBroadcastReceiver = new BroadcastReceiver() {
			@Override
			public void onReceive(Context context, Intent intent) {
				String criticalEventResultsString = intent
						.getStringExtra(DefaultValues.EVENT_ALERT_MESSAGE_PAYLOAD);
				System.out.println("critical event: "
						+ criticalEventResultsString);
				GsonBuilder builder = new GsonBuilder();
				builder.registerTypeAdapter(Place.class, new PlaceAdapter());
				Gson gson = builder.create();
				CriticalEventResults criticalEventResults = gson.fromJson(
						criticalEventResultsString, CriticalEventResults.class);

				StringBuilder messageStringBuilder = new StringBuilder(
						"The following events have been received: ");
				// true only while every event has a level > 0
				boolean eventOK = true;
				for (CityEvent contextualEvent : criticalEventResults
						.getContextualEvents()) {
					if (contextualEvent.getEventLevel() > 0) {
						messageStringBuilder.append(contextualEvent
								.getEventCategory()
								+ "[level = "
								+ contextualEvent.getEventLevel()
								+ ", coordinates("
								+ contextualEvent.getEventPlace()
										.getCentreCoordinate().toString()
								+ ")]; ");
					} else {
						eventOK = false;
					}
				}

				if (eventOK) {
					// Dismiss any dialog that is still open before showing
					// the new one.
					for (AlertDialog alertDialog : alertDialogList) {
						if (alertDialog.isShowing())
							alertDialog.cancel();
					}
					AlertDialog alertDialog = new AlertDialog.Builder(
							currentActivity)
							.setTitle("Event notification")
							.setMessage(messageStringBuilder.toString())
							.setPositiveButton("Go to parking selection",
									new DialogInterface.OnClickListener() {
										public void onClick(
												DialogInterface dialog,
												int which) {
											Intent intent = new Intent();
											intent.setAction(DefaultValues.COMMAND_GO_TO_PARKING_RECOMANDATION);
											currentActivity
													.sendBroadcast(intent);
											currentActivity.finish();
										}
									})
							.setNegativeButton("Go to route selection",
									new DialogInterface.OnClickListener() {
										public void onClick(
												DialogInterface dialog,
												int which) {
											Intent intent = new Intent();
											intent.setAction(DefaultValues.COMMAND_GO_TO_TRAVEL_RECOMANDATION);
											currentActivity
													.sendBroadcast(intent);
											currentActivity.finish();
										}
									})
							.setIcon(android.R.drawable.ic_dialog_alert)
							.setNeutralButton("Continue",
									new DialogInterface.OnClickListener() {
										public void onClick(
												DialogInterface dialog,
												int which) {
											System.out
													.println("The user has decided to continue.");
										}
									}).show();
					alertDialogList.add(alertDialog);
				} else {
					System.out.println("The event was not displayed!");
				}
			}
		};
		IntentFilter alertIntentFilter = new IntentFilter();
		alertIntentFilter.addAction(DefaultValues.EVENT_ALERT_MESSAGE);
		registerReceiver(alertsBroadcastReceiver, alertIntentFilter);

		// Forwards error broadcasts to the ErrorPanel activity.
		errorBroadcastReceiver = new BroadcastReceiver() {
			@Override
			public void onReceive(Context context, Intent intent) {
				String errorMessage = intent
						.getStringExtra(DefaultValues.ERROR_MESSAGE_PAYLOAD);
				Intent intentError = new Intent(currentActivity,
						ErrorPanel.class);
				intentError.putExtra("Error", errorMessage);
				startActivity(intentError);
			}
		};
		IntentFilter errorIntentFilter = new IntentFilter();
		errorIntentFilter.addAction(DefaultValues.ERROR_MESSAGE);
		registerReceiver(errorBroadcastReceiver, errorIntentFilter);

		// Receives data-federation results and updates the pollution and
		// traffic status labels.
		statusBroadcastReceiver = new BroadcastReceiver() {
			@Override
			public void onReceive(Context context, Intent intent) {
				String statusEvent = intent
						.getStringExtra(Execution.TRAVEL_STATUS_EVENT_PAYLOAD);
				System.out.println("received event from data federation "
						+ statusEvent);
				if (!statusEvent.contains("FAULT:")) {
					DataFederationResult dataFederationRequest = new Gson()
							.fromJson(statusEvent, DataFederationResult.class);
					if (dataFederationRequest.getResult().containsKey(
							"http://ict-citypulse.eu/city#AirPollutionIndex")) {
						// FIX: corrected user-visible typo "Air qualit index"
						char[] pollutionStatusMessage = ("Air quality index: " + dataFederationRequest
								.getResult()
								.get("http://ict-citypulse.eu/city#AirPollutionIndex")
								.get(0)).toCharArray();
						pollutionStatusTextView.setText(pollutionStatusMessage,
								0, pollutionStatusMessage.length);
					}
					if (dataFederationRequest.getResult().containsKey(
							"http://ict-citypulse.eu/city#AverageSpeed")) {
						char[] trafficStatusMessage = ("Average speed: "
								+ (int) Double.parseDouble(dataFederationRequest
										.getResult()
										.get("http://ict-citypulse.eu/city#AverageSpeed")
										.get(0)) + " km/h").toCharArray();
						trafficStatusTextView.setText(trafficStatusMessage, 0,
								trafficStatusMessage.length);
					}
				} else {
					System.out
							.println("The data federation event cannot be parsed!");
					Intent errorIntent = new Intent(currentActivity,
							ErrorPanel.class);
					errorIntent
							.putExtra(
									"Error",
									"Invalid message received from data federation component. The application will not display values for air quality index and average speed. The message is: "
											+ statusEvent);
					// Deliberately not started: faults are only logged.
					// startActivity(errorIntent);
				}
			}
		};
		IntentFilter statusIntentFilter = new IntentFilter();
		statusIntentFilter.addAction(Execution.TRAVEL_STATUS_EVENT);
		registerReceiver(statusBroadcastReceiver, statusIntentFilter);
	}

	@Override
	protected void onDestroy() {
		// Close any dialog still on screen to avoid a window leak.
		for (AlertDialog alertDialog : alertDialogList) {
			if (alertDialog.isShowing())
				alertDialog.cancel();
		}
		if (parkingServiceIntent != null) {
			stopService(parkingServiceIntent);
		}
		if (routeServiceIntent != null) {
			stopService(routeServiceIntent);
		}
		if (travelStatusServiceIntent != null) {
			stopService(travelStatusServiceIntent);
		}
		unregisterReceiver(alertsBroadcastReceiver);
		unregisterReceiver(statusBroadcastReceiver);
		// FIX: the error receiver was registered in onCreate() but never
		// unregistered, leaking it past the activity's lifetime.
		unregisterReceiver(errorBroadcastReceiver);
		super.onDestroy();
	}

	@Override
	public boolean onKeyDown(int keyCode, KeyEvent event) {
		// Finish the activity on the first BACK press.
		if (keyCode == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0) {
			finish();
			return true;
		}
		return super.onKeyDown(keyCode, event);
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.travel_planner_execution, menu);
		return true;
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		// Handle action bar item clicks here. The action bar will
		// automatically handle clicks on the Home/Up button, so long
		// as you specify a parent activity in AndroidManifest.xml.
		int id = item.getItemId();
		if (id == R.id.action_settings) {
			return true;
		}
		return super.onOptionsItemSelected(item);
	}

	@Override
	protected void onResume() {
		super.onResume();
		// Force the map overlays to be redrawn on the next GPS fix.
		locationAvailable = false;
	}

	@Override
	public void onLocationChanged(Location location) {
		LatLng latLng = new LatLng(location.getLatitude(),
				location.getLongitude());
		if (!locationAvailable) {
			// First fix after (re)start: draw all static overlays once.
			map.clear();
			userPositionMarker = map.addMarker(new MarkerOptions()
					.position(latLng)
					.title("my position")
					.icon(BitmapDescriptorFactory
							.fromResource(R.drawable.user_position_marker)));
			if (startingPoint != null)
				map.addMarker(new MarkerOptions()
						.position(
								new LatLng(startingPoint.getLatitude(),
										startingPoint.getLongitude()))
						.title("Starting point")
						.icon(BitmapDescriptorFactory
								.defaultMarker(BitmapDescriptorFactory.HUE_ORANGE)));
			if (destinationPoint != null)
				map.addMarker(new MarkerOptions().title("Destination point")
						.position(
								new LatLng(destinationPoint.getLatitude(),
										destinationPoint.getLongitude())));
			if (interestPoint != null) {
				map.addMarker(new MarkerOptions()
						.title("Point of interest")
						.position(
								new LatLng(interestPoint.getLatitude(),
										interestPoint.getLongitude()))
						.icon(BitmapDescriptorFactory
								.defaultMarker(BitmapDescriptorFactory.HUE_BLUE)));
			}
			if (routeContextualEventRequest != null) {
				// Draw the planned route as a red polyline.
				List<Coordinate> route = ((Route) routeContextualEventRequest
						.getPlace()).getRoute();
				PolylineOptions routePolyline = new PolylineOptions();
				for (Coordinate coordinate : route) {
					routePolyline.add(new LatLng(coordinate.getLatitude(),
							coordinate.getLongitude()));
				}
				routePolyline.color(Color.RED);
				map.addPolyline(routePolyline);
			}
			map.moveCamera(CameraUpdateFactory.newLatLngZoom(latLng, 12));
		} else {
			// Subsequent fixes only move the user marker.
			userPositionMarker.setPosition(latLng);
		}
		locationAvailable = true;
	}

	@Override
	public void onStatusChanged(String provider, int status, Bundle extras) {
	}

	@Override
	public void onProviderEnabled(String provider) {
	}

	@Override
	public void onProviderDisabled(String provider) {
	}

	/**
	 * Builds a {@link DataFederationRequest} for air quality and average speed
	 * along the planned route (applying the QoS constraints stored in the
	 * settings preferences) and starts the {@code TravelStatusService} with it.
	 * Must only be called when {@code routeContextualEventRequest} is set.
	 */
	private void launchTravelStatusService() {
		List<DataFederationPropertyType> dataFederationPropertyTypes = new ArrayList<DataFederationRequest.DataFederationPropertyType>();
		dataFederationPropertyTypes.add(DataFederationPropertyType.air_quality);
		dataFederationPropertyTypes
				.add(DataFederationPropertyType.average_speed);

		SharedPreferences settingsPreferences = getSharedPreferences(
				"SettingsPreferences", Context.MODE_PRIVATE);

		int latencySmallerThanRestoredValue = settingsPreferences.getInt(
				"latencySmallerThanValue", 0);
		boolean latencySmallerThanRestoredCheckBox = settingsPreferences
				.getBoolean("latencySmallerThanCheckBox", false);
		int priceSmallerThanRestoredValue = settingsPreferences.getInt(
				"priceSmallerThanValue", 0);
		boolean priceSmallerThanRestoredCheckBox = settingsPreferences
				.getBoolean("priceSmallerThanCheckBox", false);
		int securityLevelRestoredValue = settingsPreferences.getInt(
				"securityLevelValue", 0);
		boolean securityLevelRestoredCheckBox = settingsPreferences.getBoolean(
				"securityLevelCheckBox", false);
		int accuracyBiggerThanRestoredValue = settingsPreferences.getInt(
				"accuracyBiggerThanValue", 0);
		boolean accuracyBiggerThanRestoredCheckBox = settingsPreferences
				.getBoolean("accuracyBiggerThanCheckBox", false);
		int completnessBiggerThanRestoredValue = settingsPreferences.getInt(
				"completnessBiggerThanValue", 0);
		boolean completnessBiggerThanRestoredCheckBox = settingsPreferences
				.getBoolean("completnessBiggerThanCheckBox", false);
		int bandwithBiggerThanRestoredValue = settingsPreferences.getInt(
				"bandwithBiggerThanValue", 0);
		boolean bandwithBiggerThanRestoredCheckBox = settingsPreferences
				.getBoolean("bandwithBiggerThanCheckBox", false);

		// Apply only the constraints whose check-box was enabled.
		QosVector qosVector = new QosVector();
		if (latencySmallerThanRestoredCheckBox)
			qosVector.setLatency(latencySmallerThanRestoredValue);
		if (priceSmallerThanRestoredCheckBox)
			qosVector.setPrice(priceSmallerThanRestoredValue);
		if (securityLevelRestoredCheckBox)
			qosVector.setSecurity(securityLevelRestoredValue);
		if (accuracyBiggerThanRestoredCheckBox)
			qosVector.setAccuracy(Double.valueOf(accuracyBiggerThanRestoredValue));
		if (completnessBiggerThanRestoredCheckBox)
			qosVector.setReliability(Double
					.valueOf(completnessBiggerThanRestoredValue));
		if (bandwithBiggerThanRestoredCheckBox)
			qosVector.setTraffic(Double.valueOf(bandwithBiggerThanRestoredValue));

		WeightVector weightVector = new WeightVector();

		// TODO(review): request construction flagged "Dan please check" in the original
		DataFederationRequest dataFederationRequest = new DataFederationRequest(
				dataFederationPropertyTypes,
				((Route) routeContextualEventRequest.getPlace()).getRoute(),
				true, qosVector, weightVector);
		travelStatusEventRequest = new Gson().toJson(dataFederationRequest);
		System.out.println("dataFederationRequest " + travelStatusEventRequest);

		travelStatusServiceIntent = new Intent(this, TravelStatusService.class);
		travelStatusServiceIntent.putExtra(TRAVEL_STATUS_REQUEST,
				travelStatusEventRequest);
		startService(travelStatusServiceIntent);
	}
}
/* The following code was generated by JFlex 1.2.2 on 20.10.00 09:36 */ /* * $Header: /cvsroot/remotetea/remotetea/src/org/acplt/oncrpc/apps/jrpcgen/JrpcgenScanner.java,v 1.1.1.1 2003/08/13 12:03:47 haraldalbrecht Exp $ * * Copyright (c) 1999, 2000 * Lehrstuhl fuer Prozessleittechnik (PLT), RWTH Aachen * D-52064 Aachen, Germany. * All rights reserved. * * This library is free software; you can redistribute it and/or modify * it under the terms of the GNU Library General Public License as * published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this program (see the file COPYING.LIB for more * details); if not, write to the Free Software Foundation, Inc., * 675 Mass Ave, Cambridge, MA 02139, USA. 
*/
// NOTE(review): everything below is a JFlex-GENERATED scanner (see the class
// javadoc). The packed character map, row map and transition tables are
// machine-produced data — never hand-edit them; regenerate from
// JrpcgenScanner.flex instead.
/* * To compile into java code use: * java -jar <whereever/>JFlex.jar JrpcgenScanner.flex */ package org.acplt.oncrpc.apps.jrpcgen; import org.acplt.oncrpc.apps.jrpcgen.cup_runtime.*; /** * This class is a scanner generated by * <a href="http://www.informatik.tu-muenchen.de/~kleing/jflex/">JFlex</a> 1.2.2 * on 20.10.00 09:36 from the specification file * <tt>file:/G:/JAVA/SRC/ORG/ACPLT/ONCRPC/APPS/JRPCGEN/JrpcgenScanner.flex</tt> */ class JrpcgenScanner implements org.acplt.oncrpc.apps.jrpcgen.cup_runtime.Scanner { /** this character denotes the end of file */ final public static int YYEOF = -1; /** lexical states */ final public static int YYINITIAL = 0; /** * Translates characters to character classes */ final private static String yycmap_packed = "\11\0\1\3\1\2\1\0\1\3\1\1\22\0\1\3\7\0\1\63"+ "\1\64\1\5\1\0\1\60\1\14\1\0\1\4\1\7\7\13\2\10"+ "\1\61\1\57\1\71\1\62\1\72\2\0\1\34\3\12\1\37\1\12"+ "\1\33\1\6\1\41\3\6\1\35\1\42\1\32\1\30\1\6\1\31"+ "\1\40\2\6\1\36\4\6\1\67\1\0\1\70\1\0\1\54\1\0"+ "\1\21\1\55\1\43\1\46\1\24\1\47\1\20\1\51\1\26\2\6"+ "\1\53\1\22\1\27\1\17\1\15\1\56\1\16\1\25\1\44\1\52"+ "\1\23\1\50\1\11\1\45\1\6\1\65\1\0\1\66\uff82\0"; /** * Translates characters to character classes */ final private static char [] yycmap = yy_unpack_cmap(yycmap_packed); /** * Translates a state to a row index in the transition table */ final private static int yy_rowMap [] = { 0, 59, 118, 59, 177, 59, 236, 295, 354, 413, 472, 531, 590, 649, 708, 767, 826, 885, 944, 1003, 1062, 1121, 1180, 1239, 1298, 1357, 1416, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 1475, 1534, 1593, 1652, 1711, 1770, 1829, 1888, 1947, 2006, 2065, 2124, 2183, 2242, 2301, 2360, 2419, 2478, 2537, 2596, 2655, 2714, 2773, 2832, 2891, 2950, 3009, 3068, 1475, 3127, 1652, 3186, 3245, 3304, 3363, 3422, 3481, 3540, 3599, 236, 3658, 3717, 3776, 3835, 3894, 3953, 4012, 4071, 4130, 4189, 4248, 4307, 4366, 4425, 4484, 4543, 4602, 4661, 236, 4720, 236, 4779, 4838, 4897, 4956, 5015, 5074, 5133, 236, 236,
// (yy_rowMap continues — generated per-state offsets into the transition table)
5192, 5251, 5310, 5369, 5428, 5487, 5546, 236, 5605, 5664, 5723, 5782, 5841, 5900, 5959, 6018, 236, 6077, 6136, 236, 6195, 6254, 6313, 236, 236, 6372, 236, 6431, 6490, 6549, 236, 6608, 236, 236, 236, 6667, 6726, 6785, 236, 6844, 6903, 236, 6962, 236, 236, 236, 236, 7021, 7080, 236, 7139, 236 }; /** * The packed transition table of the DFA */ final private static String yy_packed = "\1\2\1\3\2\4\1\5\1\6\1\7\1\10\1\11"+ "\2\7\1\11\1\12\1\13\1\7\1\14\3\7\1\15"+ "\1\16\1\17\1\20\1\7\1\21\5\7\1\22\4\7"+ "\1\23\1\24\1\7\1\25\1\26\1\7\1\27\1\30"+ "\1\31\1\7\1\32\1\33\1\34\1\35\1\36\1\37"+ "\1\40\1\41\1\42\1\43\1\44\1\45\1\46\1\47"+ "\75\0\1\4\74\0\1\50\1\51\73\0\6\7\1\0"+ "\42\7\23\0\1\52\1\0\1\53\1\0\1\52\66\0"+ "\2\11\2\0\1\11\67\0\1\11\2\0\1\11\65\0"+ "\6\7\1\0\1\7\1\54\40\7\22\0\6\7\1\0"+ "\1\55\41\7\22\0\6\7\1\0\2\7\1\56\4\7"+ "\1\57\32\7\22\0\6\7\1\0\12\7\1\60\27\7"+ "\22\0\6\7\1\0\27\7\1\61\3\7\1\62\1\63"+ "\5\7\22\0\6\7\1\0\12\7\1\64\27\7\22\0"+ "\6\7\1\0\14\7\1\65\25\7\22\0\6\7\1\0"+ "\22\7\1\66\17\7\22\0\6\7\1\0\2\7\1\67"+ "\1\7\1\70\27\7\1\71\5\7\22\0\6\7\1\0"+ "\30\7\1\72\11\7\22\0\6\7\1\0\2\7\1\73"+ "\4\7\1\74\32\7\22\0\6\7\1\0\36\7\1\75"+ "\3\7\22\0\6\7\1\0\30\7\1\76\11\7\22\0"+ "\6\7\1\0\12\7\1\77\24\7\1\100\2\7\22\0"+ "\6\7\1\0\2\7\1\101\37\7\22\0\6\7\1\0"+ "\2\7\1\102\37\7\22\0\6\7\1\0\35\7\1\103"+ "\4\7\14\0\1\50\1\104\1\4\70\50\5\51\1\105"+ "\65\51\7\0\1\52\3\0\1\52\66\0\2\106\1\0"+ "\2\106\5\0\1\106\2\0\1\106\7\0\1\106\2\0"+ "\1\106\3\0\1\106\2\0\2\106\5\0\1\106\23\0"+ "\6\7\1\0\2\7\1\107\37\7\22\0\6\7\1\0"+ "\4\7\1\110\35\7\22\0\6\7\1\0\11\7\1\111"+ "\30\7\22\0\6\7\1\0\1\7\1\112\40\7\22\0"+ "\6\7\1\0\35\7\1\113\4\7\22\0\6\7\1\0"+ "\1\7\1\114\40\7\22\0\6\7\1\0\11\7\1\115"+ "\30\7\22\0\6\7\1\0\2\7\1\116\37\7\22\0"+ "\6\7\1\0\27\7\1\117\12\7\22\0\6\7\1\0"+ "\15\7\1\120\24\7\22\0\6\7\1\0\14\7\1\121"+ "\25\7\22\0\6\7\1\0\12\7\1\122\27\7\22\0"+ "\6\7\1\0\10\7\1\123\31\7\22\0\6\7\1\0"+ "\4\7\1\124\35\7\22\0\6\7\1\0\1\125\41\7"+
// (yy_packed continues — packed DFA transition data, do not hand-edit)
"\22\0\6\7\1\0\35\7\1\126\4\7\22\0\6\7"+ "\1\0\32\7\1\127\7\7\22\0\6\7\1\0\2\7"+ "\1\130\37\7\22\0\6\7\1\0\1\131\41\7\22\0"+ "\6\7\1\0\10\7\1\132\1\133\30\7\22\0\6\7"+ "\1\0\10\7\1\134\1\20\24\7\1\31\3\7\22\0"+ "\6\7\1\0\12\7\1\135\27\7\22\0\6\7\1\0"+ "\2\7\1\136\37\7\22\0\6\7\1\0\4\7\1\137"+ "\35\7\14\0\4\51\1\4\1\105\65\51\6\0\6\7"+ "\1\0\3\7\1\140\36\7\22\0\6\7\1\0\41\7"+ "\1\141\22\0\6\7\1\0\31\7\1\142\10\7\22\0"+ "\6\7\1\0\10\7\1\143\31\7\22\0\6\7\1\0"+ "\5\7\1\144\34\7\22\0\6\7\1\0\11\7\1\145"+ "\23\7\1\146\4\7\22\0\6\7\1\0\27\7\1\147"+ "\12\7\22\0\6\7\1\0\1\7\1\150\40\7\22\0"+ "\6\7\1\0\16\7\1\151\23\7\22\0\6\7\1\0"+ "\23\7\1\152\16\7\22\0\6\7\1\0\10\7\1\153"+ "\31\7\22\0\6\7\1\0\7\7\1\154\32\7\22\0"+ "\6\7\1\0\1\7\1\155\40\7\22\0\6\7\1\0"+ "\7\7\1\156\32\7\22\0\6\7\1\0\40\7\1\157"+ "\1\7\22\0\6\7\1\0\4\7\1\160\35\7\22\0"+ "\6\7\1\0\4\7\1\161\35\7\22\0\6\7\1\0"+ "\7\7\1\162\32\7\22\0\6\7\1\0\11\7\1\163"+ "\30\7\22\0\6\7\1\0\2\7\1\164\37\7\22\0"+ "\6\7\1\0\34\7\1\63\5\7\22\0\6\7\1\0"+ "\3\7\1\165\36\7\22\0\6\7\1\0\36\7\1\166"+ "\3\7\22\0\6\7\1\0\31\7\1\167\10\7\22\0"+ "\6\7\1\0\1\7\1\170\40\7\22\0\6\7\1\0"+ "\35\7\1\171\4\7\22\0\6\7\1\0\11\7\1\172"+ "\30\7\22\0\6\7\1\0\12\7\1\173\27\7\22\0"+ "\6\7\1\0\26\7\1\174\13\7\22\0\6\7\1\0"+ "\26\7\1\175\13\7\22\0\6\7\1\0\27\7\1\176"+ "\12\7\22\0\6\7\1\0\14\7\1\177\25\7\22\0"+ "\6\7\1\0\24\7\1\200\15\7\22\0\6\7\1\0"+ "\27\7\1\201\12\7\22\0\6\7\1\0\31\7\1\202"+ "\10\7\22\0\6\7\1\0\36\7\1\203\3\7\22\0"+ "\6\7\1\0\35\7\1\204\4\7\22\0\6\7\1\0"+ "\27\7\1\205\12\7\22\0\6\7\1\0\1\7\1\206"+ "\40\7\22\0\6\7\1\0\3\7\1\207\36\7\22\0"+ "\6\7\1\0\12\7\1\210\27\7\22\0\6\7\1\0"+ "\37\7\1\211\2\7\22\0\6\7\1\0\1\7\1\212"+ "\40\7\22\0\6\7\1\0\4\7\1\213\35\7\22\0"+ "\6\7\1\0\7\7\1\214\32\7\22\0\6\7\1\0"+ "\2\7\1\215\37\7\22\0\6\7\1\0\3\7\1\216"+ "\36\7\22\0\6\7\1\0\27\7\1\217\12\7\22\0"+ "\6\7\1\0\34\7\1\220\5\7\22\0\6\7\1\0"+ "\17\7\1\221\22\7\22\0\6\7\1\0\15\7\1\222"+ "\24\7\22\0\6\7\1\0\7\7\1\223\32\7\22\0"+
// NOTE(review): the YY_ERROR_MSG table below contains a typo ("Unkown"); it is
// a runtime string baked into the generated code, so any fix belongs in the
// JFlex skeleton/spec, not in a hand edit here.
"\6\7\1\0\7\7\1\224\32\7\22\0\6\7\1\0"+ "\36\7\1\225\3\7\22\0\6\7\1\0\12\7\1\226"+ "\27\7\22\0\6\7\1\0\27\7\1\227\12\7\22\0"+ "\6\7\1\0\35\7\1\230\4\7\22\0\6\7\1\0"+ "\5\7\1\231\34\7\22\0\6\7\1\0\12\7\1\232"+ "\27\7\22\0\6\7\1\0\20\7\1\231\21\7\22\0"+ "\6\7\1\0\25\7\1\232\14\7\22\0\6\7\1\0"+ "\32\7\1\233\7\7\22\0\6\7\1\0\27\7\1\234"+ "\12\7\22\0\6\7\1\0\7\7\1\235\32\7\22\0"+ "\6\7\1\0\1\236\41\7\22\0\6\7\1\0\31\7"+ "\1\237\10\7\22\0\6\7\1\0\36\7\1\240\3\7"+ "\22\0\6\7\1\0\7\7\1\241\32\7\14\0"; /** * The transition table of the DFA */ final private static int yytrans [] = yy_unpack(yy_packed); /* error codes */ final private static int YY_UNKNOWN_ERROR = 0; final private static int YY_ILLEGAL_STATE = 1; final private static int YY_NO_MATCH = 2; final private static int YY_PUSHBACK_2BIG = 3; /* error messages for the codes above */ final private static String YY_ERROR_MSG[] = { "Unkown internal scanner error", "Internal error: unknown state", "Error: could not match input", "Error: pushback value was too large" }; /** * YY_ATTRIBUTE[aState] contains the attributes of state <code>aState</code> */ private final static byte YY_ATTRIBUTE[] = { 0, 9, 1, 9, 1, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; /** the input device */ private java.io.Reader yy_reader; /** the current state of the DFA */ private int yy_state; /** the current lexical state */ private int yy_lexical_state = YYINITIAL; /** this buffer contains the current text to be matched and is the source of the yytext() string */ private char yy_buffer[] = new char[16384]; /** the textposition at
the last accepting state */ private int yy_markedPos; /** the textposition at the last state to be included in yytext */ private int yy_pushbackPos; /** the current text position in the buffer */ private int yy_currentPos; /** startRead marks the beginning of the yytext() string in the buffer */ private int yy_startRead; /** endRead marks the last character in the buffer, that has been read from input */ private int yy_endRead; /** number of newlines encountered up to the start of the matched text */ private int yyline; /** the number of characters up to the start of the matched text */ private int yychar; /** * the number of characters from the last newline up to the start of the * matched text */ private int yycolumn; /** * yy_atBOL == true <=> the scanner is currently at the beginning of a line */ private boolean yy_atBOL; /** yy_atEOF == true <=> the scanner has returned a value for EOF */ private boolean yy_atEOF; /** denotes if the user-EOF-code has already been executed */ private boolean yy_eof_done; /* user code: */ StringBuffer string = new StringBuffer(); private Symbol symbol(int type) { return new Symbol(type, yyline+1, yycolumn+1); } private Symbol symbol(int type, Object value) { return new Symbol(type, yyline+1, yycolumn+1, value); } /* assumes correct representation of a long value for specified radix in String s */ private long parseLong(String s, int radix) { int max = s.length(); long result = 0; long digit; for (int i = 0; i < max; i++) { digit = Character.digit(yy_buffer[i],radix); result*= radix; result+= digit; } return result; } /** * Creates a new scanner * There is also a java.io.InputStream version of this constructor. * * @param in the java.io.Reader to read input from. */ JrpcgenScanner(java.io.Reader in) { this.yy_reader = in; } /** * Creates a new scanner. * There is also java.io.Reader version of this constructor. * * @param in the java.io.Inputstream to read input from. 
*/ JrpcgenScanner(java.io.InputStream in) { this(new java.io.InputStreamReader(in)); }
// NOTE(review): the methods below are the JFlex-generated scanning engine
// (table unpacking, buffer refill, DFA driver). Behavior changes belong in
// JrpcgenScanner.flex / the JFlex skeleton, not in hand edits here.
/** * Unpacks the compressed DFA transition table. * * @param packed the packed transition table * @return the unpacked transition table */ private static int [] yy_unpack(String packed) { int [] trans = new int[7198]; int i = 0; /* index in packed string */ int j = 0; /* index in unpacked array */ while (i < 1550) { int count = packed.charAt(i++); int value = packed.charAt(i++); value--; do trans[j++] = value; while (--count > 0); } return trans; } /** * Unpacks the compressed character translation table. * * @param packed the packed character translation table * @return the unpacked character translation table */ private static char [] yy_unpack_cmap(String packed) { char [] map = new char[0x10000]; int i = 0; /* index in packed string */ int j = 0; /* index in unpacked array */ while (i < 158) { int count = packed.charAt(i++); char value = packed.charAt(i++); do map[j++] = value; while (--count > 0); } return map; } /** * Gets the next input character. * * @return the next character of the input stream, EOF if the * end of the stream is reached. * @exception IOException if any I/O-Error occurs */ private int yy_advance() throws java.io.IOException { /* standard case */ if (yy_currentPos < yy_endRead) return yy_buffer[yy_currentPos++]; /* if the eof is reached, we don't need to work hard */ if (yy_atEOF) return YYEOF; /* otherwise: need to refill the buffer */ /* first: make room (if you can) */ if (yy_startRead > 0) { System.arraycopy(yy_buffer, yy_startRead, yy_buffer, 0, yy_endRead-yy_startRead); /* translate stored positions */ yy_endRead-= yy_startRead; yy_currentPos-= yy_startRead; yy_markedPos-= yy_startRead; yy_pushbackPos-= yy_startRead; yy_startRead = 0; } /* is the buffer big enough?
*/ if (yy_currentPos >= yy_buffer.length) { /* if not: blow it up */ char newBuffer[] = new char[yy_currentPos*2]; System.arraycopy(yy_buffer, 0, newBuffer, 0, yy_buffer.length); yy_buffer = newBuffer; } /* finally: fill the buffer with new input */ int numRead = yy_reader.read(yy_buffer, yy_endRead, yy_buffer.length-yy_endRead); if ( numRead == -1 ) return YYEOF; yy_endRead+= numRead; return yy_buffer[yy_currentPos++]; } /** * Closes the input stream. */ final public void yyclose() throws java.io.IOException { yy_atEOF = true; /* indicate end of file */ yy_endRead = yy_startRead; /* invalidate buffer */ yy_reader.close(); } /** * Returns the current lexical state. */ final public int yystate() { return yy_lexical_state; } /** * Enters a new lexical state * * @param newState the new lexical state */ final public void yybegin(int newState) { yy_lexical_state = newState; } /** * Returns the text matched by the current regular expression. */ final public String yytext() { return new String( yy_buffer, yy_startRead, yy_markedPos-yy_startRead ); } /** * Returns the length of the matched text region. */ final public int yylength() { return yy_markedPos-yy_startRead; } /** * Reports an error that occured while scanning. * * @param errorCode the code of the errormessage to display */ private void yy_ScanError(int errorCode) { try { System.out.println(YY_ERROR_MSG[errorCode]); } catch (ArrayIndexOutOfBoundsException e) { System.out.println(YY_ERROR_MSG[YY_UNKNOWN_ERROR]); } System.exit(1); } /** * Pushes the specified amount of characters back into the input stream. * * They will be read again by then next call of the scanning method * * @param number the number of characters to be read again. * This number must not be greater than yylength()!
*/ private void yypushback(int number) { if ( number > yylength() ) yy_ScanError(YY_PUSHBACK_2BIG); yy_markedPos -= number; } /** * Contains user EOF-code, which will be executed exactly once, * when the end of file is reached */ private void yy_do_eof() throws java.io.IOException { if (!yy_eof_done) { yy_eof_done = true; yyclose(); } } /** * Resumes scanning until the next regular expression is matched, * the end of input is encountered or an I/O-Error occurs. * * @return the next token * @exception IOException if any I/O-Error occurs */ public org.acplt.oncrpc.apps.jrpcgen.cup_runtime.Symbol next_token() throws java.io.IOException { int yy_input; int yy_action; while (true) { boolean yy_counted = false; for (yy_currentPos = yy_startRead; yy_currentPos < yy_markedPos; yy_currentPos++) { switch (yy_buffer[yy_currentPos]) { case '\r': yyline++; yycolumn = 0; yy_counted = true; break; case '\n': if (yy_counted) yy_counted = false; else { yyline++; yycolumn = 0; } break; default: yy_counted = false; yycolumn++; } } if (yy_counted) { if ( yy_advance() == '\n' ) yyline--; if ( !yy_atEOF ) yy_currentPos--; } yy_action = -1; yy_currentPos = yy_startRead = yy_markedPos; yy_state = yy_lexical_state; yy_forAction: { while (true) { yy_input = yy_advance(); if ( yy_input == YYEOF ) break yy_forAction; int yy_next = yytrans[ yy_rowMap[yy_state] + yycmap[yy_input] ]; if (yy_next == -1) break yy_forAction; yy_state = yy_next; int yy_attributes = YY_ATTRIBUTE[yy_state]; if ( (yy_attributes & 1) > 0 ) { yy_action = yy_state; yy_markedPos = yy_currentPos; if ( (yy_attributes & 8) > 0 ) break yy_forAction; } } } switch (yy_action) { case 160: { return symbol(JrpcgenSymbols.QUADRUPLE); } case 162: break; case 158: { return symbol(JrpcgenSymbols.UNSIGNED); } case 163: break; case 155: { return symbol(JrpcgenSymbols.DEFAULT); } case 164: break; case 154: { return symbol(JrpcgenSymbols.TYPEDEF); } case 165: break; case 153: { return symbol(JrpcgenSymbols.VERSION); } case 166: break; case
// (generated token actions continue — one case per accepting DFA state)
38: { return symbol(JrpcgenSymbols.RANGLE); } case 167: break; case 37: { return symbol(JrpcgenSymbols.LANGLE); } case 168: break; case 36: { return symbol(JrpcgenSymbols.RBRACKET); } case 169: break; case 35: { return symbol(JrpcgenSymbols.LBRACKET); } case 170: break; case 34: { return symbol(JrpcgenSymbols.RBRACE); } case 171: break; case 33: { return symbol(JrpcgenSymbols.LBRACE); } case 172: break; case 32: { return symbol(JrpcgenSymbols.RPAREN); } case 173: break; case 31: { return symbol(JrpcgenSymbols.LPAREN); } case 174: break; case 30: { return symbol(JrpcgenSymbols.EQUAL); } case 175: break; case 1: case 4: case 9: { throw new Error("Illegal character \"" + yytext() + "\""); } case 176: break; case 2: case 3: case 67: { /* ignore */ } case 177: break; case 5: { return symbol(JrpcgenSymbols.STAR); } case 178: break; case 27: { return symbol(JrpcgenSymbols.SEMICOLON); } case 179: break; case 28: { return symbol(JrpcgenSymbols.COMMA); } case 180: break; case 29: { return symbol(JrpcgenSymbols.COLON); } case 181: break; case 78: { return symbol(JrpcgenSymbols.INT); } case 182: break; case 97: { return symbol(JrpcgenSymbols.VOID); } case 183: break; case 99: { return symbol(JrpcgenSymbols.ENUM); } case 184: break; case 107: { return symbol(JrpcgenSymbols.CASE); } case 185: break; case 108: { return symbol(JrpcgenSymbols.CHAR); } case 186: break; case 116: { return symbol(JrpcgenSymbols.LONG); } case 187: break; case 117: case 150: { return symbol(JrpcgenSymbols.BOOL); } case 188: break; case 125: { return symbol(JrpcgenSymbols.SHORT); } case 189: break; case 128: { return symbol(JrpcgenSymbols.CONST); } case 190: break; case 132: { return symbol(JrpcgenSymbols.FLOAT); } case 191: break; case 133: { return symbol(JrpcgenSymbols.HYPER); } case 192: break; case 135: { return symbol(JrpcgenSymbols.UNION); } case 193: break; case 139: { return symbol(JrpcgenSymbols.OPAQUE); } case 194: break; case 141: { return symbol(JrpcgenSymbols.STRING); } case 195: break;
// (keyword actions continue; the long multi-case fall-through below maps all
// remaining accepting states to IDENTIFIER / INTEGER_LITERAL)
case 142: { return symbol(JrpcgenSymbols.STRUCT); } case 196: break; case 143: { return symbol(JrpcgenSymbols.SWITCH); } case 197: break; case 147: { return symbol(JrpcgenSymbols.DOUBLE); } case 198: break; case 152: { return symbol(JrpcgenSymbols.PROGRAM); } case 199: break; case 6: case 10: case 11: case 12: case 13: case 14: case 15: case 16: case 17: case 18: case 19: case 20: case 21: case 22: case 23: case 24: case 25: case 26: case 43: case 44: case 45: case 46: case 47: case 48: case 49: case 50: case 51: case 52: case 53: case 54: case 55: case 56: case 57: case 58: case 59: case 60: case 61: case 62: case 63: case 64: case 65: case 66: case 70: case 71: case 72: case 73: case 74: case 75: case 76: case 77: case 79: case 80: case 81: case 82: case 83: case 84: case 85: case 86: case 87: case 88: case 89: case 90: case 91: case 92: case 93: case 94: case 95: case 96: case 98: case 100: case 101: case 102: case 103: case 104: case 105: case 106: case 109: case 110: case 111: case 112: case 113: case 114: case 115: case 118: case 119: case 120: case 121: case 122: case 123: case 124: case 126: case 127: case 129: case 130: case 131: case 134: case 136: case 137: case 138: case 140: case 144: case 145: case 146: case 148: case 149: case 151: case 156: case 157: case 159: { return symbol(JrpcgenSymbols.IDENTIFIER, yytext()); } case 200: break; case 7: case 8: case 41: case 69: { return symbol(JrpcgenSymbols.INTEGER_LITERAL, yytext()); } case 201: break; default: if (yy_input == YYEOF && yy_startRead == yy_currentPos) { yy_atEOF = true; yy_do_eof(); { return new Symbol(JrpcgenSymbols.EOF); } } else { yy_ScanError(YY_NO_MATCH); } } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authorization.restriction; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import javax.jcr.PropertyType; import javax.jcr.Value; import javax.jcr.security.AccessControlException; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import org.apache.jackrabbit.JcrConstants; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.plugins.value.jcr.PartialValueFactory; import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; 
import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class AbstractRestrictionProviderTest implements AccessControlConstants { private String unsupportedPath = null; private String testPath = "/testRoot"; private Value globValue; private Value[] nameValues; private Value nameValue; private final NamePathMapper namePathMapper = NamePathMapper.DEFAULT; private PartialValueFactory valueFactory; private Map<String, ? extends RestrictionDefinition> supported; private AbstractRestrictionProvider restrictionProvider; @Before public void before() throws Exception { valueFactory = new PartialValueFactory(namePathMapper); globValue = valueFactory.createValue("*"); nameValue = valueFactory.createValue("nt:file", PropertyType.NAME); nameValues = new Value[] { valueFactory.createValue("nt:folder", PropertyType.NAME), valueFactory.createValue("nt:file", PropertyType.NAME) }; RestrictionDefinition glob = new RestrictionDefinitionImpl(REP_GLOB, Type.STRING, false); RestrictionDefinition nts = new RestrictionDefinitionImpl(REP_NT_NAMES, Type.NAMES, false); RestrictionDefinition mand = new RestrictionDefinitionImpl("mandatory", Type.BOOLEAN, true); RestrictionDefinition undef = mock(RestrictionDefinition.class); when(undef.getName()).thenReturn("undefined"); when(undef.getRequiredType()).thenReturn((Type) Type.UNDEFINED); supported = ImmutableMap.of(glob.getName(), glob, nts.getName(), nts, mand.getName(), mand, undef.getName(), undef); restrictionProvider = new AbstractRestrictionProvider(supported) { @NotNull @Override public RestrictionPattern getPattern(@Nullable String oakPath, @NotNull Tree tree) { throw new UnsupportedOperationException(); } @NotNull @Override public RestrictionPattern getPattern(@Nullable String oakPath, @NotNull Set<Restriction> restrictions) { throw new 
UnsupportedOperationException(); } }; } private Tree getAceTree(Restriction... restrictions) { Tree restrictionsTree = Mockito.mock(Tree.class);; when(restrictionsTree.getName()).thenReturn(REP_RESTRICTIONS); PropertyState primaryType = PropertyStates.createProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_RESTRICTIONS, Type.NAME); when(restrictionsTree.getProperty(JcrConstants.JCR_PRIMARYTYPE)).thenReturn(primaryType); List<PropertyState> properties = new ArrayList<>(); for (Restriction r : restrictions) { when(restrictionsTree.getProperty(r.getDefinition().getName())).thenReturn(r.getProperty()); properties.add(r.getProperty()); } properties.add(primaryType); properties.add(PropertyStates.createProperty(Iterables.get(AccessControlConstants.ACE_PROPERTY_NAMES, 0), "value")); when(restrictionsTree.getProperties()).thenReturn((Iterable)properties); when(restrictionsTree.exists()).thenReturn(true); Tree ace = Mockito.mock(Tree.class); when(ace.getProperty(JcrConstants.JCR_PRIMARYTYPE)).thenReturn(PropertyStates.createProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_GRANT_ACE, Type.NAME)); when(ace.getChild(REP_RESTRICTIONS)).thenReturn(restrictionsTree); when(ace.exists()).thenReturn(true); return ace; } @Test public void testGetSupportedRestrictions() { Set<RestrictionDefinition> defs = restrictionProvider.getSupportedRestrictions(testPath); assertNotNull(defs); assertEquals(supported.size(), defs.size()); for (RestrictionDefinition def : supported.values()) { assertTrue(defs.contains(def)); } } @Test public void testGetSupportedRestrictionsForUnsupportedPath() { Set<RestrictionDefinition> defs = restrictionProvider.getSupportedRestrictions(unsupportedPath); assertNotNull(defs); assertTrue(defs.isEmpty()); } @Test(expected = AccessControlException.class) public void testCreateForUnsupportedPath() throws Exception { restrictionProvider.createRestriction(unsupportedPath, REP_GLOB, globValue); } @Test(expected = AccessControlException.class) public void 
testCreateForUnsupportedName() throws Exception { restrictionProvider.createRestriction(testPath, "unsupported", nameValue); } @Test(expected = AccessControlException.class) public void testCreateForUnsupportedType() throws Exception { restrictionProvider.createRestriction(testPath, REP_NT_NAMES, valueFactory.createValue("nt:file", PropertyType.NAME), valueFactory.createValue(true)); } @Test(expected = AccessControlException.class) public void testCreateForUnsupportedMultiValues() throws Exception { restrictionProvider.createRestriction(testPath, REP_GLOB, valueFactory.createValue("*"), valueFactory.createValue("/a/*")); } @Test public void testCreateRestriction() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_GLOB, globValue); assertNotNull(r); assertEquals(REP_GLOB, r.getDefinition().getName()); assertEquals(globValue.getString(), r.getProperty().getValue(Type.STRING)); } @Test public void testCreateRestrictionFromArray() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_GLOB, new Value[] {globValue}); assertNotNull(r); assertEquals(REP_GLOB, r.getDefinition().getName()); assertEquals(globValue.getString(), r.getProperty().getValue(Type.STRING)); assertFalse(r.getProperty().isArray()); } @Test public void testCreateMvRestriction() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_NT_NAMES, valueFactory.createValue("nt:folder", PropertyType.NAME), valueFactory.createValue("nt:file", PropertyType.NAME)); assertNotNull(r); assertEquals(REP_NT_NAMES, r.getDefinition().getName()); assertEquals(Type.NAMES, r.getDefinition().getRequiredType()); PropertyState ps = r.getProperty(); assertTrue(ps.isArray()); assertEquals(Type.NAMES, ps.getType()); List<Value> vs = valueFactory.createValues(ps); assertArrayEquals(nameValues, vs.toArray(new Value[0])); } @Test public void testCreateMvRestriction2() throws Exception { Restriction r = 
restrictionProvider.createRestriction(testPath, REP_NT_NAMES, nameValues); assertNotNull(r); assertEquals(REP_NT_NAMES, r.getDefinition().getName()); assertEquals(Type.NAMES, r.getDefinition().getRequiredType()); PropertyState ps = r.getProperty(); assertTrue(ps.isArray()); assertEquals(Type.NAMES, ps.getType()); List<Value> vs = valueFactory.createValues(ps); assertArrayEquals(nameValues, vs.toArray(new Value[0])); } @Test public void testCreateMvRestriction3() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_NT_NAMES, nameValue); assertNotNull(r); assertEquals(REP_NT_NAMES, r.getDefinition().getName()); assertEquals(Type.NAMES, r.getDefinition().getRequiredType()); assertTrue(r.getProperty().isArray()); assertEquals(Type.NAMES, r.getProperty().getType()); List<Value> vs = valueFactory.createValues(r.getProperty()); assertArrayEquals(new Value[] {nameValue}, vs.toArray(new Value[0])); } @Test public void testCreateEmptyMvRestriction() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_NT_NAMES); assertNotNull(r); assertEquals(REP_NT_NAMES, r.getDefinition().getName()); assertEquals(Type.NAMES, r.getDefinition().getRequiredType()); assertTrue(r.getProperty().isArray()); assertEquals(Type.NAMES, r.getProperty().getType()); List<Value> vs = valueFactory.createValues(r.getProperty()); assertNotNull(vs); assertEquals(0, vs.size()); } @Test public void testCreateEmptyMvRestriction2() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_NT_NAMES); assertNotNull(r); assertEquals(REP_NT_NAMES, r.getDefinition().getName()); assertEquals(Type.NAMES, r.getDefinition().getRequiredType()); assertTrue(r.getProperty().isArray()); assertEquals(Type.NAMES, r.getProperty().getType()); List<Value> vs = valueFactory.createValues(r.getProperty()); assertNotNull(vs); assertEquals(0, vs.size()); } @Test public void testCreatedUndefinedType() throws Exception { Restriction r = 
restrictionProvider.createRestriction(testPath, "undefined", valueFactory.createValue(23)); } @Test(expected = AccessControlException.class) public void testCreateUndefinedTypeMV() throws Exception { Restriction r2 = restrictionProvider.createRestriction(testPath, "undefined", valueFactory.createValue(23), valueFactory.createValue(false)); } @Test public void testReadRestrictionsForUnsupportedPath() { Set<Restriction> restrictions = restrictionProvider.readRestrictions(unsupportedPath, getAceTree()); assertTrue(restrictions.isEmpty()); } @Test public void testReadRestrictions() throws Exception { Restriction r = restrictionProvider.createRestriction(testPath, REP_GLOB, globValue); Tree aceTree = getAceTree(r); Set<Restriction> restrictions = restrictionProvider.readRestrictions(testPath, aceTree); assertEquals(1, restrictions.size()); assertTrue(restrictions.contains(r)); } @Test public void testValidateRestrictionsUnsupportedPathEmptyRestrictions() throws Exception { // empty restrictions => must succeed restrictionProvider.validateRestrictions(null, getAceTree()); } @Test(expected = AccessControlException.class) public void testValidateRestrictionsUnsupportedPath() throws Exception { // non-empty restrictions => must fail Restriction restr = restrictionProvider.createRestriction(testPath, REP_GLOB, globValue); restrictionProvider.validateRestrictions(null, getAceTree(restr)); } @Test(expected = AccessControlException.class) public void testValidateRestrictionsWrongType() throws Exception { Restriction mand = restrictionProvider.createRestriction(testPath, "mandatory", valueFactory.createValue(true)); Tree ace = getAceTree(mand, new RestrictionImpl(PropertyStates.createProperty(REP_GLOB, true), false)); restrictionProvider.validateRestrictions(testPath, ace); } @Test(expected = AccessControlException.class) public void testValidateRestrictionsUnsupportedRestriction() throws Exception { Restriction mand = restrictionProvider.createRestriction(testPath, "mandatory", 
valueFactory.createValue(true)); Tree ace = getAceTree(mand, new RestrictionImpl(PropertyStates.createProperty("unsupported", "value"), false)); restrictionProvider.validateRestrictions(testPath, ace); } @Test(expected = AccessControlException.class) public void testValidateRestrictionsMissingMandatory() throws Exception { Restriction glob = restrictionProvider.createRestriction(testPath, REP_GLOB, globValue); restrictionProvider.validateRestrictions(testPath, getAceTree(glob)); } @Test public void testValidateRestrictions() throws Exception { Restriction glob = restrictionProvider.createRestriction(testPath, REP_GLOB, globValue); Restriction ntNames = restrictionProvider.createRestriction(testPath, REP_NT_NAMES, nameValues); Restriction mand = restrictionProvider.createRestriction(testPath, "mandatory", valueFactory.createValue(true)); restrictionProvider.validateRestrictions(testPath, getAceTree(mand)); restrictionProvider.validateRestrictions(testPath, getAceTree(mand, glob)); restrictionProvider.validateRestrictions(testPath, getAceTree(mand, ntNames)); restrictionProvider.validateRestrictions(testPath, getAceTree(mand, glob, ntNames)); } @Test public void testGetRestrictionTree() { Tree aceTree = getAceTree(); Tree restrictionTree = restrictionProvider.getRestrictionsTree(aceTree); assertEquals(aceTree.getChild(REP_RESTRICTIONS), restrictionTree); } @Test public void testGetRestrictionTreeMissing() { Tree aceTree = when(mock(Tree.class).getChild(REP_RESTRICTIONS)).thenReturn(mock(Tree.class)).getMock(); Tree restrictionTree = restrictionProvider.getRestrictionsTree(aceTree); assertEquals(aceTree, restrictionTree); } @Test public void testWriteEmptyRestrictions() throws Exception { restrictionProvider.writeRestrictions(null, getAceTree(), Collections.emptySet()); } @Test public void testWriteRestrictions() throws Exception { Restriction ntNames = restrictionProvider.createRestriction(testPath, REP_NT_NAMES, nameValues); Tree aceTree = getAceTree(); 
restrictionProvider.writeRestrictions(null, aceTree, Collections.singleton(ntNames)); verify(aceTree, times(1)).getChild(REP_RESTRICTIONS); verify(aceTree.getChild(REP_RESTRICTIONS), times(1)).setProperty(ntNames.getProperty()); } }
/*
 * Copyright (C) 2015 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.common.util.concurrent;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.util.concurrent.Futures.getDone;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;

import com.google.common.util.concurrent.internal.InternalFutureFailureAccess;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Emulation for AbstractFuture in GWT.
 *
 * <p>Because GWT is single-threaded, this implementation needs no atomics or locking: completion is
 * modeled as a simple {@link State} field plus the value/throwable/delegate slots, and listeners
 * are run synchronously when the future completes.
 */
public abstract class AbstractFuture<V> extends InternalFutureFailureAccess
    implements ListenableFuture<V> {
  /**
   * Tag interface marking trusted subclasses. This enables some optimizations. The implementation
   * of this interface must also be an AbstractFuture and must not override or expose for overriding
   * any of the public methods of ListenableFuture.
   */
  interface Trusted<V> extends ListenableFuture<V> {}

  /**
   * Trusted base class whose ListenableFuture methods are all {@code final}, so the Trusted
   * contract (no overriding of public methods) is enforced by the compiler.
   */
  abstract static class TrustedFuture<V> extends AbstractFuture<V> implements Trusted<V> {
    @Override
    public final V get() throws InterruptedException, ExecutionException {
      return super.get();
    }

    @Override
    public final V get(long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
      return super.get(timeout, unit);
    }

    @Override
    public final boolean isDone() {
      return super.isDone();
    }

    @Override
    public final boolean isCancelled() {
      return super.isCancelled();
    }

    @Override
    public final void addListener(Runnable listener, Executor executor) {
      super.addListener(listener, executor);
    }

    @Override
    public final boolean cancel(boolean mayInterruptIfRunning) {
      return super.cancel(mayInterruptIfRunning);
    }
  }

  private static final Logger log = Logger.getLogger(AbstractFuture.class.getName());

  // Completion state machine; see the State enum for the legal transitions.
  private State state;
  // Result slot, meaningful only when state == VALUE.
  private V value;
  // Backing future, meaningful only when state == DELEGATED (set via setFuture).
  private Future<? extends V> delegate;
  // Failure slot, meaningful only when state == FAILURE.
  private Throwable throwable;
  // Records the argument of the successful cancel() call, reported by wasInterrupted().
  private boolean mayInterruptIfRunning;
  // Pending listeners; nulled out by notifyAndClearListeners() once the future completes.
  private List<Listener> listeners;

  protected AbstractFuture() {
    state = State.PENDING;
    listeners = new ArrayList<Listener>();
  }

  @Override
  public boolean cancel(boolean mayInterruptIfRunning) {
    if (!state.permitsPublicUserToTransitionTo(State.CANCELLED)) {
      return false;
    }

    this.mayInterruptIfRunning = mayInterruptIfRunning;
    state = State.CANCELLED;
    notifyAndClearListeners();

    if (delegate != null) {
      // TODO(lukes): consider adding the StackOverflowError protection from the server version
      delegate.cancel(mayInterruptIfRunning);
    }

    return true;
  }

  protected void interruptTask() {}

  @Override
  public boolean isCancelled() {
    return state.isCancelled();
  }

  @Override
  public boolean isDone() {
    return state.isDone();
  }

  /*
   * ForwardingFluentFuture needs to override those methods, so they are not final.
   */
  @Override
  public V get() throws InterruptedException, ExecutionException {
    state.maybeThrowOnGet(throwable);
    return value;
  }

  @Override
  public V get(long timeout, TimeUnit unit)
      throws InterruptedException, ExecutionException, TimeoutException {
    checkNotNull(unit);
    return get();
  }

  @Override
  public void addListener(Runnable runnable, Executor executor) {
    Listener listener = new Listener(runnable, executor);
    if (isDone()) {
      listener.execute();
    } else {
      listeners.add(listener);
    }
  }

  protected boolean setException(Throwable throwable) {
    checkNotNull(throwable);
    if (!state.permitsPublicUserToTransitionTo(State.FAILURE)) {
      return false;
    }
    forceSetException(throwable);
    return true;
  }

  private void forceSetException(Throwable throwable) {
    this.throwable = throwable;
    this.state = State.FAILURE;
    notifyAndClearListeners();
  }

  protected boolean set(V value) {
    if (!state.permitsPublicUserToTransitionTo(State.VALUE)) {
      return false;
    }
    forceSet(value);
    return true;
  }

  private void forceSet(V value) {
    this.value = value;
    this.state = State.VALUE;
    notifyAndClearListeners();
  }

  protected boolean setFuture(ListenableFuture<? extends V> future) {
    checkNotNull(future);

    // If this future is already cancelled, cancel the delegate.
    // TODO(cpovirk): Should we do this at the end of the method, as in the server version?
    // TODO(cpovirk): Use maybePropagateCancellationTo?
    if (isCancelled()) {
      future.cancel(mayInterruptIfRunning);
    }

    if (!state.permitsPublicUserToTransitionTo(State.DELEGATED)) {
      return false;
    }

    state = State.DELEGATED;
    this.delegate = future;

    future.addListener(new SetFuture(future), directExecutor());
    return true;
  }

  protected final boolean wasInterrupted() {
    return mayInterruptIfRunning;
  }

  private void notifyAndClearListeners() {
    afterDone();
    // TODO(lukes): consider adding the StackOverflowError protection from the server version
    // TODO(cpovirk): consider clearing this.delegate
    for (Listener listener : listeners) {
      listener.execute();
    }
    listeners = null;
  }

  protected void afterDone() {}

  @Override
  protected final Throwable tryInternalFastPathGetFailure() {
    if (this instanceof Trusted) {
      return state == State.FAILURE ? throwable : null;
    }
    return null;
  }

  final Throwable trustedGetException() {
    checkState(state == State.FAILURE);
    return throwable;
  }

  final void maybePropagateCancellationTo(@Nullable Future<?> related) {
    // Non-short-circuit '&' is deliberate: both operands are cheap and branch-free.
    if (related != null & isCancelled()) {
      related.cancel(wasInterrupted());
    }
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder().append(super.toString()).append("[status=");
    if (isCancelled()) {
      builder.append("CANCELLED");
    } else if (isDone()) {
      addDoneString(builder);
    } else {
      String pendingDescription;
      try {
        pendingDescription = pendingToString();
      } catch (RuntimeException e) {
        // Don't call getMessage or toString() on the exception, in case the exception thrown by the
        // subclass is implemented with bugs similar to the subclass.
        pendingDescription = "Exception thrown from implementation: " + e.getClass();
      }
      // The future may complete during or before the call to getPendingToString, so we use null
      // as a signal that we should try checking if the future is done again.
      if (!isNullOrEmpty(pendingDescription)) {
        builder.append("PENDING, info=[").append(pendingDescription).append("]");
      } else if (isDone()) {
        addDoneString(builder);
      } else {
        builder.append("PENDING");
      }
    }
    return builder.append("]").toString();
  }

  /**
   * Provide a human-readable explanation of why this future has not yet completed.
   *
   * @return null if an explanation cannot be provided because the future is done.
   */
  @Nullable
  String pendingToString() {
    if (state == State.DELEGATED) {
      return "setFuture=[" + delegate + "]";
    }
    return null;
  }

  private void addDoneString(StringBuilder builder) {
    try {
      V value = getDone(this);
      builder.append("SUCCESS, result=[").append(value).append("]");
    } catch (ExecutionException e) {
      builder.append("FAILURE, cause=[").append(e.getCause()).append("]");
    } catch (CancellationException e) {
      builder.append("CANCELLED");
    } catch (RuntimeException e) {
      builder.append("UNKNOWN, cause=[").append(e.getClass()).append(" thrown from get()]");
    }
  }

  /**
   * Completion state. Base-class behavior describes a successfully completed future (done, not
   * cancelled, get() returns); each non-terminal or exceptional constant overrides what differs.
   */
  private enum State {
    PENDING {
      @Override
      boolean isDone() {
        return false;
      }

      @Override
      void maybeThrowOnGet(Throwable cause) throws ExecutionException {
        throw new IllegalStateException("Cannot get() on a pending future.");
      }

      @Override
      boolean permitsPublicUserToTransitionTo(State state) {
        return !state.equals(PENDING);
      }
    },
    DELEGATED {
      @Override
      boolean isDone() {
        return false;
      }

      @Override
      void maybeThrowOnGet(Throwable cause) throws ExecutionException {
        throw new IllegalStateException("Cannot get() on a pending future.");
      }

      @Override
      boolean permitsPublicUserToTransitionTo(State state) {
        return state.equals(CANCELLED);
      }
    },
    VALUE,
    FAILURE {
      @Override
      void maybeThrowOnGet(Throwable cause) throws ExecutionException {
        throw new ExecutionException(cause);
      }
    },
    CANCELLED {
      @Override
      boolean isCancelled() {
        return true;
      }

      @Override
      void maybeThrowOnGet(Throwable cause) throws ExecutionException {
        // TODO(cpovirk): chain in a CancellationException created at the cancel() call?
        throw new CancellationException();
      }
    };

    boolean isDone() {
      return true;
    }

    boolean isCancelled() {
      return false;
    }

    void maybeThrowOnGet(Throwable cause) throws ExecutionException {}

    boolean permitsPublicUserToTransitionTo(State state) {
      return false;
    }
  }

  /** A (runnable, executor) pair; execute() submits the runnable, logging any rejection. */
  private static final class Listener {
    final Runnable command;
    final Executor executor;

    Listener(Runnable command, Executor executor) {
      this.command = checkNotNull(command);
      this.executor = checkNotNull(executor);
    }

    void execute() {
      try {
        executor.execute(command);
      } catch (RuntimeException e) {
        log.log(
            Level.SEVERE,
            "RuntimeException while executing runnable " + command + " with executor " + executor,
            e);
      }
    }
  }

  /** Completion callback installed on the delegate by setFuture(); copies its outcome here. */
  private final class SetFuture implements Runnable {
    final ListenableFuture<? extends V> delegate;

    SetFuture(ListenableFuture<? extends V> delegate) {
      this.delegate = delegate;
    }

    @Override
    public void run() {
      if (isCancelled()) {
        return;
      }

      if (delegate instanceof AbstractFuture) {
        AbstractFuture<? extends V> other = (AbstractFuture<? extends V>) delegate;
        value = other.value;
        throwable = other.throwable;
        // don't copy the mayInterruptIfRunning bit, for consistency with the server, to ensure that
        // interruptTask() is called if and only if the bit is true and because we cannot infer the
        // interrupt status from non AbstractFuture futures.
        state = other.state;

        notifyAndClearListeners();
        return;
      }

      /*
       * Almost everything in GWT is an AbstractFuture (which is as good as TrustedFuture under
       * GWT). But ImmediateFuture and UncheckedThrowingFuture aren't, so we still need this case.
       */
      try {
        forceSet(getDone(delegate));
      } catch (ExecutionException exception) {
        forceSetException(exception.getCause());
      } catch (CancellationException cancellation) {
        cancel(false);
      } catch (Throwable t) {
        forceSetException(t);
      }
    }
  }
}
/* * PowerAuth Web Flow and related software components * Copyright (C) 2017 Wultra s.r.o. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package io.getlime.security.powerauth.app.nextstep.controller; import io.getlime.core.rest.model.base.request.ObjectRequest; import io.getlime.core.rest.model.base.response.ObjectResponse; import io.getlime.core.rest.model.base.response.Response; import io.getlime.security.powerauth.app.nextstep.converter.OperationConverter; import io.getlime.security.powerauth.app.nextstep.repository.model.entity.OperationEntity; import io.getlime.security.powerauth.app.nextstep.service.MobileTokenConfigurationService; import io.getlime.security.powerauth.app.nextstep.service.OperationConfigurationService; import io.getlime.security.powerauth.app.nextstep.service.OperationPersistenceService; import io.getlime.security.powerauth.app.nextstep.service.StepResolutionService; import io.getlime.security.powerauth.lib.nextstep.model.entity.AuthStep; import io.getlime.security.powerauth.lib.nextstep.model.entity.enumeration.UserAccountStatus; import io.getlime.security.powerauth.lib.nextstep.model.enumeration.AuthMethod; import io.getlime.security.powerauth.lib.nextstep.model.exception.*; import io.getlime.security.powerauth.lib.nextstep.model.request.*; import io.getlime.security.powerauth.lib.nextstep.model.response.*; import 
io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.*; import javax.validation.Valid; import javax.validation.constraints.NotBlank; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import java.util.ArrayList; import java.util.List; /** * REST controller class related to Next Step operations. * * @author Petr Dvorak, petr@wultra.com */ @RestController @Validated public class OperationController { private static final Logger logger = LoggerFactory.getLogger(OperationController.class); private final OperationPersistenceService operationPersistenceService; private final OperationConfigurationService operationConfigurationService; private final StepResolutionService stepResolutionService; private final MobileTokenConfigurationService mobileTokenConfigurationService; private final OperationConverter operationConverter; /** * REST controller constructor. * @param operationPersistenceService Operation persistence service. * @param operationConfigurationService Operation configuration service. * @param stepResolutionService Step resolution service. * @param mobileTokenConfigurationService Mobile token configuration service. * @param operationConverter Operation converter. 
*/ @Autowired public OperationController(OperationPersistenceService operationPersistenceService, OperationConfigurationService operationConfigurationService, StepResolutionService stepResolutionService, MobileTokenConfigurationService mobileTokenConfigurationService, OperationConverter operationConverter) { this.operationPersistenceService = operationPersistenceService; this.operationConfigurationService = operationConfigurationService; this.stepResolutionService = stepResolutionService; this.mobileTokenConfigurationService = mobileTokenConfigurationService; this.operationConverter = operationConverter; } /** * Create a new operation with given name and data. * * @param request Create operation request. * @return Create operation response. * @throws OperationAlreadyExistsException Thrown when operation already exists. * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid. * @throws OrganizationNotFoundException Thrown when organization is not found. */ @Operation(summary = "Create an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation was created"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_ALREADY_EXISTS, INVALID_CONFIGURATION, ORGANIZATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation", method = RequestMethod.POST) public ObjectResponse<CreateOperationResponse> createOperation(@Valid @RequestBody ObjectRequest<CreateOperationRequest> request) throws OperationAlreadyExistsException, InvalidConfigurationException, OrganizationNotFoundException { logger.info("Received createOperation request, operation ID: {}, operation name: {}", request.getRequestObject().getOperationId(), request.getRequestObject().getOperationName()); // resolve response based on dynamic step definitions final CreateOperationResponse response = 
stepResolutionService.resolveNextStepResponse(request.getRequestObject()); // persist new operation operationPersistenceService.createOperation(request.getRequestObject(), response); logger.info("The createOperation request succeeded, operation ID: {}, result: {}", response.getOperationId(), response.getResult().toString()); for (AuthStep step: response.getSteps()) { logger.info("Next authentication method for operation ID: {}, authentication method: {}", response.getOperationId(), step.getAuthMethod().toString()); } return new ObjectResponse<>(response); } /** * Update operation with given ID with a previous authentication step result (PUT method). * * @param request Update operation request. * @return Update operation response. * @throws InvalidRequestException Thrown when request is invalid. * @throws AuthMethodNotFoundException Thrown when authentication method is not found. * @throws OperationAlreadyFinishedException Thrown when operation is already finished. * @throws OperationAlreadyFailedException Thrown when operation is already failed. * @throws OperationAlreadyCanceledException Thrown when operation is already canceled. * @throws OperationNotValidException Thrown when operation is not valid. * @throws OperationNotFoundException Thrown when operation is not found. * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid. * @throws OrganizationNotFoundException Thrown when organization is not found. 
*/ @Operation(summary = "Update an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, INVALID_REQUEST, AUTH_METHOD_NOT_FOUND, OPERATION_ALREADY_FINISHED, OPERATION_ALREADY_FAILED, OPERATION_ALREADY_CANCELED, OPERATION_NOT_VALID, OPERATION_NOT_FOUND, INVALID_CONFIGURATION, ORGANIZATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation", method = RequestMethod.PUT) public ObjectResponse<UpdateOperationResponse> updateOperation(@Valid @RequestBody ObjectRequest<UpdateOperationRequest> request) throws InvalidRequestException, AuthMethodNotFoundException, OperationAlreadyFinishedException, OperationAlreadyFailedException, OperationAlreadyCanceledException, OperationNotValidException, OperationNotFoundException, InvalidConfigurationException, OrganizationNotFoundException { return updateOperationImpl(request); } /** * Update operation with given ID with a previous authentication step result (POST method alternative). * * @param request Update operation request. * @return Update operation response. * @throws InvalidRequestException Thrown when request is invalid. * @throws AuthMethodNotFoundException Thrown when authentication method is not found. * @throws OperationAlreadyFinishedException Thrown when operation is already finished. * @throws OperationAlreadyFailedException Thrown when operation is already failed. * @throws OperationAlreadyCanceledException Thrown when operation is already canceled. * @throws OperationNotValidException Thrown when operation is not valid. * @throws OperationNotFoundException Thrown when operation is not found. * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid. * @throws OrganizationNotFoundException Thrown when organization is not found. 
*/ @Operation(summary = "Update an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, INVALID_REQUEST, AUTH_METHOD_NOT_FOUND, OPERATION_ALREADY_FINISHED, OPERATION_ALREADY_FAILED, OPERATION_ALREADY_CANCELED, OPERATION_NOT_VALID, OPERATION_NOT_FOUND, INVALID_CONFIGURATION, ORGANIZATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/update", method = RequestMethod.POST) public ObjectResponse<UpdateOperationResponse> updateOperationPost(@Valid @RequestBody ObjectRequest<UpdateOperationRequest> request) throws InvalidRequestException, AuthMethodNotFoundException, OperationAlreadyFinishedException, OperationAlreadyFailedException, OperationNotValidException, OperationNotFoundException, InvalidConfigurationException, OperationAlreadyCanceledException, OrganizationNotFoundException { return updateOperationImpl(request); } private ObjectResponse<UpdateOperationResponse> updateOperationImpl(ObjectRequest<UpdateOperationRequest> request) throws OperationAlreadyFinishedException, AuthMethodNotFoundException, OperationAlreadyFailedException, InvalidConfigurationException, OperationNotValidException, OperationNotFoundException, InvalidRequestException, OperationAlreadyCanceledException, OrganizationNotFoundException { logger.info("Received updateOperation request, operation ID: {}", request.getRequestObject().getOperationId()); final UpdateOperationResponse response = operationPersistenceService.updateOperation(request.getRequestObject()); logger.info("The updateOperation request succeeded, operation ID: {}, result: {}", response.getOperationId(), response.getResult().toString()); for (AuthStep step: response.getSteps()) { logger.info("Next authentication method for operation ID: {}, authentication method: {}", response.getOperationId(), 
step.getAuthMethod().toString()); } return new ObjectResponse<>(response); } /** * Assign user ID and organization ID to and operation. * * @param request Update operation user request. * @return Response. * @throws OperationNotFoundException Thrown when operation is not found. * @throws OrganizationNotFoundException Thrown when organization is not found. */ @Operation(summary = "Update user for an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation user was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, ORGANIZATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/user", method = RequestMethod.PUT) public Response updateOperationUser(@Valid @RequestBody ObjectRequest<UpdateOperationUserRequest> request) throws OperationNotFoundException, OrganizationNotFoundException { return updateOperationUserImpl(request); } /** * Assign user ID and organization ID to and operation (POST alternative). * * @param request Update operation user request. * @return Response. * @throws OperationNotFoundException Thrown when operation is not found. * @throws OrganizationNotFoundException Thrown when organization is not found. 
*/ @Operation(summary = "Update user for an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation user was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, ORGANIZATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/user/update", method = RequestMethod.POST) public Response updateOperationUserPost(@Valid @RequestBody ObjectRequest<UpdateOperationUserRequest> request) throws OperationNotFoundException, OrganizationNotFoundException { return updateOperationUserImpl(request); } private Response updateOperationUserImpl(ObjectRequest<UpdateOperationUserRequest> request) throws OperationNotFoundException, OrganizationNotFoundException { final String operationId = request.getRequestObject().getOperationId(); final String userId = request.getRequestObject().getUserId(); final String organizationId = request.getRequestObject().getOrganizationId(); final UserAccountStatus accountStatus = request.getRequestObject().getAccountStatus(); logger.info("Received updateOperationUser request, operation ID: {}, user ID: {}, organization ID: {}, account status: {}", operationId, userId, organizationId, accountStatus); // persist operation user update operationPersistenceService.updateOperationUser(request.getRequestObject()); logger.info("The updateOperationUser request succeeded, operation ID: {}, user ID: {}, organization ID: {}, account status: {}", operationId, userId, organizationId, accountStatus); return new Response(); } /** * Get detail of an operation with given ID. * * @param operationId Operation ID. * @return Get operation detail response. * @throws OperationNotFoundException Thrown when operation does not exist. * @throws OperationNotValidException Thrown when operation is invalid. 
*/ @Operation(summary = "Get operation detail") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation detail sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, OPERATION_NOT_VALID"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/detail", method = RequestMethod.GET) public ObjectResponse<GetOperationDetailResponse> operationDetail(@RequestParam @NotBlank @Size(min = 1, max = 256) String operationId) throws OperationNotFoundException, OperationNotValidException { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received operationDetail request, operation ID: {}", operationId); final OperationEntity operation = operationPersistenceService.getOperation(operationId, true); final GetOperationDetailResponse response = operationConverter.fromEntity(operation); // add steps from current response response.getSteps().addAll(operationPersistenceService.getResponseAuthSteps(operation)); // set number of remaining authentication attempts response.setRemainingAttempts(stepResolutionService.getNumberOfRemainingAttempts(operation)); response.setTimestampCreated(operation.getTimestampCreated()); response.setTimestampExpires(operation.getTimestampExpires()); logger.debug("The operationDetail request succeeded, operation ID: {}", response.getOperationId()); return new ObjectResponse<>(response); } /** * Get detail of an operation with given ID using POST method. * * @param request Get operation detail request. * @return Get operation detail response. * @throws OperationNotFoundException Thrown when operation does not exist. * @throws OperationNotValidException Thrown when operation is invalid. 
*/ @Operation(summary = "Get operation detail") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation detail sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, OPERATION_NOT_VALID"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/detail", method = RequestMethod.POST) public ObjectResponse<GetOperationDetailResponse> operationDetailPost(@Valid @RequestBody ObjectRequest<GetOperationDetailRequest> request) throws OperationNotFoundException, OperationNotValidException { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received operationDetail request, operation ID: {}", request.getRequestObject().getOperationId()); final GetOperationDetailRequest requestObject = request.getRequestObject(); final OperationEntity operation = operationPersistenceService.getOperation(requestObject.getOperationId(), true); final GetOperationDetailResponse response = operationConverter.fromEntity(operation); // add steps from current response response.getSteps().addAll(operationPersistenceService.getResponseAuthSteps(operation)); // set number of remaining authentication attempts response.setRemainingAttempts(stepResolutionService.getNumberOfRemainingAttempts(operation)); response.setTimestampCreated(operation.getTimestampCreated()); response.setTimestampExpires(operation.getTimestampExpires()); logger.debug("The operationDetail request succeeded, operation ID: {}", response.getOperationId()); return new ObjectResponse<>(response); } /** * Get configuration of an operation with given operation name. * * @param operationName Operation name. * @return Get operation configuration response. * @throws OperationConfigNotFoundException Thrown when operation is not configured. 
*/ @Operation(summary = "Get operation configuration detail") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation configuration detail sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_CONFIG_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/config/detail", method = RequestMethod.GET) public ObjectResponse<GetOperationConfigDetailResponse> getOperationConfigDetail(@RequestParam @NotBlank @Size(min = 2, max = 256) String operationName) throws OperationConfigNotFoundException { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received getOperationConfigDetail request, operation name: {}", operationName); final GetOperationConfigDetailResponse response = operationConfigurationService.getOperationConfig(operationName); logger.debug("The getOperationConfigDetail request succeeded, operation name: {}", operationName); return new ObjectResponse<>(response); } /** * Get configuration of an operation with given operation name using POST method. * * @param request Get operation configuration request. * @return Get operation configuration response. * @throws OperationConfigNotFoundException Thrown when operation is not configured. 
*/ @Operation(summary = "Get operation configuration detail") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation configuration detail sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_CONFIG_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/config/detail", method = RequestMethod.POST) public ObjectResponse<GetOperationConfigDetailResponse> getOperationConfigDetailPost(@Valid @RequestBody ObjectRequest<GetOperationConfigDetailRequest> request) throws OperationConfigNotFoundException { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received getOperationConfigDetail request, operation name: {}", request.getRequestObject().getOperationName()); final GetOperationConfigDetailRequest requestObject = request.getRequestObject(); final GetOperationConfigDetailResponse response = operationConfigurationService.getOperationConfig(requestObject.getOperationName()); logger.debug("The getOperationConfigDetail request succeeded, operation name: {}", request.getRequestObject().getOperationName()); return new ObjectResponse<>(response); } /** * Get configurations of all operations. * * @return Get operation configurations response. */ @Operation(summary = "Get operation configuration list") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation configuration list sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/config", method = RequestMethod.GET) public ObjectResponse<GetOperationConfigListResponse> getOperationConfigList() { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. 
logger.debug("Received getOperationConfigList request"); final GetOperationConfigListResponse response = operationConfigurationService.getOperationConfigList(); logger.debug("The getOperationConfigList request succeeded, operation config list size: {}", response.getOperationConfigs().size()); return new ObjectResponse<>(response); } /** * Get configurations of all operations using POST method. * * @param request Get configurations of all operations request. * @return Get operation configurations response. */ @Operation(summary = "Get operation configuration list") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation configuration list sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/config/list", method = RequestMethod.POST) public ObjectResponse<GetOperationConfigListResponse> getOperationConfigListPost(@Valid @RequestBody ObjectRequest<GetOperationConfigListRequest> request) { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received getOperationConfigListPost request"); final GetOperationConfigListResponse response = operationConfigurationService.getOperationConfigList(); logger.debug("The getOperationConfigListPost request succeeded, operation config list size: {}", response.getOperationConfigs().size()); return new ObjectResponse<>(response); } /** * Get the list of pending operations for user. * * @param userId User ID. * @param mobileTokenOnly Whether only operations with mobile token should be returned * @return List with operation details. 
*/ @Operation(summary = "Get pending operation list") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Pending operation list sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "user/operation", method = RequestMethod.GET) public ObjectResponse<List<GetOperationDetailResponse>> getPendingOperations(@RequestParam @NotBlank @Size(min = 1, max = 256) String userId, @RequestParam boolean mobileTokenOnly) { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received getPendingOperations request, user ID: {}", userId); final List<GetOperationDetailResponse> responseList = new ArrayList<>(); final List<OperationEntity> operations = operationPersistenceService.getPendingOperations(userId, mobileTokenOnly); for (OperationEntity operation : operations) { final GetOperationDetailResponse response = operationConverter.fromEntity(operation); responseList.add(response); } logger.debug("The getPendingOperations request succeeded, operation list size: {}", responseList.size()); return new ObjectResponse<>(responseList); } /** * Get the list of pending operations for user using POST method. * * @param request Get pending operations request. * @return List with operation details. 
*/ @Operation(summary = "Get pending operation list") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Pending operation list sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "user/operation/list", method = RequestMethod.POST) public ObjectResponse<List<GetOperationDetailResponse>> getPendingOperationsPost(@Valid @RequestBody ObjectRequest<GetPendingOperationsRequest> request) { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received getPendingOperationsPost request, user ID: {}", request.getRequestObject().getUserId()); final GetPendingOperationsRequest requestObject = request.getRequestObject(); final List<GetOperationDetailResponse> responseList = new ArrayList<>(); final List<OperationEntity> operations = operationPersistenceService.getPendingOperations(requestObject.getUserId(), requestObject.isMobileTokenOnly()); for (OperationEntity operation : operations) { final GetOperationDetailResponse response = operationConverter.fromEntity(operation); responseList.add(response); } logger.debug("The getPendingOperationsPost request succeeded, operation list size: {}", responseList.size()); return new ObjectResponse<>(responseList); } /** * Lookup operations for given external transaction ID. * * @param request Lookup operations by external transaction ID request. * @return Response for operations lookup by external transaction ID. 
*/ @Operation(summary = "Lookup operations by external ID") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation list sent in response"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/lookup/external", method = RequestMethod.POST) public ObjectResponse<LookupOperationsByExternalIdResponse> lookupOperationsByExternalId(@Valid @RequestBody ObjectRequest<LookupOperationsByExternalIdRequest> request) { // Log level is FINE to avoid flooding logs, this endpoint is used all the time. logger.debug("Received lookupOperationsByExternalId request, external transaction ID: {}", request.getRequestObject().getExternalTransactionId()); final LookupOperationsByExternalIdRequest requestObject = request.getRequestObject(); final LookupOperationsByExternalIdResponse response = new LookupOperationsByExternalIdResponse(); final List<OperationEntity> operations = operationPersistenceService.findByExternalTransactionId(requestObject.getExternalTransactionId()); for (OperationEntity operation : operations) { final GetOperationDetailResponse operationDetail = operationConverter.fromEntity(operation); response.getOperations().add(operationDetail); } logger.debug("The lookupOperationsByExternalId request succeeded, operation list size: {}", response.getOperations().size()); return new ObjectResponse<>(response); } /** * Update operation with updated form data (PUT method). * * @param request Update operation request. * @return Update operation response. * @throws OperationNotFoundException Thrown when operation is not found. 
*/ @Operation(summary = "Update operation form data") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation form data was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/formData", method = RequestMethod.PUT) public Response updateOperationFormData(@Valid @RequestBody ObjectRequest<UpdateFormDataRequest> request) throws OperationNotFoundException { return updateOperationFormDataImpl(request); } /** * Update operation with updated form data (POST method alternative). * * @param request Update operation request. * @return Update operation response. * @throws OperationNotFoundException Thrown when operation is not found. */ @Operation(summary = "Update operation form data") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Operation form data was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/formData/update", method = RequestMethod.POST) public Response updateOperationFormDataPost(@Valid @RequestBody ObjectRequest<UpdateFormDataRequest> request) throws OperationNotFoundException { return updateOperationFormDataImpl(request); } private Response updateOperationFormDataImpl(ObjectRequest<UpdateFormDataRequest> request) throws OperationNotFoundException { logger.info("Received updateOperationFormData request, operation ID: {}", request.getRequestObject().getOperationId()); // persist operation form data update operationPersistenceService.updateFormData(request.getRequestObject()); logger.debug("The updateOperationFormData request succeeded"); return new Response(); } /** * Update operation with chosen authentication method 
(PUT method).
 * @param request Update operation request.
 * @return Update operation response.
 * @throws OperationNotFoundException Thrown when operation is not found.
 * @throws InvalidRequestException Thrown when request is invalid.
 * @throws OperationNotValidException Thrown when operation is invalid.
 */
@Operation(summary = "Update chosen authentication method for an operation")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Chosen authentication method was updated"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, INVALID_REQUEST, OPERATION_NOT_VALID"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/chosenAuthMethod", method = RequestMethod.PUT)
public Response updateChosenAuthMethod(@Valid @RequestBody ObjectRequest<UpdateChosenAuthMethodRequest> request) throws OperationNotFoundException, InvalidRequestException, OperationNotValidException {
    // Thin wrapper: shared logic lives in updateChosenAuthMethodImpl, reused by the POST variant.
    return updateChosenAuthMethodImpl(request);
}

/**
 * Update operation with chosen authentication method (POST method alternative).
 * @param request Update operation request.
 * @return Update operation response.
 * @throws OperationNotFoundException Thrown when operation is not found.
 * @throws InvalidRequestException Thrown when request is invalid.
 * @throws OperationNotValidException Thrown when operation is invalid.
*/ @Operation(summary = "Update chosen authentication method for an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Chosen authentication method was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, INVALID_REQUEST, OPERATION_NOT_VALID"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/chosenAuthMethod/update", method = RequestMethod.POST) public Response updateChosenAuthMethodPost(@Valid @RequestBody ObjectRequest<UpdateChosenAuthMethodRequest> request) throws OperationNotFoundException, InvalidRequestException, OperationNotValidException { return updateChosenAuthMethodImpl(request); } private Response updateChosenAuthMethodImpl(ObjectRequest<UpdateChosenAuthMethodRequest> request) throws OperationNotFoundException, InvalidRequestException, OperationNotValidException { logger.info("Received updateChosenAuthMethod request, operation ID: {}, chosen authentication method: {}", request.getRequestObject().getOperationId(), request.getRequestObject().getChosenAuthMethod().toString()); // persist chosen auth method update operationPersistenceService.updateChosenAuthMethod(request.getRequestObject()); logger.debug("The updateChosenAuthMethod request succeeded"); return new Response(); } /** * Update mobile token status for an operation (PUT method). * @param request Update operation request. * @return Update operation response. * @throws OperationNotFoundException Thrown when operation is not found. * @throws OperationNotValidException Thrown when operation is not valid. * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid. 
*/
@Operation(summary = "Update mobile token status for an operation")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Mobile token status was updated"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, OPERATION_NOT_VALID, INVALID_CONFIGURATION"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/mobileToken/status", method = RequestMethod.PUT)
public Response updateMobileToken(@Valid @RequestBody ObjectRequest<UpdateMobileTokenRequest> request) throws OperationNotFoundException, OperationNotValidException, InvalidConfigurationException {
    // Thin wrapper: shared logic lives in updateMobileTokenImpl, reused by the POST variant.
    return updateMobileTokenImpl(request);
}

/**
 * Update mobile token status for an operation (POST method alternative).
 * @param request Update operation request.
 * @return Update operation response.
 * @throws OperationNotFoundException Thrown when operation is not found.
 * @throws OperationNotValidException Thrown when operation is not valid.
 * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid.
*/ @Operation(summary = "Update mobile token status for an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Mobile token status was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND, OPERATION_NOT_VALID, INVALID_CONFIGURATION"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/mobileToken/status/update", method = RequestMethod.POST) public @ResponseBody Response updateMobileTokenPost(@Valid @RequestBody ObjectRequest<UpdateMobileTokenRequest> request) throws OperationNotFoundException, OperationNotValidException, InvalidConfigurationException { return updateMobileTokenImpl(request); } private Response updateMobileTokenImpl(ObjectRequest<UpdateMobileTokenRequest> request) throws OperationNotFoundException, OperationNotValidException, InvalidConfigurationException { logger.info("Received updateMobileToken request, operation ID: {}, mobile token active: {}", request.getRequestObject().getOperationId(), request.getRequestObject().isMobileTokenActive()); // persist mobile token update operationPersistenceService.updateMobileToken(request.getRequestObject()); logger.debug("The updateMobileToken request succeeded"); return new Response(); } /** * Get mobile token configuration. * @param userId User ID. * @param operationName Operation name. * @param authMethod Authentication method. * @return Get mobile token configuration response. * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid. 
*/
@Operation(summary = "Get mobile token configuration")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Mobile token configuration sent in response"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, INVALID_CONFIGURATION"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/mobileToken/config/detail", method = RequestMethod.GET)
public ObjectResponse<GetMobileTokenConfigResponse> getMobileTokenConfig(@RequestParam @NotBlank @Size(min = 1, max = 256) String userId, @RequestParam @NotBlank @Size(min = 2, max = 256) String operationName, @RequestParam @NotNull AuthMethod authMethod) throws InvalidConfigurationException {
    logger.info("Received getMobileTokenConfig request, user ID: {}, operation name: {}, authentication method: {}", userId, operationName, authMethod);
    final GetMobileTokenConfigResponse response = new GetMobileTokenConfigResponse();
    response.setMobileTokenEnabled(mobileTokenConfigurationService.isMobileTokenActive(userId, operationName, authMethod));
    logger.debug("The getMobileTokenConfig request succeeded");
    return new ObjectResponse<>(response);
}

/**
 * Get mobile token configuration (POST variant).
 * @param request Get mobile token configuration request.
 * @return Get mobile token configuration response.
 * @throws InvalidConfigurationException Thrown when Next Step configuration is invalid.
 */
@Operation(summary = "Get mobile token configuration")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Mobile token configuration sent in response"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, INVALID_CONFIGURATION"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/mobileToken/config/detail", method = RequestMethod.POST)
public ObjectResponse<GetMobileTokenConfigResponse> getMobileTokenConfigPost(@Valid @RequestBody ObjectRequest<GetMobileTokenConfigRequest> request) throws InvalidConfigurationException {
    final String userId = request.getRequestObject().getUserId();
    final String operationName = request.getRequestObject().getOperationName();
    final AuthMethod authMethod = request.getRequestObject().getAuthMethod();
    logger.info("Received getMobileTokenConfigPost request, user ID: {}, operation name: {}, authentication method: {}", userId, operationName, authMethod);
    final GetMobileTokenConfigResponse response = new GetMobileTokenConfigResponse();
    response.setMobileTokenEnabled(mobileTokenConfigurationService.isMobileTokenActive(userId, operationName, authMethod));
    logger.debug("The getMobileTokenConfigPost request succeeded");
    return new ObjectResponse<>(response);
}

/**
 * Update application context for an operation (PUT method).
 * @param request Update application context request.
 * @return Response.
 * @throws OperationNotFoundException Thrown when operation is not found.
*/ @Operation(summary = "Update application context for an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Application context was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/application", method = RequestMethod.PUT) public Response updateApplicationContext(@Valid @RequestBody ObjectRequest<UpdateApplicationContextRequest> request) throws OperationNotFoundException { return updateApplicationContextImpl(request); } /** * Update application context for an operation (POST method alternative). * @param request Update application context request. * @return Response. * @throws OperationNotFoundException Thrown when operation is not found. */ @Operation(summary = "Update application context for an operation") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Application context was updated"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_NOT_FOUND"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/application/update", method = RequestMethod.POST) public Response updateApplicationContextPost(@Valid @RequestBody ObjectRequest<UpdateApplicationContextRequest> request) throws OperationNotFoundException { return updateApplicationContextImpl(request); } /** * Create an AFS action and store it in Next Step. * @param request Create AFS action request. * @return Response. 
*/ @Operation(summary = "Create an AFS action") @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "AFS action was created"), @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED"), @ApiResponse(responseCode = "500", description = "Unexpected error") }) @RequestMapping(value = "operation/afs/action", method = RequestMethod.POST) public Response createAfsAction(@Valid @RequestBody ObjectRequest<CreateAfsActionRequest> request) { final CreateAfsActionRequest afsRequest = request.getRequestObject(); logger.info("Received createAfsAction request, operation ID: {}, AFS action: {}", afsRequest.getOperationId(), afsRequest.getAfsAction()); // persist AFS action for operation operationPersistenceService.createAfsAction(afsRequest); logger.debug("The createAfsAction request succeeded"); return new Response(); } private Response updateApplicationContextImpl(ObjectRequest<UpdateApplicationContextRequest> request) throws OperationNotFoundException { logger.info("Received updateApplicationContext request, operation ID: {}", request.getRequestObject().getOperationId()); // persist application context update operationPersistenceService.updateApplicationContext(request.getRequestObject()); logger.debug("The updateApplicationContext request succeeded"); return new Response(); } /** * Create an operation configuration. * @param request Create operation configuration request. * @return Create operation configuration response. * @throws OperationConfigAlreadyExists Thrown when operation configuration already exists. 
*/
@Operation(summary = "Create an operation configuration")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Operation configuration was created"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_CONFIG_ALREADY_EXISTS"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/config", method = RequestMethod.POST)
public ObjectResponse<CreateOperationConfigResponse> createOperationConfig(@Valid @RequestBody ObjectRequest<CreateOperationConfigRequest> request) throws OperationConfigAlreadyExists {
    final String operationName = request.getRequestObject().getOperationName();
    logger.info("Received createOperationConfig request, operation name: {}", operationName);
    final CreateOperationConfigResponse response = operationConfigurationService.createOperationConfig(request.getRequestObject());
    logger.info("The createOperationConfig request succeeded, operation name: {}", operationName);
    return new ObjectResponse<>(response);
}

/**
 * Delete an operation configuration.
 * @param request Delete operation configuration request.
 * @return Delete operation configuration response.
 * @throws OperationConfigNotFoundException Thrown when operation configuration is not found.
 * @throws DeleteNotAllowedException Thrown when delete action is not allowed.
 */
@Operation(summary = "Delete an operation configuration")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Operation configuration was deleted"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_CONFIG_NOT_FOUND, DELETE_NOT_ALLOWED"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/config/delete", method = RequestMethod.POST)
public ObjectResponse<DeleteOperationConfigResponse> deleteOperationConfig(@Valid @RequestBody ObjectRequest<DeleteOperationConfigRequest> request) throws OperationConfigNotFoundException, DeleteNotAllowedException {
    final String operationName = request.getRequestObject().getOperationName();
    logger.info("Received deleteOperationConfig request, operation name: {}", operationName);
    final DeleteOperationConfigResponse response = operationConfigurationService.deleteOperationConfig(request.getRequestObject());
    logger.info("The deleteOperationConfig request succeeded, operation name: {}", operationName);
    return new ObjectResponse<>(response);
}

/**
 * Create a configuration for authentication method by operation name.
 * @param request Create operation and authentication method configuration request.
 * @return Create operation and authentication method configuration response.
 * @throws OperationMethodConfigAlreadyExists Thrown when operation and authentication method configuration already exists.
 * @throws OperationConfigNotFoundException Thrown when operation configuration is not found.
 * @throws AuthMethodNotFoundException Thrown when authentication method is not found.
*/
@Operation(summary = "Create an operation and authentication method configuration")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Operation and authentication method configuration was created"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_METHOD_CONFIG_ALREADY_EXISTS, OPERATION_CONFIG_NOT_FOUND, AUTH_METHOD_NOT_FOUND"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/auth-method/config", method = RequestMethod.POST)
public ObjectResponse<CreateOperationMethodConfigResponse> createOperationMethodConfig(@Valid @RequestBody ObjectRequest<CreateOperationMethodConfigRequest> request) throws OperationMethodConfigAlreadyExists, OperationConfigNotFoundException, AuthMethodNotFoundException {
    final CreateOperationMethodConfigRequest requestObject = request.getRequestObject();
    logger.info("Received createOperationMethodConfig request, operation name: {}, authentication method: {}", requestObject.getOperationName(), requestObject.getAuthMethod());
    final CreateOperationMethodConfigResponse response = operationConfigurationService.createOperationMethodConfig(requestObject);
    logger.info("The createOperationMethodConfig request succeeded, operation name: {}, authentication method: {}", requestObject.getOperationName(), requestObject.getAuthMethod());
    return new ObjectResponse<>(response);
}

/**
 * Get a configuration for operation and authentication method.
 * @param operationName Operation name.
 * @param authMethod Authentication method.
 * @return Get operation and authentication method config detail response.
 * @throws OperationMethodConfigNotFoundException Thrown when operation and authentication method configuration is not found.
*/
@Operation(summary = "Get an operation and authentication method configuration detail")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Operation and authentication method configuration detail sent in response"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_METHOD_CONFIG_NOT_FOUND"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/auth-method/config/detail", method = RequestMethod.GET)
public ObjectResponse<GetOperationMethodConfigDetailResponse> getOperationMethodConfigDetail(@RequestParam @NotBlank @Size(min = 2, max = 256) String operationName, @RequestParam @NotNull AuthMethod authMethod) throws OperationMethodConfigNotFoundException {
    logger.info("Received getOperationMethodConfigDetail request, operation name: {}, authentication method: {}", operationName, authMethod);
    // Wrap the query parameters into the request object expected by the service layer.
    final GetOperationMethodConfigDetailRequest detailRequest = new GetOperationMethodConfigDetailRequest();
    detailRequest.setOperationName(operationName);
    detailRequest.setAuthMethod(authMethod);
    final GetOperationMethodConfigDetailResponse response = operationConfigurationService.getOperationMethodConfigDetail(detailRequest);
    logger.info("The getOperationMethodConfigDetail request succeeded, operation name: {}, authentication method: {}", operationName, authMethod);
    return new ObjectResponse<>(response);
}

/**
 * Get a configuration for operation and authentication method (POST variant).
 * @param request Get operation and authentication method config detail request.
 * @return Get operation and authentication method config detail response.
 * @throws OperationMethodConfigNotFoundException Thrown when operation and authentication method configuration is not found.
 */
@Operation(summary = "Get an operation and authentication method configuration detail")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Operation and authentication method configuration detail sent in response"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_METHOD_CONFIG_NOT_FOUND"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/auth-method/config/detail", method = RequestMethod.POST)
public ObjectResponse<GetOperationMethodConfigDetailResponse> getOperationMethodConfigDetailPost(@Valid @RequestBody ObjectRequest<GetOperationMethodConfigDetailRequest> request) throws OperationMethodConfigNotFoundException {
    final GetOperationMethodConfigDetailRequest requestObject = request.getRequestObject();
    logger.info("Received getOperationMethodConfigDetailPost request, operation name: {}, authentication method: {}", requestObject.getOperationName(), requestObject.getAuthMethod());
    final GetOperationMethodConfigDetailResponse response = operationConfigurationService.getOperationMethodConfigDetail(requestObject);
    logger.info("The getOperationMethodConfigDetailPost request succeeded, operation name: {}, authentication method: {}", requestObject.getOperationName(), requestObject.getAuthMethod());
    return new ObjectResponse<>(response);
}

/**
 * Delete a configuration for operation and authentication method.
 * @param request Delete operation and authentication method config request.
 * @return Delete operation and authentication method config response.
 * @throws OperationMethodConfigNotFoundException Thrown when operation and authentication method configuration is not found.
*/
@Operation(summary = "Delete an operation and authentication method configuration detail")
@ApiResponses(value = {
        @ApiResponse(responseCode = "200", description = "Operation and authentication method configuration detail was deleted"),
        @ApiResponse(responseCode = "400", description = "Invalid request, error codes: REQUEST_VALIDATION_FAILED, OPERATION_METHOD_CONFIG_NOT_FOUND"),
        @ApiResponse(responseCode = "500", description = "Unexpected error")
})
@RequestMapping(value = "operation/auth-method/config/delete", method = RequestMethod.POST)
public ObjectResponse<DeleteOperationMethodConfigResponse> deleteOperationMethodConfig(@Valid @RequestBody ObjectRequest<DeleteOperationMethodConfigRequest> request) throws OperationMethodConfigNotFoundException {
    final DeleteOperationMethodConfigRequest requestObject = request.getRequestObject();
    logger.info("Received deleteOperationMethodConfig request, operation name: {}, authentication method: {}", requestObject.getOperationName(), requestObject.getAuthMethod());
    final DeleteOperationMethodConfigResponse response = operationConfigurationService.deleteOperationMethodConfig(requestObject);
    logger.info("The deleteOperationMethodConfig request succeeded, operation name: {}, authentication method: {}", requestObject.getOperationName(), requestObject.getAuthMethod());
    return new ObjectResponse<>(response);
}

}
package com.btj10.io.xml.object;

import com.btj10.io.xml.*;
import com.btj10.io.xml.array.XMLArray;
import com.btj10.io.xml.enums.XMLValueTypeEnum;
import com.btj10.io.xml.properties.AbstractXMLDataStructure;
import com.btj10.io.xml.properties.IXMLValue;
import com.btj10.io.xml.properties.XMLAttribute;
import com.btj10.io.xml.properties.XMLComment;
import com.btj10.io.xml.string.XMLString;
import com.btj10.io.xml.string.XMLStringArray;
import com.btj10.io.xml.utils.XMLStatics;
import com.btj10.io.xml.utils.XMLUtils;

import java.util.*;

/**
 * XMLObject is the main class for creating and parsing XML data. XMLObject enables the
 * user to modify, add and remove all data inside of its class before the XML is generated.
 * Adding, replacing, and removing data from the XMLObject is similar to that of a LinkedHashMap.
 *
 * A unique feature is the ability to use Dot Notation (for get() and replace() functions) to access
 * data inside of the XMLObject. For example:
 * If the structure of the XMLObject looked like this:<br>
 * &lt;root&gt;<br>
 * &lt;subroot&gt;<br>
 * &lt;tag1&gt;value1&lt;/tag1&gt;<br>
 * &lt;tag2&gt;value2&lt;/tag2&gt;<br>
 * &lt;/subroot&gt;<br>
 * &lt;/root&gt;<br>
 * The XML Element <code>tag1</code> can be accessed by using the get() function
 * <code>XMLString tag1String = xmlObject.get("subroot.tag1");</code>
 *
 * The put() and remove() functions do not use Dot Notation, however. To add or remove an IXMLValue that is
 * one or more levels down into an XMLObject, first access the IXMLValue that you will modify and
 * then apply the function to the IXMLValue.
 * For example:
 * Using the above example structure, to add another XML Element to <code>subroot</code>, first access
 * <code>subroot</code> by getting the subroot object:<br>
 * <code>XMLObject subrootObject = xmlObject.get("subroot")</code>
 * From there, add a new IXMLValue (e.g. Element Name - tag3, Value - value3)<br>
 * <code>subrootObject.put("tag3", new XMLString("tag3", "value3"));</code>
 * OR
 * <code>subrootObject.put("tag3", "value3");</code>
 */
public class XMLObject extends AbstractXMLDataStructure<LinkedHashMap<String, IXMLValue>> {

    /**
     * Constructor. Creates an empty XMLObject backed by an insertion-ordered map,
     * so elements are rendered in the order they were added.
     */
    public XMLObject() {
        super(new LinkedHashMap<String, IXMLValue>());
    }

    /**
     * Constructor.
     * @param map Map object that contains data
     */
    public XMLObject(Map map) {
        this(map, (XMLAttribute[]) null);
    }

    /**
     * Constructor.
     * @param map Map object that contains data
     * @param xmlAttributes XML attributes to be added to this XMLObject.
     */
    public XMLObject(Map map, XMLAttribute... xmlAttributes) {
        this();
        for (Object key : map.keySet()) {
            this.put(key.toString(), map.get(key));
        }
        if (xmlAttributes != null) {
            this.addXMLAttribute(xmlAttributes);
        }
    }

    /**
     * Put a IXMLValue object into this XMLObject based on a String value.
     *
     * @param key The String associated with the IXMLValue
     * @param value IXMLValue being added to this XMLObject
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, IXMLValue value) {
        return this.put(key, value, (XMLAttribute[]) null);
    }

    /**
     * Put a IXMLValue object into this XMLObject based on a String value.
     *
     * @param key The String associated with the IXMLValue
     * @param value IXMLValue being added to this XMLObject
     * @param xmlAttributes array of XML Attributes that can be added to the IXMLValue
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, IXMLValue value, XMLAttribute... xmlAttributes) {
        if (value == null) {
            // Delegate to the String overload so a null value is stored as an empty XMLString.
            return this.put(key, (String) null, xmlAttributes);
        }
        switch (value.getType()) {
            case OBJECT:
                // Container types take the key as their root element name.
                XMLObject xmlObject = (XMLObject) value;
                xmlObject.setRootElementName(key);
                if (xmlAttributes != null) {
                    xmlObject.addXMLAttribute(xmlAttributes);
                }
                return getDataStructure().put(key, xmlObject);
            case ARRAY:
                XMLArray xmlArray = (XMLArray) value;
                xmlArray.setRootElementName(key);
                if (xmlAttributes != null) {
                    xmlArray.addXMLAttribute(xmlAttributes);
                }
                return getDataStructure().put(key, xmlArray);
            case STRING:
                XMLString xmlString = (XMLString) value;
                if (xmlAttributes != null) {
                    xmlString.addXMLAttribute(xmlAttributes);
                }
                return getDataStructure().put(key, xmlString);
            case STRING_ARRAY:
                XMLStringArray xmlStringArray = (XMLStringArray) value;
                if (xmlAttributes != null) {
                    xmlStringArray.addXMLAttribute(xmlAttributes);
                }
                return getDataStructure().put(key, xmlStringArray);
        }
        // value is of XMLComment if it gets to this return; comments never carry attributes.
        return getDataStructure().put(key, value);
    }

    /**
     * Create an XMLString or XMLComment and put value into this XMLObject based on a String value.
     * Note: If key contains an exclamation (!) mark, the IXMLValue will be treated as an XMLComment.
     * @param key The String associated with the IXMLValue being added
     * @param value IXMLValue being added to this XMLObject
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, String value) {
        return this.put(key, value, (XMLAttribute[]) null);
    }

    /**
     * Create an XMLString or XMLComment and put value into this XMLObject based on a String value.
     * Note: If key contains an exclamation (!) mark, the IXMLValue will be treated as an XMLComment.
     * @param key The String associated with the IXMLValue being added
     * @param value IXMLValue being added to this XMLObject
     * @param xmlAttributes array of XML Attributes that can be added to the IXMLValue. Note: If
     *                      IXMLValue is of type XMLComment, the array of XMLAttributes are IGNORED.
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, String value, XMLAttribute... xmlAttributes) {
        if (key.contains(XMLStatics.XML_COMMENT_INDICATOR)) {
            return getDataStructure().put(key, new XMLComment(value));
        }
        return getDataStructure().put(key, new XMLString(key, value, xmlAttributes));
    }

    /**
     * Create an XMLStringArray and put value into this XMLObject based on a String value.
     * Note: If key contains an exclamation (!) mark, the symbol will be removed as it is reserved
     * for XMLComment objects only.
     * @param key The String associated with the IXMLValue being added
     * @param values array of String values that will added to this XMLObject.
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, String... values) {
        return this.put(key, values, (XMLAttribute[]) null);
    }

    /**
     * Create an XMLStringArray and put value into this XMLObject based on a String value.
     * Note: If key contains an exclamation (!) mark, the symbol will be removed as it is reserved
     * for XMLComment objects only.
     * @param key The String associated with the IXMLValue being added
     * @param values array of String values that will added to this XMLObject.
     * @param xmlAttributes array of XML Attributes that can be added to the IXMLValue
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, String[] values, XMLAttribute... xmlAttributes) {
        // remove comment indicator just in case (should not be allowed in this function)
        key = key.replace(XMLStatics.XML_COMMENT_INDICATOR, XMLStatics.EMPTY_STRING);
        return getDataStructure().put(key, new XMLStringArray(key, values, xmlAttributes));
    }

    /**
     * Creates an XMLObject and puts value into this XMLObject based on a String value.
     * Note: If key contains an exclamation (!) mark, the symbol will be removed as it is reserved
     * for XMLComment objects only.
     * @param key The String associated with the IXMLValue being added
     * @param map Map object that will be used to create a new XMLObject instance.
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, Map map) {
        return this.put(key, map, (XMLAttribute[]) null);
    }

    /**
     * Creates an XMLObject and puts value into this XMLObject based on a String value.
     * Note: If key contains an exclamation (!) mark, the symbol will be removed as it is reserved
     * for XMLComment objects only.
     * @param key The String associated with the IXMLValue being added
     * @param map Map object that will be used to create a new XMLObject instance.
     * @param xmlAttributes array of XML Attributes that can be added to the IXMLValue
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, Map map, XMLAttribute... xmlAttributes) {
        // remove comment indicator just in case (should not be allowed in this function)
        key = key.replace(XMLStatics.XML_COMMENT_INDICATOR, XMLStatics.EMPTY_STRING);
        XMLObject xmlObject = new XMLObject();
        xmlObject.setRootElementName(key);
        for (Object mapKey : map.keySet()) {
            xmlObject.put(mapKey.toString(), map.get(mapKey));
        }
        if (xmlAttributes != null) {
            xmlObject.addXMLAttribute(xmlAttributes);
        }
        return this.put(key, xmlObject);
    }

    /**
     * Creates the appropriate IXMLValue based on the value's class and puts value into this XMLObject based on a String value.
     * @param key The String associated with the IXMLValue being added
     * @param value New value that will be added to this XMLObject
     * @return the IXMLValue that was added to this XMLObject
     */
    public IXMLValue put(String key, Object value) {
        return this.put(key, value, (XMLAttribute[]) null);
    }

    /**
     * Creates the appropriate IXMLValue based on the value's class and puts value into this XMLObject based on a String value.
     * @param key The String associated with the IXMLValue being added
     * @param value New value that will be added to this XMLObject
     * @param xmlAttributes array of XML Attributes that can be added to the IXMLValue
     * @return the IXMLValue that was added to this XMLObject
     * @throws XMLUnsupportedTypeException if the value's class cannot be mapped to an IXMLValue type
     */
    public IXMLValue put(String key, Object value, XMLAttribute... xmlAttributes) {
        if (value == null) {
            return this.put(key, new XMLString(key, null), xmlAttributes);
        }
        if (value instanceof IXMLValue) {
            return this.put(key, (IXMLValue) value, xmlAttributes);
        }
        XMLValueTypeEnum xmlValueTypeEnum = XMLValueTypeEnum.findXMLValueBasedonClass(value.getClass());
        if (xmlValueTypeEnum != null) {
            switch (xmlValueTypeEnum) {
                case STRING:
                    if (key.contains(XMLStatics.XML_COMMENT_INDICATOR)) {
                        return this.put(key, new XMLComment(value.toString()));
                    }
                    return this.put(key, new XMLString(key, value.toString()), xmlAttributes);
                case STRING_ARRAY:
                    return this.put(key, new XMLStringArray(key, (String[]) value), xmlAttributes);
                case OBJECT:
                    return this.put(key, (Map) value, xmlAttributes);
                case ARRAY:
                    if (value instanceof ArrayList) {
                        ArrayList<Object> valueArray = (ArrayList) value;
                        XMLArray xmlArray = new XMLArray();
                        // Indexed loop: indexOf() inside the loop was O(n^2) and, for equal
                        // duplicate elements, always returned the FIRST occurrence, producing
                        // colliding element names. The element's actual position is used instead.
                        for (int i = 0; i < valueArray.size(); i++) {
                            Object object = valueArray.get(i);
                            if (object instanceof HashMap) {
                                HashMap innerHashMap = (HashMap) object;
                                XMLObject innerXMLObject = new XMLObject();
                                innerXMLObject.setRootElementName(key + i);
                                for (Object innerKey : innerHashMap.keySet()) {
                                    innerXMLObject.put(innerKey.toString(), innerHashMap.get(innerKey));
                                }
                                xmlArray.add(innerXMLObject);
                            } else if (object instanceof XMLObject) {
                                xmlArray.add((XMLObject) object);
                            }
                            // Elements of any other type are silently skipped (original behavior).
                        }
                        // Null guard added for consistency with every other addXMLAttribute call
                        // site; the varargs array is explicitly null when no attributes are given.
                        if (xmlAttributes != null) {
                            xmlArray.addXMLAttribute(xmlAttributes);
                        }
                        return this.put(key, xmlArray);
                    }
            }
        }
        throw new XMLUnsupportedTypeException(value.getClass());
    }

    /**
     * Gets the IXMLValue associated with the appropriate key value.
     * A unique feature is the ability to use Dot Notation to access data inside of the XMLObject.
     * For example:
     * If the structure of the XMLObject looked like this:<br>
     * &lt;root&gt;<br>
     * &lt;subroot&gt;<br>
     * &lt;tag1&gt;value1&lt;/tag1&gt;<br>
     * &lt;tag2&gt;value2&lt;/tag2&gt;<br>
     * &lt;/subroot&gt;<br>
     * &lt;/root&gt;<br>
     * The XML Element <code>tag1</code> can be accessed by using the get() function
     * <code>XMLString tag1String = xmlObject.get("subroot.tag1");</code>
     * @param key The String associated with a specific IXMLValue
     * @return IXMLValue associated with a particular key value, or null if not found
     */
    public IXMLValue get(String key) {
        if (key.contains(XMLStatics.PERIOD)) {
            List<String> list = Arrays.asList(key.split(XMLStatics.DOT_NOTATION));
            String rootKey = list.get(0);
            IXMLValue value = getDataStructure().get(rootKey);
            if (list.size() == 1) {
                return value;
            }
            if (value != null) {
                switch (value.getType()) {
                    case OBJECT:
                        // Recurse into the nested object with the remaining dot-notation path.
                        return ((XMLObject) value).get(XMLUtils.reconstructString(list.subList(1, list.size()), XMLStatics.PERIOD));
                    default:
                        // Only OBJECT values can be traversed further.
                        return null;
                }
            }
            return null;
        }
        return getDataStructure().get(key);
    }

    /**
     * Replace the IXMLValue associated with a particular String value. A unique feature is the ability
     * to use Dot Notation to modify data inside of the XMLObject.
     * For example:
     * If the structure of the XMLObject looked like this: <br>
     * &lt;root&gt;<br>
     * &lt;subroot&gt;<br>
     * &lt;tag1&gt;value1&lt;/tag1&gt;<br>
     * &lt;tag2&gt;value2&lt;/tag2&gt;<br>
     * &lt;/subroot&gt;<br>
     * &lt;/root&gt;<br>
     * The value for XML Element <code>tag1</code> can be replaced by using the replace() function
     * <code>XMLString tag1String = xmlObject.replace("subroot.tag1", new XMLString("tag1", "newValue"));</code>
     * @param key The String associated with a specific IXMLValue
     * @param value new IXMLValue being added to the associated key
     * @return The new IXMLValue associated with the particular key, or null if the key was not found
     */
    public IXMLValue replace(String key, IXMLValue value) {
        if (key.contains(XMLStatics.PERIOD)) {
            List<String> list = Arrays.asList(key.split(XMLStatics.DOT_NOTATION));
            IXMLValue parentValue = this.get(XMLUtils.reconstructString(list.subList(0, list.size() - 1), XMLStatics.PERIOD));
            if (parentValue == null) {
                return null;
            }
            if (parentValue.getType() != XMLValueTypeEnum.OBJECT) {
                return null;
            }
            XMLObject parentObject = (XMLObject) parentValue;
            String childKey = list.get(list.size() - 1);
            if (parentObject.containsKey(childKey)) {
                return parentObject.replace(childKey, value);
            }
            return null;
        }
        // put() on a LinkedHashMap replaces the mapping in place, preserving the element's
        // position in the generated XML. The previous remove()+put() sequence moved the
        // replaced element to the END of the document and returned null, contradicting
        // the documented contract of returning the new value.
        getDataStructure().put(key, value);
        return value;
    }

    /**
     * Checks if this XMLObject contains a particular key.
     *
     * @param key String value that is associated with a particular IXMLValue in this XMLObject
     * @return true if this XMLObject contains a particular key value. False, otherwise.
     */
    public boolean containsKey(String key) {
        return getDataStructure().containsKey(key);
    }

    /**
     * Remove an IXMLValue from this XMLObject.
     *
     * @param key String value that is associated with a particular IXMLValue in this XMLObject
     * @return the IXMLValue that was removed from this XMLObject.
     */
    public IXMLValue remove(String key) {
        return getDataStructure().remove(key);
    }

    /**
     * Renders this XMLObject and all nested values as an XML fragment. Child elements appear
     * in insertion order; an empty object renders as a self-closing element tag.
     * @return XML representation of this object
     */
    @Override
    public String toXMLString() {
        if (getDataStructure().isEmpty()) {
            return XMLUtils.buildEmptyXMLElementTag(getRootElementName(), getXMLAttributes());
        }
        StringBuilder builder = new StringBuilder();
        // build open tag
        builder.append(XMLUtils.buildOpenXMLElementTag(getRootElementName(), getXMLAttributes()));
        builder.append(XMLStatics.NEW_LINE);
        // Iterate values directly instead of repeated key lookups; LinkedHashMap.values()
        // preserves the same insertion order as keySet().
        for (IXMLValue child : getDataStructure().values()) {
            switch (child.getType()) {
                case STRING:
                    XMLString xmlString = (XMLString) child;
                    xmlString.setElementTagFormat(this.getElementTagFormat());
                    builder.append(xmlString.toXMLString());
                    break;
                case STRING_ARRAY:
                    XMLStringArray xmlStringArray = (XMLStringArray) child;
                    xmlStringArray.setElementTagFormat(this.getElementTagFormat());
                    builder.append(xmlStringArray.toXMLString());
                    break;
                case OBJECT:
                    XMLObject xmlObject = (XMLObject) child;
                    xmlObject.setElementTagFormat(this.getElementTagFormat());
                    builder.append(xmlObject.toXMLString());
                    break;
                case ARRAY:
                    XMLArray xmlArray = (XMLArray) child;
                    xmlArray.setElementTagFormat(this.getElementTagFormat());
                    builder.append(xmlArray.toXMLString());
                    break;
                case COMMENT:
                    // Comments carry no element tag format.
                    XMLComment xmlComment = (XMLComment) child;
                    builder.append(xmlComment.toXMLString());
                    break;
            }
            builder.append(XMLStatics.NEW_LINE);
        }
        // build close tag
        builder.append(XMLUtils.buildCloseXMLElementTag(getRootElementName()));
        return builder.toString();
    }

    @Override
    public XMLValueTypeEnum getType() {
        return XMLValueTypeEnum.OBJECT;
    }

    /**
     * Returns a Set containing all key values in this XMLObject.
     *
     * @return Set containing all key values in this XMLObject
     */
    public Set<String> keySet() {
        return getDataStructure().keySet();
    }

    /**
     * Returns the size of this XMLObject.
     *
     * @return size of this XMLObject.
     */
    public int size() {
        return getDataStructure().size();
    }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.process;

import com.intellij.execution.ExecutionException;
import com.intellij.execution.KillableProcess;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.remote.RemoteProcess;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Set;

/**
 * This process handler supports the "soft-kill" feature (see {@link KillableProcessHandler}).
 * At first the "stop" button sends a SIGINT signal to the process; if it still hangs the user can
 * terminate it recursively with a SIGKILL signal.
 * <p>
 * Soft kill works on Unix, and also on Windows if a mediator process was used.
 */
public class KillableProcessHandler extends OSProcessHandler implements KillableProcess {

  private static final Logger LOG = Logger.getInstance(KillableProcessHandler.class);
  // Marker stored on the command line to record whether a mediator process was injected.
  private static final Key<Boolean> MEDIATOR_KEY = Key.create("KillableProcessHandler.Mediator.Process");

  // When true, a graceful termination (SIGINT / Ctrl+C) is attempted before a hard kill.
  private boolean myShouldKillProcessSoftly = true;
  private final boolean myMediatedProcess;
  // WinP-based Ctrl+C is only available on Windows 10+ and behind a registry flag.
  private boolean myShouldKillProcessSoftlyWithWinP = SystemInfo.isWin10OrNewer && Registry.is("use.winp.for.graceful.process.termination");

  public KillableProcessHandler(@NotNull GeneralCommandLine commandLine) throws ExecutionException {
    super(commandLine);
    myMediatedProcess = MEDIATOR_KEY.get(commandLine) == Boolean.TRUE;
  }

  /**
   * Starts a process with a {@link RunnerMediator mediator} when {@code withMediator} is set to {@code true} and the platform is Windows.
   */
  public KillableProcessHandler(@NotNull GeneralCommandLine commandLine, boolean withMediator) throws ExecutionException {
    this(mediate(commandLine, withMediator, false));
  }

  /**
   * {@code commandLine} must not be empty (for correct thread attribution in the stacktrace)
   */
  public KillableProcessHandler(@NotNull Process process, /*@NotNull*/ String commandLine) {
    super(process, commandLine);
    myMediatedProcess = false;
  }

  /**
   * {@code commandLine} must not be empty (for correct thread attribution in the stacktrace)
   */
  public KillableProcessHandler(@NotNull Process process, /*@NotNull*/ String commandLine, @NotNull Charset charset) {
    this(process, commandLine, charset, null);
  }

  /**
   * {@code commandLine} must not be empty (for correct thread attribution in the stacktrace)
   */
  public KillableProcessHandler(@NotNull Process process, /*@NotNull*/ String commandLine, @NotNull Charset charset, @Nullable Set<? extends File> filesToDelete) {
    super(process, commandLine, charset, filesToDelete);
    myMediatedProcess = false;
  }

  /**
   * Injects the runner mediator into the command line (Windows only, at most once) and records
   * the outcome under {@link #MEDIATOR_KEY} so the constructor can pick it up.
   */
  @NotNull
  protected static GeneralCommandLine mediate(@NotNull GeneralCommandLine commandLine, boolean withMediator, boolean showConsole) {
    if (withMediator && SystemInfo.isWindows && MEDIATOR_KEY.get(commandLine) == null) {
      boolean mediatorInjected = RunnerMediator.injectRunnerCommand(commandLine, showConsole);
      MEDIATOR_KEY.set(commandLine, mediatorInjected);
    }
    return commandLine;
  }

  /**
   * @return true, if graceful process termination should be attempted first
   */
  public boolean shouldKillProcessSoftly() {
    return myShouldKillProcessSoftly;
  }

  /**
   * Sets whether the process will be terminated gracefully.
   *
   * @param shouldKillProcessSoftly true, if graceful process termination should be attempted first (i.e. soft kill)
   */
  public void setShouldKillProcessSoftly(boolean shouldKillProcessSoftly) {
    myShouldKillProcessSoftly = shouldKillProcessSoftly;
  }

  /**
   * This method shouldn't be overridden, see shouldKillProcessSoftly.
   * Checks whether the current platform and process setup actually support a graceful kill,
   * independent of the user-configurable {@link #myShouldKillProcessSoftly} flag.
   */
  private boolean canKillProcessSoftly() {
    if (processCanBeKilledByOS(myProcess)) {
      if (SystemInfo.isWindows) {
        // On Windows a soft kill needs either the mediator process or WinP-based Ctrl+C.
        return myMediatedProcess || myShouldKillProcessSoftlyWithWinP;
      }
      else if (SystemInfo.isUnix) {
        // 'kill -SIGINT <pid>' will be executed
        return true;
      }
    }
    return false;
  }

  @Override
  protected void destroyProcessImpl() {
    // Don't close streams, because a process may survive graceful termination.
    // Streams will be closed after the process is really terminated.
    try {
      myProcess.getOutputStream().flush();
    }
    catch (IOException e) {
      LOG.warn(e);
    }
    finally {
      doDestroyProcess();
    }
  }

  @Override
  protected void notifyProcessTerminated(int exitCode) {
    // Streams were deliberately left open in destroyProcessImpl(); close them now that
    // the process has really terminated, then notify listeners.
    try {
      super.closeStreams();
    }
    finally {
      super.notifyProcessTerminated(exitCode);
    }
  }

  @Override
  protected void doDestroyProcess() {
    // Try a graceful termination first; fall back to the default (hard) destroy if the
    // soft kill is disabled, unsupported on this platform, or the attempt failed.
    boolean gracefulTerminationAttempted = shouldKillProcessSoftly() && canKillProcessSoftly() && destroyProcessGracefully();
    if (!gracefulTerminationAttempted) {
      // execute default process destroy
      super.doDestroyProcess();
    }
  }

  /**
   * Enables sending Ctrl+C to a Windows-process on first termination attempt.
   * This is an experimental API which will be removed in future releases once stabilized.
   * Please do not use this API.
   * @param shouldKillProcessSoftlyWithWinP true to use
   */
  @ApiStatus.Experimental
  public void setShouldKillProcessSoftlyWithWinP(boolean shouldKillProcessSoftlyWithWinP) {
    myShouldKillProcessSoftlyWithWinP = shouldKillProcessSoftlyWithWinP;
  }

  /**
   * Attempts the platform-specific graceful termination: mediator or WinP Ctrl+C on Windows,
   * SIGINT to the process tree on Unix.
   * @return true if a graceful termination was attempted (not necessarily that it succeeded in
   *         stopping the process), false to fall back to a hard destroy
   */
  protected boolean destroyProcessGracefully() {
    if (SystemInfo.isWindows) {
      if (myMediatedProcess) {
        return RunnerMediator.destroyProcess(myProcess, true);
      }
      if (myShouldKillProcessSoftlyWithWinP && !Registry.is("disable.winp")) {
        try {
          if (!myProcess.isAlive()) {
            // Process already exited on its own; nothing to destroy.
            OSProcessUtil.logSkippedActionWithTerminatedProcess(myProcess, "destroy", getCommandLine());
            return true;
          }
          return OSProcessUtil.createWinProcess(myProcess).sendCtrlC();
        }
        catch (Throwable e) {
          if (!myProcess.isAlive()) {
            // The failure was caused by the process dying concurrently; treat as done.
            OSProcessUtil.logSkippedActionWithTerminatedProcess(myProcess, "destroy", getCommandLine());
            return true;
          }
          LOG.error("Failed to send Ctrl+C, fallback to default termination: " + getCommandLine(), e);
        }
      }
    }
    else if (SystemInfo.isUnix) {
      return UnixProcessManager.sendSigIntToProcessTree(myProcess);
    }
    return false;
  }

  @Override
  public boolean canKillProcess() {
    return processCanBeKilledByOS(getProcess()) || getProcess() instanceof RemoteProcess;
  }

  @Override
  public void killProcess() {
    if (processCanBeKilledByOS(getProcess())) {
      // execute 'kill -SIGKILL <pid>' on Unix
      killProcessTree(getProcess());
    }
    else if (getProcess() instanceof RemoteProcess) {
      ((RemoteProcess)getProcess()).killProcessTree();
    }
  }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.partition.operation;

import com.hazelcast.internal.partition.MigrationCycleOperation;
import com.hazelcast.internal.partition.MigrationInfo;
import com.hazelcast.internal.partition.impl.InternalPartitionServiceImpl;
import com.hazelcast.internal.partition.impl.PartitionReplicaManager;
import com.hazelcast.internal.partition.impl.PartitionStateManager;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.spi.MigrationAwareService;
import com.hazelcast.spi.PartitionAwareOperation;
import com.hazelcast.spi.PartitionMigrationEvent;
import com.hazelcast.spi.ServiceNamespace;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.partition.MigrationEndpoint;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;

/**
 * Invoked locally on the source or destination of the migration to finalize the migration.
 * This will notify the {@link MigrationAwareService}s that the migration finished, updates the replica versions,
 * clears the migration flag and notifies the node engine when successful.
 */
public final class FinalizeMigrationOperation extends AbstractPartitionOperation
        implements PartitionAwareOperation, MigrationCycleOperation {

    private final MigrationInfo migrationInfo;
    /** Defines if this node is the source or destination of the migration. */
    private final MigrationEndpoint endpoint;
    // Whether the migration being finalized succeeded (commit) or failed (rollback).
    private final boolean success;

    /**
     * This constructor should not be used to obtain an instance of this class; it exists to fulfill IdentifiedDataSerializable
     * coding conventions.
     */
    public FinalizeMigrationOperation() {
        migrationInfo = null;
        endpoint = null;
        success = false;
    }

    public FinalizeMigrationOperation(MigrationInfo migrationInfo, MigrationEndpoint endpoint, boolean success) {
        this.migrationInfo = migrationInfo;
        this.endpoint = endpoint;
        this.success = success;
    }

    @Override
    public void run() {
        NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
        // Notify services BEFORE touching replica versions so commit/rollback hooks
        // observe the pre-finalization state.
        notifyServices(nodeEngine);

        if (endpoint == MigrationEndpoint.SOURCE && success) {
            commitSource();
        } else if (endpoint == MigrationEndpoint.DESTINATION && !success) {
            rollbackDestination();
        }

        InternalPartitionServiceImpl partitionService = getService();
        PartitionStateManager partitionStateManager = partitionService.getPartitionStateManager();
        // Clear the flag regardless of outcome: the migration for this partition is over.
        partitionStateManager.clearMigratingFlag(migrationInfo.getPartitionId());

        if (success) {
            nodeEngine.onPartitionMigrate(migrationInfo);
        }
    }

    /**
     * Notifies all {@link MigrationAwareService}s that the migration finished. The services can then execute the commit or
     * rollback logic. If this node was the source and backup replica for a partition, the services will first be notified that
     * the migration is starting.
     */
    private void notifyServices(NodeEngineImpl nodeEngine) {
        PartitionMigrationEvent event = getPartitionMigrationEvent();
        Collection<MigrationAwareService> migrationAwareServices = getMigrationAwareServices();

        // The old backup owner is not notified about the migration until it is committed
        // on the destination. This is the only place where the backup owner learns that
        // a replica has been moved away from it.
        if (nodeEngine.getThisAddress().equals(migrationInfo.getSource()) && migrationInfo.getSourceCurrentReplicaIndex() > 0) {
            // execute beforeMigration on old backup before commit/rollback
            for (MigrationAwareService service : migrationAwareServices) {
                beforeMigration(event, service);
            }
        }
        for (MigrationAwareService service : migrationAwareServices) {
            finishMigration(event, service);
        }
    }

    /**
     * Builds the migration event from this endpoint's perspective: the replica indices are
     * the source's when this node is the source, the destination's otherwise.
     */
    private PartitionMigrationEvent getPartitionMigrationEvent() {
        int partitionId = getPartitionId();
        return new PartitionMigrationEvent(endpoint, partitionId,
                endpoint == MigrationEndpoint.SOURCE
                        ? migrationInfo.getSourceCurrentReplicaIndex() : migrationInfo.getDestinationCurrentReplicaIndex(),
                endpoint == MigrationEndpoint.SOURCE
                        ? migrationInfo.getSourceNewReplicaIndex() : migrationInfo.getDestinationNewReplicaIndex());
    }

    /** Updates the replica versions on the migration source if the replica index has changed. */
    private void commitSource() {
        int partitionId = getPartitionId();
        InternalPartitionServiceImpl partitionService = getService();
        PartitionReplicaManager replicaManager = partitionService.getReplicaManager();

        ILogger logger = getLogger();

        int sourceNewReplicaIndex = migrationInfo.getSourceNewReplicaIndex();
        if (sourceNewReplicaIndex < 0) {
            // The source no longer holds any replica of this partition.
            clearPartitionReplicaVersions(partitionId);
            if (logger.isFinestEnabled()) {
                logger.finest("Replica versions are cleared in source after migration. partitionId=" + partitionId);
            }
        } else if (migrationInfo.getSourceCurrentReplicaIndex() != sourceNewReplicaIndex && sourceNewReplicaIndex > 1) {
            // SHIFT DOWN: the source keeps a replica but at a less-significant index;
            // zero out versions for the indices it no longer owns.
            for (ServiceNamespace namespace : replicaManager.getNamespaces(partitionId)) {
                long[] versions = updatePartitionReplicaVersions(replicaManager, partitionId, namespace, sourceNewReplicaIndex - 1);
                if (logger.isFinestEnabled()) {
                    logger.finest("Replica versions are set after SHIFT DOWN migration. partitionId=" + partitionId
                            + " namespace: " + namespace + " replica versions=" + Arrays.toString(versions));
                }
            }
        }
    }

    /** Clears the replica versions of every service namespace of the given partition. */
    private void clearPartitionReplicaVersions(int partitionId) {
        InternalPartitionServiceImpl partitionService = getService();
        PartitionReplicaManager replicaManager = partitionService.getReplicaManager();

        for (ServiceNamespace namespace : replicaManager.getNamespaces(partitionId)) {
            replicaManager.clearPartitionReplicaVersions(partitionId, namespace);
        }
    }

    /** Updates the replica versions on the migration destination. */
    private void rollbackDestination() {
        int partitionId = getPartitionId();
        InternalPartitionServiceImpl partitionService = getService();
        PartitionReplicaManager replicaManager = partitionService.getReplicaManager();
        ILogger logger = getLogger();

        int destinationCurrentReplicaIndex = migrationInfo.getDestinationCurrentReplicaIndex();
        if (destinationCurrentReplicaIndex == -1) {
            // The destination held no replica before the failed migration; discard everything.
            clearPartitionReplicaVersions(partitionId);
            if (logger.isFinestEnabled()) {
                logger.finest("Replica versions are cleared in destination after failed migration. partitionId="
                        + partitionId);
            }
        } else {
            // Roll versions back up to (but not including) the replica index the destination
            // owned before the migration; offset is clamped to at least 1.
            int replicaOffset = migrationInfo.getDestinationCurrentReplicaIndex() <= 1 ? 1 : migrationInfo
                    .getDestinationCurrentReplicaIndex();

            for (ServiceNamespace namespace : replicaManager.getNamespaces(partitionId)) {
                long[] versions = updatePartitionReplicaVersions(replicaManager, partitionId, namespace, replicaOffset - 1);
                if (logger.isFinestEnabled()) {
                    logger.finest("Replica versions are rolled back in destination after failed migration. partitionId="
                            + partitionId + " namespace: " + namespace
                            + " replica versions=" + Arrays.toString(versions));
                }
            }
        }
    }

    /** Sets all replica versions to {@code 0} up to the {@code replicaIndex}. */
    private long[] updatePartitionReplicaVersions(PartitionReplicaManager replicaManager, int partitionId,
                                                  ServiceNamespace namespace, int replicaIndex) {
        long[] versions = replicaManager.getPartitionReplicaVersions(partitionId, namespace);
        // No need to set versions back right now. actual version array is modified directly.
        Arrays.fill(versions, 0, replicaIndex, 0);
        return versions;
    }

    // Best-effort: a failing service must not prevent the other services from being notified.
    private void beforeMigration(PartitionMigrationEvent event, MigrationAwareService service) {
        try {
            service.beforeMigration(event);
        } catch (Throwable e) {
            getLogger().warning("Error before migration -> " + event, e);
        }
    }

    // Best-effort: commit on success, rollback on failure; exceptions are logged, not rethrown.
    private void finishMigration(PartitionMigrationEvent event, MigrationAwareService service) {
        try {
            if (success) {
                service.commitMigration(event);
            } else {
                service.rollbackMigration(event);
            }
        } catch (Throwable e) {
            getLogger().warning("Error while finalizing migration -> " + event, e);
        }
    }

    @Override
    public boolean returnsResponse() {
        return false;
    }

    @Override
    public boolean validatesTarget() {
        return false;
    }

    // This operation is local-only and is never serialized; the (de)serialization hooks
    // therefore deliberately fail fast.
    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getId() {
        throw new UnsupportedOperationException();
    }
}
/**
 *
 */
package org.openxava.jpa.impl;

import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.FlushModeType;
import javax.persistence.LockModeType;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.metamodel.Metamodel;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openxava.annotations.PostCreate;
import org.openxava.annotations.PreCreate;
import org.openxava.annotations.PreDelete;
import org.openxava.util.Classes;
import org.openxava.util.XavaException;
import org.openxava.validators.ValidationException;

/**
 * An {@link EntityManager} decorator that executes the OpenXava persistence
 * callbacks ({@link PreCreate}, {@link PostCreate}, {@link PreDelete}) around
 * {@code persist} and {@code remove}, and delegates every other operation
 * unchanged to the wrapped manager.
 *
 * @author Federico Alcantara
 */
public class EntityManagerDecorator implements EntityManager {

	private static final Log log = LogFactory.getLog(EntityManagerDecorator.class);

	private EntityManager decoratedManager;

	/**
	 * Wraps the given manager with callback decoration.
	 *
	 * @param unDecoratedManager manager to decorate
	 */
	public EntityManagerDecorator(EntityManager unDecoratedManager) {
		this.decoratedManager = unDecoratedManager;
	}

	/**
	 * Persists the object, running its {@link PreCreate} callbacks before and
	 * its {@link PostCreate} callbacks after the persist. The calls occur
	 * within the current transaction.
	 *
	 * @param object object to be persisted
	 */
	public void persist(Object object) {
		executeCallbacks(object, PreCreate.class);
		decoratedManager.persist(object);
		executeCallbacks(object, PostCreate.class);
	}

	/**
	 * Removes the object, running its {@link PreDelete} callbacks first.
	 * The calls occur within the current transaction.
	 *
	 * @param entity object to be removed
	 */
	public void remove(Object entity) {
		executeCallbacks(entity, PreDelete.class);
		decoratedManager.remove(entity);
	}

	/**
	 * Invokes every no-arg method of the object annotated with the given annotation.
	 *
	 * @param bean           object whose callback methods are invoked
	 * @param annotationType annotation marking the callback methods
	 * @throws ValidationException rethrown as-is when a callback raises it
	 * @throws XavaException       wrapping any other callback failure
	 */
	private void executeCallbacks(Object bean, Class<? extends Annotation> annotationType) {
		for (Method callback : Classes.getMethodsAnnotatedWith(bean.getClass(), annotationType)) {
			try {
				callback.invoke(bean);
			}
			catch (InvocationTargetException ex) {
				Throwable cause = ex.getCause();
				if (cause == null) {
					log.error(ex.getMessage(), ex);
					throw new XavaException(ex.getMessage());
				}
				// Surface the real cause so the XavaException doesn't swallow it.
				log.error(cause.getMessage(), cause);
				if (cause instanceof ValidationException) {
					throw (ValidationException) cause;
				}
				throw new XavaException(cause.getMessage());
			}
			catch (Exception ex) {
				log.error(ex.getMessage(), ex);
				throw new XavaException(ex.getMessage());
			}
		}
	}

	// ---------------------------------------------------------------------
	// Pure delegation below: every method forwards to the decorated manager.
	// ---------------------------------------------------------------------

	public void clear() {
		decoratedManager.clear();
	}

	public void close() {
		decoratedManager.close();
	}

	public boolean contains(Object entity) {
		return decoratedManager.contains(entity);
	}

	public Query createNamedQuery(String name) {
		return decoratedManager.createNamedQuery(name);
	}

	public <T> TypedQuery<T> createNamedQuery(String name, Class<T> resultClass) {
		return decoratedManager.createNamedQuery(name, resultClass);
	}

	public Query createNativeQuery(String sqlString) {
		return decoratedManager.createNativeQuery(sqlString);
	}

	@SuppressWarnings("rawtypes")
	public Query createNativeQuery(String sqlString, Class resultClass) {
		return decoratedManager.createNativeQuery(sqlString, resultClass);
	}

	public Query createNativeQuery(String sqlString, String resultSetMapping) {
		return decoratedManager.createNativeQuery(sqlString, resultSetMapping);
	}

	public Query createQuery(String qlString) {
		return decoratedManager.createQuery(qlString);
	}

	public <T> TypedQuery<T> createQuery(CriteriaQuery<T> criteriaQuery) {
		return decoratedManager.createQuery(criteriaQuery);
	}

	public <T> TypedQuery<T> createQuery(String qlString, Class<T> resultClass) {
		return decoratedManager.createQuery(qlString, resultClass);
	}

	public void detach(Object entity) {
		decoratedManager.detach(entity);
	}

	public <T> T find(Class<T> entityClass, Object primaryKey) {
		return decoratedManager.find(entityClass, primaryKey);
	}

	public <T> T find(Class<T> entityClass, Object primaryKey, Map<String, Object> properties) {
		return decoratedManager.find(entityClass, primaryKey, properties);
	}

	public <T> T find(Class<T> entityClass, Object primaryKey, LockModeType lockMode) {
		return decoratedManager.find(entityClass, primaryKey, lockMode);
	}

	public <T> T find(Class<T> entityClass, Object primaryKey, LockModeType lockMode, Map<String, Object> properties) {
		return decoratedManager.find(entityClass, primaryKey, lockMode, properties);
	}

	public void flush() {
		decoratedManager.flush();
	}

	public CriteriaBuilder getCriteriaBuilder() {
		return decoratedManager.getCriteriaBuilder();
	}

	public Object getDelegate() {
		return decoratedManager.getDelegate();
	}

	public EntityManagerFactory getEntityManagerFactory() {
		return decoratedManager.getEntityManagerFactory();
	}

	public FlushModeType getFlushMode() {
		return decoratedManager.getFlushMode();
	}

	public LockModeType getLockMode(Object entity) {
		return decoratedManager.getLockMode(entity);
	}

	public Metamodel getMetamodel() {
		return decoratedManager.getMetamodel();
	}

	public Map<String, Object> getProperties() {
		return decoratedManager.getProperties();
	}

	public <T> T getReference(Class<T> entityClass, Object primaryKey) {
		return decoratedManager.getReference(entityClass, primaryKey);
	}

	public EntityTransaction getTransaction() {
		return decoratedManager.getTransaction();
	}

	public boolean isOpen() {
		return decoratedManager.isOpen();
	}

	public void joinTransaction() {
		decoratedManager.joinTransaction();
	}

	public void lock(Object entity, LockModeType lockMode) {
		decoratedManager.lock(entity, lockMode);
	}

	public void lock(Object entity, LockModeType lockMode, Map<String, Object> properties) {
		decoratedManager.lock(entity, lockMode, properties);
	}

	public <T> T merge(T entity) {
		return decoratedManager.merge(entity);
	}

	public void refresh(Object entity) {
		decoratedManager.refresh(entity);
	}

	public void refresh(Object entity, Map<String, Object> properties) {
		decoratedManager.refresh(entity, properties);
	}

	public void refresh(Object entity, LockModeType lockMode) {
		decoratedManager.refresh(entity, lockMode);
	}

	public void refresh(Object entity, LockModeType lockMode, Map<String, Object> properties) {
		decoratedManager.refresh(entity, lockMode, properties);
	}

	public void setFlushMode(FlushModeType flushMode) {
		decoratedManager.setFlushMode(flushMode);
	}

	public void setProperty(String propertyName, Object value) {
		decoratedManager.setProperty(propertyName, value);
	}

	public <T> T unwrap(Class<T> cls) {
		return decoratedManager.unwrap(cls);
	}
}
package org.qcri.rheem.spark.operators;

import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function2;
import org.qcri.rheem.basic.operators.SampleOperator;
import org.qcri.rheem.core.api.Configuration;
import org.qcri.rheem.core.optimizer.OptimizationContext;
import org.qcri.rheem.core.optimizer.costs.DefaultLoadEstimator;
import org.qcri.rheem.core.optimizer.costs.LoadProfileEstimator;
import org.qcri.rheem.core.optimizer.costs.NestableLoadProfileEstimator;
import org.qcri.rheem.core.plan.rheemplan.ExecutionOperator;
import org.qcri.rheem.core.platform.ChannelDescriptor;
import org.qcri.rheem.core.platform.ChannelInstance;
import org.qcri.rheem.core.platform.lineage.ExecutionLineageNode;
import org.qcri.rheem.core.types.DataSetType;
import org.qcri.rheem.core.util.Tuple;
import org.qcri.rheem.java.channels.CollectionChannel;
import org.qcri.rheem.spark.channels.RddChannel;
import org.qcri.rheem.spark.execution.SparkExecutor;
import scala.collection.JavaConversions;
import scala.collection.convert.Wrappers;
import scala.runtime.AbstractFunction1;

import java.io.Serializable;
import java.util.*;
import java.util.function.IntUnaryOperator;
import java.util.function.LongUnaryOperator;

/**
 * Spark implementation of the {@link SampleOperator} that samples by shuffling one
 * randomly chosen partition and reading it sequentially from the front.
 */
public class SparkShufflePartitionSampleOperator<Type>
        extends SampleOperator<Type>
        implements SparkExecutionOperator {

    private Random rand;

    // Sampling cursor state carried across evaluate() invocations:
    // the currently selected partition and the next tuple offset within it.
    private int partitionID = 0;

    private int tupleID = 0;

    // Count of not-yet-consumed partitions; 0 means (re)initialize from the RDD.
    private int nb_partitions = 0;

    // Indexes of partitions that have not been sampled yet.
    private List<Integer> partitions;

    // Cached RDD holding the shuffled version of the selected partition.
    private JavaRDD<Type> shuffledRDD;

    /**
     * Creates a new instance.
     *
     * @param sampleSizeFunction yields the sample size per iteration
     * @param type               data set type of the sampled elements
     * @param seedFunction       yields the RNG seed per iteration
     */
    public SparkShufflePartitionSampleOperator(IntUnaryOperator sampleSizeFunction, DataSetType<Type> type, LongUnaryOperator seedFunction) {
        super(sampleSizeFunction, type, Methods.SHUFFLE_PARTITION_FIRST, seedFunction);
    }

    /**
     * Copies an instance (exclusive of broadcasts).
     *
     * @param that that should be copied
     */
    public SparkShufflePartitionSampleOperator(SampleOperator<Type> that) {
        super(that);
        assert that.getSampleMethod() == Methods.SHUFFLE_PARTITION_FIRST || that.getSampleMethod() == Methods.ANY;
    }

    /**
     * Draws a sample from the input RDD.
     * <p>
     * Strategy: pick a random partition, shuffle it once (cached), then read
     * {@code sampleSize} elements sequentially starting at {@code tupleID}. If the
     * partition is exhausted before the sample is complete, another partition is
     * chosen and the remainder is drawn from it.
     * <p>
     * NOTE(review): when {@code sampleSize >= datasetSize} the whole input is emitted
     * and {@code null} is returned instead of lineage — presumably callers treat
     * {@code null} as "eager execution already modeled"; confirm against SparkExecutor.
     */
    @Override
    public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
            ChannelInstance[] inputs,
            ChannelInstance[] outputs,
            SparkExecutor sparkExecutor,
            OptimizationContext.OperatorContext operatorContext) {
        assert inputs.length == this.getNumInputs();
        assert outputs.length == this.getNumOutputs();

        RddChannel.Instance input = (RddChannel.Instance) inputs[0];
        JavaRDD<Type> inputRdd = input.provideRdd();
        // Counting forces a job; cache first so the data is reused below.
        long datasetSize = this.isDataSetSizeKnown() ?
                this.getDatasetSize() :
                inputRdd.cache().count();
        int sampleSize = this.getSampleSize(operatorContext);

        if (sampleSize >= datasetSize) { //return all and return
            ((CollectionChannel.Instance) outputs[0]).accept(inputRdd.collect());
            return null;
        }

        long seed = this.getSeed(operatorContext);
        rand = new Random(seed);

        List<Type> result = new ArrayList<>();
        final SparkContext sparkContext = inputRdd.context();
        boolean miscalculated = false;
        do {
            if (tupleID == 0) {
                if (nb_partitions == 0) { //it's the first time we sample or we read all partitions already, start again
                    nb_partitions = inputRdd.partitions().size();
                    partitions = new ArrayList<>();
                    for (int i = 0; i < nb_partitions; i++)
                        partitions.add(i, i);
                }
                //choose a random partition (remove it so it is not picked twice)
                partitionID = partitions.remove(rand.nextInt(nb_partitions--));
                // shuffle the partition once and cache it for sequential reads
                shuffledRDD = inputRdd.<Type>mapPartitionsWithIndex(new ShufflePartition<>(partitionID, seed), true).cache();
                miscalculated = false;
            }
            List<Integer> pars = new ArrayList<>(1);
            pars.add(partitionID);
            // read sequentially from partitionID; runJob is restricted to that single partition
            Object samples = sparkContext.runJob(shuffledRDD.rdd(),
                    new TakeSampleFunction(tupleID, tupleID + sampleSize),
                    (scala.collection.Seq) JavaConversions.asScalaBuffer(pars), true,
                    scala.reflect.ClassTag$.MODULE$.apply(List.class));
            tupleID += sampleSize;
            result.addAll(((List<Type>[]) samples)[0]);
            if (result.size() < sampleSize) { //we reached end of partition, start again
                miscalculated = true;
                tupleID = 0;
                // only the remainder still needs to be sampled from the next partition
                sampleSize = sampleSize - result.size();
            }
        } while (miscalculated);

        // assuming the sample is small better use a collection instance, the optimizer can transform the output if necessary
        ((CollectionChannel.Instance) outputs[0]).accept(result);

        return ExecutionOperator.modelLazyExecution(inputs, outputs, operatorContext);
    }

    @Override
    protected ExecutionOperator createCopy() {
        return new SparkShufflePartitionSampleOperator<>(this);
    }

    @Override
    public Collection<String> getLoadProfileEstimatorConfigurationKeys() {
        return Collections.singleton("rheem.spark.shuffle-partition-sample.load");
    }

    @Override
    public List<ChannelDescriptor> getSupportedInputChannels(int index) {
        assert index <= this.getNumInputs() || (index == 0 && this.getNumInputs() == 0);
        return Arrays.asList(RddChannel.UNCACHED_DESCRIPTOR, RddChannel.CACHED_DESCRIPTOR);
    }

    @Override
    public List<ChannelDescriptor> getSupportedOutputChannels(int index) {
        assert index <= this.getNumOutputs() || (index == 0 && this.getNumOutputs() == 0);
        return Collections.singletonList(CollectionChannel.DESCRIPTOR);
    }

    @Override
    public boolean containsAction() {
        return true;
    }
}

/**
 * Partition function that shuffles the elements of exactly one partition
 * (identified by {@code partitionID}) and leaves all other partitions empty.
 */
class ShufflePartition<V, T, R> implements Function2<V, T, R> {

    private int partitionID;

    private Random rand;

    ShufflePartition(int partitionID) {
        this.partitionID = partitionID;
        this.rand = new Random();
    }

    ShufflePartition(int partitionID, long seed) {
        this.partitionID = partitionID;
        this.rand = new Random(seed);
    }

    /**
     * @param o  the partition index supplied by {@code mapPartitionsWithIndex}
     * @param o2 the partition's element iterator
     * @return a shuffled iterator for the selected partition, an empty iterator otherwise
     */
    @Override
    public Object call(Object o, Object o2) throws Exception {
        int myPartitionID = (int) o;
        if (myPartitionID == partitionID) {
            Wrappers.IteratorWrapper<T> sparkIt = (Wrappers.IteratorWrapper) o2;
            // Materialize the partition so it can be shuffled in memory.
            List<T> list = new ArrayList<>();
            while (sparkIt.hasNext())
                list.add(sparkIt.next());
            Collections.shuffle(list, rand);
            return list.iterator();
        }
        return Collections.emptyIterator();
    }
}

/**
 * Collects the elements of a partition whose positions fall in
 * {@code [start_id, end_id)}; stops iterating once {@code end_id} is reached.
 */
class TakeSampleFunction<V> extends AbstractFunction1<scala.collection.Iterator<V>, List<V>> implements Serializable {

    private int start_id;

    private int end_id;

    TakeSampleFunction(int start_id, int end_id) {
        this.start_id = start_id;
        this.end_id = end_id;
    }

    @Override
    public List<V> apply(scala.collection.Iterator<V> iterator) {
        List<V> list = new ArrayList<>(end_id - start_id);
        int count = 0;
        V element;
        //sample from start_id to end_id
        while (iterator.hasNext()) {
            element = iterator.next();
            // NOTE(review): non-short-circuit `&` — behaviorally equivalent to && for
            // these boolean operands, but && would be the conventional choice.
            if (count >= start_id & count < end_id)
                list.add(element);
            count++;
            if (count >= end_id)
                break;
        }
        return list;
    }
}
/*
 ******************************************************************************
 * Copyright 2016 Michael Snavely
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************
 */
package com.darkstar.beanCartography;

import com.darkstar.beanCartography.utils.NameUtils;
import com.darkstar.beanCartography.utils.finder.Filter;
import com.darkstar.beanCartography.utils.finder.Finder;

import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.util.*;

/**
 * Copies field values between arbitrary beans by matching "business names"
 * (discovered via {@link Finder}) rather than Java field names. Collections,
 * maps, and arrays are copied only when enabled via the constructor flag.
 *
 * @author michael snavely
 */
public class Cartographer {

    // When true, collection/map/array fields are deep-copied as well.
    private boolean copyCollections = false;

    // business field name to field formatter map...
    private final Map<String, FieldFormatter> fieldFormatters = new HashMap<>();

    /**
     * Constructor
     */
    public Cartographer() {
        super();
    }

    /**
     * Constructor
     *
     * @param copyCollections set to <code>true</code> if collections, maps, and arrays should be copied
     */
    public Cartographer(boolean copyCollections) {
        super();
        this.copyCollections = copyCollections;
    }

    /**
     * Registers a formatter applied when copying into the named business field.
     *
     * @param businessFieldName name to associate to the formatter
     * @param f formatter to use
     */
    public void addFieldFormatter(String businessFieldName, FieldFormatter f) {fieldFormatters.put(businessFieldName, f);}

    /**
     * Registers several formatters at once.
     *
     * @param map containing names to formatter mappings
     */
    public void addFieldFormatter(Map<String, FieldFormatter> map) {
        fieldFormatters.putAll(map);
    }

    /**
     * This is the main processing method: maps both object graphs by business
     * name, then copies matching fields from source to target.
     *
     * @param sourceObj object containing named field values to copy
     * @param targetObj object that will serve as the target of the copy
     * @throws IllegalArgumentException if either argument is null
     */
    private void process(Object sourceObj, Object targetObj) {
        if (sourceObj == null)
            throw new IllegalArgumentException("sourceObj cannot be null");
        if (targetObj == null)
            throw new IllegalArgumentException("targetObj cannot be null");

        Finder walker = new Finder();

        // map the source object...
        Filter businessNameFilter = new NameFilter();
        NameInterceptor intercepter = new NameInterceptor();
        walker.addFilterIntecepter(businessNameFilter, intercepter);
        walker.find(sourceObj);
        Map<String, List<NamedClassBean>> sourceMap = intercepter.getNameToBusinessClassMap();

        walker = new Finder(false, false, false);

        // map the target object...
        businessNameFilter = new NameFilter();
        intercepter = new NameInterceptor();
        walker.addFilterIntecepter(businessNameFilter, intercepter);
        walker.find(targetObj);
        Map<String, List<NamedClassBean>> targetMap = intercepter.getNameToBusinessClassMap();

        /*
         * now attend to the business of matching and moving field data from source to target one field at a time...
         */

        // let's drive the process off of the map of the target object...
        String targetBusinessName = null;
        List<NamedClassBean> targetClassBeanList = null;
        for (Map.Entry<String, List<NamedClassBean>> entry : targetMap.entrySet()) {
            targetBusinessName = entry.getKey();
            targetClassBeanList = entry.getValue();
            boolean foundClassMatch = false;

            for (NamedClassBean targetBean : targetClassBeanList) {
                foundClassMatch = false;

                // process matching class name
                // we have a matching business CLASS name among the business class names
                // which means we have a matching business class type... (should i support the same business name on different types? --i dont think so)
                if (sourceMap.containsKey(targetBusinessName)) {
                    foundClassMatch = processBusinessClassName(targetBusinessName, targetBean, sourceMap);
                }

                // process composite classes
                else if (NameUtils.hasBusinessComposites(targetBean.getClazz())) {
                    // copy all fields from composites (if found)...
                    Set<String> processedFieldNames = processComposites(targetBean, sourceMap);

                    // remove processed field names if they are terminal types...
                    Iterator<NamePointerBean> it = targetBean.getFields().iterator();
                    NamePointerBean bnpb = null;
                    while (it.hasNext()) {
                        bnpb = it.next();
                        if (!NamePointerBean.NAME_TYPE.TERMINAL.equals(bnpb.getType()))
                            continue;
                        if (processedFieldNames.contains(bnpb.getName()))
                            it.remove();
                    }

                    // process the remaining fields...
                    for (NamePointerBean targetNameBean : targetBean.getFields()) {
                        processField(targetNameBean.getName(), targetBean, sourceMap);
                    }
                    foundClassMatch = true;
                }

                // check for the business CLASS name in the contained fields for all of the business classes...
                // this should end up being a class...
                else {
                    foundClassMatch = processField(targetBusinessName, targetBean, sourceMap);
                }

                // if we get here we have no matching business class names anywhere.  What we need to do now
                // is search through his contained business FIELDs for a match on business class or contained field...
                if (!foundClassMatch) {
                    for (NamePointerBean targetNameBean : targetBean.getFields()) {
                        if (!processBusinessClassName(targetNameBean.getName(), targetBean, sourceMap))
                            processField(targetNameBean.getName(), targetBean, sourceMap);
                    }
                }
            }
        }
    }

    /**
     * Use this method to copy source named fields to target named fields.
     *
     * @param sourceObj object to use as the source
     * @param targetObj object to use as the target
     * @throws IllegalAccessException declared but not visibly thrown here
     */
    public void mapObject(Object sourceObj, Object targetObj) throws IllegalAccessException {
        if (sourceObj == null)
            throw new IllegalArgumentException("sourceObj cannot be null");
        if (targetObj == null)
            throw new IllegalArgumentException("targetObj cannot be null");
        process(sourceObj, targetObj);
    }

    /**
     * This method will process the classes that have the composite annotation at the class level.  All composites
     * listed will be used to populate this class.
     *
     * @param targetBean bean containing target data
     * @param sourceMap map of named class beans
     * @return set of named fields that have been processed
     */
    private Set<String> processComposites(NamedClassBean targetBean, Map<String, List<NamedClassBean>> sourceMap) {
        if (targetBean == null)
            throw new IllegalArgumentException("cannot be null");
        String[] compositeNames = NameUtils.getBusinessComposites(targetBean.getClazz());
        Set<String> processedFieldNames = new HashSet<>();
        if (compositeNames == null || compositeNames.length == 0)
            return processedFieldNames;

        for (String businessName : compositeNames) {
            // we have a matching business name among the business class names
            // which means we have a matching business class type... (should i support the same business name on different types? --i dont think so)
            if (sourceMap.containsKey(businessName)) {
                List<NamedClassBean> sourceClassBeanList = sourceMap.get(businessName);

                // find the source bean having the same business name that we are looking for and copy the fields...
                for (NamedClassBean bcb : sourceClassBeanList) {
                    if (bcb.getName().equals(businessName)) {
                        NamedClassBean sourceBean = bcb;

                        // copy the matching class fields...
                        // get the container instances...
                        List<Object> targetInstances = targetBean.getInstances();

                        // HOW DO I PICK AN INSTANCE AGAIN?????  let's use the first one for now
                        Object targetInstance = targetInstances.get(0);

                        // get the business fields...
                        List<NamePointerBean> sourceFields = sourceBean.getFields();
                        List<NamePointerBean> targetFields = targetBean.getFields();
                        processedFieldNames.addAll(copyMatchingFields(sourceFields, targetFields, targetInstance, sourceMap));
                        break;
                    }
                }
            }
        }
        return processedFieldNames;
    }

    /**
     * If we are searching for a business class name then the only place that can be found will be in the business name
     * map!  All class level business names will exist as keys in this map.
     *
     * @param targetBusinessName target name
     * @param targetBean target bean
     * @param sourceMap map of source name beans
     * @return true if the business class name was found and the field contents were copied
     */
    private boolean processBusinessClassName(String targetBusinessName, NamedClassBean targetBean, Map<String, List<NamedClassBean>> sourceMap) {
        // we have a matching business name among the business class names
        // which means we have a matching business class type... (should i support the same business name on different types? --i dont think so)
        if (sourceMap.containsKey(targetBusinessName)) {
            List<NamedClassBean> sourceClassBeanList = sourceMap.get(targetBusinessName);

            // if the source contains the same bean as the target then copy the fields...
            if (sourceClassBeanList.contains(targetBean)) {
                NamedClassBean sourceBean = sourceClassBeanList.get(sourceClassBeanList.indexOf(targetBean));

                // copy the matching class fields...
                // get the container instances...
                List<Object> targetInstances = targetBean.getInstances();

                // HOW DO I PICK AN INSTANCE AGAIN?????  let's use the first one for now
                Object targetInstance = targetInstances.get(0);

                // get the business fields...
                List<NamePointerBean> sourceFields = sourceBean.getFields();
                List<NamePointerBean> targetFields = targetBean.getFields();
                copyMatchingFields(sourceFields, targetFields, targetInstance, sourceMap);
            }
            // NOTE(review): returns true even when the bean itself was not contained
            // (nothing copied) — only the name lookup succeeded; confirm intended.
            return true;
        }
        return false;
    }

    /**
     * Copy source business field value to target field as long as their business names match and they are terminal
     * types!
     *
     * @param sourceFields list of source name beans
     * @param targetFields list of target fields
     * @param targetInstance target instance
     * @param sourceMap map of source name beans (passed through, unused here)
     * @return List of business field names that were copied
     */
    private List<String> copyMatchingFields(List<NamePointerBean> sourceFields, List<NamePointerBean> targetFields, Object targetInstance, Map<String, List<NamedClassBean>> sourceMap) {
        List<String> copiedFields = new ArrayList<>();
        sourceFields.stream()
            .filter(targetFields::contains)
            .forEach(sourceField -> {
                copyFieldContents(targetInstance, targetFields.get(targetFields.indexOf(sourceField)), sourceField);
                copiedFields.add(sourceField.getName());
            });
        return copiedFields;
    }

    /**
     * This method will look through all of the source fields for a match on target business name.  If found that field
     * will be copied to the target provided it is a terminal field and not another complex object.
     *
     * @param targetBusinessName target name
     * @param targetBean target bean
     * @param sourceMap map of source names
     * @return <code>true</code> if match found
     */
    private boolean processField(String targetBusinessName, NamedClassBean targetBean, Map<String, List<NamedClassBean>> sourceMap) {
        List<Object> targetInstances = targetBean.getInstances();

        // HOW DO I PICK AN INSTANCE AGAIN?????  let's use the first one for now
        Object instance = targetInstances.get(0);
        return processField(targetBusinessName, targetBean.getFields(), instance, sourceMap);
    }

    /**
     * This method will look through all of the source fields for a match on target business name.  If found that field
     * will be copied to the target provided it is a terminal field and not another complex object.
     *
     * @param targetBusinessName target name
     * @param targetFields target field list
     * @param targetInstance target instance
     * @param sourceMap map of source names
     * @return <code>true</code> if match found
     */
    private boolean processField(String targetBusinessName, List<NamePointerBean> targetFields, Object targetInstance, Map<String, List<NamedClassBean>> sourceMap) {
        boolean foundClassMatch = false;
        // scan every source class bean's fields for the first matching business name...
        for (Map.Entry<String, List<NamedClassBean>> sourceMapEntry : sourceMap.entrySet()) {
            for (NamedClassBean sourceClassBean : sourceMapEntry.getValue()) {
                for (NamePointerBean sourceNameBean : sourceClassBean.getFields()) {
                    if (targetBusinessName.equals(sourceNameBean.getName())) {
                        // copy the field data...
                        copyFieldContents(targetInstance, targetFields.get(targetFields.indexOf(sourceNameBean)), sourceNameBean);
                        foundClassMatch = true;
                        break;
                    }
                }
                if (foundClassMatch)
                    break;
            }
            if (foundClassMatch)
                break;
        }
        return foundClassMatch;
    }

    /**
     * Create a bean instance from it's class.
     * <p>
     * NOTE(review): {@code namePointer.getClass().newInstance()} instantiates
     * {@code NamePointerBean} itself, not the type the pointer refers to. It looks
     * like this should build an instance of the pointed-to field's declared type
     * (compare {@code copyArray}, which uses the field's type) — confirm and fix.
     *
     * @param namePointer name pointer bean containing an instance
     */
    private static void createInstanceFromClass(NamePointerBean namePointer) {
        if (namePointer != null && namePointer.getInstance() == null) {
            try {
                namePointer.setInstance(namePointer.getClass().newInstance());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Copy data from one array to another.
     *
     * @param targetNameBean target bean
     * @param sourceNameBean source name bean
     */
    private void copyArray(NamePointerBean targetNameBean, NamePointerBean sourceNameBean) {
        // is this feature enabled?
        if (!copyCollections)
            return;

        // if the source bean has an instance associated with it (otherwise we ignore it)...
        if (sourceNameBean.getInstance() != null) {
            // if target bean array object is null create one and set it back into the target bean...
            if (targetNameBean.getInstance() == null) {
                targetNameBean.setInstance(Array.newInstance(targetNameBean.getField().getType().getComponentType(), Array.getLength(sourceNameBean.getInstance())));
            }
            final Class<?> targetElementClass;
            targetElementClass = targetNameBean.getInstance().getClass().getComponentType();

            // if the target bean array instance exists...
            // (always true here since it was just created above if missing)
            if (targetNameBean.getInstance() != null) {
                Object sourceArray = sourceNameBean.getInstance();
                Object targetArray = targetNameBean.getInstance();
                // immutable element types are copied by reference; others are mapped recursively
                final boolean isTerminal = NameUtils.isImmutable(targetElementClass);

                // for each element in the source collection create a new element for the target collection
                // populating it by recursively calling mapObject.  Once the new instance has been mapped,
                // add him to the target collection...
                Object sourceElement = null;
                for (int i = 0; i < Array.getLength(sourceArray); i++) {
                    try {
                        sourceElement = Array.get(sourceArray, i);
                        Object targetElement;
                        if (sourceElement == null || isTerminal)
                            targetElement = sourceElement;
                        else {
                            targetElement = targetElementClass.newInstance();
                            process(sourceElement, targetElementClass.cast(targetElement));
                        }
                        Array.set(targetArray, i, targetElement);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }

    /**
     * Copy source collection to a target collection.
     *
     * @param targetNameBean target name bean
     * @param sourceNameBean source name bean
     */
    private void copyCollection(NamePointerBean targetNameBean, NamePointerBean sourceNameBean) {
        // is this feature enabled?
        if (!copyCollections)
            return;

        // if the source bean has an instance associated with it (otherwise we ignore it)...
        if (sourceNameBean.getInstance() != null) {
            // if target bean collection object is null create one and set it back into the target bean...
            createInstanceFromClass(targetNameBean);
            // element type is taken from the target field's generic declaration
            final Class<?> targetElementClass;
            try {
                targetElementClass = Class.forName(((ParameterizedType) targetNameBean.getField().getGenericType()).getActualTypeArguments()[0].getTypeName());
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
                return;
            }

            // if the target bean collection instance exists...
            if (targetNameBean.getInstance() != null) {
                Collection<?> sourceCollection = (Collection<?>) sourceNameBean.getInstance();
                Collection<?> targetCollection = (Collection<?>) targetNameBean.getInstance();
                final boolean isTerminal = NameUtils.isImmutable(targetElementClass);

                // for each element in the source collection create a new element for the target collection
                // populating it by recursively calling mapObject.  Once the new instance has been mapped,
                // add him to the target collection...
                sourceCollection.stream()
                    .forEach(sourceElement -> {
                        try {
                            Object targetElement;
                            if (sourceElement == null || isTerminal)
                                targetElement = sourceElement;
                            else {
                                targetElement = targetElementClass.newInstance();
                                process(sourceElement, targetElement);
                            }
                            // reflective add() sidesteps the wildcard-typed collection
                            Method add = null;
                            add = targetCollection.getClass().getDeclaredMethod("add", Object.class);
                            add.invoke(targetCollection, targetElement);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    });
            }
        }
    }

    /**
     * Copy source map to target map
     *
     * @param targetNameBean target name bean
     * @param sourceNameBean source name bean
     */
    private void copyMap(NamePointerBean targetNameBean, NamePointerBean sourceNameBean) {
        // is this feature enabled?
        if (!copyCollections)
            return;

        // if the source bean has an instance associated with it (otherwise we ignore it)...
        if (sourceNameBean.getInstance() != null) {
            // if target bean map object is null create one and set it back into the target bean...
            createInstanceFromClass(targetNameBean);
            // key/value types are taken from the target field's generic declaration
            final Class<?> targetKeyClass;
            final Class<?> targetValueClass;
            try {
                targetKeyClass = Class.forName(((ParameterizedType) targetNameBean.getField().getGenericType()).getActualTypeArguments()[0].getTypeName());
                targetValueClass = Class.forName(((ParameterizedType) targetNameBean.getField().getGenericType()).getActualTypeArguments()[1].getTypeName());
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
                return;
            }
            Map<?, ?> sourceMap = (Map<?, ?>) sourceNameBean.getInstance();
            Map<?, ?> targetMap = (Map<?, ?>) targetNameBean.getInstance();
            boolean isKeyTerminal = NameUtils.isImmutable(targetKeyClass);
            boolean isValueTerminal = NameUtils.isImmutable(targetValueClass);

            // for each element in the source collection create a new element for the target collection
            // populating it by recursively calling mapObject.  Once the new instance has been mapped,
            // add him to the target collection...
            sourceMap.entrySet().stream().forEach(entry -> {
                try {
                    Object targetKey;
                    if (isKeyTerminal)
                        targetKey = entry.getKey();
                    else {
                        // NOTE(review): unlike values, a null key is not guarded here —
                        // newInstance()+process would run with a null source key; confirm.
                        targetKey = targetKeyClass.newInstance();
                        process(entry.getKey(), targetKey);
                    }
                    Object targetValue;
                    if (isValueTerminal)
                        targetValue = entry.getValue();
                    else {
                        if (entry.getValue() != null) {
                            targetValue = targetValueClass.newInstance();
                            process(entry.getValue(), targetValue);
                        }
                        else
                            targetValue = null;
                    }
                    // reflective put() sidesteps the wildcard-typed map
                    Method put = targetMap.getClass().getDeclaredMethod("put", Object.class, Object.class);
                    put.invoke(targetMap, targetKey, targetValue);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
        }
    }

    /**
     * Copy source field value to target field value but ONLY FOR TERMINAL TYPES!  Class types will eventually get processed.
     * <p>
     * If the source field is a collection then recursively call the process again with the target object.  If the target
     * collection does not have an element one will be created.
     *
     * @param targetInstance target object instance
     * @param targetNameBean target name bean
     * @param sourceNameBean source name bean
     */
    private void copyFieldContents(Object targetInstance, NamePointerBean targetNameBean, NamePointerBean sourceNameBean) {
        // source field is a COLLECTION...
        if (NamePointerBean.NAME_TYPE.COLLECTION.equals(sourceNameBean.getType())) {
            copyCollection(targetNameBean, sourceNameBean);
        }

        // process MAPS...
        else if (NamePointerBean.NAME_TYPE.MAP.equals(sourceNameBean.getType())) {
            copyMap(targetNameBean, sourceNameBean);
        }

        // process ARRAYS...
        else if (NamePointerBean.NAME_TYPE.ARRAY.equals(sourceNameBean.getType())) {
            copyArray(targetNameBean, sourceNameBean);
        }

        // process TERMINAL fields...
        else if (NamePointerBean.NAME_TYPE.TERMINAL.equals(sourceNameBean.getType()) &&
                 NamePointerBean.NAME_TYPE.TERMINAL.equals(targetNameBean.getType())) {
            try {
                sourceNameBean.getField().setAccessible(true);
                targetNameBean.getField().setAccessible(true);

                // we have a field formatter so use it...
                if (fieldFormatters.containsKey(targetNameBean.getName()))
                    targetNameBean.getField().set(targetInstance, fieldFormatters.get(targetNameBean.getName()).format(sourceNameBean.getInstance()));
                else
                    targetNameBean.getField().set(targetInstance, sourceNameBean.getInstance());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
/*-
 * Copyright 2015 Skymind,Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.nd4j.linalg.api.ops.executioner;

import org.bytedeco.javacpp.Pointer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.*;
import org.nd4j.linalg.api.ops.aggregates.Aggregate;
import org.nd4j.linalg.api.ops.aggregates.Batch;
import org.nd4j.linalg.api.ops.impl.accum.Variance;
import org.nd4j.linalg.api.rng.Random;
import org.nd4j.linalg.cache.TADManager;

import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * An operation executioner handles storage specific details of
 * executing an operation.
 *
 * @author Adam Gibson
 */
public interface OpExecutioner {

    /** Execution backend: pure-JVM code or native (off-heap) code. */
    enum ExecutionMode {
        JAVA, NATIVE
    }

    /**
     * Profiling / validation modes. The *_PANIC modes abort on NaN/Inf
     * values; OPERATIONS/METHODS/ALL collect timing information;
     * SCOPE_PANIC validates workspace scope usage.
     */
    enum ProfilingMode {
        DISABLED, NAN_PANIC, INF_PANIC, ANY_PANIC, OPERATIONS, METHODS, ALL, SCOPE_PANIC
    }

    /**
     * This method returns the opName of the last invoked op.
     *
     * @return name of the last executed operation
     */
    String getLastOp();

    /**
     * Execute the operation.
     *
     * @param op the operation to execute
     */
    Op exec(Op op);

    /**
     * Iterate over every row of every slice.
     *
     * @param op the operation to apply
     */
    void iterateOverAllRows(Op op);

    /**
     * Iterate over every column of every slice.
     *
     * @param op the operation to apply
     */
    void iterateOverAllColumns(Op op);

    /**
     * Execute a TransformOp and return the result.
     *
     * @param op the operation to execute
     */
    INDArray execAndReturn(TransformOp op);

    /**
     * Execute and return the result from an accumulation.
     *
     * @param op the operation to execute
     * @return the accumulated result
     */
    Accumulation execAndReturn(Accumulation op);

    /**
     * Execute and return the result from an accumulation.
     *
     * @param op the operation to execute
     * @param biasCorrected whether to apply bias correction (e.g. divide by n-1)
     * @return the accumulated result
     */
    Accumulation execAndReturn(Variance op, boolean biasCorrected);

    /**
     * Execute and return the result from an index accumulation.
     *
     * @param op the index accumulation operation to execute
     * @return the accumulated index
     */
    IndexAccumulation execAndReturn(IndexAccumulation op);

    /**
     * Execute and return the result from a scalar op.
     *
     * @param op the operation to execute
     * @return the accumulated result
     */
    INDArray execAndReturn(ScalarOp op);

    /**
     * Execute and return the result from a vector (broadcast) op.
     *
     * @param op the operation to execute
     */
    INDArray execAndReturn(BroadcastOp op);

    /**
     * Execute and return the result from a shape op.
     *
     * @param op the operation to execute
     */
    INDArray execAndReturn(ShapeOp op);

    /**
     * Execute the operation along 1 or more dimensions.
     *
     * @param op the operation to execute
     */
    Op exec(Op op, int... dimension);

    /**
     * Execute an accumulation along one or more dimensions.
     *
     * @param accumulation the accumulation
     * @param dimension the dimension
     * @return the accumulation op
     */
    INDArray exec(Accumulation accumulation, int... dimension);

    /**
     * Execute a broadcast along one or more dimensions.
     *
     * @param broadcast the broadcast op
     * @param dimension the dimension
     * @return the broadcast op
     */
    INDArray exec(BroadcastOp broadcast, int... dimension);

    /**
     * Execute a variance accumulation along one or more dimensions.
     *
     * @param accumulation the accumulation
     * @param biasCorrected whether to apply bias correction
     * @param dimension the dimension
     * @return the accumulation op
     */
    INDArray exec(Variance accumulation, boolean biasCorrected, int... dimension);

    /**
     * Execute an index accumulation along one or more dimensions.
     *
     * @param indexAccum the index accumulation operation
     * @param dimension the dimension/s to execute along
     * @return result
     */
    INDArray exec(IndexAccumulation indexAccum, int... dimension);

    /**
     * Execute and return a result ndarray from the given op.
     *
     * @param op the operation to execute
     * @return the result from the operation
     */
    INDArray execAndReturn(Op op);

    /**
     * Get the execution mode for this executioner.
     *
     * @return the execution mode for this executioner
     */
    ExecutionMode executionMode();

    /**
     * Set the execution mode.
     *
     * @param executionMode the execution mode
     */
    void setExecutionMode(ExecutionMode executionMode);

    /**
     * Execute a MetaOp.
     *
     * @param op the op to execute
     */
    void exec(MetaOp op);

    /**
     * Execute a GridOp.
     *
     * @param op the op to execute
     */
    void exec(GridOp op);

    /**
     * Execute an aggregate op.
     *
     * @param op the op to execute
     */
    void exec(Aggregate op);

    /**
     * Execute a shape op.
     *
     * @param op the op to execute
     */
    void exec(ShapeOp op);

    /**
     * This method executes a previously built batch.
     *
     * @param batch the batch of aggregates to execute
     */
    <T extends Aggregate> void exec(Batch<T> batch);

    /**
     * This method takes an arbitrary sized list of aggregates,
     * and packs them into batches before execution.
     *
     * @param batch the aggregates to execute
     */
    void exec(List<Aggregate> batch);

    /**
     * This method executes the specified RandomOp using the default RNG
     * available via Nd4j.getRandom().
     *
     * @param op the random op to execute
     */
    INDArray exec(RandomOp op);

    /**
     * This method executes a specific RandomOp against the specified RNG.
     *
     * @param op the random op to execute
     * @param rng the random number generator to use
     */
    INDArray exec(RandomOp op, Random rng);

    /**
     * This method returns a set of key/value and key/key/value objects
     * describing the current environment.
     *
     * @return environment properties
     */
    Properties getEnvironmentInformation();

    /**
     * This method specifies the desired profiling mode.
     *
     * @param mode the profiling mode to use
     */
    void setProfilingMode(ProfilingMode mode);

    /**
     * This method returns the current profiling mode.
     *
     * @return the active profiling mode
     */
    ProfilingMode getProfilingMode();

    /**
     * This method returns the TADManager instance used for this OpExecutioner.
     *
     * @return the TAD (tensor-along-dimension) manager
     */
    TADManager getTADManager();

    /**
     * This method prints out environmental information returned by
     * the getEnvironmentInformation() method.
     */
    void printEnvironmentInformation();

    /**
     * This method ensures all operations that are supposed to be executed
     * at this moment are executed (asynchronously; does not wait for completion).
     */
    void push();

    /**
     * This method ensures all operations that are supposed to be executed
     * at this moment are executed and finished.
     */
    void commit();

    /**
     * This method encodes an array as thresholds, updating the input array
     * at the same time.
     *
     * @param input array to encode (modified in place)
     * @param threshold encoding threshold
     * @return encoded array is returned
     */
    INDArray thresholdEncode(INDArray input, double threshold);

    /**
     * This method encodes an array as thresholds, updating the input array
     * at the same time.
     *
     * @param input array to encode (modified in place)
     * @param threshold encoding threshold
     * @param boundary maximum number of elements to encode; null for no limit
     *        — assumed from the overload above, confirm against implementations
     * @return encoded array is returned
     */
    INDArray thresholdEncode(INDArray input, double threshold, Integer boundary);

    /**
     * This method decodes a thresholds array, and puts it into the target array.
     *
     * @param encoded encoded array
     * @param target array to write decoded values into
     * @return target is returned
     */
    INDArray thresholdDecode(INDArray encoded, INDArray target);

    /**
     * This method returns the number of elements affected by the encoder.
     *
     * @param indArray array to encode
     * @param target array receiving the bitmap encoding
     * @param threshold encoding threshold
     * @return number of elements affected
     */
    long bitmapEncode(INDArray indArray, INDArray target, double threshold);

    /**
     * Bitmap-encode the given array into a newly allocated array.
     *
     * @param indArray array to encode
     * @param threshold encoding threshold
     * @return the encoded array
     */
    INDArray bitmapEncode(INDArray indArray, double threshold);

    /**
     * Decode a bitmap-encoded array into the target array.
     *
     * @param encoded encoded array
     * @param target array to write decoded values into
     * @return target is returned
     */
    INDArray bitmapDecode(INDArray encoded, INDArray target);

    /**
     * This method returns names of all custom operations available in the
     * current backend, and their number of input/output arguments.
     *
     * @return map from op name to its descriptor
     */
    Map<String, CustomOpDescriptor> getCustomOperations();

    /**
     * This method executes the given CustomOp.
     *
     * PLEASE NOTE: You're responsible for input/output validation.
     *
     * @param op the custom op to execute
     */
    void exec(CustomOp op);

    /** Compute the output shape(s) the given custom op would produce. */
    List<int[]> calculateOutputShape(CustomOp op);

    /** Enable or disable backend debug mode. */
    void enableDebugMode(boolean reallyEnable);

    /** Enable or disable backend verbose logging. */
    void enableVerboseMode(boolean reallyEnable);

    /** Register a native graph under the given id for later execution. */
    void registerGraph(long id, Pointer graph);

    /**
     * Execute a previously registered graph.
     *
     * @param id id the graph was registered under
     * @param map graph inputs keyed by placeholder name
     * @return graph outputs keyed by name
     */
    Map<String, INDArray> executeGraph(long id, Map<String, INDArray> map);

    /** Release a previously registered graph. */
    void forgetGraph(long id);

    /**
     * This method allows to set the desired number of elements per thread,
     * for performance optimization purposes. I.e. if an array contains 2048
     * elements, and the threshold is set to 1024, 2 threads will be used for
     * the given op execution.
     *
     * Default value: 1024
     *
     * @param threshold elements per thread
     */
    void setElementsThreshold(int threshold);

    /**
     * This method allows to set the desired number of sub-arrays per thread,
     * for performance optimization purposes. I.e. if a matrix has a shape of
     * 64 x 128, and the threshold is set to 8, each thread will be processing
     * 8 sub-arrays (sure, if you have an 8 core cpu). If your cpu has, say, 4
     * cores, only 4 threads will be spawned, and each will process 16 sub-arrays.
     *
     * Default value: 8
     *
     * @param threshold sub-arrays per thread
     */
    void setTadThreshold(int threshold);
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.compiler.ant;

import com.intellij.application.options.ReplacePathToMacroMap;
import com.intellij.compiler.ModuleCompilerUtil;
import com.intellij.openapi.application.PathMacros;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.CompilerModuleExtension;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.util.Chunk;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.graph.CachingSemiGraph;
import com.intellij.util.graph.Graph;
import com.intellij.util.graph.GraphGenerator;
import com.intellij.util.graph.InboundSemiGraph;

import java.io.File;
import java.util.*;

/**
 * Implementation class for Ant generation options.
 *
 * @author Eugene Zhuravlev
 */
public class GenerationOptionsImpl extends GenerationOptions {

  /** from absolute path to macro substitutions */
  private final ReplacePathToMacroMap myMacroReplacementMap;
  /** from absolute output URL to a property reference for the Ant build */
  private final Map<String, String> myOutputUrlToPropertyRefMap;
  /** module chunks, sorted by dependency level */
  private final ModuleChunk[] myModuleChunks;
  /** the project to be converted */
  private final Project myProject;
  private final boolean myGenerateIdeaHomeProperty;
  private final String myOutputFileName;
  /** lazily computed set of classpath root URLs over all registered JDKs */
  private Set<String> myJdkUrls;
  /** Custom compilers used in the ant build. */
  private final Set<ChunkCustomCompilerExtension> myCustomCompilers = new HashSet<>();
  /** map from modules to chunks; filled lazily in {@link #getChunkByModule(Module)} */
  private final Map<Module, ModuleChunk> myModuleToChunkMap = new HashMap<>();

  /**
   * A constructor
   *
   * @param project                        a project to generate
   * @param generateSingleFile             a value of corresponding option
   * @param enableFormCompiler             a value of corresponding option
   * @param backupPreviouslyGeneratedFiles a value of corresponding option
   * @param forceTargetJdk                 a value of corresponding option
   * @param inlineRuntimeClasspath         if true, runtime classpaths are inlined
   * @param generateIdeaHomeProperty       if true, the idea.home property is generated
   * @param representativeModuleNames      module names that represent module chunks
   * @param outputFileName                 a name for the output file
   */
  public GenerationOptionsImpl(Project project,
                               boolean generateSingleFile,
                               boolean enableFormCompiler,
                               boolean backupPreviouslyGeneratedFiles,
                               boolean forceTargetJdk,
                               boolean inlineRuntimeClasspath,
                               boolean generateIdeaHomeProperty,
                               String[] representativeModuleNames,
                               String outputFileName) {
    super(forceTargetJdk, generateSingleFile, enableFormCompiler, backupPreviouslyGeneratedFiles, inlineRuntimeClasspath);
    myProject = project;
    myGenerateIdeaHomeProperty = generateIdeaHomeProperty;
    myOutputFileName = outputFileName;
    myMacroReplacementMap = createReplacementMap();
    myModuleChunks = createModuleChunks(representativeModuleNames);
    myOutputUrlToPropertyRefMap = createOutputUrlToPropertyRefMap(myModuleChunks);
  }

  /**
   * A constructor
   *
   * @param project                        a project to generate
   * @param generateSingleFile             a value of corresponding option
   * @param enableFormCompiler             a value of corresponding option
   * @param backupPreviouslyGeneratedFiles a value of corresponding option
   * @param forceTargetJdk                 a value of corresponding option
   * @param representativeModuleNames      module names that represent module chunks
   */
  @Deprecated
  public GenerationOptionsImpl(Project project,
                               boolean generateSingleFile,
                               boolean enableFormCompiler,
                               boolean backupPreviouslyGeneratedFiles,
                               boolean forceTargetJdk,
                               String[] representativeModuleNames) {
    // BUGFIX: the boolean arguments were previously passed in the wrong order
    // (forceTargetJdk was delegated into the generateSingleFile slot, rotating
    // all four flags). Delegate in the primary constructor's declared order.
    this(project, generateSingleFile, enableFormCompiler, backupPreviouslyGeneratedFiles, forceTargetJdk, false, false,
         representativeModuleNames, null);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean isIdeaHomeGenerated() {
    return myGenerateIdeaHomeProperty;
  }

  /** @return the Ant build file name, e.g. {@code <project>.xml} */
  public String getBuildFileName() {
    return getOutputFileName() + ".xml";
  }

  /** @return the Ant properties file name, e.g. {@code <project>.properties} */
  public String getPropertiesFileName() {
    return getOutputFileName() + ".properties";
  }

  /** @return the configured base output name, or a project-derived default when unset */
  private String getOutputFileName() {
    if (myOutputFileName == null || myOutputFileName.length() == 0) {
      return BuildProperties.getProjectBuildFileName(myProject);
    }
    return myOutputFileName;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public ModuleChunk getChunkByModule(final Module module) {
    // lazily build the module->chunk index on first use
    if (myModuleToChunkMap.isEmpty()) {
      for (ModuleChunk c : myModuleChunks) {
        for (Module m : c.getModules()) {
          myModuleToChunkMap.put(m, c);
        }
      }
    }
    return myModuleToChunkMap.get(module);
  }

  @Override
  public String subsitutePathWithMacros(String path) {
    return myMacroReplacementMap.substitute(path, SystemInfo.isFileSystemCaseSensitive);
  }

  /** @return the Ant property reference registered for the given output URL, or null */
  public String getPropertyRefForUrl(String url) {
    return myOutputUrlToPropertyRefMap.get(url);
  }

  /** Builds the map replacing absolute paths with references to path-macro properties. */
  private static ReplacePathToMacroMap createReplacementMap() {
    final PathMacros pathMacros = PathMacros.getInstance();
    final Set<String> macroNames = pathMacros.getUserMacroNames();
    final ReplacePathToMacroMap map = new ReplacePathToMacroMap();
    for (final String macroName : macroNames) {
      map.put(GenerationUtils.normalizePath(pathMacros.getValue(macroName)),
              BuildProperties.propertyRef(BuildProperties.getPathMacroProperty(macroName)));
    }
    map.put(GenerationUtils.normalizePath(PathManager.getHomePath()),
            BuildProperties.propertyRef(BuildProperties.PROPERTY_IDEA_HOME));
    return map;
  }

  /**
   * Maps each module's production and test output URLs to property references
   * of its chunk. Test output is mapped only when it differs from the
   * production output.
   */
  private static Map<String, String> createOutputUrlToPropertyRefMap(ModuleChunk[] chunks) {
    final Map<String, String> map = new HashMap<>();
    for (final ModuleChunk chunk : chunks) {
      final String outputPathRef = BuildProperties.propertyRef(BuildProperties.getOutputPathProperty(chunk.getName()));
      final String testsOutputPathRef = BuildProperties.propertyRef(BuildProperties.getOutputPathForTestsProperty(chunk.getName()));
      final Module[] modules = chunk.getModules();
      for (final Module module : modules) {
        final String outputPathUrl = CompilerModuleExtension.getInstance(module).getCompilerOutputUrl();
        if (outputPathUrl != null) {
          map.put(outputPathUrl, outputPathRef);
        }
        final String outputPathForTestsUrl = CompilerModuleExtension.getInstance(module).getCompilerOutputUrlForTests();
        if (outputPathForTestsUrl != null) {
          if (outputPathUrl == null || !outputPathForTestsUrl.equals(outputPathUrl)) {
            map.put(outputPathForTestsUrl, testsOutputPathRef);
          }
        }
      }
    }
    return map;
  }

  @Override
  public ModuleChunk[] getModuleChunks() {
    return myModuleChunks;
  }

  /**
   * Collapses the project's module graph into strongly-connected chunks,
   * assigns each chunk a representative "main" module, wires chunk
   * dependencies, and sorts the chunks by dependency level.
   */
  private ModuleChunk[] createModuleChunks(String[] representativeModuleNames) {
    final Set<String> mainModuleNames = new HashSet<>(Arrays.asList(representativeModuleNames));
    final Graph<Chunk<Module>> chunkGraph = ModuleCompilerUtil.toChunkGraph(ModuleManager.getInstance(myProject).moduleGraph());
    final Map<Chunk<Module>, ModuleChunk> map = new HashMap<>();
    final Map<ModuleChunk, Chunk<Module>> reverseMap = new HashMap<>();
    for (final Chunk<Module> chunk : chunkGraph.getNodes()) {
      final Set<Module> modules = chunk.getNodes();
      final ModuleChunk moduleChunk = new ModuleChunk(modules.toArray(Module.EMPTY_ARRAY));
      for (final Module module : modules) {
        if (mainModuleNames.contains(module.getName())) {
          moduleChunk.setMainModule(module);
          break;
        }
      }
      map.put(chunk, moduleChunk);
      reverseMap.put(moduleChunk, chunk);
    }
    // Mirror the chunk graph onto ModuleChunk nodes by translating edges through the maps.
    final Graph<ModuleChunk> moduleChunkGraph = GraphGenerator.generate(CachingSemiGraph.cache(new InboundSemiGraph<ModuleChunk>() {
      public Collection<ModuleChunk> getNodes() {
        return map.values();
      }

      public Iterator<ModuleChunk> getIn(ModuleChunk n) {
        final Chunk<Module> chunk = reverseMap.get(n);
        final Iterator<Chunk<Module>> in = chunkGraph.getIn(chunk);
        return new Iterator<ModuleChunk>() {
          public boolean hasNext() {
            return in.hasNext();
          }

          public ModuleChunk next() {
            return map.get(in.next());
          }

          public void remove() {
            throw new IncorrectOperationException("Method is not supported");
          }
        };
      }
    }));
    final Collection<ModuleChunk> nodes = moduleChunkGraph.getNodes();
    final ModuleChunk[] moduleChunks = nodes.toArray(new ModuleChunk[0]);
    for (ModuleChunk moduleChunk : moduleChunks) {
      final Iterator<ModuleChunk> depsIterator = moduleChunkGraph.getIn(moduleChunk);
      List<ModuleChunk> deps = new ArrayList<>();
      while (depsIterator.hasNext()) {
        deps.add(depsIterator.next());
      }
      moduleChunk.setDependentChunks(deps.toArray(new ModuleChunk[0]));
      ContainerUtil.addAll(myCustomCompilers, moduleChunk.getCustomCompilers());
    }
    Arrays.sort(moduleChunks, new ChunksComparator());
    if (generateSingleFile) {
      final File baseDir = BuildProperties.getProjectBaseDir(myProject);
      for (ModuleChunk chunk : moduleChunks) {
        chunk.setBaseDir(baseDir);
      }
    }
    return moduleChunks;
  }

  /**
   * {@inheritDoc}
   */
  public ChunkCustomCompilerExtension[] getCustomCompilers() {
    ChunkCustomCompilerExtension[] sorted = myCustomCompilers.toArray(new ChunkCustomCompilerExtension[0]);
    Arrays.sort(sorted, ChunkCustomCompilerExtension.COMPARATOR);
    return sorted;
  }

  /** @return classpath root URLs of all JDKs registered in the project JDK table (cached) */
  Set<String> getAllJdkUrls() {
    if (myJdkUrls != null) {
      return myJdkUrls;
    }
    final Sdk[] projectJdks = ProjectJdkTable.getInstance().getAllJdks();
    myJdkUrls = new HashSet<>();
    for (Sdk jdk : projectJdks) {
      ContainerUtil.addAll(myJdkUrls, jdk.getRootProvider().getUrls(OrderRootType.CLASSES));
    }
    return myJdkUrls;
  }

  /**
   * Orders chunks by their dependency level (dependencies first); ties are
   * broken by case-insensitive name comparison.
   */
  private static class ChunksComparator implements Comparator<ModuleChunk> {
    /** memoized chunk levels — getChunkLevel is recursive over the dependency graph */
    final Map<ModuleChunk, Integer> myCachedLevels = new HashMap<>();

    @Override
    public int compare(final ModuleChunk o1, final ModuleChunk o2) {
      final int level1 = getChunkLevel(o1);
      final int level2 = getChunkLevel(o2);
      // Integer.compare instead of subtraction: overflow-safe by construction.
      return (level1 == level2) ? o1.getName().compareToIgnoreCase(o2.getName()) : Integer.compare(level1, level2);
    }

    /** @return 0 for chunks without dependencies, otherwise 1 + max level of its dependencies */
    private int getChunkLevel(ModuleChunk chunk) {
      Integer level = myCachedLevels.get(chunk);
      if (level == null) {
        final ModuleChunk[] chunks = chunk.getDependentChunks();
        if (chunks.length > 0) {
          int maxLevel = 0;
          for (ModuleChunk dependent : chunks) {
            maxLevel = Math.max(maxLevel, getChunkLevel(dependent));
          }
          level = 1 + maxLevel;
        }
        else {
          level = 0;
        }
        myCachedLevels.put(chunk, level);
      }
      return level.intValue();
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.type;

import com.facebook.presto.operator.scalar.FunctionAssertions;
import com.facebook.presto.spi.type.SqlIntervalDayTime;
import com.facebook.presto.spi.type.Type;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.IntervalDayTimeType.INTERVAL_DAY_TIME;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;

/**
 * Tests the {@code INTERVAL ... DAY TO SECOND} SQL type: literal parsing at
 * every field precision (DAY..SECOND), arithmetic operators, comparison
 * operators, BETWEEN, and the cast to varchar.
 */
public class TestIntervalDayTime
{
    private FunctionAssertions functionAssertions;

    // Shared evaluation harness; created once for the whole class.
    @BeforeClass
    public void setUp()
    {
        functionAssertions = new FunctionAssertions();
    }

    // Evaluates the SQL projection and asserts both its result type and value.
    private void assertFunction(String projection, Type expectedType, Object expected)
    {
        functionAssertions.assertFunction(projection, expectedType, expected);
    }

    // Literals for each start/end field pair; trailing fields default to zero,
    // and fractional seconds are padded to milliseconds (".12" -> 120 ms).
    @Test
    public void testLiteral()
            throws Exception
    {
        assertFunction("INTERVAL '12 10:45:32.123' DAY TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 45, 32, 123));
        assertFunction("INTERVAL '12 10:45:32.12' DAY TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 45, 32, 120));
        assertFunction("INTERVAL '12 10:45:32' DAY TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 45, 32, 0));
        assertFunction("INTERVAL '12 10:45' DAY TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 45, 0, 0));
        assertFunction("INTERVAL '12 10' DAY TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 0, 0, 0));
        assertFunction("INTERVAL '12' DAY TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 0, 0, 0, 0));

        assertFunction("INTERVAL '12 10:45' DAY TO MINUTE", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 45, 0, 0));
        assertFunction("INTERVAL '12 10' DAY TO MINUTE", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 0, 0, 0));
        assertFunction("INTERVAL '12' DAY TO MINUTE", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 0, 0, 0, 0));

        assertFunction("INTERVAL '12 10' DAY TO HOUR", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 10, 0, 0, 0));
        assertFunction("INTERVAL '12' DAY TO HOUR", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 0, 0, 0, 0));

        assertFunction("INTERVAL '12' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12, 0, 0, 0, 0));

        assertFunction("INTERVAL '10:45:32.123' HOUR TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 45, 32, 123));
        assertFunction("INTERVAL '10:45:32.12' HOUR TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 45, 32, 120));
        assertFunction("INTERVAL '10:45:32' HOUR TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 45, 32, 0));
        assertFunction("INTERVAL '10:45' HOUR TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 45, 0, 0));
        assertFunction("INTERVAL '10' HOUR TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 0, 0, 0));

        assertFunction("INTERVAL '10:45' HOUR TO MINUTE", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 45, 0, 0));
        assertFunction("INTERVAL '10' HOUR TO MINUTE", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 0, 0, 0));

        assertFunction("INTERVAL '10' HOUR", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 10, 0, 0, 0));

        assertFunction("INTERVAL '45:32.123' MINUTE TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 45, 32, 123));
        assertFunction("INTERVAL '45:32.12' MINUTE TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 45, 32, 120));
        assertFunction("INTERVAL '45:32' MINUTE TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 45, 32, 0));
        assertFunction("INTERVAL '45' MINUTE TO SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 45, 0, 0));

        assertFunction("INTERVAL '45' MINUTE", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 45, 0, 0));

        assertFunction("INTERVAL '32.123' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 0, 32, 123));
        assertFunction("INTERVAL '32.12' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 0, 32, 120));
        assertFunction("INTERVAL '32' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(0, 0, 0, 32, 0));
    }

    // Expected values below are expressed in milliseconds, the interval's
    // internal representation.
    @Test
    public void testAdd()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND + INTERVAL '3' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(6 * 1000));
        assertFunction("INTERVAL '6' DAY + INTERVAL '6' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12 * 24 * 60 * 60 * 1000));
        assertFunction("INTERVAL '3' SECOND + INTERVAL '6' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime((6 * 24 * 60 * 60 * 1000) + (3 * 1000)));
    }

    @Test
    public void testSubtract()
            throws Exception
    {
        assertFunction("INTERVAL '6' SECOND - INTERVAL '3' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(3 * 1000));
        assertFunction("INTERVAL '9' DAY - INTERVAL '6' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime(3 * 24 * 60 * 60 * 1000));
        assertFunction("INTERVAL '3' SECOND - INTERVAL '6' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime((3 * 1000) - (6 * 24 * 60 * 60 * 1000)));
    }

    // Multiplication is commutative and supports both integer and fractional factors.
    @Test
    public void testMultiply()
            throws Exception
    {
        assertFunction("INTERVAL '6' SECOND * 2", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12 * 1000));
        assertFunction("2 * INTERVAL '6' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12 * 1000));
        assertFunction("INTERVAL '1' SECOND * 2.5", INTERVAL_DAY_TIME, new SqlIntervalDayTime(2500));
        assertFunction("2.5 * INTERVAL '1' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(2500));

        assertFunction("INTERVAL '6' DAY * 2", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12 * 24 * 60 * 60 * 1000));
        assertFunction("2 * INTERVAL '6' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime(12 * 24 * 60 * 60 * 1000));
        assertFunction("INTERVAL '1' DAY * 2.5", INTERVAL_DAY_TIME, new SqlIntervalDayTime((long) (2.5 * 24 * 60 * 60 * 1000)));
        assertFunction("2.5 * INTERVAL '1' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime((long) (2.5 * 24 * 60 * 60 * 1000)));
    }

    @Test
    public void testDivide()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND / 2", INTERVAL_DAY_TIME, new SqlIntervalDayTime(1500));
        assertFunction("INTERVAL '6' SECOND / 2.5", INTERVAL_DAY_TIME, new SqlIntervalDayTime(2400));

        assertFunction("INTERVAL '3' DAY / 2", INTERVAL_DAY_TIME, new SqlIntervalDayTime((long) (1.5 * 24 * 60 * 60 * 1000)));
        assertFunction("INTERVAL '4' DAY / 2.5", INTERVAL_DAY_TIME, new SqlIntervalDayTime((long) (1.6 * 24 * 60 * 60 * 1000)));
    }

    @Test
    public void testNegation()
            throws Exception
    {
        assertFunction("- INTERVAL '3' SECOND", INTERVAL_DAY_TIME, new SqlIntervalDayTime(-3 * 1000));
        assertFunction("- INTERVAL '6' DAY", INTERVAL_DAY_TIME, new SqlIntervalDayTime(-6 * 24 * 60 * 60 * 1000));
    }

    @Test
    public void testEqual()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND = INTERVAL '3' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY = INTERVAL '6' DAY", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND = INTERVAL '4' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '7' DAY = INTERVAL '6' DAY", BOOLEAN, false);
    }

    @Test
    public void testNotEqual()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND <> INTERVAL '4' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY <> INTERVAL '7' DAY", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND <> INTERVAL '3' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY <> INTERVAL '6' DAY", BOOLEAN, false);
    }

    @Test
    public void testLessThan()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND < INTERVAL '4' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY < INTERVAL '7' DAY", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND < INTERVAL '3' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '3' SECOND < INTERVAL '2' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY < INTERVAL '6' DAY", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY < INTERVAL '5' DAY", BOOLEAN, false);
    }

    @Test
    public void testLessThanOrEqual()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND <= INTERVAL '4' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '3' SECOND <= INTERVAL '3' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY <= INTERVAL '6' DAY", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY <= INTERVAL '7' DAY", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND <= INTERVAL '2' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY <= INTERVAL '5' DAY", BOOLEAN, false);
    }

    @Test
    public void testGreaterThan()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND > INTERVAL '2' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY > INTERVAL '5' DAY", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND > INTERVAL '3' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '3' SECOND > INTERVAL '4' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY > INTERVAL '6' DAY", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY > INTERVAL '7' DAY", BOOLEAN, false);
    }

    @Test
    public void testGreaterThanOrEqual()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND >= INTERVAL '2' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '3' SECOND >= INTERVAL '3' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY >= INTERVAL '5' DAY", BOOLEAN, true);
        assertFunction("INTERVAL '6' DAY >= INTERVAL '6' DAY", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND >= INTERVAL '4' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '6' DAY >= INTERVAL '7' DAY", BOOLEAN, false);
    }

    // BETWEEN bounds are inclusive; a reversed range is always false.
    @Test
    public void testBetween()
            throws Exception
    {
        assertFunction("INTERVAL '3' SECOND between INTERVAL '2' SECOND and INTERVAL '4' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '3' SECOND between INTERVAL '3' SECOND and INTERVAL '4' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '3' SECOND between INTERVAL '2' SECOND and INTERVAL '3' SECOND", BOOLEAN, true);
        assertFunction("INTERVAL '3' SECOND between INTERVAL '3' SECOND and INTERVAL '3' SECOND", BOOLEAN, true);

        assertFunction("INTERVAL '3' SECOND between INTERVAL '4' SECOND and INTERVAL '5' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '3' SECOND between INTERVAL '1' SECOND and INTERVAL '2' SECOND", BOOLEAN, false);
        assertFunction("INTERVAL '3' SECOND between INTERVAL '4' SECOND and INTERVAL '2' SECOND", BOOLEAN, false);
    }

    // The varchar rendering must match SqlIntervalDayTime.toString() for
    // every literal precision accepted in testLiteral().
    @Test
    public void testCastToSlice()
            throws Exception
    {
        assertFunction("cast(INTERVAL '12 10:45:32.123' DAY TO SECOND as varchar)", VARCHAR, "12 10:45:32.123");
        assertFunction("cast(INTERVAL '12 10:45:32.123' DAY TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 45, 32, 123).toString());
        assertFunction("cast(INTERVAL '12 10:45:32.12' DAY TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 45, 32, 120).toString());
        assertFunction("cast(INTERVAL '12 10:45:32' DAY TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 45, 32, 0).toString());
        assertFunction("cast(INTERVAL '12 10:45' DAY TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 45, 0, 0).toString());
        assertFunction("cast(INTERVAL '12 10' DAY TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 0, 0, 0).toString());
        assertFunction("cast(INTERVAL '12' DAY TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(12, 0, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '12 10:45' DAY TO MINUTE as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 45, 0, 0).toString());
        assertFunction("cast(INTERVAL '12 10' DAY TO MINUTE as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 0, 0, 0).toString());
        assertFunction("cast(INTERVAL '12' DAY TO MINUTE as varchar)", VARCHAR, new SqlIntervalDayTime(12, 0, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '12 10' DAY TO HOUR as varchar)", VARCHAR, new SqlIntervalDayTime(12, 10, 0, 0, 0).toString());
        assertFunction("cast(INTERVAL '12' DAY TO HOUR as varchar)", VARCHAR, new SqlIntervalDayTime(12, 0, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '12' DAY as varchar)", VARCHAR, new SqlIntervalDayTime(12, 0, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '10:45:32.123' HOUR TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 45, 32, 123).toString());
        assertFunction("cast(INTERVAL '10:45:32.12' HOUR TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 45, 32, 120).toString());
        assertFunction("cast(INTERVAL '10:45:32' HOUR TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 45, 32, 0).toString());
        assertFunction("cast(INTERVAL '10:45' HOUR TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 45, 0, 0).toString());
        assertFunction("cast(INTERVAL '10' HOUR TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '10:45' HOUR TO MINUTE as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 45, 0, 0).toString());
        assertFunction("cast(INTERVAL '10' HOUR TO MINUTE as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '10' HOUR as varchar)", VARCHAR, new SqlIntervalDayTime(0, 10, 0, 0, 0).toString());

        assertFunction("cast(INTERVAL '45:32.123' MINUTE TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 45, 32, 123).toString());
        assertFunction("cast(INTERVAL '45:32.12' MINUTE TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 45, 32, 120).toString());
        assertFunction("cast(INTERVAL '45:32' MINUTE TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 45, 32, 0).toString());
        assertFunction("cast(INTERVAL '45' MINUTE TO SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 45, 0, 0).toString());

        assertFunction("cast(INTERVAL '45' MINUTE as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 45, 0, 0).toString());

        assertFunction("cast(INTERVAL '32.123' SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 0, 32, 123).toString());
        assertFunction("cast(INTERVAL '32.12' SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 0, 32, 120).toString());
        assertFunction("cast(INTERVAL '32' SECOND as varchar)", VARCHAR, new SqlIntervalDayTime(0, 0, 0, 32, 0).toString());
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.costexplorer.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Cost Explorer <code>GetCostForecast</code> operation, which returns a forecast of your costs
 * over a requested time period.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetCostForecast" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetCostForecastRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The period of time that the forecast covers. The start date must be equal to or no later than the current date,
     * or the service rejects the request with a validation error.
     */
    private DateInterval timePeriod;

    /**
     * The metric Cost Explorer uses to create the forecast. Valid values for <code>GetCostForecast</code>:
     * <code>AMORTIZED_COST</code>, <code>BLENDED_COST</code>, <code>NET_AMORTIZED_COST</code>,
     * <code>NET_UNBLENDED_COST</code>, <code>UNBLENDED_COST</code>. See <a
     * href="http://aws.amazon.com/premiumsupport/knowledge-center/blended-rates-intro/">Why does the "blended"
     * annotation appear on some line items in my bill?</a> for background on blended vs. unblended rates.
     */
    private String metric;

    /**
     * The forecast granularity. <code>GetCostForecast</code> supports only <code>DAILY</code> (up to 3 months of
     * forecasts) and <code>MONTHLY</code> (up to 12 months of forecasts).
     */
    private String granularity;

    /**
     * Optional filter applied to the forecast. <code>GetCostForecast</code> supports filtering by these dimensions:
     * <code>AZ</code>, <code>INSTANCE_TYPE</code>, <code>LINKED_ACCOUNT</code>, <code>LINKED_ACCOUNT_NAME</code>,
     * <code>OPERATION</code>, <code>PURCHASE_TYPE</code>, <code>REGION</code>, <code>SERVICE</code>,
     * <code>USAGE_TYPE</code>, <code>USAGE_TYPE_GROUP</code>, <code>RECORD_TYPE</code>, <code>OPERATING_SYSTEM</code>,
     * <code>TENANCY</code>, <code>SCOPE</code>, <code>PLATFORM</code>, <code>SUBSCRIPTION_ID</code>,
     * <code>LEGAL_ENTITY_NAME</code>, <code>DEPLOYMENT_OPTION</code>, <code>DATABASE_ENGINE</code>,
     * <code>INSTANCE_TYPE_FAMILY</code>, <code>BILLING_ENTITY</code>, <code>RESERVATION_ID</code>,
     * <code>SAVINGS_PLAN_ARN</code>.
     */
    private Expression filter;

    /**
     * Optional confidence level for a prediction interval around the mean forecast. Cost Explorer always returns the
     * mean as a single point; a higher confidence level yields a wider prediction interval.
     */
    private Integer predictionIntervalLevel;

    /**
     * Sets the period of time that the forecast covers. The start date must be equal to or no later than the current
     * date to avoid a validation error.
     *
     * @param timePeriod
     *        The forecast time period.
     */
    public void setTimePeriod(DateInterval timePeriod) {
        this.timePeriod = timePeriod;
    }

    /**
     * Returns the period of time that the forecast covers.
     *
     * @return The forecast time period.
     */
    public DateInterval getTimePeriod() {
        return this.timePeriod;
    }

    /**
     * Fluent variant of {@link #setTimePeriod(DateInterval)}.
     *
     * @param timePeriod
     *        The forecast time period.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCostForecastRequest withTimePeriod(DateInterval timePeriod) {
        setTimePeriod(timePeriod);
        return this;
    }

    /**
     * Sets the metric Cost Explorer uses to create the forecast. Valid values: <code>AMORTIZED_COST</code>,
     * <code>BLENDED_COST</code>, <code>NET_AMORTIZED_COST</code>, <code>NET_UNBLENDED_COST</code>,
     * <code>UNBLENDED_COST</code>.
     *
     * @param metric
     *        The forecast metric.
     * @see Metric
     */
    public void setMetric(String metric) {
        this.metric = metric;
    }

    /**
     * Returns the metric Cost Explorer uses to create the forecast.
     *
     * @return The forecast metric.
     * @see Metric
     */
    public String getMetric() {
        return this.metric;
    }

    /**
     * Fluent variant of {@link #setMetric(String)}.
     *
     * @param metric
     *        The forecast metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Metric
     */
    public GetCostForecastRequest withMetric(String metric) {
        setMetric(metric);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setMetric(String)}; stores the enum's string form.
     *
     * @param metric
     *        The forecast metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Metric
     */
    public GetCostForecastRequest withMetric(Metric metric) {
        this.metric = metric.toString();
        return this;
    }

    /**
     * Sets the forecast granularity. Only <code>DAILY</code> (up to 3 months) and <code>MONTHLY</code> (up to 12
     * months) are supported by <code>GetCostForecast</code>.
     *
     * @param granularity
     *        The forecast granularity.
     * @see Granularity
     */
    public void setGranularity(String granularity) {
        this.granularity = granularity;
    }

    /**
     * Returns the forecast granularity.
     *
     * @return The forecast granularity.
     * @see Granularity
     */
    public String getGranularity() {
        return this.granularity;
    }

    /**
     * Fluent variant of {@link #setGranularity(String)}.
     *
     * @param granularity
     *        The forecast granularity.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Granularity
     */
    public GetCostForecastRequest withGranularity(String granularity) {
        setGranularity(granularity);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setGranularity(String)}; stores the enum's string form.
     *
     * @param granularity
     *        The forecast granularity.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Granularity
     */
    public GetCostForecastRequest withGranularity(Granularity granularity) {
        this.granularity = granularity.toString();
        return this;
    }

    /**
     * Sets the filter applied to the forecast. See the {@link #filter} field documentation for the supported
     * dimensions.
     *
     * @param filter
     *        The forecast filter expression.
     */
    public void setFilter(Expression filter) {
        this.filter = filter;
    }

    /**
     * Returns the filter applied to the forecast.
     *
     * @return The forecast filter expression.
     */
    public Expression getFilter() {
        return this.filter;
    }

    /**
     * Fluent variant of {@link #setFilter(Expression)}.
     *
     * @param filter
     *        The forecast filter expression.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCostForecastRequest withFilter(Expression filter) {
        setFilter(filter);
        return this;
    }

    /**
     * Sets the confidence level for the prediction interval returned around the mean forecast. Higher confidence
     * levels result in wider prediction intervals.
     *
     * @param predictionIntervalLevel
     *        The prediction-interval confidence level.
     */
    public void setPredictionIntervalLevel(Integer predictionIntervalLevel) {
        this.predictionIntervalLevel = predictionIntervalLevel;
    }

    /**
     * Returns the confidence level for the prediction interval.
     *
     * @return The prediction-interval confidence level.
     */
    public Integer getPredictionIntervalLevel() {
        return this.predictionIntervalLevel;
    }

    /**
     * Fluent variant of {@link #setPredictionIntervalLevel(Integer)}.
     *
     * @param predictionIntervalLevel
     *        The prediction-interval confidence level.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCostForecastRequest withPredictionIntervalLevel(Integer predictionIntervalLevel) {
        setPredictionIntervalLevel(predictionIntervalLevel);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("{");
        // Only non-null members are rendered; every member but the last is followed by a comma.
        if (getTimePeriod() != null)
            builder.append("TimePeriod: ").append(getTimePeriod()).append(",");
        if (getMetric() != null)
            builder.append("Metric: ").append(getMetric()).append(",");
        if (getGranularity() != null)
            builder.append("Granularity: ").append(getGranularity()).append(",");
        if (getFilter() != null)
            builder.append("Filter: ").append(getFilter()).append(",");
        if (getPredictionIntervalLevel() != null)
            builder.append("PredictionIntervalLevel: ").append(getPredictionIntervalLevel());
        builder.append("}");
        return builder.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof GetCostForecastRequest))
            return false;
        GetCostForecastRequest other = (GetCostForecastRequest) obj;
        // For each member: unequal when exactly one side is null, or when both are non-null and differ.
        if ((other.getTimePeriod() == null) != (this.getTimePeriod() == null))
            return false;
        if (other.getTimePeriod() != null && !other.getTimePeriod().equals(this.getTimePeriod()))
            return false;
        if ((other.getMetric() == null) != (this.getMetric() == null))
            return false;
        if (other.getMetric() != null && !other.getMetric().equals(this.getMetric()))
            return false;
        if ((other.getGranularity() == null) != (this.getGranularity() == null))
            return false;
        if (other.getGranularity() != null && !other.getGranularity().equals(this.getGranularity()))
            return false;
        if ((other.getFilter() == null) != (this.getFilter() == null))
            return false;
        if (other.getFilter() != null && !other.getFilter().equals(this.getFilter()))
            return false;
        if ((other.getPredictionIntervalLevel() == null) != (this.getPredictionIntervalLevel() == null))
            return false;
        if (other.getPredictionIntervalLevel() != null
                && !other.getPredictionIntervalLevel().equals(this.getPredictionIntervalLevel()))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation over the same members, in the same order, as equals().
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getTimePeriod() == null) ? 0 : getTimePeriod().hashCode());
        result = prime * result + ((getMetric() == null) ? 0 : getMetric().hashCode());
        result = prime * result + ((getGranularity() == null) ? 0 : getGranularity().hashCode());
        result = prime * result + ((getFilter() == null) ? 0 : getFilter().hashCode());
        result = prime * result + ((getPredictionIntervalLevel() == null) ? 0 : getPredictionIntervalLevel().hashCode());
        return result;
    }

    @Override
    public GetCostForecastRequest clone() {
        return (GetCostForecastRequest) super.clone();
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.commitlog; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; import org.cliffc.high_scale_lib.NonBlockingHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.Schema; import org.apache.cassandra.db.ColumnFamily; import org.apache.cassandra.db.Mutation; import org.apache.cassandra.io.FSWriteError; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.utils.CLibrary; import org.apache.cassandra.utils.PureJavaCrc32; import org.apache.cassandra.utils.concurrent.OpOrder; import org.apache.cassandra.utils.concurrent.WaitQueue; /* * A single commit log file on 
disk. Manages creation of the file and writing mutations to disk,
 * as well as tracking the last mutation position of any "dirty" CFs covered by the segment file. Segment
 * files are initially allocated to a fixed size and can grow to accommodate a larger value if necessary.
 */
public class CommitLogSegment
{
    private static final Logger logger = LoggerFactory.getLogger(CommitLogSegment.class);

    // Base for segment ids: wall-clock millis captured once at class load, combined with a
    // monotonically increasing counter in getNextId() so ids stay unique across restarts.
    private final static long idBase = System.currentTimeMillis();
    private final static AtomicInteger nextId = new AtomicInteger(1);

    // The commit log entry overhead in bytes (int: length + long: head checksum + long: tail checksum)
    static final int ENTRY_OVERHEAD_SIZE = 4 + 8 + 8;

    // The commit log (chained) sync marker/header size in bytes (int: length + long: checksum [segmentId, position])
    static final int SYNC_MARKER_SIZE = 4 + 8;

    // The OpOrder used to order appends wrt sync
    private final OpOrder appendOrder = new OpOrder();

    // Next free byte offset in the mapped buffer; writers reserve space by advancing this atomically.
    private final AtomicInteger allocatePosition = new AtomicInteger();

    // Everything before this offset has been synced and written.  The SYNC_MARKER_SIZE bytes after
    // each sync are reserved, and point forwards to the next such offset.  The final
    // sync marker in a segment will be zeroed out, or point to EOF.
private volatile int lastSyncedOffset; // the amount of the tail of the file we have allocated but not used - this is used when we discard a log segment // to ensure nobody writes to it after we've decided we're done with it private int discardedTailFrom; // a signal for writers to wait on to confirm the log message they provided has been written to disk private final WaitQueue syncComplete = new WaitQueue(); // a map of Cf->dirty position; this is used to permit marking Cfs clean whilst the log is still in use private final NonBlockingHashMap<UUID, AtomicInteger> cfDirty = new NonBlockingHashMap<>(1024); // a map of Cf->clean position; this is used to permit marking Cfs clean whilst the log is still in use private final ConcurrentHashMap<UUID, AtomicInteger> cfClean = new ConcurrentHashMap<>(); public final long id; private final File logFile; private final RandomAccessFile logFileAccessor; private final int fd; private final MappedByteBuffer buffer; public final CommitLogDescriptor descriptor; /** * @return a newly minted segment file */ static CommitLogSegment freshSegment() { return new CommitLogSegment(null); } static long getNextId() { return idBase + nextId.getAndIncrement(); } /** * Constructs a new segment file. * * @param filePath if not null, recycles the existing file by renaming it and truncating it to CommitLog.SEGMENT_SIZE. 
*/ CommitLogSegment(String filePath) { id = getNextId(); descriptor = new CommitLogDescriptor(id); logFile = new File(DatabaseDescriptor.getCommitLogLocation(), descriptor.fileName()); boolean isCreating = true; try { if (filePath != null) { File oldFile = new File(filePath); if (oldFile.exists()) { logger.debug("Re-using discarded CommitLog segment for {} from {}", id, filePath); if (!oldFile.renameTo(logFile)) throw new IOException("Rename from " + filePath + " to " + id + " failed"); isCreating = false; } } // Open the initial the segment file logFileAccessor = new RandomAccessFile(logFile, "rw"); if (isCreating) logger.debug("Creating new commit log segment {}", logFile.getPath()); // Map the segment, extending or truncating it to the standard segment size. // (We may have restarted after a segment size configuration change, leaving "incorrectly" // sized segments on disk.) logFileAccessor.setLength(DatabaseDescriptor.getCommitLogSegmentSize()); fd = CLibrary.getfd(logFileAccessor.getFD()); buffer = logFileAccessor.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, DatabaseDescriptor.getCommitLogSegmentSize()); // mark the initial header as uninitialised buffer.putInt(0, 0); buffer.putLong(4, 0); allocatePosition.set(SYNC_MARKER_SIZE); } catch (IOException e) { throw new FSWriteError(e, logFile); } } /** * allocate space in this buffer for the provided mutation, and populate the provided * Allocation object, returning true on success. 
False indicates there is not enough room in * this segment, and a new segment is needed */ boolean allocate(Mutation mutation, int size, Allocation alloc) { final OpOrder.Group commandOrder = appendOrder.start(); try { int position = allocate(size); if (position < 0) { commandOrder.close(); return false; } alloc.buffer = (ByteBuffer) buffer.duplicate().position(position).limit(position + size); alloc.position = position; alloc.segment = this; alloc.appendOp = commandOrder; markDirty(mutation, position); return true; } catch (Throwable t) { commandOrder.close(); throw t; } } // allocate bytes in the segment, or return -1 if not enough space private int allocate(int size) { while (true) { int prev = allocatePosition.get(); int next = prev + size; if (next >= buffer.capacity()) return -1; if (allocatePosition.compareAndSet(prev, next)) return prev; } } // ensures no more of this segment is writeable, by allocating any unused section at the end and marking it discarded void discardUnusedTail() { // we guard this with the OpOrdering instead of synchronised due to potential dead-lock with CLSM.advanceAllocatingFrom() // this actually isn't strictly necessary, as currently all calls to discardUnusedTail occur within a block // already protected by this OpOrdering, but to prevent future potential mistakes, we duplicate the protection here // so that the contract between discardUnusedTail() and sync() is more explicit. 
try (OpOrder.Group group = appendOrder.start()) { while (true) { int prev = allocatePosition.get(); // we set allocatePosition past buffer.capacity() to make sure we always set discardedTailFrom int next = buffer.capacity() + 1; if (prev == next) return; if (allocatePosition.compareAndSet(prev, next)) { discardedTailFrom = prev; return; } } } } /** * Wait for any appends or discardUnusedTail() operations started before this method was called */ private synchronized void waitForModifications() { // issue a barrier and wait for it OpOrder.Barrier barrier = appendOrder.newBarrier(); barrier.issue(); barrier.await(); } /** * Forces a disk flush for this segment file. */ synchronized void sync() { try { // check we have more work to do if (allocatePosition.get() <= lastSyncedOffset + SYNC_MARKER_SIZE) return; // allocate a new sync marker; this is both necessary in itself, but also serves to demarcate // the point at which we can safely consider records to have been completely written to int nextMarker; nextMarker = allocate(SYNC_MARKER_SIZE); boolean close = false; if (nextMarker < 0) { // ensure no more of this CLS is writeable, and mark ourselves for closing discardUnusedTail(); close = true; // wait for modifications guards both discardedTailFrom, and any outstanding appends waitForModifications(); if (discardedTailFrom < buffer.capacity() - SYNC_MARKER_SIZE) { // if there's room in the discard section to write an empty header, use that as the nextMarker nextMarker = discardedTailFrom; } else { // not enough space left in the buffer, so mark the next sync marker as the EOF position nextMarker = buffer.capacity(); } } else { waitForModifications(); } assert nextMarker > lastSyncedOffset; // write previous sync marker to point to next sync marker // we don't chain the crcs here to ensure this method is idempotent if it fails int offset = lastSyncedOffset; final PureJavaCrc32 crc = new PureJavaCrc32(); crc.update((int) (id & 0xFFFFFFFFL)); crc.update((int) (id >>> 
32)); crc.update(offset); buffer.putInt(offset, nextMarker); buffer.putLong(offset + 4, crc.getValue()); // zero out the next sync marker so replayer can cleanly exit if (nextMarker < buffer.capacity()) { buffer.putInt(nextMarker, 0); buffer.putLong(nextMarker + 4, 0); } // actually perform the sync and signal those waiting for it buffer.force(); if (close) nextMarker = buffer.capacity(); lastSyncedOffset = nextMarker; syncComplete.signalAll(); CLibrary.trySkipCache(fd, offset, nextMarker); if (close) close(); } catch (Exception e) // MappedByteBuffer.force() does not declare IOException but can actually throw it { throw new FSWriteError(e, getPath()); } } public boolean isStillAllocating() { return allocatePosition.get() < buffer.capacity(); } /** * Completely discards a segment file by deleting it. (Potentially blocking operation) */ void delete() { FileUtils.deleteWithConfirm(logFile); } /** * Recycle processes an unneeded segment file for reuse. * * @return a new CommitLogSegment representing the newly reusable segment. */ CommitLogSegment recycle() { try { sync(); } catch (FSWriteError e) { logger.error("I/O error flushing {} {}", this, e.getMessage()); throw e; } close(); return new CommitLogSegment(getPath()); } /** * @return the current ReplayPosition for this log segment */ public ReplayPosition getContext() { return new ReplayPosition(id, allocatePosition.get()); } /** * @return the file path to this segment */ public String getPath() { return logFile.getPath(); } /** * @return the file name of this segment */ public String getName() { return logFile.getName(); } /** * Close the segment file. 
*/ void close() { try { if (FileUtils.isCleanerAvailable()) FileUtils.clean(buffer); logFileAccessor.close(); } catch (IOException e) { throw new FSWriteError(e, getPath()); } } void markDirty(Mutation mutation, int allocatedPosition) { for (ColumnFamily columnFamily : mutation.getColumnFamilies()) { // check for deleted CFS CFMetaData cfm = columnFamily.metadata(); if (cfm.isPurged()) logger.error("Attempted to write commit log entry for unrecognized column family: {}", columnFamily.id()); else ensureAtleast(cfDirty, cfm.cfId, allocatedPosition); } } /** * Marks the ColumnFamily specified by cfId as clean for this log segment. If the * given context argument is contained in this file, it will only mark the CF as * clean if no newer writes have taken place. * * @param cfId the column family ID that is now clean * @param context the optional clean offset */ public synchronized void markClean(UUID cfId, ReplayPosition context) { if (!cfDirty.containsKey(cfId)) return; if (context.segment == id) markClean(cfId, context.position); else if (context.segment > id) markClean(cfId, Integer.MAX_VALUE); } private void markClean(UUID cfId, int position) { ensureAtleast(cfClean, cfId, position); removeCleanFromDirty(); } private static void ensureAtleast(ConcurrentMap<UUID, AtomicInteger> map, UUID cfId, int value) { AtomicInteger i = map.get(cfId); if (i == null) { AtomicInteger i2 = map.putIfAbsent(cfId, i = new AtomicInteger()); if (i2 != null) i = i2; } while (true) { int cur = i.get(); if (cur > value) break; if (i.compareAndSet(cur, value)) break; } } private void removeCleanFromDirty() { // if we're still allocating from this segment, don't touch anything since it can't be done thread-safely if (isStillAllocating()) return; Iterator<Map.Entry<UUID, AtomicInteger>> iter = cfClean.entrySet().iterator(); while (iter.hasNext()) { Map.Entry<UUID, AtomicInteger> clean = iter.next(); UUID cfId = clean.getKey(); AtomicInteger cleanPos = clean.getValue(); AtomicInteger dirtyPos = 
cfDirty.get(cfId); if (dirtyPos != null && dirtyPos.intValue() < cleanPos.intValue()) { cfDirty.remove(cfId); iter.remove(); } } } /** * @return a collection of dirty CFIDs for this segment file. */ public synchronized Collection<UUID> getDirtyCFIDs() { removeCleanFromDirty(); if (cfClean.isEmpty() || cfDirty.isEmpty()) return cfDirty.keySet(); List<UUID> r = new ArrayList<>(cfDirty.size()); for (Map.Entry<UUID, AtomicInteger> dirty : cfDirty.entrySet()) { UUID cfId = dirty.getKey(); AtomicInteger dirtyPos = dirty.getValue(); AtomicInteger cleanPos = cfClean.get(cfId); if (cleanPos == null || cleanPos.intValue() < dirtyPos.intValue()) r.add(dirty.getKey()); } return r; } /** * @return true if this segment is unused and safe to recycle or delete */ public synchronized boolean isUnused() { // if room to allocate, we're still in use as the active allocatingFrom, // so we don't want to race with updates to cfClean with removeCleanFromDirty if (isStillAllocating()) return false; removeCleanFromDirty(); return cfDirty.isEmpty(); } /** * Check to see if a certain ReplayPosition is contained by this segment file. * * @param context the replay position to be checked * @return true if the replay position is contained by this segment file. */ public boolean contains(ReplayPosition context) { return context.segment == id; } // For debugging, not fast public String dirtyString() { StringBuilder sb = new StringBuilder(); for (UUID cfId : getDirtyCFIDs()) { CFMetaData m = Schema.instance.getCFMetaData(cfId); sb.append(m == null ? 
"<deleted>" : m.cfName).append(" (").append(cfId).append("), "); } return sb.toString(); } @Override public String toString() { return "CommitLogSegment(" + getPath() + ')'; } public static class CommitLogSegmentFileComparator implements Comparator<File> { public int compare(File f, File f2) { CommitLogDescriptor desc = CommitLogDescriptor.fromFileName(f.getName()); CommitLogDescriptor desc2 = CommitLogDescriptor.fromFileName(f2.getName()); return (int) (desc.id - desc2.id); } } /** * A simple class for tracking information about the portion of a segment that has been allocated to a log write. * The constructor leaves the fields uninitialized for population by CommitlogManager, so that it can be * stack-allocated by escape analysis in CommitLog.add. */ static class Allocation { private CommitLogSegment segment; private OpOrder.Group appendOp; private int position; private ByteBuffer buffer; CommitLogSegment getSegment() { return segment; } ByteBuffer getBuffer() { return buffer; } // markWritten() MUST be called once we are done with the segment or the CL will never flush // but must not be called more than once void markWritten() { appendOp.close(); } void awaitDiskSync() { while (segment.lastSyncedOffset < position) { WaitQueue.Signal signal = segment.syncComplete.register(CommitLog.instance.metrics.waitingOnCommit.time()); if (segment.lastSyncedOffset < position) signal.awaitUninterruptibly(); else signal.cancel(); } } public ReplayPosition getReplayPosition() { // always allocate a ReplayPosition to let stack allocation do its magic. If we return null, we always // have to allocate an object on the stack return new ReplayPosition(segment == null ? -1 : segment.id, segment == null ? 0 : buffer.limit()); } } }
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.stream.binder;

import org.springframework.beans.factory.InitializingBean;
import org.springframework.cloud.stream.provisioning.ConsumerDestination;
import org.springframework.cloud.stream.provisioning.ProducerDestination;
import org.springframework.cloud.stream.provisioning.ProvisioningException;
import org.springframework.cloud.stream.provisioning.ProvisioningProvider;
import org.springframework.context.Lifecycle;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.integration.channel.FixedSubscriberChannel;
import org.springframework.integration.core.MessageProducer;
import org.springframework.integration.endpoint.EventDrivenConsumer;
import org.springframework.integration.handler.AbstractMessageHandler;
import org.springframework.integration.handler.AbstractReplyProducingMessageHandler;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.util.Assert;
import org.springframework.util.MimeType;

/**
 * {@link AbstractBinder} that serves as base class for {@link MessageChannel} binders.
 * Implementors must implement the following methods:
 * <ul>
 * <li>{@link #createProducerMessageHandler(ProducerDestination, ProducerProperties)} </li>
 * <li>{@link #createConsumerEndpoint(ConsumerDestination, String, ConsumerProperties)} </li>
 * </ul>
 *
 * @param <C> the consumer properties type
 * @param <P> the producer properties type
 * @author Marius Bogoevici
 * @author Ilayaperumal Gopinathan
 * @author Soby Chacko
 * @since 1.1
 */
public abstract class AbstractMessageChannelBinder<C extends ConsumerProperties, P extends ProducerProperties, PP extends ProvisioningProvider<C, P>>
		extends AbstractBinder<MessageChannel, C, P> {

	protected static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser();

	/**
	 * Indicates whether the implementation and the message broker have
	 * native support for message headers. If false, headers will be
	 * embedded in the message payloads.
	 */
	private final boolean supportsHeadersNatively;

	/**
	 * Indicates what headers are to be embedded in the payload if
	 * {@link #supportsHeadersNatively} is false (headers are only embedded
	 * when the broker has no native header support).
	 */
	private final String[] headersToEmbed;

	/**
	 * {@link ProvisioningProvider} delegated by the downstream binder implementations.
	 */
	protected final PP provisioningProvider;

	public AbstractMessageChannelBinder(boolean supportsHeadersNatively, String[] headersToEmbed,
			PP provisioningProvider) {
		this.supportsHeadersNatively = supportsHeadersNatively;
		// normalize null to an empty array so downstream code never has to null-check
		this.headersToEmbed = headersToEmbed == null ? new String[0] : headersToEmbed;
		this.provisioningProvider = provisioningProvider;
	}

	/**
	 * Binds an outbound channel to a given destination. The implementation delegates to
	 * {@link ProvisioningProvider#provisionProducerDestination(String, ProducerProperties)}
	 * and {@link #createProducerMessageHandler(ProducerDestination, ProducerProperties)} for
	 * handling the middleware specific logic. If the returned producer message handler is an
	 * {@link InitializingBean} then {@link InitializingBean#afterPropertiesSet()} will be
	 * called on it. Similarly, if the returned producer message handler endpoint is a
	 * {@link Lifecycle}, then {@link Lifecycle#start()} will be called on it.
	 *
	 * @param destination the name of the destination
	 * @param outputChannel the channel to be bound
	 * @param producerProperties the {@link ProducerProperties} of the binding
	 * @return the Binding for the channel
	 * @throws BinderException on internal errors during binding
	 */
	@Override
	public final Binding<MessageChannel> doBindProducer(final String destination, MessageChannel outputChannel,
			final P producerProperties) throws BinderException {
		Assert.isInstanceOf(SubscribableChannel.class, outputChannel,
				"Binding is supported only for SubscribableChannel instances");
		final MessageHandler producerMessageHandler;
		final ProducerDestination producerDestination;
		try {
			// lifecycle order: provision the destination, create the handler, then initialize it
			producerDestination = this.provisioningProvider.provisionProducerDestination(destination,
					producerProperties);
			producerMessageHandler = createProducerMessageHandler(producerDestination, producerProperties);
			if (producerMessageHandler instanceof InitializingBean) {
				((InitializingBean) producerMessageHandler).afterPropertiesSet();
			}
		}
		catch (Exception e) {
			// rethrow known binder/provisioning failures as-is; wrap anything else
			if (e instanceof BinderException) {
				throw (BinderException) e;
			}
			else if (e instanceof ProvisioningException) {
				throw (ProvisioningException) e;
			}
			else {
				throw new BinderException("Exception thrown while building outbound endpoint", e);
			}
		}
		if (producerMessageHandler instanceof Lifecycle) {
			((Lifecycle) producerMessageHandler).start();
		}
		// wrap the handler so headers are embedded in the payload only when the broker lacks
		// native header support AND the binding asks for embedded-header mode
		((SubscribableChannel) outputChannel).subscribe(new SendingHandler(producerMessageHandler,
				!this.supportsHeadersNatively
						&& HeaderMode.embeddedHeaders.equals(producerProperties.getHeaderMode()),
				this.headersToEmbed, producerProperties.isUseNativeEncoding()));
		return new DefaultBinding<MessageChannel>(destination, null, outputChannel,
				producerMessageHandler instanceof Lifecycle ? (Lifecycle) producerMessageHandler : null) {

			@Override
			public void afterUnbind() {
				afterUnbindProducer(producerDestination, producerProperties);
			}

		};
	}

	/**
	 * Creates a {@link MessageHandler} with the ability to send data to the
	 * target middleware. If the returned instance is also a {@link Lifecycle},
	 * it will be stopped automatically by the binder.
	 * <p>
	 * In order to be fully compliant, the {@link MessageHandler} of the binder
	 * must observe the following headers:
	 * <ul>
	 * <li>{@link BinderHeaders#PARTITION_HEADER} - indicates the target
	 * partition where the message must be sent</li>
	 * </ul>
	 * <p>
	 *
	 * @param destination the name of the target destination
	 * @param producerProperties the producer properties
	 * @return the message handler for sending data to the target middleware
	 * @throws Exception if the handler cannot be created
	 */
	protected abstract MessageHandler createProducerMessageHandler(ProducerDestination destination,
			P producerProperties) throws Exception;

	/**
	 * Invoked after the unbinding of a producer. Subclasses may override this to provide
	 * their own logic for dealing with unbinding.
	 *
	 * @param destination the bound destination
	 * @param producerProperties the producer properties
	 */
	protected void afterUnbindProducer(ProducerDestination destination, P producerProperties) {
	}

	/**
	 * Binds an inbound channel to a given destination. The implementation delegates to
	 * {@link ProvisioningProvider#provisionConsumerDestination(String, String, ConsumerProperties)}
	 * and {@link #createConsumerEndpoint(ConsumerDestination, String, ConsumerProperties)}
	 * for handling middleware-specific logic. If the returned consumer endpoint is an
	 * {@link InitializingBean} then {@link InitializingBean#afterPropertiesSet()} will be
	 * called on it. Similarly, if the returned consumer endpoint is a {@link Lifecycle},
	 * then {@link Lifecycle#start()} will be called on it.
	 *
	 * @param name the name of the destination
	 * @param group the consumer group
	 * @param inputChannel the channel to be bound
	 * @param properties the {@link ConsumerProperties} of the binding
	 * @return the Binding for the channel
	 * @throws BinderException on internal errors during binding
	 */
	@Override
	public final Binding<MessageChannel> doBindConsumer(String name, String group, MessageChannel inputChannel,
			final C properties) throws BinderException {
		MessageProducer consumerEndpoint = null;
		try {
			final ConsumerDestination destination = this.provisioningProvider.provisionConsumerDestination(name,
					group, properties);
			// extract headers from the payload only when embedded-header mode is requested and
			// the broker has no native header support
			final boolean extractEmbeddedHeaders = HeaderMode.embeddedHeaders.equals(properties.getHeaderMode())
					&& !this.supportsHeadersNatively;
			ReceivingHandler rh = new ReceivingHandler(extractEmbeddedHeaders);
			rh.setOutputChannel(inputChannel);
			// bridge: middleware endpoint -> header-extracting handler -> bound input channel
			final FixedSubscriberChannel bridge = new FixedSubscriberChannel(rh);
			bridge.setBeanName("bridge." + name);
			consumerEndpoint = createConsumerEndpoint(destination, group, properties);
			consumerEndpoint.setOutputChannel(bridge);
			if (consumerEndpoint instanceof InitializingBean) {
				((InitializingBean) consumerEndpoint).afterPropertiesSet();
			}
			if (consumerEndpoint instanceof Lifecycle) {
				((Lifecycle) consumerEndpoint).start();
			}
			final Object endpoint = consumerEndpoint;
			EventDrivenConsumer edc = new EventDrivenConsumer(bridge, rh);
			edc.setBeanName("inbound." + groupedName(name, group));
			edc.start();
			return new DefaultBinding<MessageChannel>(name, group, inputChannel,
					endpoint instanceof Lifecycle ? (Lifecycle) endpoint : null) {

				@Override
				protected void afterUnbind() {
					AbstractMessageChannelBinder.this.afterUnbindConsumer(destination, this.group, properties);
				}

			};
		}
		catch (Exception e) {
			// compensate: stop a partially-started endpoint before propagating the failure
			if (consumerEndpoint instanceof Lifecycle) {
				((Lifecycle) consumerEndpoint).stop();
			}
			if (e instanceof BinderException) {
				throw (BinderException) e;
			}
			else if (e instanceof ProvisioningException) {
				throw (ProvisioningException) e;
			}
			else {
				throw new BinderException("Exception thrown while starting consumer: ", e);
			}
		}
	}

	/**
	 * Creates a {@link MessageProducer} that receives data from the consumer destination;
	 * if it implements {@link Lifecycle} it will be started and stopped by the binder.
	 *
	 * @param group the consumer group
	 * @param destination reference to the consumer destination
	 * @param properties the consumer properties
	 * @return the consumer endpoint.
	 * @throws Exception if the endpoint cannot be created
	 */
	protected abstract MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group,
			C properties) throws Exception;

	/**
	 * Invoked after the unbinding of a consumer. The binder implementation can override
	 * this method to provide their own logic (e.g. for cleaning up destinations).
	 *
	 * @param destination the consumer destination
	 * @param group the consumer group
	 * @param consumerProperties the consumer properties
	 */
	protected void afterUnbindConsumer(ConsumerDestination destination, String group, C consumerProperties) {
	}

	// Inbound adapter: optionally un-embeds headers from the payload and deserializes it
	// before the message reaches the bound input channel.
	private final class ReceivingHandler extends AbstractReplyProducingMessageHandler {

		private final boolean extractEmbeddedHeaders;

		private ReceivingHandler(boolean extractEmbeddedHeaders) {
			this.extractEmbeddedHeaders = extractEmbeddedHeaders;
		}

		@Override
		@SuppressWarnings("unchecked")
		protected Object handleRequestMessage(Message<?> requestMessage) {
			// pass through untouched unless the payload is raw bytes or carries the
			// binder's original-content-type marker
			if (!(requestMessage.getPayload() instanceof byte[])
					&& !requestMessage.getHeaders().containsKey(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE)) {
				return requestMessage;
			}
			MessageValues messageValues;
			if (this.extractEmbeddedHeaders) {
				try {
					messageValues = EmbeddedHeaderUtils.extractHeaders((Message<byte[]>) requestMessage, true);
				}
				catch (Exception e) {
					// best effort: if extraction fails, log and continue with the raw message
					AbstractMessageChannelBinder.this.logger.error(
							EmbeddedHeaderUtils.decodeExceptionMessage(requestMessage), e);
					messageValues = new MessageValues(requestMessage);
				}
				messageValues = deserializePayloadIfNecessary(messageValues);
			}
			else {
				messageValues = deserializePayloadIfNecessary(requestMessage);
			}
			return messageValues.toMessage();
		}

		@Override
		protected boolean shouldCopyRequestHeaders() {
			// prevent the message from being copied again in superclass
			return false;
		}

	}

	// Outbound adapter: optionally serializes the payload and embeds headers into it
	// before delegating to the middleware-specific producer handler; propagates
	// Lifecycle calls to the delegate.
	private final class SendingHandler extends AbstractMessageHandler implements Lifecycle {

		private final boolean embedHeaders;

		private final String[] embeddedHeaders;

		private final MessageHandler delegate;

		private final boolean useNativeEncoding;

		private SendingHandler(MessageHandler delegate, boolean embedHeaders, String[] headersToEmbed,
				boolean useNativeEncoding) {
			this.delegate = delegate;
			this.setBeanFactory(AbstractMessageChannelBinder.this.getBeanFactory());
			this.embedHeaders = embedHeaders;
			this.embeddedHeaders = headersToEmbed;
			this.useNativeEncoding = useNativeEncoding;
		}

		@Override
		protected void handleMessageInternal(Message<?> message) throws Exception {
			// with native encoding the middleware serializes; otherwise we do it here
			Message<?> messageToSend = (this.useNativeEncoding) ? message
					: serializeAndEmbedHeadersIfApplicable(message);
			this.delegate.handleMessage(messageToSend);
		}

		private Message<?> serializeAndEmbedHeadersIfApplicable(Message<?> message) throws Exception {
			MessageValues transformed = serializePayloadIfNecessary(message);
			byte[] payload;
			if (this.embedHeaders) {
				Object contentType = transformed.get(MessageHeaders.CONTENT_TYPE);
				// transform content type headers to String, so that they can be properly embedded in JSON
				if (contentType instanceof MimeType) {
					transformed.put(MessageHeaders.CONTENT_TYPE, contentType.toString());
				}
				Object originalContentType = transformed.get(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE);
				if (originalContentType instanceof MimeType) {
					transformed.put(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE, originalContentType.toString());
				}
				payload = EmbeddedHeaderUtils.embedHeaders(transformed, this.embeddedHeaders);
			}
			else {
				payload = (byte[]) transformed.getPayload();
			}
			return getMessageBuilderFactory().withPayload(payload).copyHeaders(transformed.getHeaders()).build();
		}

		@Override
		public void start() {
			if (this.delegate instanceof Lifecycle) {
				((Lifecycle) this.delegate).start();
			}
		}

		@Override
		public void stop() {
			if (this.delegate instanceof Lifecycle) {
				((Lifecycle) this.delegate).stop();
			}
		}

		@Override
		public boolean isRunning() {
			return this.delegate instanceof Lifecycle && ((Lifecycle) this.delegate).isRunning();
		}

	}

}
package com.github.bot.curiosone.core.util; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import org.junit.Test; public class IntervalTest { @Test public void testInstantiation() { Interval interval = new Interval(0, 42); assertThat(interval instanceof Interval).isTrue(); assertThat(interval instanceof Comparable).isTrue(); } @Test public void testIllegalArgumentExceptionInstantiation() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> new Interval(42, 24)) .withMessageContaining("Illegal interval"); assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> new Interval(-1, -42)) .withMessageContaining("Illegal interval"); } @Test public void testMin() { assertThat(new Interval(0, 42).min()).isZero(); assertThat(new Interval(0, 0).min()).isZero(); assertThat(new Interval(-42, 84).min()).isEqualTo(-42); assertThat(new Interval(-42, -21).min()).isEqualTo(-42); } @Test public void testMax() { assertThat(new Interval(0, 0).max()).isZero(); assertThat(new Interval(0, 42).max()).isEqualTo(42); assertThat(new Interval(-4, -2).max()).isEqualTo(-2); assertThat(new Interval(-4, 2).max()).isEqualTo(2); } @Test public void testIntersects() { Interval i = new Interval(10,42); Interval ii = new Interval(24,45); assertThat(i.intersects(ii)).isTrue(); ii = new Interval(0,24); assertThat(i.intersects(ii)).isTrue(); ii = new Interval(10,42); assertThat(i.intersects(ii)).isTrue(); i = new Interval(-42, -23); ii = new Interval(-42, -23); assertThat(i.intersects(ii)).isTrue(); ii = new Interval(-42, -30); assertThat(i.intersects(ii)).isTrue(); ii = new Interval(-33, 30); assertThat(i.intersects(ii)).isTrue(); } @Test public void testDoesNotIntersect() { Interval i = new Interval(10,42); Interval ii = new Interval(45,47); assertThat(i.intersects(ii)).isFalse(); ii = new Interval(0,8); assertThat(i.intersects(ii)).isFalse(); i = new Interval(-20, -10); ii = 
new Interval(-80, -40); assertThat(i.intersects(ii)).isFalse(); ii = new Interval(-5, -1); assertThat(i.intersects(ii)).isFalse(); } @Test public void testContains() { Interval i = new Interval(0, 42); int ii = 24; assertThat(i.contains(ii)).isTrue(); ii = 0; assertThat(i.contains(ii)).isTrue(); ii = 42; assertThat(i.contains(ii)).isTrue(); i = new Interval(-7, -2); ii = -5; assertThat(i.contains(ii)).isTrue(); ii = -7; assertThat(i.contains(ii)).isTrue(); ii = -2; assertThat(i.contains(ii)).isTrue(); } @Test public void testDoesNotContain() { Interval i = new Interval(0, 42); int ii = 98; assertThat(i.contains(ii)).isFalse(); ii = -98; assertThat(i.contains(ii)).isFalse(); i = new Interval(-46, -42); assertThat(i.contains(ii)).isFalse(); ii = -40; assertThat(i.contains(ii)).isFalse(); } @Test public void testLength() { Interval i = new Interval(10, 42); assertThat(i.length()).isEqualTo(32); i = new Interval(10, 10); assertThat(i.length()).isEqualTo(0); i = new Interval(0, 0); assertThat(i.length()).isEqualTo(0); i = new Interval(-5, 0); assertThat(i.length()).isEqualTo(5); i = new Interval(-5, -1); assertThat(i.length()).isEqualTo(4); } @Test public void testToString() { Interval i = new Interval(0, 42); assertThat(i.toString()).isEqualTo("[0, 42]"); i = new Interval(-30, -15); assertThat(i.toString()).isEqualTo("[-30, -15]"); i = new Interval(-30, 7); assertThat(i.toString()).isEqualTo("[-30, 7]"); } @Test public void testEqualsReflexive() { Interval i = new Interval(1, 2); assertThat(i).isEqualTo(i); i = new Interval(-1, 2); assertThat(i).isEqualTo(i); i = new Interval(-2, 1); assertThat(i).isEqualTo(i); i = new Interval(-2, -1); assertThat(i).isEqualTo(i); } @Test public void testEqualsSymmetric() { Interval i = new Interval(1, 2); Interval ii = new Interval(1, 2); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(i); i = new Interval(-1, 2); ii = new Interval(-1, 2); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(i); i = new Interval(-2, 1); ii = 
new Interval(-2, 1); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(i); i = new Interval(-2, -1); ii = new Interval(-2, -1); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(i); } @Test public void testEqualsTransitive() { Interval i = new Interval(1, 2); Interval ii = new Interval(1, 2); Interval iii = new Interval(1, 2); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(iii); assertThat(iii).isEqualTo(i); i = new Interval(-1, 2); ii = new Interval(-1, 2); iii = new Interval(-1, 2); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(iii); assertThat(iii).isEqualTo(i); i = new Interval(-2, 1); ii = new Interval(-2, 1); iii = new Interval(-2, 1); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(iii); assertThat(iii).isEqualTo(i); i = new Interval(-2, -1); ii = new Interval(-2, -1); iii = new Interval(-2, -1); assertThat(i).isEqualTo(ii); assertThat(ii).isEqualTo(iii); assertThat(iii).isEqualTo(i); } @Test public void testEqualsNullComparison() { Interval i = new Interval(77, 88); assertThat(i).isNotEqualTo(null); i = new Interval(-77, 88); assertThat(i).isNotEqualTo(null); i = new Interval(-88, 77); assertThat(i).isNotEqualTo(null); i = new Interval(-88, -77); assertThat(i).isNotEqualTo(null); i = new Interval(78, 78); assertThat(i).isNotEqualTo(null); } @Test public void testEqualsOtherObj() { Interval i = new Interval(77, 88); assertThat(i).isNotEqualTo("77, 88"); i = new Interval(-7, 1); assertThat(i).isNotEqualTo(new Integer(10)); } @Test public void testNotEqual() { Interval i = new Interval(0, 10); Interval ii = new Interval(-10, 0); assertThat(i).isNotEqualTo(ii); i = new Interval(0, 10); ii = new Interval(10, 10); assertThat(i).isNotEqualTo(ii); } @Test public void testHashCodeCoherent() { Interval i = new Interval(0, 42); Interval ii = new Interval(0, 42); assertThat(i.hashCode()).isEqualTo(ii.hashCode()); ii = new Interval(0, 20); assertThat(i.hashCode()).isNotEqualTo(ii.hashCode()); } @Test public void testHashCodeReflexive() { 
Interval i = new Interval(0, 42); assertThat(i.hashCode()).isEqualTo(i.hashCode()); i = new Interval(-42, 0); assertThat(i.hashCode()).isEqualTo(i.hashCode()); i = new Interval(-42, -5); assertThat(i.hashCode()).isEqualTo(i.hashCode()); i = new Interval(-42, -4); assertThat(i.hashCode()).isEqualTo(i.hashCode()); } @Test public void testHashCodeEqualsContract() { Interval i = new Interval(0, 42); Interval ii = new Interval(0, 42); assertThat(i.hashCode()).isEqualTo(ii.hashCode()); assertThat(i).isEqualTo(ii); i = new Interval(-40, 42); ii = new Interval(-40, 42); assertThat(i.hashCode()).isEqualTo(ii.hashCode()); assertThat(i).isEqualTo(ii); } @Test public void testCompareTo() { Interval i = new Interval(0, 42); Interval ii = new Interval(1, 41); assertThat(i.compareTo(ii)).isLessThan(0); ii = new Interval(0, 42); assertThat(i.compareTo(ii)).isZero(); i = new Interval(1, 41); ii = new Interval(0, 42); assertThat(i.compareTo(ii)).isGreaterThan(0); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.shard;

import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import io.crate.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.Repository;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import static io.crate.common.unit.TimeValue.timeValueMillis;

/**
 * This package private utility class encapsulates the logic to recover an index shard from either an existing index on
 * disk or from a snapshot in a repository.
 */
final class StoreRecovery {

    // shard-scoped logger supplied by the owning IndexShard
    private final Logger logger;
    // the shard this recovery helper operates on
    private final ShardId shardId;

    StoreRecovery(ShardId shardId, Logger logger) {
        this.logger = logger;
        this.shardId = shardId;
    }

    /**
     * Recovers a shard from its local file system store. This method requires pre-knowledge about if the shard should
     * exist on disk ie. has been previously allocated or if the shard is a brand new allocation without pre-existing index
     * files / transaction logs.
     *
     * @param indexShard the index shard instance to recover the shard into
     * @param listener resolves to <code>true</code> if the shard has been recovered successfully, <code>false</code> if the recovery
     *                 has been ignored due to a concurrent modification or if the cluster state has changed due to async updates.
     * @see Store
     */
    void recoverFromStore(final IndexShard indexShard, ActionListener<Boolean> listener) {
        if (canRecover(indexShard)) {
            RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType();
            assert recoveryType == RecoverySource.Type.EMPTY_STORE || recoveryType == RecoverySource.Type.EXISTING_STORE :
                "expected store recovery type but was: " + recoveryType;
            ActionListener.completeWith(recoveryListener(indexShard, listener), () -> {
                logger.debug("starting recovery from store ...");
                internalRecoverFromStore(indexShard);
                return true;
            });
        } else {
            listener.onResponse(false);
        }
    }

    /**
     * Recovers a shard by copying the Lucene segments of one or more local source shards (shrink/split/clone)
     * into this shard's store, then running a regular store recovery on the result.
     *
     * @param mappingUpdateConsumer receives the source index mapping so it can be applied before the merge
     * @param indexShard the target shard to recover into
     * @param shards snapshots of the local source shards; must be non-empty and all from the same index
     * @param listener resolves to <code>true</code> on success, <code>false</code> if the recovery was ignored
     */
    void recoverFromLocalShards(Consumer<MappingMetadata> mappingUpdateConsumer,
                                IndexShard indexShard,
                                List<LocalShardSnapshot> shards,
                                ActionListener<Boolean> listener) {
        if (canRecover(indexShard)) {
            RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType();
            assert recoveryType == RecoverySource.Type.LOCAL_SHARDS : "expected local shards recovery type: " + recoveryType;
            if (shards.isEmpty()) {
                throw new IllegalArgumentException("shards must not be empty");
            }
            Set<Index> indices = shards.stream().map((s) -> s.getIndex()).collect(Collectors.toSet());
            if (indices.size() > 1) {
                throw new IllegalArgumentException("can't add shards from more than one index");
            }
            IndexMetadata sourceMetadata = shards.get(0).getIndexMetadata();
            if (sourceMetadata.mapping() != null) {
                mappingUpdateConsumer.accept(sourceMetadata.mapping());
            }
            indexShard.mapperService().merge(sourceMetadata, MapperService.MergeReason.MAPPING_RECOVERY);
            // now that the mapping is merged we can validate the index sort configuration.
            // a split target has more shards than the source; documents not belonging to this shard must be filtered out
            final boolean isSplit = sourceMetadata.getNumberOfShards() < indexShard.indexSettings().getNumberOfShards();
            ActionListener.completeWith(recoveryListener(indexShard, listener), () -> {
                logger.debug("starting recovery from local shards {}", shards);
                try {
                    final Directory directory = indexShard.store().directory(); // don't close this directory!!
                    final Directory[] sources = shards.stream().map(LocalShardSnapshot::getSnapshotDirectory).toArray(Directory[]::new);
                    // take the maximum over all sources so history can advance correctly on the target
                    final long maxSeqNo = shards.stream().mapToLong(LocalShardSnapshot::maxSeqNo).max().getAsLong();
                    final long maxUnsafeAutoIdTimestamp = shards.stream()
                        .mapToLong(LocalShardSnapshot::maxUnsafeAutoIdTimestamp)
                        .max()
                        .getAsLong();
                    addIndices(
                        indexShard.recoveryState().getIndex(),
                        directory,
                        sources,
                        maxSeqNo,
                        maxUnsafeAutoIdTimestamp,
                        indexShard.indexSettings().getIndexMetadata(),
                        indexShard.shardId().id(),
                        isSplit
                    );
                    internalRecoverFromStore(indexShard);
                    // just trigger a merge to do housekeeping on the
                    // copied segments - we will also see them in stats etc.
                    indexShard.getEngine().forceMerge(false, -1, false, false, false, UUIDs.randomBase64UUID());
                    return true;
                } catch (IOException ex) {
                    throw new IndexShardRecoveryException(indexShard.shardId(), "failed to recover from local shards", ex);
                }
            });
        } else {
            listener.onResponse(false);
        }
    }

    /**
     * Adds the source directories to the target directory via {@link IndexWriter#addIndexes} (hardlinking files
     * where possible), optionally filtering out documents that do not belong to this shard after a split,
     * and commits with the given sequence-number bounds as live commit data.
     */
    void addIndices(final RecoveryState.Index indexRecoveryStats,
                    final Directory target,
                    final Directory[] sources,
                    final long maxSeqNo,
                    final long maxUnsafeAutoIdTimestamp,
                    IndexMetadata indexMetadata,
                    int shardId,
                    boolean split) throws IOException {
        assert sources.length > 0;
        final int luceneIndexCreatedVersionMajor = Lucene.readSegmentInfos(sources[0]).getIndexCreatedVersionMajor();
        final Directory hardLinkOrCopyTarget = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target);
        IndexWriterConfig iwc = new IndexWriterConfig(null)
            .setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
            .setCommitOnClose(false)
            // we don't want merges to happen here - we call maybe merge on the engine
            // later once we started it up otherwise we would need to wait for it here
            // we also don't specify a codec here and merges should use the engines for this index
            .setMergePolicy(NoMergePolicy.INSTANCE)
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
            .setIndexCreatedVersionMajor(luceneIndexCreatedVersionMajor);
        try (IndexWriter writer = new IndexWriter(new StatsDirectoryWrapper(hardLinkOrCopyTarget, indexRecoveryStats), iwc)) {
            writer.addIndexes(sources);
            if (split) {
                writer.deleteDocuments(new ShardSplittingQuery(indexMetadata, shardId));
            }
            /*
             * We set the maximum sequence number and the local checkpoint on the target to the maximum of the maximum sequence numbers on
             * the source shards. This ensures that history after this maximum sequence number can advance and we have correct
             * document-level semantics.
             */
            writer.setLiveCommitData(() -> {
                final HashMap<String, String> liveCommitData = new HashMap<>(3);
                liveCommitData.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(maxSeqNo));
                liveCommitData.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(maxSeqNo));
                liveCommitData.put(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp));
                return liveCommitData.entrySet().iterator();
            });
            writer.commit();
        }
    }

    /**
     * Directory wrapper that records copy process for recovery statistics
     */
    static final class StatsDirectoryWrapper extends FilterDirectory {

        // recovery statistics sink updated as files are copied
        private final RecoveryState.Index index;

        StatsDirectoryWrapper(Directory in, RecoveryState.Index indexRecoveryStats) {
            super(in);
            this.index = indexRecoveryStats;
        }

        @Override
        public void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException {
            final long l = from.fileLength(src);
            final AtomicBoolean copies = new AtomicBoolean(false);
            // here we wrap the index input from the source directory to report progress of file copy for the recovery stats.
            // we increment the num bytes recovered in the readBytes method below, if users pull statistics they can see immediately
            // how much has been recovered.
            in.copyFrom(new FilterDirectory(from) {
                @Override
                public IndexInput openInput(String name, IOContext context) throws IOException {
                    // openInput is only reached when an actual byte copy happens (not a hardlink)
                    index.addFileDetail(dest, l, false);
                    copies.set(true);
                    final IndexInput input = in.openInput(name, context);
                    return new IndexInput("StatsDirectoryWrapper(" + input.toString() + ")") {
                        @Override
                        public void close() throws IOException {
                            input.close();
                        }

                        @Override
                        public long getFilePointer() {
                            throw new UnsupportedOperationException("only straight copies are supported");
                        }

                        @Override
                        public void seek(long pos) throws IOException {
                            throw new UnsupportedOperationException("seeks are not supported");
                        }

                        @Override
                        public long length() {
                            return input.length();
                        }

                        @Override
                        public IndexInput slice(String sliceDescription, long offset, long length) throws IOException {
                            throw new UnsupportedOperationException("slices are not supported");
                        }

                        @Override
                        public byte readByte() throws IOException {
                            throw new UnsupportedOperationException("use a buffer if you wanna perform well");
                        }

                        @Override
                        public void readBytes(byte[] b, int offset, int len) throws IOException {
                            // we rely on the fact that copyFrom uses a buffer
                            input.readBytes(b, offset, len);
                            index.addRecoveredBytesToFile(dest, len);
                        }
                    };
                }
            }, src, dest, context);
            if (copies.get() == false) {
                index.addFileDetail(dest, l, true); // hardlinked - we treat it as reused since the file was already somewhat there
            } else {
                assert index.getFileDetails(dest) != null : "File [" + dest + "] has no file details";
                assert index.getFileDetails(dest).recovered() == l : index.getFileDetails(dest).toString();
            }
        }
    }

    /**
     * Recovers an index from a given {@link Repository}. This method restores a
     * previously created index snapshot into an existing initializing shard.
* * @param indexShard the index shard instance to recovery the snapshot from * @param repository the repository holding the physical files the shard should be recovered from * @param listener resolves to <code>true</code> if the shard has been recovered successfully, <code>false</code> if the recovery * has been ignored due to a concurrent modification of if the clusters state has changed due to async updates. */ void recoverFromRepository(final IndexShard indexShard, Repository repository, ActionListener<Boolean> listener) { try { if (canRecover(indexShard)) { RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType(); assert recoveryType == RecoverySource.Type.SNAPSHOT : "expected snapshot recovery type: " + recoveryType; SnapshotRecoverySource recoverySource = (SnapshotRecoverySource) indexShard.recoveryState().getRecoverySource(); restore(indexShard, repository, recoverySource, recoveryListener(indexShard, listener)); } else { listener.onResponse(false); } } catch (Exception e) { listener.onFailure(e); } } private boolean canRecover(IndexShard indexShard) { if (indexShard.state() == IndexShardState.CLOSED) { // got closed on us, just ignore this recovery return false; } if (indexShard.routingEntry().primary() == false) { throw new IndexShardRecoveryException(shardId, "Trying to recover when the shard is in backup state", null); } return true; } private ActionListener<Boolean> recoveryListener(IndexShard indexShard, ActionListener<Boolean> listener) { return ActionListener.wrap( res -> { if (res) { // Check that the gateway didn't leave the shard in init or recovering stage. it is up to the gateway // to call post recovery. final IndexShardState shardState = indexShard.state(); final RecoveryState recoveryState = indexShard.recoveryState(); assert shardState != IndexShardState.CREATED && shardState != IndexShardState.RECOVERING : "recovery process of " + shardId + " didn't get to post_recovery. 
shardState [" + shardState + "]"; if (logger.isTraceEnabled()) { RecoveryState.Index index = recoveryState.getIndex(); StringBuilder sb = new StringBuilder(); sb.append(" index : files [").append(index.totalFileCount()).append("] with total_size [") .append(new ByteSizeValue(index.totalBytes())).append("], took[") .append(TimeValue.timeValueMillis(index.time())).append("]\n"); sb.append(" : recovered_files [").append(index.recoveredFileCount()).append("] with total_size [") .append(new ByteSizeValue(index.recoveredBytes())).append("]\n"); sb.append(" : reusing_files [").append(index.reusedFileCount()).append("] with total_size [") .append(new ByteSizeValue(index.reusedBytes())).append("]\n"); sb.append(" verify_index : took [") .append(TimeValue.timeValueMillis(recoveryState.getVerifyIndex().time())).append("], check_index [") .append(timeValueMillis(recoveryState.getVerifyIndex().checkIndexTime())).append("]\n"); sb.append(" translog : number_of_operations [").append(recoveryState.getTranslog().recoveredOperations()) .append("], took [").append(TimeValue.timeValueMillis(recoveryState.getTranslog().time())).append("]"); logger.trace("recovery completed from [shard_store], took [{}]\n{}", timeValueMillis(recoveryState.getTimer().time()), sb); } else if (logger.isDebugEnabled()) { logger.debug("recovery completed from [shard_store], took [{}]", timeValueMillis(recoveryState.getTimer().time())); } } listener.onResponse(res); }, ex -> { if (ex instanceof IndexShardRecoveryException) { if (indexShard.state() == IndexShardState.CLOSED) { // got closed on us, just ignore this recovery listener.onResponse(false); return; } if ((ex.getCause() instanceof IndexShardClosedException) || (ex.getCause() instanceof IndexShardNotStartedException)) { // got closed on us, just ignore this recovery listener.onResponse(false); return; } listener.onFailure(ex); } else if (ex instanceof IndexShardClosedException || ex instanceof IndexShardNotStartedException) { 
listener.onResponse(false); } else { if (indexShard.state() == IndexShardState.CLOSED) { // got closed on us, just ignore this recovery listener.onResponse(false); } else { listener.onFailure(new IndexShardRecoveryException(shardId, "failed recovery", ex)); } } } ); } /** * Recovers the state of the shard from the store. */ private void internalRecoverFromStore(IndexShard indexShard) throws IndexShardRecoveryException { final RecoveryState recoveryState = indexShard.recoveryState(); final boolean indexShouldExists = recoveryState.getRecoverySource().getType() != RecoverySource.Type.EMPTY_STORE; indexShard.prepareForIndexRecovery(); SegmentInfos si = null; final Store store = indexShard.store(); store.incRef(); try { try { store.failIfCorrupted(); try { si = store.readLastCommittedSegmentsInfo(); } catch (Exception e) { String files = "_unknown_"; try { files = Arrays.toString(store.directory().listAll()); } catch (Exception inner) { files += " (failure=" + ExceptionsHelper.stackTrace(inner) + ")"; } if (indexShouldExists) { throw new IndexShardRecoveryException(shardId, "shard allocated for local recovery (post api), should exist, but doesn't, current files: " + files, e); } } if (si != null && indexShouldExists == false) { // it exists on the directory, but shouldn't exist on the FS, its a leftover (possibly dangling) // its a "new index create" API, we have to do something, so better to clean it than use same data logger.trace("cleaning existing shard, shouldn't exists"); Lucene.cleanLuceneIndex(store.directory()); si = null; } } catch (Exception e) { throw new IndexShardRecoveryException(shardId, "failed to fetch index version after copying it over", e); } if (recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) { assert indexShouldExists; bootstrap(indexShard, store); writeEmptyRetentionLeasesFile(indexShard); } else if (indexShouldExists) { if (recoveryState.getRecoverySource().shouldBootstrapNewHistoryUUID()) { 
store.bootstrapNewHistory(); writeEmptyRetentionLeasesFile(indexShard); } // since we recover from local, just fill the files and size try { final RecoveryState.Index index = recoveryState.getIndex(); if (si != null) { addRecoveredFileDetails(si, store, index); } } catch (IOException e) { logger.debug("failed to list file details", e); } } else { store.createEmpty(indexShard.indexSettings().getIndexVersionCreated().luceneVersion); final String translogUUID = Translog.createEmptyTranslog( indexShard.shardPath().resolveTranslog(), SequenceNumbers.NO_OPS_PERFORMED, shardId, indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); writeEmptyRetentionLeasesFile(indexShard); } indexShard.openEngineAndRecoverFromTranslog(); indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("post recovery from shard_store"); } catch (EngineException | IOException e) { throw new IndexShardRecoveryException(shardId, "failed to recover from gateway", e); } finally { store.decRef(); } } private static void writeEmptyRetentionLeasesFile(IndexShard indexShard) throws IOException { assert indexShard.getRetentionLeases().leases().isEmpty() : indexShard.getRetentionLeases(); // not loaded yet indexShard.persistRetentionLeases(); assert indexShard.loadRetentionLeases().leases().isEmpty(); } private void addRecoveredFileDetails(SegmentInfos si, Store store, RecoveryState.Index index) throws IOException { final Directory directory = store.directory(); for (String name : Lucene.files(si)) { long length = directory.fileLength(name); index.addFileDetail(name, length, true); } } /** * Restores shard from {@link SnapshotRecoverySource} associated with this shard in routing table */ private void restore(IndexShard indexShard, Repository repository, SnapshotRecoverySource restoreSource, ActionListener<Boolean> listener) { logger.debug("restoring from {} ...", 
indexShard.recoveryState().getRecoverySource()); final RecoveryState.Translog translogState = indexShard.recoveryState().getTranslog(); if (restoreSource == null) { listener.onFailure(new IndexShardRestoreFailedException(shardId, "empty restore source")); return; } if (logger.isTraceEnabled()) { logger.trace("[{}] restoring shard [{}]", restoreSource.snapshot(), shardId); } final ActionListener<Void> restoreListener = ActionListener.wrap( v -> { final Store store = indexShard.store(); bootstrap(indexShard, store); assert indexShard.shardRouting.primary() : "only primary shards can recover from store"; writeEmptyRetentionLeasesFile(indexShard); indexShard.openEngineAndRecoverFromTranslog(); indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done"); listener.onResponse(true); }, e -> listener.onFailure(new IndexShardRestoreFailedException(shardId, "restore failed", e)) ); try { translogState.totalOperations(0); translogState.totalOperationsOnStart(0); indexShard.prepareForIndexRecovery(); ShardId snapshotShardId = shardId; final String indexName = restoreSource.index(); if (!shardId.getIndexName().equals(indexName)) { snapshotShardId = new ShardId(indexName, IndexMetadata.INDEX_UUID_NA_VALUE, shardId.id()); } final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName); assert indexShard.getEngineOrNull() == null; repository.restoreShard( indexShard.store(), restoreSource.snapshot().getSnapshotId(), indexId, snapshotShardId, indexShard.recoveryState(), restoreListener); } catch (Exception e) { restoreListener.onFailure(e); } } private void bootstrap(final IndexShard indexShard, final Store store) throws IOException { store.bootstrapNewHistory(); final SegmentInfos segmentInfos = store.readLastCommittedSegmentsInfo(); final long localCheckpoint = Long.parseLong(segmentInfos.userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); final String translogUUID = 
Translog.createEmptyTranslog( indexShard.shardPath().resolveTranslog(), localCheckpoint, shardId, indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v8/services/customer_service.proto package com.google.ads.googleads.v8.services; /** * <pre> * Response message for CreateCustomerClient mutate. * </pre> * * Protobuf type {@code google.ads.googleads.v8.services.CreateCustomerClientResponse} */ public final class CreateCustomerClientResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.CreateCustomerClientResponse) CreateCustomerClientResponseOrBuilder { private static final long serialVersionUID = 0L; // Use CreateCustomerClientResponse.newBuilder() to construct. private CreateCustomerClientResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateCustomerClientResponse() { resourceName_ = ""; invitationLink_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CreateCustomerClientResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateCustomerClientResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { java.lang.String s = input.readStringRequireUtf8(); resourceName_ = s; break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); invitationLink_ = s; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } 
} } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v8.services.CustomerServiceProto.internal_static_google_ads_googleads_v8_services_CreateCustomerClientResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v8.services.CustomerServiceProto.internal_static_google_ads_googleads_v8_services_CreateCustomerClientResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v8.services.CreateCustomerClientResponse.class, com.google.ads.googleads.v8.services.CreateCustomerClientResponse.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 2; private volatile java.lang.Object resourceName_; /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @return The bytes for resourceName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INVITATION_LINK_FIELD_NUMBER = 3; private volatile java.lang.Object invitationLink_; /** * <pre> * Link for inviting user to access the created customer. Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @return The invitationLink. */ @java.lang.Override public java.lang.String getInvitationLink() { java.lang.Object ref = invitationLink_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); invitationLink_ = s; return s; } } /** * <pre> * Link for inviting user to access the created customer. Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @return The bytes for invitationLink. 
*/ @java.lang.Override public com.google.protobuf.ByteString getInvitationLinkBytes() { java.lang.Object ref = invitationLink_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); invitationLink_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(invitationLink_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, invitationLink_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(invitationLink_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, invitationLink_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v8.services.CreateCustomerClientResponse)) { return super.equals(obj); } com.google.ads.googleads.v8.services.CreateCustomerClientResponse other = (com.google.ads.googleads.v8.services.CreateCustomerClientResponse) obj; if 
(!getResourceName() .equals(other.getResourceName())) return false; if (!getInvitationLink() .equals(other.getInvitationLink())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + INVITATION_LINK_FIELD_NUMBER; hash = (53 * hash) + getInvitationLink().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v8.services.CreateCustomerClientResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for CreateCustomerClient mutate. * </pre> * * Protobuf type {@code google.ads.googleads.v8.services.CreateCustomerClientResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.CreateCustomerClientResponse) com.google.ads.googleads.v8.services.CreateCustomerClientResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v8.services.CustomerServiceProto.internal_static_google_ads_googleads_v8_services_CreateCustomerClientResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v8.services.CustomerServiceProto.internal_static_google_ads_googleads_v8_services_CreateCustomerClientResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v8.services.CreateCustomerClientResponse.class, com.google.ads.googleads.v8.services.CreateCustomerClientResponse.Builder.class); } // Construct using com.google.ads.googleads.v8.services.CreateCustomerClientResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); resourceName_ = ""; invitationLink_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v8.services.CustomerServiceProto.internal_static_google_ads_googleads_v8_services_CreateCustomerClientResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v8.services.CreateCustomerClientResponse getDefaultInstanceForType() { return com.google.ads.googleads.v8.services.CreateCustomerClientResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v8.services.CreateCustomerClientResponse build() { com.google.ads.googleads.v8.services.CreateCustomerClientResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v8.services.CreateCustomerClientResponse buildPartial() { com.google.ads.googleads.v8.services.CreateCustomerClientResponse result = new com.google.ads.googleads.v8.services.CreateCustomerClientResponse(this); result.resourceName_ = resourceName_; result.invitationLink_ = invitationLink_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, 
java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v8.services.CreateCustomerClientResponse) { return mergeFrom((com.google.ads.googleads.v8.services.CreateCustomerClientResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v8.services.CreateCustomerClientResponse other) { if (other == com.google.ads.googleads.v8.services.CreateCustomerClientResponse.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; onChanged(); } if (!other.getInvitationLink().isEmpty()) { invitationLink_ = other.invitationLink_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v8.services.CreateCustomerClientResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.ads.googleads.v8.services.CreateCustomerClientResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object resourceName_ = ""; /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @return The resourceName. 
*/ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; onChanged(); return this; } /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); onChanged(); return this; } /** * <pre> * The resource name of the newly created customer client. * </pre> * * <code>string resource_name = 2;</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; onChanged(); return this; } private java.lang.Object invitationLink_ = ""; /** * <pre> * Link for inviting user to access the created customer. 
Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @return The invitationLink. */ public java.lang.String getInvitationLink() { java.lang.Object ref = invitationLink_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); invitationLink_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Link for inviting user to access the created customer. Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @return The bytes for invitationLink. */ public com.google.protobuf.ByteString getInvitationLinkBytes() { java.lang.Object ref = invitationLink_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); invitationLink_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Link for inviting user to access the created customer. Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @param value The invitationLink to set. * @return This builder for chaining. */ public Builder setInvitationLink( java.lang.String value) { if (value == null) { throw new NullPointerException(); } invitationLink_ = value; onChanged(); return this; } /** * <pre> * Link for inviting user to access the created customer. Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @return This builder for chaining. */ public Builder clearInvitationLink() { invitationLink_ = getDefaultInstance().getInvitationLink(); onChanged(); return this; } /** * <pre> * Link for inviting user to access the created customer. Accessible to * allowlisted customers only. * </pre> * * <code>string invitation_link = 3;</code> * @param value The bytes for invitationLink to set. * @return This builder for chaining. 
*/ public Builder setInvitationLinkBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); invitationLink_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.CreateCustomerClientResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.CreateCustomerClientResponse) private static final com.google.ads.googleads.v8.services.CreateCustomerClientResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.CreateCustomerClientResponse(); } public static com.google.ads.googleads.v8.services.CreateCustomerClientResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateCustomerClientResponse> PARSER = new com.google.protobuf.AbstractParser<CreateCustomerClientResponse>() { @java.lang.Override public CreateCustomerClientResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateCustomerClientResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CreateCustomerClientResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateCustomerClientResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v8.services.CreateCustomerClientResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package org.apollo.fs.parser;

import static org.apollo.game.model.obj.GameObjectGroup.WALL;
import static org.apollo.game.model.obj.GameObjectType.DIAGONAL_WALL;
import static org.apollo.game.model.obj.GameObjectType.GENERAL_PROP;
import static org.apollo.game.model.obj.GameObjectType.GROUND_PROP;
import static org.apollo.game.model.region.Tile.FLAG_BLOCKED;
import static org.apollo.game.model.region.Tile.FLAG_BRIDGE;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;

import org.apollo.fs.FileSystem;
import org.apollo.game.model.Position;
import org.apollo.game.model.World;
import org.apollo.game.model.def.GameObjectDefinition;
import org.apollo.game.model.def.MapDefinition;
import org.apollo.game.model.obj.GameObject;
import org.apollo.game.model.obj.GameObjectGroup;
import org.apollo.game.model.obj.GameObjectOrientation;
import org.apollo.game.model.obj.GameObjectType;
import org.apollo.game.model.pf.TraversalMap;
import org.apollo.util.ByteBufferUtil;
import org.apollo.util.CompressionUtil;

/**
 * A class which parses static object definitions, which include map tiles and
 * landscapes. As a side effect of parsing, it marks blocked tiles, bridges and
 * walls on the world's {@link TraversalMap}.
 *
 * @author Ryley Kimmel <ryley.kimmel@live.com>
 */
public final class StaticObjectDefinitionParser {

    /**
     * The accumulated list of decoded game objects, returned by {@link #parse}.
     */
    private final List<GameObject> gameObjects = new ArrayList<>();

    /**
     * The world whose traversal map is updated as objects and terrain are decoded.
     */
    private final World world;

    /**
     * Constructs a new {@link StaticObjectDefinitionParser} with the specified world.
     *
     * @param world The world.
     */
    public StaticObjectDefinitionParser(World world) {
        this.world = world;
    }

    /**
     * Parses the map definition files from the {@link FileSystem}.
     *
     * <p>For each map definition, the region's absolute base coordinates are derived
     * from the definition hash (regionX in bits 8-15, regionY in bits 0-7, each
     * multiplied by the 64-tile region size), then the gzipped object and terrain
     * files are inflated and decoded.
     *
     * @param fs The file system.
     * @return A {@link List} of parsed {@link GameObject}s
     * @throws IOException If some I/O exception occurs.
     */
    public List<GameObject> parse(FileSystem fs) throws IOException {
        Map<Integer, MapDefinition> defs = MapDefinitionParser.parse(fs);

        for (Entry<Integer, MapDefinition> entry : defs.entrySet()) {
            MapDefinition def = entry.getValue();
            int hash = def.getHash();
            // Region coordinates are packed into the hash; each region spans 64 tiles.
            int x = (hash >> 8 & 0xFF) * 64;
            int y = (hash & 0xFF) * 64;

            ByteBuffer gameObjectData = fs.getFile(FileSystem.MAP_INDEX, def.getObjectFile());
            ByteBuffer gameObjectBuffer = ByteBuffer.wrap(CompressionUtil.ungzip(gameObjectData.array()));
            parseGameObject(gameObjectBuffer, x, y);

            ByteBuffer terrainData = fs.getFile(FileSystem.MAP_INDEX, def.getTerrainFile());
            ByteBuffer terrainBuffer = ByteBuffer.wrap(CompressionUtil.ungzip(terrainData.array()));
            parseTerrain(terrainBuffer, x, y);
        }

        return gameObjects;
    }

    /**
     * Parses a {@link GameObject} on the specified coordinates.
     *
     * <p>The encoding is delta-based: object ids and packed positions are read as
     * "smart" values, each delta accumulated onto the previous value, with a zero
     * smart terminating each run. The position hash packs localY in bits 0-5,
     * localX in bits 6-11 and the height level in bits 12-13; the attribute byte
     * packs the object type (bits 2+) and orientation (bits 0-1).
     *
     * @param gameObjectBuffer The uncompressed game object data buffer.
     * @param x The x coordinate this object is on.
     * @param y The y coordinate this object is on.
     */
    private void parseGameObject(ByteBuffer gameObjectBuffer, int x, int y) {
        for (int deltaId, id = -1; (deltaId = ByteBufferUtil.readSmart(gameObjectBuffer)) != 0;) {
            id += deltaId;

            for (int deltaPos, hash = 0; (deltaPos = ByteBufferUtil.readSmart(gameObjectBuffer)) != 0;) {
                hash += deltaPos - 1;

                int localX = hash >> 6 & 0x3F;
                int localY = hash & 0x3F;
                int height = hash >> 12 & 0x3;

                int attributeHashCode = gameObjectBuffer.get() & 0xFF;
                Optional<GameObjectType> type = GameObjectType.valueOf(attributeHashCode >> 2);
                Optional<GameObjectOrientation> orientation = GameObjectOrientation.valueOf(attributeHashCode & 0x3);
                Position position = new Position(x + localX, y + localY, height);

                // Objects with an unrecognized type or orientation are silently dropped.
                if (type.isPresent() && orientation.isPresent()) {
                    gameObjectDecoded(id, orientation.get(), type.get(), position);
                }
            }
        }
    }

    /**
     * Loads all of the map indexes entries and decodes each.
     *
     * <p>Each of the 4 height levels holds a 64x64 tile grid; per tile, attribute
     * opcodes are consumed until a terminator: 0 ends the tile, 1 ends it after
     * consuming an overlay byte, values up to 49 consume a trailing byte and are
     * otherwise ignored, and values 50-81 set the tile's flag bits (value - 49).
     *
     * @param mapBuffer The uncompressed map entry data buffer.
     * @param x The x coordinate of this map entry.
     * @param y The y coordinate of this map entry.
     */
    private void parseTerrain(ByteBuffer mapBuffer, int x, int y) {
        for (int height = 0; height < 4; height++) {
            for (int localX = 0; localX < 64; localX++) {
                for (int localY = 0; localY < 64; localY++) {
                    Position position = new Position(x + localX, y + localY, height);

                    int flags = 0;
                    for (;;) {
                        int attributeId = mapBuffer.get() & 0xFF;
                        if (attributeId == 0) {
                            terrainDecoded(flags, position);
                            break;
                        } else if (attributeId == 1) {
                            mapBuffer.get();  // consume the overlay byte before finishing the tile
                            terrainDecoded(flags, position);
                            break;
                        } else if (attributeId <= 49) {
                            mapBuffer.get();  // skip the payload byte of an attribute this parser ignores
                        } else if (attributeId <= 81) {
                            flags = attributeId - 49;
                        }
                    }
                }
            }
        }
    }

    /**
     * Decodes the terrains {@link Position}.
     *
     * <p>Marks the tile blocked and/or as a bridge on the traversal map according
     * to the decoded flag bits.
     *
     * @param flags The flags for the specified position.
     * @param position The decoded position.
     */
    private void terrainDecoded(int flags, Position position) {
        if ((flags & FLAG_BLOCKED) != 0) {
            world.getTraversalMap().markBlocked(position.getHeight(), position.getX(), position.getY());
        }

        if ((flags & FLAG_BRIDGE) != 0) {
            world.getTraversalMap().markBridge(position.getHeight(), position.getX(), position.getY());
        }
    }

    /**
     * Decodes a {@link GameObject} with the specified attributes on the
     * specified {@link Position}.
     *
     * <p>Updates the traversal map (blocked tiles and walls) based on the object's
     * type and group, then records the object in the result list.
     *
     * @param id The id of the game object.
     * @param orientation The orientation of the game object.
     * @param type The type of the game object.
     * @param position The position the game object lies on.
     */
    private void gameObjectDecoded(int id, GameObjectOrientation orientation, GameObjectType type, Position position) {
        TraversalMap traversalMap = world.getTraversalMap();
        GameObjectDefinition def = GameObjectDefinition.forId(id);
        Optional<GameObjectGroup> optionalGroup = type.getGroup();

        // Ground props only block the tile when they are interactable.
        if (type == GROUND_PROP) {
            if (def.hasActions()) {
                traversalMap.markBlocked(position.getHeight(), position.getX(), position.getY());
            }
        }

        if (type == GENERAL_PROP) {
            traversalMap.markBlocked(position.getHeight(), position.getX(), position.getY());
        }

        // NOTE(review): the id >= 12 range presumably covers the solid scenery types;
        // confirm against GameObjectType's id assignments.
        if (type.getId() >= 12 && type != GROUND_PROP) {
            traversalMap.markBlocked(position.getHeight(), position.getX(), position.getY());
        }

        if (optionalGroup.isPresent()) {
            GameObjectGroup group = optionalGroup.get();
            if (group == WALL) {
                traversalMap.markWall(orientation, position.getHeight(), position.getX(), position.getY(), type, def.isImpenetrable());
            }
        }

        if (type == DIAGONAL_WALL) {
            traversalMap.markBlocked(position.getHeight(), position.getX(), position.getY());
        }

        gameObjects.add(new GameObject(id, position, world, type, orientation));
    }
}
package org.arabidopsis.ahocorasick;

import junit.framework.TestCase;

import java.util.*;

/**
 * JUnit 3 test cases for {@link AhoCorasick}.
 *
 * <p>Fix: {@code largerTextExample()} did not start with the {@code test}
 * prefix, so the JUnit 3 runner silently skipped it. A delegating
 * {@code testLargerTextExample()} is added (the original method is kept so
 * existing callers keep working).
 */
public class TestAhoCorasick extends TestCase {

    private AhoCorasick tree;

    public void setUp() {
        this.tree = new AhoCorasick();
    }

    /** Verifies goto/fail transitions and outputs for two overlapping keywords. */
    public void testConstruction() {
        tree.add("hello".getBytes(), "hello".getBytes());
        tree.add("hi".getBytes(), "hi".getBytes());
        tree.prepare();

        State s0 = tree.getRoot();
        State s1 = s0.get((byte) 'h');
        State s2 = s1.get((byte) 'e');
        State s3 = s2.get((byte) 'l');
        State s4 = s3.get((byte) 'l');
        State s5 = s4.get((byte) 'o');
        State s6 = s1.get((byte) 'i');

        // All fail transitions of this trie lead back to the root.
        assertEquals(s0, s1.getFail());
        assertEquals(s0, s2.getFail());
        assertEquals(s0, s3.getFail());
        assertEquals(s0, s4.getFail());
        assertEquals(s0, s5.getFail());
        assertEquals(s0, s6.getFail());

        // Only the terminal states of "hello" and "hi" carry outputs.
        assertEquals(0, s0.getOutputs().size());
        assertEquals(0, s1.getOutputs().size());
        assertEquals(0, s2.getOutputs().size());
        assertEquals(0, s3.getOutputs().size());
        assertEquals(0, s4.getOutputs().size());
        assertEquals(1, s5.getOutputs().size());
        assertEquals(1, s6.getOutputs().size());

        assertNotNull(s6);
    }

    /** The classic Aho-Corasick paper example: he/she/his/hers. */
    public void testExample() {
        tree.add("he".getBytes(), "he".getBytes());
        tree.add("she".getBytes(), "she".getBytes());
        tree.add("his".getBytes(), "his".getBytes());
        tree.add("hers".getBytes(), "hers".getBytes());
        assertEquals(10, tree.getRoot().size());
        tree.prepare(); // after prepare, we can't call size()

        State s0 = tree.getRoot();
        State s1 = s0.get((byte) 'h');
        State s2 = s1.get((byte) 'e');
        State s3 = s0.get((byte) 's');
        State s4 = s3.get((byte) 'h');
        State s5 = s4.get((byte) 'e');
        State s6 = s1.get((byte) 'i');
        State s7 = s6.get((byte) 's');
        State s8 = s2.get((byte) 'r');
        State s9 = s8.get((byte) 's');

        assertEquals(s0, s1.getFail());
        assertEquals(s0, s2.getFail());
        assertEquals(s0, s3.getFail());
        assertEquals(s0, s6.getFail());
        assertEquals(s0, s8.getFail());
        assertEquals(s1, s4.getFail());
        assertEquals(s2, s5.getFail());
        assertEquals(s3, s7.getFail());
        assertEquals(s3, s9.getFail());

        assertEquals(0, s1.getOutputs().size());
        assertEquals(0, s3.getOutputs().size());
        assertEquals(0, s4.getOutputs().size());
        assertEquals(0, s6.getOutputs().size());
        assertEquals(0, s8.getOutputs().size());
        assertEquals(1, s2.getOutputs().size());
        assertEquals(1, s7.getOutputs().size());
        assertEquals(1, s9.getOutputs().size());
        // "she" ends here and also contains "he" as a suffix.
        assertEquals(2, s5.getOutputs().size());
    }

    public void testStartSearchWithSingleResult() {
        tree.add("apple".getBytes(), "apple".getBytes());
        tree.prepare();
        SearchResult result = tree.startSearch("washington cut the apple tree".getBytes());
        assertEquals(1, result.getOutputs().size());
        assertEquals("apple", new String((byte[]) result.getOutputs().iterator().next()));
        assertEquals(24, result.getLastIndex());
        assertEquals(null, tree.continueSearch(result));
    }

    public void testStartSearchWithAdjacentResults() {
        tree.add("john".getBytes(), "john".getBytes());
        tree.add("jane".getBytes(), "jane".getBytes());
        tree.prepare();
        SearchResult firstResult = tree.startSearch("johnjane".getBytes());
        SearchResult secondResult = tree.continueSearch(firstResult);
        assertEquals(null, tree.continueSearch(secondResult));
    }

    public void testStartSearchOnEmpty() {
        // Integer.valueOf replaces the deprecated new Integer(...) constructor.
        tree.add("cipher".getBytes(), Integer.valueOf(0));
        tree.add("zip".getBytes(), Integer.valueOf(1));
        tree.add("nought".getBytes(), Integer.valueOf(2));
        tree.prepare();
        SearchResult result = tree.startSearch("".getBytes());
        assertEquals(null, result);
    }

    /** Nested keywords (x, xx, xxx) must all be reported with cumulative outputs. */
    public void testMultipleOutputs() {
        tree.add("x".getBytes(), "x");
        tree.add("xx".getBytes(), "xx");
        tree.add("xxx".getBytes(), "xxx");
        tree.prepare();

        SearchResult result = tree.startSearch("xxx".getBytes());
        assertEquals(1, result.getLastIndex());
        assertEquals(new HashSet(Arrays.asList(new String[] {"x"})), result.getOutputs());

        result = tree.continueSearch(result);
        assertEquals(2, result.getLastIndex());
        assertEquals(new HashSet(Arrays.asList(new String[] {"xx", "x"})), result.getOutputs());

        result = tree.continueSearch(result);
        assertEquals(3, result.getLastIndex());
        assertEquals(new HashSet(Arrays.asList(new String[] {"xxx", "xx", "x"})), result.getOutputs());

        assertEquals(null, tree.continueSearch(result));
    }

    /** Exercises the Iterator facade, including NoSuchElementException at the end. */
    public void testIteratorInterface() {
        tree.add("moo".getBytes(), "moo");
        tree.add("one".getBytes(), "one");
        tree.add("on".getBytes(), "on");
        tree.add("ne".getBytes(), "ne");
        tree.prepare();
        Iterator iter = tree.search("one moon ago".getBytes());

        assertTrue(iter.hasNext());
        SearchResult r = (SearchResult) iter.next();
        assertEquals(new HashSet(Arrays.asList(new String[] {"on"})), r.getOutputs());
        assertEquals(2, r.getLastIndex());

        assertTrue(iter.hasNext());
        r = (SearchResult) iter.next();
        assertEquals(new HashSet(Arrays.asList(new String[] {"one", "ne"})), r.getOutputs());
        assertEquals(3, r.getLastIndex());

        assertTrue(iter.hasNext());
        r = (SearchResult) iter.next();
        assertEquals(new HashSet(Arrays.asList(new String[] {"moo"})), r.getOutputs());
        assertEquals(7, r.getLastIndex());

        assertTrue(iter.hasNext());
        r = (SearchResult) iter.next();
        assertEquals(new HashSet(Arrays.asList(new String[] {"on"})), r.getOutputs());
        assertEquals(8, r.getLastIndex());

        assertFalse(iter.hasNext());

        try {
            iter.next();
            fail();
        } catch (NoSuchElementException e) {
            // expected: iterator is exhausted
        }
    }

    /**
     * JUnit 3 discovers test methods by the {@code test} name prefix; this
     * wrapper makes {@link #largerTextExample()} actually run.
     */
    public void testLargerTextExample() {
        largerTextExample();
    }

    /** Searches a realistic abstract for a dictionary of biology terms. */
    public void largerTextExample() {
        String text = "The ga3 mutant of Arabidopsis is a gibberellin-responsive dwarf. We present data showing that the ga3-1 mutant is deficient in ent-kaurene oxidase activity, the first cytochrome P450-mediated step in the gibberellin biosynthetic pathway. By using a combination of conventional map-based cloning and random sequencing we identified a putative cytochrome P450 gene mapping to the same location as GA3. Relative to the progenitor line, two ga3 mutant alleles contained single base changes generating in-frame stop codons in the predicted amino acid sequence of the P450. A genomic clone spanning the P450 locus complemented the ga3-2 mutant. The deduced GA3 protein defines an additional class of cytochrome P450 enzymes. The GA3 gene was expressed in all tissues examined, RNA abundance being highest in inflorescence tissue.";
        String[] terms = {
            "microsome",
            "cytochrome",
            "cytochrome P450 activity",
            "gibberellic acid biosynthesis",
            "GA3",
            "cytochrome P450",
            "oxygen binding",
            "AT5G25900.1",
            "protein",
            "RNA",
            "gibberellin",
            "Arabidopsis",
            "ent-kaurene oxidase activity",
            "inflorescence",
            "tissue",
        };
        for (int i = 0; i < terms.length; i++) {
            tree.add(terms[i].getBytes(), terms[i]);
        }
        tree.prepare();

        Set termsThatHit = new HashSet();
        for (Iterator iter = tree.search(text.getBytes()); iter.hasNext(); ) {
            SearchResult result = (SearchResult) iter.next();
            termsThatHit.addAll(result.getOutputs());
        }
        assertEquals(new HashSet(Arrays.asList(new String[] {
            "cytochrome",
            "GA3",
            "cytochrome P450",
            "protein",
            "RNA",
            "gibberellin",
            "Arabidopsis",
            "ent-kaurene oxidase activity",
            "inflorescence",
            "tissue",
        })), termsThatHit);
    }
}
package net.etalia.cron;

import java.io.File;
import java.io.FileInputStream;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.Logger;

import net.etalia.client.codec.Digester;
import net.etalia.client.domain.User;
import net.etalia.client.http.Call;
import net.etalia.client.http.Caller;
import net.etalia.client.http.JsonedException;
import net.etalia.client.http.httpclient.HttpClientCaller;
import net.etalia.client.http.httpclient.HttpClientHelper;
import net.etalia.client.services.ContentApi;

import org.apache.http.client.HttpClient;

/**
 * Schedules recurring directory-scan import jobs against the Etalia content API.
 *
 * <p>Configuration is read lazily from a properties file (path given on the
 * command line or defaulting to {@code ~/.etalia-import.properties} in the
 * working directory). One {@link ScheduledJob} is scheduled per configured
 * directory.
 *
 * <p>Fixes: the properties {@link FileInputStream} was never closed (resource
 * leak) and load failures were reported via {@code printStackTrace()} instead
 * of the class logger.
 */
public class ScheduledImport {

    private final static Logger log = Logger.getLogger(ScheduledImport.class.getName());

    private static ScheduledExecutorService scheduler;

    public static final String PROP_FILES_DIR = "directory";
    public static final String PROP_FILE_FORMAT = "format";
    public static final String PROP_JOB_DELAY = "job.delay";
    public static final String PROP_PARALLEL_IMPORTS = "parallel-imports";
    public static final String PROP_DIRECTORIES = "directories";
    public static final String PROP_PASSWORD = "password";
    public static final String PROP_PUBLICATION = "publication";
    public static final String PROP_STAMP = "stamp";
    public static final String PROP_TIMEZONE = "date.timezone";
    public static final String PROP_USER = "email";

    protected static final String CONTENT_API_URL = "https://api.etalia.net/content-api/";
    protected static final String IMAGE_API_URL = "http://image.etalia.net:81/";
    protected static final String CLOUDFRONT_URL = "http://d2feaz2wbhr9zq.cloudfront.net/";

    private Properties properties;
    private String propertyFile;
    private User user;
    private HttpClientCaller<ContentApi> cApi;

    /** Returns the named property, or {@code null} when absent. */
    public String getProperty(String name) {
        return getProperty(name, null);
    }

    /**
     * Returns the named property, loading the property file on first access.
     *
     * @param name the property key
     * @param defaultValue value returned when the key is missing
     * @return the property value, or {@code defaultValue} when absent
     */
    public String getProperty(String name, String defaultValue) {
        if (properties == null) {
            properties = new Properties();
            // try-with-resources: the stream was previously leaked on every load.
            try (FileInputStream in = new FileInputStream(propertyFile)) {
                properties.load(in);
            } catch (Exception e) {
                // Best-effort: a missing/broken file leaves an empty Properties,
                // so callers fall back to their defaults.
                log.log(Level.WARNING, "Cannot load property file: " + propertyFile, e);
            }
        }
        if (properties.containsKey(name)) {
            return properties.getProperty(name);
        } else {
            return defaultValue;
        }
    }

    /** Returns the authenticated user (set by {@link #init()}). */
    public User getUser() {
        return user;
    }

    /** Returns the Authorization header value cached on the authenticated user. */
    public String getAuthorization() {
        return user.getExtraData("auth");
    }

    /** Returns the content-API caller. */
    public Caller<ContentApi> getCAPI() {
        return cApi;
    }

    public static void main(String[] args) {
        try {
            LogManager.getLogManager().readConfiguration();
        } catch (Exception e) {
            System.err.println("Cannot instantiate LogManager: " + e.getMessage());
        }
        // Property file from argv[0], else ".etalia-import.properties" in the cwd.
        String propertyFile;
        if (args.length != 0) {
            propertyFile = args[0];
        } else {
            propertyFile = System.getProperty("user.dir") + File.separator + ".etalia-import.properties";
        }
        ScheduledImport importer = new ScheduledImport(propertyFile);
        importer.init();
        importer.run();
    }

    public ScheduledImport(String propertyFile) {
        // NOTE(review): LogManager configuration is also done in main(); this
        // duplicate read is harmless but redundant when launched via main().
        try {
            LogManager.getLogManager().readConfiguration();
        } catch (Exception e) {
            System.err.println("Cannot instantiate LogManager: " + e.getMessage());
        }
        log.info("Using property file: " + propertyFile);
        this.propertyFile = propertyFile;
    }

    /** Builds the HTTP caller for the content API and authenticates. */
    public void init() {
        cApi = new HttpClientCaller<ContentApi>(ContentApi.class);
        cApi.setBaseUrl(CONTENT_API_URL);
        HttpClient httpClient = new HttpClientHelper().createDefaultClient(10, 30000);
        /*try {
            KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
            trustStore.load(ScheduledImport.class.getResourceAsStream("/keystore.jks.dev"), "etalia".toCharArray());
            SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(trustStore, new TrustSelfSignedStrategy()).build();
            SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(sslcontext, new String[] {"TLSv1"}, null,
                SSLConnectionSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER);
            httpClient = new HttpClientHelper().createDefaultClient(10, 30000, sslsf);
        } catch (Exception e) {
            log.log(Level.SEVERE, "Cannot use SSL Certificate", e);
        }*/
        cApi.setHttpClient(httpClient);
        this.authentication();
    }

    /**
     * Schedules the import jobs: one per configured directory when
     * {@code directories} is set, otherwise a single job on the configured
     * (or current) directory.
     */
    public void run() {
        if (getProperty(PROP_DIRECTORIES) != null) {
            int directories = Integer.parseInt(getProperty(PROP_DIRECTORIES));
            int imports = Integer.parseInt(getProperty(PROP_PARALLEL_IMPORTS));
            log.info("Scheduling " + imports + " parallel jobs");
            scheduler = Executors.newScheduledThreadPool(imports);
            log.info("Scanning " + directories + " directories");
            // Per-directory settings are suffixed with the 1-based index.
            for (int i = 1; i <= directories; i++) {
                String dir = getProperty(PROP_FILES_DIR + "." + i);
                String publication = getProperty(PROP_PUBLICATION + "." + i);
                String stamp = getProperty(ScheduledImport.PROP_STAMP + "." + i);
                ScheduledJob job = new ScheduledJob(this, dir, publication, stamp);
                log.info("Scheduling new job for directory: " + dir + " - publication: " + publication + " - stamp: " + stamp);
                scheduler.scheduleWithFixedDelay(job, 0, Long.parseLong(getProperty(PROP_JOB_DELAY, "60")), TimeUnit.SECONDS);
            }
        } else {
            scheduler = Executors.newScheduledThreadPool(1);
            String dir = getProperty(PROP_FILES_DIR) != null ? getProperty(PROP_FILES_DIR) : System.getProperty("user.dir");
            String publication = getProperty(PROP_PUBLICATION);
            String stamp = getProperty(ScheduledImport.PROP_STAMP);
            ScheduledJob job = new ScheduledJob(this, dir, publication, stamp);
            log.info("Scheduling new job for directory: " + dir + " - publication: " + publication + " - stamp: " + stamp);
            scheduler.scheduleWithFixedDelay(job, 0, Long.parseLong(getProperty(PROP_JOB_DELAY, "60")), TimeUnit.SECONDS);
        }
    }

    public <X> X invokeCAPI(X object, String fields) throws Exception {
        return invokeCAPI(object, fields, null);
    }

    public <X> X invokeCAPI(X object, Map<String, String> pathVariables) throws Exception {
        return invokeCAPI(object, null, pathVariables);
    }

    /**
     * Invokes the content API, retrying once with a fresh auth token on a 401.
     *
     * @param object the typed request stub
     * @param fields comma-separated field list, or {@code null}
     * @param pathVariables path substitutions, or {@code null}
     * @return the response, cast to the request type
     * @throws Exception if the call fails or the token cannot be refreshed
     */
    public <X> X invokeCAPI(X object, String fields, Map<String, String> pathVariables) throws Exception {
        Call<X> request = getCAPI().method(object);
        request.setHeader("Authorization", getAuthorization());
        if (fields != null) {
            request.withFields(fields.split(","));
        }
        if (pathVariables != null) {
            for (Map.Entry<String, String> entry : pathVariables.entrySet()) {
                request.setPathVariable(entry.getKey(), entry.getValue());
            }
        }
        X response = null;
        try {
            response = request.execute().cast();
        } catch (JsonedException e) {
            if (e.getStatusCode() == 401) {
                // Token expired: re-authenticate and retry the call once.
                this.authentication();
                try {
                    response = request.execute().cast();
                } catch (JsonedException ie) {
                    log.severe("Cannot refresh authorization token!");
                    throw new Exception("Cannot refresh authorization token!");
                }
            } else {
                log.severe("Cannot invoke server!");
                throw new Exception("Cannot invoke server!");
            }
        }
        return response;
    }

    /** Generates a fresh auth token and caches the authenticated user. */
    private void authentication() {
        log.info("Generate Authentication token...");
        String auth = "Etalia_" + getProperty(PROP_USER) + ":"
                + new Digester().md5(getProperty(PROP_PASSWORD)).toBase64UrlSafeNoPad();
        user = cApi.method(cApi.service().authUser(auth)).withFields("id", "extraData")
                .setHeader("Authorization", auth).execute().cast();
        log.info("Authentication done!");
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType.ABSTRACT; import static com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType.TEST; import com.google.common.base.Preconditions; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory; import com.google.devtools.build.lib.analysis.config.DefaultsPackage; import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.LabelSyntaxException; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import 
com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.graph.Digraph; import com.google.devtools.build.lib.graph.Node; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.NativeAspectClass; import com.google.devtools.build.lib.packages.NonconfigurableAttributeMapper; import com.google.devtools.build.lib.packages.OutputFile; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.RuleClassProvider; import com.google.devtools.build.lib.packages.RuleErrorConsumer; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.rules.RuleConfiguredTargetFactory; import com.google.devtools.build.lib.rules.SkylarkModules; import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.InvocationPolicy; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.Environment.Extension; import com.google.devtools.build.lib.syntax.Environment.Phase; import com.google.devtools.build.lib.syntax.Mutability; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.common.options.OptionsClassProvider; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javax.annotation.Nullable; /** * Knows about every rule Blaze supports and the associated configuration options. * * <p>This class is initialized on server startup and the set of rules, build info factories * and configuration options is guarantees not to change over the life time of the Blaze server. */ public class ConfiguredRuleClassProvider implements RuleClassProvider { /** * Custom dependency validation logic. 
 */
public interface PrerequisiteValidator {

    /**
     * Checks whether the rule in {@code contextBuilder} is allowed to depend on
     * {@code prerequisite} through the attribute {@code attribute}.
     *
     * <p>Can be used for enforcing any organization-specific policies about the layout of the
     * workspace.
     */
    void validate(
        RuleContext.Builder contextBuilder, ConfiguredTarget prerequisite, Attribute attribute);
  }

  /** Validator to check for and warn on the deprecation of dependencies. */
  public static final class DeprecationValidator implements PrerequisiteValidator {

    /** Checks if the given prerequisite is deprecated and prints a warning if so. */
    @Override
    public void validate(
        RuleContext.Builder contextBuilder, ConfiguredTarget prerequisite, Attribute attribute) {
      validateDirectPrerequisiteForDeprecation(
          contextBuilder, contextBuilder.getRule(), prerequisite, contextBuilder.forAspect());
    }

    /**
     * Returns whether two packages are considered the same for purposes of deprecation warnings.
     * Dependencies within the same package do not print deprecation warnings; a package in the
     * javatests directory may also depend on its corresponding java package without a warning.
     */
    public static boolean isSameLogicalPackage(
        PackageIdentifier thisPackage, PackageIdentifier prerequisitePackage) {
      if (thisPackage.equals(prerequisitePackage)) {
        // If the packages are equal, they are the same logical package (and just the same package).
        return true;
      }
      if (!thisPackage.getRepository().equals(prerequisitePackage.getRepository())) {
        // If the packages are in different repositories, they are not the same logical package.
        return false;
      }
      // If the packages are in the same repository, it's allowed iff this package is the javatests
      // companion to the prerequisite java package.
      String thisPackagePath = thisPackage.getPackageFragment().getPathString();
      String prerequisitePackagePath = prerequisitePackage.getPackageFragment().getPathString();
      return thisPackagePath.startsWith("javatests/")
          && prerequisitePackagePath.startsWith("java/")
          && thisPackagePath.substring("javatests/".length()).equals(
              prerequisitePackagePath.substring("java/".length()));
    }

    /**
     * Returns whether a deprecation warning should be printed for the prerequisite described.
     *
     * <p>A warning is emitted only when the prerequisite is deprecated, the depending target is
     * not itself deprecated, the two are not in the same logical package, and the check is not
     * running on behalf of an aspect (the base target already warned).
     */
    private static boolean shouldEmitDeprecationWarningFor(
        String thisDeprecation,
        PackageIdentifier thisPackage,
        String prerequisiteDeprecation,
        PackageIdentifier prerequisitePackage,
        boolean forAspect) {
      // Don't report deprecation edges from javatests to java or within a package;
      // otherwise tests of deprecated code generate nuisance warnings.
      // Don't report deprecation if the current target is also deprecated,
      // or if the current context is evaluating an aspect,
      // as the base target would have already printed the deprecation warnings.
      return (!forAspect
          && prerequisiteDeprecation != null
          && !isSameLogicalPackage(thisPackage, prerequisitePackage)
          && thisDeprecation == null);
    }

    /**
     * Checks if the given prerequisite is deprecated and prints a warning if so.
     *
     * <p>Handles both rule prerequisites and output-file prerequisites (for the latter, the
     * deprecation of the generating rule is what matters).
     */
    public static void validateDirectPrerequisiteForDeprecation(
        RuleErrorConsumer errors, Rule rule, ConfiguredTarget prerequisite, boolean forAspect) {
      Target prerequisiteTarget = prerequisite.getTarget();
      Label prerequisiteLabel = prerequisiteTarget.getLabel();
      PackageIdentifier thatPackage = prerequisiteLabel.getPackageIdentifier();
      PackageIdentifier thisPackage = rule.getLabel().getPackageIdentifier();

      if (prerequisiteTarget instanceof Rule) {
        Rule prerequisiteRule = (Rule) prerequisiteTarget;
        String thisDeprecation =
            NonconfigurableAttributeMapper.of(rule).get("deprecation", Type.STRING);
        String thatDeprecation =
            NonconfigurableAttributeMapper.of(prerequisiteRule).get("deprecation", Type.STRING);
        if (shouldEmitDeprecationWarningFor(
            thisDeprecation, thisPackage, thatDeprecation, thatPackage, forAspect)) {
          errors.ruleWarning("target '" + rule.getLabel() + "' depends on deprecated target '"
              + prerequisiteLabel + "': " + thatDeprecation);
        }
      }

      if (prerequisiteTarget instanceof OutputFile) {
        Rule generatingRule = ((OutputFile) prerequisiteTarget).getGeneratingRule();
        String thisDeprecation =
            NonconfigurableAttributeMapper.of(rule).get("deprecation", Type.STRING);
        String thatDeprecation =
            NonconfigurableAttributeMapper.of(generatingRule).get("deprecation", Type.STRING);
        if (shouldEmitDeprecationWarningFor(
            thisDeprecation, thisPackage, thatDeprecation, thatPackage, forAspect)) {
          errors.ruleWarning("target '" + rule.getLabel() + "' depends on the output file "
              + prerequisiteLabel + " of a deprecated rule " + generatingRule.getLabel()
              + "': " + thatDeprecation);
        }
      }
    }
  }

  /**
   * Builder for {@link ConfiguredRuleClassProvider}.
   */
  public static class Builder implements RuleDefinitionEnvironment {
    private final StringBuilder defaultWorkspaceFilePrefix = new StringBuilder();
    private final StringBuilder defaultWorkspaceFileSuffix = new StringBuilder();
    private Label preludeLabel;
    private String runfilesPrefix;
    private String toolsRepository;
    private final List<ConfigurationFragmentFactory> configurationFragmentFactories =
        new ArrayList<>();
    private final List<BuildInfoFactory> buildInfoFactories = new ArrayList<>();
    private final List<Class<? extends FragmentOptions>> configurationOptions = new ArrayList<>();

    private final Map<String, RuleClass> ruleClassMap = new HashMap<>();
    private final Map<String, RuleDefinition> ruleDefinitionMap = new HashMap<>();
    private final Map<String, NativeAspectClass> nativeAspectClassMap = new HashMap<>();
    private final Map<Class<? extends RuleDefinition>, RuleClass> ruleMap = new HashMap<>();
    // Tracks ancestor relationships between rule definitions; build() commits them
    // in topological order so ancestors are always initialized first.
    private final Digraph<Class<? extends RuleDefinition>> dependencyGraph = new Digraph<>();
    private ConfigurationCollectionFactory configurationCollectionFactory;
    private Class<? extends BuildConfiguration.Fragment> universalFragment;
    private PrerequisiteValidator prerequisiteValidator;
    private ImmutableMap.Builder<String, Object> skylarkAccessibleTopLevels =
        ImmutableMap.builder();
    private ImmutableList.Builder<Class<?>> skylarkModules =
        ImmutableList.<Class<?>>builder().addAll(SkylarkModules.MODULES);
    private ImmutableBiMap.Builder<String, Class<? extends TransitiveInfoProvider>>
        registeredSkylarkProviders = ImmutableBiMap.builder();
    private Map<String, String> platformRegexps = new TreeMap<>();

    /** Appends content to the implicit prefix of every WORKSPACE file. */
    public void addWorkspaceFilePrefix(String contents) {
      defaultWorkspaceFilePrefix.append(contents);
    }

    /** Appends content to the implicit suffix of every WORKSPACE file. */
    public void addWorkspaceFileSuffix(String contents) {
      defaultWorkspaceFileSuffix.append(contents);
    }

    /** Sets the prelude label; the label string must be a valid absolute label. */
    public Builder setPrelude(String preludeLabelString) {
      try {
        this.preludeLabel = Label.parseAbsolute(preludeLabelString);
      } catch (LabelSyntaxException e) {
        String errorMsg =
            String.format("Prelude label '%s' is invalid: %s", preludeLabelString, e.getMessage());
        throw new IllegalArgumentException(errorMsg);
      }
      return this;
    }

    public Builder setRunfilesPrefix(String runfilesPrefix) {
      this.runfilesPrefix = runfilesPrefix;
      return this;
    }

    public Builder setToolsRepository(String toolsRepository) {
      this.toolsRepository = toolsRepository;
      return this;
    }

    public Builder setPrerequisiteValidator(PrerequisiteValidator prerequisiteValidator) {
      this.prerequisiteValidator = prerequisiteValidator;
      return this;
    }

    public Builder addBuildInfoFactory(BuildInfoFactory factory) {
      buildInfoFactories.add(factory);
      return this;
    }

    /** Registers a rule definition and records its ancestry in the dependency graph. */
    public Builder addRuleDefinition(RuleDefinition ruleDefinition) {
      Class<? extends RuleDefinition> ruleDefinitionClass = ruleDefinition.getClass();
      ruleDefinitionMap.put(ruleDefinitionClass.getName(), ruleDefinition);
      dependencyGraph.createNode(ruleDefinitionClass);
      for (Class<? extends RuleDefinition> ancestor : ruleDefinition.getMetadata().ancestors()) {
        dependencyGraph.addEdge(ancestor, ruleDefinitionClass);
      }
      return this;
    }

    public Builder addNativeAspectClass(NativeAspectClass aspectFactoryClass) {
      nativeAspectClassMap.put(aspectFactoryClass.getName(), aspectFactoryClass);
      return this;
    }

    public Builder addConfigurationOptions(Class<? extends FragmentOptions> configurationOptions) {
      this.configurationOptions.add(configurationOptions);
      return this;
    }

    public Builder addConfigurationOptions(
        Collection<Class<? extends FragmentOptions>> optionsClasses) {
      this.configurationOptions.addAll(optionsClasses);
      return this;
    }

    public Builder addConfigurationFragment(ConfigurationFragmentFactory factory) {
      configurationFragmentFactories.add(factory);
      return this;
    }

    public Builder setConfigurationCollectionFactory(ConfigurationCollectionFactory factory) {
      this.configurationCollectionFactory = factory;
      return this;
    }

    public Builder setUniversalConfigurationFragment(
        Class<? extends BuildConfiguration.Fragment> fragment) {
      this.universalFragment = fragment;
      return this;
    }

    public Builder addSkylarkAccessibleTopLevels(String name, Object object) {
      this.skylarkAccessibleTopLevels.put(name, object);
      return this;
    }

    public Builder addSkylarkModule(Class<?>... modules) {
      this.skylarkModules.add(modules);
      return this;
    }

    /**
     * Adds a mapping that determines which keys in structs returned by skylark rules should be
     * interpreted as native TransitiveInfoProvider instances of type (map value).
     */
    public Builder registerSkylarkProvider(
        String name, Class<? extends TransitiveInfoProvider> provider) {
      this.registeredSkylarkProviders.put(name, provider);
      return this;
    }

    /**
     * Do not use - this only exists for backwards compatibility! Platform regexps are part of a
     * legacy mechanism - {@code vardef} - that is not exposed in Bazel.
     *
     * <p>{@code vardef} needs explicit support in the rule implementations, and cannot express
     * conditional dependencies, only conditional attribute values. This mechanism will be
     * supplanted by configuration dependent attributes, and its effect can usually also be achieved
     * with select().
     *
     * <p>This is a map of platform names to regexps. When a name is used as the third argument to
     * {@code vardef}, the corresponding regexp is used to match on the C++ abi, and the variable is
     * only set to that value if the regexp matches. For example, the entry
     * {@code "oldlinux": "i[34]86-libc[345]-linux"} might define a set of platforms representing
     * certain older linux releases.
     */
    public Builder addPlatformRegexps(Map<String, String> platformRegexps) {
      this.platformRegexps.putAll(Preconditions.checkNotNull(platformRegexps));
      return this;
    }

    /** Instantiates the configured-target factory via its no-arg constructor. */
    private RuleConfiguredTargetFactory createFactory(
        Class<? extends RuleConfiguredTargetFactory> factoryClass) {
      try {
        Constructor<? extends RuleConfiguredTargetFactory> ctor = factoryClass.getConstructor();
        return ctor.newInstance();
      } catch (NoSuchMethodException | IllegalAccessException | InstantiationException
          | InvocationTargetException e) {
        throw new IllegalStateException(e);
      }
    }

    /**
     * Builds the {@link RuleClass} for one rule definition, validating its metadata and
     * resolving its (already-committed) ancestor rule classes.
     */
    private RuleClass commitRuleDefinition(Class<? extends RuleDefinition> definitionClass) {
      RuleDefinition instance = checkNotNull(ruleDefinitionMap.get(definitionClass.getName()),
          "addRuleDefinition(new %s()) should be called before build()",
          definitionClass.getName());

      RuleDefinition.Metadata metadata = instance.getMetadata();
      checkArgument(ruleClassMap.get(metadata.name()) == null, metadata.name());

      List<Class<? extends RuleDefinition>> ancestors = metadata.ancestors();

      // Exactly one of: ABSTRACT rule classes have no factory; concrete ones must.
      checkArgument(
          metadata.type() == ABSTRACT ^ metadata.factoryClass()
              != RuleConfiguredTargetFactory.class);
      // TEST rules must inherit from the test base rule.
      checkArgument(
          (metadata.type() != TEST)
          || ancestors.contains(BaseRuleClasses.TestBaseRule.class));

      RuleClass[] ancestorClasses = new RuleClass[ancestors.size()];
      for (int i = 0; i < ancestorClasses.length; i++) {
        ancestorClasses[i] = ruleMap.get(ancestors.get(i));
        if (ancestorClasses[i] == null) {
          // Ancestors should have been initialized by now
          throw new IllegalStateException("Ancestor " + ancestors.get(i) + " of "
              + metadata.name() + " is not initialized");
        }
      }

      RuleConfiguredTargetFactory factory = null;
      if (metadata.type() != ABSTRACT) {
        factory = createFactory(metadata.factoryClass());
      }

      RuleClass.Builder builder = new RuleClass.Builder(
          metadata.name(), metadata.type(), false, ancestorClasses);
      builder.factory(factory);
      RuleClass ruleClass = instance.build(builder, this);
      ruleMap.put(definitionClass, ruleClass);
      ruleClassMap.put(ruleClass.getName(), ruleClass);
      ruleDefinitionMap.put(ruleClass.getName(), instance);

      return ruleClass;
    }

    /** Commits all registered rule definitions (ancestors first) and builds the provider. */
    public ConfiguredRuleClassProvider build() {
      for (Node<Class<? extends RuleDefinition>> ruleDefinition :
          dependencyGraph.getTopologicalOrder()) {
        commitRuleDefinition(ruleDefinition.getLabel());
      }

      return new ConfiguredRuleClassProvider(
          preludeLabel,
          runfilesPrefix,
          toolsRepository,
          ImmutableMap.copyOf(ruleClassMap),
          ImmutableMap.copyOf(ruleDefinitionMap),
          ImmutableMap.copyOf(nativeAspectClassMap),
          defaultWorkspaceFilePrefix.toString(),
          defaultWorkspaceFileSuffix.toString(),
          ImmutableList.copyOf(buildInfoFactories),
          ImmutableList.copyOf(configurationOptions),
          ImmutableList.copyOf(configurationFragmentFactories),
          configurationCollectionFactory,
          universalFragment,
          prerequisiteValidator,
          skylarkAccessibleTopLevels.build(),
          skylarkModules.build(),
          registeredSkylarkProviders.build());
    }

    @Override
    public Label getLabel(String labelValue) {
      return LABELS.getUnchecked(labelValue);
    }

    @Override
    public Label getToolsLabel(String labelValue) {
      return getLabel(toolsRepository + labelValue);
    }

    @Override
    public String getToolsRepository() {
      return toolsRepository;
    }

    @Nullable
    public Map<String, String> getPlatformRegexps() {
      return platformRegexps.isEmpty() ? null : ImmutableMap.copyOf(platformRegexps);
    }
  }

  /**
   * Used to make the label instances unique, so that we don't create a new
   * instance for every rule.
   */
  private static final LoadingCache<String, Label> LABELS = CacheBuilder.newBuilder().build(
      new CacheLoader<String, Label>() {
    @Override
    public Label load(String from) {
      try {
        return Label.parseAbsolute(from);
      } catch (LabelSyntaxException e) {
        throw new IllegalArgumentException(from, e);
      }
    }
  });

  /**
   * Default content that should be added at the beginning of the WORKSPACE file.
   */
  private final String defaultWorkspaceFilePrefix;

  /**
   * Default content that should be added at the end of the WORKSPACE file.
   */
  private final String defaultWorkspaceFileSuffix;

  /**
   * Label for the prelude file.
   */
  private final Label preludeLabel;

  /**
   * The default runfiles prefix.
   */
  private final String runfilesPrefix;

  /**
   * The path to the tools repository.
   */
  private final String toolsRepository;

  /**
   * Maps rule class name to the metaclass instance for that rule.
   */
  private final ImmutableMap<String, RuleClass> ruleClassMap;

  /**
   * Maps rule class name to the rule definition objects.
   */
  private final ImmutableMap<String, RuleDefinition> ruleDefinitionMap;

  /**
   * Maps aspect name to the aspect factory meta class.
   */
  private final ImmutableMap<String, NativeAspectClass> nativeAspectClassMap;

  /**
   * The configuration options that affect the behavior of the rules.
   */
  private final ImmutableList<Class<? extends FragmentOptions>> configurationOptions;

  /** The set of configuration fragment factories. */
  private final ImmutableList<ConfigurationFragmentFactory> configurationFragmentFactories;

  /**
   * The factory that creates the configuration collection.
   */
  private final ConfigurationCollectionFactory configurationCollectionFactory;

  /**
   * A configuration fragment that should be available to all rules even when they don't
   * explicitly require it.
   */
  private final Class<? extends BuildConfiguration.Fragment> universalFragment;

  private final ImmutableList<BuildInfoFactory> buildInfoFactories;

  private final PrerequisiteValidator prerequisiteValidator;

  // Skylark globals derived from the accessible top-levels and modules at construction time.
  private final Environment.Frame globals;

  private final ImmutableBiMap<String, Class<? extends TransitiveInfoProvider>>
      registeredSkylarkProviders;

  private ConfiguredRuleClassProvider(
      Label preludeLabel,
      String runfilesPrefix,
      String toolsRepository,
      ImmutableMap<String, RuleClass> ruleClassMap,
      ImmutableMap<String, RuleDefinition> ruleDefinitionMap,
      ImmutableMap<String, NativeAspectClass> nativeAspectClassMap,
      String defaultWorkspaceFilePrefix,
      String defaultWorkspaceFileSuffix,
      ImmutableList<BuildInfoFactory> buildInfoFactories,
      ImmutableList<Class<? extends FragmentOptions>> configurationOptions,
      ImmutableList<ConfigurationFragmentFactory> configurationFragments,
      ConfigurationCollectionFactory configurationCollectionFactory,
      Class<? extends BuildConfiguration.Fragment> universalFragment,
      PrerequisiteValidator prerequisiteValidator,
      ImmutableMap<String, Object> skylarkAccessibleJavaClasses,
      ImmutableList<Class<?>> skylarkModules,
      ImmutableBiMap<String, Class<? extends TransitiveInfoProvider>>
          registeredSkylarkProviders) {
    this.preludeLabel = preludeLabel;
    this.runfilesPrefix = runfilesPrefix;
    this.toolsRepository = toolsRepository;
    this.ruleClassMap = ruleClassMap;
    this.ruleDefinitionMap = ruleDefinitionMap;
    this.nativeAspectClassMap = nativeAspectClassMap;
    this.defaultWorkspaceFilePrefix = defaultWorkspaceFilePrefix;
    this.defaultWorkspaceFileSuffix = defaultWorkspaceFileSuffix;
    this.buildInfoFactories = buildInfoFactories;
    this.configurationOptions = configurationOptions;
    this.configurationFragmentFactories = configurationFragments;
    this.configurationCollectionFactory = configurationCollectionFactory;
    this.universalFragment = universalFragment;
    this.prerequisiteValidator = prerequisiteValidator;
    this.globals = createGlobals(skylarkAccessibleJavaClasses, skylarkModules);
    this.registeredSkylarkProviders = registeredSkylarkProviders;
  }

  public PrerequisiteValidator getPrerequisiteValidator() {
    return prerequisiteValidator;
  }

  @Override
  public Label getPreludeLabel() {
    return preludeLabel;
  }

  @Override
  public String getRunfilesPrefix() {
    return runfilesPrefix;
  }

  @Override
  public String getToolsRepository() {
    return toolsRepository;
  }

  @Override
  public Map<String, RuleClass> getRuleClassMap() {
    return ruleClassMap;
  }

  @Override
  public Map<String, NativeAspectClass> getNativeAspectClassMap() {
    return nativeAspectClassMap;
  }

  @Override
  public NativeAspectClass getNativeAspectClass(String key) {
    return nativeAspectClassMap.get(key);
  }

  /**
   * Returns a list of build info factories that are needed for the supported languages.
*/ public ImmutableList<BuildInfoFactory> getBuildInfoFactories() { return buildInfoFactories; } /** * Returns the set of configuration fragments provided by this module. */ public ImmutableList<ConfigurationFragmentFactory> getConfigurationFragments() { return configurationFragmentFactories; } /** * Returns the set of configuration options that are supported in this module. */ public ImmutableList<Class<? extends FragmentOptions>> getConfigurationOptions() { return configurationOptions; } /** * Returns the definition of the rule class definition with the specified name. */ public RuleDefinition getRuleClassDefinition(String ruleClassName) { return ruleDefinitionMap.get(ruleClassName); } /** * Returns the configuration collection creator. */ public ConfigurationCollectionFactory getConfigurationCollectionFactory() { return configurationCollectionFactory; } /** * Returns the configuration fragment that should be available to all rules even when they * don't explicitly require it. */ public Class<? extends BuildConfiguration.Fragment> getUniversalFragment() { return universalFragment; } /** * Returns the defaults package for the default settings. */ public String getDefaultsPackageContent(InvocationPolicy invocationPolicy) { return DefaultsPackage.getDefaultsPackageContent(configurationOptions, invocationPolicy); } /** * Returns the defaults package for the given options taken from an optionsProvider. */ public String getDefaultsPackageContent(OptionsClassProvider optionsProvider) { return DefaultsPackage.getDefaultsPackageContent( BuildOptions.of(configurationOptions, optionsProvider)); } /** * Returns a map that indicates which keys in structs returned by skylark rules should be * interpreted as native TransitiveInfoProvider instances of type (map value). * * <p>That is, if this map contains "dummy" -> DummyProvider.class, a "dummy" entry in a skylark * rule implementations returned struct will be exported from that ConfiguredTarget as a * DummyProvider. 
*/ public ImmutableBiMap<String, Class<? extends TransitiveInfoProvider>> getRegisteredSkylarkProviders() { return this.registeredSkylarkProviders; } /** * Creates a BuildOptions class for the given options taken from an optionsProvider. */ public BuildOptions createBuildOptions(OptionsClassProvider optionsProvider) { return BuildOptions.of(configurationOptions, optionsProvider); } private Environment.Frame createGlobals( ImmutableMap<String, Object> skylarkAccessibleToplLevels, ImmutableList<Class<?>> modules) { try (Mutability mutability = Mutability.create("ConfiguredRuleClassProvider globals")) { Environment env = createSkylarkRuleClassEnvironment( mutability, SkylarkModules.getGlobals(modules), null, null, null); for (Map.Entry<String, Object> entry : skylarkAccessibleToplLevels.entrySet()) { env.setup(entry.getKey(), entry.getValue()); } return env.getGlobals(); } } private Environment createSkylarkRuleClassEnvironment( Mutability mutability, Environment.Frame globals, EventHandler eventHandler, String astFileContentHashCode, Map<String, Extension> importMap) { return Environment.builder(mutability) .setSkylark() .setGlobals(globals) .setEventHandler(eventHandler) .setFileContentHashCode(astFileContentHashCode) .setImportedExtensions(importMap) .setToolsRepository(toolsRepository) .setPhase(Phase.LOADING) .build(); } @Override public Environment createSkylarkRuleClassEnvironment( Label extensionLabel, Mutability mutability, EventHandler eventHandler, String astFileContentHashCode, Map<String, Extension> importMap) { return createSkylarkRuleClassEnvironment( mutability, globals.setLabel(extensionLabel), eventHandler, astFileContentHashCode, importMap); } @Override public String getDefaultWorkspacePrefix() { return defaultWorkspaceFilePrefix; } @Override public String getDefaultWorkspaceSuffix() { return defaultWorkspaceFileSuffix; } /** * Returns all registered {@link BuildConfiguration.Fragment} classes. */ public Set<Class<? 
extends BuildConfiguration.Fragment>> getAllFragments() { ImmutableSet.Builder<Class<? extends BuildConfiguration.Fragment>> fragmentsBuilder = ImmutableSet.builder(); for (ConfigurationFragmentFactory factory : getConfigurationFragments()) { fragmentsBuilder.add(factory.creates()); } fragmentsBuilder.add(getUniversalFragment()); return fragmentsBuilder.build(); } }
/* * Copyright (c) 2009-2015, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package gov.hhs.fha.nhinc.callback.cxf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import gov.hhs.fha.nhinc.nhinclib.NhincConstants; import gov.hhs.fha.nhinc.properties.PropertyAccessException; import gov.hhs.fha.nhinc.properties.PropertyAccessor; import java.io.InputStream; import java.security.PublicKey; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import javax.xml.namespace.QName; import org.apache.ws.security.WSSecurityException; import org.apache.ws.security.components.crypto.Crypto; import org.apache.ws.security.handler.RequestData; import org.apache.ws.security.saml.SAMLKeyInfo; import org.apache.ws.security.saml.ext.AssertionWrapper; import org.apache.ws.security.validate.Credential; import org.joda.time.DateTime; import org.junit.Test; import org.opensaml.Configuration; import org.opensaml.DefaultBootstrap; import org.opensaml.common.SAMLVersion; import org.opensaml.saml1.core.Statement; import org.opensaml.saml2.core.Action; import org.opensaml.saml2.core.AuthzDecisionStatement; import org.opensaml.saml2.core.DecisionTypeEnumeration; import org.opensaml.saml2.core.Evidence; import org.opensaml.saml2.core.NameID; import org.opensaml.saml2.core.Subject; import org.opensaml.saml2.metadata.EntitiesDescriptor; import org.opensaml.xml.ConfigurationException; import org.opensaml.xml.io.Unmarshaller; import org.opensaml.xml.io.UnmarshallerFactory; import org.opensaml.xml.io.UnmarshallingException; import org.opensaml.xml.parse.BasicParserPool; import org.opensaml.xml.parse.XMLParserException; import org.opensaml.xml.validation.Validator; import 
org.opensaml.xml.validation.ValidatorSuite;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * Unit tests for {@code CONNECTSamlAssertionValidator}.
 *
 * <p>Covers SAML 1.x and SAML 2.0 assertion validation (spec-validator suites), the
 * {@code validate(Credential, RequestData)} entry point, and signed-assertion trust checking.
 * Collaborators (OpenSAML assertions, WSS4J {@code RequestData}/{@code Crypto}, property access)
 * are Mockito mocks; interaction counts on {@code getOrderedChildren()} are asserted with
 * {@code verify(..., times(n))}, so stubbing order in these tests is significant.
 */
public class CONNECTSamlAssertionValidatorTest {

    /**
     * A structurally valid SAML 1.0 assertion (ID, issuer, issue instant, one statement)
     * passes validation; the validator walks the assertion's children twice.
     */
    @Test
    public void testValidateAssertionSaml1() throws WSSecurityException {
        org.opensaml.saml1.core.Assertion saml1Assertion = mock(org.opensaml.saml1.core.Assertion.class);
        AssertionWrapper assertion = new AssertionWrapper(saml1Assertion);
        QName assertionQName = new QName("urn:oasis:names:tc:SAML:1.0:assertion", "Assertion", "saml1");
        when(saml1Assertion.getElementQName()).thenReturn(assertionQName);
        when(saml1Assertion.getMajorVersion()).thenReturn(1);
        when(saml1Assertion.getMinorVersion()).thenReturn(0);
        when(saml1Assertion.getID()).thenReturn("Assertion_ID");
        when(saml1Assertion.getIssuer()).thenReturn("Issuer");
        DateTime dateTime = new DateTime();
        when(saml1Assertion.getIssueInstant()).thenReturn(dateTime);
        Statement statement = mock(Statement.class);
        List<Statement> statementList = new ArrayList<Statement>();
        statementList.add(statement);
        when(saml1Assertion.getStatements()).thenReturn(statementList);

        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator();
        validator.validateAssertion(assertion);

        // Two spec-validator passes over the assertion's DOM children are expected.
        verify(saml1Assertion, times(2)).getOrderedChildren();
    }

    /**
     * A SAML 1.0 assertion with a null ID fails spec validation with a WSSecurityException.
     */
    @Test(expected = WSSecurityException.class)
    public void testValidateAssertionSaml1_ValidationFails() throws WSSecurityException {
        org.opensaml.saml1.core.Assertion saml1Assertion = mock(org.opensaml.saml1.core.Assertion.class);
        AssertionWrapper assertion = new AssertionWrapper(saml1Assertion);
        QName assertionQName = new QName("urn:oasis:names:tc:SAML:1.0:assertion", "Assertion", "saml1");
        when(saml1Assertion.getElementQName()).thenReturn(assertionQName);
        when(saml1Assertion.getMajorVersion()).thenReturn(1);
        when(saml1Assertion.getMinorVersion()).thenReturn(0);
        // Null ID is the induced defect.
        when(saml1Assertion.getID()).thenReturn(null);
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator();
        validator.validateAssertion(assertion);
    }

    /**
     * A SAML 2.0 assertion with an X.509 issuer/subject NameID and the default issuer name
     * passes the default spec-validator suite; three child-walk passes are expected.
     */
    @Test
    public void testValidateAssertionSaml2() throws WSSecurityException {
        org.opensaml.saml2.core.Assertion saml2Assertion = mock(org.opensaml.saml2.core.Assertion.class);
        AssertionWrapper assertion = new AssertionWrapper(saml2Assertion);
        QName assertionQName = new QName("urn:oasis:names:tc:SAML:2.0:assertion", "Assertion", "saml2");
        when(saml2Assertion.getElementQName()).thenReturn(assertionQName);
        org.opensaml.saml2.core.Issuer issuer = mock(org.opensaml.saml2.core.Issuer.class);
        when(issuer.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(saml2Assertion.getIssuer()).thenReturn(issuer);
        when(issuer.getValue()).thenReturn(NhincConstants.SAML_DEFAULT_ISSUER_NAME);
        when(saml2Assertion.getVersion()).thenReturn(SAMLVersion.VERSION_20);
        when(saml2Assertion.getID()).thenReturn("Assertion_ID");
        DateTime dateTime = new DateTime();
        when(saml2Assertion.getIssueInstant()).thenReturn(dateTime);
        Subject subject = mock(Subject.class);
        when(saml2Assertion.getSubject()).thenReturn(subject);
        NameID name = mock(NameID.class);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(name.getValue()).thenReturn(NhincConstants.SAML_DEFAULT_ISSUER_NAME);

        // Pin the validator suite to the default assertion spec validators for determinism.
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator() {
            @Override
            protected Collection<ValidatorSuite> getSaml2SpecValidators() {
                return getSaml2DefaultAssertionSpecValidators();
            }
        };
        validator.validateAssertion(assertion);

        verify(saml2Assertion, times(3)).getOrderedChildren();
    }

    /**
     * Validates a real SAML 2.0 assertion unmarshalled from the classpath resource
     * {@code authFrameworkAssertion.xml} (exercises an assertion with a blank AuthzDecision
     * resource) against the default spec-validator suite.
     */
    @Test
    public void testValidateAssertionSaml2_blankResource() throws WSSecurityException, XMLParserException,
            UnmarshallingException, ConfigurationException {
        // NOTE(review): a large block of commented-out mock setup (equivalent scenario built with
        // Mockito rather than a fixture file) was removed here; see VCS history if needed.
        String inCommonMDFile = "authFrameworkAssertion.xml";

        // Initialize the OpenSAML library
        DefaultBootstrap.bootstrap();

        // Get parser pool manager
        BasicParserPool ppMgr = new BasicParserPool();
        ppMgr.setNamespaceAware(true);

        // Parse metadata file
        InputStream in = CONNECTSamlAssertionValidatorTest.class.getResourceAsStream(inCommonMDFile);
        Document inCommonMDDoc = ppMgr.parse(in);
        Element metadataRoot = inCommonMDDoc.getDocumentElement();

        // Get appropriate unmarshaller for a SAML 2.0 Assertion element
        UnmarshallerFactory unmarshallerFactory = Configuration.getUnmarshallerFactory();
        Unmarshaller unmarshaller =
            unmarshallerFactory.getUnmarshaller(new QName("urn:oasis:names:tc:SAML:2.0:assertion", "Assertion"));

        // Unmarshall using the document root element
        org.opensaml.saml2.core.Assertion saml2Assertion =
            (org.opensaml.saml2.core.Assertion) unmarshaller.unmarshall(metadataRoot);
        AssertionWrapper assertion = new AssertionWrapper(saml2Assertion);

        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator() {
            @Override
            protected Collection<ValidatorSuite> getSaml2SpecValidators() {
                return getSaml2DefaultAssertionSpecValidators();
            }
        };
        validator.validateAssertion(assertion);
    }

    /**
     * A SAML 2.0 assertion whose issuer uses the X509SubjectName format but is otherwise
     * unpopulated fails spec validation.
     */
    @Test(expected = WSSecurityException.class)
    public void testValidateAssertionSaml2_ValidationFails() throws WSSecurityException {
        org.opensaml.saml2.core.Assertion saml2Assertion = mock(org.opensaml.saml2.core.Assertion.class);
        AssertionWrapper assertion = new AssertionWrapper(saml2Assertion);
        org.opensaml.saml2.core.Issuer issuer = mock(org.opensaml.saml2.core.Issuer.class);
        QName assertionQName = new QName("urn:oasis:names:tc:SAML:2.0:assertion", "Assertion", "saml2");
        when(saml2Assertion.getElementQName()).thenReturn(assertionQName);
        when(saml2Assertion.getIssuer()).thenReturn(issuer);
        when(saml2Assertion.getIssuer().getFormat()).thenReturn("urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName");
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator() {
            @Override
            protected Collection<ValidatorSuite> getSaml2SpecValidators() {
                return getSaml2DefaultAssertionSpecValidators();
            }
        };
        validator.validateAssertion(assertion);
    }

    /**
     * An entity-format issuer that carries an SPProvidedID is rejected with a WSSecurityException.
     */
    @Test(expected = WSSecurityException.class)
    public void ValidateAssertionSaml2WhenSPProviderID() throws WSSecurityException {
        org.opensaml.saml2.core.Assertion saml2Assertion = mock(org.opensaml.saml2.core.Assertion.class);
        AssertionWrapper assertion = new AssertionWrapper(saml2Assertion);
        org.opensaml.saml2.core.Issuer issuer = mock(org.opensaml.saml2.core.Issuer.class);
        QName assertionQName = new QName("urn:oasis:names:tc:SAML:2.0:assertion", "Assertion", "saml2");
        when(saml2Assertion.getElementQName()).thenReturn(assertionQName);
        when(saml2Assertion.getIssuer()).thenReturn(issuer);
        when(saml2Assertion.getIssuer().getSPProvidedID()).thenReturn("SPProvidedID");
        when(saml2Assertion.getIssuer().getFormat()).thenReturn("urn:oasis:names:tc:SAML:1.1:nameid-format:entity");
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator() {
            @Override
            protected Collection<ValidatorSuite> getSaml2SpecValidators() {
                return getSaml2DefaultAssertionSpecValidators();
            }
        };
        validator.validateAssertion(assertion);
    }

    /**
     * The "allow no subject" validator suite registers exactly one validator for the
     * SAML 2.0 Assertion QName.
     */
    @Test
    @SuppressWarnings("rawtypes")
    public void testGetSaml2AllowNoSubjectAssertionSpecValidator() {
        CONNECTSamlAssertionValidator connectValidator = new CONNECTSamlAssertionValidator();
        ValidatorSuite specValidators = connectValidator.getSaml2AllowNoSubjectAssertionSpecValidator();
        QName qName = new QName("urn:oasis:names:tc:SAML:2.0:assertion", "Assertion", "saml2");
        List<Validator> validatorList = specValidators.getValidators(qName);
        assertNotNull(specValidators);
        assertEquals(validatorList.size(), 1);
    }

    /**
     * getSaml2SpecValidators() switches on the gateway "allowNoSubjectAssertion" property:
     * first call (property true) yields the allow-no-subject suite; second call (property false)
     * falls through to the overridden default suite, which this test stubs to null.
     */
    @Test
    public void testGetSaml2SpecValidator() throws PropertyAccessException {
        PropertyAccessor propAccessor = mock(PropertyAccessor.class);
        CONNECTSamlAssertionValidator connectValidator = new CONNECTSamlAssertionValidator(propAccessor) {
            @Override
            protected Collection<ValidatorSuite> getSaml2DefaultAssertionSpecValidators() {
                return null;
            }
        };
        // Consecutive stubbing: true on the first lookup, false on the second.
        when(propAccessor.getPropertyBoolean(NhincConstants.GATEWAY_PROPERTY_FILE, "allowNoSubjectAssertion"))
                .thenReturn(true, false);

        Collection<ValidatorSuite> validators = connectValidator.getSaml2SpecValidators();
        ValidatorSuite validator = null;
        for (ValidatorSuite v : validators) {
            if ("saml2-core-spec-validator-allow-no-subject-assertion".equals(v.getId())) {
                validator = v;
            }
        }
        assertNotNull(validator);

        validators = connectValidator.getSaml2SpecValidators();
        assertNull(validators);
    }

    /**
     * validate(Credential, RequestData): a signed holder-of-key credential with conditions that
     * are currently in force is accepted; checkSignedAssertion must be invoked and the secret
     * key must round-trip unchanged on the returned credential.
     */
    @Test
    public void testValidate() throws WSSecurityException {
        // Side-channel to observe that the (overridden) signature check actually ran.
        final List<Boolean> checkedSignedAssertion = new ArrayList<Boolean>();
        Credential credential = new Credential();
        final String SECRET_KEY = "secret";
        credential.setSecretKey(SECRET_KEY.getBytes());
        RequestData data = mock(RequestData.class);
        AssertionWrapper assertion = mock(AssertionWrapper.class);
        credential.setAssertion(assertion);
        List<String> methods = new ArrayList<String>();
        final String METHOD_NAME = "urn:oasis:names:tc:SAML:" + "TESTING" + ":cm:holder-of-key";
        methods.add(METHOD_NAME);
        SAMLKeyInfo keyInfo = mock(SAMLKeyInfo.class);
        org.opensaml.saml2.core.Assertion saml2Assertion = mock(org.opensaml.saml2.core.Assertion.class);
        org.opensaml.saml2.core.Conditions conditions = mock(org.opensaml.saml2.core.Conditions.class);
        DateTime testDate = new DateTime();

        // For validate() calls
        when(assertion.getConfirmationMethods()).thenReturn(methods);
        when(assertion.getSubjectKeyInfo()).thenReturn(keyInfo);
        when(assertion.isSigned()).thenReturn(true);

        // For checkConditions() calls — getSaml2() is consumed several times, then returns null;
        // conditions window straddles "now" (notBefore 5s ago, notOnOrAfter tomorrow).
        when(assertion.getSamlVersion()).thenReturn(SAMLVersion.VERSION_20);
        when(assertion.getSaml2()).thenReturn(saml2Assertion, saml2Assertion, saml2Assertion, null);
        when(saml2Assertion.getConditions()).thenReturn(conditions);
        when(conditions.getNotOnOrAfter()).thenReturn(testDate.plusDays(1));
        when(conditions.getNotBefore()).thenReturn(testDate.minusSeconds(5));

        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator() {
            @Override
            protected void checkSignedAssertion(AssertionWrapper assertion, RequestData data)
                    throws WSSecurityException {
                checkedSignedAssertion.add(true);
            }
        };
        Credential resultCredential = validator.validate(credential, data);

        assertFalse(checkedSignedAssertion.isEmpty());
        assertTrue(checkedSignedAssertion.get(0).booleanValue());
        String resultSecretKey = new String(resultCredential.getSecretKey());
        assertEquals(resultSecretKey, SECRET_KEY);
    }

    /**
     * checkSignedAssertion succeeds when the crypto provider trusts the assertion's public key;
     * only the public key (no certificate chain) is present on the signature key info.
     */
    @Test
    public void testCheckSignedAssertion_HappyPath() throws WSSecurityException {
        AssertionWrapper assertion = mock(AssertionWrapper.class);
        RequestData data = mock(RequestData.class);
        SAMLKeyInfo keyInfo = mock(SAMLKeyInfo.class);
        PublicKey publicKey = mock(PublicKey.class);
        Crypto crypto = mock(Crypto.class);
        when(assertion.getSignatureKeyInfo()).thenReturn(keyInfo);
        when(keyInfo.getPublicKey()).thenReturn(publicKey);
        when(data.getSigCrypto()).thenReturn(crypto);
        when(crypto.verifyTrust(publicKey)).thenReturn(true);
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator();
        validator.checkSignedAssertion(assertion, data);
        assertNotNull(assertion.getSignatureKeyInfo().getPublicKey());
        assertNull(assertion.getSignatureKeyInfo().getCerts());
    }

    /**
     * checkSignedAssertion tolerates an untrusted public key (verifyTrust == false) without
     * throwing — the chain-cert error path — leaving the key info intact.
     */
    @Test
    public void testCheckSignedAssertion_ChainCertError() throws WSSecurityException {
        AssertionWrapper assertion = mock(AssertionWrapper.class);
        RequestData data = mock(RequestData.class);
        SAMLKeyInfo keyInfo = mock(SAMLKeyInfo.class);
        PublicKey publicKey = mock(PublicKey.class);
        Crypto crypto = mock(Crypto.class);
        when(assertion.getSignatureKeyInfo()).thenReturn(keyInfo);
        when(keyInfo.getPublicKey()).thenReturn(publicKey);
        when(data.getSigCrypto()).thenReturn(crypto);
        // Return false here for Chain Cert Error
        when(crypto.verifyTrust(publicKey)).thenReturn(false);
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator();
        validator.checkSignedAssertion(assertion, data);
        assertNotNull(assertion.getSignatureKeyInfo().getPublicKey());
        assertNull(assertion.getSignatureKeyInfo().getCerts());
    }

    /**
     * checkSignedAssertion throws WSSecurityException when the key info carries neither
     * a public key nor certificates (nothing stubbed beyond getSignatureKeyInfo).
     */
    @Test(expected = WSSecurityException.class)
    public void testCheckSignedAssertion_Exception() throws WSSecurityException {
        AssertionWrapper assertion = mock(AssertionWrapper.class);
        RequestData data = mock(RequestData.class);
        SAMLKeyInfo keyInfo = mock(SAMLKeyInfo.class);
        when(assertion.getSignatureKeyInfo()).thenReturn(keyInfo);
        CONNECTSamlAssertionValidator validator = new CONNECTSamlAssertionValidator();
        validator.checkSignedAssertion(assertion, data);
    }
}
/* * Copyright 2006 Sascha Weinreuter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.intellij.plugins.intelliLang.inject.config; import com.intellij.openapi.components.PersistentStateComponent; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.patterns.StringPattern; import com.intellij.patterns.compiler.PatternCompiler; import com.intellij.patterns.compiler.PatternCompilerFactory; import com.intellij.psi.ElementManipulators; import com.intellij.psi.LiteralTextEscaper; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiLanguageInjectionHost; import com.intellij.util.ArrayUtil; import com.intellij.util.SmartList; import com.intellij.util.containers.ContainerUtil; import org.intellij.lang.annotations.RegExp; import org.intellij.plugins.intelliLang.inject.InjectorUtils; import org.jdom.CDATA; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Injection base class: Contains properties for language-id, prefix and suffix. 
*/
public class BaseInjection implements Injection, PersistentStateComponent<Element> {

  /** User-data key under which a BaseInjection instance can be attached. */
  public static final Key<BaseInjection> INJECTION_KEY = Key.create("INJECTION_KEY");

  // Identifier of the injection support this instance belongs to; fixed at construction.
  @NotNull private final String mySupportId;

  // Mutable configuration, populated via setters or loadState().
  private String myDisplayName = "";
  private String myInjectedLanguageId = "";
  private String myPrefix = "";
  private String mySuffix = "";
  @NonNls private String myValuePattern = "";
  // Compiled form of myValuePattern; null when the pattern is empty or failed to compile.
  private Pattern myCompiledValuePattern;
  private boolean mySingleFile;

  public BaseInjection(@NotNull final String id) {
    mySupportId = id;
  }

  @NotNull private InjectionPlace[] myPlaces = InjectionPlace.EMPTY_ARRAY;

  @NotNull
  public InjectionPlace[] getInjectionPlaces() {
    return myPlaces;
  }

  public void setInjectionPlaces(@NotNull InjectionPlace... places) {
    myPlaces = places;
  }

  @NotNull
  public String getSupportId() {
    return mySupportId;
  }

  @NotNull
  public String getInjectedLanguageId() {
    return myInjectedLanguageId;
  }

  @NotNull
  public String getDisplayName() {
    return myDisplayName;
  }

  public void setDisplayName(@NotNull String displayName) {
    myDisplayName = displayName;
  }

  public void setInjectedLanguageId(@NotNull String injectedLanguageId) {
    myInjectedLanguageId = injectedLanguageId;
  }

  @NotNull
  public String getPrefix() {
    return myPrefix;
  }

  public void setPrefix(@NotNull String prefix) {
    myPrefix = prefix;
  }

  @NotNull
  public String getSuffix() {
    return mySuffix;
  }

  public void setSuffix(@NotNull String suffix) {
    mySuffix = suffix;
  }

  /**
   * Computes the text ranges inside {@code element} that should receive the injection.
   * Without a value pattern the whole value range is returned; otherwise the decoded
   * (escape-resolved) text is matched against the pattern and each match group's range is
   * mapped back to host-file offsets via the element's literal text escaper.
   */
  @NotNull
  public List<TextRange> getInjectedArea(final PsiElement element) {
    final TextRange textRange = ElementManipulators.getValueTextRange(element);
    if (myCompiledValuePattern == null) {
      return Collections.singletonList(textRange);
    }
    else {
      final LiteralTextEscaper<? extends PsiLanguageInjectionHost> textEscaper =
        ((PsiLanguageInjectionHost)element).createLiteralTextEscaper();
      final StringBuilder sb = new StringBuilder();
      textEscaper.decode(textRange, sb);
      // Bombed sequence makes long regex matches cancellable by the progress framework.
      final List<TextRange> ranges =
        getMatchingRanges(myCompiledValuePattern.matcher(StringPattern.newBombedCharSequence(sb)), sb.length());
      return !ranges.isEmpty()
             ? ContainerUtil.map(ranges, s -> new TextRange(textEscaper.getOffsetInHost(s.getStartOffset(), textRange),
                                                            textEscaper.getOffsetInHost(s.getEndOffset(), textRange)))
             : Collections.emptyList();
    }
  }

  /** True if at least one place has a compiled pattern and is enabled. */
  public boolean isEnabled() {
    for (InjectionPlace place : myPlaces) {
      if (place.getElementPattern() != null && place.isEnabled()) return true;
    }
    return false;
  }

  /** True if any enabled place's element pattern accepts the given element. */
  public boolean acceptsPsiElement(final PsiElement element) {
    ProgressManager.checkCanceled();
    for (InjectionPlace place : myPlaces) {
      if (place.isEnabled() && place.getElementPattern() != null && place.getElementPattern().accepts(element)) {
        return true;
      }
    }
    return false;
  }

  /** True if the template targets the same language and shares at least one place with this injection. */
  public boolean intersectsWith(final BaseInjection template) {
    if (!Comparing.equal(getInjectedLanguageId(), template.getInjectedLanguageId())) return false;
    for (InjectionPlace other : template.getInjectionPlaces()) {
      if (ArrayUtil.contains(other, myPlaces)) return true;
    }
    return false;
  }

  /** Compares language id, prefix/suffix, value pattern and single-file flag — but not places. */
  public boolean sameLanguageParameters(final BaseInjection that) {
    if (!myInjectedLanguageId.equals(that.myInjectedLanguageId)) return false;
    if (!myPrefix.equals(that.myPrefix)) return false;
    if (!mySuffix.equals(that.mySuffix)) return false;
    if (!myValuePattern.equals(that.myValuePattern)) return false;
    if (mySingleFile != that.mySingleFile) return false;
    return true;
  }

  @SuppressWarnings({"unchecked"})
  public BaseInjection copy() {
    return new BaseInjection(mySupportId).copyFrom(this);
  }

  @SuppressWarnings({"RedundantIfStatement"})
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || !(o instanceof BaseInjection)) return false;

    final BaseInjection that = (BaseInjection)o;

    if (!Comparing.equal(getDisplayName(), that.getDisplayName())) return false;
    if (!sameLanguageParameters(that)) return false;
    if (myPlaces.length != that.myPlaces.length) return false;
    // Enabled flags are compared explicitly here because InjectionPlace.equals below
    // does not account for them (see original note).
    for (int i = 0, len = myPlaces.length; i < len; i++) {
      if (myPlaces[i].isEnabled() != that.myPlaces[i].isEnabled()) {
        return false;
      }
    }
    // enabled flag is not counted this way:
    if (!Arrays.equals(myPlaces, that.myPlaces)) return false;
    return true;
  }

  // NOTE(review): deliberately narrower than equals() — display name and places are excluded.
  // Equal objects still hash equal, so the hashCode contract holds.
  public int hashCode() {
    int result;
    result = myInjectedLanguageId.hashCode();
    result = 31 * result + myPrefix.hashCode();
    result = 31 * result + mySuffix.hashCode();
    result = 31 * result + myValuePattern.hashCode();
    return result;
  }

  /** Copies all configuration (including a clone of the places array) from {@code other}. */
  public BaseInjection copyFrom(@NotNull BaseInjection other) {
    assert this != other;
    myInjectedLanguageId = other.getInjectedLanguageId();
    myPrefix = other.getPrefix();
    mySuffix = other.getSuffix();
    myDisplayName = other.getDisplayName();
    setValuePattern(other.getValuePattern());
    mySingleFile = other.mySingleFile;
    myPlaces = other.getInjectionPlaces().clone();

    return this;
  }

  /**
   * Restores this injection from its persisted XML form (inverse of {@link #getState()}).
   * Place patterns are compiled with this support's pattern compiler; when no places were
   * persisted, {@link #generatePlaces()} is given a chance to create defaults.
   */
  public void loadState(Element element) {
    final PatternCompiler<PsiElement> helper = getCompiler();
    myDisplayName = StringUtil.notNullize(element.getChildText("display-name"));
    myInjectedLanguageId = StringUtil.notNullize(element.getAttributeValue("language"));
    myPrefix = StringUtil.notNullize(element.getChildText("prefix"));
    mySuffix = StringUtil.notNullize(element.getChildText("suffix"));
    setValuePattern(element.getChildText("value-pattern"));
    mySingleFile = element.getChild("single-file") != null;
    readExternalImpl(element);
    final List<Element> placeElements = element.getChildren("place");
    myPlaces = InjectionPlace.ARRAY_FACTORY.create(placeElements.size());
    for (int i = 0, placeElementsSize = placeElements.size(); i < placeElementsSize; i++) {
      Element placeElement = placeElements.get(i);
      // Persisted as "disabled"; absence of the attribute means enabled.
      final boolean enabled = !Boolean.parseBoolean(placeElement.getAttributeValue("disabled"));
      final String text = placeElement.getText();
      myPlaces[i] = new InjectionPlace(helper.createElementPattern(text, getDisplayName()), enabled);
    }
    if (myPlaces.length == 0) {
      generatePlaces();
    }
  }

  /** Pattern compiler configured with the pattern classes registered for this support id. */
  public PatternCompiler<PsiElement> getCompiler() {
    return PatternCompilerFactory.getFactory().getPatternCompiler(InjectorUtils.getPatternClasses(getSupportId()));
  }

  /** Hook for subclasses to create default places when none were persisted; no-op here. */
  public void generatePlaces() {
  }

  /** Subclass hook invoked from {@link #loadState} for extra persisted attributes. */
  protected void readExternalImpl(Element e) {}

  /**
   * Serializes this injection to XML (inverse of {@link #loadState}). Empty prefix/suffix/
   * value-pattern are omitted; places are sorted by text for stable output and each place's
   * pattern text is stored as CDATA.
   */
  public final Element getState() {
    final Element e = new Element("injection");
    e.setAttribute("language", myInjectedLanguageId);
    e.setAttribute("injector-id", mySupportId);
    e.addContent(new Element("display-name").setText(getDisplayName()));
    if (StringUtil.isNotEmpty(myPrefix)) {
      e.addContent(new Element("prefix").setText(myPrefix));
    }
    if (StringUtil.isNotEmpty(mySuffix)) {
      e.addContent(new Element("suffix").setText(mySuffix));
    }
    if (StringUtil.isNotEmpty(myValuePattern)) {
      e.addContent(new Element("value-pattern").setText(myValuePattern));
    }
    if (mySingleFile) {
      e.addContent(new Element("single-file"));
    }
    Arrays.sort(myPlaces, (o1, o2) -> Comparing.compare(o1.getText(), o2.getText()));
    for (InjectionPlace place : myPlaces) {
      final Element child = new Element("place").setContent(new CDATA(place.getText()));
      if (!place.isEnabled()) child.setAttribute("disabled", "true");
      e.addContent(child);
    }
    writeExternalImpl(e);
    return e;
  }

  /** Subclass hook invoked from {@link #getState} for extra persisted attributes. */
  protected void writeExternalImpl(Element e) {}

  @NotNull
  public String getValuePattern() {
    return myValuePattern;
  }

  /**
   * Sets and compiles the value pattern (DOTALL). A null/empty pattern clears it; a pattern
   * that fails to compile is kept as text but leaves the compiled form null (logged, not thrown).
   */
  public void setValuePattern(@RegExp @Nullable String pattern) {
    try {
      if (pattern != null && pattern.length() > 0) {
        myValuePattern = pattern;
        myCompiledValuePattern = Pattern.compile(pattern, Pattern.DOTALL);
      }
      else {
        myValuePattern = "";
        myCompiledValuePattern = null;
      }
    }
    catch (Exception e1) {
      myCompiledValuePattern = null;
      Logger.getInstance(getClass().getName()).info("Invalid pattern", e1);
    }
  }

  public boolean isSingleFile() {
    return mySingleFile;
  }

  public void setSingleFile(final boolean singleFile) {
    mySingleFile = singleFile;
  }

  /**
   * Determines if further injections should be examined if {@code isApplicable} has returned true.
   * <p/>
   * This is determined by the presence of a value-pattern: If none is present, the entry is considered
   * to be a terminal one.
   *
   * @return true to stop, false to continue
   */
  public boolean isTerminal() {
    return myCompiledValuePattern == null;
  }

  /**
   * Collects the ranges matched by {@code matcher} over a sequence of {@code length} chars.
   * With no capture groups each whole match is one range; with groups, each matched group
   * contributes its own range. Advancing from matcher.end() guarantees forward progress.
   */
  private static List<TextRange> getMatchingRanges(Matcher matcher, final int length) {
    final List<TextRange> list = new SmartList<>();
    int start = 0;
    while (start < length && matcher.find(start)) {
      final int groupCount = matcher.groupCount();
      if (groupCount == 0) {
        start = matcher.end();
      }
      else {
        for (int i = 1; i <= groupCount; i++) {
          start = matcher.start(i);
          if (start == -1) continue; // group did not participate in this match
          list.add(new TextRange(start, matcher.end(i)));
        }
        if (start >= matcher.end()) break;
        start = matcher.end();
      }
    }
    return list;
  }

  /**
   * Appends places from {@code injection} that this injection does not already contain.
   * When {@code enabled} is false, originally-enabled places are added in disabled state.
   */
  public void mergeOriginalPlacesFrom(final BaseInjection injection, final boolean enabled) {
    for (InjectionPlace place : injection.getInjectionPlaces()) {
      if (!ArrayUtil.contains(place, myPlaces)) {
        myPlaces = ArrayUtil.append(myPlaces, enabled || !place.isEnabled() ? place : place.enabled(false),
                                    InjectionPlace.ARRAY_FACTORY);
      }
    }
  }

  /**
   * Toggles the enabled flag of the place whose text equals {@code text}; a null text
   * toggles every place.
   */
  public void setPlaceEnabled(@Nullable final String text, final boolean enabled) {
    for (int i = 0; i < myPlaces.length; i++) {
      final InjectionPlace cur = myPlaces[i];
      if (text == null || Comparing.equal(text, cur.getText())) {
        if (cur.isEnabled() != enabled) {
          myPlaces[i] = cur.enabled(enabled);
        }
      }
    }
  }

  public boolean acceptForReference(PsiElement element) {
    return acceptsPsiElement(element);
  }

  @Override
  public String toString() {
    return getInjectedLanguageId()+ "->" +getDisplayName();
  }
}
package com.netflix.astyanax.recipes.locks;

import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.ColumnMap;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.model.OrderedColumnMap;
import com.netflix.astyanax.retry.RetryPolicy;
import com.netflix.astyanax.retry.RunOnce;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.serializers.LongSerializer;

/**
 * Distributed row lock that acquires the lock in a single write: a unique lock
 * column is written to the locked row and the row is then read back to verify
 * that no other lock column exists.  The lock column value holds the absolute
 * expiration time (in microseconds) so stale locks can be detected.
 *
 * @param <K> row key type
 * @param <C> column name type
 */
public class OneStepDistributedRowLock<K, C> implements DistributedRowLock {
    public static final int LOCK_TIMEOUT = 60;
    public static final TimeUnit DEFAULT_OPERATION_TIMEOUT_UNITS = TimeUnit.MINUTES;

    private final ColumnFamily<K, C> columnFamily;      // The column family for data and lock
    private final Keyspace keyspace;                    // The keyspace
    private final K key;                                // Key being locked

    private long timeout = LOCK_TIMEOUT;                // Timeout after which the lock expires.  Units defined by timeoutUnits.
    private TimeUnit timeoutUnits = DEFAULT_OPERATION_TIMEOUT_UNITS;
    private ConsistencyLevel consistencyLevel = ConsistencyLevel.CL_LOCAL_QUORUM;
    private boolean failOnStaleLock = false;
    private Set<C> locksToDelete = Sets.newHashSet();
    private C lockColumn = null;
    private ColumnMap<C> columns = null;
    private Integer ttl = null;                         // Units in seconds
    private boolean readDataColumns = false;
    private RetryPolicy backoffPolicy = RunOnce.get();
    private long acquireTime = 0;
    private int retryCount = 0;
    private LockColumnStrategy<C> columnStrategy = null;

    public OneStepDistributedRowLock(Keyspace keyspace, ColumnFamily<K, C> columnFamily, K key) {
        this.keyspace = keyspace;
        this.columnFamily = columnFamily;
        this.key = key;
    }

    /**
     * Strategy used to generate and recognize lock columns within the row.
     */
    public OneStepDistributedRowLock<K, C> withColumnStrategy(LockColumnStrategy<C> columnStrategy) {
        this.columnStrategy = columnStrategy;
        return this;
    }

    /**
     * Modify the consistency level being used. Consistency should always be a
     * variant of quorum. The default is CL_LOCAL_QUORUM, which is OK for
     * single region as well as multi region. CL_EACH_QUORUM can be used but
     * will incur substantial latency.
     *
     * @param consistencyLevel
     * @return this, for chaining
     */
    public OneStepDistributedRowLock<K, C> withConsistencyLevel(ConsistencyLevel consistencyLevel) {
        this.consistencyLevel = consistencyLevel;
        return this;
    }

    /**
     * If true the first read will also fetch all the columns in the row as
     * opposed to just the lock columns.
     *
     * @param flag
     * @return this, for chaining
     */
    public OneStepDistributedRowLock<K, C> withDataColumns(boolean flag) {
        this.readDataColumns = flag;
        return this;
    }

    /**
     * When set to true the operation will fail if a stale lock is detected
     *
     * @param failOnStaleLock
     * @return this, for chaining
     */
    public OneStepDistributedRowLock<K, C> failOnStaleLock(boolean failOnStaleLock) {
        this.failOnStaleLock = failOnStaleLock;
        return this;
    }

    /**
     * Time for failed locks. Under normal circumstances the lock column will
     * be deleted. If not then this lock column will remain and the row will
     * remain locked. The lock will expire after this timeout.
     *
     * @param timeout
     * @param unit
     * @return this, for chaining
     */
    public OneStepDistributedRowLock<K, C> expireLockAfter(long timeout, TimeUnit unit) {
        this.timeout = timeout;
        this.timeoutUnits = unit;
        return this;
    }

    /**
     * This is the TTL on the lock column being written, as opposed to expireLockAfter which
     * is written as the lock column value.  Whereas the expireLockAfter can be used to
     * identify a stale or abandoned lock the TTL will result in the stale or abandoned lock
     * being eventually deleted by cassandra.  Set the TTL to a number that is much greater
     * than the expireLockAfter time.
     *
     * @param ttl TTL in seconds
     * @return this, for chaining
     */
    public OneStepDistributedRowLock<K, C> withTtl(Integer ttl) {
        this.ttl = ttl;
        return this;
    }

    /** Same as {@link #withTtl(Integer)} but converts from the given units to seconds. */
    public OneStepDistributedRowLock<K, C> withTtl(Integer ttl, TimeUnit units) {
        this.ttl = (int) TimeUnit.SECONDS.convert(ttl, units);
        return this;
    }

    /** Retry/backoff policy applied when the lock is busy.  Defaults to a single attempt. */
    public OneStepDistributedRowLock<K, C> withBackoff(RetryPolicy policy) {
        this.backoffPolicy = policy;
        return this;
    }

    /**
     * Try to take the lock.  The caller must call .release() to properly
     * clean up the lock columns from cassandra
     *
     * @throws BusyLockException if the lock is held by someone else and all retries are exhausted
     */
    @Override
    public void acquire() throws Exception {
        // The write must expire (column value) before cassandra deletes it (TTL),
        // otherwise a stale lock could be silently garbage collected.
        Preconditions.checkArgument(ttl == null || TimeUnit.SECONDS.convert(timeout, timeoutUnits) < ttl,
                "Timeout " + timeout + " must be less than TTL " + ttl);

        RetryPolicy retry = backoffPolicy.duplicate();
        retryCount = 0;
        while (true) {
            try {
                long curTimeMicros = getCurrentTimeMicros();

                MutationBatch m = keyspace.prepareMutationBatch().setConsistencyLevel(consistencyLevel);
                fillLockMutation(m, curTimeMicros, ttl);
                m.execute();

                verifyLock(curTimeMicros);
                acquireTime = System.currentTimeMillis();
                return;
            }
            catch (BusyLockException e) {
                // Remove our own lock column before backing off and retrying.
                release();
                if (!retry.allowRetry())
                    throw e;
                retryCount++;
            }
        }
    }

    /**
     * Take the lock and return the row data columns.  Use this, instead of acquire, when you
     * want to implement a read-modify-write scenario and want to reduce the number of calls
     * to Cassandra.
     *
     * @return the non-lock columns of the row
     */
    public ColumnMap<C> acquireLockAndReadRow() throws Exception {
        withDataColumns(true);
        acquire();
        return getDataColumns();
    }

    /**
     * Verify that the lock was acquired.  This shouldn't be called unless it's part of a recipe
     * built on top of AbstractDistributedRowLock.
     *
     * @param curTimeInMicros time used to detect expired (stale) lock columns
     * @throws IllegalStateException if called before the lock column was written
     * @throws StaleLockException    if a stale lock is found and failOnStaleLock is set
     * @throws BusyLockException     if the lock is held by someone else
     */
    public void verifyLock(long curTimeInMicros) throws Exception, BusyLockException, StaleLockException {
        if (getLockColumn() == null)
            throw new IllegalStateException("verifyLock() called without attempting to take the lock");

        // Read back all columns. There should be only 1 if we got the lock
        Map<C, Long> lockResult = readLockColumns(readDataColumns);

        // Cleanup and check that we really got the lock
        for (Entry<C, Long> entry : lockResult.entrySet()) {
            // This is a stale lock that was never cleaned up
            if (entry.getValue() != 0 && curTimeInMicros > entry.getValue()) {
                if (failOnStaleLock) {
                    throw new StaleLockException("Stale lock on row '" + key + "'.  Manual cleanup required.");
                }
                locksToDelete.add(entry.getKey());
            }
            // Lock already taken, and not by us
            else if (!entry.getKey().equals(getLockColumn())) {
                throw new BusyLockException("Lock already acquired for row '" + key + "' with lock column " + entry.getKey());
            }
        }
    }

    /**
     * Release the lock by releasing this and any other stale lock columns
     */
    @Override
    public void release() throws Exception {
        if (!locksToDelete.isEmpty() || getLockColumn() != null) {
            MutationBatch m = keyspace.prepareMutationBatch().setConsistencyLevel(consistencyLevel);
            fillReleaseMutation(m, false);
            m.execute();
        }
    }

    /**
     * Release using the provided mutation.  Use this when you want to commit actual data
     * when releasing the lock
     *
     * @param m mutation carrying the caller's data writes; lock deletes are appended to it
     */
    public void releaseWithMutation(MutationBatch m) throws Exception {
        releaseWithMutation(m, false);
    }

    /**
     * Release using the provided mutation, optionally forcing the release of a stale lock.
     *
     * @param m     mutation carrying the caller's data writes
     * @param force when true, release even if the lock became stale
     * @return true if the lock had become stale by the time it was released
     * @throws StaleLockException if the lock is stale and force is false
     */
    public boolean releaseWithMutation(MutationBatch m, boolean force) throws Exception {
        long elapsed = System.currentTimeMillis() - acquireTime;
        boolean isStale = false;
        if (timeout > 0 && elapsed > TimeUnit.MILLISECONDS.convert(timeout, this.timeoutUnits)) {
            isStale = true;
            if (!force) {
                throw new StaleLockException("Lock for '" + getKey() + "' became stale");
            }
        }

        m.setConsistencyLevel(consistencyLevel);
        fillReleaseMutation(m, false);
        m.execute();

        return isStale;
    }

    /**
     * Return a mapping of existing lock columns and their expiration times
     */
    public Map<C, Long> readLockColumns() throws Exception {
        return readLockColumns(false);
    }

    /**
     * Read all the lock columns.  Will also read data columns if withDataColumns(true) was called
     *
     * @param readDataColumns when true the whole row is fetched and data columns are
     *                        cached for {@link #getDataColumns()}
     * @return map of lock column name to expiration time (microseconds; 0 = no expiration)
     */
    private Map<C, Long> readLockColumns(boolean readDataColumns) throws Exception {
        Map<C, Long> result = Maps.newLinkedHashMap();
        // Read all the columns
        if (readDataColumns) {
            columns = new OrderedColumnMap<C>();
            ColumnList<C> lockResult = keyspace
                    .prepareQuery(columnFamily)
                    .setConsistencyLevel(consistencyLevel)
                    .getKey(key)
                    .execute()
                    .getResult();

            for (Column<C> c : lockResult) {
                if (columnStrategy.isLockColumn(c.getName()))
                    result.put(c.getName(), readTimeoutValue(c));
                else
                    columns.add(c);
            }
        }
        // Read only the lock columns
        else {
            ColumnList<C> lockResult = keyspace
                    .prepareQuery(columnFamily)
                    .setConsistencyLevel(consistencyLevel)
                    .getKey(key)
                    .withColumnRange(columnStrategy.getLockColumnRange())
                    .execute()
                    .getResult();

            for (Column<C> c : lockResult) {
                result.put(c.getName(), readTimeoutValue(c));
            }
        }
        return result;
    }

    /**
     * Release all locks.  Use this carefully as it could release a lock for a
     * running operation.
     *
     * @return the released lock columns and their expiration times
     */
    public Map<C, Long> releaseAllLocks() throws Exception {
        return releaseLocks(true);
    }

    /**
     * Release all expired locks for this key.
     *
     * @return the released lock columns and their expiration times
     */
    public Map<C, Long> releaseExpiredLocks() throws Exception {
        return releaseLocks(false);
    }

    /**
     * Delete lock columns.  Set force=true to remove locks that haven't
     * expired yet.
     *
     * This operation first issues a read to cassandra and then deletes columns
     * in the response.
     *
     * @param force - Force delete of non expired locks as well
     * @return the lock columns that were read (and deleted where applicable)
     */
    public Map<C, Long> releaseLocks(boolean force) throws Exception {
        Map<C, Long> locksToDelete = readLockColumns();

        MutationBatch m = keyspace.prepareMutationBatch().setConsistencyLevel(consistencyLevel);
        ColumnListMutation<C> row = m.withRow(columnFamily, key);
        long now = getCurrentTimeMicros();
        for (Entry<C, Long> c : locksToDelete.entrySet()) {
            if (force || (c.getValue() > 0 && c.getValue() < now)) {
                row.deleteColumn(c.getKey());
            }
        }
        m.execute();

        return locksToDelete;
    }

    /**
     * Get the current system time in microseconds (millisecond granularity).
     */
    private static long getCurrentTimeMicros() {
        return TimeUnit.MICROSECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    }

    /**
     * Fill a mutation with the lock column. This may be used when the mutation
     * is executed externally but should be used with extreme caution to ensure
     * the lock is properly released
     *
     * @param m    mutation to fill
     * @param time current time in microseconds, or null for a non-expiring lock
     * @param ttl  cassandra TTL in seconds, or null for none
     * @return the generated lock column
     */
    public C fillLockMutation(MutationBatch m, Long time, Integer ttl) {
        if (lockColumn != null) {
            if (!lockColumn.equals(columnStrategy.generateLockColumn()))
                throw new IllegalStateException("Can't change prefix or lockId after acquiring the lock");
        }
        else {
            lockColumn = columnStrategy.generateLockColumn();
        }

        // 0 marks a lock without expiration; otherwise store the absolute
        // expiration time. (Was `new Long(0)` — deprecated boxing constructor.)
        Long timeoutValue = (time == null)
                ? 0L
                : time + TimeUnit.MICROSECONDS.convert(timeout, timeoutUnits);
        m.withRow(columnFamily, key).putColumn(lockColumn, generateTimeoutValue(timeoutValue), ttl);
        return lockColumn;
    }

    /**
     * Generate the expire time value to put in the column value.
     *
     * @param timeout absolute expiration time in microseconds
     */
    private ByteBuffer generateTimeoutValue(long timeout) {
        if (columnFamily.getDefaultValueSerializer() == ByteBufferSerializer.get()
                || columnFamily.getDefaultValueSerializer() == LongSerializer.get()) {
            return LongSerializer.get().toByteBuffer(timeout);
        }
        else {
            return columnFamily.getDefaultValueSerializer().fromString(Long.toString(timeout));
        }
    }

    /**
     * Read the expiration time from the column value
     *
     * @param column lock column whose value encodes the expiration time
     */
    public long readTimeoutValue(Column<?> column) {
        if (columnFamily.getDefaultValueSerializer() == ByteBufferSerializer.get()
                || columnFamily.getDefaultValueSerializer() == LongSerializer.get()) {
            return column.getLongValue();
        }
        else {
            return Long.parseLong(column.getStringValue());
        }
    }

    /**
     * Fill a mutation that will release the locks.  This may be used from a
     * separate recipe to release multiple locks.
     *
     * @param m                  mutation to append deletes to
     * @param excludeCurrentLock when true, only stale locks are deleted
     */
    public void fillReleaseMutation(MutationBatch m, boolean excludeCurrentLock) {
        // Add the deletes to the end of the mutation
        ColumnListMutation<C> row = m.withRow(columnFamily, key);
        for (C c : locksToDelete) {
            row.deleteColumn(c);
        }
        if (!excludeCurrentLock && lockColumn != null)
            row.deleteColumn(lockColumn);
        locksToDelete.clear();
        lockColumn = null;
    }

    public ColumnMap<C> getDataColumns() {
        return columns;
    }

    public K getKey() {
        return key;
    }

    public Keyspace getKeyspace() {
        return keyspace;
    }

    public ConsistencyLevel getConsistencyLevel() {
        return consistencyLevel;
    }

    public C getLockColumn() {
        return lockColumn;
    }

    public int getRetryCount() {
        return retryCount;
    }
}
/* * Copyright (c) 2011, Andreas Olofsson * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* */ package se.fojob.forester.grass; import se.fojob.forester.grass.datagrids.MapProvider; import com.jme3.material.Material; import com.jme3.math.Vector2f; import com.jme3.renderer.Camera; import com.jme3.renderer.queue.RenderQueue.Bucket; import com.jme3.scene.Geometry; import com.jme3.scene.Node; import com.jme3.terrain.Terrain; import se.fojob.forester.MapBlock; import se.fojob.forester.grass.GrassLayer.MeshType; import se.fojob.forester.grass.datagrids.MapGrid; import se.fojob.forester.grass.datagrids.UDGrassProvider; import se.fojob.forester.image.ColorMap; import se.fojob.forester.image.DensityMap; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.logging.Level; import java.util.logging.Logger; import se.fojob.paging.GeometryTileLoader; import se.fojob.paging.interfaces.Page; import se.fojob.paging.interfaces.Tile; /** * This class is used to create grass. * * @author Andreas */ public class GrassLoader extends GeometryTileLoader { protected static final Logger log = Logger.getLogger(GrassLoader.class.getName()); //List of grass-layers. protected ArrayList<GrassLayer> layers; protected Vector2f wind; protected GrassGeometryGenerator grassGen; protected MapProvider mapProvider; protected boolean useBinaries; protected String binariesDir = "."; /** * The only constructor. * * @param tileSize The tile size should be same size as the density * maps used to produce the grass. * @param resolution This value determines the amount of sub-meshes * within each tile. The total amount of meshes per tile is resolution^2. * @param farViewingDistance The far viewing distance for the grass. This is * also a factor in determining grid size. * @param fadingRange The distance over which grass is faded out * (in world units). * @param terrain A terrain object. * @param rootNode The rootNode of the scene. * @param camera The camera used for rendering the scene. 
*/ public GrassLoader(int tileSize, int resolution, float farViewingDistance, float fadingRange, Terrain terrain, Node rootNode, Camera camera) { super(tileSize, resolution, farViewingDistance, rootNode, terrain, camera); pagingEngine.addDetailLevel(farViewingDistance, fadingRange); layers = new ArrayList<GrassLayer>(); wind = new Vector2f(0, 0); grassGen = new GrassGeometryGenerator(terrain); init(); } protected final void init() { pagingEngine.setTileLoader(this); } @Override public Callable<Boolean> loadTile(Tile tile) { return new LoadTask((GrassTile) tile); } /** * This method was used to initialize the paging engine, but that * takes place in the constructor from version 0.1.7 on. * * @deprecated This method is no longer necessary to call. */ @Deprecated public void build() { } @Override public void update(float tpf) { for (GrassLayer layer : layers) { layer.update(); } pagingEngine.update(tpf); } @Override public GrassTile createTile(int x, int z) { Logger.getLogger(GrassLoader.class.getName()).log(Level.INFO, "GrassTile created at: ({0},{1})", new Object[]{x, z}); return new GrassTile(x, z, pagingEngine); } public MapGrid createMapGrid() { MapGrid grid = new MapGrid(pagingEngine.getTileSize()); this.mapProvider = grid; return grid; } public UDGrassProvider createUDProvider() { UDGrassProvider provider = new UDGrassProvider(this); this.mapProvider = provider; return provider; } /** * Adds a new layer of grass to the grassloader. * * @param material The material for the main geometry. * @param type The meshtype of the main geometry. * @return A reference to the GrassLayer object. 
*/ public GrassLayer addLayer(Material material, MeshType type) { GrassLayer layer = new GrassLayer(material, type, this); layers.add(layer); return layer; } //***************************Getters and setters*********************** public ArrayList<GrassLayer> getLayers() { return layers; } public void setLayers(ArrayList<GrassLayer> layers) { this.layers = layers; } public Vector2f getWind() { return wind; } public void setWind(Vector2f wind) { for (GrassLayer layer : layers) { layer.setWind(wind); } } public void setUseBinaries(boolean useBinaries) { this.useBinaries = useBinaries; } public void setBinariesDir(String binariesDir) { this.binariesDir = binariesDir; } public MapProvider getMapProvider() { return mapProvider; } public void setMapProvider(MapProvider mapProvider) { this.mapProvider = mapProvider; } protected class LoadTask implements Callable<Boolean> { GrassTile tile; protected LoadTask(GrassTile tile) { this.tile = tile; } @Override public Boolean call() { //Get the density and colormaps. MapBlock block = mapProvider.getMaps(tile); if (block == null) { return false; } if (block.getDensityMaps().isEmpty()) { return false; } //Creates the empty page objects. tile.createPages(); ArrayList<Page> pages = tile.getPages(); float ps = pagingEngine.getPageSize() * 0.5f; //Loads grass geometry to each page. 
for (Page p : pages) { GrassPage page = (GrassPage) p; Node[] nodes = new Node[1]; nodes[0] = new Node("Grass"); for (int i = 0; i < layers.size(); i++) { GrassLayer layer = layers.get(i); DensityMap densityMap = block.getDensityMaps().get(layer.getDmTexNum()); if (densityMap == null) { continue; } ColorMap colorMap = null; if (block.getColorMaps() != null) { colorMap = block.getColorMaps().get(i); } Geometry geom = grassGen.createGrassGeometry(layer, page, densityMap, colorMap); geom.setQueueBucket(Bucket.Transparent); geom.setShadowMode(layer.getShadowMode()); nodes[0].attachChild(geom); }//for each layer page.setNodes(nodes); page.calculateOverlap(ps, 0); }//for each page for (Page p : tile.getPages()) { p.calculateOverlap(ps, 0); } return true; }//call } }//AbstractGrassLoader
/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is part of dcm4che, an implementation of DICOM(TM) in * Java(TM), available at http://sourceforge.net/projects/dcm4che. * * The Initial Developer of the Original Code is * TIANI Medgraph AG. * Portions created by the Initial Developer are Copyright (C) 2003-2005 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Gunter Zeilinger <gunter.zeilinger@tiani.com> * Franz Willer <franz.willer@gwi-ag.com> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ package org.dcm4chex.archive.codec; import java.awt.image.BufferedImage; import java.awt.image.DataBuffer; import org.apache.log4j.Logger; import org.dcm4che.data.Dataset; import org.dcm4che.data.FileMetaInfo; import org.dcm4che.dict.Tags; import org.dcm4che.dict.UIDs; import EDU.oswego.cs.dl.util.concurrent.FIFOSemaphore; import EDU.oswego.cs.dl.util.concurrent.Semaphore; /** * @author gunter.zeilinger@tiani.com * @version $Revision: 14540 $ $Date: 2006-05-15 11:59:33 +0200 (Mon, 15 May * 2006) $ * @since 14.03.2005 * */ public abstract class CodecCmd { static final Logger log = Logger.getLogger(CodecCmd.class); static final String YBR_FULL_422 = "YBR_FULL_422"; static final String YBR_RCT = "YBR_RCT"; static final String JPEG2000 = "jpeg2000"; static final String JPEG = "jpeg"; static final String JPEG_LOSSLESS = "JPEG-LOSSLESS"; static final String JPEG_LS = "JPEG-LS"; static int maxConcurrentCodec = 1; static int nrOfConcurrentCodec = 0; static Semaphore codecSemaphore = new FIFOSemaphore(maxConcurrentCodec); static BufferedImagePool biPool = new BufferedImagePool(); protected final String implClassUID; protected final String photometricInterpretation; protected final int samples; protected final int frames; protected final int rows; protected final int columns; protected final int planarConfiguration; protected final int bitsAllocated; protected final int bitsStored; protected final int pixelRepresentation; protected final int frameLength; protected final String tsuid; protected final int dataType; protected final int maxVal; protected CodecCmd(Dataset ds, String tsuid) { FileMetaInfo fmi = ds.getFileMetaInfo(); this.implClassUID = fmi != null ? 
fmi.getImplementationClassUID() : null; this.photometricInterpretation = ds.getString(Tags.PhotometricInterpretation, "MONCHROME2"); this.samples = ds.getInt(Tags.SamplesPerPixel, 1); this.frames = ds.getInt(Tags.NumberOfFrames, 1); this.rows = ds.getInt(Tags.Rows, 1); this.columns = ds.getInt(Tags.Columns, 1); this.bitsAllocated = ds.getInt(Tags.BitsAllocated, 8); this.bitsStored = ds.getInt(Tags.BitsStored, bitsAllocated); this.pixelRepresentation = ds.getInt(Tags.PixelRepresentation, 0); this.planarConfiguration = ds.getInt(Tags.PlanarConfiguration, 0); this.frameLength = rows * columns * samples * bitsAllocated / 8; this.tsuid = tsuid; switch (bitsAllocated) { case 8: this.dataType = DataBuffer.TYPE_BYTE; break; case 16: this.dataType = (pixelRepresentation != 0 && (UIDs.JPEG2000Lossless.equals(tsuid) || UIDs.JPEG2000Lossy.equals(tsuid))) ? DataBuffer.TYPE_SHORT : DataBuffer.TYPE_USHORT; break; default: throw new IllegalArgumentException("bits allocated:" + bitsAllocated); } this.maxVal = -1 >>> (32 - bitsStored + pixelRepresentation); } public static void setMaxConcurrentCodec(int maxConcurrentCodec) { codecSemaphore = new FIFOSemaphore(maxConcurrentCodec); CodecCmd.maxConcurrentCodec = maxConcurrentCodec; } public static int getMaxConcurrentCodec() { return maxConcurrentCodec; } public static int getMaxBufferedImagePoolSize() { return biPool.getMaxSize(); } public static void setMaxBufferedImagePoolSize(int maxSize) { biPool.setMaxSize(maxSize); } public static int getCurrentBufferedImagePoolSize() { return biPool.getPoolSize(); } public static long getMaxBufferedImagePoolMemory() { return biPool.getMaxMemory(); } public static void setMaxBufferedImagePoolMemory(long maxMemory) { biPool.setMaxMemory(maxMemory); } public static long getCurrentBufferedImagePoolMemory() { return biPool.getPoolMemory(); } public static float getBufferedImagePoolHitRate() { return biPool.getHitRate(); } public static void resetBufferedImagePoolHitRate() { biPool.resetHitRate(); } 
public final String getTransferSyntaxUID() { return tsuid; } public int getPixelDataLength() { return frames * frameLength; } protected BufferedImage getBufferedImage() { return biPool.borrowOrCreateBufferedImage(rows, columns, bitsUsed(), samples, planarConfiguration, dataType); } protected int bitsUsed() { return bitsAllocated; } protected void returnBufferedImage(BufferedImage bi) { biPool.returnBufferedImage(bi); } }
// Copyright (c) 2016 Markus Alexander Kuppe. All rights reserved. package tlc2.tool.fp; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static tlc2.tool.fp.OffHeapDiskFPSet.EMPTY; import java.util.ArrayList; import java.util.List; import org.junit.Assume; import org.junit.Before; import org.junit.Test; import tlc2.tool.fp.LongArrays.LongComparator; import tlc2.tool.fp.OffHeapDiskFPSet.Indexer; public class LongArraysTest { @Before public void setup() { Assume.assumeTrue(LongArray.isSupported()); } @Test public void testEmpty1() { doTest(new ArrayList<Long>(0), 1L, 0, new OffHeapDiskFPSet.Indexer(0, 1)); } @Test public void testEmpty2() { final List<Long> expected = new ArrayList<Long>(); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); doTest(expected, 1L, 2, new OffHeapDiskFPSet.Indexer(expected.size(), 1)); } @Test public void testBasic1() { final List<Long> expected = new ArrayList<Long>(); expected.add(5L); expected.add(8L); expected.add(1L); expected.add(7L); expected.add(0L); expected.add(3L); final LongArray array = new LongArray(expected); LongArrays.sort(array); // This amounts to a regular/basic insertion sort because there are no // sentinels in the array. doTest fails for this array, because the // indices calculated by the indexer are invalid. 
for (int i = 1; i < array.size(); i++) { assertTrue(array.get(i - 1L) < array.get(i)); } } @Test public void testBasic2() { final List<Long> expected = new ArrayList<Long>(); expected.add(74236458333421747L); expected.add(9185197375878056627L); expected.add(9017810141411942826L); expected.add(481170446028802552L); expected.add(587723185270146839L); expected.add(764880467681476738L); expected.add(1028380228728529428L); expected.add(1246117495100367611L); expected.add(1353681884824400499L); expected.add(1963327988900916594L); expected.add(2157942654452711468L); expected.add(2211701751588391467L); expected.add(2197266581704230150L); expected.add(2391118405386569995L); expected.add(2754416910109403115L); expected.add(3528296600587602855L); expected.add(3766154305485605955L); expected.add(4172091881329434331L); expected.add(4273360576593753745L); expected.add(4338054185482857322L); expected.add(4487790251341705673L); expected.add(4760603841378765728L); expected.add(4897534821030901381L); expected.add(5057347369431494228L); expected.add(5185984701076703188L); expected.add(5255556356599253415L); expected.add(4911921657882287345L); expected.add(5512811886280168498L); expected.add(5627022814159167180L); expected.add(5630009759945037387L); expected.add(5592096823142754761L); expected.add(5880489878946290534L); expected.add(6796173646113527960L); expected.add(6887096685265647763L); expected.add(6946033094922439935L); expected.add(7100083311060830826L); expected.add(7575172208974668528L); expected.add(8240485391672917634L); expected.add(8572429495433200993L); expected.add(8804495173596718076L); expected.add(8771524479740786626L); expected.add(8986659781390119011L); expected.add(9136953010061430590L); expected.add(9195197379878056627L); final LongArray array = new LongArray(expected); LongArrays.sort(array); // This amounts to a regular/basic insertion sort because there are no // sentinels in the array. 
doTest fails for this array, because the // indices calculated by the indexer are invalid. for (int i = 1; i < array.size(); i++) { assertTrue(array.get(i - 1L) < array.get(i)); } } @Test public void test0() { final List<Long> expected = new ArrayList<Long>(); expected.add(22102288204167208L); expected.add(225160948165161873L); expected.add(0L); expected.add(1638602644344629957L); expected.add(1644442600000000000L); expected.add(0L); doTest(expected, 1L, 3, new OffHeapDiskFPSet.Indexer(expected.size(), 1)); } @Test public void test1() { final List<Long> expected = new ArrayList<Long>(); expected.add(22102288204167208L); expected.add(225160948165161873L); expected.add(0L); expected.add(0L); expected.add(810435887525385357L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(1638602644344629957L); expected.add(0L); expected.add(0L); expected.add(2068351286375637679L); expected.add(0L); expected.add(2528370576879701538L); expected.add(2453870502940122045L); expected.add(0L); expected.add(3145830401686811393L); expected.add(3192897355035876677L); expected.add(3527505876050247287L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(4563398963865761585L); expected.add(0L); expected.add(4858869653769863593L); expected.add(5180223017321191209L); expected.add(0L); expected.add(0L); expected.add(5635076245116608576L); expected.add(5649139415351271641L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(6703691584433488410L); expected.add(0L); expected.add(7143040549630863225L); expected.add(7205281130519852628L); expected.add(7012967342342885117L); expected.add(7709106021212022085L); expected.add(7908712604546919197L); expected.add(7246110956693059329L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(8781691546738212390L); expected.add(8897195185152846807L); expected.add(0L); doTest(expected); } @Test public void test2() { final List<Long> expected = new ArrayList<Long>(); 
expected.add(0L); expected.add(0L); expected.add(22102288204167208L); expected.add(225160948165161873L); expected.add(810435887525385357L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(1638602644344629957L); expected.add(0L); expected.add(0L); expected.add(2068351286375637679L); expected.add(0L); expected.add(2528370576879701538L); expected.add(2453870502940122045L); expected.add(0L); expected.add(3145830401686811393L); expected.add(3192897355035876677L); expected.add(3527505876050247287L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(4563398963865761585L); expected.add(0L); expected.add(4858869653769863593L); expected.add(5180223017321191209L); expected.add(0L); expected.add(0L); expected.add(5635076245116608576L); expected.add(5649139415351271641L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(6703691584433488410L); expected.add(0L); expected.add(7143040549630863225L); expected.add(7205281130519852628L); expected.add(7012967342342885117L); expected.add(7709106021212022085L); expected.add(7908712604546919197L); expected.add(7246110956693059329L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(8781691546738212390L); expected.add(8897195185152846807L); expected.add(0L); doTest(expected); } @Test public void test3() { final List<Long> expected = new ArrayList<Long>(); expected.add(9183932681676589496L); expected.add(0L); expected.add(0L); expected.add(329728050397015749L); expected.add(436139026681109109L); expected.add(556905678415593173L); expected.add(0L); expected.add(796460649423573389L); expected.add(797798112015065380L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(1632374027957690827L); expected.add(1756811852021281877L); expected.add(0L); expected.add(1881448932687659007L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(2342821865031748924L); expected.add(0L); 
expected.add(0L); expected.add(0L); expected.add(2736147834640710575L); expected.add(2864022862265935958L); expected.add(2773542629236699928L); expected.add(2957298868366608281L); expected.add(0L); expected.add(3330257111892751888L); expected.add(3295675356431597478L); expected.add(3395836867027940583L); expected.add(3681469222400184316L); expected.add(3754947896063147473L); expected.add(3698681814958844261L); expected.add(3951382885893085878L); expected.add(0L); expected.add(4188454649677385650L); expected.add(4129247165607948084L); expected.add(4365409305525871332L); expected.add(4526757821913904014L); expected.add(4254202026550171921L); expected.add(4557871951994955815L); expected.add(4806497834029622101L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(5236202638577037427L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(5936146187640212534L); expected.add(0L); expected.add(6127434886073515781L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(6547025209145878563L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(6931928829149329960L); expected.add(0L); expected.add(0L); expected.add(7244186580741581738L); expected.add(0L); expected.add(0L); expected.add(7634041392899269082L); expected.add(7590982629575593986L); expected.add(0L); expected.add(7954723745221262664L); expected.add(0L); expected.add(8156105620374757718L); expected.add(8305398393196381769L); expected.add(8318253237689249492L); expected.add(8487954051864981042L); expected.add(8411933954485687818L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(9175849669163144218L); doTest(expected); } @Test public void test4() { final List<Long> expected = new ArrayList<Long>(); expected.add(9136953010061430590L); expected.add(74236458333421747L); expected.add(0L); expected.add(0L); expected.add(481170446028802552L); 
expected.add(587723185270146839L); expected.add(0L); expected.add(764880467681476738L); expected.add(0L); expected.add(0L); expected.add(1028380228728529428L); expected.add(0L); expected.add(1246117495100367611L); expected.add(1353681884824400499L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(1963327988900916594L); expected.add(0L); expected.add(2157942654452711468L); expected.add(2211701751588391467L); expected.add(2197266581704230150L); expected.add(2391118405386569995L); expected.add(0L); expected.add(0L); expected.add(2754416910109403115L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(3528296600587602855L); expected.add(0L); expected.add(3766154305485605955L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(4172091881329434331L); expected.add(4273360576593753745L); expected.add(4338054185482857322L); expected.add(4487790251341705673L); expected.add(0L); expected.add(4760603841378765728L); expected.add(0L); expected.add(4897534821030901381L); expected.add(5057347369431494228L); expected.add(5185984701076703188L); expected.add(5255556356599253415L); expected.add(4911921657882287345L); expected.add(5512811886280168498L); expected.add(5627022814159167180L); expected.add(5630009759945037387L); expected.add(5592096823142754761L); expected.add(5880489878946290534L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(6796173646113527960L); expected.add(6887096685265647763L); expected.add(6946033094922439935L); expected.add(7100083311060830826L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(7575172208974668528L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(8240485391672917634L); expected.add(0L); expected.add(0L); expected.add(8572429495433200993L); 
expected.add(0L); expected.add(8804495173596718076L); expected.add(8771524479740786626L); expected.add(8986659781390119011L); expected.add(9017810141411942826L); expected.add(9195197379878056627L); doTest(expected); } @Test public void test5() { final List<Long> expected = new ArrayList<Long>(); expected.add(9185197375878056627L); expected.add(74236458333421747L); expected.add(9017810141411942826L); expected.add(0L); expected.add(481170446028802552L); expected.add(587723185270146839L); expected.add(0L); expected.add(764880467681476738L); expected.add(0L); expected.add(0L); expected.add(1028380228728529428L); expected.add(0L); expected.add(1246117495100367611L); expected.add(1353681884824400499L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(1963327988900916594L); expected.add(0L); expected.add(2157942654452711468L); expected.add(2211701751588391467L); expected.add(2197266581704230150L); expected.add(2391118405386569995L); expected.add(0L); expected.add(0L); expected.add(2754416910109403115L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(3528296600587602855L); expected.add(0L); expected.add(3766154305485605955L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(4172091881329434331L); expected.add(4273360576593753745L); expected.add(4338054185482857322L); expected.add(4487790251341705673L); expected.add(0L); expected.add(4760603841378765728L); expected.add(0L); expected.add(4897534821030901381L); expected.add(5057347369431494228L); expected.add(5185984701076703188L); expected.add(5255556356599253415L); expected.add(4911921657882287345L); expected.add(5512811886280168498L); expected.add(5627022814159167180L); expected.add(5630009759945037387L); expected.add(5592096823142754761L); expected.add(5880489878946290534L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); 
expected.add(0L); expected.add(6796173646113527960L); expected.add(6887096685265647763L); expected.add(6946033094922439935L); expected.add(7100083311060830826L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(7575172208974668528L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(8240485391672917634L); expected.add(0L); expected.add(0L); expected.add(8572429495433200993L); expected.add(0L); expected.add(8804495173596718076L); expected.add(8771524479740786626L); expected.add(8986659781390119011L); expected.add(9136953010061430590L); expected.add(9195197379878056627L); doTest(expected); } @Test public void test6() { final List<Long> expected = new ArrayList<Long>(); expected.add(1L); expected.add(9185197375878056627L); expected.add(9017810141411942826L); expected.add(0L); expected.add(481170446028802552L); expected.add(587723185270146839L); expected.add(0L); expected.add(764880467681476738L); expected.add(0L); expected.add(0L); expected.add(1028380228728529428L); expected.add(0L); expected.add(1246117495100367611L); expected.add(1353681884824400499L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(1963327988900916594L); expected.add(0L); expected.add(2157942654452711468L); expected.add(2211701751588391467L); expected.add(2197266581704230150L); expected.add(2391118405386569995L); expected.add(0L); expected.add(0L); expected.add(2754416910109403115L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(3528296600587602855L); expected.add(0L); expected.add(3766154305485605955L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(4172091881329434331L); expected.add(4273360576593753745L); expected.add(4338054185482857322L); expected.add(4487790251341705673L); expected.add(0L); expected.add(4760603841378765728L); expected.add(0L); 
expected.add(4897534821030901381L); expected.add(5057347369431494228L); expected.add(5185984701076703188L); expected.add(5255556356599253415L); expected.add(4911921657882287345L); expected.add(5512811886280168498L); expected.add(5627022814159167180L); expected.add(5630009759945037387L); expected.add(5592096823142754761L); expected.add(5880489878946290534L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(6796173646113527960L); expected.add(6887096685265647763L); expected.add(6946033094922439935L); expected.add(7100083311060830826L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(7575172208974668528L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(8240485391672917634L); expected.add(0L); expected.add(0L); expected.add(8572429495433200993L); expected.add(0L); expected.add(8804495173596718076L); expected.add(8771524479740786626L); expected.add(8986659781390119011L); expected.add(9136953010061430590L); expected.add(9195197379878056627L); doTest(expected); } @Test public void test7() { final List<Long> expected = new ArrayList<Long>(); expected.add(1L); expected.add(0L); expected.add(4L); expected.add(0L); expected.add(6L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(0L); expected.add(13L); doTest(expected, 1, 0, new OffHeapDiskFPSet.Indexer(expected.size(), 1, 13)); } @Test public void test8() { final List<Long> expected = new ArrayList<Long>(); expected.add(1L); expected.add(11L); expected.add(3L); expected.add(4L); expected.add(5L); expected.add(6L); expected.add(7L); expected.add(8L); expected.add(9L); expected.add(10L); expected.add(12L); final OffHeapDiskFPSet.Indexer indexer = new OffHeapDiskFPSet.Indexer(expected.size(), 1, 12); final LongArray array = new LongArray(expected); final LongComparator comparator = getComparator(indexer); 
LongArrays.sort(array, 0, array.size() - 1L + 3, comparator);
        verify(expected, 3, indexer, array);
    }

    /**
     * Single partition, reprobe 2: the largest fingerprint (12) sits at slot 0,
     * i.e. it has wrapped around the end of the table.
     */
    @Test
    public void test9a() {
        final List<Long> expected = new ArrayList<Long>();
        expected.add(12L);
        expected.add(1L);
        expected.add(0L);
        expected.add(0L);
        expected.add(0L);
        expected.add(11L);
        doTest(expected, 1, 2, new OffHeapDiskFPSet.Indexer(expected.size(), 1, 13));
    }

    /**
     * Single partition, reprobe 2: variant of {@link #test9a()} with 11 wrapped
     * to the front and 12 at the end.
     */
    @Test
    public void test9b() {
        final List<Long> expected = new ArrayList<Long>();
        expected.add(11L);
        expected.add(1L);
        expected.add(0L);
        expected.add(0L);
        expected.add(0L);
        expected.add(12L);
        doTest(expected, 1, 2, new OffHeapDiskFPSet.Indexer(expected.size(), 1, 13));
    }

    /**
     * Single partition, reprobe 3: two wrapped elements (1 and 12) at the front.
     */
    @Test
    public void test9c() {
        final List<Long> expected = new ArrayList<Long>();
        expected.add(1L);
        expected.add(12L);
        expected.add(0L);
        expected.add(0L);
        expected.add(0L);
        expected.add(11L);
        doTest(expected, 1, 3, new OffHeapDiskFPSet.Indexer(expected.size(), 1, 13));
    }

    /**
     * Runs {@link #doTest(List, long, int, Indexer)} with a fixed reprobe of 2
     * for every partition count from 1 up to (but excluding) half the table
     * size, so the same fixture is exercised under many partitionings.
     */
    private void doTest(final List<Long> expected) {
        final Indexer indexer = new OffHeapDiskFPSet.Indexer(expected.size(), 1);
        for (int i = 1; i < (expected.size() / 2); i++) {
            doTest(expected, i, 2, indexer);
        }
    }

    /**
     * Sorts the fixture in {@code partitions} disjunct chunks (mimicking how
     * OffHeapDiskFPSet sorts its table concurrently), stitches the chunk
     * boundaries together with a second, boundary-local sort, and then runs the
     * full {@link #verify(List, int, Indexer, LongArray)} battery.
     *
     * @param expected   table fixture; 0 (EMPTY) and negative values are sentinels
     * @param partitions number of disjunct sort partitions
     * @param reprobe    maximum probing distance past an element's ideal index
     * @param indexer    maps a fingerprint to its ideal table index
     */
    private void doTest(final List<Long> expected, final long partitions, final int reprobe, final Indexer indexer) {
        final LongArray array = new LongArray(expected);
        final LongComparator comparator = getComparator(indexer);

        final long length = expected.size() / partitions;

        // Sort each disjunct partition.
        for (long i = 0; i < partitions; i++) {
            final long start = i * length;
            // NOTE(review): this end calculation duplicates getEnd(...) below;
            // consider calling getEnd(partitions, array, length, i) instead.
            final long end = i + 1L == partitions ? array.size() - 1L : start + length;
            LongArrays.sort(array, start, end, comparator);
        }

        // Stitch the disjunct partitions together. Only needed if more than one
        // partition, but done with one partition anyway to see that it causes
        // no harm.
        for (long i = 0; i < partitions; i++) {
            final long end = getEnd(partitions, array, length, i);
            LongArrays.sort(array, end - reprobe, end + reprobe, comparator);
        }

        verify(expected, reprobe, indexer, array);
    }

    /**
     * Returns the (inclusive) upper sort bound of partition {@code idx}: the
     * last array slot for the final partition, otherwise the first slot of the
     * next partition.
     */
    private long getEnd(final long partitions, final LongArray array, final long length, long idx) {
        return idx + 1L == partitions ? array.size() - 1L : (idx + 1L) * length;
    }

    /**
     * Comparator replicating the open-addressing order of OffHeapDiskFPSet:
     * sentinels never move, and two fingerprints are ordered by value unless
     * exactly one of them has wrapped around the end of the table (its ideal
     * index — Indexer.getIdx — is greater than its current position), in which
     * case the wrapped one logically belongs after the table end.
     */
    private static LongComparator getComparator(final Indexer indexer) {
        return new LongComparator() {
            public int compare(final long fpA, final long posA, final long fpB, final long posB) {
                // Elements not in Nat \ {0} remain at their current
                // position.
                if (fpA <= EMPTY || fpB <= EMPTY) {
                    return 0;
                }
                // A fingerprint "wrapped" when its ideal index lies beyond its
                // current position, i.e. it overflowed past the table end.
                final boolean wrappedA = indexer.getIdx(fpA) > posA;
                final boolean wrappedB = indexer.getIdx(fpB) > posB;
                if (wrappedA == wrappedB && posA > posB) {
                    // Same wrap state: plain ascending order by fingerprint.
                    return fpA < fpB ? -1 : 1;
                } else if ((wrappedA ^ wrappedB)) {
                    if (posA < posB && fpA < fpB) {
                        // Swap fpB, which is at the end of array a, with fpA.
                        // fpA is less than fpB. fpB was inserted into array a
                        // before fpA.
                        return -1;
                    }
                    if (posA > posB && fpA > fpB) {
                        return -1;
                    }
                }
                return 0;
            }
        };
    }

    /**
     * Checks the four post-conditions of a sorted fingerprint table:
     * sentinels untouched, every fingerprint within reprobe distance of its
     * ideal index, ascending order modulo wrap-around, and no element lost.
     *
     * @param expected the original (unsorted) fixture
     * @param reprobe  maximum allowed distance from the ideal index
     * @param indexer  maps a fingerprint to its ideal index
     * @param array    the table after sorting
     */
    private void verify(final List<Long> expected, final int reprobe, final Indexer indexer, final LongArray array) {
        // Verify that negative and EMPTY elements remain at their position.
        // Lets call them sentinels.
        int sentinel = 0;
        OUTER: for (int j = 0; j < expected.size(); j++) {
            final long l = expected.get(j);
            if (l == EMPTY) {
                // EMPTY remain at their original positions.
                assertEquals(EMPTY, array.get(j));
                sentinel++;
            } else if (l < EMPTY) {
                // Negative remain at their original positions.
                assertEquals(l, array.get(j));
                sentinel++;
            } else {
                // Verify that all non-sentinels are still
                // array members.
                for (int k = 0; k < array.size(); k++) {
                    if (array.get(k) == l) {
                        continue OUTER;
                    }
                }
                fail(String.format("long %s not found.", l));
            }
        }

        // Verify elements stayed within their lookup range.
        for (int pos = 0; pos < array.size(); pos++) {
            final long l = array.get(pos);
            if (l <= EMPTY) {
                continue;
            }
            final long idx = indexer.getIdx(l);
            assertTrue(String.format("%s, pos: %s, idx: %s, r: %s (was at: %s)", l, pos, idx, reprobe,
                    expected.indexOf(l)), isInRange(idx, reprobe, pos, array.size()));
        }

        // Verify that non-sentinels are sorted in ascending order. Take
        // care of wrapped elements too. A) First find the first non-sentinel,
        // non-wrapped element.
        long pos = 0;
        final List<Long> seen = new ArrayList<Long>(expected.size());
        while (pos < array.size()) {
            long e = array.get(pos);
            if (e <= EMPTY || indexer.getIdx(e) > pos) {
                // Either sentinel or wrapped.
                pos++;
                continue;
            }
            seen.add(e);
            pos++;
            break;
        }

        // B) Collect all elements into seen but skip those at the beginning that
        // wrapped, and those that didn't wrap at the end (array.size + reprobe).
        for (; pos < array.size() + reprobe; pos++) {
            long actual = array.get(pos % array.size());
            if (actual <= EMPTY) {
                continue;
            }
            final long idx = indexer.getIdx(actual);
            if (pos < array.size() && idx > pos) {
                // When not wrapped, ignore elements belonging to the end that wrapped.
                continue;
            }
            if (pos > array.size() - 1L && idx + reprobe < pos) {
                // When wrapped, ignore elements at beginning which do not
                // belong to the end.
                continue;
            }
            seen.add(actual);
        }

        // C) Verify that all elements are sorted.
        for (int i = 1; i < seen.size(); i++) {
            final long lo = seen.get(i - 1);
            final long hi = seen.get(i);
            assertTrue(String.format("%s > %s", lo, hi), lo < hi);
        }

        // D) Verify we saw all expected elements.
        assertEquals(expected.size() - sentinel, seen.size());
    }

    /**
     * Pins down the semantics of {@link #isInRange(long, int, int, long)}
     * including the wrap-around cases (idx + reprobe beyond the table size).
     */
    @Test
    public void testIsInRange() {
        assertTrue(isInRange(0, 0, 0, 4));
        assertFalse(isInRange(0, 0, 1, 4));
        assertFalse(isInRange(0, 0, 2, 4));
        assertFalse(isInRange(0, 0, 3, 4));
        assertFalse(isInRange(0, 0, 4, 4));
        assertTrue(isInRange(0, 1, 1, 4));
        assertFalse(isInRange(0, 1, 2, 4));
        assertTrue(isInRange(0, 2, 2, 4));
        assertFalse(isInRange(0, 2, 3, 4));
        assertTrue(isInRange(0, 3, 3, 4));
        assertFalse(isInRange(0, 3, 4, 4));
        assertTrue(isInRange(3, 0, 3, 4));
        assertTrue(isInRange(3, 1, 0, 4));
        assertTrue(isInRange(3, 2, 1, 4));
        assertFalse(isInRange(3, 2, 2, 4));
    }

    /**
     * Returns true iff {@code pos} lies within the probe window
     * {@code [idx, idx + reprobe]} of a table of {@code size} slots, where the
     * window may wrap around the end of the table.
     */
    private static boolean isInRange(long idx, int reprobe, int pos, long size) {
        if (idx + reprobe >= size && pos < idx) {
            // Window wraps: pos (already left of idx) must lie before the
            // wrapped tail of the window.
            return pos <= (idx + reprobe) % size;
        } else {
            return idx <= pos && pos <= idx + reprobe;
        }
    }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.TreeMap; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.Bytes; /** * Single row result of a {@link Get} or {@link Scan} query.<p> * * This class is <b>NOT THREAD SAFE</b>.<p> * * Convenience methods are available that return various {@link Map} * structures and values directly.<p> * * To get a complete mapping of all cells in the Result, which can include * multiple families and multiple versions, use {@link 
#getMap()}.<p>
 *
 * To get a mapping of each family to its columns (qualifiers and values),
 * including only the latest version of each, use {@link #getNoVersionMap()}.
 *
 * To get a mapping of qualifiers to latest values for an individual family use
 * {@link #getFamilyMap(byte[])}.<p>
 *
 * To get the latest value for a specific family and qualifier use
 * {@link #getValue(byte[], byte[])}.
 *
 * A Result is backed by an array of {@link Cell} objects, each representing
 * an HBase cell defined by the row, family, qualifier, timestamp, and value.<p>
 *
 * The underlying {@link Cell} objects can be accessed through the method {@link #listCells()}.
 * This will create a List from the internal Cell []. Better is to exploit the fact that
 * a new Result instance is a primed {@link CellScanner}; just call {@link #advance()} and
 * {@link #current()} to iterate over Cells as you would any {@link CellScanner}.
 * Call {@link #cellScanner()} to reset should you need to iterate the same Result over again
 * ({@link CellScanner}s are one-shot).
 *
 * If you need to overwrite a Result with another Result instance -- as in the old 'mapred'
 * RecordReader next invocations -- then create an empty Result with the null constructor and
 * then use {@link #copyFrom(Result)}
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Result implements CellScannable, CellScanner {
  // Backing cells, sorted by KeyValue.COMPARATOR; null or empty for an empty Result.
  private Cell[] cells;
  private Boolean exists; // if the query was just to check existence.
  private boolean stale = false;

  /**
   * Partial results do not contain the full row's worth of cells. The result had to be returned in
   * parts because the size of the cells in the row exceeded the RPC result size on the server.
   * Partial results must be combined client side with results representing the remainder of the
   * row's cells to form the complete result. Partial results and RPC result size allow us to avoid
   * OOME on the server when servicing requests for large rows. The Scan configuration used to
   * control the result size on the server is {@link Scan#setMaxResultSize(long)} and the default
   * value can be seen here: {@link HConstants#DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE}
   */
  private boolean partial = false;

  // We're not using java serialization. Transient here is just a marker to say
  // that this is where we cache row if we're ever asked for it.
  private transient byte [] row = null;
  // Ditto for familyMap. It can be composed on fly from passed in kvs.
  private transient NavigableMap<byte[],
      NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyMap = null;

  // Per-thread scratch buffer reused by binarySearch to build search keys
  // without reallocating on every lookup.
  private static ThreadLocal<byte[]> localBuffer = new ThreadLocal<byte[]>();
  private static final int PAD_WIDTH = 128;

  // Shared immutable "no results" singleton; the readonly flag guards it against mutation.
  public static final Result EMPTY_RESULT = new Result(true);

  private final static int INITIAL_CELLSCANNER_INDEX = -1;

  /**
   * Index for where we are when Result is acting as a {@link CellScanner}.
   */
  private int cellScannerIndex = INITIAL_CELLSCANNER_INDEX;
  private ClientProtos.RegionLoadStats stats;

  private final boolean readonly;

  /**
   * Creates an empty Result w/ no KeyValue payload; returns null if you call {@link #rawCells()}.
   * Use this to represent no results if {@code null} won't do or in old 'mapred' as opposed
   * to 'mapreduce' package MapReduce where you need to overwrite a Result instance with a
   * {@link #copyFrom(Result)} call.
   */
  public Result() {
    this(false);
  }

  /**
   * Allows to construct special purpose immutable Result objects,
   * such as EMPTY_RESULT.
   * @param readonly whether this Result instance is readonly
   */
  private Result(boolean readonly) {
    this.readonly = readonly;
  }

  /**
   * Instantiate a Result with the specified List of KeyValues.
   * <br><strong>Note:</strong> You must ensure that the keyvalues are already sorted.
   * @param cells List of cells
   */
  public static Result create(List<Cell> cells) {
    return create(cells, null);
  }

  /** As {@link #create(List)} but also carries the existence-check answer. */
  public static Result create(List<Cell> cells, Boolean exists) {
    return create(cells, exists, false);
  }

  /** As {@link #create(List, Boolean)} but also carries the stale flag. */
  public static Result create(List<Cell> cells, Boolean exists, boolean stale) {
    return create(cells, exists, stale, false);
  }

  /**
   * As {@link #create(List, Boolean, boolean)} but also carries the partial flag.
   * When {@code exists} is non-null the cells are ignored: the Result answers an
   * existence check only.
   */
  public static Result create(List<Cell> cells, Boolean exists, boolean stale, boolean partial) {
    if (exists != null){
      return new Result(null, exists, stale, partial);
    }
    return new Result(cells.toArray(new Cell[cells.size()]), null, stale, partial);
  }

  /**
   * Instantiate a Result with the specified array of KeyValues.
   * <br><strong>Note:</strong> You must ensure that the keyvalues are already sorted.
   * @param cells array of cells
   */
  public static Result create(Cell[] cells) {
    return create(cells, null, false);
  }

  /** As {@link #create(Cell[])} but also carries the existence answer and stale flag. */
  public static Result create(Cell[] cells, Boolean exists, boolean stale) {
    return create(cells, exists, stale, false);
  }

  /**
   * As {@link #create(Cell[], Boolean, boolean)} but also carries the partial flag.
   * When {@code exists} is non-null the cells are ignored (existence check only).
   */
  public static Result create(Cell[] cells, Boolean exists, boolean stale, boolean partial) {
    if (exists != null){
      return new Result(null, exists, stale, partial);
    }
    return new Result(cells, null, stale, partial);
  }

  /** Private ctor. Use {@link #create(Cell[])}. */
  private Result(Cell[] cells, Boolean exists, boolean stale, boolean partial) {
    this.cells = cells;
    this.exists = exists;
    this.stale = stale;
    this.partial = partial;
    this.readonly = false;
  }

  /**
   * Method for retrieving the row key that corresponds to
   * the row from which this Result was created.
   * @return row, lazily cloned from the first cell and cached; null for an empty Result
   */
  public byte [] getRow() {
    if (this.row == null) {
      this.row = (this.cells == null || this.cells.length == 0) ?
          null : CellUtil.cloneRow(this.cells[0]);
    }
    return this.row;
  }

  /**
   * Return the array of Cells backing this Result instance.
   *
   * The array is sorted from smallest -> largest using the
   * {@link KeyValue#COMPARATOR}.
   *
   * The array only contains what your Get or Scan specifies and no more.
* For example if you request column "A" 1 version you will have at most 1
   * Cell in the array. If you request column "A" with 2 version you will
   * have at most 2 Cells, with the first one being the newer timestamp and
   * the second being the older timestamp (this is the sort order defined by
   * {@link KeyValue#COMPARATOR}). If columns don't exist, they won't be
   * present in the result. Therefore if you ask for 1 version all columns,
   * it is safe to iterate over this array and expect to see 1 Cell for
   * each column and no more.
   *
   * This API is faster than using getFamilyMap() and getMap()
   *
   * @return array of Cells; can be null if nothing in the result
   */
  public Cell[] rawCells() {
    return cells;
  }

  /**
   * Create a sorted list of the Cell's in this result.
   *
   * Since HBase 0.20.5 this is equivalent to raw().
   *
   * @return sorted List of Cells; can be null if no cells in the result
   */
  public List<Cell> listCells() {
    return isEmpty()? null: Arrays.asList(rawCells());
  }

  /**
   * Return the Cells for the specific column. The Cells are sorted in
   * the {@link KeyValue#COMPARATOR} order. That implies the first entry in
   * the list is the most recent column. If the query (Scan or Get) only
   * requested 1 version the list will contain at most 1 entry. If the column
   * did not exist in the result set (either the column does not exist
   * or the column was not selected in the query) the list will be empty.
   *
   * Also see getColumnLatest which returns just a Cell
   *
   * @param family the family
   * @param qualifier
   * @return a list of Cells for this column or empty list if the column
   * did not exist in the result set
   */
  public List<Cell> getColumnCells(byte [] family, byte [] qualifier) {
    List<Cell> result = new ArrayList<Cell>();

    Cell [] kvs = rawCells();

    if (kvs == null || kvs.length == 0) {
      return result;
    }
    int pos = binarySearch(kvs, family, qualifier);
    if (pos == -1) {
      return result; // can't find it
    }

    // Collect the contiguous run of cells for exactly this family:qualifier;
    // the backing array is sorted, so stop at the first non-matching cell.
    for (int i = pos; i < kvs.length; i++) {
      if (CellUtil.matchingColumn(kvs[i], family, qualifier)) {
        result.add(kvs[i]);
      } else {
        break;
      }
    }

    return result;
  }

  /**
   * Finds the index of the first cell in {@code kvs} at or after the
   * first-on-row position of {@code family:qualifier}.
   *
   * @return the insertion-point index, or -1 when it lies past the end of the array
   */
  protected int binarySearch(final Cell [] kvs,
      final byte [] family, final byte [] qualifier) {
    Cell searchTerm =
        KeyValueUtil.createFirstOnRow(CellUtil.cloneRow(kvs[0]),
            family, qualifier);

    // pos === ( -(insertion point) - 1)
    int pos = Arrays.binarySearch(kvs, searchTerm, KeyValue.COMPARATOR);
    // never will exact match
    if (pos < 0) {
      pos = (pos+1) * -1;
      // pos is now insertion point
    }
    if (pos == kvs.length) {
      return -1; // doesn't exist
    }
    return pos;
  }

  /**
   * Searches for the latest value for the specified column.
*
   * @param kvs the array to search
   * @param family family name
   * @param foffset family offset
   * @param flength family length
   * @param qualifier column qualifier
   * @param qoffset qualifier offset
   * @param qlength qualifier length
   *
   * @return the index where the value was found, or -1 otherwise
   */
  protected int binarySearch(final Cell [] kvs,
      final byte [] family, final int foffset, final int flength,
      final byte [] qualifier, final int qoffset, final int qlength) {

    double keyValueSize = (double)
        KeyValue.getKeyValueDataStructureSize(kvs[0].getRowLength(), flength, qlength, 0);

    byte[] buffer = localBuffer.get();
    if (buffer == null || keyValueSize > buffer.length) {
      // pad to the smallest multiple of the pad width
      buffer = new byte[(int) Math.ceil(keyValueSize / PAD_WIDTH) * PAD_WIDTH];
      localBuffer.set(buffer);
    }

    // Build the first-on-row search key into the reusable thread-local buffer.
    Cell searchTerm = KeyValueUtil.createFirstOnRow(buffer, 0,
        kvs[0].getRowArray(), kvs[0].getRowOffset(), kvs[0].getRowLength(),
        family, foffset, flength,
        qualifier, qoffset, qlength);

    // pos === ( -(insertion point) - 1)
    int pos = Arrays.binarySearch(kvs, searchTerm, KeyValue.COMPARATOR);
    // never will exact match
    if (pos < 0) {
      pos = (pos+1) * -1;
      // pos is now insertion point
    }
    if (pos == kvs.length) {
      return -1; // doesn't exist
    }
    return pos;
  }

  /**
   * The Cell for the most recent timestamp for a given column.
   *
   * @param family
   * @param qualifier
   *
   * @return the Cell for the column, or null if no value exists in the row or none have been
   * selected in the query (Get/Scan)
   */
  public Cell getColumnLatestCell(byte [] family, byte [] qualifier) {
    Cell [] kvs = rawCells(); // side effect possibly.
    if (kvs == null || kvs.length == 0) {
      return null;
    }
    int pos = binarySearch(kvs, family, qualifier);
    if (pos == -1) {
      return null;
    }
    // Sorted order guarantees the cell at the insertion point, if it matches
    // the column at all, carries the most recent timestamp.
    if (CellUtil.matchingColumn(kvs[pos], family, qualifier)) {
      return kvs[pos];
    }
    return null;
  }

  /**
   * The Cell for the most recent timestamp for a given column.
   *
   * @param family family name
   * @param foffset family offset
   * @param flength family length
   * @param qualifier column qualifier
   * @param qoffset qualifier offset
   * @param qlength qualifier length
   *
   * @return the Cell for the column, or null if no value exists in the row or none have been
   * selected in the query (Get/Scan)
   */
  public Cell getColumnLatestCell(byte [] family, int foffset, int flength,
      byte [] qualifier, int qoffset, int qlength) {

    Cell [] kvs = rawCells(); // side effect possibly.
    if (kvs == null || kvs.length == 0) {
      return null;
    }
    int pos = binarySearch(kvs, family, foffset, flength, qualifier, qoffset, qlength);
    if (pos == -1) {
      return null;
    }
    if (CellUtil.matchingColumn(kvs[pos], family, foffset, flength, qualifier, qoffset, qlength)) {
      return kvs[pos];
    }
    return null;
  }

  /**
   * Get the latest version of the specified column.
   * Note: this call clones the value content of the hosting Cell. See
   * {@link #getValueAsByteBuffer(byte[], byte[])}, etc., or {@link #listCells()} if you would
   * avoid the cloning.
   * @param family family name
   * @param qualifier column qualifier
   * @return value of latest version of column, null if none found
   */
  public byte[] getValue(byte [] family, byte [] qualifier) {
    Cell kv = getColumnLatestCell(family, qualifier);
    if (kv == null) {
      return null;
    }
    return CellUtil.cloneValue(kv);
  }

  /**
   * Returns the value wrapped in a new <code>ByteBuffer</code>.
   *
   * @param family family name
   * @param qualifier column qualifier
   *
   * @return the latest version of the column, or <code>null</code> if none found
   */
  public ByteBuffer getValueAsByteBuffer(byte [] family, byte [] qualifier) {

    Cell kv = getColumnLatestCell(family, 0, family.length, qualifier, 0, qualifier.length);

    if (kv == null) {
      return null;
    }

    return ByteBuffer.wrap(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()).
      asReadOnlyBuffer();
  }

  /**
   * Returns the value wrapped in a new <code>ByteBuffer</code>.
* * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * * @return the latest version of the column, or <code>null</code> if none found */ public ByteBuffer getValueAsByteBuffer(byte [] family, int foffset, int flength, byte [] qualifier, int qoffset, int qlength) { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); if (kv == null) { return null; } return ByteBuffer.wrap(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()). asReadOnlyBuffer(); } /** * Loads the latest version of the specified column into the provided <code>ByteBuffer</code>. * <p> * Does not clear or flip the buffer. * * @param family family name * @param qualifier column qualifier * @param dst the buffer where to write the value * * @return <code>true</code> if a value was found, <code>false</code> otherwise * * @throws BufferOverflowException there is insufficient space remaining in the buffer */ public boolean loadValue(byte [] family, byte [] qualifier, ByteBuffer dst) throws BufferOverflowException { return loadValue(family, 0, family.length, qualifier, 0, qualifier.length, dst); } /** * Loads the latest version of the specified column into the provided <code>ByteBuffer</code>. * <p> * Does not clear or flip the buffer. 
* * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @param dst the buffer where to write the value * * @return <code>true</code> if a value was found, <code>false</code> otherwise * * @throws BufferOverflowException there is insufficient space remaining in the buffer */ public boolean loadValue(byte [] family, int foffset, int flength, byte [] qualifier, int qoffset, int qlength, ByteBuffer dst) throws BufferOverflowException { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); if (kv == null) { return false; } dst.put(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()); return true; } /** * Checks if the specified column contains a non-empty value (not a zero-length byte array). * * @param family family name * @param qualifier column qualifier * * @return whether or not a latest value exists and is not empty */ public boolean containsNonEmptyColumn(byte [] family, byte [] qualifier) { return containsNonEmptyColumn(family, 0, family.length, qualifier, 0, qualifier.length); } /** * Checks if the specified column contains a non-empty value (not a zero-length byte array). * * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * * @return whether or not a latest value exists and is not empty */ public boolean containsNonEmptyColumn(byte [] family, int foffset, int flength, byte [] qualifier, int qoffset, int qlength) { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); return (kv != null) && (kv.getValueLength() > 0); } /** * Checks if the specified column contains an empty value (a zero-length byte array). 
* * @param family family name * @param qualifier column qualifier * * @return whether or not a latest value exists and is empty */ public boolean containsEmptyColumn(byte [] family, byte [] qualifier) { return containsEmptyColumn(family, 0, family.length, qualifier, 0, qualifier.length); } /** * Checks if the specified column contains an empty value (a zero-length byte array). * * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * * @return whether or not a latest value exists and is empty */ public boolean containsEmptyColumn(byte [] family, int foffset, int flength, byte [] qualifier, int qoffset, int qlength) { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); return (kv != null) && (kv.getValueLength() == 0); } /** * Checks for existence of a value for the specified column (empty or not). * * @param family family name * @param qualifier column qualifier * * @return true if at least one value exists in the result, false if not */ public boolean containsColumn(byte [] family, byte [] qualifier) { Cell kv = getColumnLatestCell(family, qualifier); return kv != null; } /** * Checks for existence of a value for the specified column (empty or not). * * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * * @return true if at least one value exists in the result, false if not */ public boolean containsColumn(byte [] family, int foffset, int flength, byte [] qualifier, int qoffset, int qlength) { return getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength) != null; } /** * Map of families to all versions of its qualifiers and values. 
* <p> * Returns a three level Map of the form: * <code>Map&amp;family,Map&lt;qualifier,Map&lt;timestamp,value>>></code> * <p> * Note: All other map returning methods make use of this map internally. * @return map from families to qualifiers to versions */ public NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> getMap() { if (this.familyMap != null) { return this.familyMap; } if(isEmpty()) { return null; } this.familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); for(Cell kv : this.cells) { byte [] family = CellUtil.cloneFamily(kv); NavigableMap<byte[], NavigableMap<Long, byte[]>> columnMap = familyMap.get(family); if(columnMap == null) { columnMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); familyMap.put(family, columnMap); } byte [] qualifier = CellUtil.cloneQualifier(kv); NavigableMap<Long, byte[]> versionMap = columnMap.get(qualifier); if(versionMap == null) { versionMap = new TreeMap<>(new Comparator<Long>() { @Override public int compare(Long l1, Long l2) { return l2.compareTo(l1); } }); columnMap.put(qualifier, versionMap); } Long timestamp = kv.getTimestamp(); byte [] value = CellUtil.cloneValue(kv); versionMap.put(timestamp, value); } return this.familyMap; } /** * Map of families to their most recent qualifiers and values. * <p> * Returns a two level Map of the form: <code>Map&amp;family,Map&lt;qualifier,value>></code> * <p> * The most recent version of each qualifier will be used. 
* @return map from families to qualifiers and value */ public NavigableMap<byte[], NavigableMap<byte[], byte[]>> getNoVersionMap() { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte[], NavigableMap<byte[], byte[]>> returnMap = new TreeMap<byte[], NavigableMap<byte[], byte[]>>(Bytes.BYTES_COMPARATOR); for(Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyEntry : familyMap.entrySet()) { NavigableMap<byte[], byte[]> qualifierMap = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR); for(Map.Entry<byte[], NavigableMap<Long, byte[]>> qualifierEntry : familyEntry.getValue().entrySet()) { byte [] value = qualifierEntry.getValue().get(qualifierEntry.getValue().firstKey()); qualifierMap.put(qualifierEntry.getKey(), value); } returnMap.put(familyEntry.getKey(), qualifierMap); } return returnMap; } /** * Map of qualifiers to values. * <p> * Returns a Map of the form: <code>Map&lt;qualifier,value></code> * @param family column family to get * @return map of qualifiers to values */ public NavigableMap<byte[], byte[]> getFamilyMap(byte [] family) { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte[], byte[]> returnMap = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR); NavigableMap<byte[], NavigableMap<Long, byte[]>> qualifierMap = familyMap.get(family); if(qualifierMap == null) { return returnMap; } for(Map.Entry<byte[], NavigableMap<Long, byte[]>> entry : qualifierMap.entrySet()) { byte [] value = entry.getValue().get(entry.getValue().firstKey()); returnMap.put(entry.getKey(), value); } return returnMap; } /** * Returns the value of the first column in the Result. 
* @return value of the first column */ public byte [] value() { if (isEmpty()) { return null; } return CellUtil.cloneValue(cells[0]); } /** * Check if the underlying Cell [] is empty or not * @return true if empty */ public boolean isEmpty() { return this.cells == null || this.cells.length == 0; } /** * @return the size of the underlying Cell [] */ public int size() { return this.cells == null? 0: this.cells.length; } /** * @return String */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("keyvalues="); if(isEmpty()) { sb.append("NONE"); return sb.toString(); } sb.append("{"); boolean moreThanOne = false; for(Cell kv : this.cells) { if(moreThanOne) { sb.append(", "); } else { moreThanOne = true; } sb.append(kv.toString()); } sb.append("}"); return sb.toString(); } /** * Does a deep comparison of two Results, down to the byte arrays. * @param res1 first result to compare * @param res2 second result to compare * @throws Exception Every difference is throwing an exception */ public static void compareResults(Result res1, Result res2) throws Exception { if (res2 == null) { throw new Exception("There wasn't enough rows, we stopped at " + Bytes.toStringBinary(res1.getRow())); } if (res1.size() != res2.size()) { throw new Exception("This row doesn't have the same number of KVs: " + res1.toString() + " compared to " + res2.toString()); } Cell[] ourKVs = res1.rawCells(); Cell[] replicatedKVs = res2.rawCells(); for (int i = 0; i < res1.size(); i++) { if (!ourKVs[i].equals(replicatedKVs[i]) || !Bytes.equals(CellUtil.cloneValue(ourKVs[i]), CellUtil.cloneValue(replicatedKVs[i]))) { throw new Exception("This result was different: " + res1.toString() + " compared to " + res2.toString()); } } } /** * Forms a single result from the partial results in the partialResults list. This method is * useful for reconstructing partial results on the client side. 
* @param partialResults list of partial results * @return The complete result that is formed by combining all of the partial results together * @throws IOException A complete result cannot be formed because the results in the partial list * come from different rows */ public static Result createCompleteResult(List<Result> partialResults) throws IOException { List<Cell> cells = new ArrayList<Cell>(); boolean stale = false; byte[] prevRow = null; byte[] currentRow = null; if (partialResults != null && !partialResults.isEmpty()) { for (int i = 0; i < partialResults.size(); i++) { Result r = partialResults.get(i); currentRow = r.getRow(); if (prevRow != null && !Bytes.equals(prevRow, currentRow)) { throw new IOException( "Cannot form complete result. Rows of partial results do not match." + " Partial Results: " + partialResults); } // Ensure that all Results except the last one are marked as partials. The last result // may not be marked as a partial because Results are only marked as partials when // the scan on the server side must be stopped due to reaching the maxResultSize. // Visualizing it makes it easier to understand: // maxResultSize: 2 cells // (-x-) represents cell number x in a row // Example: row1: -1- -2- -3- -4- -5- (5 cells total) // How row1 will be returned by the server as partial Results: // Result1: -1- -2- (2 cells, size limit reached, mark as partial) // Result2: -3- -4- (2 cells, size limit reached, mark as partial) // Result3: -5- (1 cell, size limit NOT reached, NOT marked as partial) if (i != (partialResults.size() - 1) && !r.isPartial()) { throw new IOException( "Cannot form complete result. Result is missing partial flag. " + "Partial Results: " + partialResults); } prevRow = currentRow; stale = stale || r.isStale(); for (Cell c : r.rawCells()) { cells.add(c); } } } return Result.create(cells, null, stale); } /** * Get total size of raw cells * @param result * @return Total size. 
*/ public static long getTotalSizeOfCells(Result result) { long size = 0; for (Cell c : result.rawCells()) { size += CellUtil.estimatedHeapSizeOf(c); } return size; } /** * Copy another Result into this one. Needed for the old Mapred framework * @throws UnsupportedOperationException if invoked on instance of EMPTY_RESULT * (which is supposed to be immutable). * @param other */ public void copyFrom(Result other) { checkReadonly(); this.row = null; this.familyMap = null; this.cells = other.cells; } @Override public CellScanner cellScanner() { // Reset this.cellScannerIndex = INITIAL_CELLSCANNER_INDEX; return this; } @Override public Cell current() { if (cells == null) return null; return (cellScannerIndex < 0)? null: this.cells[cellScannerIndex]; } @Override public boolean advance() { if (cells == null) return false; return ++cellScannerIndex < this.cells.length; } public Boolean getExists() { return exists; } public void setExists(Boolean exists) { checkReadonly(); this.exists = exists; } /** * Whether or not the results are coming from possibly stale data. Stale results * might be returned if {@link Consistency} is not STRONG for the query. * @return Whether or not the results are coming from possibly stale data. */ public boolean isStale() { return stale; } /** * Whether or not the result is a partial result. Partial results contain a subset of the cells * for a row and should be combined with a result representing the remaining cells in that row to * form a complete (non-partial) result. * @return Whether or not the result is a partial result */ public boolean isPartial() { return partial; } /** * Add load information about the region to the information about the result * @param loadStats statistics about the current region from which this was returned * @deprecated use {@link #setStatistics(ClientProtos.RegionLoadStats)} instead * @throws UnsupportedOperationException if invoked on instance of EMPTY_RESULT * (which is supposed to be immutable). 
*/ @Deprecated public void addResults(ClientProtos.RegionLoadStats loadStats) { checkReadonly(); this.stats = loadStats; } /** * Set load information about the region to the information about the result * @param loadStats statistics about the current region from which this was returned */ public void setStatistics(ClientProtos.RegionLoadStats loadStats) { this.stats = loadStats; } /** * @return the associated statistics about the region from which this was returned. Can be * <tt>null</tt> if stats are disabled. */ public ClientProtos.RegionLoadStats getStats() { return stats; } /** * All methods modifying state of Result object must call this method * to ensure that special purpose immutable Results can't be accidentally modified. */ private void checkReadonly() { if (readonly == true) { throw new UnsupportedOperationException("Attempting to modify readonly EMPTY_RESULT!"); } } }
package editor.splitpane;

import editor.tabpane.TabPosition;

import javax.swing.*;
import java.awt.*;

/**
 * A {@link SplitPane} whose two sides can be collapsed down to a thin caption
 * bar and restored again. Collapse/restore/maximize requests arrive as
 * caption-bar actions fired by {@code ICaptionedPanel} descendants that this
 * pane discovers in its component tree and subscribes to.
 * <p>
 * NOTE(review): {@code _comp1}/{@code _comp2} are presumably the top/bottom
 * components inherited from {@code SplitPane} — confirm against that class.
 */
public class CollapsibleSplitPane extends SplitPane
{
  // The captioned panel currently minimized on the top side, or null.
  private ICaptionedPanel _topMin;
  // The captioned panel currently minimized on the bottom side, or null.
  private ICaptionedPanel _bottomMin;
  // Lazily created, shared listener attached to every discovered caption.
  private ICaptionActionListener _captionActionListener;

  public CollapsibleSplitPane( int iOrientation, JComponent comp1, JComponent comp2 )
  {
    super( iOrientation, comp1, comp2 );
  }

  /** Installs the top component and subscribes to its caption actions. */
  public void setTop( JComponent c )
  {
    super.setTop( c );
    listenToCaptionActions( c );
  }

  /** Installs the bottom component and subscribes to its caption actions. */
  public void setBottom( JComponent c )
  {
    super.setBottom( c );
    listenToCaptionActions( c );
  }

  /** Restores the split view if either side is currently minimized. */
  public void restorePane()
  {
    if( isMin() )
    {
      // Toggling the currently-minimized side brings both panes back.
      toggleCollapseNow( getTopMin() == null ? getBottomMin() : getTopMin() );
    }
  }

  /** @return true if either the top or the bottom side is minimized */
  public boolean isMin()
  {
    return getTopMin() != null || getBottomMin() != null;
  }

  public ICaptionedPanel getTopMin()
  {
    return _topMin;
  }
  protected void setTopMin( ICaptionedPanel topMin )
  {
    _topMin = topMin;
  }

  public ICaptionedPanel getBottomMin()
  {
    return _bottomMin;
  }
  protected void setBottomMin( ICaptionedPanel bottomMin )
  {
    _bottomMin = bottomMin;
  }

  /** Subscribes to caption actions on both sides of the split. */
  public void hearBothSides()
  {
    listenToCaptionActions( _comp1 );
    listenToCaptionActions( _comp2 );
  }

  // Finds the captioned panel under c (if any) and attaches the shared listener.
  private void listenToCaptionActions( JComponent c )
  {
    ICaptionedPanel captionedPanel = findCaptionedPanel( c );
    if( captionedPanel == null )
    {
      return;
    }
    if( _captionActionListener == null )
    {
      _captionActionListener = new CaptionActionListener();
    }
    captionedPanel.getCaption().addCaptionActionListener( _captionActionListener );
  }

  // Depth-first search for the first ICaptionedPanel under c, refusing to
  // descend into nested SplitPanes (their captions belong to the nested pane).
  private ICaptionedPanel findCaptionedPanel( Component c )
  {
    if( !(c instanceof Container) )
    {
      return null;
    }
    if( c instanceof SplitPane )
    {
      return null;
    }
    if( c instanceof ICaptionedPanel )
    {
      return (ICaptionedPanel)c;
    }
    Component[] children = ((Container)c).getComponents();
    for( int i = 0; i < children.length; i++ )
    {
      ICaptionedPanel titledPane = findCaptionedPanel( children[i] );
      if( titledPane != null )
      {
        return titledPane;
      }
    }
    return null;
  }

  /**
   * Collapses or restores the side containing the given panel. Deferred to the
   * EDT queue, presumably so the toggle runs after any in-flight event
   * dispatch completes — TODO confirm.
   */
  public void toggleCollapse( final ICaptionedPanel captionedPanel )
  {
    EventQueue.invokeLater( () -> toggleCollapseNow( captionedPanel ) );
  }

  // Performs the toggle immediately, then defers a full revalidate/layout/repaint.
  private void toggleCollapseNow( ICaptionedPanel captionedPanel )
  {
    if( isInTop( captionedPanel ) )
    {
      toggleTopCollapse( captionedPanel );
    }
    else if( isInBottom( captionedPanel ) )
    {
      toggleBottomCollapse( captionedPanel );
    }
    EventQueue.invokeLater( () -> {
      revalidate();
      doLayout();
      repaint();
    } );
  }

  // Top side: restore if already minimized, otherwise collapse it.
  private void toggleTopCollapse( ICaptionedPanel captionedPanel )
  {
    if( getTopMin() != null )
    {
      restore();
    }
    else
    {
      collapseTop( captionedPanel );
    }
  }

  // Bottom side: restore if already minimized, otherwise collapse it.
  private void toggleBottomCollapse( ICaptionedPanel captionedPanel )
  {
    if( getBottomMin() != null )
    {
      restore();
    }
    else
    {
      collapseBottom( captionedPanel );
    }
  }

  // Rebuilds the normal two-pane layout and resets every caption to RESTORE.
  private void restore()
  {
    // In the collapsed layout the min bar is component 0; bail if it is not.
    Component minBar = getComponent( 0 );
    if( !(minBar instanceof ICaptionBar) )
    {
      return;
    }
    ICaptionBar bar = (ICaptionBar)minBar;
    removeAll();
    addMainComponents( _comp1, _comp2 );
    setTopMin( null );
    setBottomMin( null );
    bar.setCaptionType( ICaptionActionListener.ActionType.RESTORE );
    // Flip whichever side's caption is still showing a non-RESTORE state.
    ICaptionedPanel other = findCaptionedPanel( getTop() );
    if( other != null && other.getCaption().getCaptionType() != ICaptionActionListener.ActionType.RESTORE )
    {
      other.getCaption().setCaptionType( ICaptionActionListener.ActionType.RESTORE );
    }
    else
    {
      other = findCaptionedPanel( getBottom() );
      if( other != null && other.getCaption().getCaptionType() != ICaptionActionListener.ActionType.RESTORE )
      {
        other.getCaption().setCaptionType( ICaptionActionListener.ActionType.RESTORE );
      }
    }
  }

  // Collapses the top side to a caption bar; the bottom takes the full area.
  private void collapseTop( ICaptionedPanel captionedPanel )
  {
    // Only one side can be minimized at a time.
    if( getBottomMin() != null )
    {
      setBottomMin( null );
    }
    ICaptionBar minLabel = makeMinLabel( captionedPanel, true );
    setTopMin( captionedPanel );
    removeAll();
    // Swap the split layout for a BorderLayout: bar on the edge, rest center.
    BorderLayout bl = new BorderLayout();
    setLayout( bl );
    if( getOrientation() == HORIZONTAL )
    {
      bl.setHgap( 4 );
      bl.setVgap( 4 );
      add( (Component)minLabel, BorderLayout.WEST );
    }
    else
    {
      bl.setVgap( 4 );
      bl.setHgap( 4 );
      add( (Component)minLabel, BorderLayout.NORTH );
    }
    ICaptionedPanel bottomCaptionedPanel = findCaptionedPanel( _comp2 );
    if( bottomCaptionedPanel != null )
    {
      // The surviving side's caption now offers "maximize".
      bottomCaptionedPanel.getCaption().setCaptionType( ICaptionActionListener.ActionType.MAXIMIZE );
    }
    add( _comp2, BorderLayout.CENTER );
  }

  // Collapses the bottom side to a caption bar; the top takes the full area.
  public void collapseBottom( ICaptionedPanel bottomCaptionedPanel )
  {
    // Only one side can be minimized at a time.
    if( getTopMin() != null )
    {
      setTopMin( null );
    }
    ICaptionBar minLabel = makeMinLabel( bottomCaptionedPanel, false );
    setBottomMin( bottomCaptionedPanel );
    removeAll();
    BorderLayout bl = new BorderLayout();
    setLayout( bl );
    // An EmptyCaptionBar renders nothing, so drop the gap entirely.
    int iGap = minLabel instanceof EmptyCaptionBar ? 0 : 4;
    String location = getOrientation() == HORIZONTAL ? BorderLayout.EAST : BorderLayout.SOUTH;
    bl.setHgap( iGap );
    bl.setVgap( iGap );
    add( (Component)minLabel, location );
    ICaptionedPanel topCaptionedPanel = findCaptionedPanel( _comp1 );
    if( topCaptionedPanel != null )
    {
      topCaptionedPanel.getCaption().setCaptionType( ICaptionActionListener.ActionType.MAXIMIZE );
    }
    add( _comp1, BorderLayout.CENTER );
  }

  // Builds the minimized caption bar for a panel, oriented by split direction
  // and by which side (top/left vs bottom/right) is being collapsed.
  private ICaptionBar makeMinLabel( final ICaptionedPanel captionedPanel, boolean bTop )
  {
    captionedPanel.getCaption().setCaptionType( ICaptionActionListener.ActionType.MINIMIZE );
    ICaptionBar bar = captionedPanel.getCaption()
      .getMinimizedPanel( getOrientation() == HORIZONTAL
                          ? bTop
                            ? TabPosition.LEFT
                            : TabPosition.RIGHT
                          : TabPosition.TOP );
    bar.setCaptionType( ICaptionActionListener.ActionType.MINIMIZE );
    return bar;
  }

  // Walks up the parent chain to see if the panel lives inside _comp1.
  private boolean isInTop( ICaptionedPanel captionedPanel )
  {
    Component c = (Component)captionedPanel;
    while( c != _comp1 && c != null )
    {
      c = c.getParent();
    }
    return c == _comp1;
  }

  // Walks up the parent chain to see if the panel lives inside _comp2.
  private boolean isInBottom( ICaptionedPanel captionedPanel )
  {
    Component c = (Component)captionedPanel;
    while( c != _comp2 && c != null )
    {
      c = c.getParent();
    }
    return c == _comp2;
  }

  /**
   * Routes caption-bar actions to the matching collapse/restore operation.
   * MAXIMIZE on one side is implemented as collapsing the other side.
   */
  private class CaptionActionListener implements ICaptionActionListener
  {
    public CaptionActionListener()
    {
    }

    public void captionActionPerformed( ICaptionedPanel captionedPanel, ActionType actionType )
    {
      if( actionType == ICaptionActionListener.ActionType.MINIMIZE )
      {
        toggleCollapse( captionedPanel );
      }
      else if( actionType == ICaptionActionListener.ActionType.RESTORE )
      {
        restore();
      }
      else if( actionType == ICaptionActionListener.ActionType.MAXIMIZE )
      {
        if( getTop() == captionedPanel || getTop().isAncestorOf( (Component)captionedPanel ) )
        {
          // Maximizing the top means minimizing the bottom, and vice versa.
          ICaptionedPanel bottom = findCaptionedPanel( getBottom() );
          if( bottom != null )
          {
            toggleBottomCollapse( bottom );
          }
        }
        else
        {
          ICaptionedPanel top = findCaptionedPanel( getTop() );
          if( top != null )
          {
            toggleTopCollapse( top );
          }
        }
      }
      revalidate();
      repaint();
    }
  }
}
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov. // Jad home page: http://www.geocities.com/kpdus/jad.html // Decompiler options: braces fieldsfirst space lnc package com.xiaomi.account.openauth.utils; import android.content.Context; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.telephony.TelephonyManager; import android.text.TextUtils; import android.util.Log; import java.io.BufferedReader; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.Proxy; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.ParseException; import org.apache.http.StatusLine; import org.apache.http.client.HttpClient; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpConnectionParams; import org.apache.http.params.HttpProtocolParams; import org.apache.http.util.EntityUtils; import org.json.JSONException; import org.json.JSONObject; 
// Referenced classes of package com.xiaomi.account.openauth.utils: // a, b, c public class Network { public static final String CMWAP_GATEWAY = "10.0.0.172"; public static final String CMWAP_HEADER_HOST_KEY = "X-Online-Host"; public static final int CMWAP_PORT = 80; public static final int CONNECTION_TIMEOUT = 10000; public static final Pattern ContentTypePattern_Charset = Pattern.compile("(.*?charset\\s*=[^a-zA-Z0-9]*)([-a-zA-Z0-9]+)(.*)", 2); public static final Pattern ContentTypePattern_MimeType = Pattern.compile("([^\\s;]+)(.*)"); public static final Pattern ContentTypePattern_XmlEncoding = Pattern.compile("(\\<\\?xml\\s+.*?encoding\\s*=[^a-zA-Z0-9]*)([-a-zA-Z0-9]+)(.*)", 2); public static final String NETWORK_TYPE_3GNET = "3gnet"; public static final String NETWORK_TYPE_3GWAP = "3gwap"; public static final String NETWORK_TYPE_CHINATELECOM = "#777"; public static final String NETWORK_TYPE_WIFI = "wifi"; public static final int READ_TIMEOUT = 15000; public static final String RESPONSE_BODY = "RESPONSE_BODY"; public static final String RESPONSE_CODE = "RESPONSE_CODE"; public static final String USER_AGENT = "User-Agent"; public static final String UserAgent_PC_Chrome = "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.464.0 Safari/534.3"; public static final String UserAgent_PC_Chrome_6_0_464_0 = "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.464.0 Safari/534.3"; private static final String a = "com.xiaomi.common.Network"; private static final int b = 200; private static HostnameVerifier c = new a(); private static TrustManager d = new b(); public Network() { } public static void beginDownloadFile(String s, OutputStream outputstream, Context context, boolean flag, PostDownloadHandler postdownloadhandler) { (new c(s, outputstream, postdownloadhandler, flag, context)).execute(new Void[0]); } public static void beginDownloadFile(String s, OutputStream outputstream, 
PostDownloadHandler postdownloadhandler) { (new c(s, outputstream, postdownloadhandler)).execute(new Void[0]); } public static String doHttpPost(Context context, String s, List list) { return doHttpPost(context, s, list, null, null, null, null); } public static String doHttpPost(Context context, String s, List list, Map map, HttpHeaderInfo httpheaderinfo, String s1, String s2) { HttpURLConnection httpurlconnection; TrustManager atrustmanager[]; if (TextUtils.isEmpty(s)) { throw new IllegalArgumentException("url"); } HttpsURLConnection.setDefaultHostnameVerifier(c); httpurlconnection = getHttpUrlConnection(context, new URL(s)); httpurlconnection.setConnectTimeout(10000); httpurlconnection.setReadTimeout(15000); httpurlconnection.setRequestMethod("POST"); atrustmanager = new TrustManager[1]; atrustmanager[0] = d; SSLContext sslcontext1 = SSLContext.getInstance("SSL"); SSLContext sslcontext = sslcontext1; sslcontext.init(null, atrustmanager, new SecureRandom()); _L5: javax.net.ssl.SSLSocketFactory sslsocketfactory = sslcontext.getSocketFactory(); ((HttpsURLConnection)httpurlconnection).setSSLSocketFactory(sslsocketfactory); if (!TextUtils.isEmpty(s1)) { httpurlconnection.setRequestProperty("User-Agent", s1); } if (s2 != null) { httpurlconnection.setRequestProperty("Cookie", s2); } if (map == null) goto _L2; else goto _L1 _L1: Iterator iterator = map.keySet().iterator(); _L6: if (iterator.hasNext()) goto _L3; else goto _L2 _L2: KeyManagementException keymanagementexception1; int i; NoSuchAlgorithmException nosuchalgorithmexception1; String s3 = fromParamListToString(list); KeyManagementException keymanagementexception; String s9; NoSuchAlgorithmException nosuchalgorithmexception; if (s3 == null) { throw new IllegalArgumentException("nameValuePairs"); } httpurlconnection.setDoOutput(true); byte abyte0[] = s3.getBytes(); httpurlconnection.getOutputStream().write(abyte0, 0, abyte0.length); httpurlconnection.getOutputStream().flush(); 
httpurlconnection.getOutputStream().close(); i = httpurlconnection.getResponseCode(); Log.d("com.xiaomi.common.Network", (new StringBuilder("Http POST Response Code: ")).append(i).toString()); goto _L4 nosuchalgorithmexception; sslcontext = null; nosuchalgorithmexception1 = nosuchalgorithmexception; _L13: nosuchalgorithmexception1.printStackTrace(); goto _L5 keymanagementexception; sslcontext = null; keymanagementexception1 = keymanagementexception; _L12: keymanagementexception1.printStackTrace(); goto _L5 _L3: s9 = (String)iterator.next(); httpurlconnection.setRequestProperty(s9, (String)map.get(s9)); goto _L6 _L4: if (httpheaderinfo == null) goto _L8; else goto _L7 _L7: int j; httpheaderinfo.ResponseCode = i; if (httpheaderinfo.AllHeaders == null) { httpheaderinfo.AllHeaders = new HashMap(); } j = 0; _L10: String s7; String s8; s7 = httpurlconnection.getHeaderFieldKey(j); s8 = httpurlconnection.getHeaderField(j); if (s7 != null || s8 != null) goto _L9; else goto _L8 _L8: BufferedReader bufferedreader; String s4; StringBuffer stringbuffer; String s5; bufferedreader = new BufferedReader(new InputStreamReader(new DoneHandlerInputStream(httpurlconnection.getInputStream()))); s4 = bufferedreader.readLine(); stringbuffer = new StringBuffer(); s5 = System.getProperty("line.separator"); _L11: if (s4 == null) { String s6 = stringbuffer.toString(); bufferedreader.close(); return s6; } break MISSING_BLOCK_LABEL_497; _L9: httpheaderinfo.AllHeaders.put(s7, s8); j = 1 + (j + 1); goto _L10 stringbuffer.append(s4); stringbuffer.append(s5); s4 = bufferedreader.readLine(); goto _L11 keymanagementexception1; goto _L12 nosuchalgorithmexception1; goto _L13 } public static JSONObject doHttpPostWithResponseStatus(Context context, String s, List list, Map map, String s1, String s2) { JSONObject jsonobject; DefaultHttpClient defaulthttpclient; if (context == null) { throw new IllegalArgumentException("context"); } if (TextUtils.isEmpty(s)) { throw new IllegalArgumentException("url"); } 
jsonobject = new JSONObject(); BasicHttpParams basichttpparams = new BasicHttpParams(); HttpConnectionParams.setConnectionTimeout(basichttpparams, 10000); HttpConnectionParams.setSoTimeout(basichttpparams, 15000); if (!TextUtils.isEmpty(s1)) { HttpProtocolParams.setUserAgent(basichttpparams, s1); } if (!TextUtils.isEmpty(s2)) { basichttpparams.setParameter("Cookie", s2); } defaulthttpclient = new DefaultHttpClient(basichttpparams); if (!isCmwap(context)) goto _L2; else goto _L1 _L1: HttpPost httppost; URL url = new URL(s); String s3 = getCMWapUrl(url); String s4 = url.getHost(); httppost = new HttpPost(s3); httppost.addHeader("X-Online-Host", s4); _L6: if (list == null) { break MISSING_BLOCK_LABEL_190; } if (list.size() != 0) { httppost.setEntity(new UrlEncodedFormEntity(list, "UTF-8")); } HttpResponse httpresponse = defaulthttpclient.execute(httppost); String s5 = ""; int i; org.apache.http.HttpEntity httpentity; i = httpresponse.getStatusLine().getStatusCode(); httpentity = httpresponse.getEntity(); if (httpentity == null) { break MISSING_BLOCK_LABEL_241; } s5 = EntityUtils.toString(httpentity); jsonobject.put("RESPONSE_CODE", i); jsonobject.put("RESPONSE_BODY", s5); if (!jsonobject.has("RESPONSE_CODE") || !jsonobject.has("RESPONSE_BODY")) { jsonobject.remove("RESPONSE_CODE"); jsonobject.remove("RESPONSE_BODY"); } _L3: return jsonobject; _L2: httppost = new HttpPost(s); continue; /* Loop/switch isn't completed */ ParseException parseexception; parseexception; Log.e("com.xiaomi.common.Network", "doHttpPostWithResponseStatus", parseexception); if (!jsonobject.has("RESPONSE_CODE") || !jsonobject.has("RESPONSE_BODY")) { jsonobject.remove("RESPONSE_CODE"); jsonobject.remove("RESPONSE_BODY"); return jsonobject; } goto _L3 IOException ioexception; ioexception; Log.e("com.xiaomi.common.Network", "doHttpPostWithResponseStatus", ioexception); if (!jsonobject.has("RESPONSE_CODE") || !jsonobject.has("RESPONSE_BODY")) { jsonobject.remove("RESPONSE_CODE"); 
jsonobject.remove("RESPONSE_BODY"); return jsonobject; } goto _L3 JSONException jsonexception; jsonexception; Log.e("com.xiaomi.common.Network", "doHttpPostWithResponseStatus", jsonexception); if (jsonobject.has("RESPONSE_CODE") && jsonobject.has("RESPONSE_BODY")) goto _L3; else goto _L4 _L4: jsonobject.remove("RESPONSE_CODE"); jsonobject.remove("RESPONSE_BODY"); return jsonobject; Exception exception; exception; if (!jsonobject.has("RESPONSE_CODE") || !jsonobject.has("RESPONSE_BODY")) { jsonobject.remove("RESPONSE_CODE"); jsonobject.remove("RESPONSE_BODY"); } throw exception; if (true) goto _L6; else goto _L5 _L5: } public static boolean downloadFile(String s, OutputStream outputstream) { return downloadFile(s, outputstream, false, null); } public static boolean downloadFile(String s, OutputStream outputstream, Context context) { URL url = new URL(s); if (!isCmwap(context)) goto _L2; else goto _L1 _L1: HttpURLConnection httpurlconnection; int j; HttpURLConnection.setFollowRedirects(false); String s1 = getCMWapUrl(url); String s2 = url.getHost(); httpurlconnection = (HttpURLConnection)(new URL(s1)).openConnection(); httpurlconnection.setRequestProperty("X-Online-Host", s2); j = httpurlconnection.getResponseCode(); goto _L3 _L6: InputStream inputstream; byte abyte0[]; httpurlconnection.setConnectTimeout(10000); httpurlconnection.setReadTimeout(15000); httpurlconnection.connect(); inputstream = httpurlconnection.getInputStream(); abyte0 = new byte[1024]; _L5: int i = inputstream.read(abyte0); if (i <= 0) { String s3; URL url1; String s4; String s5; try { inputstream.close(); outputstream.close(); } catch (IOException ioexception) { Log.e("com.xiaomi.common.Network", (new StringBuilder("error while download file")).append(ioexception).toString()); return false; } return true; } goto _L4 _L7: s3 = httpurlconnection.getHeaderField("location"); if (TextUtils.isEmpty(s3)) { break; /* Loop/switch isn't completed */ } url1 = new URL(s3); s4 = getCMWapUrl(url1); s5 = 
url1.getHost(); httpurlconnection = (HttpURLConnection)(new URL(s4)).openConnection(); httpurlconnection.setRequestProperty("X-Online-Host", s5); j = httpurlconnection.getResponseCode(); continue; /* Loop/switch isn't completed */ _L2: httpurlconnection = (HttpURLConnection)url.openConnection(); HttpURLConnection.setFollowRedirects(true); break; /* Loop/switch isn't completed */ _L4: outputstream.write(abyte0, 0, i); goto _L5 _L3: if (j >= 300 && j < 400) goto _L7; else goto _L6 } public static boolean downloadFile(String s, OutputStream outputstream, boolean flag, Context context) { InputStream inputstream = null; InputStream inputstream2; HttpURLConnection httpurlconnection = (HttpURLConnection)(new URL(s)).openConnection(); httpurlconnection.setConnectTimeout(10000); httpurlconnection.setReadTimeout(15000); HttpURLConnection.setFollowRedirects(true); httpurlconnection.connect(); inputstream2 = httpurlconnection.getInputStream(); inputstream = inputstream2; byte abyte0[] = new byte[1024]; _L4: int i = inputstream.read(abyte0); if (i != -1) goto _L2; else goto _L1 _L1: boolean flag2 = false; _L6: Exception exception; IOException ioexception2; InputStream inputstream1; boolean flag1; boolean flag3; if (flag2) { flag3 = false; } else { flag3 = true; } if (inputstream != null) { try { inputstream.close(); } catch (IOException ioexception6) { } } if (outputstream != null) { try { outputstream.close(); } catch (IOException ioexception5) { return flag3; } } return flag3; _L2: outputstream.write(abyte0, 0, i); if (!flag || context == null) goto _L4; else goto _L3 _L3: flag1 = isWifi(context); if (flag1) goto _L4; else goto _L5 _L5: flag2 = true; goto _L6 ioexception2; inputstream1 = null; _L10: Log.e("com.xiaomi.common.Network", (new StringBuilder("error while download file")).append(ioexception2).toString()); if (inputstream1 != null) { try { inputstream1.close(); } catch (IOException ioexception4) { } } if (outputstream != null) { try { outputstream.close(); } catch 
(IOException ioexception3) { } } return false; exception; _L8: if (inputstream != null) { try { inputstream.close(); } catch (IOException ioexception1) { } } if (outputstream != null) { try { outputstream.close(); } catch (IOException ioexception) { } } throw exception; exception; inputstream = inputstream1; if (true) goto _L8; else goto _L7 _L7: ioexception2; inputstream1 = inputstream; if (true) goto _L10; else goto _L9 _L9: } public static String downloadXml(Context context, URL url) { return downloadXml(context, url, false, null, "UTF-8", ((String) (null))); } public static String downloadXml(Context context, URL url, String s, String s1, Map map, HttpHeaderInfo httpheaderinfo) { InputStream inputstream = null; StringBuilder stringbuilder; BufferedReader bufferedreader; inputstream = downloadXmlAsStream(context, url, s, s1, map, httpheaderinfo); stringbuilder = new StringBuilder(1024); bufferedreader = new BufferedReader(new InputStreamReader(inputstream, "UTF-8"), 1024); _L1: String s2 = bufferedreader.readLine(); if (s2 == null) { Exception exception; IOException ioexception; if (inputstream != null) { try { inputstream.close(); } catch (IOException ioexception1) { Log.e("com.xiaomi.common.Network", (new StringBuilder("Failed to close responseStream")).append(ioexception1.toString()).toString()); } } return stringbuilder.toString(); } stringbuilder.append(s2); stringbuilder.append("\r\n"); goto _L1 exception; if (inputstream != null) { try { inputstream.close(); } // Misplaced declaration of an exception variable catch (IOException ioexception) { Log.e("com.xiaomi.common.Network", (new StringBuilder("Failed to close responseStream")).append(ioexception.toString()).toString()); } } throw exception; } public static String downloadXml(Context context, URL url, boolean flag, String s, String s1, String s2) { InputStream inputstream = null; StringBuilder stringbuilder; BufferedReader bufferedreader; inputstream = downloadXmlAsStream(context, url, s, s2); 
stringbuilder = new StringBuilder(1024); bufferedreader = new BufferedReader(new InputStreamReader(inputstream, s1), 1024); _L1: String s3 = bufferedreader.readLine(); if (s3 == null) { Exception exception; IOException ioexception; if (inputstream != null) { try { inputstream.close(); } catch (IOException ioexception1) { Log.e("com.xiaomi.common.Network", (new StringBuilder("Failed to close responseStream")).append(ioexception1.toString()).toString()); } } return stringbuilder.toString(); } stringbuilder.append(s3); stringbuilder.append("\r\n"); goto _L1 exception; if (inputstream != null) { try { inputstream.close(); } // Misplaced declaration of an exception variable catch (IOException ioexception) { Log.e("com.xiaomi.common.Network", (new StringBuilder("Failed to close responseStream")).append(ioexception.toString()).toString()); } } throw exception; } public static InputStream downloadXmlAsStream(Context context, URL url) { return downloadXmlAsStream(context, url, null, null, null, null); } public static InputStream downloadXmlAsStream(Context context, URL url, String s, String s1) { return downloadXmlAsStream(context, url, s, s1, null, null); } public static InputStream downloadXmlAsStream(Context context, URL url, String s, String s1, Map map, HttpHeaderInfo httpheaderinfo) { HttpURLConnection httpurlconnection; TrustManager atrustmanager[]; if (context == null) { throw new IllegalArgumentException("context"); } if (url == null) { throw new IllegalArgumentException("url"); } HttpsURLConnection.setDefaultHostnameVerifier(c); HttpURLConnection.setFollowRedirects(true); httpurlconnection = getHttpUrlConnection(context, url); httpurlconnection.setConnectTimeout(10000); httpurlconnection.setReadTimeout(15000); atrustmanager = new TrustManager[1]; atrustmanager[0] = d; SSLContext sslcontext1 = SSLContext.getInstance("SSL"); SSLContext sslcontext = sslcontext1; sslcontext.init(null, atrustmanager, new SecureRandom()); _L7: javax.net.ssl.SSLSocketFactory 
sslsocketfactory = sslcontext.getSocketFactory(); ((HttpsURLConnection)httpurlconnection).setSSLSocketFactory(sslsocketfactory); if (!TextUtils.isEmpty(s)) { httpurlconnection.setRequestProperty("User-Agent", s); } if (s1 != null) { httpurlconnection.setRequestProperty("Cookie", s1); } if (map == null) goto _L2; else goto _L1 _L1: Iterator iterator = map.keySet().iterator(); _L8: if (iterator.hasNext()) goto _L3; else goto _L2 _L2: if (httpheaderinfo == null || !url.getProtocol().equals("http") && !url.getProtocol().equals("https")) goto _L5; else goto _L4 _L4: int i; httpheaderinfo.ResponseCode = httpurlconnection.getResponseCode(); if (httpheaderinfo.AllHeaders == null) { httpheaderinfo.AllHeaders = new HashMap(); } i = 0; _L9: String s2; String s3; s2 = httpurlconnection.getHeaderFieldKey(i); s3 = httpurlconnection.getHeaderField(i); if (s2 != null || s3 != null) goto _L6; else goto _L5 _L5: InputStream inputstream1 = httpurlconnection.getInputStream(); InputStream inputstream = inputstream1; _L10: return new DoneHandlerInputStream(inputstream); NoSuchAlgorithmException nosuchalgorithmexception; nosuchalgorithmexception; NoSuchAlgorithmException nosuchalgorithmexception1; sslcontext = null; nosuchalgorithmexception1 = nosuchalgorithmexception; _L12: nosuchalgorithmexception1.printStackTrace(); goto _L7 KeyManagementException keymanagementexception; keymanagementexception; KeyManagementException keymanagementexception1; sslcontext = null; keymanagementexception1 = keymanagementexception; _L11: keymanagementexception1.printStackTrace(); goto _L7 _L3: String s4 = (String)iterator.next(); httpurlconnection.setRequestProperty(s4, (String)map.get(s4)); goto _L8 _L6: if (!TextUtils.isEmpty(s2) && !TextUtils.isEmpty(s3)) { httpheaderinfo.AllHeaders.put(s2, s3); } i++; goto _L9 IOException ioexception; ioexception; inputstream = httpurlconnection.getErrorStream(); goto _L10 keymanagementexception1; goto _L11 nosuchalgorithmexception1; goto _L12 } public static 
InputStream downloadXmlAsStreamWithoutRedirect(URL url, String s, String s1) { HttpURLConnection.setFollowRedirects(false); HttpURLConnection httpurlconnection = (HttpURLConnection)url.openConnection(); httpurlconnection.setConnectTimeout(10000); httpurlconnection.setReadTimeout(15000); if (!TextUtils.isEmpty(s)) { httpurlconnection.setRequestProperty("User-Agent", s); } if (s1 != null) { httpurlconnection.setRequestProperty("Cookie", s1); } int i = httpurlconnection.getResponseCode(); InputStream inputstream; if (i < 300 || i >= 400) { inputstream = httpurlconnection.getInputStream(); } else { inputstream = null; } return new DoneHandlerInputStream(inputstream); } public static String fromParamListToString(List list) { StringBuffer stringbuffer; Iterator iterator; stringbuffer = new StringBuffer(); iterator = list.iterator(); _L2: if (!iterator.hasNext()) { NameValuePair namevaluepair; UnsupportedEncodingException unsupportedencodingexception; StringBuffer stringbuffer1; if (stringbuffer.length() > 0) { stringbuffer1 = stringbuffer.deleteCharAt(-1 + stringbuffer.length()); } else { stringbuffer1 = stringbuffer; } return stringbuffer1.toString(); } namevaluepair = (NameValuePair)iterator.next(); if (namevaluepair.getValue() == null) goto _L2; else goto _L1 _L1: stringbuffer.append(URLEncoder.encode(namevaluepair.getName(), "UTF-8")); stringbuffer.append("="); stringbuffer.append(URLEncoder.encode(namevaluepair.getValue(), "UTF-8")); stringbuffer.append("&"); goto _L2 unsupportedencodingexception; Log.d("com.xiaomi.common.Network", (new StringBuilder("Failed to convert from param list to string: ")).append(unsupportedencodingexception.toString()).toString()); Log.d("com.xiaomi.common.Network", (new StringBuilder("pair: ")).append(namevaluepair.toString()).toString()); return null; } public static String getActiveConnPoint(Context context) { if (isWIFIConnected(context)) { return "wifi"; } ConnectivityManager connectivitymanager = 
(ConnectivityManager)context.getSystemService("connectivity"); if (connectivitymanager == null) { return ""; } NetworkInfo networkinfo = connectivitymanager.getActiveNetworkInfo(); if (networkinfo == null) { return ""; } else { return networkinfo.getExtraInfo(); } } public static String getActiveNetworkName(Context context) { ConnectivityManager connectivitymanager = (ConnectivityManager)context.getSystemService("connectivity"); if (connectivitymanager == null) { return "null"; } NetworkInfo networkinfo = connectivitymanager.getActiveNetworkInfo(); if (networkinfo == null) { return "null"; } if (TextUtils.isEmpty(networkinfo.getSubtypeName())) { return networkinfo.getTypeName(); } else { Object aobj[] = new Object[2]; aobj[0] = networkinfo.getTypeName(); aobj[1] = networkinfo.getSubtypeName(); return String.format("%s-%s", aobj); } } public static int getActiveNetworkType(Context context) { ConnectivityManager connectivitymanager = (ConnectivityManager)context.getSystemService("connectivity"); if (connectivitymanager == null) { return -1; } NetworkInfo networkinfo = connectivitymanager.getActiveNetworkInfo(); if (networkinfo == null) { return -1; } else { return networkinfo.getType(); } } public static String getCMWapUrl(URL url) { StringBuilder stringbuilder = new StringBuilder(); stringbuilder.append(url.getProtocol()).append("://").append("10.0.0.172").append(url.getPath()); if (!TextUtils.isEmpty(url.getQuery())) { stringbuilder.append("?").append(url.getQuery()); } return stringbuilder.toString(); } public static HttpHeaderInfo getHttpHeaderInfo(String s, String s1, String s2) { URL url = new URL(s); if (!url.getProtocol().equals("http") && !url.getProtocol().equals("https")) { return null; } HttpURLConnection httpurlconnection; HttpURLConnection.setFollowRedirects(false); httpurlconnection = (HttpURLConnection)url.openConnection(); if (s.indexOf("wap") != -1) goto _L2; else goto _L1 _L1: httpurlconnection.setConnectTimeout(5000); 
httpurlconnection.setReadTimeout(5000); _L4: if (!TextUtils.isEmpty(s1)) { httpurlconnection.setRequestProperty("User-Agent", s1); } if (s2 == null) { break MISSING_BLOCK_LABEL_104; } httpurlconnection.setRequestProperty("Cookie", s2); HttpHeaderInfo httpheaderinfo; httpheaderinfo = new HttpHeaderInfo(); httpheaderinfo.ResponseCode = httpurlconnection.getResponseCode(); httpheaderinfo.UserAgent = s1; int i = 0; _L5: String s3; String s4; s3 = httpurlconnection.getHeaderFieldKey(i); s4 = httpurlconnection.getHeaderField(i); goto _L3 _L2: httpurlconnection.setConnectTimeout(15000); httpurlconnection.setReadTimeout(15000); goto _L4 malformedurlexception; Log.e("com.xiaomi.common.Network", "Failed to transform URL", malformedurlexception); _L6: return null; _L3: MalformedURLException malformedurlexception; if (s3 == null && s4 == null) { return httpheaderinfo; } if (s3 == null) { break MISSING_BLOCK_LABEL_220; } if (s3.equals("content-type")) { httpheaderinfo.ContentType = s4; } if (s3 == null) { break MISSING_BLOCK_LABEL_280; } if (s3.equals("location")) { URI uri = new URI(s4); if (!uri.isAbsolute()) { uri = (new URI(s)).resolve(uri); } httpheaderinfo.realUrl = uri.toString(); } i++; goto _L5 IOException ioexception; ioexception; Log.e("com.xiaomi.common.Network", "Failed to get mime type", ioexception); goto _L6 URISyntaxException urisyntaxexception; urisyntaxexception; Log.e("com.xiaomi.common.Network", "Failed to parse URI", urisyntaxexception); goto _L6 } public static InputStream getHttpPostAsStream(URL url, String s, Map map, String s1, String s2) { if (url == null) { throw new IllegalArgumentException("url"); } HttpURLConnection.setFollowRedirects(true); HttpURLConnection httpurlconnection = (HttpURLConnection)url.openConnection(); httpurlconnection.setConnectTimeout(5000); httpurlconnection.setReadTimeout(15000); httpurlconnection.setRequestMethod("POST"); httpurlconnection.setDoOutput(true); if (!TextUtils.isEmpty(s1)) { 
httpurlconnection.setRequestProperty("User-Agent", s1); } if (!TextUtils.isEmpty(s2)) { httpurlconnection.setRequestProperty("Cookie", s2); } httpurlconnection.getOutputStream().write(s.getBytes()); httpurlconnection.getOutputStream().flush(); httpurlconnection.getOutputStream().close(); map.put("ResponseCode", (new StringBuilder(String.valueOf(httpurlconnection.getResponseCode()))).toString()); int i = 0; do { String s3 = httpurlconnection.getHeaderFieldKey(i); String s4 = httpurlconnection.getHeaderField(i); if (s3 == null && s4 == null) { return httpurlconnection.getInputStream(); } map.put(s3, s4); i++; } while (true); } public static HttpURLConnection getHttpUrlConnection(Context context, URL url) { if (isCtwap(context)) { return (HttpURLConnection)url.openConnection(new Proxy(java.net.Proxy.Type.HTTP, new InetSocketAddress("10.0.0.200", 80))); } if (!isCmwap(context)) { return (HttpURLConnection)url.openConnection(); } else { String s = url.getHost(); HttpURLConnection httpurlconnection = (HttpURLConnection)(new URL(getCMWapUrl(url))).openConnection(); httpurlconnection.addRequestProperty("X-Online-Host", s); return httpurlconnection; } } public static boolean hasNetwork(Context context) { return getActiveNetworkType(context) >= 0; } public static boolean isCmwap(Context context) { ConnectivityManager connectivitymanager; NetworkInfo networkinfo; String s; if ("CN".equalsIgnoreCase(((TelephonyManager)context.getSystemService("phone")).getSimCountryIso())) { if ((connectivitymanager = (ConnectivityManager)context.getSystemService("connectivity")) != null && (networkinfo = connectivitymanager.getActiveNetworkInfo()) != null && (!TextUtils.isEmpty(s = networkinfo.getExtraInfo()) && s.length() >= 3 && !s.contains("ctwap"))) { return s.regionMatches(true, -3 + s.length(), "wap", 0, 3); } } return false; } public static boolean isCtwap(Context context) { if (!"CN".equalsIgnoreCase(((TelephonyManager)context.getSystemService("phone")).getSimCountryIso())) { return 
false; } ConnectivityManager connectivitymanager = (ConnectivityManager)context.getSystemService("connectivity"); if (connectivitymanager == null) { return false; } NetworkInfo networkinfo = connectivitymanager.getActiveNetworkInfo(); if (networkinfo == null) { return false; } String s = networkinfo.getExtraInfo(); if (TextUtils.isEmpty(s) || s.length() < 3) { return false; } return s.contains("ctwap"); } public static boolean isWIFIConnected(Context context) { ConnectivityManager connectivitymanager = (ConnectivityManager)context.getSystemService("connectivity"); if (connectivitymanager == null) { return false; } NetworkInfo networkinfo = connectivitymanager.getActiveNetworkInfo(); if (networkinfo == null) { return false; } return 1 == networkinfo.getType(); } public static boolean isWifi(Context context) { return getActiveNetworkType(context) == 1; } public static String tryDetectCharsetEncoding(URL url, String s) { DoneHandlerInputStream donehandlerinputstream; HttpURLConnection httpurlconnection; String s1; donehandlerinputstream = null; if (url == null) { throw new IllegalArgumentException("url"); } HttpURLConnection.setFollowRedirects(true); httpurlconnection = (HttpURLConnection)url.openConnection(); httpurlconnection.setConnectTimeout(5000); httpurlconnection.setReadTimeout(15000); if (!TextUtils.isEmpty(s)) { httpurlconnection.setRequestProperty("User-Agent", s); } s1 = httpurlconnection.getContentType(); if (TextUtils.isEmpty(s1)) goto _L2; else goto _L1 _L1: Matcher matcher = ContentTypePattern_Charset.matcher(s1); if (!matcher.matches() || matcher.groupCount() < 3) goto _L4; else goto _L3 _L3: String s2 = matcher.group(2); if (TextUtils.isEmpty(s2)) goto _L4; else goto _L5 _L5: Log.v("com.xiaomi.common.Network", (new StringBuilder("HTTP charset detected is: ")).append(s2).toString()); _L18: if (!TextUtils.isEmpty(s2)) goto _L7; else goto _L6 _L6: Matcher matcher1 = ContentTypePattern_MimeType.matcher(s1); if (!matcher1.matches() || matcher1.groupCount() 
< 2) goto _L7; else goto _L8 _L8: String s3 = matcher1.group(1); if (TextUtils.isEmpty(s3)) goto _L7; else goto _L9 _L9: String s4 = s3.toLowerCase(); if (!s4.startsWith("application/") || !s4.startsWith("application/xml") && !s4.endsWith("+xml")) goto _L7; else goto _L10 _L10: DoneHandlerInputStream donehandlerinputstream1 = new DoneHandlerInputStream(httpurlconnection.getInputStream()); BufferedReader bufferedreader = new BufferedReader(new InputStreamReader(donehandlerinputstream1)); _L14: String s5 = bufferedreader.readLine(); if (s5 != null) goto _L12; else goto _L11 _L11: if (donehandlerinputstream1 != null) { donehandlerinputstream1.close(); } _L7: return s2; _L12: String s6 = s5.trim(); if (s6.length() == 0) goto _L14; else goto _L13 _L13: Matcher matcher2 = ContentTypePattern_XmlEncoding.matcher(s6); if (!matcher2.matches() || matcher2.groupCount() < 3) goto _L11; else goto _L15 _L15: String s7 = matcher2.group(2); if (TextUtils.isEmpty(s7)) goto _L11; else goto _L16 _L16: Log.v("com.xiaomi.common.Network", (new StringBuilder("XML charset detected is: ")).append(s7).toString()); s2 = s7; goto _L11 Exception exception; exception; _L17: if (donehandlerinputstream != null) { donehandlerinputstream.close(); } throw exception; exception; donehandlerinputstream = donehandlerinputstream1; if (true) goto _L17; else goto _L4 _L4: s2 = null; if (true) goto _L18; else goto _L2 _L2: return null; } public static String uploadFile(String s, File file, String s1) { BufferedReader bufferedreader = null; if (file.exists()) goto _L2; else goto _L1 _L1: String s5 = null; _L8: return s5; _L2: String s2 = file.getName(); HttpURLConnection httpurlconnection; DataOutputStream dataoutputstream1; httpurlconnection = (HttpURLConnection)(new URL(s)).openConnection(); httpurlconnection.setReadTimeout(15000); httpurlconnection.setConnectTimeout(10000); httpurlconnection.setDoInput(true); httpurlconnection.setDoOutput(true); httpurlconnection.setUseCaches(false); 
httpurlconnection.setRequestMethod("POST"); httpurlconnection.setRequestProperty("Connection", "Keep-Alive"); httpurlconnection.setRequestProperty("Content-Type", "multipart/form-data;boundary=*****"); httpurlconnection.setFixedLengthStreamingMode(77 + s2.length() + (int)file.length() + s1.length()); dataoutputstream1 = new DataOutputStream(httpurlconnection.getOutputStream()); FileInputStream fileinputstream1; dataoutputstream1.writeBytes("--*****\r\n"); dataoutputstream1.writeBytes((new StringBuilder("Content-Disposition: form-data; name=\"")).append(s1).append("\";filename=\"").append(file.getName()).append("\"").append("\r\n").toString()); dataoutputstream1.writeBytes("\r\n"); fileinputstream1 = new FileInputStream(file); byte abyte0[] = new byte[1024]; _L9: int i = fileinputstream1.read(abyte0); if (i != -1) goto _L4; else goto _L3 _L3: StringBuffer stringbuffer; BufferedReader bufferedreader1; dataoutputstream1.writeBytes("\r\n"); dataoutputstream1.writeBytes("--"); dataoutputstream1.writeBytes("*****"); dataoutputstream1.writeBytes("--"); dataoutputstream1.writeBytes("\r\n"); dataoutputstream1.flush(); stringbuffer = new StringBuffer(); bufferedreader1 = new BufferedReader(new InputStreamReader(new DoneHandlerInputStream(httpurlconnection.getInputStream()))); _L10: String s3 = bufferedreader1.readLine(); if (s3 != null) goto _L6; else goto _L5 _L5: String s4 = stringbuffer.toString(); s5 = s4; if (fileinputstream1 != null) { try { fileinputstream1.close(); } catch (IOException ioexception1) { Log.e("com.xiaomi.common.Network", "error while closing strean", ioexception1); return s5; } } if (dataoutputstream1 == null) { continue; /* Loop/switch isn't completed */ } dataoutputstream1.close(); if (bufferedreader1 == null) goto _L8; else goto _L7 _L7: bufferedreader1.close(); return s5; _L4: dataoutputstream1.write(abyte0, 0, i); dataoutputstream1.flush(); goto _L9 Exception exception; exception; DataOutputStream dataoutputstream; FileInputStream fileinputstream; 
dataoutputstream = dataoutputstream1; fileinputstream = fileinputstream1; _L11: if (fileinputstream == null) { break MISSING_BLOCK_LABEL_425; } fileinputstream.close(); if (dataoutputstream == null) { break MISSING_BLOCK_LABEL_435; } dataoutputstream.close(); if (bufferedreader != null) { try { bufferedreader.close(); } catch (IOException ioexception) { Log.e("com.xiaomi.common.Network", "error while closing strean", ioexception); } } throw exception; _L6: stringbuffer.append(s3); goto _L10 exception; bufferedreader = bufferedreader1; dataoutputstream = dataoutputstream1; fileinputstream = fileinputstream1; goto _L11 exception; bufferedreader = null; dataoutputstream = null; fileinputstream = null; goto _L11 exception; dataoutputstream = dataoutputstream1; bufferedreader = null; fileinputstream = null; goto _L11 } private class HttpHeaderInfo { public Map AllHeaders; public String ContentType; public int ResponseCode; public String UserAgent; public String realUrl; public String toString() { Object aobj[] = new Object[2]; aobj[0] = Integer.valueOf(ResponseCode); aobj[1] = AllHeaders.toString(); return String.format("resCode = %1$d, headers = %2$s", aobj); } public HttpHeaderInfo() { } } private class DoneHandlerInputStream extends FilterInputStream { private boolean a; public int read(byte abyte0[], int i, int j) { if (!a) { int k = super.read(abyte0, i, j); if (k != -1) { return k; } } a = true; return -1; } public DoneHandlerInputStream(InputStream inputstream) { super(inputstream); } } }