signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class MultiLineStringSerializer { /** * Method that can be called to ask implementation to serialize values of type this serializer handles . * @ param value Value to serialize ; can not be null . * @ param jgen Generator used to output resulting Json content * @ param provider Provider that can be used to get serializers for serializing Objects value contains , if any . * @ throws java . io . IOException If serialization failed . */ @ Override public void writeShapeSpecificSerialization ( MultiLineString value , JsonGenerator jgen , SerializerProvider provider ) throws IOException { } }
jgen . writeFieldName ( "type" ) ; jgen . writeString ( "MultiLineString" ) ; jgen . writeArrayFieldStart ( "coordinates" ) ; // set beanproperty to null since we are not serializing a real property JsonSerializer < Object > ser = provider . findValueSerializer ( Double . class , null ) ; for ( int i = 0 ; i < value . getNumGeometries ( ) ; i ++ ) { LineString ml = ( LineString ) value . getGeometryN ( i ) ; jgen . writeStartArray ( ) ; for ( int j = 0 ; j < ml . getNumPoints ( ) ; j ++ ) { Point point = ml . getPointN ( j ) ; jgen . writeStartArray ( ) ; ser . serialize ( point . getX ( ) , jgen , provider ) ; ser . serialize ( point . getY ( ) , jgen , provider ) ; jgen . writeEndArray ( ) ; } jgen . writeEndArray ( ) ; } jgen . writeEndArray ( ) ;
public class RouteFiltersInner {

    /**
     * Gets the specified route filter.
     *
     * @param resourceGroupName the name of the resource group
     * @param routeFilterName the name of the route filter
     * @param expand expands referenced express route BGP peering resources
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<RouteFilterInner> getByResourceGroupAsync(String resourceGroupName, String routeFilterName, String expand, final ServiceCallback<RouteFilterInner> serviceCallback) {
        // Delegate to the ServiceResponse-based variant and adapt it to a ServiceFuture
        // that also notifies the supplied callback on success or failure.
        return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeFilterName, expand), serviceCallback);
    }
}
public class MapOperatorBase { @ Override protected List < OUT > executeOnCollections ( List < IN > inputData , RuntimeContext ctx , ExecutionConfig executionConfig ) throws Exception { } }
MapFunction < IN , OUT > function = this . userFunction . getUserCodeObject ( ) ; FunctionUtils . setFunctionRuntimeContext ( function , ctx ) ; FunctionUtils . openFunction ( function , this . parameters ) ; ArrayList < OUT > result = new ArrayList < OUT > ( inputData . size ( ) ) ; TypeSerializer < IN > inSerializer = getOperatorInfo ( ) . getInputType ( ) . createSerializer ( executionConfig ) ; TypeSerializer < OUT > outSerializer = getOperatorInfo ( ) . getOutputType ( ) . createSerializer ( executionConfig ) ; for ( IN element : inputData ) { IN inCopy = inSerializer . copy ( element ) ; OUT out = function . map ( inCopy ) ; result . add ( outSerializer . copy ( out ) ) ; } FunctionUtils . closeFunction ( function ) ; return result ;
public class XbaseInterpreter {

    /**
     * Polymorphic dispatch over the concrete {@link XExpression} subtype, forwarding
     * to the matching {@code _doEvaluate} overload.
     *
     * Don't call this directly — always call evaluate()/internalEvaluate().
     *
     * NOTE: the order of the instanceof checks is significant — more specific types
     * (e.g. XMemberFeatureCall, XFeatureCall) are tested before their supertype
     * (XAbstractFeatureCall); reordering would change which overload is chosen.
     *
     * @param expression the expression to evaluate
     * @param context    the evaluation context (variable bindings etc.)
     * @param indicator  cancellation indicator checked by the overloads
     * @return the evaluation result produced by the matching overload
     * @throws IllegalArgumentException if no overload handles the expression type
     */
    protected Object doEvaluate(XExpression expression, IEvaluationContext context, CancelIndicator indicator) {
        if (expression instanceof XAssignment) {
            return _doEvaluate((XAssignment) expression, context, indicator);
        } else if (expression instanceof XDoWhileExpression) {
            return _doEvaluate((XDoWhileExpression) expression, context, indicator);
        } else if (expression instanceof XMemberFeatureCall) {
            return _doEvaluate((XMemberFeatureCall) expression, context, indicator);
        } else if (expression instanceof XWhileExpression) {
            return _doEvaluate((XWhileExpression) expression, context, indicator);
        } else if (expression instanceof XFeatureCall) {
            return _doEvaluate((XFeatureCall) expression, context, indicator);
        } else if (expression instanceof XAbstractFeatureCall) {
            // Fallback for feature calls not matched by the specific cases above.
            return _doEvaluate((XAbstractFeatureCall) expression, context, indicator);
        } else if (expression instanceof XBlockExpression) {
            return _doEvaluate((XBlockExpression) expression, context, indicator);
        } else if (expression instanceof XSynchronizedExpression) {
            return _doEvaluate((XSynchronizedExpression) expression, context, indicator);
        } else if (expression instanceof XBooleanLiteral) {
            return _doEvaluate((XBooleanLiteral) expression, context, indicator);
        } else if (expression instanceof XCastedExpression) {
            return _doEvaluate((XCastedExpression) expression, context, indicator);
        } else if (expression instanceof XClosure) {
            return _doEvaluate((XClosure) expression, context, indicator);
        } else if (expression instanceof XConstructorCall) {
            return _doEvaluate((XConstructorCall) expression, context, indicator);
        } else if (expression instanceof XForLoopExpression) {
            return _doEvaluate((XForLoopExpression) expression, context, indicator);
        } else if (expression instanceof XBasicForLoopExpression) {
            return _doEvaluate((XBasicForLoopExpression) expression, context, indicator);
        } else if (expression instanceof XIfExpression) {
            return _doEvaluate((XIfExpression) expression, context, indicator);
        } else if (expression instanceof XInstanceOfExpression) {
            return _doEvaluate((XInstanceOfExpression) expression, context, indicator);
        } else if (expression instanceof XNullLiteral) {
            return _doEvaluate((XNullLiteral) expression, context, indicator);
        } else if (expression instanceof XNumberLiteral) {
            return _doEvaluate((XNumberLiteral) expression, context, indicator);
        } else if (expression instanceof XReturnExpression) {
            return _doEvaluate((XReturnExpression) expression, context, indicator);
        } else if (expression instanceof XStringLiteral) {
            return _doEvaluate((XStringLiteral) expression, context, indicator);
        } else if (expression instanceof XSwitchExpression) {
            return _doEvaluate((XSwitchExpression) expression, context, indicator);
        } else if (expression instanceof XThrowExpression) {
            return _doEvaluate((XThrowExpression) expression, context, indicator);
        } else if (expression instanceof XTryCatchFinallyExpression) {
            return _doEvaluate((XTryCatchFinallyExpression) expression, context, indicator);
        } else if (expression instanceof XTypeLiteral) {
            return _doEvaluate((XTypeLiteral) expression, context, indicator);
        } else if (expression instanceof XVariableDeclaration) {
            return _doEvaluate((XVariableDeclaration) expression, context, indicator);
        } else if (expression instanceof XListLiteral) {
            return _doEvaluate((XListLiteral) expression, context, indicator);
        } else if (expression instanceof XSetLiteral) {
            return _doEvaluate((XSetLiteral) expression, context, indicator);
        } else {
            // No overload matches the runtime type of the expression.
            throw new IllegalArgumentException("Unhandled parameter types: " + Arrays.<Object>asList(expression, context, indicator).toString());
        }
    }
}
public class TiffDocument { /** * Gets the Subifd count . * @ return the Subifd count */ public int getSubIfdCount ( ) { } }
int c = 0 ; if ( metadata != null && metadata . contains ( "SubIFDs" ) ) c = getMetadataList ( "SubIFDs" ) . size ( ) ; return c ;
public class MultipleTableFieldConverter { /** * Should I pass the alternate field ( or the main field ) ? * @ return index ( - 1 ) = next converter , 0 - n = List of converters */ public int getIndexOfConverterToPass ( boolean bSetData ) { } }
Converter field = this . getTargetField ( null ) ; if ( m_converterNext == field ) return - 1 ; // -1 is the code for the base field . int iIndex = 0 ; for ( ; ; iIndex ++ ) { // Is this one already on my list ? Converter converter = this . getConverterToPass ( iIndex ) ; if ( converter == null ) break ; // End of list if ( converter == field ) return iIndex ; // Found } if ( field == null ) return - 1 ; // Never this . addConverterToPass ( field ) ; // Add it , and return iIndex ; // Return the index to the new converter .
public class Handler { /** * Marks a CHANNELID + UUID as either a transaction or a query * @ param uuid ID to be marked * @ param isTransaction true for transaction , false for query * @ return whether or not the UUID was successfully marked */ private synchronized boolean markIsTransaction ( String channelId , String uuid , boolean isTransaction ) { } }
if ( this . isTransaction == null ) { return false ; } String key = getTxKey ( channelId , uuid ) ; this . isTransaction . put ( key , isTransaction ) ; return true ;
public class ParetoStochasticLaw {

    /**
     * Replies the x according to the value of the distribution function.
     *
     * @param u a value given by the uniform random variable generator {@code U(0, 1)}
     * @return {@code F}<sup>-1</sup>{@code (u)}
     * @throws MathException in case {@code F}<sup>-1</sup>{@code (u)} could not be computed
     */
    @Pure
    @Override
    public double inverseF(double u) throws MathException {
        // Inverse Pareto CDF computed as xmin / u^(1/k). NOTE(review): references
        // often write xmin / (1-u)^(1/k); since U and 1-U are identically distributed
        // the generated law is the same — confirm u is never exactly 0 upstream,
        // which would divide by zero here.
        return this.xmin / Math.pow(u, 1. / this.k);
    }
}
public class ObjectFactory {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link CoordinateReferenceSystemRefType}{@code >}.
     *
     * @param value Java instance representing the XML element's value
     * @return the new instance of {@link JAXBElement}{@code <}{@link CoordinateReferenceSystemRefType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "includesCRS")
    public JAXBElement<CoordinateReferenceSystemRefType> createIncludesCRS(CoordinateReferenceSystemRefType value) {
        // The third argument (scope) is null: the element is declared at global scope.
        return new JAXBElement<CoordinateReferenceSystemRefType>(_IncludesCRS_QNAME, CoordinateReferenceSystemRefType.class, null, value);
    }
}
public class FloatColumn {

    /**
     * Returns a new IntColumn containing a value for each value in this column,
     * truncating if necessary.
     *
     * A narrowing primitive conversion such as this one may lose information about
     * the overall magnitude of a numeric value and may also lose precision and range:
     * values too small become Integer.MIN_VALUE, values too large become
     * Integer.MAX_VALUE, and no run-time exception is ever raised.
     */
    @Override
    public IntColumn asIntColumn() {
        IntArrayList values = new IntArrayList();
        for (float d : data) {
            // NOTE(review): the class doc promises that a missing value maps to a
            // missing value in the result, but a plain (int) cast maps a NaN
            // missing-marker to 0 — verify against the column types' missing-value
            // constants.
            values.add((int) d);
        }
        // Release the backing array's unused capacity before handing it over.
        values.trim();
        return IntColumn.create(this.name(), values.elements());
    }
}
public class Statement { /** * Generates a statement that returns the value produced by the given expression . * < p > This does not validate that the return type is appropriate . It is our callers responsibility * to do that . */ public static Statement returnExpression ( final Expression expression ) { } }
// TODO ( lukes ) : it would be nice to do a checkType operation here to make sure that expression // is compatible with the return type of the method , but i don ' t know how to get that // information here ( reasonably ) . So it is the caller ' s responsibility . return new Statement ( ) { @ Override protected void doGen ( CodeBuilder adapter ) { expression . gen ( adapter ) ; adapter . returnValue ( ) ; } } ;
public class Manager { /** * 获取class加载信息 * @ return class加载信息 */ public static ClassLoadInfo classLoadManager ( ) { } }
ClassLoadingMXBean classLoadingMXBean = ManagementFactory . getClassLoadingMXBean ( ) ; int nowLoadedClassCount = classLoadingMXBean . getLoadedClassCount ( ) ; long totalLoadedClassCount = classLoadingMXBean . getTotalLoadedClassCount ( ) ; long unloadedClassCount = classLoadingMXBean . getUnloadedClassCount ( ) ; return new ClassLoadInfo ( nowLoadedClassCount , totalLoadedClassCount , unloadedClassCount ) ;
public class CRFBiasedClassifier {

    /**
     * The main method, which is essentially the same as in CRFClassifier.
     * See the class documentation.
     *
     * Flow: echo the invocation, load a classifier (explicit path, jar, or
     * default), apply any per-class bias weights from the {@code classBias}
     * flag, then classify the test file according to the output flags.
     */
    public static void main(String[] args) throws Exception {
        // Echo the invocation for reproducibility of logged runs.
        System.err.println("CRFBiasedClassifier invoked at " + new Date() + " with arguments:");
        for (String arg : args) {
            System.err.print(" " + arg);
        }
        System.err.println();

        Properties props = StringUtils.argsToProperties(args);
        CRFBiasedClassifier crf = new CRFBiasedClassifier(props);
        String testFile = crf.flags.testFile;
        String loadPath = crf.flags.loadClassifier;

        // Classifier source priority: explicit path > jar classifier > default.
        if (loadPath != null) {
            crf.loadClassifierNoExceptions(loadPath, props);
        } else if (crf.flags.loadJarClassifier != null) {
            crf.loadJarClassifier(crf.flags.loadJarClassifier, props);
        } else {
            crf.loadDefaultClassifier();
        }

        // classBias flag format: "classA:weightA,classB:weightB,...".
        if (crf.flags.classBias != null) {
            StringTokenizer biases = new java.util.StringTokenizer(crf.flags.classBias, ",");
            while (biases.hasMoreTokens()) {
                StringTokenizer bias = new java.util.StringTokenizer(biases.nextToken(), ":");
                String cname = bias.nextToken();
                double w = Double.parseDouble(bias.nextToken());
                crf.setBiasWeight(cname, w);
                System.err.println("Setting bias for class " + cname + " to " + w);
            }
        }

        if (testFile != null) {
            DocumentReaderAndWriter readerAndWriter = crf.makeReaderAndWriter();
            // Output mode priority: first-order probs > probs > k-best > plain answers.
            if (crf.flags.printFirstOrderProbs) {
                crf.printFirstOrderProbs(testFile, readerAndWriter);
            } else if (crf.flags.printProbs) {
                crf.printProbs(testFile, readerAndWriter);
            } else if (crf.flags.useKBest) {
                int k = crf.flags.kBest;
                crf.classifyAndWriteAnswersKBest(testFile, k, readerAndWriter);
            } else {
                crf.classifyAndWriteAnswers(testFile, readerAndWriter);
            }
        }
    }
}
public class Validators { /** * Creates a new { @ link HibernateValidatorConfiguration } with all the custom value extractors registered . */ public static HibernateValidatorConfiguration newConfiguration ( ) { } }
return BaseValidator . newConfiguration ( ) . constraintValidatorFactory ( new MutableValidatorFactory ( ) ) . parameterNameProvider ( new JerseyParameterNameProvider ( ) ) . addValueExtractor ( NonEmptyStringParamValueExtractor . DESCRIPTOR . getValueExtractor ( ) ) . addValueExtractor ( ParamValueExtractor . DESCRIPTOR . getValueExtractor ( ) ) ;
public class GeometryTools { /** * Calculates the center of mass for the < code > Atom < / code > s in the * AtomContainer for the 2D coordinates . * See comment for center ( IAtomContainer atomCon , Dimension areaDim , HashMap renderingCoordinates ) for details on coordinate sets * @ param ac AtomContainer for which the center of mass is calculated * @ return Null , if any of the atomcontainer { @ link IAtom } ' s masses are null * @ cdk . keyword center of mass */ public static Point2d get2DCentreOfMass ( IAtomContainer ac ) { } }
double xsum = 0.0 ; double ysum = 0.0 ; double totalmass = 0.0 ; Iterator < IAtom > atoms = ac . atoms ( ) . iterator ( ) ; while ( atoms . hasNext ( ) ) { IAtom a = ( IAtom ) atoms . next ( ) ; Double mass = a . getExactMass ( ) ; if ( mass == null ) return null ; totalmass += mass ; xsum += mass * a . getPoint2d ( ) . x ; ysum += mass * a . getPoint2d ( ) . y ; } return new Point2d ( xsum / totalmass , ysum / totalmass ) ;
public class LoggingFilter { /** * { @ inheritDoc } */ @ Override public void filter ( final ContainerRequestContext context ) throws IOException { } }
final long id = _id . incrementAndGet ( ) ; context . setProperty ( LOGGING_ID_PROPERTY , id ) ; final StringBuilder b = new StringBuilder ( ) ; printRequestLine ( b , "Server has received a request" , id , context . getMethod ( ) , context . getUriInfo ( ) . getRequestUri ( ) ) ; printPrefixedHeaders ( b , id , REQUEST_PREFIX , context . getHeaders ( ) ) ; if ( printEntity && context . hasEntity ( ) && isSupportPrintType ( context . getMediaType ( ) ) ) { context . setEntityStream ( logInboundEntity ( b , context . getEntityStream ( ) , MessageUtils . getCharset ( context . getMediaType ( ) ) ) ) ; } log ( b ) ;
public class IndexMetadataBuilder { /** * Add column . Parameters in columnMetadata will be null . * @ param columnName the column name * @ param colType the col type * @ return the index metadata builder */ @ TimerJ public IndexMetadataBuilder addColumn ( String columnName , ColumnType colType ) { } }
ColumnName colName = new ColumnName ( tableName , columnName ) ; ColumnMetadata colMetadata = new ColumnMetadata ( colName , null , colType ) ; columns . put ( colName , colMetadata ) ; return this ;
public class GCI { /** * B & # 8849 ; & # 8707 ; r . C ' & rarr ; { B & # 8849 ; & # 8707 ; r . A , A & # 8849 ; C ' } * @ param gcis * @ return */ boolean rule6 ( final IFactory factory , final Inclusion [ ] gcis ) { } }
boolean result = false ; if ( rhs instanceof Existential ) { Existential existential = ( Existential ) rhs ; final AbstractConcept cHat = existential . getConcept ( ) ; if ( ! ( cHat instanceof Concept ) ) { result = true ; Concept a = getA ( factory , cHat ) ; gcis [ 0 ] = new GCI ( lhs , new Existential ( existential . getRole ( ) , a ) ) ; gcis [ 1 ] = new GCI ( a , cHat ) ; } } return result ;
public class JKAbstractCacheManager { /** * ( non - Javadoc ) * @ see com . jk . util . cache . JKCacheManager # cache ( java . lang . Object , * java . lang . Object , java . lang . Class ) */ @ Override public < T > void cache ( final Object key , final Object object , Class < T > clas ) { } }
this . logger . debug ( "@cache v2 " ) ; if ( object == null && ! isAllowNullable ( ) ) { return ; } else { this . logger . debug ( "logging key :" , key , " with object : " , object , " with Class : " , clas ) ; getCachableMap ( clas ) . put ( key , object ) ; }
public class ReflectingConverter { /** * Helper method to do token replacement for strings . * @ param str * @ param context * @ return * @ throws Siren4JException */ private String handleTokenReplacement ( String str , EntityContext context ) throws Siren4JException { } }
String result = "" ; // First resolve parents result = ReflectionUtils . replaceFieldTokens ( context . getParentObject ( ) , str , context . getParentFieldInfo ( ) , true ) ; // Now resolve others result = ReflectionUtils . flattenReservedTokens ( ReflectionUtils . replaceFieldTokens ( context . getCurrentObject ( ) , result , context . getCurrentFieldInfo ( ) , false ) ) ; return result ;
public class StringUtils { /** * Removes all non alpha numerical characters from the passed text . First tries to convert diacritics to their * alpha numeric representation . * @ param text the text to convert * @ return the alpha numeric equivalent * @ since 10.6RC1 */ @ Unstable public static String toAlphaNumeric ( String text ) { } }
if ( isEmpty ( text ) ) { return text ; } return stripAccents ( text ) . replaceAll ( "[^a-zA-Z0-9]" , "" ) ;
public class SerializerFactory { /** * Returns a custom serializer the class * @ param cl the class of the object that needs to be serialized . * @ return a serializer object for the serialization . */ protected Deserializer getCustomDeserializer ( Class cl ) { } }
try { Class serClass = Class . forName ( cl . getName ( ) + "HessianDeserializer" , false , cl . getClassLoader ( ) ) ; Deserializer ser = ( Deserializer ) serClass . newInstance ( ) ; return ser ; } catch ( ClassNotFoundException e ) { log . log ( Level . FINEST , e . toString ( ) , e ) ; return null ; } catch ( Exception e ) { log . log ( Level . FINE , e . toString ( ) , e ) ; return null ; }
public class WalkerFactory { /** * Tell if the pattern can be ' walked ' with the iteration steps in natural * document order , without duplicates . * @ param analysis The general analysis of the pattern . * @ return true if the walk can be done in natural order . * @ throws javax . xml . transform . TransformerException */ static public boolean isNaturalDocOrder ( int analysis ) { } }
if ( canCrissCross ( analysis ) || isSet ( analysis , BIT_NAMESPACE ) || walksFilteredList ( analysis ) ) return false ; if ( walksInDocOrder ( analysis ) ) return true ; return false ;
public class QRDecomposition {

    /**
     * Least squares solution of A * X = B.
     *
     * @param aMatrix A Matrix with as many rows as A and any number of columns.
     * @return X that minimizes the two norm of Q * R * X - B.
     * @exception IllegalArgumentException Matrix row dimensions must agree.
     * @exception IllegalStateException Matrix is rank deficient.
     *            NOTE(review): the original doc said RuntimeException; the code
     *            throws the IllegalStateException subtype.
     */
    @Nonnull
    @ReturnsMutableCopy
    public Matrix solve(@Nonnull final Matrix aMatrix) {
        if (aMatrix.getRowDimension() != m_nRows)
            throw new IllegalArgumentException("Matrix row dimensions must agree.");
        if (!isFullRank())
            throw new IllegalStateException("Matrix is rank deficient.");

        // Copy right hand side
        final int nCols = aMatrix.getColumnDimension();
        final double[][] aArray = aMatrix.getArrayCopy();

        // Compute Y = transpose(Q) * B by applying the stored Householder
        // reflectors column by column.
        for (int k = 0; k < m_nCols; k++) {
            final double[] aQRk = m_aQR[k];
            for (int j = 0; j < nCols; j++) {
                double s = 0.0;
                for (int i = k; i < m_nRows; i++)
                    s += m_aQR[i][k] * aArray[i][j];
                s = -s / aQRk[k];
                for (int i = k; i < m_nRows; i++)
                    aArray[i][j] += s * m_aQR[i][k];
            }
        }

        // Solve R * X = Y by back substitution over the upper triangle.
        for (int k = m_nCols - 1; k >= 0; k--) {
            final double[] aArrayk = aArray[k];
            for (int j = 0; j < nCols; j++)
                aArrayk[j] /= m_aRdiag[k];
            for (int i = 0; i < k; i++) {
                final double[] aSrcRow = m_aQR[i];
                final double[] aDstRow = aArray[i];
                for (int j = 0; j < nCols; j++)
                    aDstRow[j] -= aArrayk[j] * aSrcRow[k];
            }
        }

        // Return only the top m_nCols rows — the solution block of X.
        return new Matrix(aArray, m_nCols, nCols).getMatrix(0, m_nCols - 1, 0, nCols - 1);
    }
}
public class CSSExpression {

    /**
     * Shortcut method to add a numeric value.
     *
     * @param nIndex The index where the member should be added. Must be &ge; 0.
     * @param dValue The value to be added.
     * @return this
     */
    @Nonnull
    public CSSExpression addNumber(@Nonnegative final int nIndex, final double dValue) {
        // Wrap the raw number in a simple term member and insert at the given index.
        return addMember(nIndex, new CSSExpressionMemberTermSimple(dValue));
    }
}
public class AstaTextFileReader { /** * Very basic implementation of an inner join between two result sets . * @ param leftRows left result set * @ param leftColumn left foreign key column * @ param rightTable right table name * @ param rightRows right result set * @ param rightColumn right primary key column * @ return joined result set */ private List < Row > join ( List < Row > leftRows , String leftColumn , String rightTable , List < Row > rightRows , String rightColumn ) { } }
List < Row > result = new LinkedList < Row > ( ) ; RowComparator leftComparator = new RowComparator ( new String [ ] { leftColumn } ) ; RowComparator rightComparator = new RowComparator ( new String [ ] { rightColumn } ) ; Collections . sort ( leftRows , leftComparator ) ; Collections . sort ( rightRows , rightComparator ) ; ListIterator < Row > rightIterator = rightRows . listIterator ( ) ; Row rightRow = rightIterator . hasNext ( ) ? rightIterator . next ( ) : null ; for ( Row leftRow : leftRows ) { Integer leftValue = leftRow . getInteger ( leftColumn ) ; boolean match = false ; while ( rightRow != null ) { Integer rightValue = rightRow . getInteger ( rightColumn ) ; int comparison = leftValue . compareTo ( rightValue ) ; if ( comparison == 0 ) { match = true ; break ; } if ( comparison < 0 ) { if ( rightIterator . hasPrevious ( ) ) { rightRow = rightIterator . previous ( ) ; } break ; } rightRow = rightIterator . next ( ) ; } if ( match && rightRow != null ) { Map < String , Object > newMap = new HashMap < String , Object > ( ( ( MapRow ) leftRow ) . getMap ( ) ) ; for ( Entry < String , Object > entry : ( ( MapRow ) rightRow ) . getMap ( ) . entrySet ( ) ) { String key = entry . getKey ( ) ; if ( newMap . containsKey ( key ) ) { key = rightTable + "." + key ; } newMap . put ( key , entry . getValue ( ) ) ; } result . add ( new MapRow ( newMap ) ) ; } } return result ;
public class HeapAlphaSketch { /** * restricted methods */ @ Override int getCurrentPreambleLongs ( final boolean compact ) { } }
if ( ! compact ) { return Family . ALPHA . getMinPreLongs ( ) ; } return computeCompactPreLongs ( thetaLong_ , empty_ , curCount_ ) ;
public class VariableListConverter { /** * Expects a query parameter of multiple variable expressions formatted as KEY _ OPERATOR _ VALUE , e . g . aVariable _ eq _ aValue . * Multiple values are expected to be comma - separated . */ @ Override public List < VariableQueryParameterDto > convertQueryParameterToType ( String value ) { } }
String [ ] expressions = value . split ( EXPRESSION_DELIMITER ) ; List < VariableQueryParameterDto > queryVariables = new ArrayList < VariableQueryParameterDto > ( ) ; for ( String expression : expressions ) { String [ ] valueTriple = expression . split ( ATTRIBUTE_DELIMITER ) ; if ( valueTriple . length != 3 ) { throw new InvalidRequestException ( Status . BAD_REQUEST , "variable query parameter has to have format KEY_OPERATOR_VALUE." ) ; } VariableQueryParameterDto queryVariable = new VariableQueryParameterDto ( ) ; queryVariable . setName ( valueTriple [ 0 ] ) ; queryVariable . setOperator ( valueTriple [ 1 ] ) ; queryVariable . setValue ( valueTriple [ 2 ] ) ; queryVariables . add ( queryVariable ) ; } return queryVariables ;
public class JKAbstractContext {

    /**
     * Stores a value under the given key for the current thread.
     *
     * @param key the attribute name
     * @param value the attribute value
     * @see com.jk.context.JKContext#setAttribute(java.lang.String, java.lang.Object)
     */
    @Override
    public void setAttribute(final String key, final Object value) {
        logger.debug("Attribute ({}) is set to ({})", key, value);
        // Attributes are backed by a thread-local store, not a shared map.
        JKThreadLocal.setValue(key, value);
    }
}
public class FieldParser {

    /**
     * Checks if the given bytes end with the delimiter at the given end position.
     *
     * @param bytes  The byte array that holds the value.
     * @param endPos The index of the byte array where the check for the delimiter ends.
     * @param delim  The delimiter to check for.
     * @return true if the delimiter ends at the given end position, false otherwise.
     */
    public static final boolean endsWithDelimiter(byte[] bytes, int endPos, byte[] delim) {
        // Index where a delimiter ending at endPos would have to begin.
        int start = endPos - delim.length + 1;
        if (start < 0) {
            // Not enough bytes before endPos to hold the whole delimiter.
            return false;
        }
        // Compare from the last byte backwards; any mismatch rejects.
        for (int i = delim.length - 1; i >= 0; i--) {
            if (bytes[start + i] != delim[i]) {
                return false;
            }
        }
        return true;
    }
}
public class GenericWordSpace {

    /**
     * Updates the semantic vectors based on the words in the document, using a
     * sliding window of {@code windowSize} words before and after each focus word.
     *
     * @param document {@inheritDoc}
     * @throws IllegalStateException if the vector values of this instance have
     *         been transformed using {@link #processSpace(Transform)}
     * @throws IOException if reading the document fails
     */
    public void processDocument(BufferedReader document) throws IOException {
        if (wordToTransformedVector != null) {
            throw new IllegalStateException("Cannot add new documents to a " + "GenericWordSpace whose vectors have been transformed");
        }

        // Sliding context windows on either side of the focus word.
        Queue<String> prevWords = new ArrayDeque<String>(windowSize);
        Queue<String> nextWords = new ArrayDeque<String>(windowSize);
        Iterator<String> documentTokens = IteratorFactory.tokenizeOrdered(document);
        String focusWord = null;

        // prefetch the first windowSize words
        for (int i = 0; i < windowSize && documentTokens.hasNext(); ++i)
            nextWords.offer(documentTokens.next());

        while (!nextWords.isEmpty()) {
            focusWord = nextWords.remove();

            // shift over the window to the next word
            if (documentTokens.hasNext()) {
                String windowEdge = documentTokens.next();
                nextWords.offer(windowEdge);
            }

            // If we are filtering the semantic vectors, check whether this word
            // should have its semantics calculated. In addition, if there is a
            // filter and it would have excluded the word, do not keep its
            // semantics around.
            boolean calculateSemantics = (semanticFilter.isEmpty() || semanticFilter.contains(focusWord)) && !focusWord.equals(IteratorFactory.EMPTY_TOKEN);

            if (calculateSemantics) {
                SparseIntegerVector focusSemantics = getSemanticVector(focusWord);

                // Keep track of the relative position of the focus word in case
                // word ordering is being used. Positions before the focus word
                // are negative; the first word is furthest.
                int position = -prevWords.size();
                for (String word : prevWords) {
                    // Skip the addition of any words that are excluded from the
                    // filter set. Note that by doing the exclusion here, we
                    // ensure that the token stream maintains its existing
                    // ordering, which is necessary when word order is taken
                    // into account.
                    if (word.equals(IteratorFactory.EMPTY_TOKEN)) {
                        position++;
                        continue;
                    }
                    int dimension = basisMapping.getDimension(new Duple<String, Integer>(word, position));
                    // The vector may be shared across threads processing documents.
                    synchronized (focusSemantics) {
                        focusSemantics.add(dimension, 1);
                    }
                    position++;
                }

                // Repeat for the words in the forward window (positions 1..windowSize).
                position = 1;
                for (String word : nextWords) {
                    // Skip the addition of any words that are excluded from the
                    // filter set, preserving the token stream ordering as above.
                    if (word.equals(IteratorFactory.EMPTY_TOKEN)) {
                        ++position;
                        continue;
                    }
                    int dimension = basisMapping.getDimension(new Duple<String, Integer>(word, position));
                    synchronized (focusSemantics) {
                        focusSemantics.add(dimension, 1);
                    }
                    position++;
                }
            }

            // Last, put this focus word in the prev words and shift off the
            // front of the previous word window if it now contains more words
            // than the maximum window size.
            prevWords.offer(focusWord);
            if (prevWords.size() > windowSize) {
                prevWords.remove();
            }
        }

        document.close();
    }
}
public class MtasDataCollector { /** * Sets the with total . * @ throws IOException Signals that an I / O exception has occurred . */ public void setWithTotal ( ) throws IOException { } }
if ( collectorType . equals ( DataCollector . COLLECTOR_TYPE_LIST ) ) { if ( segmentName != null ) { throw new IOException ( "can't get total with segmentRegistration" ) ; } else { withTotal = true ; } } else { throw new IOException ( "can't get total for dataCollector of type " + collectorType ) ; }
public class Matrix4x3d {

    /**
     * Apply a mirror/reflection transformation to this matrix that reflects about
     * the given plane specified via the equation <code>x*a + y*b + z*c + d = 0</code>.
     * The vector <code>(a, b, c)</code> must be a unit vector.
     *
     * If <code>M</code> is <code>this</code> matrix and <code>R</code> the reflection
     * matrix, then the new matrix will be <code>M * R</code>. So when transforming a
     * vector <code>v</code> with the new matrix by using <code>M * R * v</code>,
     * the reflection will be applied first!
     *
     * Reference: <a href="https://msdn.microsoft.com/en-us/library/windows/desktop/bb281733(v=vs.85).aspx">msdn.microsoft.com</a>
     *
     * @param a the x factor in the plane equation
     * @param b the y factor in the plane equation
     * @param c the z factor in the plane equation
     * @param d the constant in the plane equation
     * @return this
     */
    public Matrix4x3d reflect(double a, double b, double c, double d) {
        // Delegate to the destination-taking overload, writing into this matrix.
        return reflect(a, b, c, d, this);
    }
}
public class AllocatedEvaluatorBridge { /** * Bridge function for REEF . NET to submit context configuration for the allocated evaluator . * @ param evaluatorConfigurationString the evaluator configuration from . NET . * @ param contextConfigurationString the context configuration from . NET . */ public void submitContextString ( final String evaluatorConfigurationString , final String contextConfigurationString ) { } }
if ( evaluatorConfigurationString . isEmpty ( ) ) { throw new RuntimeException ( "empty evaluatorConfigurationString provided." ) ; } if ( contextConfigurationString . isEmpty ( ) ) { throw new RuntimeException ( "empty contextConfigurationString provided." ) ; } // When submit over the bridge , we would keep the contextConfigurationString as serialized strings . // public void submitContext ( final String contextConfiguration ) // is not exposed in the interface . Therefore cast is necessary . ( ( AllocatedEvaluatorImpl ) jallocatedEvaluator ) . submitContext ( evaluatorConfigurationString , contextConfigurationString ) ;
public class HTTP { /** * Opens the URL connection , and if a proxy is provided , uses the proxy to establish the connection * @ param url - the url to connect to * @ return HttpURLConnection : our established http connection * @ throws IOException : if the connection can ' t be established , an IOException is thrown */ private HttpURLConnection getConnection ( URL url ) throws IOException { } }
// Open the connection through an HTTP proxy when one is configured;
// otherwise connect directly (Proxy.NO_PROXY).
final Proxy proxy;
if (Property.isProxySet()) {
    final SocketAddress proxyAddress =
            new InetSocketAddress(Property.getProxyHost(), Property.getProxyPort());
    proxy = new Proxy(Proxy.Type.HTTP, proxyAddress);
} else {
    proxy = Proxy.NO_PROXY;
}
return (HttpURLConnection) url.openConnection(proxy);
public class CompileEvent { /** * Get the source file information associated with this event . The source * file information includes the name of the associated template and * potential line number . * @ return The source file information associated with the event */ public String getSourceInfoMessage ( ) { } }
// Compose "<template name>:<line>", omitting whichever part is unavailable:
// no unit and no info -> "", info only -> line number, unit only -> name.
if (mUnit == null) {
    return (mInfo == null) ? "" : String.valueOf(mInfo.getLine());
}
if (mInfo == null) {
    return mUnit.getName();
}
return mUnit.getName() + ':' + mInfo.getLine();
public class Crc32Caucho { /** * Calculates CRC from a string . */ public static int generate ( String value ) { } }
// Fold every character of the string into the running CRC, starting from 0.
int crc = 0;
for (int i = 0, len = value.length(); i < len; i++) {
    crc = next(crc, value.charAt(i));
}
return crc;
public class WikibaseDataEditor { /** * Creates a new item document with the summary message as provided . * The item document that is given as a parameter must use a local item id , * such as { @ link ItemIdValue # NULL } , and its revision id must be 0 . The * newly created document is returned . It will contain the new id . Note that * the site IRI used in this ID is not part of the API response ; the site * IRI given when constructing this object is used in this place . * Statements in the given data must have empty statement IDs . * @ param itemDocument * the document that contains the data to be written * @ param summary * additional summary message for the edit , or null to omit this * @ return newly created item document , or null if there was an error * @ throws IOException * if there was an IO problem , such as missing network * connection * @ throws MediaWikiApiErrorException */ public ItemDocument createItemDocument ( ItemDocument itemDocument , String summary ) throws IOException , MediaWikiApiErrorException { } }
// Serialize the item document to JSON and create it through wbeditentity:
// null id/site/title plus "item" means "create a new item"; baseRevisionId 0
// matches the required fresh document, and editAsBot sets the bot flag.
String data = JsonSerializer . getJsonString ( itemDocument ) ; return ( ItemDocument ) this . wbEditingAction . wbEditEntity ( null , null , null , "item" , data , false , this . editAsBot , 0 , summary ) ;
public class ScanIterator { /** * Sequentially iterate over keys in the keyspace . This method uses { @ code SCAN } to perform an iterative scan . * @ param commands the commands interface , must not be { @ literal null } . * @ param < K > Key type . * @ param < V > Value type . * @ return a new { @ link ScanIterator } . */ public static < K , V > ScanIterator < K > scan ( RedisKeyCommands < K , V > commands ) { } }
// Start the SCAN with no ScanArgs, i.e. server-side defaults.
return scan ( commands , Optional . empty ( ) ) ;
public class XBELValidator { /** * { @ inheritDoc } */ @ Override public List < SAXParseException > validateWithErrors ( final String s ) throws SAXException , IOException { } }
// Validate the XML string using a validator whose error handler collects
// (rather than throws) parse errors, then return the accumulated exceptions.
final Validator errorValidator = createNewErrorValidator();
final Source xmlSource = new StreamSource(new StringReader(s));
errorValidator.validate(xmlSource, null);
// The handler installed by createNewErrorValidator records every
// SAXParseException it receives in its 'exceptions' list.
return ((Handler) errorValidator.getErrorHandler()).exceptions;
public class XPath { /** * Evaluates this { @ code XPath } expression on the object supplied , producing as result a * unique object of the type { @ code T } specified . * @ param object * the object to evaluate this expression on * @ param resultClass * the { @ code Class } object for the result object * @ param < T > * the type of result * @ return on success , the unique object of the requested type resulting from the evaluation , * or null if evaluation produced no results ; on failure , null is returned if this * { @ code XPath } expression is lenient * @ throws IllegalArgumentException * if this { @ code XPath } expression is not lenient and evaluation fails for the * object supplied */ @ Nullable public final < T > T evalUnique ( final Object object , final Class < T > resultClass ) throws IllegalArgumentException { } }
// Evaluate the XPath on the object and reduce the result to a unique value
// of the requested type. Lenient expressions swallow evaluation failures
// and return null; strict ones rethrow as IllegalArgumentException with
// the expression, input and expected type in the message.
Preconditions . checkNotNull ( object ) ; Preconditions . checkNotNull ( resultClass ) ; try { return toUnique ( doEval ( object ) , resultClass ) ; } catch ( final Exception ex ) { if ( isLenient ( ) ) { return null ; } throw new IllegalArgumentException ( "Evaluation of XPath failed: " + ex . getMessage ( ) + "\nXPath is: " + this . support . string + "\nInput is: " + object + "\nExpected result is: " + resultClass . getSimpleName ( ) , ex ) ; }
public class PathUtil { /** * Test if the target path is an archive . * @ param path path to the file . * @ return true if the path points to a zip file - false otherwise . * @ throws IOException */ public static final boolean isArchive ( Path path ) throws IOException { } }
// A path counts as an archive when it is an existing regular file that the
// zip reader can open; a ZipException signals "not a zip". Non-existent
// paths and directories are never archives.
if (!Files.exists(path) || !Files.isRegularFile(path)) {
    return false;
}
try (ZipFile zip = new ZipFile(path.toFile())) {
    return true;
} catch (ZipException e) {
    return false;
}
public class CQLSSTableWriter { /** * Adds a new row to the writer given already serialized values . * This is equivalent to the other rawAddRow methods , but takes a map whose * keys are the names of the columns to add instead of taking a list of the * values in the order of the insert statement used during construction of * this write . * @ param values a map of colum name to column values representing the new * row to add . Note that if a column is not part of the map , it ' s value will * be { @ code null } . If the map contains keys that does not correspond to one * of the column of the insert statement used when creating this writer , the * the corresponding value is ignored . * @ return this writer . */ public CQLSSTableWriter rawAddRow ( Map < String , ByteBuffer > values ) throws InvalidRequestException , IOException { } }
// Translate the name->value map into the positional value list expected by
// the insert statement: columns absent from the map become null, keys that
// match no bound column are ignored.
final int boundCount = Math.min(values.size(), boundNames.size());
final List<ByteBuffer> positionalValues = new ArrayList<>(boundCount);
for (int i = 0; i < boundCount; i++) {
    final ColumnSpecification column = boundNames.get(i);
    positionalValues.add(values.get(column.name.toString()));
}
return rawAddRow(positionalValues);
public class Matrix { /** * Frobenius norm * @ return sqrt of sum of squares of all elements . */ public double normF ( ) { } }
// Accumulate sqrt(sum of squares) one element at a time via hypot,
// which avoids intermediate overflow/underflow of the squares.
double norm = 0;
for (int row = 0; row < m; row++) {
    for (int col = 0; col < n; col++) {
        norm = Maths.hypot(norm, A[row][col]);
    }
}
return norm;
public class ParticipantListener { /** * Sets the active state . * @ param active The new active state . When set to true , all event subscriptions are activated * and a participant add event is fired globally . When set to false , all event * subscriptions are inactivated and a participant remove event is fired globally . */ public void setActive ( boolean active ) { } }
// Toggle all three subscriptions first, then announce the change globally:
// the "add" event when activating, the "remove" event when deactivating.
// A refresh is only performed when becoming active.
refreshListener . setActive ( active ) ; addListener . setActive ( active ) ; removeListener . setActive ( active ) ; eventManager . fireRemoteEvent ( active ? addListener . eventName : removeListener . eventName , self ) ; if ( active ) { refresh ( ) ; }
public class SesClient { /** * Send email . * Simple to send email , all optional parameters use system default value . * @ param from The sender , which is required * @ param displayName The display name of sender , which can be custom by the users themselves * @ param toAddr The receive , which is required * @ param subject The title of the email , which is required * @ param body The content of the email , which is required * @ param attachmentFiles The array of attachment file . If you need send attachment then set it , it ' s optional * parameters * @ see com . baidubce . services . ses . SesClient # sendEmail ( com . baidubce . services . ses . model . SendEmailRequest request ) */ public SendEmailResponse sendEmail ( String from , String displayName , String [ ] toAddr , String subject , String body , File ... attachmentFiles ) { } }
// Build the request with system defaults (empty cc/bcc arrays, numeric
// defaults 1/1), then layer on the display name and any attachments.
// NOTE(review): 'from' is passed three times to buildSendEmailRequest —
// presumably also serving as reply-to/return-path defaults; confirm
// against buildSendEmailRequest's parameter list.
SendEmailRequest request = buildSendEmailRequest ( from , from , from , toAddr , new String [ ] { "" } , new String [ ] { "" } , subject , body , 1 , 1 ) ; request = fillDisplayName ( request , displayName ) ; request = fillAttachment ( request , attachmentFiles ) ; return sendEmail ( request ) ;
public class MessageProcessorMatching { /** * Method evaluateDiscriminator * Used to determine whether a supplied fully qualified discriminator matches * a supplied wildcarded discriminator expression . * @ param fullTopic * @ param wildcardTopic * @ param discriminatorTree * @ return * @ throws SIDiscriminatorSyntaxException */ public boolean evaluateDiscriminator ( String fullTopic , String wildcardTopic , Selector discriminatorTree ) throws SIDiscriminatorSyntaxException { } }
// Decide whether fullTopic matches the (possibly wildcarded) wildcardTopic.
// If no pre-parsed discriminator tree is supplied, one is parsed from the
// wildcard expression. Evaluation uses a dummy EvalCache since no
// MatchSpace search is in progress. A BadMessageFormatMatchingException is
// FFDC-logged, traced, and rethrown as an SIErrorException carrying the
// internal-messaging-error NLS message.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "evaluateDiscriminator" , new Object [ ] { fullTopic , wildcardTopic , discriminatorTree } ) ; Object result = null ; boolean discriminatorMatches = false ; // Use the dummy evaluation cache , we don ' t need one here as we ' re not // searching the MatchSpace EvalCache cache = EvalCache . DUMMY ; try { Evaluator evaluator = Matching . getEvaluator ( ) ; MatchSpaceKey msk = new DiscriminatorMatchSpaceKey ( fullTopic ) ; if ( discriminatorTree == null ) discriminatorTree = parseDiscriminator ( wildcardTopic ) ; // Evaluate message against discriminator tree result = evaluator . eval ( discriminatorTree , msk , cache , null , false ) ; if ( result != null && ( ( Boolean ) result ) . booleanValue ( ) ) { discriminatorMatches = true ; } } catch ( BadMessageFormatMatchingException qex ) { FFDCFilter . processException ( qex , "com.ibm.ws.sib.processor.matching.MessageProcessorMatching.evaluateDiscriminator" , "1:2623:1.117.1.11" , this ) ; SibTr . exception ( tc , qex ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "evaluateDiscriminator" , "SICoreException" ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.matching.MessageProcessorMatching" , "1:2633:1.117.1.11" , qex } ) ; // For now , I ' ll throw a core exception , but in due course this ( or something with a // better name ) will be externalised in the Core API . throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.matching.MessageProcessorMatching" , "1:2643:1.117.1.11" , qex } , null ) , qex ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "evaluateDiscriminator" , Boolean . valueOf ( discriminatorMatches ) ) ; return discriminatorMatches ;
public class SourceColumnFinder { /** * Finds all source jobs / components for a particular job / component . This * method uses { @ link Object } as types because input and output can be quite * polymorphic . Typically { @ link InputColumnSinkJob } , * { @ link InputColumnSourceJob } , { @ link HasComponentRequirement } and * { @ link OutcomeSourceJob } implementations are used . * @ param job * typically some { @ link InputColumnSinkJob } * @ return a list of jobs / components that are a source of this job . */ public Set < Object > findAllSourceJobs ( Object job ) { } }
// Collect all transitive source jobs into a fresh set via the
// accumulator-taking recursive overload.
final Set<Object> sources = new HashSet<Object>();
findAllSourceJobs(job, sources);
return sources;
public class LogCursorImpl { /** * Returns a boolean flag to indicate if this LogCursorImpl has further objects to * return . * @ return boolean Flag indicating if this LogCursorImpl has further objects to * return . */ public boolean hasNext ( ) { } }
if (tc.isEntryEnabled()) Tr.entry(tc, "hasNext", this);

// There is a next object when the cursor is non-empty and either the
// single object is still pending or one of the two iterators has
// elements remaining.
boolean hasNext = false;
if ((!_empty)
        && ((_singleObject != null)
                || ((_iterator1 != null) && (_iterator1.hasNext()))
                || ((_iterator2 != null) && (_iterator2.hasNext())))) {
    hasNext = true;
}

// Boolean.valueOf reuses the cached instances; the Boolean constructor
// used previously is deprecated and allocates on every call.
if (tc.isEntryEnabled()) Tr.exit(tc, "hasNext", Boolean.valueOf(hasNext));
return hasNext;
public class StreamUtils { /** * Helper method for { @ link # tar ( FileSystem , FileSystem , Path , Path ) } that adds a directory entry to a given * { @ link TarArchiveOutputStream } . */ private static void dirToTarArchiveOutputStream ( Path destDir , TarArchiveOutputStream tarArchiveOutputStream ) throws IOException { } }
// Write a directory entry (name normalized by formatPathToDir) stamped
// with the current time, then close it immediately — directory entries
// carry no payload.
TarArchiveEntry tarArchiveEntry = new TarArchiveEntry ( formatPathToDir ( destDir ) ) ; tarArchiveEntry . setModTime ( System . currentTimeMillis ( ) ) ; tarArchiveOutputStream . putArchiveEntry ( tarArchiveEntry ) ; tarArchiveOutputStream . closeArchiveEntry ( ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractCoverageType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractCoverageType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "_Coverage" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_Feature" ) public JAXBElement < AbstractCoverageType > create_Coverage ( AbstractCoverageType value ) { } }
// Generated JAXB factory: wrap the value in a JAXBElement for the
// gml:_Coverage substitution group (no scope class).
return new JAXBElement < AbstractCoverageType > ( __Coverage_QNAME , AbstractCoverageType . class , null , value ) ;
public class ThreadHelper { /** * Sleep the current thread for a certain amount of time * @ param nSeconds * The seconds to sleep . Must be & ge ; 0. * @ return { @ link ESuccess # SUCCESS } if sleeping was not interrupted , * { @ link ESuccess # FAILURE } if sleeping was interrupted */ @ Nonnull public static ESuccess sleepSeconds ( @ Nonnegative final long nSeconds ) { } }
// Validate the argument is >= 0, then delegate to the millisecond-based sleep.
ValueEnforcer . isGE0 ( nSeconds , "Seconds" ) ; return sleep ( nSeconds * CGlobal . MILLISECONDS_PER_SECOND ) ;
public class LocationApi { /** * Get character online ( asynchronously ) Checks if the character is * currently online - - - This route is cached for up to 60 seconds SSO Scope : * esi - location . read _ online . v1 * @ param characterId * An EVE character ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param token * Access token to use if unable to set a header ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getCharactersCharacterIdOnlineAsync ( Integer characterId , String datasource , String ifNoneMatch , String token , final ApiCallback < CharacterOnlineResponse > callback ) throws ApiException { } }
// Generated client method: validate parameters and build the call, then
// execute it asynchronously, deserializing the response to
// CharacterOnlineResponse and delivering it through the callback.
com . squareup . okhttp . Call call = getCharactersCharacterIdOnlineValidateBeforeCall ( characterId , datasource , ifNoneMatch , token , callback ) ; Type localVarReturnType = new TypeToken < CharacterOnlineResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class DOInfoReader { /** * 先按照setter的约定寻找setter方法 ( 必须严格匹配参数类型或自动转换 ) < br > * 如果有则按setter方法 , 如果没有则直接写入 * @ param field * @ param object * @ param value */ public static boolean setValue ( Field field , Object object , Object value ) { } }
String fieldName = field.getName();
String setMethodName = "set" + firstLetterUpperCase(fieldName);
// Coerce the value to the declared field type before writing.
value = TypeAutoCast.cast(value, field.getType());
Method method = null;
try {
    // Look the setter up by the declared field type rather than
    // value.getClass(): this matches primitive-typed setters
    // (e.g. setAge(int) vs Integer.class) and avoids an NPE when the
    // cast value is null.
    method = object.getClass().getMethod(setMethodName, field.getType());
} catch (Exception e) {
    // No matching setter — fall back to direct field access below.
}
if (method != null) {
    try {
        method.invoke(object, value);
    } catch (Exception e) {
        LOGGER.error("method invoke", e);
        return false;
    }
} else {
    field.setAccessible(true);
    try {
        field.set(object, value);
    } catch (Exception e) {
        LOGGER.error("method invoke", e);
        return false;
    }
}
return true;
public class Validator { /** * Validates a given field to have a maximum length * @ param maxLength The maximum length * @ param name The field to check */ public void expectMax ( String name , double maxLength ) { } }
// Delegate using the default localized MAX validation message for the field.
expectMax ( name , maxLength , messages . get ( Validation . MAX_KEY . name ( ) , name , maxLength ) ) ;
public class CouchDbSamlIdPMetadataDocument { /** * Merge another doc into this one . * @ param doc other doc * @ return this */ public CouchDbSamlIdPMetadataDocument merge ( final SamlIdPMetadataDocument doc ) { } }
// Overwrite every metadata field of this document with the other
// document's values (id, metadata, signing and encryption material).
setId ( doc . getId ( ) ) ; setMetadata ( doc . getMetadata ( ) ) ; setSigningCertificate ( doc . getSigningCertificate ( ) ) ; setSigningKey ( doc . getSigningKey ( ) ) ; setEncryptionCertificate ( doc . getEncryptionCertificate ( ) ) ; setEncryptionKey ( doc . getEncryptionKey ( ) ) ; return this ;
public class SimpleDirectoryScanner { /** * Clears errors and warnings , automatically called for new scans . */ public void clear ( ) { } }
// Reset all collected messages and scan counters for a fresh scan.
// (Removed a stray empty statement after the first call.)
this.errors.clearErrorMessages();
this.warnings.clear();
this.infos.clear();
this.scDir = 0;
this.scDirUnreadable = 0;
this.scFiles = 0;
this.scFilesUnreadable = 0;
public class MultiLevelSeqGenerator { /** * add . * @ param style a { @ link org . beangle . commons . text . seq . SeqPattern } object . */ public void add ( SeqPattern style ) { } }
// Back-reference this generator on the pattern, then index the pattern by level.
style . setGenerator ( this ) ; patterns . put ( style . getLevel ( ) , style ) ;
public class GetMLModelResult { /** * A list of the training parameters in the < code > MLModel < / code > . The list is implemented as a map of key - value * pairs . * The following is the current set of training parameters : * < ul > * < li > * < code > sgd . maxMLModelSizeInBytes < / code > - The maximum allowed size of the model . Depending on the input data , the * size of the model might affect its performance . * The value is an integer that ranges from < code > 100000 < / code > to < code > 2147483648 < / code > . The default value is * < code > 33554432 < / code > . * < / li > * < li > * < code > sgd . maxPasses < / code > - The number of times that the training process traverses the observations to build * the < code > MLModel < / code > . The value is an integer that ranges from < code > 1 < / code > to < code > 10000 < / code > . The * default value is < code > 10 < / code > . * < / li > * < li > * < code > sgd . shuffleType < / code > - Whether Amazon ML shuffles the training data . Shuffling data improves a model ' s * ability to find the optimal solution for a variety of data types . The valid values are < code > auto < / code > and * < code > none < / code > . The default value is < code > none < / code > . We strongly recommend that you shuffle your data . * < / li > * < li > * < code > sgd . l1RegularizationAmount < / code > - The coefficient regularization L1 norm . It controls overfitting the * data by penalizing large coefficients . This tends to drive coefficients to zero , resulting in a sparse feature * set . If you use this parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not use L1 * normalization . This parameter can ' t be used when < code > L2 < / code > is specified . Use this parameter sparingly . * < / li > * < li > * < code > sgd . 
l2RegularizationAmount < / code > - The coefficient regularization L2 norm . It controls overfitting the * data by penalizing large coefficients . This tends to drive coefficients to small , nonzero values . If you use this * parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not use L2 * normalization . This parameter can ' t be used when < code > L1 < / code > is specified . Use this parameter sparingly . * < / li > * < / ul > * @ return A list of the training parameters in the < code > MLModel < / code > . The list is implemented as a map of * key - value pairs . < / p > * The following is the current set of training parameters : * < ul > * < li > * < code > sgd . maxMLModelSizeInBytes < / code > - The maximum allowed size of the model . Depending on the input * data , the size of the model might affect its performance . * The value is an integer that ranges from < code > 100000 < / code > to < code > 2147483648 < / code > . The default * value is < code > 33554432 < / code > . * < / li > * < li > * < code > sgd . maxPasses < / code > - The number of times that the training process traverses the observations to * build the < code > MLModel < / code > . The value is an integer that ranges from < code > 1 < / code > to * < code > 10000 < / code > . The default value is < code > 10 < / code > . * < / li > * < li > * < code > sgd . shuffleType < / code > - Whether Amazon ML shuffles the training data . Shuffling data improves a * model ' s ability to find the optimal solution for a variety of data types . The valid values are * < code > auto < / code > and < code > none < / code > . The default value is < code > none < / code > . We strongly recommend * that you shuffle your data . * < / li > * < li > * < code > sgd . l1RegularizationAmount < / code > - The coefficient regularization L1 norm . 
It controls overfitting * the data by penalizing large coefficients . This tends to drive coefficients to zero , resulting in a * sparse feature set . If you use this parameter , start by specifying a small value , such as * < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not * use L1 normalization . This parameter can ' t be used when < code > L2 < / code > is specified . Use this parameter * sparingly . * < / li > * < li > * < code > sgd . l2RegularizationAmount < / code > - The coefficient regularization L2 norm . It controls overfitting * the data by penalizing large coefficients . This tends to drive coefficients to small , nonzero values . If * you use this parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not * use L2 normalization . This parameter can ' t be used when < code > L1 < / code > is specified . Use this parameter * sparingly . * < / li > */ public java . util . Map < String , String > getTrainingParameters ( ) { } }
// Lazily initialize the backing map so callers never observe null.
if ( trainingParameters == null ) { trainingParameters = new com . amazonaws . internal . SdkInternalMap < String , String > ( ) ; } return trainingParameters ;
public class StringUtils { /** * Removes the leading and trailing delimiter from a string . * @ param str String to process . * @ param delimiter Delimiter to remove . * @ return The string with the leading and trailing delimiter removed . */ public static String removeLeadingAndTrailingDelimiter ( String str , String delimiter ) { } }
// Strip at most one leading and one trailing occurrence of the delimiter.
// The trailing check requires the string to be longer than the delimiter
// so that a string equal to the delimiter is treated as leading-only, and
// the Math.max guard keeps the substring bounds from crossing.
final boolean hasLeading = str.startsWith(delimiter);
final boolean hasTrailing =
        str.length() > delimiter.length() && str.endsWith(delimiter);
if (!hasLeading && !hasTrailing) {
    return str;
}
final int begin = hasLeading ? delimiter.length() : 0;
final int end = hasTrailing
        ? Math.max(str.length() - delimiter.length(), begin)
        : str.length();
return str.substring(begin, end);
public class ModelsImpl { /** * Delete an entity role . * @ param appId The application ID . * @ param versionId The version ID . * @ param entityId The entity ID . * @ param roleId The entity role Id . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatus > deleteCustomEntityRoleAsync ( UUID appId , String versionId , UUID entityId , UUID roleId , final ServiceCallback < OperationStatus > serviceCallback ) { } }
// Adapt the service-response Observable into a ServiceFuture, wiring the
// supplied callback for success/failure notification.
return ServiceFuture . fromResponse ( deleteCustomEntityRoleWithServiceResponseAsync ( appId , versionId , entityId , roleId ) , serviceCallback ) ;
public class DefaultTypeCache { /** * Return the list of type names in the type system which match the specified filter . * @ return list of type names * @ param filterMap - Map of filter for type names . Valid keys are CATEGORY , SUPERTYPE , NOT _ SUPERTYPE * For example , CATEGORY = TRAIT & & SUPERTYPE contains ' X ' & & SUPERTYPE ! contains ' Y ' */ @ Override public Collection < String > getTypeNames ( Map < TYPE_FILTER , String > filterMap ) throws AtlasException { } }
// Validate the filter map, then collect the name of every cached type
// that satisfies it.
assertFilter(filterMap);
final List<String> matchingNames = new ArrayList<>();
for (IDataType type : types_.values()) {
    if (shouldIncludeType(type, filterMap)) {
        matchingNames.add(type.getName());
    }
}
return matchingNames;
public class PathTokenizer { /** * Get the remaining path from some tokens * @ param tokens the tokens * @ param i the current location * @ return the remaining path * @ throws IllegalArgumentException for null tokens or i is out of range */ public static String getRemainingPath ( List < String > tokens , int i ) { } }
// Reject a null token list, then delegate with end index = tokens.size().
if ( tokens == null ) { throw MESSAGES . nullArgument ( "tokens" ) ; } return getRemainingPath ( tokens , i , tokens . size ( ) ) ;
public class GeoIPCityDissector { public void dissect ( final Parsable < ? > parsable , final String inputname , final InetAddress ipAddress ) throws DissectionFailure { } }
// Look up city-level GeoIP data for the address, then dissect both the
// country fields and the additional city fields.
// NOTE(review): lookup failures are deliberately swallowed — dissection is
// best-effort, and no fields are emitted when the IP cannot be resolved.
// Confirm that silent skipping (vs. logging) is the intended policy.
// City is the ' Country ' + more details . CityResponse response ; try { response = reader . city ( ipAddress ) ; } catch ( IOException | GeoIp2Exception e ) { return ; } extractCountryFields ( parsable , inputname , response ) ; extractCityFields ( parsable , inputname , response ) ;
public class Purge { /** * Executes the dependency - check purge to delete the existing local copy of * the NVD CVE data . * @ throws BuildException thrown if there is a problem deleting the file ( s ) */ @ Override public void execute ( ) throws BuildException { } }
// Delete the local H2 database file ("odc.mv.db" unless overridden) that
// holds the cached NVD CVE data. Each failure path (delete failed, file
// missing, data-directory lookup error) either throws BuildException when
// failOnError is set or logs at MSG_ERR; settings are always cleaned up.
populateSettings ( ) ; final File db ; try { db = new File ( getSettings ( ) . getDataDirectory ( ) , getSettings ( ) . getString ( Settings . KEYS . DB_FILE_NAME , "odc.mv.db" ) ) ; if ( db . exists ( ) ) { if ( db . delete ( ) ) { log ( "Database file purged; local copy of the NVD has been removed" , Project . MSG_INFO ) ; } else { final String msg = String . format ( "Unable to delete '%s'; please delete the file manually" , db . getAbsolutePath ( ) ) ; if ( this . failOnError ) { throw new BuildException ( msg ) ; } log ( msg , Project . MSG_ERR ) ; } } else { final String msg = String . format ( "Unable to purge database; the database file does not exist: %s" , db . getAbsolutePath ( ) ) ; if ( this . failOnError ) { throw new BuildException ( msg ) ; } log ( msg , Project . MSG_ERR ) ; } } catch ( IOException ex ) { final String msg = "Unable to delete the database" ; if ( this . failOnError ) { throw new BuildException ( msg ) ; } log ( msg , Project . MSG_ERR ) ; } finally { settings . cleanup ( true ) ; }
public class DirectCouponList { /** * Standard factory for new DirectCouponList . * This initializes the given WritableMemory . * @ param lgConfigK the configured Lg K * @ param tgtHllType the configured HLL target * @ param dstMem the destination memory for the sketch . * @ return a new DirectCouponList */ static DirectCouponList newInstance ( final int lgConfigK , final TgtHllType tgtHllType , final WritableMemory dstMem ) { } }
// Initialize the destination memory's preamble for an empty LIST-mode
// sketch (preamble ints, serial version, family id, lgK, initial coupon
// array size, EMPTY flag, zero coupon count, target/current modes), then
// wrap it in a DirectCouponList. Field order follows the preamble layout.
insertPreInts ( dstMem , LIST_PREINTS ) ; insertSerVer ( dstMem ) ; insertFamilyId ( dstMem ) ; insertLgK ( dstMem , lgConfigK ) ; insertLgArr ( dstMem , LG_INIT_LIST_SIZE ) ; insertFlags ( dstMem , EMPTY_FLAG_MASK ) ; // empty and not compact insertListCount ( dstMem , 0 ) ; insertModes ( dstMem , tgtHllType , CurMode . LIST ) ; return new DirectCouponList ( lgConfigK , tgtHllType , CurMode . LIST , dstMem ) ;
public class CmsSecurityManager { /** * Locks a resource . < p > * The < code > type < / code > parameter controls what kind of lock is used . < br > * Possible values for this parameter are : < br > * < ul > * < li > < code > { @ link org . opencms . lock . CmsLockType # EXCLUSIVE } < / code > < / li > * < li > < code > { @ link org . opencms . lock . CmsLockType # TEMPORARY } < / code > < / li > * < li > < code > { @ link org . opencms . lock . CmsLockType # PUBLISH } < / code > < / li > * < / ul > < p > * @ param context the current request context * @ param resource the resource to lock * @ param type type of the lock * @ throws CmsException if something goes wrong * @ see CmsObject # lockResource ( String ) * @ see CmsObject # lockResourceTemporary ( String ) * @ see org . opencms . file . types . I _ CmsResourceType # lockResource ( CmsObject , CmsSecurityManager , CmsResource , CmsLockType ) */ public void lockResource ( CmsRequestContext context , CmsResource resource , CmsLockType type ) throws CmsException { } }
// Check the project is offline and the user has write permission, then
// delegate the lock to the driver manager. Any failure is reported through
// the db context — a CmsLockException keeps its own message container,
// other exceptions get a generic ERR_LOCK_RESOURCE_2 message. The db
// context is always cleared.
CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; try { checkOfflineProject ( dbc ) ; checkPermissions ( dbc , resource , CmsPermissionSet . ACCESS_WRITE , false , CmsResourceFilter . ALL ) ; m_driverManager . lockResource ( dbc , resource , type ) ; } catch ( Exception e ) { CmsMessageContainer messageContainer ; if ( e instanceof CmsLockException ) { messageContainer = ( ( CmsLockException ) e ) . getMessageContainer ( ) ; } else { messageContainer = Messages . get ( ) . container ( Messages . ERR_LOCK_RESOURCE_2 , context . getSitePath ( resource ) , type . toString ( ) ) ; } dbc . report ( null , messageContainer , e ) ; } finally { dbc . clear ( ) ; }
public class InternalXtextParser { /** * InternalXtext . g : 1095:1 : entryRuleTerminalRule : ruleTerminalRule EOF ; */ public final void entryRuleTerminalRule ( ) throws RecognitionException { } }
// Generated ANTLR entry rule: parse ruleTerminalRule followed by EOF,
// firing before/after grammar-access hooks, and report/recover on any
// recognition error. Do not hand-edit — regenerated from InternalXtext.g.
try { // InternalXtext . g : 1096:1 : ( ruleTerminalRule EOF ) // InternalXtext . g : 1097:1 : ruleTerminalRule EOF { before ( grammarAccess . getTerminalRuleRule ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_1 ) ; ruleTerminalRule ( ) ; state . _fsp -- ; after ( grammarAccess . getTerminalRuleRule ( ) ) ; match ( input , EOF , FollowSets000 . FOLLOW_2 ) ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { } return ;
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@code IfcMonetaryUnit} EClass, lazily resolved from the
     * registered Ifc2x3tc1 EPackage.
     *
     * NOTE: EMF-generated accessor ({@code @generated}); the classifier index (323)
     * is fixed by the generated package layout and must stay in sync with it.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcMonetaryUnit() {
        if (ifcMonetaryUnitEClass == null) {
            // Resolve the classifier by its generated position in the package.
            ifcMonetaryUnitEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(323);
        }
        return ifcMonetaryUnitEClass;
    }
}
public class CmsJspContentAccessBean { /** * Returns a lazy initialized Map that provides a Map that provides * values from the XML content in the selected locale . < p > * The first provided Map key is assumed to be a String that represents the Locale , * the second provided Map key is assumed to be a String that represents the xpath to the value . < p > * Usage example on a JSP with the JSTL : < pre > * & lt ; cms : contentload . . . & gt ; * & lt ; cms : contentaccess var = " content " / & gt ; * The Title in Locale " de " : $ { content . localeValue [ ' de ' ] [ ' Title ' ] } * & lt ; / cms : contentload & gt ; < / pre > * @ return a lazy initialized Map that provides a Map that provides * values from the XML content in the selected locale * @ see # getValue ( ) */ public Map < String , Map < String , CmsJspContentAccessValueWrapper > > getLocaleValue ( ) { } }
if ( m_localeValue == null ) { m_localeValue = CmsCollectionsGenericWrapper . createLazyMap ( new CmsLocaleValueTransformer ( ) ) ; } return m_localeValue ;
public class LocalTransactionCurrentService { /** * { @ inheritDoc } */ @ Override public void resume ( LocalTransactionCoordinator arg0 ) throws IllegalStateException { } }
if ( ltc != null ) { ltc . resume ( arg0 ) ; }
public class CreatePlatformEndpointRequest { /** * For a list of attributes , see < a * href = " https : / / docs . aws . amazon . com / sns / latest / api / API _ SetEndpointAttributes . html " > SetEndpointAttributes < / a > . * @ return For a list of attributes , see < a * href = " https : / / docs . aws . amazon . com / sns / latest / api / API _ SetEndpointAttributes . html " * > SetEndpointAttributes < / a > . */ public java . util . Map < String , String > getAttributes ( ) { } }
if ( attributes == null ) { attributes = new com . amazonaws . internal . SdkInternalMap < String , String > ( ) ; } return attributes ;
public class DateUtils { /** * Returns a Java representation of a Facebook " month - year " { @ code date } string . * @ param date * Facebook { @ code date } string . * @ return Java date representation of the given Facebook " month - year " { @ code date } string or { @ code null } if * { @ code date } is { @ code null } or invalid . */ public static Date toDateFromMonthYearFormat ( String date ) { } }
if ( date == null ) { return null ; } if ( "0000-00" . equals ( date ) ) { return null ; } return toDateWithFormatString ( date , FACEBOOK_MONTH_YEAR_DATE_FORMAT ) ;
public class RemoveDescendantsUtil {

    /**
     * Removes descendant elements selected by their "class" attribute.
     * (Original comment, translated: removes descendant elements by the
     * specified class attribute.)
     *
     * @param <T>
     *            tag class type (i.e. Div.class, Span.class ...)
     * @param target
     *            objects for scan
     * @param clazz
     *            class property of tag
     */
    public static <T extends AbstractJaxb> void removeDescendants(T target, String clazz) {
        // Delegates to the shared worker; the null argument selects the
        // "match by class attribute only" mode (no tag-type filter).
        execute(target, null, clazz);
    }
}
public class GermanSpellerRule {

    /**
     * Checks whether {@code word} is a compound that contains an ignored word
     * from spelling.txt (e.g., "Feynman"), so that compounds built from such
     * words (hyphenated or not) are accepted by the spell checker.
     *
     * @param word the word to check
     * @return {@code true} if the compound should be ignored (accepted)
     * @throws IOException if the hunspell dictionary lookup fails
     */
    private boolean ignoreCompoundWithIgnoredWord(String word) throws IOException {
        // Only consider capitalized words or geographic prefixes; otherwise stuff
        // like "rumfangreichen" gets accepted.
        if (!StringTools.startsWithUppercase(word) && !StringUtils.startsWithAny(word, "nord", "west", "ost", "süd")) {
            return false;
        }
        String[] words = word.split("-");
        if (words.length < 2) {
            // Non-hyphenated compound (e.g., "Feynmandiagramm"):
            // only search for compounds that start(!) with a word from spelling.txt.
            int end = super.startsWithIgnoredWord(word, true);
            if (end < 3) {
                // Support for geographical adjectives - although "süd/ost/west/nord"
                // are not in spelling.txt - to accept sentences such as:
                // "Der westperuanische Ferienort, das ostargentinische Städtchen,
                //  das südukrainische Brauchtum, der nordägyptische Staudamm."
                if (word.startsWith("ost") || word.startsWith("süd")) {
                    end = 3;
                } else if (word.startsWith("west") || word.startsWith("nord")) {
                    end = 4;
                } else {
                    return false;
                }
            }
            String ignoredWord = word.substring(0, end);
            String partialWord = word.substring(end);
            // Candidate only if the prefix is not an acronym and the remainder looks
            // like a lower-case compound tail (or the prefix itself ends with "-").
            boolean isCandidateForNonHyphenatedCompound = !StringUtils.isAllUpperCase(ignoredWord) && (StringUtils.isAllLowerCase(partialWord) || ignoredWord.endsWith("-"));
            boolean needFugenS = isNeedingFugenS(ignoredWord);
            if (isCandidateForNonHyphenatedCompound && !needFugenS && partialWord.length() > 2) {
                // Accept when the remainder is a correctly spelled word (either case).
                return !hunspellDict.misspelled(partialWord) || !hunspellDict.misspelled(StringUtils.capitalize(partialWord));
            } else if (isCandidateForNonHyphenatedCompound && needFugenS && partialWord.length() > 2) {
                // Strip a linking "Fugen-s" before checking the remainder.
                partialWord = partialWord.startsWith("s") ? partialWord.substring(1) : partialWord;
                return !hunspellDict.misspelled(partialWord) || !hunspellDict.misspelled(StringUtils.capitalize(partialWord));
            }
            return false;
        }
        // Hyphenated compound (e.g., "Feynman-Diagramm"):
        boolean hasIgnoredWord = false;
        List<String> toSpellCheck = new ArrayList<>(3);
        String stripFirst = word.substring(words[0].length() + 1); // everything after the first "-"
        String stripLast = word.substring(0, word.length() - words[words.length - 1].length() - 1); // everything up to the last "-"
        if (super.ignoreWord(stripFirst) || wordsToBeIgnoredInCompounds.contains(stripFirst)) {
            // e.g., "Senioren-Au-pair": the tail is ignored, so only the head needs checking.
            hasIgnoredWord = true;
            if (!super.ignoreWord(words[0])) {
                toSpellCheck.add(words[0]);
            }
        } else if (super.ignoreWord(stripLast) || wordsToBeIgnoredInCompounds.contains(stripLast)) {
            // e.g., "Au-pair-Agentur": the head is ignored, so only the tail needs checking.
            hasIgnoredWord = true;
            if (!super.ignoreWord(words[words.length - 1])) {
                toSpellCheck.add(words[words.length - 1]);
            }
        } else {
            // Otherwise check each hyphen-separated part individually.
            for (String word1 : words) {
                if (super.ignoreWord(word1) || wordsToBeIgnoredInCompounds.contains(word1)) {
                    hasIgnoredWord = true;
                } else {
                    toSpellCheck.add(word1);
                }
            }
        }
        if (hasIgnoredWord) {
            // All non-ignored parts must be correctly spelled for the compound to pass.
            for (String w : toSpellCheck) {
                if (hunspellDict.misspelled(w)) {
                    return false;
                }
            }
        }
        return hasIgnoredWord;
    }
}
public class StringConverter { /** * Normalizes and prints the given string . */ public static String normalizeString ( String s , boolean canonical ) { } }
StringBuilder strBuf = new StringBuilder ( ) ; int len = ( s != null ) ? s . length ( ) : 0 ; for ( int i = 0 ; i < len ; i ++ ) { char c = s . charAt ( i ) ; if ( '_' == c ) { if ( len - i > ENCODE_CHARS ) { String spart = s . substring ( i , i + ENCODE_CHARS ) ; Matcher encodeMatcher = ENCODE_PATTERN . matcher ( spart ) ; if ( encodeMatcher . matches ( ) ) strBuf . append ( normalizeChar ( c , canonical ) ) ; else strBuf . append ( c ) ; } else strBuf . append ( c ) ; } else strBuf . append ( normalizeChar ( c , canonical ) ) ; } return new String ( strBuf ) ;
public class JSON { /** * / * ObjectReadContext : databind */ @ Override public < T extends TreeNode > T readTree ( JsonParser p ) throws IOException { } }
if ( _treeCodec == null ) { _noTreeCodec ( "write TreeNode" ) ; } return _treeCodec . readTree ( p ) ;
public class TransformerIdentityImpl { /** * Report an attribute type declaration . * < p > Only the effective ( first ) declaration for an attribute will * be reported . The type will be one of the strings " CDATA " , * " ID " , " IDREF " , " IDREFS " , " NMTOKEN " , " NMTOKENS " , " ENTITY " , * " ENTITIES " , or " NOTATION " , or a parenthesized token group with * the separator " | " and all whitespace removed . < / p > * @ param eName The name of the associated element . * @ param aName The name of the attribute . * @ param type A string representing the attribute type . * @ param valueDefault A string representing the attribute default * ( " # IMPLIED " , " # REQUIRED " , or " # FIXED " ) or null if * none of these applies . * @ param value A string representing the attribute ' s default value , * or null if there is none . * @ exception SAXException The application may raise an exception . */ public void attributeDecl ( String eName , String aName , String type , String valueDefault , String value ) throws SAXException { } }
if ( null != m_resultDeclHandler ) m_resultDeclHandler . attributeDecl ( eName , aName , type , valueDefault , value ) ;
public class Disposables { /** * Performs null checks and disposes of assets . * @ param disposables its values will be disposed of ( if they exist ) . Can be null . */ public static void disposeOf ( final ObjectMap < ? , ? extends Disposable > disposables ) { } }
if ( disposables != null ) { for ( final Disposable disposable : disposables . values ( ) ) { disposeOf ( disposable ) ; } }
public class JsBusImpl {

    /**
     * Looks up a destination definition on the given bus by name,
     * delegating to the destination cache.
     *
     * @see com.ibm.ws.sib.admin.JsBus#getSIBDestination(java.lang.String, java.lang.String)
     *
     * @param busName the name of the bus
     * @param name the name of the destination
     * @return the destination definition
     * @throws SIBExceptionBase on a general admin error
     * @throws SIBExceptionDestinationNotFound if no such destination exists
     */
    public BaseDestinationDefinition getSIBDestination(String busName, String name) throws SIBExceptionBase, SIBExceptionDestinationNotFound {
        // Pure delegation; the cache performs the actual lookup and error handling.
        return getDestinationCache().getSIBDestination(busName, name);
    }
}
public class SparseBitmap { /** * Convenience method : returns an array containing the set bits . * @ return array corresponding to the position of the set bits . */ public int [ ] toArray ( ) { } }
IntIterator i = getIntIterator ( ) ; final int cardinality = this . cardinality ( ) ; int [ ] answer = new int [ cardinality ] ; for ( int k = 0 ; k < cardinality ; ++ k ) answer [ k ] = i . next ( ) ; return answer ;
public class DialogState {

    /**
     * Flushes the scope of the given unit and all of its children by clearing
     * the statements of every child scope's context.
     *
     * @param unitId the id of the unit whose child scopes are flushed
     */
    public void flushChildScopes(QName unitId) {
        Set<Integer> childScopes = findChildScopes(unitId);
        for (Integer scopeId : childScopes) {
            // NOTE(review): assumes every child scope id has a context in
            // statementContexts — a missing entry would NPE here. TODO confirm
            // that invariant holds, or guard against null.
            MutableContext mutableContext = statementContexts.get(scopeId);
            mutableContext.clearStatements();
        }
    }
}
public class VolatileIndex {

    /**
     * Overwrites the default implementation by adding the documents to a
     * pending list and commits the pending list if needed.
     *
     * @param docs the documents to add to the index.
     * @throws IOException if an error occurs while writing to the index.
     */
    @Override
    void addDocuments(Document[] docs) throws IOException {
        for (int i = 0; i < docs.length; i++) {
            // Key pending documents by UUID; a newer document for the same node
            // replaces (and disposes) the older pending one.
            Document old = pending.put(docs[i].get(FieldNames.UUID), docs[i]);
            if (old != null) {
                Util.disposeDocument(old);
            }
            // Flush inside the loop so the pending buffer never exceeds bufferSize.
            if (pending.size() >= bufferSize) {
                commitPending();
            }
            numDocs++;
        }
        // Readers opened before this call must not see a stale view.
        invalidateSharedReader();
    }
}
public class ForkJoinPool {

    /**
     * Tries to decrement active count (sometimes implicitly) and
     * possibly release or create a compensating worker in preparation
     * for blocking. Fails on contention or termination. Otherwise,
     * adds a new thread if no idle workers are available and either
     * pool would become completely starved or: (at least half
     * starved, and fewer than 50% spares exist, and there is at least
     * one task apparently available). Even though the availability
     * check requires a full scan, it is worthwhile in reducing false
     * alarms.
     *
     * NOTE: lock-free CAS protocol over the packed {@code ctl} word; the exact
     * read/CAS ordering is load-bearing and must not be rearranged.
     *
     * @param task if non-null, a task being waited for
     * @param blocker if non-null, a blocker being waited for
     * @return true if the caller can block, else should recheck and retry
     */
    final boolean tryCompensate(ForkJoinTask<?> task, ManagedBlocker blocker) {
        int pc = parallelism, e;
        long c = ctl;                     // snapshot of packed control word
        WorkQueue[] ws = workQueues;
        if ((e = (int) c) >= 0 && ws != null) {   // e >= 0: pool not terminating
            int u, a, ac, hc;
            // tc: reconstructed total worker count from the upper half of ctl.
            int tc = (short) ((u = (int) (c >>> 32)) >>> UTC_SHIFT) + pc;
            boolean replace = false;
            if ((a = u >> UAC_SHIFT) <= 0) {      // no surplus active workers
                if ((ac = a + pc) <= 1)
                    replace = true;               // pool would be fully starved
                else if ((e > 0 || (task != null && ac <= (hc = pc >>> 1) && tc < pc + hc))) {
                    // Scan queues for any apparently available task before
                    // deciding to compensate.
                    WorkQueue w;
                    for (int j = 0; j < ws.length; ++j) {
                        if ((w = ws[j]) != null && !w.isEmpty()) {
                            replace = true;
                            break;                // in compensation range and tasks available
                        }
                    }
                }
            }
            if ((task == null || task.status >= 0) &&          // recheck need to block
                (blocker == null || !blocker.isReleasable()) && ctl == c) {
                if (!replace) {                   // no compensation
                    // Just decrement the active count via CAS on ctl.
                    long nc = ((c - AC_UNIT) & AC_MASK) | (c & ~AC_MASK);
                    if (U.compareAndSwapLong(this, CTL, c, nc))
                        return true;
                } else if (e != 0) {              // release an idle worker
                    WorkQueue w;
                    Thread p;
                    int i;
                    if ((i = e & SMASK) < ws.length && (w = ws[i]) != null) {
                        long nc = ((long) (w.nextWait & E_MASK) | (c & (AC_MASK | TC_MASK)));
                        if (w.eventCount == (e | INT_SIGN) && U.compareAndSwapLong(this, CTL, c, nc)) {
                            // Advance the worker's event count and wake it up.
                            w.eventCount = (e + E_SEQ) & E_MASK;
                            if ((p = w.parker) != null)
                                U.unpark(p);
                            return true;
                        }
                    }
                } else if (tc < MAX_CAP) {        // create replacement
                    long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK);
                    if (U.compareAndSwapLong(this, CTL, c, nc)) {
                        addWorker();
                        return true;
                    }
                }
            }
        }
        return false;
    }
}
public class FacebookFragment { /** * Gets the permissions associated with the current session or null if no session * has been created . * @ return the permissions associated with the current session */ protected final List < String > getSessionPermissions ( ) { } }
if ( sessionTracker != null ) { Session currentSession = sessionTracker . getSession ( ) ; return ( currentSession != null ) ? currentSession . getPermissions ( ) : null ; } return null ;
public class Error { /** * Converts an array of { @ link Error } s to an array of { @ link String } s . * @ param spans * @ param s * @ return the strings */ public static String [ ] spansToStrings ( Error [ ] spans , CharSequence s ) { } }
String [ ] tokens = new String [ spans . length ] ; for ( int si = 0 , sl = spans . length ; si < sl ; si ++ ) { tokens [ si ] = spans [ si ] . getCoveredText ( s ) . toString ( ) ; } return tokens ;
public class DraeneiSearchService { /** * Filter stop word facets * @ param facets Input facets * @ return */ private Collection < Facet > filter ( Collection < Facet > facets ) { } }
if ( NotStopWordPredicate == null ) { return facets ; } return facets . stream ( ) . filter ( NotStopWordPredicate ) . collect ( Collectors . toList ( ) ) ;
public class TokenBucket {

    /**
     * Tries to reserve {@code tokens} tokens, returning how long the caller
     * must wait before they become available.
     *
     * Note: this method should only be called while holding the class lock. For performance, the lock is not explicitly
     * acquired.
     *
     * @param tokens number of tokens to reserve
     * @param maxWaitMillis maximum acceptable wait
     * @return the wait until the tokens are available or negative if they can't be acquired in the given timeout.
     */
    synchronized long tryReserveTokens(long tokens, long maxWaitMillis) {
        long now = System.currentTimeMillis();
        // Wait still owed from previously reserved (not yet available) tokens.
        long waitUntilNextTokenAvailable = Math.max(0, this.nextTokenAvailableMillis - now);
        updateTokensStored(now);
        if (tokens <= this.tokensStored) {
            // Enough tokens already stored: consume them immediately.
            this.tokensStored -= tokens;
            return waitUntilNextTokenAvailable;
        }
        double additionalNeededTokens = tokens - this.tokensStored;
        // casting to long will round towards 0
        long additionalWaitForEnoughTokens = (long) (additionalNeededTokens / this.tokensPerMilli) + 1;
        long totalWait = waitUntilNextTokenAvailable + additionalWaitForEnoughTokens;
        if (totalWait > maxWaitMillis) {
            // Reservation rejected: leave bucket state untouched.
            return -1;
        }
        // The extra wait over-produces slightly; keep the surplus as stored tokens
        // and push the next-available time forward by the extra wait.
        this.tokensStored = this.tokensPerMilli * additionalWaitForEnoughTokens - additionalNeededTokens;
        this.nextTokenAvailableMillis = this.nextTokenAvailableMillis + additionalWaitForEnoughTokens;
        return totalWait;
    }
}
public class SleUtility { /** * Groups values by the groups from the SLE . * @ param values List of Extendable implementations to group . * @ param groups Group fields ( from the SimpleListExtension module ) * @ return Grouped list of entries . */ public static < T extends Extendable > List < T > group ( final List < T > values , final Group [ ] groups ) { } }
final SortableList < T > list = getSortableList ( values ) ; final GroupStrategy strategy = new GroupStrategy ( ) ; for ( int i = groups . length - 1 ; i >= 0 ; i -- ) { list . sortOnProperty ( groups [ i ] , true , strategy ) ; } return list ;
public class JAXBUtils { /** * Write XML entity to the given destination . * @ param entity * XML entity * @ param destination * destination to write to . Supported destinations : { @ link java . io . OutputStream } , { @ link java . io . File } , * { @ link java . io . Writer } * @ param comment * optional comment which will be added at the begining of the generated XML * @ throws IllegalArgumentException * @ throws SwidException * @ param < T > * JAXB entity */ public static < T > void writeObject ( final T entity , final Object destination , final String comment ) { } }
try { JAXBContext jaxbContext ; if ( entity instanceof JAXBElement ) { jaxbContext = JAXBContext . newInstance ( ( ( JAXBElement ) entity ) . getValue ( ) . getClass ( ) ) ; } else { jaxbContext = JAXBContext . newInstance ( entity . getClass ( ) ) ; } Marshaller marshaller = jaxbContext . createMarshaller ( ) ; marshaller . setProperty ( Marshaller . JAXB_FORMATTED_OUTPUT , true ) ; if ( StringUtils . isNotBlank ( comment ) ) { marshaller . setProperty ( "com.sun.xml.bind.xmlHeaders" , comment ) ; } if ( destination instanceof java . io . OutputStream ) { marshaller . marshal ( entity , ( OutputStream ) destination ) ; } else if ( destination instanceof java . io . File ) { marshaller . marshal ( entity , ( java . io . File ) destination ) ; } else if ( destination instanceof java . io . Writer ) { marshaller . marshal ( entity , ( java . io . Writer ) destination ) ; } else { throw new IllegalArgumentException ( "Unsupported destination." ) ; } } catch ( final JAXBException e ) { throw new SwidException ( "Cannot write object." , e ) ; }
public class CacheProxy { /** * Sets the access expiration time . * @ param expirable the entry that was operated on * @ param currentTimeMS the current time , or 0 if not read yet */ protected final void setAccessExpirationTime ( Expirable < ? > expirable , long currentTimeMS ) { } }
try { Duration duration = expiry . getExpiryForAccess ( ) ; if ( duration == null ) { return ; } else if ( duration . isZero ( ) ) { expirable . setExpireTimeMS ( 0L ) ; } else if ( duration . isEternal ( ) ) { expirable . setExpireTimeMS ( Long . MAX_VALUE ) ; } else { if ( currentTimeMS == 0L ) { currentTimeMS = currentTimeMillis ( ) ; } long expireTimeMS = duration . getAdjustedTime ( currentTimeMS ) ; expirable . setExpireTimeMS ( expireTimeMS ) ; } } catch ( Exception e ) { logger . log ( Level . WARNING , "Failed to set the entry's expiration time" , e ) ; }
public class Participant {

    /**
     * Waits for the end of the synchronization and updates last seen config
     * file.
     *
     * @param peerId the id of the peer.
     * @throws TimeoutException in case of timeout.
     * @throws IOException in case of IOException.
     * @throws InterruptedException if it's interrupted.
     */
    void waitForSyncEnd(String peerId) throws TimeoutException, IOException, InterruptedException {
        // Block until the peer's SYNC_END arrives (or the sync timeout elapses).
        MessageTuple tuple = filter.getExpectedMessage(MessageType.SYNC_END, peerId, getSyncTimeoutMs());
        ClusterConfiguration cnf = ClusterConfiguration.fromProto(tuple.getMessage().getConfig(), this.serverId);
        LOG.debug("Got SYNC_END {} from {}", cnf, peerId);
        // Persist the configuration carried by SYNC_END as the last seen config.
        this.persistence.setLastSeenConfig(cnf);
        if (persistence.isInStateTransfer()) {
            persistence.endStateTransfer();
        }
        // If the synchronization is performed by truncation, then it's possible
        // the content of cluster_config has been truncated in log, then we'll
        // delete these invalid cluster_config files.
        persistence.cleanupClusterConfigFiles();
    }
}
public class JInternalDialog { /** * Scans up the interface hierarchy looking for the { @ link * JInternalDialog } that contains the supplied child component and * dismisses it . */ public static void dismissDialog ( Component child ) { } }
if ( child == null ) { return ; } else if ( child instanceof JInternalDialog ) { ( ( JInternalDialog ) child ) . dismissDialog ( ) ; } else { dismissDialog ( child . getParent ( ) ) ; }
public class RSS090Parser { /** * Parses the root element of an RSS document looking for image information . * It reads title and url out of the ' image ' element . * @ param rssRoot the root element of the RSS document to parse for image information . * @ return the parsed image bean . */ protected Image parseImage ( final Element rssRoot ) { } }
Image image = null ; final Element eImage = getImage ( rssRoot ) ; if ( eImage != null ) { image = new Image ( ) ; final Element title = eImage . getChild ( "title" , getRSSNamespace ( ) ) ; if ( title != null ) { image . setTitle ( title . getText ( ) ) ; } final Element url = eImage . getChild ( "url" , getRSSNamespace ( ) ) ; if ( url != null ) { image . setUrl ( url . getText ( ) ) ; } final Element link = eImage . getChild ( "link" , getRSSNamespace ( ) ) ; if ( link != null ) { image . setLink ( link . getText ( ) ) ; } } return image ;
public class AbstractRenderer { /** * Computes a score by checking the value of the ' $ format ' parameter ( if present ) against a required media type . * @ param formatOption The option containing the ' $ format ' parameter . * @ param requiredMediaType The required media type . * @ return A score that indicates if the media type present in the ' $ format ' parameter * matches the required media type . */ protected int scoreByFormat ( Option < FormatOption > formatOption , MediaType requiredMediaType ) { } }
if ( ! formatOption . isDefined ( ) ) { return DEFAULT_SCORE ; } if ( formatOption . get ( ) . mediaType ( ) . matches ( requiredMediaType ) ) { return MAXIMUM_FORMAT_SCORE ; } return DEFAULT_SCORE ;
public class AWSDatabaseMigrationServiceClient {

    /**
     * Deletes the specified certificate.
     *
     * NOTE: generated SDK operation wrapper; the pre-execution hook may replace
     * the request object, so its return value must be used.
     *
     * @param request the DeleteCertificate request
     * @return Result of the DeleteCertificate operation returned by the service.
     * @throws ResourceNotFoundException
     *         The resource could not be found.
     * @throws InvalidResourceStateException
     *         The resource is in a state that prevents it from being used for database migration.
     * @sample AWSDatabaseMigrationService.DeleteCertificate
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/DeleteCertificate" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public DeleteCertificateResult deleteCertificate(DeleteCertificateRequest request) {
        request = beforeClientExecution(request);
        return executeDeleteCertificate(request);
    }
}
public class TimeUtils {

    /**
     * Generate a fancy timestamp based on unix epoch time that is more user friendly than just
     * a raw output by collapsing the time into manageable formats based on how much time has
     * elapsed since epoch.
     *
     * @param epoch the time in unix epoch
     * @return the fancy timestamp
     */
    public static String fancyTimestamp(long epoch) {
        // Anything less than a minute old collapses to a fixed label.
        if (System.currentTimeMillis() - epoch < 60000) {
            return "Just now";
        }
        Calendar now = Calendar.getInstance();
        Calendar then = Calendar.getInstance();
        then.setTimeInMillis(epoch);
        // Pick the shortest pattern that is still unambiguous relative to today.
        final String pattern;
        if (then.get(YEAR) != now.get(YEAR)) {
            pattern = "M/d/yy";
        } else if (then.get(MONTH) != now.get(MONTH)) {
            pattern = "EEE, MMM d, h:mm a";
        } else if (then.get(DAY_OF_MONTH) != now.get(DAY_OF_MONTH)) {
            pattern = "EEE, h:mm a";
        } else {
            pattern = "h:mm a";
        }
        return new SimpleDateFormat(pattern).format(then.getTime());
    }
}
public class Router { /** * This should be called by the host Activity when its onBackPressed method is called . The call will be forwarded * to its top { @ link Controller } . If that controller doesn ' t handle it , then it will be popped . * @ return Whether or not a back action was handled by the Router */ @ UiThread public boolean handleBack ( ) { } }
ThreadUtils . ensureMainThread ( ) ; if ( ! backstack . isEmpty ( ) ) { // noinspection ConstantConditions if ( backstack . peek ( ) . controller . handleBack ( ) ) { return true ; } else if ( popCurrentController ( ) ) { return true ; } } return false ;
public class MutableDataPoint {

    /**
     * Resets with a new pair of a timestamp and a double value.
     *
     * @param timestamp A timestamp.
     * @param value A double value.
     */
    public void reset(final long timestamp, final double value) {
        this.timestamp = timestamp;
        this.is_integer = false;
        // Store the double's raw bit pattern in the shared long-typed value slot;
        // is_integer == false tells readers to decode it back via longBitsToDouble.
        this.value = Double.doubleToRawLongBits(value);
    }
}
public class Requests {

    /**
     * Retrieve a request by doc id, masterRequestId, all matching requests, or an aggregated request breakdown.
     *
     * Routing by path segment one:
     *   "tops"/"breakdown"/"paths"/"insights" -> aggregate views;
     *   any other segment -> a single request (by masterRequestId when the
     *   "master" filter is set, else by numeric request id);
     *   no segment -> a request list (or a single request when "ownerId" is set).
     */
    @Override
    @Path("/{requestId}")
    @ApiOperation(value = "Retrieve a request or a page of requests according to specified filters",
        notes = "If requestId is not present, returns all matching requests.",
        response = Request.class, responseContainer = "List")
    public JSONObject get(String path, Map<String, String> headers) throws ServiceException, JSONException {
        RequestServices requestServices = ServiceLocator.getRequestServices();
        try {
            Query query = getQuery(path, headers);
            String segOne = getSegment(path, 1);
            if (segOne != null) {
                if (segOne.equals("tops")) {
                    return getTops(query).getJson();
                } else if (segOne.equals("breakdown")) {
                    return getBreakdown(query).getJson();
                } else if (segOne.equals("paths")) {
                    return getPaths(query).getJson();
                } else if (segOne.equals("insights")) {
                    JsonList<Insight> jsonList = getInsights(query);
                    JSONObject json = jsonList.getJson();
                    String trend = query.getFilter("trend");
                    // Optionally attach a completion-time trend series.
                    if ("completionTime".equals(trend)) {
                        List<Timepoint> timepoints = requestServices.getRequestTrend(query);
                        json.put("trend", new JsonList<>(timepoints, "trend").getJson().getJSONArray("trend"));
                    }
                    return json;
                } else {
                    // Segment is an id: either a masterRequestId (string) or a
                    // numeric request id, selected by the "master" filter.
                    try {
                        if (query.getBooleanFilter("master")) {
                            String masterRequestId = segOne;
                            if (query.getBooleanFilter("response")) {
                                Request masterRequest = requestServices.getMasterRequestResponse(masterRequestId);
                                if (masterRequest == null)
                                    throw new ServiceException(ServiceException.NOT_FOUND, "Master request not found: " + masterRequestId);
                                return masterRequest.getJson();
                            } else {
                                Request masterRequest = requestServices.getMasterRequest(masterRequestId);
                                if (masterRequest == null)
                                    throw new ServiceException(ServiceException.NOT_FOUND, "Master request not found: " + masterRequestId);
                                return masterRequest.getJson();
                            }
                        } else {
                            Long requestId = Long.valueOf(segOne);
                            // "request" + "response" filters select how much payload to load.
                            if (query.getBooleanFilter("request") && query.getBooleanFilter("response")) {
                                Request request = requestServices.getRequestAndResponse(requestId);
                                if (request == null)
                                    throw new ServiceException(ServiceException.NOT_FOUND, "Request not found: " + requestId);
                                return request.getJson();
                            } else if (query.getBooleanFilter("response")) {
                                Request request = requestServices.getRequestResponse(requestId);
                                if (request == null)
                                    throw new ServiceException(ServiceException.NOT_FOUND, "Request not found: " + requestId);
                                return request.getJson();
                            } else {
                                Request request = requestServices.getRequest(requestId);
                                if (request == null)
                                    throw new ServiceException(ServiceException.NOT_FOUND, "Request not found: " + requestId);
                                return request.getJson();
                            }
                        }
                    } catch (NumberFormatException ex) {
                        // Non-numeric segment without the "master" filter.
                        throw new ServiceException(ServiceException.BAD_REQUEST, "Bad requestId: " + segOne);
                    }
                }
            } else {
                // No id segment: query-driven listing, or a single lookup by ownerId.
                if (query.getLongFilter("ownerId") >= 0L) {
                    RequestList reqList = requestServices.getRequests(query);
                    if (!reqList.getItems().isEmpty())
                        return requestServices.getRequestAndResponse(reqList.getItems().get(0).getId()).getJson();
                    else
                        throw new ServiceException(ServiceException.NOT_FOUND, "Request not found for ownerId: " + query.getLongFilter("ownerId"));
                } else
                    return requestServices.getRequests(query).getJson();
            }
        } catch (ServiceException ex) {
            throw ex;
        } catch (Exception ex) {
            // Wrap any unexpected failure, preserving the cause.
            throw new ServiceException(ServiceException.INTERNAL_ERROR, ex.getMessage(), ex);
        }
    }
}