signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class MmtfUtils { /** * Get a list of length N * 16 of a list of Matrix4d * N . * @ param ncsOperators the { @ link Matrix4d } list * @ return the list of length N * 16 of the list of matrices */ public static double [ ] [ ] getNcsAsArray ( Matrix4d [ ] ncsOperators ) { } }
if ( ncsOperators == null ) { return new double [ 0 ] [ 0 ] ; } double [ ] [ ] outList = new double [ ncsOperators . length ] [ 16 ] ; for ( int i = 0 ; i < ncsOperators . length ; i ++ ) { outList [ i ] = convertToDoubleArray ( ncsOperators [ i ] ) ; } return outList ;
public class PatchOperationTarget { /** * Create a host target . * @ param hostName the host name * @ param client the connected controller client to the master host . * @ return the remote target */ public static final PatchOperationTarget createHost ( final String hostName , final ModelControllerClient client ) { } }
final PathElement host = PathElement . pathElement ( HOST , hostName ) ; final PathAddress address = PathAddress . EMPTY_ADDRESS . append ( host , CORE_SERVICES ) ; return new RemotePatchOperationTarget ( address , client ) ;
public class BaseNeo4jEntityQueries { /** * Example : CREATE ( n : ENTITY : table { props } ) RETURN n */ private static String initCreateEntityWithPropertiesQuery ( EntityKeyMetadata entityKeyMetadata ) { } }
StringBuilder queryBuilder = new StringBuilder ( "CREATE " ) ; queryBuilder . append ( "(n:" ) ; queryBuilder . append ( ENTITY ) ; queryBuilder . append ( ":" ) ; appendLabel ( entityKeyMetadata , queryBuilder ) ; // We should not pass a map as parameter as Neo4j cannot cache the query plan for it queryBuilder . append ( " {props})" ) ; queryBuilder . append ( " RETURN n" ) ; return queryBuilder . toString ( ) ;
public class ObjectMappableProcessor { /** * Generates the ContentValues Builder Class * @ param clazz The class you want to create a builder for * @ param className The classname @ return The Builder class */ private TypeSpec generateContentValuesBuilderClass ( ObjectMappableAnnotatedClass clazz , String mapperClassName , String className ) { } }
String cvVarName = "contentValues" ; MethodSpec constructor = MethodSpec . constructorBuilder ( ) . addModifiers ( Modifier . PRIVATE ) . addStatement ( "$L = new $T()" , cvVarName , ClassName . get ( ContentValues . class ) ) . build ( ) ; TypeSpec . Builder builder = TypeSpec . classBuilder ( className ) . addJavadoc ( "Builder class to generate type sage {@link $T } . At the end you have to call {@link #build()}\n" , TypeName . get ( ContentValues . class ) ) . addModifiers ( Modifier . PUBLIC , Modifier . STATIC ) . addField ( ContentValues . class , cvVarName , Modifier . PRIVATE ) . addMethod ( constructor ) . addMethod ( MethodSpec . methodBuilder ( "build" ) . addJavadoc ( "Creates and returns a $T from the builder\n" , TypeName . get ( ContentValues . class ) ) . addJavadoc ( "@return $T" , TypeName . get ( ContentValues . class ) ) . addModifiers ( Modifier . PUBLIC ) . addStatement ( "return $L" , cvVarName ) . returns ( ContentValues . class ) . build ( ) ) ; String packageName = getPackageName ( clazz ) ; for ( ColumnAnnotateable e : clazz . getColumnAnnotatedElements ( ) ) { e . generateContentValuesBuilderMethod ( builder , ClassName . get ( packageName , mapperClassName , className ) , cvVarName ) ; } return builder . build ( ) ;
public class InternalQueries { /** * Coverts list of objects to { @ code List < String > } . * @ param args list of objects that will be converted to list of strings . * @ return non - null , unmodifiable list of strings . */ @ NonNull public static List < String > unmodifiableNonNullListOfStrings ( @ Nullable List < ? > args ) { } }
if ( args == null || args . isEmpty ( ) ) { return emptyList ( ) ; } else { final List < String > list = new ArrayList < String > ( args . size ( ) ) ; for ( Object arg : args ) { list . add ( arg != null ? arg . toString ( ) : "null" ) ; } return unmodifiableList ( list ) ; }
public class Remove_First { /** * remove _ first ( input , string ) * remove the first occurrences of a substring */ @ Override public Object apply ( Object value , Object ... params ) { } }
String original = super . asString ( value ) ; Object needle = super . get ( 0 , params ) ; if ( needle == null ) { throw new RuntimeException ( "invalid pattern: " + needle ) ; } return original . replaceFirst ( Pattern . quote ( String . valueOf ( needle ) ) , "" ) ;
public class MBeanRegistry { /** * Builds an MBean path and creates an ObjectName instance using the path . * @ param path MBean path * @ param bean the MBean instance * @ return ObjectName to be registered with the platform MBean server */ protected ObjectName makeObjectName ( String path , ZKMBeanInfo bean ) throws MalformedObjectNameException { } }
if ( path == null ) return null ; StringBuilder beanName = new StringBuilder ( CommonNames . DOMAIN + ":" ) ; int counter = 0 ; counter = tokenize ( beanName , path , counter ) ; tokenize ( beanName , bean . getName ( ) , counter ) ; beanName . deleteCharAt ( beanName . length ( ) - 1 ) ; try { return new ObjectName ( beanName . toString ( ) ) ; } catch ( MalformedObjectNameException e ) { LOG . warn ( "Invalid name \"" + beanName . toString ( ) + "\" for class " + bean . getClass ( ) . toString ( ) ) ; throw e ; }
public class PageSQLInterceptor { /** * 执行总条数查询并设置值 * @ param connection * @ param mappedStatement * @ param boundSql */ private void setPageTotalCount ( Connection connection , MappedStatement mappedStatement , BoundSql boundSql ) { } }
PreparedStatement preparedStatement = null ; ResultSet resultSet = null ; try { preparedStatement = connection . prepareStatement ( createCountSql ( boundSql . getSql ( ) ) ) ; ParameterHandler parameterHandler = new DefaultParameterHandler ( mappedStatement , boundSql . getParameterObject ( ) , boundSql ) ; parameterHandler . setParameters ( preparedStatement ) ; resultSet = preparedStatement . executeQuery ( ) ; if ( resultSet . next ( ) ) { ( ( PageEntity ) boundSql . getParameterObject ( ) ) . setTotalCount ( resultSet . getLong ( 1 ) ) ; } if ( resultSet != null ) resultSet . close ( ) ; if ( preparedStatement != null ) preparedStatement . close ( ) ; } catch ( SQLException e ) { e . printStackTrace ( ) ; }
public class CmsToolbar { /** * Sets the toolbar title label . < p > * @ param title the title */ public void setAppTitle ( String title ) { } }
if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( title ) ) { if ( m_titleLabel != null ) { m_titleLabel . removeFromParent ( ) ; m_titleLabel = null ; } } else { if ( m_titleLabel == null ) { m_titleLabel = new Label ( ) ; m_titleLabel . setStyleName ( I_CmsLayoutBundle . INSTANCE . toolbarCss ( ) . title ( ) ) ; m_buttonPanelLeft . insert ( m_titleLabel , 0 ) ; } m_titleLabel . setText ( title ) ; }
public class FinalizeMigrationOperation { /** * Sets all replica versions to { @ code 0 } up to the { @ code replicaIndex } . */ private long [ ] updatePartitionReplicaVersions ( PartitionReplicaManager replicaManager , int partitionId , ServiceNamespace namespace , int replicaIndex ) { } }
long [ ] versions = replicaManager . getPartitionReplicaVersions ( partitionId , namespace ) ; // No need to set versions back right now . actual version array is modified directly . Arrays . fill ( versions , 0 , replicaIndex , 0 ) ; return versions ;
public class DefaultApplicationRouter { /** * load the configuration file as defined in appendix C of JSR289 */ public void init ( ) { } }
defaultApplicationRouterParser . init ( ) ; try { defaultSipApplicationRouterInfos = defaultApplicationRouterParser . parse ( ) ; } catch ( ParseException e ) { log . fatal ( "Impossible to parse the default application router configuration file" , e ) ; throw new IllegalArgumentException ( "Impossible to parse the default application router configuration file" , e ) ; }
public class ContextServiceImpl {

    /**
     * Ignore, warn, or fail when a configuration error occurs.
     * This is copied from Tim's code in tWAS and updated slightly to
     * override with the Liberty ignore/warn/fail setting.
     *
     * Precondition: invoker must have lock on this context service, in order
     * to read the onError property.
     *
     * @param throwable an already created Throwable object, which can be used if the desired action is fail
     * @param exceptionClassToRaise the class of the Throwable object to return
     * @param msgKey the NLS message key
     * @param objs list of objects to substitute in the NLS message
     * @return either null or the Throwable object
     */
    private <T extends Throwable> T ignoreWarnOrFail(Throwable throwable, final Class<T> exceptionClassToRaise, String msgKey, Object... objs) {
        // Read the value each time in order to allow for changes to the onError setting
        switch ((OnError) properties.get(OnErrorUtil.CFG_KEY_ON_ERROR)) {
            case IGNORE:
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(this, tc, "ignoring error: " + msgKey, objs);
                return null;
            case WARN:
                Tr.warning(tc, msgKey, objs);
                return null;
            case FAIL:
                try {
                    // Reuse the supplied throwable directly when it is already the requested type.
                    if (throwable != null && exceptionClassToRaise.isInstance(throwable))
                        return exceptionClassToRaise.cast(throwable);

                    // Otherwise build a new instance via the (String) constructor,
                    // looked up inside a privileged action.
                    Constructor<T> con = AccessController.doPrivileged(new PrivilegedExceptionAction<Constructor<T>>() {
                        @Override
                        public Constructor<T> run() throws NoSuchMethodException {
                            return exceptionClassToRaise.getConstructor(String.class);
                        }
                    });
                    // NOTE(review): if msgKey is null AND throwable is null this
                    // line NPEs — presumably callers always supply at least one; confirm.
                    String message = msgKey == null ? throwable.getMessage() : Tr.formatMessage(tc, msgKey, objs);
                    T failure = con.newInstance(message);
                    failure.initCause(throwable);
                    return failure;
                } catch (PrivilegedActionException e) {
                    throw new RuntimeException(e.getCause());
                } catch (RuntimeException e) {
                    throw e;
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
        }
        return null;
    }
}
public class CommerceShippingFixedOptionLocalServiceBaseImpl { /** * Creates a new commerce shipping fixed option with the primary key . Does not add the commerce shipping fixed option to the database . * @ param commerceShippingFixedOptionId the primary key for the new commerce shipping fixed option * @ return the new commerce shipping fixed option */ @ Override @ Transactional ( enabled = false ) public CommerceShippingFixedOption createCommerceShippingFixedOption ( long commerceShippingFixedOptionId ) { } }
return commerceShippingFixedOptionPersistence . create ( commerceShippingFixedOptionId ) ;
public class Node { /** * syck _ map _ empty */ public void mapEmpty ( ) { } }
Data . Map m = ( Data . Map ) data ; m . idx = 0 ; m . capa = YAML . ALLOC_CT ; m . keys = new Object [ m . capa ] ; m . values = new Object [ m . capa ] ;
public class StringUtils { /** * Turn the given Object into a String with single quotes if it is a String ; * keeping the Object as - is else . * @ param obj * the input Object ( e . g . " myString " ) * @ return the quoted String ( e . g . " ' myString ' " ) , or the input object as - is * if not a String */ public static Object quoteIfString ( Object obj ) { } }
return obj instanceof String ? quote ( ( String ) obj ) : obj ;
public class BeginDownloadResult { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */ public boolean isSet ( _Fields field ) { } }
if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case VERSION : return is_set_version ( ) ; case SESSION : return is_set_session ( ) ; case DATA_SIZE : return is_set_data_size ( ) ; } throw new IllegalStateException ( ) ;
public class ComputerSet { /** * { @ code getTotalExecutors ( ) - getBusyExecutors ( ) } , plus executors that are being brought online . */ public int getIdleExecutors ( ) { } }
int r = 0 ; for ( Computer c : get_all ( ) ) if ( ( c . isOnline ( ) || c . isConnecting ( ) ) && c . isAcceptingTasks ( ) ) r += c . countIdle ( ) ; return r ;
public class ReflectionUtils { /** * < p > getDeclaredFieldValue . < / p > * @ param object a { @ link java . lang . Object } object . * @ param declaredFieldName a { @ link java . lang . String } object . * @ return a { @ link java . lang . Object } object . * @ throws java . lang . Exception if any . */ public static Object getDeclaredFieldValue ( Object object , String declaredFieldName ) throws Exception { } }
Field field = object . getClass ( ) . getDeclaredField ( declaredFieldName ) ; field . setAccessible ( true ) ; return field . get ( object ) ;
public class InvokerTask { /** * Utility method that serializes a task result , or the failure that occurred when attempting * to serialize the task result . * @ param result non - null task result * @ param loader class loader that can deserialize the task and result . * @ return serialized bytes */ @ FFDCIgnore ( Throwable . class ) @ Sensitive private byte [ ] serializeResult ( Object result , ClassLoader loader ) throws IOException { } }
try { return persistentExecutor . serialize ( result ) ; } catch ( Throwable x ) { return persistentExecutor . serialize ( new TaskFailure ( x , loader , persistentExecutor , TaskFailure . NONSER_RESULT , result . getClass ( ) . getName ( ) ) ) ; }
public class VisualizationTree {

    /**
     * Process new result combinations of an object type1 (in first hierarchy)
     * having a child of type2 (in second hierarchy).
     *
     * This is a bit painful, because we have two hierarchies with different
     * types: results, and visualizations.
     *
     * @param context Context
     * @param start Starting point
     * @param type1 First type, in first hierarchy
     * @param type2 Second type, in second hierarchy
     * @param handler Handler invoked for every (result, visualization) pair found
     */
    public static <A extends Result, B extends VisualizationItem> void findNewResultVis(VisualizerContext context, Object start, Class<? super A> type1, Class<? super B> type2, BiConsumer<A, B> handler) {
        final Hierarchy<Object> hier = context.getVisHierarchy();
        // Search start in first hierarchy:
        if (start instanceof Result) {
            for (It<A> it1 = context.getHierarchy().iterDescendantsSelf((Result) start).filter(type1); it1.valid(); it1.advance()) {
                final A result = it1.get();
                // Find descendant results in result hierarchy:
                for (It<Result> it3 = context.getHierarchy().iterDescendantsSelf(result); it3.valid(); it3.advance()) {
                    // Find descendant in visualization hierarchy:
                    for (It<B> it2 = hier.iterDescendantsSelf(it3.get()).filter(type2); it2.valid(); it2.advance()) {
                        handler.accept(result, it2.get());
                    }
                }
            }
        }
        // Search start in second hierarchy:
        if (start instanceof VisualizationItem) {
            for (It<B> it2 = hier.iterDescendantsSelf(start).filter(type2); it2.valid(); it2.advance()) {
                final B vis = it2.get();
                // Find ancestor result in visualization hierarchy:
                for (It<Result> it3 = hier.iterAncestorsSelf(vis).filter(Result.class); it3.valid(); it3.advance()) {
                    // Find ancestor in result hierarchy:
                    for (It<A> it1 = context.getHierarchy().iterAncestorsSelf(it3.get()).filter(type1); it1.valid(); it1.advance()) {
                        handler.accept(it1.get(), vis);
                    }
                }
            }
        }
    }
}
public class SolverWorldConnection { /** * Sends a collection of updated Attribute values to the world model , or * buffers them to be sent later if the World Model is not connected . * @ param attributes * the Attribute values to send . * @ return { @ code true } if the solutions were able to be sent immediately , and * { @ code false } if one or more were unable to be sent or were * buffered for later transmission . * @ throws IllegalStateException * if this method is called once the world model connection has been * destroyed . */ public boolean updateAttributes ( Collection < Attribute > attributes ) throws IllegalStateException { } }
if ( this . terminated ) { throw new IllegalStateException ( "Cannot send solutions to the World Model once the connection has been destroyed." ) ; } if ( this . canSend ) { return this . wmi . updateAttributes ( attributes ) ; } for ( Attribute a : attributes ) { if ( ! this . attributeBuffer . offer ( a ) ) { return false ; } } return true ;
public class SFS {

    /**
     * Attempts to add one feature to the list of features while increasing or
     * maintaining the current accuracy.
     *
     * @param available the set of available features from [0, n) to consider for adding
     * @param dataSet the original data set to perform feature selection from
     * @param catToRemove the current set of categorical features to remove
     * @param numToRemove the current set of numerical features to remove
     * @param catSelecteed the current set of categorical features we are keeping
     * @param numSelected the current set of numerical features we are keeping
     * @param evaluater the classifier or regressor to perform evaluations with
     * @param folds the number of cross validation folds to determine performance
     * @param rand the source of randomness
     * @param PbestScore an array to behave as a pointer to the best score seen so far
     * @param minFeatures the minimum number of features needed
     * @return the feature that was selected to add, or -1 if none were added
     */
    static protected int SFSSelectFeature(Set<Integer> available, DataSet dataSet, Set<Integer> catToRemove, Set<Integer> numToRemove, Set<Integer> catSelecteed, Set<Integer> numSelected, Object evaluater, int folds, Random rand, double[] PbestScore, int minFeatures) {
        int nCat = dataSet.getNumCategoricalVars();
        int curBest = -1;
        double curBestScore = Double.POSITIVE_INFINITY;
        // Try each remaining candidate: temporarily mark it removed, score the
        // reduced data set, then restore it before trying the next one.
        for (int feature : available) {
            removeFeature(feature, nCat, catToRemove, numToRemove);
            DataSet workOn = dataSet.shallowClone();
            RemoveAttributeTransform remove = new RemoveAttributeTransform(workOn, catToRemove, numToRemove);
            workOn.applyTransform(remove);
            double score = getScore(workOn, evaluater, folds, rand);
            if (score < curBestScore) {
                // Lower score is better here.
                curBestScore = score;
                curBest = feature;
            }
            addFeature(feature, nCat, catToRemove, numToRemove);
        }
        // Already at (near) zero error with enough features: stop adding.
        if (curBestScore <= 1e-14 && PbestScore[0] <= 1e-14 && catSelecteed.size() + numSelected.size() >= minFeatures)
            return -1;
        // Accept the candidate if it improves the best score, if we still need
        // more features, or if it is within tolerance of the best score.
        if (curBestScore < PbestScore[0] || catSelecteed.size() + numSelected.size() < minFeatures || Math.abs(PbestScore[0] - curBestScore) < 1e-3) {
            PbestScore[0] = curBestScore;
            addFeature(curBest, nCat, catSelecteed, numSelected);
            removeFeature(curBest, nCat, catToRemove, numToRemove);
            available.remove(curBest);
            return curBest;
        } else
            return -1; // No possible improvement & we've got enough features
    }
}
public class OkHttpClientFactory { /** * Creates an OkHttpClient optionally enabling TLS * @ param enableTLS Whether TLS should be enabled * @ return a valid OkHttpClient */ @ NonNull private static OkHttpClient getNewOkHttpClient ( boolean enableTLS ) { } }
OkHttpClient . Builder client = new OkHttpClient . Builder ( ) . connectTimeout ( DEFAULT_CONNECT_TIMEOUT , TimeUnit . SECONDS ) . readTimeout ( DEFAULT_READ_TIMEOUT , TimeUnit . SECONDS ) ; if ( enableTLS ) { client = enableTls12 ( client ) ; } return client . build ( ) ;
public class UnicodeCompressor {

    /**
     * Compress a Unicode character array into a byte array.
     * This function will only consume input that can be completely output.
     *
     * @param charBuffer The character buffer to compress.
     * @param charBufferStart The start of the character run to compress.
     * @param charBufferLimit The limit of the character run to compress.
     * @param charsRead A one-element array. If not null, on return
     *        the number of characters read from charBuffer.
     * @param byteBuffer A buffer to receive the compressed data. This
     *        buffer must be at minimum four bytes in size.
     * @param byteBufferStart The starting offset to which to write compressed data.
     * @param byteBufferLimit The limiting offset for writing compressed data.
     * @return The number of bytes written to byteBuffer.
     */
    public int compress(char[] charBuffer, int charBufferStart, int charBufferLimit, int[] charsRead, byte[] byteBuffer, int byteBufferStart, int byteBufferLimit) {
        // the current position in the target byte buffer
        int bytePos = byteBufferStart;
        // the current position in the source unicode character buffer
        int ucPos = charBufferStart;
        // the current unicode character from the source buffer
        int curUC = INVALIDCHAR;
        // the index for the current character
        int curIndex = -1;
        // look ahead
        int nextUC = INVALIDCHAR;
        int forwardUC = INVALIDCHAR;
        // temporary for window searching
        int whichWindow = 0;
        // high and low bytes of the current unicode character
        int hiByte = 0;
        int loByte = 0;

        // byteBuffer must be at least 4 bytes in size
        if (byteBuffer.length < 4 || (byteBufferLimit - byteBufferStart) < 4)
            throw new IllegalArgumentException("byteBuffer.length < 4");

        mainLoop:
        while (ucPos < charBufferLimit && bytePos < byteBufferLimit) {
            switch (fMode) {
            // main single byte mode compression loop
            case SINGLEBYTEMODE:
                singleByteModeLoop:
                while (ucPos < charBufferLimit && bytePos < byteBufferLimit) {
                    // get current char
                    curUC = charBuffer[ucPos++];
                    // get next char
                    if (ucPos < charBufferLimit)
                        nextUC = charBuffer[ucPos];
                    else
                        nextUC = INVALIDCHAR;

                    // chars less than 0x0080 (excluding tags) go straight in stream
                    if (curUC < 0x0080) {
                        loByte = curUC & 0xFF;
                        // don't accidentally write a single byte mode tag to
                        // the stream unless it's quoted
                        if (sSingleTagTable[loByte]) {
                            // make sure there is enough room to write both bytes;
                            // if not, rewind the source stream and break out
                            if ((bytePos + 1) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            // since we know the byte is less than 0x80, SQUOTE0
                            // will use static window 0, or ASCII
                            byteBuffer[bytePos++] = (byte) SQUOTE0;
                        }
                        byteBuffer[bytePos++] = (byte) loByte;
                    }
                    // if the char belongs to the current window, convert it to a
                    // byte by adding the generic compression offset and
                    // subtracting the window's offset
                    else if (inDynamicWindow(curUC, fCurrentWindow)) {
                        byteBuffer[bytePos++] = (byte) (curUC - fOffsets[fCurrentWindow] + COMPRESSIONOFFSET);
                    }
                    // if char is not in compressible range, either switch to or
                    // quote from unicode
                    else if (!isCompressible(curUC)) {
                        // only check next character if it is valid
                        if (nextUC != INVALIDCHAR && isCompressible(nextUC)) {
                            // need room for all three bytes; otherwise rewind and break out
                            if ((bytePos + 2) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            byteBuffer[bytePos++] = (byte) SQUOTEU;
                            byteBuffer[bytePos++] = (byte) (curUC >>> 8);
                            byteBuffer[bytePos++] = (byte) (curUC & 0xFF);
                        } else {
                            // need room for all four bytes; otherwise rewind and break out
                            if ((bytePos + 3) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            byteBuffer[bytePos++] = (byte) SCHANGEU;
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if (sUnicodeTagTable[hiByte])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                            fMode = UNICODEMODE;
                            break singleByteModeLoop;
                        }
                    }
                    // if the char is in a currently defined dynamic window,
                    // figure out which one, and either switch to it or quote from it
                    else if ((whichWindow = findDynamicWindow(curUC)) != INVALIDWINDOW) {
                        // look ahead
                        if ((ucPos + 1) < charBufferLimit)
                            forwardUC = charBuffer[ucPos + 1];
                        else
                            forwardUC = INVALIDCHAR;

                        // all three chars in same window: switch to that window
                        // (inDynamicWindow returns false for INVALIDCHAR)
                        if (inDynamicWindow(nextUC, whichWindow) && inDynamicWindow(forwardUC, whichWindow)) {
                            // need room for both bytes; otherwise rewind and break out
                            if ((bytePos + 1) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            byteBuffer[bytePos++] = (byte) (SCHANGE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) (curUC - fOffsets[whichWindow] + COMPRESSIONOFFSET);
                            fTimeStamps[whichWindow] = ++fTimeStamp;
                            fCurrentWindow = whichWindow;
                        }
                        // either only next char or neither in same window, so quote
                        else {
                            // need room for both bytes; otherwise rewind and break out
                            if ((bytePos + 1) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            byteBuffer[bytePos++] = (byte) (SQUOTE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) (curUC - fOffsets[whichWindow] + COMPRESSIONOFFSET);
                        }
                    }
                    // if a static window is defined, and the following character
                    // is not in that static window, quote from the static window.
                    // Note: to quote from a static window, don't add 0x80
                    else if ((whichWindow = findStaticWindow(curUC)) != INVALIDWINDOW && !inStaticWindow(nextUC, whichWindow)) {
                        // need room for both bytes; otherwise rewind and break out
                        if ((bytePos + 1) >= byteBufferLimit) {
                            --ucPos;
                            break mainLoop;
                        }
                        byteBuffer[bytePos++] = (byte) (SQUOTE0 + whichWindow);
                        byteBuffer[bytePos++] = (byte) (curUC - sOffsets[whichWindow]);
                    }
                    // if a window is not defined, decide if we want to define a
                    // new one or switch to unicode mode
                    else {
                        // determine index for current char (char is compressible)
                        curIndex = makeIndex(curUC);
                        fIndexCount[curIndex]++;
                        // look ahead
                        if ((ucPos + 1) < charBufferLimit)
                            forwardUC = charBuffer[ucPos + 1];
                        else
                            forwardUC = INVALIDCHAR;

                        // if we have encountered this index at least once before,
                        // OR three chars in a row share the index, define a new
                        // window (makeIndex returns RESERVEDINDEX for INVALIDCHAR)
                        if ((fIndexCount[curIndex] > 1) || (curIndex == makeIndex(nextUC) && curIndex == makeIndex(forwardUC))) {
                            // need room for all three bytes; otherwise rewind and break out
                            if ((bytePos + 2) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            // get least recently defined window
                            whichWindow = getLRDefinedWindow();
                            byteBuffer[bytePos++] = (byte) (SDEFINE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) curIndex;
                            byteBuffer[bytePos++] = (byte) (curUC - sOffsetTable[curIndex] + COMPRESSIONOFFSET);
                            fOffsets[whichWindow] = sOffsetTable[curIndex];
                            fCurrentWindow = whichWindow;
                            fTimeStamps[whichWindow] = ++fTimeStamp;
                        }
                        // only two chars in a row with same index, or three chars
                        // with different indices: switch to unicode mode
                        else {
                            // need room for all four bytes; otherwise rewind and break out
                            if ((bytePos + 3) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            byteBuffer[bytePos++] = (byte) SCHANGEU;
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if (sUnicodeTagTable[hiByte])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                            fMode = UNICODEMODE;
                            break singleByteModeLoop;
                        }
                    }
                }
                break;

            case UNICODEMODE:
                // main unicode mode compression loop
                unicodeModeLoop:
                while (ucPos < charBufferLimit && bytePos < byteBufferLimit) {
                    // get current char
                    curUC = charBuffer[ucPos++];
                    // get next char
                    if (ucPos < charBufferLimit)
                        nextUC = charBuffer[ucPos];
                    else
                        nextUC = INVALIDCHAR;

                    // two uncompressible chars in a row: put the current
                    // char's bytes in the stream
                    if (!isCompressible(curUC) || (nextUC != INVALIDCHAR && !isCompressible(nextUC))) {
                        // need room for all three bytes; otherwise rewind and break out
                        if ((bytePos + 2) >= byteBufferLimit) {
                            --ucPos;
                            break mainLoop;
                        }
                        hiByte = curUC >>> 8;
                        loByte = curUC & 0xFF;
                        if (sUnicodeTagTable[hiByte])
                            // add quote Unicode tag
                            byteBuffer[bytePos++] = (byte) UQUOTEU;
                        byteBuffer[bytePos++] = (byte) hiByte;
                        byteBuffer[bytePos++] = (byte) loByte;
                    }
                    // bytes less than 0x80 can go straight in the stream,
                    // but in single-byte mode
                    else if (curUC < 0x0080) {
                        loByte = curUC & 0xFF;
                        // two chars in a row below 0x80 and the current char is
                        // not a single-byte mode tag: switch to single-byte mode
                        if (nextUC != INVALIDCHAR && nextUC < 0x0080 && !sSingleTagTable[loByte]) {
                            // need room for both bytes; otherwise rewind and break out
                            if ((bytePos + 1) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            // use the last-active window
                            whichWindow = fCurrentWindow;
                            byteBuffer[bytePos++] = (byte) (UCHANGE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) loByte;
                            // fCurrentWindow = 0;
                            fTimeStamps[whichWindow] = ++fTimeStamp;
                            fMode = SINGLEBYTEMODE;
                            break unicodeModeLoop;
                        }
                        // otherwise just write the bytes to the stream (covers a
                        // lone char below 0x80 and single-byte mode tags)
                        else {
                            // need room for both bytes; otherwise rewind and break out
                            if ((bytePos + 1) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            // character is below 0x80, so the high byte is
                            // always 0x00 - no need for (curUC >>> 8)
                            byteBuffer[bytePos++] = (byte) 0x00;
                            byteBuffer[bytePos++] = (byte) loByte;
                        }
                    }
                    // figure out if the current char is in a defined window
                    else if ((whichWindow = findDynamicWindow(curUC)) != INVALIDWINDOW) {
                        // two chars in a row in the same window: switch to that
                        // window and go to single-byte mode
                        // (inDynamicWindow returns false for INVALIDCHAR)
                        if (inDynamicWindow(nextUC, whichWindow)) {
                            // need room for both bytes; otherwise rewind and break out
                            if ((bytePos + 1) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            byteBuffer[bytePos++] = (byte) (UCHANGE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) (curUC - fOffsets[whichWindow] + COMPRESSIONOFFSET);
                            fTimeStamps[whichWindow] = ++fTimeStamp;
                            fCurrentWindow = whichWindow;
                            fMode = SINGLEBYTEMODE;
                            break unicodeModeLoop;
                        }
                        // otherwise just quote the unicode for the char
                        else {
                            // need room for all three bytes; otherwise rewind and break out
                            if ((bytePos + 2) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if (sUnicodeTagTable[hiByte])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                        }
                    }
                    // char is not in a defined window
                    else {
                        // determine index for current char (char is compressible)
                        curIndex = makeIndex(curUC);
                        fIndexCount[curIndex]++;
                        // look ahead
                        if ((ucPos + 1) < charBufferLimit)
                            forwardUC = charBuffer[ucPos + 1];
                        else
                            forwardUC = INVALIDCHAR;

                        // seen this index before, OR three chars in a row with
                        // the same index: define a new window
                        // (makeIndex returns RESERVEDINDEX for INVALIDCHAR)
                        if ((fIndexCount[curIndex] > 1) || (curIndex == makeIndex(nextUC) && curIndex == makeIndex(forwardUC))) {
                            // need room for all three bytes; otherwise rewind and break out
                            if ((bytePos + 2) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            // get least recently defined window
                            whichWindow = getLRDefinedWindow();
                            byteBuffer[bytePos++] = (byte) (UDEFINE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) curIndex;
                            byteBuffer[bytePos++] = (byte) (curUC - sOffsetTable[curIndex] + COMPRESSIONOFFSET);
                            fOffsets[whichWindow] = sOffsetTable[curIndex];
                            fCurrentWindow = whichWindow;
                            fTimeStamps[whichWindow] = ++fTimeStamp;
                            fMode = SINGLEBYTEMODE;
                            break unicodeModeLoop;
                        }
                        // otherwise just quote the unicode, and save our
                        // windows for longer runs
                        else {
                            // need room for all three bytes; otherwise rewind and break out
                            if ((bytePos + 2) >= byteBufferLimit) {
                                --ucPos;
                                break mainLoop;
                            }
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if (sUnicodeTagTable[hiByte])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                        }
                    }
                }
            } // end switch
        }

        // fill in output parameter
        if (charsRead != null)
            charsRead[0] = (ucPos - charBufferStart);

        // return # of bytes written
        return (bytePos - byteBufferStart);
    }
}
public class FieldDataScratchHandler {
    /**
     * Initializer for this listener (constructor-style init).
     *
     * @param field the BaseField owner of this listener (usually null and set on setOwner())
     * @param bChangeDataOnRefresh whether the saved data should be replaced when the field is refreshed
     */
    public void init(BaseField field, boolean bChangeDataOnRefresh) { } }
    // Chain to the base listener init, then reset this handler's state.
    super.init(field);
    // Start with no saved original data.
    m_objOriginalData = null;
    m_bAlwaysEnabled = false;
    m_bChangeDataOnRefresh = bChangeDataOnRefresh;
    // Do not respond to screen-move events.
    this.setRespondsToMode(DBConstants.SCREEN_MOVE, false);
public class VoltCompiler {
    /**
     * Compile an empty catalog jar containing only a placeholder DDL comment.
     *
     * @param jarOutputPath output jar path
     * @return true if successful, false if the DDL placeholder could not be written
     */
    public boolean compileEmptyCatalog(final String jarOutputPath) { } }
    // Use a special DDL reader to provide the contents.
    List<VoltCompilerReader> ddlReaderList = new ArrayList<>(1);
    ddlReaderList.add(new VoltCompilerStringReader("ddl.sql", m_emptyDDLComment));
    // Seed it with the DDL so that a version upgrade hack in compileInternalToFile()
    // doesn't try to get the DDL file from the path.
    InMemoryJarfile jarFile = new InMemoryJarfile();
    try {
        ddlReaderList.get(0).putInJar(jarFile, "ddl.sql");
    } catch (IOException e) {
        // Best-effort: log and report failure rather than propagating.
        compilerLog.error("Failed to add DDL file to empty in-memory jar.");
        return false;
    }
    return compileInternalToFile(jarOutputPath, null, null, ddlReaderList, jarFile);
public class Character {
    /**
     * Returns the index within the given char sequence that is offset from
     * {@code index} by {@code codePointOffset} code points. Unpaired surrogates
     * within the traversed range count as one code point each.
     *
     * @param seq the char sequence
     * @param index the index to be offset
     * @param codePointOffset the offset in code points (may be negative)
     * @return the index within the char sequence
     * @exception NullPointerException if {@code seq} is null
     * @exception IndexOutOfBoundsException if {@code index} is negative or larger
     *            than the length of the sequence, or if there are fewer than
     *            {@code |codePointOffset|} code points in the traversed direction
     * @since 1.5
     */
    public static int offsetByCodePoints(CharSequence seq, int index, int codePointOffset) { } }
    int length = seq.length();
    if (index < 0 || index > length) {
        throw new IndexOutOfBoundsException();
    }
    int x = index;
    if (codePointOffset >= 0) {
        // Walk forward: a high surrogate immediately followed by a low surrogate
        // advances two chars but counts as a single code point.
        int i;
        for (i = 0; x < length && i < codePointOffset; i++) {
            if (isHighSurrogate(seq.charAt(x++)) && x < length && isLowSurrogate(seq.charAt(x))) {
                x++;
            }
        }
        // Ran off the end before consuming the requested number of code points.
        if (i < codePointOffset) {
            throw new IndexOutOfBoundsException();
        }
    } else {
        // Walk backward: a low surrogate immediately preceded by a high surrogate
        // steps back two chars but counts as a single code point.
        int i;
        for (i = codePointOffset; x > 0 && i < 0; i++) {
            if (isLowSurrogate(seq.charAt(--x)) && x > 0 && isHighSurrogate(seq.charAt(x - 1))) {
                x--;
            }
        }
        // Hit the start before consuming |codePointOffset| code points.
        if (i < 0) {
            throw new IndexOutOfBoundsException();
        }
    }
    return x;
public class PdfStamperImp {
    /**
     * Sets the open or close page additional action.
     *
     * @param actionType the action type; must be <CODE>PdfWriter.PAGE_OPEN</CODE>
     *        or <CODE>PdfWriter.PAGE_CLOSE</CODE>
     * @param action the action to perform
     * @param page the page where the action will be applied; the first page is 1
     * @throws PdfException if the action type is invalid
     */
    void setPageAction(PdfName actionType, PdfAction action, int page) throws PdfException { } }
    // Reject anything other than the two supported page lifecycle hooks.
    if (!actionType.equals(PAGE_OPEN) && !actionType.equals(PAGE_CLOSE))
        throw new PdfException("Invalid page additional action type: " + actionType.toString());
    PdfDictionary pg = reader.getPageN(page);
    // Resolve the page's additional-actions (/AA) dictionary, creating it on demand.
    PdfDictionary aa = (PdfDictionary) PdfReader.getPdfObject(pg.get(PdfName.AA), pg);
    if (aa == null) {
        aa = new PdfDictionary();
        pg.put(PdfName.AA, aa);
        // Mark the page dirty so the stamper rewrites it.
        markUsed(pg);
    }
    aa.put(actionType, action);
    markUsed(aa);
public class GraphRunJobsDependencies { /** * warning : this will fail if the user has an escape path separator in a path */ private static Optional < File > findExecutableOnSystemPath ( final String executableName ) { } }
for ( final String pathPath : Splitter . on ( File . pathSeparator ) . split ( System . getenv ( "PATH" ) ) ) { final File probeFile = new File ( pathPath , executableName ) ; if ( probeFile . isFile ( ) && java . nio . file . Files . isExecutable ( probeFile . toPath ( ) ) ) { return Optional . of ( probeFile ) ; } } return Optional . absent ( ) ;
public class AbstractCandidateFactory { /** * Randomly , create an initial population of candidates . If some * control is required over the composition of the initial population , * consider the overloaded { @ link # generateInitialPopulation ( int , Collection , Random ) } * method . * @ param populationSize The number of candidates to randomly create . * @ param rng The random number generator to use when creating the random * candidates . * @ return A randomly generated initial population of candidate solutions . */ public List < T > generateInitialPopulation ( int populationSize , Random rng ) { } }
List < T > population = new ArrayList < T > ( populationSize ) ; for ( int i = 0 ; i < populationSize ; i ++ ) { population . add ( generateRandomCandidate ( rng ) ) ; } return Collections . unmodifiableList ( population ) ;
public class CommerceTierPriceEntryPersistenceImpl {
    /**
     * Returns a range of the commerce tier price entries where companyId = the
     * given value. {@code start}/{@code end} are result-set indexes (not primary
     * keys); pass {@link QueryUtil#ALL_POS} for both to get the full result set.
     *
     * @param companyId the company ID
     * @param start the lower bound of the range
     * @param end the upper bound of the range (not inclusive)
     * @return the range of matching commerce tier price entries
     */
    @Override
    public List<CommerceTierPriceEntry> findByCompanyId(long companyId, int start, int end) { } }
    // Delegate to the four-argument overload with no ordering comparator.
    return findByCompanyId(companyId, start, end, null);
public class Burst {
    /**
     * Explodes a list of argument values for invoking the specified method with
     * all combinations of its parameters.
     *
     * @param method the method whose parameter types are exploded; must not be null
     * @return all enum-value combinations, one array per invocation
     */
    public static Enum<?>[][] explodeArguments(Method method) { } }
    checkNotNull(method, "method");
    // The human-readable "Class.method method" name is used by explodeParameters
    // for its error messages.
    return explodeParameters(method.getParameterTypes(), method.getDeclaringClass().getName() + '.' + method.getName() + " method");
public class EclipseIndexWriter { /** * Logic for adding various end index entry elements for Eclipse help . * @ param term The indexterm to be processed . * @ param printWriter The Writer used for writing content to disk . * @ param indexsee Boolean value for using the new markup for see references . */ private void outputIndexTermEndElement ( final IndexTerm term , final XMLStreamWriter serializer , final boolean indexsee ) throws XMLStreamException { } }
if ( indexsee ) { if ( term . getTermPrefix ( ) != null ) { serializer . writeEndElement ( ) ; // see inIndexsee = false ; } else if ( inIndexsee ) { // NOOP } else { serializer . writeEndElement ( ) ; // entry } } else { serializer . writeEndElement ( ) ; // entry }
public class JCloudsReader { /** * { @ inheritDoc } */ @ Override public UberBucket readUber ( ) throws TTIOException { } }
try { final Blob blobRetrieved = mBlobStore . getBlob ( mResourceName , Long . toString ( - 1l ) ) ; final DataInputStream datain = new DataInputStream ( blobRetrieved . getPayload ( ) . getInput ( ) ) ; final long uberkey = datain . readLong ( ) ; final UberBucket bucket = ( UberBucket ) read ( uberkey ) ; datain . close ( ) ; return bucket ; } catch ( final IOException exc ) { throw new TTIOException ( exc ) ; }
public class FnNumber {
    /**
     * Returns a function that determines whether the target Number is null.
     *
     * @return a function yielding true if the target object is null, false otherwise
     */
    public static final Function<Number, Boolean> isNull() { } }
    // Delegate to FnObject.isNull(); the double cast narrows the generic function
    // to Function<Number, Boolean>. Presumably safe because the delegate only
    // null-checks its input and never inspects the value's type — confirm
    // against FnObject.isNull().
    return (Function<Number, Boolean>) ((Function) FnObject.isNull());
public class JtsLayer { /** * Validate the JtsLayer . * @ param name mvt layer name * @ param geometries geometries in the tile * @ throws IllegalArgumentException when { @ code name } or { @ code geometries } are null */ private static void validate ( String name , Collection < Geometry > geometries , int extent ) { } }
if ( name == null ) { throw new IllegalArgumentException ( "layer name is null" ) ; } if ( geometries == null ) { throw new IllegalArgumentException ( "geometry collection is null" ) ; } if ( extent <= 0 ) { throw new IllegalArgumentException ( "extent is less than or equal to 0" ) ; }
public class FastProtocolRegister {
    /**
     * Registers a FastProtocol method for a given id (name). Names must be
     * unique: re-registering the same method under the same name is a silent
     * no-op, but binding a name to a different method throws.
     *
     * @param id the protocol id whose string form must be exactly NAME_LEN characters
     * @param m the method to register; must not be null
     */
    public static void register(FastProtocolId id, Method m) { } }
    // Class-wide lock guards both registry maps against concurrent registration.
    synchronized (FastProtocolRegister.class) {
        String name = id.toString();
        if (name.length() != NAME_LEN) {
            // we only allow two-byte serialization codes
            throw new RuntimeException("Code must be " + NAME_LEN + " bytes long");
        }
        if (m == null) {
            throw new RuntimeException("Method cannot be null");
        }
        Method registeredMethod = idToMethod.get(name);
        if (registeredMethod != null && registeredMethod != m) {
            // A name may never be rebound to a different method.
            throw new RuntimeException("Trying to register different method with name: " + name);
        } else if (registeredMethod == m) {
            // Idempotent re-registration of the same method.
            return; // quietly
        }
        LOG.info("FastProtocol - Registering method: " + name + " method: " + m.getName());
        // Maintain both directions of the mapping.
        idToMethod.put(name, m);
        methodToId.put(m, name);
    }
public class KeywordEstimate {
    /**
     * Gets the min value for this KeywordEstimate.
     *
     * @return the lower bound on the estimated stats. This is not a guarantee
     *         that actual performance will never be lower than these stats.
     */
    public com.google.api.ads.adwords.axis.v201809.o.StatsEstimate getMin() { } }
    // Simple accessor for the backing field.
    return min;
public class ThreadSettingsApi {
    /**
     * Adds any payment settings needed for the Messenger thread.
     *
     * @param paymentSettings the payment settings object; a null value is
     *        logged and ignored (best-effort, no exception)
     * @see <a href="https://developers.facebook.com/docs/messenger-platform/thread-settings/payment">Payments settings</a>
     */
    public static void addPaymentSettings(PaymentSettings paymentSettings) { } }
    // Best-effort validation: log and bail out rather than throwing.
    if (paymentSettings == null) {
        logger.error("FbBotMill validation error: Payment Settings can't be null or empty!");
        return;
    }
    FbBotMillNetworkController.postThreadSetting(paymentSettings);
public class FpKit { /** * Concatenates two lists into one * @ param l1 the first list to concatenate * @ param l2 the second list to concatenate * @ param < T > the type of element of the lists * @ return a < strong > new < / strong > list composed of the two concatenated lists elements */ public static < T > List < T > concat ( List < T > l1 , List < T > l2 ) { } }
ArrayList < T > l = new ArrayList < > ( l1 ) ; l . addAll ( l2 ) ; l . trimToSize ( ) ; return l ;
public class Utils { /** * / * from https : / / github . com / apache / httpcomponents - client / commit / b58e7d46d75e1d3c42f5fd6db9bd45f32a49c639 # diff - a74b24f025e68ec11e4550b42e9f807d */ static String encodePath ( String content , Charset charset ) { } }
final StringBuilder buf = new StringBuilder ( ) ; final ByteBuffer bb = charset . encode ( content ) ; while ( bb . hasRemaining ( ) ) { final int b = bb . get ( ) & 0xff ; if ( PATHSAFE . get ( b ) ) { buf . append ( ( char ) b ) ; } else { buf . append ( "%" ) ; final char hex1 = Character . toUpperCase ( Character . forDigit ( ( b >> 4 ) & 0xF , RADIX ) ) ; final char hex2 = Character . toUpperCase ( Character . forDigit ( b & 0xF , RADIX ) ) ; buf . append ( hex1 ) ; buf . append ( hex2 ) ; } } return buf . toString ( ) ;
public class MMORGImpl {
    /**
     * EMF reflective feature accessor for this structured field.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) { } }
    // Generated dispatch: map each known feature id to its typed getter,
    // otherwise defer to the superclass.
    switch (featureID) {
        case AfplibPackage.MMORG__OV_LID:
            return getOVLid();
        case AfplibPackage.MMORG__FLAGS:
            return getFlags();
        case AfplibPackage.MMORG__OV_LNAME:
            return getOVLname();
    }
    return super.eGet(featureID, resolve, coreType);
public class PutEventsRequest { /** * The entry that defines an event in your system . You can specify several parameters for the entry such as the * source and type of the event , resources associated with the event , and so on . * @ param entries * The entry that defines an event in your system . You can specify several parameters for the entry such as * the source and type of the event , resources associated with the event , and so on . */ public void setEntries ( java . util . Collection < PutEventsRequestEntry > entries ) { } }
if ( entries == null ) { this . entries = null ; return ; } this . entries = new java . util . ArrayList < PutEventsRequestEntry > ( entries ) ;
public class ListAssociationVersionsResult {
    /**
     * Information about all versions of the association for the specified
     * association ID.
     * <b>NOTE:</b> This method appends the values to the existing list (if any).
     * Use {@link #setAssociationVersions(java.util.Collection)} or
     * {@link #withAssociationVersions(java.util.Collection)} to override the
     * existing values instead.
     *
     * @param associationVersions the versions to append
     * @return this object, so that method calls can be chained together
     */
    public ListAssociationVersionsResult withAssociationVersions(AssociationVersionInfo... associationVersions) { } }
    // Lazily create the backing list sized for the varargs, then append each value.
    if (this.associationVersions == null) {
        setAssociationVersions(new com.amazonaws.internal.SdkInternalList<AssociationVersionInfo>(associationVersions.length));
    }
    for (AssociationVersionInfo ele : associationVersions) {
        this.associationVersions.add(ele);
    }
    return this;
public class DocumentElement {
    /**
     * Setter for medianFontsize.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setMedianFontsize(double v) { } }
    // Generated UIMA guard: fail fast if the feature is missing from the type system.
    if (DocumentElement_Type.featOkTst && ((DocumentElement_Type) jcasType).casFeat_medianFontsize == null)
        jcasType.jcas.throwFeatMissing("medianFontsize", "ch.epfl.bbp.uima.types.DocumentElement");
    // Write the double directly into the CAS via the low-level API.
    jcasType.ll_cas.ll_setDoubleValue(addr, ((DocumentElement_Type) jcasType).casFeatCode_medianFontsize, v);
public class IndyMath { /** * Widens the operators . For math operations like a + b we generally * execute them using a conversion to certain types . If a for example * is an int and b a byte , we do the operation using integer math . This * method gives a simplified MethodType that contains the two operators * with this widening according to Groovy rules applied . That means both * parameters in the MethodType will have the same type . */ private static MethodType widenOperators ( MethodType mt ) { } }
if ( mt . parameterCount ( ) == 2 ) { Class leftType = mt . parameterType ( 0 ) ; Class rightType = mt . parameterType ( 1 ) ; if ( isIntCategory ( leftType ) && isIntCategory ( rightType ) ) return IIV ; if ( isLongCategory ( leftType ) && isLongCategory ( rightType ) ) return LLV ; if ( isBigDecCategory ( leftType ) && isBigDecCategory ( rightType ) ) return GGV ; if ( isDoubleCategory ( leftType ) && isDoubleCategory ( rightType ) ) return DDV ; return OOV ; } else if ( mt . parameterCount ( ) == 1 ) { Class leftType = mt . parameterType ( 0 ) ; if ( isIntCategory ( leftType ) ) return IV ; if ( isLongCategory ( leftType ) ) return LV ; if ( isBigDecCategory ( leftType ) ) return GV ; if ( isDoubleCategory ( leftType ) ) return DV ; } return mt ;
public class NettyUtils { /** * Read a socket address from a buffer . The socket address will be provided * as two strings containing host and port . * @ param buffer * The buffer containing the host and port as string . * @ return The InetSocketAddress object created from host and port or null * in case the strings are not there . */ public static InetSocketAddress readSocketAddress ( ChannelBuffer buffer ) { } }
String remoteHost = NettyUtils . readString ( buffer ) ; int remotePort = 0 ; if ( buffer . readableBytes ( ) >= 4 ) { remotePort = buffer . readInt ( ) ; } else { return null ; } InetSocketAddress remoteAddress = null ; if ( null != remoteHost ) { remoteAddress = new InetSocketAddress ( remoteHost , remotePort ) ; } return remoteAddress ;
public class AdaptiveTableManager {
    /**
     * Returns the column number whose horizontal bounds contain X.
     *
     * @param x coordinate in table space (including the header row offset)
     * @param shiftEveryStep extra width added after each column (inter-column shift)
     * @return the column index; clamped to 0 on/left of the header edge and to
     *         the last column when x lies beyond all columns
     */
    int getColumnByXWithShift(int x, int shiftEveryStep) { } }
    checkForInit();
    int sum = 0;
    // header offset: translate x into coordinates relative to the first column.
    int tempX = x - mHeaderRowWidth;
    if (tempX <= sum) {
        return 0;
    }
    // Accumulate column widths (plus the per-step shift) until the interval
    // containing tempX is found.
    for (int count = mColumnWidths.length, i = 0; i < count; i++) {
        int nextSum = sum + mColumnWidths[i] + shiftEveryStep;
        if (tempX > sum && tempX < nextSum) {
            return i;
        } else if (tempX < nextSum) {
            // NOTE(review): reached when tempX == sum, i.e. x sits exactly on a
            // column boundary; the previous column is reported — confirm this
            // boundary rule is intentional.
            return i - 1;
        }
        sum = nextSum;
    }
    // Beyond the last column: clamp to the last index.
    return mColumnWidths.length - 1;
public class DefaultApplicationObjectConfigurer {
    /**
     * Attempts to load the message for the given code using this instance's
     * {@link MessageSource} and locale.
     *
     * @param messageCode the message code to resolve; must not be null
     * @return the message for the given code, or null if it could not be found
     * @throws IllegalArgumentException if {@code messageCode} is null
     */
    private String loadMessage(String messageCode) { } }
    Assert.notNull(messageCode, "messageCode");
    if (logger.isDebugEnabled()) {
        logger.debug("Resolving label with code '" + messageCode + "'");
    }
    try {
        return getMessageSource().getMessage(messageCode, null, getLocale());
    } catch (NoSuchMessageException e) {
        // A missing message is expected and non-fatal: log at info and return null.
        if (logger.isInfoEnabled()) {
            logger.info("The message source is unable to find message code [" + messageCode + "]. Ignoring and returning null.");
        }
        return null;
    }
public class Fn {
    /**
     * Synchronized {@code Predicate}: wraps a tri-predicate, pre-binding two
     * arguments, and serializes every evaluation on the given mutex.
     *
     * @param mutex the object to synchronize on; must not be null
     * @param a first pre-bound argument
     * @param b second pre-bound argument
     * @param triPredicate the underlying predicate; must not be null
     * @return a predicate whose test(t) evaluates triPredicate(a, b, t) under the mutex
     */
    @Beta
    public static <A, B, T> Predicate<T> sp(final Object mutex, final A a, final B b, final TriPredicate<A, B, T> triPredicate) { } }
    N.checkArgNotNull(mutex, "mutex");
    N.checkArgNotNull(triPredicate, "triPredicate");
    // Every test() call is serialized on the caller-supplied mutex; a and b are
    // captured and passed through unchanged.
    return new Predicate<T>() {
        @Override
        public boolean test(T t) {
            synchronized (mutex) {
                return triPredicate.test(a, b, t);
            }
        }
    };
public class PipelineOutputConfig {
    /**
     * Appends the given permissions to the existing list (if any). Use
     * {@link #setPermissions(java.util.Collection)} or
     * {@link #withPermissions(java.util.Collection)} to override the existing
     * values instead.
     *
     * Optional. The <code>Permissions</code> object specifies which users and/or
     * predefined Amazon S3 groups can access transcoded files and playlists, and
     * the type of access (up to 30 users and/or groups). If you include
     * <code>Permissions</code>, Elastic Transcoder grants only the permissions
     * you specify and does not grant full permissions to the owner of the role
     * specified by <code>Role</code>; grant full control explicitly if that user
     * should have it. If you omit <code>Permissions</code>, full control over the
     * transcoded files and playlists goes to the owner of the role, and no other
     * user or group receives any permissions.
     *
     * @param permissions the permissions to append
     * @return this object, so that method calls can be chained together
     */
    public PipelineOutputConfig withPermissions(Permission... permissions) { } }
    // Lazily create the backing list sized for the varargs, then append each value.
    if (this.permissions == null) {
        setPermissions(new com.amazonaws.internal.SdkInternalList<Permission>(permissions.length));
    }
    for (Permission ele : permissions) {
        this.permissions.add(ele);
    }
    return this;
public class ChronoStorage { /** * Returns the revision of the specified chrono storage block . * @ param revisionIndex * chronological order index * @ return */ public Revision get ( final int revisionIndex ) { } }
if ( this . storage . containsKey ( revisionIndex ) ) { ChronoStorageBlock block = this . storage . get ( revisionIndex ) ; return block . getRev ( ) ; } return null ;
public class CdnManager {
    /**
     * Prefetches file URLs; each request may carry at most 100 URLs.
     * Reference: <a href="http://developer.qiniu.com/fusion/api/file-prefetching">file prefetching</a>
     *
     * @param urls the external file URLs to prefetch
     * @return the response of the prefetch request
     * @throws QiniuException when the URL count exceeds the per-request limit
     *         or the HTTP call fails
     */
    public CdnResult.PrefetchResult prefetchUrls(String[] urls) throws QiniuException { } }
    // Enforce the per-request limit before doing any network work.
    if (urls != null && urls.length > MAX_API_PREFETCH_URL_COUNT) {
        throw new QiniuException(new Exception("url count exceeds the max prefetch limit per request"));
    }
    // NOTE(review): a null urls array is not rejected here and would be
    // serialized as {"urls": null} — presumably rejected server-side; confirm
    // the intended behavior.
    HashMap<String, String[]> req = new HashMap<>();
    req.put("urls", urls);
    byte[] body = Json.encode(req).getBytes(Constants.UTF_8);
    String url = server + "/v2/tune/prefetch";
    // Sign the exact request bytes with the Qiniu v2 authorization scheme.
    StringMap headers = auth.authorizationV2(url, "POST", body, Client.JsonMime);
    Response response = client.post(url, body, headers, Client.JsonMime);
    return response.jsonToObject(CdnResult.PrefetchResult.class);
public class Control {
    /**
     * Closes the old session, then creates and opens an untitled database.
     *
     * @throws Exception if an error occurred while creating or opening the database
     */
    private void closeSessionAndCreateAndOpenUntitledDb() throws Exception { } }
    // Let every extension/plugin prepare for the session change before closing.
    getExtensionLoader().sessionAboutToChangeAllPlugin(null);
    model.closeSession();
    log.info("Create and Open Untitled Db");
    model.createAndOpenUntitledDb();
public class UploadDemo {
    /**
     * Builds an upload token whose policy sets the persistentOps and
     * persistentPipeline fields (post-upload processing operations and the
     * pipeline that executes them).
     */
    public String getUpToken() { } }
    // Token scoped to the bucket with no key restriction, valid for 3600 seconds.
    // putNotEmpty skips either policy field when its value is empty. The final
    // 'true' flag presumably enables strict policy-field validation — confirm
    // against Auth.uploadToken.
    return auth.uploadToken(bucketname, null, 3600, new StringMap().putNotEmpty("persistentOps", pfops).putNotEmpty("persistentPipeline", pipeline), true);
public class FastDtoa {
    /**
     * Runs the Grisu3 shortest-digit generation for {@code v} into {@code buffer}.
     *
     * @param v the double to convert; its bit pattern is decomposed into a
     *          normalized DiyFp and its rounding boundaries
     * @param buffer receives the generated digits
     * @return the result of {@code digitGen} — whether the digits could be computed
     */
    static boolean grisu3(double v, FastDtoaBuilder buffer) { } }
    long bits = Double.doubleToLongBits(v);
    DiyFp w = DoubleHelper.asNormalizedDiyFp(bits);
    // boundary_minus and boundary_plus are the boundaries between v and its
    // closest floating-point neighbors. Any number strictly between
    // boundary_minus and boundary_plus will round to v when converted to a double.
    // Grisu3 will never output representations that lie exactly on a boundary.
    DiyFp boundary_minus = new DiyFp(), boundary_plus = new DiyFp();
    DoubleHelper.normalizedBoundaries(bits, boundary_minus, boundary_plus);
    assert (boundary_plus.e() == w.e());
    DiyFp ten_mk = new DiyFp(); // Cached power of ten: 10^-k
    int mk = CachedPowers.getCachedPower(w.e() + DiyFp.kSignificandSize, minimal_target_exponent, maximal_target_exponent, ten_mk);
    assert (minimal_target_exponent <= w.e() + ten_mk.e() + DiyFp.kSignificandSize && maximal_target_exponent >= w.e() + ten_mk.e() + DiyFp.kSignificandSize);
    // Note that ten_mk is only an approximation of 10^-k. A DiyFp only contains a
    // 64 bit significand and ten_mk is thus only precise up to 64 bits.
    // The DiyFp::Times procedure rounds its result, and ten_mk is approximated
    // too. The variable scaled_w (as well as scaled_boundary_minus/plus) are now
    // off by a small amount.
    // In fact: scaled_w - w*10^k < 1 ulp (unit in the last place) of scaled_w.
    // In other words: let f = scaled_w.f() and e = scaled_w.e(), then
    // (f-1) * 2^e < w*10^k < (f+1) * 2^e
    DiyFp scaled_w = DiyFp.times(w, ten_mk);
    assert (scaled_w.e() == boundary_plus.e() + ten_mk.e() + DiyFp.kSignificandSize);
    // In theory it would be possible to avoid some recomputations by computing
    // the difference between w and boundary_minus/plus (a power of 2) and to
    // compute scaled_boundary_minus/plus by subtracting/adding from scaled_w.
    // However the code becomes much less readable and the speed enhancements
    // are not terrific.
    DiyFp scaled_boundary_minus = DiyFp.times(boundary_minus, ten_mk);
    DiyFp scaled_boundary_plus = DiyFp.times(boundary_plus, ten_mk);
    // DigitGen will generate the digits of scaled_w. Therefore we have
    // v == (double) (scaled_w * 10^-mk).
    // Set decimal_exponent == -mk and pass it to DigitGen. If scaled_w is not an
    // integer it will be updated. For instance if scaled_w == 1.23 then
    // the buffer will be filled with "123" and the decimal_exponent will be
    // decreased by 2.
    return digitGen(scaled_boundary_minus, scaled_w, scaled_boundary_plus, buffer, mk);
public class Solo { /** * Sets the progress of the specified ProgressBar . Examples of ProgressBars are : { @ link android . widget . SeekBar } and { @ link android . widget . RatingBar } . * @ param progressBar the { @ link ProgressBar } * @ param progress the progress to set the { @ link ProgressBar } */ public void setProgressBar ( ProgressBar progressBar , int progress ) { } }
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "setProgressBar(" + progressBar + ", " + progress + ")" ) ; } progressBar = ( ProgressBar ) waiter . waitForView ( progressBar , Timeout . getSmallTimeout ( ) ) ; setter . setProgressBar ( progressBar , progress ) ;
public class ControlToControllerER { /** * Navigates to the related ERs of the controllers of the given Control . * @ param match current pattern match * @ param ind mapped indices * @ return related ERs */ @ Override public Collection < BioPAXElement > generate ( Match match , int ... ind ) { } }
Control ctrl = ( Control ) match . get ( ind [ 0 ] ) ; return new HashSet < BioPAXElement > ( getRelatedERs ( ctrl ) ) ;
public class RemoteMongoCollectionImpl { /** * Inserts the provided document . If the document is missing an identifier , the client should * generate one . * @ param document the document to insert * @ return a task containing the result of the insert one operation */ public Task < RemoteInsertOneResult > insertOne ( final DocumentT document ) { } }
return dispatcher . dispatchTask ( new Callable < RemoteInsertOneResult > ( ) { @ Override public RemoteInsertOneResult call ( ) { return proxy . insertOne ( document ) ; } } ) ;
public class DataBlockWriteOperation {
    /**
     * Serializes this operation into the journal file.
     *
     * @see net.timewalker.ffmq4.storage.data.impl.journal.AbstractJournalOperation#writeTo(net.timewalker.ffmq4.storage.data.impl.journal.JournalFile)
     */
    @Override
    protected void writeTo(JournalFile journalFile) throws JournalException { } }
    // Write the common journal-operation header first.
    super.writeTo(journalFile);
    // Then a length-prefixed copy of the raw block data.
    journalFile.writeInt(blockData.length);
    journalFile.write(blockData);
public class CallbackOption {
    /**
     * Constructs a {@link ICallback.CallbackOption} with predicate
     * {@link ICallback.ICallbackPredicate#alwaysTrue()} and the specified
     * {@link Priority}.
     *
     * @param <P> the predicate type
     * @param priority the priority
     * @return the callback option
     */
    public static <P extends ICallbackPredicate> CallbackOption<P> of(Priority priority) { } }
    // No explicit predicate: relies on the constructor's handling of a null
    // predicate, which per this factory's contract means alwaysTrue().
    return new CallbackOption<>(null, priority);
public class SDKUtil {
    /**
     * Parses a response query string into its component key/value elements.
     *
     * Values may contain nested {...} or [...] sections; a '&' inside such a
     * section is treated as a literal character rather than a field delimiter.
     *
     * @param str the string to parse
     * @return the resulting map of keys to values
     */
    public static Map<String, String> parseQString(String str) { } }
    Map<String, String> map = new HashMap<String, String>();
    int len = str.length();
    StringBuilder temp = new StringBuilder();
    char curChar;
    String key = null;
    boolean isKey = true;
    boolean isOpen = false; // inside a nested section within a value
    char openName = 0; // the closing character of the current nested section
    if (len > 0) {
        for (int i = 0; i < len; i++) { // walk the whole string to be parsed
            curChar = str.charAt(i); // current character
            if (isKey) { // currently accumulating a key
                if (curChar == '=') { // '=' ends the key
                    key = temp.toString();
                    temp.setLength(0);
                    isKey = false;
                } else {
                    temp.append(curChar);
                }
            } else { // currently accumulating a value
                if (isOpen) {
                    // inside a nested section: only its closing char ends it
                    if (curChar == openName) {
                        isOpen = false;
                    }
                } else { // no nested section open
                    if (curChar == '{') { // '{' opens a nested section
                        isOpen = true;
                        openName = '}';
                    }
                    if (curChar == '[') { // '[' opens a nested section
                        isOpen = true;
                        openName = ']';
                    }
                }
                if (curChar == '&' && !isOpen) {
                    // '&' outside any nested section ends the pair: store it
                    putKeyValueToMap(temp, isKey, key, map);
                    temp.setLength(0);
                    isKey = true;
                } else {
                    temp.append(curChar);
                }
            }
        }
        // flush the trailing pair (or dangling key) after the loop
        putKeyValueToMap(temp, isKey, key, map);
    }
    return map;
public class E {
    /**
     * Throws a {@link ConfigurationException} with the specified cause and
     * formatted message. The declared return type exists only so callers can
     * write {@code throw E.invalidConfiguration(...)}; this method never
     * returns normally.
     *
     * @param cause   the cause of the configuration error
     * @param message the error message format pattern
     * @param args    the error message format arguments
     */
    public static ConfigurationException invalidConfiguration(Throwable cause, String message, Object... args) {
        throw new ConfigurationException(cause, message, args);
    }
}
public class JSDocInfoBuilder {
    /**
     * Adds a marker to the current JSDocInfo and populates it with the
     * annotation name and its source position.
     *
     * @param annotation the annotation name
     * @param lineno     source line of the annotation
     * @param charno     source column of the annotation
     */
    public void markAnnotation(String annotation, int lineno, int charno) {
        JSDocInfo.Marker marker = currentInfo.addMarker();
        if (marker != null) {
            JSDocInfo.TrimmedStringPosition position = new JSDocInfo.TrimmedStringPosition();
            position.setItem(annotation);
            // End position is start + annotation length on the same line.
            position.setPositionInformation(lineno, charno, lineno, charno + annotation.length());
            marker.setAnnotation(position);
            populated = true;
        }
        // May be null when no marker could be added.
        currentMarker = marker;
    }
}
public class Log {
    /**
     * What a Terrible Failure: report an exception that should never happen.
     * Similar to {@link #wtf(String, Throwable)}, with a message as well.
     *
     * @param tag used to identify the source of the log message
     * @param msg the message to log
     * @param tr  an exception to log; may be null
     * @return the result of the underlying wtf call
     */
    public static int wtf(String tag, String msg, Throwable tr) {
        // false: plain wtf, not the "wtf stack" variant.
        return wtf(LOG_ID_MAIN, tag, msg, tr, false);
    }
}
public class RocksDBStateUploader { /** * Upload all the files to checkpoint fileSystem using specified number of threads . * @ param files The files will be uploaded to checkpoint filesystem . * @ param checkpointStreamFactory The checkpoint streamFactory used to create outputstream . * @ throws Exception Thrown if can not upload all the files . */ public Map < StateHandleID , StreamStateHandle > uploadFilesToCheckpointFs ( @ Nonnull Map < StateHandleID , Path > files , CheckpointStreamFactory checkpointStreamFactory , CloseableRegistry closeableRegistry ) throws Exception { } }
Map < StateHandleID , StreamStateHandle > handles = new HashMap < > ( ) ; Map < StateHandleID , CompletableFuture < StreamStateHandle > > futures = createUploadFutures ( files , checkpointStreamFactory , closeableRegistry ) ; try { FutureUtils . waitForAll ( futures . values ( ) ) . get ( ) ; for ( Map . Entry < StateHandleID , CompletableFuture < StreamStateHandle > > entry : futures . entrySet ( ) ) { handles . put ( entry . getKey ( ) , entry . getValue ( ) . get ( ) ) ; } } catch ( ExecutionException e ) { Throwable throwable = ExceptionUtils . stripExecutionException ( e ) ; throwable = ExceptionUtils . stripException ( throwable , RuntimeException . class ) ; if ( throwable instanceof IOException ) { throw ( IOException ) throwable ; } else { throw new FlinkRuntimeException ( "Failed to download data for state handles." , e ) ; } } return handles ;
public class AbstractBpmnActivityBehavior {
    /**
     * Subclasses that call leave() will first pass through this method, before the regular
     * {@link FlowNodeActivityBehavior#leave(ActivityExecution)} is called. This way, we can
     * check if the activity has loop characteristics, and delegate to the multi-instance
     * behavior if that is the case. Compensation boundary events are executed first.
     */
    public void leave(DelegateExecution execution) {
        FlowElement currentFlowElement = execution.getCurrentFlowElement();
        Collection<BoundaryEvent> boundaryEvents = findBoundaryEventsForFlowNode(execution.getProcessDefinitionId(), currentFlowElement);
        if (CollectionUtil.isNotEmpty(boundaryEvents)) {
            executeCompensateBoundaryEvents(boundaryEvents, execution);
        }
        if (!hasLoopCharacteristics()) {
            // Plain activity: leave normally.
            super.leave(execution);
        } else if (hasMultiInstanceCharacteristics()) {
            // Multi-instance: the multi-instance behavior decides when to really leave.
            multiInstanceActivityBehavior.leave(execution);
        }
        // NOTE(review): loop characteristics without multi-instance characteristics
        // falls through with no leave() call at all — confirm this is intentional.
    }
}
public class Expressions {
    /**
     * Creates a new enum property path below the given parent path.
     *
     * @param type     enum type of the expression
     * @param parent   parent path
     * @param property property name
     * @return property path
     */
    public static <T extends Enum<T>> EnumPath<T> enumPath(Class<? extends T> type, Path<?> parent, String property) {
        return new EnumPath<T>(type, PathMetadataFactory.forProperty(parent, property));
    }
}
public class CreateFunctionDefinitionVersionRequest { /** * A list of Lambda functions in this function definition version . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFunctions ( java . util . Collection ) } or { @ link # withFunctions ( java . util . Collection ) } if you want to * override the existing values . * @ param functions * A list of Lambda functions in this function definition version . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateFunctionDefinitionVersionRequest withFunctions ( Function ... functions ) { } }
if ( this . functions == null ) { setFunctions ( new java . util . ArrayList < Function > ( functions . length ) ) ; } for ( Function ele : functions ) { this . functions . add ( ele ) ; } return this ;
public class Merger {
    /**
     * Merges the argument iterables. Each input iterator must return values
     * in natural order.
     *
     * @param <T>       element type
     * @param iterables sources to merge
     * @return merged iterable
     */
    public static <T> Iterable<T> merge(Iterable<T>... iterables) {
        // null comparator: the two-arg overload falls back to natural ordering.
        return merge(null, iterables);
    }
}
public class CollUtil {
    /**
     * Creates a new ArrayList containing the given values.
     *
     * @param <T>    element type
     * @param values initial elements
     * @return a new ArrayList
     */
    @SafeVarargs
    public static <T> ArrayList<T> newArrayList(T... values) {
        // false: request a non-linked (array-backed) list from the shared factory.
        return (ArrayList<T>) list(false, values);
    }
}
public class PropertyWidgetCollection {
    /**
     * Invoked whenever a configured property within this widget factory is
     * changed: pushes the current configured value of every property to its
     * widget via onValueTouched().
     */
    public void onConfigurationChanged() {
        final Collection<PropertyWidget<?>> widgets = getWidgets();
        // Build the (potentially large) debug summary only when debug logging is on.
        if (logger.isDebugEnabled()) {
            final StringBuilder sb = new StringBuilder();
            sb.append("id=");
            sb.append(System.identityHashCode(this));
            sb.append(" - onConfigurationChanged() - notifying widgets:");
            sb.append(widgets.size());
            for (final PropertyWidget<?> widget : widgets) {
                final String propertyName = widget.getPropertyDescriptor().getName();
                final String propertyWidgetClassName = widget.getClass().getSimpleName();
                sb.append("\n - ");
                sb.append(propertyName);
                sb.append(": ");
                sb.append(propertyWidgetClassName);
            }
            logger.debug(sb.toString());
        }
        // Notify every widget with its currently configured value.
        for (final PropertyWidget<?> widget : widgets) {
            @SuppressWarnings("unchecked")
            final PropertyWidget<Object> objectWidget = (PropertyWidget<Object>) widget;
            final ConfiguredPropertyDescriptor propertyDescriptor = objectWidget.getPropertyDescriptor();
            final Object value = _componentBuilder.getConfiguredProperty(propertyDescriptor);
            objectWidget.onValueTouched(value);
        }
    }
}
public class ScriptRuntime {
    /**
     * Converts the value to a number per ECMA 9.3 (ToNumber).
     * Loops so that an object's primitive default value is itself converted.
     */
    public static double toNumber(Object val) {
        for (;;) {
            if (val instanceof Number)
                return ((Number) val).doubleValue();
            if (val == null)
                return +0.0;
            if (val == Undefined.instance)
                return NaN;
            if (val instanceof String)
                return toNumber((String) val);
            if (val instanceof CharSequence)
                return toNumber(val.toString());
            if (val instanceof Boolean)
                return ((Boolean) val).booleanValue() ? 1 : +0.0;
            if (val instanceof Symbol)
                // Symbols cannot be converted to numbers.
                throw typeError0("msg.not.a.number");
            if (val instanceof Scriptable) {
                // Ask the object for its primitive value, then loop to convert it.
                val = ((Scriptable) val).getDefaultValue(NumberClass);
                if ((val instanceof Scriptable) && !isSymbol(val))
                    throw errorWithClassName("msg.primitive.expected", val);
                continue;
            }
            // Unknown host object: warn and treat as NaN.
            warnAboutNonJSObject(val);
            return NaN;
        }
    }
}
public class RelationalOperationsMatrix {
    /**
     * Checks whether the scl string is the "contains" relation,
     * i.e. matches the pattern T*****FF* over the nine matrix cells.
     */
    private static boolean contains_(String scl) {
        // Return the cell comparison directly instead of if/return-true/return-false.
        return scl.charAt(0) == 'T'
            && scl.charAt(1) == '*'
            && scl.charAt(2) == '*'
            && scl.charAt(3) == '*'
            && scl.charAt(4) == '*'
            && scl.charAt(5) == '*'
            && scl.charAt(6) == 'F'
            && scl.charAt(7) == 'F'
            && scl.charAt(8) == '*';
    }
}
public class Locations {
    /**
     * Creates the directory represented by the location if it does not exist.
     *
     * @param location the location for the directory
     * @throws java.io.IOException if the location cannot be created
     */
    public static void mkdirsIfNotExists(Location location) throws IOException {
        // check && mkdirs && re-check handles the race where another process
        // creates the directory between our existence check and our mkdirs call.
        if (!location.isDirectory() && !location.mkdirs() && !location.isDirectory()) {
            throw new IOException("Failed to create directory at " + location.toURI());
        }
    }
}
public class Utils4Swing {
    /**
     * Creates a new resizable frame with a panel as its content pane and
     * positions the frame.
     *
     * @param title       frame title
     * @param content     content pane
     * @param exitOnClose exit the program on closing the frame?
     * @param positioner  frame positioner
     * @return a visible frame at the preferred position
     */
    public static JFrame createShowAndPosition(final String title, final Container content, final boolean exitOnClose, final FramePositioner positioner) {
        // true: this overload always creates a resizable frame.
        return createShowAndPosition(title, content, exitOnClose, true, positioner);
    }
}
public class TrustedIdProvidersInner {
    /**
     * Updates the specified trusted identity provider (blocking call).
     *
     * @param resourceGroupName     the name of the Azure resource group
     * @param accountName           the name of the Data Lake Store account
     * @param trustedIdProviderName the name of the trusted identity provider
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the TrustedIdProviderInner object if successful
     */
    public TrustedIdProviderInner update(String resourceGroupName, String accountName, String trustedIdProviderName) {
        // Block on the async variant and unwrap the single response body.
        return updateWithServiceResponseAsync(resourceGroupName, accountName, trustedIdProviderName).toBlocking().single().body();
    }
}
public class BoxRetentionPolicy {
    /**
     * Returns all the retention policies matching the specified filters.
     *
     * @param name   name filter (trailing partial match); null for no name filtering
     * @param type   policy type filter; null for no type filtering
     * @param userID creating-user filter; null for no user filtering
     * @param limit  items per response page (API default is 100)
     * @param api    the API connection to use
     * @param fields the fields to retrieve
     * @return an iterable over all retention policies meeting the search conditions
     */
    public static Iterable<BoxRetentionPolicy.Info> getAll(String name, String type, String userID, int limit, final BoxAPIConnection api, String... fields) {
        QueryStringBuilder queryString = new QueryStringBuilder();
        // Only append the filters the caller actually supplied.
        if (name != null) {
            queryString.appendParam("policy_name", name);
        }
        if (type != null) {
            queryString.appendParam("policy_type", type);
        }
        if (userID != null) {
            queryString.appendParam("created_by_user_id", userID);
        }
        if (fields.length > 0) {
            queryString.appendParam("fields", fields);
        }
        URL url = RETENTION_POLICIES_URL_TEMPLATE.buildWithQuery(api.getBaseURL(), queryString.toString());
        // Lazily paged iterable: each JSON object is wrapped into a policy Info.
        return new BoxResourceIterable<BoxRetentionPolicy.Info>(api, url, limit) {
            @Override
            protected BoxRetentionPolicy.Info factory(JsonObject jsonObject) {
                BoxRetentionPolicy policy = new BoxRetentionPolicy(api, jsonObject.get("id").asString());
                return policy.new Info(jsonObject);
            }
        };
    }
}
public class StorageUtil { /** * reads a XML Element Attribute ans cast it to a Time Object * @ param config * @ param el XML Element to read Attribute from it * @ param attributeName Name of the Attribute to read * @ return Attribute Value */ public Time toTime ( Config config , Element el , String attributeName ) { } }
DateTime dt = toDateTime ( config , el , attributeName ) ; if ( dt == null ) return null ; return new TimeImpl ( dt ) ;
public class JDBC4Statement {
    /**
     * Moves to this Statement object's next result, dealing with any current
     * ResultSet object(s) according to the given flag, and returns true if the
     * next result is a ResultSet object.
     */
    @Override
    public boolean getMoreResults(int current) throws SQLException {
        checkClosed();
        switch (current) {
            case Statement.KEEP_CURRENT_RESULT:
                // Keep the current result open; remember it so it can be closed later.
                this.openResults.add(this.result);
                this.result = null;
                this.lastUpdateCount = -1;
                break;
            case Statement.CLOSE_CURRENT_RESULT:
                closeCurrentResult();
                break;
            case Statement.CLOSE_ALL_RESULTS:
                closeCurrentResult();
                closeAllOpenResults();
                break;
            default:
                throw SQLError.get(SQLError.ILLEGAL_ARGUMENT, current);
        }
        // NOTE(review): CLOSE_ALL_RESULTS does not advance to the next table here —
        // confirm this matches the intended JDBC semantics for this driver.
        if (current != Statement.CLOSE_ALL_RESULTS) {
            this.tableResultIndex++;
            if (this.tableResultIndex < this.tableResults.length) {
                VoltTable table = this.tableResults[this.tableResultIndex];
                if (VoltSQL.isUpdateResult(table)) {
                    // Update result: exposed through the update count, not a ResultSet.
                    this.lastUpdateCount = (int) table.fetchRow(0).getLong(0);
                } else {
                    this.result = createTrimmedResultSet(table);
                    return true;
                }
            }
        }
        return false;
    }
}
public class StartRemediationExecutionRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol marshaller.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(StartRemediationExecutionRequest startRemediationExecutionRequest, ProtocolMarshaller protocolMarshaller) {
        if (startRemediationExecutionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(startRemediationExecutionRequest.getConfigRuleName(), CONFIGRULENAME_BINDING);
            protocolMarshaller.marshall(startRemediationExecutionRequest.getResourceKeys(), RESOURCEKEYS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in an SdkClientException.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SwitchYardServiceInvoker {
    /**
     * Invokes the requested SwitchYard service and returns the response.
     * Any failure (missing service name/domain/reference, send error, fault
     * from the exchange) is captured as the response fault rather than thrown.
     *
     * @param request the request
     * @return the response (content, context properties and optional fault)
     */
    public SwitchYardServiceResponse invoke(SwitchYardServiceRequest request) {
        Map<String, Object> contextOut = new HashMap<String, Object>();
        Object contentOut = null;
        Object fault = null;
        try {
            QName serviceName = request.getServiceName();
            if (serviceName == null) {
                throw CommonKnowledgeMessages.MESSAGES.serviceNameNull();
            } else if (Strings.trimToNull(serviceName.getNamespaceURI()) == null) {
                // Qualify an unqualified service name with the target namespace, if any.
                String tns = getTargetNamespace();
                if (tns != null) {
                    serviceName = XMLHelper.createQName(tns, serviceName.getLocalPart());
                }
            }
            ServiceDomain serviceDomain = getServiceDomain();
            if (serviceDomain == null) {
                throw CommonKnowledgeMessages.MESSAGES.serviceDomainNull();
            }
            ServiceReference serviceReference = serviceDomain.getServiceReference(serviceName);
            if (serviceReference == null) {
                throw CommonKnowledgeMessages.MESSAGES.serviceReferenceNull(serviceName.toString());
            }
            final Exchange exchangeIn;
            FaultHandler handler = new FaultHandler();
            String operationName = request.getOperationName();
            if (operationName != null) {
                exchangeIn = serviceReference.createExchange(operationName, handler);
            } else {
                exchangeIn = serviceReference.createExchange(handler);
            }
            Message messageIn = exchangeIn.createMessage();
            Context contextIn = exchangeIn.getContext(messageIn);
            // Copy the request's context properties onto the inbound exchange.
            for (Map.Entry<String, Object> entry : request.getContext().entrySet()) {
                contextIn.setProperty(entry.getKey(), entry.getValue());
            }
            Object contentIn = request.getContent();
            if (contentIn != null) {
                messageIn.setContent(contentIn);
            }
            exchangeIn.send(messageIn);
            if (ExchangePattern.IN_OUT.equals(exchangeIn.getContract().getConsumerOperation().getExchangePattern())) {
                // Request/response: wait for the out message, then copy its content
                // and context properties into the response.
                Exchange exchangeOut = handler.waitForOut();
                Message messageOut = exchangeOut.getMessage();
                contentOut = messageOut.getContent();
                for (Property property : exchangeOut.getContext(messageOut).getProperties()) {
                    contextOut.put(property.getName(), property.getValue());
                }
            }
            fault = handler.getFault();
        } catch (Throwable t) {
            // Failures become the response's fault instead of propagating to the caller.
            fault = t;
        }
        return new SwitchYardServiceResponse(contentOut, contextOut, fault);
    }
}
public class HandlerRegistry {
    /**
     * Atomically removes (and destroys) the servlet registered under the alias.
     * Doing the lookup and removal in one synchronized method closes the window
     * for a double-remove that existed when getServletByAlias and removeServlet
     * were called separately (OSGi HTTP compliance test TC12).
     *
     * @param alias the alias the servlet was registered under
     * @return the removed/destroyed servlet
     * @throws IllegalArgumentException if no servlet is registered for the alias
     */
    public synchronized Servlet removeServletByAlias(String alias) {
        final boolean destroy = true;
        Servlet servlet = this.aliasMap.remove(alias);
        this.container.removeContextRoot(alias);
        ServletHandler handler = this.servletMap.remove(servlet);
        if (handler != null) {
            updateServletArray();
            if (destroy) {
                handler.destroy();
            }
        } else {
            // NOTE(review): for an unknown alias, servlet is null here, so the
            // message reads "Servlet is not registered: null".
            throw new IllegalArgumentException("Servlet is not registered: " + servlet);
        }
        return servlet;
    }
}
public class nsip {
    /**
     * Fetches all the nsip resources configured on the NetScaler, using
     * nsip_args to provide additional arguments with the fetch.
     */
    public static nsip[] get(nitro_service service, nsip_args args) throws Exception {
        nsip obj = new nsip();
        options option = new options();
        // Serialize the argument object into the request's args string.
        option.set_args(nitro_util.object_to_string_withoutquotes(args));
        nsip[] response = (nsip[]) obj.get_resources(service, option);
        return response;
    }
}
public class SimpleFormValidator { /** * Validates if file is not empty */ public FormInputValidator notEmpty ( VisValidatableTextField field , String errorMsg ) { } }
EmptyInputValidator validator = new EmptyInputValidator ( errorMsg ) ; field . addValidator ( validator ) ; add ( field ) ; return validator ;
public class ValdrBeanValidation {
    /**
     * CLI entry point: parses the command line, loads and validates options,
     * runs the constraint parser and writes the output. Prints usage and help
     * when the command line is incomplete.
     *
     * @param args cli arguments
     */
    public static void main(String[] args) {
        org.apache.commons.cli.Options cliOptions = createCliOptions();
        try {
            CommandLine cli = parseCli(args, cliOptions);
            Options options = loadOptions(cli);
            validate(options);
            ConstraintParser parser = new ConstraintParser(options);
            try {
                // Output failures are fatal: wrap in an unchecked exception.
                output(parser, options.getOutputFile());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        } catch (IncompleteCliException e) {
            // If the command line is not complete just print usage and help.
            printErrorWithUsageAndHelp(cliOptions);
        }
    }
}
public class DoubleIntIndex { /** * Check if targeted column value in the row indexed i is less than the * search target object . * @ param i the index * @ return - 1 , 0 or + 1 */ private int compare ( int i ) { } }
if ( sortOnValues ) { if ( targetSearchValue > values [ i ] ) { return 1 ; } else if ( targetSearchValue < values [ i ] ) { return - 1 ; } } else { if ( targetSearchValue > keys [ i ] ) { return 1 ; } else if ( targetSearchValue < keys [ i ] ) { return - 1 ; } } return 0 ;
public class ZkUtils {
    /**
     * Reads all brokers registered in ZooKeeper.
     *
     * @param zkClient zookeeper client
     * @return the cluster of all registered brokers
     */
    public static Cluster getCluster(ZkClient zkClient) {
        Cluster cluster = new Cluster();
        // The broker-ids path may not exist yet (no brokers registered).
        List<String> nodes = getChildrenParentMayNotExist(zkClient, BrokerIdsPath);
        for (String node : nodes) {
            // Each child node's name is the broker id; its data describes the broker.
            final String brokerInfoString = readData(zkClient, BrokerIdsPath + "/" + node);
            cluster.add(Broker.createBroker(Integer.valueOf(node), brokerInfoString));
        }
        return cluster;
    }
}
public class BigDecimal {
    /**
     * Returns the length of the absolute value of a {@code long}, in decimal
     * digits.
     *
     * @param x the {@code long}
     * @return the length of the unscaled value, in decimal digits
     */
    static int longDigitLength(long x) {
        /*
         * As described in "Bit Twiddling Hacks" by Sean Anderson,
         * (http://graphics.stanford.edu/~seander/bithacks.html)
         * integer log 10 of x is within 1 of (1233/4096)* (1 +
         * integer log 2 of x). The fraction 1233/4096 approximates
         * log10(2). So we first do a version of log2 (a variant of
         * Long class with pre-checks and opposite directionality) and
         * then scale and check against powers table. This is a little
         * simpler in present context than the version in Hacker's
         * Delight sec 11-4. Adding one to bit length allows comparing
         * downward from the LONG_TEN_POWERS_TABLE that we need
         * anyway.
         */
        assert x != BigDecimal.INFLATED;
        if (x < 0)
            x = -x;
        if (x < 10) // must screen for 0, might as well 10
            return 1;
        int r = ((64 - Long.numberOfLeadingZeros(x) + 1) * 1233) >>> 12;
        long[] tab = LONG_TEN_POWERS_TABLE;
        // if r >= length, must have max possible digits for long
        return (r >= tab.length || x < tab[r]) ? r : r + 1;
    }
}
public class RetryTemplateBuilder { /** * Allows retry if there is no more than { @ code timeout } millis since first attempt . * Invocation of this method does not discard default exception classification rule , * that is " retry only on { @ link Exception } and it ' s subclasses " . * @ param timeout whole execution timeout in milliseconds * @ return this * @ see TimeoutRetryPolicy */ public RetryTemplateBuilder withinMillis ( long timeout ) { } }
Assert . isTrue ( timeout > 0 , "Timeout should be positive" ) ; Assert . isNull ( this . baseRetryPolicy , "You have already selected another retry policy" ) ; TimeoutRetryPolicy timeoutRetryPolicy = new TimeoutRetryPolicy ( ) ; timeoutRetryPolicy . setTimeout ( timeout ) ; this . baseRetryPolicy = timeoutRetryPolicy ; return this ;
public class CodecUtils { /** * 扁平化复制 * @ param prefix 前缀 * @ param sourceMap 原始map * @ param dstMap 目标map */ public static void flatCopyTo ( String prefix , Map < String , Object > sourceMap , Map < String , String > dstMap ) { } }
for ( Map . Entry < String , Object > entry : sourceMap . entrySet ( ) ) { String key = prefix + entry . getKey ( ) ; Object value = entry . getValue ( ) ; if ( value instanceof String ) { dstMap . put ( key , ( String ) value ) ; } else if ( value instanceof Number ) { dstMap . put ( key , value . toString ( ) ) ; } else if ( value instanceof Map ) { flatCopyTo ( key + "." , ( Map < String , Object > ) value , dstMap ) ; } }
public class ColumnSchema {
    /**
     * Returns the generated (computed) value for this column in the session
     * context, or null when the column has no generating expression.
     */
    Object getGeneratedValue(Session session) {
        return generatingExpression == null ? null : generatingExpression.getValue(session, dataType);
    }
}
public class JDBC4Connection {
    /**
     * Undoes all changes made in the current transaction and releases any
     * database locks currently held by this Connection object. By default this
     * driver reports the operation as unsupported.
     */
    @Override
    public void rollback() throws SQLException {
        checkClosed();
        // Throw unless the connection property explicitly disables the exception,
        // in which case rollback becomes a silent no-op.
        if (props.getProperty(ROLLBACK_THROW_EXCEPTION, "true").equalsIgnoreCase("true")) {
            throw SQLError.noSupport();
        }
    }
}
public class RecurringData {
    /**
     * Calculates start dates for a monthly relative recurrence
     * (e.g. "second Tuesday of every 3 months").
     *
     * @param calendar  current date (mutated while iterating)
     * @param frequency number of months between occurrences
     * @param dates     receives the generated start dates
     */
    private void getMonthlyRelativeDates(Calendar calendar, int frequency, List<Date> dates) {
        long startDate = calendar.getTimeInMillis();
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        int dayNumber = NumberHelper.getInt(m_dayNumber);
        while (moreDates(calendar, dates)) {
            if (dayNumber > 4) {
                // Values above 4 mean "last <weekday> of the month".
                setCalendarToLastRelativeDay(calendar);
            } else {
                // 1..4 mean "first..fourth <weekday> of the month".
                setCalendarToOrdinalRelativeDay(calendar, dayNumber);
            }
            // Only dates strictly after the original start date count.
            if (calendar.getTimeInMillis() > startDate) {
                dates.add(calendar.getTime());
                if (!moreDates(calendar, dates)) {
                    break;
                }
            }
            // Reset to the first of the month and advance by the frequency.
            calendar.set(Calendar.DAY_OF_MONTH, 1);
            calendar.add(Calendar.MONTH, frequency);
        }
    }
}
public class Startup {
    /**
     * Builds the detail text for show(): one "---- name [@ timestamp] ----"
     * header followed by its content per entry, or an empty string when the
     * startup setting is default or has no entries.
     */
    String showDetail() {
        if (isDefault() || isEmpty()) {
            return "";
        } else {
            return entries.stream().map(sue -> "---- " + sue.name + (sue.timeStamp.isEmpty() ? "" : " @ " + sue.timeStamp) + " ----\n" + sue.content).collect(joining());
        }
    }
}
public class MasterHooks { /** * Resets the master hooks . * @ param hooks The hooks to reset * @ throws FlinkException Thrown , if the hooks throw an exception . */ public static void reset ( final Collection < MasterTriggerRestoreHook < ? > > hooks , final Logger log ) throws FlinkException { } }
for ( MasterTriggerRestoreHook < ? > hook : hooks ) { final String id = hook . getIdentifier ( ) ; try { hook . reset ( ) ; } catch ( Throwable t ) { ExceptionUtils . rethrowIfFatalErrorOrOOM ( t ) ; throw new FlinkException ( "Error while resetting checkpoint master hook '" + id + '\'' , t ) ; } }
public class Stream { /** * Returns the first element wrapped by { @ code Optional } class . * If stream is empty , returns { @ code Optional . empty ( ) } . * < p > This is a short - circuiting terminal operation . * @ return an { @ code Optional } with the first element * or { @ code Optional . empty ( ) } if stream is empty */ @ NotNull public Optional < T > findFirst ( ) { } }
if ( iterator . hasNext ( ) ) { return Optional . of ( iterator . next ( ) ) ; } return Optional . empty ( ) ;
public class FederationPresenter {
    /**
     * Reads the selected federation (recursively) together with the names of
     * all configured security domains in one composite DMR operation, then
     * updates the view. No-op when no federation is selected.
     */
    private void readFederation() {
        if (federation != null) {
            // Step 1: read the federation subtree.
            Operation fedOp = new Operation.Builder(READ_RESOURCE_OPERATION, FEDERATION_TEMPLATE.resolve(statementContext, federation)).param(RECURSIVE, true).build();
            // Step 2: read the names of all security domains.
            Operation sdOp = new Operation.Builder(READ_CHILDREN_NAMES_OPERATION, AddressTemplate.of("{selected.profile}/subsystem=security").resolve(statementContext)).param(CHILD_TYPE, "security-domain").build();
            dispatcher.execute(new DMRAction(new Composite(fedOp, sdOp)), new SimpleCallback<DMRResponse>() {
                @Override
                public void onSuccess(final DMRResponse dmrResponse) {
                    ModelNode response = dmrResponse.get();
                    if (response.isFailure()) {
                        Log.error("Failed to read federation " + federation, response.getFailureDescription());
                    } else {
                        ModelNode fedNode = response.get(RESULT).get("step-1").get(RESULT);
                        identityProvider = fedNode.get("identity-provider").asProperty().getName();
                        List<ModelNode> sdNodes = response.get(RESULT).get("step-2").get(RESULT).asList();
                        // Sort the security-domain names naturally before updating the view.
                        getView().update(fedNode, Ordering.natural().immutableSortedCopy(FluentIterable.from(sdNodes).transform(ModelNode::asString)));
                    }
                }
            });
        }
    }
}
public class MaskPasswordsConfig {
    /**
     * Adds a name/password pair at the global level.
     * <p>If either name or password is blank (as defined by the Commons Lang
     * library), the pair is not added.</p>
     *
     * @since 2.7
     */
    public void addGlobalVarPasswordPair(VarPasswordPair varPasswordPair) {
        // blank values are forbidden
        if (StringUtils.isBlank(varPasswordPair.getVar()) || StringUtils.isBlank(varPasswordPair.getPassword())) {
            LOGGER.fine("addGlobalVarPasswordPair NOT adding pair with null var or password");
        } else {
            getGlobalVarPasswordPairsList().add(varPasswordPair);
        }
    }
}
public class ChronoHistory {
    /**
     * Creates a copy of this instance with the given era preference. Has no
     * effect (returns this) when the preference is unchanged or when this
     * history has no Gregorian cut-over date (the proleptic variants).
     *
     * <p>Background: some historic calendars used eras other than AD or BC;
     * for example, Russia used the byzantine calendar (era Anno Mundi) before
     * the julian year 1700. This can be expressed with a suitable era
     * preference.</p>
     *
     * @param eraPreference strategy deciding which era is preferred (mostly relevant for printing)
     * @return new history with changed era preference
     * @since 3.14/4.11
     */
    public ChronoHistory with(EraPreference eraPreference) {
        if (eraPreference.equals(this.eraPreference) || !this.hasGregorianCutOverDate()) {
            return this;
        }
        return new ChronoHistory(this.variant, this.events, this.ajly, this.nys, eraPreference);
    }
}