signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DataSet { /** * Returns a new version of this data set that is of the same type , and * contains a different listing pointing to shallow data point copies . * Because the data point object contains the weight itself , the weight * is not shared - while the vector and array information is . This * allows altering the weights of the data points while preserving the * original weights . < br > * Altering the list or weights of the returned data set will not be * reflected in the original . Altering the feature values will . * @ return a shallow copy of shallow data point copies for this data set . */ public DataSet getTwiceShallowClone ( ) { } }
DataSet clone = shallowClone ( ) ; for ( int i = 0 ; i < clone . size ( ) ; i ++ ) { DataPoint d = getDataPoint ( i ) ; DataPoint sd = new DataPoint ( d . getNumericalValues ( ) , d . getCategoricalValues ( ) , d . getCategoricalData ( ) ) ; clone . setDataPoint ( i , sd ) ; } return clone ;
public class ScheduleAsyncPersistencePRequest { /** * < code > optional . alluxio . grpc . file . ScheduleAsyncPersistencePOptions options = 2 ; < / code > */ public alluxio . grpc . ScheduleAsyncPersistencePOptions getOptions ( ) { } }
return options_ == null ? alluxio . grpc . ScheduleAsyncPersistencePOptions . getDefaultInstance ( ) : options_ ;
public class LogSequenceNumber { /** * Create LSN instance by string represent LSN . * @ param strValue not null string as two hexadecimal numbers of up to 8 digits each , separated by * a slash . For example { @ code 16/3002D50 } , { @ code 0/15D68C50} * @ return not null LSN instance where if specified string represent have not valid form { @ link * LogSequenceNumber # INVALID _ LSN } */ public static LogSequenceNumber valueOf ( String strValue ) { } }
int slashIndex = strValue . lastIndexOf ( '/' ) ; if ( slashIndex <= 0 ) { return INVALID_LSN ; } String logicalXLogStr = strValue . substring ( 0 , slashIndex ) ; int logicalXlog = ( int ) Long . parseLong ( logicalXLogStr , 16 ) ; String segmentStr = strValue . substring ( slashIndex + 1 , strValue . length ( ) ) ; int segment = ( int ) Long . parseLong ( segmentStr , 16 ) ; ByteBuffer buf = ByteBuffer . allocate ( 8 ) ; buf . putInt ( logicalXlog ) ; buf . putInt ( segment ) ; buf . position ( 0 ) ; long value = buf . getLong ( ) ; return LogSequenceNumber . valueOf ( value ) ;
public class JspVisitor {
    /**
     * Decides whether this visitor should skip processing the children of the
     * given JSP element. Children are skipped only for visitors that opt in via
     * {@code shouldSkipChildrenForThisVisitor()}, and only when the element's
     * body is "tagdependent" (its content must be passed through verbatim).
     *
     * @param jspElement the element whose children are being considered
     * @return {@code true} if the children should not be processed
     * @throws JspCoreException on errors while inspecting the element
     */
    protected boolean shouldSkipChildren(Element jspElement) throws JspCoreException {
        if (!shouldSkipChildrenForThisVisitor()) {
            return false;
        }
        String uri = jspElement.getNamespaceURI();
        String name = jspElement.getLocalName();
        NodeList children;
        // First, see if this is a <jsp:body> tag.
        if (uri.equalsIgnoreCase(Constants.JSP_NAMESPACE) && name.equalsIgnoreCase(Constants.JSP_BODY_TYPE)) {
            // It is, so we need to skip processing children if the parent tag is tagdependent.
            return isNodeTagDependent(jspElement.getParentNode());
        }
        // Otherwise, this is a custom tag. We have three cases:
        if (!isElementTagDependent(jspElement)) {
            // Case 1: not a tagdependent tag, so return.
            return false;
        }
        // Now, we know this is a tagdependent tag. Look at the children.
        children = jspElement.getChildNodes();
        if (children.getLength() == 0) {
            // Case 2: there are no children, so there's no harm in allowing processing to continue.
            return false;
        }
        for (int i = 0; i < children.getLength(); ++i) {
            Node node = children.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                Element element = (Element) node;
                String elemURI = element.getNamespaceURI();
                if (elemURI.equalsIgnoreCase(Constants.JSP_NAMESPACE)) {
                    String elemName = element.getLocalName();
                    if (elemName.equalsIgnoreCase(Constants.JSP_ATTRIBUTE_TYPE)
                            || elemName.equalsIgnoreCase(Constants.JSP_BODY_TYPE)) {
                        // Case 3: the first child is <jsp:attribute> or <jsp:body>, so we
                        // have to process children.
                        return false;
                    } else {
                        // Default case: the first child is not <jsp:attribute> or <jsp:body>,
                        // so we have an implicit body defined. Skip processing.
                        return true;
                    }
                }
            }
        }
        // Not likely we'd make it here (only non-element or non-JSP-namespace children found).
        return false;
    }
}
public class BundleArchive { /** * This method will return a bundle entry URL for the supplied path , it will test for both a normal entry and a directory entry for it . * @ param pathAndName The path to the entry * @ return The URL for the bundle entry */ @ FFDCIgnore ( IllegalStateException . class ) public URL getBundleEntry ( Bundle bundleToTest , String pathAndName ) { } }
try { URL bundleEntry = bundleToTest . getEntry ( pathAndName ) ; /* * Defect 54588 discovered that if a directory does not have a zip entry then calling getEntry will return null unless the path has a " / " on the end so if we have null * still then add a " / " on the end of the path and retest */ if ( bundleEntry == null ) { bundleEntry = bundleToTest . getEntry ( pathAndName + "/" ) ; } return bundleEntry ; } catch ( IllegalStateException ise ) { // bundle context was no longer valid , so we cannot use getEntry any more . return null ; }
public class HostActiveDirectoryAuthentication {
    /**
     * Installs a trust anchor certificate for smart card authentication by
     * delegating to the underlying vSphere web service.
     *
     * @param cert SSL certificate in PEM format
     * @throws HostConfigFault on host configuration errors
     * @throws RuntimeFault on generic vSphere runtime faults
     * @throws RemoteException on communication errors with the server
     * @since 6.0
     */
    public void installSmartCardTrustAnchor(String cert) throws HostConfigFault, RuntimeFault, RemoteException {
        // Pure delegation: pass this managed object's reference and the PEM text to the service.
        getVimService().installSmartCardTrustAnchor(getMOR(), cert);
    }
}
public class CommonServerReceiveListener {
    /**
     * Informs the client that we are rejecting their handshake. Typically this
     * will never happen unless a third-party client is written or an internal
     * error occurs; however, an improperly formatted handshake must be reported
     * to the client. The connection is closed afterwards.
     *
     * @param conversation the conversation the bad handshake arrived on
     * @param requestNumber the request number to reply to
     * @param rejectedField a String that indicates the field that was rejected
     */
    private void rejectHandshake(Conversation conversation, int requestNumber, String rejectedField) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "rejectHandshake", new Object[] { conversation, requestNumber, rejectedField });
        // Build the exception that will be both FFDC-recorded and flowed back to the client.
        SIConnectionLostException exception = new SIConnectionLostException(
                nls.getFormattedMessage("INVALID_PROP_SICO8012", null, null));
        FFDCFilter.processException(exception,
                CLASS_NAME + ".rejectHandshake",
                CommsConstants.COMMONSERVERRECEIVELISTENER_HSREJCT_01, this);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "Invalid handshake type received - rejecting field:", rejectedField);
        // Send the failure to the client before tearing the connection down.
        StaticCATHelper.sendExceptionToClient(exception,
                CommsConstants.COMMONSERVERRECEIVELISTENER_HSREJCT_01,
                conversation, requestNumber);
        // At this point we really don't want anything more to do with this client - so close him
        closeConnection(conversation);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "rejectHandshake");
    }
}
public class CollUtil { /** * 将多个集合排序并显示不同的段落 ( 分页 ) < br > * 采用 { @ link BoundedPriorityQueue } 实现分页取局部 * @ param < T > 集合元素类型 * @ param pageNo 页码 , 从1开始计数 , 0和1效果相同 * @ param pageSize 每页的条目数 * @ param comparator 比较器 * @ param colls 集合数组 * @ return 分页后的段落内容 */ @ SafeVarargs public static < T > List < T > sortPageAll ( int pageNo , int pageSize , Comparator < T > comparator , Collection < T > ... colls ) { } }
final List < T > list = new ArrayList < > ( pageNo * pageSize ) ; for ( Collection < T > coll : colls ) { list . addAll ( coll ) ; } if ( null != comparator ) { Collections . sort ( list , comparator ) ; } return page ( pageNo , pageSize , list ) ;
public class DebugUtil {
    /**
     * Builds a javadoc-like textual presentation of the top wrapped class
     * fields, constructors and methods of a {@code java.lang.Object}: heading,
     * package, class hierarchy tree, class profile (modifiers, extends,
     * implements), then field/constructor/method summaries.
     *
     * @param pObject the {@code java.lang.Object} to be analysed
     * @param pObjectName the name of the object instance, for identification purposes
     * @return a listing of the object's class details, or an error message when
     *     {@code pObject} is {@code null}
     * @see java.lang.Class
     * @see java.lang.reflect.Modifier
     * @see java.lang.reflect.Field
     * @see java.lang.reflect.Constructor
     * @see java.lang.reflect.Method
     */
    public static String getClassDetails(final Object pObject, final String pObjectName) {
        if (pObject == null) {
            return OBJECT_IS_NULL_ERROR_MESSAGE;
        }
        final String endOfLine = System.getProperty("line.separator");
        final String dividerLine = "---------------------------------------------------------";
        Class c = pObject.getClass();
        StringTokenizer tokenizedString;
        String str;
        String className = new String();
        String superClassName = new String();
        StringBuilder buffer = new StringBuilder();
        // Heading
        buffer.append(endOfLine);
        buffer.append("**** class details");
        if (!StringUtil.isEmpty(pObjectName)) {
            buffer.append(" for \"" + pObjectName + "\"");
        }
        buffer.append(" ****");
        buffer.append(endOfLine);
        // Package
        Package p = c.getPackage();
        if (p != null) {
            buffer.append(p.getName());
        }
        buffer.append(endOfLine);
        // Class or Interface
        if (c.isInterface()) {
            buffer.append("I n t e r f a c e ");
        } else {
            buffer.append("C l a s s ");
        }
        // Strip the package prefix: the last dot-separated token is the simple class name.
        str = c.getName();
        tokenizedString = new StringTokenizer(str, ".");
        while (tokenizedString.hasMoreTokens()) {
            className = tokenizedString.nextToken().trim();
        }
        // Letter-space the simple class name to match the banner style.
        str = new String();
        char[] charArray = className.toCharArray();
        for (int i = 0; i < charArray.length; i++) {
            str += charArray[i] + " ";
        }
        buffer.append(str);
        buffer.append(endOfLine);
        buffer.append(endOfLine);
        // Class Hierarch: collect this class and all its ancestors, then print
        // them root-first as an indented tree.
        List classNameList = new Vector();
        classNameList.add(c.getName());
        Class superclass = c.getSuperclass();
        while (superclass != null) {
            classNameList.add(superclass.getName());
            superclass = superclass.getSuperclass();
        }
        Object[] classNameArray = classNameList.toArray();
        int counter = 0;
        for (int i = classNameArray.length - 1; i >= 0; i--) {
            for (int j = 0; j < counter; j++) {
                buffer.append(" ");
            }
            if (counter > 0) {
                buffer.append("|");
                buffer.append(endOfLine);
            }
            for (int j = 0; j < counter; j++) {
                buffer.append(" ");
            }
            if (counter > 0) {
                buffer.append("+-");
            }
            buffer.append((String) classNameArray[i]);
            buffer.append(endOfLine);
            counter++;
        }
        // Divider
        buffer.append(endOfLine);
        buffer.append(dividerLine);
        buffer.append(endOfLine);
        buffer.append(endOfLine);
        // Profile
        int classModifier = c.getModifiers();
        buffer.append(Modifier.toString(classModifier) + " ");
        if (c.isInterface()) {
            buffer.append("Interface ");
        } else {
            buffer.append("Class ");
        }
        buffer.append(className);
        buffer.append(endOfLine);
        // NOTE(review): classNameArray.length - 2 is the direct superclass entry; this
        // indexes out of bounds when the analysed class is java.lang.Object itself
        // (hierarchy of length 1) - confirm whether that case can reach here.
        if ((classNameArray != null) && (classNameArray[classNameArray.length - 2] != null)) {
            str = (String) classNameArray[classNameArray.length - 2];
            tokenizedString = new StringTokenizer(str, ".");
            while (tokenizedString.hasMoreTokens()) {
                superClassName = tokenizedString.nextToken().trim();
            }
            buffer.append("extends " + superClassName);
            buffer.append(endOfLine);
        }
        if (!c.isInterface()) {
            Class[] interfaces = c.getInterfaces();
            if ((interfaces != null) && (interfaces.length > 0)) {
                buffer.append("implements ");
                str = interfaces[0].getName();
                tokenizedString = new StringTokenizer(str, ".");
                while (tokenizedString.hasMoreTokens()) {
                    str = tokenizedString.nextToken().trim();
                }
                buffer.append(str);
                for (int i = 1; i < interfaces.length; i++) {
                    str = interfaces[i].getName();
                    tokenizedString = new StringTokenizer(str, ".");
                    while (tokenizedString.hasMoreTokens()) {
                        str = tokenizedString.nextToken().trim();
                    }
                    buffer.append(", " + str);
                }
                buffer.append(endOfLine);
            }
        }
        // Divider
        buffer.append(endOfLine);
        buffer.append(dividerLine);
        buffer.append(endOfLine);
        buffer.append(endOfLine);
        // Fields: public fields only (Class.getFields), simple type name + field name.
        buffer.append("F I E L D S U M M A R Y");
        buffer.append(endOfLine);
        Field[] fields = c.getFields();
        if (fields != null) {
            for (int i = 0; i < fields.length; i++) {
                buffer.append(Modifier.toString(fields[i].getType().getModifiers()) + " ");
                str = fields[i].getType().getName();
                tokenizedString = new StringTokenizer(str, ".");
                while (tokenizedString.hasMoreTokens()) {
                    str = tokenizedString.nextToken().trim();
                }
                buffer.append(str + " ");
                buffer.append(fields[i].getName());
                buffer.append(endOfLine);
            }
        }
        buffer.append(endOfLine);
        // Constructors: public constructors with simple parameter type names.
        buffer.append("C O N S T R U C T O R S U M M A R Y");
        buffer.append(endOfLine);
        Constructor[] constructors = c.getConstructors();
        if (constructors != null) {
            for (int i = 0; i < constructors.length; i++) {
                buffer.append(className + "(");
                Class[] parameterTypes = constructors[i].getParameterTypes();
                if (parameterTypes != null) {
                    if (parameterTypes.length > 0) {
                        str = parameterTypes[0].getName();
                        tokenizedString = new StringTokenizer(str, ".");
                        while (tokenizedString.hasMoreTokens()) {
                            str = tokenizedString.nextToken().trim();
                        }
                        buffer.append(str);
                        for (int j = 1; j < parameterTypes.length; j++) {
                            str = parameterTypes[j].getName();
                            tokenizedString = new StringTokenizer(str, ".");
                            while (tokenizedString.hasMoreTokens()) {
                                str = tokenizedString.nextToken().trim();
                            }
                            buffer.append(", " + str);
                        }
                    }
                }
                buffer.append(")");
                buffer.append(endOfLine);
            }
        }
        buffer.append(endOfLine);
        // Methods: public methods with modifiers, simple return type, and parameters.
        buffer.append("M E T H O D S U M M A R Y");
        buffer.append(endOfLine);
        Method[] methods = c.getMethods();
        if (methods != null) {
            for (int i = 0; i < methods.length; i++) {
                buffer.append(Modifier.toString(methods[i].getModifiers()) + " ");
                str = methods[i].getReturnType().getName();
                tokenizedString = new StringTokenizer(str, ".");
                while (tokenizedString.hasMoreTokens()) {
                    str = tokenizedString.nextToken().trim();
                }
                buffer.append(str + " ");
                buffer.append(methods[i].getName() + "(");
                Class[] parameterTypes = methods[i].getParameterTypes();
                if ((parameterTypes != null) && (parameterTypes.length > 0)) {
                    if (parameterTypes[0] != null) {
                        str = parameterTypes[0].getName();
                        tokenizedString = new StringTokenizer(str, ".");
                        while (tokenizedString.hasMoreTokens()) {
                            str = tokenizedString.nextToken().trim();
                        }
                        // array bugfix: JVM array type names end in ';' - render as "[]" instead.
                        if (str.charAt(str.length() - 1) == ';') {
                            str = str.substring(0, str.length() - 1) + "[]";
                        }
                        buffer.append(str);
                        for (int j = 1; j < parameterTypes.length; j++) {
                            str = parameterTypes[j].getName();
                            tokenizedString = new StringTokenizer(str, ".");
                            while (tokenizedString.hasMoreTokens()) {
                                str = tokenizedString.nextToken().trim();
                            }
                            buffer.append(", " + str);
                        }
                    }
                }
                buffer.append(")");
                buffer.append(endOfLine);
            }
        }
        buffer.append(endOfLine);
        // Ending
        buffer.append("**** class details");
        if (!StringUtil.isEmpty(pObjectName)) {
            buffer.append(" for \"" + pObjectName + "\"");
        }
        buffer.append(" end ****");
        buffer.append(endOfLine);
        return buffer.toString();
    }
}
public class StoredPaymentChannelServerStates { /** * If the broadcaster has not been set for MAX _ SECONDS _ TO _ WAIT _ FOR _ BROADCASTER _ TO _ BE _ SET seconds , then * the programmer probably forgot to set it and we should throw exception . */ private TransactionBroadcaster getBroadcaster ( ) { } }
try { return broadcasterFuture . get ( MAX_SECONDS_TO_WAIT_FOR_BROADCASTER_TO_BE_SET , TimeUnit . SECONDS ) ; } catch ( InterruptedException e ) { throw new RuntimeException ( e ) ; } catch ( ExecutionException e ) { throw new RuntimeException ( e ) ; } catch ( TimeoutException e ) { String err = "Transaction broadcaster not set" ; log . error ( err ) ; throw new RuntimeException ( err , e ) ; }
public class XalanGroupMembershipHelperBean { /** * / * ( non - Javadoc ) * @ see org . apereo . portal . security . xslt . IXalanGroupMembershipHelper # isUserDeepMemberOfGroupName ( java . lang . String , java . lang . String ) * groupName is case sensitive . */ @ Override public boolean isUserDeepMemberOfGroupName ( String userName , String groupName ) { } }
final EntityIdentifier [ ] results = GroupService . searchForGroups ( groupName , GroupService . SearchMethod . DISCRETE , IPerson . class ) ; if ( results == null || results . length == 0 ) { return false ; } if ( results . length > 1 ) { this . logger . warn ( results . length + " groups were found for '" + groupName + "'. The first result will be used." ) ; } final IGroupMember group = GroupService . getGroupMember ( results [ 0 ] ) ; final IEntity entity = GroupService . getEntity ( userName , IPerson . class ) ; if ( entity == null ) { if ( this . logger . isDebugEnabled ( ) ) { this . logger . debug ( "No user found for key '" + userName + "'" ) ; } return false ; } return group . asGroup ( ) . deepContains ( entity ) ;
public class RemoteTaskRunner {
    /**
     * Runs the given task. A task will actually be scheduled only if the
     * RemoteTaskRunner has no current knowledge of it; otherwise the future of
     * the existing pending/running/complete work item is returned.
     *
     * @param task task to run
     * @return a future for the task's terminal status
     */
    @Override
    public ListenableFuture<TaskStatus> run(final Task task) {
        final RemoteTaskRunnerWorkItem completeTask, runningTask, pendingTask;
        // Check the three bookkeeping maps in order: pending, running, complete.
        if ((pendingTask = pendingTasks.get(task.getId())) != null) {
            log.info("Assigned a task[%s] that is already pending!", task.getId());
            runPendingTasks();
            return pendingTask.getResult();
        } else if ((runningTask = runningTasks.get(task.getId())) != null) {
            ZkWorker zkWorker = findWorkerRunningTask(task.getId());
            if (zkWorker == null) {
                log.warn("Told to run task[%s], but no worker has started running it yet.", task.getId());
            } else {
                log.info("Task[%s] already running on %s.", task.getId(), zkWorker.getWorker().getHost());
                TaskAnnouncement announcement = zkWorker.getRunningTasks().get(task.getId());
                if (announcement.getTaskStatus().isComplete()) {
                    // The worker already finished it; reconcile our bookkeeping now.
                    taskComplete(runningTask, zkWorker, announcement.getTaskStatus());
                }
            }
            return runningTask.getResult();
        } else if ((completeTask = completeTasks.get(task.getId())) != null) {
            return completeTask.getResult();
        } else {
            // Genuinely unknown task: enqueue it for assignment.
            return addPendingTask(task).getResult();
        }
    }
}
public class XTreeDirectoryEntry {
    /**
     * Calls the super method and then reads this entry's split history from
     * the specified input stream.
     *
     * @param in the stream to read data from in order to restore the object
     * @throws java.io.IOException if I/O errors occur
     * @throws ClassNotFoundException if the class for an object being restored
     *     cannot be found
     */
    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        // Order matters: the superclass fields precede the split history in the stream.
        super.readExternal(in);
        this.splitHistory = SplitHistory.readExternal(in);
    }
}
public class JsonUtils {
    /**
     * Adds quotes around the value iff it is a String (delegating to the
     * String overload); any other object is returned unchanged.
     *
     * @param source the value to possibly quote
     * @return the quoted string, or {@code source} as-is for non-strings
     */
    static Object quoteIfNeeded(Object source) {
        return (source instanceof String) ? quoteIfNeeded((String) source) : source;
    }
}
public class DefaultYAMLParser {
    /**
     * Entry point for the generated parser with an optional debug writer.
     * Maintains a dynamic state and value stack.
     *
     * @param yyLex scanner
     * @param yydebug debug message writer implementing <tt>yyDebug</tt>, or
     *     <tt>null</tt>; currently ignored (see the disabled line below)
     * @return result of the last reduction, if any
     * @throws java.io.IOException on scanner I/O errors
     */
    public Object yyparse(yyInput yyLex, Object yydebug) throws java.io.IOException {
        // Debug wiring intentionally disabled in this build:
        // this.yydebug = (jay.yydebug.yyDebug) yydebug;
        return yyparse(yyLex);
    }
}
public class CacheableExtractableDataSetFetcher {
    /**
     * Downloads (if not already cached) and extracts the dataset into the
     * local cache directory. Verifies the download against an Adler32 checksum
     * when one is expected, and cleans up the cache directory if extraction fails.
     *
     * @param set the dataset to fetch
     * @throws IOException on download or extraction failure
     */
    public void downloadAndExtract(DataSetType set) throws IOException {
        String localFilename = new File(remoteDataUrl(set)).getName();
        File tmpFile = new File(System.getProperty("java.io.tmpdir"), localFilename);
        File localCacheDir = getLocalCacheDir();
        // check empty cache: an existing-but-empty cache dir is stale; remove it so we re-download.
        if (localCacheDir.exists()) {
            File[] list = localCacheDir.listFiles();
            if (list == null || list.length == 0)
                localCacheDir.delete();
        }
        File localDestinationDir = new File(localCacheDir, dataSetName(set));
        if (!localDestinationDir.exists()) {
            localCacheDir.mkdirs();
            // Remove any stale partial download before fetching afresh.
            tmpFile.delete();
            log.info("Downloading dataset to " + tmpFile.getAbsolutePath());
            FileUtils.copyURLToFile(new URL(remoteDataUrl(set)), tmpFile);
        } else {
            // Directory exists and is non-empty - assume OK
            log.info("Using cached dataset at " + localCacheDir.getAbsolutePath());
            return;
        }
        // A zero expected checksum means "no checksum available"; skip verification then.
        if (expectedChecksum(set) != 0L) {
            log.info("Verifying download...");
            Checksum adler = new Adler32();
            FileUtils.checksum(tmpFile, adler);
            long localChecksum = adler.getValue();
            log.info("Checksum local is " + localChecksum + ", expecting " + expectedChecksum(set));
            if (expectedChecksum(set) != localChecksum) {
                log.error("Checksums do not match. Cleaning up files and failing...");
                tmpFile.delete();
                throw new IllegalStateException("Dataset file failed checksum: " + tmpFile + " - expected checksum " + expectedChecksum(set) + " vs. actual checksum " + localChecksum + ". If this error persists, please open an issue at https://github.com/deeplearning4j/deeplearning4j.");
            }
        }
        try {
            ArchiveUtils.unzipFileTo(tmpFile.getAbsolutePath(), localCacheDir.getAbsolutePath());
        } catch (Throwable t) {
            // Catch any errors during extraction, and delete the directory to avoid leaving the dir in an invalid state
            if (localCacheDir.exists())
                FileUtils.deleteDirectory(localCacheDir);
            throw t;
        }
    }
}
public class BoxStoragePolicyAssignment { /** * Create a BoxStoragePolicyAssignment for a BoxStoragePolicy . * @ param api the API connection to be used by the resource . * @ param policyID the policy ID of the BoxStoragePolicy . * @ param userID the user ID of the to assign the BoxStoragePolicy to . * @ return the information about the BoxStoragePolicyAssignment created . */ public static BoxStoragePolicyAssignment . Info create ( BoxAPIConnection api , String policyID , String userID ) { } }
URL url = STORAGE_POLICY_ASSIGNMENT_URL_TEMPLATE . build ( api . getBaseURL ( ) ) ; BoxJSONRequest request = new BoxJSONRequest ( api , url , HttpMethod . POST ) ; JsonObject requestJSON = new JsonObject ( ) . add ( "storage_policy" , new JsonObject ( ) . add ( "type" , "storage_policy" ) . add ( "id" , policyID ) ) . add ( "assigned_to" , new JsonObject ( ) . add ( "type" , "user" ) . add ( "id" , userID ) ) ; request . setBody ( requestJSON . toString ( ) ) ; BoxJSONResponse response = ( BoxJSONResponse ) request . send ( ) ; JsonObject responseJSON = JsonObject . readFrom ( response . getJSON ( ) ) ; BoxStoragePolicyAssignment storagePolicyAssignment = new BoxStoragePolicyAssignment ( api , responseJSON . get ( "id" ) . asString ( ) ) ; return storagePolicyAssignment . new Info ( responseJSON ) ;
public class Permutations {
    /**
     * Returns a random sample of k out of n elements (values 0..n-1, in
     * increasing order). Implements Algorithm S from D. E. Knuth,
     * The Art of Computer Programming, Vol. 2, p. 142.
     *
     * @param k sample size, 0 &lt; k &lt;= n
     * @param n population size
     * @return a sorted array of k distinct indices in [0, n)
     */
    public static int[] randomSubset(int k, int n) {
        assert (0 < k && k <= n);
        Random rng = new Random();
        int[] sample = new int[k];
        int examined = 0; // elements considered so far (Knuth's t)
        int chosen = 0;   // elements selected so far (Knuth's m)
        while (chosen < k) {
            // Select the current element with probability (k - chosen) / (n - examined).
            if ((n - examined) * rng.nextDouble() < k - chosen) {
                sample[chosen++] = examined;
            }
            examined++;
        }
        return sample;
    }
}
public class UrlPatternAnalyzer { public UrlPatternChosenBox choose ( Method executeMethod , String mappingMethodName , String specifiedUrlPattern , List < Class < ? > > pathParamTypeList ) { } }
checkSpecifiedUrlPattern ( executeMethod , specifiedUrlPattern , pathParamTypeList ) ; final UrlPatternChosenBox chosenBox ; if ( specifiedUrlPattern != null && ! specifiedUrlPattern . isEmpty ( ) ) { // e . g . urlPattern = " { } " chosenBox = adjustUrlPatternMethodPrefix ( executeMethod , specifiedUrlPattern , mappingMethodName , /* specified */ true ) ; } else { // urlPattern = [ no definition ] if ( ! pathParamTypeList . isEmpty ( ) ) { // e . g . sea ( int pageNumber ) final String derivedUrlPattern = buildDerivedUrlPattern ( pathParamTypeList ) ; chosenBox = adjustUrlPatternMethodPrefix ( executeMethod , derivedUrlPattern , mappingMethodName , /* non - specified */ false ) ; } else { // e . g . index ( ) , sea ( ) * no parameter chosenBox = adjustUrlPatternByMethodNameWithoutParam ( mappingMethodName ) ; } } return chosenBox ;
public class ContentStoreImpl { /** * { @ inheritDoc } */ @ Override public void validateSpaceId ( String spaceId ) throws InvalidIdException { } }
try { IdUtil . validateSpaceId ( spaceId ) ; } catch ( org . duracloud . storage . error . InvalidIdException e ) { throw new InvalidIdException ( e . getMessage ( ) ) ; }
public class PrivateKeyUsageExtension { /** * Delete the attribute value . * @ exception CertificateException on attribute handling errors . */ public void delete ( String name ) throws CertificateException , IOException { } }
if ( name . equalsIgnoreCase ( NOT_BEFORE ) ) { notBefore = null ; } else if ( name . equalsIgnoreCase ( NOT_AFTER ) ) { notAfter = null ; } else { throw new CertificateException ( "Attribute name not recognized by" + " CertAttrSet:PrivateKeyUsage." ) ; } encodeThis ( ) ;
public class Initializer {
    /**
     * Filters element or list fields, and if their associated objects
     * implement {@link com.redhat.darcy.ui.api.HasElementContext}, calls
     * {@link com.redhat.darcy.ui.api.HasElementContext#setContext(com.redhat.darcy.ui.api.ElementContext)}
     * on those objects with the specified context.
     *
     * @param context the element context to propagate to qualifying field values
     */
    private void setContext(ElementContext context) {
        fields.stream()
                // Only fields whose declared type can carry a context, or element/list fields.
                .filter(f -> HasElementContext.class.isAssignableFrom(f.getType())
                        || Element.class.isAssignableFrom(f.getType())
                        || List.class.isAssignableFrom(f.getType()))
                // TODO: .filter(f -> f.getAnnotation(IndependentContext.class) == null)
                .map(f -> {
                    try {
                        // Read the field's current value off the view instance.
                        return f.get(view);
                    } catch (IllegalAccessException e) {
                        throw new DarcyException(String.format("Couldn't retrieve get object "
                                + "from field, %s, in view, %s", f, view), e);
                    }
                })
                // The runtime value must actually implement HasElementContext (the
                // declared type filter above is only a coarse pre-screen).
                .filter(o -> o instanceof HasElementContext)
                .map(e -> (HasElementContext) e)
                .forEach(e -> e.setContext(context));
    }
}
public class BufferedWriteFilter { /** * Flushes the buffered data . * @ param session the session where buffer will be written */ public void flush ( IoSession session ) { } }
try { internalFlush ( session . getFilterChain ( ) . getNextFilter ( this ) , session , buffersMap . get ( session ) ) ; } catch ( Throwable e ) { session . getFilterChain ( ) . fireExceptionCaught ( e ) ; }
public class TransformationUtils { /** * Get the affine transform that brings from the world envelope to the rectangle . * @ param worldEnvelope the envelope . * @ param pixelRectangle the destination rectangle . * @ return the transform . */ public static AffineTransform getWorldToPixel ( Envelope worldEnvelope , Rectangle pixelRectangle ) { } }
double width = pixelRectangle . getWidth ( ) ; double worldWidth = worldEnvelope . getWidth ( ) ; double height = pixelRectangle . getHeight ( ) ; double worldHeight = worldEnvelope . getHeight ( ) ; AffineTransform translate = AffineTransform . getTranslateInstance ( - worldEnvelope . getMinX ( ) , - worldEnvelope . getMinY ( ) ) ; AffineTransform scale = AffineTransform . getScaleInstance ( width / worldWidth , height / worldHeight ) ; AffineTransform mirror_y = new AffineTransform ( 1 , 0 , 0 , - 1 , 0 , pixelRectangle . getHeight ( ) ) ; AffineTransform world2pixel = new AffineTransform ( mirror_y ) ; world2pixel . concatenate ( scale ) ; world2pixel . concatenate ( translate ) ; return world2pixel ;
public class TreeUtils { /** * Sorts children of a node using a comparator * @ param node Node instance whose children to be sorted * @ param comparator Comparator to use in sorting */ public static void sortNode ( TreeNode node , Comparator comparator ) { } }
TreeNodeList children = ( TreeNodeList ) node . getChildren ( ) ; if ( children != null && ! children . isEmpty ( ) ) { Object [ ] childrenArray = children . toArray ( ) ; Arrays . sort ( childrenArray , comparator ) ; for ( int i = 0 ; i < childrenArray . length ; i ++ ) { children . setSibling ( i , ( TreeNode ) childrenArray [ i ] ) ; } for ( int i = 0 ; i < children . size ( ) ; i ++ ) { sortNode ( children . get ( i ) , comparator ) ; } }
public class VoiceApi { /** * Resume recording a call * Resume recording the specified call . * @ param id The connection ID of the call . ( required ) * @ param resumeRecordingBody Request parameters . ( optional ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > resumeRecordingWithHttpInfo ( String id , ResumeRecordingBody resumeRecordingBody ) throws ApiException { } }
com . squareup . okhttp . Call call = resumeRecordingValidateBeforeCall ( id , resumeRecordingBody , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class CPDefinitionGroupedEntryPersistenceImpl { /** * Removes all the cp definition grouped entries where uuid = & # 63 ; and companyId = & # 63 ; from the database . * @ param uuid the uuid * @ param companyId the company ID */ @ Override public void removeByUuid_C ( String uuid , long companyId ) { } }
for ( CPDefinitionGroupedEntry cpDefinitionGroupedEntry : findByUuid_C ( uuid , companyId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( cpDefinitionGroupedEntry ) ; }
public class LongTuples { /** * Computes the minimum value that occurs in the given tuple , * or < code > Long . MAX _ VALUE < / code > if the given tuple has a * size of 0. * @ param t The input tuple * @ return The minimum value */ public static long min ( LongTuple t ) { } }
return LongTupleFunctions . reduce ( t , Long . MAX_VALUE , Math :: min ) ;
public class TarExporterImpl { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . api . exporter . StreamExporter # exportAsInputStream ( ) */ @ Override public InputStream exportAsInputStream ( ) { } }
// Create export delegate final AbstractExporterDelegate < InputStream > exportDelegate = new TarExporterDelegate ( this . getArchive ( ) ) ; // Execute export return exportDelegate . export ( ) ;
public class BaseListener { /** * Get then next enabled listener in the chain . * @ return The next enabled listener ( of null if none ) . */ public BaseListener getNextEnabledListener ( ) { } }
if ( m_nextListener == null ) return null ; if ( m_nextListener . isEnabled ( ) ) return m_nextListener ; else return m_nextListener . getNextEnabledListener ( ) ;
public class JMThread { /** * Sleep . * @ param millis the millis */ public static void sleep ( long millis ) { } }
try { Thread . sleep ( millis ) ; } catch ( InterruptedException e ) { JMExceptionManager . handleException ( log , e , "sleep" , millis ) ; }
public class FtpPickerFragment { /** * Name is validated to be non - null , non - empty and not containing any * slashes . * @ param name The name of the folder the user wishes to create . */ @ Override public void onNewFolder ( @ NonNull String name ) { } }
// Creates the folder off the UI thread: doInBackground delegates to onNewFolderAsync(name)
// and returns the created FtpFile (or null on failure). onPostExecute then either refreshes
// the view into the new folder or shows an error toast on the UI thread.
// NOTE(review): AsyncTask is deprecated since Android API 30 -- consider migrating to an Executor.
AsyncTask < String , Void , FtpFile > task = new AsyncTask < String , Void , FtpFile > ( ) { @ Override protected FtpFile doInBackground ( String ... names ) { FtpFile result = null ; if ( names . length > 0 ) { result = onNewFolderAsync ( names [ 0 ] ) ; } return result ; } @ Override protected void onPostExecute ( FtpFile folder ) { if ( folder != null ) { refresh ( folder ) ; } else { Toast . makeText ( getContext ( ) , R . string . nnf_create_folder_error , Toast . LENGTH_SHORT ) . show ( ) ; } } } ; task . execute ( name ) ;
public class TagService { /** * Returns the tags that were root in the definition files . These serve as entry point shortcuts when browsing the graph . We could reduce this to * just fewer as the root tags may be connected through parents = " . . . " . */ public List < Tag > getRootTags ( ) { } }
return this . definedTags . values ( ) . stream ( ) . filter ( Tag :: isRoot ) . collect ( Collectors . toList ( ) ) ;
public class IDEStructureImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
// Generated EMF accessor: a feature counts as "set" when its current value differs
// from its EDEFAULT (null-safe comparison). Unknown feature IDs defer to the superclass.
// Left byte-identical because this is @generated code and would be overwritten on regeneration.
switch ( featureID ) { case AfplibPackage . IDE_STRUCTURE__FLAGS : return FLAGS_EDEFAULT == null ? flags != null : ! FLAGS_EDEFAULT . equals ( flags ) ; case AfplibPackage . IDE_STRUCTURE__FORMAT : return FORMAT_EDEFAULT == null ? format != null : ! FORMAT_EDEFAULT . equals ( format ) ; case AfplibPackage . IDE_STRUCTURE__SIZE1 : return SIZE1_EDEFAULT == null ? size1 != null : ! SIZE1_EDEFAULT . equals ( size1 ) ; case AfplibPackage . IDE_STRUCTURE__SIZE2 : return SIZE2_EDEFAULT == null ? size2 != null : ! SIZE2_EDEFAULT . equals ( size2 ) ; case AfplibPackage . IDE_STRUCTURE__SIZE3 : return SIZE3_EDEFAULT == null ? size3 != null : ! SIZE3_EDEFAULT . equals ( size3 ) ; case AfplibPackage . IDE_STRUCTURE__SIZE4 : return SIZE4_EDEFAULT == null ? size4 != null : ! SIZE4_EDEFAULT . equals ( size4 ) ; } return super . eIsSet ( featureID ) ;
public class DevicesStatusApi { /** * Update Device Status * Update Device Status * @ param deviceId Device ID . ( required ) * @ param body Body ( optional ) * @ return DeviceStatus * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public DeviceStatus putDeviceStatus ( String deviceId , DeviceStatusPut body ) throws ApiException { } }
ApiResponse < DeviceStatus > resp = putDeviceStatusWithHttpInfo ( deviceId , body ) ; return resp . getData ( ) ;
public class MethodProducerFactory { /** * Producers returned from this method are not validated . This is an optimization for { @ link ProducerMethod } whose injection points are validated anyway . Internal use only . */ public < T > Producer < T > createProducer ( final Bean < X > declaringBean , final Bean < T > bean , DisposalMethod < X , T > disposalMethod ) { } }
// Loads the enhanced view of the producer method through the container's MemberTransformer,
// then returns an anonymous ProducerMethodProducer that simply closes over the captured
// annotated method, bean manager, declaring bean and produced bean for its accessors.
EnhancedAnnotatedMethod < T , X > enhancedMethod = getManager ( ) . getServices ( ) . get ( MemberTransformer . class ) . loadEnhancedMember ( method , getManager ( ) . getId ( ) ) ; return new ProducerMethodProducer < X , T > ( enhancedMethod , disposalMethod ) { @ Override public AnnotatedMethod < X > getAnnotated ( ) { return method ; } @ Override public BeanManagerImpl getBeanManager ( ) { return getManager ( ) ; } @ Override public Bean < X > getDeclaringBean ( ) { return declaringBean ; } @ Override public Bean < T > getBean ( ) { return bean ; } } ;
public class FilterErrorEvent { /** * Get the original cause of the error . * Use of ServletExceptions by the engine to rethrow errors * can cause the original error to be buried within one or more * exceptions . This method will sift through the wrapped ServletExceptions * to return the original error . */ public Throwable getRootCause ( ) { } }
Throwable root = getError ( ) ; while ( true ) { if ( root instanceof ServletException ) { ServletException se = ( ServletException ) _error ; Throwable seRoot = se . getRootCause ( ) ; if ( seRoot == null ) { return root ; } else if ( seRoot . equals ( root ) ) { // prevent possible recursion return root ; } else { root = seRoot ; } } else { return root ; } }
public class Reflect { /** * Invokes a method with a given name and arguments with the most specific method possible * @ param methodName The name of the method * @ param args The arguments to the method * @ return A Reflect object representing the results * @ throws ReflectionException Something went wrong invoking the method */ public Reflect invoke ( String methodName , Object ... args ) throws ReflectionException { } }
// First try an exact-signature lookup via Class.getMethod; if that fails, fall back to
// a best-matching search (handles widening/boxing mismatches). Only when neither
// resolves is the original NoSuchMethodException wrapped and rethrown.
// NOTE(review): ReflectionUtils.getTypes presumably returns null entries for null args -- confirm.
Class [ ] types = ReflectionUtils . getTypes ( args ) ; try { return on ( clazz . getMethod ( methodName , types ) , object , accessAll , args ) ; } catch ( NoSuchMethodException e ) { Method method = ReflectionUtils . bestMatchingMethod ( getMethods ( ) , methodName , types ) ; if ( method != null ) { return on ( method , object , accessAll , args ) ; } throw new ReflectionException ( e ) ; }
public class NioChildDatagramChannel { /** * Joins the specified multicast group at the specified interface using the specified source . */ public ChannelFuture joinGroup ( InetAddress multicastAddress , NetworkInterface networkInterface , InetAddress source ) { } }
// NIO multicast membership requires Java 7+. After joining (source-specific when a source
// is given), the resulting MembershipKey is recorded per multicast address under `this`'s
// monitor so leaveGroup can later drop it. Any failure is reported through a failed future
// rather than thrown, matching the Netty ChannelFuture contract.
if ( DetectionUtil . javaVersion ( ) < 7 ) { throw new UnsupportedOperationException ( ) ; } if ( multicastAddress == null ) { throw new NullPointerException ( "multicastAddress" ) ; } if ( networkInterface == null ) { throw new NullPointerException ( "networkInterface" ) ; } try { MembershipKey key ; if ( source == null ) { key = channel . join ( multicastAddress , networkInterface ) ; } else { key = channel . join ( multicastAddress , networkInterface , source ) ; } synchronized ( this ) { if ( memberships == null ) { memberships = new HashMap < InetAddress , List < MembershipKey > > ( ) ; } List < MembershipKey > keys = memberships . get ( multicastAddress ) ; if ( keys == null ) { keys = new ArrayList < MembershipKey > ( ) ; memberships . put ( multicastAddress , keys ) ; } keys . add ( key ) ; } } catch ( Throwable e ) { return failedFuture ( this , e ) ; } return succeededFuture ( this ) ;
public class WebXmlScannerPlugin { /** * Set the value for an async supported on the given descriptor . * @ param asyncSupportedDescriptor * The async supported descriptor . * @ param asyncSupported * The value . */ private void setAsyncSupported ( AsyncSupportedDescriptor asyncSupportedDescriptor , TrueFalseType asyncSupported ) { } }
if ( asyncSupported != null ) { asyncSupportedDescriptor . setAsyncSupported ( asyncSupported . isValue ( ) ) ; }
public class HCCol { /** * Create a new column with a certain percentage . * @ param nPerc * The percentage to be used . Should ideally be between 0 and 100. * @ return Never < code > null < / code > . */ @ Nonnull public static HCCol perc ( @ Nonnegative final int nPerc ) { } }
// Builds a fresh column whose width is the CSS percentage string for nPerc.
// Relies on the fluent setter returning the column instance.
return new HCCol ( ) . setWidth ( ECSSUnit . perc ( nPerc ) ) ;
public class EntityDeepJobConfig { /** * Given an instance of the generic object mapped to this configurtion object , * sets the instance property whose name is the name specified by dbName . * Since the provided dbName is the name of the field in the database , we first try * to resolve the property name using the fieldName property of the DeepField annotation . * If we don ' t find any property whose DeepField . fieldName . equals ( dbName ) we fallback to the * name of the Java property . * @ param instance instance object . * @ param dbName name of the field as known by the data store . * @ param value value to set in the property field of the provided instance object . */ public void setInstancePropertyFromDbName ( T instance , String dbName , Object value ) { } }
// Resolve the Java property name mapped to the DB column; unmapped columns are silently ignored.
// Prefer the setter (may apply conversion), fall back to direct field reflection on DeepIOException.
// NOTE(review): `cfs`/`metadataCell` are computed but never used -- dead code unless
// columnDefinitions() has intentional side effects. Also, a null `value` would NPE on
// value.getClass() before the fallback path -- confirm callers never pass null.
Map < String , Cell > cfs = columnDefinitions ( ) ; Cell metadataCell = cfs . get ( dbName ) ; String f = mapDBNameToEntityName . get ( dbName ) ; if ( StringUtils . isEmpty ( f ) ) { // DB column is not mapped in the testentity return ; } try { Method setter = Utils . findSetter ( f , entityClass , value . getClass ( ) ) ; setter . invoke ( instance , value ) ; } catch ( DeepIOException e ) { Utils . setFieldWithReflection ( instance , f , value ) ; } catch ( Exception e1 ) { throw new DeepGenericException ( e1 ) ; }
public class JQMCommon { /** * Expensive , based on jQuery , but gives realistic visibility ( CSS rules , parent chain considered , * width and height are explicitly set to 0 , . . . ) * < br > If you need logical visibility of this particular widget , * use { @ link UIObject # isVisible ( Element elem ) } */ public static boolean isRealVisible ( Widget widget ) { } }
if ( widget == null || ! widget . isAttached ( ) ) return false ; Element elt = widget . getElement ( ) ; return UIObject . isVisible ( elt ) && Mobile . isVisible ( elt ) ;
public class LoginResponseParser { /** * { @ inheritDoc } */ @ Override protected final void checkIntegrity ( ) throws InternetSCSIException { } }
String exceptionMessage ; do { if ( status != LoginStatus . SUCCESS && statusSequenceNumber != 0 ) { exceptionMessage = "While no successful login is preformed, the StatusSequenceNumber must be 0." ; break ; } // message is checked correctly return ; } while ( false ) ; throw new InternetSCSIException ( exceptionMessage ) ;
public class DescribeClustersRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeClustersRequest describeClustersRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeClustersRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeClustersRequest . getClusters ( ) , CLUSTERS_BINDING ) ; protocolMarshaller . marshall ( describeClustersRequest . getInclude ( ) , INCLUDE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ConvertBase { /** * Convert the tabs to spaces . */ public String tabsToSpaces ( String string ) { } }
int iOffset = 0 ; for ( int i = 0 ; i < string . length ( ) ; i ++ ) { if ( string . charAt ( i ) == '\n' ) iOffset = i + 1 ; if ( string . charAt ( i ) == '\t' ) { int iSpaces = ( i - iOffset ) % 4 ; if ( iSpaces == 0 ) iSpaces = 4 ; string = string . substring ( 0 , i ) + FOUR_SPACES . substring ( 0 , iSpaces ) + string . substring ( i + 1 ) ; } } return string ;
public class RaidCodec { /** * Count the number of live replicas of each parity block in the raided file * If any stripe has not enough parity block replicas , add the stripe to * raidEncodingTasks to schedule encoding . * If forceAdd is true , we always add the stripe to raidEncodingTasks * without checking * @ param sourceINode * @ param raidTasks * @ param fs * @ param forceAdd * @ return true if all parity blocks of the file have enough replicas * @ throws IOException */ public boolean checkRaidProgress ( INodeFile sourceINode , LightWeightLinkedSet < RaidBlockInfo > raidEncodingTasks , FSNamesystem fs , boolean forceAdd ) throws IOException { } }
// Walk the file stripe by stripe (stride numStripeBlocks). The first numParityBlocks
// blocks of a stripe are its parity blocks; a stripe is scheduled for (re-)encoding when
// any parity block has fewer live replicas than parityReplication, or unconditionally
// when forceAdd is set. Returns false if any stripe was scheduled.
// NOTE(review): blocks[i + j] assumes every stripe holds at least numParityBlocks blocks;
// a truncated final stripe would throw ArrayIndexOutOfBounds -- confirm invariant.
boolean result = true ; BlockInfo [ ] blocks = sourceINode . getBlocks ( ) ; for ( int i = 0 ; i < blocks . length ; i += numStripeBlocks ) { boolean hasParity = true ; if ( ! forceAdd ) { for ( int j = 0 ; j < numParityBlocks ; j ++ ) { if ( fs . countLiveNodes ( blocks [ i + j ] ) < this . parityReplication ) { hasParity = false ; break ; } } } if ( ! hasParity || forceAdd ) { raidEncodingTasks . add ( new RaidBlockInfo ( blocks [ i ] , parityReplication , i ) ) ; result = false ; } } return result ;
public class FileOutputFormat { /** * Helper function to generate a { @ link Path } for a file that is unique for * the task within the job output directory . * < p > The path can be used to create custom files from within the map and * reduce tasks . The path name will be unique for each task . The path parent * will be the job output directory . < / p > ls * < p > This method uses the { @ link # getUniqueName } method to make the file name * unique for the task . < / p > * @ param conf the configuration for the job . * @ param name the name for the file . * @ return a unique path accross all tasks of the job . */ public static Path getPathForCustomFile ( JobConf conf , String name ) { } }
return new Path ( getWorkOutputPath ( conf ) , getUniqueName ( conf , name ) ) ;
public class CPUMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CPU cPU , ProtocolMarshaller protocolMarshaller ) { } }
if ( cPU == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( cPU . getFrequency ( ) , FREQUENCY_BINDING ) ; protocolMarshaller . marshall ( cPU . getArchitecture ( ) , ARCHITECTURE_BINDING ) ; protocolMarshaller . marshall ( cPU . getClock ( ) , CLOCK_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsonReader { /** * Consumes { @ code expected } . */ private void expect ( JsonToken expected ) throws IOException { } }
// peek() primes the `token` field with the next token without consuming it
// (presumably -- confirm against the peek() implementation); advance() then consumes it.
// Throws if the next token is not the expected one.
peek ( ) ; if ( token != expected ) { throw new IllegalStateException ( "Expected " + expected + " but was " + peek ( ) ) ; } advance ( ) ;
public class FileListFromTaskNextOptions { /** * Set the time the request was issued . Client libraries typically set this to the current system clock time ; set it explicitly if you are calling the REST API directly . * @ param ocpDate the ocpDate value to set * @ return the FileListFromTaskNextOptions object itself . */ public FileListFromTaskNextOptions withOcpDate ( DateTime ocpDate ) { } }
if ( ocpDate == null ) { this . ocpDate = null ; } else { this . ocpDate = new DateTimeRfc1123 ( ocpDate ) ; } return this ;
public class LocatorSelectionStrategy { /** * ( non - Javadoc ) * @ see * org . apache . cxf . clustering . FailoverStrategy # getAlternateAddresses ( org . * apache . cxf . message . Exchange ) */ @ Override public List < String > getAlternateAddresses ( Exchange exchange ) { } }
// Resolve the service targeted by this exchange and return its cached failover endpoints.
return locatorCache . getFailoverEndpoints ( getServiceName ( exchange ) ) ;
public class Job { /** * Forks computation of this job . * < p > The call does not block . < / p > * @ return always returns this job . */ public Job fork ( ) { } }
// Wraps exec() in an H2O counted-completer task: on success the job removes itself,
// on failure it cancels itself with the throwable (unexpected throwables are logged).
// tryComplete() always runs so the completer's dependency count is balanced.
// The task is first registered via start(task) and then submitted to the H2O work queue.
init ( ) ; H2OCountedCompleter task = new H2OCountedCompleter ( ) { @ Override public void compute2 ( ) { try { try { // Exec always waits till the end of computation Job . this . exec ( ) ; Job . this . remove ( ) ; } catch ( Throwable t ) { if ( ! ( t instanceof ExpectedExceptionForDebug ) ) Log . err ( t ) ; Job . this . cancel ( t ) ; } } finally { tryComplete ( ) ; } } } ; start ( task ) ; H2O . submitTask ( task ) ; return this ;
public class AbstractMappingHTTPResponseHandler { /** * This function initializes the component . */ @ Override protected void initializeImpl ( ) { } }
// Order matters: superclass init first, then the two lookup maps are created and filled
// by the subclass-provided populate hooks, and finally the error-detection/message paths
// are read from configuration using subclass-provided property keys.
super . initializeImpl ( ) ; // init maps this . faxActionType2ReponseDataPathMap = new HashMap < FaxActionType , String > ( ) ; this . statusString2FaxStatusMap = new HashMap < String , FaxJobStatus > ( ) ; // populate maps this . populateFaxActionType2ReponseDataPathMappings ( ) ; this . populateStatusString2FaxStatusMappings ( ) ; // set error values Enum < ? > configurationKey = this . getErrorDetectionPathPropertyKey ( ) ; this . errorDetectionPath = this . getConfigurationValue ( configurationKey ) ; configurationKey = this . getErrorDetectionValuePropertyKey ( ) ; this . errorDetectionValue = this . getConfigurationValue ( configurationKey ) ; configurationKey = this . getErrorMessagePathPropertyKey ( ) ; this . errorMessagePath = this . getConfigurationValue ( configurationKey ) ;
public class UnicodeSet { /** * TODO : create class Appendables ? * @ throws IOException */ private static void append ( Appendable app , CharSequence s ) { } }
try { app . append ( s ) ; } catch ( IOException e ) { throw new ICUUncheckedIOException ( e ) ; }
public class BinaryContentUploadServlet { /** * Determines content - type of the uploaded file . * @ param items * @ return the content type */ private String getContentType ( List < FileItem > items ) { } }
for ( FileItem i : items ) { if ( ! i . isFormField ( ) && i . getFieldName ( ) . equals ( CONTENT_PARAMETER ) ) { return i . getContentType ( ) ; } } return null ;
public class BundleAdjustmentProjectiveSchurJacobian { /** * Internal matrix type agnostic process function . * @ param input Input parameters describing the current state of the optimization * @ param leftPoint Storage for left Jacobian * @ param rightView Storage for right Jacobian */ public void processInternal ( double [ ] input , DMatrix leftPoint , DMatrix rightView ) { } }
// Fills the Schur-structured Jacobian: leftPoint holds partials w.r.t. 3D point parameters,
// rightView partials w.r.t. view (camera) parameters. For each view, the 3x4 world-to-view
// matrix is decoded from `input` (unless the view pose is known), each observed point is
// decoded (homogeneous when the structure is homogeneous) and projected, and the analytic
// camera-model gradients are accumulated into two rows (x,y) per observation. Camera
// gradients are only computed/written for views whose pose is being optimized.
// Left byte-identical: the body is order-sensitive and split mid-statement in this chunk.
int numRows = getNumOfOutputsM ( ) ; int numPointParam = structure . points . length * lengthPoint ; int numViewParam = numParameters - numPointParam ; // view + camera ( ( ReshapeMatrix ) leftPoint ) . reshape ( numRows , numPointParam ) ; ( ( ReshapeMatrix ) rightView ) . reshape ( numRows , numViewParam ) ; leftPoint . zero ( ) ; rightView . zero ( ) ; int observationIndex = 0 ; // first decode the transformation for ( int viewIndex = 0 ; viewIndex < structure . views . length ; viewIndex ++ ) { SceneStructureProjective . View view = structure . views [ viewIndex ] ; if ( ! view . known ) { int paramIndex = viewParameterIndexes [ viewIndex ] + indexFirstView ; for ( int i = 0 ; i < 12 ; i ++ ) { worldToView . data [ i ] = input [ paramIndex ++ ] ; } } else { worldToView . set ( view . worldToView ) ; } SceneObservations . View obsView = observations . views [ viewIndex ] ; for ( int i = 0 ; i < obsView . size ( ) ; i ++ ) { int featureIndex = obsView . point . get ( i ) ; int columnOfPointInJac = featureIndex * lengthPoint ; worldPt . x = input [ columnOfPointInJac ] ; worldPt . y = input [ columnOfPointInJac + 1 ] ; worldPt . z = input [ columnOfPointInJac + 2 ] ; if ( structure . isHomogenous ( ) ) { worldPt . w = input [ columnOfPointInJac + 3 ] ; } PerspectiveOps . renderPixel ( worldToView , worldPt , cameraPt ) ; if ( view . known ) { if ( structure . isHomogenous ( ) ) partialCameraModelH ( worldPt . x , worldPt . y , worldPt . z , worldPt . w , worldToView , pointGradX , pointGradY , null , null ) ; else partialCameraModel ( worldPt . x , worldPt . y , worldPt . z , worldToView , pointGradX , pointGradY , null , null ) ; } else { if ( structure . isHomogenous ( ) ) partialCameraModelH ( worldPt . x , worldPt . y , worldPt . z , worldPt . w , worldToView , pointGradX , pointGradY , camGradX , camGradY ) ; else partialCameraModel ( worldPt . x , worldPt . y , worldPt . 
z , worldToView , pointGradX , pointGradY , camGradX , camGradY ) ; } jacRowX = observationIndex * 2 ; jacRowY = jacRowX + 1 ; // = = = = = Partial of worldPt // partial of x ' = ( 1 / z ) * P * X with respect to X is a 2 by 3 | 4 matrix addToJacobian ( leftPoint , columnOfPointInJac , lengthPoint , pointGradX , pointGradY ) ; if ( ! view . known ) { // partial of x ' = ( 1 / z ) * P * X with respect to P is a 2 by 12 matrix int col = viewParameterIndexes [ viewIndex ] ; addToJacobian ( rightView , col , 12 , camGradX , camGradY ) ; } observationIndex ++ ; } } // left . print ( ) ; // right . print ( ) ; // System . out . println ( " Asdads " ) ;
public class UriSpec { /** * Build into a UriSpec string * @ param serviceInstance instance to use for pre - defined replacement fields * @ return UriSpec string */ public String build ( ServiceInstance < ? > serviceInstance ) { } }
return build ( serviceInstance , Maps . < String , Object > newHashMap ( ) ) ;
public class Days { /** * Creates a < code > Days < / code > representing the number of whole days * between the two specified partial datetimes . * The two partials must contain the same fields , for example you can specify * two < code > LocalDate < / code > objects . * @ param start the start partial date , must not be null * @ param end the end partial date , must not be null * @ return the period in days * @ throws IllegalArgumentException if the partials are null or invalid */ public static Days daysBetween ( ReadablePartial start , ReadablePartial end ) { } }
// Fast path: two LocalDates can be differenced directly on their local millis through the
// chronology's days field, avoiding the generic field-by-field comparison. All other
// partial types fall back to BaseSingleFieldPeriod.between, which also validates arguments.
if ( start instanceof LocalDate && end instanceof LocalDate ) { Chronology chrono = DateTimeUtils . getChronology ( start . getChronology ( ) ) ; int days = chrono . days ( ) . getDifference ( ( ( LocalDate ) end ) . getLocalMillis ( ) , ( ( LocalDate ) start ) . getLocalMillis ( ) ) ; return Days . days ( days ) ; } int amount = BaseSingleFieldPeriod . between ( start , end , ZERO ) ; return Days . days ( amount ) ;
public class TokenParser { /** * Convert the given string into tokens . * @ param text the text * @ param tokenize whether to tokenize * @ return the token [ ] */ public static Token [ ] makeTokens ( String text , boolean tokenize ) { } }
if ( text == null ) { return null ; } Token [ ] tokens ; if ( tokenize ) { tokens = parse ( text ) ; } else { tokens = new Token [ 1 ] ; tokens [ 0 ] = new Token ( text ) ; } return tokens ;
public class GetImportRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetImportRequest getImportRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getImportRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getImportRequest . getImportId ( ) , IMPORTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class WestCacheConnector { /** * Connect the cache with the new cached value . * @ param runnable Runnable to call cached method . * @ param cachedValue new cached value * @ param < T > cached value type * @ return cached value . */ @ SuppressWarnings ( "unchecked" ) public static < T > T connectCache ( Runnable runnable , Object cachedValue ) { } }
// Publishes the new value through the thread-local so the interceptor invoked inside
// runnable.run() can pick it up; Lombok's @Cleanup guarantees THREAD_LOCAL.remove()
// runs when the method exits (success or exception). The value left in the thread-local
// after the run is returned (Guava Optional, null when absent).
THREAD_LOCAL . set ( Optional . fromNullable ( cachedValue ) ) ; @ Cleanup QuietCloseable i = ( ) -> THREAD_LOCAL . remove ( ) ; runnable . run ( ) ; return ( T ) THREAD_LOCAL . get ( ) . orNull ( ) ;
public class MediaWikiApiErrorHandler { /** * Creates and throws a suitable { @ link MediaWikiApiErrorException } for the * given error code and message . * @ param errorCode * the error code reported by MediaWiki * @ param errorMessage * the error message reported by MediaWiki , or any other * meaningful message for the user * @ throws MediaWikiApiErrorException * in all cases , but may throw a subclass for some errors */ public static void throwMediaWikiApiErrorException ( String errorCode , String errorMessage ) throws MediaWikiApiErrorException { } }
// Maps known MediaWiki error codes to dedicated exception subclasses; the two token
// errors deliberately fall through to a single TokenErrorException case. Unknown codes
// get the generic MediaWikiApiErrorException. This method never returns normally.
switch ( errorCode ) { case ERROR_NO_TOKEN : case ERROR_INVALID_TOKEN : throw new TokenErrorException ( errorCode , errorMessage ) ; case ERROR_EDIT_CONFLICT : throw new EditConflictErrorException ( errorMessage ) ; case ERROR_NO_SUCH_ENTITY : throw new NoSuchEntityErrorException ( errorMessage ) ; case ERROR_MAXLAG : throw new MaxlagErrorException ( errorMessage ) ; case ERROR_ASSERT_USER_FAILED : throw new AssertUserFailedException ( errorMessage ) ; default : throw new MediaWikiApiErrorException ( errorCode , errorMessage ) ; }
public class SiftsXMLParser { /** * < residue dbResNum = " 1 " dbResName = " THR " > * < crossRefDb dbSource = " PDB " dbVersion = " 20101103" * dbCoordSys = " PDBresnum " dbAccessionId = " 1a4w " dbResNum = " 1H " * dbResName = " THR " dbChainId = " L " > < / crossRefDb > * < crossRefDb dbSource = " UniProt " dbVersion = " 157-2" * dbCoordSys = " UniProt " dbAccessionId = " P00734" * dbResNum = " 328 " dbResName = " T " > < / crossRefDb > * < crossRefDb dbSource = " SCOP " dbVersion = " 1.75" * dbCoordSys = " PDBresnum " dbAccessionId = " 26083" * dbResNum = " 1H " dbResName = " THR " dbChainId = " L " > < / crossRefDb > * < residueDetail dbSource = " MSD " property = " Annotation " > * Not _ Observed < / residueDetail > * < / residue > */ private SiftsResidue getResidue ( Element residue ) { } }
// Builds a SiftsResidue from a <residue> element: the natural position and sequence
// residue name come from the element's own attributes; a "Not_Observed" residueDetail
// marks the residue as not observed; each <crossRefDb> child is then inspected and only
// PDB (PDBresnum coordinate system) and UniProt cross-references are copied onto the result.
// NOTE(review): Integer.parseInt on dbResNum will throw on insertion codes like "1H" in
// the UniProt branch -- presumably UniProt numbers are always plain integers; confirm.
SiftsResidue res = new SiftsResidue ( ) ; String dbResNumS = residue . getAttribute ( "dbResNum" ) ; res . setNaturalPos ( Integer . parseInt ( dbResNumS ) ) ; String seqResName = residue . getAttribute ( "dbResName" ) ; res . setSeqResName ( seqResName ) ; boolean observed = true ; List < String > details = getTextValues ( residue , "residueDetail" ) ; if ( details != null && details . contains ( "Not_Observed" ) ) { observed = false ; } res . setNotObserved ( ! observed ) ; // else if ( detail ! = null & & detail . trim ( ) . equalsIgnoreCase ( " Conflict " ) ) { NodeList nl = residue . getElementsByTagName ( "crossRefDb" ) ; if ( nl != null && nl . getLength ( ) > 0 ) { for ( int i = 0 ; i < nl . getLength ( ) ; i ++ ) { // get the entity element Element crossRefEl = ( Element ) nl . item ( i ) ; String dbSource = crossRefEl . getAttribute ( "dbSource" ) ; String dbCoordSys = crossRefEl . getAttribute ( "dbCoordSys" ) ; String dbAccessionId = crossRefEl . getAttribute ( "dbAccessionId" ) ; String dbResNum = crossRefEl . getAttribute ( "dbResNum" ) ; String dbResName = crossRefEl . getAttribute ( "dbResName" ) ; String dbChainId = crossRefEl . getAttribute ( "dbChainId" ) ; // System . out . println ( dbSource + " " + dbCoordSys + " " + dbAccessionId + " " + dbResNum + " " + dbResName + " " + dbChainId ) ; if ( dbSource . equals ( "PDB" ) && ( dbCoordSys . equals ( "PDBresnum" ) ) ) { res . setPdbResNum ( dbResNum ) ; res . setPdbResName ( dbResName ) ; res . setChainId ( dbChainId ) ; res . setPdbId ( dbAccessionId ) ; } else if ( dbSource . equals ( "UniProt" ) ) { res . setUniProtPos ( Integer . parseInt ( dbResNum ) ) ; res . setUniProtResName ( dbResName ) ; res . setUniProtAccessionId ( dbAccessionId ) ; } } } return res ;
public class MPDPlayer { /** * Sends the appropriate { @ link PlayerChangeEvent } to all registered * { @ link PlayerChangeListener } s . * @ param event the { @ link PlayerChangeEvent . Event } to send */ protected synchronized void firePlayerChangeEvent ( PlayerChangeEvent . Event event ) { } }
PlayerChangeEvent pce = new PlayerChangeEvent ( this , event ) ; for ( PlayerChangeListener pcl : listeners ) { pcl . playerChanged ( pce ) ; }
public class LTieIntConsumerBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < T > LTieIntConsumerBuilder < T > tieIntConsumer ( Consumer < LTieIntConsumer < T > > consumer ) { } }
return new LTieIntConsumerBuilder ( consumer ) ;
public class ASTHelpers { /** * Gives the return type of an ExpressionTree that represents a method select . * < p > TODO ( eaftan ) : Are there other places this could be used ? */ public static Type getReturnType ( ExpressionTree expressionTree ) { } }
if ( expressionTree instanceof JCFieldAccess ) { JCFieldAccess methodCall = ( JCFieldAccess ) expressionTree ; return methodCall . type . getReturnType ( ) ; } else if ( expressionTree instanceof JCIdent ) { JCIdent methodCall = ( JCIdent ) expressionTree ; return methodCall . type . getReturnType ( ) ; } else if ( expressionTree instanceof JCMethodInvocation ) { return getReturnType ( ( ( JCMethodInvocation ) expressionTree ) . getMethodSelect ( ) ) ; } else if ( expressionTree instanceof JCMemberReference ) { return ( ( JCMemberReference ) expressionTree ) . sym . type . getReturnType ( ) ; } throw new IllegalArgumentException ( "Expected a JCFieldAccess or JCIdent" ) ;
public class ApiOvhDbaastimeseries { /** * Create a OpenTSDB token * REST : POST / dbaas / timeseries / { serviceName } / token / opentsdb * @ param serviceName [ required ] Service Name * @ param description [ required ] Token description * @ param permission [ required ] Permission * @ param tags [ required ] Tags to apply * API beta */ public OvhOpenTSDBToken serviceName_token_opentsdb_POST ( String serviceName , String description , String permission , OvhTag [ ] tags ) throws IOException { } }
String qPath = "/dbaas/timeseries/{serviceName}/token/opentsdb" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "description" , description ) ; addBody ( o , "permission" , permission ) ; addBody ( o , "tags" , tags ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhOpenTSDBToken . class ) ;
public class ActionValidator { public ValidationSuccess simplyValidate ( Object form ) { } }
// Validates the form (e.g. a response bean) with a no-op "more validation" lambda and an
// API-failure hook that must never fire in this path -- hence the IllegalStateException.
// for e . g . response bean validator assertArgumentNotNull ( "form" , form ) ; return doValidate ( form , unused -> { } , ( ) -> { throw new IllegalStateException ( "unused here, no way" ) ; } ) ;
public class Checksum { /** * Calculates the MD5 checksum of the specified text . * @ param text the text to generate the MD5 checksum * @ return the hex representation of the MD5 */ public static String getMD5Checksum ( String text ) { } }
final byte [ ] data = stringToBytes ( text ) ; return getChecksum ( MD5 , data ) ;
public class XLSFormatter { /** * Copies all pictures from template sheet to result sheet , shift picture depending on area dependencies * @ param templateSheet - template sheet * @ param resultSheet - result sheet */ protected void copyPicturesFromTemplateToResult ( HSSFSheet templateSheet , HSSFSheet resultSheet ) { } }
// For each anchor in the template, remap its top-left cell into result coordinates and
// shift the bottom-right corner by the same delta, then recreate the picture (by cached
// picture id, consumed in order) on the result sheet's drawing patriarch.
// NOTE(review): setCol2/setRow2 read getCol1/getRow1 AFTER they were overwritten above,
// so the computed extent uses the new top-left -- confirm this is the intended shift.
List < HSSFClientAnchor > list = getAllAnchors ( getEscherAggregate ( templateSheet ) ) ; int i = 0 ; if ( CollectionUtils . isNotEmpty ( orderedPicturesId ) ) { // just a shitty workaround for anchors without pictures for ( HSSFClientAnchor anchor : list ) { Cell topLeft = getCellFromTemplate ( new Cell ( anchor . getCol1 ( ) , anchor . getRow1 ( ) ) ) ; anchor . setCol1 ( topLeft . getCol ( ) ) ; anchor . setRow1 ( topLeft . getRow ( ) ) ; anchor . setCol2 ( topLeft . getCol ( ) + anchor . getCol2 ( ) - anchor . getCol1 ( ) ) ; anchor . setRow2 ( topLeft . getRow ( ) + anchor . getRow2 ( ) - anchor . getRow1 ( ) ) ; HSSFPatriarch sheetPatriarch = drawingPatriarchsMap . get ( resultSheet ) ; if ( sheetPatriarch != null ) { sheetPatriarch . createPicture ( anchor , orderedPicturesId . get ( i ++ ) ) ; } } }
public class JScreen { /** * Free the resources held by this object . * Besides freeing all the sub - screens , this method disconnects all of my * fields from their controls . */ public void free ( ) { } }
// Before tearing down, offer to persist unsaved edits: if the screen's record is modified
// and the user confirms, a SUBMIT action is dispatched. The superclass then releases
// sub-screens and field/control bindings.
FieldList record = this . getFieldList ( ) ; if ( record != null ) if ( record . isModified ( ) ) { String message = this . getBaseApplet ( ) . getString ( "Save changes?" ) ; if ( JOptionPane . showConfirmDialog ( this , message , null , JOptionPane . OK_CANCEL_OPTION ) == JOptionPane . OK_OPTION ) BaseApplet . handleAction ( Constants . SUBMIT , this , this , 0 ) ; // Add any changes } super . free ( ) ;
public class GuiceyBootstrap { /** * Register other guicey bundles for installation . * Duplicate bundles will be filtered automatically : bundles of the same type considered duplicate * ( if two or more bundles of the same type detected then only first instance will be processed ) . * @ param bundles guicey bundles * @ return bootstrap instance for chained calls */ public GuiceyBootstrap bundles ( final GuiceyBundle ... bundles ) { } }
context . registerBundles ( bundles ) ; iterationBundles . addAll ( Arrays . asList ( bundles ) ) ; return this ;
public class GeocodeResultBuilder { /** * Copy an address component . Since they are NOT immutable , I don ' t * want to mess with the variability of the damn things . * @ param in the component to copy * @ return the copy */ private AddressComponent copy ( final AddressComponent in ) { } }
final AddressComponent out = new AddressComponent ( ) ; out . longName = in . longName ; out . shortName = in . shortName ; out . types = Arrays . copyOf ( in . types , in . types . length ) ; return out ;
public class ActionButton { /** * Initializes the animation , which is used while hiding or dismissing * < b > Action Button < / b > * @ param attrs attributes of the XML tag that is inflating the view */ private void initHideAnimation ( TypedArray attrs ) { } }
int index = R . styleable . ActionButton_hide_animation ; if ( attrs . hasValue ( index ) ) { int animResId = attrs . getResourceId ( index , Animations . NONE . animResId ) ; hideAnimation = Animations . load ( getContext ( ) , animResId ) ; LOGGER . trace ( "Initialized Action Button hide animation" ) ; }
public class NodeVector { /** * Inserts the specified node in this vector at the specified index . * Each component in this vector with an index greater or equal to * the specified index is shifted upward to have an index one greater * than the value it had previously . * @ param value Node to insert * @ param at Position where to insert */ public void insertElementAt ( int value , int at ) { } }
if ( null == m_map ) { m_map = new int [ m_blocksize ] ; m_mapSize = m_blocksize ; } else if ( ( m_firstFree + 1 ) >= m_mapSize ) { m_mapSize += m_blocksize ; int newMap [ ] = new int [ m_mapSize ] ; System . arraycopy ( m_map , 0 , newMap , 0 , m_firstFree + 1 ) ; m_map = newMap ; } if ( at <= ( m_firstFree - 1 ) ) { System . arraycopy ( m_map , at , m_map , at + 1 , m_firstFree - at ) ; } m_map [ at ] = value ; m_firstFree ++ ;
public class PluginOption {
    /**
     * Convert this {@link PluginOption} to the Option required by Apache
     * Commons Cli.
     *
     * @return The option object required by commons cli
     */
    Option toOption() {
        // Base option: short name, description and the required flag.
        DefaultOptionBuilder oBuilder = new DefaultOptionBuilder();
        oBuilder.withShortName(option).withDescription(description).withRequired(required);
        if (longOptionName != null) {
            oBuilder.withLongName(longOptionName);
        }
        if (hasArgs) {
            ArgumentBuilder aBuilder = new ArgumentBuilder();
            if (argName != null) {
                aBuilder.withName(argName);
            }
            if (argsAreOptional) {
                // Optional arguments: zero occurrences are allowed.
                aBuilder.withMinimum(0);
            }
            if (argsCount != null) {
                aBuilder.withMaximum(argsCount);
            } else {
                // Default to a single argument when no count was configured.
                aBuilder.withMaximum(1);
            }
            if (argsValueSeparator != null && argsValueSeparator.length() != 0) {
                // Only the first character of the configured separator string is used,
                // both for the initial and for subsequent separators.
                aBuilder.withInitialSeparator(argsValueSeparator.charAt(0));
                aBuilder.withSubsequentSeparator(argsValueSeparator.charAt(0));
            }
            oBuilder.withArgument(aBuilder.create());
        }
        return oBuilder.create();
    }
}
public class vlan_stats { /** * Use this API to fetch the statistics of all vlan _ stats resources that are configured on netscaler . */ public static vlan_stats [ ] get ( nitro_service service ) throws Exception { } }
vlan_stats obj = new vlan_stats ( ) ; vlan_stats [ ] response = ( vlan_stats [ ] ) obj . stat_resources ( service ) ; return response ;
public class GetUserDefinedFunctionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetUserDefinedFunctionsRequest getUserDefinedFunctionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getUserDefinedFunctionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getUserDefinedFunctionsRequest . getCatalogId ( ) , CATALOGID_BINDING ) ; protocolMarshaller . marshall ( getUserDefinedFunctionsRequest . getDatabaseName ( ) , DATABASENAME_BINDING ) ; protocolMarshaller . marshall ( getUserDefinedFunctionsRequest . getPattern ( ) , PATTERN_BINDING ) ; protocolMarshaller . marshall ( getUserDefinedFunctionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( getUserDefinedFunctionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ChunkedFileSet {
    /**
     * This function looks for files with the "wrong" replica type in their name, and
     * if it finds any, renames them.
     *
     * Those files may have ended up on this server either because:
     * 1. We restored them from another server, where they were named according to
     *    another replica type. Or,
     * 2. The {@link voldemort.store.readonly.mr.azkaban.VoldemortBuildAndPushJob}
     *    and the {@link voldemort.store.readonly.fetcher.HdfsFetcher} are operating
     *    in 'build.primary.replicas.only' mode, so they only ever built and fetched
     *    replica 0 of any given file.
     *
     * Note: This is an implementation detail of the READONLY_V2 naming scheme, and
     * should not be used outside of that scope.
     *
     * @param masterPartitionId  partition ID of the "primary replica"
     * @param correctReplicaType replica number which should be found on the current
     *                           node for the provided masterPartitionId
     */
    private void renameReadOnlyV2Files(int masterPartitionId, int correctReplicaType) {
        for (int replica = 0; replica < routingStrategy.getNumReplicas(); replica++) {
            if (replica != correctReplicaType) {
                int chunkId = 0;
                while (true) {
                    // READONLY_V2 file names have the form: <partition>_<replica>_<chunk>
                    String fileName = Integer.toString(masterPartitionId) + "_" + Integer.toString(replica) + "_" + Integer.toString(chunkId);
                    File index = getIndexFile(fileName);
                    File data = getDataFile(fileName);
                    if (index.exists() && data.exists()) {
                        // We found files with the "wrong" replica type, so let's rename them
                        String correctFileName = Integer.toString(masterPartitionId) + "_" + Integer.toString(correctReplicaType) + "_" + Integer.toString(chunkId);
                        File indexWithCorrectReplicaType = getIndexFile(correctFileName);
                        File dataWithCorrectReplicaType = getDataFile(correctFileName);
                        Utils.move(index, indexWithCorrectReplicaType);
                        Utils.move(data, dataWithCorrectReplicaType);
                        // Maybe change this to DEBUG?
                        logger.info("Renamed files with wrong replica type: " + index.getAbsolutePath() + "|data -> " + indexWithCorrectReplicaType.getName() + "|data");
                    } else if (index.exists() ^ data.exists()) {
                        // Exactly one of the pair exists: the store is corrupt, abort loudly.
                        throw new VoldemortException("One of the following does not exist: " + index.toString() + " or " + data.toString() + ".");
                    } else {
                        // The files don't exist, or we've gone over all available chunks,
                        // so let's move on.
                        break;
                    }
                    chunkId++;
                }
            }
        }
    }
}
public class URL { /** * Makes a copy of a given URL with some additional components . * @ param deviceAddress bluetooth device MAC address * @ param attr device attributes * @ return a copy of a given URL with some additional components */ public URL copyWithDevice ( String deviceAddress , Map < String , String > attr ) { } }
return new URL ( this . protocol , this . adapterAddress , deviceAddress , attr , this . serviceUUID , this . characteristicUUID , this . fieldName ) ;
public class JcrStorageDao { /** * clear all parents from cache */ private void clearParentsCache ( Entity entity ) { } }
try { String xpath = null ; if ( entity instanceof DataSource ) { // find all reports and charts with this DataSource xpath = "//nextServer//*[@dataSource='" + entity . getId ( ) + "']" ; } else if ( entity instanceof Report ) { // find all schedulers with this report xpath = "//nextServer/scheduler/*[@report='" + entity . getId ( ) + "']" ; } if ( xpath != null ) { NodeIterator nodes = getTemplate ( ) . query ( xpath ) . getNodes ( ) ; while ( nodes . hasNext ( ) ) { entitiesCache . remove ( nodes . nextNode ( ) . getIdentifier ( ) ) ; } } // if entity is inside a drill down we have to clear the master // report ( with drillDown list ) // first parent is ' drillDownEntities ' node ; second parent is the // actual report / chart if ( ( entity instanceof Report ) || ( entity instanceof Chart ) ) { xpath = " //nextServer//drillDownEntities/*[@entity='" + entity . getId ( ) + "']" ; if ( xpath != null ) { NodeIterator nodes = getTemplate ( ) . query ( xpath ) . getNodes ( ) ; while ( nodes . hasNext ( ) ) { entitiesCache . remove ( nodes . nextNode ( ) . getParent ( ) . getParent ( ) . getIdentifier ( ) ) ; } } } } catch ( RepositoryException e ) { throw convertJcrAccessException ( e ) ; }
public class CmsParameterConfiguration { /** * Merges this parameter configuration with the provided other parameter configuration . < p > * The difference form a simple < code > Map & lt ; String , String & gt ; < / code > is that for the parameter * configuration , the values of the keys in both maps are merged and kept in the Object store * as a List . < p > * As result , < code > this < / code > configuration will be altered , the other configuration will * stay unchanged . < p > * @ param other the other parameter configuration to merge this configuration with */ @ Override public void putAll ( Map < ? extends String , ? extends String > other ) { } }
for ( String key : other . keySet ( ) ) { boolean tokenize = false ; if ( other instanceof CmsParameterConfiguration ) { Object o = ( ( CmsParameterConfiguration ) other ) . getObject ( key ) ; if ( o instanceof List ) { tokenize = true ; } } add ( key , other . get ( key ) , tokenize ) ; }
public class CronMapper { /** * Creates a Function that returns an Always instance . * @ param name - Cron field name * @ return new CronField - > CronField instance , never null */ @ VisibleForTesting static Function < CronField , CronField > returnAlwaysExpression ( final CronFieldName name ) { } }
return field -> new CronField ( name , always ( ) , FieldConstraintsBuilder . instance ( ) . forField ( name ) . createConstraintsInstance ( ) ) ;
public class ParameterSerializer {
    /**
     * Parse a collection of values and get the value mapped to a smartfox value.
     *
     * @param method     structure of the getter method
     * @param collection collection of values
     * @return the value after parsing
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    protected Object parseCollection(GetterMethodCover method, Collection collection) {
        // Dispatch on the getter's declared collection flavour; first match wins,
        // so the order of these checks is significant.
        if (method.isArrayObjectCollection()) {
            return parseArrayObjectCollection(method, collection);
        } else if (method.isObjectCollection()) {
            return parseObjectCollection(method, collection);
        } else if (method.isByteCollection()) {
            return collectionToPrimitiveByteArray(collection);
        } else if (method.isCharCollection()) {
            return charCollectionToPrimitiveByteArray(collection);
        } else if (method.isArrayCollection()) {
            return parseArrayCollection(method, collection);
        }
        // No special handling required: hand the collection back untouched.
        return collection;
    }
}
public class ComponentBorder { /** * Install this Border on the specified component by replacing the * existing Border with a CompoundBorder containing the original Border * and our ComponentBorder * This method should only be invoked once all the properties of this * class have been set . Installing the Border more than once will cause * unpredictable results . */ public void install ( JComponent parent ) { } }
this . parent = parent ; determineInsetsAndAlignment ( ) ; // Add this Border to the parent Border current = parent . getBorder ( ) ; if ( current == null ) { parent . setBorder ( this ) ; } else { CompoundBorder compound = new CompoundBorder ( current , this ) ; parent . setBorder ( compound ) ; } // Add component to the parent parent . add ( component ) ;
public class JsMessageImpl { /** * Return a new JsMessage , generalizing the message to just be a JsMessageImpl . * The new message contains the given JsMsgObject . * @ param newJmo The JsMsgObject the new JsMessage will contain . * @ return JsMessage A new JsMessage instance at the JsMessageImpl specialization . */ private final JsMessageImpl createNewGeneralized ( JsMsgObject newJmo ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "createNewGeneralized" ) ; JsMessageImpl newMsg = null ; /* Now create the new JsMessage */ newMsg = new JsMessageImpl ( newJmo ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createNewGeneralized" , newMsg ) ; return newMsg ;
public class DB { /** * unexpected deadlock ) . */ public ManagedProcess dumpXML ( File outputFile , String dbName , String user , String password ) throws IOException , ManagedProcessException { } }
return dump ( outputFile , Arrays . asList ( dbName ) , true , true , true , user , password ) ;
public class CEMIFactory {
    /**
     * Returns an EMI1/EMI2-formatted message using the specified L-Data message
     * parameters. This method performs the same formatting as
     * {@link #toEmi(CEMILData)}, with the flexibility of individual message
     * parameters from an already dissected cEMI L-Data message.
     *
     * @param mc message code
     * @param dst destination address
     * @param p message priority
     * @param repeat see
     *        {@link CEMILData#CEMILData(int, IndividualAddress, KNXAddress, byte[], Priority, boolean, boolean, boolean, int)}
     * @param ackRequest see
     *        {@link CEMILData#CEMILData(int, IndividualAddress, KNXAddress, byte[], Priority, boolean, boolean, boolean, int)}
     * @param positiveCon positive confirmation, <code>true</code> if not applicable
     * @param hopCount the hop count starting value set in control field, in the
     *        range 0 &lt;= value &lt;= 7
     * @param nsdu the NSDU
     * @return byte array containing the link layer message in EMI1/EMI2 format
     * @throws KNXIllegalArgumentException on unsupported ASDU length
     */
    public static byte[] toEmi(final int mc, final KNXAddress dst, final Priority p, final boolean repeat, final boolean ackRequest, final boolean positiveCon, final int hopCount, final byte[] nsdu) {
        // TP1, standard frames only
        if (nsdu.length > 16)
            throw new KNXIllegalArgumentException("maximum TPDU length is 16 in standard frame");
        // Frame layout: [0]=mc, [1]=control, [2..3]=source (left 0), [4..5]=dest,
        // [6]=hop count + length, [7..]=NSDU.
        final byte[] buf = new byte[nsdu.length + 7];
        buf[0] = (byte) mc;
        // Priority occupies bits 3..2 of the control field.
        buf[1] = (byte) (p.value << 2);
        // repeat flag is only relevant for .con
        final boolean rep = mc == Emi1_LData_con ? repeat : false;
        final int ctrl = (rep ? 0x20 : 0) | (ackRequest ? 0x02 : 0) | (positiveCon ? 0 : 0x01);
        buf[1] |= (byte) ctrl;
        // buf[2]/buf[3] (source address) stay 0 -- presumably filled in by the
        // device/stack downstream; TODO confirm.
        // on dst null, default address 0 is used (null indicates system broadcast in link API)
        final int d = dst != null ? dst.getRawAddress() : 0;
        buf[4] = (byte) (d >> 8);
        buf[5] = (byte) d;
        // High nibble: hop count; low nibble: TPDU length - 1.
        buf[6] = (byte) (hopCount << 4 | (nsdu.length - 1));
        // Bit 7 of buf[6] marks a group-address destination.
        if (dst instanceof GroupAddress)
            buf[6] |= 0x80;
        for (int i = 0; i < nsdu.length; ++i)
            buf[7 + i] = nsdu[i];
        return buf;
    }
}
public class SkeletonStream {
    /**
     * Processes and tracks the next packet for the stream.
     *
     * @param packet the next raw Ogg packet of this Skeleton stream
     * @throws IllegalStateException if the decoded packet is of an unknown
     *         Skeleton packet type
     */
    public void processPacket(OggPacket packet) {
        SkeletonPacket skel = SkeletonPacketFactory.create(packet);
        // First packet must be the head
        if (packet.isBeginningOfStream()) {
            fishead = (SkeletonFishead) skel;
        } else if (skel instanceof SkeletonFisbone) {
            SkeletonFisbone bone = (SkeletonFisbone) skel;
            fisbones.add(bone);
            // Index bones by the serial number of the stream they describe.
            bonesByStream.put(bone.getSerialNumber(), bone);
        } else if (skel instanceof SkeletonKeyFramePacket) {
            keyFrames.add((SkeletonKeyFramePacket) skel);
        } else {
            // Anything else is not valid Skeleton data.
            throw new IllegalStateException("Unexpected Skeleton " + skel);
        }
        // Once the end-of-stream packet arrives the whole skeleton has been seen.
        if (packet.isEndOfStream()) {
            hasWholeStream = true;
        }
    }
}
public class PackagingProcessors { /** * Finds the first packaging processor on the classpath that supports give { @ code packageType } * @ param packagingType Package type * @ return */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) public static PackagingProcessor < ? extends Archive < ? > > find ( final PackagingType packagingType ) { ServiceRegistry registry = ServiceRegistry . getInstance ( ) ; Collection < PackagingProcessor > processors = registry . all ( PackagingProcessor . class ) ; StringBuilder unsupportedFormatMessage = new StringBuilder ( "No packaging processor for " ) . append ( packagingType . toString ( ) ) . append ( " packaging was found. Supported processors are: " ) ; for ( PackagingProcessor processor : processors ) { if ( processor . handles ( packagingType ) ) { // unchecked cast return ( PackagingProcessor < ? extends Archive < ? > > ) processor ; } unsupportedFormatMessage . append ( processor . getClass ( ) ) . append ( ", " ) ; } // trim if ( unsupportedFormatMessage . indexOf ( ", " ) != - 1 ) { unsupportedFormatMessage . delete ( unsupportedFormatMessage . length ( ) - 2 , unsupportedFormatMessage . length ( ) ) ; } throw new UnsupportedOperationException ( unsupportedFormatMessage . toString ( ) ) ;
public class EIIImpl {
    /**
     * Resets the given feature to its default value.
     * Generated EMF dispatcher: known features are reset locally, everything
     * else is delegated to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.EII__IMO_NAME:
                // Restore the IMO name to its generated default.
                setImoName(IMO_NAME_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class SBGNLayoutManager {
    /**
     * Applies CoSE layout to the given SBGN PD model.
     *
     * @param sbgn     model where layout is performed and stored
     * @param doLayout whether to actually run the CoSE or just complete the SBGN model
     */
    public void createLayout(final Sbgn sbgn, boolean doLayout) {
        // Reset all bookkeeping maps for a fresh layout run.
        viewToLayout = new HashMap();
        glyphToVNode = new HashMap();
        idToGLyph = new HashMap();
        idToCompartmentGlyphs = new HashMap();
        portIDToOwnerGlyph = new HashMap();
        layoutToView = new HashMap();
        idToArcs = new HashMap<String, Arc>();
        this.layout = new SbgnPDLayout();
        final LGraphManager graphMgr = layout.getGraphManager();
        graphMgr.addRoot();
        // Artificial root compound that will hold the whole glyph hierarchy.
        root = new VCompound(new Glyph());
        // Detect compartment glyphs and put them in a hashmap;
        // also set compartment glyphs of members of complexes.
        for (Glyph g : sbgn.getMap().getGlyph()) {
            if (glyphClazzOneOf(g, GlyphClazz.COMPARTMENT)) {
                idToCompartmentGlyphs.put(g.getId(), g);
            }
            // Set compartmentRef to all children of a Complex node.
            Glyph compartment = (Glyph) g.getCompartmentRef();
            if (compartment != null && glyphClazzOneOf(g, GlyphClazz.COMPLEX)) {
                setCompartmentRefForComplexMembers(g, compartment, new HashSet<Glyph>());
            }
        }
        // Nest glyphs inside compartment glyphs according to their compartmentRef.
        // This list holds the glyphs that will be deleted after the corresponding
        // glyph is added as a child glyph of another glyph.
        if (!idToCompartmentGlyphs.isEmpty()) {
            List<Glyph> deletedList = new ArrayList<Glyph>();
            for (Glyph g : sbgn.getMap().getGlyph()) {
                Glyph containerCompartment = (Glyph) g.getCompartmentRef();
                if (containerCompartment != null) {
                    idToCompartmentGlyphs.get(containerCompartment.getId()).getGlyph().add(g);
                    deletedList.add(g);
                }
            }
            // Delete the duplicate glyphs, after they are moved to the corresponding
            // compartment glyph.
            for (Glyph g : deletedList) {
                sbgn.getMap().getGlyph().remove(g);
            }
        }
        // Initialize the map for keeping ports and their owner glyphs
        // with entries like: <portID, ownerGlyph>.
        initPortIdToGlyphMap(sbgn.getMap().getGlyph());
        // Remove ports from source and target fields of arcs;
        // replace them with the owner glyphs of these ports.
        removePortsFromArcs(sbgn.getMap().getArc());
        // Assign logical operator and Process nodes to compartments.
        assignProcessAndLogicOpNodesToCompartment(sbgn);
        // Create VNodes for the ChiLay layout component.
        createVNodes(root, sbgn.getMap().getGlyph());
        for (VNode vNode : root.children) {
            createLNode(vNode, null);
        }
        // Create LEdges for the ChiLay layout component.
        createLEdges(sbgn.getMap().getArc());
        // Apply layout.
        if (doLayout) {
            layout.runLayout();
        }
        graphMgr.updateBounds();
        // Here if any SbgnProcessNode node is returned from SBGNPD Layout,
        // this means that we will have two additional port infos. We should
        // add this information to libSbgn objects.
        for (Object lNode : layout.getAllNodes()) {
            if (lNode instanceof SbgnProcessNode) {
                // Set geometry of the corresponding node.
                VNode vNode = layoutToView.get(((SbgnProcessNode) lNode).label);
                Bbox tempBbox = vNode.glyph.getBbox();
                tempBbox.setX((float) (((SbgnProcessNode) lNode).getLeft()));
                tempBbox.setY((float) (((SbgnProcessNode) lNode).getTop()));
                vNode.glyph.setBbox(tempBbox);
                // Created port objects in layout level.
                SbgnPDNode inputLPort = ((SbgnProcessNode) lNode).getInputPort();
                SbgnPDNode outputLPort = ((SbgnProcessNode) lNode).getOutputPort();
                // New port objects.
                Port inputPort = new Port();
                Port outputPort = new Port();
                // Set port attributes.
                inputPort.setX((float) (inputLPort.getCenterX()));
                inputPort.setY((float) (inputLPort.getCenterY()));
                inputPort.setId(inputLPort.label);
                outputPort.setX((float) (outputLPort.getCenterX()));
                outputPort.setY((float) (outputLPort.getCenterY()));
                outputPort.setId((outputLPort.label));
                // Clear existing ports!
                vNode.glyph.getPort().clear();
                // Connect existing arcs to newly created ports.
                // These ports are created by ChiLay and SBGNPD Layout.
                connectArcToPort(inputLPort, inputPort);
                connectArcToPort(outputLPort, outputPort);
                // Add ports to the corresponding glyph.
                vNode.glyph.getPort().add(inputPort);
                vNode.glyph.getPort().add(outputPort);
            }
        }
        // Update the bounds.
        for (VNode vNode : root.children) {
            updateCompoundBounds(vNode.glyph, vNode.glyph.getGlyph());
        }
        // Clear the inside of the compartment glyphs:
        // move their members back to the map's top-level glyph list.
        for (Glyph compGlyph : idToCompartmentGlyphs.values()) {
            // Again add the members of compartments.
            for (Glyph memberGlyph : compGlyph.getGlyph()) {
                sbgn.getMap().getGlyph().add(memberGlyph);
            }
            compGlyph.getGlyph().clear();
        }
    }
}
public class OMMapManagerOld { /** * Flushes away all the buffers of closed files . This frees the memory . */ public void flush ( ) { } }
lock . writeLock ( ) . lock ( ) ; try { OMMapBufferEntry entry ; for ( Iterator < OMMapBufferEntry > it = bufferPoolLRU . iterator ( ) ; it . hasNext ( ) ; ) { entry = it . next ( ) ; if ( entry . file != null && entry . file . isClosed ( ) ) { if ( removeEntry ( entry ) ) it . remove ( ) ; } } } finally { lock . writeLock ( ) . unlock ( ) ; }
public class XmGroovyExecutionStrategy {
    /**
     * Builds the map of atomic LEP resource keys that actually exist for the given
     * composite key, one per script sub-type.
     *
     * Resulting key pattern:
     * lep://xm/com/icthh/lep/&lt;script_name&gt;$&lt;entityType&gt;$&lt;state&gt;$&lt;script_type&gt;.groovy
     *
     * @param compositeResourceKey the composite resource key to expand
     * @param managerService       manager service used to resolve tenant and resources
     * @return map from script sub-type to the existing atomic resource key
     * @throws IllegalArgumentException when the composite path has no file extension
     */
    public Map<XmLepResourceSubType, UrlLepResourceKey> getAvailableAtomicResourceKeys(UrlLepResourceKey compositeResourceKey, LepManagerService managerService) {
        // add '/' at start if not exists
        String compositePath = compositeResourceKey.getUrlResourcePath();
        if (!compositePath.startsWith(URL_DELIMITER)) {
            compositePath = URL_DELIMITER + compositePath;
        }
        // get all path without script file extension
        int extIndex = compositePath.lastIndexOf(XmLepConstants.SCRIPT_EXTENSION_SEPARATOR);
        if (extIndex <= 0) {
            throw new IllegalArgumentException("LEP resource name must ends with *.<extension>, actual value: " + compositePath);
        }
        final String scriptBasePath = compositePath.substring(0, extIndex);
        final String extension = compositePath.substring(extIndex);
        String tenantKey = LepContextUtils.getTenantKey(managerService);
        Map<XmLepResourceSubType, UrlLepResourceKey> resourceKeyMap = new EnumMap<>(XmLepResourceSubType.class);
        // Probe every script sub-type; only keys whose resource exists are returned.
        for (XmLepResourceSubType type : XmLepResourceSubType.values()) {
            // build atomic key for script type
            String resourceUrlPath = URL_NET_PATH_DELIMITER + tenantKey + scriptBasePath + SCRIPT_NAME_SEPARATOR + type.getName() + extension;
            UrlLepResourceKey atomicKey = UrlLepResourceKey.valueOfUrlResourcePath(resourceUrlPath);
            // if resource for key exists add it to map
            if (managerService.getResourceService().isResourceExists(managerService, atomicKey)) {
                resourceKeyMap.put(type, atomicKey);
            }
        }
        return resourceKeyMap;
    }
}
public class FSOutputSummer { /** * Writes < code > len < / code > bytes from the specified byte array * starting at offset < code > off < / code > and generate a checksum for * each data chunk . * < p > This method stores bytes from the given array into this * stream ' s buffer before it gets checksumed . The buffer gets checksumed * and flushed to the underlying output stream when all data * in a checksum chunk are in the buffer . If the buffer is empty and * requested length is at least as large as the size of next checksum chunk * size , this method will checksum and write the chunk directly * to the underlying output stream . Thus it avoids uneccessary data copy . * @ param b the data . * @ param off the start offset in the data . * @ param len the number of bytes to write . * @ exception IOException if an I / O error occurs . */ public synchronized void write ( byte b [ ] , int off , int len ) throws IOException { } }
if ( off < 0 || len < 0 || off > b . length - len ) { throw new ArrayIndexOutOfBoundsException ( ) ; } for ( int n = 0 ; n < len ; n += write1 ( b , off + n , len - n ) ) { } incMetrics ( len ) ;
public class Stream { /** * Zip together the " a " and " b " iterators until all of them runs out of values . * Each pair of values is combined into a single value using the supplied zipFunction function . * @ param a * @ param b * @ param valueForNoneA value to fill if " a " runs out of values first . * @ param valueForNoneB value to fill if " b " runs out of values first . * @ param zipFunction * @ return */ public static < R > Stream < R > zip ( final FloatIterator a , final FloatIterator b , final float valueForNoneA , final float valueForNoneB , final FloatBiFunction < R > zipFunction ) { } }
return new IteratorStream < > ( new ObjIteratorEx < R > ( ) { @ Override public boolean hasNext ( ) { return a . hasNext ( ) || b . hasNext ( ) ; } @ Override public R next ( ) { if ( hasNext ( ) == false ) { throw new NoSuchElementException ( ) ; } return zipFunction . apply ( a . hasNext ( ) ? a . nextFloat ( ) : valueForNoneA , b . hasNext ( ) ? b . nextFloat ( ) : valueForNoneB ) ; } } ) ;
public class StringUtilities { /** * Skip all linear white spaces * @ param buf the buf which is being scanned for lws * @ param start the offset to start at * @ return the next position in buf which isn ' t a lws character */ private static int skipLws ( byte [ ] buf , int start ) { } }
int i ; for ( i = start ; i < buf . length ; i ++ ) { if ( ! isLws ( buf [ i ] ) ) { return i ; } } return i ;
public class BeaconManager { /** * Check if Bluetooth LE is supported by this Android device , and if so , make sure it is enabled . * @ return false if it is supported and not enabled * @ throws BleNotAvailableException if Bluetooth LE is not supported . ( Note : The Android emulator will do this ) */ @ TargetApi ( 18 ) public boolean checkAvailability ( ) throws BleNotAvailableException { } }
if ( ! isBleAvailableOrSimulated ( ) ) { throw new BleNotAvailableException ( "Bluetooth LE not supported by this device" ) ; } return ( ( BluetoothManager ) mContext . getSystemService ( Context . BLUETOOTH_SERVICE ) ) . getAdapter ( ) . isEnabled ( ) ;
public class RetryHandler { /** * Handle exception from a write failure . */ public void handleExceptionWrite ( Exception e , List < Object > w ) { } }
final String mName = "handleExceptionWrite" ; if ( logger . isLoggable ( Level . FINER ) ) logger . logp ( Level . FINE , className , mName , e . getClass ( ) . getName ( ) + "; " + this . toString ( ) ) ; if ( ! isRetryLimitReached ( ) && isRetryable ( e ) ) { // Retry it . Log it . Call the RetryListener . retryType = RETRY_WRITE ; _retryException = e ; ++ _retryCount ; logRetry ( e ) ; if ( _retryWriteListeners != null ) { for ( RetryWriteListenerProxy retryWriteListenerProxy : _retryWriteListeners ) { retryWriteListenerProxy . onRetryWriteException ( w , e ) ; } } } else { // No retry . Throw it back . if ( logger . isLoggable ( Level . FINER ) ) logger . logp ( Level . FINE , className , mName , "No retry. Rethrow " , e ) ; throw new BatchContainerRuntimeException ( e ) ; }