signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NodeTraversal { /** * Traverses a parse tree recursively . */ public void traverse ( Node root ) { } }
try { initTraversal ( root ) ; curNode = root ; pushScope ( root ) ; // null parent ensures that the shallow callbacks will traverse root traverseBranch ( root , null ) ; popScope ( ) ; } catch ( Error | Exception unexpectedException ) { throwUnexpectedException ( unexpectedException ) ; }
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class FindAeWords {

  /** Compiled once instead of on every call: 'a' or 'e' followed by word characters. */
  private static final Pattern AE_WORD = Pattern.compile("[ae]\\w+");

  /**
   * Finds every maximal substring that starts with 'a' or 'e' and continues with
   * word characters. Matches may begin in the middle of a word.
   *
   * Examples:
   *   findAeWords("python program")           -> ["am"]
   *   findAeWords("python programe language") -> ["ame", "anguage"]
   *   findAeWords("assert statement")         -> ["assert", "atement"]
   *
   * @param inputString the text to scan; must not be null
   * @return the matches in encounter order (possibly empty)
   */
  public static ArrayList<String> findAeWords(String inputString) {
    ArrayList<String> wordsList = new ArrayList<>();
    Matcher m = AE_WORD.matcher(inputString);
    while (m.find()) {
      wordsList.add(m.group());
    }
    return wordsList;
  }
}
public class KeyAgreementPeer { /** * < p > Computes the shared secret using the other peer ' s public key . < / p > * @ param key * @ return * @ throws InvalidKeyException */ public byte [ ] computeSharedSecret ( Key key ) throws InvalidKeyException { } }
keyAgreement . doPhase ( key , true ) ; return keyAgreement . generateSecret ( ) ;
public class AWSAppSyncClient { /** * Retrieves a < code > Type < / code > object . * @ param getTypeRequest * @ return Result of the GetType operation returned by the service . * @ throws BadRequestException * The request is not well formed . For example , a value is invalid or a required field is missing . Check the * field values , and then try again . * @ throws ConcurrentModificationException * Another modification is in progress at this time and it must complete before you can make your change . * @ throws NotFoundException * The resource specified in the request was not found . Check the resource , and then try again . * @ throws UnauthorizedException * You are not authorized to perform this operation . * @ throws InternalFailureException * An internal AWS AppSync error occurred . Try your request again . * @ sample AWSAppSync . GetType * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / appsync - 2017-07-25 / GetType " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetTypeResult getType ( GetTypeRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetType ( request ) ;
public class TerminateWorkspacesRequest { /** * The WorkSpaces to terminate . You can specify up to 25 WorkSpaces . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTerminateWorkspaceRequests ( java . util . Collection ) } or * { @ link # withTerminateWorkspaceRequests ( java . util . Collection ) } if you want to override the existing values . * @ param terminateWorkspaceRequests * The WorkSpaces to terminate . You can specify up to 25 WorkSpaces . * @ return Returns a reference to this object so that method calls can be chained together . */ public TerminateWorkspacesRequest withTerminateWorkspaceRequests ( TerminateRequest ... terminateWorkspaceRequests ) { } }
if ( this . terminateWorkspaceRequests == null ) { setTerminateWorkspaceRequests ( new com . amazonaws . internal . SdkInternalList < TerminateRequest > ( terminateWorkspaceRequests . length ) ) ; } for ( TerminateRequest ele : terminateWorkspaceRequests ) { this . terminateWorkspaceRequests . add ( ele ) ; } return this ;
public class NodeBuilder { /** * checks if we can add the requested keys + children to the builder , and if not we spill - over into our parent */ private void ensureRoom ( int nextBuildKeyPosition ) { } }
if ( nextBuildKeyPosition < MAX_KEYS ) return ; // flush even number of items so we don ' t waste leaf space repeatedly Object [ ] flushUp = buildFromRange ( 0 , FAN_FACTOR , isLeaf ( copyFrom ) , true ) ; ensureParent ( ) . addExtraChild ( flushUp , buildKeys [ FAN_FACTOR ] ) ; int size = FAN_FACTOR + 1 ; assert size <= buildKeyPosition : buildKeyPosition + "," + nextBuildKeyPosition ; System . arraycopy ( buildKeys , size , buildKeys , 0 , buildKeyPosition - size ) ; buildKeyPosition -= size ; maxBuildKeyPosition = buildKeys . length ; if ( buildChildPosition > 0 ) { System . arraycopy ( buildChildren , size , buildChildren , 0 , buildChildPosition - size ) ; buildChildPosition -= size ; }
public class StructureIO { /** * Returns all biological assemblies for the given PDB id . * The output Structure will be different depending on the multiModel parameter : * < li > * the symmetry - expanded chains are added as new models , one per transformId . All original models but * the first one are discarded . * < / li > * < li > * as original with symmetry - expanded chains added with renamed chain ids and names ( in the form * originalAsymId _ transformId and originalAuthId _ transformId ) * < / li > * If only one biological assembly is required use { @ link # getBiologicalAssembly ( String ) } or { @ link # getBiologicalAssembly ( String , int ) } instead . * @ param pdbId * @ param multiModel if true the output Structure will be a multi - model one with one transformId per model , * if false the outputStructure will be as the original with added chains with renamed asymIds ( in the form originalAsymId _ transformId and originalAuthId _ transformId ) . * @ return * @ throws IOException * @ throws StructureException * @ since 5.0 */ public static List < Structure > getBiologicalAssemblies ( String pdbId , boolean multiModel ) throws IOException , StructureException { } }
checkInitAtomCache ( ) ; pdbId = pdbId . toLowerCase ( ) ; List < Structure > s = cache . getBiologicalAssemblies ( pdbId , multiModel ) ; return s ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcPropertyBoundedValue ( ) { } }
if ( ifcPropertyBoundedValueEClass == null ) { ifcPropertyBoundedValueEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 398 ) ; } return ifcPropertyBoundedValueEClass ;
public class PoiWorkbookConverters { /** * Returns { @ link EvaluationCell } of given { @ link EvaluationWorkbook } . * { @ link EvaluationWorkbook } should be created with { @ link # toEvaluationWorkbook ( Workbook ) } method . */ public static EvaluationCell getEvaluationCell ( EvaluationWorkbook evaluationWorkbook , IA1Address addr ) { } }
return ( ( PoiProxyWorkbook ) evaluationWorkbook ) . getSheet ( 0 ) . getCell ( addr . row ( ) , addr . column ( ) ) ;
public class CloseDownServiceImp { /** * ( non - Javadoc ) * @ see com . popbill . api . CloseDownService # CheckCorpNum */ @ Override public CorpState CheckCorpNum ( String MemberCorpNum , String CheckCorpNum ) throws PopbillException { } }
return httpget ( "/CloseDown?CN=" + CheckCorpNum , MemberCorpNum , null , CorpState . class ) ;
public class DBConn { /** * configured maximum number of retries . */ private void commitMutations ( DBTransaction dbTran , long timestamp ) { } }
Map < ByteBuffer , Map < String , List < Mutation > > > colMutMap = CassandraTransaction . getUpdateMap ( dbTran , timestamp ) ; if ( colMutMap . size ( ) == 0 ) { return ; } m_logger . debug ( "Committing {} mutations" , CassandraTransaction . totalColumnMutations ( dbTran ) ) ; // The batch _ mutate will be retried up to MAX _ COMMIT _ RETRIES times . boolean bSuccess = false ; for ( int attempts = 1 ; ! bSuccess ; attempts ++ ) { try { // Attempt to commit all updates in the the current mutation map . Date startDate = new Date ( ) ; m_client . batch_mutate ( colMutMap , ConsistencyLevel . ONE ) ; timing ( "commitMutations" , startDate ) ; if ( attempts > 1 ) { // Since we had a failure and warned about it , confirm which attempt succeeded . m_logger . info ( "batch_mutate() succeeded on attempt #{}" , attempts ) ; } bSuccess = true ; } catch ( InvalidRequestException ex ) { // No point in retrying this one . m_bFailed = true ; m_logger . error ( "batch_mutate() failed" , ex ) ; throw new RuntimeException ( "batch_mutate() failed" , ex ) ; } catch ( Exception ex ) { // If we ' ve reached the retry limit , we fail this commit . if ( attempts >= m_max_commit_attempts ) { m_bFailed = true ; m_logger . error ( "All retries exceeded; abandoning batch_mutate()" , ex ) ; throw new RuntimeException ( "All retries exceeded; abandoning batch_mutate()" , ex ) ; } // Report retry as a warning . m_logger . warn ( "batch_mutate() attempt #{} failed: {}" , attempts , ex ) ; try { // We wait more with each failure . Thread . sleep ( attempts * m_retry_wait_millis ) ; } catch ( InterruptedException e1 ) { // ignore } // Experience suggests that even for timeout exceptions , the connection // may be bad , so we attempt to reconnect . If this fails , it will throw // an DBNotAvailableException , which we pass to the caller . reconnect ( ex ) ; } }
public class TraceComponent { /** * Process a full trace specification of the form * = info : loggerX = fine and * set the corresponding trace flags for this trace component . */ private boolean updateTraceSpec ( TraceSpecification ts ) { } }
List < TraceElement > traceSpecs = ts . getSpecs ( ) ; Integer minimumLevel = null ; if ( ts . isSensitiveTraceSuppressed ( ) ) { minimumLevel = findMinimumSafeLevel ( ts . getSafeLevelsIndex ( ) ) ; } int newFineLevel = fineLevel ; int newFineLevelsEnabled = 0 ; int newSpecTraceLevel = TrLevelConstants . SPEC_TRACE_LEVEL_OFF ; for ( TraceElement spec : traceSpecs ) { String clazz = spec . groupName ; int traceElementFineLevel = spec . fineLevel ; int specTraceLevel = spec . specTraceLevel ; boolean setValue = spec . action ; // Do we have a match with our package name ? boolean process = false ; if ( clazz . endsWith ( "*" ) ) // packages can end with wildcard { if ( 1 == clazz . length ( ) ) { process = true ; } else { clazz = clazz . substring ( 0 , clazz . length ( ) - 1 ) ; for ( String group : groups ) { if ( group . startsWith ( clazz ) ) { process = true ; break ; } } process = process || name . startsWith ( clazz ) ; } } else { // or can be a complete package or parent package // - look for a full or partial match with the TC package name int lastDot = name . lastIndexOf ( '.' ) ; if ( lastDot > 0 ) { String packageName = name . substring ( 0 , lastDot ) ; if ( packageName . startsWith ( clazz ) ) process = true ; } // groups may be class names ( eg WsLogger impls ) for ( String group : groups ) { lastDot = group . lastIndexOf ( '.' ) ; if ( lastDot > 0 ) { String packageName = group . substring ( 0 , lastDot ) ; if ( packageName . startsWith ( clazz ) ) process = true ; break ; } } // could be a straight group name match for ( String group : groups ) { if ( group . equalsIgnoreCase ( clazz ) ) { process = true ; break ; } } process = process || name . equalsIgnoreCase ( clazz ) ; } if ( process ) { newFineLevel = traceElementFineLevel ; newSpecTraceLevel = specTraceLevel ; if ( minimumLevel != null && newFineLevel < minimumLevel ) { newFineLevel = minimumLevel ; } for ( int level = newFineLevel ; level < TrLevelConstants . 
TRACE_LEVEL_OFF ; level ++ ) { if ( setValue ) { newFineLevelsEnabled |= 1 << level ; } else { newFineLevelsEnabled &= ~ ( 1 << level ) ; } } if ( newFineLevel == TrLevelConstants . TRACE_LEVEL_OFF ) { isTraceOff = true ; } else { isTraceOff = false ; } // Indicate that the trace spec matched something spec . setMatched ( true ) ; } } // end for each spec boolean updated = false ; if ( newFineLevel != fineLevel ) { fineLevel = newFineLevel ; updated = true ; } if ( newFineLevelsEnabled != fineLevelsEnabled ) { fineLevelsEnabled = newFineLevelsEnabled ; updated = true ; } if ( newSpecTraceLevel != specTraceLevel ) { specTraceLevel = newSpecTraceLevel ; updated = true ; } return updated ;
public class Channel { /** * Query approval status for all organizations . * @ param lifecycleQueryApprovalStatusRequest The request see { @ link LifecycleQueryApprovalStatusRequest } * @ param peers Peers to send the request . Usually only need one . * @ return A { @ link LifecycleQueryApprovalStatusProposalResponse } * @ throws InvalidArgumentException * @ throws ProposalException */ public Collection < LifecycleQueryApprovalStatusProposalResponse > sendLifecycleQueryApprovalStatusRequest ( LifecycleQueryApprovalStatusRequest lifecycleQueryApprovalStatusRequest , Collection < Peer > peers ) throws InvalidArgumentException , ProposalException { } }
if ( null == lifecycleQueryApprovalStatusRequest ) { throw new InvalidArgumentException ( "The lifecycleQueryApprovalStatusRequest parameter can not be null." ) ; } checkChannelState ( ) ; checkPeers ( peers ) ; try { if ( IS_TRACE_LEVEL ) { String collectionData = "null" ; final org . hyperledger . fabric . protos . common . Collection . CollectionConfigPackage chaincodeCollectionConfiguration = lifecycleQueryApprovalStatusRequest . getCollectionConfigPackage ( ) ; if ( null != chaincodeCollectionConfiguration ) { final byte [ ] asBytes = chaincodeCollectionConfiguration . toByteArray ( ) ; if ( null != asBytes ) { collectionData = toHexString ( asBytes ) ; } } logger . trace ( format ( "LifecycleQueryApprovalStatus channel: %s, sequence: %d, chaincodeName: %s, chaincodeVersion: %s" + ", isInitRequired: %s, validationParameter: '%s', endorsementPolicyPlugin: %s, validationPlugin: %s" + ", collectionConfiguration: %s" , name , lifecycleQueryApprovalStatusRequest . getSequence ( ) , lifecycleQueryApprovalStatusRequest . getChaincodeName ( ) , lifecycleQueryApprovalStatusRequest . getChaincodeVersion ( ) , lifecycleQueryApprovalStatusRequest . isInitRequired ( ) + "" , toHexString ( lifecycleQueryApprovalStatusRequest . getValidationParameter ( ) ) , lifecycleQueryApprovalStatusRequest . getChaincodeEndorsementPlugin ( ) , lifecycleQueryApprovalStatusRequest . getChaincodeValidationPlugin ( ) , collectionData ) ) ; } TransactionContext context = getTransactionContext ( lifecycleQueryApprovalStatusRequest ) ; LifecycleQueryApprovalStatusBuilder lifecycleQueryApprovalStatusBuilder = LifecycleQueryApprovalStatusBuilder . newBuilder ( ) ; lifecycleQueryApprovalStatusBuilder . setSequence ( lifecycleQueryApprovalStatusRequest . getSequence ( ) ) ; lifecycleQueryApprovalStatusBuilder . setName ( lifecycleQueryApprovalStatusRequest . getChaincodeName ( ) ) ; lifecycleQueryApprovalStatusBuilder . setVersion ( lifecycleQueryApprovalStatusRequest . 
getChaincodeVersion ( ) ) ; String endorsementPlugin = lifecycleQueryApprovalStatusRequest . getChaincodeEndorsementPlugin ( ) ; if ( ! isNullOrEmpty ( endorsementPlugin ) ) { lifecycleQueryApprovalStatusBuilder . setEndorsementPlugin ( endorsementPlugin ) ; } String validationPlugin = lifecycleQueryApprovalStatusRequest . getChaincodeValidationPlugin ( ) ; if ( ! isNullOrEmpty ( validationPlugin ) ) { lifecycleQueryApprovalStatusBuilder . setValidationPlugin ( validationPlugin ) ; } ByteString validationParameter = lifecycleQueryApprovalStatusRequest . getValidationParameter ( ) ; if ( null != validationParameter ) { lifecycleQueryApprovalStatusBuilder . setValidationParameter ( validationParameter ) ; } org . hyperledger . fabric . protos . common . Collection . CollectionConfigPackage collectionConfigPackage = lifecycleQueryApprovalStatusRequest . getCollectionConfigPackage ( ) ; if ( null != collectionConfigPackage ) { lifecycleQueryApprovalStatusBuilder . setCollections ( collectionConfigPackage ) ; } Boolean initRequired = lifecycleQueryApprovalStatusRequest . isInitRequired ( ) ; if ( null != initRequired ) { lifecycleQueryApprovalStatusBuilder . setInitRequired ( initRequired ) ; } lifecycleQueryApprovalStatusBuilder . context ( context ) ; SignedProposal qProposal = getSignedProposal ( context , lifecycleQueryApprovalStatusBuilder . build ( ) ) ; return sendProposalToPeers ( peers , qProposal , context , LifecycleQueryApprovalStatusProposalResponse . class ) ; } catch ( Exception e ) { throw new ProposalException ( format ( "QueryNamespaceDefinitions %s channel failed. " + e . getMessage ( ) , name ) , e ) ; }
public class IntDoubleHashVector { /** * Gets a NEW array containing all the elements in this vector . */ public double [ ] toNativeArray ( ) { } }
final double [ ] arr = new double [ getNumImplicitEntries ( ) ] ; iterate ( new FnIntDoubleToVoid ( ) { @ Override public void call ( int idx , double val ) { arr [ idx ] = val ; } } ) ; return arr ;
public class Threads { /** * Returns a CheckedFuture for the given CheckedAsync object */ public static < T , X extends Exception > CheckedFuture < T , X > checkedFuture ( final CheckedAsync < T , X > async ) { } }
return Futures . makeChecked ( future ( async ) , async ) ;
public class JinjavaListELResolver {
  /**
   * Convert the given property to an index. Inspired by ListELResolver.toIndex,
   * but without the base param since we only use it for getValue where base is null.
   *
   * @param property the name of the property to analyze; coerced to an int
   * @return the index of property in base
   * @throws IllegalArgumentException if property cannot be coerced to an integer
   */
  private static int toIndex(Object property) {
    if (property instanceof Number) {
      return ((Number) property).intValue();
    }
    if (property instanceof String) {
      try {
        // ListELResolver uses valueOf, but findbugs complains.
        return Integer.parseInt((String) property);
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Cannot parse list index: " + property);
      }
    }
    if (property instanceof Character) {
      // A character is coerced via its code point.
      return ((Character) property).charValue();
    }
    if (property instanceof Boolean) {
      return ((Boolean) property).booleanValue() ? 1 : 0;
    }
    throw new IllegalArgumentException("Cannot coerce property to list index: " + property);
  }
}
public class ProcessorGraphNode { /** * Return input mapper from processor . */ @ Nonnull public BiMap < String , String > getInputMapper ( ) { } }
final BiMap < String , String > inputMapper = this . processor . getInputMapperBiMap ( ) ; if ( inputMapper == null ) { return HashBiMap . create ( ) ; } return inputMapper ;
public class SharedPreferenceUtils { /** * Inflate menu item for debug . * @ param inflater : * MenuInflater to inflate the menu . * @ param menu * : Menu object to inflate debug menu . */ public void inflateDebugMenu ( MenuInflater inflater , Menu menu ) { } }
inflater . inflate ( R . menu . debug , menu ) ;
public class ModelConstraints { /** * Gathers the pk fields from the hierarchy of the given class , and copies them into the class . * @ param classDef The root of the hierarchy * @ throws ConstraintException If there is a conflict between the pk fields */ private void ensurePKsFromHierarchy ( ClassDescriptorDef classDef ) throws ConstraintException { } }
SequencedHashMap pks = new SequencedHashMap ( ) ; for ( Iterator it = classDef . getAllExtentClasses ( ) ; it . hasNext ( ) ; ) { ClassDescriptorDef subTypeDef = ( ClassDescriptorDef ) it . next ( ) ; ArrayList subPKs = subTypeDef . getPrimaryKeys ( ) ; // check against already present PKs for ( Iterator pkIt = subPKs . iterator ( ) ; pkIt . hasNext ( ) ; ) { FieldDescriptorDef fieldDef = ( FieldDescriptorDef ) pkIt . next ( ) ; FieldDescriptorDef foundPKDef = ( FieldDescriptorDef ) pks . get ( fieldDef . getName ( ) ) ; if ( foundPKDef != null ) { if ( ! isEqual ( fieldDef , foundPKDef ) ) { throw new ConstraintException ( "Cannot pull up the declaration of the required primary key " + fieldDef . getName ( ) + " because its definitions in " + fieldDef . getOwner ( ) . getName ( ) + " and " + foundPKDef . getOwner ( ) . getName ( ) + " differ" ) ; } } else { pks . put ( fieldDef . getName ( ) , fieldDef ) ; } } } ensureFields ( classDef , pks . values ( ) ) ;
public class IntegerBestFitAllocator { /** * Unlink steps : * 1 . If x is a chained node , unlink it from its same - sized fd / bk links * and choose its bk node as its replacement . * 2 . If x was the last node of its size , but not a leaf node , it must * be replaced with a leaf node ( not merely one with an open left or * right ) , to make sure that lefts and rights of descendents * correspond properly to bit masks . We use the rightmost descendent * of x . We could use any other leaf , but this is easy to locate and * tends to counteract removal of leftmosts elsewhere , and so keeps * paths shorter than minimally guaranteed . This doesn ' t loop much * because on average a node in a tree is near the bottom . * 3 . If x is the base of a chain ( i . e . , has parent links ) relink * x ' s parent and children to x ' s replacement ( or null if none ) . */ private void unlinkLargeChunk ( int x ) { } }
int xp = parent ( x ) ; int r ; if ( backward ( x ) != x ) { int f = forward ( x ) ; r = backward ( x ) ; if ( okAddress ( f ) ) { backward ( f , r ) ; forward ( r , f ) ; } else { throw new AssertionError ( ) ; } } else { int rpIndex ; if ( ( ( r = child ( x , rpIndex = 1 ) ) != - 1 ) || ( ( r = child ( x , rpIndex = 0 ) ) != - 1 ) ) { int rp = x ; while ( true ) { if ( child ( r , 1 ) != - 1 ) { rp = r ; rpIndex = 1 ; r = child ( r , 1 ) ; } else if ( child ( r , 0 ) != - 1 ) { rp = r ; rpIndex = 0 ; r = child ( r , 0 ) ; } else { break ; } } if ( okAddress ( rp ) ) { child ( rp , rpIndex , - 1 ) ; } else { throw new AssertionError ( ) ; } } } int index = index ( x ) ; if ( xp != - 1 || treeBins [ index ] == x ) { int h = treeBins [ index ] ; if ( x == h ) { if ( ( treeBins [ index ] = r ) == - 1 ) { clearTreeMap ( index ) ; } else { parent ( r , - 1 ) ; } } else if ( okAddress ( xp ) ) { if ( child ( xp , 0 ) == x ) { child ( xp , 0 , r ) ; } else { child ( xp , 1 , r ) ; } } else { throw new AssertionError ( ) ; } if ( r != - 1 ) { if ( okAddress ( r ) ) { int c0 , c1 ; parent ( r , xp ) ; if ( ( c0 = child ( x , 0 ) ) != - 1 ) { if ( okAddress ( c0 ) ) { child ( r , 0 , c0 ) ; parent ( c0 , r ) ; } else { throw new AssertionError ( ) ; } } if ( ( c1 = child ( x , 1 ) ) != - 1 ) { if ( okAddress ( c1 ) ) { child ( r , 1 , c1 ) ; parent ( c1 , r ) ; } else { throw new AssertionError ( ) ; } } } else { throw new AssertionError ( ) ; } } }
public class AmazonRDSClient { /** * Promotes a Read Replica DB instance to a standalone DB instance . * < note > * < ul > * < li > * Backup duration is a function of the amount of changes to the database since the previous backup . If you plan to * promote a Read Replica to a standalone instance , we recommend that you enable backups and complete at least one * backup prior to promotion . In addition , a Read Replica cannot be promoted to a standalone instance when it is in * the < code > backing - up < / code > status . If you have enabled backups on your Read Replica , configure the automated * backup window so that daily backups do not interfere with Read Replica promotion . * < / li > * < li > * This command doesn ' t apply to Aurora MySQL and Aurora PostgreSQL . * < / li > * < / ul > * < / note > * @ param promoteReadReplicaRequest * @ return Result of the PromoteReadReplica operation returned by the service . * @ throws InvalidDBInstanceStateException * The DB instance isn ' t in a valid state . * @ throws DBInstanceNotFoundException * < i > DBInstanceIdentifier < / i > doesn ' t refer to an existing DB instance . * @ sample AmazonRDS . PromoteReadReplica * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / rds - 2014-10-31 / PromoteReadReplica " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DBInstance promoteReadReplica ( PromoteReadReplicaRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePromoteReadReplica ( request ) ;
public class SourceParams { /** * Create parameters necessary to create a 3D Secure source . * @ param amount A positive integer in the smallest currency unit representing the amount to * charge the customer ( e . g . , 1099 for a € 10.99 payment ) . * @ param currency The currency the payment is being created in ( e . g . , eur ) . * @ param returnUrl The URL the customer should be redirected to after the verification process . * @ param cardID The ID of the card source . * @ return a { @ link SourceParams } object that can be used to create a 3D Secure source * @ see < a href = " https : / / stripe . com / docs / sources / three - d - secure " > https : / / stripe . com / docs / sources / three - d - secure < / a > */ @ NonNull public static SourceParams createThreeDSecureParams ( @ IntRange ( from = 0 ) long amount , @ NonNull String currency , @ NonNull String returnUrl , @ NonNull String cardID ) { } }
return new SourceParams ( ) . setType ( Source . THREE_D_SECURE ) . setCurrency ( currency ) . setAmount ( amount ) . setRedirect ( createSimpleMap ( FIELD_RETURN_URL , returnUrl ) ) . setApiParameterMap ( createSimpleMap ( FIELD_CARD , cardID ) ) ;
public class ConfirmPinActivity { /** * Implementation of BasePinActivity method * @ param pin PIN value entered by user */ @ Override public final void onCompleted ( String pin ) { } }
resetStatus ( ) ; if ( isPinCorrect ( pin ) ) { setResult ( SUCCESS ) ; finish ( ) ; } else { setLabel ( getString ( R . string . message_invalid_pin ) ) ; }
public class RBBIDataWrapper { /** * / CLOVER : OFF */ private void dumpCharCategories ( java . io . PrintStream out ) { } }
int n = fHeader . fCatCount ; String catStrings [ ] = new String [ n + 1 ] ; int rangeStart = 0 ; int rangeEnd = 0 ; int lastCat = - 1 ; int char32 ; int category ; int lastNewline [ ] = new int [ n + 1 ] ; for ( category = 0 ; category <= fHeader . fCatCount ; category ++ ) { catStrings [ category ] = "" ; } out . println ( "\nCharacter Categories" ) ; out . println ( "--------------------" ) ; for ( char32 = 0 ; char32 <= 0x10ffff ; char32 ++ ) { category = fTrie . getCodePointValue ( char32 ) ; category &= ~ 0x4000 ; // Mask off dictionary bit . if ( category < 0 || category > fHeader . fCatCount ) { out . println ( "Error, bad category " + Integer . toHexString ( category ) + " for char " + Integer . toHexString ( char32 ) ) ; break ; } if ( category == lastCat ) { rangeEnd = char32 ; } else { if ( lastCat >= 0 ) { if ( catStrings [ lastCat ] . length ( ) > lastNewline [ lastCat ] + 70 ) { lastNewline [ lastCat ] = catStrings [ lastCat ] . length ( ) + 10 ; catStrings [ lastCat ] += "\n " ; } catStrings [ lastCat ] += " " + Integer . toHexString ( rangeStart ) ; if ( rangeEnd != rangeStart ) { catStrings [ lastCat ] += "-" + Integer . toHexString ( rangeEnd ) ; } } lastCat = category ; rangeStart = rangeEnd = char32 ; } } catStrings [ lastCat ] += " " + Integer . toHexString ( rangeStart ) ; if ( rangeEnd != rangeStart ) { catStrings [ lastCat ] += "-" + Integer . toHexString ( rangeEnd ) ; } for ( category = 0 ; category <= fHeader . fCatCount ; category ++ ) { out . println ( intToString ( category , 5 ) + " " + catStrings [ category ] ) ; } out . println ( ) ;
public class CmsReplaceDialog { /** * Initializes the dialog content . < p > * @ param structureId the structure id of the file to replace */ protected void initContent ( final CmsUUID structureId ) { } }
CmsRpcAction < CmsReplaceInfo > action = new CmsRpcAction < CmsReplaceInfo > ( ) { @ Override public void execute ( ) { start ( 0 , true ) ; CmsCoreProvider . getVfsService ( ) . getFileReplaceInfo ( structureId , this ) ; } @ Override protected void onResponse ( CmsReplaceInfo result ) { initContent ( result ) ; stop ( false ) ; } } ; action . execute ( ) ;
public class FunctionBodyAnalyzer { /** * Record information about the side effects caused by assigning a value to a given LHS . * < p > If the operation modifies this or taints global state , mark the enclosing function as * having those side effects . * @ param sideEffectInfo Function side effect record to be updated * @ param scope variable scope in which the variable assignment occurs * @ param enclosingFunction FUNCTION node for the enclosing function * @ param lhsNodes LHS nodes that are all assigned values by a given parent node * @ param hasLocalRhs Predicate indicating whether a given LHS is being assigned a local value */ private void visitLhsNodes ( AmbiguatedFunctionSummary sideEffectInfo , Scope scope , Node enclosingFunction , List < Node > lhsNodes , Predicate < Node > hasLocalRhs ) { } }
for ( Node lhs : lhsNodes ) { if ( NodeUtil . isGet ( lhs ) ) { if ( lhs . getFirstChild ( ) . isThis ( ) ) { sideEffectInfo . setMutatesThis ( ) ; } else { Node objectNode = lhs . getFirstChild ( ) ; if ( objectNode . isName ( ) ) { Var var = scope . getVar ( objectNode . getString ( ) ) ; if ( isVarDeclaredInSameContainerScope ( var , scope ) ) { // Maybe a local object modification . We won ' t know for sure until // we exit the scope and can validate the value of the local . taintedVarsByFunction . put ( enclosingFunction , var ) ; } else { sideEffectInfo . setMutatesGlobalState ( ) ; } } else { // Don ' t track multi level locals : local . prop . prop2 + + ; sideEffectInfo . setMutatesGlobalState ( ) ; } } } else { checkState ( lhs . isName ( ) , lhs ) ; Var var = scope . getVar ( lhs . getString ( ) ) ; if ( isVarDeclaredInSameContainerScope ( var , scope ) ) { if ( ! hasLocalRhs . test ( lhs ) ) { // Assigned value is not guaranteed to be a local value , // so if we see any property assignments on this variable , // they could be tainting a non - local value . blacklistedVarsByFunction . put ( enclosingFunction , var ) ; } } else { sideEffectInfo . setMutatesGlobalState ( ) ; } } }
public class Collectors { /** * Returns a { @ code Collector } that accumulates the input elements into a * new { @ code List } . There are no guarantees on the type , mutability , * serializability , or thread - safety of the { @ code List } returned ; if more * control over the returned { @ code List } is required , use { @ link # toCollection ( Supplier ) } . * @ param < T > the type of the input elements * @ return a { @ code Collector } which collects all the input elements into a * { @ code List } , in encounter order */ public static < T > Collector < T , ? , List < T > > toList ( ) { } }
return new CollectorImpl < > ( ( Supplier < List < T > > ) ArrayList :: new , List :: add , ( left , right ) -> { left . addAll ( right ) ; return left ; } , CH_ID ) ;
public class ThemeManager { /** * Set the current theme . Should be called in main thread ( UI thread ) . * @ param theme The current theme . * @ return True if set theme successfully , False if method ' s called on main thread or theme already set . */ public boolean setCurrentTheme ( int theme ) { } }
if ( Looper . getMainLooper ( ) . getThread ( ) != Thread . currentThread ( ) ) return false ; if ( mCurrentTheme != theme ) { mCurrentTheme = theme ; SharedPreferences pref = getSharedPreferences ( mContext ) ; if ( pref != null ) pref . edit ( ) . putInt ( KEY_THEME , mCurrentTheme ) . apply ( ) ; dispatchThemeChanged ( mCurrentTheme ) ; return true ; } return false ;
public class PriorityQueue { /** * Purges the content of the priority queue . This closes the queue and * wakes up any blocked threads */ public void purge ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "purge" ) ; synchronized ( queueMonitor ) { state = CLOSED ; for ( int i = 0 ; i < JFapChannelConstants . MAX_PRIORITY_LEVELS - 1 ; ++ i ) { queueArray [ i ] . monitor . setActive ( false ) ; } closeWaitersMonitor . setActive ( false ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "purge" ) ;
public class FactoredSequenceListener {
    /**
     * Informs this sequence model that the value of the element at position pos has changed.
     * This allows this sequence model to update its internal model if desired.
     */
    public void updateSequenceElement(int[] sequence, int pos, int oldVal) {
        // When the array of component models is present, forward the update
        // (with the real old value) to each of them.
        if (models != null) {
            for (int i = 0; i < models.length; i++)
                models[i].updateSequenceElement(sequence, pos, oldVal);
            return;
        }
        // Fallback two-model path.
        // NOTE(review): this path passes a hard-coded 0 instead of oldVal,
        // unlike the array path above — confirm whether model1/model2 ignore
        // the old value or whether this is a bug.
        model1.updateSequenceElement(sequence, pos, 0);
        model2.updateSequenceElement(sequence, pos, 0);
    }
}
public class LazyCsvAnnotationBeanReader { /** * 1行目のレコードをヘッダー情報として読み込んで 、 カラム情報を初期化を行います 。 * @ return 読み込んだヘッダー情報 * @ throws SuperCsvNoMatchColumnSizeException ヘッダーのサイズ ( カラム数 ) がBean定義と一致しない場合 。 * @ throws SuperCsvNoMatchHeaderException ヘッダーの値がBean定義と一致しない場合 。 * @ throws SuperCsvException 引数firstLineCheck = trueのとき 、 このメソッドが1行目以外の読み込み時に呼ばれた場合 。 * @ throws IOException ファイルの読み込みに失敗した場合 。 */ public String [ ] init ( ) throws IOException { } }
// ヘッダーを元に 、 カラム情報の番号を補完する final String [ ] headers = getHeader ( true ) ; init ( headers ) ; return headers ;
public class NodeModelUtils {
    /**
     * Finds the semantic object that is actually associated with the given
     * node, walking up (and, for assignments, sideways through preceding
     * siblings) of the node model until a node carrying a semantic element
     * is found.
     *
     * @param node the starting node; may be {@code null}
     * @return the associated semantic object, or {@code null} if none
     */
    /* @Nullable */
    public static EObject findActualSemanticObjectFor(/* @Nullable */ INode node) {
        if (node == null)
            return null;
        // The node itself carries a semantic element: done.
        if (node.hasDirectSemanticElement())
            return node.getSemanticElement();
        EObject grammarElement = node.getGrammarElement();
        ICompositeNode parent = node.getParent();
        // No grammar element to reason about: delegate to the parent.
        if (grammarElement == null)
            return findActualSemanticObjectFor(parent);
        Assignment assignment = GrammarUtil.containingAssignment(grammarElement);
        if (assignment != null) {
            // Fragment rules may produce the object in a child node.
            if (GrammarUtil.isEObjectFragmentRule(GrammarUtil.containingRule(assignment))) {
                EObject result = findActualSemanticObjectInChildren(node, grammarElement);
                if (result != null)
                    return result;
            }
            if (parent.hasDirectSemanticElement())
                return findActualSemanticObjectFor(parent);
            // Scan the siblings that precede this node for an unassigned
            // rule call or action that produced the semantic object.
            INode sibling = parent.getFirstChild();
            while (!sibling.equals(node)) {
                EObject siblingGrammarElement = sibling.getGrammarElement();
                if (siblingGrammarElement != null && GrammarUtil.containingAssignment(siblingGrammarElement) == null) {
                    if (GrammarUtil.isEObjectRuleCall(siblingGrammarElement)) {
                        return findActualSemanticObjectFor(sibling);
                    }
                    if (siblingGrammarElement.eClass() == XtextPackage.Literals.ACTION) {
                        return findActualSemanticObjectFor(sibling);
                    }
                }
                sibling = sibling.getNextSibling();
            }
        } else if (!GrammarUtil.isEObjectFragmentRuleCall(grammarElement)) {
            // Unassigned non-fragment element: the object may be in a child.
            EObject result = findActualSemanticObjectInChildren(node, grammarElement);
            if (result != null)
                return result;
        }
        // Nothing found at this level: continue upwards.
        return findActualSemanticObjectFor(parent);
    }
}
public class AWSOpsWorksCMClient { /** * Describes backups . The results are ordered by time , with newest backups first . If you do not specify a BackupId * or ServerName , the command returns all backups . * This operation is synchronous . * A < code > ResourceNotFoundException < / code > is thrown when the backup does not exist . A * < code > ValidationException < / code > is raised when parameters of the request are not valid . * @ param describeBackupsRequest * @ return Result of the DescribeBackups operation returned by the service . * @ throws ValidationException * One or more of the provided request parameters are not valid . * @ throws ResourceNotFoundException * The requested resource does not exist , or access was denied . * @ throws InvalidNextTokenException * This occurs when the provided nextToken is not valid . * @ sample AWSOpsWorksCM . DescribeBackups * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworkscm - 2016-11-01 / DescribeBackups " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeBackupsResult describeBackups ( DescribeBackupsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeBackups ( request ) ;
public class LogBuffer { /** * Return 24 - bit signed int from buffer . ( big - endian ) * @ see mysql - 5.6.10 / include / myisampack . h - mi _ usint3korr */ public final int getBeInt24 ( final int pos ) { } }
final int position = origin + pos ; if ( pos + 2 >= limit || pos < 0 ) throw new IllegalArgumentException ( "limit excceed: " + ( pos < 0 ? pos : ( pos + 2 ) ) ) ; byte [ ] buf = buffer ; return ( 0xff & buf [ position + 2 ] ) | ( ( 0xff & buf [ position + 1 ] ) << 8 ) | ( ( buf [ position ] ) << 16 ) ;
public class KeyRange { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */ public boolean isSet ( _Fields field ) { } }
if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case START_KEY : return isSetStart_key ( ) ; case END_KEY : return isSetEnd_key ( ) ; case START_TOKEN : return isSetStart_token ( ) ; case END_TOKEN : return isSetEnd_token ( ) ; case ROW_FILTER : return isSetRow_filter ( ) ; case COUNT : return isSetCount ( ) ; } throw new IllegalStateException ( ) ;
public class EntityManagerImpl { /** * Make an instance managed and persistent . * @ param entity * @ throws EntityExistsException * if the entity already exists . ( If the entity already exists , * the EntityExistsException may be thrown when the persist * operation is invoked , or the EntityExistsException or another * PersistenceException may be thrown at flush or commit time . ) * @ throws IllegalArgumentException * if the instance is not an entity * @ throws TransactionRequiredException * if invoked on a container - managed entity manager of type * PersistenceContextType . TRANSACTION and there is no * transaction */ @ Override public final void persist ( Object e ) { } }
checkClosed ( ) ; checkTransactionNeeded ( ) ; try { getPersistenceDelegator ( ) . persist ( e ) ; } catch ( Exception ex ) { // onRollBack . doRollback ( ) ; throw new KunderaException ( ex ) ; }
public class OAHashSet {
    /**
     * Removes the specified element from this set if it is present with
     * the hash provided in parameter.
     * This variant of {@link #remove(Object)} acts as an optimisation to
     * enable avoiding {@link #hashCode()} calls if the hash is already
     * known on the caller side.
     *
     * @param objectToRemove object to be removed from this set, if present
     * @param hash the hash of the element to be removed
     * @return <tt>true</tt> if this set contained the specified element
     * @see #remove(Object)
     */
    public boolean remove(Object objectToRemove, int hash) {
        checkNotNull(objectToRemove);
        // Open addressing: start at the hash's home slot and probe linearly.
        int index = hash & mask;
        // using the hashes array for looping and comparison if possible, hence we're cache friendly
        // Probe until an empty slot terminates the cluster.
        while (hashes[index] != 0 || table[index] != null) {
            // Compare the cheap int hash first; only then fall back to equals().
            if (hash == hashes[index] && objectToRemove.equals(table[index])) {
                removeFromIndex(index);
                return true;
            }
            // Advance with wrap-around (mask assumes a power-of-two capacity).
            index = ++index & mask;
        }
        return false;
    }
}
public class MergeRequestApi { /** * Get all merge requests matching the filter . * < pre > < code > GitLab Endpoint : GET / merge _ requests < / code > < / pre > * @ param filter a MergeRequestFilter instance with the filter settings * @ param itemsPerPage the number of MergeRequest instances that will be fetched per page * @ return all merge requests for the specified project matching the filter * @ throws GitLabApiException if any exception occurs */ public Pager < MergeRequest > getMergeRequests ( MergeRequestFilter filter , int itemsPerPage ) throws GitLabApiException { } }
MultivaluedMap < String , String > queryParams = ( filter != null ? filter . getQueryParams ( ) . asMap ( ) : null ) ; if ( filter != null && ( filter . getProjectId ( ) != null && filter . getProjectId ( ) . intValue ( ) > 0 ) || ( filter . getIids ( ) != null && filter . getIids ( ) . size ( ) > 0 ) ) { if ( filter . getProjectId ( ) == null || filter . getProjectId ( ) . intValue ( ) == 0 ) { throw new RuntimeException ( "project ID cannot be null or 0" ) ; } return ( new Pager < MergeRequest > ( this , MergeRequest . class , itemsPerPage , queryParams , "projects" , filter . getProjectId ( ) , "merge_requests" ) ) ; } else { return ( new Pager < MergeRequest > ( this , MergeRequest . class , itemsPerPage , queryParams , "merge_requests" ) ) ; }
public class AbstractMetadataPopupLayout { /** * Returns metadata popup . * @ param entity * entity for which metadata data is displayed * @ param metaDatakey * metadata key to be selected * @ return { @ link CommonDialogWindow } */ public CommonDialogWindow getWindow ( final E entity , final String metaDatakey ) { } }
selectedEntity = entity ; metadataWindow = new WindowBuilder ( SPUIDefinitions . CREATE_UPDATE_WINDOW ) . caption ( getMetadataCaption ( ) ) . content ( this ) . cancelButtonClickListener ( event -> onCancel ( ) ) . id ( UIComponentIdProvider . METADATA_POPUP_ID ) . layout ( mainLayout ) . i18n ( i18n ) . saveDialogCloseListener ( new SaveOnDialogCloseListener ( ) ) . buildCommonDialogWindow ( ) ; metadataWindow . setHeight ( 550 , Unit . PIXELS ) ; metadataWindow . setWidth ( 800 , Unit . PIXELS ) ; metadataWindow . getMainLayout ( ) . setSizeFull ( ) ; metadataWindow . getButtonsLayout ( ) . setHeight ( "45px" ) ; setUpDetails ( entity . getId ( ) , metaDatakey ) ; return metadataWindow ;
public class AutoFormManualTaskActivity { /** * This method is used to extract data from the message received from the task manager . * The method updates all variables specified as non - readonly * @ param datadoc * @ return completion code ; when it returns null , the completion * code is taken from the completionCode parameter of * the message with key FormDataDocument . ATTR _ ACTION * @ throws ActivityException * @ throws JSONException */ protected String extractFormData ( JSONObject datadoc ) throws ActivityException , JSONException { } }
String varstring = this . getAttributeValue ( TaskActivity . ATTRIBUTE_TASK_VARIABLES ) ; List < String [ ] > parsed = StringHelper . parseTable ( varstring , ',' , ';' , 5 ) ; for ( String [ ] one : parsed ) { String varname = one [ 0 ] ; String displayOption = one [ 2 ] ; if ( displayOption . equals ( TaskActivity . VARIABLE_DISPLAY_NOTDISPLAYED ) ) continue ; if ( displayOption . equals ( TaskActivity . VARIABLE_DISPLAY_READONLY ) ) continue ; if ( varname . startsWith ( "#{" ) || varname . startsWith ( "${" ) ) continue ; String data = datadoc . has ( varname ) ? datadoc . getString ( varname ) : null ; setDataToVariable ( varname , data ) ; } return null ;
public class KinesisDataFetcher { /** * Registers a metric group associated with the shard id of the provided { @ link KinesisStreamShardState shardState } . * @ return a { @ link ShardMetricsReporter } that can be used to update metric values */ private static ShardMetricsReporter registerShardMetrics ( MetricGroup metricGroup , KinesisStreamShardState shardState ) { } }
ShardMetricsReporter shardMetrics = new ShardMetricsReporter ( ) ; MetricGroup streamShardMetricGroup = metricGroup . addGroup ( KinesisConsumerMetricConstants . STREAM_METRICS_GROUP , shardState . getStreamShardHandle ( ) . getStreamName ( ) ) . addGroup ( KinesisConsumerMetricConstants . SHARD_METRICS_GROUP , shardState . getStreamShardHandle ( ) . getShard ( ) . getShardId ( ) ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . MILLIS_BEHIND_LATEST_GAUGE , shardMetrics :: getMillisBehindLatest ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . MAX_RECORDS_PER_FETCH , shardMetrics :: getMaxNumberOfRecordsPerFetch ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . NUM_AGGREGATED_RECORDS_PER_FETCH , shardMetrics :: getNumberOfAggregatedRecords ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . NUM_DEAGGREGATED_RECORDS_PER_FETCH , shardMetrics :: getNumberOfDeaggregatedRecords ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . AVG_RECORD_SIZE_BYTES , shardMetrics :: getAverageRecordSizeBytes ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . BYTES_PER_READ , shardMetrics :: getBytesPerRead ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . RUNTIME_LOOP_NANOS , shardMetrics :: getRunLoopTimeNanos ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . LOOP_FREQUENCY_HZ , shardMetrics :: getLoopFrequencyHz ) ; streamShardMetricGroup . gauge ( KinesisConsumerMetricConstants . SLEEP_TIME_MILLIS , shardMetrics :: getSleepTimeMillis ) ; return shardMetrics ;
public class ImgUtil {
    /**
     * Copies the contents of one image onto another by drawing {@code src}
     * at the top-left corner of {@code dest}.
     *
     * @param src the source image
     * @param dest the destination image (drawn onto in place)
     * @return {@code dest}
     */
    public static BufferedImage copy(BufferedImage src, BufferedImage dest) {
        // FIX: the Graphics obtained from getGraphics() must be disposed,
        // otherwise the native resource leaks; the original never released it.
        final java.awt.Graphics g = dest.getGraphics();
        try {
            g.drawImage(src, 0, 0, null);
        } finally {
            g.dispose();
        }
        return dest;
    }
}
public class RepositoryConfigurationFactory { /** * Create a new default repository configuration . * @ return the newly - created default repository configuration */ public static List < RepositoryConfiguration > createDefaultRepositoryConfiguration ( ) { } }
MavenSettings mavenSettings = new MavenSettingsReader ( ) . readSettings ( ) ; List < RepositoryConfiguration > repositoryConfiguration = new ArrayList < > ( ) ; repositoryConfiguration . add ( MAVEN_CENTRAL ) ; if ( ! Boolean . getBoolean ( "disableSpringSnapshotRepos" ) ) { repositoryConfiguration . add ( SPRING_MILESTONE ) ; repositoryConfiguration . add ( SPRING_SNAPSHOT ) ; } addDefaultCacheAsRepository ( mavenSettings . getLocalRepository ( ) , repositoryConfiguration ) ; addActiveProfileRepositories ( mavenSettings . getActiveProfiles ( ) , repositoryConfiguration ) ; return repositoryConfiguration ;
public class InternalPureXbaseParser {
    /**
     * ANTLR-generated entry rule; do not hand-edit (regenerated from the grammar).
     * InternalPureXbase.g:1121:1: entryRuleXAndExpression returns [EObject current=null] : iv_ruleXAndExpression= ruleXAndExpression EOF ;
     */
    public final EObject entryRuleXAndExpression() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleXAndExpression = null;
        try {
            // InternalPureXbase.g:1121:55: (iv_ruleXAndExpression= ruleXAndExpression EOF )
            // InternalPureXbase.g:1122:2: iv_ruleXAndExpression= ruleXAndExpression EOF
            {
                // Node-model bookkeeping is skipped while backtracking.
                if (state.backtracking == 0) {
                    newCompositeNode(grammarAccess.getXAndExpressionRule());
                }
                pushFollow(FOLLOW_1);
                iv_ruleXAndExpression = ruleXAndExpression();
                state._fsp--;
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_ruleXAndExpression;
                }
                // The entry rule must consume the whole input.
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class FileUtils { /** * Reads the content of the file between the specific line numbers . * @ param filePath * the path to the file * @ param lineToStart * the line number to start with * @ param lineToEnd * the line number to end with * @ param encoding * the file encoding . Examples : " UTF - 8 " , " UTF - 16 " . * @ return a list of strings for each line between { @ code LineToStart } and * { @ code lineToEnd } * @ throws IOException * if something goes wrong while reading the file */ public List < String > readFromFile ( final String filePath , final int lineToStart , final int lineToEnd , final String encoding ) throws IOException { } }
if ( lineToStart > lineToEnd ) { throw new IllegalArgumentException ( "Line to start must be lower than line to end" ) ; } LOG . info ( "Reading from file: " + filePath ) ; List < String > result = new ArrayList < String > ( ) ; BufferedReader reader = null ; int i = 0 ; try { reader = new BufferedReader ( new InputStreamReader ( new FileInputStream ( filePath ) , encoding ) ) ; String line = reader . readLine ( ) ; while ( line != null && i >= lineToStart && i <= lineToEnd ) { result . add ( line ) ; i ++ ; line = reader . readLine ( ) ; } } finally { if ( reader != null ) { reader . close ( ) ; } } LOG . info ( "Returning: " + result ) ; return result ;
public class GenericDraweeHierarchyBuilder { /** * Sets the progress bar image and its scale type . * @ param progressBarDrawable drawable to be used as progress bar image * @ param progressBarImageScaleType scale type for the progress bar image * @ return modified instance of this builder */ public GenericDraweeHierarchyBuilder setProgressBarImage ( Drawable progressBarDrawable , @ Nullable ScalingUtils . ScaleType progressBarImageScaleType ) { } }
mProgressBarImage = progressBarDrawable ; mProgressBarImageScaleType = progressBarImageScaleType ; return this ;
public class Thin { /** * Applies the hitmiss operation to a set of pixels * stored in a hash table . * @ param b the BinaryFast input image * @ param input the set of pixels requiring matching * @ param kernel the kernel to match them with * @ return A hash table containing all the successful matches . */ private HashSet < Point > hitMissHashSet ( BinaryFast b , HashSet < Point > input , int [ ] kernel ) { } }
HashSet < Point > output = new HashSet < Point > ( ) ; Iterator < Point > it = input . iterator ( ) ; while ( it . hasNext ( ) ) { Point p = it . next ( ) ; if ( kernelMatch ( p , b . getPixels ( ) , b . getWidth ( ) , b . getHeight ( ) , kernel ) ) { // System . out . println ( " Match " + p . x + " " + p . y ) ; output . add ( p ) ; } } // System . out . println ( output . size ( ) ) ; return output ;
public class Cache2kBuilder { /** * Time duration after insert or updated an cache entry expires . * To switch off time based expiry use { @ link # eternal ( boolean ) } . * < p > If an { @ link ExpiryPolicy } is specified , the maximum expiry duration * is capped to the value specified here . * < p > A value of { @ code 0 } means every entry should expire immediately . Low values or * { @ code 0 } together with read through operation mode with a { @ link CacheLoader } should be * avoided in production environments . * @ throws IllegalArgumentException if { @ link # eternal ( boolean ) } was set to true * @ see < a href = " https : / / cache2k . org / docs / latest / user - guide . html # expiry - and - refresh " > cache2k user guide - Expiry and Refresh < / a > */ public final Cache2kBuilder < K , V > expireAfterWrite ( long v , TimeUnit u ) { } }
config ( ) . setExpireAfterWrite ( u . toMillis ( v ) ) ; return this ;
public class SerializationUtils {
    /**
     * Deserializes a byte array into an object. When the bytes are null,
     * returns null.
     *
     * @param bytes the byte array to deserialize
     * @return the deserialized object, or null for null input
     * @throws IOException if the deserialization fails
     * @throws ClassNotFoundException if no class is found to deserialize into
     */
    public static Serializable deserialize(byte[] bytes) throws IOException, ClassNotFoundException {
        if (bytes == null) {
            return null;
        }
        // Both streams are closed automatically, innermost first.
        try (ByteArrayInputStream byteStream = new ByteArrayInputStream(bytes);
                ObjectInputStream objectStream = new ObjectInputStream(byteStream)) {
            return (Serializable) objectStream.readObject();
        }
    }
}
public class StorePackageImpl {
    /**
     * Returns the EClass for ArrayType, resolving it lazily from the package
     * registry on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getArrayType() {
        if (arrayTypeEClass == null) {
            // Generated lookup: index 80 is the position of ArrayType in this
            // package's classifier list (assigned by the EMF generator).
            arrayTypeEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(StorePackage.eNS_URI).getEClassifiers().get(80);
        }
        return arrayTypeEClass;
    }
}
public class PeerGroup { /** * See { @ link Peer # addOnTransactionBroadcastListener ( OnTransactionBroadcastListener ) } */ public void addOnTransactionBroadcastListener ( Executor executor , OnTransactionBroadcastListener listener ) { } }
peersTransactionBroadastEventListeners . add ( new ListenerRegistration < > ( checkNotNull ( listener ) , executor ) ) ; for ( Peer peer : getConnectedPeers ( ) ) peer . addOnTransactionBroadcastListener ( executor , listener ) ; for ( Peer peer : getPendingPeers ( ) ) peer . addOnTransactionBroadcastListener ( executor , listener ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link GraphStyleType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link GraphStyleType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "GraphStyle" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_GML" ) public JAXBElement < GraphStyleType > createGraphStyle ( GraphStyleType value ) { } }
return new JAXBElement < GraphStyleType > ( _GraphStyle_QNAME , GraphStyleType . class , null , value ) ;
public class ServiceBuilderImpl { /** * Used by journal builder . */ ServiceRefAmp service ( QueueServiceFactoryInbox serviceFactory , ServiceConfig config ) { } }
QueueDeliverBuilderImpl < MessageAmp > queueBuilder = new QueueDeliverBuilderImpl < > ( ) ; // queueBuilder . setOutboxFactory ( OutboxAmpFactory . newFactory ( ) ) ; queueBuilder . setClassLoader ( _services . classLoader ( ) ) ; queueBuilder . sizeMax ( config . queueSizeMax ( ) ) ; queueBuilder . size ( config . queueSize ( ) ) ; InboxAmp inbox = new InboxQueue ( _services , queueBuilder , serviceFactory , config ) ; return inbox . serviceRef ( ) ;
public class KiteConnect { /** * Get a new access token using refresh token . * @ param refreshToken is the refresh token obtained after generateSession . * @ param apiSecret is unique for each app . * @ return TokenSet contains user id , refresh token , api secret . * @ throws IOException is thrown when there is connection error . * @ throws KiteException is thrown for all Kite trade related errors . */ public TokenSet renewAccessToken ( String refreshToken , String apiSecret ) throws IOException , KiteException , JSONException { } }
String hashableText = this . apiKey + refreshToken + apiSecret ; String sha256hex = sha256Hex ( hashableText ) ; Map < String , Object > params = new HashMap < > ( ) ; params . put ( "api_key" , apiKey ) ; params . put ( "refresh_token" , refreshToken ) ; params . put ( "checksum" , sha256hex ) ; JSONObject response = new KiteRequestHandler ( proxy ) . postRequest ( routes . get ( "api.refresh" ) , params , apiKey , accessToken ) ; return gson . fromJson ( String . valueOf ( response . get ( "data" ) ) , TokenSet . class ) ;
public class TreeInfo { /** * Is tree a constructor declaration ? */ public static boolean isConstructor ( JCTree tree ) { } }
if ( tree . hasTag ( METHODDEF ) ) { Name name = ( ( JCMethodDecl ) tree ) . name ; return name == name . table . names . init ; } else { return false ; }
public class CalendarAstronomer { /** * Returns the current Greenwich sidereal time , measured in hours * @ hide draft / provisional / internal are hidden on Android */ public double getGreenwichSidereal ( ) { } }
if ( siderealTime == INVALID ) { // See page 86 of " Practial Astronomy with your Calculator " , // by Peter Duffet - Smith , for details on the algorithm . double UT = normalize ( ( double ) time / HOUR_MS , 24 ) ; siderealTime = normalize ( getSiderealOffset ( ) + UT * 1.002737909 , 24 ) ; } return siderealTime ;
public class AmbiguityLibrary { /** * 插入到树种 * @ param key * @ param value */ public static void insert ( String key , Value value ) { } }
Forest forest = get ( key ) ; Library . insertWord ( forest , value ) ;
public class StaticTypeCheckingSupport {
    /**
     * Checks that arguments and parameter types match, expecting that the number of parameters is strictly greater
     * than the number of arguments, allowing possible inclusion of default parameters.
     *
     * @param params method parameters
     * @param args type arguments
     * @return -1 if arguments do not match, 0 if arguments are of the exact type and > 0 when one or more argument is
     *         not of the exact type but still match
     */
    static int allParametersAndArgumentsMatchWithDefaultParams(Parameter[] params, ClassNode[] args) {
        int dist = 0;
        ClassNode ptype = null;
        // we already know the lengths are equal
        // NOTE(review): the comment above contradicts the javadoc ("number of
        // parameters is strictly greater than the number of arguments");
        // the i/j double index below only makes sense when they may differ —
        // confirm which statement is accurate.
        // i walks the parameters; j walks the arguments and only advances when
        // an argument is consumed by a parameter.
        for (int i = 0, j = 0; i < params.length; i++) {
            Parameter param = params[i];
            ClassNode paramType = param.getType();
            ClassNode arg = j >= args.length ? null : args[j];
            if (arg == null || !isAssignableTo(arg, paramType)) {
                // The current argument cannot fill this parameter: it only
                // matches if the parameter has a default value (or repeats the
                // type of the previously skipped parameter, tracked in ptype).
                if (!param.hasInitialExpression() && (ptype == null || !ptype.equals(paramType))) {
                    return -1; // no default value
                }
                // a default value exists, we can skip this param
                ptype = null;
            } else {
                // Argument consumed: accumulate type distance for inexact matches.
                j++;
                if (!paramType.equals(arg)) dist += getDistance(arg, paramType);
                if (param.hasInitialExpression()) {
                    ptype = arg;
                } else {
                    ptype = null;
                }
            }
        }
        return dist;
    }
}
public class ArtFinder { /** * We have obtained album art for a device , so store it and alert any listeners . * @ param update the update which caused us to retrieve this art * @ param art the album art which we retrieved */ private void updateArt ( TrackMetadataUpdate update , AlbumArt art ) { } }
hotCache . put ( DeckReference . getDeckReference ( update . player , 0 ) , art ) ; // Main deck if ( update . metadata . getCueList ( ) != null ) { // Update the cache with any hot cues in this track as well for ( CueList . Entry entry : update . metadata . getCueList ( ) . entries ) { if ( entry . hotCueNumber != 0 ) { hotCache . put ( DeckReference . getDeckReference ( update . player , entry . hotCueNumber ) , art ) ; } } } deliverAlbumArtUpdate ( update . player , art ) ;
public class SARLValidator {
    /**
     * Replies the member feature call that is the root of a sequence of member feature calls.
     *
     * <p>While the current feature call is the actual receiver of a member feature call, and not an argument,
     * the sequence is still active. Otherwise, the sequence is stopped.
     *
     * @param leaf the expression at the leaf of the feature call.
     * @param container the top most container that cannot be part of the sequence. Could be {@code null}.
     * @param feedback the function that is invoked on each discovered member feature call within the sequence. Could be {@code null}.
     * @return the root of a member feature call sequence, or {@code null} if the
     *         sequence does not end on a member feature call.
     */
    protected static XMemberFeatureCall getRootOfMemberFeatureCallSequence(EObject leaf, EObject container, Procedure1<XMemberFeatureCall> feedback) {
        // 'call' tracks the current root candidate; 'obj' climbs the
        // containing XExpressions until the sequence breaks or 'container'
        // is reached.
        EObject call = leaf;
        EObject obj = EcoreUtil2.getContainerOfType(leaf.eContainer(), XExpression.class);
        while (obj != null && (container == null || obj != container)) {
            if (!(obj instanceof XMemberFeatureCall)) {
                // Not a member feature call: the sequence stops here.
                obj = null;
            } else {
                final EObject previous = call;
                final XMemberFeatureCall fcall = (XMemberFeatureCall) obj;
                call = fcall;
                if (fcall.getActualReceiver() == previous) {
                    // Sequence of calls, with the '.' char.
                    if (feedback != null) {
                        feedback.apply(fcall);
                    }
                    obj = EcoreUtil2.getContainerOfType(call.eContainer(), XExpression.class);
                } else if (fcall.getActualArguments().contains(previous)) {
                    // The sequence is an argument of a function call.
                    // Roll back to the previous candidate and stop climbing.
                    call = previous;
                    obj = null;
                } else {
                    obj = null;
                }
            }
        }
        // Only a member feature call qualifies as the sequence root.
        return call instanceof XMemberFeatureCall ? (XMemberFeatureCall) call : null;
    }
}
public class DMNResourceDependenciesSorter {
    /**
     * Performs a depth first visit, but keeping a separate reference of visited/visiting nodes, _also_ to avoid potential issues of circularities.
     *
     * @param node the node currently being visited
     * @param allNodes all nodes, used to resolve dependency model IDs to resources
     * @param visited accumulator of nodes already entered by the DFS
     * @param dfv post-order output list (dependencies before dependents)
     */
    private static void dfVisit(DMNResource node, List<DMNResource> allNodes, Collection<DMNResource> visited, List<DMNResource> dfv) {
        // NOTE(review): because recursion below only enters unvisited
        // neighbours, this throw can only fire when a caller re-invokes
        // dfVisit on an already-visited node — confirm the caller guards
        // against that, otherwise a diamond dependency could be misreported
        // as a cycle.
        if (visited.contains(node)) {
            throw new RuntimeException("Circular dependency detected: " + visited + " , and again to: " + node);
        }
        visited.add(node);
        // Resolve each dependency ID to its DMNResource among allNodes.
        List<DMNResource> neighbours = node.getDependencies().stream()
                .flatMap(dep -> allNodes.stream().filter(r -> r.getModelID().equals(dep)))
                .collect(Collectors.toList());
        for (DMNResource n : neighbours) {
            if (!visited.contains(n)) {
                dfVisit(n, allNodes, visited, dfv);
            }
        }
        // Post-order append: a node is emitted after all its dependencies.
        dfv.add(node);
    }
}
public class vpath { /** * Use this API to fetch vpath resource of given name . */ public static vpath get ( nitro_service service , String name ) throws Exception { } }
vpath obj = new vpath ( ) ; obj . set_name ( name ) ; vpath response = ( vpath ) obj . get_resource ( service ) ; return response ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ModeType } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "mode" ) public JAXBElement < ModeType > createMode ( ModeType value ) { } }
return new JAXBElement < ModeType > ( _Mode_QNAME , ModeType . class , null , value ) ;
public class FailoverGroupsInner {
    /**
     * Fails over from the current primary server to this server.
     * (Generated Azure SDK wrapper: unwraps the ServiceResponse body.)
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server containing the failover group.
     * @param failoverGroupName The name of the failover group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<FailoverGroupInner> failoverAsync(String resourceGroupName, String serverName, String failoverGroupName) {
        // Delegate to the full-response variant and strip the wrapper.
        return failoverWithServiceResponseAsync(resourceGroupName, serverName, failoverGroupName)
                .map(new Func1<ServiceResponse<FailoverGroupInner>, FailoverGroupInner>() {
                    @Override
                    public FailoverGroupInner call(ServiceResponse<FailoverGroupInner> response) {
                        return response.body();
                    }
                });
    }
}
public class QueryFactory { /** * Creates a new TypedQuery that queries the amount of entities of the entity class of this QueryFactory , that a * query for the given Filter would return . * @ param filter the filter * @ return a query */ public TypedQuery < Long > count ( Filter filter ) { } }
CriteriaQuery < Long > query = cb . createQuery ( Long . class ) ; Root < T > from = query . from ( getEntityClass ( ) ) ; if ( filter != null ) { Predicate where = new CriteriaMapper ( from , cb ) . create ( filter ) ; query . where ( where ) ; } return getEntityManager ( ) . createQuery ( query . select ( cb . count ( from ) ) ) ;
public class BitmapUtils { /** * Store the bitmap on the application private directory path . * @ param context the context . * @ param bitmap to store . * @ param filename file name . * @ param format bitmap format . * @ param quality the quality of the compressed bitmap . * @ return the compressed bitmap file . */ public static boolean storeOnApplicationPrivateDir ( Context context , Bitmap bitmap , String filename , Bitmap . CompressFormat format , int quality ) { } }
OutputStream out = null ; try { out = new BufferedOutputStream ( context . openFileOutput ( filename , Context . MODE_PRIVATE ) ) ; return bitmap . compress ( format , quality , out ) ; } catch ( FileNotFoundException e ) { Log . e ( TAG , "no such file for saving bitmap: " , e ) ; return false ; } finally { CloseableUtils . close ( out ) ; }
public class AbstractParser { /** * Fetch a JSONObject from the provided string . * @ param jsonString json string * @ return json object representing the string or null if there is an exception */ protected JSONObject getJSONObject ( final String jsonString ) { } }
JSONObject json = new JSONObject ( ) ; try { json = new JSONObject ( jsonString ) ; } catch ( NullPointerException e ) { LOGGER . error ( "JSON string cannot be null." , e ) ; } catch ( JSONException e ) { LOGGER . error ( "Could not parse string into JSONObject." , e ) ; } return json ;
public class AbcGrammar {
    /**
     * field-userdef-print ::= %x55.3A *WSP userdef header-eol
     * <p>
     * Matches a user-defined header line of the form <tt>U:</tt> followed by
     * optional whitespace, the user definition, and the header end-of-line.
     */
    Rule FieldUserdefPrint() {
        // Parboiled rule: the WSP run is suppressed from the parse tree, and
        // the whole sequence is labelled for later identification.
        return Sequence(String("U:"), ZeroOrMore(WSP()).suppressNode(), Userdef(), HeaderEol()).label(FieldUserdefPrint);
    }
}
public class ExtensionsInner {
    /**
     * Enables the Operations Management Suite (OMS) on the HDInsight cluster.
     *
     * @param resourceGroupName The name of the resource group.
     * @param clusterName The name of the cluster.
     * @param parameters The Operations Management Suite (OMS) workspace parameters.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an {@link Observable} that completes when the request has been accepted
     */
    public Observable<Void> beginEnableMonitoringAsync(String resourceGroupName, String clusterName, ClusterMonitoringRequest parameters) {
        // Delegate to the ServiceResponse variant and unwrap the (Void) body.
        return beginEnableMonitoringWithServiceResponseAsync(resourceGroupName, clusterName, parameters).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
}
public class appqoecustomresp { /** * Use this API to fetch all the appqoecustomresp resources that are configured on netscaler . */ public static appqoecustomresp [ ] get ( nitro_service service , options option ) throws Exception { } }
appqoecustomresp obj = new appqoecustomresp ( ) ; appqoecustomresp [ ] response = ( appqoecustomresp [ ] ) obj . get_resources ( service , option ) ; return response ;
public class StringHelper {
    /**
     * Trim the passed char from both the start and the end of the source
     * value. If the source value does not start/end with the char, that side
     * is left untouched.
     *
     * @param sSrc
     *        The input source string; may be {@code null}
     * @param cValueToTrim
     *        The char to be trimmed off the beginning and the end
     * @return The trimmed string, or the original input string if the value to
     *         trim was not found
     * @see #trimStart(String, String)
     * @see #trimEnd(String, String)
     * @see #trimStartAndEnd(String, String, String)
     */
    @Nullable
    @CheckReturnValue
    public static String trimStartAndEnd(@Nullable final String sSrc, final char cValueToTrim) {
        // Same char on both sides; delegate to the two-char overload.
        return trimStartAndEnd(sSrc, cValueToTrim, cValueToTrim);
    }
}
public class ClassUtilImpl {
    /**
     * Loads a built-in function (BIF) implementation from an OSGi bundle.
     *
     * @param pc            current page context (supplies the config identification)
     * @param name          fully qualified class name of the BIF
     * @param bundleName    bundle to load the class from
     * @param bundleVersion bundle version
     * @return the BIF instance, wrapped in a {@code BIFProxy} when the loaded
     *         class does not itself implement {@link BIF}; {@code null} when
     *         the class cannot be found
     */
    public BIF loadBIF(PageContext pc, String name, String bundleName, Version bundleVersion) throws InstantiationException, IllegalAccessException, ClassException, BundleException {
        // First check whether the name resolves to a class in the bundle.
        Class<?> res = lucee.commons.lang.ClassUtil.loadClassByBundle(name, bundleName, bundleVersion, pc.getConfig().getIdentification());
        if (res != null) {
            // Direct BIF implementations are instantiated as-is; anything else
            // is adapted through a proxy.
            if (Reflector.isInstaneOf(res, BIF.class)) {
                return (BIF) res.newInstance();
            }
            return new BIFProxy(res);
        }
        return null;
    }
}
public class BaseDrawerItem {
    /**
     * Helper method to decide the correct color for the selected icon.
     *
     * @param ctx the context used to resolve theme attributes / resources
     * @return the resolved color value
     */
    protected int getSelectedIconColor(Context ctx) {
        // Falls back from an explicitly set color (no-arg getter, presumably a
        // field accessor — confirm against the rest of the class) to the
        // themed attribute, then to the default resource color.
        return ColorHolder.color(getSelectedIconColor(), ctx, R.attr.material_drawer_selected_text, R.color.material_drawer_selected_text);
    }
}
public class StorageUtil { /** * reads a XML Element Attribute ans cast it to a DateTime * @ param el XML Element to read Attribute from it * @ param attributeName Name of the Attribute to read * @ param defaultValue if attribute doesn ' t exist return default value * @ return Attribute Value */ public DateTime toDateTime ( Element el , String attributeName , DateTime defaultValue ) { } }
String value = el . getAttribute ( attributeName ) ; if ( value == null ) return defaultValue ; DateTime dtValue = Caster . toDate ( value , false , null , null ) ; if ( dtValue == null ) return defaultValue ; return dtValue ;
public class AnnotationUtils {
    /**
     * Helper method for generating a hash code for an array.
     *
     * @param componentType the component type of the array
     * @param o the array
     * @return a hash code for the specified array
     */
    private static int arrayMemberHash(final Class<?> componentType, final Object o) {
        // Dispatch on the primitive component type; the branches are mutually
        // exclusive, so their order is irrelevant. Anything non-primitive must
        // be an Object[].
        if (componentType.equals(Boolean.TYPE)) {
            return Arrays.hashCode((boolean[]) o);
        } else if (componentType.equals(Byte.TYPE)) {
            return Arrays.hashCode((byte[]) o);
        } else if (componentType.equals(Character.TYPE)) {
            return Arrays.hashCode((char[]) o);
        } else if (componentType.equals(Short.TYPE)) {
            return Arrays.hashCode((short[]) o);
        } else if (componentType.equals(Integer.TYPE)) {
            return Arrays.hashCode((int[]) o);
        } else if (componentType.equals(Long.TYPE)) {
            return Arrays.hashCode((long[]) o);
        } else if (componentType.equals(Float.TYPE)) {
            return Arrays.hashCode((float[]) o);
        } else if (componentType.equals(Double.TYPE)) {
            return Arrays.hashCode((double[]) o);
        }
        return Arrays.hashCode((Object[]) o);
    }
}
public class ComputationGraph { /** * Set the states for all RNN layers , for use in { @ link # rnnTimeStep ( INDArray . . . ) } * @ param previousStates The previous time step states for all layers ( key : layer name . Value : layer states ) * @ see # rnnGetPreviousStates ( ) */ public void rnnSetPreviousStates ( Map < String , Map < String , INDArray > > previousStates ) { } }
for ( Map . Entry < String , Map < String , INDArray > > entry : previousStates . entrySet ( ) ) { rnnSetPreviousState ( entry . getKey ( ) , entry . getValue ( ) ) ; }
import java.lang.Math;

public class Main {
    /**
     * Converts a decimal number to a binary number expressed in base-10
     * digits, e.g. 10 -> 1010, 20 -> 10100, 1 -> 1, 0 -> 0.
     *
     * @param decimalNumber the non-negative integer to convert; results are
     *        only meaningful for values up to 1023, since larger values need
     *        more than 10 binary digits and overflow the int result (a
     *        limitation shared with the original implementation)
     * @return the binary representation packed into an int
     */
    public static int convertDecimalToBinary(int decimalNumber) {
        int binaryNumber = 0;
        // 10^position, maintained in integer arithmetic: avoids Math.pow's
        // double round-trip and the implicit narrowing of the old
        // compound assignment.
        int placeValue = 1;
        while (decimalNumber != 0) {
            binaryNumber += (decimalNumber % 2) * placeValue;
            decimalNumber /= 2;
            placeValue *= 10;
        }
        return binaryNumber;
    }

    public static void main(String[] args) {
        System.out.println(convertDecimalToBinary(10));
        System.out.println(convertDecimalToBinary(20));
        System.out.println(convertDecimalToBinary(1));
    }
}
public class FileUtils {
    /**
     * Write a Memento to a particular resource directory.
     *
     * @param resourceDir the resource directory
     * @param resource the resource
     * @param time the time for the memento
     * @throws UncheckedIOException if the memento file cannot be written
     */
    public static void writeMemento(final File resourceDir, final Resource resource, final Instant time) {
        // The memento file is (re)created from scratch; TRUNCATE_EXISTING
        // discards any previous content for the same timestamp.
        try (final BufferedWriter writer = newBufferedWriter(getNquadsFile(resourceDir, time).toPath(), UTF_8, CREATE, WRITE, TRUNCATE_EXISTING)) {
            // Server-managed quads are written first; the stream is closed
            // promptly via its own try-with-resources.
            try (final Stream<String> quads = generateServerManaged(resource).map(FileUtils::serializeQuad)) {
                final Iterator<String> lineIter = quads.iterator();
                while (lineIter.hasNext()) {
                    writer.write(lineIter.next() + lineSeparator());
                }
            }
            // Then all user-managed quads from the resource itself.
            try (final Stream<String> quads = resource.stream().filter(FileUtils::notServerManaged).map(FileUtils::serializeQuad)) {
                final Iterator<String> lineiter = quads.iterator();
                while (lineiter.hasNext()) {
                    writer.write(lineiter.next() + lineSeparator());
                }
            }
        } catch (final IOException ex) {
            throw new UncheckedIOException("Error writing resource version for " + resource.getIdentifier().getIRIString(), ex);
        }
    }
}
public class ECSTarget {
    /**
     * The <code>ECSTaskSet</code> objects associated with the ECS target.
     *
     * @return The <code>ECSTaskSet</code> objects associated with the ECS
     *         target; never {@code null} — an empty list is created lazily on
     *         first access (standard AWS SDK accessor pattern).
     */
    public java.util.List<ECSTaskSet> getTaskSetsInfo() {
        if (taskSetsInfo == null) {
            taskSetsInfo = new com.amazonaws.internal.SdkInternalList<ECSTaskSet>();
        }
        return taskSetsInfo;
    }
}
public class QueueReader {
    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return Queue ResourceSet backed by this reader, which pages lazily
     *         starting from the first page fetched here
     */
    @Override
    public ResourceSet<Queue> read(final TwilioRestClient client) {
        return new ResourceSet<>(this, client, firstPage(client));
    }
}
public class BufferedWriter {
    /**
     * Finishes the current response: fixes up the content length if it was
     * never set explicitly, optionally writes the Content-Length header on
     * first flush (behind a WebContainer custom property), and flushes any
     * buffered output.
     *
     * @throws IOException if the final flush fails
     */
    public void finish() throws IOException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { // 306998.15
            Tr.debug(tc, "finish");
        }
        // If no explicit length was set, use the byte total written so far.
        if (length == -1 && total != 0)
            length = total;
        // PM71666 - For DBCS, when the application calls out.close(), the length set here may not be
        // correct (depending on the number of bytes per character). It will be larger by the time the
        // data is encoded by the JDK sun.nio.cs.StreamEncoder, so more bytes are sent to the client
        // than specified in content-length. The extras are dropped by the client without any exception.
        if (WCCustomProperties.SET_CONTENT_LENGTH_ON_CLOSE) {
            if (!committed) {
                // First write on the close/finish path should set an explicit
                // content-length if we still can (response not yet committed).
                if (!_hasFlushed && obs != null) {
                    if (!this.response.isCommitted()) {
                        setContentLengthHeader(length);
                    }
                    _hasFlushed = true;
                    obs.alertFirstFlush();
                }
                committed = true;
            }
        } // PM71666
        flush();
    }
}
public class AnnotatedGenericRowMapper { /** * { @ inheritDoc } * @ since 0.8.1 */ @ Override public String [ ] getAllColumns ( ) { } }
if ( cachedAllColumns == null ) { List < String > colList = new ArrayList < > ( ) ; ColumnAttribute [ ] annoMappings = getClass ( ) . getAnnotationsByType ( ColumnAttribute . class ) ; for ( ColumnAttribute colAttr : annoMappings ) { colList . add ( colAttr . column ( ) ) ; } cachedAllColumns = colList . toArray ( ArrayUtils . EMPTY_STRING_ARRAY ) ; } return cachedAllColumns ;
public class TarHeader {
    /**
     * Creates a new header for a file/directory entry.
     *
     * @param entryName
     *        File name
     * @param size
     *        File size in bytes
     * @param modTime
     *        Last modification time in numeric Unix time format
     * @param dir
     *        Is directory
     * @param permissions
     *        Unix permission bits for the entry
     * @return a populated header; directories get a trailing '/' and size 0
     */
    public static TarHeader createHeader(String entryName, long size, long modTime, boolean dir, int permissions) {
        // Normalize separators to '/' and strip leading/trailing slashes.
        String name = entryName;
        name = TarUtils.trim(name.replace(File.separatorChar, '/'), '/');
        TarHeader header = new TarHeader();
        header.linkName = new StringBuffer("");
        header.mode = permissions;
        if (name.length() > 100) {
            // Names longer than 100 chars are split at the last '/' into a
            // prefix and name field (ustar-style split — confirm against the
            // header layout used elsewhere in this class).
            header.namePrefix = new StringBuffer(name.substring(0, name.lastIndexOf('/')));
            header.name = new StringBuffer(name.substring(name.lastIndexOf('/') + 1));
        } else {
            header.name = new StringBuffer(name);
        }
        if (dir) {
            header.linkFlag = TarHeader.LF_DIR;
            // Directory entries must end with a slash.
            if (header.name.charAt(header.name.length() - 1) != '/') {
                header.name.append("/");
            }
            header.size = 0;
        } else {
            header.linkFlag = TarHeader.LF_NORMAL;
            header.size = size;
        }
        header.modTime = modTime;
        header.checkSum = 0;
        header.devMajor = 0;
        header.devMinor = 0;
        return header;
    }
}
public class keyListener {
    /**
     * Called when a key is dispatched to a dialog. This allows listeners to
     * get a chance to respond before the dialog.
     *
     * Handles BACK/ESCAPE (dismiss new-folder view or delegate to the
     * back-pressed handler) and, when D-pad navigation is enabled, moves focus
     * between the list, the options row, the new-folder view and the dialog
     * buttons.
     *
     * @param dialog the dialog the key has been dispatched to
     * @param keyCode the code for the physical key that was pressed
     * @param event the KeyEvent object containing full information about
     *        the event
     * @return {@code true} if the listener has consumed the event,
     *         {@code false} otherwise
     */
    @Override
    public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
        // Only react to key-down; let key-up pass through.
        if (event.getAction() != KeyEvent.ACTION_DOWN) return false;
        if (keyCode == KeyEvent.KEYCODE_BACK || keyCode == KeyEvent.KEYCODE_ESCAPE) {
            // Back/escape first closes the new-folder overlay if visible,
            // otherwise defers to the configured back-pressed handler.
            if (_c.get()._newFolderView != null && _c.get()._newFolderView.getVisibility() == VISIBLE) {
                _c.get()._newFolderView.setVisibility(GONE);
                return true;
            }
            _c.get()._onBackPressed.onBackPressed(_c.get()._alertDialog);
            return true;
        }
        // With D-pad support disabled, swallow everything else.
        if (!_c.get()._enableDpad) return true;
        if (!_c.get()._list.hasFocus()) {
            switch (keyCode) {
                case KeyEvent.KEYCODE_DPAD_UP:
                    // Move focus from the button row up into options /
                    // new-folder view / the list itself.
                    if (_c.get()._neutralBtn.hasFocus() || _c.get()._negativeBtn.hasFocus() || _c.get()._positiveBtn.hasFocus()) {
                        if (_c.get()._options != null && _c.get()._options.getVisibility() == VISIBLE) {
                            _c.get()._options.requestFocus(_c.get()._neutralBtn.hasFocus() ? View.FOCUS_RIGHT : View.FOCUS_LEFT);
                            return true;
                        } else if (_c.get()._newFolderView != null && _c.get()._newFolderView.getVisibility() == VISIBLE) {
                            _c.get()._newFolderView.requestFocus(View.FOCUS_LEFT);
                            return true;
                        } else {
                            _c.get()._list.requestFocus();
                            _c.get().lastSelected = true;
                            return true;
                        }
                    }
                    if (_c.get()._options != null && _c.get()._options.hasFocus()) {
                        _c.get()._list.requestFocus();
                        _c.get().lastSelected = true;
                        return true;
                    }
                    break;
                default:
                    return false;
            }
        }
        if (_c.get()._list.hasFocus()) {
            switch (keyCode) {
                case KeyEvent.KEYCODE_DPAD_LEFT:
                    // Left acts as "back" while browsing the list.
                    _c.get()._onBackPressed.onBackPressed(_c.get()._alertDialog);
                    _c.get().lastSelected = false;
                    return true;
                case KeyEvent.KEYCODE_DPAD_RIGHT:
                    // Right activates the currently selected list item.
                    _c.get()._list.performItemClick(_c.get()._list, _c.get()._list.getSelectedItemPosition(), _c.get()._list.getSelectedItemId());
                    _c.get().lastSelected = false;
                    return true;
                case KeyEvent.KEYCODE_DPAD_DOWN:
                    // Down past the last item moves focus out of the list to
                    // options or the first visible dialog button.
                    if (_c.get().lastSelected) {
                        _c.get().lastSelected = false;
                        if (_c.get()._options != null && _c.get()._options.getVisibility() == VISIBLE) {
                            _c.get()._options.requestFocus();
                        } else {
                            if (_c.get()._neutralBtn.getVisibility() == VISIBLE) {
                                _c.get()._neutralBtn.requestFocus();
                            } else {
                                _c.get()._negativeBtn.requestFocus();
                            }
                        }
                        return true;
                    }
                    break;
                default:
                    return false;
            }
        }
        return false;
    }
}
public class BasicFileServlet {
    /**
     * Parses the Range header of the request and populates the ranges list on
     * the {@link FileRequestContext} object. Syntactically invalid ranges
     * (start &gt; end, or neither bound given) are silently dropped; a bound
     * of -1 means "open" on that side.
     *
     * @param context request context carrying the raw header and the output list
     * @return false only if the header matched the range pattern but yielded
     *         no satisfiable ranges; true when there is no/unmatched header or
     *         at least one range was accepted
     */
    public static boolean parseRanges(FileRequestContext context) {
        if (!StringUtils.isBlank(context.range)) {
            Matcher rangeMatcher = rangePattern.matcher(context.range);
            if (rangeMatcher.matches()) {
                // The pattern's first group holds the comma-separated range specs.
                String ranges[] = rangeMatcher.group(1).split(",");
                for (String range : ranges) {
                    long startBound = -1;
                    int hyphenIndex = range.indexOf('-');
                    // hyphenIndex > 0 means there are digits before the hyphen.
                    if (hyphenIndex > 0) {
                        startBound = Long.parseLong(range.substring(0, hyphenIndex));
                    }
                    long endBound = -1;
                    if (hyphenIndex >= 0 && (hyphenIndex + 1) < range.length()) {
                        endBound = Long.parseLong(range.substring(hyphenIndex + 1));
                    }
                    Range newRange = new Range(startBound, endBound);
                    // Reject inverted ranges and fully unbounded ("-") specs.
                    if (!(startBound != -1 && endBound != -1 && startBound > endBound) && !(startBound == -1 && endBound == -1)) {
                        context.ranges.add(newRange);
                    }
                }
                return !context.ranges.isEmpty();
            }
        }
        return true;
    }
}
public class ShanksSimulation2DGUI {
    /**
     * Add a Histogram to the simulation.
     *
     * @param histogramID
     *        The name of the Histogram
     * @param xAxisLabel
     *        The label for the x axis
     * @param yAxisLabel
     *        The label for the y axis
     * @throws ShanksException if the scenario portrayal cannot be obtained or
     *         the histogram cannot be added (e.g. duplicated chart ID)
     */
    public void addHistogram(String histogramID, String xAxisLabel, String yAxisLabel) throws ShanksException {
        // Delegate to the 2D scenario portrayal owned by the simulation.
        Scenario2DPortrayal scenarioPortrayal = (Scenario2DPortrayal) this.getSimulation().getScenarioPortrayal();
        scenarioPortrayal.addHistogram(histogramID, xAxisLabel, yAxisLabel);
    }
}
public class BytecodeUtils {
    /**
     * Returns an expression that returns a new {@link ArrayList} containing
     * all the given items (an immutable empty list is emitted for the empty
     * case — see the note below).
     */
    public static Expression asList(Iterable<? extends Expression> items) {
        final ImmutableList<Expression> copy = ImmutableList.copyOf(items);
        if (copy.isEmpty()) {
            // Empty case: emit ImmutableList.of() instead of an ArrayList.
            return MethodRef.IMMUTABLE_LIST_OF.get(0).invoke();
        }
        // Note, we cannot necessarily use ImmutableList for anything besides the empty list because
        // we may need to put a null in it.
        final Expression construct = ConstructorRef.ARRAY_LIST_SIZE.construct(constant(copy.size()));
        return new Expression(ARRAY_LIST_TYPE, Feature.NON_NULLABLE) {
            @Override
            protected void doGen(CodeBuilder mv) {
                // new ArrayList(size); then for each item: dup, gen item, add, pop.
                construct.gen(mv);
                for (Expression child : copy) {
                    mv.dup();
                    child.gen(mv);
                    MethodRef.ARRAY_LIST_ADD.invokeUnchecked(mv);
                    mv.pop(); // pop the bool result of arraylist.add
                }
            }
        };
    }
}
public class IconProviderBuilder {
    /**
     * Gets the {@link IStatesIconProvider} from this {@link IconProviderBuilder}.
     *
     * @return a provider that returns the first icon registered for any of the
     *         state's (property, value) pairs, or the default icon when none
     *         match
     */
    private IStatesIconProvider getStateIconProvider() {
        return state -> {
            // First non-null icon across the state's properties wins; iteration
            // order follows the properties key set.
            return state.getProperties().keySet().stream().map(prop -> stateIcons.get(prop, state.getValue(prop))).filter(Objects::nonNull).findFirst().orElse(defaultIcon);
        };
    }
}
public class ProcessExtensionService { /** * Json variables need to be represented as JsonNode for engine to handle as Json * Do this for any var marked as json or whose type is not recognised from the extension file */ private ProcessExtensionModel convertJsonVariables ( ProcessExtensionModel processExtensionModel ) { } }
if ( processExtensionModel != null && processExtensionModel . getExtensions ( ) != null && processExtensionModel . getExtensions ( ) . getProperties ( ) != null ) { for ( VariableDefinition variableDefinition : processExtensionModel . getExtensions ( ) . getProperties ( ) . values ( ) ) { if ( ! variableTypeMap . keySet ( ) . contains ( variableDefinition . getType ( ) ) || variableDefinition . getType ( ) . equals ( "json" ) ) { variableDefinition . setValue ( objectMapper . convertValue ( variableDefinition . getValue ( ) , JsonNode . class ) ) ; } } } return processExtensionModel ;
public class ThriftInvertedIndexHandler {
    /**
     * (non-Javadoc)
     *
     * Delegates directly to the base inverted-index search implementation.
     *
     * @see com.impetus.client.cassandra.index.InvertedIndexHandlerBase#search(com.impetus.kundera.metadata.model.EntityMetadata,
     *      java.lang.String, org.apache.cassandra.thrift.ConsistencyLevel, java.util.Map)
     */
    @Override
    public List<SearchResult> search(EntityMetadata m, String persistenceUnit, ConsistencyLevel consistencyLevel, Map<Boolean, List<IndexClause>> indexClauseMap) {
        return super.search(m, persistenceUnit, consistencyLevel, indexClauseMap);
    }
}
public class JumboCyclicVertexSearch { /** * XOR the to bit sets together and return the result . Neither input is * modified . * @ param x first bit set * @ param y second bit set * @ return the XOR of the two bit sets */ static BitSet xor ( BitSet x , BitSet y ) { } }
BitSet z = copy ( x ) ; z . xor ( y ) ; return z ;
public class EventUtilities {
    /**
     * Send data to a ZMQ Socket.<br>
     * Warning — see http://zeromq.org/area:faq: "ZeroMQ sockets are not
     * thread-safe. The short version is that sockets should not be shared
     * between threads. We recommend creating a dedicated socket for each
     * thread. For those situations where a dedicated socket per thread is
     * infeasible, a socket may be shared if and only if each thread executes a
     * full memory barrier before accessing the socket."
     *
     * @param eventSocket the socket to send on (caller is responsible for thread confinement)
     * @param fullName fully qualified event name
     * @param counter event counter for the context frame
     * @param isException whether this event carries an error
     * @param data the payload frame
     * @throws DevFailed if sending the context data fails
     */
    static void sendToSocket(final ZMQ.Socket eventSocket, final String fullName, int counter, boolean isException, byte[] data) throws DevFailed {
        XLOGGER.entry();
        // Context frames (name/counter/error flag) precede the payload frame.
        sendContextData(eventSocket, fullName, counter, isException);
        eventSocket.send(data);
        LOGGER.debug("event {} sent", fullName);
        XLOGGER.exit();
    }
}
public class LuceneGazetteer {
    /**
     * Executes a query against the Lucene index, processing the results and
     * returning at most maxResults ResolvedLocations with ancestry resolved.
     *
     * @param location the location occurrence
     * @param sanitizedName the sanitized name of the search location
     * @param filter the filter used to restrict the search results
     * @param maxResults the maximum number of results
     * @param fuzzy is this a fuzzy query
     * @param dedupe should duplicate locations be filtered from the results
     * @param ancestryMode the hierarchy resolution mode
     * @param previousResults the results of a previous query that should be used for duplicate
     *        filtering and appended to until no additional matches are found or maxResults has
     *        been reached; the input list will not be modified and may be <code>null</code>
     * @return the ResolvedLocations with ancestry resolved matching the query
     * @throws ParseException if an error occurs generating the query
     * @throws IOException if an error occurs executing the query
     */
    private List<ResolvedLocation> executeQuery(final LocationOccurrence location, final String sanitizedName, final Filter filter, final int maxResults, final boolean fuzzy, final boolean dedupe, final AncestryMode ancestryMode, final List<ResolvedLocation> previousResults) throws ParseException, IOException {
        Query query = new AnalyzingQueryParser(Version.LUCENE_4_9, INDEX_NAME.key(), INDEX_ANALYZER).parse(String.format(fuzzy ? FUZZY_FMT : EXACT_MATCH_FMT, sanitizedName));
        List<ResolvedLocation> matches = new ArrayList<ResolvedLocation>(maxResults);
        Map<Integer, Set<GeoName>> parentMap = new HashMap<Integer, Set<GeoName>>();
        // reuse GeoName instances so all ancestry is correctly resolved if multiple names for
        // the same GeoName match the query
        Map<Integer, GeoName> geonameMap = new HashMap<Integer, GeoName>();
        // if we are filling previous results, add them to the match list and the geoname map
        // so they can be used for deduplication or re-used if additional matches are found
        if (previousResults != null) {
            matches.addAll(previousResults);
            for (ResolvedLocation loc : previousResults) {
                geonameMap.put(loc.getGeoname().getGeonameID(), loc.getGeoname());
            }
        }
        // short circuit if we were provided enough previous results to satisfy maxResults;
        // we do this here because the query loop condition is evaluated after the query
        // is executed and results are processed to support de-duplication
        if (matches.size() >= maxResults) {
            return matches;
        }
        // track the last discovered hit so we can re-execute the query if we are
        // deduping and need to fill results
        ScoreDoc lastDoc = null;
        do {
            // collect all the hits up to maxResults, and sort them based
            // on Lucene match score and population for the associated
            // GeoNames record
            TopDocs results = indexSearcher.searchAfter(lastDoc, query, filter, maxResults, POPULATION_SORT);
            // set lastDoc to null so we don't infinite loop if results is empty
            lastDoc = null;
            // populate results if matches were discovered
            for (ScoreDoc scoreDoc : results.scoreDocs) {
                lastDoc = scoreDoc;
                Document doc = indexSearcher.doc(scoreDoc.doc);
                // reuse GeoName instances so all ancestry is correctly resolved if multiple names for
                // the same GeoName match the query
                int geonameID = GEONAME_ID.getValue(doc);
                GeoName geoname = geonameMap.get(geonameID);
                if (geoname == null) {
                    geoname = BasicGeoName.parseFromGeoNamesRecord((String) GEONAME.getValue(doc), (String) PREFERRED_NAME.getValue(doc));
                    geonameMap.put(geonameID, geoname);
                } else if (dedupe) {
                    // if we have already seen this GeoName and we are removing duplicates, skip to the next doc
                    continue;
                }
                String matchedName = INDEX_NAME.getValue(doc);
                if (!geoname.isAncestryResolved()) {
                    IndexableField parentIdField = doc.getField(IndexField.PARENT_ID.key());
                    Integer parentId = parentIdField != null && parentIdField.numericValue() != null ? parentIdField.numericValue().intValue() : null;
                    if (parentId != null) {
                        // if we are lazily or manually loading ancestry, replace GeoName with a LazyAncestryGeoName;
                        // otherwise, build the parent resolution map
                        switch (ancestryMode) {
                            case LAZY:
                                geoname = new LazyAncestryGeoName(geoname, parentId, this);
                                break;
                            case MANUAL:
                                geoname = new LazyAncestryGeoName(geoname, parentId);
                                break;
                            case ON_CREATE:
                                Set<GeoName> geos = parentMap.get(parentId);
                                if (geos == null) {
                                    geos = new HashSet<GeoName>();
                                    parentMap.put(parentId, geos);
                                }
                                geos.add(geoname);
                                break;
                        }
                    }
                }
                matches.add(new ResolvedLocation(location, geoname, matchedName, fuzzy));
                // stop processing results if we have reached maxResults matches
                if (matches.size() >= maxResults) {
                    break;
                }
            }
        } while (dedupe && lastDoc != null && matches.size() < maxResults);
        // if any results need ancestry resolution, resolve parents;
        // this map should only contain GeoNames if ancestryMode == ON_CREATE
        if (!parentMap.isEmpty()) {
            resolveParents(parentMap);
        }
        return matches;
    }
}
public class CollectionUtils { /** * Counts the number of elements in the { @ link Iterable } collection accepted by the { @ link Filter } . * @ param < T > Class type of the elements in the { @ link Iterable } collection . * @ param iterable { @ link Iterable } collection of elements being evaluated . * @ param filter { @ link Filter } used to determine the count of elements in the { @ link Iterable } collection * accepted by the { @ link Filter } . * @ return an integer value indicating the number of elements in the { @ link Iterable } collection accepted * by the { @ link Filter } . * @ throws IllegalArgumentException if { @ link Filter } is null . * @ see java . lang . Iterable * @ see org . cp . elements . lang . Filter * @ see # nullSafeIterable ( Iterable ) */ public static < T > long count ( Iterable < T > iterable , Filter < T > filter ) { } }
Assert . notNull ( filter , "Filter is required" ) ; return StreamSupport . stream ( nullSafeIterable ( iterable ) . spliterator ( ) , false ) . filter ( filter :: accept ) . count ( ) ;
public class DownloadSerialQueue {
    /**
     * Enqueues the given task sometime in the serial queue. If the
     * {@code task} ends up at the head of the serial queue, it will be started
     * automatically (unless the queue is paused or a looper is already
     * running).
     */
    public synchronized void enqueue(DownloadTask task) {
        taskList.add(task);
        // Keep the queue in priority order (DownloadTask's natural ordering).
        Collections.sort(taskList);
        // Kick off the drain loop only if nothing is currently looping.
        if (!paused && !looping) {
            looping = true;
            startNewLooper();
        }
    }
}
public class EnumVocab {
    /**
     * Returns the {@link Property} for the given enum item contained in this
     * vocabulary.
     *
     * @param property
     *        the property to look up, must not be <code>null</code>
     * @return the result of looking up <code>property</code> in this vocabulary.
     */
    public Property get(P property) {
        Preconditions.checkNotNull(property);
        // NOTE(review): the unconditional .get() implies every enum item is
        // expected to be present in the vocabulary — confirm lookup() cannot
        // be empty for a valid enum constant.
        return lookup(converter.convert(property)).get();
    }
}
public class JacksonSingleton {
    /**
     * Un-registers a JSON module and rebuilds the object mappers if the module
     * was actually registered.
     *
     * @param module the module; a {@code null} value is ignored
     */
    @Override
    public void unregister(Module module) {
        if (module == null) {
            // May happen on departure.
            return;
        }
        LOGGER.info("Removing Jackson module {}", module.getModuleName());
        // Mutation of the module list and the mapper rebuild happen under the
        // same lock so readers never see a half-updated mapper.
        synchronized (lock) {
            if (modules.remove(module)) {
                rebuildMappers();
            }
        }
    }
}
public class Ssh2DsaPublicKey {
    /**
     * Verify the signature.
     *
     * The SSH wire format carries the raw r||s pair (optionally wrapped in an
     * "ssh-dss" blob); the JCE verifier expects a DER SEQUENCE of two
     * INTEGERs, so the raw values are re-encoded with ASN.1 before
     * verification.
     *
     * @param signature
     *        byte[]
     * @param data
     *        byte[]
     * @return <code>true</code> if the signature was produced by the
     *         corresponding private key that owns this public key, otherwise
     *         <code>false</code>.
     * @throws SshException
     */
    public boolean verifySignature(byte[] signature, byte[] data) throws SshException {
        ByteArrayReader bar = new ByteArrayReader(signature);
        try {
            // A raw signature is exactly two fixed-size halves; anything else
            // is assumed to be the SSH blob form: string "ssh-dss" + string sig.
            if (signature.length != 40 // 160 bits
                    && signature.length != 56 // 224 bits
                    && signature.length != 64) { // 256 bits
                byte[] sig = bar.readBinaryString();
                String header = new String(sig);
                if (!header.equals("ssh-dss")) {
                    throw new SshException("The encoded signature is not DSA", SshException.INTERNAL_ERROR);
                }
                signature = bar.readBinaryString();
            }
            // The raw signature is r||s, each half numSize bytes.
            int numSize = signature.length / 2;
            // Re-encode r and s as ASN.1 INTEGERs using a SimpleASNWriter; a
            // leading zero byte is prepended when the high bit is set so the
            // value is not misread as negative.
            ByteArrayOutputStream r = new ByteArrayOutputStream();
            ByteArrayOutputStream s = new ByteArrayOutputStream();
            SimpleASNWriter asn = new SimpleASNWriter();
            asn.writeByte(0x02);
            if (((signature[0] & 0x80) == 0x80) && (signature[0] != 0x00)) {
                r.write(0);
                r.write(signature, 0, numSize);
            } else {
                r.write(signature, 0, numSize);
            }
            asn.writeData(r.toByteArray());
            asn.writeByte(0x02);
            if (((signature[numSize] & 0x80) == 0x80) && (signature[numSize] != 0x00)) {
                s.write(0);
                s.write(signature, numSize, numSize);
            } else {
                s.write(signature, numSize, numSize);
            }
            asn.writeData(s.toByteArray());
            // Wrap both INTEGERs in a SEQUENCE (tag 0x30) for the JCE verifier.
            SimpleASNWriter asnEncoded = new SimpleASNWriter();
            asnEncoded.writeByte(0x30);
            asnEncoded.writeData(asn.toByteArray());
            byte[] encoded = asnEncoded.toByteArray();
            Signature sig = JCEProvider.getProviderForAlgorithm(JCEAlgorithms.JCE_SHA1WithDSA) == null
                    ? Signature.getInstance(JCEAlgorithms.JCE_SHA1WithDSA)
                    : Signature.getInstance(JCEAlgorithms.JCE_SHA1WithDSA, JCEProvider.getProviderForAlgorithm(JCEAlgorithms.JCE_SHA1WithDSA));
            sig.initVerify(pubkey);
            sig.update(data);
            return sig.verify(encoded);
        } catch (Exception ex) {
            throw new SshException(SshException.JCE_ERROR, ex);
        } finally {
            try {
                bar.close();
            } catch (IOException e) {
            }
        }
    }
}
public class AuthenticationService { /** * Invalidates a specific authentication token and its corresponding * Guacamole session , effectively logging out the associated user . If the * authentication token is not valid , this function has no effect . * @ param authToken * The token being invalidated . * @ return * true if the given authentication token was valid and the * corresponding Guacamole session was destroyed , false if the given * authentication token was not valid and no action was taken . */ public boolean destroyGuacamoleSession ( String authToken ) { } }
// Remove corresponding GuacamoleSession if the token is valid GuacamoleSession session = tokenSessionMap . remove ( authToken ) ; if ( session == null ) return false ; // Invalidate the removed session session . invalidate ( ) ; return true ;
public class Request { /** * sets the user agent header for the request , if exists then appends * @ param userAgentValue : header field value to add * @ return : Request Object with userAgent header value set */ public Request userAgent ( String userAgentValue ) { } }
final String userAgent = RequestHeaderFields . USER_AGENT . getName ( ) ; String agent = headers . get ( userAgent ) ; if ( agent == null ) { agent = userAgentValue ; } else { agent = agent + " " + userAgentValue ; } return this . setHeader ( userAgent , agent ) ;