signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WheelCall { /** * Calls a wheel module function on the master asynchronously and * returns information about the scheduled job that can be used to query the result . * Authentication is done with the token therefore you have to login prior * to using this function . * @ param client SaltClient instance * @ param auth authentication credentials to use * @ return information about the scheduled job */ public CompletionStage < WheelAsyncResult < R > > callAsync ( final SaltClient client , AuthMethod auth ) { } }
return client . call ( this , Client . WHEEL_ASYNC , Optional . empty ( ) , Collections . emptyMap ( ) , new TypeToken < Return < List < WheelAsyncResult < R > > > > ( ) { } , auth ) . thenApply ( wrapper -> { WheelAsyncResult < R > result = wrapper . getResult ( ) . get ( 0 ) ; result . setType ( getReturnType ( ) ) ; return result ; } ) ;
public class ExportToFileClient { /** * Deprecate the current batch and create a new one . The old one will still * be active until all writers have finished writing their current blocks * to it . */ void roll ( ) { } }
m_batchLock . writeLock ( ) . lock ( ) ; final PeriodicExportContext previous = m_current ; try { m_current = new PeriodicExportContext ( ) ; m_logger . trace ( "Rolling batch." ) ; for ( ExportToFileDecoder decoder : m_tableDecoders . values ( ) ) { decoder . resetWriter ( ) ; } } finally { m_batchLock . writeLock ( ) . unlock ( ) ; } previous . closeAllWriters ( ) ;
public class TriggerContext { /** * Adds the specified triggers to the validator under construction . * @ param triggers Triggers to be added . * @ return Context allowing further construction of the validator using the DSL . */ public TriggerContext on ( Collection < Trigger > triggers ) { } }
if ( triggers != null ) { addedTriggers . addAll ( triggers ) ; } // Stay in the same context and re - use the same instance because no type has changed return this ;
public class Ec2IaasHandler { /** * ( non - Javadoc ) * @ see net . roboconf . target . api . TargetHandler * # terminateMachine ( net . roboconf . target . api . TargetHandlerParameters , java . lang . String ) */ @ Override public void terminateMachine ( TargetHandlerParameters parameters , String machineId ) throws TargetException { } }
this . logger . fine ( "Terminating machine '" + machineId + "'." ) ; cancelMachineConfigurator ( machineId ) ; try { AmazonEC2 ec2 = createEc2Client ( parameters . getTargetProperties ( ) ) ; TerminateInstancesRequest terminateInstancesRequest = new TerminateInstancesRequest ( ) ; terminateInstancesRequest . withInstanceIds ( machineId ) ; ec2 . terminateInstances ( terminateInstancesRequest ) ; } catch ( Exception e ) { this . logger . severe ( "An error occurred while terminating a machine on Amazon EC2. " + e . getMessage ( ) ) ; throw new TargetException ( e ) ; }
public class ORecordIteratorClusters { /** * Move the iterator to the end of the range . If no range was specified move to the last record of the cluster . * @ return The object itself */ @ Override public ORecordIteratorClusters < REC > last ( ) { } }
currentClusterIdx = clusterIds . length - 1 ; current . clusterPosition = liveUpdated ? database . countClusterElements ( clusterIds [ currentClusterIdx ] ) : lastClusterPosition + 1 ; return this ;
public class MapIterate { /** * For each key and value of the map , the function is evaluated with the key and value as the parameter . * The results of these evaluations are collected into the target map . */ public static < K , V , V2 , R extends Map < K , V2 > > R collectValues ( Map < K , V > map , final Function2 < ? super K , ? super V , ? extends V2 > function , final R target ) { } }
MapIterate . forEachKeyValue ( map , new Procedure2 < K , V > ( ) { public void value ( K key , V value ) { target . put ( key , function . value ( key , value ) ) ; } } ) ; return target ;
public class MainScene { /** * Apply the necessary rotation to the transform so that it is in front of * the camera . The actual rotation is performed not using the yaw angle but * using equivalent quaternion values for better accuracy . But the yaw angle * is still returned for backward compatibility . * @ param widget The transform to modify . * @ return The camera ' s yaw in degrees . */ public float rotateToFaceCamera ( final Widget widget ) { } }
final float yaw = getMainCameraRigYaw ( ) ; GVRTransform t = getMainCameraRig ( ) . getHeadTransform ( ) ; widget . rotateWithPivot ( t . getRotationW ( ) , 0 , t . getRotationY ( ) , 0 , 0 , 0 , 0 ) ; return yaw ;
public class AdGroupAdRotationMode { /** * Gets the adRotationMode value for this AdGroupAdRotationMode . * @ return adRotationMode * < span class = " constraint CampaignType " > This field may only be * set to OPTIMIZE for campaign channel subtype UNIVERSAL _ APP _ CAMPAIGN . < / span > * < span class = " constraint CampaignType " > This field may only be set to * the values : OPTIMIZE , ROTATE _ FOREVER . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . AdRotationMode getAdRotationMode ( ) { } }
return adRotationMode ;
public class HttpRequestBody { /** * Construct a HTTP POST Body from the variables in postParams */ public void setFormParams ( TreeSet < HtmlParameter > postParams ) { } }
if ( postParams . isEmpty ( ) ) { this . setBody ( "" ) ; return ; } StringBuilder postData = new StringBuilder ( ) ; for ( HtmlParameter parameter : postParams ) { if ( parameter . getType ( ) != HtmlParameter . Type . form ) { continue ; } postData . append ( parameter . getName ( ) ) ; postData . append ( '=' ) ; postData . append ( parameter . getValue ( ) ) ; postData . append ( '&' ) ; } String data = "" ; if ( postData . length ( ) != 0 ) { data = postData . substring ( 0 , postData . length ( ) - 1 ) ; } this . setBody ( data ) ;
public class DomainsInner { /** * Get domain name recommendations based on keywords . * Get domain name recommendations based on keywords . * @ param parameters Search parameters for domain name recommendations . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; NameIdentifierInner & gt ; object */ public Observable < Page < NameIdentifierInner > > listRecommendationsAsync ( final DomainRecommendationSearchParameters parameters ) { } }
return listRecommendationsWithServiceResponseAsync ( parameters ) . map ( new Func1 < ServiceResponse < Page < NameIdentifierInner > > , Page < NameIdentifierInner > > ( ) { @ Override public Page < NameIdentifierInner > call ( ServiceResponse < Page < NameIdentifierInner > > response ) { return response . body ( ) ; } } ) ;
public class AdminToolDBBrowserExampleLoader { /** * vendor must be set and a type of { @ link Vendor } * @ param xmlString * @ throws JSONException * @ throws IllegalArgumentException * @ throws IOException * @ throws JsonMappingException * @ throws JsonParseException * @ see ExampleStatements */ public void loadExampleStatementsFromXMLString ( String xmlString ) throws JSONException , IllegalArgumentException , JsonParseException , JsonMappingException , IOException { } }
if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( "Receiving json string: " + xmlString ) ; } JSONObject jsonobject = XML . toJSONObject ( xmlString ) ; String [ ] root = JSONObject . getNames ( jsonobject ) ; if ( null == root ) { throw new IllegalArgumentException ( "no root object in xml found" ) ; } if ( root . length > 1 ) { throw new IllegalArgumentException ( "more than one root objects found" ) ; } JSONObject statement = jsonobject . getJSONObject ( root [ 0 ] ) ; loadExampleStatementsFromJsonString ( statement . toString ( ) ) ;
public class RESTMBeanServerConnection { /** * { @ inheritDoc } */ @ Override public String getDefaultDomain ( ) throws IOException { } }
final String sourceMethod = "getDefaultDomain" ; checkConnection ( ) ; URL defaultDomainURL = null ; HttpsURLConnection connection = null ; try { // Get URL for default domain defaultDomainURL = getDefaultDomainURL ( ) ; // Get connection to server connection = getConnection ( defaultDomainURL , HttpMethod . GET ) ; } catch ( IOException io ) { throw getRequestErrorException ( sourceMethod , io , defaultDomainURL ) ; } // Check response code from server int responseCode = 0 ; try { responseCode = connection . getResponseCode ( ) ; } catch ( ConnectException ce ) { recoverConnection ( ce ) ; // Server is down ; not a client bug throw ce ; } switch ( responseCode ) { case HttpURLConnection . HTTP_OK : JSONConverter converter = JSONConverter . getConverter ( ) ; try { // Process and return server response return converter . readString ( connection . getInputStream ( ) ) ; } catch ( Exception e ) { throw getResponseErrorException ( sourceMethod , e , defaultDomainURL ) ; } finally { JSONConverter . returnConverter ( converter ) ; } case HttpURLConnection . HTTP_BAD_REQUEST : case HttpURLConnection . HTTP_INTERNAL_ERROR : // Server response should be a serialized Throwable Throwable t = getServerThrowable ( sourceMethod , connection ) ; IOException ioe = t instanceof IOException ? ( IOException ) t : new IOException ( RESTClientMessagesUtil . getMessage ( RESTClientMessagesUtil . UNEXPECTED_SERVER_THROWABLE ) , t ) ; throw ioe ; case HttpURLConnection . HTTP_UNAUTHORIZED : case HttpURLConnection . HTTP_FORBIDDEN : throw getBadCredentialsException ( responseCode , connection ) ; case HttpURLConnection . HTTP_GONE : case HttpURLConnection . HTTP_NOT_FOUND : IOException io = getResponseCodeErrorException ( sourceMethod , responseCode , connection ) ; recoverConnection ( io ) ; throw io ; default : throw getResponseCodeErrorException ( sourceMethod , responseCode , connection ) ; }
public class FrameAndRootPainter { /** * Get the paint to paint the inner highlight with . * @ param s the highlight shape . * @ return the paint . */ public Paint getFrameInnerHighlightPaint ( Shape s ) { } }
switch ( state ) { case BACKGROUND_ENABLED : return frameInnerHighlightInactive ; case BACKGROUND_ENABLED_WINDOWFOCUSED : return frameInnerHighlightActive ; } return null ;
public class WSCredentialProvider { /** * { @ inheritDoc } Create a WSCredential for the WSPrincipal in the subject . * If WSPrincipal is found , take no action . */ @ Override public void setCredential ( Subject subject ) throws CredentialException { } }
Set < WSPrincipal > principals = subject . getPrincipals ( WSPrincipal . class ) ; if ( principals . isEmpty ( ) ) { return ; } if ( principals . size ( ) != 1 ) { throw new CredentialException ( "Too many WSPrincipals in the subject" ) ; } WSPrincipal principal = principals . iterator ( ) . next ( ) ; setCredential ( subject , principal ) ;
public class ContractsApi { /** * Get public contract items Lists items of a public contract - - - This route * is cached for up to 3600 seconds * @ param contractId * ID of a contract ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param page * Which page of results to return ( optional , default to 1) * @ return ApiResponse & lt ; List & lt ; PublicContractsItemsResponse & gt ; & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < List < PublicContractsItemsResponse > > getContractsPublicItemsContractIdWithHttpInfo ( Integer contractId , String datasource , String ifNoneMatch , Integer page ) throws ApiException { } }
com . squareup . okhttp . Call call = getContractsPublicItemsContractIdValidateBeforeCall ( contractId , datasource , ifNoneMatch , page , null ) ; Type localVarReturnType = new TypeToken < List < PublicContractsItemsResponse > > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class Scopes { /** * indexes the IEObject description using the given */ public static < T > Multimap < T , IEObjectDescription > index ( Iterable < IEObjectDescription > descriptions , Function < IEObjectDescription , T > indexer ) { } }
ArrayList < IEObjectDescription > list = Lists . newArrayList ( descriptions ) ; LinkedHashMultimap < T , IEObjectDescription > multimap = LinkedHashMultimap . create ( list . size ( ) , 1 ) ; for ( IEObjectDescription desc : list ) { multimap . put ( indexer . apply ( desc ) , desc ) ; } return multimap ;
public class Closeables2 { /** * Close all { @ link Closeable } objects provided in the { @ code closeables } * iterator . When encountering an error when closing , write the message out * to the provided { @ code log } . * @ param log The log where we will write error messages when failing to * close a closeable . * @ param closeables The closeables that we want to close . */ public static void closeAll ( @ Nonnull final Logger log , @ Nonnull final Closeable ... closeables ) { } }
closeAll ( Arrays . asList ( closeables ) , log ) ;
public class TreeUtil { /** * Returns the tree item associated with the specified \ - delimited path . * @ param tree Tree to search . * @ param path \ - delimited path to search . Search is not case sensitive . * @ param create If true , tree nodes are created if they do not already exist . * @ param clazz Class of Treenode to create . * @ return The tree item corresponding to the specified path , or null if not found . */ public static Treenode findNode ( Treeview tree , String path , boolean create , Class < ? extends Treenode > clazz ) { } }
return findNode ( tree , path , create , clazz , MatchMode . CASE_INSENSITIVE ) ;
public class StorageUpdate11 { /** * instead of ReportUtil . loadConvertedReport ( xml ) */ private void convertReports ( ) throws RepositoryException { } }
String statement = "/jcr:root" + ISO9075 . encodePath ( StorageConstants . REPORTS_ROOT ) + "//*[@className='ro.nextreports.server.domain.Report' and @type='Next']" + "//*[fn:name()='jcr:content' and @jcr:mimeType='text/xml']" ; QueryResult queryResult = getTemplate ( ) . query ( statement ) ; NodeIterator nodes = queryResult . getNodes ( ) ; LOG . info ( "Converter 5.1 : Found " + nodes . getSize ( ) + " report nodes" ) ; while ( nodes . hasNext ( ) ) { Node node = nodes . nextNode ( ) ; Node reportNode = node . getParent ( ) . getParent ( ) . getParent ( ) . getParent ( ) ; String reportName = reportNode . getName ( ) ; String reportPath = reportNode . getPath ( ) ; LOG . info ( " * Start convert '" + reportPath + "'" ) ; Property prop = node . getProperty ( "jcr:data" ) ; String xml = null ; try { xml = new Converter_5_2 ( ) . convertFromInputStream ( prop . getBinary ( ) . getStream ( ) , true ) ; if ( xml != null ) { ValueFactory valueFactory = node . getSession ( ) . getValueFactory ( ) ; Binary binaryValue = valueFactory . createBinary ( new ByteArrayInputStream ( xml . getBytes ( "UTF-8" ) ) ) ; node . setProperty ( "jcr:data" , binaryValue ) ; LOG . info ( "\t -> OK" ) ; } else { LOG . error ( "\t -> FAILED : null xml" ) ; } } catch ( Throwable t ) { LOG . error ( "\t-> FAILED : " + t . getMessage ( ) , t ) ; } }
public class MarkerRecordedEventAttributesMarshaller { /** * Marshall the given parameter object . */ public void marshall ( MarkerRecordedEventAttributes markerRecordedEventAttributes , ProtocolMarshaller protocolMarshaller ) { } }
if ( markerRecordedEventAttributes == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( markerRecordedEventAttributes . getMarkerName ( ) , MARKERNAME_BINDING ) ; protocolMarshaller . marshall ( markerRecordedEventAttributes . getDetails ( ) , DETAILS_BINDING ) ; protocolMarshaller . marshall ( markerRecordedEventAttributes . getDecisionTaskCompletedEventId ( ) , DECISIONTASKCOMPLETEDEVENTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class PhysicalDatabaseParent { /** * Check all the cached tables and flush the old ones . */ private synchronized void checkCache ( ) { } }
Object [ ] pTables = m_setTableCacheList . toArray ( ) ; for ( Object objTable : pTables ) { PTable pTable = ( PTable ) objTable ; if ( pTable . addPTableOwner ( null ) == 1 ) { // Not currently being used is a candidate for flushing from the cache . long lTimeLastUsed = pTable . getLastUsed ( ) ; long lTimeCurrent = System . currentTimeMillis ( ) ; if ( ( lTimeCurrent - lTimeLastUsed ) > ( this . getCacheMinutes ( ) * 60 * 1000 ) ) { pTable . removePTableOwner ( this , true ) ; m_setTableCacheList . remove ( pTable ) ; } } }
public class AuthDirectiveParser { /** * http : / / javasourcecode . org / html / open - source / jdk / jdk - 6u23 / sun / net / www / HeaderParser . java . html */ public void parse ( ) throws AuthHeaderParsingException { } }
String [ ] parts = headerValue . trim ( ) . split ( "\\s+" , 2 ) ; if ( parts . length < 1 ) { throw new AuthHeaderParsingException ( "Unable to split scheme and other part in " + headerValue ) ; } builder . scheme ( parts [ 0 ] ) ; if ( parts . length == 1 ) { return ; } // FIXME : handle optional , one - time token for ( String kv : parts [ 1 ] . split ( "," ) ) { String [ ] kva = kv . trim ( ) . split ( "=" , 2 ) ; if ( ! ( kva . length == 2 ) ) { throw new AuthHeaderParsingException ( "Unable to split " + kv + " into parameter key and value in " + headerValue ) ; } if ( kva [ 1 ] . startsWith ( "\"" ) ) { kva [ 1 ] = kva [ 1 ] . substring ( 1 , kva [ 1 ] . length ( ) - 1 ) ; } builder . param ( kva [ 0 ] , kva [ 1 ] ) ; }
public class Math { /** * Returns the correlation coefficient between two vectors . */ public static double cor ( int [ ] x , int [ ] y ) { } }
if ( x . length != y . length ) { throw new IllegalArgumentException ( "Arrays have different length." ) ; } if ( x . length < 3 ) { throw new IllegalArgumentException ( "array length has to be at least 3." ) ; } double Sxy = cov ( x , y ) ; double Sxx = var ( x ) ; double Syy = var ( y ) ; if ( Sxx == 0 || Syy == 0 ) { return Double . NaN ; } return Sxy / java . lang . Math . sqrt ( Sxx * Syy ) ;
public class FaceletViewDeclarationLanguage { /** * { @ inheritDoc } */ @ Override public void renderView ( FacesContext context , UIViewRoot view ) throws IOException { } }
if ( ! view . isRendered ( ) ) { return ; } // log request if ( log . isLoggable ( Level . FINE ) ) { log . fine ( "Rendering View: " + view . getViewId ( ) ) ; } try { // build view - but not if we ' re in " buildBeforeRestore " // land and we ' ve already got a populated view . Note // that this optimizations breaks if there ' s a " c : if " in // the page that toggles as a result of request processing - // should that be handled ? Or // is this optimization simply so minor that it should just // be trimmed altogether ? // See JSF 2.0 spec section 2.2.6 , buildView is called before // Render Response // if ( ! isFilledView ( context , view ) ) // buildView ( context , view ) ; // setup writer and assign it to the context ResponseWriter origWriter = createResponseWriter ( context ) ; ExternalContext extContext = context . getExternalContext ( ) ; Writer outputWriter = extContext . getResponseOutputWriter ( ) ; StateWriter stateWriter = new StateWriter ( outputWriter , 1024 , context ) ; try { ResponseWriter writer = origWriter . cloneWithWriter ( stateWriter ) ; try { context . setResponseWriter ( writer ) ; StateManager stateMgr = context . getApplication ( ) . getStateManager ( ) ; // force creation of session if saving state there // - = Leonardo Uribe = - Do this does not have any sense ! . The only reference // about these lines are on http : / / java . net / projects / facelets / sources / svn / revision / 376 // and it says : " fixed lazy session instantiation with eager response commit " // This code is obviously to prevent this exception : // java . lang . IllegalStateException : Cannot create a session after the response has been committed // But in theory if that so , StateManager . saveState must happen before writer . close ( ) is called , // which can be done very easily . // if ( ! stateMgr . isSavingStateInClient ( context ) ) // extContext . getSession ( true ) ; // render the view to the response writer . startDocument ( ) ; view . 
encodeAll ( context ) ; writer . endDocument ( ) ; // finish writing // - = Leonardo Uribe = - This does not has sense too , because that ' s the reason // of the try / finally block . In practice , it only forces the close of the tag // in HtmlResponseWriter if necessary , but according to the spec , this should // be done using writer . flush ( ) instead . // writer . close ( ) ; // flush to origWriter if ( stateWriter . isStateWritten ( ) ) { // Call this method to force close the tag if necessary . // The spec javadoc says this : // " . . . Flush any ouput buffered by the output method to the underlying // Writer or OutputStream . This method will not flush the underlying // Writer or OutputStream ; it simply clears any values buffered by this // ResponseWriter . . . . " writer . flush ( ) ; // = - = markoc : STATE _ KEY is in output ONLY if // stateManager . isSavingStateInClient ( context ) is true - see // org . apache . myfaces . application . ViewHandlerImpl . writeState ( FacesContext ) // TODO this class and ViewHandlerImpl contain same constant < ! - - @ @ JSF _ FORM _ STATE _ MARKER @ @ - - > Object stateObj = stateMgr . saveView ( context ) ; String content = stateWriter . getAndResetBuffer ( ) ; int end = content . indexOf ( STATE_KEY ) ; // See if we can find any trace of the saved state . // If so , we need to perform token replacement if ( end >= 0 ) { // save state int start = 0 ; while ( end != - 1 ) { origWriter . write ( content , start , end - start ) ; String stateStr ; if ( view . isTransient ( ) ) { // Force state saving stateMgr . writeState ( context , stateObj ) ; stateStr = stateWriter . getAndResetBuffer ( ) ; } else if ( stateObj == null ) { stateStr = null ; } else { stateMgr . writeState ( context , stateObj ) ; stateStr = stateWriter . getAndResetBuffer ( ) ; } if ( stateStr != null ) { origWriter . write ( stateStr ) ; } start = end + STATE_KEY_LEN ; end = content . indexOf ( STATE_KEY , start ) ; } origWriter . 
write ( content , start , content . length ( ) - start ) ; // No trace of any saved state , so we just need to flush // the buffer } else { origWriter . write ( content ) ; } } else if ( stateWriter . isStateWrittenWithoutWrapper ( ) ) { // The state token has been written but the state has not been // saved yet . stateMgr . saveView ( context ) ; } else { // GET case without any form that trigger state saving . // Try to store it into cache . if ( _viewPoolProcessor != null && _viewPoolProcessor . isViewPoolEnabledForThisView ( context , view ) ) { ViewDeclarationLanguage vdl = context . getApplication ( ) . getViewHandler ( ) . getViewDeclarationLanguage ( context , view . getViewId ( ) ) ; if ( ViewDeclarationLanguage . FACELETS_VIEW_DECLARATION_LANGUAGE_ID . equals ( vdl . getId ( ) ) ) { StateManagementStrategy sms = vdl . getStateManagementStrategy ( context , view . getId ( ) ) ; if ( sms != null ) { context . getAttributes ( ) . put ( ViewPoolProcessor . FORCE_HARD_RESET , Boolean . TRUE ) ; // Force indirectly to store the map in the cache try { Object state = sms . saveView ( context ) ; } finally { context . getAttributes ( ) . remove ( ViewPoolProcessor . FORCE_HARD_RESET ) ; } // Clear the calculated value from the application map context . getAttributes ( ) . remove ( SERIALIZED_VIEW_REQUEST_ATTR ) ; } } } } } finally { // The Facelets implementation must close the writer used to write the response writer . close ( ) ; } } finally { stateWriter . release ( context ) ; } } catch ( FileNotFoundException fnfe ) { handleFaceletNotFound ( context , view . getViewId ( ) ) ; } catch ( Exception e ) { handleRenderException ( context , e ) ; }
public class GcsOutputChannelImpl { /** * Waits for the current outstanding request retrying it with exponential backoff if it fails . * @ throws ClosedByInterruptException if request was interrupted * @ throws IOException In the event of FileNotFoundException , MalformedURLException * @ throws RetriesExhaustedException if exceeding the number of retries */ private void waitForOutstandingRequest ( ) throws IOException { } }
if ( outstandingRequest == null ) { return ; } try { RetryHelper . runWithRetries ( new Callable < Void > ( ) { @ Override public Void call ( ) throws IOException , InterruptedException { if ( RetryHelper . getContext ( ) . getAttemptNumber ( ) > 1 ) { outstandingRequest . retry ( ) ; } token = outstandingRequest . waitForNextToken ( ) ; outstandingRequest = null ; return null ; } } , retryParams , GcsServiceImpl . exceptionHandler ) ; } catch ( RetryInterruptedException ex ) { token = null ; throw new ClosedByInterruptException ( ) ; } catch ( NonRetriableException e ) { Throwables . propagateIfInstanceOf ( e . getCause ( ) , IOException . class ) ; throw e ; }
public class StreamBuilderImpl { /** * Reduce with a binary function * < code > < pre > * s = init ; * s = op ( s , t1 ) ; * s = op ( s , t2 ) ; * result = s ; * < / pre > < / code > */ @ Override public StreamBuilderImpl < T , U > reduce ( T init , BinaryOperatorSync < T > op ) { } }
return new ReduceOpInitSync < > ( this , init , op ) ;
public class ResettableOAuth2AuthorizedClientService { /** * Copy of { @ link InMemoryOAuth2AuthorizedClientService # removeAuthorizedClient ( String , String ) } */ @ Override public void removeAuthorizedClient ( String clientRegistrationId , String principalName ) { } }
Assert . hasText ( clientRegistrationId , "clientRegistrationId cannot be empty" ) ; Assert . hasText ( principalName , "principalName cannot be empty" ) ; ClientRegistration registration = this . clientRegistrationRepository . findByRegistrationId ( clientRegistrationId ) ; if ( registration != null ) { this . authorizedClients . remove ( this . getIdentifier ( registration , principalName ) ) ; }
public class ObjectNameFactoryImpl { /** * Default behavior of Metrics 3 library , for fallback */ private ObjectName createMetrics3Name ( String domain , String name ) throws MalformedObjectNameException { } }
try { return new ObjectName ( domain , "name" , name ) ; } catch ( MalformedObjectNameException e ) { return new ObjectName ( domain , "name" , ObjectName . quote ( name ) ) ; }
public class MappedFieldMetaData { /** * Prepares a proxy instance of a collection type for use as a { @ link Factory } . * Proxy instances are always { @ link Factory factories } . * Using { @ link Factory # newInstance ( net . sf . cglib . proxy . Callback ) } * is significantly more efficient than using { @ link Enhancer # create ( Class , net . sf . cglib . proxy . Callback ) } . */ private Factory prepareProxyFactoryForCollectionTypes ( ) { } }
if ( this . isInstantiableCollectionType ) { return ( Factory ) Enhancer . create ( this . fieldType , ( LazyLoader ) ( ) -> null ) ; } return null ;
public class PluginLoader { /** * Scans the classpath for given pluginType . If not found , default class is used . */ @ SuppressWarnings ( "unchecked" ) < T > T loadPlugin ( final Class < T > pluginType ) { } }
return ( T ) loadPlugin ( pluginType , null ) ;
public class OptionalHeader { /** * Adjusts the file alignment to low alignment mode if necessary . * @ return 1 if low alignment mode , file alignment value otherwise */ public long getAdjustedFileAlignment ( ) { } }
long fileAlign = get ( FILE_ALIGNMENT ) ; if ( isLowAlignmentMode ( ) ) { return 1 ; } if ( fileAlign < 512 ) { // TODO correct ? fileAlign = 512 ; } // TODO what happens for too big alignment ? // TODO this is just a test , verify if ( fileAlign % 512 != 0 ) { long rest = fileAlign % 512 ; fileAlign += ( 512 - rest ) ; } return fileAlign ;
public class ConcurrentBigIntegerMinMaxHolder { /** * Convert a number to a big integer and try the best to preserve precision * @ param numberthe number * @ returnthe big integer , can be null if the number is null */ protected BigInteger toBigInteger ( Number number ) { } }
if ( number == null ) { return null ; } Class < ? > claz = number . getClass ( ) ; if ( claz == BigInteger . class ) { return ( BigInteger ) number ; } else if ( claz == BigDecimal . class ) { return ( ( BigDecimal ) number ) . toBigInteger ( ) ; } else if ( claz == Double . class ) { return new BigDecimal ( ( Double ) number ) . toBigInteger ( ) ; } else if ( claz == Float . class ) { return new BigDecimal ( ( Float ) number ) . toBigInteger ( ) ; } else { return BigInteger . valueOf ( number . longValue ( ) ) ; }
public class PortalSessionScope { /** * / * ( non - Javadoc ) * @ see org . springframework . beans . factory . config . Scope # remove ( java . lang . String ) */ @ Override public Object remove ( String name ) { } }
final HttpSession session = this . getPortalSesion ( false ) ; if ( session == null ) { return null ; } final Object sessionMutex = WebUtils . getSessionMutex ( session ) ; synchronized ( sessionMutex ) { final Object attribute = session . getAttribute ( name ) ; if ( attribute != null ) { session . removeAttribute ( name ) ; } return attribute ; }
public class NodeSelectorMarkupHandler { /** * Processing Instruction handling */ @ Override public void handleProcessingInstruction ( final char [ ] buffer , final int targetOffset , final int targetLen , final int targetLine , final int targetCol , final int contentOffset , final int contentLen , final int contentLine , final int contentCol , final int outerOffset , final int outerLen , final int line , final int col ) throws ParseException { } }
this . someSelectorsMatch = false ; for ( int i = 0 ; i < this . selectorsLen ; i ++ ) { this . selectorMatches [ i ] = this . selectorFilters [ i ] . matchProcessingInstruction ( false , this . markupLevel , this . markupBlocks [ this . markupLevel ] ) ; if ( this . selectorMatches [ i ] ) { this . someSelectorsMatch = true ; } } if ( this . someSelectorsMatch ) { markCurrentSelection ( ) ; this . selectedHandler . handleProcessingInstruction ( buffer , targetOffset , targetLen , targetLine , targetCol , contentOffset , contentLen , contentLine , contentCol , outerOffset , outerLen , line , col ) ; unmarkCurrentSelection ( ) ; return ; } unmarkCurrentSelection ( ) ; this . nonSelectedHandler . handleProcessingInstruction ( buffer , targetOffset , targetLen , targetLine , targetCol , contentOffset , contentLen , contentLine , contentCol , outerOffset , outerLen , line , col ) ;
public class Sentence { /** * Returns a text representation of the tagging for this { @ link Sentence } , using the specified { @ link TagFormat } . In other words , each token in * the sentence is given a tag indicating its position in a mention or that the token is not a mention . Assumes that each token is tagged either 0 * or 1 times . * @ param format * The { @ link TagFormat } to use * @ param reverse * Whether to return the text in reverse order * @ return A text representation of the tagging for this { @ link Sentence } , using the specified { @ link TagFormat } */ public String getTrainingText ( TagFormat format , boolean reverse ) { } }
List < TaggedToken > taggedTokens = new ArrayList < TaggedToken > ( getTaggedTokens ( ) ) ; if ( reverse ) Collections . reverse ( taggedTokens ) ; StringBuffer trainingText = new StringBuffer ( ) ; for ( TaggedToken token : taggedTokens ) { trainingText . append ( token . getText ( format ) ) ; trainingText . append ( " " ) ; } return trainingText . toString ( ) . trim ( ) ;
public class LocalVariablesSorter {

    /**
     * Creates a new local variable of the given type.
     *
     * Maps the ASM {@code Type} to the corresponding stack-map-frame type tag
     * (an {@code Opcodes.*} constant for primitives, a descriptor for arrays,
     * an internal name for objects), allocates a new local slot, and records
     * both the variable type and its frame representation.
     *
     * @param type
     *            the type of the local variable to be created.
     * @return the identifier (slot index) of the newly created local variable.
     */
    public int newLocal(final Type type) {
        // Frame-type tag used in stack map frames for this local.
        Object t;
        switch (type.getSort()) {
            // All sub-int types share the INTEGER frame slot.
            case Type.BOOLEAN:
            case Type.CHAR:
            case Type.BYTE:
            case Type.SHORT:
            case Type.INT:
                t = Opcodes.INTEGER;
                break;
            case Type.FLOAT:
                t = Opcodes.FLOAT;
                break;
            case Type.LONG:
                t = Opcodes.LONG;
                break;
            case Type.DOUBLE:
                t = Opcodes.DOUBLE;
                break;
            case Type.ARRAY:
                // Arrays are represented by their full descriptor, e.g. "[I".
                t = type.getDescriptor();
                break;
            // case Type.OBJECT:
            default:
                // Object types are represented by their internal name.
                t = type.getInternalName();
                break;
        }
        // Allocate a fresh slot and record its type information.
        int local = newLocalMapping(type);
        setLocalType(local, type);
        setFrameLocal(local, t);
        // Mark the method as modified so frames get recomputed/remapped.
        changed = true;
        return local;
    }
}
public class HystrixScriptModuleExecutor { /** * Execute a collection of ScriptModules identified by moduleId . * @ param moduleIds moduleIds for modules to execute * @ param executable execution logic to be performed for each module . * @ param moduleLoader loader which manages the modules . * @ return list of the outputs from the executable . */ public List < V > executeModules ( List < String > moduleIds , ScriptModuleExecutable < V > executable , ScriptModuleLoader moduleLoader ) { } }
Objects . requireNonNull ( moduleIds , "moduleIds" ) ; Objects . requireNonNull ( executable , "executable" ) ; Objects . requireNonNull ( moduleLoader , "moduleLoader" ) ; List < ScriptModule > modules = new ArrayList < ScriptModule > ( moduleIds . size ( ) ) ; for ( String moduleId : moduleIds ) { ScriptModule module = moduleLoader . getScriptModule ( ModuleId . create ( moduleId ) ) ; if ( module != null ) { modules . add ( module ) ; } } return executeModules ( modules , executable ) ;
public class Parameterized { /** * Given an object return the set of classes that it extends * or implements . * @ param inputClass object to describe * @ return set of classes that are implemented or extended by that object */ private static Set < Class < ? > > describeClassTree ( Class < ? > inputClass ) { } }
if ( inputClass == null ) { return Collections . emptySet ( ) ; } // create result collector Set < Class < ? > > classes = Sets . newLinkedHashSet ( ) ; // describe tree describeClassTree ( inputClass , classes ) ; return classes ;
public class ReadOptimizedGraphity {

    /**
     * Update the ego network of a user.
     *
     * For every follower of {@code user}, moves that follower's replica of
     * {@code user} to the head of the follower's GRAPHITY list (the list is
     * a linked chain of replica nodes ordered by recency). The move is done
     * by first unlinking the replica from its current position (bridging its
     * neighbours) and then re-inserting it directly after the follower node.
     *
     * @param user
     *            user where changes have occurred
     */
    private void updateEgoNetwork(final Node user) {
        Node followedReplica, followingUser, lastPosterReplica;
        Node prevReplica, nextReplica;
        // loop through users following
        for (Relationship relationship : user.getRelationships(
                SocialGraphRelationshipType.REPLICA, Direction.INCOMING)) {
            // load each replica and the corresponding user
            followedReplica = relationship.getStartNode();
            followingUser = NeoUtils.getPrevSingleNode(followedReplica,
                    SocialGraphRelationshipType.FOLLOW);
            // bridge user node: unlink the replica from its current position
            // unless it is already directly after the following user
            prevReplica = NeoUtils.getPrevSingleNode(followedReplica,
                    SocialGraphRelationshipType.GRAPHITY);
            if (!prevReplica.equals(followingUser)) {
                // remove the incoming GRAPHITY link to the replica
                followedReplica.getSingleRelationship(
                        SocialGraphRelationshipType.GRAPHITY, Direction.INCOMING).delete();
                nextReplica = NeoUtils.getNextSingleNode(followedReplica,
                        SocialGraphRelationshipType.GRAPHITY);
                if (nextReplica != null) {
                    // bridge predecessor and successor so the chain stays intact
                    followedReplica.getSingleRelationship(
                            SocialGraphRelationshipType.GRAPHITY, Direction.OUTGOING).delete();
                    prevReplica.createRelationshipTo(nextReplica,
                            SocialGraphRelationshipType.GRAPHITY);
                }
            }
            // insert the user's replica at its new position (list head),
            // unless it is already at the head
            lastPosterReplica = NeoUtils.getNextSingleNode(followingUser,
                    SocialGraphRelationshipType.GRAPHITY);
            if (!lastPosterReplica.equals(followedReplica)) {
                followingUser.getSingleRelationship(
                        SocialGraphRelationshipType.GRAPHITY, Direction.OUTGOING).delete();
                followingUser.createRelationshipTo(followedReplica,
                        SocialGraphRelationshipType.GRAPHITY);
                followedReplica.createRelationshipTo(lastPosterReplica,
                        SocialGraphRelationshipType.GRAPHITY);
            }
        }
    }
}
public class ThymeleafEngineConfigBuilder {

    /**
     * Adds a new dialect for this template engine, using the specified prefix.
     *
     * This dialect will be added to the set of currently configured ones. This
     * operation can only be executed before processing templates for the first
     * time. Once a template is processed, the template engine is considered to
     * be <i>initialized</i>, and from then on any attempt to change its
     * configuration will result in an exception.
     *
     * @param prefix
     *            the prefix that will be used for this dialect
     * @param dialect
     *            the new {@link IDialect} to be added to the existing ones
     * @return this for fluent use
     */
    public ThymeleafEngineConfigBuilder<P> addDialect(final String prefix, final IDialect dialect) {
        // NOTE(review): replaces any dialect previously registered under the
        // same prefix (map put semantics) — confirm that is intended.
        this.dialectsByPrefix().put(prefix, dialect);
        return this;
    }
}
public class EnvLoader { /** * Returns the topmost dynamic class loader . * @ param loader the context loader */ public static DynamicClassLoader getDynamicClassLoader ( ClassLoader loader ) { } }
for ( ; loader != null ; loader = loader . getParent ( ) ) { if ( loader instanceof DynamicClassLoader ) { return ( DynamicClassLoader ) loader ; } } return null ;
public class PhoneNumberUtil {

    /**
     * Get suggestions for a partially typed phone number.
     *
     * First tries to match a country code as the prefix of the cleaned input.
     * If no country matches, country codes that the input is a prefix of are
     * suggested. If a country matches, its non-regex area codes matching the
     * remainder of the input are suggested. Results are sorted and capped at
     * {@code plimit} entries.
     *
     * @param psearch search string (partial phone number)
     * @param plimit maximum number of suggestion entries
     * @param plocale locale used to resolve country code data
     * @return list of phone number data suggestions (never null)
     */
    public final List<PhoneNumberData> getSuggstions(final String psearch, final int plimit, final Locale plocale) {
        final List<PhoneNumberData> suggestList = new ArrayList<>(plimit);
        // Normalize input (strip formatting characters).
        final String cleanedPhoneNumber = cleanString(psearch);
        PhoneCountryCodeData foundCounty = null;
        final List<PhoneCountryCodeData> possibleCountries = new ArrayList<>(plimit);
        for (final PhoneCountryCodeData countryCode
                : CreatePhoneCountryConstantsClass.create(plocale).countryCodeData()) {
            // Exact country-code prefix match wins and stops the scan.
            if (cleanedPhoneNumber.startsWith(countryCode.getCountryCode())) {
                foundCounty = countryCode;
                break;
            }
            // Otherwise remember countries the input could still become.
            if (countryCode.getCountryCode().startsWith(cleanedPhoneNumber)) {
                possibleCountries.add(countryCode);
            }
        }
        if (foundCounty == null) {
            // No matching country found: suggest the possible countries.
            for (final PhoneCountryCodeData country : possibleCountries) {
                final PhoneNumberData entry = new PhoneNumberData();
                entry.setCountryCode(country.getCountryCode());
                entry.setCountryName(country.getCountryCodeName());
                suggestList.add(entry);
            }
        } else {
            // We do have a country: search for possible area codes in the
            // remainder of the input after the country code.
            final String phoneNumberWork = StringUtils.substring(
                    cleanedPhoneNumber, foundCounty.getCountryCode().length());
            for (final PhoneAreaCodeData areaCode : foundCounty.getAreaCodeData()) {
                // Regex-style area codes cannot be prefix-matched and are skipped.
                if (!areaCode.isRegEx() && areaCode.getAreaCode().startsWith(phoneNumberWork)) {
                    final PhoneNumberData entry = new PhoneNumberData();
                    entry.setCountryCode(foundCounty.getCountryCode());
                    entry.setCountryName(foundCounty.getCountryCodeName());
                    entry.setAreaCode(areaCode.getAreaCode());
                    entry.setAreaName(areaCode.getAreaName());
                    suggestList.add(entry);
                }
            }
        }
        Collections.sort(suggestList, new PhoneNumberSuggestComperator());
        // Cap the result list at the requested limit.
        if (suggestList.size() >= plimit) {
            return suggestList.subList(0, plimit);
        }
        return suggestList;
    }
}
public class LssClient { /** * Update stream watermark in live stream service * @ param domain The requested domain which the specific stream belongs to * @ param app The requested app which the specific stream belongs to * @ param stream The requested stream which need to update the watermark * @ param watermarks object of the new watermark , contains image watermark and timestamp watermark */ public void updateStreamWatermark ( String domain , String app , String stream , Watermarks watermarks ) { } }
UpdateStreamWatermarkRequest request = new UpdateStreamWatermarkRequest ( ) . withDomain ( domain ) . withApp ( app ) . withStream ( stream ) . withWatermarks ( watermarks ) ; updateStreamWatermark ( request ) ;
public class QEntity { /** * Parse an @ Embeddable * @ param entityFactory * @ param sessionFactory * @ param prefix * @ param type */ public void parseEmbeddable ( final QEntityFactory entityFactory , final SessionFactoryImplementor sessionFactory , final String prefix , final EmbeddableType < ? > type ) { } }
this . metamodelEmbeddable = type ; // Make sure the entity factory sees this embeddable entityFactory . getEmbeddable ( type . getJavaType ( ) , type ) ; for ( Attribute < ? , ? > attribute : type . getAttributes ( ) ) { parseFields ( entityFactory , sessionFactory , prefix , attribute ) ; }
public class StandardResponsesApi { /** * Get the details of a Standard Response . * @ param id id of the Standard Response ( required ) * @ param getStandardResponseData ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > getStandardResponseWithHttpInfo ( String id , GetStandardResponseData getStandardResponseData ) throws ApiException { } }
com . squareup . okhttp . Call call = getStandardResponseValidateBeforeCall ( id , getStandardResponseData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class JSONUtil { /** * 转为实体类对象 , 转换异常将被抛出 * @ param < T > Bean类型 * @ param json JSONObject * @ param beanClass 实体类对象 * @ return 实体类对象 */ public static < T > T toBean ( JSONObject json , Class < T > beanClass ) { } }
return null == json ? null : json . toBean ( beanClass ) ;
public class Node {

    /**
     * Get the affiliations of this node.
     *
     * Delegates to the two-argument overload with both arguments null
     * (NOTE(review): presumably "no additional packet extensions" and "no
     * collector for returned extensions" — confirm against the overload).
     *
     * @return List of {@link Affiliation}
     * @throws NoResponseException
     * @throws XMPPErrorException
     * @throws NotConnectedException
     * @throws InterruptedException
     */
    public List<Affiliation> getAffiliations() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        return getAffiliations(null, null);
    }
}
public class OutputManager { /** * Starts every associated OutputExtension * @ param outputPlugin the OutputPlugin to generate the Data for * @ param t the argument or null * @ param event the Event to generate for * @ param < T > the type of the argument * @ param < X > the return type * @ return a List of Future - Objects */ public < T , X > List < CompletableFuture < X > > generateAllOutputExtensions ( OutputPluginModel < T , X > outputPlugin , T t , EventModel event ) { } }
IdentifiableSet < OutputExtensionModel < ? , ? > > extensions = outputExtensions . get ( outputPlugin . getID ( ) ) ; if ( extensions == null ) return new ArrayList < > ( ) ; return filterType ( extensions , outputPlugin ) . stream ( ) . map ( extension -> { try { // noinspection unchecked return ( OutputExtensionModel < X , T > ) extension ; } catch ( ClassCastException e ) { return null ; } } ) . filter ( Objects :: nonNull ) . filter ( outputExtension -> outputExtension . canRun ( event ) ) . map ( extension -> submit ( ( ) -> extension . generate ( event , t ) ) ) . collect ( Collectors . toList ( ) ) ;
public class CmsBinaryPreviewContent {

    /**
     * Creates the list item for the resource information bean.<p>
     *
     * Builds an info bean (title, path, type, icons, size, optional
     * description and last-modified date), wraps it in a list item widget,
     * wires open/close handlers that shift the preview content below the
     * expanded widget, and attaches a delete button that removes the resource
     * and closes the preview.
     *
     * @param resourceInfo the resource information bean
     * @param dndHandler the drag and drop handler (may be null)
     * @return the list item widget
     */
    private CmsListItem createListItem(CmsResourceInfoBean resourceInfo, CmsDNDHandler dndHandler) {
        CmsListInfoBean infoBean = new CmsListInfoBean();
        // Prefer the Title property; fall back to the bean's own title.
        infoBean.setTitle(
            CmsStringUtil.isNotEmptyOrWhitespaceOnly(
                resourceInfo.getProperties().get(CmsClientProperty.PROPERTY_TITLE))
                ? resourceInfo.getProperties().get(CmsClientProperty.PROPERTY_TITLE)
                : resourceInfo.getTitle());
        infoBean.setSubTitle(resourceInfo.getResourcePath());
        infoBean.setResourceType(resourceInfo.getResourceType());
        infoBean.setBigIconClasses(resourceInfo.getBigIconClasses());
        infoBean.setSmallIconClasses(resourceInfo.getSmallIconClasses());
        infoBean.addAdditionalInfo(
            Messages.get().key(Messages.GUI_PREVIEW_LABEL_SIZE_0), resourceInfo.getSize());
        // Optional rows: only shown when the data is present.
        if (resourceInfo.getDescription() != null) {
            infoBean.addAdditionalInfo(
                Messages.get().key(Messages.GUI_PREVIEW_LABEL_DESCRIPTION_0),
                resourceInfo.getDescription());
        }
        if (resourceInfo.getLastModified() != null) {
            infoBean.addAdditionalInfo(
                Messages.get().key(Messages.GUI_PREVIEW_LABEL_DATEMODIFIED_0),
                CmsDateTimeUtil.getDate(resourceInfo.getLastModified(), Format.MEDIUM));
        }
        CmsListItemWidget itemWidget = new CmsListItemWidget(infoBean);
        // On expand, push the preview content down below the expanded widget.
        itemWidget.addOpenHandler(new OpenHandler<CmsListItemWidget>() {

            public void onOpen(OpenEvent<CmsListItemWidget> event) {

                int widgetHeight = event.getTarget().getOffsetHeight();
                m_previewContent.getElement().getStyle().setTop(12 + widgetHeight, Unit.PX);
            }
        });
        // On collapse, restore the preview content position.
        itemWidget.addCloseHandler(new CloseHandler<CmsListItemWidget>() {

            public void onClose(CloseEvent<CmsListItemWidget> event) {

                m_previewContent.getElement().getStyle().clearTop();
            }
        });
        CmsListItem result = new CmsListItem(itemWidget);
        CmsPushButton button = CmsResultListItem.createDeleteButton();
        if (dndHandler != null) {
            result.initMoveHandle(dndHandler);
        }
        CmsResultsTab resultsTab = m_binaryPreviewHandler.getGalleryDialog().getResultsTab();
        final DeleteHandler deleteHandler = resultsTab.makeDeleteHandler(resourceInfo.getResourcePath());
        // Delete the resource, then close the preview.
        ClickHandler handler = new ClickHandler() {

            public void onClick(ClickEvent event) {

                deleteHandler.onClick(event);
                m_binaryPreviewHandler.closePreview();
            }
        };
        button.addClickHandler(handler);
        itemWidget.addButton(button);
        return result;
    }
}
public class GraphVizHelper {

    /**
     * Invokes the external process "neato" from the GraphViz package.
     * Attention: this spawns a sub-process!
     *
     * @param sFileType
     *            The file type to be generated. E.g. "png" - see neato help for
     *            details. May neither be <code>null</code> nor empty.
     * @param sDOT
     *            The DOT file to be converted to an image. May neither be
     *            <code>null</code> nor empty.
     * @return The byte buffer that keeps the converted image. Never
     *         <code>null</code>.
     * @throws IOException
     *             In case some IO error occurs
     * @throws InterruptedException
     *             If the sub-process did not terminate correctly!
     */
    @Nonnull
    public static NonBlockingByteArrayOutputStream getGraphAsImageWithGraphVizNeato(
            @Nonnull @Nonempty final String sFileType,
            @Nonnull final String sDOT) throws IOException, InterruptedException {
        ValueEnforcer.notEmpty(sFileType, "FileType");
        ValueEnforcer.notEmpty(sDOT, "DOT");

        // stderr is intentionally NOT merged into stdout, so error text cannot
        // corrupt the binary image data.
        final ProcessBuilder aPB = new ProcessBuilder("neato", "-T" + sFileType).redirectErrorStream(false);
        final Process p = aPB.start();

        // Set neato stdin.
        // NOTE(review): writing all of stdin before reading stdout can deadlock
        // if neato fills its output pipe while we are still writing a very
        // large DOT document — consider writing from a separate thread; confirm
        // expected input sizes.
        p.getOutputStream().write(sDOT.getBytes(StandardCharsets.UTF_8));
        p.getOutputStream().close();

        // Read neato stdout completely, then wait for process exit.
        final NonBlockingByteArrayOutputStream aBAOS = new NonBlockingByteArrayOutputStream();
        StreamHelper.copyInputStreamToOutputStream(p.getInputStream(), aBAOS);
        p.waitFor();
        return aBAOS;
    }
}
public class ApimanPathUtils { /** * Join endpoint and path with sensible / behaviour . * @ param endpoint the endpoint * @ param path the destination ( path ) * @ return the joined endpoint + destination . */ public static String join ( String endpoint , String path ) { } }
if ( endpoint == null || endpoint . isEmpty ( ) ) return path ; if ( path == null || path . isEmpty ( ) ) return endpoint ; if ( StringUtils . endsWith ( endpoint , "/" ) && path . startsWith ( "/" ) ) { return endpoint + path . substring ( 1 ) ; } else if ( StringUtils . endsWith ( endpoint , "/" ) ^ path . startsWith ( "/" ) ) { return endpoint + path ; } return endpoint + "/" + path ;
public class FragmentBuilder {

    /**
     * This method pops the latest node from the trace fragment hierarchy.
     *
     * When the fragment is in suppression mode, suppressed nodes are popped
     * from the dedicated suppressed stack first; an empty suppressed stack
     * means the node that started suppression has itself been popped, so
     * suppression mode is cancelled and the normal stack is used.
     *
     * @param cls The type of node to pop
     * @param uri The optional uri to match
     * @return The node
     */
    public Node popNode(Class<? extends Node> cls, String uri) {
        // All stack manipulation is guarded by the node-stack monitor.
        synchronized (nodeStack) {
            // Check if fragment is in suppression mode
            if (suppress) {
                if (!suppressedNodeStack.isEmpty()) {
                    // Check if node is on the suppressed stack
                    Node suppressed = popNode(suppressedNodeStack, cls, uri);
                    if (suppressed != null) {
                        // Popped node from suppressed stack
                        return suppressed;
                    }
                    // No match on the suppressed stack: fall through to the
                    // normal stack below.
                } else {
                    // If suppression parent popped, then cancel the suppress mode
                    suppress = false;
                }
            }
            return popNode(nodeStack, cls, uri);
        }
    }
}
public class StreamEx {

    /**
     * Returns a {@link NavigableMap} whose keys and values are the result of
     * applying the provided mapping functions to the input elements.
     *
     * This is a <a href="package-summary.html#StreamOps">terminal</a>
     * operation. If the mapped keys contain duplicates (according to
     * {@link Object#equals(Object)}), the value mapping function is applied to
     * each equal element, and the results are merged using the provided merge
     * function. The returned {@code NavigableMap} is guaranteed to be
     * modifiable.
     *
     * @param <K> the output type of the key mapping function
     * @param <V> the output type of the value mapping function
     * @param keyMapper a mapping function to produce keys
     * @param valMapper a mapping function to produce values
     * @param mergeFunction a merge function, used to resolve collisions between
     *        values associated with the same key, as supplied to
     *        {@link Map#merge(Object, Object, BiFunction)}
     * @return a {@code NavigableMap} of mapped keys to (merged) mapped values
     * @see Collectors#toMap(Function, Function, BinaryOperator)
     * @see #toNavigableMap(Function, Function)
     * @since 0.6.5
     */
    public <K, V> NavigableMap<K, V> toNavigableMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends V> valMapper, BinaryOperator<V> mergeFunction) {
        // TreeMap::new supplies the NavigableMap; rawCollect bypasses any
        // short-circuiting wrapper for a plain terminal collect.
        return rawCollect(Collectors.toMap(keyMapper, valMapper, mergeFunction, TreeMap::new));
    }
}
public class Version { /** * Loads the version info from version . properties . */ @ SuppressWarnings ( "nls" ) private void load ( ) { } }
URL url = Version . class . getResource ( "version.properties" ) ; if ( url == null ) { this . versionString = "Unknown" ; this . versionDate = new Date ( ) . toString ( ) ; } else { allProperties = new Properties ( ) ; try ( InputStream is = url . openStream ( ) ) { allProperties . load ( is ) ; this . versionString = allProperties . getProperty ( "version" , "Unknown" ) ; this . versionDate = allProperties . getProperty ( "date" , new Date ( ) . toString ( ) ) ; this . vcsDescribe = allProperties . getProperty ( "git.commit.id.describe" , "Non-Git Build" ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } }
public class CollectionUtil { /** * Adds all items returned by the enumeration to the supplied collection * and returns the supplied collection . */ @ ReplacedBy ( "com.google.common.collect.Iterators#addAll(Collection, com.google.common.collect.Iterators#forEnumeration(Enumeration))" ) public static < T , C extends Collection < T > > C addAll ( C col , Enumeration < ? extends T > enm ) { } }
while ( enm . hasMoreElements ( ) ) { col . add ( enm . nextElement ( ) ) ; } return col ;
public class JsonConfig { /** * Sets the current collection type used for collection transformations . < br > * [ JSON - & gt ; Java ] * @ param collectionType the target collection class for conversion */ public void setCollectionType ( Class collectionType ) { } }
if ( collectionType != null ) { if ( ! Collection . class . isAssignableFrom ( collectionType ) ) { throw new JSONException ( "The configured collectionType is not a Collection: " + collectionType . getName ( ) ) ; } this . collectionType = collectionType ; } else { collectionType = DEFAULT_COLLECTION_TYPE ; }
public class SortOperationFactory { /** * Creates a valid { @ link SortTableOperation } operation . * < p > < b > NOTE : < / b > if the collation is not explicitly specified for any expression , it is wrapped in a * default ascending order * @ param orders expressions describing order , * @ param child relational expression on top of which to apply the sort operation * @ return valid sort operation */ public TableOperation createSort ( List < Expression > orders , TableOperation child ) { } }
failIfStreaming ( ) ; List < Expression > convertedOrders = orders . stream ( ) . map ( f -> f . accept ( orderWrapper ) ) . collect ( Collectors . toList ( ) ) ; return new SortTableOperation ( convertedOrders , child ) ;
public class Utils { /** * Get the Device ' s GMT Offset * @ returnthe gmt offset in hours */ public static int getGMTOffset ( ) { } }
Calendar now = Calendar . getInstance ( ) ; return ( now . get ( Calendar . ZONE_OFFSET ) + now . get ( Calendar . DST_OFFSET ) ) / 3600000 ;
public class SplittingBAMIndexer {

    /**
     * Write the given virtual offset to the index. This method is for internal
     * use only.
     *
     * @param virtualOffset virtual file pointer
     * @throws IOException if writing to the underlying output stream fails
     */
    public void writeVirtualOffset(long virtualOffset) throws IOException {
        // NOTE(review): lb appears to be a LongBuffer view over byteBuffer, so
        // putting the long at index 0 fills byteBuffer's backing array with the
        // offset's bytes before it is written out — confirm field declarations.
        lb.put(0, virtualOffset);
        out.write(byteBuffer.array());
    }
}
public class RingBuffer { /** * Allows three user supplied arguments per event . * @ param translator The user specified translation for the event * @ param batchStartsAt The first element of the array which is within the batch . * @ param batchSize The number of elements in the batch . * @ param arg0 An array of user supplied arguments , one element per event . * @ param arg1 An array of user supplied arguments , one element per event . * @ param arg2 An array of user supplied arguments , one element per event . * @ see # publishEvents ( EventTranslator [ ] ) */ public < A , B , C > void publishEvents ( EventTranslatorThreeArg < E , A , B , C > translator , int batchStartsAt , int batchSize , A [ ] arg0 , B [ ] arg1 , C [ ] arg2 ) { } }
checkBounds ( arg0 , arg1 , arg2 , batchStartsAt , batchSize ) ; final long finalSequence = sequencer . next ( batchSize ) ; translateAndPublishBatch ( translator , arg0 , arg1 , arg2 , batchStartsAt , batchSize , finalSequence ) ;
public class TargetOutrankShareBiddingScheme {

    /**
     * Gets the maxCpcBidCeiling value for this TargetOutrankShareBiddingScheme.
     *
     * @return maxCpcBidCeiling the ceiling on max CPC bids (may be null if unset)
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Money getMaxCpcBidCeiling() {
        return maxCpcBidCeiling;
    }
}
public class OutputQuartzHelper { /** * Gets the simple trigger . * @ param timeUnit the time unit * @ param timeInterval the time interval * @ return the simple trigger */ public Trigger getSimpleTrigger ( TimeUnit timeUnit , int timeInterval ) { } }
SimpleScheduleBuilder simpleScheduleBuilder = null ; simpleScheduleBuilder = SimpleScheduleBuilder . simpleSchedule ( ) ; switch ( timeUnit ) { case MILLISECONDS : simpleScheduleBuilder . withIntervalInMilliseconds ( timeInterval ) . repeatForever ( ) ; break ; case SECONDS : simpleScheduleBuilder . withIntervalInSeconds ( timeInterval ) . repeatForever ( ) ; break ; case MINUTES : simpleScheduleBuilder . withIntervalInMinutes ( timeInterval ) . repeatForever ( ) ; break ; case HOURS : simpleScheduleBuilder . withIntervalInHours ( timeInterval ) . repeatForever ( ) ; break ; case DAYS : simpleScheduleBuilder . withIntervalInHours ( timeInterval * 24 ) . repeatForever ( ) ; break ; default : simpleScheduleBuilder . withIntervalInSeconds ( 1 ) . repeatForever ( ) ; // default 1 sec } Trigger simpleTrigger = TriggerBuilder . newTrigger ( ) . withSchedule ( simpleScheduleBuilder ) . build ( ) ; return simpleTrigger ;
public class ManifestRest {

    /**
     * Generates a manifest file asynchronously and uploads it to DuraCloud.
     *
     * The destination URI (in the admin space, with a timestamped content id)
     * is computed synchronously and returned immediately; the actual
     * write-to-disk and upload happen on a background executor. Upload
     * failures are logged, not propagated to the caller.
     *
     * @param account the account name
     * @param spaceId the space the manifest describes
     * @param storeId the storage provider id
     * @param format the manifest format
     * @return the URI where the generated manifest will be stored
     */
    private URI generateAsynchronously(String account, String spaceId, String storeId, String format) throws Exception {
        StorageProviderType providerType = getStorageProviderType(storeId);
        InputStream manifest = manifestResource.getManifest(account, storeId, spaceId, format);
        // Content id embeds space, provider and a timestamp; ".gz" because the
        // manifest is written gzip-compressed below.
        String contentId = MessageFormat.format(
            "generated-manifests/manifest-{0}_{1}_{2}.txt{3}",
            spaceId,
            providerType.name().toLowerCase(),
            DateUtil.convertToString(System.currentTimeMillis(), DateFormat.PLAIN_FORMAT),
            ".gz");
        String adminSpace = "x-duracloud-admin";
        URI uri = buildURI(adminSpace, contentId);
        StorageProvider provider = storageProviderFactory.getStorageProvider();
        // Background task: persist the stream and upload it with retries.
        executor.execute(() -> {
            try {
                boolean gzip = true;
                // write file to disk
                File file = IOUtil.writeStreamToFile(manifest, gzip);
                // upload to the default storage provider with retries
                uploadManifestToDefaultStorageProvider(format, adminSpace, contentId, file, provider, gzip);
            } catch (Exception ex) {
                // Best-effort: the caller already has the URI; just log failures.
                log.error("failed to generate manifest for space: spaceId="
                        + spaceId + ", storeId=" + storeId + " : " + ex.getMessage(), ex);
            }
        });
        return uri;
    }
}
public class AbstractEventBuilder { /** * Creates a new camunda input parameter extension element with the * given name and value . * @ param name the name of the input parameter * @ param value the value of the input parameter * @ return the builder object */ public B camundaInputParameter ( String name , String value ) { } }
CamundaInputOutput camundaInputOutput = getCreateSingleExtensionElement ( CamundaInputOutput . class ) ; CamundaInputParameter camundaInputParameter = createChild ( camundaInputOutput , CamundaInputParameter . class ) ; camundaInputParameter . setCamundaName ( name ) ; camundaInputParameter . setTextContent ( value ) ; return myself ;
public class Introspector {

    /**
     * Looks up a FastMethod for the given class, method name and parameter
     * types; results are cached.
     *
     * @param clazz the declaring class
     * @param methodName the method name
     * @param parameterTypes the method parameter types
     * @return the cached or freshly resolved FastMethod
     */
    public FastMethod getFastMethod(Class<?> clazz, String methodName, Class... parameterTypes) {
        String clazzName = clazz.getName();
        String methodKey = buildMethodKey(clazzName, methodName, parameterTypes);
        FastMethod method = fastMethodCache.get(methodKey);
        if (null == method) {
            // Analyse clazz once; this populates/refreshes fastMethodCache.
            getFastClass(clazz);
            return fastMethodCache.get(methodKey);
        } else {
            return method;
        }
    }
}
public class ClusterLocator { /** * Locates the local address of the nearest group node if one exists . * @ param cluster The cluster in which to search for the node . * @ param group The group to search . * @ param doneHandler A handler to be called once the address has been located . */ public void locateGroup ( String cluster , String group , Handler < AsyncResult < String > > doneHandler ) { } }
locateGroup ( String . format ( "%s.%s" , cluster , group ) , doneHandler ) ;
public class Visibility {

    /**
     * Subclasses should override this method or
     * {@link #onDisappear(ViewGroup, View, TransitionValues, TransitionValues)}
     * if they need to create an Animator when targets disappear. The method
     * should only be called by the Visibility class; it is not intended to be
     * called from external classes.
     *
     * <p>The default implementation attempts to find a View to pass to
     * {@link #onDisappear(ViewGroup, View, TransitionValues, TransitionValues)},
     * based on the View's situation in the hierarchy: a View simply removed
     * from its parent is placed in a {@link android.view.ViewGroupOverlay}; a
     * View changed to {@link View#GONE} or {@link View#INVISIBLE} is kept
     * visible for the duration of the animation. A view that is no longer in
     * the end-scene hierarchy but still has a parent is generally left alone,
     * except when the previous hierarchy was replaced by one with the same id
     * and removing views is allowed.</p>
     *
     * @param sceneRoot The root of the transition hierarchy
     * @param startValues The target values in the start scene
     * @param startVisibility The target visibility in the start scene
     * @param endValues The target values in the end scene
     * @param endVisibility The target visibility in the end scene
     * @return An Animator to be started at the appropriate time in the overall
     *         transition for this scene change. A null value means no
     *         animation should be run.
     */
    @Nullable
    public Animator onDisappear(@NonNull final ViewGroup sceneRoot, @Nullable TransitionValues startValues, int startVisibility, @Nullable TransitionValues endValues, int endVisibility) {
        // Nothing to do unless this transition handles disappearing targets.
        if ((mMode & MODE_OUT) != MODE_OUT) {
            return null;
        }
        View startView = (startValues != null) ? startValues.view : null;
        View endView = (endValues != null) ? endValues.view : null;
        View overlayView = null;
        View viewToKeep = null;
        boolean reusingCreatedOverlayView = false;
        boolean createOverlayFromStartView = false;
        // Decide the animation strategy: overlay a detached view, or keep a
        // still-attached view visible while it animates out.
        if (endView == null || endView.getParent() == null) {
            if (endView != null) {
                // endView was removed from its parent - add it to the overlay
                overlayView = endView;
            } else if (startView != null) {
                createOverlayFromStartView = true;
            }
        } else {
            // visibility change
            if (endVisibility == View.INVISIBLE) {
                viewToKeep = endView;
            } else {
                // Becoming GONE
                if (startView == endView || startView == null) {
                    viewToKeep = endView;
                } else {
                    createOverlayFromStartView = true;
                }
            }
        }
        if (createOverlayFromStartView) {
            // endView does not exist. Use startView only under certain
            // conditions, because placing a view in an overlay necessitates
            // it being removed from its current parent
            if (startView.getTag(R.id.overlay_view) != null) {
                // we've already created overlay for the start view.
                // it means that we are applying two visibility
                // transitions for the same view
                overlayView = (View) startView.getTag(R.id.overlay_view);
                reusingCreatedOverlayView = true;
            } else if (startView.getParent() == null) {
                // no parent - safe to use
                overlayView = startView;
            } else if (startView.getParent() instanceof View) {
                View startParent = (View) startView.getParent();
                TransitionValues startParentValues = getTransitionValues(startParent, true);
                TransitionValues endParentValues = getMatchedTransitionValues(startParent, true);
                VisibilityInfo parentVisibilityInfo =
                        getVisibilityChangeInfo(startParentValues, endParentValues);
                if (!parentVisibilityInfo.visibilityChange) {
                    // Parent does not change visibility: animate a snapshot copy.
                    overlayView = TransitionUtils.copyViewImage(sceneRoot, startView, startParent);
                } else if (startParent.getParent() == null) {
                    int id = startParent.getId();
                    if (id != View.NO_ID && sceneRoot.findViewById(id) != null && mCanRemoveViews) {
                        // no parent, but the parent hierarchy has been replaced
                        // by a new hierarchy with the same id and it is safe to
                        // un-parent startView
                        overlayView = startView;
                    }
                }
            }
        }
        if (overlayView != null && startValues != null) {
            // TODO: Need to do this for general case of adding to overlay
            final int[] screenLoc = (int[]) startValues.values.get(PROPNAME_SCREEN_LOCATION);
            if (!reusingCreatedOverlayView) {
                ViewGroupOverlayUtils.addOverlay(sceneRoot, overlayView, screenLoc[0], screenLoc[1]);
            }
            Animator animator = onDisappear(sceneRoot, overlayView, startValues, endValues);
            if (animator == null) {
                // No animation: undo the overlay insertion immediately.
                ViewGroupOverlayUtils.removeOverlay(sceneRoot, overlayView);
            } else if (!reusingCreatedOverlayView) {
                final View finalOverlayView = overlayView;
                final View finalStartView = startView;
                // Tag the start view so a second transition can reuse this overlay.
                finalStartView.setTag(R.id.overlay_view, finalOverlayView);
                addListener(new TransitionListenerAdapter() {
                    @Override
                    public void onTransitionPause(@NonNull Transition transition) {
                        ViewGroupOverlayUtils.removeOverlay(sceneRoot, finalOverlayView);
                    }

                    @Override
                    public void onTransitionResume(@NonNull Transition transition) {
                        if (finalOverlayView.getParent() == null) {
                            ViewGroupOverlayUtils.addOverlay(sceneRoot, finalOverlayView,
                                    screenLoc[0], screenLoc[1]);
                        } else {
                            // The view was re-parented while paused: abort.
                            cancel();
                        }
                    }

                    @Override
                    public void onTransitionEnd(@NonNull Transition transition) {
                        // Clean up the tag and the overlay when the transition ends.
                        finalStartView.setTag(R.id.overlay_view, null);
                        ViewGroupOverlayUtils.removeOverlay(sceneRoot, finalOverlayView);
                        transition.removeListener(this);
                    }
                });
            }
            return animator;
        }
        if (viewToKeep != null) {
            int originalVisibility = -1;
            final boolean isForcedVisibility = mForcedStartVisibility != -1
                    || mForcedEndVisibility != -1;
            if (!isForcedVisibility) {
                // Temporarily force the view visible while it animates out.
                originalVisibility = viewToKeep.getVisibility();
                ViewUtils.setTransitionVisibility(viewToKeep, View.VISIBLE);
            }
            Animator animator = onDisappear(sceneRoot, viewToKeep, startValues, endValues);
            if (animator != null) {
                // Restore the end visibility when the animation completes.
                DisappearListener disappearListener =
                        new DisappearListener(viewToKeep, endVisibility, isForcedVisibility);
                animator.addListener(disappearListener);
                AnimatorUtils.addPauseListener(animator, disappearListener);
                addListener(disappearListener);
            } else if (!isForcedVisibility) {
                // No animation: restore the visibility we overrode above.
                ViewUtils.setTransitionVisibility(viewToKeep, originalVisibility);
            }
            return animator;
        }
        return null;
    }
}
public class RestClusterClient { /** * Requests the job details . * @ param jobId The job id * @ return Job details */ public CompletableFuture < JobDetailsInfo > getJobDetails ( JobID jobId ) { } }
final JobDetailsHeaders detailsHeaders = JobDetailsHeaders . getInstance ( ) ; final JobMessageParameters params = new JobMessageParameters ( ) ; params . jobPathParameter . resolve ( jobId ) ; return sendRequest ( detailsHeaders , params ) ;
public class IniFile { /** * Helper method to check the existance of a file . * @ param the full path and name of the file to be checked . * @ return true if file exists , false otherwise . */ private boolean checkFile ( String pstrFile ) { } }
boolean blnRet = false ; File objFile = null ; try { objFile = new CSFile ( pstrFile ) ; blnRet = ( objFile . exists ( ) && objFile . isFile ( ) ) ; } catch ( Exception e ) { blnRet = false ; } finally { if ( objFile != null ) objFile = null ; } return blnRet ;
public class Proxies { /** * Returns true if the given object was created via { @ link Proxies } . */ public static boolean isForgeProxy ( Object object ) { } }
if ( object != null ) { Class < ? > [ ] interfaces = object . getClass ( ) . getInterfaces ( ) ; if ( interfaces != null ) { for ( Class < ? > iface : interfaces ) { if ( iface . getName ( ) . equals ( ForgeProxy . class . getName ( ) ) ) { return true ; } } } } return false ;
public class DataXceiver {
    /**
     * Read/write data from/to the DataXceiveServer.
     *
     * Reads an opcode from the socket and dispatches to the matching handler in a
     * loop, so a client can reuse the same connection for multiple operations
     * (keepalive). The loop exits on read timeout, EOF after at least one op,
     * protocol versions that do not support connection reuse, or any error.
     * The socket and input stream are always closed on exit.
     */
    public void run() {
        DataInputStream in = null;
        byte op = -1;
        int opsProcessed = 0;
        try {
            s.setTcpNoDelay(true);
            // Generous initial timeout for the first request on the connection.
            s.setSoTimeout(datanode.socketTimeout * 5);
            in = new DataInputStream(new BufferedInputStream(NetUtils.getInputStream(s), SMALL_BUFFER_SIZE));
            int stdTimeout = s.getSoTimeout();
            // We process requests in a loop, and stay around for a short timeout.
            // This optimistic behavior allows the other end to reuse connections.
            // Setting keepalive timeout to 0 to disable this behavior.
            do {
                long startNanoTime = System.nanoTime();
                long startTime = DataNode.now();
                VersionAndOpcode versionAndOpcode = new VersionAndOpcode();
                try {
                    if (opsProcessed != 0) {
                        // Subsequent ops on a reused connection use the (shorter)
                        // keepalive timeout while waiting for the next opcode.
                        assert socketKeepaliveTimeout > 0;
                        s.setSoTimeout(socketKeepaliveTimeout);
                    }
                    versionAndOpcode.readFields(in);
                    op = versionAndOpcode.getOpCode();
                } catch (InterruptedIOException ignored) {
                    // Time out while we wait for client rpc
                    break;
                } catch (IOException err) {
                    // Since we optimistically expect the next op, it's quite normal to get EOF here.
                    if (opsProcessed > 0
                            && (err instanceof EOFException || err instanceof ClosedChannelException)) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Cached " + s.toString() + " closing after " + opsProcessed + " ops");
                        }
                    } else {
                        throw err;
                    }
                    break;
                }
                // restore normal timeout.
                if (opsProcessed != 0) {
                    s.setSoTimeout(stdTimeout);
                }
                boolean local = s.getInetAddress().equals(s.getLocalAddress());
                updateCurrentThreadName("waiting for operation");
                // Make sure the xciver count is not exceeded
                int curXceiverCount = datanode.getXceiverCount();
                if (curXceiverCount > dataXceiverServer.maxXceiverCount) {
                    datanode.myMetrics.xceiverCountExceeded.inc();
                    throw new IOException("xceiverCount " + curXceiverCount
                            + " exceeds the limit of concurrent xcievers "
                            + dataXceiverServer.maxXceiverCount);
                }
                // Dispatch on the opcode; each branch also records per-op latency
                // metrics and (for reads/writes) local-vs-remote client counters.
                switch (op) {
                    case DataTransferProtocol.OP_READ_BLOCK:
                        readBlock(in, versionAndOpcode, startNanoTime);
                        datanode.myMetrics.readBlockOp.inc(DataNode.now() - startTime);
                        if (local) datanode.myMetrics.readsFromLocalClient.inc();
                        else datanode.myMetrics.readsFromRemoteClient.inc();
                        break;
                    case DataTransferProtocol.OP_READ_BLOCK_ACCELERATOR:
                        readBlockAccelerator(in, versionAndOpcode);
                        datanode.myMetrics.readBlockOp.inc(DataNode.now() - startTime);
                        if (local) datanode.myMetrics.readsFromLocalClient.inc();
                        else datanode.myMetrics.readsFromRemoteClient.inc();
                        break;
                    case DataTransferProtocol.OP_WRITE_BLOCK:
                        writeBlock(in, versionAndOpcode);
                        datanode.myMetrics.writeBlockOp.inc(DataNode.now() - startTime);
                        if (local) datanode.myMetrics.writesFromLocalClient.inc();
                        else datanode.myMetrics.writesFromRemoteClient.inc();
                        break;
                    case DataTransferProtocol.OP_READ_METADATA:
                        readMetadata(in, versionAndOpcode);
                        datanode.myMetrics.readMetadataOp.inc(DataNode.now() - startTime);
                        break;
                    case DataTransferProtocol.OP_REPLACE_BLOCK:
                        // for balancing purpose; send to a destination
                        replaceBlock(in, versionAndOpcode);
                        datanode.myMetrics.replaceBlockOp.inc(DataNode.now() - startTime);
                        break;
                    case DataTransferProtocol.OP_COPY_BLOCK:
                        // for balancing purpose; send to a proxy source
                        copyBlock(in, versionAndOpcode);
                        datanode.myMetrics.copyBlockOp.inc(DataNode.now() - startTime);
                        break;
                    case DataTransferProtocol.OP_BLOCK_CHECKSUM:
                        // get the checksum of a block
                        getBlockChecksum(in, versionAndOpcode);
                        datanode.myMetrics.blockChecksumOp.inc(DataNode.now() - startTime);
                        break;
                    case DataTransferProtocol.OP_BLOCK_CRC:
                        // get the checksum of a block
                        getBlockCrc(in, versionAndOpcode);
                        datanode.myMetrics.blockChecksumOp.inc(DataNode.now() - startTime);
                        break;
                    case DataTransferProtocol.OP_APPEND_BLOCK:
                        appendBlock(in, versionAndOpcode);
                        datanode.myMetrics.appendBlockOp.inc(DataNode.now() - startTime);
                        if (local) datanode.myMetrics.writesFromLocalClient.inc();
                        else datanode.myMetrics.writesFromRemoteClient.inc();
                        break;
                    default:
                        throw new IOException("Unknown opcode " + op + " in data stream");
                }
                ++opsProcessed;
                // Stop reusing the connection for protocol versions that predate
                // reuse support, or when reuse is disabled.
                if (versionAndOpcode.getDataTransferVersion()
                        < DataTransferProtocol.READ_REUSE_CONNECTION_VERSION
                        || !reuseConnection) {
                    break;
                }
            } while (s.isConnected() && socketKeepaliveTimeout > 0);
        } catch (Throwable t) {
            if (op == DataTransferProtocol.OP_READ_BLOCK && t instanceof SocketTimeoutException) {
                // Ignore SocketTimeoutException for reading.
                // This usually means that the client who's reading data from the DataNode has exited.
                if (ClientTraceLog.isInfoEnabled()) {
                    ClientTraceLog.info(datanode.getDatanodeInfo() + ":DataXceiver" + " (IGNORED) "
                            + StringUtils.stringifyException(t));
                }
            } else {
                if (op == DataTransferProtocol.OP_READ_BLOCK
                        || op == DataTransferProtocol.OP_READ_BLOCK_ACCELERATOR) {
                    datanode.myMetrics.blockReadFailures.inc();
                }
                datanode.myMetrics.opFailures.inc();
                LOG.error(datanode.getDatanodeInfo() + ":DataXceiver, at " + s.toString(), t);
            }
        } finally {
            LOG.debug(datanode.getDatanodeInfo() + ":Number of active connections is: "
                    + datanode.getXceiverCount());
            updateCurrentThreadName("Cleaning up");
            IOUtils.closeStream(in);
            IOUtils.closeSocket(s);
            dataXceiverServer.childSockets.remove(s);
        }
    }
}
public class TaskOperations {
    /**
     * Lists the {@link SubtaskInformation subtasks} of the specified task.
     *
     * @param jobId
     *            The ID of the job containing the task.
     * @param taskId
     *            The ID of the task.
     * @return A list of {@link SubtaskInformation} objects.
     * @throws BatchErrorException
     *             Exception thrown when an error response is received from the
     *             Batch service.
     * @throws IOException
     *             Exception thrown when there is an error in
     *             serialization/deserialization of data sent to/received from the
     *             Batch service.
     */
    public List<SubtaskInformation> listSubtasks(String jobId, String taskId)
            throws BatchErrorException, IOException {
        // Delegates to the four-argument overload; the two nulls presumably mean
        // "no detail level / no additional behaviors" — confirm against the overload.
        return listSubtasks(jobId, taskId, null, null);
    }
}
public class CaretSelectionBindImpl { /** * Assumes that { @ code getArea ( ) . getLength ! = 0 } is true and { @ link BreakIterator # setText ( String ) } has been called */ private int calculatePositionViaBreakingBackwards ( int numOfBreaks , BreakIterator breakIterator , int position ) { } }
breakIterator . preceding ( position ) ; for ( int i = 1 ; i < numOfBreaks ; i ++ ) { breakIterator . previous ( ) ; } return breakIterator . current ( ) ;
public class SVGUtil { /** * main */ private static boolean viewSVG ( File file ) throws IOException { } }
if ( "Mac OS X" . equals ( System . getProperty ( "os.name" ) ) ) { Runtime . getRuntime ( ) . exec ( String . format ( "open -a /Applications/Safari.app %s" , file . getAbsoluteFile ( ) ) ) ; return true ; } return false ;
public class DescribeGlobalTableRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * Writes the request's global table name to the protocol marshaller using the
     * pre-built field binding; any marshalling failure is wrapped in an
     * {@code SdkClientException}.
     */
    public void marshall(DescribeGlobalTableRequest describeGlobalTableRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeGlobalTableRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(describeGlobalTableRequest.getGlobalTableName(), GLOBALTABLENAME_BINDING);
        } catch (Exception e) {
            // Wrap all marshalling failures uniformly for the caller.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Currency {
    /**
     * Attempt to parse the given string as a currency, either as a
     * display name in the given locale, or as a 3-letter ISO 4217
     * code. If multiple display names match, then the longest one is
     * selected. If both a display name and a 3-letter ISO code
     * match, then the display name is preferred, unless its length
     * is less than 3.
     *
     * @param locale the locale of the display names to match
     * @param text the text to parse
     * @param type parse against currency type: LONG_NAME only or not
     * @param pos input-output position; on input, the position within
     * text to match; must have 0 &lt;= pos.getIndex() &lt; text.length();
     * on output, the position after the last matched character. If
     * the parse fails, the position is unchanged upon output.
     * @return the ISO 4217 code, as a string, of the best match, or
     * null if there is no match
     * @deprecated This API is ICU internal only.
     * @hide original deprecated declaration
     * @hide draft/provisional/internal are hidden on Android
     */
    @Deprecated
    public static String parse(ULocale locale, String text, int type, ParsePosition pos) {
        // Per-locale cache of [symbol trie, name trie]. Index 0 = symbols
        // (case-sensitive trie), index 1 = display names (case-insensitive trie).
        List<TextTrieMap<CurrencyStringInfo>> currencyTrieVec = CURRENCY_NAME_CACHE.get(locale);
        if (currencyTrieVec == null) {
            // Build and cache the tries on first use for this locale.
            // NOTE(review): this check-then-put is not atomic; concurrent callers may
            // build the tries redundantly — harmless if CURRENCY_NAME_CACHE tolerates it.
            TextTrieMap<CurrencyStringInfo> currencyNameTrie = new TextTrieMap<CurrencyStringInfo>(true);
            TextTrieMap<CurrencyStringInfo> currencySymbolTrie = new TextTrieMap<CurrencyStringInfo>(false);
            currencyTrieVec = new ArrayList<TextTrieMap<CurrencyStringInfo>>();
            currencyTrieVec.add(currencySymbolTrie);
            currencyTrieVec.add(currencyNameTrie);
            setupCurrencyTrieVec(locale, currencyTrieVec);
            CURRENCY_NAME_CACHE.put(locale, currencyTrieVec);
        }
        int maxLength = 0;
        String isoResult = null;
        // look for the names
        TextTrieMap<CurrencyStringInfo> currencyNameTrie = currencyTrieVec.get(1);
        CurrencyNameResultHandler handler = new CurrencyNameResultHandler();
        currencyNameTrie.find(text, pos.getIndex(), handler);
        isoResult = handler.getBestCurrencyISOCode();
        maxLength = handler.getBestMatchLength();
        if (type != Currency.LONG_NAME) {
            // not long name only: also try symbols, preferring them only on a
            // strictly longer match (ties go to the display name, per contract).
            TextTrieMap<CurrencyStringInfo> currencySymbolTrie = currencyTrieVec.get(0);
            handler = new CurrencyNameResultHandler();
            currencySymbolTrie.find(text, pos.getIndex(), handler);
            if (handler.getBestMatchLength() > maxLength) {
                isoResult = handler.getBestCurrencyISOCode();
                maxLength = handler.getBestMatchLength();
            }
        }
        // Advance past the matched text; a failed parse has maxLength == 0,
        // leaving the position unchanged as documented.
        int start = pos.getIndex();
        pos.setIndex(start + maxLength);
        return isoResult;
    }
}
public class MainFrame { /** * Sets the title of the main window . * The actual title set is the given { @ code title } followed by the program name and version . * @ see Constant # PROGRAM _ NAME * @ see Constant # PROGRAM _ VERSION * @ deprecated as of 2.7.0 , replaced by { @ link # setTitle ( Session ) } */ @ Override @ Deprecated public void setTitle ( String title ) { } }
StringBuilder strBuilder = new StringBuilder ( ) ; if ( title != null && ! title . isEmpty ( ) ) { strBuilder . append ( title ) ; strBuilder . append ( " - " ) ; } strBuilder . append ( Constant . PROGRAM_NAME ) . append ( ' ' ) . append ( Constant . PROGRAM_VERSION ) ; super . setTitle ( strBuilder . toString ( ) ) ;
public class DeploymentReflectionIndex { /** * Construct a new instance . * @ return the new instance */ public static DeploymentReflectionIndex create ( ) { } }
final SecurityManager sm = System . getSecurityManager ( ) ; if ( sm != null ) { sm . checkPermission ( ServerPermission . CREATE_DEPLOYMENT_REFLECTION_INDEX ) ; } return new DeploymentReflectionIndex ( ) ;
public class UfsJournal {
    /**
     * Transitions the journal from secondary to primary mode. The journal will apply the latest
     * journal entries to the state machine, then begin to allow writes.
     */
    public synchronized void gainPrimacy() throws IOException {
        // Preconditions encode the secondary-mode invariant: no writer yet,
        // and a tailer thread applying entries.
        Preconditions.checkState(mWriter == null, "writer must be null in secondary mode");
        Preconditions.checkState(mTailerThread != null,
                "tailer thread must not be null in secondary mode");
        // Stop the tailer and capture where it left off.
        mTailerThread.awaitTermination(true);
        long nextSequenceNumber = mTailerThread.getNextSequenceNumber();
        mTailerThread = null;
        // Apply any remaining entries written after the tailer stopped.
        nextSequenceNumber = catchUp(nextSequenceNumber);
        // Install the write path, then flip to PRIMARY (state change last, so
        // writers only see a fully-initialized journal).
        mWriter = new UfsJournalLogWriter(this, nextSequenceNumber);
        mAsyncWriter = new AsyncJournalWriter(mWriter);
        mState = State.PRIMARY;
    }
}
public class Strings { /** * Joins all non - null elements of the given < code > elements < / code > into one String . * @ param delimiter Inserted as separator between consecutive elements . * @ param elements The elements to join . * @ return A long string containing all non - null elements . */ public static String join ( final String delimiter , final Object ... elements ) { } }
final StringBuilder sb = new StringBuilder ( ) ; for ( final Object part : elements ) { if ( part == null ) { continue ; } if ( sb . length ( ) > 0 ) { sb . append ( delimiter ) ; } sb . append ( part . toString ( ) ) ; } return sb . toString ( ) ;
public class JdbcAccessImpl {
    /**
     * performs an INSERT operation against RDBMS.
     *
     * Sequence of operations: assign autoincrement sequence columns, bind values,
     * execute the insert, then read back identity columns and any stored-procedure
     * return values. The statement order matters and must not be changed.
     *
     * @param obj The Object to be inserted as a row of the underlying table.
     * @param cld ClassDescriptor providing mapping information.
     */
    public void executeInsert(ClassDescriptor cld, Object obj) throws PersistenceBrokerException {
        if (logger.isDebugEnabled()) {
            logger.debug("executeInsert: " + obj);
        }
        final StatementManagerIF sm = broker.serviceStatementManager();
        PreparedStatement stmt = null;
        try {
            stmt = sm.getInsertStatement(cld);
            if (stmt == null) {
                logger.error("getInsertStatement returned a null statement");
                throw new PersistenceBrokerException("getInsertStatement returned a null statement");
            }
            // before bind values perform autoincrement sequence columns
            assignAutoincrementSequences(cld, obj);
            sm.bindInsert(stmt, cld, obj);
            if (logger.isDebugEnabled())
                logger.debug("executeInsert: " + stmt);
            stmt.executeUpdate();
            // after insert read and assign identity columns
            assignAutoincrementIdentityColumns(cld, obj);
            // Harvest any return values.
            harvestReturnValues(cld.getInsertProcedure(), obj, stmt);
        } catch (PersistenceBrokerException e) {
            logger.error("PersistenceBrokerException during the execution of the insert: "
                    + e.getMessage(), e);
            throw e;
        } catch (SequenceManagerException e) {
            throw new PersistenceBrokerException("Error while try to assign identity value", e);
        } catch (SQLException e) {
            // Re-generate the SQL text only for the error report.
            final String sql = broker.serviceSqlGenerator().getPreparedInsertStatement(cld).getStatement();
            throw ExceptionHelper.generateException(e, sql, cld, logger, obj);
        } finally {
            // Always release the statement, even on failure.
            sm.closeResources(stmt, null);
        }
    }
}
public class CmsSimpleSearchConfigurationParser { /** * The default field facets . * @ param categoryConjunction flag , indicating if category selections in the facet should be " AND " combined . * @ return the default field facets . */ private Map < String , I_CmsSearchConfigurationFacetField > getDefaultFieldFacets ( boolean categoryConjunction ) { } }
Map < String , I_CmsSearchConfigurationFacetField > fieldFacets = new HashMap < String , I_CmsSearchConfigurationFacetField > ( ) ; fieldFacets . put ( CmsListManager . FIELD_CATEGORIES , new CmsSearchConfigurationFacetField ( CmsListManager . FIELD_CATEGORIES , null , Integer . valueOf ( 1 ) , Integer . valueOf ( 200 ) , null , "Category" , SortOrder . index , null , Boolean . valueOf ( categoryConjunction ) , null , Boolean . TRUE ) ) ; fieldFacets . put ( CmsListManager . FIELD_PARENT_FOLDERS , new CmsSearchConfigurationFacetField ( CmsListManager . FIELD_PARENT_FOLDERS , null , Integer . valueOf ( 1 ) , Integer . valueOf ( 200 ) , null , "Folders" , SortOrder . index , null , Boolean . FALSE , null , Boolean . TRUE ) ) ; return Collections . unmodifiableMap ( fieldFacets ) ;
public class Password {
    /**
     * Returns a trace-safe representation of the password: a salted SHA-256
     * digest rendered as hex, computed lazily and cached. The raw password is
     * never exposed. Falls back to {@link #toString()} if SHA-256 is unavailable,
     * and to the empty string if no password is set.
     */
    public String toTraceString() {
        // Lazily compute and cache; subsequent calls return the cached value.
        if (_traceableString == null) {
            if (_password != null) {
                try {
                    MessageDigest digester = MessageDigest.getInstance("SHA-256");
                    digester.update(SALT);
                    // Feed each char as two bytes, high byte first.
                    for (char c : _password) {
                        digester.update((byte) ((c & 0xFF00) >> 8));
                        digester.update((byte) ((c & 0x00FF)));
                    }
                    byte[] hash = digester.digest();
                    StringBuilder sb = new StringBuilder();
                    // Throw away the high nibbles of each byte to increase security and reduce the
                    // length of the trace string
                    for (byte b : hash) {
                        int i = b & 0x0F;
                        sb.append(Integer.toHexString(i));
                    }
                    _traceableString = sb.toString();
                } catch (NoSuchAlgorithmException nsae) {
                    // No FFDC Code needed - fall back on the toString implementation
                    _traceableString = toString();
                }
            } else {
                _traceableString = ""; /* not just null :-) */
            }
        }
        return _traceableString;
    }
}
public class UNode { /** * Parse the JSON text from the given character Reader and return the appropriate * UNode object . If an error occurs reading from the reader , it is passed to the * caller . The reader is closed when parsing is done . The only JSON documents we * allow are in the form : * < pre > * { " something " : [ value ] } * < / pre > * This means that when we parse the JSON , we should see an object with a single * member . The UNode returned is a MAP object whose name is the member name and whose * elements are parsed from the [ value ] . * @ param reader Character reader contain JSON text to parse . The reader is * closed when reading is complete . * @ return UNode with type = = { @ link UNode . NodeType # MAP } . * @ throws IllegalArgumentException If the JSON text is malformed or an error occurs * while reading from the reader . */ public static UNode parseJSON ( Reader reader ) throws IllegalArgumentException { } }
assert reader != null ; SajListener listener = new SajListener ( ) ; try { new JSONAnnie ( reader ) . parse ( listener ) ; } finally { Utils . close ( reader ) ; } return listener . getRootNode ( ) ;
public class SequenceBasesFix {
    /**
     * Method to delete the base n's at the beginning and end of the sequence.
     *
     * Trims leading and trailing 'n' bases (lowercase only, as written) from the
     * sequence bytes and returns the stripped range as a ByteBuffer. A sequence
     * consisting entirely of 'n' yields an empty buffer.
     */
    private ByteBuffer removeChar(Sequence sequenceObj) {
        byte[] sequenceByte = sequenceObj.getSequenceByte();
        int beginPosition = 0;
        // NOTE(review): assumes getLength() matches the byte array length — confirm.
        int endPosition = (int) sequenceObj.getLength();
        if (sequenceByte[0] == 'n') {
            // Count leading 'n' bases, recording the removed prefix in the field.
            for (byte base : sequenceByte) {
                if ('n' == (char) base) {
                    beginPosition++;
                    beginDelSequenceStr += (char) base;
                } else {
                    break;
                }
            }
            // Entire sequence was 'n': nothing remains.
            if (beginPosition == endPosition) {
                return ByteBuffer.wrap(new byte[0]);
            }
        }
        if (sequenceByte[endPosition - 1] == 'n') {
            // Count trailing 'n' bases. NOTE(review): unlike the leading trim, the
            // removed suffix is not recorded anywhere — confirm this asymmetry is intended.
            for (int i = endPosition - 1; i > 0; i--) {
                if ('n' == sequenceByte[i]) {
                    endPosition--;
                } else {
                    break;
                }
            }
        }
        // Copy out the surviving [beginPosition, endPosition) range.
        int length = (int) (endPosition - beginPosition);
        int offset = beginPosition;
        byte[] strippedSequence = new byte[length];
        System.arraycopy(sequenceObj.getSequenceByte(), offset, strippedSequence, 0, length);
        return ByteBuffer.wrap(strippedSequence, 0, strippedSequence.length);
    }
}
public class CHFWBundle {
    /**
     * DS method to remove a factory provider.
     *
     * Deregisters all channel factories contributed by the given provider from
     * the channel framework.
     *
     * @param provider the provider being removed by declarative services
     */
    protected void unsetFactoryProvider(ChannelFactoryProvider provider) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(this, tc, "Remove factory provider; " + provider);
        }
        this.chfw.deregisterFactories(provider);
    }
}
public class SearchParameter { /** * syntactic sugar */ public SearchParameterContactComponent addContact ( ) { } }
SearchParameterContactComponent t = new SearchParameterContactComponent ( ) ; if ( this . contact == null ) this . contact = new ArrayList < SearchParameterContactComponent > ( ) ; this . contact . add ( t ) ; return t ;
public class AmazonS3Client {
    /**
     * Retrieves the region of the bucket by making a HeadBucket request to us-west-1 region.
     * Currently S3 doesn't return region in a HEAD Bucket request if the bucket
     * owner has enabled bucket to accept only SigV4 requests via bucket
     * policies.
     *
     * Even when the HEAD request fails, S3 includes the bucket's region in the
     * error response header, which is why the catch block also tries to extract it.
     */
    private String getBucketRegionViaHeadRequest(String bucketName) {
        String bucketRegion = null;
        try {
            Request<HeadBucketRequest> request = createRequest(bucketName, null,
                    new HeadBucketRequest(bucketName), HttpMethodName.HEAD);
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "HeadBucket");
            HeadBucketResult result = invoke(request, new HeadBucketResultHandler(), bucketName, null, true);
            bucketRegion = result.getBucketRegion();
        } catch (AmazonS3Exception exception) {
            // The region may still be present on the error response headers.
            if (exception.getAdditionalDetails() != null) {
                bucketRegion = exception.getAdditionalDetails().get(Headers.S3_BUCKET_REGION);
            }
        }
        if (bucketRegion == null && log.isDebugEnabled()) {
            log.debug("Not able to derive region of the " + bucketName + " from the HEAD Bucket requests.");
        }
        // May be null when neither the result nor the error carried a region.
        return bucketRegion;
    }
}
public class BadAlias {
    /**
     * syck_badalias_initialize
     *
     * Stores the alias value in the Ruby object's {@code @name} instance
     * variable and returns the receiver, mirroring Ruby's initialize contract.
     */
    @JRubyMethod
    public static IRubyObject initialize(IRubyObject self, IRubyObject val) {
        ((RubyObject) self).fastSetInstanceVariable("@name", val);
        return self;
    }
}
public class FilePath {
    /**
     * Creates a zip file from this directory by only including the files that match the given glob.
     *
     * @param glob
     *            Ant style glob, like "**&#x2F;*.xml". If empty or null, this method
     *            works like {@link #createZipArchive(OutputStream)}, inserting a top-level directory into the ZIP.
     * @since 1.315
     */
    public void zip(OutputStream os, final String glob) throws IOException, InterruptedException {
        // Thin delegate: the ZIP archiver factory handles glob filtering.
        archive(ArchiverFactory.ZIP, os, glob);
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link String}{@code >}
     * for the {@code renditionFilter} element scoped to {@link GetObjectByPath}.
     */
    @XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "renditionFilter", scope = GetObjectByPath.class)
    public JAXBElement<String> createGetObjectByPathRenditionFilter(String value) {
        // NOTE(review): reuses the GetObjectOfLatestVersion QName constant — presumably
        // both elements share the same namespace/local name ("renditionFilter"), making
        // the constants interchangeable; confirm a dedicated constant doesn't exist.
        return new JAXBElement<String>(_GetObjectOfLatestVersionRenditionFilter_QNAME, String.class, GetObjectByPath.class, value);
    }
}
public class DefaultBeanManager { /** * Validate populated values based on JSR 303. * @ param bean the bean to validate * @ throws PropertyException validation error */ private void validateBean ( Object bean ) throws PropertyException { } }
if ( getValidatorFactory ( ) != null ) { Validator validator = getValidatorFactory ( ) . getValidator ( ) ; Set < ConstraintViolation < Object > > constraintViolations = validator . validate ( bean ) ; if ( ! constraintViolations . isEmpty ( ) ) { throw new PropertyException ( "Failed to validate bean: [" + constraintViolations . iterator ( ) . next ( ) . getMessage ( ) + "]" ) ; } }
public class ResolvedTypes {
    /**
     * Returns the actual (resolved) type of the given element, with ownership
     * transferred to this owner; may be null.
     */
    /* @Nullable */
    @Override
    public LightweightTypeReference getActualType(JvmIdentifiableElement identifiable) {
        // false = do not ignore expectations during resolution; see doGetActualType.
        LightweightTypeReference result = doGetActualType(identifiable, false);
        return toOwnedReference(result);
    }
}
public class AggregateContainer {
    /**
     * Performs the output aggregation and generates the {@link WorkerToMasterReports} to report back to the
     * {@link org.apache.reef.vortex.driver.VortexDriver}.
     *
     * Collecting the pending results happens under {@code stateLock}; the (possibly
     * expensive) aggregation function itself runs outside the lock.
     */
    private void aggregateTasklets(final AggregateTriggerType type) {
        final List<WorkerToMasterReport> workerToMasterReports = new ArrayList<>();
        final List<Object> results = new ArrayList<>();
        final List<Integer> aggregatedTasklets = new ArrayList<>();
        // Synchronization to prevent duplication of work on the same aggregation function on the same worker.
        synchronized (stateLock) {
            switch (type) {
                case ALARM:
                    // Time-based trigger: always drain what has accumulated.
                    aggregateTasklets(workerToMasterReports, results, aggregatedTasklets);
                    break;
                case COUNT:
                    // Count-based trigger: only drain once the threshold is met.
                    if (!aggregateOnCount()) {
                        return;
                    }
                    aggregateTasklets(workerToMasterReports, results, aggregatedTasklets);
                    break;
                default:
                    throw new RuntimeException("Unexpected aggregate type.");
            }
        }
        if (!results.isEmpty()) {
            // Run the aggregation function. A failure is reported back rather than thrown.
            try {
                final Object aggregationResult = taskletAggregationRequest.executeAggregation(results);
                workerToMasterReports.add(new TaskletAggregationResultReport(aggregatedTasklets, aggregationResult));
            } catch (final Exception e) {
                workerToMasterReports.add(new TaskletAggregationFailureReport(aggregatedTasklets, e));
            }
        }
        // Add to worker report only if there is something to report back.
        if (!workerToMasterReports.isEmpty()) {
            workerReportsQueue.addLast(kryoUtils.serialize(new WorkerToMasterReports(workerToMasterReports)));
            heartBeatTriggerManager.triggerHeartBeat();
        }
    }
}
public class EndpointBuilder {
    /**
     * {@inheritDoc}
     *
     * Builds a new {@code EndpointImpl} for the given XML qualified name parts.
     */
    @Override
    public Endpoint buildObject(String namespaceURI, String localName, String namespacePrefix) {
        return new EndpointImpl(namespaceURI, localName, namespacePrefix);
    }
}
public class DatatypeConverter { /** * Print work units . * @ param value TimeUnit instance * @ return work units value */ public static final BigInteger printWorkUnits ( TimeUnit value ) { } }
int result ; if ( value == null ) { value = TimeUnit . HOURS ; } switch ( value ) { case MINUTES : { result = 1 ; break ; } case DAYS : { result = 3 ; break ; } case WEEKS : { result = 4 ; break ; } case MONTHS : { result = 5 ; break ; } case YEARS : { result = 7 ; break ; } default : case HOURS : { result = 2 ; break ; } } return ( BigInteger . valueOf ( result ) ) ;
public class AbstractTlsDirContextAuthenticationStrategy {
    /**
     * (non-Javadoc)
     *
     * @see org.springframework.ldap.core.support.DirContextAuthenticationStrategy#processContextAfterCreation(javax.naming.directory.DirContext,
     *      java.lang.String, java.lang.String)
     *
     * Negotiates a StartTLS session on the freshly-created context, applies
     * authentication over the secured channel, and optionally wraps the context
     * in a proxy that shuts the TLS session down gracefully on close.
     */
    public final DirContext processContextAfterCreation(DirContext ctx, String userDn, String password)
            throws NamingException {
        // StartTLS is an LDAPv3 extended operation, so an LdapContext is required.
        if (ctx instanceof LdapContext) {
            final LdapContext ldapCtx = (LdapContext) ctx;
            final StartTlsResponse tlsResponse =
                    (StartTlsResponse) ldapCtx.extendedOperation(new StartTlsRequest());
            try {
                if (hostnameVerifier != null) {
                    tlsResponse.setHostnameVerifier(hostnameVerifier);
                }
                tlsResponse.negotiate(sslSocketFactory); // If null, the default SSL socket factory is used
                // Authenticate only after the channel is encrypted.
                applyAuthentication(ldapCtx, userDn, password);
                if (shutdownTlsGracefully) {
                    // Wrap the target context in a proxy to intercept any calls
                    // to 'close', so that we can shut down the TLS connection
                    // gracefully first.
                    return (DirContext) Proxy.newProxyInstance(
                            DirContextProxy.class.getClassLoader(),
                            new Class<?>[] { LdapContext.class, DirContextProxy.class },
                            new TlsAwareDirContextProxy(ldapCtx, tlsResponse));
                } else {
                    return ctx;
                }
            } catch (IOException e) {
                // Negotiation failed: release the context before propagating.
                LdapUtils.closeContext(ctx);
                throw new UncategorizedLdapException("Failed to negotiate TLS session", e);
            }
        } else {
            throw new IllegalArgumentException(
                    "Processed Context must be an LDAPv3 context, i.e. an LdapContext implementation");
        }
    }
}
public class OjbMemberTagsHandler {
    /**
     * Evaluates the body if current member has no tag with the specified name.
     *
     * @param template The body of the block tag
     * @param attributes The attributes of the template tag
     * @exception XDocletException Description of Exception
     * @doc.tag type="block"
     * @doc.param name="tagName" optional="false" description="The tag name."
     * @doc.param name="paramName" description="The parameter name. If not specified, then the raw
     *      content of the tag is returned."
     * @doc.param name="paramNum" description="The zero-based parameter number. It's used if the user
     *      used the space-separated format for specifying parameters."
     * @doc.param name="error" description="Show this error message if no tag found."
     */
    public void ifDoesntHaveMemberTag(String template, Properties attributes) throws XDocletException {
        // result == true means the body was generated (i.e. the tag was absent).
        boolean result = false;
        if (getCurrentField() != null) {
            if (!hasTag(attributes, FOR_FIELD)) {
                result = true;
                generate(template);
            }
        } else if (getCurrentMethod() != null) {
            if (!hasTag(attributes, FOR_METHOD)) {
                result = true;
                generate(template);
            }
        }
        if (!result) {
            // NOTE(review): the error fires when the tag WAS found (or no current
            // member), which reads opposite to the @doc.param description above —
            // confirm which of the code or the doc reflects the intended contract.
            String error = attributes.getProperty("error");
            if (error != null) {
                getEngine().print(error);
            }
        }
    }
}
public class StrSubstitutor { /** * Replaces all the occurrences of variables with their matching values * from the resolver using the given source as a template . * The source is not altered by this method . * @ param source the buffer to use as a template , not changed , null returns null * @ return the result of the replace operation * @ since 3.2 */ public String replace ( final CharSequence source ) { } }
if ( source == null ) { return null ; } return replace ( source , 0 , source . length ( ) ) ;
public class GoogleCloudStorageReadChannel { /** * Reads from this channel and stores read data in the given buffer . * < p > On unexpected failure , will attempt to close the channel and clean up state . * @ param buffer buffer to read data into * @ return number of bytes read or - 1 on end - of - stream * @ throws IOException on IO error */ @ Override public int read ( ByteBuffer buffer ) throws IOException { } }
throwIfNotOpen ( ) ; // Don ' t try to read if the buffer has no space . if ( buffer . remaining ( ) == 0 ) { return 0 ; } logger . atFine ( ) . log ( "Reading %s bytes at %s position from '%s'" , buffer . remaining ( ) , currentPosition , resourceIdString ) ; // Do not perform any further reads if we already read everything from this channel . if ( currentPosition == size ) { return - 1 ; } int totalBytesRead = 0 ; int retriesAttempted = 0 ; // We read from a streaming source . We may not get all the bytes we asked for // in the first read . Therefore , loop till we either read the required number of // bytes or we reach end - of - stream . do { int remainingBeforeRead = buffer . remaining ( ) ; performLazySeek ( remainingBeforeRead ) ; checkState ( contentChannelPosition == currentPosition , "contentChannelPosition (%s) should be equal to currentPosition (%s) after lazy seek" , contentChannelPosition , currentPosition ) ; try { int numBytesRead = contentChannel . read ( buffer ) ; checkIOPrecondition ( numBytesRead != 0 , "Read 0 bytes without blocking" ) ; if ( numBytesRead < 0 ) { // Because we don ' t know decompressed object size for gzip - encoded objects , // assume that this is an object end . if ( gzipEncoded ) { size = currentPosition ; contentChannelEnd = currentPosition ; } // Check that we didn ' t get a premature End of Stream signal by checking the number of // bytes read against the stream size . Unfortunately we don ' t have information about the // actual size of the data stream when stream compression is used , so we can only ignore // this case here . checkIOPrecondition ( currentPosition == contentChannelEnd || currentPosition == size , String . 
format ( "Received end of stream result before all the file data has been received; " + "totalBytesRead: %d, currentPosition: %d," + " contentChannelEnd %d, size: %d, object: '%s'" , totalBytesRead , currentPosition , contentChannelEnd , size , resourceIdString ) ) ; // If we have reached an end of a contentChannel but not an end of an object // then close contentChannel and continue reading an object if necessary . if ( contentChannelEnd != size && currentPosition == contentChannelEnd ) { closeContentChannel ( ) ; } else { break ; } } if ( numBytesRead > 0 ) { totalBytesRead += numBytesRead ; currentPosition += numBytesRead ; contentChannelPosition += numBytesRead ; checkState ( contentChannelPosition == currentPosition , "contentChannelPosition (%s) should be equal to currentPosition (%s)" + " after successful read" , contentChannelPosition , currentPosition ) ; } if ( retriesAttempted != 0 ) { logger . atInfo ( ) . log ( "Success after %s retries on reading '%s'" , retriesAttempted , resourceIdString ) ; } // The count of retriesAttempted is per low - level contentChannel . read call ; // each time we make progress we reset the retry counter . retriesAttempted = 0 ; } catch ( IOException ioe ) { logger . atFine ( ) . log ( "Closing contentChannel after %s exception for '%s'." , ioe . getMessage ( ) , resourceIdString ) ; closeContentChannel ( ) ; if ( buffer . remaining ( ) != remainingBeforeRead ) { int partialRead = remainingBeforeRead - buffer . remaining ( ) ; logger . atInfo ( ) . log ( "Despite exception, had partial read of %s bytes from '%s'; resetting retry count." , partialRead , resourceIdString ) ; retriesAttempted = 0 ; totalBytesRead += partialRead ; currentPosition += partialRead ; } // TODO ( user ) : Refactor any reusable logic for retries into a separate RetryHelper class . if ( retriesAttempted == maxRetries ) { logger . atSevere ( ) . log ( "Throwing exception after reaching max read retries (%s) for '%s'." 
, maxRetries , resourceIdString ) ; throw ioe ; } if ( retriesAttempted == 0 ) { // If this is the first of a series of retries , we also want to reset the readBackOff // to have fresh initial values . readBackOff . get ( ) . reset ( ) ; } ++ retriesAttempted ; logger . atWarning ( ) . withCause ( ioe ) . log ( "Failed read retry #%s/%s for '%s'. Sleeping..." , retriesAttempted , maxRetries , resourceIdString ) ; try { boolean backOffSuccessful = BackOffUtils . next ( sleeper , readBackOff . get ( ) ) ; if ( ! backOffSuccessful ) { logger . atSevere ( ) . log ( "BackOff returned false; maximum total elapsed time exhausted." + " Giving up after %s/%s retries for '%s'" , retriesAttempted , maxRetries , resourceIdString ) ; throw ioe ; } } catch ( InterruptedException ie ) { logger . atSevere ( ) . log ( "Interrupted while sleeping before retry. Giving up after %s/%s retries for '%s'" , retriesAttempted , maxRetries , resourceIdString ) ; ioe . addSuppressed ( ie ) ; throw ioe ; } logger . atInfo ( ) . log ( "Done sleeping before retry #%s/%s for '%s'" , retriesAttempted , maxRetries , resourceIdString ) ; } catch ( RuntimeException r ) { closeContentChannel ( ) ; throw r ; } } while ( buffer . remaining ( ) > 0 && currentPosition < size ) ; // If this method was called when the stream was already at EOF // ( indicated by totalBytesRead = = 0 ) then return EOF else , // return the number of bytes read . boolean isEndOfStream = ( totalBytesRead == 0 ) ; if ( isEndOfStream ) { // Check that we didn ' t get a premature End of Stream signal by checking the number of bytes // read against the stream size . Unfortunately we don ' t have information about the actual size // of the data stream when stream compression is used , so we can only ignore this case here . checkIOPrecondition ( currentPosition == size , String . 
format ( "Failed to read any data before all the file data has been received;" + " currentPosition: %d, size: %d, object '%s'" , currentPosition , size , resourceIdString ) ) ; return - 1 ; } return totalBytesRead ;