signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CharacterStreamWritePool { /** * Write the given characters . */ public ISynchronizationPoint < IOException > write ( char [ ] chars , int offset , int length ) { } }
ISynchronizationPoint < IOException > last = lastWrite ; if ( length == 0 ) return last ; if ( last . isUnblocked ( ) ) { lastWrite = stream . writeAsync ( chars , offset , length ) ; return lastWrite ; } SynchronizationPoint < IOException > ours = new SynchronizationPoint < > ( ) ; lastWrite = ours ; last . listenInline ( ( ) -> { stream . writeAsync ( chars , offset , length ) . listenInline ( ours ) ; } , ours ) ; return ours ;
public class ImmutableAnalysis { /** * Check a single field for immutability . */ private Violation isFieldImmutable ( Optional < Tree > tree , ImmutableSet < String > immutableTyParams , ClassSymbol classSym , ClassType classType , VarSymbol var , ViolationReporter reporter ) { } }
if ( bugChecker . isSuppressed ( var ) ) { return Violation . absent ( ) ; } if ( ! var . getModifiers ( ) . contains ( Modifier . FINAL ) && ! ASTHelpers . hasAnnotation ( var , LazyInit . class , state ) ) { Violation info = Violation . of ( String . format ( "'%s' has non-final field '%s'" , threadSafety . getPrettyName ( classSym ) , var . getSimpleName ( ) ) ) ; if ( tree . isPresent ( ) ) { // If we have a tree to attach diagnostics to , report the error immediately instead of // accumulating the path to the error from the top - level class being checked state . reportMatch ( reporter . report ( tree . get ( ) , info , SuggestedFixes . addModifiers ( tree . get ( ) , state , Modifier . FINAL ) ) ) ; return Violation . absent ( ) ; } return info ; } Type varType = state . getTypes ( ) . memberType ( classType , var ) ; Violation info = threadSafety . isThreadSafeType ( /* allowContainerTypeParameters = */ true , immutableTyParams , varType ) ; if ( info . isPresent ( ) ) { info = info . plus ( String . format ( "'%s' has field '%s' of type '%s'" , threadSafety . getPrettyName ( classSym ) , var . getSimpleName ( ) , varType ) ) ; if ( tree . isPresent ( ) ) { // If we have a tree to attach diagnostics to , report the error immediately instead of // accumulating the path to the error from the top - level class being checked state . reportMatch ( reporter . report ( tree . get ( ) , info , Optional . empty ( ) ) ) ; return Violation . absent ( ) ; } return info ; } return Violation . absent ( ) ;
public class ContextHelper { /** * Get a specific cookie by its name . * @ param cookies provided cookies * @ param name the name of the cookie * @ return the cookie */ public static Cookie getCookie ( final Collection < Cookie > cookies , final String name ) { } }
if ( cookies != null ) { for ( final Cookie cookie : cookies ) { if ( cookie != null && CommonHelper . areEquals ( name , cookie . getName ( ) ) ) { return cookie ; } } } return null ;
public class TablePanel { /** * Removes all elements that are instance of specified element * @ param widget Instance of widget to remove from container * @ return This instance for chaining */ public TablePanel remove ( Widget widget ) { } }
for ( int i = 0 ; i < content . length ; ++ i ) for ( int j = 0 ; j < content [ i ] . length ; ++ j ) if ( content [ i ] [ j ] == widget ) content [ i ] [ j ] = null ; this . sendElement ( ) ; return this ;
public class BplusTree { /** * Removes and returns a key - value mapping associated with the least key in this map , or null if the map * is empty . * @ return entry */ public synchronized TreeEntry < K , V > pollFirstEntry ( ) { } }
final TreeEntry < K , V > entry = firstEntry ( ) ; if ( entry != null ) { remove ( entry . getKey ( ) ) ; } return entry ;
public class ResteasyUtil { /** * Given a { @ code Response } object , find the associated http request and abort it . * Mostly useful to cleanly close never ending streaming responses - - if you call * { @ link ChunkedInputStream # close ( ) } without first aborting it hangs forever . * @ see https : / / issues . jboss . org / browse / RESTEASY - 1478 * @ see http : / / mail - archives . apache . org / mod _ mbox / hc - httpclient - users / 201608 . mbox / % 3CD93DAC13 - ABE5-4E3C - 9429-82D3C94838C9%40gmail . com % 3E */ public static void abortResponse ( Response r ) { } }
final ClientResponse clientResponse = ( ClientResponse ) r ; ( ( HttpRequestBase ) ( clientResponse . getProperties ( ) . get ( JaxRsClientProperties . ACTUAL_REQUEST ) ) ) . abort ( ) ;
public class JsonObjectFormat { /** * Serializing . */ @ SuppressWarnings ( "unchecked" ) public < T > Object write ( final T object , final DataTypeDescriptor < T > descriptor ) throws Exception { } }
if ( descriptor == null ) throw new NullPointerException ( "descriptor" ) ; if ( object == null ) return null ; TypeEnum typeEnum = descriptor . getType ( ) ; switch ( typeEnum ) { case BOOL : case INT16 : case INT32 : case INT64 : case FLOAT : case DOUBLE : case STRING : return object ; case DATETIME : return writeDate ( ( Date ) object ) ; case ENUM : return writeEnum ( ( Enum ) object ) ; case LIST : return writeList ( ( List ) object , ( ListDescriptor ) descriptor ) ; case SET : return writeSet ( ( Set ) object , ( SetDescriptor ) descriptor ) ; case MAP : return writeMap ( ( Map ) object , ( MapDescriptor ) descriptor ) ; case MESSAGE : return writeMessage ( ( Message ) object ) ; case VOID : return null ; default : throw new IllegalArgumentException ( "Unsupported descriptor " + descriptor ) ; }
public class GeoCodeBasic { /** * { @ inheritDoc } */ @ Override public final Collection < String > notFoundKeys ( ) { } }
logger . debug ( "Captures the places that couldn't be found" ) ; final SortedSet < String > notFoundSet = new TreeSet < > ( ) ; for ( final GeoDocument gdm : findAllDocuments ( ) ) { final String name = gdm . getName ( ) ; if ( gdm . getResult ( ) == null ) { notFoundSet . add ( name ) ; final String mName = gdm . getModernName ( ) ; if ( name . equals ( mName ) ) { logger . debug ( name ) ; } else { logger . debug ( name + "|" + mName ) ; } } } return notFoundSet ;
public class RolesInner {

    /**
     * Create or update a role.
     *
     * @param deviceName        The device name.
     * @param name              The role name.
     * @param resourceGroupName The resource group name.
     * @param role              The role properties.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException           thrown if the request is rejected by server
     * @throws RuntimeException         all other wrapped checked exceptions if the request fails to be sent
     * @return the RoleInner object if successful.
     */
    public RoleInner beginCreateOrUpdate(String deviceName, String name, String resourceGroupName, RoleInner role) {
        // Thin blocking wrapper: delegates to the async service call, waits
        // for the single response, and unwraps its body.
        return beginCreateOrUpdateWithServiceResponseAsync(deviceName, name, resourceGroupName, role).toBlocking().single().body();
    }
}
public class MemcachedClient {

    /**
     * Get the given key asynchronously.
     *
     * @param <T> the type the transcoder decodes to
     * @param key the key to fetch
     * @param tc  the transcoder to serialize and unserialize value
     * @return a future that will hold the return value of the fetch
     * @throws IllegalStateException in the rare circumstance where queue is
     *         too full to accept any more requests
     */
    @Override
    public <T> GetFuture<T> asyncGet(final String key, final Transcoder<T> tc) {
        // Latch of 1: released by the callback's complete(), which is what
        // unblocks callers waiting on the returned future.
        final CountDownLatch latch = new CountDownLatch(1);
        final GetFuture<T> rv = new GetFuture<T>(latch, operationTimeout, key, executorService);
        Operation op = opFact.get(key, new GetOperation.Callback() {
            // Decoded value; gotData() runs before receivedStatus(), so the
            // value is set on rv together with its final status.
            private Future<T> val;

            @Override
            public void receivedStatus(OperationStatus status) {
                rv.set(val, status);
            }

            @Override
            public void gotData(String k, int flags, byte[] data) {
                assert key.equals(k) : "Wrong key returned";
                // Decoding is handed off to the transcode service (may run on
                // a separate thread pool), hence val is itself a Future.
                val = tcService.decode(tc, new CachedData(flags, data, tc.getMaxSize()));
            }

            @Override
            public void complete() {
                latch.countDown();
                rv.signalComplete();
            }
        });
        rv.setOperation(op);
        mconn.enqueueOperation(key, op);
        return rv;
    }
}
public class BoundedLocalCache {

    /**
     * Increases the size of the admission window by shrinking the portion
     * allocated to the main space. As the main space is partitioned into
     * probation and protected regions (80% / 20%), for simplicity only the
     * protected is reduced. If the regions exceed their maximums, this may
     * cause protected items to be demoted to the probation region and
     * probation items to be demoted to the admission window.
     */
    @GuardedBy("evictionLock")
    void increaseWindow() {
        if (mainProtectedMaximum() == 0) {
            return;
        }
        // Tentatively move up to `quota` units of capacity from the protected
        // region to the window; any unused remainder is given back below.
        long quota = Math.min(adjustment(), mainProtectedMaximum());
        setMainProtectedMaximum(mainProtectedMaximum() - quota);
        setWindowMaximum(windowMaximum() + quota);
        demoteFromMainProtected();
        // Transfer entries to fill the enlarged window, bounded so a single
        // pass cannot monopolize the maintenance cycle.
        for (int i = 0; i < QUEUE_TRANSFER_THRESHOLD; i++) {
            // Prefer probation victims; fall back to protected when probation
            // is empty or its head doesn't fit in the remaining quota.
            Node<K, V> candidate = accessOrderProbationDeque().peek();
            boolean probation = true;
            if ((candidate == null) || (quota < candidate.getPolicyWeight())) {
                candidate = accessOrderProtectedDeque().peek();
                probation = false;
            }
            if (candidate == null) {
                break;
            }
            int weight = candidate.getPolicyWeight();
            if (quota < weight) {
                break;
            }
            quota -= weight;
            if (probation) {
                accessOrderProbationDeque().remove(candidate);
            } else {
                setMainProtectedWeightedSize(mainProtectedWeightedSize() - weight);
                accessOrderProtectedDeque().remove(candidate);
            }
            setWindowWeightedSize(windowWeightedSize() + weight);
            accessOrderWindowDeque().add(candidate);
            candidate.makeWindow();
        }
        // Return the untransferred remainder of the quota to the protected
        // region, and record the leftover as the next adjustment.
        setMainProtectedMaximum(mainProtectedMaximum() + quota);
        setWindowMaximum(windowMaximum() - quota);
        setAdjustment(quota);
    }
}
public class DisseminationService {

    /**
     * Assembles a dissemination given an instance of
     * <code>DisseminationBindingInfo</code> which has the
     * dissemination-related information from the digital object and its
     * associated Service Deployment object.
     *
     * @param context           The current context.
     * @param PID               The persistent identifier of the digital object.
     * @param h_userParms       A hashtable of user-supplied method parameters.
     * @param dissBindInfoArray The associated dissemination binding information.
     * @param deploymentPID     PID of the service deployment (used as the
     *                          backend-security callback role).
     * @param bmReader          Reader for the deployment's datastream input spec.
     * @param methodName        The dissemination method being invoked.
     * @return A MIME-typed stream containing the result of the dissemination.
     * @throws ServerException If unable to assemble the dissemination for any
     *                         reason.
     */
    public MIMETypedStream assembleDissemination(Context context, String PID, Hashtable<String, String> h_userParms, DisseminationBindingInfo[] dissBindInfoArray, String deploymentPID, ServiceDeploymentReader bmReader, String methodName) throws ServerException {
        logger.debug("Started assembling dissemination");
        String dissURL = null;
        String protocolType = null;
        DisseminationBindingInfo dissBindInfo = null;
        MIMETypedStream dissemination = null;
        boolean isRedirect = false;
        if (logger.isDebugEnabled()) {
            printBindingInfo(dissBindInfoArray);
        }
        if (dissBindInfoArray != null && dissBindInfoArray.length > 0) {
            String replaceString = null;
            int numElements = dissBindInfoArray.length;
            // Get row(s) of binding info and perform string substitution on
            // DSBindingKey and method parameter values in WSDL. Note: in the
            // case where more than one datastream matches the DSBindingKey or
            // there are multiple DSBindingKeys for the method, multiple rows
            // will be present; otherwise there is only a single row.
            for (int i = 0; i < dissBindInfoArray.length; i++) {
                m_authorization.enforce_Internal_DSState(context, dissBindInfoArray[i].dsID, dissBindInfoArray[i].dsState);
                dissBindInfo = dissBindInfoArray[i];
                // Before doing anything, check whether we can replace any
                // placeholders in the datastream URL with parameter values
                // from the request. This supports the special case where a
                // datastream's URL is dependent on user parameters, such as
                // when the datastream is actually a dissemination that takes
                // parameters.
                if (dissBindInfo.dsLocation != null && (dissBindInfo.dsLocation.startsWith("http://") || dissBindInfo.dsLocation.startsWith("https://"))) {
                    String[] parts = dissBindInfo.dsLocation.split("=\\("); // regex for "=("
                    if (parts.length > 1) {
                        StringBuffer replaced = new StringBuffer();
                        replaced.append(parts[0]);
                        for (int x = 1; x < parts.length; x++) {
                            replaced.append('=');
                            int rightParenPos = parts[x].indexOf(')');
                            if (rightParenPos != -1 && rightParenPos > 0) {
                                String key = parts[x].substring(0, rightParenPos);
                                String val = h_userParms.get(key);
                                if (val != null) {
                                    // We have a match... so insert the
                                    // urlencoded value.
                                    try {
                                        replaced.append(URLEncoder.encode(val, "UTF-8"));
                                    } catch (UnsupportedEncodingException uee) {
                                        // won't happen: java always supports UTF-8
                                    }
                                    if (rightParenPos < parts[x].length()) {
                                        replaced.append(parts[x].substring(rightParenPos + 1));
                                    }
                                } else {
                                    // No user value: restore the "(" eaten by split.
                                    replaced.append('(');
                                    replaced.append(parts[x]);
                                }
                            } else {
                                replaced.append('(');
                                replaced.append(parts[x]);
                            }
                        }
                        dissBindInfo.dsLocation = replaced.toString();
                    }
                }
                // Match DSBindingKey pattern in WSDL, which is a string of the
                // form: (DSBindingKey). Rows in DisseminationBindingInfo are
                // sorted alphabetically on binding key.
                String bindingKeyPattern = "\\(" + dissBindInfo.DSBindKey + "\\)";
                if (i == 0) {
                    // An addressLocation of "LOCAL" indicates the associated
                    // operationLocation requires no addressLocation, i.e., it
                    // contains all information necessary to perform the
                    // dissemination request (cgi-scripts, servlets, simple
                    // HTTP GETs). It also lets different methods be serviced
                    // by different hosts; in true web services like SOAP the
                    // addressLocation is the single service host.
                    if (dissBindInfo.AddressLocation.equalsIgnoreCase(LOCAL_ADDRESS_LOCATION)) {
                        dissURL = dissBindInfo.OperationLocation;
                    } else {
                        dissURL = dissBindInfo.AddressLocation + dissBindInfo.OperationLocation;
                        // Substitute real app server context if we detect
                        // '/fedora'. Necessary here because DOTranslator does
                        // not scrub URLs that result from concatenating
                        // fragments from different locations in the file.
                        dissURL = dissURL.replaceAll(m_fedoraServerHost + ":" + m_fedoraServerPort + "/fedora/", m_fedoraServerHost + ":" + m_fedoraServerPort + "/" + m_fedoraAppServerContext + "/");
                    }
                    protocolType = dissBindInfo.ProtocolType;
                }
                // Assess beSecurity for the backend service and for
                // datastreams that may be parameters for it. When dsMediation
                // is in effect, all M, X, and E datastreams are encoded as
                // callbacks to the Fedora server; protocol/host/port and
                // servlet path (authenticated or not) come from the
                // beSecurity configuration, keyed by callbackRole (the
                // deploymentPID of the service).
                String callbackRole = deploymentPID;
                Hashtable<String, String> beHash = m_beSS.getSecuritySpec(callbackRole, methodName);
                boolean callbackBasicAuth = Boolean.parseBoolean(beHash.get("callbackBasicAuth"));
                boolean callbackSSL = Boolean.parseBoolean(beHash.get("callbackSSL"));
                String dsMediatedServletPath = null;
                if (callbackBasicAuth) {
                    dsMediatedServletPath = "/" + m_fedoraAppServerContext + "/getDSAuthenticated?id=";
                } else {
                    dsMediatedServletPath = "/" + m_fedoraAppServerContext + "/getDS?id=";
                }
                String dsMediatedCallbackHost = null;
                if (callbackSSL) {
                    dsMediatedCallbackHost = "https://" + m_fedoraServerHost + ":" + m_fedoraServerRedirectPort;
                } else {
                    dsMediatedCallbackHost = "http://" + m_fedoraServerHost + ":" + m_fedoraServerPort;
                }
                String datastreamResolverServletURL = dsMediatedCallbackHost + dsMediatedServletPath;
                if (logger.isDebugEnabled()) {
                    logger.debug("******************Checking backend service dsLocation: {}", dissBindInfo.dsLocation);
                    logger.debug("******************Checking backend service dsControlGroupType: {}", dissBindInfo.dsControlGroupType);
                    logger.debug("******************Checking backend service callbackBasicAuth: {}", callbackBasicAuth);
                    logger.debug("******************Checking backend service callbackSSL: {}", callbackSSL);
                    logger.debug("******************Checking backend service callbackRole: {}", callbackRole);
                    logger.debug("******************DatastreamResolverServletURL: {}", datastreamResolverServletURL);
                }
                String currentKey = dissBindInfo.DSBindKey;
                String nextKey = "";
                if (i != numElements - 1) {
                    // Except for last row, get the value of the next binding
                    // key to compare with the current binding key.
                    nextKey = dissBindInfoArray[i + 1].DSBindKey;
                }
                logger.debug("currentKey: '" + currentKey + "', nextKey: '" + nextKey + "'");
                // Usually a single datastream matches the DSBindingKey, so
                // (BINDING_KEY) is simply replaced with the datastream
                // location. When multiple datastreams match the same key,
                // location+(BINDING_KEY) is substituted so subsequent matches
                // append, yielding e.g. file=dsloc1+dsloc2+dsloc3; the Service
                // Deployment is responsible for handling multi-location
                // parameters. With multiple binding keys, substitution is
                // performed per key.
                if (nextKey.equalsIgnoreCase(currentKey) & i != numElements) {
                    // Case where binding keys are equal, meaning multiple
                    // datastreams matched the same binding key.
                    if (m_doDatastreamMediation && !dissBindInfo.dsControlGroupType.equalsIgnoreCase("R")) {
                        // Use Datastream Mediation (except for redirected
                        // datastreams).
                        replaceString = datastreamResolverServletURL + registerDatastreamLocation(dissBindInfo.dsLocation, dissBindInfo.dsControlGroupType, callbackRole, methodName) + "+(" + dissBindInfo.DSBindKey + ")";
                    } else {
                        // Bypass Datastream Mediation.
                        if (dissBindInfo.dsControlGroupType.equalsIgnoreCase("M") || dissBindInfo.dsControlGroupType.equalsIgnoreCase("X")) {
                            // Use the Default Disseminator syntax to resolve
                            // the internal datastream location for Managed and
                            // XML datastreams.
                            replaceString = resolveInternalDSLocation(context, dissBindInfo.dsLocation, dissBindInfo.dsCreateDT, dsMediatedCallbackHost) + "+(" + dissBindInfo.DSBindKey + ")";
                            ;
                        } else {
                            replaceString = dissBindInfo.dsLocation + "+(" + dissBindInfo.DSBindKey + ")";
                        }
                        if (dissBindInfo.dsControlGroupType.equalsIgnoreCase("R") && dissBindInfo.AddressLocation.equals(LOCAL_ADDRESS_LOCATION)) {
                            isRedirect = true;
                        }
                    }
                } else {
                    // Case where there are one or more binding keys.
                    if (m_doDatastreamMediation && !dissBindInfo.dsControlGroupType.equalsIgnoreCase("R")) {
                        // Use Datastream Mediation (except for Redirected
                        // datastreams). NOTE(review): this is generic; should
                        // be made specific per service.
                        replaceString = datastreamResolverServletURL + registerDatastreamLocation(dissBindInfo.dsLocation, dissBindInfo.dsControlGroupType, callbackRole, methodName);
                    } else {
                        // Bypass Datastream Mediation.
                        if (dissBindInfo.dsControlGroupType.equalsIgnoreCase("M") || dissBindInfo.dsControlGroupType.equalsIgnoreCase("X")) {
                            replaceString = resolveInternalDSLocation(context, dissBindInfo.dsLocation, dissBindInfo.dsCreateDT, dsMediatedCallbackHost);
                        } else {
                            replaceString = dissBindInfo.dsLocation;
                        }
                        if (dissBindInfo.dsControlGroupType.equalsIgnoreCase("R") && dissBindInfo.AddressLocation.equals(LOCAL_ADDRESS_LOCATION)) {
                            isRedirect = true;
                        }
                    }
                }
                try {
                    // Two tests for deciding whether to URL-encode the
                    // datastream URL. Old method: if the operationLocation
                    // contains datastreamInputParms and an "=(" sequence,
                    // URL-encode each parameter; otherwise it is a simple URL,
                    // so bypass encoding. New method: encode each parameter
                    // except when it comprises the first part of the URL.
                    boolean useUrlEncoding = m_useNewUrlEncodingTest ? dissURL.indexOf("(" + bindingKeyPattern + ")") > 0 : dissURL.indexOf("=(") != -1;
                    if (useUrlEncoding) {
                        dissURL = substituteString(dissURL, bindingKeyPattern, URLEncoder.encode(replaceString, "UTF-8"));
                    } else {
                        dissURL = substituteString(dissURL, bindingKeyPattern, replaceString);
                    }
                } catch (UnsupportedEncodingException uee) {
                    String message = "[DisseminationService] An error occured. The error " + "was \"" + uee.getClass().getName() + "\" . The Reason was \"" + uee.getMessage() + "\" . String value: " + replaceString + " . ";
                    logger.error(message);
                    throw new GeneralException(message);
                }
                logger.debug("Replaced dissURL: " + dissURL.toString() + " DissBindingInfo index: " + i);
            }
            // Fail if any required datastream binding key was never
            // substituted.
            DeploymentDSBindSpec dsBindSpec = bmReader.getServiceDSInputSpec(null);
            DeploymentDSBindRule rules[] = dsBindSpec.dsBindRules;
            for (DeploymentDSBindRule element : rules) {
                String rulePattern = "(" + element.bindingKeyName + ")";
                if (dissURL.indexOf(rulePattern) != -1) {
                    throw new DisseminationException(null, "Data Object " + PID + " missing required datastream: " + element.bindingKeyName, null, null, null);
                }
            }
            // Substitute method parameter values in dissemination URL.
            Enumeration<String> e = h_userParms.keys();
            while (e.hasMoreElements()) {
                String name = null;
                String value = null;
                try {
                    name = URLEncoder.encode(e.nextElement(), "UTF-8");
                    value = URLEncoder.encode(h_userParms.get(name), "UTF-8");
                } catch (UnsupportedEncodingException uee) {
                    String message = "[DisseminationService] An error occured. The error " + "was \"" + uee.getClass().getName() + "\" . The Reason was \"" + uee.getMessage() + "\" . Parameter name: " + name + " . " + "Parameter value: " + value + " .";
                    logger.error(message);
                    throw new GeneralException(message);
                }
                String pattern = "\\(" + name + "\\)";
                dissURL = substituteString(dissURL, pattern, value);
                logger.debug("User parm substituted in URL: " + dissURL);
            }
            // FIXME: Need a more elegant means of handling optional
            // userInputParm method parameters not supplied by the client; for
            // now any optional parms that were not supplied are removed from
            // the outgoing URL. This works because parms are validated in
            // DefaultAccess, so the only unsubstituted parms left are omitted
            // optional ones, which can safely be stripped.
            if (dissURL.indexOf("(") != -1) {
                dissURL = stripParms(dissURL);
                logger.debug("Non-supplied optional userInputParm values removed " + "from URL: " + dissURL);
            }
            // Any "(" still present after stripping means a required
            // datastream was never bound.
            if (dissURL.indexOf("(") != -1) {
                String datastreamName = dissURL.substring(dissURL.indexOf("(") + 1, dissURL.indexOf(")"));
                throw new DisseminationException(null, "Data Object " + PID + " missing required datastream: " + datastreamName, null, null, null);
            }
            // Resolve content referenced by dissemination result.
            logger.debug("ProtocolType: " + protocolType);
            if (protocolType.equalsIgnoreCase("http")) {
                if (isRedirect) {
                    // The dsControlGroupType of Redirect ("R") is a special
                    // control type used primarily for streaming media. Such
                    // datastreams are not proxied by Fedora: the returned
                    // MIMETypedStream carries the raw dsLocation URL with a
                    // special fedora-specific MIME type telling the client to
                    // redirect.
                    dissemination = MIMETypedStream.getRedirect(dissURL);
                    logger.debug("Finished assembling dissemination for redirect, URL={}", dissURL);
                } else {
                    // For all non-redirected disseminations, Fedora captures
                    // and returns the MIMETypedStream from the request. If the
                    // URL references the fedora server itself, use the special
                    // role "fedoraInternalCall-1"; otherwise use the backend
                    // service deployment PID as the role.
                    String beServiceRole = null;
                    if (ServerUtility.isURLFedoraServer(dissURL)) {
                        beServiceRole = BackendPolicies.FEDORA_INTERNAL_CALL;
                    } else {
                        beServiceRole = deploymentPID;
                    }
                    // Get basicAuth and SSL info about the backend service and
                    // use it to configure the call.
                    Hashtable<String, String> beHash = m_beSS.getSecuritySpec(beServiceRole, methodName);
                    boolean beServiceCallSSL = Boolean.parseBoolean(beHash.get("callSSL"));
                    String beServiceCallUsername = "";
                    String beServiceCallPassword = "";
                    boolean beServiceCallBasicAuth = Boolean.parseBoolean(beHash.get("callBasicAuth"));
                    if (beServiceCallBasicAuth) {
                        beServiceCallUsername = beHash.get("callUsername");
                        beServiceCallPassword = beHash.get("callPassword");
                    }
                    if (logger.isDebugEnabled()) {
                        logger.debug("******************getDisseminationContent beServiceRole: {}", beServiceRole);
                        logger.debug("******************getDisseminationContent beServiceCallBasicAuth: {}", beServiceCallBasicAuth);
                        logger.debug("******************getDisseminationContent beServiceCallSSL: {}", beServiceCallSSL);
                        logger.debug("******************getDisseminationContent beServiceCallUsername: {}", beServiceCallUsername);
                        logger.debug("******************getDisseminationContent beServiceCallPassword: {}", beServiceCallPassword);
                        logger.debug("******************getDisseminationContent dissURL: {}", dissURL);
                    }
                    // Dispatch backend service URL request, authenticating as
                    // necessary based on beSecurity configuration.
                    ContentManagerParams params = new ContentManagerParams(dissURL, null, beServiceCallUsername, beServiceCallPassword);
                    params.setBypassBackend(true);
                    params.setContext(context);
                    dissemination = m_ecm.getExternalContent(params);
                    logger.debug("Finished assembling dissemination, URL={}", dissURL);
                }
            } else if (protocolType.equalsIgnoreCase("soap")) {
                // FIXME!! future handling of soap bindings.
                String message = "[DisseminationService] Protocol type: " + protocolType + "NOT yet implemented";
                logger.error(message);
                throw new DisseminationException(message);
            } else if (protocolType.equalsIgnoreCase("file")) {
                ContentManagerParams params = new ContentManagerParams(dissURL);
                params.setContext(context);
                dissemination = m_ecm.getExternalContent(params);
            } else {
                String message = "[DisseminationService] Protocol type: " + protocolType + "NOT supported.";
                logger.error(message);
                throw new DisseminationException(message);
            }
        } else {
            // DisseminationBindingInfo was empty so there was no information
            // provided to construct a dissemination.
            String message = "[DisseminationService] Dissemination Binding " + "Info contained no data";
            logger.error(message);
            throw new DisseminationBindingInfoNotFoundException(message);
        }
        return dissemination;
    }
}
public class TrackerMeanShiftLikelihood { /** * Computes the PDF only inside the image as needed amd update the dirty rectangle */ protected void updatePdfImage ( int x0 , int y0 , int x1 , int y1 ) { } }
for ( int y = y0 ; y < y1 ; y ++ ) { int indexOut = pdf . startIndex + pdf . stride * y + x0 ; for ( int x = x0 ; x < x1 ; x ++ , indexOut ++ ) { if ( pdf . data [ indexOut ] < 0 ) pdf . data [ indexOut ] = targetModel . compute ( x , y ) ; } } // update the dirty region if ( dirty . x0 > x0 ) dirty . x0 = x0 ; if ( dirty . y0 > y0 ) dirty . y0 = y0 ; if ( dirty . x1 < x1 ) dirty . x1 = x1 ; if ( dirty . y1 < y1 ) dirty . y1 = y1 ;
public class HttpHeaders { /** * Return the ( new ) location of a resource * as specified by the { @ code Location } header . * < p > Returns { @ code null } when the location is unknown . */ public URI getLocation ( ) { } }
String value = getFirst ( LOCATION ) ; return ( value != null ? URI . create ( value ) : null ) ;
public class LicenseUtility { /** * Display the license file . If an error occurs reading or writing the * license file , exit with a message * @ param licenseFile * The license file to display */ public Object displayLicenseFile ( InputStream licenseFile , CommandConsole commandConsole ) { } }
Object e = LicenseUtility . class ; if ( licenseFile != null ) { e = showLicenseFile ( licenseFile , commandConsole ) ; } if ( e != null ) { commandConsole . printErrorMessage ( CommandUtils . getMessage ( "LICENSE_NOT_FOUND" ) ) ; } return e ;
public class PhaseOneApplication { /** * { @ inheritDoc } */ @ Override public boolean validCommandLine ( ) { } }
final CommandLineParser parser = new AntelopeParser ( ) ; List < Option > myOpts = getCommandLineOptions ( ) ; final Options options = new Options ( ) ; for ( final Option option : myOpts ) { options . addOption ( option ) ; } CommandLine parse ; try { parse = parser . parse ( options , getCommandLineArguments ( ) , false ) ; } catch ( ParseException e ) { return false ; } // Verify one form of valid input is present . if ( parse . hasOption ( SHORT_OPT_IN_PATH ) ) { return true ; } if ( parse . hasOption ( INFILE_SHORT_OPT ) ) { return true ; } return false ;
public class AWSShieldClient { /** * Updates the details of an existing subscription . Only enter values for parameters you want to change . Empty * parameters are not updated . * @ param updateSubscriptionRequest * @ return Result of the UpdateSubscription operation returned by the service . * @ throws InternalErrorException * Exception that indicates that a problem occurred with the service infrastructure . You can retry the * request . * @ throws LockedSubscriptionException * You are trying to update a subscription that has not yet completed the 1 - year commitment . You can change * the < code > AutoRenew < / code > parameter during the last 30 days of your subscription . This exception * indicates that you are attempting to change < code > AutoRenew < / code > prior to that period . * @ throws ResourceNotFoundException * Exception indicating the specified resource does not exist . * @ throws InvalidParameterException * Exception that indicates that the parameters passed to the API are invalid . * @ throws OptimisticLockException * Exception that indicates that the protection state has been modified by another client . You can retry the * request . * @ sample AWSShield . UpdateSubscription * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / shield - 2016-06-02 / UpdateSubscription " target = " _ top " > AWS API * Documentation < / a > */ @ Override public UpdateSubscriptionResult updateSubscription ( UpdateSubscriptionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateSubscription ( request ) ;
public class RequestFactory {
    /**
     * Create new Dependency Data request.
     *
     * @param orgToken WhiteSource organization token.
     * @param projects Projects status statement to check.
     * @param userKey user key uniquely identifying the account at white source.
     * @return Newly created request to get Dependency Additional Data (Licenses, Description, homepageUrl and Vulnerabilities).
     * @deprecated use the overload that also accepts the two additional leading parameters.
     */
    @ Deprecated public GetDependencyDataRequest newDependencyDataRequest ( String orgToken , Collection < AgentProjectInfo > projects , String userKey ) {
        // Delegates to the newer overload; the two nulls stand in for parameters this
        // deprecated variant never exposed — presumably product name/token, TODO confirm.
        return newDependencyDataRequest ( orgToken , null , null , projects , userKey ) ;
    }
}
public class Period { /** * Constructs a Period representing a duration of * count units extending into the past . * @ param count the number of units , must be non - negative * @ param unit the unit * @ return the new Period */ public static Period at ( float count , TimeUnit unit ) { } }
checkCount ( count ) ; return new Period ( ETimeLimit . NOLIMIT , false , count , unit ) ;
public class TimelineModel { /** * Adds all given events to the model with UI update . * @ param events collection of events to be added * @ param timelineUpdater TimelineUpdater instance to add the events in UI */ public void addAll ( Collection < TimelineEvent > events , TimelineUpdater timelineUpdater ) { } }
if ( events != null && ! events . isEmpty ( ) ) { for ( TimelineEvent event : events ) { add ( event , timelineUpdater ) ; } }
public class Choice7 {
    /**
     * {@inheritDoc}
     *
     * Lifts the given value into this choice type.
     */
    @ Override public < H > Choice7 < A , B , C , D , E , F , H > pure ( H h ) {
        // Injects the value via the g constructor — presumably the seventh
        // (rightmost) slot of the coproduct; TODO confirm against Choice7's factories.
        return g ( h ) ;
    }
}
public class Distances { /** * converts strict content math ml to a histogram for the given tagname , e . g . , ci * @ param strictCmml * @ param tagName * @ return */ private static HashMap < String , Double > strictCmmlInfoToHistogram ( CMMLInfo strictCmml , String tagName ) { } }
final NodeList elements = strictCmml . getElementsByTagName ( tagName ) ; return contentElementsToHistogram ( elements ) ;
public class MongoFactory { /** * 关闭全部连接 */ public static void closeAll ( ) { } }
if ( CollectionUtil . isNotEmpty ( dsMap ) ) { for ( MongoDS ds : dsMap . values ( ) ) { ds . close ( ) ; } dsMap . clear ( ) ; }
public class SchemaResource {
    /**
     * Gets a map of protocols defined in the system - protocol name to protocol.
     *
     * @return a map of protocol information keyed by each protocol's unique name
     * @throws GuacamoleException if an error occurs while retrieving the available protocols
     */
    @ GET @ Path ( "protocols" ) public Map < String , ProtocolInfo > getProtocols ( ) throws GuacamoleException {
        // Query the local environment for every installed protocol.
        final Environment environment = new LocalEnvironment();
        return environment.getProtocols();
    }
}
public class NBLinearClassifierFactory {
    /**
     * Train Naive Bayes weights from the given data.
     * If tuneSigma is true, the optimal sigma value is found using cross-validation:
     * the number of folds is determined by the <code>folds</code> variable,
     * if there are less training examples than folds, leave-one-out is used.
     *
     * @param data data[d] lists the indices of the features active in datum d
     * @param labels labels[d] is the class index of datum d
     * @return weights[feature][class] = log( P(class|feature) / P(class) )
     */
    double [ ] [ ] trainWeights ( int [ ] [ ] data , int [ ] labels ) {
        if ( tuneSigma ) {
            // Cross-validate to pick the smoothing parameter before training.
            tuneSigma ( data , labels ) ;
        }
        if ( VERBOSE ) {
            System . err . println ( "NB CF: " + data . length + " data items " ) ;
            for ( int i = 0 ; i < data . length ; i ++ ) {
                System . err . print ( "Datum " + i + ": " + labels [ i ] + ":" ) ;
                for ( int j = 0 ; j < data [ i ] . length ; j ++ ) {
                    System . err . print ( " " + data [ i ] [ j ] ) ;
                }
                System . err . println ( ) ;
            }
        }
        int numFeatures = numFeatures ( ) ;
        int numClasses = numClasses ( ) ;
        double [ ] [ ] weights = new double [ numFeatures ] [ numClasses ] ;
        // find P(C|F)/P(C) — accumulate sufficient statistics in one pass.
        int num = 0 ;                                                     // total number of data items
        double [ ] numc = new double [ numClasses ] ;                     // number of data items per class
        double n = 0 ;                                                    // num active features in whole dataset
        double [ ] n_c = new double [ numClasses ] ;                      // num active features in class c items
        double [ ] n_f = new double [ numFeatures ] ;                     // num data items for which feature is active
        double [ ] [ ] n_fc = new double [ numFeatures ] [ numClasses ] ; // num times feature active in class c
        for ( int d = 0 ; d < data . length ; d ++ ) {
            num ++ ;
            numc [ labels [ d ] ] ++ ;
            for ( int i = 0 ; i < data [ d ] . length ; i ++ ) {
                n ++ ;
                n_c [ labels [ d ] ] ++ ;
                n_f [ data [ d ] [ i ] ] ++ ;
                n_fc [ data [ d ] [ i ] ] [ labels [ d ] ] ++ ;
            }
        }
        for ( int c = 0 ; c < numClasses ; c ++ ) {
            for ( int f = 0 ; f < numFeatures ; f ++ ) {
                if ( interpretAlwaysOnFeatureAsPrior && n_f [ f ] == data . length ) {
                    // interpret always on feature as prior! Its weight is just log P(c).
                    weights [ f ] [ c ] = Math . log ( numc [ c ] / num ) ;
                } else {
                    // p_c_f = (N(f,c)+k) / (N(f)+|C|k) = Paddk(c|f)
                    // set lambda = log( P(c|f) / P(c) ), both add-k smoothed.
                    double p_c = ( n_c [ c ] + epsilon ) / ( n + numClasses * epsilon ) ;
                    double p_c_f = ( n_fc [ f ] [ c ] + sigma ) / ( n_f [ f ] + sigma * numClasses ) ;
                    if ( VERBOSE ) {
                        System . err . println ( "Prob ratio(f=" + f + ",c=" + c + ") = " + p_c_f / p_c + " (nc=" + n_c [ c ] + ", nf=" + n_f [ f ] + ", nfc=" + n_fc [ f ] [ c ] + ")" ) ;
                    }
                    weights [ f ] [ c ] = Math . log ( p_c_f / p_c ) ;
                }
            }
        }
        return weights ;
    }
}
public class WalletApi { /** * Get character wallet journal Retrieve the given character & # 39 ; s wallet * journal going 30 days back - - - This route is cached for up to 3600 * seconds SSO Scope : esi - wallet . read _ character _ wallet . v1 * @ param characterId * An EVE character ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param page * Which page of results to return ( optional , default to 1) * @ param token * Access token to use if unable to set a header ( optional ) * @ return List & lt ; CharacterWalletJournalResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public List < CharacterWalletJournalResponse > getCharactersCharacterIdWalletJournal ( Integer characterId , String datasource , String ifNoneMatch , Integer page , String token ) throws ApiException { } }
ApiResponse < List < CharacterWalletJournalResponse > > resp = getCharactersCharacterIdWalletJournalWithHttpInfo ( characterId , datasource , ifNoneMatch , page , token ) ; return resp . getData ( ) ;
public class VcfReader {
    /**
     * Read zero or more VCF samples from the specified URL.
     *
     * @param url URL to read from, must not be null
     * @return zero or more VCF samples read from the specified URL
     * @throws IOException if an I/O error occurs
     */
    public static Iterable < VcfSample > samples ( final URL url ) throws IOException {
        checkNotNull ( url ) ;
        try ( BufferedReader reader = Resources . asCharSource ( url , Charsets . UTF_8 ) . openBufferedStream ( ) ) {
            // NOTE(review): the reader is closed when this try block exits. This is only
            // safe if samples(reader) fully materializes its result before returning —
            // confirm the returned Iterable is not lazily backed by the (closed) reader.
            return samples ( reader ) ;
        }
    }
}
public class EventSubscriptionsInner {
    /**
     * List all event subscriptions that have been created for a specific domain topic.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription.
     * @param domainName Name of the top level domain
     * @param topicName Name of the domain topic
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;EventSubscriptionInner&gt; object if successful.
     */
    public List < EventSubscriptionInner > listByDomainTopic ( String resourceGroupName , String domainName , String topicName ) {
        // Synchronously block on the async service call and unwrap the response body.
        return listByDomainTopicWithServiceResponseAsync ( resourceGroupName , domainName , topicName ) . toBlocking ( ) . single ( ) . body ( ) ;
    }
}
public class InfluxDBImpl { /** * { @ inheritDoc } */ @ Override public void createRetentionPolicy ( final String rpName , final String database , final String duration , final String shardDuration , final int replicationFactor ) { } }
createRetentionPolicy ( rpName , database , duration , null , replicationFactor , false ) ;
public class Store {
    /**
     * Returns for given parameter <i>_uuid</i> the instance of class {@link Store}.
     *
     * @param _uuid uuid of the type to get
     * @return instance of class {@link Store}, or {@code null} if no such store exists
     * @throws CacheReloadException on error
     */
    public static Store get ( final UUID _uuid ) throws CacheReloadException {
        final Cache < UUID , Store > cache = InfinispanCache . get ( ) . < UUID , Store > getCache ( Store . UUIDCACHE ) ;
        // Populate the cache from the database on a miss.
        // NOTE(review): check-then-load is not atomic; concurrent callers may both
        // trigger the DB load — presumably idempotent, verify.
        if ( ! cache . containsKey ( _uuid ) ) {
            Store . getStoreFromDB ( CIDB . Store . UUID , String . valueOf ( _uuid ) ) ;
        }
        // May still be null if the DB lookup found nothing — TODO confirm callers handle this.
        return cache . get ( _uuid ) ;
    }
}
public class FSInputChecker {
    /**
     * Set the checksum related parameters and reset the buffer state.
     *
     * @param verifyChecksum whether to verify checksum
     * @param sum which type of checksum to use
     * @param maxChunkSize maximum chunk size
     * @param checksumSize checksum size
     */
    final protected synchronized void set ( boolean verifyChecksum , Checksum sum , int maxChunkSize , int checksumSize ) {
        this . verifyChecksum = verifyChecksum ;
        this . sum = sum ;
        // Allocate fresh buffers sized for the new chunk/checksum configuration.
        this . buf = new byte [ maxChunkSize ] ;
        this . checksum = new byte [ checksumSize ] ;
        // Reset read position: any previously buffered data is discarded.
        this . count = 0 ;
        this . pos = 0 ;
    }
}
public class CharacterRangeImpl {
    /**
     * Sets the left keyword of this character range without notifying, chaining
     * the change notification onto the given chain instead.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetLeft ( Keyword newLeft , NotificationChain msgs ) {
        // Standard generated EMF basic-set: swap the reference, then queue a SET
        // notification on the chain if anyone is listening.
        Keyword oldLeft = left ;
        left = newLeft ;
        if ( eNotificationRequired ( ) ) {
            ENotificationImpl notification = new ENotificationImpl ( this , Notification . SET , XtextPackage . CHARACTER_RANGE__LEFT , oldLeft , newLeft ) ;
            if ( msgs == null ) msgs = notification ; else msgs . add ( notification ) ;
        }
        return msgs ;
    }
}
public class RadialDistortionEstimateLinear { /** * Declares and sets up data structures */ private void init ( List < CalibrationObservation > observations ) { } }
int totalPoints = 0 ; for ( int i = 0 ; i < observations . size ( ) ; i ++ ) { totalPoints += observations . get ( i ) . size ( ) ; } A . reshape ( 2 * totalPoints , X . numRows , false ) ; B . reshape ( A . numRows , 1 , false ) ;
public class JavacParser { /** * Report an illegal start of expression / type error at given position . */ JCExpression illegal ( int pos ) { } }
setErrorEndPos ( pos ) ; if ( ( mode & EXPR ) != 0 ) return syntaxError ( pos , "illegal.start.of.expr" ) ; else return syntaxError ( pos , "illegal.start.of.type" ) ;
public class BaseListener {
    /**
     * Get the listener with this class identifier by walking the linked listener chain.
     * Note: You can pass the full class name, or the short class name or (preferably) the class.
     *
     * @param bExactMatch Only returns classes that are an exact match... if false, return classes that are an instanceof the class
     * @param strBehaviorClass The name of the class I'm looking for (a String name or a Class).
     * @return The first listener of this class or null if no match.
     */
    public BaseListener getListener ( Object strBehaviorClass , boolean bExactMatch ) {
        // Walk the singly-linked chain starting at the next listener.
        BaseListener listener = m_nextListener ;
        if ( listener == null )
            return null ;
        if ( ! bExactMatch ) {
            // instanceof match: resolve a String identifier to a Class first.
            try {
                if ( strBehaviorClass instanceof String )
                    strBehaviorClass = Class . forName ( ( String ) strBehaviorClass ) ;
                if ( ( ( Class < ? > ) strBehaviorClass ) . isAssignableFrom ( listener . getClass ( ) ) )
                    return listener ;
                else
                    // Recurse down the chain from the next listener.
                    return listener . getListener ( strBehaviorClass , bExactMatch ) ;
            } catch ( ClassNotFoundException ex ) {
                ex . printStackTrace ( ) ;
                return null ;
            }
        }
        // Exact match: compare by name (String) or by Class identity.
        if ( strBehaviorClass instanceof String ) {
            String strClass = listener . getClass ( ) . getName ( ) ;
            if ( ( ( String ) strBehaviorClass ) . indexOf ( '.' ) == - 1 ) {
                // If identifier is not fully qualified, strip the package from the class
                if ( strClass . lastIndexOf ( '.' ) != - 1 )
                    strClass = strClass . substring ( strClass . lastIndexOf ( '.' ) + 1 ) ;
            }
            if ( strClass . equalsIgnoreCase ( ( String ) strBehaviorClass ) )
                return listener ;
        } else if ( listener . getClass ( ) . equals ( strBehaviorClass ) )
            return listener ;
        // No match at this node; continue down the chain.
        return listener . getListener ( strBehaviorClass , bExactMatch ) ;
    }
}
public class PipelineActivityMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param pipelineActivity the activity to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each field binding
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall ( PipelineActivity pipelineActivity , ProtocolMarshaller protocolMarshaller ) {
        if ( pipelineActivity == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        // Generated marshalling code: each field is written with its binding; keep
        // the binding order as generated.
        try {
            protocolMarshaller . marshall ( pipelineActivity . getChannel ( ) , CHANNEL_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getLambda ( ) , LAMBDA_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getDatastore ( ) , DATASTORE_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getAddAttributes ( ) , ADDATTRIBUTES_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getRemoveAttributes ( ) , REMOVEATTRIBUTES_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getSelectAttributes ( ) , SELECTATTRIBUTES_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getFilter ( ) , FILTER_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getMath ( ) , MATH_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getDeviceRegistryEnrich ( ) , DEVICEREGISTRYENRICH_BINDING ) ;
            protocolMarshaller . marshall ( pipelineActivity . getDeviceShadowEnrich ( ) , DEVICESHADOWENRICH_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap everything in the SDK's client exception, preserving the cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class Address { /** * Appends the given address parts to this address . * This lets you build up addresses in a step - wise fashion . * @ param addressParts new address parts to add to the address . * @ return this address ( which now has the new address parts appended ) . */ public Address add ( String ... addressParts ) { } }
if ( addressParts != null ) { if ( ( addressParts . length % 2 ) != 0 ) { throw new IllegalArgumentException ( "address is incomplete: " + Arrays . toString ( addressParts ) ) ; } if ( addressParts . length > 0 ) { for ( int i = 0 ; i < addressParts . length ; i += 2 ) { addressNode . add ( addressParts [ i ] , addressParts [ i + 1 ] ) ; } } } return this ;
public class SimpleReadWriteLock {
    /**
     * Execute the provided callable in a write lock. Note: no nullable/non-nullable
     * can be assumed.
     *
     * @param aCallable Callable to be executed. May not be <code>null</code>.
     * @return The return value of the callable. May be <code>null</code>.
     * @throws EXTYPE If the callable throws the exception
     * @param <T> Return type
     * @param <EXTYPE> Exception type to be thrown
     */
    public < T , EXTYPE extends Exception > T writeLockedThrowing ( @ Nonnull final IThrowingSupplier < ? extends T , EXTYPE > aCallable ) throws EXTYPE {
        // Hold the write lock only for the duration of the supplier call; the
        // finally block guarantees release even if the supplier throws.
        writeLock ( ) . lock ( ) ;
        try {
            return aCallable . get ( ) ;
        } finally {
            writeLock ( ) . unlock ( ) ;
        }
    }
}
public class QualifiedName {
    /**
     * Returns a {@link QualifiedName} for {@code type}.
     *
     * @param type a top-level or member (nested) type; other nesting kinds
     *     (local, anonymous) are rejected
     * @throws IllegalArgumentException if the type's qualified name cannot be determined
     */
    public static QualifiedName of ( TypeElement type ) {
        switch ( type . getNestingKind ( ) ) {
            case TOP_LEVEL :
                // Top-level types are enclosed directly by their package.
                PackageElement pkg = ( PackageElement ) type . getEnclosingElement ( ) ;
                return QualifiedName . of ( pkg . getQualifiedName ( ) . toString ( ) , type . getSimpleName ( ) . toString ( ) ) ;
            case MEMBER :
                // Walk outward through enclosing types, collecting simple names
                // innermost-first, until the package is reached.
                List < String > reversedNames = new ArrayList < String > ( ) ;
                reversedNames . add ( type . getSimpleName ( ) . toString ( ) ) ;
                Element parent = type . getEnclosingElement ( ) ;
                while ( parent . getKind ( ) != ElementKind . PACKAGE ) {
                    reversedNames . add ( parent . getSimpleName ( ) . toString ( ) ) ;
                    parent = parent . getEnclosingElement ( ) ;
                }
                // Reverse so names read outermost-first.
                return new QualifiedName ( ( ( PackageElement ) parent ) . getQualifiedName ( ) . toString ( ) , ImmutableList . copyOf ( Lists . reverse ( reversedNames ) ) ) ;
            default :
                throw new IllegalArgumentException ( "Cannot determine qualified name of " + type ) ;
        }
    }
}
public class DeleteApplicationCloudWatchLoggingOptionRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param deleteApplicationCloudWatchLoggingOptionRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each field binding
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall ( DeleteApplicationCloudWatchLoggingOptionRequest deleteApplicationCloudWatchLoggingOptionRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( deleteApplicationCloudWatchLoggingOptionRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        // Generated marshalling code: keep the binding order as generated.
        try {
            protocolMarshaller . marshall ( deleteApplicationCloudWatchLoggingOptionRequest . getApplicationName ( ) , APPLICATIONNAME_BINDING ) ;
            protocolMarshaller . marshall ( deleteApplicationCloudWatchLoggingOptionRequest . getCurrentApplicationVersionId ( ) , CURRENTAPPLICATIONVERSIONID_BINDING ) ;
            protocolMarshaller . marshall ( deleteApplicationCloudWatchLoggingOptionRequest . getCloudWatchLoggingOptionId ( ) , CLOUDWATCHLOGGINGOPTIONID_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap everything in the SDK's client exception, preserving the cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class InterfaceDescriptor { /** * Returns an exception descriptor or { @ literal null } . */ @ Nullable public MessageDescriptor < ? extends Message > getExc ( ) { } }
return exc != null ? exc : ( base != null ? base . exc : null ) ;
public class AmazonElasticTranscoderClient { /** * When you create a job , Elastic Transcoder returns JSON data that includes the values that you specified plus * information about the job that is created . * If you have specified more than one output for your jobs ( for example , one output for the Kindle Fire and another * output for the Apple iPhone 4s ) , you currently must use the Elastic Transcoder API to list the jobs ( as opposed * to the AWS Console ) . * @ param createJobRequest * The < code > CreateJobRequest < / code > structure . * @ return Result of the CreateJob operation returned by the service . * @ throws ValidationException * One or more required parameter values were not provided in the request . * @ throws IncompatibleVersionException * @ throws ResourceNotFoundException * The requested resource does not exist or is not available . For example , the pipeline to which you ' re * trying to add a job doesn ' t exist or is still being created . * @ throws AccessDeniedException * General authentication failure . The request was not signed correctly . * @ throws LimitExceededException * Too many operations for a given AWS account . For example , the number of pipelines exceeds the maximum * allowed . * @ throws InternalServiceException * Elastic Transcoder encountered an unexpected exception while trying to fulfill the request . * @ sample AmazonElasticTranscoder . CreateJob */ @ Override public CreateJobResult createJob ( CreateJobRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateJob ( request ) ;
public class CallActivityMock { /** * On execution , the MockProcess will add the given VariableMap to the execution * @ param variables the variables to add * @ return self */ public CallActivityMock onExecutionAddVariables ( final VariableMap variables ) { } }
return this . onExecutionDo ( "addVariablesServiceMock_" + randomUUID ( ) , ( execution ) -> variables . forEach ( execution :: setVariable ) ) ;
public class Switch {
    /**
     * Initializes the control's scene-graph nodes: background, border, thumb and
     * shadow, and installs them in the root pane.
     */
    private void initGraphics ( ) {
        // NOTE(review): the outer condition fires when any current/pref dimension is
        // non-positive, yet the inner branch re-applies the pref size when both pref
        // dimensions are positive — looks redundant but is kept as-is; confirm intent.
        if ( Double . compare ( getPrefWidth ( ) , 0.0 ) <= 0 || Double . compare ( getPrefHeight ( ) , 0.0 ) <= 0 || Double . compare ( getWidth ( ) , 0.0 ) <= 0 || Double . compare ( getHeight ( ) , 0.0 ) <= 0 ) {
            if ( getPrefWidth ( ) > 0 && getPrefHeight ( ) > 0 ) {
                setPrefSize ( getPrefWidth ( ) , getPrefHeight ( ) ) ;
            } else {
                // Fall back to the control's built-in default size.
                setPrefSize ( PREFERRED_WIDTH , PREFERRED_HEIGHT ) ;
            }
        }
        getStyleClass ( ) . add ( "switch" ) ;
        // Soft drop shadow applied to the thumb below.
        shadow = new DropShadow ( BlurType . TWO_PASS_BOX , Color . rgb ( 0 , 0 , 0 , 0.65 ) , 3 , 0 , 0 , 0 ) ;
        switchBorder = new Rectangle ( ) ;
        switchBorder . setFill ( getForegroundColor ( ) ) ;
        switchBackground = new Rectangle ( ) ;
        switchBackground . setMouseTransparent ( true ) ;
        // Background color reflects the current on/off state.
        switchBackground . setFill ( isActive ( ) ? getActiveColor ( ) : getBackgroundColor ( ) ) ;
        thumb = new Circle ( ) ;
        thumb . setMouseTransparent ( true ) ;
        thumb . setFill ( getForegroundColor ( ) ) ;
        thumb . setEffect ( shadow ) ;
        // Assemble the node hierarchy; border below background below thumb.
        pane = new Pane ( switchBorder , switchBackground , thumb ) ;
        getChildren ( ) . setAll ( pane ) ;
    }
}
public class AllConnectConnectionHolder { /** * 清空服务列表 * @ return 带回收的服务列表 */ protected Map < ProviderInfo , ClientTransport > clearProviders ( ) { } }
providerLock . lock ( ) ; try { // 当前存活 + 重试的 HashMap < ProviderInfo , ClientTransport > all = new HashMap < ProviderInfo , ClientTransport > ( aliveConnections ) ; all . putAll ( subHealthConnections ) ; all . putAll ( retryConnections ) ; all . putAll ( uninitializedConnections ) ; subHealthConnections . clear ( ) ; aliveConnections . clear ( ) ; retryConnections . clear ( ) ; uninitializedConnections . clear ( ) ; return all ; } finally { providerLock . unlock ( ) ; }
public class OrderAwarePluginRegistry {
    /**
     * Creates a new {@link OrderAwarePluginRegistry} using the {@code #DEFAULT_COMPARATOR}.
     *
     * @return an empty, order-aware registry
     * @deprecated since 2.0, for removal in 2.1. Prefer {@link PluginRegistry#empty()}.
     */
    @ Deprecated public static < S , T extends Plugin < S > > OrderAwarePluginRegistry < T , S > create ( ) {
        // Kept only for backwards compatibility; simply delegates to empty().
        return empty ( ) ;
    }
}
public class TvdbParser { /** * Create a List from a delimited string * @ param input * @ param delim */ private static List < String > parseList ( String input , String delim ) { } }
List < String > result = new ArrayList < > ( ) ; StringTokenizer st = new StringTokenizer ( input , delim ) ; while ( st . hasMoreTokens ( ) ) { String token = st . nextToken ( ) . trim ( ) ; if ( token . length ( ) > 0 ) { result . add ( token ) ; } } return result ;
public class PropertiesUtil { /** * Like { @ link # loadAndGet ( File , String ) } but obtains the properties data via the classloader . * @ return the value of the key in question or null if no such key exists or an error occurred * loading the properties file . */ public static String loadAndGet ( String loaderPath , String key ) { } }
try { Properties props = ConfigUtil . loadProperties ( loaderPath ) ; return props . getProperty ( key ) ; } catch ( IOException ioe ) { return null ; }
public class StaticSelectEkstaziMojo { /** * Returns true if fork is disabled , i . e . , if we cannot set the * agent . */ private boolean isForkDisabled ( Plugin plugin ) throws MojoExecutionException { } }
String forkCountValue = extractParamValue ( plugin , FORK_COUNT_PARAM_NAME ) ; String forkModeValue = extractParamValue ( plugin , FORK_MODE_PARAM_NAME ) ; return ( forkCountValue != null && forkCountValue . equals ( "0" ) ) || ( forkModeValue != null && forkModeValue . equals ( "never" ) ) ;
public class Update {
    /**
     * One or more substitution tokens for attribute names in an expression.
     *
     * @param expressionAttributeNames One or more substitution tokens for attribute names in an expression.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Update withExpressionAttributeNames ( java . util . Map < String , String > expressionAttributeNames ) {
        // Fluent wrapper around the plain setter.
        setExpressionAttributeNames ( expressionAttributeNames ) ;
        return this ;
    }
}
public class GraphViz { /** * It will call the external dot program , and return the image in binary format . * @ param dot * Source of the graph ( in dot language ) . * @ param type * Type of the output image to be produced , e . g . : gif , dot , fig , pdf , ps , svg , png . * @ return The image of the graph in . gif format . * @ throws GraphVizException */ private byte [ ] get_img_stream ( File dot , String type ) throws GraphVizException { } }
File img ; byte [ ] img_stream = null ; try { img = File . createTempFile ( "graph_" , "." + type ) ; Runtime rt = Runtime . getRuntime ( ) ; // patch by Mike Chenault String [ ] args = { dotPath , "-T" + type , dot . getAbsolutePath ( ) , "-o" , img . getAbsolutePath ( ) } ; Process p = rt . exec ( args ) ; p . waitFor ( ) ; FileInputStream in = new FileInputStream ( img . getAbsolutePath ( ) ) ; img_stream = new byte [ in . available ( ) ] ; in . read ( img_stream ) ; // Close it if we need to if ( in != null ) { in . close ( ) ; } if ( ! img . delete ( ) ) { throw new GraphVizException ( "Warning: " + img . getAbsolutePath ( ) + " could not be deleted!" ) ; } } catch ( IOException ioe ) { throw new GraphVizException ( "Error: in I/O processing of tempfile in dir. Or in calling external command" , ioe ) ; } catch ( InterruptedException ie ) { throw new GraphVizException ( "Error: the execution of the external program was interrupted" , ie ) ; } return img_stream ;
public class OldNameCollector { public LexNameList defaultSSetExp ( SSetExp expression ) throws org . overture . ast . analysis . AnalysisException { } }
if ( expression instanceof ASetCompSetExp || expression instanceof ASetEnumSetExp ) { return expression . apply ( this ) ; } else { return new LexNameList ( ) ; }
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link StringOrRefType}{@code >}
     * for the GML {@code dataSource} element.
     *
     * @param value Java instance representing xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link StringOrRefType}{@code >}
     */
    @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "dataSource" ) public JAXBElement < StringOrRefType > createDataSource ( StringOrRefType value ) {
        // Generated JAXB factory method: wraps the value with the dataSource QName;
        // null scope means the element is declared at global (schema) level.
        return new JAXBElement < StringOrRefType > ( _DataSource_QNAME , StringOrRefType . class , null , value ) ;
    }
}
public class XMLResultsParser {
    /**
     * Parse the &lt;head&gt; element with the variables and metadata.
     *
     * @param base The base URI, initialized to the endpoint URL if known.
     * @param rdr The XML reader to parse information from.
     * @param cols A list to populate with the columns that may appear in the header.
     * @param md A list to populate with metadata that may appear in the header.
     * @throws XMLStreamException There was an error reading the XML stream.
     * @throws SparqlException The XML was not valid SPARQL results.
     */
    static private void parseHeader ( String base , XMLStreamReader rdr , List < String > cols , List < String > md ) throws XMLStreamException , SparqlException {
        logger . debug ( "xml:base is initially {}" , base ) ;
        // xml:base may be refined by attributes on the document and head elements.
        base = getBase ( base , rdr ) ;
        testOpen ( rdr , rdr . nextTag ( ) , HEAD , "Missing header from XML results" ) ;
        base = getBase ( base , rdr ) ;
        // Per the SPARQL XML results format, <variable> elements must all precede
        // any <link> metadata; endedVars enforces that ordering.
        boolean endedVars = false ;
        int eventType ;
        while ( ( eventType = rdr . nextTag ( ) ) != END_ELEMENT || ! nameIs ( rdr , HEAD ) ) {
            if ( eventType == START_ELEMENT ) {
                if ( nameIs ( rdr , VARIABLE ) ) {
                    if ( endedVars ) throw new SparqlException ( "Encountered a variable after header metadata" ) ;
                    String var = rdr . getAttributeValue ( null , "name" ) ;
                    if ( var != null ) cols . add ( var ) ;
                    else logger . warn ( "<variable> element without 'name' attribute" ) ;
                } else if ( nameIs ( rdr , LINK ) ) {
                    String b = getBase ( base , rdr ) ; // Copy to a new var since we're looping.
                    String href = rdr . getAttributeValue ( null , HREF ) ;
                    // Metadata links are resolved against the current base URI.
                    if ( href != null ) md . add ( resolve ( b , href ) ) ;
                    else logger . warn ( "<link> element without 'href' attribute" ) ;
                    endedVars = true ;
                }
            }
        }
        // ending on </head>. next() should be <results> or <boolean>
        testClose ( rdr , eventType , HEAD , "Unexpected element in header: " + rdr . getLocalName ( ) ) ;
    }
}
public class WebLocatorAbstractBuilder {
    /**
     * <p><b>Used for finding element process (to generate xpath address)</b></p>
     *
     * @param container parent containing element
     * @param <T> the element which calls this method
     * @return this element, cast to the caller's type for fluent chaining
     */
    @ SuppressWarnings ( "unchecked" ) public < T extends WebLocatorAbstractBuilder > T setContainer ( WebLocator container ) {
        pathBuilder . setContainer ( container ) ;
        // Unchecked cast is safe by convention: callers invoke this on their own subtype.
        return ( T ) this ;
    }
}
public class BaseMessagingEngineImpl {
    /**
     * Will return null for Liberty.
     *
     * @see com.ibm.ws.sib.admin.JsMessagingEngine#getMessageStore()
     */
    public Object getMessageStore ( ) {
        // Entry and exit are traced together because the body is a plain field read.
        // NOTE(review): the exit trace always reports null even though _messageStore
        // may be non-null — confirm this is intended.
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
            SibTr . entry ( tc , "getMessageStore" , this ) ;
            SibTr . exit ( tc , "getMessageStore" , null ) ;
        }
        return _messageStore ;
    }
}
public class GetCommand { /** * GET content of the resource . Can be return content of the file . The content * returns in the XML type . If version parameter is present , returns the * content of the version of the resource . * @ param session current session * @ param path resource path * @ param version version name * @ param baseURI base uri * @ param ranges ranges * @ return the instance of javax . ws . rs . core . Response */ public Response get ( Session session , String path , String version , String baseURI , List < Range > ranges , String ifModifiedSince , String ifNoneMatch , Map < MediaType , String > cacheControls ) { } }
if ( version == null ) { if ( path . indexOf ( "?version=" ) > 0 ) { version = path . substring ( path . indexOf ( "?version=" ) + "?version=" . length ( ) ) ; path = path . substring ( 0 , path . indexOf ( "?version=" ) ) ; } } InputStream istream = null ; try { Node node = ( Node ) session . getItem ( path ) ; WebDavNamespaceContext nsContext = new WebDavNamespaceContext ( session ) ; URI uri = new URI ( TextUtil . escape ( baseURI + node . getPath ( ) , '%' , true ) ) ; Resource resource ; if ( ResourceUtil . isFile ( node ) ) { HierarchicalProperty lastModifiedProperty ; String resourceEntityTag ; if ( version != null ) { VersionedResource versionedFile = new VersionedFileResource ( uri , node , nsContext ) ; resource = versionedFile . getVersionHistory ( ) . getVersion ( version ) ; lastModifiedProperty = resource . getProperty ( FileResource . GETLASTMODIFIED ) ; } else { resource = new FileResource ( uri , node , nsContext ) ; lastModifiedProperty = resource . getProperty ( FileResource . GETLASTMODIFIED ) ; } resourceEntityTag = ResourceUtil . generateEntityTag ( node , lastModifiedProperty . getValue ( ) ) ; // check before any other reads if ( ifNoneMatch != null ) { if ( "*" . equals ( ifNoneMatch ) ) { return Response . notModified ( ) . entity ( "Not Modified" ) . build ( ) ; } for ( String eTag : ifNoneMatch . split ( "," ) ) { if ( resourceEntityTag . equals ( eTag ) ) { return Response . notModified ( ) . entity ( "Not Modified" ) . build ( ) ; } } } else if ( ifModifiedSince != null ) { DateFormat dateFormat = new SimpleDateFormat ( WebDavConst . DateFormat . MODIFICATION , Locale . US ) ; Date lastModifiedDate = dateFormat . parse ( lastModifiedProperty . getValue ( ) ) ; dateFormat = new SimpleDateFormat ( WebDavConst . DateFormat . IF_MODIFIED_SINCE_PATTERN , Locale . US ) ; Date ifModifiedSinceDate = dateFormat . parse ( ifModifiedSince ) ; if ( ifModifiedSinceDate . getTime ( ) >= lastModifiedDate . getTime ( ) ) { return Response . 
notModified ( ) . entity ( "Not Modified" ) . build ( ) ; } } HierarchicalProperty contentLengthProperty = resource . getProperty ( FileResource . GETCONTENTLENGTH ) ; long contentLength = new Long ( contentLengthProperty . getValue ( ) ) ; // content length is not present if ( contentLength == 0 ) { istream = openStream ( resource , version != null ) ; return Response . ok ( ) . header ( ExtHttpHeaders . ACCEPT_RANGES , "bytes" ) . entity ( istream ) . build ( ) ; } HierarchicalProperty mimeTypeProperty = resource . getProperty ( FileResource . GETCONTENTTYPE ) ; String contentType = mimeTypeProperty . getValue ( ) ; // no ranges request if ( ranges . size ( ) == 0 ) { istream = openStream ( resource , version != null ) ; return Response . ok ( ) . header ( HttpHeaders . CONTENT_LENGTH , Long . toString ( contentLength ) ) . header ( ExtHttpHeaders . ACCEPT_RANGES , "bytes" ) . header ( ExtHttpHeaders . LAST_MODIFIED , lastModifiedProperty . getValue ( ) ) . header ( ExtHttpHeaders . ETAG , resourceEntityTag ) . header ( ExtHttpHeaders . CACHE_CONTROL , generateCacheControl ( cacheControls , contentType ) ) . entity ( istream ) . type ( contentType ) . build ( ) ; } // one range if ( ranges . size ( ) == 1 ) { Range range = ranges . get ( 0 ) ; if ( ! validateRange ( range , contentLength ) ) { return Response . status ( HTTPStatus . REQUESTED_RANGE_NOT_SATISFIABLE ) . header ( ExtHttpHeaders . CONTENTRANGE , "bytes */" + contentLength ) . build ( ) ; } long start = range . getStart ( ) ; long end = range . getEnd ( ) ; long returnedContentLength = ( end - start + 1 ) ; istream = openStream ( resource , version != null ) ; RangedInputStream rangedInputStream = new RangedInputStream ( istream , start , end ) ; return Response . status ( HTTPStatus . PARTIAL ) . header ( HttpHeaders . CONTENT_LENGTH , Long . toString ( returnedContentLength ) ) . header ( ExtHttpHeaders . ACCEPT_RANGES , "bytes" ) . header ( ExtHttpHeaders . LAST_MODIFIED , lastModifiedProperty . 
getValue ( ) ) . header ( ExtHttpHeaders . ETAG , resourceEntityTag ) . header ( ExtHttpHeaders . CONTENTRANGE , "bytes " + start + "-" + end + "/" + contentLength ) . entity ( rangedInputStream ) . type ( contentType ) . build ( ) ; } // multipart byte ranges as byte : 0-100,80-150,210-300 for ( int i = 0 ; i < ranges . size ( ) ; i ++ ) { Range range = ranges . get ( i ) ; if ( ! validateRange ( range , contentLength ) ) { return Response . status ( HTTPStatus . REQUESTED_RANGE_NOT_SATISFIABLE ) . header ( ExtHttpHeaders . CONTENTRANGE , "bytes */" + contentLength ) . build ( ) ; } ranges . set ( i , range ) ; } MultipartByterangesEntity mByterangesEntity = new MultipartByterangesEntity ( resource , ranges , contentType , contentLength ) ; return Response . status ( HTTPStatus . PARTIAL ) . header ( ExtHttpHeaders . ACCEPT_RANGES , "bytes" ) . header ( ExtHttpHeaders . LAST_MODIFIED , lastModifiedProperty . getValue ( ) ) . entity ( mByterangesEntity ) . header ( ExtHttpHeaders . ETAG , resourceEntityTag ) . type ( ExtHttpHeaders . MULTIPART_BYTERANGES + WebDavConst . BOUNDARY ) . build ( ) ; } else { // Collection processing ; resource = new CollectionResource ( uri , node , nsContext ) ; istream = ( ( CollectionResource ) resource ) . getContentAsStream ( baseURI ) ; XSLTStreamingOutput entity = new XSLTStreamingOutput ( "get.method.template" , new StreamSource ( istream ) , xsltParams ) ; return Response . ok ( entity , MediaType . TEXT_HTML ) . build ( ) ; } } catch ( PathNotFoundException exc ) { closeStream ( istream ) ; return Response . status ( HTTPStatus . NOT_FOUND ) . entity ( exc . getMessage ( ) ) . build ( ) ; } catch ( RepositoryException exc ) { closeStream ( istream ) ; LOG . error ( exc . getMessage ( ) , exc ) ; return Response . serverError ( ) . entity ( exc . getMessage ( ) ) . build ( ) ; } catch ( Exception exc ) { closeStream ( istream ) ; LOG . error ( exc . getMessage ( ) , exc ) ; return Response . serverError ( ) . entity ( exc . 
getMessage ( ) ) . build ( ) ; }
public class Content {
    /**
     * Parse a Content-Type / Accept header value. As entries are found, "types" and
     * "props" are called, which do different things depending on whether this is a
     * Content-Type or an Accept header: for Content-Type it builds a tree suitable
     * for comparison; for Accept it compares against the tree and builds an
     * acceptable-type list.
     *
     * Since this parse code runs on every incoming HTTP transaction, the
     * String.split implementation was replaced with integer indices over the byte
     * array, so only the necessary Strings are created (about 1/3 faster, less GC).
     *
     * @param code  callback target handed to types()/props()
     * @param cntnt raw header value, e.g. "text/html;q=0.9,application/json"
     * @return true if at least one acceptable type was found AND all of its
     *         properties matched
     */
    protected boolean parse(HttpCode<TRANS, ?> code, String cntnt) {
        // NOTE(review): getBytes() uses the platform default charset; header values
        // are presumably ASCII so indices into bytes and cntnt stay aligned — confirm.
        byte bytes[] = cntnt.getBytes();
        boolean contType = false, contProp = true;
        // c* indices walk comma-separated groups; s* indices walk the
        // semicolon-separated entries inside the current comma group.
        int cis, cie = -1, cend;
        int sis, sie, send;
        do {
            cis = cie + 1;
            cie = cntnt.indexOf(',', cis);
            // cend is the exclusive end of the current comma group.
            cend = cie < 0 ? bytes.length : cie;
            // Start SEMIS
            sie = cis - 1;
            // "me" holds the match for the first entry of the group; null until
            // the type has been recognized, then reused for its properties.
            Pair<String, Pair<HttpCode<TRANS, ?>, List<Pair<String, Object>>>> me = null;
            do {
                sis = sie + 1;
                sie = cntnt.indexOf(';', sis);
                // Clamp the semicolon end to the current comma group.
                send = sie > cend || sie < 0 ? cend : sie;
                if (me == null) {
                    String semi = new String(bytes, sis, send - sis);
                    // trans.checkpoint(semi);
                    // Look at first entity within comma group.
                    // Is this an acceptable Type?
                    me = types(code, semi);
                    if (me == null) {
                        // Not a recognized type: force the inner loop to exit and
                        // skip the remaining properties of this group.
                        sie = -1;
                    } else {
                        contType = true;
                    }
                } else {
                    // We've looped past the first semi; now process as properties.
                    // If there are additional elements (more entities within
                    // semicolons), apply Properties of form tag=value.
                    int eq = cntnt.indexOf('=', sis);
                    if (eq > sis && eq < send) {
                        String tag = new String(bytes, sis, eq - sis);
                        String value = new String(bytes, eq + 1, send - (eq + 1));
                        // trans.checkpoint("Prop " + tag + "=" + value);
                        boolean bool = props(me, tag, value);
                        if (!bool) {
                            contProp = false;
                        }
                    }
                } // End Property
            } while (sie <= cend && sie >= cis);
            // End SEMIS
        } while (cie >= 0);
        // For use in finds: true if a type was found AND all props matched.
        return contType && contProp;
    }
}
public class ObjectToFieldsSerializer { /** * / * ( non - Javadoc ) * @ see org . audit4j . core . ObjectSerializer # serialize ( java . util . List , java . lang . Object , java . lang . String , org . audit4j . core . annotation . DeIdentify ) */ @ Override public void serialize ( List < Field > auditFields , Object object , String objectName , DeIdentify deidentify ) { } }
visited . clear ( ) ; toFields ( auditFields , object , objectName , deidentify ) ;
public class RowScaledMatrix { /** * { @ inheritDoc } */ public DoubleVector getRowVector ( int row ) { } }
return new ScaledDoubleVector ( m . getRowVector ( row ) , scales . get ( row ) ) ;
public class RequestDelegator { /** * Checks whether the request should be delegated to some of the registered { @ link RequestDelegationService } s . */ public boolean tryDelegateRequest ( HttpServletRequest request , HttpServletResponse response , FilterChain filterChain ) { } }
for ( RequestDelegationService service : delegationServices ) { if ( canDelegate ( service , request ) ) { delegate ( service , request , response , filterChain ) ; return true ; } } return false ;
public class SQLMultiScopeRecoveryLog { /** * Creates the database table that is being used for the recovery * log . * @ exception SQLException thrown if a SQLException is * encountered when accessing the * Database . */ private void createDBTable ( Connection conn ) throws SQLException { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "createDBTable" , new java . lang . Object [ ] { conn , this } ) ; Statement createTableStmt = null ; PreparedStatement specStatement = null ; try { createTableStmt = conn . createStatement ( ) ; if ( _isOracle ) { String oracleFullTableName = _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix ; String oracleTableString = oracleTablePreString + oracleFullTableName + oracleTablePostString ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Create Oracle Table using: " + oracleTableString ) ; String oracleIndexString = indexPreString + _recoveryIndexName + _logIdentifierString + _recoveryTableNameSuffix + " ON " + oracleFullTableName + indexPostString ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Create Oracle Index using: " + oracleIndexString ) ; // Create the Oracle table createTableStmt . executeUpdate ( oracleTableString ) ; // Create index on the new table createTableStmt . executeUpdate ( oracleIndexString ) ; } else { String db2FullTableName = _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix ; String db2TableString = db2TablePreString + db2FullTableName + db2TablePostString ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Create DB2 Table using: " + db2TableString ) ; String db2IndexString = indexPreString + _recoveryIndexName + _logIdentifierString + _recoveryTableNameSuffix + " ON " + db2FullTableName + indexPostString ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Create DB2 Index using: " + db2IndexString ) ; // Create the DB2 table createTableStmt . executeUpdate ( db2TableString ) ; // Create index on the new table createTableStmt . executeUpdate ( db2IndexString ) ; } short serviceId = ( short ) _recoveryAgent . 
clientIdentifier ( ) ; String insertString = "INSERT INTO " + _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix + " (SERVER_NAME, SERVICE_ID, RU_ID, RUSECTION_ID, RUSECTION_DATA_INDEX, DATA)" + " VALUES (?,?,?,?,?,?)" ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Have created the table, insert special HA LOCKING row using - " + insertString ) ; specStatement = conn . prepareStatement ( insertString ) ; specStatement . setString ( 1 , _currentProcessServerName ) ; specStatement . setShort ( 2 , serviceId ) ; specStatement . setLong ( 3 , - 1 ) ; // NOTE RU _ ID SET TO - 1 specStatement . setLong ( 4 , 1 ) ; specStatement . setShort ( 5 , ( short ) 1 ) ; byte buf [ ] = new byte [ 2 ] ; specStatement . setBytes ( 6 , buf ) ; int ret = specStatement . executeUpdate ( ) ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Have inserted HA LOCKING ROW with return: " + ret ) ; } finally { if ( createTableStmt != null && ! createTableStmt . isClosed ( ) ) { createTableStmt . close ( ) ; } if ( specStatement != null && ! specStatement . isClosed ( ) ) { specStatement . close ( ) ; } } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "createDBTable" ) ;
public class InkscapeLoader { /** * Load a SVG document into a diagram * @ param ref * The reference in the classpath to load the diagram from * @ return The diagram loaded * @ throws SlickException * Indicates a failure to process the document */ public static Diagram load ( String ref ) throws SlickException { } }
return load ( ResourceLoader . getResourceAsStream ( ref ) , false ) ;
public class NonBlockingBufferedReader {
    /**
     * Reads a line of text. A line is considered to be terminated by any one of a
     * line feed ('\n'), a carriage return ('\r'), or a carriage return followed
     * immediately by a linefeed.
     *
     * Implementation notes: lines that fit entirely within the internal buffer are
     * returned without an intermediate StringBuilder; the builder is only allocated
     * when a line spans a buffer refill. A trailing '\r' at a buffer boundary sets
     * m_bSkipLF so that a following '\n' from the next fill is not reported as an
     * empty line.
     *
     * @return A {@link String} containing the contents of the line, not including
     *         any line-termination characters, or <code>null</code> if the end of
     *         the stream has been reached
     * @exception IOException If an I/O error occurs
     */
    @Nullable
    public String readLine() throws IOException {
        StringBuilder aSB = null;
        int nStartChar;
        _ensureOpen();
        // Snapshot the CR-pending flag: a '\n' left over from a previous
        // readLine's '\r' must be consumed silently below.
        boolean bOmitLF = m_bSkipLF;
        while (true) {
            // Refill the buffer when exhausted; a second exhausted check means EOF.
            if (m_nNextCharIndex >= m_nChars)
                _fill();
            if (m_nNextCharIndex >= m_nChars) {
                /* EOF */
                // Return any partially accumulated line before signalling EOF.
                if (StringHelper.hasText(aSB))
                    return aSB.toString();
                return null;
            }
            boolean bEOL = false;
            char cLast = 0;
            int nIndex;
            /* Skip a leftover '\n', if necessary */
            if (bOmitLF && m_aBuf[m_nNextCharIndex] == '\n')
                m_nNextCharIndex++;
            m_bSkipLF = false;
            bOmitLF = false;
            // Scan the buffered region for the next terminator.
            for (nIndex = m_nNextCharIndex; nIndex < m_nChars; nIndex++) {
                cLast = m_aBuf[nIndex];
                if (cLast == '\n' || cLast == '\r') {
                    bEOL = true;
                    break;
                }
            }
            nStartChar = m_nNextCharIndex;
            m_nNextCharIndex = nIndex;
            if (bEOL) {
                String sRet;
                if (aSB == null)
                    sRet = new String(m_aBuf, nStartChar, nIndex - nStartChar);
                else {
                    aSB.append(m_aBuf, nStartChar, nIndex - nStartChar);
                    sRet = aSB.toString();
                }
                // Consume the terminator; a '\r' may be half of a CRLF pair
                // whose '\n' arrives with the next fill.
                m_nNextCharIndex++;
                if (cLast == '\r')
                    m_bSkipLF = true;
                return sRet;
            }
            // No terminator in this buffer: accumulate and loop to refill.
            if (aSB == null)
                aSB = new StringBuilder(DEFAULT_EXPECTED_LINE_LENGTH);
            aSB.append(m_aBuf, nStartChar, nIndex - nStartChar);
        }
    }
}
public class MultiSchemaMultiTenantProcessEngineConfiguration { /** * Add a new { @ link DataSource } for a tenant , identified by the provided tenantId , to the engine . * This can be done after the engine has booted up . * Note that the tenant identifier must have been added to the { @ link TenantInfoHolder } * prior * * to calling this method . */ public void registerTenant ( String tenantId , DataSource dataSource ) { } }
( ( TenantAwareDataSource ) super . getDataSource ( ) ) . addDataSource ( tenantId , dataSource ) ; if ( booted ) { createTenantSchema ( tenantId ) ; createTenantAsyncJobExecutor ( tenantId ) ; tenantInfoHolder . setCurrentTenantId ( tenantId ) ; super . postProcessEngineInitialisation ( ) ; tenantInfoHolder . clearCurrentTenantId ( ) ; }
public class FastSet { /** * Ensures that the { @ link FastSet } can hold enough words . * @ param wordsRequired the minimum acceptable number of words . */ private void ensureCapacity ( int wordsRequired ) { } }
if ( words . length >= wordsRequired ) { return ; } int newLength = Math . max ( words . length << 1 , wordsRequired ) ; words = Arrays . copyOf ( words , newLength ) ;
public class WsocOutboundChain {
    /**
     * DS method to activate this component.
     * Best practice: this should be a protected method, not public or private.
     *
     * Activation order matters: SSL collaborators are activated first, then both
     * the plain and secure outbound chains are initialized against the channel
     * framework, and finally modified() applies the supplied configuration.
     *
     * @param properties Map containing service & config properties
     *                   populated/provided by config admin
     * @param context    the DS component context used to activate SSL collaborators
     */
    protected void activate(Map<String, Object> properties, ComponentContext context) {
        // SSL support must be ready before the secure chain is initialized.
        sslOptions.activate(context);
        sslFactoryProvider.activate(context);
        // Register both outbound chains (ws:// and wss://) with the channel framework.
        wsocChain.init(WS_CHAIN_NAME, chfw.getFramework());
        wsocSecureChain.init(WSS_CHAIN_NAME, chfw.getFramework());
        // Reuse the modified() path so initial activation and config updates
        // share the same property-handling logic.
        modified(properties);
    }
}
public class CommerceOrderNotePersistenceImpl { /** * Returns the commerce order note where companyId = & # 63 ; and externalReferenceCode = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param companyId the company ID * @ param externalReferenceCode the external reference code * @ return the matching commerce order note , or < code > null < / code > if a matching commerce order note could not be found */ @ Override public CommerceOrderNote fetchByC_ERC ( long companyId , String externalReferenceCode ) { } }
return fetchByC_ERC ( companyId , externalReferenceCode , true ) ;
public class ZealotKhala { /** * 生成between区间查询的SQL片段 ( 当某一个值为null时 , 会是大于等于或小于等于的情形 ) . * @ param field 数据库字段 * @ param startValue 开始值 * @ param endValue 结束值 * @ return ZealotKhala实例 */ public ZealotKhala between ( String field , Object startValue , Object endValue ) { } }
return this . doBetween ( ZealotConst . ONE_SPACE , field , startValue , endValue , true ) ;
public class Elements { /** * Get the attribute value for each of the matched elements . If an element does not have this attribute , no value is * included in the result set for that element . * @ param attributeKey the attribute name to return values for . You can add the { @ code abs : } prefix to the key to * get absolute URLs from relative URLs , e . g . : { @ code doc . select ( " a " ) . eachAttr ( " abs : href " ) } . * @ return a list of each element ' s attribute value for the attribute */ public List < String > eachAttr ( String attributeKey ) { } }
List < String > attrs = new ArrayList < > ( size ( ) ) ; for ( Element element : this ) { if ( element . hasAttr ( attributeKey ) ) attrs . add ( element . attr ( attributeKey ) ) ; } return attrs ;
public class CreateBackupPlanRequest { /** * To help organize your resources , you can assign your own metadata to the resources that you create . Each tag is a * key - value pair . The specified tags are assigned to all backups created with this plan . * @ param backupPlanTags * To help organize your resources , you can assign your own metadata to the resources that you create . Each * tag is a key - value pair . The specified tags are assigned to all backups created with this plan . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateBackupPlanRequest withBackupPlanTags ( java . util . Map < String , String > backupPlanTags ) { } }
setBackupPlanTags ( backupPlanTags ) ; return this ;
public class BundleDelegatingExtensionTracker {
    /**
     * <p>addWebApplicationFactory.</p>
     *
     * DS bind method for {@link org.ops4j.pax.wicket.api.WebApplicationFactory}
     * services. Registration and bundle re-evaluation happen under the tracker's
     * intrinsic lock so concurrent bind/unbind callbacks see a consistent state.
     *
     * @param webApplicationFactory a {@link org.ops4j.pax.wicket.api.WebApplicationFactory} object.
     * @param properties a {@link java.util.Map} object.
     * @since 3.0.5
     */
    @Reference(service = WebApplicationFactory.class,
               unbind = "removeWebApplicationFactory",
               updated = "modifiedWebApplicationFactory",
               cardinality = ReferenceCardinality.MULTIPLE,
               policy = ReferencePolicy.DYNAMIC)
    public void addWebApplicationFactory(WebApplicationFactory<?> webApplicationFactory, Map<String, ?> properties) {
        synchronized (this) {
            // Record the new factory first, then re-check all tracked bundles
            // against the updated set of registered services.
            addServicesForServiceReference(webApplicationFactory, properties);
            reevaluateAllBundles(webApplicationFactory);
        }
    }
}
public class BusHub { /** * Replies the distance between the given point and this bus hub . * < p > The distance to the hub is the distance to the nearest bus stop * located in the hub . * @ param x x coordinate . * @ param y y coordinate . * @ return the distance to this bus hub */ @ Pure public double distance ( double x , double y ) { } }
double dist = Double . POSITIVE_INFINITY ; if ( isValidPrimitive ( ) ) { for ( final BusStop stop : this . busStops ) { final double d = stop . distance ( x , y ) ; if ( ! Double . isNaN ( d ) && d < dist ) { dist = d ; } } } return Double . isInfinite ( dist ) ? Double . NaN : dist ;
public class LineBuffer {
    /**
     * Update a number of lines starting at a specific offset, redrawing only the
     * lines whose content actually changed and restoring the cursor to the end of
     * the last buffered line afterwards.
     *
     * @param offset The line offset (0-indexed to count).
     * @param lines The new line content.
     * @throws IllegalArgumentException  if {@code lines} is empty
     * @throws IndexOutOfBoundsException if {@code offset} is outside the buffer
     */
    public void update(int offset, List<String> lines) {
        if (lines.isEmpty()) {
            throw new IllegalArgumentException("Empty line set");
        }
        if (offset >= count() || offset < 0) {
            throw new IndexOutOfBoundsException("Index: " + offset + ", Size: " + count());
        }
        // "up" tracks how many rows the cursor sits below the line being drawn;
        // it is decremented each time we advance a row.
        int up = count() - offset - 1;
        // NOTE(review): buffer.set(offset + i, ...) assumes offset + lines.size()
        // does not exceed the buffer size — confirm callers guarantee this.
        for (int i = 0; i < lines.size(); ++i) {
            String line = lines.get(i);
            if (i == 0) {
                // Move to column 0 of the first target row.
                terminal.print("\r");
                if (up > 0) {
                    terminal.print(cursorUp(up));
                }
            } else {
                // Subsequent rows: newline advances one row, shrinking "up".
                terminal.println();
                --up;
            }
            String old = offset + i < buffer.size() ? buffer.get(offset + i) : null;
            buffer.set(offset + i, line);
            if (line.equals(old) && !line.isEmpty()) {
                // No change.
                continue;
            }
            // Clear the row before printing the replacement content.
            terminal.print(CURSOR_ERASE);
            terminal.print(line);
        }
        // Move the cursor back to the end of the last line.
        if (up > 0) {
            terminal.format("\r%s%s", cursorDown(up), cursorRight(printableWidth(lastLine())));
        }
    }
}
public class Barrier {
    /**
     * Wait until all nodes leave the barrier. This node first deletes its own
     * barrier znode, then blocks until the barrier directory is empty; the watch
     * set by getChildren wakes the waiter (via the shared mutex) whenever the
     * children change.
     *
     * @return true when all required nodes have left the barrier, else wait.
     * @throws KeeperException If a keeper exception occurred
     * @throws InterruptedException If interrupted
     */
    public boolean leave() throws KeeperException, InterruptedException {
        // Remove this node's entry; version 0 is the expected initial znode version.
        zooKeeper.delete(rootPath + "/" + name, 0);
        while (true) {
            synchronized (mutex) {
                // "true" re-arms the watch on every check; the ZooKeeper event
                // callback is expected to notify this mutex.
                List<String> list = zooKeeper.getChildren(rootPath, true);
                if (list.size() > 0) {
                    mutex.wait();
                } else {
                    return true;
                }
            }
        }
    }
}
public class TCPProxyResponse {
    /**
     * If SSL tunneling is enabled set the forward proxy connect buffers on the
     * TCPWriteRequestContext.
     *
     * Forward proxy connect buffers contain ->
     *   "CONNECT <proxy.TargetHostname:proxy.TargetPort> HTTP/1.0CRLF"
     *   "Proxy-authorization: basic <base64 encoded (username:password)>CRLF"
     *   "CRLF"
     * Note: Proxy-authorization is an optional header.
     *
     * @param config channel configuration holding the target host:port bytes and
     *               optional pre-encoded authorization bytes
     * @return boolean true if the forward proxy buffers were set,
     *         false if otherwise (missing target host fails the connect)
     */
    protected boolean setForwardProxyBuffers(Map<Object, Object> config) {
        byte[] target = (byte[]) config.get(PROXY_TARGET_HOST_PORT);
        if (null == target) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Proxy tunnel attempt missing target host");
            }
            // Without a target there is nothing to CONNECT to: fail the link.
            connLink.connectFailed(new IOException("Missing forward proxy target host"));
            return false;
        }
        byte[] authInfo = (byte[]) config.get(PROXY_TARGET_USER_PASS);
        // We're always going to have about 20 to 50 bytes of information, plus
        // the target host:port, plus the optional authorization info data.
        int size = 100 + target.length + ((null != authInfo) ? authInfo.length : 0);
        WsByteBuffer buffer = ChannelFrameworkFactory.getBufferManager().allocate(size);
        // Assemble: CONNECT host:port HTTP-version CRLF [auth header CRLF] CRLF
        buffer.put(PROXY_CONNECT);
        buffer.put(target);
        buffer.put(PROXY_HTTPVERSION);
        // Has authentication info been provided?
        if (null != authInfo) {
            buffer.put(PROXY_AUTHORIZATION);
            buffer.put(authInfo);
            buffer.put(PROXY_CRLF);
        }
        buffer.put(PROXY_CRLF);
        buffer.flip();
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            // Dump the assembled request, then rewind so the write sends it all.
            byte[] output = new byte[buffer.limit()];
            buffer.get(output);
            Tr.debug(tc, "ForwardProxyBuffers[" + new String(output) + "]");
            buffer.position(0);
        }
        connLink.getWriteInterface().setBuffer(buffer);
        return true;
    }
}
public class BackgroundCachingHostResolver { /** * / * package private */ void updateHost ( ) { } }
try { _cachedLocalHostName . set ( _wrappedHostResolver . getLocalHostName ( ) ) ; } catch ( final UnknownHostException e ) { LOGGER . warn ( "Unable to resolve host" , e ) ; }
public class Types { /** * Returns type information for Flink value types ( classes that implement * { @ link org . apache . flink . types . Value } ) . Built - in value types do not support null values ( except * for { @ link org . apache . flink . types . StringValue } ) . * < p > Value types describe their serialization and deserialization manually . Instead of going * through a general purpose serialization framework . A value type is reasonable when general purpose * serialization would be highly inefficient . The wrapped value can be altered , allowing programmers to * reuse objects and take pressure off the garbage collector . * < p > Flink provides built - in value types for all Java primitive types ( such as * { @ link org . apache . flink . types . BooleanValue } , { @ link org . apache . flink . types . IntValue } ) as well * as { @ link org . apache . flink . types . StringValue } , { @ link org . apache . flink . types . NullValue } , * { @ link org . apache . flink . types . ListValue } , and { @ link org . apache . flink . types . MapValue } . * @ param valueType class that implements { @ link org . apache . flink . types . Value } */ public static < V extends Value > TypeInformation < V > VALUE ( Class < V > valueType ) { } }
return new ValueTypeInfo < > ( valueType ) ;
public class PersistenceController {
    /**
     * Executes a transaction callback as an observable.
     *
     * The store transaction runs when the observable is subscribed to; the
     * transaction emits items through the provided emitter and onCompleted() is
     * guaranteed in a finally block even if the transaction throws. Emissions are
     * buffered under backpressure.
     *
     * @param transaction Store transaction.
     * @param <T> Store class.
     * @return Observable executing the given store transaction.
     */
    private <T> Observable<T> asObservable(Executor<T> transaction) {
        return Observable.create(emitter -> storeFactory.execute(new StoreTransaction<ChatStore>() {
            @Override
            protected void execute(ChatStore store) {
                try {
                    // Hand the emitter to the transaction so it can push results.
                    transaction.execute(store, emitter);
                } finally {
                    // Always complete so subscribers are never left hanging.
                    emitter.onCompleted();
                }
            }
        }), Emitter.BackpressureMode.BUFFER);
    }
}
public class XQuery { /** * Parses an XML source and returns an { @ link XQuery } object representing the root of * the document . * @ param r * { @ link Reader } providing the XML document * @ return { @ link XQuery } representing the root of the parsed document * @ throws IOException * if the XML source could not be read or parsed for any reason */ public static @ Nonnull XQuery parse ( @ WillClose Reader r ) throws IOException { } }
return parse ( new InputSource ( r ) ) ;
public class StandardBullhornData { /** * { @ inheritDoc } */ @ Override public Map < String , Object > getSettings ( Set < String > settingSet ) { } }
return this . handleGetSettingsData ( settingSet ) ;
public class Tensor { /** * Add the addend to each value . */ public void add ( double addend ) { } }
for ( int c = 0 ; c < this . values . length ; c ++ ) { addValue ( c , addend ) ; }
public class Body { /** * Answer a UTF - 8 { @ code String } from { @ code body } bytes . * @ param body the byte [ ] * @ return String */ private static String bytesToUTF8 ( final byte [ ] body ) { } }
final String encoded = new String ( body , Charset . forName ( "UTF-8" ) ) ; return encoded ;
public class Classes { /** * Set instance or class field value . Try to set field value throwing exception if field not found . If * < code > object < / code > argument is a class , named field should be static ; otherwise exception is thrown . * This setter tries to adapt < code > value < / code > to field type : if < code > value < / code > is a string and field type is * not , delegates { @ link Converter # asObject ( String , Class ) } . Anyway , if < code > value < / code > is not a string it should * be assignable to field type otherwise bug error is thrown . * @ param object instance or class to set field value to , * @ param fieldName field name , * @ param value field value . * @ throws IllegalArgumentException if < code > object < / code > or < code > fieldName < / code > argument is null . * @ throws NoSuchBeingException if field not found . * @ throws BugError if object is null and field is not static or if object is not null and field is static . * @ throws BugError if value is not assignable to field type . */ public static void setFieldValue ( Object object , String fieldName , Object value ) { } }
Params . notNull ( object , "Object instance or class" ) ; Params . notNull ( fieldName , "Field name" ) ; if ( object instanceof Class < ? > ) { setFieldValue ( null , ( Class < ? > ) object , fieldName , value ) ; } else { setFieldValue ( object , object . getClass ( ) , fieldName , value ) ; }
public class JfifUtil { /** * Reads the content of the input stream until specified marker is found . Marker will be * consumed and the input stream will be positioned after the specified marker . * @ param is the input stream to read bytes from * @ param markerToFind the marker we are looking for * @ return boolean : whether or not we found the expected marker from input stream . */ public static boolean moveToMarker ( InputStream is , int markerToFind ) throws IOException { } }
Preconditions . checkNotNull ( is ) ; // ISO / IEC 10918-1:1993 ( E ) while ( StreamProcessor . readPackedInt ( is , 1 , false ) == MARKER_FIRST_BYTE ) { int marker = MARKER_FIRST_BYTE ; while ( marker == MARKER_FIRST_BYTE ) { marker = StreamProcessor . readPackedInt ( is , 1 , false ) ; } if ( markerToFind == MARKER_SOFn && isSOFn ( marker ) ) { return true ; } if ( marker == markerToFind ) { return true ; } // Check if the marker is SOI or TEM . These two don ' t have length field , so we skip it . if ( marker == MARKER_SOI || marker == MARKER_TEM ) { continue ; } // Check if the marker is EOI or SOS . We will stop reading since metadata markers don ' t // come after these two markers . if ( marker == MARKER_EOI || marker == MARKER_SOS ) { return false ; } // read block length // subtract 2 as length contain SIZE field we just read int length = StreamProcessor . readPackedInt ( is , 2 , false ) - 2 ; // Skip other markers . is . skip ( length ) ; } return false ;
public class ZookeeperRegistry { /** * recover data when connect with zk again . */ protected void recoverRegistryData ( ) { } }
for ( ProviderConfig providerConfig : providerUrls . keySet ( ) ) { registerProviderUrls ( providerConfig ) ; } for ( ConsumerConfig consumerConfig : consumerUrls . keySet ( ) ) { subscribeConsumerUrls ( consumerConfig ) ; }
public class NonBlockingStringWriter { /** * Write a portion of an array of characters . * @ param aBuf * Array of characters * @ param nOfs * Offset from which to start writing characters * @ param nLen * Number of characters to write */ @ Override public void write ( @ Nonnull final char [ ] aBuf , @ Nonnegative final int nOfs , @ Nonnegative final int nLen ) { } }
ValueEnforcer . isArrayOfsLen ( aBuf , nOfs , nLen ) ; if ( nLen > 0 ) m_aSB . append ( aBuf , nOfs , nLen ) ;
public class IPv6Framer {
    /**
     * {@inheritDoc}
     *
     * Frames the payload of the given MAC-layer packet as an IPv6 packet:
     * reads the 40-byte fixed header, validates the version nibble, walks the
     * chain of extension headers, and slices out the packet data.
     */
    @Override
    public IPv6Packet frame(final MACPacket parent, final Buffer payload) throws IOException {
        if (parent == null) {
            throw new IllegalArgumentException("The parent frame cannot be null");
        }
        // Consume the 40-byte fixed IPv6 header from the payload.
        final Buffer fixedHeader = payload.readBytes(IPv6PacketImpl.FIXED_HEADER_LENGTH);
        // Byte 0: high nibble is the IP version; must be 6 for IPv6.
        final int version = (fixedHeader.getByte(0) & 0xF0) >> 4;
        if (version != IPv6Packet.VERSION_IDENTIFIER) {
            throw new FramingException(String.format("Invalid IPv6 version: %d", version), Protocol.IPv6);
        }
        // Bytes 4-5: Payload Length field (length of everything after the fixed header).
        final int payloadLength = fixedHeader.getUnsignedShort(4);
        // Byte 6: Next Header — either an upper-layer protocol or an extension header type.
        int nextHeader = fixedHeader.getByte(6);
        // Accumulates raw extension-header bytes; 400 is a working capacity,
        // presumably sized generously for typical extension chains — TODO confirm.
        final Buffer extensionHeadersBuffer = Buffers.createBuffer(400);
        // Walk the extension-header chain until nextHeader is an upper-layer protocol.
        while (nextHeader == IPv6Packet.EXTENSION_HOP_BY_HOP
                || nextHeader == IPv6Packet.EXTENSION_DESTINATION_OPTIONS
                || nextHeader == IPv6Packet.EXTENSION_ROUTING
                || nextHeader == IPv6Packet.EXTENSION_FRAGMENT
                || nextHeader == IPv6Packet.EXTENSION_AH
                || nextHeader == IPv6Packet.EXTENSION_ESP
                || nextHeader == IPv6Packet.EXTENSION_MOBILITY) {
            // accumulateNextHeader appends the header bytes and returns the following Next Header value.
            nextHeader = accumulateNextHeader(nextHeader, payload, extensionHeadersBuffer);
        }
        // TODO: extract actual PayloadLength from Hop-by-Hop extension header, if present
        // Trim off any padding from the lower layer, e.g. Ethernet padding for small packets.
        // If the captured frame was truncated, then use the truncated size for the data buffer,
        // instead of what the IPv6 header says its length should be.
        final int totalLength = IPv6PacketImpl.FIXED_HEADER_LENGTH
                + extensionHeadersBuffer.getWriterIndex() + payloadLength;
        final Buffer data = payload.slice(Math.min(totalLength, payload.capacity()));
        return new IPv6PacketImpl(parent, Buffers.wrap(fixedHeader, extensionHeadersBuffer), nextHeader, data);
    }
}
public class LongActionManager { /** * Untested method */ public synchronized void shutdownGracefully ( ) { } }
running = false ; Iterator < LongAction < ? > > iterator = actions . values ( ) . iterator ( ) ; while ( iterator . hasNext ( ) ) { LongAction < ? > longAction = iterator . next ( ) ; longAction . waitForCompletion ( ) ; }
public class FnJodaTimeUtils { /** * The input { @ link Date } is converted into a { @ link DateTime } with the given * { @ link Chronology } * @ param chronology { @ link Chronology } to be used * @ return the { @ link DateTime } created from the input and arguments */ public static final < T extends Date > Function < T , DateTime > dateToDateTime ( Chronology chronology ) { } }
return FnDateTime . dateToDateTime ( chronology ) ;
public class LocalConnection { /** * Closes the connection . */ private void doClose ( ) { } }
open = false ; connections . remove ( this ) ; for ( Map . Entry < Long , ContextualFuture > entry : futures . entrySet ( ) ) { ContextualFuture future = entry . getValue ( ) ; try { future . context . executor ( ) . execute ( ( ) -> future . completeExceptionally ( new ConnectException ( "connection closed" ) ) ) ; } catch ( RejectedExecutionException e ) { } } futures . clear ( ) ; for ( Consumer < Connection > closeListener : closeListeners ) { try { context . executor ( ) . execute ( ( ) -> closeListener . accept ( this ) ) ; } catch ( RejectedExecutionException e ) { } }
public class ZookeeperOverrideObserver { /** * 接口配置删除子节点Data * @ param config 接口配置 * @ param overridePath 覆盖Path * @ param data 子节点Data * @ param registerConfig 注册配置 * @ throws Exception 转换配置异常 */ public void removeConfig ( AbstractInterfaceConfig config , String overridePath , ChildData data , AbstractInterfaceConfig registerConfig ) throws Exception { } }
if ( data == null ) { if ( LOGGER . isInfoEnabled ( config . getAppName ( ) ) ) { LOGGER . infoWithApp ( config . getAppName ( ) , "Receive data is null" ) ; } } else if ( registerConfig == null ) { if ( LOGGER . isInfoEnabled ( config . getAppName ( ) ) ) { LOGGER . infoWithApp ( config . getAppName ( ) , "Register config is null" ) ; } } else { if ( LOGGER . isInfoEnabled ( config . getAppName ( ) ) ) { LOGGER . infoWithApp ( config . getAppName ( ) , "Receive data: path=[" + data . getPath ( ) + "]" + ", data=[" + StringSerializer . decode ( data . getData ( ) ) + "]" + ", stat=[" + data . getStat ( ) + "]" ) ; } notifyListeners ( config , overridePath , data , true , registerConfig ) ; }
public class FavoriteResourcesImpl { /** * Deletes a list of favorites ( all of the same type ) * It mirrors to the following Smartsheet REST API method : DELETE / favorites * Exceptions : * IllegalArgumentException : if any argument is null * InvalidRequestException : if there is any problem with the REST API request * AuthorizationException : if there is any problem with the REST API authorization ( access token ) * ResourceNotFoundException : if the resource can not be found * ServiceUnavailableException : if the REST API service is not available ( possibly due to rate limiting ) * SmartsheetRestException : if there is any other REST API related error occurred during the operation * SmartsheetException : if there is any other error occurred during the operation * @ param favoriteType the favorite type * @ param objectIds a single Favorite object or an array of Favorite objects * @ throws SmartsheetException the smartsheet exception */ public void removeFavorites ( FavoriteType favoriteType , Set < Long > objectIds ) throws SmartsheetException { } }
String path = "favorites/" + favoriteType . toString ( ) ; HashMap < String , Object > parameters = new HashMap < String , Object > ( ) ; if ( objectIds != null ) { parameters . put ( "objectIds" , QueryUtil . generateCommaSeparatedList ( objectIds ) ) ; } path += QueryUtil . generateUrl ( null , parameters ) ; this . deleteResource ( path , Favorite . class ) ;
public class AdGroupChangeData { /** * Gets the adGroupChangeStatus value for this AdGroupChangeData . * @ return adGroupChangeStatus * Whether or not the fields of this adgroup have changed , for * example the AdGroup name . Changes * to the Ads and Criteria are enumerated in their respective * lists and will not be reflected in * this status . */ public com . google . api . ads . adwords . axis . v201809 . ch . ChangeStatus getAdGroupChangeStatus ( ) { } }
return adGroupChangeStatus ;
public class ProctorUtils {
    /**
     * Builds a {@link RuleEvaluator} with an empty set of test constants.
     *
     * TODO(pwp): add some test constants?
     */
    @Nonnull
    private static RuleEvaluator makeRuleEvaluator(final ExpressionFactory expressionFactory,
                                                   final FunctionMapper functionMapper) {
        // No test constants are defined yet, so evaluate rules against an empty context.
        return new RuleEvaluator(expressionFactory, functionMapper, Collections.<String, Object>emptyMap());
    }
}
public class ViewStateReader12 { /** * { @ inheritDoc } */ @ Override protected Props getProps ( Var2Data varData ) throws IOException { } }
Props props = null ; byte [ ] propsData = varData . getByteArray ( PROPS_ID , PROPS_TYPE ) ; if ( propsData != null ) { props = new Props12 ( new ByteArrayInputStream ( propsData ) ) ; // System . out . println ( props ) ; } return ( props ) ;
public class ObjectMapUtils {
    /**
     * Returns the constants of the given enum class.
     *
     * <p>Prefers {@link Class#getEnumConstants()}, which clones an internally
     * cached copy of the {@code values()} array (addressing the old
     * "TODO cache" note) and avoids the reflective method lookup. For classes
     * where {@code getEnumConstants()} returns {@code null} (i.e. non-enum
     * classes), falls back to the reflective {@code values()} invocation so the
     * original failure mode ({@link NoSuchMethodException}) is preserved.
     *
     * @param enumClass the enum type whose constants should be returned
     * @return the enum constants of {@code enumClass}
     */
    private static Enum<?>[] _enumValues(Class<?> enumClass)
            throws SecurityException, NoSuchMethodException, IllegalArgumentException,
                   IllegalAccessException, InvocationTargetException {
        final Object[] constants = enumClass.getEnumConstants();
        if (constants != null) {
            // Runtime array type is the concrete enum's array type, so this cast is safe.
            return (Enum<?>[]) constants;
        }
        // Fallback: non-enum classes end up here and fail with
        // NoSuchMethodException, exactly as before.
        Method m = enumClass.getMethod("values");
        return (Enum<?>[]) m.invoke((Object) null);
    }
}
public class PushSubscription { /** * Create a { @ link PushSubscription } instance containing only an id * @ param str the ID obtained from DataSift for creating a push subscription * @ return an instance which can be used by the client */ public static PushSubscription fromString ( String str ) { } }
if ( str == null || str . isEmpty ( ) ) { throw new IllegalArgumentException ( "Cannot create a stream from an empty or null string" ) ; } PushSubscription stream = new PushSubscription ( ) ; stream . id = str ; return stream ;
public class SipParser { /** * Expects a token , which according to RFC3261 section 25.1 Basic Rules is : * token = 1 * ( alphanum / " - " / " . " / " ! " / " % " / " * " / " _ " / " + " / " ` " / " ' " * @ param buffer * @ return the buffer containing the expected token * @ throws IOException * @ throws IndexOutOfBoundsException * @ throws SipParseException * in case there is no token */ public static Buffer expectToken ( final Buffer buffer ) throws IndexOutOfBoundsException , IOException , SipParseException { } }
final Buffer token = consumeToken ( buffer ) ; if ( token == null ) { throw new SipParseException ( buffer . getReaderIndex ( ) , "Expected TOKEN" ) ; } return token ;
public class ChainedProperty { /** * Returns true if the property at the given index should be treated as an * outer join . Index zero is the prime property . * @ param index valid range is 0 to chainCount * @ since 1.2 */ public boolean isOuterJoin ( int index ) throws IndexOutOfBoundsException { } }
if ( index < 0 ) { throw new IndexOutOfBoundsException ( ) ; } if ( mOuterJoin == null ) { if ( index > getChainCount ( ) ) { throw new IndexOutOfBoundsException ( ) ; } return false ; } return mOuterJoin [ index ] ;