signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class MAPServiceLsmImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . map . MAPServiceBaseImpl # createNewDialogIncoming
* ( org . restcomm . protocols . ss7 . map . api . MAPApplicationContext , org . restcomm . protocols . ss7 . tcap . api . tc . dialog . Dialog ) */
@ Override protected MAPDialogImpl createNewDialogIncoming ( MAPApplicationContext appCntx , Dialog tcapDialog ) { } } | return new MAPDialogLsmImpl ( appCntx , tcapDialog , this . mapProviderImpl , this , null , null ) ; |
public class Reflect {
    /**
     * Call the method reflectively, forcing it accessible first.
     *
     * @param instance   the instance to call on (may be {@code null} for static methods)
     * @param type       the class declaring the method
     * @param methodName the name of the method
     * @param types      the types of the parameters
     * @param args       the values of the arguments
     */
    public static void callProc(Object instance, Class<?> type, String methodName, Class<?>[] types, Object... args) {
        try {
            final Method target = type.getDeclaredMethod(methodName, types);
            target.setAccessible(true);
            target.invoke(instance, args);
        } catch (Exception cause) {
            // Preserve the original contract: any reflective failure surfaces as an Error.
            throw new Error(cause);
        }
    }
}
/**
 * <pre>
 * Performs generic data validation for the operation to be performed
 * </pre>
 *
 * @param operationType the operation being validated (e.g. add/modify)
 * @throws Exception if any field fails validation
 */
protected void validate(String operationType) throws Exception {
    // Run the superclass validations first, then validate this resource's own fields.
    super.validate(operationType);
    MPSString response_validator = new MPSString();
    response_validator.validate(operationType, response, "\"response\"");
    MPSIPAddress br_ip_address_validator = new MPSIPAddress();
    br_ip_address_validator.validate(operationType, br_ip_address, "\"br_ip_address\"");
}
public class Bytes { /** * Convert a short value to a byte array of { @ link # SIZEOF _ SHORT } bytes long .
* @ param val value
* @ return the byte array */
public static byte [ ] toBytes ( short val ) { } } | byte [ ] b = new byte [ SIZEOF_SHORT ] ; b [ 1 ] = ( byte ) val ; val >>= 8 ; b [ 0 ] = ( byte ) val ; return b ; |
public class UserExecutorConfiguration { /** * Construct a { @ link UserExecutorConfiguration } for the given { @ link io . micronaut . scheduling . executor . ExecutorType } .
* @ param type The type
* @ param num The number of threads for { @ link io . micronaut . scheduling . executor . ExecutorType # FIXED } or the parallelism for
* { @ link io . micronaut . scheduling . executor . ExecutorType # WORK _ STEALING } or the core pool size for { @ link io . micronaut . scheduling . executor . ExecutorType # SCHEDULED }
* @ return The configuration */
public static UserExecutorConfiguration of ( ExecutorType type , int num ) { } } | ArgumentUtils . check ( "type" , type ) . notNull ( ) ; UserExecutorConfiguration configuration = of ( type ) ; configuration . type = type ; switch ( type ) { case FIXED : configuration . nThreads = num ; break ; case SCHEDULED : configuration . corePoolSize = num ; break ; case WORK_STEALING : configuration . parallelism = num ; break ; default : } return configuration ; |
public class FormulaFactory { /** * Creates a new disjunction from an array of formulas .
* @ param operands the list of formulas
* @ return a new disjunction */
public Formula or ( final Formula ... operands ) { } } | final LinkedHashSet < Formula > ops = new LinkedHashSet < > ( operands . length ) ; Collections . addAll ( ops , operands ) ; return this . constructOr ( ops ) ; |
/**
 * @param element the element to look for
 * @return true, if this bucket contains the given element
 */
public boolean contains(Data element) {
    boolean contains = false;
    // One fixed-size slot buffer, reused while scanning every memory chunk.
    byte[] dst = new byte[gp.getElementSize()];
    for (int m = 0; m < memory.length; m++) {
        ByteBuffer bb = ByteBuffer.wrap(memory[m]);
        if (m == memory.length - 1) {
            // The last chunk is only partially filled; do not read past the write position.
            bb.limit(position_in_chunk);
        }
        while (bb.remaining() > 0) {
            bb.get(dst);
            // NOTE(review): '> 0' treats a positive comparison result as a match;
            // an equality check would normally be '== 0'. Confirm the contract of
            // KeyUtils.compareKey before relying on this.
            if (KeyUtils.compareKey(element.toByteBuffer().array(), dst) > 0) {
                contains = true;
                break;
            }
        }
        if (contains) {
            break;
        }
    }
    return contains;
}
public class TargetStreamSetControl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPPtoPInboundReceiverControllable # requestFlushAtSource ( byte ) */
public void requestFlushAtSource ( boolean indoubtDiscard ) throws SIMPRuntimeOperationFailedException , SIMPControllableNotFoundException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "requestFlushAtSource" , new Boolean ( indoubtDiscard ) ) ; assertValidControllable ( ) ; SIBUuid8 source = streamSet . getRemoteMEUuid ( ) ; SIBUuid12 destID = streamSet . getDestUuid ( ) ; SIBUuid8 busID = streamSet . getBusUuid ( ) ; try { // TODO pass through the indoubtDiscard flag
tsm . requestFlushAtSource ( source , destID , busID , streamID , indoubtDiscard ) ; } catch ( SIException e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.runtime.TargetStreamSetControl.requestFlushAtSource" , "1:152:1.16" , this ) ; SIMPRuntimeOperationFailedException finalE = new SIMPRuntimeOperationFailedException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0003" , new Object [ ] { "TargetStreamSetControl.requestFlushAtSource" , "1:160:1.16" , e , streamID } , null ) , e ) ; SibTr . exception ( tc , finalE ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "requestFlushAtSource" , finalE ) ; throw finalE ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "requestFlushAtSource" ) ; |
public class BatchDetectSentimentItemResultMarshaller { /** * Marshall the given parameter object . */
public void marshall ( BatchDetectSentimentItemResult batchDetectSentimentItemResult , ProtocolMarshaller protocolMarshaller ) { } } | if ( batchDetectSentimentItemResult == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchDetectSentimentItemResult . getIndex ( ) , INDEX_BINDING ) ; protocolMarshaller . marshall ( batchDetectSentimentItemResult . getSentiment ( ) , SENTIMENT_BINDING ) ; protocolMarshaller . marshall ( batchDetectSentimentItemResult . getSentimentScore ( ) , SENTIMENTSCORE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class XmlResponsesSaxParser { /** * Safely parses the specified string as a long and returns the value . If a
* NumberFormatException occurs while parsing the long , an error is logged
* and - 1 is returned .
* @ param s
* The string to parse and return as a long .
* @ return The long value of the specified string , otherwise - 1 if there
* were any problems parsing the string as a long . */
private static long parseLong ( String s ) { } } | try { return Long . parseLong ( s ) ; } catch ( NumberFormatException nfe ) { log . error ( "Unable to parse long value '" + s + "'" , nfe ) ; } return - 1 ; |
public class DateHelper { /** * Given a date represented by a Calendar instance , set the time
* component of the date based on the hours and minutes of the
* time supplied by the Date instance .
* @ param cal Calendar instance representing the date
* @ param time Date instance representing the time of day */
public static void setTime ( Calendar cal , Date time ) { } } | if ( time != null ) { Calendar startCalendar = popCalendar ( time ) ; cal . set ( Calendar . HOUR_OF_DAY , startCalendar . get ( Calendar . HOUR_OF_DAY ) ) ; cal . set ( Calendar . MINUTE , startCalendar . get ( Calendar . MINUTE ) ) ; cal . set ( Calendar . SECOND , startCalendar . get ( Calendar . SECOND ) ) ; pushCalendar ( startCalendar ) ; } |
public class ContainerDefinition { /** * A list of DNS servers that are presented to the container . This parameter maps to < code > Dns < / code > in the < a
* href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / ContainerCreate " > Create a container < / a > section of the
* < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the < code > - - dns < / code > option to < a
* href = " https : / / docs . docker . com / engine / reference / run / " > docker run < / a > .
* < note >
* This parameter is not supported for Windows containers .
* < / note >
* @ param dnsServers
* A list of DNS servers that are presented to the container . This parameter maps to < code > Dns < / code > in the
* < a href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / ContainerCreate " > Create a container < / a >
* section of the < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the
* < code > - - dns < / code > option to < a href = " https : / / docs . docker . com / engine / reference / run / " > docker run < / a > . < / p >
* < note >
* This parameter is not supported for Windows containers . */
public void setDnsServers ( java . util . Collection < String > dnsServers ) { } } | if ( dnsServers == null ) { this . dnsServers = null ; return ; } this . dnsServers = new com . amazonaws . internal . SdkInternalList < String > ( dnsServers ) ; |
/**
 * Set offset and limit according to page approach. If pageSize is negative, then
 * {@link #MAX_LIMIT} is used.
 *
 * @param page     1-based page number
 * @param pageSize number of results per page
 * @return this, for chaining
 */
public SearchOptions setPage(int page, int pageSize) {
    checkArgument(page >= 1, "Page must be greater or equal to 1 (got " + page + ")");
    int lastResultIndex = page * pageSize;
    checkArgument(lastResultIndex <= MAX_RETURNABLE_RESULTS, "Can return only the first %s results. %sth result asked.", MAX_RETURNABLE_RESULTS, lastResultIndex);
    setLimit(pageSize);
    // NOTE(review): the offset is computed from this.limit (set just above) rather
    // than from the raw pageSize — presumably setLimit normalizes a negative
    // pageSize to MAX_LIMIT as the javadoc says; confirm against setLimit.
    setOffset((page * this.limit) - this.limit);
    return this;
}
public class S3RecoverableFsDataOutputStream { public static S3RecoverableFsDataOutputStream newStream ( final RecoverableMultiPartUpload upload , final FunctionWithException < File , RefCountedFile , IOException > tmpFileCreator , final long userDefinedMinPartSize ) throws IOException { } } | checkArgument ( userDefinedMinPartSize >= S3_MULTIPART_MIN_PART_SIZE ) ; final RefCountedBufferingFileStream fileStream = boundedBufferingFileStream ( tmpFileCreator , Optional . empty ( ) ) ; return new S3RecoverableFsDataOutputStream ( upload , tmpFileCreator , fileStream , userDefinedMinPartSize , 0L ) ; |
public class ConfigurationUtil { /** * Gets value for the given key or throws if value is not found . */
public static String getMandatoryConfig ( Configuration config , String key ) throws IOException { } } | String value = config . get ( key ) ; if ( Strings . isNullOrEmpty ( value ) ) { throw new IOException ( "Must supply a value for configuration setting: " + key ) ; } return value ; |
/**
 * <!-- begin-user-doc -->
 * Disconnects the given inverse reference from this object for the given feature.
 * Containment features delegate to their basicSet* method with a null new value;
 * the ROLE list removes the element directly.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
        case BpsimPackage.RESOURCE_PARAMETERS__SELECTION:
            return basicSetSelection(null, msgs);
        case BpsimPackage.RESOURCE_PARAMETERS__AVAILABILITY:
            return basicSetAvailability(null, msgs);
        case BpsimPackage.RESOURCE_PARAMETERS__QUANTITY:
            return basicSetQuantity(null, msgs);
        case BpsimPackage.RESOURCE_PARAMETERS__ROLE:
            return ((InternalEList<?>) getRole()).basicRemove(otherEnd, msgs);
    }
    // Unknown feature: defer to the superclass implementation.
    return super.eInverseRemove(otherEnd, featureID, msgs);
}
public class Node { /** * Returns the Viewport that this Node is on . */
@ Override public Viewport getViewport ( ) { } } | final Node < ? > parent = getParent ( ) ; // change , no iteration , no testing , no casting , recurses upwards to a Viewport , and Viewport returns itself , CYCLES ! ! !
if ( null != parent ) { return parent . getViewport ( ) ; } return null ; |
public class SigmaElectronegativityDescriptor { /** * Sets the parameters attribute of the SigmaElectronegativityDescriptor
* object
* @ param params 1 : max iterations ( optional , defaults to 20)
* @ exception CDKException Description of the Exception */
@ Override public void setParameters ( Object [ ] params ) throws CDKException { } } | if ( params . length > 1 ) { throw new CDKException ( "SigmaElectronegativityDescriptor only expects one parameter" ) ; } if ( ! ( params [ 0 ] instanceof Integer ) ) { throw new CDKException ( "The parameter must be of type Integer" ) ; } if ( params . length == 0 ) return ; maxIterations = ( Integer ) params [ 0 ] ; |
/**
 * Hibernate interceptor hook: enforces write access control before an entity
 * update is flushed. Always returns false (the entity state is never modified).
 */
@Override
public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types) {
    // Skip access-control validation for ACL entries themselves.
    // NOTE(review): entity.getClass().isAssignableFrom(AccessControlEntry.class)
    // tests whether AccessControlEntry is a subtype of the entity's class — the
    // reverse of an instanceof check. Confirm this direction is intended.
    if (entity.getClass().isAssignableFrom(AccessControlEntry.class)) {
        return false;
    }
    // Prefer a thread-local override context when one exists; otherwise use the caller's.
    AccessControlContext accessControlContext = AccessControlContextOverride.exists() ? AccessControlContextOverride.get() : accessControlContextProvider.getCurrent();
    accessControlHelper.validateContextAllowsWrite(entity.getClass(), id, accessControlContext, false);
    return false;
}
/**
 * Performs the ERiC algorithm on the given database.
 *
 * @param database Database to run on
 * @param relation Relation to process
 * @return Clustering result
 */
public Clustering<CorrelationModel> run(Database database, Relation<V> relation) {
    final int dim = RelationUtil.dimensionality(relation);
    StepProgress stepprog = LOG.isVerbose() ? new StepProgress(3) : null;
    // Step 1: run Generalized DBSCAN with ERiC's correlation-based neighbor predicate.
    LOG.beginStep(stepprog, 1, "Preprocessing local correlation dimensionalities and partitioning data");
    // FIXME: how to ensure we are running on the same relation?
    ERiCNeighborPredicate<V>.Instance npred = new ERiCNeighborPredicate<V>(settings).instantiate(database, relation);
    CorePredicate.Instance<DBIDs> cpred = new MinPtsCorePredicate(settings.minpts).instantiate(database);
    Clustering<Model> copacResult = new GeneralizedDBSCAN.Instance<>(npred, cpred, false).run();
    // Step 2: extract correlation clusters grouped by correlation dimensionality.
    LOG.beginStep(stepprog, 2, "Extract correlation clusters");
    List<List<Cluster<CorrelationModel>>> clusterMap = extractCorrelationClusters(copacResult, relation, dim, npred);
    if (LOG.isDebugging()) {
        StringBuilder msg = new StringBuilder("Step 2: Extract correlation clusters...");
        for (int corrDim = 0; corrDim < clusterMap.size(); corrDim++) {
            List<Cluster<CorrelationModel>> correlationClusters = clusterMap.get(corrDim);
            msg.append("\n\ncorrDim ").append(corrDim);
            for (Cluster<CorrelationModel> cluster : correlationClusters) {
                msg.append("\n cluster ").append(cluster).append(", ids: ").append(cluster.getIDs().size());
            }
        }
        LOG.debugFine(msg.toString());
    }
    if (LOG.isVerbose()) {
        int clusters = 0;
        for (List<Cluster<CorrelationModel>> correlationClusters : clusterMap) {
            clusters += correlationClusters.size();
        }
        LOG.verbose(clusters + " clusters extracted.");
    }
    // Step 3: build the cluster hierarchy across dimensionality levels.
    LOG.beginStep(stepprog, 3, "Building hierarchy");
    Clustering<CorrelationModel> clustering = new Clustering<>("ERiC clustering", "eric-clustering");
    buildHierarchy(clustering, clusterMap, npred);
    if (LOG.isDebugging()) {
        StringBuilder msg = new StringBuilder("Step 3: Build hierarchy");
        for (int corrDim = 0; corrDim < clusterMap.size(); corrDim++) {
            List<Cluster<CorrelationModel>> correlationClusters = clusterMap.get(corrDim);
            for (Cluster<CorrelationModel> cluster : correlationClusters) {
                msg.append("\n cluster ").append(cluster).append(", ids: ").append(cluster.getIDs().size());
                for (It<Cluster<CorrelationModel>> iter = clustering.getClusterHierarchy().iterParents(cluster); iter.valid(); iter.advance()) {
                    msg.append("\n parent ").append(iter.get());
                }
                for (It<Cluster<CorrelationModel>> iter = clustering.getClusterHierarchy().iterChildren(cluster); iter.valid(); iter.advance()) {
                    msg.append("\n child ").append(iter.get());
                }
            }
        }
        LOG.debugFine(msg.toString());
    }
    LOG.setCompleted(stepprog);
    // The clusters of the highest correlation dimensionality form the top level of the result.
    for (Cluster<CorrelationModel> rc : clusterMap.get(clusterMap.size() - 1)) {
        clustering.addToplevelCluster(rc);
    }
    return clustering;
}
public class InMemoryPushPushPipe { /** * { @ inheritDoc } */
@ Override public boolean subscribe ( IProvider provider , Map < String , Object > paramMap ) { } } | boolean success = super . subscribe ( provider , paramMap ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Provider subscribe{} {} params: {}" , new Object [ ] { ( success ? "d" : " failed" ) , provider , paramMap } ) ; } if ( success ) { fireProviderConnectionEvent ( provider , PipeConnectionEvent . EventType . PROVIDER_CONNECT_PUSH , paramMap ) ; } return success ; |
/**
 * Creates an association row representing the given entry and adds it to the association managed by the given
 * persister.
 *
 * @param i the position of the entry within the collection (used for identifier/index columns)
 * @return the row key and tuple that were stored in the association
 */
private RowKeyAndTuple createAndPutAssociationRowForInsert(Serializable key, PersistentCollection collection, AssociationPersister associationPersister, SharedSessionContractImplementor session, int i, Object entry) {
    RowKeyBuilder rowKeyBuilder = initializeRowKeyBuilder();
    Tuple associationRow = new Tuple();
    // the collection has a surrogate key (see @CollectionId)
    if (hasIdentifier) {
        final Object identifier = collection.getIdentifier(entry, i);
        String[] names = { getIdentifierColumnName() };
        identifierGridType.nullSafeSet(associationRow, identifier, names, session);
    }
    // Owner key columns.
    getKeyGridType().nullSafeSet(associationRow, key, getKeyColumnNames(), session);
    // No need to write to where as we don't do where clauses in OGM :)
    if (hasIndex) {
        Object index = collection.getIndex(entry, i, this);
        indexGridType.nullSafeSet(associationRow, incrementIndexByBase(index), getIndexColumnNames(), session);
    }
    // columns of referenced key
    final Object element = collection.getElement(entry);
    getElementGridType().nullSafeSet(associationRow, element, getElementColumnNames(), session);
    RowKeyAndTuple result = new RowKeyAndTuple();
    result.key = rowKeyBuilder.values(associationRow).build();
    result.tuple = associationRow;
    // Persist the new row into the association.
    associationPersister.getAssociation().put(result.key, result.tuple);
    return result;
}
/**
 * Get Device Status (asynchronously).
 *
 * @param deviceId Device ID (required)
 * @param includeSnapshot Include device snapshot into the response (optional)
 * @param includeSnapshotTimestamp Include device snapshot timestamp into the response (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call getDeviceStatusAsync(String deviceId, Boolean includeSnapshot, Boolean includeSnapshotTimestamp, final ApiCallback<DeviceStatus> callback) throws ApiException {
    ProgressResponseBody.ProgressListener progressListener = null;
    ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
    // Wire upload/download progress reporting through to the caller's callback, if supplied.
    if (callback != null) {
        progressListener = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    // Validate parameters, build the HTTP call, then execute it asynchronously.
    com.squareup.okhttp.Call call = getDeviceStatusValidateBeforeCall(deviceId, includeSnapshot, includeSnapshotTimestamp, progressListener, progressRequestListener);
    Type localVarReturnType = new TypeToken<DeviceStatus>() {}.getType();
    apiClient.executeAsync(call, localVarReturnType, callback);
    return call;
}
/**
 * Gets the first page of Data Lake Analytics accounts, if any, within the current
 * subscription, and transparently follows next-page links.
 *
 * @param filter OData filter. Optional.
 * @param top The number of items to return. Optional.
 * @param skip The number of items to skip over before returning elements. Optional.
 * @param expand OData expansion. Optional.
 * @param select OData Select statement. Optional.
 * @param orderby OrderBy clause ("asc"/"desc"). Optional.
 * @param count Whether to include a count of matching resources. Optional.
 * @param search A free form search expression. Optional.
 * @param format The desired return format. Optional.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<DataLakeAnalyticsAccountInner>> listAsync(final String filter, final Integer top, final Integer skip, final String expand, final String select, final String orderby, final Boolean count, final String search, final String format, final ListOperationCallback<DataLakeAnalyticsAccountInner> serviceCallback) {
    // Fetch the first page; AzureServiceFuture drives retrieval of subsequent
    // pages through the next-page-link function below.
    return AzureServiceFuture.fromPageResponse(
        listSinglePageAsync(filter, top, skip, expand, select, orderby, count, search, format),
        new Func1<String, Observable<ServiceResponse<Page<DataLakeAnalyticsAccountInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<DataLakeAnalyticsAccountInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}
/**
 * Implements the CmsEvent interface;
 * the static export properties use the events to clear
 * the list of cached keys in case a project is published.<p>
 *
 * @param event CmsEvent that has occurred
 */
public void cmsEvent(CmsEvent event) {
    // All events are no-ops while static export is disabled.
    if (!isStaticExportEnabled()) {
        if (LOG.isWarnEnabled()) {
            LOG.warn(Messages.get().getBundle().key(Messages.LOG_STATIC_EXPORT_DISABLED_0));
        }
        return;
    }
    // Use the report attached to the event if present, otherwise fall back to a log report.
    I_CmsReport report = null;
    Map<String, Object> data = event.getData();
    if (data != null) {
        report = (I_CmsReport) data.get(I_CmsEventListener.KEY_REPORT);
    }
    if (report == null) {
        report = new CmsLogReport(CmsLocaleManager.getDefaultLocale(), getClass());
    }
    switch (event.getType()) {
        case I_CmsEventListener.EVENT_UPDATE_EXPORTS:
            scrubExportFolders(report);
            clearCaches(event);
            break;
        case I_CmsEventListener.EVENT_PUBLISH_PROJECT:
            if (data == null) {
                if (LOG.isErrorEnabled()) {
                    LOG.error(Messages.get().getBundle().key(Messages.ERR_EMPTY_EVENT_DATA_0));
                }
                return;
            }
            // event data contains a list of the published resources
            CmsUUID publishHistoryId = new CmsUUID((String) data.get(I_CmsEventListener.KEY_PUBLISHID));
            if (LOG.isDebugEnabled()) {
                LOG.debug(Messages.get().getBundle().key(Messages.LOG_EVENT_PUBLISH_PROJECT_1, publishHistoryId));
            }
            // Serialize publish handling so concurrent publish events cannot export concurrently.
            synchronized (m_lockCmsEvent) {
                getHandler().performEventPublishProject(publishHistoryId, report);
            }
            clearCaches(event);
            if (LOG.isDebugEnabled()) {
                LOG.debug(Messages.get().getBundle().key(Messages.LOG_EVENT_PUBLISH_PROJECT_FINISHED_1, publishHistoryId));
            }
            break;
        case I_CmsEventListener.EVENT_CLEAR_CACHES:
            clearCaches(event);
            break;
        default:
            // no operation
    }
}
public class FlexFlow { /** * Create a FlexFlowCreator to execute create .
* @ param friendlyName Human readable description of this FlexFlow
* @ param chatServiceSid Service Sid .
* @ param channelType Channel type
* @ return FlexFlowCreator capable of executing the create */
public static FlexFlowCreator creator ( final String friendlyName , final String chatServiceSid , final FlexFlow . ChannelType channelType ) { } } | return new FlexFlowCreator ( friendlyName , chatServiceSid , channelType ) ; |
public class XsdAsmInterfaces { /** * Generates all the required interfaces , based on the information gathered while creating the other classes .
* It creates both types of interfaces :
* ElementGroupInterfaces - Interfaces that serve as a base to adding child elements to the current element ;
* AttributeGroupInterfaces - Interface that serve as a base to adding attributes to the current element ;
* @ param createdAttributes Information about the attributes that are already created .
* @ param apiName The name of the generated fluent interface . */
void generateInterfaces ( Map < String , List < XsdAttribute > > createdAttributes , String apiName ) { } } | attributeGroupInterfaces . keySet ( ) . forEach ( attributeGroupInterface -> generateAttributesGroupInterface ( createdAttributes , attributeGroupInterface , attributeGroupInterfaces . get ( attributeGroupInterface ) , apiName ) ) ; hierarchyInterfaces . values ( ) . forEach ( hierarchyInterface -> generateHierarchyAttributeInterfaces ( createdAttributes , hierarchyInterface , apiName ) ) ; |
public class Uber { /** * Extract rel and url from every { @ link UberData } entry .
* @ return */
@ JsonIgnore Links getLinks ( ) { } } | if ( data == null ) { return Links . NONE ; } return data . stream ( ) . flatMap ( uberData -> uberData . getLinks ( ) . stream ( ) ) . collect ( Links . collector ( ) ) ; |
/**
 * Calculate the signature of the request using the specified secret key.
 *
 * @param uri    the target URI; its host and path are part of the signed base string
 * @param params The parameters to send in the request.
 * @return The parameters including the signature, or null if signing failed.
 */
private List<NameValuePair> sign(URI uri, final List<NameValuePair> params) {
    Preconditions.checkNotNull(m_secret);
    // Parameters must be signed in canonical (name-sorted) order.
    final List<NameValuePair> sortedParams = Lists.newArrayList(params);
    Collections.sort(sortedParams, new Comparator<NameValuePair>() {
        @Override
        public int compare(NameValuePair left, NameValuePair right) {
            return left.getName().compareTo(right.getName());
        }
    });
    // Build the canonical "name[=value]" string joined by '&'.
    final StringBuilder paramSb = new StringBuilder();
    String separator = "";
    for (NameValuePair param : sortedParams) {
        paramSb.append(separator).append(param.getName());
        if (param.getValue() != null) {
            paramSb.append("=").append(param.getValue());
        }
        separator = "&";
    }
    // Base string layout: METHOD \n host \n path (or '/') \n canonical-params.
    final StringBuilder baseSb = new StringBuilder();
    baseSb.append(m_method).append('\n');
    baseSb.append(uri.getHost()).append('\n');
    baseSb.append(uri.getPath().isEmpty() ? '/' : uri.getPath()).append('\n');
    baseSb.append(paramSb.toString());
    final Mac hmac;
    final Key key;
    try {
        hmac = Mac.getInstance(m_signatureMethod);
        key = new SecretKeySpec(m_secret.getBytes(Charsets.UTF_8), m_signatureMethod);
        hmac.init(key);
    } catch (NoSuchAlgorithmException e) {
        // should never happen
        rateLimitedLogError(m_logger, "Fail to get HMAC instance %s", Throwables.getStackTraceAsString(e));
        return null;
    } catch (InvalidKeyException e) {
        rateLimitedLogError(m_logger, "Fail to sign the message %s", Throwables.getStackTraceAsString(e));
        return null;
    }
    // Append the base64, percent-encoded HMAC of the base string as the signature parameter.
    sortedParams.add(new BasicNameValuePair(m_signatureName, NVPairsDecoder.percentEncode(Encoder.base64Encode(hmac.doFinal(baseSb.toString().getBytes(Charsets.UTF_8))))));
    return sortedParams;
}
public class Tuple3 { /** * Concatenate a tuple to this tuple . */
public final < T4 > Tuple4 < T1 , T2 , T3 , T4 > concat ( Tuple1 < T4 > tuple ) { } } | return new Tuple4 < > ( v1 , v2 , v3 , tuple . v1 ) ; |
public class ELParser { /** * Skip until an EL expression ( ' $ { ' | | ' # { ' ) is reached , allowing escape
* sequences ' \ \ ' and ' \ $ ' and ' \ # ' .
* @ return The text string up to the EL expression */
private String skipUntilEL ( ) { } } | char prev = 0 ; StringBuffer buf = new StringBuffer ( ) ; while ( hasNextChar ( ) ) { char ch = nextChar ( ) ; if ( prev == '\\' ) { prev = 0 ; if ( ch == '\\' ) { buf . append ( '\\' ) ; if ( ! escapeBS ) prev = '\\' ; } else if ( ch == '$' || ch == '#' ) { buf . append ( ch ) ; } // else error !
} else if ( prev == '$' || prev == '#' ) { if ( ch == '{' ) { this . type = prev ; prev = 0 ; break ; } buf . append ( prev ) ; prev = 0 ; } if ( ch == '\\' || ch == '$' || ch == '#' ) { prev = ch ; } else { buf . append ( ch ) ; } } if ( prev != 0 ) { buf . append ( prev ) ; } return buf . toString ( ) ; |
public class RouteLegProgress { /** * Get the next / upcoming step immediately after the current step . If the user is on the last step
* on the last leg , this will return null since a next step doesn ' t exist .
* @ return a { @ link LegStep } representing the next step the user will be on .
* @ since 0.1.0 */
@ Nullable public LegStep upComingStep ( ) { } } | if ( routeLeg ( ) . steps ( ) . size ( ) - 1 > stepIndex ( ) ) { return routeLeg ( ) . steps ( ) . get ( stepIndex ( ) + 1 ) ; } return null ; |
public class ComponentFactory { /** * Factory method for create a new { @ link RadioGroup } .
* @ param < T >
* the generic type of the model
* @ param id
* the id
* @ param model
* the model
* @ return the new { @ link RadioGroup } . */
public static < T > RadioGroup < T > newRadioGroup ( final String id , final IModel < T > model ) { } } | final RadioGroup < T > radioGroup = new RadioGroup < > ( id , model ) ; radioGroup . setOutputMarkupId ( true ) ; return radioGroup ; |
public class DefaultBeanDescriptor {
    /**
     * Extracts property information from the bean class: public fields first,
     * then JavaBeans getter/setter properties. A default instance is created (when
     * possible) so default property values can be captured alongside.
     */
    protected void extractBeanDescriptor() {
        // Best effort: a failure here only means the descriptor lacks default values.
        Object defaultInstance = null;
        try {
            defaultInstance = getBeanClass().newInstance();
        } catch (Exception e) {
            LOGGER.debug("Failed to create a new default instance for class " + this.beanClass
                    + ". The BeanDescriptor will not contains any default value information.", e);
        }
        try {
            // 1) Public non-static fields, walking the superclass chain.
            // NOTE(review): getFields() already includes inherited public fields, so the
            // superclass walk may revisit the same field — confirm extractPropertyDescriptor
            // tolerates duplicates.
            for (Class<?> currentClass = this.beanClass; currentClass != null; currentClass = currentClass.getSuperclass()) {
                Field[] fields = currentClass.getFields();
                for (Field field : fields) {
                    if (!Modifier.isStatic(field.getModifiers())) {
                        extractPropertyDescriptor(field, defaultInstance);
                    }
                }
            }
            // 2) Getter/setter based properties via JavaBeans introspection.
            BeanInfo beanInfo = Introspector.getBeanInfo(this.beanClass);
            java.beans.PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
            if (propertyDescriptors != null) {
                for (java.beans.PropertyDescriptor propertyDescriptor : propertyDescriptors) {
                    if (propertyDescriptor != null) {
                        extractPropertyDescriptor(propertyDescriptor, defaultInstance);
                    }
                }
            }
        } catch (Exception e) {
            LOGGER.error("Failed to load bean descriptor for class " + this.beanClass, e);
        }
    }
}
public class RetrieveTokenApi {
    /**
     * Builds the form parameters for an OAuth2 refresh-token grant request.
     *
     * @param refreshToken the refresh token (required); see RFC 6749 section 1.5
     * @param scope the scope of the access request (optional)
     * @param clientId the registered client id (optional)
     * @return a map of form parameters
     * @throws IllegalArgumentException if {@code refreshToken} is null
     */
    public static Map<String, Object> createFormParamRefreshTokenGrantType(String refreshToken, String scope, String clientId) {
        if (refreshToken == null) {
            throw new IllegalArgumentException("Missing the required parameter 'refresh_token'");
        }
        final Map<String, Object> params = new HashMap<>();
        params.put("grant_type", "refresh_token");
        params.put("refresh_token", refreshToken);
        // Optional parameters are only sent when provided.
        if (scope != null) {
            params.put("scope", scope);
        }
        if (clientId != null) {
            params.put("client_id", clientId);
        }
        return params;
    }
}
public class ApiOvhTelephony { /** * Alter this object properties
* REST : PUT / telephony / { billingAccount } / offerTask / { taskId }
* @ param body [ required ] New object properties
* @ param billingAccount [ required ] The name of your billingAccount
* @ param taskId [ required ] */
public void billingAccount_offerTask_taskId_PUT ( String billingAccount , Long taskId , OvhOfferTask body ) throws IOException { } } | String qPath = "/telephony/{billingAccount}/offerTask/{taskId}" ; StringBuilder sb = path ( qPath , billingAccount , taskId ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ; |
public class REWildcardStringParser {
    /**
     * Builds the regexp parser from the wildcard string mask.
     *
     * @return {@code true} if the parser was built; {@code false} on any failure,
     *         in which case all strings will be rejected by this parser.
     */
    private boolean buildRegexpParser() {
        // Convert wildcard string mask to regular expression.
        String regexp = convertWildcardExpressionToRegularExpression(mStringMask);
        if (regexp == null) {
            // NOTE(review): unlike the failure path below, this message is printed even
            // when mDebugging is off — confirm whether that asymmetry is intentional.
            out.println(DebugUtil.getPrefixErrorMessage(this) + "irregularity in regexp conversion - now not able to parse any strings, all strings will be rejected!");
            return false;
        }
        // Instantiate a regular expression parser.
        try {
            mRegexpParser = Pattern.compile(regexp);
        } catch (PatternSyntaxException e) {
            if (mDebugging) {
                out.println(DebugUtil.getPrefixErrorMessage(this) + "RESyntaxException \"" + e.getMessage() + "\" caught - now not able to parse any strings, all strings will be rejected!");
            }
            if (mDebugging) {
                e.printStackTrace(System.err);
            }
            return false;
        }
        if (mDebugging) {
            out.println(DebugUtil.getPrefixDebugMessage(this) + "regular expression parser from regular expression " + regexp + " extracted from wildcard string mask " + mStringMask + ".");
        }
        return true;
    }
}
public class GrouperEntityStoreFactory { /** * Creates an instance .
* @ return The instance .
* @ see IEntityStoreFactory # newEntityStore ( ) */
@ Override public IEntityStore newEntityStore ( ) throws GroupsException { } } | if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Creating new Grouper GrouperEntityGroupStoreFactory" ) ; } return ( IEntityStore ) new GrouperEntityGroupStoreFactory ( ) . newGroupStore ( ) ; |
public class FileMgr { /** * Delete the specified file .
* @ param fileName
* the name of the target file */
public void delete ( String fileName ) { } } | try { synchronized ( prepareAnchor ( fileName ) ) { // Close file , if it was opened
IoChannel fileChannel = openFiles . remove ( fileName ) ; if ( fileChannel != null ) fileChannel . close ( ) ; // Delete the file
boolean hasDeleted = new File ( dbDirectory , fileName ) . delete ( ) ; if ( ! hasDeleted && logger . isLoggable ( Level . WARNING ) ) logger . warning ( "cannot delete file: " + fileName ) ; } } catch ( IOException e ) { if ( logger . isLoggable ( Level . WARNING ) ) logger . warning ( "there is something wrong when deleting " + fileName ) ; e . printStackTrace ( ) ; } |
public class onlinkipv6prefix {
    /**
     * Uses this API to update onlinkipv6prefix.
     *
     * <p>NITRO-SDK pattern: copies the updatable fields into a fresh resource
     * object (keyed by {@code ipv6prefix}) and sends only those to the appliance.
     */
    public static base_response update(nitro_service client, onlinkipv6prefix resource) throws Exception {
        onlinkipv6prefix updateresource = new onlinkipv6prefix();
        updateresource.ipv6prefix = resource.ipv6prefix;
        updateresource.onlinkprefix = resource.onlinkprefix;
        updateresource.autonomusprefix = resource.autonomusprefix;
        updateresource.depricateprefix = resource.depricateprefix;
        updateresource.decrementprefixlifetimes = resource.decrementprefixlifetimes;
        updateresource.prefixvalidelifetime = resource.prefixvalidelifetime;
        updateresource.prefixpreferredlifetime = resource.prefixpreferredlifetime;
        return updateresource.update_resource(client);
    }
}
public class ClassConverter { /** * { @ inheritDoc } */
@ Override public Class < ? > convert ( String rawString ) { } } | Class < ? > converted = null ; if ( rawString != null ) { try { converted = Class . forName ( rawString ) ; } catch ( ClassNotFoundException e ) { throw new IllegalArgumentException ( e ) ; } } return converted ; |
public class ManifestDescriptorImporter { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . descriptor . api . DescriptorImporter # fromString ( java . lang . String ) */
@ Override public ManifestDescriptor fromString ( String manifest ) throws IllegalArgumentException , DescriptorImportException { } } | if ( manifest == null ) throw new IllegalArgumentException ( "Manifest cannot be null" ) ; InputStream inputStream = new ByteArrayInputStream ( manifest . getBytes ( ) ) ; try { return new ManifestDescriptorImpl ( descriptorName , new ManifestModel ( inputStream ) ) ; } catch ( IOException e ) { throw new DescriptorImportException ( e . getMessage ( ) , e ) ; } |
public class Frame { /** * Get the < i > i < / i > th operand used by given instruction .
* @ param ins
* the instruction , which must be a StackConsumer
* @ param cpg
* the ConstantPoolGen
* @ param i
* index of operand to get : 0 for the first operand , etc .
* @ return the < i > i < / i > th operand used by the given instruction
* @ throws DataflowAnalysisException */
public ValueType getOperand ( StackConsumer ins , ConstantPoolGen cpg , int i ) throws DataflowAnalysisException { } } | int numOperands = ins . consumeStack ( cpg ) ; if ( numOperands == Const . UNPREDICTABLE ) { throw new DataflowAnalysisException ( "Unpredictable stack consumption in " + ins ) ; } return getStackValue ( ( numOperands - 1 ) - i ) ; |
public class DrizzleConnection { /** * returns true if statements on this connection are auto commited .
* @ return true if auto commit is on .
* @ throws SQLException */
public boolean getAutoCommit ( ) throws SQLException { } } | Statement stmt = createStatement ( ) ; ResultSet rs = stmt . executeQuery ( "select @@autocommit" ) ; rs . next ( ) ; boolean autocommit = rs . getBoolean ( 1 ) ; rs . close ( ) ; stmt . close ( ) ; return autocommit ; |
public class FMeasureCounts { /** * Creates an { @ link FMeasureCounts } from counts of true positives , false positives , and false
* negatives . */
public static FMeasureCounts fromTPFPFN ( final double truePositives , final double falsePositives , final double falseNegatives ) { } } | return fromFPFNKeyCountSysCount ( falsePositives , falseNegatives , truePositives + falseNegatives , truePositives + falsePositives ) ; |
public class HandlerManager { /** * Adds a handler .
* @ param < H > The type of handler
* @ param type the event type associated with this handler
* @ param handler the handler
* @ return the handler registration , can be stored in order to remove the handler later */
public < H extends EventHandler > HandlerRegistration addHandler ( GwtEvent . Type < H > type , final H handler ) { } } | if ( type != null && handler != null ) { return new LegacyHandlerWrapper ( this . eventBus . addHandler ( type , handler ) ) ; } return null ; |
public class Service {
    /**
     * Iterates over nested link-relation references: invalid (dangling) ones are
     * removed, valid ones inherit the referenced relation's short description
     * when their own is blank.
     *
     * <p>NOTE(review): the original javadoc also claims a "filename is set", but
     * no such assignment is visible in this method — confirm against callers.
     *
     * @param rel link relation whose refs are resolved
     */
    private void resolve(LinkRelation rel) {
        // Explicit iterator so invalid entries can be removed in place.
        Iterator<LinkRelationRef> refs = rel.getLinkRelationRefs().iterator();
        while (refs.hasNext()) {
            LinkRelationRef ref = refs.next();
            // Look up the relation this ref points at, by matching rel name.
            LinkRelation referencedRel = linkRelations.stream().filter(item -> StringUtils.equals(item.getRel(), ref.getRel())).findFirst().orElse(null);
            if (referencedRel == null) {
                // Dangling reference: drop it.
                refs.remove();
            } else {
                // Inherit description only when the ref has none of its own.
                if (StringUtils.isBlank(ref.getShortDescription())) {
                    ref.setShortDescription(referencedRel.getShortDescription());
                }
            }
        }
    }
}
public class JSONWriter {
    /**
     * Appends a value.
     *
     * <p>Mode machine: values are only legal in object mode ('o') or array mode
     * ('a'); after a value in object mode the writer expects a key ('k') next.
     *
     * @param s a string value (already serialized)
     * @return this
     * @throws JSONException if {@code s} is null, if the value is out of
     *         sequence, or if the underlying writer fails
     */
    private JSONWriter append(String s) throws JSONException {
        if (s == null) {
            throw new JSONException("Null pointer");
        }
        if ((m_mode == 'o') || (m_mode == 'a')) {
            try {
                // In array mode, successive values are comma-separated.
                if (m_comma && (m_mode == 'a')) {
                    m_writer.write(',');
                }
                m_writer.write(s);
            } catch (IOException e) {
                throw new JSONException(e);
            }
            // After a value in object mode, the next token must be a key.
            if (m_mode == 'o') {
                m_mode = 'k';
            }
            m_comma = true;
            return this;
        }
        throw new JSONException("Value out of sequence.");
    }
}
public class SarlBatchCompiler { /** * Replies the classpath .
* @ return the classpath . */
@ Pure public List < File > getClassPath ( ) { } } | if ( this . classpath == null ) { return Collections . emptyList ( ) ; } return Collections . unmodifiableList ( this . classpath ) ; |
public class BosClient {
    /**
     * Adds response-header-override parameters to the request, one per non-null
     * override. A null {@code responseHeaders} adds nothing.
     *
     * @param request the request to add the response header parameters to
     * @param responseHeaders the full set of response headers to add, or null for none
     */
    private void addResponseHeaderParameters(InternalRequest request, ResponseHeaderOverrides responseHeaders) {
        if (responseHeaders != null) {
            if (responseHeaders.getCacheControl() != null) {
                request.addParameter(ResponseHeaderOverrides.RESPONSE_HEADER_CACHE_CONTROL, responseHeaders.getCacheControl());
            }
            if (responseHeaders.getContentDisposition() != null) {
                request.addParameter(ResponseHeaderOverrides.RESPONSE_HEADER_CONTENT_DISPOSITION, responseHeaders.getContentDisposition());
            }
            if (responseHeaders.getContentEncoding() != null) {
                request.addParameter(ResponseHeaderOverrides.RESPONSE_HEADER_CONTENT_ENCODING, responseHeaders.getContentEncoding());
            }
            if (responseHeaders.getContentLanguage() != null) {
                request.addParameter(ResponseHeaderOverrides.RESPONSE_HEADER_CONTENT_LANGUAGE, responseHeaders.getContentLanguage());
            }
            if (responseHeaders.getContentType() != null) {
                request.addParameter(ResponseHeaderOverrides.RESPONSE_HEADER_CONTENT_TYPE, responseHeaders.getContentType());
            }
            if (responseHeaders.getExpires() != null) {
                request.addParameter(ResponseHeaderOverrides.RESPONSE_HEADER_EXPIRES, responseHeaders.getExpires());
            }
        }
    }
}
public class BitSet { /** * Returns a new bit set containing all the bits in the given long
* buffer between its position and limit .
* < p > More precisely ,
* < br > { @ code BitSet . valueOf ( lb ) . get ( n ) = = ( ( lb . get ( lb . position ( ) + n / 64 ) & ( 1L < < ( n % 64 ) ) ) ! = 0 ) }
* < br > for all { @ code n < 64 * lb . remaining ( ) } .
* < p > The long buffer is not modified by this method , and no
* reference to the buffer is retained by the bit set .
* @ param lb a long buffer containing a little - endian representation
* of a sequence of bits between its position and limit , to be
* used as the initial bits of the new bit set
* @ return a { @ code BitSet } containing all the bits in the buffer in the
* specified range
* @ since 1.7 */
public static BitSet valueOf ( LongBuffer lb ) { } } | lb = lb . slice ( ) ; int n ; for ( n = lb . remaining ( ) ; n > 0 && lb . get ( n - 1 ) == 0 ; n -- ) ; long [ ] words = new long [ n ] ; lb . get ( words ) ; return new BitSet ( words ) ; |
public class CmsGalleryDialog { /** * Displays the search result in the result tab . < p >
* @ param searchObj the search object */
public void fillResultTab ( CmsGallerySearchBean searchObj ) { } } | if ( m_resultsTab == null ) { return ; } List < CmsSearchParamPanel > paramPanels = null ; if ( ! searchObj . isEmpty ( ) ) { enableSearchTab ( ) ; paramPanels = new ArrayList < CmsSearchParamPanel > ( ) ; Iterator < A_CmsTab > it = m_tabbedPanel . iterator ( ) ; while ( it . hasNext ( ) ) { A_CmsTab tab = it . next ( ) ; paramPanels . addAll ( tab . getParamPanels ( searchObj ) ) ; } m_resultsTab . fillContent ( searchObj , paramPanels ) ; } |
public class SqlLine {
    /**
     * Outputs a progress indicator to the console, overwriting the previously
     * printed one with backspaces.
     *
     * @param cur the current progress
     * @param max the maximum progress, or -1 if unknown
     */
    void progress(int cur, int max) {
        StringBuilder out = new StringBuilder();
        // Erase the previous indicator (if any) with backspace characters.
        if (lastProgress != null) {
            char[] back = new char[lastProgress.length()];
            Arrays.fill(back, '\b');
            out.append(back);
        }
        // Format "cur/max (pct%)"; max == -1 means the total is unknown.
        String progress = cur + "/" + (max == - 1 ? "?" : "" + max) + " " + (max == - 1 ? "(??%)" : "(" + cur * 100 / (max == 0 ? 1 : max) + "%)");
        if (cur >= max && max != - 1) {
            // Finished: append the localized "done" marker and forget the indicator.
            progress += " " + loc("done") + SEPARATOR;
            lastProgress = null;
        } else {
            lastProgress = progress; // remember length so it can be erased next call
        }
        out.append(progress);
        getOutputStream().print(out.toString());
        getOutputStream().flush();
    }
}
public class Bessel { /** * Bessel function of order 0.
* @ param x Value .
* @ return J0 value . */
public static double J0 ( double x ) { } } | double ax ; if ( ( ax = Math . abs ( x ) ) < 8.0 ) { double y = x * x ; double ans1 = 57568490574.0 + y * ( - 13362590354.0 + y * ( 651619640.7 + y * ( - 11214424.18 + y * ( 77392.33017 + y * ( - 184.9052456 ) ) ) ) ) ; double ans2 = 57568490411.0 + y * ( 1029532985.0 + y * ( 9494680.718 + y * ( 59272.64853 + y * ( 267.8532712 + y * 1.0 ) ) ) ) ; return ans1 / ans2 ; } else { double z = 8.0 / ax ; double y = z * z ; double xx = ax - 0.785398164 ; double ans1 = 1.0 + y * ( - 0.1098628627e-2 + y * ( 0.2734510407e-4 + y * ( - 0.2073370639e-5 + y * 0.2093887211e-6 ) ) ) ; double ans2 = - 0.1562499995e-1 + y * ( 0.1430488765e-3 + y * ( - 0.6911147651e-5 + y * ( 0.7621095161e-6 - y * 0.934935152e-7 ) ) ) ; return Math . sqrt ( 0.636619772 / ax ) * ( Math . cos ( xx ) * ans1 - z * Math . sin ( xx ) * ans2 ) ; } |
public class SingleThreadEventExecutor {
    /**
     * Polls all tasks from the task queue and runs them via {@link Runnable#run()}.
     * Loops until the scheduled-task queue has been fully drained into the task
     * queue, so tasks scheduled while running are also executed.
     *
     * @return {@code true} if and only if at least one task was run
     */
    protected boolean runAllTasks() {
        assert inEventLoop(); // must only be called from the event loop thread
        boolean fetchedAll;
        boolean ranAtLeastOne = false;
        do {
            fetchedAll = fetchFromScheduledTaskQueue();
            if (runAllTasksFrom(taskQueue)) {
                ranAtLeastOne = true;
            }
        } while (!fetchedAll); // keep on processing until we fetched all scheduled tasks.
        if (ranAtLeastOne) {
            // Record when the loop last did work (used by shutdown/quiet-period logic).
            lastExecutionTime = ScheduledFutureTask.nanoTime();
        }
        afterRunningAllTasks();
        return ranAtLeastOne;
    }
}
public class BinaryRowProtocol {
    /**
     * Decodes the current column from the MySQL binary row format as a float.
     *
     * @param columnInfo column information
     * @return float value (0 when the value was SQL NULL, per JDBC getFloat contract)
     * @throws SQLException if the column is not numeric or the value cannot be parsed
     */
    public float getInternalFloat(ColumnInformation columnInfo) throws SQLException {
        if (lastValueWasNull()) {
            return 0;
        }
        long value;
        switch (columnInfo.getColumnType()) {
            case BIT:
                return parseBit();
            case TINYINT:
                value = getInternalTinyInt(columnInfo);
                break;
            case SMALLINT:
            case YEAR:
                value = getInternalSmallInt(columnInfo);
                break;
            case INTEGER:
            case MEDIUMINT:
                value = getInternalMediumInt(columnInfo);
                break;
            case BIGINT:
                // Assemble the 8-byte little-endian integer manually.
                value = ((buf[pos] & 0xff)
                        + ((long) (buf[pos + 1] & 0xff) << 8)
                        + ((long) (buf[pos + 2] & 0xff) << 16)
                        + ((long) (buf[pos + 3] & 0xff) << 24)
                        + ((long) (buf[pos + 4] & 0xff) << 32)
                        + ((long) (buf[pos + 5] & 0xff) << 40)
                        + ((long) (buf[pos + 6] & 0xff) << 48)
                        + ((long) (buf[pos + 7] & 0xff) << 56));
                if (columnInfo.isSigned()) {
                    return value;
                }
                // Unsigned BIGINT can exceed Long.MAX_VALUE: rebuild the magnitude via
                // BigInteger from the big-endian byte form.
                BigInteger unsignedValue = new BigInteger(1, new byte[]{(byte) (value >> 56), (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value});
                return unsignedValue.floatValue();
            case FLOAT:
                // 4-byte little-endian IEEE-754 bit pattern.
                int valueFloat = ((buf[pos] & 0xff) + ((buf[pos + 1] & 0xff) << 8) + ((buf[pos + 2] & 0xff) << 16) + ((buf[pos + 3] & 0xff) << 24));
                return Float.intBitsToFloat(valueFloat);
            case DOUBLE:
                return (float) getInternalDouble(columnInfo);
            case DECIMAL:
            case VARSTRING:
            case VARCHAR:
            case STRING:
            case OLDDECIMAL:
                // Textual numeric types: parse the raw UTF-8 bytes.
                try {
                    return Float.valueOf(new String(buf, pos, length, StandardCharsets.UTF_8));
                } catch (NumberFormatException nfe) {
                    SQLException sqlException = new SQLException("Incorrect format for getFloat for data field with type " + columnInfo.getColumnType().getJavaTypeName(), "22003", 1264, nfe);
                    throw sqlException;
                }
            default:
                throw new SQLException("getFloat not available for data field type " + columnInfo.getColumnType().getJavaTypeName());
        }
        // Integer paths fall through here: convert the decoded long to float via text.
        try {
            return Float.valueOf(String.valueOf(value));
        } catch (NumberFormatException nfe) {
            SQLException sqlException = new SQLException("Incorrect format for getFloat for data field with type " + columnInfo.getColumnType().getJavaTypeName(), "22003", 1264, nfe);
            throw sqlException;
        }
    }
}
public class HttpContextManagement {
    /**
     * Records a read of the given URL: adds it to the map if absent, otherwise
     * increments its hit count. Query parameters are stripped before counting.
     *
     * @param url the URL requested
     */
    public void addReadUrl(String url) {
        // Only cache the url data if logging is enabled.
        if (readUrlLoggingEnabled == false) {
            return;
        }
        // Strip off the query parameters.
        int paramIndex = url.indexOf('?');
        String filteredUrl = url;
        if (paramIndex != - 1) {
            filteredUrl = url.substring(0, paramIndex);
        }
        // Because Cache doesn't take advantage of the underlying concurrentMap, this code either has to (1)
        // synchronize addReadUrl calls (hurting every template that logs a URL) or (2) accept a potential race
        // on the very first add for a given URL. Approach (2) is taken deliberately: this data is for reporting
        // only, so losing one count on first insert is acceptable; once the AtomicLong is in the map, the
        // atomic increment removes the race.
        AtomicLong count = (AtomicLong) __UrlMap.get(filteredUrl);
        if (count == null) {
            count = new AtomicLong(1);
            __UrlMap.put(filteredUrl, count);
        } else {
            count.incrementAndGet();
        }
    }
}
public class Sort {
    /**
     * Checks whether the float array is sorted in non-decreasing order, scanning
     * the array once.
     *
     * <p><i>Runtime:</i> O(n)
     *
     * @param floatArray the float array to check
     * @return {@code true} if the float array is sorted, else {@code false}
     */
    public static boolean isSorted(float[] floatArray) {
        // Any adjacent pair out of order disproves sortedness.
        for (int i = 1; i < floatArray.length; i++) {
            if (floatArray[i - 1] > floatArray[i]) {
                return false;
            }
        }
        return true;
    }
}
public class Vector2i {
    /**
     * Reads this vector from the supplied {@link IntBuffer} starting at the
     * specified absolute buffer position/index, in <code>x, y</code> order.
     * This method will not increment the position of the given IntBuffer.
     *
     * @param index the absolute position into the IntBuffer
     * @param buffer values will be read in <code>x, y</code> order
     * @return this
     */
    public Vector2i set(int index, IntBuffer buffer) {
        // Delegates the absolute read to the platform-optimized memory helper.
        MemUtil.INSTANCE.get(this, index, buffer);
        return this;
    }
}
public class ServletModel {
    /**
     * Transforms an OSGi HttpService alias into a servlet url pattern.
     * Aliases that are null, the root ("/"), or already contain a wildcard are
     * returned unchanged; otherwise "*" or "/*" is appended.
     *
     * @param alias alias to transform
     * @return url pattern
     */
    private static String aliasAsUrlPattern(final String alias) {
        if (alias == null || "/".equals(alias) || alias.contains("*")) {
            return alias;
        }
        return alias.endsWith("/") ? alias + "*" : alias + "/*";
    }
}
public class BeanUtil {
    /**
     * Converts ServletRequest-style parameters into a bean: instantiates the
     * bean class reflectively and fills it from the value provider.
     *
     * @param <T> the bean type
     * @param beanClass the bean class to instantiate
     * @param valueProvider the provider of property values, keyed by property name
     * @param copyOptions copy options, see {@link CopyOptions}
     * @return the populated bean
     */
    public static <T> T toBean(Class<T> beanClass, ValueProvider<String> valueProvider, CopyOptions copyOptions) {
        return fillBean(ReflectUtil.newInstance(beanClass), valueProvider, copyOptions);
    }
}
public class DcpConnectHandler {
    /**
     * Handles one response during the DCP connect phase: on success advances the
     * step state machine (HELLO -> SELECT -> OPEN -> REMOVE) and fires the next
     * request; on failure fails the original connect promise.
     */
    @Override
    protected void channelRead0(final ChannelHandlerContext ctx, final ByteBuf msg) throws Exception {
        ResponseStatus status = MessageUtil.getResponseStatus(msg);
        if (status.isSuccess()) {
            step++; // previous step succeeded — move to the next one
            switch (step) {
                case HELLO:
                    hello(ctx, msg);
                    break;
                case SELECT:
                    select(ctx);
                    break;
                case OPEN:
                    open(ctx);
                    break;
                case REMOVE:
                    remove(ctx);
                    break;
                default:
                    originalPromise().setFailure(new IllegalStateException("Unidentified DcpConnection step " + step));
                    break;
            }
        } else {
            originalPromise().setFailure(new IllegalStateException("Could not open DCP Connection: Failed in the " + toString(step) + " step, response status is " + status));
        }
    }
}
public class SeleniumHelper {
    /**
     * Determines the number displayed for an item in an ordered list.
     *
     * @param element ordered list item
     * @return the displayed number, or {@code null} when the element is not a
     *         visible {@code <li>}
     */
    public Integer getNumberFor(WebElement element) {
        Integer number = null;
        if ("li".equalsIgnoreCase(element.getTagName()) && element.isDisplayed()) {
            int num;
            String ownVal = element.getAttribute("value");
            if (ownVal != null && !"0".equals(ownVal)) {
                // An explicit, non-zero value attribute overrides positional numbering.
                num = toInt(ownVal, 0);
            } else {
                // Start from the enclosing <ol>'s start attribute (defaulting to 1)...
                String start = element.findElement(By.xpath("ancestor::ol")).getAttribute("start");
                num = toInt(start, 1);
                // ...then count visible preceding siblings, honouring any explicit
                // value attribute that resets the sequence along the way.
                List<WebElement> allItems = element.findElements(By.xpath("ancestor::ol/li"));
                int index = allItems.indexOf(element);
                for (int i = 0; i < index; i++) {
                    WebElement item = allItems.get(i);
                    if (item.isDisplayed()) {
                        num++;
                        String val = item.getAttribute("value");
                        int valNum = toInt(val, num);
                        if (valNum != 0) {
                            num = valNum + 1; // sequence restarts after an explicit value
                        }
                    }
                }
            }
            number = num;
        }
        return number;
    }
}
public class TransactionableResourceManager {
    /**
     * Checks whether a global (JTA) transaction has been started and, if so,
     * enrolls the session and its pending changes into it.
     *
     * @param session the session to enlist in case a global tx has been started
     * @param changes the changes to enlist in case a global tx has been started
     * @return {@code true} if a global tx has been started and the session and
     *         its changes could be enrolled successfully, {@code false} otherwise
     * @throws IllegalStateException if the current status of the global
     *         transaction is not ACTIVE or PREPARING
     */
    public boolean canEnrollChangeToGlobalTx(final SessionImpl session, final PlainChangesLog changes) {
        try {
            int status;
            // Only enlist when a transaction manager exists and a global tx is in flight.
            if (tm != null && (status = tm.getStatus()) != Status.STATUS_NO_TRANSACTION) {
                // Enlisting is only legal while the tx is ACTIVE or PREPARING; this
                // IllegalStateException deliberately escapes the catch blocks below.
                if (status != Status.STATUS_ACTIVE && status != Status.STATUS_PREPARING) {
                    throw new IllegalStateException("The session cannot be enrolled in the current global transaction due " + "to an invalidate state, the current status is " + status + " and only ACTIVE and PREPARING are allowed");
                }
                // Registration may require elevated privileges under a security manager.
                SecurityHelper.doPrivilegedExceptionAction(new PrivilegedExceptionAction<Void>() {
                    public Void run() throws Exception {
                        add(session, changes);
                        return null;
                    }
                });
                return true;
            }
        } catch (PrivilegedActionException e) {
            log.warn("Could not check if a global Tx has been started or register the session into the resource manager", e);
        } catch (SystemException e) {
            log.warn("Could not check if a global Tx has been started or register the session into the resource manager", e);
        }
        return false;
    }
}
public class TableMetadataBuilder {
    /**
     * Adds an index. Must be called after including columns, because the column
     * metadata is recovered from the table metadata. The index options in the
     * resulting {@code IndexMetadata} will be null.
     *
     * @param indType the index type
     * @param indexName the index name
     * @param fields the columns which define the index
     * @return the table metadata builder
     * @throws ExecutionException if one of the named columns has not been added to this builder
     */
    @TimerJ
    public TableMetadataBuilder addIndex(IndexType indType, String indexName, String... fields) throws ExecutionException {
        IndexName indName = new IndexName(tableName.getName(), tableName.getName(), indexName);
        Map<ColumnName, ColumnMetadata> columnsMetadata = new HashMap<ColumnName, ColumnMetadata>(fields.length);
        // Recover the columns from the table metadata registered earlier.
        for (String field : fields) {
            ColumnMetadata cMetadata = columns.get(new ColumnName(tableName, field));
            if (cMetadata == null) {
                throw new ExecutionException("Trying to index a not existing column: " + field);
            }
            columnsMetadata.put(new ColumnName(tableName, field), cMetadata);
        }
        // Options are intentionally null (see javadoc above).
        IndexMetadata indMetadata = new IndexMetadata(indName, columnsMetadata, indType, null);
        indexes.put(indName, indMetadata);
        return this;
    }
}
public class JavaParser {
    /**
     * ANTLR-generated parser rule — do not hand-edit the logic.
     * Grammar: src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:317:1:
     * enumDeclaration : ENUM Identifier ( 'implements' typeList )? enumBody ;
     */
    public final void enumDeclaration() throws RecognitionException {
        int enumDeclaration_StartIndex = input.index();
        try {
            // Memoization: while backtracking, skip a rule already attempted here.
            if (state.backtracking > 0 && alreadyParsedRule(input, 11)) {
                return;
            }
            // Java.g:318:7 : ENUM Identifier ( 'implements' typeList )? enumBody
            {
                match(input, ENUM, FOLLOW_ENUM_in_enumDeclaration411);
                if (state.failed) return;
                match(input, Identifier, FOLLOW_Identifier_in_enumDeclaration413);
                if (state.failed) return;
                // Optional clause: ( 'implements' typeList )?
                int alt18 = 2;
                int LA18_0 = input.LA(1);
                if ((LA18_0 == 88)) { // token type 88 corresponds to 'implements'
                    alt18 = 1;
                }
                switch (alt18) {
                    case 1:
                        // Java.g:318:24 : 'implements' typeList
                    {
                        match(input, 88, FOLLOW_88_in_enumDeclaration416);
                        if (state.failed) return;
                        pushFollow(FOLLOW_typeList_in_enumDeclaration418);
                        typeList();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    break;
                }
                pushFollow(FOLLOW_enumBody_in_enumDeclaration422);
                enumBody();
                state._fsp--;
                if (state.failed) return;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving: record the memoization result.
            if (state.backtracking > 0) {
                memoize(input, 11, enumDeclaration_StartIndex);
            }
        }
    }
}
public class PropertySet { /** * Removes a set of properties from this set . */
public PropertySet removeAll ( Iterator < String > properties ) { } } | if ( properties != null ) { while ( properties . hasNext ( ) ) { remove ( properties . next ( ) ) ; } } return this ; |
public class Dklu {
    /**
     * Solves Lx = b where L is unit lower triangular and the unit diagonal entry
     * is NOT stored. Overwrites X with the solution. X is n-by-nrhs, stored in
     * ROW form with row dimension nrhs; nrhs must be in the range 1 to 4 (each
     * case is manually unrolled).
     *
     * @param n matrix dimension
     * @param Lip column pointers of L (read at Lip_offset)
     * @param Llen column lengths of L (read at Llen_offset)
     * @param LU packed numeric/index storage of the factors
     * @param nrhs number of right-hand sides (1..4)
     * @param X right-hand side on input, solution to Lx = b on output (at X_offset)
     */
    public static void klu_lsolve(int n, int[] Lip, int Lip_offset, int[] Llen, int Llen_offset, double[] LU, int nrhs, double[] X, int X_offset) {
        double[] x = new double[4];
        double lik;
        /* int [] */
        // Li aliases Lx: GET_POINTER returns one array holding row indices
        // (interpreted via (int) casts) and values, addressed by the two offsets.
        double[] Li;
        double[] Lx;
        int k, p, i;
        int[] len = new int[1];
        int[] Li_offset = new int[1];
        int[] Lx_offset = new int[1];
        switch (nrhs) {
        case 1:
            for (k = 0; k < n; k++) {
                x[0] = X[X_offset + k];
                Li = Lx = GET_POINTER(LU, Lip, Lip_offset, Llen, Llen_offset, Li_offset, Lx_offset, k, len);
                /* unit diagonal of L is not stored */
                for (p = 0; p < len[0]; p++) {
                    // MULT_SUB(X[Li[p]], Lx[p], x[0]);
                    X[X_offset + (int) Li[Li_offset[0] + p]] -= Lx[Lx_offset[0] + p] * x[0];
                }
            }
            break;
        case 2:
            for (k = 0; k < n; k++) {
                x[0] = X[X_offset + 2 * k];
                x[1] = X[X_offset + 2 * k + 1];
                Li = Lx = GET_POINTER(LU, Lip, Lip_offset, Llen, Llen_offset, Li_offset, Lx_offset, k, len);
                for (p = 0; p < len[0]; p++) {
                    i = (int) Li[Li_offset[0] + p];
                    lik = Lx[Lx_offset[0] + p];
                    // MULT_SUB(X[2 * i], lik, x[0]);
                    X[X_offset + 2 * i] -= lik * x[0];
                    // MULT_SUB(X[2 * i + 1], lik, x[1]);
                    X[X_offset + 2 * i + 1] -= lik * x[1];
                }
            }
            break;
        case 3:
            for (k = 0; k < n; k++) {
                x[0] = X[X_offset + 3 * k];
                x[1] = X[X_offset + 3 * k + 1];
                x[2] = X[X_offset + 3 * k + 2];
                Li = Lx = GET_POINTER(LU, Lip, Lip_offset, Llen, Llen_offset, Li_offset, Lx_offset, k, len);
                for (p = 0; p < len[0]; p++) {
                    i = (int) Li[Li_offset[0] + p];
                    lik = Lx[Lx_offset[0] + p];
                    // MULT_SUB(X[3 * i], lik, x[0]);
                    X[X_offset + 3 * i] -= lik * x[0];
                    // MULT_SUB(X[3 * i + 1], lik, x[1]);
                    X[X_offset + 3 * i + 1] -= lik * x[1];
                    // MULT_SUB(X[3 * i + 2], lik, x[2]);
                    X[X_offset + 3 * i + 2] -= lik * x[2];
                }
            }
            break;
        case 4:
            for (k = 0; k < n; k++) {
                x[0] = X[X_offset + 4 * k];
                x[1] = X[X_offset + 4 * k + 1];
                x[2] = X[X_offset + 4 * k + 2];
                x[3] = X[X_offset + 4 * k + 3];
                Li = Lx = GET_POINTER(LU, Lip, Lip_offset, Llen, Llen_offset, Li_offset, Lx_offset, k, len);
                for (p = 0; p < len[0]; p++) {
                    i = (int) Li[Li_offset[0] + p];
                    lik = Lx[Lx_offset[0] + p];
                    // MULT_SUB(X[4 * i], lik, x[0]);
                    X[X_offset + 4 * i] -= lik * x[0];
                    // MULT_SUB(X[4 * i + 1], lik, x[1]);
                    X[X_offset + 4 * i + 1] -= lik * x[1];
                    // MULT_SUB(X[4 * i + 2], lik, x[2]);
                    X[X_offset + 4 * i + 2] -= lik * x[2];
                    // MULT_SUB(X[4 * i + 3], lik, x[3]);
                    X[X_offset + 4 * i + 3] -= lik * x[3];
                }
            }
            break;
        }
    }
}
public class Analytics {
    /**
     * Resets the analytics client by clearing any stored information about the user. Events queued on
     * disk are not cleared, and will be uploaded at a later time.
     */
    public void reset() {
        // Wipe the persisted preferences for this client instance (keyed by tag).
        Utils.getSegmentSharedPreferences(application, tag).edit().clear().apply();
        // Discard the persisted traits and replace them with a fresh, empty Traits object.
        traitsCache.delete();
        traitsCache.set(Traits.create());
        // Keep the in-memory analytics context in sync with the now-empty traits.
        analyticsContext.setTraits(traitsCache.get());
        // Propagate the reset to the bundled integrations on the main thread.
        runOnMainThread(IntegrationOperation.RESET);
    }
}
public class RedisHashMap {
    /**
     * Gets a single value from the hash.
     *
     * @param field the hash field whose value should be returned
     * @return the value stored under {@code field}, cast to the caller's expected type
     */
    @SuppressWarnings("unchecked")
    public <T> T getOne(String field) {
        // NOTE(review): get(field) appears to return a map-like view that is then
        // queried with the same field again -- confirm this double lookup is intended
        // and not a copy/paste slip.
        return (T) get(field).get(field);
    }
}
public class ObjectSizes { /** * Memory a byte buffer consumes
* @ param buffer ByteBuffer to calculate in memory size
* @ return Total in - memory size of the byte buffer */
public static long sizeOnHeapOf ( ByteBuffer buffer ) { } } | if ( buffer . isDirect ( ) ) return BUFFER_EMPTY_SIZE ; // if we ' re only referencing a sub - portion of the ByteBuffer , don ' t count the array overhead ( assume it ' s slab
// allocated , so amortized over all the allocations the overhead is negligible and better to undercount than over )
if ( buffer . capacity ( ) > buffer . remaining ( ) ) return buffer . remaining ( ) ; return BUFFER_EMPTY_SIZE + sizeOfArray ( buffer . capacity ( ) , 1 ) ; |
public class EncodedElement {
    /**
     * This method adds a given number of bits of a long to a byte array.
     * Bits are written most-significant-first starting at an arbitrary bit
     * offset; bits of {@code dest} outside the written span are preserved.
     *
     * @param input long to store bits from
     * @param count number of low-order bits to store
     * @param startPos start bit location in array to begin writing
     * @param dest array to store bits in. dest MUST have enough space to store
     *             the given data, or this function will fail.
     */
    private static void addLong(long input, int count, int startPos, byte[] dest) {
        if (DEBUG_LEV > 30)
            System.err.println("EncodedElement::addLong : Begin");
        int currentByte = startPos / 8;   // byte index of the first write
        int currentOffset = startPos % 8; // bit offset within that byte
        int bitRoom; // how many bits can be placed in current byte
        long upMask; // to clear upper bits (lower bits auto-cleared by L-shift)
        int downShift; // bits to shift down, isolating top bits of input
        int upShift; // bits to shift up, packing byte from top.
        while (count > 0) {
            // find how many bits can be placed in current byte
            bitRoom = 8 - currentOffset;
            // get those bits
            // i.e, take upper 'bitsNeeded' of input, put to lower part of byte.
            downShift = count - bitRoom;
            upMask = 255 >>> currentOffset;
            upShift = 0;
            if (downShift < 0) {
                // Fewer bits remain than fit in this byte: pack them flush
                // against the current offset and leave the tail of the byte alone.
                // upMask = 255 >>> bitRoom - count;
                upShift = bitRoom - count;
                upMask = 255 >>> (currentOffset + upShift);
                downShift = 0;
            }
            if (DEBUG_LEV > 30) {
                System.err.println("count:offset:bitRoom:downShift:upShift:" + count + ":" + currentOffset + ":" + bitRoom + ":" + downShift + ":" + upShift);
            }
            long currentBits = (input >>> downShift) & (upMask);
            // shift bits back up to match offset
            currentBits = currentBits << upShift;
            // Align the mask with the written bits; note the (byte) cast happens
            // before the shift, so a full-byte mask sign-extends -- the subsequent
            // ~upMask still clears exactly the target bits.
            upMask = (byte) upMask << upShift;
            dest[currentByte] = (byte) (dest[currentByte] & (~upMask)); // clear target bits
            // merge bytes
            dest[currentByte] = (byte) (dest[currentByte] | currentBits);
            // System.out.println("new currentByte: " + dest[currentByte]);
            count -= bitRoom; // may go negative on the final partial byte, ending the loop
            currentOffset = 0;
            currentByte++;
        }
        if (DEBUG_LEV > 30)
            System.err.println("EncodedElement::addLong : End");
    }
}
public class ARCRecord {
    /**
     * Get a record header line as list of tokens.
     * We keep reading till we find a LINE_SEPARATOR or we reach the end
     * of file w/o finding a LINE_SEPARATOR or the line length is crazy.
     *
     * @param stream InputStream to read from.
     * @param list Empty list that gets filled w/ string tokens.
     * @return Count of characters read.
     * @exception IOException If problem reading stream or no line separator
     *            found or EOF before EOL or we didn't get minimum header fields.
     */
    private int getTokenizedHeaderLine(final InputStream stream, List<String> list) throws IOException {
        // Preallocate usual line size.
        StringBuilder buffer = new StringBuilder(2048 + 20);
        int read = 0;
        int previous = -1;
        for (int c = -1; true;) {
            previous = c;
            c = stream.read();
            if (c == -1) {
                throw new RecoverableIOException("Hit EOF before header EOL.");
            }
            c &= 0xff;
            read++;
            if (read > MAX_HEADER_LINE_LENGTH) {
                throw new IOException("Header line longer than max allowed " + " -- " + String.valueOf(MAX_HEADER_LINE_LENGTH) + " -- or passed buffer doesn't contain a line (Read: " + buffer.length() + "). Here's" + " some of what was read: " + buffer.substring(0, Math.min(buffer.length(), 256)));
            }
            if (c == LINE_SEPARATOR) {
                if (buffer.length() == 0) {
                    // Empty line at start of buffer. Skip it and try again.
                    continue;
                }
                if (list != null) {
                    list.add(buffer.toString());
                }
                // LOOP TERMINATION.
                break;
            } else if (c == HEADER_FIELD_SEPARATOR) {
                if (!isStrict() && previous == HEADER_FIELD_SEPARATOR) {
                    // Early ARCs sometimes had multiple spaces between fields.
                    continue;
                }
                if (list != null) {
                    list.add(buffer.toString());
                }
                // reset to empty
                buffer.setLength(0);
            } else {
                buffer.append((char) c);
            }
        }
        // List must have at least 3 elements in it and no more than 100. If
        // it has other than this, then bogus parse.
        if (list != null && (list.size() < 3 || list.size() > 100)) {
            throw new IOException("Unparseable header line: " + list);
        }
        // save verbatim header String
        this.headerString = StringUtils.join(list, " ");
        return read;
    }
}
public class MTable {
    /**
     * Init this object.
     *
     * @param pDatabase The raw data database for this raw data table.
     * @param record The field list of the table this raw data table will supply data to.
     * @param key The unique lookup key for this raw data table.
     */
    public void init(MDatabase pDatabase, FieldList record, Object key) {
        super.init(pDatabase, record, key);
        // No raw data has been read into this table yet.
        m_bDataIn = false;
    }
}
public class SwitchSubScreenHandler {
    /**
     * Build this sub-screen.
     * This base implementation always returns null; subclasses must override
     * it to construct the actual sub-screen.
     *
     * @param parentScreen The parent screen.
     * @param screenLocation The location to place the new sub-screen (null = same as current sub-screen).
     * @param properties Extra properties for the new sub-screen.
     * @param screenNo The sub-screen to build.
     * @return the new sub-screen, or null from this base class.
     */
    public BasePanel getSubScreen(BasePanel parentScreen, ScreenLocation screenLocation, Map<String, Object> properties, int screenNo) {
        return null; // Must override
    }
}
public class HistoryCleanupHelper {
    /**
     * Creates next batch object for history cleanup. First searches for historic process instances ready for cleanup. If there is still some place left in batch (configured batch
     * size was not reached), searches for historic decision instances and also adds them to the batch. Then if there is still some place left in batch, searches for historic case
     * instances and historic batches - and adds them to the batch.
     *
     * @param historyCleanupBatch the batch to fill; its id lists are populated in place
     * @param commandContext the current command context used to reach the managers
     */
    public static void prepareNextBatch(HistoryCleanupBatch historyCleanupBatch, CommandContext commandContext) {
        final HistoryCleanupJobHandlerConfiguration configuration = historyCleanupBatch.getConfiguration();
        final Integer batchSize = getHistoryCleanupBatchSize(commandContext);
        ProcessEngineConfigurationImpl processEngineConfiguration = commandContext.getProcessEngineConfiguration();
        // add process instance ids
        final List<String> historicProcessInstanceIds = commandContext.getHistoricProcessInstanceManager().findHistoricProcessInstanceIdsForCleanup(batchSize, configuration.getMinuteFrom(), configuration.getMinuteTo());
        if (historicProcessInstanceIds.size() > 0) {
            historyCleanupBatch.setHistoricProcessInstanceIds(historicProcessInstanceIds);
        }
        // if batch is not full, add decision instance ids (only when DMN is enabled)
        if (historyCleanupBatch.size() < batchSize && processEngineConfiguration.isDmnEnabled()) {
            final List<String> historicDecisionInstanceIds = commandContext.getHistoricDecisionInstanceManager().findHistoricDecisionInstanceIdsForCleanup(batchSize - historyCleanupBatch.size(), configuration.getMinuteFrom(), configuration.getMinuteTo());
            if (historicDecisionInstanceIds.size() > 0) {
                historyCleanupBatch.setHistoricDecisionInstanceIds(historicDecisionInstanceIds);
            }
        }
        // if batch is not full, add case instance ids (only when CMMN is enabled)
        if (historyCleanupBatch.size() < batchSize && processEngineConfiguration.isCmmnEnabled()) {
            final List<String> historicCaseInstanceIds = commandContext.getHistoricCaseInstanceManager().findHistoricCaseInstanceIdsForCleanup(batchSize - historyCleanupBatch.size(), configuration.getMinuteFrom(), configuration.getMinuteTo());
            if (historicCaseInstanceIds.size() > 0) {
                historyCleanupBatch.setHistoricCaseInstanceIds(historicCaseInstanceIds);
            }
        }
        // if batch is not full, add batch ids for the configured batch operations
        Map<String, Integer> batchOperationsForHistoryCleanup = processEngineConfiguration.getParsedBatchOperationsForHistoryCleanup();
        if (historyCleanupBatch.size() < batchSize && batchOperationsForHistoryCleanup != null && !batchOperationsForHistoryCleanup.isEmpty()) {
            List<String> historicBatchIds = commandContext.getHistoricBatchManager().findHistoricBatchIdsForCleanup(batchSize - historyCleanupBatch.size(), batchOperationsForHistoryCleanup, configuration.getMinuteFrom(), configuration.getMinuteTo());
            if (historicBatchIds.size() > 0) {
                historyCleanupBatch.setHistoricBatchIds(historicBatchIds);
            }
        }
    }
}
public class Distance {
    /**
     * Gets the Kumar-Johnson divergence:
     * sum over i of (p[i]^2 - q[i]^2)^2 / (2 * (p[i]*q[i])^(3/2)).
     * Positions where either component is zero are skipped, since the
     * denominator would be zero there.
     *
     * @param p P vector.
     * @param q Q vector.
     * @return The Kumar-Johnson divergence between p and q.
     */
    public static double KumarJohnsonDivergence(double[] p, double[] q) {
        double r = 0;
        for (int i = 0; i < p.length; i++) {
            if (p[i] != 0 && q[i] != 0) {
                // BUG FIX: the denominator 2*(p*q)^1.5 was previously multiplied
                // instead of divided -- "/ 2 * Math.pow(...)" parses as
                // (x / 2) * pow(...) under Java operator precedence.
                r += Math.pow(p[i] * p[i] - q[i] * q[i], 2) / (2 * Math.pow(p[i] * q[i], 1.5));
            }
        }
        return r;
    }
}
public class ElemTemplateElement { /** * Tell if the result namespace decl should be excluded . Should be called before
* namespace aliasing ( I think ) .
* @ param prefix non - null reference to prefix .
* @ param uri reference to namespace that prefix maps to , which is protected
* for null , but should really never be passed as null .
* @ return true if the given namespace should be excluded .
* @ throws TransformerException */
private boolean excludeResultNSDecl ( String prefix , String uri ) throws TransformerException { } } | if ( uri != null ) { if ( uri . equals ( Constants . S_XSLNAMESPACEURL ) || getStylesheet ( ) . containsExtensionElementURI ( uri ) ) return true ; if ( containsExcludeResultPrefix ( prefix , uri ) ) return true ; } return false ; |
public class FctBnEntitiesProcessors {
    /**
     * <p>Get PrcEntityFolSave (create and put into map).</p>
     *
     * @return requested PrcEntityFolSave
     * @throws Exception - an exception
     */
    protected final PrcEntityFolSave<RS, IHasId<Object>, Object> createPutPrcEntityFolSave() throws Exception {
        PrcEntityFolSave<RS, IHasId<Object>, Object> proc = new PrcEntityFolSave<RS, IHasId<Object>, Object>();
        // Wire the processor's collaborators before publishing it in the map.
        proc.setSrvOrm(getSrvOrm());
        proc.setGettersRapiHolder(getGettersRapiHolder());
        // assigning fully initialized object:
        this.processorsMap.put(PrcEntityFolSave.class.getSimpleName(), proc);
        return proc;
    }
}
public class JSRemoteConsumerPoint {
    /**
     * This method is called by the parent to cancel a request. Note that it is possible that the
     * request has already been satisfied, and parent.satisfiedRequest() is being called concurrently
     * with this method.
     *
     * @param tick The tick identifying the request
     */
    public final void cancelRequest(long tick) {
        Long objTick = Long.valueOf(tick);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "cancelRequest", objTick);
        // Ticks satisfied as a side effect of the cancellation; delivered to the
        // parent only after the lock is released.
        ArrayList<AORequestedTick> satisfiedTicks = null;
        try {
            this.lock();
            try {
                // it is possible that this event occurs after this object is closed.
                // it is safe to ignore it since we must have already transitioned the tick to its final state
                if (closed) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "cancelRequest");
                    return;
                }
                AORequestedTick requestedTick = (AORequestedTick) tableOfRequests.get(objTick);
                if (requestedTick != null) {
                    boolean transitionOccured = cancelRequestInternal(requestedTick, false);
                    // If we expired the request and have more then process those
                    // requests now
                    if (transitionOccured && !listOfRequests.isEmpty())
                        satisfiedTicks = processQueuedMsgs(null);
                }
            } finally {
                this.unlock();
            }
        } catch (SINotPossibleInCurrentConfigurationException e) {
            // No FFDC code needed
            notifyException(e);
        }
        if (satisfiedTicks != null) {
            // inform parent about satisfied ticks - outside lock
            int length = satisfiedTicks.size();
            for (int i = 0; i < length; i++) {
                AORequestedTick aotick = (AORequestedTick) satisfiedTicks.get(i);
                parent.satisfiedRequest(aotick.tick, aotick.getMessage());
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "cancelRequest");
    }
}
public class KeyTools { /** * 使用指定keySpec构建对称加密的key
* @ param algorithm 算法名称 , 当前仅支持AES和DES
* @ param keySpec keySpec , 多次调用该方法生成的key等效
* @ return 对称加密的key */
public static SecretKey buildKey ( AbstractCipher . Algorithms algorithm , byte [ ] keySpec ) { } } | SecretKeySpec key = new SecretKeySpec ( keySpec , algorithm . name ( ) ) ; return key ; |
public class AtomicGrowingMatrix { /** * { @ inheritDoc } */
public void setRow ( int row , DoubleVector values ) { } } | checkIndices ( row , 0 ) ; AtomicVector rowEntry = getRow ( row , values . length ( ) - 1 , true ) ; denseArrayReadLock . lock ( ) ; Vectors . copy ( rowEntry , values ) ; denseArrayReadLock . unlock ( ) ; |
public class DataReader { /** * Parse the locale identity header from a calendar JSON tree . */
private LocaleID parseIdentity ( String code , JsonObject root ) throws IOException { } } | JsonObject node = resolve ( root , "main" , code , "identity" ) ; return new LocaleID ( string ( node , "language" ) , string ( node , "script" ) , string ( node , "territory" ) , string ( node , "variant" ) ) ; |
public class Span { /** * Compares the specified span to the current span . */
@ Override public int compareTo ( final Span s ) { } } | if ( getStart ( ) < s . getStart ( ) ) { return - 1 ; } else if ( getStart ( ) == s . getStart ( ) ) { if ( getEnd ( ) > s . getEnd ( ) ) { return - 1 ; } else if ( getEnd ( ) < s . getEnd ( ) ) { return 1 ; } else { // compare the type
if ( getType ( ) == null && s . getType ( ) == null ) { return 0 ; } else if ( getType ( ) != null && s . getType ( ) != null ) { // use type lexicography order
return getType ( ) . compareTo ( s . getType ( ) ) ; } else if ( getType ( ) != null ) { return - 1 ; } return 1 ; } } else { return 1 ; } |
public class ZonedDateTime {
    /**
     * Obtains an instance of {@code ZonedDateTime} from an {@code Instant}.
     * This creates a zoned date-time with the same instant as that specified.
     * Calling {@link #toInstant()} will return an instant equal to the one used here.
     * Converting an instant to a zoned date-time is simple as there is only one valid
     * offset for each instant.
     *
     * @param instant the instant to create the date-time from, not null
     * @param zone the time-zone, not null
     * @return the zoned date-time, not null
     * @throws DateTimeException if the result exceeds the supported range
     */
    public static ZonedDateTime ofInstant(Instant instant, ZoneId zone) {
        // Fail fast with a descriptive NPE for either missing argument.
        Jdk8Methods.requireNonNull(instant, "instant");
        Jdk8Methods.requireNonNull(zone, "zone");
        // One instant maps to exactly one offset in a zone, so no ambiguity here.
        return create(instant.getEpochSecond(), instant.getNano(), zone);
    }
}
public class ReferenceOrLiteralImpl {
    /**
     * <!-- begin-user-doc -->
     * Reflective EMF accessor: returns the value of the structural feature
     * identified by {@code featureID}, delegating unknown features to the
     * superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case SimpleAntlrPackage.REFERENCE_OR_LITERAL__NAME:
                return getName();
        }
        // Features not declared on this class are handled by the superclass.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class OPTICSProjector { /** * Get or produce the actual OPTICS plot .
* @ param context Context to use
* @ return Plot */
public OPTICSPlot getOPTICSPlot ( VisualizerContext context ) { } } | if ( plot == null ) { plot = OPTICSPlot . plotForClusterOrder ( clusterOrder , context ) ; } return plot ; |
public class ClassUtils {
    /**
     * Get all the dex paths: the base APK plus any extracted secondary dex
     * archives, plus instant-run dex files when debugging.
     *
     * @param context the application context
     * @return all the dex path
     * @throws PackageManager.NameNotFoundException if the own package cannot be resolved
     * @throws IOException if an expected secondary dex archive is missing
     */
    public static List<String> getSourcePaths(Context context) throws PackageManager.NameNotFoundException, IOException {
        ApplicationInfo applicationInfo = context.getPackageManager().getApplicationInfo(context.getPackageName(), 0);
        File sourceApk = new File(applicationInfo.sourceDir);
        List<String> sourcePaths = new ArrayList<>();
        sourcePaths.add(applicationInfo.sourceDir); // add the default apk path
        // the prefix of extracted file, ie: test.classes
        String extractedFilePrefix = sourceApk.getName() + EXTRACTED_NAME_EXT;
        // If the VM already supports multidex natively, do not load classesN.zip from
        // the secondary folder -- nothing is extracted there any more.
        // Checking for the "multidex.version" shared-preference entry instead would be
        // unreliable, because users upgrading from an older version still carry it.
        if (!isVMMultidexCapable()) {
            // the total dex numbers
            int totalDexNumber = getMultiDexPreferences(context).getInt(KEY_DEX_NUMBER, 1);
            File dexDir = new File(applicationInfo.dataDir, SECONDARY_FOLDER_NAME);
            for (int secondaryNumber = 2; secondaryNumber <= totalDexNumber; secondaryNumber++) {
                // for each dex file, ie: test.classes2.zip, test.classes3.zip...
                String fileName = extractedFilePrefix + secondaryNumber + EXTRACTED_SUFFIX;
                File extractedFile = new File(dexDir, fileName);
                if (extractedFile.isFile()) {
                    sourcePaths.add(extractedFile.getAbsolutePath());
                    // we ignore the verify zip part
                } else {
                    throw new IOException("Missing extracted secondary dex file '" + extractedFile.getPath() + "'");
                }
            }
        }
        if (ARouter.debuggable()) {
            // Search instant run support only debuggable
            sourcePaths.addAll(tryLoadInstantRunDexFile(applicationInfo));
        }
        return sourcePaths;
    }
}
public class OperatingSystem { /** * Supported configuration manager name and versions for an AWS OpsWorks Stacks operating system .
* @ return Supported configuration manager name and versions for an AWS OpsWorks Stacks operating system . */
public java . util . List < OperatingSystemConfigurationManager > getConfigurationManagers ( ) { } } | if ( configurationManagers == null ) { configurationManagers = new com . amazonaws . internal . SdkInternalList < OperatingSystemConfigurationManager > ( ) ; } return configurationManagers ; |
public class ImageUtil {
    /**
     * This method decodes a bitmap from a file, and does pixel combining in order to produce an in-memory bitmap that is
     * smaller than the original. It will create only the returned bitmap in memory.
     * From <a href="http://developer.android.com/training/displaying-bitmaps/load-bitmap.html">Loading Large Bitmaps Efficiently</a>
     *
     * @param fileAbsolutePath Full absolute path to the image file. (optional, maybe null)
     * @param fileUri content uri of the source image. (optional, maybe null)
     * @param minShrunkWidth If edge of this image is greater than minShrunkWidth, the image will be shrunken such it is not smaller than minShrunkWidth.
     * @param minShrunkHeight If edge of this image is greater than minShrunkHeight, the image will be shrunken such it is not smaller than minShrunkHeight.
     * @param config You can use this to change the number of bytes per pixel using various bitmap configurations.
     * @param orientation The orientation for the image expressed as degrees
     * @return A bitmap whose edges are equal to or less than minShrunkEdge in length, or null if neither source was usable.
     */
    private static Bitmap createLightweightScaledBitmap(String fileAbsolutePath, Uri fileUri, int minShrunkWidth, int minShrunkHeight, Bitmap.Config config, int orientation) {
        // Decide which source to decode from: content URI is preferred, file path is the fallback.
        boolean bCreateFromUri;
        Context context = ApptentiveInternal.getInstance().getApplicationContext();
        if (context != null && fileUri != null) {
            bCreateFromUri = true;
        } else if (!TextUtils.isEmpty(fileAbsolutePath)) {
            bCreateFromUri = false;
        } else {
            return null;
        }
        BitmapFactory.Options options = new BitmapFactory.Options();
        if (config != null) {
            options.inPreferredConfig = config;
        }
        final BitmapFactory.Options decodeBoundsOptions = new BitmapFactory.Options();
        decodeBoundsOptions.inJustDecodeBounds = true;
        decodeBoundsOptions.inScaled = false;
        // Obtain image dimensions without actually decoding the image into memory
        if (bCreateFromUri && context != null) {
            InputStream is = null;
            try {
                is = context.getContentResolver().openInputStream(fileUri);
                BitmapFactory.decodeStream(is, null, decodeBoundsOptions);
            } catch (FileNotFoundException e) {
                // NOTE(review): throwing NullPointerException for a missing file is
                // unconventional -- callers appear to depend on it; confirm before changing.
                throw new NullPointerException("Failed to decode image");
            } finally {
                Util.ensureClosed(is);
            }
        } else if (!bCreateFromUri) {
            BitmapFactory.decodeFile(fileAbsolutePath, decodeBoundsOptions);
        }
        // Swap the reported dimensions when the EXIF orientation rotates by 90/270 degrees.
        int width, height;
        if (orientation == 90 || orientation == 270) {
            //noinspection SuspiciousNameCombination
            width = decodeBoundsOptions.outHeight;
            //noinspection SuspiciousNameCombination
            height = decodeBoundsOptions.outWidth;
        } else {
            width = decodeBoundsOptions.outWidth;
            height = decodeBoundsOptions.outHeight;
        }
        ApptentiveLog.v(UTIL, "Original bitmap dimensions: %d x %d", width, height);
        // Pick the largest power-free sample ratio that keeps both edges above the minimums.
        int sampleRatio = Math.min(width / minShrunkWidth, height / minShrunkHeight);
        if (sampleRatio >= 2) {
            options.inSampleSize = sampleRatio;
        }
        options.inScaled = false;
        options.inJustDecodeBounds = false;
        ApptentiveLog.v(UTIL, "Bitmap sample size = %d", options.inSampleSize);
        // Second pass: actually decode the (subsampled) pixels.
        Bitmap retImg = null;
        if (bCreateFromUri && context != null) {
            InputStream is = null;
            try {
                is = context.getContentResolver().openInputStream(fileUri);
                retImg = BitmapFactory.decodeStream(is, null, options);
            } catch (FileNotFoundException e) {
                throw new NullPointerException("Failed to decode image");
            } finally {
                Util.ensureClosed(is);
            }
        } else if (!bCreateFromUri) {
            retImg = BitmapFactory.decodeFile(fileAbsolutePath, options);
        }
        ApptentiveLog.v(UTIL, "Sampled bitmap size = %d X %d", options.outWidth, options.outHeight);
        // Apply EXIF rotation, if any, producing a new rotated bitmap.
        if ((orientation != 0 && orientation != -1) && retImg != null) {
            Matrix matrix = new Matrix();
            switch (orientation) {
                case ExifInterface.ORIENTATION_ROTATE_90:
                    matrix.postRotate(90);
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    matrix.postRotate(180);
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    matrix.postRotate(270);
                    break;
            }
            try {
                retImg = Bitmap.createBitmap(retImg, 0, 0, retImg.getWidth(), retImg.getHeight(), matrix, true);
            } catch (IllegalArgumentException e) {
                throw new NullPointerException("Failed to decode image");
            }
        }
        if (retImg == null) {
            throw new NullPointerException("Failed to decode image");
        }
        return retImg;
    }
}
public class ColorPalettePreference {
    /**
     * Obtains all attributes from a specific attribute set.
     *
     * @param attributeSet
     *            The attribute set, the attributes should be obtained from, as an instance of the type
     *            {@link AttributeSet} or null, if no attributes should be obtained
     * @param defaultStyle
     *            The default style to apply to this preference. If 0, no style will be applied (beyond
     *            what is included in the theme). This may either be an attribute resource, whose value
     *            will be retrieved from the current theme, or an explicit style resource
     * @param defaultStyleResource
     *            A resource identifier of a style resource that supplies default values for the
     *            preference, used only if the default style is 0 or can not be found in the theme. Can
     *            be 0 to not look for defaults
     */
    private void obtainStyledAttributes(@Nullable final AttributeSet attributeSet, @AttrRes final int defaultStyle, @StyleRes final int defaultStyleResource) {
        TypedArray typedArray = getContext().obtainStyledAttributes(attributeSet, R.styleable.ColorPalettePreference, defaultStyle, defaultStyleResource);
        try {
            // Read each styled attribute into its corresponding field.
            obtainColorPalette(typedArray);
            obtainDialogPreviewSize(typedArray);
            obtainDialogPreviewShape(typedArray);
            obtainDialogPreviewBorderWidth(typedArray);
            obtainDialogPreviewBorderColor(typedArray);
            obtainDialogPreviewBackground(typedArray);
            obtainNumberOfColumns(typedArray);
        } finally {
            // TypedArrays are pooled; always recycle, even if an accessor throws.
            typedArray.recycle();
        }
    }
}
public class Th { /** * Gets the value of the headers property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the headers property .
* For example , to add a new item , do as follows :
* < pre >
* getHeaders ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link Object } */
public java . util . List < Object > getHeaders ( ) { } } | if ( headers == null ) { headers = new ArrayList < Object > ( ) ; } return this . headers ; |
public class CmsUserDriver { /** * Returns the folder for the given organizational units , or the base folder if < code > null < / code > . < p >
* The base folder will be created if it does not exist . < p >
* @ param dbc the current db context
* @ param orgUnit the organizational unit to get the folder for
* @ return the base folder for organizational units
* @ throws CmsException if something goes wrong */
protected CmsResource internalOrgUnitFolder ( CmsDbContext dbc , CmsOrganizationalUnit orgUnit ) throws CmsException { } } | if ( orgUnit != null ) { return m_driverManager . readResource ( dbc , ORGUNIT_BASE_FOLDER + orgUnit . getName ( ) , CmsResourceFilter . DEFAULT ) ; } else { return null ; } |
public class MobileCommand { /** * This method forms a { @ link java . util . Map } of parameters for the
* long key event invocation .
* @ param key code for the long key pressed on the device .
* @ return a key - value pair . The key is the command name . The value is a
* { @ link java . util . Map } command arguments . */
public static Map . Entry < String , Map < String , ? > > longPressKeyCodeCommand ( int key ) { } } | return new AbstractMap . SimpleEntry < > ( LONG_PRESS_KEY_CODE , prepareArguments ( "keycode" , key ) ) ; |
public class Hierarchy { /** * Determine whether the given INVOKESTATIC instruction is an inner - class
* field accessor method .
* @ param inv
* the INVOKESTATIC instruction
* @ param cpg
* the ConstantPoolGen for the method
* @ return true if the instruction is an inner - class field accessor , false
* if not */
public static boolean isInnerClassAccess ( INVOKESTATIC inv , ConstantPoolGen cpg ) { } } | String methodName = inv . getName ( cpg ) ; return methodName . startsWith ( "access$" ) ; |
public class Notification { /** * The list of job states that will trigger a notification for this job .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setJobStatesToNotify ( java . util . Collection ) } or { @ link # withJobStatesToNotify ( java . util . Collection ) } if
* you want to override the existing values .
* @ param jobStatesToNotify
* The list of job states that will trigger a notification for this job .
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see JobState */
public Notification withJobStatesToNotify ( String ... jobStatesToNotify ) { } } | if ( this . jobStatesToNotify == null ) { setJobStatesToNotify ( new java . util . ArrayList < String > ( jobStatesToNotify . length ) ) ; } for ( String ele : jobStatesToNotify ) { this . jobStatesToNotify . add ( ele ) ; } return this ; |
public class MeanVarianceMinMax {
    /**
     * Create and initialize a new array of MeanVarianceMinMax.
     *
     * @param dimensionality Dimensionality
     * @return New and initialized Array
     */
    public static MeanVarianceMinMax[] newArray(int dimensionality) {
        MeanVarianceMinMax[] result = new MeanVarianceMinMax[dimensionality];
        // Fill every slot with a fresh tracker so callers never see nulls.
        int index = 0;
        while (index < result.length) {
            result[index] = new MeanVarianceMinMax();
            index++;
        }
        return result;
    }
}
public class PojoDataParser { /** * { @ inheritDoc } */
@ NonNull @ Override public List < BaseCell > parseComponent ( @ Nullable JSONArray data , Card parent , ServiceManager serviceManager ) { } } | if ( data == null ) { return new ArrayList < > ( ) ; } final int size = data . length ( ) ; final List < BaseCell > result = new ArrayList < > ( size ) ; // parse body
JSONArray componentArray = data ; if ( componentArray != null ) { final int cellLength = componentArray . length ( ) ; for ( int i = 0 ; i < cellLength ; i ++ ) { final JSONObject cellData = componentArray . optJSONObject ( i ) ; BaseCell cell = parseSingleComponent ( cellData , parent , serviceManager ) ; if ( cell != null ) { result . add ( cell ) ; } } } return result ; |
public class PixelInfoStatusBar { /** * Sets the model dictating the pixel shown by this status bar .
* @ param newModel the new model
* @ throws NullPointerException if { @ code newModel } is { @ code null } */
public final void setModel ( PixelModel newModel ) { } } | if ( newModel == null ) throw new NullPointerException ( ) ; if ( model != newModel ) { if ( model != null ) model . removeChangeListener ( modelListener ) ; model = newModel ; model . addChangeListener ( modelListener ) ; update ( ) ; } |
public class LocationJSONImpl {
    /**
     * Builds a response list of {@link Location}s from the given JSON array.
     * When {@code storeJSON} is true, each location (and the list itself) is
     * registered with {@link TwitterObjectFactory} for later raw-JSON lookup.
     * (package-private)
     */
    static ResponseList<Location> createLocationList(JSONArray list, boolean storeJSON) throws TwitterException {
        try {
            int size = list.length();
            ResponseList<Location> locations = new ResponseListImpl<Location>(size, null);
            for (int i = 0; i < size; i++) {
                JSONObject json = list.getJSONObject(i);
                Location location = new LocationJSONImpl(json);
                locations.add(location);
                if (storeJSON) {
                    TwitterObjectFactory.registerJSONObject(location, json);
                }
            }
            if (storeJSON) {
                TwitterObjectFactory.registerJSONObject(locations, list);
            }
            return locations;
        } catch (JSONException jsone) {
            // Wrap malformed JSON in the library's checked exception type.
            throw new TwitterException(jsone);
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.