signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class Cappuccino { /** * Throws { @ link CappuccinoException } if no { @ link CappuccinoResourceWatcher } has yet been associated with
* { @ param name } .
* @ param name The name associated with the { @ link CappuccinoResourceWatcher } .
* @ throws CappuccinoException if there is no { @ code CappuccinoResourceWatcher } associated
* with the given { @ param name } . */
private static void throwIfAbsent ( @ NonNull String name ) { } }
|
if ( ! mResourceWatcherRegistry . containsKey ( name ) ) { throw new CappuccinoException ( String . format ( "There is no %s associated with the name `%s`" , CappuccinoResourceWatcher . class . getSimpleName ( ) , name ) ) ; }
|
public class AbstractProbeListener { /** * Retrieve the fingerprint of a test based on its description
* @ param description The description
* @ return The fingerprint */
protected final String getFingerprint ( Description description ) { } }
|
return TestResultDataUtils . getFingerprint ( description . getTestClass ( ) , description . getMethodName ( ) ) ;
|
public class DServer { synchronized String [ ] dev_poll_status ( final String dev_name ) throws DevFailed { } }
|
Util . out4 . println ( "In dev_poll_status command" ) ; // Find the device
final DeviceImpl dev = Util . instance ( ) . get_device_by_name ( dev_name ) ; final Vector poll_list = dev . get_poll_obj_list ( ) ; final int nb_poll_obj = poll_list . size ( ) ; // Return an empty sequence if nothing is polled for this device
if ( nb_poll_obj == 0 ) { return new String [ 0 ] ; } // Compute how many cmds and / or attributes are polled
int nb_cmd = 0 ; for ( int i = 0 ; i < nb_poll_obj ; i ++ ) { final PollObj poll_obj = ( PollObj ) poll_list . elementAt ( i ) ; if ( poll_obj . get_type ( ) == Tango_POLL_CMD ) { nb_cmd ++ ; } } // Allocate memory for returned strings
final String [ ] ret = new String [ nb_poll_obj ] ; // Populate returned strings
int cmd_ind = 0 ; int attr_ind = nb_cmd ; String returned_info ; for ( int i = 0 ; i < nb_poll_obj ; i ++ ) { final PollObj poll_obj = ( PollObj ) poll_list . elementAt ( i ) ; // First , the name
final int type = poll_obj . get_type ( ) ; if ( type == Tango_POLL_CMD ) { returned_info = "Polled command name = " ; } else { returned_info = "Polled attribute name = " ; } returned_info += poll_obj . get_name ( ) ; // Add update period
returned_info += "\nPolling period (mS) = " ; final int tmp = poll_obj . get_upd ( ) ; returned_info = returned_info + tmp ; // Add ring buffer depth
returned_info += "\nPolling ring buffer depth = " ; final int depth = dev . get_poll_ring_depth ( ) ; if ( depth == 0 ) { returned_info += Tango_DefaultPollRingDepth ; } else { returned_info += depth ; } // Add a message if the data ring is empty
if ( poll_obj . is_ring_empty ( ) == true ) { returned_info += "\nNo data recorded yet" ; } else { // Add needed time to execute last command
returned_info += "\nTime needed for the last " ; if ( type == Tango_POLL_CMD ) { returned_info += "command execution (mS) = " ; } else { returned_info += "attribute reading (mS) = " ; } returned_info += poll_obj . get_needed_time_i ( ) ; // Add not updated since . . . info
returned_info += "\nData not updated since " ; final double since = poll_obj . get_last_insert_date_i ( ) ; final long ctm = System . currentTimeMillis ( ) ; final int tv_sec = ( int ) ( ctm / 1000 ) ; final int tv_usec = ( int ) ( ctm - 1000 * tv_sec ) * 1000 ; final double now_d = tv_sec + ( double ) tv_usec / 1000000 ; final double diff_t = now_d - since ; if ( diff_t < 1.0 ) { final int nb_msec = ( int ) ( diff_t * 1000 ) ; returned_info = returned_info + nb_msec + " mS" ; } else if ( diff_t < 60.0 ) { final int nb_sec = ( int ) diff_t ; final int nb_msec = ( int ) ( ( diff_t - nb_sec ) * 1000 ) ; returned_info = returned_info + nb_sec + " S and " ; returned_info = returned_info + nb_msec + " mS" ; } else { final int nb_min = ( int ) ( diff_t / 60 ) ; final int nb_sec = ( int ) ( diff_t - 60 * nb_min ) ; final int nb_msec = ( int ) ( ( diff_t - ( int ) diff_t ) * 1000 ) ; returned_info = returned_info + nb_min + " MN" ; if ( nb_sec != 0 ) { returned_info = returned_info + " ," + nb_sec + " S" ; } if ( nb_msec != 0 ) { returned_info = returned_info + " and " + nb_msec + " mS" ; } } // Add delta _ t between last record ( s )
try { returned_info += "\nDelta between last records (in mS) = " ; final double [ ] delta = poll_obj . get_delta_t_i ( 4 ) ; for ( int j = 0 ; j < delta . length ; j ++ ) { final int nb_msec = ( int ) ( delta [ j ] * 1000 ) ; returned_info = returned_info + nb_msec ; if ( j != delta . length - 1 ) { returned_info = returned_info + ", " ; } } } catch ( final DevFailed e ) { } // Add last polling exception fields ( if any )
if ( poll_obj . is_last_an_error_i ( ) == true ) { if ( type == Tango_POLL_CMD ) { returned_info += "\nLast command execution FAILED :" ; } else { returned_info += "\nLast attribute read FAILED :" ; } final DevFailed ex = poll_obj . get_last_except_i ( ) ; returned_info += "\n\tReason = " + ex . errors [ 0 ] . reason ; returned_info += "\n\tDesc = " + ex . errors [ 0 ] . desc ; returned_info += "\n\tOrigin = " + ex . errors [ 0 ] . origin ; } } // Init . string in sequence
if ( type == Tango_POLL_CMD ) { ret [ cmd_ind ] = returned_info ; cmd_ind ++ ; } else { ret [ attr_ind ] = returned_info ; attr_ind ++ ; } } return ret ;
|
public class DynamicList { /** * don ' t create a separate unit test - tools tree doesn ' t currently warrant them */
private boolean isWellFormed ( ) { } }
|
for ( int i = 0 ; i < maxHeight ; i ++ ) { int c = 0 ; for ( Node node = head ; node != null ; node = node . next ( i ) ) { if ( node . prev ( i ) != null && node . prev ( i ) . next ( i ) != node ) return false ; if ( node . next ( i ) != null && node . next ( i ) . prev ( i ) != node ) return false ; c += node . size [ i ] ; if ( i + 1 < maxHeight && node . parent ( i + 1 ) . next ( i + 1 ) == node . next ( i ) ) { if ( node . parent ( i + 1 ) . size [ i + 1 ] != c ) return false ; c = 0 ; } } if ( i == maxHeight - 1 && c != size + 1 ) return false ; } return true ;
|
public class MapConverter { /** * getAll .
* @ param data a { @ link java . util . Map } object .
* @ param attr a { @ link java . lang . String } object .
* @ param clazz a { @ link java . lang . Class } object .
* @ param < T > a T object .
* @ return an array of T objects . */
public < T > T [ ] getAll ( Map < String , Object > data , String attr , Class < T > clazz ) { } }
|
return convert ( ( Object [ ] ) data . get ( attr ) , clazz ) ;
|
public class Task { /** * / * - - - - - [ closeOutputs ] - - - - - */
protected void closeInputs ( ) throws IOException { } }
|
while ( true ) { if ( in instanceof Transport ) return ; in . close ( ) ; in = in . detachInput ( ) ; }
|
public class BizwifiAPI { /** * 商家主页管理 - 查询商家主页
* @ param accessToken accessToken
* @ param shopInfo shopInfo
* @ return HomePageGetResult */
public static HomePageGetResult homepageGet ( String accessToken , ShopInfo shopInfo ) { } }
|
return homepageGet ( accessToken , JsonUtil . toJSONString ( shopInfo ) ) ;
|
public class AnimatedDialog { /** * < / p > Opens the dialog with a translation animation to the content view < / p > */
private void slideOpen ( ) { } }
|
TranslateAnimation slideUp = new TranslateAnimation ( Animation . RELATIVE_TO_SELF , 0 , Animation . RELATIVE_TO_SELF , 0 , Animation . RELATIVE_TO_SELF , 1.0f , Animation . RELATIVE_TO_SELF , 0f ) ; slideUp . setDuration ( 500 ) ; slideUp . setInterpolator ( new AccelerateInterpolator ( ) ) ; ( ( ViewGroup ) getWindow ( ) . getDecorView ( ) ) . getChildAt ( 0 ) . startAnimation ( slideUp ) ; super . show ( ) ;
|
public class DRLogSegmentId { /** * Initial Ack DR Id is used as initial value for DR Idempotency filter */
public static boolean isInitialAckDRId ( long drId ) { } }
|
if ( drId == - 1 ) return true ; int clusterId = getClusterIdFromDRId ( drId ) ; if ( clusterId >= 0 && clusterId <= MAX_CLUSTER_ID ) { return ( ( drId >>> 63 ) != 1L ) && ( getSequenceNumberFromDRId ( drId ) == MAX_SEQUENCE_NUMBER ) ; } return false ;
|
public class XMLConfiguration { protected void initServletMapping ( XmlParser . Node node ) { } }
|
String name = node . getString ( "servlet-name" , false , true ) ; String pathSpec = node . getString ( "url-pattern" , false , true ) ; getWebApplicationHandler ( ) . mapPathToServlet ( pathSpec , name ) ;
|
public class Weeks { /** * Adds this amount to the specified temporal object .
* This returns a temporal object of the same observable type as the input
* with this amount added .
* In most cases , it is clearer to reverse the calling pattern by using
* { @ link Temporal # plus ( TemporalAmount ) } .
* < pre >
* / / these two lines are equivalent , but the second approach is recommended
* dateTime = thisAmount . addTo ( dateTime ) ;
* dateTime = dateTime . plus ( thisAmount ) ;
* < / pre >
* Only non - zero amounts will be added .
* This instance is immutable and unaffected by this method call .
* @ param temporal the temporal object to adjust , not null
* @ return an object of the same type with the adjustment made , not null
* @ throws DateTimeException if unable to add
* @ throws UnsupportedTemporalTypeException if the WEEKS unit is not supported
* @ throws ArithmeticException if numeric overflow occurs */
@ Override public Temporal addTo ( Temporal temporal ) { } }
|
if ( weeks != 0 ) { temporal = temporal . plus ( weeks , WEEKS ) ; } return temporal ;
|
public class DefaultJsonQueryLogEntryCreator { /** * Write batch size as json .
* < p > default : " batchSize " : 1,
* @ param sb StringBuilder to write
* @ param execInfo execution info
* @ param queryInfoList query info list */
protected void writeBatchSizeEntry ( StringBuilder sb , ExecutionInfo execInfo , List < QueryInfo > queryInfoList ) { } }
|
sb . append ( "\"batchSize\":" ) ; sb . append ( execInfo . getBatchSize ( ) ) ; sb . append ( ", " ) ;
|
public class JvmTypesBuilder { /** * / * @ Nullable */
public JvmOperation toGetter ( /* @ Nullable */
final EObject sourceElement , /* @ Nullable */
final String propertyName , /* @ Nullable */
final String fieldName , /* @ Nullable */
JvmTypeReference typeRef ) { } }
|
if ( sourceElement == null || propertyName == null || fieldName == null ) return null ; JvmOperation result = typesFactory . createJvmOperation ( ) ; result . setVisibility ( JvmVisibility . PUBLIC ) ; String prefix = ( isPrimitiveBoolean ( typeRef ) ? "is" : "get" ) ; result . setSimpleName ( prefix + Strings . toFirstUpper ( propertyName ) ) ; result . setReturnType ( cloneWithProxies ( typeRef ) ) ; setBody ( result , new Procedures . Procedure1 < ITreeAppendable > ( ) { @ Override public void apply ( /* @ Nullable */
ITreeAppendable p ) { if ( p != null ) { p = p . trace ( sourceElement ) ; p . append ( "return this." ) ; p . append ( javaKeywords . isJavaKeyword ( fieldName ) ? fieldName + "_" : fieldName ) ; p . append ( ";" ) ; } } } ) ; return associate ( sourceElement , result ) ;
|
public class Type { /** * map a type function over all immediate descendants of this type */
public < Z > Type map ( TypeMapping < Z > mapping , Z arg ) { } }
|
return mapping . visit ( this , arg ) ;
|
public class MessageBox { /** * This will show a MessageBox expecting an answer .
* onFinished . onSuccess will be called with true for yes , and false for no .
* if the dialogbox is closed with the closebutton instead of a button , onFinished . onFailure will be called .
* @ param title
* @ param question
* @ param onFinished
* @ return */
public static MessageBox showYesNoMessageBox ( String title , String question , Callback < Boolean , Void > onFinished ) { } }
|
MessageBox box = new MessageBox ( title , question , onFinished ) ; box . setMessageStyleType ( MessageStyleType . HELP ) ; box . setMessageBoxType ( MessageBoxType . YESNO ) ; box . center ( ) ; return box ;
|
public class ParserHelper { /** * Method that sets paraphrase value for a type into paraphrases stack .
* @ param type
* paraphrase type
* @ param value
* paraphrase value */
public void setParaphrasesValue ( DroolsParaphraseTypes type , String value ) { } }
|
paraphrases . peek ( ) . put ( type , value ) ;
|
public class CmsCmisRepository { /** * Extracts the resource type from a set of CMIS properties . < p >
* @ param properties the CMIS properties
* @ param defaultValue the default value
* @ return the resource type property , or the default value if the property was not found */
protected String getResourceTypeFromProperties ( Map < String , PropertyData < ? > > properties , String defaultValue ) { } }
|
PropertyData < ? > typeProp = properties . get ( CmsCmisTypeManager . PROPERTY_RESOURCE_TYPE ) ; String resTypeName = defaultValue ; if ( typeProp != null ) { resTypeName = ( String ) typeProp . getFirstValue ( ) ; } return resTypeName ;
|
public class CampaignFeed { /** * Gets the matchingFunction value for this CampaignFeed .
* @ return matchingFunction * Matching function associated with the CampaignFeed .
* The matching function will return true / false indicating
* which feed items may serve .
* < span class = " constraint Selectable " > This field can
* be selected using the value " MatchingFunction " . < / span >
* < span class = " constraint Required " > This field is required
* and should not be { @ code null } when it is contained within { @ link
* Operator } s : ADD . < / span > */
public com . google . api . ads . adwords . axis . v201809 . cm . Function getMatchingFunction ( ) { } }
|
return matchingFunction ;
|
public class Uris { /** * Prepends a string with a slash , if there isn ' t one already */
private static String prependSlash ( final String path ) { } }
|
if ( path . length ( ) == 0 || path . charAt ( 0 ) != '/' ) { /* our path doesn ' t start with a slash , so we prepend it */
return "/" + path ; } return path ;
|
public class AbstractPainter { /** * Sets the dirty bit . If true , then the painter is considered dirty , and the cache
* will be cleared . This property is bound .
* @ param d whether this < code > Painter < / code > is dirty . */
protected void setDirty ( boolean d ) { } }
|
boolean old = isDirty ( ) ; this . dirty = d ; firePropertyChange ( "dirty" , old , isDirty ( ) ) ; if ( isDirty ( ) ) { clearCache ( ) ; }
|
public class CatalogueClient { /** * Catalogue methods */
public Catalogue getCatalogue ( String uri , Parameter ... parameters ) { } }
|
return invoke ( catalogueDigesterLoader , uri , parameters ) ;
|
public class ServiceLookup { /** * Returns a service matching the given filter .
* @ param bc bundle context for accessing the OSGi registry
* @ param ldapFilter LDAP filter to be matched by the service . The class name must be part of the
* filter .
* @ return matching service ( not null )
* @ throws ServiceLookupException when no matching service has been found after the default
* timeout */
public static Object getServiceByFilter ( BundleContext bc , String ldapFilter ) { } }
|
return getServiceByFilter ( bc , ldapFilter , DEFAULT_TIMEOUT ) ;
|
public class RESTRequest { /** * Get the input entity ( body ) of this REST request as a string . If no input entity
* was provided , an empty string is returned . If an input entity exists and is
* compressed , it is decompressed first . The binary input entity is converted to a
* string using UTF - 8.
* @ return The input entity ( body ) of this REST request as a string . It is empty if
* there is no input string . */
public String getInputBody ( ) { } }
|
if ( m_requestEntity . length == 0 || ! m_bEntityCompressed ) { return Utils . toString ( m_requestEntity ) ; } else { try { return Utils . toString ( Utils . decompressGZIP ( m_requestEntity ) ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( "Error decompressing input: " + e . toString ( ) ) ; } }
|
public class StandardTransactionBuilder { /** * Create an unsigned transaction without specifying a fee . The fee is
* automatically calculated to pass minimum relay and mining requirements .
* @ param unspent
* The list of unspent transaction outputs that can be used as
* funding
* @ param changeAddress
* The address to send any change to
* @ param keyRing
* The public key ring matching the unspent outputs
* @ param network
* The network we are working on
* @ return An unsigned transaction or null if not enough funds were available
* @ throws InsufficientFundsException if there is not enough funds available */
public UnsignedTransaction createUnsignedTransaction ( List < UnspentTransactionOutput > unspent , Address changeAddress , PublicKeyRing keyRing , NetworkParameters network ) throws InsufficientFundsException { } }
|
long fee = MIN_MINER_FEE ; while ( true ) { UnsignedTransaction unsigned ; try { unsigned = createUnsignedTransaction ( unspent , changeAddress , fee , keyRing , network ) ; } catch ( InsufficientFundsException e ) { // We did not even have enough funds to pay the minimum fee
throw e ; } int txSize = estimateTransacrionSize ( unsigned ) ; // fee is based on the size of the transaction , we have to pay for
// every 1000 bytes
long requiredFee = ( 1 + ( txSize / 1000 ) ) * MIN_MINER_FEE ; if ( fee >= requiredFee ) { return unsigned ; } // collect coins anew with an increased fee
fee += MIN_MINER_FEE ; }
|
public class PackageDocImpl { /** * Return a list of all classes contained in this package , including
* member classes of those classes , and their member classes , etc . */
private List < ClassDocImpl > getClasses ( boolean filtered ) { } }
|
if ( allClasses != null && ! filtered ) { return allClasses ; } if ( allClassesFiltered != null && filtered ) { return allClassesFiltered ; } ListBuffer < ClassDocImpl > classes = new ListBuffer < ClassDocImpl > ( ) ; for ( Scope . Entry e = sym . members ( ) . elems ; e != null ; e = e . sibling ) { if ( e . sym != null ) { ClassSymbol s = ( ClassSymbol ) e . sym ; ClassDocImpl c = env . getClassDoc ( s ) ; if ( c != null && ! c . isSynthetic ( ) ) c . addAllClasses ( classes , filtered ) ; } } if ( filtered ) return allClassesFiltered = classes . toList ( ) ; else return allClasses = classes . toList ( ) ;
|
public class Cache { /** * Find and return the object associated with the specified key . */
public synchronized Object get ( Object key ) { } }
|
ConcurrentHashMap < Object , Object > tableRef = primaryTable ; Entry curEntry = ( Entry ) primaryTable . get ( key ) ; // Not found in primary
if ( curEntry == null ) { tableRef = secondaryTable ; curEntry = ( Entry ) secondaryTable . get ( key ) ; // Not found in primary or secondary
if ( curEntry == null ) { tableRef = tertiaryTable ; curEntry = ( Entry ) tertiaryTable . get ( key ) ; } // Not found in primary , secondary , or tertiary
if ( curEntry == null ) { tableRef = null ; } } // If found in secondary or tertiary , move entry to primary
if ( ( tableRef != null ) && ( tableRef != primaryTable ) ) { primaryTable . put ( key , curEntry ) ; tableRef . remove ( key ) ; } // If not present even in any table , add an empty entry
// that can be found faster for update
if ( tableRef == null ) { curEntry = ( Entry ) primaryTable . get ( key ) ; if ( curEntry == null ) { curEntry = new Entry ( ) ; Entry prevEntry = ( Entry ) primaryTable . putIfAbsent ( key , curEntry ) ; if ( prevEntry != null ) curEntry = prevEntry ; // We lost the race , so use the entry from the other thread
} } return curEntry . value ;
|
public class FilterLoader { /** * Load and cache filters by className
* @ param classNames The class names to load
* @ return List of the loaded filters
* @ throws Exception If any specified filter fails to load , this will abort . This is a safety mechanism so we can
* prevent running in a partially loaded state . */
public List < ZuulFilter > putFiltersForClasses ( String [ ] classNames ) throws Exception { } }
|
List < ZuulFilter > newFilters = new ArrayList < > ( ) ; for ( String className : classNames ) { newFilters . add ( putFilterForClassName ( className ) ) ; } return newFilters ;
|
public class Pinyins { /** * 获字符拼音 */
public static String getPinyin ( char c , PinyinFormat pinyinFormat ) { } }
|
HanyuPinyinOutputFormat outputFormat = null ; boolean formatNotNull = $ . notNull ( pinyinFormat ) ; String ret = null ; if ( formatNotNull ) { PinyinTONEFormat toneFormat = pinyinFormat . getToneFormat ( ) ; PinyinULetterFormat uLetterFormat = pinyinFormat . getuLetterFormat ( ) ; if ( $ . notNull ( toneFormat ) ) { outputFormat = new HanyuPinyinOutputFormat ( ) ; switch ( toneFormat ) { case TONE_MARK : outputFormat . setToneType ( HanyuPinyinToneType . WITH_TONE_MARK ) ; break ; case TONE_NUMBER : outputFormat . setToneType ( HanyuPinyinToneType . WITH_TONE_NUMBER ) ; break ; case TONE_NONE : outputFormat . setToneType ( HanyuPinyinToneType . WITHOUT_TONE ) ; break ; } } if ( $ . notNull ( uLetterFormat ) ) { outputFormat = $ . isNull ( outputFormat ) ? new HanyuPinyinOutputFormat ( ) : outputFormat ; switch ( uLetterFormat ) { case U_AND_COLON : outputFormat . setVCharType ( HanyuPinyinVCharType . WITH_U_AND_COLON ) ; break ; case U_UNICODE : outputFormat . setVCharType ( HanyuPinyinVCharType . WITH_U_UNICODE ) ; break ; case U_V : outputFormat . setVCharType ( HanyuPinyinVCharType . WITH_V ) ; break ; } } } String [ ] pys = null ; try { pys = $ . notNull ( outputFormat ) ? PinyinHelper . toHanyuPinyinStringArray ( c , outputFormat ) : PinyinHelper . toHanyuPinyinStringArray ( c ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } if ( $ . notEmpty ( pys ) ) { ret = pys [ 0 ] ; int tempLen = ret . length ( ) - 1 ; if ( formatNotNull ) { PinyinUpperFormat upperFormat = pinyinFormat . getUpperFormat ( ) ; PinyinCUTFormat cutFormat = pinyinFormat . getCutFormat ( ) ; if ( $ . notNull ( upperFormat ) ) { switch ( upperFormat ) { case UPPER_ALL : ret = ret . toUpperCase ( ) ; break ; case UPPER_FIRST_LETTER : ret = String . valueOf ( ret . charAt ( 0 ) ) . toUpperCase ( ) + ret . substring ( 1 ) ; break ; case UPPER_LAST_LETTER : ret = ret . substring ( 0 , tempLen ) + String . valueOf ( ret . charAt ( tempLen ) ) . 
toUpperCase ( ) ; break ; case UPPER_FIRST_LAST_LETTER : ret = ret . length ( ) > 2 ? String . valueOf ( ret . charAt ( 0 ) ) . toUpperCase ( ) + ret . substring ( 1 , tempLen ) + String . valueOf ( ret . charAt ( tempLen ) ) . toUpperCase ( ) : ret . toUpperCase ( ) ; break ; case UPPER_MIDDLE_LETTER : ret = ret . length ( ) > 2 ? String . valueOf ( ret . charAt ( 0 ) ) + ret . substring ( 1 , tempLen ) . toUpperCase ( ) + String . valueOf ( ret . charAt ( tempLen ) ) : ret . toLowerCase ( ) ; break ; } } if ( $ . notNull ( cutFormat ) ) { switch ( cutFormat ) { case CUT_FIRST_LETTER : ret = String . valueOf ( ret . charAt ( 0 ) ) ; break ; case CUT_LAST_LETTER : ret = String . valueOf ( ret . charAt ( tempLen ) ) ; break ; case CUT_MIDDLE_LETTER : ret = ret . length ( ) > 2 ? ret . substring ( 1 , tempLen ) : ret ; break ; case CUT_FIRST_LAST_LETTER : ret = ret . length ( ) > 2 ? String . valueOf ( ret . charAt ( 0 ) ) + String . valueOf ( ret . charAt ( tempLen ) ) : ret ; break ; } } } } return $ . notNull ( ret ) ? ret : String . valueOf ( c ) ;
|
public class MaintenanceWindowStepFunctionsParametersMarshaller { /** * Marshall the given parameter object . */
public void marshall ( MaintenanceWindowStepFunctionsParameters maintenanceWindowStepFunctionsParameters , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( maintenanceWindowStepFunctionsParameters == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( maintenanceWindowStepFunctionsParameters . getInput ( ) , INPUT_BINDING ) ; protocolMarshaller . marshall ( maintenanceWindowStepFunctionsParameters . getName ( ) , NAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class InputMapTemplate { /** * If the given { @ link EventPattern } matches the given event type , runs the given action , and then attempts
* to pattern match the event type with the next { @ code InputMap } ( if one exists ) . */
public static < S , T extends Event , U extends T > InputMapTemplate < S , U > process ( EventPattern < ? super T , ? extends U > eventPattern , BiFunction < ? super S , ? super U , InputHandler . Result > action ) { } }
|
return new PatternActionTemplate < > ( eventPattern , action ) ;
|
public class TaskResult { /** * Inserts a Serializable value into the mapping of this Bundle , replacing any existing value for
* the given key . Either key or value may be null .
* @ param key a String , or null
* @ param value a Serializable object , or null */
public TaskResult add ( String key , Serializable value ) { } }
|
mBundle . putSerializable ( key , value ) ; return this ;
|
public class HostInfo { private boolean isIPV4address ( String address ) { } }
|
StringTokenizer st = new StringTokenizer ( address , "." ) ; List < String > nodes = new ArrayList < String > ( ) ; while ( st . hasMoreTokens ( ) ) nodes . add ( st . nextToken ( ) ) ; return ( nodes . size ( ) == 4 ) ;
|
public class BitsUtil { /** * XOR o onto v in - place , i . e . v ^ = o
* @ param v Primary object
* @ param o data to xor
* @ return v */
public static long [ ] xorI ( long [ ] v , long [ ] o ) { } }
|
assert ( o . length <= v . length ) : "Bit set sizes do not agree." ; for ( int i = 0 ; i < o . length ; i ++ ) { v [ i ] ^= o [ i ] ; } return v ;
|
public class DescribeConversionTasksRequest { /** * The conversion task IDs .
* @ return The conversion task IDs . */
public java . util . List < String > getConversionTaskIds ( ) { } }
|
if ( conversionTaskIds == null ) { conversionTaskIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return conversionTaskIds ;
|
public class SoftCache { /** * Maps the specified key to the specified value . */
public V put ( K key , V value ) { } }
|
SoftReference < V > old = _map . put ( key , createReference ( value ) ) ; return ( old == null ) ? null : old . get ( ) ;
|
public class CPDefinitionLocalServiceUtil { /** * Moves the commerce product definition to the recycle bin .
* @ param userId the primary key of the user moving the commerce product
* definition
* @ param cpDefinition the commerce product definition to be moved
* @ return the moved commerce product definition */
public static com . liferay . commerce . product . model . CPDefinition moveCPDefinitionToTrash ( long userId , com . liferay . commerce . product . model . CPDefinition cpDefinition ) throws com . liferay . portal . kernel . exception . PortalException { } }
|
return getService ( ) . moveCPDefinitionToTrash ( userId , cpDefinition ) ;
|
public class CLogConfigurationBase { /** * { @ code i = = 0 } identifies the caller of this method , for { @ code i > 0 } ,
* the stack is walked upwards .
* @ param i
* @ return class name */
protected static String detectClass ( int i ) { } }
|
if ( i < 1 ) { throw new IllegalArgumentException ( "Expected value > 0, got " + i ) ; } StackTraceElement [ ] stack = Thread . currentThread ( ) . getStackTrace ( ) ; // 0 getStackTrace
// 1 detectClass
// 2 getLogger
// 3 caller
if ( stack . length <= i + 2 ) return "" ; return stack [ i + 2 ] . getClassName ( ) ;
|
public class CompoundPainter { /** * Sets the array of Painters to use . These painters will be executed in
* order . A null value will be treated as an empty array . To prevent unexpected
* behavior all values in provided array are copied to internally held array .
* Any changes to the original array will not be reflected .
* @ param painters array of painters , which will be painted in order */
public void setPainters ( Painter < T > ... painters ) { } }
|
List < ? extends Painter < T > > l ; if ( painters == null ) l = Collections . emptyList ( ) ; else l = Arrays . asList ( painters ) ; setPainters ( l ) ;
|
public class LasFileDataManager { /** * Open the main folder file and read the main index .
* @ throws Exception */
@ Override public void open ( ) throws Exception { } }
|
lasReader = ALasReader . getReader ( lasFile , crs ) ; lasReader . open ( ) ; lasHeader = lasReader . getHeader ( ) ; isOpen = true ;
|
public class CPOptionValueUtil { /** * Returns the cp option value where companyId = & # 63 ; and externalReferenceCode = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache .
* @ param companyId the company ID
* @ param externalReferenceCode the external reference code
* @ param retrieveFromCache whether to retrieve from the finder cache
* @ return the matching cp option value , or < code > null < / code > if a matching cp option value could not be found */
public static CPOptionValue fetchByC_ERC ( long companyId , String externalReferenceCode , boolean retrieveFromCache ) { } }
|
return getPersistence ( ) . fetchByC_ERC ( companyId , externalReferenceCode , retrieveFromCache ) ;
|
public class ServerRedirectService { /** * Returns true or false depending on whether or not Odo can handle the request for this server / clientUUID pair
* @ param serverName server Name
* @ return true if odo can handle , false otherwise
* @ throws Exception exception */
public Boolean canHandleRequest ( String serverName ) throws Exception { } }
|
// TODO : Future optimizations
try { Profile [ ] profiles = this . getProfilesForServerName ( serverName ) ; if ( profiles == null ) { logger . info ( "No matching profiles found for path" ) ; return false ; } for ( Profile profile : profiles ) { List < Client > clients = ClientService . getInstance ( ) . findAllClients ( profile . getId ( ) ) ; for ( Client client : clients ) { if ( client . getIsActive ( ) ) { return true ; } } } } catch ( Exception e ) { e . printStackTrace ( ) ; } return false ;
|
public class MtasSolrCollectionCache { /** * Gets the data by id .
* @ param id the id
* @ return the data by id
* @ throws IOException Signals that an I / O exception has occurred . */
public HashSet < String > getDataById ( String id ) throws IOException { } }
|
if ( idToVersion . containsKey ( id ) ) { return get ( id ) ; } else { return null ; }
|
public class BasicQueryOutputProcessor { /** * { @ inheritDoc } */
public < T > T toBean ( QueryParameters params , Class < T > type ) throws MjdbcException { } }
|
T result = null ; int [ ] columnToProperty = null ; if ( params != null ) { PropertyDescriptor [ ] props = MappingUtils . propertyDescriptors ( type ) ; columnToProperty = this . mapColumnsToProperties ( params , props ) ; result = this . createBean ( params , type , props , columnToProperty ) ; } return result ;
|
public class BaseDTO { /** * 执行函数
* @ param callable 函数
* @ param msg 错误消息模板
* @ param param 错误消息模板参数
* @ param < T > 返回数据类型
* @ return 执行结果 */
public static < T > BaseDTO < T > exec ( Callable < BaseDTO < T > > callable , String msg , Object ... param ) { } }
|
try { log . info ( "函数调用" ) ; BaseDTO < T > dto = callable . call ( ) ; log . info ( "函数调用结果为:[{}]" , dto ) ; return dto ; } catch ( Exception e ) { if ( param == null || param . length == 0 ) { log . error ( msg , e ) ; } else { Object [ ] params = new Object [ param . length + 1 ] ; System . arraycopy ( param , 0 , params , 0 , param . length ) ; params [ param . length ] = e ; log . error ( msg , params ) ; } return BaseDTO . buildError ( ) ; }
|
public class FunctionInformationMap { /** * < code > repeated group Entry = 1 { . . . } < / code > */
public com . google . javascript . jscomp . FunctionInformationMap . EntryOrBuilder getEntryOrBuilder ( int index ) { } }
|
return entry_ . get ( index ) ;
|
public class DemuxingIoHandler { /** * Deregisters the { @ link ExceptionHandler } that handles exceptions of
 * the specified < code > type < / code > . ( Note : the original comment referred
 * to MessageHandler / messages ; this method operates on exception handlers . )
 * The handler lookup cache is cleared so subsequent dispatches re - resolve .
 * @ return the removed handler if successfully removed . < tt > null < / tt > otherwise . */
@ SuppressWarnings ( "unchecked" ) public < E extends Throwable > ExceptionHandler < ? super E > removeExceptionHandler ( Class < E > type ) { } }
|
exceptionHandlerCache . clear ( ) ; return ( ExceptionHandler < ? super E > ) exceptionHandlers . remove ( type ) ;
|
public class Helper { /** * < p > formatParamList . < / p >
* @ param params a { @ link java . lang . String } object .
* @ return a { @ link java . lang . String } object . */
public static String formatParamList ( String ... params ) { } }
|
StringBuilder s = new StringBuilder ( ) ; for ( String p : params ) { if ( s . length ( ) != 0 ) { s . append ( ", " ) ; } s . append ( p ) ; } return s . toString ( ) ;
|
public class JsonOne { /** * Creates a new instance of JsonOne .
 * Jackson factory method : binds the JSON properties " one " and " hello "
 * onto a builder - constructed instance . */
@ JsonCreator public static JsonOne create ( @ JsonProperty ( "one" ) String one , @ JsonProperty ( "hello" ) String hello ) { } }
|
return builder ( ) . setOne ( one ) . setHello ( hello ) . build ( ) ;
|
public class JsMainAdminComponentImpl { /** * Declarative Services method for unsetting the JsAdminService service
* reference .
* @ param ref
* reference to the service */
protected void unsetJsAdminService ( ServiceReference < JsAdminService > ref ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . entry ( tc , "unsetJsAdminService" , ref ) ; jsAdminServiceref . setReference ( ref ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . exit ( tc , "unsetJsAdminService" ) ;
|
public class Transformers { /** * Transform from a RESTEasy Response to an Item instance .
 * No need to specify the " to " type because Item is a concrete type .
 * The return type is set on the response first so RESTEasy knows how to
 * unmarshal the entity body .
 * @ param from Response object .
 * @ return Item instance . */
@ Transformer ( from = "java:org.jboss.resteasy.client.core.BaseClientResponse" ) public Item transform ( BaseClientResponse < Item > from ) { } }
|
from . setReturnType ( Item . class ) ; return ( Item ) from . getEntity ( ) ;
|
public class CollectionReference { /** * Returns a DocumentReference to the containing Document if this is a subcollection , else null .
* @ return A DocumentReference pointing to the parent document . */
@ Nullable public DocumentReference getParent ( ) { } }
|
ResourcePath parent = path . getParent ( ) ; return parent . isDocument ( ) ? new DocumentReference ( firestore , parent ) : null ;
|
public class TransactionManager { /** * Closes all transaction scopes . Should be called only when repository is
 * closed .
 * Idempotent once suspended : a second close after a suspending close
 * returns immediately to avoid self - deadlock on already - held scope locks .
 * @ param suspend when true , indefinitely suspend all threads interacting
 * with transactions */
public synchronized void close ( boolean suspend ) throws RepositoryException { } }
|
if ( mState == SUSPENDED ) { // If suspended , attempting to close again will likely deadlock .
return ; } if ( suspend ) { for ( TransactionScope < ? > scope : mAllScopes . keySet ( ) ) { // Lock scope but don ' t release it . This prevents other threads
// from beginning work during shutdown , which will likely fail
// along the way .
scope . getLock ( ) . lock ( ) ; } } mState = suspend ? SUSPENDED : CLOSED ; for ( TransactionScope < ? > scope : mAllScopes . keySet ( ) ) { scope . close ( ) ; } mAllScopes . clear ( ) ; mLocalScope . remove ( ) ;
|
public class AbcGrammar { /** * all except < tt > ] < tt >
 * non - bracket - char : : = * ( WSP / % 21 - % 5C / % 5E - 7E )
 * The ranges '!' ( % 21 ) - '\\' ( % 5C ) and '^' ( % 5E ) - '~' ( % 7E ) leave out
 * exactly ']' ( % 5D ) , matching the ABNF above . NOTE ( review ) : the rule also
 * accepts LatinExtendedAndOtherAlphabet , which goes beyond the ABNF shown
 * — confirm that extension is intentional . */
Rule NonBracketChar ( ) { } }
|
return ZeroOrMore ( FirstOf ( WSP ( ) , CharRange ( '!' , '\\' ) , CharRange ( '^' , '~' ) , LatinExtendedAndOtherAlphabet ( ) ) ) . label ( NonBracketChar ) ;
|
public class FacesConfigTypeImpl { /** * Returns all < code > factory < / code > elements
 * Builds a fresh wrapper list on every call : each < code > factory < / code >
 * child node is wrapped in a new { @ code FacesConfigFactoryTypeImpl } .
 * @ return list of < code > factory < / code > */
public List < FacesConfigFactoryType < FacesConfigType < T > > > getAllFactory ( ) { } }
|
List < FacesConfigFactoryType < FacesConfigType < T > > > list = new ArrayList < FacesConfigFactoryType < FacesConfigType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "factory" ) ; for ( Node node : nodeList ) { FacesConfigFactoryType < FacesConfigType < T > > type = new FacesConfigFactoryTypeImpl < FacesConfigType < T > > ( this , "factory" , childNode , node ) ; list . add ( type ) ; } return list ;
|
public class OutboundTransferTask { /** * Cancel some of the segments . If all segments get cancelled then the whole task will be cancelled .
 * No - op when none of the given segments are currently owned by this task .
 * @ param cancelledSegments segments to cancel . */
public void cancelSegments ( IntSet cancelledSegments ) { } }
|
if ( segments . removeAll ( cancelledSegments ) ) { if ( trace ) { log . tracef ( "Cancelling outbound transfer to node %s, segments %s (remaining segments %s)" , destination , cancelledSegments , segments ) ; } entriesBySegment . keySet ( ) . removeAll ( cancelledSegments ) ; // here we do not update accumulatedEntries but this inaccuracy does not cause any harm
if ( segments . isEmpty ( ) ) { cancel ( ) ; } }
|
public class DescribeReservedNodesResult { /** * The list of < code > ReservedNode < / code > objects .
 * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
 * { @ link # setReservedNodes ( java . util . Collection ) } or { @ link # withReservedNodes ( java . util . Collection ) } if you want
 * to override the existing values .
 * @ param reservedNodes
 * The list of < code > ReservedNode < / code > objects .
 * @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeReservedNodesResult withReservedNodes ( ReservedNode ... reservedNodes ) { } }
|
// Lazily create the internal list on first use , then append each element ( generated AWS SDK builder pattern ) .
if ( this . reservedNodes == null ) { setReservedNodes ( new com . amazonaws . internal . SdkInternalList < ReservedNode > ( reservedNodes . length ) ) ; } for ( ReservedNode ele : reservedNodes ) { this . reservedNodes . add ( ele ) ; } return this ;
|
public class WebhookDefinition { /** * A list of rules applied to the body / payload sent in the POST request to a webhook URL . All defined rules must
 * pass for the request to be accepted and the pipeline started .
 * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
 * { @ link # setFilters ( java . util . Collection ) } or { @ link # withFilters ( java . util . Collection ) } if you want to override
 * the existing values .
 * @ param filters
 * A list of rules applied to the body / payload sent in the POST request to a webhook URL . All defined rules
 * must pass for the request to be accepted and the pipeline started .
 * @ return Returns a reference to this object so that method calls can be chained together . */
public WebhookDefinition withFilters ( WebhookFilterRule ... filters ) { } }
|
// Lazily create the internal list on first use , then append each element ( generated AWS SDK builder pattern ) .
if ( this . filters == null ) { setFilters ( new java . util . ArrayList < WebhookFilterRule > ( filters . length ) ) ; } for ( WebhookFilterRule ele : filters ) { this . filters . add ( ele ) ; } return this ;
|
public class DfState { /** * Set the molecule to be matched .
 * Resets all per - match state : clears the atom map ( amap ) , the mapped - atom
 * count , and the visit flags ( sized to the molecule ) , then seeds the first
 * frame of the search stack via { @ code store ( 0 , null ) } .
 * @ param mol the molecule */
void setMol ( IAtomContainer mol ) { } }
|
this . mol = mol ; Arrays . fill ( amap , - 1 ) ; numMapped = 0 ; this . avisit = new boolean [ mol . getAtomCount ( ) ] ; sptr = 0 ; store ( 0 , null ) ;
|
public class BucketFunctions { /** * Returns a function that maps age values to a set of buckets . Example use - case would be
 * tracking the age of data flowing through a processing pipeline . Values that are less than
 * 0 will be marked as " future " . These typically occur due to minor variations in the clocks
 * across nodes . In addition to a bucket at the max , it will create buckets at max / 2 , max / 4,
 * and max / 8.
 * @ param max
 * Maximum expected age of data flowing through . Values greater than this max will be mapped
 * to an " old " bucket .
 * @ param unit
 * Unit for the max value .
 * @ return
 * Function mapping age values to string labels . The labels for buckets will sort
 * so they can be used with a simple group by . */
public static BucketFunction age ( long max , TimeUnit unit ) { } }
|
// Delegate to the spectator histogram implementation and adapt its function type via wrap .
return wrap ( com . netflix . spectator . api . histogram . BucketFunctions . age ( max , unit ) ) ;
|
public class AbstractSingleFileObjectStore { /** * ( non - Javadoc )
 * Discards all in - memory and pending state for this store and resets the
 * file - layout bookkeeping ( free space , sequence number , directory ) so the
 * store behaves as freshly created .
 * @ see com . ibm . ws . objectManager . ObjectStore # clear ( ) */
protected synchronized void clear ( ) throws ObjectManagerException { } }
|
final String methodName = "clear" ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , methodName ) ; // No call to super . clear ( ) because we use a WeakValueHashMap .
inMemoryTokens . clear ( ) ; cachedManagedObjects = new java . lang . ref . SoftReference [ cachedManagedObjectsSize ] ; managedObjectsToWrite . clear ( ) ; tokensToDelete . clear ( ) ; // No need to clear newFreeSpace because the existing one is cleared just before we flush anyway .
// Allow for two headers to be written at the front of the file .
storeFileSizeUsed = pageSize * 2 ; freeSpaceByAddressHead = null ; freeSpaceByLength = new java . util . TreeSet ( new LengthComparator ( ) ) ; freeSpaceStoreArea = null ; // Sequence numbers 0-200 are reserved .
sequenceNumber = initialSequenceNumber ; directory = makeDirectory ( initialDirectoryMinimumNodeSize , 0 , 0 ) ; directoryReservedSize = directory . spaceRequired ( ) ; reservedSize . set ( 0 ) ; setAllocationAllowed ( ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName ) ;
|
public class BlockedTaskRejectedExecutionHandler { /** * Always log per 1000 mults rejects .
 * NOTE ( review ) : the code below logs every 100 rejects ( counts % 100 ) ,
 * not every 1000 as the line above claims — confirm which is intended .
 * Instead of discarding the task , blocks the caller until the task can be
 * queued ; with a positive sliceScrollBlockedWaitTimeout the wait is bounded
 * and a RejectedExecutionException is thrown on timeout or interrupt .
 * @ param r
 * @ param executor */
@ Override public void rejectedExecution ( Runnable r , ThreadPoolExecutor executor ) { } }
|
int counts = rejectCounts . incrementAndGet ( ) ; if ( logger . isWarnEnabled ( ) ) { int t = counts % 100 ; if ( t == 0 ) { logger . warn ( new StringBuilder ( ) . append ( "Task[" ) . append ( message ) . append ( "] blocked " ) . append ( counts ) . append ( " times." ) . toString ( ) ) ; } } try { if ( sliceScrollBlockedWaitTimeout <= 0 ) { executor . getQueue ( ) . put ( r ) ; } else { boolean result = executor . getQueue ( ) . offer ( r , this . sliceScrollBlockedWaitTimeout , TimeUnit . MILLISECONDS ) ; if ( ! result ) { throw new RejectedExecutionException ( new StringBuilder ( ) . append ( "Task[" ) . append ( message ) . append ( "] rejected: wait timeout after " ) . append ( sliceScrollBlockedWaitTimeout ) . append ( " MILLISECONDS." ) . toString ( ) ) ; } } } catch ( InterruptedException e1 ) { throw new RejectedExecutionException ( e1 ) ; }
|
public class GcsUploader { /** * Generate the storage object name in gcs given the topologyName and filename .
* @ param topologyName the name of the topology
* @ param filename the name of the file to upload to gcs
* @ return the name of the object . */
private static String generateStorageObjectName ( String topologyName , String filename ) { } }
|
return String . format ( "%s/%s" , topologyName , filename ) ;
|
public class ServerStateMachine { /** * Create the episode object for the requested state .
 * Returns null for non - ServerState inputs and for the terminal states
 * ( MISSION _ ENDED / MISSION _ ABORTED ) , whose episodes are currently
 * commented out .
 * @ param state the state the mod is entering
 * @ return a MissionStateEpisode that localises all the logic required to run this state */
@ Override protected StateEpisode getStateEpisodeForState ( IState state ) { } }
|
if ( ! ( state instanceof ServerState ) ) return null ; ServerState sstate = ( ServerState ) state ; switch ( sstate ) { case WAITING_FOR_MOD_READY : return new InitialiseServerModEpisode ( this ) ; case DORMANT : return new DormantEpisode ( this ) ; case BUILDING_WORLD : return new BuildingWorldEpisode ( this ) ; case WAITING_FOR_AGENTS_TO_ASSEMBLE : return new WaitingForAgentsEpisode ( this ) ; case RUNNING : return new RunningEpisode ( this ) ; case WAITING_FOR_AGENTS_TO_QUIT : return new WaitingForAgentsToQuitEpisode ( this ) ; case ERROR : return new ErrorEpisode ( this ) ; case CLEAN_UP : return new CleanUpEpisode ( this ) ; case MISSION_ENDED : return null ; // new MissionEndedEpisode ( this , MissionResult . ENDED ) ;
case MISSION_ABORTED : return null ; // new MissionEndedEpisode ( this , MissionResult . AGENT _ QUIT ) ;
default : break ; } return null ;
|
public class ForgotPasswordRequestMarshaller { /** * Marshall the given parameter object .
 * Writes each request field to the protocol marshaller using the generated
 * bindings .
 * @ throws SdkClientException when the request is null or marshalling fails . */
public void marshall ( ForgotPasswordRequest forgotPasswordRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( forgotPasswordRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( forgotPasswordRequest . getClientId ( ) , CLIENTID_BINDING ) ; protocolMarshaller . marshall ( forgotPasswordRequest . getSecretHash ( ) , SECRETHASH_BINDING ) ; protocolMarshaller . marshall ( forgotPasswordRequest . getUserContextData ( ) , USERCONTEXTDATA_BINDING ) ; protocolMarshaller . marshall ( forgotPasswordRequest . getUsername ( ) , USERNAME_BINDING ) ; protocolMarshaller . marshall ( forgotPasswordRequest . getAnalyticsMetadata ( ) , ANALYTICSMETADATA_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class RenderableThunk { /** * Resolves the value by writing it to appendable
 * Side effects : caches the rendered text in { @ code content } and the
 * resulting data value in { @ code resolved } — ordained as safe content when
 * a content kind is set , plain string data otherwise .
 * @ param appendable An Appendable that you can call toString on to get the appended value */
void doResolveOnto ( Appendable appendable ) throws IOException { } }
|
doRender ( appendable ) ; content = appendable . toString ( ) ; if ( kind == null ) { resolved = StringData . forValue ( content ) ; } else { resolved = UnsafeSanitizedContentOrdainer . ordainAsSafe ( content , kind ) ; }
|
public class GeneratedMessage { /** * Calls invoke and throws a RuntimeException if it fails .
 * Unwraps InvocationTargetException : RuntimeExceptions and Errors thrown
 * by the target are rethrown as - is ; any checked cause is wrapped in a
 * RuntimeException . */
private static Object invokeOrDie ( final Method method , final Object object , final Object ... params ) { } }
|
try { return method . invoke ( object , params ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Couldn't use Java reflection to implement protocol message " + "reflection." , e ) ; } catch ( InvocationTargetException e ) { final Throwable cause = e . getCause ( ) ; if ( cause instanceof RuntimeException ) { throw ( RuntimeException ) cause ; } else if ( cause instanceof Error ) { throw ( Error ) cause ; } else { throw new RuntimeException ( "Unexpected exception thrown by generated accessor method." , cause ) ; } }
|
public class ServiceRefWrapper { /** * @ Override
* public Annotation [ ] getApiAnnotations ( )
* return getDelegate ( ) . getApiAnnotations ( ) ; */
@ Override public MethodRefAmp methodByName ( String methodName ) { } }
|
try { return delegate ( ) . methodByName ( methodName ) ; } catch ( Exception e ) { return new MethodRefException ( this , methodName , e ) ; }
|
public class PippoSettings { /** * Add a value that may be interpolated .
* @ param name
* @ param value */
protected void addInterpolationValue ( String name , String value ) { } }
|
interpolationValues . put ( String . format ( "${%s}" , name ) , value ) ; interpolationValues . put ( String . format ( "@{%s}" , name ) , value ) ;
|
public class MSPDIReader { /** * This method processes any extended attributes associated with a task .
 * Each XML field ID is masked to its low 16 bits to obtain the MPP task
 * field , and the value is parsed using the attribute ' s duration format .
 * @ param xml MSPDI task instance
 * @ param mpx MPX task instance */
private void readTaskExtendedAttributes ( Project . Tasks . Task xml , Task mpx ) { } }
|
for ( Project . Tasks . Task . ExtendedAttribute attrib : xml . getExtendedAttribute ( ) ) { int xmlFieldID = Integer . parseInt ( attrib . getFieldID ( ) ) & 0x0000FFFF ; TaskField mpxFieldID = MPPTaskField . getInstance ( xmlFieldID ) ; TimeUnit durationFormat = DatatypeConverter . parseDurationTimeUnits ( attrib . getDurationFormat ( ) , null ) ; DatatypeConverter . parseExtendedAttribute ( m_projectFile , mpx , attrib . getValue ( ) , mpxFieldID , durationFormat ) ; }
|
public class MapFileTileSetIDBroker { /** * documentation inherited */
public void commit ( ) throws PersistenceException { } }
|
// only write ourselves out if we ' ve changed
if ( _storedTileSetID == _nextTileSetID ) { return ; } try { BufferedWriter bout = new BufferedWriter ( new FileWriter ( _mapfile ) ) ; // write out our metadata
String tline = "" + _nextTileSetID ; bout . write ( tline , 0 , tline . length ( ) ) ; bout . newLine ( ) ; // write out our mappings
writeMapFile ( bout , _map ) ; bout . close ( ) ; } catch ( IOException ioe ) { String errmsg = "Failure writing map file." ; throw new PersistenceException ( errmsg , ioe ) ; }
|
public class ProteinModificationIdentifier { /** * Identify a set of modifications in a a chains .
* @ param chain query { @ link Chain } .
* @ param potentialModifications query { @ link ProteinModification } s . */
public void identify ( final Chain chain , final Set < ProteinModification > potentialModifications ) { } }
|
identify ( Collections . singletonList ( chain ) , potentialModifications ) ;
|
public class QueryBuilder { /** * Remove the provided { @ link FeatureCode } s from the set of query constraints .
* @ param codes the { @ link FeatureCode } s to remove
* @ return this */
public QueryBuilder removeFeatureCodes ( final Collection < FeatureCode > codes ) { } }
|
if ( codes != null ) { featureCodes . removeAll ( codes ) ; } return this ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * Lazily resolves the EClass from the registered Ifc4 package
 * ( classifier index 846 ) and caches it for subsequent calls .
 * @ generated */
@ Override public EClass getIfcMonthInYearNumber ( ) { } }
|
if ( ifcMonthInYearNumberEClass == null ) { ifcMonthInYearNumberEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 846 ) ; } return ifcMonthInYearNumberEClass ;
|
public class AbstractXTreeNode { /** * Reads the id of this node , the numEntries and the entries array from the
 * specified stream . If the { @ link # supernode } field is set , < code > this < / code >
 * cannot be contained in < code > in < / code > . Such a node has to be manually
 * filled using { @ link # readSuperNode ( ObjectInput , AbstractXTree ) } .
 * For supernodes only the capacity is recorded ( capacity _ to _ be _ filled )
 * and entry allocation is deferred ; otherwise the entries array is
 * allocated by leaf / directory kind and populated from the stream .
 * @ param in the stream to read data from in order to restore the object
 * @ throws java . io . IOException if I / O errors occur
 * @ throws ClassNotFoundException If the class for an object being restored
 * cannot be found . */
@ Override public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { } }
|
setPageID ( in . readInt ( ) ) ; isLeaf = in . readBoolean ( ) ; supernode = in . readBoolean ( ) ; numEntries = in . readInt ( ) ; final int capacity = in . readInt ( ) ; if ( supernode ) { // this node is a supernode and is yet to be filled
capacity_to_be_filled = capacity ; return ; } // the following causes a null pointer - - something is obviously missing
// entries = ( E [ ] ) java . lang . reflect . Array . newInstance ( eclass , capacity ) ;
if ( isLeaf ( ) ) { entries = ( Entry [ ] ) new SpatialPointLeafEntry [ capacity ] ; } else { entries = ( Entry [ ] ) new XTreeDirectoryEntry [ capacity ] ; } for ( int i = 0 ; i < numEntries ; i ++ ) { SpatialEntry s = isLeaf ( ) ? new SpatialPointLeafEntry ( ) : new XTreeDirectoryEntry ( ) ; s . readExternal ( in ) ; entries [ i ] = s ; }
|
public class TicketSeedFileParser { /** * Returns a list of tickets parsed from the given JSON bytes . The keys are returned in a format suitable for
 * use with netty . The first keys are the current keys , following that are the new keys and old keys .
 * Each seed string is mapped through { @ code deriveKeyFromSeed } , preserving
 * the current / new / old ordering produced by { @ code getAllSeeds ( ) } .
 * @ param json the JSON bytes containing ticket seed data .
 * @ return a list of ticket keys . Current keys are first , then new keys , then old keys .
 * @ throws IOException if parsing the JSON fails .
 * @ throws IllegalArgumentException if the JSON does not contain any current seeds . */
public List < SessionTicketKey > parseBytes ( byte [ ] json ) throws IOException { } }
|
List < String > allSeeds = TicketSeeds . parseFromJSONBytes ( json , objectMapper ) . getAllSeeds ( ) ; return allSeeds . stream ( ) . map ( this :: deriveKeyFromSeed ) . collect ( Collectors . toList ( ) ) ;
|
public class JMJson { /** * With bytes t .
 * Deserializes the given JSON bytes into an instance of the type described
 * by the type reference . On failure the exception is logged ( together with
 * the byte content rendered as a string ) and { @ code null } is returned .
 * @ param < T > the type parameter
 * @ param bytes the bytes
 * @ param typeReference the type reference
 * @ return the deserialized value , or null when deserialization fails */
public static < T > T withBytes ( byte [ ] bytes , TypeReference < T > typeReference ) { } }
|
try { return jsonMapper . readValue ( bytes , typeReference ) ; } catch ( Exception e ) { return JMExceptionManager . handleExceptionAndReturnNull ( log , e , "withBytes" , new String ( bytes ) ) ; }
|
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcAirTerminalBoxTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
|
return instanceValue == null ? null : instanceValue . toString ( ) ;
|
public class RetryStateImpl { /** * { @ inheritDoc }
 * Records the outcome of one attempt and decides whether to retry :
 * categorises the result ( abort - on / retry - on / success ) , then applies the
 * max - retries and max - duration limits , and updates the retry metrics
 * accordingly . */
@ Override public RetryResult recordResult ( MethodResult < ? > methodResult ) { } }
|
ResultCategory resultCategory = null ; attempts ++ ; long duration = System . nanoTime ( ) - startNanos ; if ( methodResult . isFailure ( ) ) { // Failure case
if ( abortOn ( methodResult . getFailure ( ) ) ) { resultCategory = ResultCategory . EXCEPTION_IN_ABORT_ON ; } else if ( retryOn ( methodResult . getFailure ( ) ) ) { resultCategory = ResultCategory . EXCEPTION_IN_RETRY_ON ; } else { resultCategory = ResultCategory . EXCEPTION_NOT_IN_RETRY_ON ; } } else { // Successful case
resultCategory = ResultCategory . NO_EXCEPTION ; } // Capture whether this result was considered a retry - able failure by the Retry
boolean resultWasRetryableFailure = shouldRetry ( resultCategory ) ; // If we want to retry based on the methodResult , check if there ' s some other reason we shouldn ' t
if ( resultWasRetryableFailure ) { int maxAttempts = policy . getMaxRetries ( ) + 1 ; if ( maxAttempts != 0 && attempts >= maxAttempts ) { resultCategory = ResultCategory . MAX_RETRIES_REACHED ; } else if ( overMaxDuration ( duration ) ) { resultCategory = ResultCategory . MAX_DURATION_REACHED ; } } if ( shouldRetry ( resultCategory ) ) { metricRecorder . incrementRetriesCount ( ) ; } else { // Finished execution , record metrics
if ( resultWasRetryableFailure ) { metricRecorder . incrementRetryCallsFailureCount ( ) ; } else { if ( attempts > 1 ) { metricRecorder . incrementRetryCallsSuccessRetriesCount ( ) ; } else { metricRecorder . incrementRetryCallsSuccessImmediateCount ( ) ; } } } return createResult ( resultCategory ) ;
|
public class WavefrontNamingConvention { /** * Valid characters are : alphanumeric , hyphen ( " - " ) , underscore ( " _ " ) , dot ( " . " ) */
@ Override public String tagKey ( String key ) { } }
|
return KEY_CLEANUP_PATTERN . matcher ( delegate . tagKey ( key ) ) . replaceAll ( "_" ) ;
|
public class DBClusterSnapshotAttributesResult { /** * The list of attributes and values for the manual DB cluster snapshot .
 * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
 * { @ link # setDBClusterSnapshotAttributes ( java . util . Collection ) } or
 * { @ link # withDBClusterSnapshotAttributes ( java . util . Collection ) } if you want to override the existing values .
 * @ param dBClusterSnapshotAttributes
 * The list of attributes and values for the manual DB cluster snapshot .
 * @ return Returns a reference to this object so that method calls can be chained together . */
public DBClusterSnapshotAttributesResult withDBClusterSnapshotAttributes ( DBClusterSnapshotAttribute ... dBClusterSnapshotAttributes ) { } }
|
// Lazily create the internal list on first use , then append each element ( generated AWS SDK builder pattern ) .
if ( this . dBClusterSnapshotAttributes == null ) { setDBClusterSnapshotAttributes ( new com . amazonaws . internal . SdkInternalList < DBClusterSnapshotAttribute > ( dBClusterSnapshotAttributes . length ) ) ; } for ( DBClusterSnapshotAttribute ele : dBClusterSnapshotAttributes ) { this . dBClusterSnapshotAttributes . add ( ele ) ; } return this ;
|
public class ProcessingParameters { /** * Merge in a configuration and return a ProcessingParameters object representing
* the merged values
* @ param pConfig config to merge in
* @ return a new ProcessingParameters instance if the given config is not null . Otherwise this object
* is returned . */
public ProcessingParameters mergedParams ( Map < String , String > pConfig ) { } }
|
if ( pConfig == null ) { return this ; } else { Map < ConfigKey , String > newParams = new HashMap < ConfigKey , String > ( ) ; newParams . putAll ( params ) ; newParams . putAll ( convertToConfigMap ( pConfig ) ) ; return new ProcessingParameters ( newParams , pathInfo ) ; }
|
public class PurandareFirstOrder { /** * Processes the compressed version of a document where each integer
 * indicates that token ' s index and identifies all the contexts for the
 * target word , adding them as new rows to the context matrix .
 * @ param termIndex the term whose contexts should be extracted
 * @ param document the document to be processed where each { @ code int } is a
 * term index
 * @ param contextMatrix the matrix that will contain all the contexts for
 * the term with { @ code termIndex } when this method returns
 * @ param rowStart the next row index in the matrix where a new context can
 * be added
 * @ param featuresForTerm a mapping from term index to the set of other term
 * indices that are valid feature for that term
 * @ return the number of contexts present in this document */
private int processIntDocument ( int termIndex , int [ ] document , Matrix contextMatrix , int rowStart , BitSet featuresForTerm ) { } }
|
int contexts = 0 ; for ( int i = 0 ; i < document . length ; ++ i ) { int curToken = document [ i ] ; // Skip processing tokens that are not the current focus
if ( curToken != termIndex ) continue ; // Buffer the count of how many times each feature appeared in the
// context .
SparseArray < Integer > contextCounts = new SparseHashArray < Integer > ( ) ; // Process all the tokens to the left ( prior ) to the current token ;
for ( int left = Math . max ( i - contextWindowSize , 0 ) ; left < i ; ++ left ) { // NOTE : this token value could be - 1 if the token ' s original
// text was filtered out from the corpus , i . e . was EMPTY _ TOKEN
int token = document [ left ] ; // Only count co - occurrences that are valid features for the
// current token
if ( token >= 0 && featuresForTerm . get ( token ) ) { Integer count = contextCounts . get ( token ) ; contextCounts . set ( token , ( count == null ) ? 1 : count + 1 ) ; } } // Process all the tokens to the right ( after ) to the current token ;
int end = Math . min ( i + contextWindowSize , document . length ) ; for ( int right = i + 1 ; right < end ; ++ right ) { int token = document [ right ] ; // Only count co - occurrences that are valid features for the
// current token
if ( token >= 0 && featuresForTerm . get ( token ) ) { Integer count = contextCounts . get ( token ) ; contextCounts . set ( token , ( count == null ) ? 1 : count + 1 ) ; } } // Each word in the document represents a new context , so the
// specific context instance can be determined from the current word
// and the number of previously processed words
int curContext = rowStart + contexts ; for ( int feat : contextCounts . getElementIndices ( ) ) { // System . out . println ( " setting row : " + curContext + " , col : " + feat ) ;
contextMatrix . set ( curContext , feat , contextCounts . get ( feat ) ) ; } // If the current token wasn ' t skipped , indicate that another
// context was seen
contexts ++ ; } return contexts ;
|
public class Cells { /** * Returns the { @ code BigDecimal } value of the { @ link Cell } ( associated to { @ code table } ) whose name is cellName , or
 * null if this Cells object contains no cell whose name is cellName .
 * ( The original comment said BigInteger ; this method returns BigDecimal . )
 * @ param nameSpace the name of the owning table
 * @ param cellName the name of the Cell we want to retrieve from this Cells object .
 * @ return the { @ code BigDecimal } value of the { @ link Cell } ( associated to { @ code table } ) whose name is cellName , or
 * null if this Cells object contains no cell whose name is cellName */
public BigDecimal getBigDecimal ( String nameSpace , String cellName ) { } }
|
return getValue ( nameSpace , cellName , BigDecimal . class ) ;
|
public class ClassGraph { /** * Scan one or more specific classes , without scanning other classes in the same package unless the package is
 * itself whitelisted .
 * N . B . Automatically calls { @ link # enableClassInfo ( ) } .
 * @ param classNames
 * The fully - qualified names of classes to scan ( using ' . ' as a separator ) . May not include a glob
 * wildcard ( { @ code ' * ' } ) .
 * @ throws IllegalArgumentException if a class name contains a glob wildcard ( ' * ' ) .
 * @ return this ( for method chaining ) . */
public ClassGraph whitelistClasses ( final String ... classNames ) { } }
|
enableClassInfo ( ) ; for ( final String className : classNames ) { if ( className . contains ( "*" ) ) { throw new IllegalArgumentException ( "Cannot use a glob wildcard here: " + className ) ; } final String classNameNormalized = WhiteBlackList . normalizePackageOrClassName ( className ) ; // Whitelist the class itself
scanSpec . classWhiteBlackList . addToWhitelist ( classNameNormalized ) ; scanSpec . classfilePathWhiteBlackList . addToWhitelist ( WhiteBlackList . classNameToClassfilePath ( classNameNormalized ) ) ; final String packageName = PackageInfo . getParentPackageName ( classNameNormalized ) ; // Record the package containing the class , so we can recurse to this point even if the package
// is not itself whitelisted
scanSpec . classPackageWhiteBlackList . addToWhitelist ( packageName ) ; scanSpec . classPackagePathWhiteBlackList . addToWhitelist ( WhiteBlackList . packageNameToPath ( packageName ) + "/" ) ; } return this ;
|
public class MapElement { /** * Invoked when the shape of this element changed .
* < p > This method also reset the bounding box to allow
* its re - computation ( with a call to { @ link # resetBoundingBox ( ) } .
* < p > In the implementation of a MapElement , prefers to call
* { @ link # fireGraphicalAttributeChanged ( ) } or
* { @ code fireShapeChanged ( ) } instead of { @ link # resetBoundingBox ( ) } .
* < p > If the attributes that change does not concern the shape ( bounding box )
* of the element , prefers an invocation of { @ link # fireGraphicalAttributeChanged ( ) }
* instead of { @ code fireShapeChanged ( ) } */
protected final void fireShapeChanged ( ) { } }
|
resetBoundingBox ( ) ; if ( isEventFirable ( ) ) { final GISElementContainer < ? > container = getContainer ( ) ; if ( container != null ) { container . onMapElementGraphicalAttributeChanged ( ) ; } }
|
public class Table { /** * Updates the table with a new table model , effectively replacing the content of the table completely
* @ param tableModel New table model
* @ return Itself */
public synchronized Table < V > setTableModel ( TableModel < V > tableModel ) { } }
|
if ( tableModel == null ) { throw new IllegalArgumentException ( "Cannot assign a null TableModel" ) ; } this . tableModel . removeListener ( tableModelListener ) ; this . tableModel = tableModel ; this . tableModel . addListener ( tableModelListener ) ; invalidate ( ) ; return this ;
|
public class ElementBox { /** * Copy the values from another element box .
* @ param src source element box */
public void copyValues ( ElementBox src ) { } }
|
super . copyValues ( src ) ; nested . addAll ( src . nested ) ; textonly = src . textonly ; pseudoElements = new HashMap < > ( src . pseudoElements ) ; style = src . style ; pseudoStyle = new HashMap < > ( src . pseudoStyle ) ; startChild = src . startChild ; endChild = src . endChild ; isblock = src . isblock ; style = src . style ; display = src . display ; lineHeight = src . lineHeight ; whitespace = src . whitespace ; bgcolor = ( src . bgcolor == null ) ? null : new Color ( src . bgcolor . getRed ( ) , src . bgcolor . getGreen ( ) , src . bgcolor . getBlue ( ) , src . bgcolor . getAlpha ( ) ) ; bgimages = ( src . bgimages == null ) ? null : new Vector < BackgroundImage > ( src . bgimages ) ; transform = src . transform ; position = src . position ; topset = src . topset ; leftset = src . leftset ; bottomset = src . bottomset ; rightset = src . rightset ; if ( src . coords != null ) coords = new LengthSet ( src . coords ) ; if ( src . margin != null ) margin = new LengthSet ( src . margin ) ; if ( src . emargin != null ) emargin = new LengthSet ( src . emargin ) ; if ( src . border != null ) border = new LengthSet ( src . border ) ; if ( src . padding != null ) padding = new LengthSet ( src . padding ) ; if ( src . content != null ) content = new Dimension ( src . content ) ;
|
public class VerticalText {
    /**
     * Outputs the lines to the document. The output can be simulated.
     *
     * @param simulate <CODE>true</CODE> to simulate the writing to the document
     * @return returns the result of the operation. It can be <CODE>NO_MORE_TEXT</CODE>
     *         and/or <CODE>NO_MORE_COLUMN</CODE>
     */
    public int go(boolean simulate) {
        boolean dirty = false;
        PdfContentByte graphics = null;
        if (text != null) {
            // Write into a duplicate so graphics operations can be merged back afterwards
            graphics = text.getDuplicate();
        } else if (!simulate)
            // Real output requires a target canvas; simulation does not
            throw new NullPointerException("VerticalText.go with simulate==false and text==null.");
        int status = 0;
        for (;;) {
            // Column exhausted: no vertical lines left to place
            if (maxLines <= 0) {
                status = NO_MORE_COLUMN;
                if (chunks.isEmpty())
                    status |= NO_MORE_TEXT;
                break;
            }
            // All text consumed
            if (chunks.isEmpty()) {
                status = NO_MORE_TEXT;
                break;
            }
            PdfLine line = createLine(height);
            // Open the text object lazily, on the first line actually written
            if (!simulate && !dirty) {
                text.beginText();
                dirty = true;
            }
            shortenChunkArray();
            if (!simulate) {
                text.setTextMatrix(startX, startY - line.indentLeft());
                writeLine(line, text, graphics);
            }
            // Vertical text: each consumed line moves the column one leading to the left
            --maxLines;
            startX -= leading;
        }
        // Close the text object and merge the duplicated graphics if anything was written
        if (dirty) {
            text.endText();
            text.add(graphics);
        }
        return status;
    }
}
|
public class ComputerSet {
    /**
     * First check point in creating a new agent.
     *
     * Handles two modes: "copy" clones an existing node under a new name and redirects
     * to its configuration page; any other mode delegates to the matching
     * {@link NodeDescriptor} to render step 2 of the creation flow.
     *
     * @param name name for the new node (validated/uniquified by checkName)
     * @param mode either "copy" or the name of a registered node type
     * @param from name of the node to copy when mode is "copy"
     * @throws Failure (unchecked) on validation errors, surfaced to the user
     */
    @RequirePOST
    public synchronized void doCreateItem(StaplerRequest req, StaplerResponse rsp,
            @QueryParameter String name, @QueryParameter String mode,
            @QueryParameter String from) throws IOException, ServletException {
        final Jenkins app = Jenkins.getInstance();
        // Reject callers without node-creation permission before doing anything else
        app.checkPermission(Computer.CREATE);
        if (mode != null && mode.equals("copy")) {
            name = checkName(name);
            Node src = app.getNode(from);
            if (src == null) {
                // Distinguish "no source given" from "source does not exist"
                if (Util.fixEmpty(from) == null) {
                    throw new Failure(Messages.ComputerSet_SpecifySlaveToCopy());
                } else {
                    throw new Failure(Messages.ComputerSet_NoSuchSlave(from));
                }
            }
            // copy through XStream — round-tripping via XML deep-copies the node
            String xml = Jenkins.XSTREAM.toXML(src);
            Node result = (Node) Jenkins.XSTREAM.fromXML(xml);
            result.setNodeName(name);
            if (result instanceof Slave) { // change userId too
                User user = User.current();
                ((Slave) result).setUserId(user == null ? "anonymous" : user.getId());
            }
            // Don't launch the copied agent until the user saves its configuration
            result.holdOffLaunchUntilSave = true;
            app.addNode(result);
            // send the browser to the config page
            rsp.sendRedirect2(result.getNodeName() + "/configure");
        } else { // proceed to step 2
            if (mode == null) {
                throw new Failure("No mode given");
            }
            NodeDescriptor d = NodeDescriptor.all().findByName(mode);
            if (d == null) {
                throw new Failure("No node type ‘" + mode + "’ is known");
            }
            d.handleNewNodePage(this, name, req, rsp);
        }
    }
}
|
public class PolicyUtils { /** * Read file and return contents as a string
* @ param f File to read
* @ return string contents of file
* @ throws MelcoePDPException */
public String fileToString ( File f ) throws MelcoePDPException { } }
|
ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; byte [ ] bytes = new byte [ 1024 ] ; try { FileInputStream fis = new FileInputStream ( f ) ; int count = fis . read ( bytes ) ; while ( count > - 1 ) { out . write ( bytes , 0 , count ) ; count = fis . read ( bytes ) ; } fis . close ( ) ; } catch ( IOException e ) { throw new MelcoePDPException ( "Error reading file: " + f . getName ( ) , e ) ; } return out . toString ( ) ;
|
public class Element { /** * Process a DOM element , descending into its children , and construct the output MessageML tree . */
public void buildAll ( MessageMLParser context , org . w3c . dom . Element element ) throws InvalidInputException , ProcessingException { } }
|
NamedNodeMap attr = element . getAttributes ( ) ; for ( int i = 0 ; i < attr . getLength ( ) ; i ++ ) { buildAttribute ( attr . item ( i ) ) ; } NodeList children = element . getChildNodes ( ) ; for ( int i = 0 ; i < children . getLength ( ) ; i ++ ) { buildNode ( context , children . item ( i ) ) ; }
|
public class Table { /** * Sets the index roots and next identity . */
void setIndexRoots ( Session session , String s ) { } }
|
if ( ! isCached ) { throw Error . error ( ErrorCode . X_42501 , tableName . name ) ; } ParserDQL p = new ParserDQL ( session , new Scanner ( s ) ) ; int [ ] roots = new int [ getIndexCount ( ) ] ; p . read ( ) ; for ( int i = 0 ; i < getIndexCount ( ) ; i ++ ) { int v = p . readInteger ( ) ; roots [ i ] = v ; } setIndexRoots ( roots ) ;
|
public class StringUtils { /** * < p > Removes one newline from end of a String if it ' s there ,
* otherwise leave it alone . A newline is & quot ; { @ code \ n } & quot ; ,
* & quot ; { @ code \ r } & quot ; , or & quot ; { @ code \ r \ n } & quot ; . < / p >
* < p > NOTE : This method changed in 2.0.
* It now more closely matches Perl chomp . < / p >
* < pre >
* StringUtils . chomp ( null ) = null
* StringUtils . chomp ( " " ) = " "
* StringUtils . chomp ( " abc \ r " ) = " abc "
* StringUtils . chomp ( " abc \ n " ) = " abc "
* StringUtils . chomp ( " abc \ r \ n " ) = " abc "
* StringUtils . chomp ( " abc \ r \ n \ r \ n " ) = " abc \ r \ n "
* StringUtils . chomp ( " abc \ n \ r " ) = " abc \ n "
* StringUtils . chomp ( " abc \ n \ rabc " ) = " abc \ n \ rabc "
* StringUtils . chomp ( " \ r " ) = " "
* StringUtils . chomp ( " \ n " ) = " "
* StringUtils . chomp ( " \ r \ n " ) = " "
* < / pre >
* @ param str the String to chomp a newline from , may be null
* @ return String without newline , { @ code null } if null String input */
public static String chomp ( final String str ) { } }
|
if ( isEmpty ( str ) ) { return str ; } if ( str . length ( ) == 1 ) { final char ch = str . charAt ( 0 ) ; if ( ch == CharUtils . CR || ch == CharUtils . LF ) { return EMPTY ; } return str ; } int lastIdx = str . length ( ) - 1 ; final char last = str . charAt ( lastIdx ) ; if ( last == CharUtils . LF ) { if ( str . charAt ( lastIdx - 1 ) == CharUtils . CR ) { lastIdx -- ; } } else if ( last != CharUtils . CR ) { lastIdx ++ ; } return str . substring ( 0 , lastIdx ) ;
|
public class Validators {
    /**
     * Creates and returns a validator, which allows to validate texts to ensure, that
     * they have at least a specific length.
     *
     * @param errorMessage
     *     The error message, which should be shown, if the validation fails, as an
     *     instance of the type {@link CharSequence}. The error message may not be null
     * @param minLength
     *     The minimum length a text must have as an {@link Integer} value. The minimum
     *     length must be at least 1
     * @return The validator, which has been created, as an instance of the type
     *     {@link Validator}
     */
    public static Validator<CharSequence> minLength(@NonNull final CharSequence errorMessage, final int minLength) {
        return new MinLengthValidator(errorMessage, minLength);
    }
}
|
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcTextPathToString ( EDataType eDataType , Object instanceValue ) { } }
|
return instanceValue == null ? null : instanceValue . toString ( ) ;
|
public class StringBlock { /** * Finds index of the string .
* Returns - 1 if the string was not found . */
public int find ( String string ) { } }
|
if ( m_strings == null ) { return - 1 ; } for ( int i = 0 ; i < m_strings . length - 1 ; i ++ ) { if ( m_strings [ i ] . equals ( string ) ) { return i ; } } return - 1 ;
|
public class DccClient { /** * Get the detail information of specified dcc .
* @ param request The request containing all options for getting the instance info .
* @ return A dcc detail model for the dedicatedHostId . */
public GetDedicatedHostResponse getDedicatedHost ( GetDedicatedHostRequest request ) { } }
|
checkNotNull ( request , "request should not be null." ) ; checkNotNull ( request . getDedicatedHostId ( ) , "request dedicatedHostId should not be null." ) ; InternalRequest internalRequest = this . createRequest ( request , HttpMethodName . GET , request . getDedicatedHostId ( ) ) ; return this . invokeHttpClient ( internalRequest , GetDedicatedHostResponse . class ) ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getIfcDuctFittingTypeEnum ( ) { } }
|
if ( ifcDuctFittingTypeEnumEEnum == null ) { ifcDuctFittingTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 971 ) ; } return ifcDuctFittingTypeEnumEEnum ;
|
public class OkRequest {
    /**
     * Write part of a multipart request to the request body.
     *
     * @param name part name written into the Content-Disposition header
     * @param filename filename written into the Content-Disposition header
     * @param contentType value of the Content-Type part header
     * @param part stream providing the part's payload
     * @return this request
     * @throws IOException if writing the part boundary, headers or payload fails
     */
    public OkRequest<T> part(final String name, final String filename, final String contentType, final InputStream part) throws IOException {
        try {
            startPart();
            writePartHeader(name, filename, contentType);
            copy(part, mOutput);
        } catch (IOException ex) {
            // I/O failures are the caller's problem: rethrow unchanged
            throw ex;
        } catch (Exception ex) {
            // Deliberate best-effort: any non-I/O failure is logged and swallowed
            // so the builder chain can continue
            VolleyLog.e(ex, "error on part");
        }
        return this;
    }
}
|
public class SliceInput {
    /**
     * Transfers this buffer's data to the specified destination starting at
     * the current {@code position} and increases the {@code position}
     * by the number of the transferred bytes (= {@code length}). This method
     * is basically same with {@link #readBytes(Slice, int, int)},
     * except that this method increases the {@code writerIndex} of the
     * destination by the number of the transferred bytes (= {@code length})
     * while {@link #readBytes(Slice, int, int)} does not.
     *
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code this.available()} or
     *     if {@code length} is greater than {@code destination.writableBytes}
     */
    public void readBytes(Slice destination, int length) {
        if (length > destination.length()) {
            throw new IndexOutOfBoundsException();
        }
        // NOTE(review): destination.length() is passed as the destination index here,
        // which would place the write at the very end of the slice; most analogous
        // implementations pass 0 (or a writer index) instead. Confirm against the
        // semantics of readBytes(Slice, int, int) before relying on this overload.
        readBytes(destination, destination.length(), length);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.