signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class XtextAntlrGeneratorComparisonFragment { /** * offers a singleton temporary folder */
protected File getTmpFolder ( ) { } } | final ArrayList < ? > _cacheKey = CollectionLiterals . newArrayList ( ) ; final File _result ; synchronized ( _createCache_getTmpFolder ) { if ( _createCache_getTmpFolder . containsKey ( _cacheKey ) ) { return _createCache_getTmpFolder . get ( _cacheKey ) ; } File _createTempDir = Files . createTempDir ( ) ; _result = _createTempDir ; _createCache_getTmpFolder . put ( _cacheKey , _result ) ; } _init_getTmpFolder ( _result ) ; return _result ; |
public class ConjunctionTransform { /** * { @ inheritDoc } */
public DependencyTreeNode [ ] transform ( DependencyTreeNode [ ] dependencyTree ) { } } | // Add in extra relations for conjunction relations
for ( DependencyTreeNode treeNode : dependencyTree ) { // Determine whether or not there is a conjunction link connected to
// this node . For simplicty , only consider relations where the
// current node is the parent in a conjunction relation . Also find
// the parent relation for this node .
boolean hasConj = false ; DependencyRelation parentLink = null ; for ( DependencyRelation link : treeNode . neighbors ( ) ) { if ( link . relation ( ) . equals ( "conj" ) && link . headNode ( ) . equals ( treeNode ) ) hasConj = true ; if ( ! link . headNode ( ) . equals ( treeNode ) ) parentLink = link ; } // Skip any nodes that have no conjunction links or have a missing
// parent node .
if ( ! hasConj || parentLink == null ) continue ; for ( DependencyRelation link : treeNode . neighbors ( ) ) { // Find any nodes that are connected through a conjunction
// with this node . Add an artifical link between the parent
// of this node and the " conj " child of this node .
if ( link . relation ( ) . equals ( "conj" ) && link . headNode ( ) . equals ( treeNode ) ) { DependencyRelation newLink = new SimpleDependencyRelation ( parentLink . headNode ( ) , parentLink . relation ( ) , link . dependentNode ( ) ) ; parentLink . headNode ( ) . neighbors ( ) . add ( newLink ) ; } } } return dependencyTree ; |
public class TioWebsocketMsgHandler { /** * close connection
* @ param wsRequest wsRequest
* @ param bytes bytes
* @ param channelContext channelContext
* @ return AnyObject
* @ throws Exception e */
@ Override public Object onClose ( WsRequest wsRequest , byte [ ] bytes , ChannelContext channelContext ) throws Exception { } } | TioWebsocketMethodMapper onClose = methods . getOnClose ( ) ; if ( onClose != null ) { onClose . getMethod ( ) . invoke ( onClose . getInstance ( ) , channelContext ) ; } log . debug ( "onClose" ) ; Tio . remove ( channelContext , "onClose" ) ; return null ; |
public class CustomFunctions { /** * Returns the first word in the given text string */
public static String first_word ( EvaluationContext ctx , Object text ) { } } | // In Excel this would be IF ( ISERR ( FIND ( " " , A2 ) ) , " " , LEFT ( A2 , FIND ( " " , A2 ) - 1 ) )
return word ( ctx , text , 1 , false ) ; |
public class DefaultClusterEventService { /** * Returns the next node ID for the given message topic .
* @ param topicName the topic for which to return the next node ID
* @ return the next node ID for the given message topic */
private MemberId getNextMemberId ( String topicName ) { } } | InternalTopic topic = topics . get ( topicName ) ; if ( topic == null ) { return null ; } TopicIterator iterator = topic . iterator ( ) ; if ( iterator . hasNext ( ) ) { return iterator . next ( ) . memberId ( ) ; } return null ; |
public class Occurrence { /** * 计算左熵
* @ param pair
* @ return */
public double computeLeftEntropy ( PairFrequency pair ) { } } | Set < Map . Entry < String , TriaFrequency > > entrySet = trieTria . prefixSearch ( pair . getKey ( ) + LEFT ) ; return computeEntropy ( entrySet ) ; |
public class GeometryUtilities { /** * Get the position of a point ( left , right , on line ) for a given line .
* @ param point the point to check .
* @ param lineStart the start coordinate of the line .
* @ param lineEnd the end coordinate of the line .
* @ return 1 if the point is left of the line , - 1 if it is right , 0 if it is on the line . */
public static int getPointPositionAgainstLine ( Coordinate point , Coordinate lineStart , Coordinate lineEnd ) { } } | double value = ( lineEnd . x - lineStart . x ) * ( point . y - lineStart . y ) - ( point . x - lineStart . x ) * ( lineEnd . y - lineStart . y ) ; if ( value > 0 ) { return 1 ; } else if ( value < 0 ) { return - 1 ; } else { return 0 ; } |
public class KTypeHashSet { /** * { @ inheritDoc } */
@ Override public boolean contains ( KType key ) { } } | if ( Intrinsics . isEmpty ( key ) ) { return hasEmptyKey ; } else { final KType [ ] keys = Intrinsics . < KType [ ] > cast ( this . keys ) ; final int mask = this . mask ; int slot = hashKey ( key ) & mask ; KType existing ; while ( ! Intrinsics . isEmpty ( existing = keys [ slot ] ) ) { if ( Intrinsics . equals ( this , key , existing ) ) { return true ; } slot = ( slot + 1 ) & mask ; } return false ; } |
public class ORMapping { /** * 獲取某字段對應的get方法
* @ param column */
public Method getGetter ( String column ) { } } | MappingItem methods = getMethodPair ( column ) ; return methods == null ? null : methods . getGetter ( ) ; |
public class AbstractDocumentationMojo { /** * Format the error message .
* @ param inputFile the input file .
* @ param exception the error .
* @ return the error message . */
protected String formatErrorMessage ( File inputFile , Throwable exception ) { } } | File filename ; int lineno = 0 ; final boolean addExceptionName ; if ( exception instanceof ParsingException ) { addExceptionName = false ; final ParsingException pexception = ( ParsingException ) exception ; final File file = pexception . getFile ( ) ; if ( file != null ) { filename = file ; } else { filename = inputFile ; } lineno = pexception . getLineno ( ) ; } else { addExceptionName = true ; filename = inputFile ; } for ( final String sourceDir : this . session . getCurrentProject ( ) . getCompileSourceRoots ( ) ) { final File root = new File ( sourceDir ) ; if ( isParentFile ( filename , root ) ) { try { filename = FileSystem . makeRelative ( filename , root ) ; } catch ( IOException exception1 ) { } break ; } } final StringBuilder msg = new StringBuilder ( ) ; msg . append ( filename . toString ( ) ) ; if ( lineno > 0 ) { msg . append ( ":" ) . append ( lineno ) ; // $ NON - NLS - 1 $
} msg . append ( ": " ) ; // $ NON - NLS - 1 $
final Throwable rootEx = Throwables . getRootCause ( exception ) ; if ( addExceptionName ) { msg . append ( rootEx . getClass ( ) . getName ( ) ) ; msg . append ( " - " ) ; // $ NON - NLS - 1 $
} msg . append ( rootEx . getLocalizedMessage ( ) ) ; return msg . toString ( ) ; |
public class AnimationConfig { /** * Create the animation data from configurer .
* @ param root The root reference ( must not be < code > null < / code > ) .
* @ return The animations configuration instance .
* @ throws LionEngineException If unable to read data . */
public static AnimationConfig imports ( Xml root ) { } } | Check . notNull ( root ) ; final Map < String , Animation > animations = new HashMap < > ( 0 ) ; for ( final Xml node : root . getChildren ( ANIMATION ) ) { final String anim = node . readString ( ANIMATION_NAME ) ; final Animation animation = createAnimation ( node ) ; animations . put ( anim , animation ) ; } return new AnimationConfig ( animations ) ; |
public class DBCleanService { /** * Returns database cleaner for workspace .
* @ param jdbcConn
* database connection which need to use
* @ param wsEntry
* workspace configuration
* @ return DBCleanerTool
* @ throws DBCleanException */
public static DBCleanerTool getWorkspaceDBCleaner ( Connection jdbcConn , WorkspaceEntry wsEntry ) throws DBCleanException { } } | SecurityHelper . validateSecurityPermission ( JCRRuntimePermissions . MANAGE_REPOSITORY_PERMISSION ) ; String dialect = resolveDialect ( jdbcConn , wsEntry ) ; boolean autoCommit = dialect . startsWith ( DialectConstants . DB_DIALECT_SYBASE ) ; DBCleaningScripts scripts = DBCleaningScriptsFactory . prepareScripts ( dialect , wsEntry ) ; return new DBCleanerTool ( jdbcConn , autoCommit , scripts . getCleaningScripts ( ) , scripts . getCommittingScripts ( ) , scripts . getRollbackingScripts ( ) ) ; |
public class JAXBMarshallerHelper { /** * Check if the passed Marshaller is a Sun JAXB v2 marshaller . Use this method
* to determined , whether the Sun specific methods may be invoked or not .
* @ param aMarshaller
* The marshaller to be checked . May be < code > null < / code > .
* @ return < code > true < / code > if the passed marshaller is not < code > null < / code >
* and is of the Sun class . */
public static boolean isSunJAXB2Marshaller ( @ Nullable final Marshaller aMarshaller ) { } } | if ( aMarshaller == null ) return false ; final String sClassName = aMarshaller . getClass ( ) . getName ( ) ; return sClassName . equals ( JAXB_EXTERNAL_CLASS_NAME ) ; |
public class ZiplinesChunkIterator { /** * / * ( non - Javadoc )
* @ see java . util . Iterator # hasNext ( ) */
public boolean hasNext ( ) { } } | if ( cachedNext != null ) { return true ; } while ( cachedNext == null ) { if ( br != null ) { // attempt to read the next line from this :
try { cachedNext = br . readLine ( ) ; if ( cachedNext == null ) { br = null ; // next loop :
} else { return true ; } } catch ( IOException e ) { e . printStackTrace ( ) ; br = null ; } } else { // do we have more blocks to use ?
if ( blockItr . hasNext ( ) ) { try { br = blockItr . next ( ) . readBlock ( ) ; } catch ( IOException e ) { throw new RuntimeIOException ( ) ; } } else { return false ; } } } return false ; |
public class TableSubject { /** * Fails if the table does not contain the given value . */
public void containsValue ( @ NullableDecl Object value ) { } } | check ( "values()" ) . that ( actual ( ) . values ( ) ) . contains ( value ) ; |
public class Event { /** * indexed getter for themes _ event - gets an indexed value -
* @ generated
* @ param i index in the array to get
* @ return value of the element at index i */
public Event getThemes_event ( int i ) { } } | if ( Event_Type . featOkTst && ( ( Event_Type ) jcasType ) . casFeat_themes_event == null ) jcasType . jcas . throwFeatMissing ( "themes_event" , "ch.epfl.bbp.uima.genia.Event" ) ; jcasType . jcas . checkArrayBounds ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Event_Type ) jcasType ) . casFeatCode_themes_event ) , i ) ; return ( Event ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefArrayValue ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Event_Type ) jcasType ) . casFeatCode_themes_event ) , i ) ) ) ; |
public class AttributeCollector { /** * Low - level accessor method that attribute validation code may call
* for certain types of attributes ; generally only for id and idref / idrefs
* attributes . It returns the underlying ' raw ' attribute value buffer
* for direct access . */
public final TextBuilder getAttrBuilder ( String attrPrefix , String attrLocalName ) throws XMLStreamException { } } | /* Ok : we have parsed prefixed - name of a regular
* attribute . So let ' s initialize the instance . . . */
if ( mAttrCount == 0 ) { if ( mAttributes == null ) { allocBuffers ( ) ; } mAttributes [ 0 ] = new Attribute ( attrPrefix , attrLocalName , 0 ) ; } else { int valueStart = mValueBuilder . getCharSize ( ) ; if ( mAttrCount >= mAttributes . length ) { if ( ( mAttrCount + mNsCount ) >= mMaxAttributesPerElement ) { throw new XMLStreamException ( "Attribute limit (" + mMaxAttributesPerElement + ") exceeded" ) ; } mAttributes = ( Attribute [ ] ) DataUtil . growArrayBy50Pct ( mAttributes ) ; } Attribute curr = mAttributes [ mAttrCount ] ; if ( curr == null ) { mAttributes [ mAttrCount ] = new Attribute ( attrPrefix , attrLocalName , valueStart ) ; } else { curr . reset ( attrPrefix , attrLocalName , valueStart ) ; } } ++ mAttrCount ; // 25 - Sep - 2006 , TSa : Need to keep track of xml : id attribute ?
if ( attrLocalName == mXmlIdLocalName ) { if ( attrPrefix == mXmlIdPrefix ) { if ( mXmlIdAttrIndex != XMLID_IX_DISABLED ) { mXmlIdAttrIndex = mAttrCount - 1 ; } } } /* Can ' t yet create attribute map by name , since we only know
* name prefix , not necessarily matching URI . */
return mValueBuilder ; |
public class AbstractCommonColorsPainter { /** * DOCUMENT ME !
* @ param type DOCUMENT ME !
* @ return DOCUMENT ME ! */
public FourColors getCommonInteriorColors ( CommonControlState type ) { } } | switch ( type ) { case DISABLED : return interiorDisabled ; case DISABLED_SELECTED : return interiorDisabledSelected ; case ENABLED : return interiorEnabled ; case PRESSED : return interiorPressed ; case DEFAULT : return interiorDefault ; case DEFAULT_PRESSED : return interiorDefaultPressed ; case SELECTED : return interiorSelected ; case PRESSED_SELECTED : return interiorPressedSelected ; } return null ; |
public class LogicalTimer { /** * Check if the event with a given timestamp has occurred , according to the timer .
* This implementation always returns true and updates current timer ' s time to the timestamp
* of the most distant future event .
* @ param time Timestamp in milliseconds .
* @ return Always returns true .
* @ deprecated [ REEF - 1532 ] Prefer passing Time object instead of the numeric timestamp .
* Remove after release 0.16. */
@ Override public boolean isReady ( final long time ) { } } | while ( true ) { final long thisTs = this . current . get ( ) ; if ( thisTs >= time || this . current . compareAndSet ( thisTs , time ) ) { return true ; } } |
public class ListSecurityConfigurationsResult { /** * The creation date and time , and name , of each security configuration .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSecurityConfigurations ( java . util . Collection ) } or
* { @ link # withSecurityConfigurations ( java . util . Collection ) } if you want to override the existing values .
* @ param securityConfigurations
* The creation date and time , and name , of each security configuration .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListSecurityConfigurationsResult withSecurityConfigurations ( SecurityConfigurationSummary ... securityConfigurations ) { } } | if ( this . securityConfigurations == null ) { setSecurityConfigurations ( new com . amazonaws . internal . SdkInternalList < SecurityConfigurationSummary > ( securityConfigurations . length ) ) ; } for ( SecurityConfigurationSummary ele : securityConfigurations ) { this . securityConfigurations . add ( ele ) ; } return this ; |
public class FlowImpl { /** * If not already created , a new < code > step < / code > element will be created and returned .
* Otherwise , the first existing < code > step < / code > element will be returned .
* @ return the instance defined for the element < code > step < / code > */
public Step < Flow < T > > getOrCreateStep ( ) { } } | List < Node > nodeList = childNode . get ( "step" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new StepImpl < Flow < T > > ( this , "step" , childNode , nodeList . get ( 0 ) ) ; } return createStep ( ) ; |
public class JsHdrsImpl { /** * Get the unique StreamId used by the flush protocol to determine
* whether a stream is active or flushed .
* Javadoc description supplied by CommonMessageHeaders interface . */
public final SIBUuid12 getGuaranteedStreamUUID ( ) { } } | byte [ ] b = ( byte [ ] ) getHdr2 ( ) . getField ( JsHdr2Access . GUARANTEED_SET_STREAMUUID ) ; if ( b != null ) return new SIBUuid12 ( b ) ; return null ; |
public class Feature { /** * Set attribute value of given type .
* @ param name attribute name
* @ param value attribute value */
public void setBooleanAttribute ( String name , Boolean value ) { } } | Attribute attribute = getAttributes ( ) . get ( name ) ; if ( ! ( attribute instanceof BooleanAttribute ) ) { throw new IllegalStateException ( "Cannot set boolean value on attribute with different type, " + attribute . getClass ( ) . getName ( ) + " setting value " + value ) ; } ( ( BooleanAttribute ) attribute ) . setValue ( value ) ; |
public class ClientCacheHelper { /** * Creates a new cache configuration on Hazelcast members .
* @ param client the client instance which will send the operation to server
* @ param newCacheConfig the cache configuration to be sent to server
* @ param < K > type of the key of the cache
* @ param < V > type of the value of the cache
* @ return the created cache configuration
* @ see com . hazelcast . cache . impl . operation . CacheCreateConfigOperation */
static < K , V > CacheConfig < K , V > createCacheConfig ( HazelcastClientInstanceImpl client , CacheConfig < K , V > newCacheConfig ) { } } | try { String nameWithPrefix = newCacheConfig . getNameWithPrefix ( ) ; int partitionId = client . getClientPartitionService ( ) . getPartitionId ( nameWithPrefix ) ; Object resolvedConfig = resolveCacheConfigWithRetry ( client , newCacheConfig , partitionId ) ; Data configData = client . getSerializationService ( ) . toData ( resolvedConfig ) ; ClientMessage request = CacheCreateConfigCodec . encodeRequest ( configData , true ) ; ClientInvocation clientInvocation = new ClientInvocation ( client , request , nameWithPrefix , partitionId ) ; Future < ClientMessage > future = clientInvocation . invoke ( ) ; final ClientMessage response = future . get ( ) ; final Data data = CacheCreateConfigCodec . decodeResponse ( response ) . response ; return resolveCacheConfig ( client , clientInvocation , data ) ; } catch ( Exception e ) { throw rethrow ( e ) ; } |
public class ResultSetResult { /** * { @ inheritDoc } */
@ Override public void close ( ) { } } | try { resultSet . getStatement ( ) . close ( ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to close ResultSet Statement" , e ) ; } try { resultSet . close ( ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to close ResultSet" , e ) ; } |
public class MailUtil { /** * 使用配置文件中设置的账户发送邮件 , 发送给多人
* @ param tos 收件人列表
* @ param subject 标题
* @ param content 正文
* @ param isHtml 是否为HTML
* @ param files 附件列表 */
public static void send ( Collection < String > tos , String subject , String content , boolean isHtml , File ... files ) { } } | send ( GlobalMailAccount . INSTANCE . getAccount ( ) , tos , subject , content , isHtml , files ) ; |
public class Memory { /** * Transfers count bytes from buffer to Memory
* @ param memoryOffset start offset in the memory
* @ param buffer the data buffer
* @ param bufferOffset start offset of the buffer
* @ param count number of bytes to transfer */
public void setBytes ( long memoryOffset , byte [ ] buffer , int bufferOffset , int count ) { } } | if ( buffer == null ) throw new NullPointerException ( ) ; else if ( bufferOffset < 0 || count < 0 || bufferOffset + count > buffer . length ) throw new IndexOutOfBoundsException ( ) ; else if ( count == 0 ) return ; long end = memoryOffset + count ; checkBounds ( memoryOffset , end ) ; unsafe . copyMemory ( buffer , BYTE_ARRAY_BASE_OFFSET + bufferOffset , null , peer + memoryOffset , count ) ; |
public class IntPoint { /** * Multiply values of two points .
* @ param point1 IntPoint .
* @ param point2 IntPoint .
* @ return IntPoint that contains X and Y axis coordinate . */
public static IntPoint Multiply ( IntPoint point1 , IntPoint point2 ) { } } | IntPoint result = new IntPoint ( point1 ) ; result . Multiply ( point2 ) ; return result ; |
public class Filters { /** * Shortcut for { @ link # registrationScope ( Class ) } for special scopes ( like classpath scan , bundles lookup etc ) .
* @ param specialScope special scope type
* @ param < T > expected info container type ( if used within single configuration type )
* @ return items registered in specified context filter */
public static < T extends ItemInfo > Predicate < T > registrationScope ( final ConfigScope specialScope ) { } } | return registrationScope ( specialScope . getType ( ) ) ; |
public class UrlUtilities { /** * Get content from the passed in URL . This code will open a connection to
* the passed in server , fetch the requested content , and return it as a
* String .
* @ param url URL to hit
* @ param inCookies Map of session cookies ( or null if not needed )
* @ param outCookies Map of session cookies ( or null if not needed )
* @ param trustAllCerts if true , SSL connection will always be trusted .
* @ return String of content fetched from URL . */
public static String getContentFromUrlAsString ( String url , Map inCookies , Map outCookies , boolean trustAllCerts ) { } } | byte [ ] bytes = getContentFromUrl ( url , inCookies , outCookies , trustAllCerts ) ; return bytes == null ? null : StringUtilities . createString ( bytes , "UTF-8" ) ; |
public class WorkQueue { /** * Increases the number of concurrently processing threads by one . */
private void addThread ( ) { } } | Thread t = new WorkerThread ( workQueue ) ; threads . add ( t ) ; t . start ( ) ; |
public class MappingUtils { /** * Invokes Property Descriptor Setter and sets value @ value into it .
* @ param target Object Getter of which would be executed
* @ param prop Property Descriptor which would be used to invoke Getter
* @ param value Value which should be set into @ target */
public static void callSetter ( Object target , PropertyDescriptor prop , Object value ) { } } | Method setter = prop . getWriteMethod ( ) ; if ( setter == null ) { return ; } try { setter . invoke ( target , new Object [ ] { value } ) ; } catch ( IllegalArgumentException e ) { throw new RuntimeException ( "Cannot set " + prop . getName ( ) + ": " + e . getMessage ( ) , e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Cannot set " + prop . getName ( ) + ": " + e . getMessage ( ) , e ) ; } catch ( InvocationTargetException e ) { throw new RuntimeException ( "Cannot set " + prop . getName ( ) + ": " + e . getMessage ( ) , e ) ; } |
public class BOMInputStream { /** * Read - ahead four bytes and check for BOM marks . < br >
* Extra bytes are unread back to the stream , only BOM bytes are skipped .
* @ throws IOException 读取引起的异常 */
protected void init ( ) throws IOException { } } | if ( isInited ) { return ; } byte bom [ ] = new byte [ BOM_SIZE ] ; int n , unread ; n = in . read ( bom , 0 , bom . length ) ; if ( ( bom [ 0 ] == ( byte ) 0x00 ) && ( bom [ 1 ] == ( byte ) 0x00 ) && ( bom [ 2 ] == ( byte ) 0xFE ) && ( bom [ 3 ] == ( byte ) 0xFF ) ) { charset = "UTF-32BE" ; unread = n - 4 ; } else if ( ( bom [ 0 ] == ( byte ) 0xFF ) && ( bom [ 1 ] == ( byte ) 0xFE ) && ( bom [ 2 ] == ( byte ) 0x00 ) && ( bom [ 3 ] == ( byte ) 0x00 ) ) { charset = "UTF-32LE" ; unread = n - 4 ; } else if ( ( bom [ 0 ] == ( byte ) 0xEF ) && ( bom [ 1 ] == ( byte ) 0xBB ) && ( bom [ 2 ] == ( byte ) 0xBF ) ) { charset = "UTF-8" ; unread = n - 3 ; } else if ( ( bom [ 0 ] == ( byte ) 0xFE ) && ( bom [ 1 ] == ( byte ) 0xFF ) ) { charset = "UTF-16BE" ; unread = n - 2 ; } else if ( ( bom [ 0 ] == ( byte ) 0xFF ) && ( bom [ 1 ] == ( byte ) 0xFE ) ) { charset = "UTF-16LE" ; unread = n - 2 ; } else { // Unicode BOM mark not found , unread all bytes
charset = defaultCharset ; unread = n ; } // System . out . println ( " read = " + n + " , unread = " + unread ) ;
if ( unread > 0 ) { in . unread ( bom , ( n - unread ) , unread ) ; } isInited = true ; |
public class UpdateBuilder { /** * Overwrites the document referred to by this DocumentReference . If the document doesn ' t exist
* yet , it will be created . If a document already exists , it will be overwritten .
* @ param documentReference The DocumentReference to overwrite .
* @ param fields A map of the field paths and values for the document .
* @ return The instance for chaining . */
@ Nonnull public T set ( @ Nonnull DocumentReference documentReference , @ Nonnull Map < String , Object > fields ) { } } | return set ( documentReference , fields , SetOptions . OVERWRITE ) ; |
public class Repartitioner { /** * Ensures that no more than maxContiguousPartitionsPerZone partitions are
* contiguous within a single zone .
* Moves the necessary partitions to break up contiguous runs from each zone
* to some other random zone / node . There is some chance that such random
* moves could result in contiguous partitions in other zones .
* @ param nextCandidateCluster cluster metadata
* @ param maxContiguousPartitionsPerZone See RebalanceCLI .
* @ return Return updated cluster metadata . */
public static Cluster balanceContiguousPartitionsPerZone ( final Cluster nextCandidateCluster , final int maxContiguousPartitionsPerZone ) { } } | System . out . println ( "Balance number of contiguous partitions within a zone." ) ; System . out . println ( "numPartitionsPerZone" ) ; for ( int zoneId : nextCandidateCluster . getZoneIds ( ) ) { System . out . println ( zoneId + " : " + nextCandidateCluster . getNumberOfPartitionsInZone ( zoneId ) ) ; } System . out . println ( "numNodesPerZone" ) ; for ( int zoneId : nextCandidateCluster . getZoneIds ( ) ) { System . out . println ( zoneId + " : " + nextCandidateCluster . getNumberOfNodesInZone ( zoneId ) ) ; } // Break up contiguous partitions within each zone
HashMap < Integer , List < Integer > > partitionsToRemoveFromZone = Maps . newHashMap ( ) ; System . out . println ( "Contiguous partitions" ) ; for ( Integer zoneId : nextCandidateCluster . getZoneIds ( ) ) { System . out . println ( "\tZone: " + zoneId ) ; Map < Integer , Integer > partitionToRunLength = PartitionBalanceUtils . getMapOfContiguousPartitions ( nextCandidateCluster , zoneId ) ; List < Integer > partitionsToRemoveFromThisZone = new ArrayList < Integer > ( ) ; for ( Map . Entry < Integer , Integer > entry : partitionToRunLength . entrySet ( ) ) { if ( entry . getValue ( ) > maxContiguousPartitionsPerZone ) { List < Integer > contiguousPartitions = new ArrayList < Integer > ( entry . getValue ( ) ) ; for ( int partitionId = entry . getKey ( ) ; partitionId < entry . getKey ( ) + entry . getValue ( ) ; partitionId ++ ) { contiguousPartitions . add ( partitionId % nextCandidateCluster . getNumberOfPartitions ( ) ) ; } System . out . println ( "Contiguous partitions: " + contiguousPartitions ) ; partitionsToRemoveFromThisZone . addAll ( Utils . removeItemsToSplitListEvenly ( contiguousPartitions , maxContiguousPartitionsPerZone ) ) ; } } partitionsToRemoveFromZone . put ( zoneId , partitionsToRemoveFromThisZone ) ; System . out . println ( "\t\tPartitions to remove: " + partitionsToRemoveFromThisZone ) ; } Cluster returnCluster = Cluster . cloneCluster ( nextCandidateCluster ) ; Random r = new Random ( ) ; for ( int zoneId : returnCluster . getZoneIds ( ) ) { for ( int partitionId : partitionsToRemoveFromZone . get ( zoneId ) ) { // Pick a random other zone Id
List < Integer > otherZoneIds = new ArrayList < Integer > ( ) ; for ( int otherZoneId : returnCluster . getZoneIds ( ) ) { if ( otherZoneId != zoneId ) { otherZoneIds . add ( otherZoneId ) ; } } int whichOtherZoneId = otherZoneIds . get ( r . nextInt ( otherZoneIds . size ( ) ) ) ; // Pick a random node from other zone ID
int whichNodeOffset = r . nextInt ( returnCluster . getNumberOfNodesInZone ( whichOtherZoneId ) ) ; int whichNodeId = new ArrayList < Integer > ( returnCluster . getNodeIdsInZone ( whichOtherZoneId ) ) . get ( whichNodeOffset ) ; // Steal partition from one zone to another !
returnCluster = UpdateClusterUtils . createUpdatedCluster ( returnCluster , whichNodeId , Lists . newArrayList ( partitionId ) ) ; } } return returnCluster ; |
public class PoolsImpl { /** * Lists the usage metrics , aggregated by pool across individual time intervals , for the specified account .
* If you do not specify a $ filter clause including a poolId , the response includes all pools that existed in the account in the time range of the returned aggregation intervals . If you do not specify a $ filter clause including a startTime or endTime these filters default to the start and end times of the last aggregation interval currently available ; that is , only the last aggregation interval is returned .
* @ param poolListUsageMetricsOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; PoolUsageMetrics & gt ; object */
public Observable < ServiceResponseWithHeaders < Page < PoolUsageMetrics > , PoolListUsageMetricsHeaders > > listUsageMetricsWithServiceResponseAsync ( final PoolListUsageMetricsOptions poolListUsageMetricsOptions ) { } } | return listUsageMetricsSinglePageAsync ( poolListUsageMetricsOptions ) . concatMap ( new Func1 < ServiceResponseWithHeaders < Page < PoolUsageMetrics > , PoolListUsageMetricsHeaders > , Observable < ServiceResponseWithHeaders < Page < PoolUsageMetrics > , PoolListUsageMetricsHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < PoolUsageMetrics > , PoolListUsageMetricsHeaders > > call ( ServiceResponseWithHeaders < Page < PoolUsageMetrics > , PoolListUsageMetricsHeaders > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } PoolListUsageMetricsNextOptions poolListUsageMetricsNextOptions = null ; if ( poolListUsageMetricsOptions != null ) { poolListUsageMetricsNextOptions = new PoolListUsageMetricsNextOptions ( ) ; poolListUsageMetricsNextOptions . withClientRequestId ( poolListUsageMetricsOptions . clientRequestId ( ) ) ; poolListUsageMetricsNextOptions . withReturnClientRequestId ( poolListUsageMetricsOptions . returnClientRequestId ( ) ) ; poolListUsageMetricsNextOptions . withOcpDate ( poolListUsageMetricsOptions . ocpDate ( ) ) ; } return Observable . just ( page ) . concatWith ( listUsageMetricsNextWithServiceResponseAsync ( nextPageLink , poolListUsageMetricsNextOptions ) ) ; } } ) ; |
public class DoubleStat { /** * Returns the sum of seen values . If any value is a NaN or the sum is at any point a NaN then the average will be
* NaN . The average returned can vary depending upon the order in which values are seen .
* @ return the sum of values */
Double getSum ( ) { } } | if ( count == 0 ) { return null ; } // Better error bounds to add both terms as the final sum
double tmp = sum + sumCompensation ; if ( Double . isNaN ( tmp ) && Double . isInfinite ( simpleSum ) ) { // If the compensated sum is spuriously NaN from
// accumulating one or more same - signed infinite values ,
// return the correctly - signed infinity stored in simpleSum .
return simpleSum ; } else { return tmp ; } |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcPreDefinedProperties ( ) { } } | if ( ifcPreDefinedPropertiesEClass == null ) { ifcPreDefinedPropertiesEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 443 ) ; } return ifcPreDefinedPropertiesEClass ; |
public class ListEntitiesForPolicyResult { /** * A list of IAM users that the policy is attached to .
* @ return A list of IAM users that the policy is attached to . */
public java . util . List < PolicyUser > getPolicyUsers ( ) { } } | if ( policyUsers == null ) { policyUsers = new com . amazonaws . internal . SdkInternalList < PolicyUser > ( ) ; } return policyUsers ; |
public class Proxy { /** * Returns the newly formatted URL based on if the client is enabled , server mappings etc . .
* This also figures out what client ID to use for a profile ( default is " - 1 " ( PROFILE _ CLIENT _ DEFAULT _ ID ) )
* @ param httpServletRequest
* @ param history
* @ param requestType
* @ return
* @ throws Exception */
private String getProxyURL ( HttpServletRequest httpServletRequest , History history , Integer requestType ) throws Exception { } } | // first determine if Odo will even fully handle this request
RequestInformation requestInfo = requestInformation . get ( ) ; if ( ServerRedirectService . getInstance ( ) . canHandleRequest ( HttpUtilities . getHostNameFromURL ( httpServletRequest . getRequestURL ( ) . toString ( ) ) ) ) { requestInfo . handle = true ; } String stringProxyURL = "http://" ; // determine http vs https
String originalURL = httpServletRequest . getRequestURL ( ) . toString ( ) ; history . setOriginalRequestURL ( originalURL ) ; if ( originalURL . startsWith ( "https://" ) ) { stringProxyURL = "https://" ; } String hostName = HttpUtilities . getHostNameFromURL ( originalURL ) ; int port = HttpUtilities . getPortFromURL ( originalURL ) ; String origHostName = hostName ; logger . info ( "original host name = {}" , hostName ) ; processClientId ( httpServletRequest , history ) ; String queryString = httpServletRequest . getQueryString ( ) ; if ( queryString == null ) { queryString = "" ; } else { queryString = "?" + queryString . replace ( "|" , "%7C" ) . replace ( "[" , "%5B" ) . replace ( "]" , "%5D" ) ; } // if this can ' t be overridden we are going to finish the string and bail
if ( ! requestInfo . handle ) { stringProxyURL = stringProxyURL + hostName + ":" + port ; // Handle the path given to the servlet
stringProxyURL += httpServletRequest . getPathInfo ( ) ; stringProxyURL += queryString ; return stringProxyURL ; } // figure out what profile to use based on source server name and matching paths
// if no profile has matching paths then we just pick the first enabled one
// if no profile is enabled then we pick the first one so that we have a URL mapping
for ( Profile tryProfile : ServerRedirectService . getInstance ( ) . getProfilesForServerName ( origHostName ) ) { logger . info ( "Trying {}" , tryProfile . getName ( ) ) ; Client tryClient = ClientService . getInstance ( ) . findClient ( history . getClientUUID ( ) , tryProfile . getId ( ) ) ; if ( tryClient == null ) { continue ; } List < EndpointOverride > trySelectedRequestPaths = PathOverrideService . getInstance ( ) . getSelectedPaths ( Constants . OVERRIDE_TYPE_REQUEST , tryClient , tryProfile , httpServletRequest . getRequestURL ( ) + queryString , requestType , false ) ; List < EndpointOverride > trySelectedResponsePaths = PathOverrideService . getInstance ( ) . getSelectedPaths ( Constants . OVERRIDE_TYPE_RESPONSE , tryClient , tryProfile , httpServletRequest . getRequestURL ( ) + queryString , requestType , false ) ; logger . info ( "Sizes {} {}" , trySelectedRequestPaths . size ( ) , trySelectedResponsePaths . size ( ) ) ; if ( ( trySelectedRequestPaths . size ( ) > 0 || trySelectedResponsePaths . size ( ) > 0 ) || tryClient . getIsActive ( ) || requestInfo . profile == null ) { logger . info ( "Selected {}, {}, " + httpServletRequest . getRequestURL ( ) + "?" + httpServletRequest . getQueryString ( ) , tryProfile . getName ( ) , tryClient . getId ( ) ) ; // reset history UUID based on client
history . setClientUUID ( tryClient . getUUID ( ) ) ; requestInfo . profile = tryProfile ; requestInfo . selectedRequestPaths = new ArrayList < EndpointOverride > ( trySelectedRequestPaths ) ; requestInfo . selectedResponsePaths = new ArrayList < EndpointOverride > ( trySelectedResponsePaths ) ; requestInfo . client = tryClient ; } } // we always should do this mapping since a request coming through us means it was redirected . .
// don ' t want to cause a loop
hostName = getDestinationHostName ( hostName ) ; logger . info ( "new host name = {}" , hostName ) ; stringProxyURL = stringProxyURL + hostName ; // Handle the path given to the servlet
stringProxyURL += httpServletRequest . getPathInfo ( ) ; stringProxyURL += processQueryString ( queryString ) . queryString ; logger . info ( "url = {}" , stringProxyURL ) ; history . setProfileId ( requestInfo . profile . getId ( ) ) ; return stringProxyURL ; |
public class DependencyCustomizer { /** * Add a single dependency with the specified classifier and type and , optionally , all
* of its dependencies . The group ID and version of the dependency are resolved from
* the module by using the customizer ' s { @ link ArtifactCoordinatesResolver } .
* @ param module the module ID
* @ param classifier the classifier , may be { @ code null }
* @ param type the type , may be { @ code null }
* @ param transitive { @ code true } if the transitive dependencies should also be added ,
* otherwise { @ code false }
* @ return this { @ link DependencyCustomizer } for continued use */
public DependencyCustomizer add ( String module , String classifier , String type , boolean transitive ) { } } | if ( canAdd ( ) ) { ArtifactCoordinatesResolver artifactCoordinatesResolver = this . dependencyResolutionContext . getArtifactCoordinatesResolver ( ) ; this . classNode . addAnnotation ( createGrabAnnotation ( artifactCoordinatesResolver . getGroupId ( module ) , artifactCoordinatesResolver . getArtifactId ( module ) , artifactCoordinatesResolver . getVersion ( module ) , classifier , type , transitive ) ) ; } return this ; |
public class OpenPgpPubSubUtil { /** * Publishes a { @ link SecretkeyElement } to the secret key node .
* The node will be configured to use the whitelist access model to prevent access from subscribers .
* @ see < a href = " https : / / xmpp . org / extensions / xep - 0373 . html # synchro - pep " >
* XEP - 0373 § 5 . Synchronizing the Secret Key with a Private PEP Node < / a >
* @ param connection { @ link XMPPConnection } of the user
* @ param element a { @ link SecretkeyElement } containing the encrypted secret key of the user
* @ throws InterruptedException if the thread gets interrupted .
* @ throws PubSubException . NotALeafNodeException if something is wrong with the PubSub node
* @ throws XMPPException . XMPPErrorException in case of an protocol related error
* @ throws SmackException . NotConnectedException if we are not connected
* @ throws SmackException . NoResponseException / watch ? v = 0peBq89ZTrc
* @ throws SmackException . FeatureNotSupportedException if the Server doesn ' t support the whitelist access model */
public static void depositSecretKey ( XMPPConnection connection , SecretkeyElement element ) throws InterruptedException , PubSubException . NotALeafNodeException , XMPPException . XMPPErrorException , SmackException . NotConnectedException , SmackException . NoResponseException , SmackException . FeatureNotSupportedException { } } | if ( ! OpenPgpManager . serverSupportsSecretKeyBackups ( connection ) ) { throw new SmackException . FeatureNotSupportedException ( "http://jabber.org/protocol/pubsub#access-whitelist" ) ; } PubSubManager pm = PepManager . getInstanceFor ( connection ) . getPepPubSubManager ( ) ; LeafNode secretKeyNode = pm . getOrCreateLeafNode ( PEP_NODE_SECRET_KEY ) ; OpenPgpPubSubUtil . changeAccessModelIfNecessary ( secretKeyNode , AccessModel . whitelist ) ; secretKeyNode . publish ( new PayloadItem < > ( element ) ) ; |
public class ButtonSprite { /** * Updates this sprite ' s bounds after a change to the label . */
public void updateBounds ( ) { } } | // invalidate the old . . .
invalidate ( ) ; // size the bounds to fit our label
Dimension size = _label . getSize ( ) ; _bounds . width = size . width + PADDING * 2 + ( _style == ROUNDED ? _arcWidth : 0 ) ; _bounds . height = size . height + PADDING * 2 ; // . . . and the new
invalidate ( ) ; |
public class AbstractInternalAntlrParser { /** * currentNode = currentNode . getParent ( ) ; */
protected void afterParserOrEnumRuleCall ( ) { } } | ICompositeNode newCurrent = nodeBuilder . compressAndReturnParent ( currentNode ) ; if ( currentNode == lastConsumedNode ) { lastConsumedNode = newCurrent ; } currentNode = newCurrent ; |
public class IntUnaryOperatorBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static IntUnaryOperator intUnaryOperatorFrom ( Consumer < IntUnaryOperatorBuilder > buildingFunction ) { } } | IntUnaryOperatorBuilder builder = new IntUnaryOperatorBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ; |
public class WRTagTaglet { /** * Split string to get tag set .
* @ param tags
* the String of tags to split .
* @ return set of tags . */
public static Set < String > getTagSet ( String tags ) { } } | tags = StringUtils . substringBefore ( tags , "\n" ) ; String [ ] tagArr = tags . split ( "[\\s,,;;]" ) ; Set < String > tagSet = new HashSet < String > ( ) ; for ( int i = 0 ; i < tagArr . length ; i ++ ) { String tag = tagArr [ i ] . trim ( ) ; if ( ! tag . isEmpty ( ) ) { tagSet . add ( tag ) ; } } return tagSet ; |
public class BigComplex { /** * Calculates the square of the absolute value of this complex number .
* < p > This method is faster than { @ link # abs ( MathContext ) } since it does not need to calculate the { @ link BigDecimalMath # sqrt ( BigDecimal , MathContext ) } . < / p >
* < p > This methods < strong > does not < / strong > modify this instance . < / p >
* @ param mathContext the { @ link MathContext } used to calculate the result
* @ return the calculated { @ link BigComplex } result
* @ see # abs ( MathContext ) */
public BigDecimal absSquare ( MathContext mathContext ) { } } | return re . multiply ( re , mathContext ) . add ( im . multiply ( im , mathContext ) , mathContext ) ; |
public class IncrementalScopeCreator { /** * Get an instance of the ScopeCreator */
public static IncrementalScopeCreator getInstance ( AbstractCompiler compiler ) { } } | IncrementalScopeCreator creator = compiler . getScopeCreator ( ) ; if ( creator == null ) { creator = new IncrementalScopeCreator ( compiler ) ; compiler . putScopeCreator ( creator ) ; } return creator ; |
public class CmsHelpNavigationListView { /** * Creates the HTML for the internal help . < p >
* @ param buffer the StringBuffer to which the Navigation will be appended
* @ param navElements the navigation elements to build the navigation for */
private void createNavigationInternal ( StringBuffer buffer , List < CmsJspNavElement > navElements ) { } } | // take the element to render .
CmsJspNavElement element = navElements . remove ( 0 ) ; int elementLevel = element . getNavTreeLevel ( ) ; String spacing = getSpaces ( elementLevel * 2 ) ; // render element :
buffer . append ( spacing ) . append ( "<li>\n" ) ; buffer . append ( spacing ) . append ( " <a href=\"" ) ; buffer . append ( m_jsp . link ( element . getResourceName ( ) ) ) ; buffer . append ( "\" title=\"" ) ; buffer . append ( element . getNavText ( ) ) ; buffer . append ( "\"" ) ; if ( elementLevel == 1 ) { buffer . append ( " class=\"bold\"" ) ; } buffer . append ( ">" ) ; buffer . append ( element . getNavText ( ) ) ; buffer . append ( "</a>\n" ) ; // peek at the next ( list is depth - first by contract )
if ( ! navElements . isEmpty ( ) ) { CmsJspNavElement child = navElements . get ( 0 ) ; int childLevel = child . getNavTreeLevel ( ) ; if ( elementLevel < childLevel ) { // next one goes down a level : it is a child by tree means
buffer . append ( spacing ) . append ( " <ul>\n" ) ; } else if ( elementLevel == childLevel ) { // it is a sibling : close our list item , no recursion
buffer . append ( spacing ) . append ( "</li>\n" ) ; } else { // next element gets up one layer
// this has to happen because of the depth - first contract !
buffer . append ( spacing ) . append ( " </li>\n" ) . append ( spacing ) . append ( "</ul>\n" ) ; } createNavigationInternal ( buffer , navElements ) ; } else { // no more next elements : get back and close all lists ( by using the recursion we are in )
buffer . append ( spacing ) . append ( " </li>\n" ) . append ( spacing ) . append ( "</ul>\n" ) ; } |
public class EventMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Event event , ProtocolMarshaller protocolMarshaller ) { } } | if ( event == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( event . getSourceIdentifier ( ) , SOURCEIDENTIFIER_BINDING ) ; protocolMarshaller . marshall ( event . getSourceType ( ) , SOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( event . getMessage ( ) , MESSAGE_BINDING ) ; protocolMarshaller . marshall ( event . getEventCategories ( ) , EVENTCATEGORIES_BINDING ) ; protocolMarshaller . marshall ( event . getDate ( ) , DATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class PrototypeSize { /** * Computes and returns the width of this Size ' s prototype in pixel . Ignores the component list
* and measures . Obtains the FontMetrics from the given layout { @ code container } for the default
* dialog font provided by { @ link DefaultUnitConverter # getDefaultDialogFont ( ) }
* Invoked by { @ link com . privatejgoodies . forms . layout . FormSpec } to determine the size of a
* column or row .
* @ param container the layout container
* @ param components the list of components used to compute the size
* @ param minMeasure the measure that determines the minimum sizes
* @ param prefMeasure the measure that determines the preferred sizes
* @ param defaultMeasure the measure that determines the default sizes
* @ return the { @ code stringWidth } for this size ' s prototype string computed by the
* { @ code container } ' s FontMetrics for the { @ code DefaultUnitConverter } ' s default dialog font */
@ Override public int maximumSize ( Container container , List components , FormLayout . Measure minMeasure , FormLayout . Measure prefMeasure , FormLayout . Measure defaultMeasure ) { } } | Font font = DefaultUnitConverter . getInstance ( ) . getDefaultDialogFont ( ) ; FontMetrics fm = container . getFontMetrics ( font ) ; return fm . stringWidth ( getPrototype ( ) ) ; |
public class ProfileComplianceManager { /** * SDK5.0 signature */
public ProfileExpressionMetadata [ ] queryExpressionMetadata ( String [ ] expressionName , Profile profile ) throws RuntimeFault , RemoteException { } } | return getVimService ( ) . queryExpressionMetadata ( getMOR ( ) , expressionName , profile == null ? null : profile . getMOR ( ) ) ; |
public class AmazonRoute53ResolverClient { /** * Deletes a resolver endpoint . The effect of deleting a resolver endpoint depends on whether it ' s an inbound or an
* outbound resolver endpoint :
* < ul >
* < li >
* < b > Inbound < / b > : DNS queries from your network or another VPC are no longer routed to the DNS service for the
* specified VPC .
* < / li >
* < li >
* < b > Outbound < / b > : DNS queries from a VPC are no longer routed to your network or to another VPC .
* < / li >
* < / ul >
* @ param deleteResolverEndpointRequest
* @ return Result of the DeleteResolverEndpoint operation returned by the service .
* @ throws ResourceNotFoundException
* The specified resource doesn ' t exist .
* @ throws InvalidParameterException
* One or more parameters in this request are not valid .
* @ throws InvalidRequestException
* The request is invalid .
* @ throws InternalServiceErrorException
* We encountered an unknown error . Try again in a few minutes .
* @ throws ThrottlingException
* The request was throttled . Try again in a few minutes .
* @ sample AmazonRoute53Resolver . DeleteResolverEndpoint
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53resolver - 2018-04-01 / DeleteResolverEndpoint "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeleteResolverEndpointResult deleteResolverEndpoint ( DeleteResolverEndpointRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteResolverEndpoint ( request ) ; |
public class Buffer { public void assertInvariant ( ) { } } | if ( start > pages . getSize ( ) ) { throw new IllegalStateException ( ) ; } if ( start > pageSize ) { throw new IllegalStateException ( ) ; } if ( start > getEndOfs ( ) ) { throw new IllegalStateException ( ) ; } if ( end > endFilled ) { throw new IllegalStateException ( ) ; } if ( endPage != pages . get ( endPageIdx ) ) { throw new IllegalStateException ( ) ; } |
public class EntropyStatistic { /** * Compute an exclusive split ( i . e . ' feature ' ' = = ' ' val ' ) */
@ Override protected Split eqSplit ( int col , Data d , int [ ] dist , int distWeight , Random rand ) { } } | final int [ ] distR = new int [ d . classes ( ) ] , distL = dist . clone ( ) ; final double upperBoundReduction = upperBoundReduction ( d . classes ( ) ) ; double maxReduction = - 1 ; int bestSplit = - 1 ; int min = d . colMinIdx ( col ) ; int max = d . colMaxIdx ( col ) ; for ( int i = min ; i < max + 1 ; ++ i ) { for ( int j = 0 ; j < distR . length ; ++ j ) { int v = _columnDists [ col ] [ i ] [ j ] ; distL [ j ] += distR [ j ] ; distR [ j ] = v ; distL [ j ] -= v ; } int totL = 0 , totR = 0 ; for ( int e : distL ) totL += e ; if ( totL == 0 ) continue ; for ( int e : distR ) totR += e ; if ( totR == 0 ) continue ; double eL = 0 , eR = 0 ; for ( int e : distL ) eL += gain ( e , totL ) ; for ( int e : distR ) eR += gain ( e , totR ) ; double eReduction = upperBoundReduction - ( ( eL * totL + eR * totR ) / ( totL + totR ) ) ; if ( eReduction == maxReduction ) { if ( rand . nextInt ( 10 ) < 2 ) bestSplit = i ; // randomly pick one out of several
} else if ( eReduction > maxReduction ) { bestSplit = i ; maxReduction = eReduction ; } if ( i == 0 && d . columnArity ( col ) == 1 ) break ; // for boolean features , only one split needs to be evaluated
} return bestSplit == - 1 ? Split . impossible ( Utils . maxIndex ( dist , _random ) ) : Split . exclusion ( col , bestSplit , maxReduction ) ; |
public class SibRaDispatchEndpointActivation { /** * Closes the connection for the given messaging engine if there is one
* open . Removes any corresponding sessions from the maps .
* @ param meUuid
* the UUID for the messaging engine to close the connection for */
protected void closeConnection ( final String meUuid , boolean alreadyClosed ) { } } | final String methodName = "closeConnection" ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName , meUuid ) ; } synchronized ( _sessionsByMeUuid ) { super . closeConnection ( meUuid , alreadyClosed ) ; _sessionsByMeUuid . remove ( meUuid ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName ) ; } |
public class MultiColumnText { /** * Add a new column . The parameters are limits for each column
* wall in the format of a sequence of points ( x1 , y1 , x2 , y2 , . . . ) .
* @ param left limits for left column
* @ param right limits for right column */
public void addColumn ( float [ ] left , float [ ] right ) { } } | ColumnDef nextDef = new ColumnDef ( left , right ) ; if ( ! nextDef . isSimple ( ) ) simple = false ; columnDefs . add ( nextDef ) ; |
public class CopySoundexHandler { /** * Set this cloned listener to the same state at this listener .
* @ param field The field this new listener will be added to .
* @ param The new listener to sync to this .
* @ param Has the init method been called ?
* @ return True if I called init . */
public boolean syncClonedListener ( BaseField field , FieldListener listener , boolean bInitCalled ) { } } | if ( ! bInitCalled ) ( ( CopySoundexHandler ) listener ) . init ( null , m_iFieldSeq ) ; return super . syncClonedListener ( field , listener , true ) ; |
public class ScopeFormat { /** * Concatenates the given component names separated by the delimiter character . Additionally
* the character filter is applied to all component names .
* @ param filter Character filter to be applied to the component names
* @ param delimiter Delimiter to separate component names
* @ param components Array of component names
* @ return The concatenated component name */
public static String concat ( CharacterFilter filter , Character delimiter , String ... components ) { } } | StringBuilder sb = new StringBuilder ( ) ; sb . append ( filter . filterCharacters ( components [ 0 ] ) ) ; for ( int x = 1 ; x < components . length ; x ++ ) { sb . append ( delimiter ) ; sb . append ( filter . filterCharacters ( components [ x ] ) ) ; } return sb . toString ( ) ; |
public class PeepholeRemoveDeadCode { /** * Returns a expression executing { @ code expr } which is legal in any expression context .
* @ param expr An attached expression
* @ return A detached expression */
private static Node asDetachedExpression ( Node expr ) { } } | switch ( expr . getToken ( ) ) { case SPREAD : expr = IR . arraylit ( expr . detach ( ) ) . srcref ( expr ) ; break ; default : break ; } if ( expr . getParent ( ) != null ) { expr . detach ( ) ; } checkState ( IR . mayBeExpression ( expr ) , expr ) ; return expr ; |
public class AbstractBeanDefinition { /** * Adds an injection point for a field . Typically called by a dynamically generated subclass .
* @ param declaringType The declaring type
* @ param fieldType The field type
* @ param field The name of the field
* @ param annotationMetadata The annotation metadata for the field
* @ param typeArguments The arguments
* @ param requiresReflection Whether reflection is required
* @ return this component definition */
@ SuppressWarnings ( { } } | "unused" , "unchecked" } ) @ Internal @ UsedByGeneratedCode protected final AbstractBeanDefinition addInjectionPoint ( Class declaringType , Class fieldType , String field , @ Nullable AnnotationMetadata annotationMetadata , @ Nullable Argument [ ] typeArguments , boolean requiresReflection ) { if ( annotationMetadata != null && annotationMetadata . hasDeclaredAnnotation ( Inject . class ) ) { requiredComponents . add ( fieldType ) ; } if ( requiresReflection ) { fieldInjectionPoints . add ( new ReflectionFieldInjectionPoint ( this , declaringType , fieldType , field , annotationMetadata , typeArguments ) ) ; } else { fieldInjectionPoints . add ( new DefaultFieldInjectionPoint ( this , declaringType , fieldType , field , annotationMetadata , typeArguments ) ) ; } return this ; |
public class AbstractJanitor { /** * { @ inheritDoc } */
public synchronized boolean killConnectionListener ( String id ) { } } | ConnectionListener cl = null ; Iterator < ConnectionListener > it = listeners . keySet ( ) . iterator ( ) ; while ( cl == null && it . hasNext ( ) ) { ConnectionListener l = it . next ( ) ; if ( Integer . toHexString ( System . identityHashCode ( l ) ) . equals ( id ) ) cl = l ; } if ( cl != null ) { try { pool . returnConnectionListener ( cl , true ) ; } catch ( Exception e ) { log . tracef ( e , "killConnectionListener(%s)" , id ) ; } return true ; } return false ; |
public class VarOptItemsSketch { /** * / * swap values of data _ , weights _ , and marks between src and dst indices */
private void swapValues ( final int src , final int dst ) { } } | final T item = data_ . get ( src ) ; data_ . set ( src , data_ . get ( dst ) ) ; data_ . set ( dst , item ) ; final Double wt = weights_ . get ( src ) ; weights_ . set ( src , weights_ . get ( dst ) ) ; weights_ . set ( dst , wt ) ; if ( marks_ != null ) { final Boolean mark = marks_ . get ( src ) ; marks_ . set ( src , marks_ . get ( dst ) ) ; marks_ . set ( dst , mark ) ; } |
public class RevisionDataInputStream { /** * Creates a new instance of the RevisionDataInputStream class . Upon a successful call to this method , 4 bytes
* will have been read from the InputStream representing the expected length of the serialization .
* @ param inputStream The InputStream to wrap .
* @ throws IOException If an IO Exception occurred . */
static RevisionDataInputStream wrap ( InputStream inputStream ) throws IOException { } } | int bound = BitConverter . readInt ( inputStream ) ; return new RevisionDataInputStream ( new BoundedInputStream ( inputStream , bound ) ) ; |
public class PostgreSQLDatabase { /** * < p > Creates sequence < code > _ name < / code > in PostgreSQL . As name of the
* sequence the lower case of < code > _ name < / code > is used . < / p >
* < p > The minimum and starting value is set to < code > _ startValue < / code >
* minus one and then updated to current value ( by fetching a value from
* the sequence ) . The current value is < code > _ startValue < / code > minus one
* so that a call to { @ link # nextSequence ( Connection , String ) } returns the
* expected < code > _ startValue < / code > . < / p >
* @ param _ con SQL connection
* @ param _ name name of the sequence to update
* @ param _ startValue start value of the sequence
* @ return this database instance
* @ throws SQLException if sequence could not be created
* @ see # nextSequence ( Connection , String ) */
@ Override public PostgreSQLDatabase createSequence ( final Connection _con , final String _name , final long _startValue ) throws SQLException { } } | final long value = _startValue - 1 ; final StringBuilder cmd = new StringBuilder ( ) ; cmd . append ( "CREATE SEQUENCE \"" ) . append ( _name . toLowerCase ( ) ) . append ( "\" INCREMENT 1" ) . append ( " MINVALUE " ) . append ( value ) . append ( " MAXVALUE 9223372036854775807 " ) . append ( " START " ) . append ( value ) . append ( " CACHE 1;" ) ; final PreparedStatement stmt = _con . prepareStatement ( cmd . toString ( ) ) ; try { stmt . execute ( ) ; } finally { stmt . close ( ) ; } if ( ! _con . getAutoCommit ( ) ) { _con . commit ( ) ; } return this ; |
public class MinimumViableSystem { /** * Visible for testing */
void checkWritableDir ( String tempPath ) { } } | try { File tempFile = File . createTempFile ( "check" , "tmp" , new File ( tempPath ) ) ; deleteQuietly ( tempFile ) ; } catch ( IOException e ) { throw new IllegalStateException ( format ( "Temp directory is not writable: %s" , tempPath ) , e ) ; } |
public class SibRaCommonEndpointActivation { /** * Overrides the parent method so that if there are no connections left it will
* check to see if a new connection can be made
* @ param messagingEngine
* the messaging engine that is stopping
* @ param mode
* the mode with which the engine is stopping */
@ Override public synchronized void messagingEngineStopping ( final JsMessagingEngine messagingEngine , final int mode ) { } } | final String methodName = "messagingEngineStopping" ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName , new Object [ ] { messagingEngine , Integer . valueOf ( mode ) } ) ; } SibTr . info ( TRACE , "ME_STOPPING_CWSIV0784" , new Object [ ] { messagingEngine . getName ( ) , messagingEngine . getBus ( ) } ) ; // dropConnection ( messagingEngine . getUuid ( ) . toString ( ) , null , true ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName ) ; } |
public class SNISSLExplorer { /** * Launch and explore the security capabilities from byte buffer .
* This method tries to parse as few records as possible from
* { @ code source } byte buffer to get the capabilities
* of an SSL / TLS connection .
* Please NOTE that this method must be called before any handshaking
* occurs . The behavior of this method is not defined in this release
* if the handshake has begun , or has completed .
* This method accesses the { @ code source } parameter in read - only
* mode , and does not update the buffer ' s properties such as capacity ,
* limit , position , and mark values .
* @ param source
* a { @ link ByteBuffer } containing
* inbound or outbound network data for an SSL / TLS connection .
* @ throws IOException on network data error
* @ throws BufferUnderflowException if not enough source bytes available
* to make a complete exploration .
* @ return the explored capabilities of the SSL / TLS
* connection */
public static List < SNIServerName > explore ( ByteBuffer source ) throws SSLException { } } | ByteBuffer input = source . duplicate ( ) ; // Do we have a complete header ?
if ( input . remaining ( ) < RECORD_HEADER_SIZE ) { throw new BufferUnderflowException ( ) ; } // Is it a handshake message ?
byte firstByte = input . get ( ) ; byte secondByte = input . get ( ) ; byte thirdByte = input . get ( ) ; if ( ( firstByte & 0x80 ) != 0 && thirdByte == 0x01 ) { // looks like a V2ClientHello
return Collections . emptyList ( ) ; } else if ( firstByte == 22 ) { // 22 : handshake record
return exploreTLSRecord ( input , firstByte , secondByte , thirdByte ) ; } else { throw UndertowMessages . MESSAGES . notHandshakeRecord ( ) ; } |
public class ShardedDistributedMessageQueue { /** * Extract a message body from a column
* @ param column
* @ return */
Message extractMessageFromColumn ( Column < MessageQueueEntry > column ) { } } | // Next , parse the message metadata and add a timeout entry
Message message = null ; try { ByteArrayInputStream bais = new ByteArrayInputStream ( column . getByteArrayValue ( ) ) ; message = mapper . readValue ( bais , Message . class ) ; } catch ( Exception e ) { LOG . warn ( "Error processing message " , e ) ; try { message = invalidMessageHandler . apply ( column . getStringValue ( ) ) ; } catch ( Exception e2 ) { LOG . warn ( "Error processing invalid message" , e2 ) ; } } return message ; |
public class MainScene { /** * Rotate root widget to make it facing to the front of the scene */
public void rotateToFront ( ) { } } | GVRTransform transform = mSceneRootObject . getTransform ( ) ; transform . setRotation ( 1 , 0 , 0 , 0 ) ; transform . rotateByAxisWithPivot ( - frontFacingRotation + 180 , 0 , 1 , 0 , 0 , 0 , 0 ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIPS ( ) { } } | if ( ipsEClass == null ) { ipsEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 282 ) ; } return ipsEClass ; |
public class GridNode { /** * Calculates the slope from the current to the supplied point .
* @ param node the node to which to calculate the slope to .
* @ return the slope . */
public double getSlopeTo ( GridNode node ) { } } | double slope = ( elevation - node . elevation ) / getDistance ( node ) ; return slope ; |
public class WildcardConfigPatternMatcher { /** * This method is public to be accessible by { @ link com . hazelcast . security . permission . InstancePermission } .
* @ param pattern configuration pattern to match with
* @ param itemName item name to match
* @ return { @ code true } if itemName matches , { @ code false } otherwise */
public boolean matches ( String pattern , String itemName ) { } } | final int index = pattern . indexOf ( '*' ) ; if ( index == - 1 ) { return itemName . equals ( pattern ) ; } final String firstPart = pattern . substring ( 0 , index ) ; if ( ! itemName . startsWith ( firstPart ) ) { return false ; } final String secondPart = pattern . substring ( index + 1 ) ; if ( ! itemName . endsWith ( secondPart ) ) { return false ; } return true ; |
public class InstanceGroupClient { /** * Removes one or more instances from the specified instance group , but does not delete those
* instances .
* < p > If the group is part of a backend service that has enabled connection draining , it can take
* up to 60 seconds after the connection draining duration before the VM instance is removed or
* deleted .
* < p > Sample code :
* < pre > < code >
* try ( InstanceGroupClient instanceGroupClient = InstanceGroupClient . create ( ) ) {
* ProjectZoneInstanceGroupName instanceGroup = ProjectZoneInstanceGroupName . of ( " [ PROJECT ] " , " [ ZONE ] " , " [ INSTANCE _ GROUP ] " ) ;
* InstanceGroupsRemoveInstancesRequest instanceGroupsRemoveInstancesRequestResource = InstanceGroupsRemoveInstancesRequest . newBuilder ( ) . build ( ) ;
* Operation response = instanceGroupClient . removeInstancesInstanceGroup ( instanceGroup , instanceGroupsRemoveInstancesRequestResource ) ;
* < / code > < / pre >
* @ param instanceGroup The name of the instance group where the specified instances will be
* removed .
* @ param instanceGroupsRemoveInstancesRequestResource
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation removeInstancesInstanceGroup ( ProjectZoneInstanceGroupName instanceGroup , InstanceGroupsRemoveInstancesRequest instanceGroupsRemoveInstancesRequestResource ) { } } | RemoveInstancesInstanceGroupHttpRequest request = RemoveInstancesInstanceGroupHttpRequest . newBuilder ( ) . setInstanceGroup ( instanceGroup == null ? null : instanceGroup . toString ( ) ) . setInstanceGroupsRemoveInstancesRequestResource ( instanceGroupsRemoveInstancesRequestResource ) . build ( ) ; return removeInstancesInstanceGroup ( request ) ; |
public class ApptentiveNotificationObserverList {
    /**
     * Posts the given notification to all registered observers.
     *
     * Observers wrapped in an {@link ObserverWeakReference} whose referent has
     * been garbage-collected are skipped during delivery and purged from the
     * list afterwards. Exceptions thrown by an individual observer are logged
     * and do not prevent delivery to the remaining observers.
     */
    void notifyObservers(ApptentiveNotification notification) {
        boolean hasLostReferences = false;
        // create a temporary list of observers to avoid concurrent modification errors
        // (an observer may add/remove observers while being notified)
        List<ApptentiveNotificationObserver> temp = new ArrayList<>(observers.size());
        for (int i = 0; i < observers.size(); ++i) {
            ApptentiveNotificationObserver observer = observers.get(i);
            // ObjectUtils.as returns null when the observer is not a weak reference wrapper
            ObserverWeakReference observerRef = ObjectUtils.as(observer, ObserverWeakReference.class);
            if (observerRef == null || !observerRef.isReferenceLost()) {
                temp.add(observer);
            } else {
                // remember that at least one referent was collected so we can purge below
                hasLostReferences = true;
            }
        }
        // notify observers; a failing observer must not break the others
        for (int i = 0; i < temp.size(); ++i) {
            try {
                temp.get(i).onReceiveNotification(notification);
            } catch (Exception e) {
                ApptentiveLog.e(e, "Exception while posting notification: %s", notification);
                logException(e); // TODO: add more context info
            }
        }
        // clean lost references; iterate backwards so removal does not shift
        // the indices of entries not yet visited
        if (hasLostReferences) {
            for (int i = observers.size() - 1; i >= 0; --i) {
                final ObserverWeakReference observerRef = ObjectUtils.as(observers.get(i), ObserverWeakReference.class);
                if (observerRef != null && observerRef.isReferenceLost()) {
                    observers.remove(i);
                }
            }
        }
    }
}
public class IterativeDataSet {
    /**
     * Always fails: an iteration head must never be translated directly.
     *
     * @param input the translated input operator (unused)
     * @return never returns
     * @throws InvalidProgramException always — reaching this method means the
     *         iteration was used as a sink or action without being closed
     */
    @Override
    protected org.apache.flink.api.common.operators.SingleInputOperator<T, T, ?> translateToDataFlow(Operator<T> input) {
        // All the translation magic happens when the iteration end is encountered.
        throw new InvalidProgramException("A data set that is part of an iteration was used as a sink or action." + " Did you forget to close the iteration?");
    }
}
public class BindParameterBinding {
    /**
     * Lower-cases the given argument if its type is String, Iterable&lt;String&gt;
     * or String[] and {@code shouldIgnoreCase} is {@code true}; otherwise the
     * argument is returned unchanged.
     *
     * @param argument the value to normalize; may be {@code null}
     * @param shouldIgnoreCase whether matching should be case-insensitive
     * @return the lower-cased value (a String, or a List of lower-cased
     *         Strings for array/iterable input), or the original argument
     */
    private Object ignoreArgumentCase(final Object argument, final boolean shouldIgnoreCase) {
        // Guard: nothing to do when case is significant, or there is no value.
        // (Previously a null argument with shouldIgnoreCase == true fell through
        // to argument.getClass() and threw a NullPointerException.)
        if (!shouldIgnoreCase || argument == null) {
            return argument;
        }
        if (argument instanceof String) {
            return ((String) argument).toLowerCase();
        }
        final List<String> lowered = new LinkedList<>();
        if (argument.getClass().isArray()) {
            // assumes the array is a String[] — TODO confirm callers never pass other array types
            final String[] array = (String[]) argument;
            for (final String string : array) {
                lowered.add(string.toLowerCase());
            }
        } else {
            // assumes any non-String, non-array argument is an Iterable of Strings
            @SuppressWarnings("unchecked")
            final Iterable<String> iterable = (Iterable<String>) argument;
            for (final Object object : iterable) {
                lowered.add(((String) object).toLowerCase());
            }
        }
        return lowered;
    }
}
public class ImageAnchor {
    /**
     * Render the image and hyperlink.
     *
     * Rewrites and URL-encodes the image source (and optional rollover image),
     * installs default onmouseover/onmouseout image-swap handlers when the
     * author did not supply any, then renders the anchor tag wrapping the
     * image tag.
     *
     * @return EVAL_PAGE so page evaluation continues
     * @throws JspException if a JSP exception has occurred
     */
    public int doEndTag() throws JspException {
        // report errors that may have occurred when the required attributes are being set
        if (hasErrors())
            return reportAndExit(EVAL_PAGE);
        HttpServletRequest req = (HttpServletRequest) pageContext.getRequest();
        // build the anchor into the results
        // render the anchor tag
        WriteRenderAppender writer = new WriteRenderAppender(pageContext);
        TagRenderingBase trb = TagRenderingBase.Factory.getRendering(TagRenderingBase.ANCHOR_TAG, req);
        ByRef script = new ByRef();
        if (!createAnchorBeginTag(req, script, trb, writer, REQUIRED_ATTR)) {
            // anchor creation failed: report errors, flush any generated
            // script, release tag state and stop rendering this tag
            reportErrors();
            if (!script.isNull())
                write(script.getRef().toString());
            localRelease();
            return EVAL_PAGE;
        }
        // set the source and lowsrc attributes
        // the lowsrc is deprecated and should be removed.
        HttpServletResponse response = (HttpServletResponse) pageContext.getResponse();
        if (_imgState.src != null) {
            try {
                // rewrite the resource URL relative to the webapp and encode it for the session
                String uri = PageFlowTagUtils.rewriteResourceURL(pageContext, _imgState.src, null, null);
                _imgState.src = response.encodeURL(uri);
            } catch (URISyntaxException e) {
                // report the error...
                String s = Bundle.getString("Tags_Image_URLException", new Object[]{_imgState.src, e.getMessage()});
                registerTagError(s, e);
            }
        }
        // set the rollover image
        if (_rolloverImage != null) {
            try {
                String uri = PageFlowTagUtils.rewriteResourceURL(pageContext, _rolloverImage, null, null);
                _rolloverImage = response.encodeURL(uri);
            } catch (URISyntaxException e) {
                // report the error...
                String s = Bundle.getString("Tags_Rollover_Image_URLException", new Object[]{_rolloverImage, e.getMessage()});
                registerTagError(s, e);
            }
            // install default image-swap handlers, but only when the author
            // did not register explicit onmouseout/onmouseover JavaScript
            if (getJavaScriptAttribute(ONMOUSEOUT) == null) {
                String s = "swapImage(this,'" + response.encodeURL(_imgState.src) + "')";
                _imgState.registerAttribute(AbstractHtmlState.ATTR_JAVASCRIPT, ONMOUSEOUT, s);
            }
            if (getJavaScriptAttribute(ONMOUSEOVER) == null) {
                String s = "swapImage(this,'" + response.encodeURL(_rolloverImage) + "')";
                _imgState.registerAttribute(AbstractHtmlState.ATTR_JAVASCRIPT, ONMOUSEOVER, s);
            }
        }
        // render the image tag.
        TagRenderingBase br = TagRenderingBase.Factory.getRendering(TagRenderingBase.IMAGE_TAG, req);
        br.doStartTag(writer, _imgState);
        br.doEndTag(writer);
        // write the end tag
        trb.doEndTag(writer);
        if (!script.isNull())
            write(script.getRef().toString());
        localRelease();
        return EVAL_PAGE;
    }
}
public class MuzeiArtSource {
    /**
     * Sets the list of available user-visible commands for the source.
     * Commands can be built-in, such as
     * {@link #BUILTIN_COMMAND_ID_NEXT_ARTWORK}, or custom-defined. Custom
     * commands must have identifiers below {@link #MAX_CUSTOM_COMMAND_ID}.
     *
     * @param commands the new set of user-visible commands the source supports.
     * @see #BUILTIN_COMMAND_ID_NEXT_ARTWORK
     * @see #MAX_CUSTOM_COMMAND_ID
     */
    protected final void setUserCommands(List<UserCommand> commands) {
        mCurrentState.setUserCommands(commands);
        // Coalesce rapid successive state changes: cancel any pending publish
        // before re-posting so only the latest state gets published.
        mServiceHandler.removeCallbacks(mPublishStateRunnable);
        mServiceHandler.post(mPublishStateRunnable);
    }
}
public class ContainerTracker { /** * Get all shutdown descriptors for a given pom label and remove it from the tracker . The descriptors
* are returned in reverse order of their registration .
* If no pom label is given , then all descriptors are returned .
* @ param gavLabel the label for which to get the descriptors or < code > null < / code > for all descriptors
* @ return the descriptors for the given label or an empty collection */
public synchronized Collection < ContainerShutdownDescriptor > removeShutdownDescriptors ( GavLabel gavLabel ) { } } | List < ContainerShutdownDescriptor > descriptors ; if ( gavLabel != null ) { descriptors = removeFromPomLabelMap ( gavLabel ) ; removeFromPerContainerMap ( descriptors ) ; } else { // All entries are requested
descriptors = new ArrayList < > ( shutdownDescriptorPerContainerMap . values ( ) ) ; clearAllMaps ( ) ; } Collections . reverse ( descriptors ) ; return descriptors ; |
public class KTypeArrayDeque { /** * Inserts all elements from the given container to the end of this deque .
* @ param container The container to iterate over .
* @ return Returns the number of elements actually added as a result of this
* call . */
public int addLast ( KTypeContainer < ? extends KType > container ) { } } | int size = container . size ( ) ; ensureBufferSpace ( size ) ; for ( KTypeCursor < ? extends KType > cursor : container ) { addLast ( cursor . value ) ; } return size ; |
public class ApplicationInfoMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * Writes each field of {@code applicationInfo} through the protocol
     * marshaller using its pre-built binding descriptor.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ApplicationInfo applicationInfo, ProtocolMarshaller protocolMarshaller) {
        if (applicationInfo == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(applicationInfo.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(applicationInfo.getApplicationName(), APPLICATIONNAME_BINDING);
            protocolMarshaller.marshall(applicationInfo.getCreateTime(), CREATETIME_BINDING);
            protocolMarshaller.marshall(applicationInfo.getLinkedToGitHub(), LINKEDTOGITHUB_BINDING);
            protocolMarshaller.marshall(applicationInfo.getGitHubAccountName(), GITHUBACCOUNTNAME_BINDING);
            protocolMarshaller.marshall(applicationInfo.getComputePlatform(), COMPUTEPLATFORM_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure in the SDK's client exception type
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ListVoiceConnectorsResult { /** * The details of the Amazon Chime Voice Connectors .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setVoiceConnectors ( java . util . Collection ) } or { @ link # withVoiceConnectors ( java . util . Collection ) } if you
* want to override the existing values .
* @ param voiceConnectors
* The details of the Amazon Chime Voice Connectors .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListVoiceConnectorsResult withVoiceConnectors ( VoiceConnector ... voiceConnectors ) { } } | if ( this . voiceConnectors == null ) { setVoiceConnectors ( new java . util . ArrayList < VoiceConnector > ( voiceConnectors . length ) ) ; } for ( VoiceConnector ele : voiceConnectors ) { this . voiceConnectors . add ( ele ) ; } return this ; |
public class IconicsDrawable {
    /**
     * Set the color of the drawable.
     *
     * @param colorResId The color resource, from your R file.
     * @return The current IconicsDrawable for chaining.
     */
    @NonNull
    public IconicsDrawable colorRes(@ColorRes int colorResId) {
        // Resolve the resource id to an ARGB color value and delegate to color(int).
        return color(ContextCompat.getColor(mContext, colorResId));
    }
}
public class Bindable { /** * Create a new { @ link Bindable } of the type of the specified instance with an
* existing value equal to the instance .
* @ param < T > the source type
* @ param instance the instance ( must not be { @ code null } )
* @ return a { @ link Bindable } instance
* @ see # of ( ResolvableType )
* @ see # withExistingValue ( Object ) */
@ SuppressWarnings ( "unchecked" ) public static < T > Bindable < T > ofInstance ( T instance ) { } } | Assert . notNull ( instance , "Instance must not be null" ) ; Class < T > type = ( Class < T > ) instance . getClass ( ) ; return of ( type ) . withExistingValue ( instance ) ; |
public class AbstractJSSEProvider {
    /**
     * Builds an SSLContext from the given connection info and configuration
     * and returns its socket factory.
     *
     * @see com.ibm.websphere.ssl.JSSEProvider#getSSLSocketFactory(java.util.Map,
     *      com.ibm.ws.ssl.config.SSLConfig)
     * @throws SSLException if no SSLContext could be created
     */
    @Override
    public SSLSocketFactory getSSLSocketFactory(Map<String, Object> connectionInfo, SSLConfig config) throws Exception {
        // entry trace is guarded to avoid argument-array allocation when tracing is off
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "getSSLSocketFactory", new Object[]{connectionInfo});
        SSLContext context = getSSLContext(connectionInfo, config);
        if (context != null) {
            SSLSocketFactory factory = context.getSocketFactory();
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                Tr.exit(tc, "getSSLSocketFactory -> " + factory.getClass().getName());
            return factory;
        }
        // no context could be built for this configuration
        throw new SSLException("SSLContext could not be created to return an SSLSocketFactory.");
    }
}
public class LocalFileBlockWriter { /** * Writes data to the block from an input { @ link ByteBuffer } .
* @ param offset starting offset of the block file to write
* @ param inputBuf { @ link ByteBuffer } that input data is stored in
* @ return the size of data that was written */
private long write ( long offset , ByteBuffer inputBuf ) throws IOException { } } | int inputBufLength = inputBuf . limit ( ) - inputBuf . position ( ) ; MappedByteBuffer outputBuf = mLocalFileChannel . map ( FileChannel . MapMode . READ_WRITE , offset , inputBufLength ) ; outputBuf . put ( inputBuf ) ; int bytesWritten = outputBuf . limit ( ) ; BufferUtils . cleanDirectBuffer ( outputBuf ) ; return bytesWritten ; |
public class Tracer {
    /**
     * Destroy managed connection pool.
     *
     * Emits a MANAGED_CONNECTION_POOL_DESTROY trace event; the pool instance is
     * identified by its identity hash code (hex) rather than a reference.
     *
     * @param poolName The name of the pool
     * @param mcp The managed connection pool
     */
    public static synchronized void destroyManagedConnectionPool(String poolName, Object mcp) {
        log.tracef("%s", new TraceEvent(poolName, Integer.toHexString(System.identityHashCode(mcp)), TraceEvent.MANAGED_CONNECTION_POOL_DESTROY, "NONE"));
    }
}
public class GraphIterator {
    /**
     * Create an instance of GraphIterationElement.
     *
     * Factory method; declared protected so subclasses can supply a
     * specialized element type.
     *
     * @param previous_segment is the previous element that permits to reach
     *        this object during an iteration
     * @param segment is the current segment
     * @param point is the point on which the iteration arrived on the current
     *        segment.
     * @param distanceToReach is the distance that is already consumed to reach
     *        the segment.
     * @param distanceToConsume is the rest of distance to consume including
     *        the segment.
     * @return a graph iteration element.
     */
    protected GraphIterationElement<ST, PT> newIterationElement(ST previous_segment, ST segment, PT point, double distanceToReach, double distanceToConsume) {
        return new GraphIterationElement<>(previous_segment, segment, point, distanceToReach, distanceToConsume);
    }
}
public class AbstractExternalAuthenticationController { /** * Utility method that may be used to obtain the Relay State for the request .
* @ param context
* the profile context
* @ return the relay state
* @ see # getRelayState ( HttpServletRequest ) */
protected String getRelayState ( ProfileRequestContext < ? , ? > context ) { } } | SAMLBindingContext samlBinding = this . samlBindingContextLookupStrategy . apply ( context ) ; return samlBinding != null ? samlBinding . getRelayState ( ) : null ; |
public class HttpHeaders { /** * Set the given date under the given header name after formatting it as a string
* using the pattern { @ code " EEE , dd MMM yyyy HH : mm : ss zzz " } . The equivalent of
* { @ link # set ( String , String ) } but for date headers . */
public void setDate ( String headerName , long date ) { } } | SimpleDateFormat dateFormat = new SimpleDateFormat ( DATE_FORMATS [ 0 ] , Locale . US ) ; dateFormat . setTimeZone ( GMT ) ; set ( headerName , dateFormat . format ( new Date ( date ) ) ) ; |
public class AbstractTreebankParserParams {
    /**
     * Returns a collection of unordered (but directed!) untyped word-word
     * dependencies for the tree.
     *
     * Delegates to the generic objectifier with an unordered, untyped
     * dependency typer built from the given head finder.
     */
    public static Collection<List<String>> unorderedUntypedDependencyObjectify(Tree t, HeadFinder hf, TreeTransformer collinizer) {
        return dependencyObjectify(t, hf, collinizer, new UnorderedUntypedDependencyTyper(hf));
    }
}
public class WebSocketServerHandshakerFactory { /** * Instances a new handshaker
* @ return A new WebSocketServerHandshaker for the requested web socket version . Null if web
* socket version is not supported . */
public WebSocketServerHandshaker newHandshaker ( HttpRequest req ) { } } | CharSequence version = req . headers ( ) . get ( HttpHeaderNames . SEC_WEBSOCKET_VERSION ) ; if ( version != null ) { if ( version . equals ( WebSocketVersion . V13 . toHttpHeaderValue ( ) ) ) { // Version 13 of the wire protocol - RFC 6455 ( version 17 of the draft hybi specification ) .
return new WebSocketServerHandshaker13 ( webSocketURL , subprotocols , allowExtensions , maxFramePayloadLength , allowMaskMismatch ) ; } else if ( version . equals ( WebSocketVersion . V08 . toHttpHeaderValue ( ) ) ) { // Version 8 of the wire protocol - version 10 of the draft hybi specification .
return new WebSocketServerHandshaker08 ( webSocketURL , subprotocols , allowExtensions , maxFramePayloadLength , allowMaskMismatch ) ; } else if ( version . equals ( WebSocketVersion . V07 . toHttpHeaderValue ( ) ) ) { // Version 8 of the wire protocol - version 07 of the draft hybi specification .
return new WebSocketServerHandshaker07 ( webSocketURL , subprotocols , allowExtensions , maxFramePayloadLength , allowMaskMismatch ) ; } else { return null ; } } else { // Assume version 00 where version header was not specified
return new WebSocketServerHandshaker00 ( webSocketURL , subprotocols , maxFramePayloadLength ) ; } |
public class UTF16 {
    /**
     * Inserts char32 codepoint into target at the argument offset16. If the
     * offset16 is in the middle of a supplementary codepoint, char32 will be
     * inserted after the supplementary codepoint. Limit increases by one if
     * codepoint is non-supplementary, 2 otherwise.
     * The overall effect is exactly as if the argument were converted to a
     * string by the method valueOf(char) and the characters in that string
     * were then inserted into target at the position indicated by offset16.
     * The offset argument must be greater than or equal to 0, and less than
     * or equal to the limit.
     *
     * @param target Char array to insert to
     * @param limit End index of the char array, limit &lt;= target.length
     * @param offset16 Offset which char32 will be inserted in
     * @param char32 Codepoint to be inserted
     * @return new limit size
     * @exception IndexOutOfBoundsException Thrown if offset16 is invalid.
     */
    public static int insert(char target[], int limit, int offset16, int char32) {
        // Convert the code point to its 1- or 2-unit UTF-16 string form.
        String str = valueOf(char32);
        // Never split a surrogate pair: if offset16 lands between a lead and
        // trail surrogate, advance past the pair before inserting.
        if (offset16 != limit && bounds(target, 0, limit, offset16) == TRAIL_SURROGATE_BOUNDARY) {
            offset16++;
        }
        int size = str.length();
        // Reject the insert when the array cannot hold the extra unit(s).
        if (limit + size > target.length) {
            throw new ArrayIndexOutOfBoundsException(offset16 + size);
        }
        // Shift the tail right by 'size' to open a gap, then copy in the
        // one or two UTF-16 code units.
        System.arraycopy(target, offset16, target, offset16 + size, limit - offset16);
        target[offset16] = str.charAt(0);
        if (size == 2) {
            target[offset16 + 1] = str.charAt(1);
        }
        return limit + size;
    }
}
public class CmsUnlockUser { /** * Unlocks the user . < p >
* @ throws Exception if something goes wrong */
public void actionUnlockUser ( ) throws Exception { } } | CmsUUID userId = new CmsUUID ( m_paramUserId ) ; CmsObject cms = getCms ( ) ; CmsUser user = cms . readUser ( userId ) ; OpenCms . getLoginManager ( ) . unlockUser ( getCms ( ) , user ) ; actionCloseDialog ( ) ; |
public class SimpleNotificationEmitter {
    /**
     * Send notification. The suppliers are called only if there are listeners,
     * so callers can pass expensive computations without paying for them when
     * nobody is listening.
     *
     * @param textSupplier supplies the notification text
     * @param userDataSupplier supplies the user data payload
     * @param timestampSupplier supplies the notification timestamp
     */
    public synchronized void sendNotification(Supplier<String> textSupplier, Supplier<U> userDataSupplier, LongSupplier timestampSupplier) {
        // lazy evaluation: only materialize supplier values when listeners exist
        if (!map.isEmpty()) {
            sendNotification(textSupplier.get(), userDataSupplier.get(), timestampSupplier.getAsLong());
        }
    }
}
public class IVector { /** * Multiplication from a vectors with an double */
public IVector mul ( double b ) { } } | IVector result = new IVector ( size ) ; mul ( b , result ) ; return result ; |
public class ConllReader {
    /**
     * Reads a single document from the underlying reader in CoNLL column
     * format and returns its sentences.
     *
     * Input format (token, POS tag, chunk tag, entity tag per line), e.g.:
     * <pre>
     * -DOCSTART- -X- O O
     * CRICKET NNP I-NP O
     * LEICESTERSHIRE NNP I-NP I-ORG
     * ...
     * </pre>
     * Blank lines separate sentences; a {@code -DOCSTART-} line terminates the
     * current document.
     *
     * @return the sentences of one document; may be empty at end of input
     * @throws IOException if reading from the underlying reader fails
     */
    private Collection<Sentence> readDoc() throws IOException {
        final Collection<Sentence> sentences = new ArrayList<>();
        // accumulated surface text of the sentence currently being built
        StringBuilder sentence = new StringBuilder();
        // tokens of the sentence currently being built
        Collection<ConllToken> tokens = new ArrayList<>();
        // character offset of the next token within the sentence text
        int tokenStartIndex = 0;
        for (String line; (line = r.readLine()) != null; ) {
            // empty line means end-of-sentence
            if (line.isEmpty()) {
                // if we have an empty line and something in the actual sentence then add it
                if (sentence.length() > 0) {
                    sentences.add(new Sentence(sentence.toString(), tokens));
                    sentence = new StringBuilder();
                    tokens = new ArrayList<>();
                    tokenStartIndex = 0;
                }
            } else {
                // this assumes there is ever only a single space between token and classifiers
                final String[] parts = SPACES.split(line);
                switch (parts[0]) {
                    case "-DOCSTART-":
                        // we use DOCSTART as the end of the current doc, also throw away the following empty line
                        r.readLine();
                        // no need to think about a current sentence, the previous empty line will have saved it
                        return sentences;
                    default:
                        if (sentence.length() > 0)
                            sentence.append(" ");
                        sentence.append(parts[0]);
                        tokens.add(new ConllToken(tokenStartIndex, parts[0], parts[1] + " " + parts[2], parts[3]));
                        tokenStartIndex += parts[0].length() + 1; // add 1 for the space between tokens
                }
            }
        }
        // if we run out of data in the file, flush the trailing sentence
        if (sentence.length() > 0)
            sentences.add(new Sentence(sentence.toString(), tokens));
        return sentences;
    }
}
public class Attributes { /** * Sets showtitle value as an alias for notitle !
* @ param showTitle
* value . */
public void setShowTitle ( boolean showTitle ) { } } | if ( showTitle ) { this . attributes . put ( SHOW_TITLE , true ) ; this . attributes . remove ( NOTITLE ) ; } else { this . attributes . put ( NOTITLE , true ) ; this . attributes . remove ( SHOW_TITLE ) ; } |
public class SearchSkillGroupsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * Writes each field of {@code searchSkillGroupsRequest} through the
     * protocol marshaller using its pre-built binding descriptor.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(SearchSkillGroupsRequest searchSkillGroupsRequest, ProtocolMarshaller protocolMarshaller) {
        if (searchSkillGroupsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(searchSkillGroupsRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(searchSkillGroupsRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(searchSkillGroupsRequest.getFilters(), FILTERS_BINDING);
            protocolMarshaller.marshall(searchSkillGroupsRequest.getSortCriteria(), SORTCRITERIA_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure in the SDK's client exception type
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.