signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ClusterZKImpl { /** * Register Host to cluster .
* @ param host Host to be part of cluster . */
@ Override @ Synchronized public void registerHost ( Host host ) { } } | Preconditions . checkNotNull ( host , "host" ) ; Exceptions . checkArgument ( ! entryMap . containsKey ( host ) , "host" , "host is already registered to cluster." ) ; String hostPath = ZKPaths . makePath ( getPathPrefix ( ) , host . toString ( ) ) ; PersistentNode node = new PersistentNode ( client , CreateMode . EPHEMERAL , false , hostPath , host . toBytes ( ) ) ; node . start ( ) ; // start creation of ephemeral node in background .
entryMap . put ( host , node ) ; |
public class Error { /** * Thrown if the configured field hasn ' t classes parameter .
* @ param fieldName name of the field
* @ param aClass class ' s field */
public static void classesAbsent ( String fieldName , Class < ? > aClass ) { } } | throw new MappingErrorException ( MSG . INSTANCE . message ( mappingErrorRelationalException2 , fieldName , aClass . getSimpleName ( ) ) ) ; |
public class ZipUtils {

    /**
     * Decompresses the given GZIP-compressed byte array without transforming it
     * into a String.
     *
     * @param compressed GZIP-compressed input bytes
     * @return the decompressed data as a byte array
     * @throws IOException if the input is not valid GZIP data or reading fails
     */
    public static byte[] decompressBytesNonBase64(byte[] compressed) throws IOException {
        ByteArrayInputStream is = new ByteArrayInputStream(compressed);
        try (InputStream gis = new GZIPInputStream(is);
             ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            // Copy in chunks using only the standard library; the original used
            // commons-io IOUtils.toByteArray for this, an unnecessary dependency.
            byte[] buf = new byte[8192];
            int n;
            while ((n = gis.read(buf)) != -1) {
                out.write(buf, 0, n);
            }
            return out.toByteArray();
        }
    }
}
public class JschUtil { /** * 打开SSH会话 , 并绑定远程端口到本地的一个随机端口
* @ param sshConn SSH连接信息对象
* @ param remoteHost 远程主机
* @ param remotePort 远程端口
* @ return 映射后的本地端口
* @ throws JschRuntimeException 连接异常 */
public static int openAndBindPortToLocal ( Connector sshConn , String remoteHost , int remotePort ) throws JschRuntimeException { } } | final Session session = openSession ( sshConn . getHost ( ) , sshConn . getPort ( ) , sshConn . getUser ( ) , sshConn . getPassword ( ) ) ; if ( session == null ) { throw new JschRuntimeException ( "Error to create SSH Session!" ) ; } final int localPort = generateLocalPort ( ) ; bindPort ( session , remoteHost , remotePort , localPort ) ; return localPort ; |
public class FieldWriterImpl { /** * { @ inheritDoc } */
protected void addInheritedSummaryLink ( ClassDoc cd , ProgramElementDoc member , Content linksTree ) { } } | linksTree . addContent ( writer . getDocLink ( LinkInfoImpl . Kind . MEMBER , cd , ( MemberDoc ) member , member . name ( ) , false ) ) ; |
public class OneShotSQLGeneratorEngine { /** * Returns the valid SQL lexical form of rdf literals based on the current
* database and the datatype specified in the function predicate .
* For example , if the function is xsd : boolean , and the current database is
* H2 , the SQL lexical form would be for " true " " TRUE " ( or any combination
* of lower and upper case ) or " 1 " is always
* @ param constant
* @ return */
private String getSQLLexicalForm ( ValueConstant constant ) { } } | if ( constant . equals ( termFactory . getNullConstant ( ) ) ) { // TODO : we should not have to treat NULL as a special case !
// It is because this constant is currently of type COL _ TYPE . STRING !
return "NULL" ; } switch ( COL_TYPE . getColType ( constant . getType ( ) . getIRI ( ) ) ) { case BNODE : case OBJECT : case STRING : return sqladapter . getSQLLexicalFormString ( constant . getValue ( ) ) ; case BOOLEAN : boolean v = XsdDatatypeConverter . parseXsdBoolean ( constant . getValue ( ) ) ; return sqladapter . getSQLLexicalFormBoolean ( v ) ; case DATETIME : return sqladapter . getSQLLexicalFormDatetime ( constant . getValue ( ) ) ; case DATETIME_STAMP : return sqladapter . getSQLLexicalFormDatetimeStamp ( constant . getValue ( ) ) ; case DECIMAL : case DOUBLE : case INTEGER : case LONG : case FLOAT : case NON_POSITIVE_INTEGER : case INT : case UNSIGNED_INT : case NEGATIVE_INTEGER : case POSITIVE_INTEGER : case NON_NEGATIVE_INTEGER : return constant . getValue ( ) ; case NULL : return "NULL" ; default : return "'" + constant . getValue ( ) + "'" ; } |
public class EnumMap { /** * Returns < tt > true < / tt > if this map maps one or more keys to the
* specified value .
* @ param value the value whose presence in this map is to be tested
* @ return < tt > true < / tt > if this map maps one or more keys to this value */
public boolean containsValue ( Object value ) { } } | value = maskNull ( value ) ; for ( Object val : vals ) if ( value . equals ( val ) ) return true ; return false ; |
public class CPTaxCategoryUtil { /** * Returns the cp tax categories before and after the current cp tax category in the ordered set where groupId = & # 63 ; .
* @ param CPTaxCategoryId the primary key of the current cp tax category
* @ param groupId the group ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the previous , current , and next cp tax category
* @ throws NoSuchCPTaxCategoryException if a cp tax category with the primary key could not be found */
public static CPTaxCategory [ ] findByGroupId_PrevAndNext ( long CPTaxCategoryId , long groupId , OrderByComparator < CPTaxCategory > orderByComparator ) throws com . liferay . commerce . product . exception . NoSuchCPTaxCategoryException { } } | return getPersistence ( ) . findByGroupId_PrevAndNext ( CPTaxCategoryId , groupId , orderByComparator ) ; |
public class AbstractGeometry { /** * Basically this function checks if the geometry is self - intersecting or not .
* @ return True or false . True if there are no self - intersections in the geometry . */
public boolean isSimple ( ) { } } | if ( isEmpty ( ) ) { return true ; } if ( getNumGeometries ( ) > 1 ) { for ( int n = 0 ; n < getNumGeometries ( ) ; n ++ ) { if ( ! getGeometryN ( n ) . isSimple ( ) ) { return false ; } } } else { final Coordinate [ ] coords1 = getCoordinates ( ) ; final Coordinate [ ] coords2 = getCoordinates ( ) ; if ( coords1 . length > 1 && coords2 . length > 1 ) { for ( int i = 0 ; i < coords2 . length - 1 ; i ++ ) { for ( int j = 0 ; j < coords1 . length - 1 ; j ++ ) { if ( Mathlib . lineIntersects ( coords2 [ i ] , coords2 [ i + 1 ] , coords1 [ j ] , coords1 [ j + 1 ] ) ) { return false ; } } } } else { // TODO implement me
} } return true ; |
public class GitlabAPI { /** * Get details information of the runner with the specified id .
* @ param id Runner id .
* @ return Extensive GitlabRunner Details .
* @ throws IOException on gitlab api call error */
public GitlabRunner getRunnerDetail ( int id ) throws IOException { } } | String tailUrl = String . format ( "%s/%d" , GitlabRunner . URL , id ) ; return retrieve ( ) . to ( tailUrl , GitlabRunner . class ) ; |
public class DynoJedisUtils { /** * This adds MAX _ SCORE of elements in a sorted set
* @ param key
* @ return " OK " if all write operations have succeeded
* @ throws Exception */
public String nonPipelineZADD ( String key , DataGenerator dataGenerator , String z_key_prefix , int max_score ) throws Exception { } } | String zKey = z_key_prefix + key ; int success = 0 ; long returnOp = 0 ; for ( int i = 0 ; i < max_score ; i ++ ) { returnOp = jedisClient . get ( ) . zadd ( zKey , i , dataGenerator . getRandomValue ( ) + "__" + zKey ) ; success += returnOp ; } // all the above operations will separate entries
if ( success != max_score - 1 ) { return null ; } return "OK" ; |
public class MessageClient { /** * Please use { @ link cn . jmessage . api . reportv2 . ReportClient # v2GetUserMessagesByCursor ( String , String ) }
* Get message list with cursor , the cursor will effective in 120 seconds . And will
* return same count of messages as first request .
* @ param cursor First request will return cursor
* @ return MessageListResult
* @ throws APIConnectionException connect exception
* @ throws APIRequestException request exception */
@ Deprecated public MessageListResult getMessageListByCursor ( String cursor ) throws APIConnectionException , APIRequestException { } } | if ( null != cursor ) { String requestUrl = reportBaseUrl + v2_messagePath + "?cursor=" + cursor ; ResponseWrapper response = _httpClient . sendGet ( requestUrl ) ; return MessageListResult . fromResponse ( response , MessageListResult . class ) ; } else { throw new IllegalArgumentException ( "the cursor parameter should not be null" ) ; } |
public class DecodedVorbisAudioInputStream { /** * Reads from the oggBitStream _ a specified number of Bytes ( bufferSize _ )
* worth starting at index and puts them in the specified buffer [ ] .
* @ param buffer
* @ param index
* @ param bufferSize _
* @ return the number of bytes read or - 1 if error . */
private int readFromStream ( byte [ ] buffer , int index , int bufferSize_ ) { } } | int bytes ; try { bytes = oggBitStream_ . read ( buffer , index , bufferSize_ ) ; } catch ( Exception e ) { LOG . log ( Level . FINE , "Cannot Read Selected Song" ) ; bytes = - 1 ; } currentBytes = currentBytes + bytes ; return bytes ; |
public class TSGetTimeZoneResp { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */
public boolean isSet ( _Fields field ) { } } | if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case STATUS : return isSetStatus ( ) ; case TIME_ZONE : return isSetTimeZone ( ) ; } throw new IllegalStateException ( ) ; |
public class CommonOps_DDF4 { /** * Changes the sign of every element in the vector . < br >
* < br >
* a < sub > i < / sub > = - a < sub > i < / sub >
* @ param a A vector . Modified . */
public static void changeSign ( DMatrix4 a ) { } } | a . a1 = - a . a1 ; a . a2 = - a . a2 ; a . a3 = - a . a3 ; a . a4 = - a . a4 ; |
public class ArrayContext {

    /**
     * Creates an array of primitive {@code double} values from the given array
     * of {@link Double} values. {@code null} entries are skipped, so the result
     * may be shorter than the input.
     *
     * @param doubles the array of Doubles to convert; may be {@code null}
     * @return the array of primitive doubles, or {@code null} if the input was {@code null}
     */
    public double[] createDoubleArray(Double[] doubles) {
        if (doubles == null) {
            return null;
        }
        // Stream pipeline replaces the original two-pass copy-and-shrink logic;
        // behaviour is identical: unwrap, dropping nulls.
        return Arrays.stream(doubles)
                .filter(Objects::nonNull)
                .mapToDouble(Double::doubleValue)
                .toArray();
    }
}
public class PairtreeUtils { /** * Maps the supplied ID to a Pairtree path .
* @ param aID An ID to map to a Pairtree path
* @ return The Pairtree path for the supplied ID */
public static String mapToPtPath ( final String aID ) { } } | Objects . requireNonNull ( aID ) ; final String encodedID = encodeID ( aID ) ; final List < String > shorties = new ArrayList < > ( ) ; int start = 0 ; while ( start < encodedID . length ( ) ) { int end = start + myShortyLength ; if ( end > encodedID . length ( ) ) { end = encodedID . length ( ) ; } shorties . add ( encodedID . substring ( start , end ) ) ; start = end ; } return concat ( shorties . toArray ( new String [ 0 ] ) ) ; |
public class StringUtils { /** * return the value in a JSON friendly way */
public static String toJsonString ( Object value ) { } } | if ( value == null ) { return "null" ; } else if ( value . getClass ( ) . equals ( String . class ) ) { return "\"" + StringUtils . jsonEncoding ( value . toString ( ) ) + "\"" ; } // else it ' s a RawJson , Boolean or Number so no escaping or quotes
else { return value . toString ( ) ; } |
public class Instance { /** * Sets the tags for this instance .
* @ return a zone operation if the set request was issued correctly , { @ code null } if the instance
* was not found
* @ throws ComputeException upon failure */
public Operation setTags ( Tags tags , OperationOption ... options ) { } } | return compute . setTags ( getInstanceId ( ) , tags , options ) ; |
public class OffsetDateTimeField { /** * Add the specified amount of offset units to the specified time
* instant . The amount added may be negative .
* @ param instant the time instant in millis to update .
* @ param amount the amount of units to add ( can be negative ) .
* @ return the updated time instant . */
public long add ( long instant , int amount ) { } } | instant = super . add ( instant , amount ) ; FieldUtils . verifyValueBounds ( this , get ( instant ) , iMin , iMax ) ; return instant ; |
public class MimeTypeInfoManager { /** * Get the primary ( = first ) mime type associated with the specified filename .
* @ param sFilename
* The filename to retrieve the primary mime type from . May neither be
* < code > null < / code > nor empty .
* @ return < code > null < / code > if no mime type is associated with the extension
* of the passed filename */
@ Nullable public String getPrimaryMimeTypeStringForFilename ( @ Nonnull @ Nonempty final String sFilename ) { } } | ValueEnforcer . notEmpty ( sFilename , "Filename" ) ; final String sExtension = FilenameHelper . getExtension ( sFilename ) ; return getPrimaryMimeTypeStringForExtension ( sExtension ) ; |
public class PatternTokenizer { /** * Sets the extra characters to be quoted in literals
* @ param syntaxCharacters Characters to be set as extra quoting characters .
* @ return A PatternTokenizer object in which characters are specified as extra quoting characters . */
public PatternTokenizer setExtraQuotingCharacters ( UnicodeSet syntaxCharacters ) { } } | this . extraQuotingCharacters = ( UnicodeSet ) syntaxCharacters . clone ( ) ; needingQuoteCharacters = null ; return this ; |
public class LocalQueueBrowser { /** * ( non - Javadoc )
* @ see javax . jms . QueueBrowser # getEnumeration ( ) */
@ Override public Enumeration getEnumeration ( ) throws JMSException { } } | checkNotClosed ( ) ; // Security check
checkDestinationPermission ( ) ; LocalQueueBrowserEnumeration queueBrowserEnum = new LocalQueueBrowserEnumeration ( this , ( LocalQueue ) queue , parsedSelector , UUIDProvider . getInstance ( ) . getShortUUID ( ) ) ; registerEnumeration ( queueBrowserEnum ) ; return queueBrowserEnum ; |
public class ListWebsiteCertificateAuthoritiesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListWebsiteCertificateAuthoritiesRequest listWebsiteCertificateAuthoritiesRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( listWebsiteCertificateAuthoritiesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listWebsiteCertificateAuthoritiesRequest . getFleetArn ( ) , FLEETARN_BINDING ) ; protocolMarshaller . marshall ( listWebsiteCertificateAuthoritiesRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listWebsiteCertificateAuthoritiesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class MD4 { /** * { @ inheritDoc } */
protected void engineUpdate ( byte b ) { } } | int pos = ( int ) ( msgLength % BYTE_BLOCK_LENGTH ) ; buffer [ pos ] = b ; msgLength ++ ; // If buffer contains enough data then process it .
if ( pos == ( BYTE_BLOCK_LENGTH - 1 ) ) { process ( buffer , 0 ) ; } |
public class FastDateFormat { /** * 获得 { @ link FastDateFormat } 实例 < br >
* 支持缓存
* @ param pattern 使用 { @ link java . text . SimpleDateFormat } 相同的日期格式
* @ param timeZone 时区 { @ link TimeZone }
* @ param locale { @ link Locale } 日期地理位置
* @ return { @ link FastDateFormat }
* @ throws IllegalArgumentException 日期格式问题 */
public static FastDateFormat getInstance ( final String pattern , final TimeZone timeZone , final Locale locale ) { } } | return cache . getInstance ( pattern , timeZone , locale ) ; |
public class StringParser { /** * Parse the given { @ link String } as { @ link Integer } with radix
* { @ value # DEFAULT _ RADIX } .
* @ param sStr
* The string to parse . May be < code > null < / code > .
* @ return < code > null < / code > if the string does not represent a valid value . */
@ Nullable public static Integer parseIntObj ( @ Nullable final String sStr ) { } } | return parseIntObj ( sStr , DEFAULT_RADIX , null ) ; |
public class MockPropertyService { /** * Load property values from a property file . A property values are assumed local unless the
* property name is prefixed with " global . " . You may specify an instance name by appending to
* the property name a " \ " followed by the instance name . For example :
* < pre >
* prop1 = value1
* global . prop2 = value2
* prop3 \ inst3 = value3
* global . prop4 \ inst4 = value4
* < / pre >
* @ param resource Property file resource .
* @ throws IOException IO exception . */
public void addResource ( Resource resource ) throws IOException { } } | Properties props = PropertiesLoaderUtils . loadProperties ( resource ) ; for ( Entry < ? , ? > entry : props . entrySet ( ) ) { String key = ( String ) entry . getKey ( ) ; String value = StringEscapeUtils . unescapeJava ( ( String ) entry . getValue ( ) ) ; boolean global = key . startsWith ( "global." ) ; key = global ? key . substring ( 7 ) : key ; if ( ! key . contains ( delim ) ) { key += delim ; } ( global ? global_map : local_map ) . put ( key , value ) ; } |
public class LinkedWorkspaceStorageCacheImpl { /** * Removes data and its children in cache . < br >
* Implementation details < br >
* Remove Item from cache C , for Node removes lists in CN and CP ( only lists ) . < br >
* Remove Item from parent ' s child lists ( CN for Node , CP for Property ) . NOTE : if CN or CP of the
* Item parent are iterrating now ConcurrentModificationException will occurs there . NOTE # 2 : used
* from onSaveItems ( ) .
* @ param item */
public void remove ( final ItemData item ) { } } | if ( enabled && item != null ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( name + ", remove() " + item . getQPath ( ) . getAsString ( ) + " " + item . getIdentifier ( ) ) ; } writeLock . lock ( ) ; try { final String itemId = item . getIdentifier ( ) ; removeItem ( item ) ; if ( item . isNode ( ) ) { // removing childs of the node
nodesCache . remove ( itemId ) ; propertiesCache . remove ( itemId ) ; // removing child from the node ' s parent child nodes list
removeChildNode ( item . getParentIdentifier ( ) , itemId ) ; } else { removeChildProperty ( item . getParentIdentifier ( ) , itemId ) ; } } catch ( Exception e ) { LOG . error ( name + ", Error remove item data from cache: " + item . getQPath ( ) . getAsString ( ) , e ) ; } finally { writeLock . unlock ( ) ; } } |
public class TargetHttpsProxyClient { /** * Retrieves the list of TargetHttpsProxy resources available to the specified project .
* < p > Sample code :
* < pre > < code >
* try ( TargetHttpsProxyClient targetHttpsProxyClient = TargetHttpsProxyClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* for ( TargetHttpsProxy element : targetHttpsProxyClient . listTargetHttpsProxies ( project . toString ( ) ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final ListTargetHttpsProxiesPagedResponse listTargetHttpsProxies ( String project ) { } } | ListTargetHttpsProxiesHttpRequest request = ListTargetHttpsProxiesHttpRequest . newBuilder ( ) . setProject ( project ) . build ( ) ; return listTargetHttpsProxies ( request ) ; |
public class CmsJspTagContainer { /** * Returns the serialized data of the given container . < p >
* @ param cms the cms context
* @ param maxElements the maximum number of elements allowed within this container
* @ param isDetailView < code > true < / code > if this container is currently being used for the detail view
* @ param isDetailOnly < code > true < / code > if this is a detail only container
* @ return the serialized container data */
protected String getContainerData ( CmsObject cms , int maxElements , boolean isDetailView , boolean isDetailOnly ) { } } | int width = - 1 ; try { if ( getWidth ( ) != null ) { width = Integer . parseInt ( getWidth ( ) ) ; } } catch ( NumberFormatException e ) { // ignore ; set width to - 1
LOG . debug ( "Error parsing container width." , e ) ; } CmsContainer cont = new CmsContainer ( getName ( ) , getType ( ) , m_bodyContent , width , maxElements , isDetailView , ! m_hasModelGroupAncestor && isEditable ( cms ) , null , m_parentContainer != null ? m_parentContainer . getName ( ) : null , m_parentElement != null ? m_parentElement . getInstanceId ( ) : null , m_settingPresets ) ; cont . setDeatilOnly ( isDetailOnly ) ; String result = "" ; try { result = CmsContainerpageService . getSerializedContainerInfo ( cont ) ; } catch ( Exception e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } return result ; |
public class XMLUtilities {

    /**
     * Gets the text value for the specified element. Returns {@code null} if
     * the element is null, the element has no children, or its first child is
     * not a text node.
     *
     * @param element the Element
     * @return the value String, or {@code null}
     */
    public static String getValue(Element element) {
        if (element == null) {
            return null;
        }
        final Node dataNode = element.getFirstChild();
        // BUG FIX: guard the cast — a first child that is an element, comment or
        // CDATA wrapper previously caused an unchecked (Text) cast and could
        // throw ClassCastException. Non-text first children now yield null.
        if (dataNode instanceof Text) {
            return ((Text) dataNode).getData();
        }
        return null;
    }
}
public class FlagUtil { /** * Returns a list of all flags , which are set in the value
* @ param value
* @ return list of all flags that are set */
public static < T extends Characteristic > List < T > getAllMatching ( long value , T [ ] flags ) { } } | List < T > list = new ArrayList < > ( ) ; // check every characteristic if it fits
for ( T ch : flags ) { // read mask
long mask = ch . getValue ( ) ; // use mask to check if flag is set
if ( ( value & mask ) != 0 ) { list . add ( ch ) ; } } return list ; |
public class ArrayUtils { /** * < p > Defensive programming technique to change a < code > null < / code >
* reference to an empty one . < / p >
* < p > This method returns an empty array for a < code > null < / code > input array . < / p >
* < p > As a memory optimizing technique an empty array passed in will be overridden with
* the empty < code > public static < / code > references in this class . < / p >
* @ param array the array to check for < code > null < / code > or empty
* @ return the same array , < code > public static < / code > empty array if < code > null < / code > or empty input
* @ since 2.5 */
public static String [ ] nullToEmpty ( String [ ] array ) { } } | if ( array == null || array . length == 0 ) { return EMPTY_STRING_ARRAY ; } return array ; |
public class DateRangeMapperBuilder { /** * Returns the { @ link DateRangeMapper } represented by this { @ link MapperBuilder } .
* @ param field the name of the field to be built
* @ return the { @ link DateRangeMapper } represented by this */
@ Override public DateRangeMapper build ( String field ) { } } | return new DateRangeMapper ( field , validated , from , to , pattern ) ; |
public class SVGPlot { /** * Update style element - invoke this appropriately after any change to the
* CSS styles . */
public void updateStyleElement ( ) { } } | // TODO : this should be sufficient - why does Batik occasionally not pick up
// the changes unless we actually replace the style element itself ?
// cssman . updateStyleElement ( document , style ) ;
Element newstyle = cssman . makeStyleElement ( document ) ; style . getParentNode ( ) . replaceChild ( newstyle , style ) ; style = newstyle ; |
public class HybridIterator { @ Override public boolean hasNext ( ) { } } | if ( this . items != null ) return i != this . itemsSize ; else if ( this . itemsIterator != null ) return this . itemsIterator . hasNext ( ) ; else return this . itemsEnumerator . hasMoreElements ( ) ; |
public class ApiApp { /** * Set the signer page primary button text hover color .
* @ param color String hex color code
* @ throws HelloSignException thrown if the color string is an invalid hex
* string */
public void setPrimaryButtonTextHoverColor ( String color ) throws HelloSignException { } } | if ( white_labeling_options == null ) { white_labeling_options = new WhiteLabelingOptions ( ) ; } white_labeling_options . setPrimaryButtonTextHoverColor ( color ) ; |
public class FacesConfigTypeImpl { /** * If not already created , a new < code > managed - bean < / code > element will be created and returned .
* Otherwise , the first existing < code > managed - bean < / code > element will be returned .
* @ return the instance defined for the element < code > managed - bean < / code > */
public FacesConfigManagedBeanType < FacesConfigType < T > > getOrCreateManagedBean ( ) { } } | List < Node > nodeList = childNode . get ( "managed-bean" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FacesConfigManagedBeanTypeImpl < FacesConfigType < T > > ( this , "managed-bean" , childNode , nodeList . get ( 0 ) ) ; } return createManagedBean ( ) ; |
public class MPOImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EList < MPORG > getRG ( ) { } } | if ( rg == null ) { rg = new EObjectContainmentEList . Resolving < MPORG > ( MPORG . class , this , AfplibPackage . MPO__RG ) ; } return rg ; |
public class PersonaAuthorizer { public static String registerAssertion ( String assertion ) { } } | Map < String , Object > result = parseAssertion ( assertion ) ; if ( result == null ) return null ; String email = ( String ) result . get ( ASSERTION_FIELD_EMAIL ) ; String origin = ( String ) result . get ( ASSERTION_FIELD_ORIGIN ) ; if ( origin == null ) throw new IllegalArgumentException ( "Invalid assertion, origin was null" ) ; // Normalize the origin URL string :
try { URL originURL = new URL ( origin ) ; origin = originURL . toExternalForm ( ) . toLowerCase ( ) ; } catch ( MalformedURLException e ) { String msg = String . format ( Locale . ENGLISH , "Error registering assertion: %s" , assertion ) ; Log . e ( Log . TAG_SYNC , msg , e ) ; throw new IllegalArgumentException ( msg , e ) ; } List < String > key = new ArrayList < String > ( ) ; key . add ( email ) ; key . add ( origin ) ; Log . v ( Log . TAG_SYNC , "PersonaAuthorizer registering key: %s" , key ) ; sAssertions . put ( key , assertion ) ; return email ; |
public class ListCommandVersionRepairer { /** * Updates the version of the remote commands that should be executed locally to ensure that the local list version
* equals the version that is resent to the server when they are executed .
* @ param indexRepairedRemoteCommands
* The remote commands thats indices where already repaired . This list can be empty .
* @ param versionRepairedLocalCommands
* The local commands that should be resent to the server thats version was already repaired by this
* class . This queue must contain at least one element .
* @ return The remote commands with repaired list versions . */
public List < ? extends ListCommand > repairRemoteCommandVersion ( final List < ? extends ListCommand > indexRepairedRemoteCommands , final List < ListCommand > versionRepairedLocalCommands ) { } } | final int commandCount = indexRepairedRemoteCommands . size ( ) ; final ListCommand lastLocalCommand = versionRepairedLocalCommands . get ( versionRepairedLocalCommands . size ( ) - 1 ) ; if ( commandCount == 0 ) { return asList ( new RemoveFromList ( lastLocalCommand . getListId ( ) , new ListVersionChange ( randomUUID ( ) , lastLocalCommand . getListVersionChange ( ) . getToVersion ( ) ) , 0 , 0 ) ) ; } final List < ListCommand > repaired = new ArrayList < ListCommand > ( commandCount ) ; for ( int i = 0 ; i < commandCount - 1 ; i ++ ) { repaired . add ( indexRepairedRemoteCommands . get ( i ) ) ; } repaired . add ( repairCommand ( indexRepairedRemoteCommands . get ( commandCount - 1 ) , randomUUID ( ) , lastLocalCommand . getListVersionChange ( ) . getToVersion ( ) ) ) ; return repaired ; |
public class Initialization { /** * @ param clazz service class
* @ param < T > the service type
* @ return Returns a collection of implementations loaded . */
public static < T > Collection < T > getLoadedImplementations ( Class < T > clazz ) { } } | @ SuppressWarnings ( "unchecked" ) Collection < T > retVal = ( Collection < T > ) extensionsMap . get ( clazz ) ; if ( retVal == null ) { return new HashSet < > ( ) ; } return retVal ; |
public class AWSCloudTrailProcessingExecutor { /** * Validate the user ' s input before processing logs . */
private void validateBeforeStart ( ) { } } | LibraryUtils . checkArgumentNotNull ( config , "Configuration is null." ) ; config . validate ( ) ; LibraryUtils . checkArgumentNotNull ( sourceFilter , "sourceFilter is null." ) ; LibraryUtils . checkArgumentNotNull ( eventFilter , "eventFilter is null." ) ; LibraryUtils . checkArgumentNotNull ( eventsProcessor , "eventsProcessor is null." ) ; LibraryUtils . checkArgumentNotNull ( progressReporter , "progressReporter is null." ) ; LibraryUtils . checkArgumentNotNull ( exceptionHandler , "exceptionHandler is null." ) ; LibraryUtils . checkArgumentNotNull ( scheduledThreadPool , "scheduledThreadPool is null." ) ; LibraryUtils . checkArgumentNotNull ( mainThreadPool , "mainThreadPool is null." ) ; LibraryUtils . checkArgumentNotNull ( readerFactory , "readerFactory is null." ) ; |
public class ISPNQuotaPersister { /** * { @ inheritDoc } */
public void setRepositoryQuota ( final String repositoryName , final long quotaLimit ) { } } | SecurityHelper . doPrivilegedAction ( new PrivilegedAction < Void > ( ) { public Void run ( ) { CacheKey key = new RepositoryQuotaKey ( repositoryName ) ; cache . put ( key , quotaLimit ) ; return null ; } } ) ; |
public class CommerceNotificationTemplateUserSegmentRelLocalServiceBaseImpl { /** * Deletes the commerce notification template user segment rel with the primary key from the database . Also notifies the appropriate model listeners .
* @ param commerceNotificationTemplateUserSegmentRelId the primary key of the commerce notification template user segment rel
* @ return the commerce notification template user segment rel that was removed
* @ throws PortalException if a commerce notification template user segment rel with the primary key could not be found */
@ Indexable ( type = IndexableType . DELETE ) @ Override public CommerceNotificationTemplateUserSegmentRel deleteCommerceNotificationTemplateUserSegmentRel ( long commerceNotificationTemplateUserSegmentRelId ) throws PortalException { } } | return commerceNotificationTemplateUserSegmentRelPersistence . remove ( commerceNotificationTemplateUserSegmentRelId ) ; |
public class mpssession { /** * Use this API to fetch filtered set of mpssession resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static mpssession [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | mpssession obj = new mpssession ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; mpssession [ ] response = ( mpssession [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class JvmCustomAnnotationValueImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case TypesPackage . JVM_CUSTOM_ANNOTATION_VALUE__VALUES : getValues ( ) . clear ( ) ; getValues ( ) . addAll ( ( Collection < ? extends EObject > ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class Job { /** * Block synchronously waiting for a job to end , success or not .
* @ param jobkey Job to wait for .
* @ param pollingIntervalMillis Polling interval sleep time . */
public static void waitUntilJobEnded ( Key jobkey , int pollingIntervalMillis ) { } } | while ( true ) { if ( Job . isEnded ( jobkey ) ) { return ; } try { Thread . sleep ( pollingIntervalMillis ) ; } catch ( Exception ignore ) { } } |
public class InodeLockList {

    /**
     * Downgrades from edge write-locking to inode write-locking, reducing the
     * scope of the write lock by pushing it forward one entry.
     *
     * For example, if the list is in write mode with entries [a, a-&gt;b, b, b-&gt;c],
     * {@code downgradeEdgeToInode(c, mode)} changes it to [a, a-&gt;b, b, b-&gt;c, c],
     * with b-&gt;c downgraded to a read lock and c locked according to {@code mode}.
     *
     * The read lock on the final edge is acquired BEFORE the write lock is
     * released, so no other thread can slip in between.
     *
     * @param inode the next inode to append to the lock list
     * @param mode the lock mode the list is downgraded to
     */
    public void downgradeEdgeToInode(Inode inode, LockMode mode) {
        // Must currently end in an edge, have entries, and be in write mode.
        Preconditions.checkState(!endsInInode());
        Preconditions.checkState(!mEntries.isEmpty());
        Preconditions.checkState(mLockMode == LockMode.WRITE);
        EdgeEntry last = (EdgeEntry) mEntries.get(mEntries.size() - 1);
        // Take the inode lock and the read lock on the edge first...
        LockResource inodeLock = mInodeLockManager.lockInode(inode, mode);
        LockResource edgeLock = mInodeLockManager.lockEdge(last.mEdge, LockMode.READ);
        // ...then release the write lock on the last edge.
        last.getLock().close();
        // Replace the write-locked edge entry with the read-locked one and append the inode.
        mEntries.set(mEntries.size() - 1, new EdgeEntry(edgeLock, last.getEdge()));
        mEntries.add(new InodeEntry(inodeLock, inode));
        mLockedInodes.add(inode);
        mLockMode = mode;
    }
}
public class CmsGalleryController {

    /**
     * Sorts the categories according to the given parameters and updates the
     * displayed list (or tree).
     *
     * @param sortParams the sort parameters; must be a valid {@code SortParams} name
     * @param filter the filter to apply before sorting
     */
    public void sortCategories(String sortParams, String filter) {
        List<CmsCategoryBean> categories;
        // Throws IllegalArgumentException if sortParams is not a SortParams constant.
        SortParams sort = SortParams.valueOf(sortParams);
        switch (sort) {
            case tree:
                // Tree view ignores the filter and shows the full category tree.
                m_handler.onUpdateCategoriesTree(m_dialogBean.getCategories(), m_searchObject.getCategories());
                break;
            case title_asc:
                categories = getFilteredCategories(filter);
                Collections.sort(categories, new CmsComparatorTitle(true));
                m_handler.onUpdateCategoriesList(categories, m_searchObject.getCategories());
                break;
            case title_desc:
                categories = getFilteredCategories(filter);
                Collections.sort(categories, new CmsComparatorTitle(false));
                m_handler.onUpdateCategoriesList(categories, m_searchObject.getCategories());
                break;
            // The remaining sort orders are intentionally not handled here:
            // the list is left unchanged for them.
            case type_asc:
            case type_desc:
            case path_asc:
            case path_desc:
            case dateLastModified_asc:
            case dateLastModified_desc:
            default:
        }
    }
}
public class VertxGenerator {

    /**
     * Generates fetchByXYZ- and fetchOneByXYZ-methods for the given table
     * (copied, more or less, from JavaGenerator).
     *
     * For every column a fetchBy[Column] finder is emitted, except for the
     * first primary-key column (fetchById already exists in VertxDAO). For a
     * column that is the sole member of a non-primary unique key, a
     * fetchOneBy[Column] finder is emitted as well.
     *
     * @param table the table to generate finders for
     * @param out the writer the generated code goes to (must be a VertxJavaWriter)
     */
    protected void generateFetchMethods(TableDefinition table, JavaWriter out) {
        VertxJavaWriter vOut = (VertxJavaWriter) out;
        // Fully-qualified POJO type of the table, resolved through the writer.
        String pType = vOut.ref(getStrategy().getFullJavaClassName(table, GeneratorStrategy.Mode.POJO));
        UniqueKeyDefinition primaryKey = table.getPrimaryKey();
        ColumnDefinition firstPrimaryKeyColumn = primaryKey.getKeyColumns().get(0);
        for (ColumnDefinition column : table.getColumns()) {
            final String colName = column.getOutputName();
            final String colClass = getStrategy().getJavaClassName(column);
            final String colType = vOut.ref(getJavaType(column.getType()));
            final String colIdentifier = vOut.ref(getStrategy().getFullJavaIdentifier(column), colRefSegments(column));
            // fetchById is already defined in VertxDAO, so skip the first PK column.
            if (!firstPrimaryKeyColumn.equals(column)) {
                // fetchBy[Column]([T]...)
                generateFindManyByMethods(out, pType, colName, colClass, colType, colIdentifier);
            }
            ukLoop:
            for (UniqueKeyDefinition uk : column.getUniqueKeys()) {
                // If the column is the only member of a single-column unique key
                // (and not the primary key), emit fetchOneBy[Column]([T]).
                if (uk.getKeyColumns().size() == 1 && uk.getKeyColumns().get(0).equals(column) && !uk.isPrimaryKey()) {
                    generateFindOneByMethods(out, pType, colName, colClass, colType, colIdentifier);
                    // One fetchOneBy method per column is enough.
                    break ukLoop;
                }
            }
        }
    }
}
public class TokenAttributes { /** * Used to determine if the instance contains all of the
* { @ link TokenAttributes # requiredTokenAttributes } . If any of the
* required attributes are missing , this will return { @ code false } .
* @ return { @ code true } if all required attributes are present */
public boolean isValid ( ) { } } | boolean result = true ; // The username attribute will always be required .
result = ( this . getUsername ( ) != null ) ; if ( result == true && this . requiredTokenAttributes != null ) { for ( String attr : this . requiredTokenAttributes ) { if ( this . get ( attr ) == null ) { result = false ; break ; } } } return result ; |
public class Equation {

    /**
     * Sees if a range for assignment is specified. If so returns the range,
     * otherwise returns null.
     *
     * Examples of assign ranges:
     *   a(0:3,4:5) = blah
     *   a((0+2):3,4:5) = blah
     *
     * @param sequence operation sequence that range-computing ops are appended to
     * @param tokens full token list for the statement
     * @param t0 token naming the assignment target (e.g. the variable 'a')
     * @return 1 or 2 range variables, or null if no sub-matrix range precedes '='
     */
    private List<Variable> parseAssignRange(Sequence sequence, TokenList tokens, TokenList.Token t0) {
        // Find the assignment symbol by scanning forward from the target token.
        TokenList.Token tokenAssign = t0.next;
        while (tokenAssign != null && tokenAssign.symbol != Symbol.ASSIGN) {
            tokenAssign = tokenAssign.next;
        }
        if (tokenAssign == null)
            throw new ParseError("Can't find assignment operator");
        // A sub-matrix range exists only if the token before '=' closes a parenthesis.
        if (tokenAssign.previous.symbol == Symbol.PAREN_RIGHT) {
            TokenList.Token start = t0.next;
            if (start.symbol != Symbol.PAREN_LEFT)
                throw new ParseError(("Expected left param for assignment"));
            TokenList.Token end = tokenAssign.previous;
            // Extract "( ... )" and strip the enclosing parentheses.
            TokenList subTokens = tokens.extractSubList(start, end);
            subTokens.remove(subTokens.getFirst());
            subTokens.remove(subTokens.getLast());
            // Resolve any nested parentheses, then split on commas.
            handleParentheses(subTokens, sequence);
            List<TokenList.Token> inputs = parseParameterCommaBlock(subTokens, sequence);
            if (inputs.isEmpty())
                throw new ParseError("Empty function input parameters");
            List<Variable> range = new ArrayList<>();
            addSubMatrixVariables(inputs, range);
            // A sub-matrix range is either 1-D (rows) or 2-D (rows, cols).
            if (range.size() != 1 && range.size() != 2) {
                throw new ParseError("Unexpected number of range variables. 1 or 2 expected");
            }
            return range;
        }
        return null;
    }
}
public class Market { /** * Add an order to an order book .
* < p > An update event is triggered .
* < p > If the order book for the instrument is closed or the order identifier is known , do nothing .
* @ param instrument the instrument
* @ param orderId the order identifier
* @ param side the side
* @ param price the price
* @ param size the size */
public void add ( long instrument , long orderId , Side side , long price , long size ) { } } | if ( orders . containsKey ( orderId ) ) { return ; } OrderBook book = books . get ( instrument ) ; if ( book == null ) { return ; } Order order = new Order ( book , side , price , size ) ; boolean bbo = book . add ( side , price , size ) ; orders . put ( orderId , order ) ; listener . update ( book , bbo ) ; |
public class GraphService { /** * Adds the specified type to this frame , and returns a new object that implements this type . */
public static < T extends WindupVertexFrame > T addTypeToModel ( GraphContext graphContext , WindupVertexFrame frame , Class < T > type ) { } } | Vertex vertex = frame . getElement ( ) ; graphContext . getGraphTypeManager ( ) . addTypeToElement ( type , vertex ) ; return graphContext . getFramed ( ) . frameElement ( vertex , type ) ; |
public class LinkUtil { /** * Retrieves the target for a resource if there are redirects declared .
* @ return the target path or url ( can be external ) ; ' null ' if no redirect detected
* @ throws RedirectLoopException if a ' loop ' has been detected during redirect resolving */
public static String getFinalTarget ( Resource resource ) throws RedirectLoopException { } } | ResourceHandle handle = ResourceHandle . use ( resource ) ; String finalTarget = getFinalTarget ( handle , new ArrayList < String > ( ) ) ; return finalTarget ; |
public class Condition {

    /**
     * Returns all named parameters (":name" placeholders) found in this
     * condition's content, in order of first appearance and without duplicates.
     *
     * @return a {@link java.util.List} of parameter names; empty if none
     */
    public List<String> getParamNames() {
        // Fast path: no colon means no named parameters.
        if (!Strings.contains(content, ":")) {
            return Collections.emptyList();
        }
        final List<String> params = new ArrayList<String>();
        int index = 0;
        do {
            final int colonIndex = content.indexOf(':', index);
            if (-1 == colonIndex) {
                break;
            }
            // Consume identifier characters immediately following the colon.
            index = colonIndex + 1;
            while (index < content.length()) {
                final char c = content.charAt(index);
                if (isValidIdentifierStarter(c)) {
                    index++;
                } else {
                    break;
                }
            }
            final String paramName = content.substring(colonIndex + 1, index);
            // De-duplicate while preserving first-seen order.
            if (!params.contains(paramName)) {
                params.add(paramName);
            }
        } while (index < content.length());
        return params;
    }
}
public class GetCredentialsForIdentityRequestMarshaller {

    /**
     * Marshalls the given request object onto the protocol marshaller,
     * binding each field to its wire representation.
     *
     * @param getCredentialsForIdentityRequest request to marshall; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetCredentialsForIdentityRequest getCredentialsForIdentityRequest, ProtocolMarshaller protocolMarshaller) {
        if (getCredentialsForIdentityRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getCredentialsForIdentityRequest.getIdentityId(), IDENTITYID_BINDING);
            protocolMarshaller.marshall(getCredentialsForIdentityRequest.getLogins(), LOGINS_BINDING);
            protocolMarshaller.marshall(getCredentialsForIdentityRequest.getCustomRoleArn(), CUSTOMROLEARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MMElementRule {

    /**
     * Validates the element occurrences of this IMolecularFormula against the
     * configured per-element maxima.
     *
     * @param formula the IMolecularFormula to validate
     * @return 1.0 if every element count is within its allowed maximum,
     *         0.0 otherwise
     * @throws CDKException declared by the validation contract
     */
    @Override
    public double validate(IMolecularFormula formula) throws CDKException {
        logger.info("Start validation of ", formula);
        double isValid = 1.0;
        Iterator<IElement> itElem = MolecularFormulaManipulator.elements(formula).iterator();
        while (itElem.hasNext()) {
            IElement element = itElem.next();
            int occur = MolecularFormulaManipulator.getElementCount(formula, element);
            // NOTE(review): hashMap.get(...) will NPE on auto-unboxing if the
            // formula contains an element symbol not present in hashMap —
            // confirm the map is guaranteed to cover all expected elements.
            if (occur > hashMap.get(element.getSymbol())) {
                // One element over its maximum invalidates the whole formula.
                isValid = 0.0;
                break;
            }
        }
        return isValid;
    }
}
public class FileLogOutput {

    /**
     * Recomputes the amount of space still left in the (circular) log file,
     * based on the current file position relative to the log's start address.
     *
     * @throws ObjectManagerException declared by the tracing/IO contract
     */
    private void setFileSpaceLeft() throws ObjectManagerException {
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.entry(this, cclass, "setFileSpaceLeft",
                        new Object[] { new Long(fileLogHeader.fileSize), new Long(fileLogHeader.startByteAddress), new Long(filePosition) });
        // Assume we have wrapped around the end of the file: the space left is
        // between the current file position and the start of the log file.
        long newFileSpaceLeft = fileLogHeader.startByteAddress - filePosition;
        // If we have not wrapped (result non-positive), the free space runs to
        // the end of the file, minus the two header copies.
        if (newFileSpaceLeft <= 0)
            newFileSpaceLeft = newFileSpaceLeft + fileLogHeader.fileSize - FileLogHeader.headerLength * 2;
        fileSpaceLeft = newFileSpaceLeft;
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.exit(this, cclass, "setFileSpaceLeft", new Object[] { new Long(fileSpaceLeft) });
    }
}
public class InterfaceService {

    /**
     * Hides the current view, destroys all screens and shows the recreated
     * current view. Note that it won't recreate all views that were previously
     * initiated, as views are constructed on demand.
     *
     * @see #initiateAllControllers()
     */
    public void reload() {
        // Hide the current controller, then (once hidden) post the reloading
        // runnable so destruction/recreation happens outside the hide action.
        currentController.hide(Actions.sequence(
            hidingActionProvider.provideAction(currentController, currentController),
            Actions.run(CommonActionRunnables.getActionPosterRunnable(getViewReloadingRunnable()))));
    }
}
public class FileExecutor {

    /**
     * Saves content to a file, overwriting any existing content.
     *
     * @param path file path to write to
     * @param content content to write
     * @throws IOException if writing fails
     */
    public static void saveFile(String path, String content) throws IOException {
        // Delegate with append=false, i.e. truncate-and-overwrite semantics.
        saveFile(path, content, false);
    }
}
public class CsvProcessor {

    /**
     * Extracts a value from the line, converts it into its java equivalent,
     * and assigns it to our target object. Any extraction or assignment
     * failure is recorded on {@code parseError} rather than thrown.
     *
     * @param line full CSV line being processed
     * @param lineNumber 1-based line number, for error reporting
     * @param columnInfo metadata for the column being assigned
     * @param columnStr raw column string value
     * @param linePos position within the line, for error reporting
     * @param target object the converted value is assigned to
     * @param parseError accumulator that receives error details, if any
     */
    private void extractAndAssignValue(String line, int lineNumber, ColumnInfo<Object> columnInfo, String columnStr, int linePos, Object target, ParseError parseError) {
        Object value = extractValue(line, lineNumber, columnInfo, columnStr, linePos, target, parseError);
        if (value == null) {
            // Either an extraction error occurred or the column had no value.
            assignParseErrorFields(parseError, columnInfo, columnStr);
            return;
        }
        try {
            columnInfo.setValue(target, value);
        } catch (Exception e) {
            // Reflection/setter failure: record as internal error, don't throw.
            parseError.setErrorType(ErrorType.INTERNAL_ERROR);
            parseError.setMessage("setting value for field '" + columnInfo.getFieldName() + "' error: " + e.getMessage());
            assignParseErrorFields(parseError, columnInfo, columnStr);
            parseError.setLinePos(linePos);
        }
    }
}
public class CPDefinitionOptionValueRelLocalServiceBaseImpl {

    /**
     * Deletes the cp definition option value rel with the primary key from the
     * database. Also notifies the appropriate model listeners; the
     * {@code @Indexable(DELETE)} annotation additionally removes the entity
     * from the search index.
     *
     * @param CPDefinitionOptionValueRelId the primary key of the cp definition
     *        option value rel
     * @return the cp definition option value rel that was removed
     * @throws PortalException if no entity with the primary key could be found
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CPDefinitionOptionValueRel deleteCPDefinitionOptionValueRel(long CPDefinitionOptionValueRelId) throws PortalException {
        // Delegate to the persistence layer, which throws when the key is unknown.
        return cpDefinitionOptionValueRelPersistence.remove(CPDefinitionOptionValueRelId);
    }
}
public class AutoAnnotationProcessor {

    /**
     * Issues a compilation error and returns an exception that, when thrown,
     * will cause the processing of this class to be abandoned. This does not
     * prevent the processing of other classes.
     *
     * @param msg the error message to report
     * @param e the element the error is attached to
     * @return an exception for the caller to throw to abort processing
     */
    private AbortProcessingException abortWithError(String msg, Element e) {
        reportError(e, msg);
        return new AbortProcessingException();
    }
}
public class CmsModuleXmlHandler {

    /**
     * Marks the currently imported module as an old (5.0.x) style module,
     * logging the fact at debug level.
     */
    public void setOldModule() {
        m_oldModule = true;
        if (LOG.isDebugEnabled()) {
            LOG.debug(Messages.get().getBundle().key(Messages.LOG_OLD_MODULE_IMPORTED_0));
        }
    }
}
public class GraphCanvas {

    /**
     * Checks if there is a new event in the event list and, if so, adds a
     * marker label for it to the plot. Processes at most one event per call,
     * tracked by {@code eventCounter}.
     */
    private void addEvents() {
        if (clusterEvents != null && clusterEvents.size() > eventCounter) {
            ClusterEvent ev = clusterEvents.get(eventCounter);
            eventCounter++;
            // Marker shows the first letter of the event type.
            JLabel eventMarker = new JLabel(ev.getType().substring(0, 1));
            eventMarker.setPreferredSize(new Dimension(20, y_offset_top));
            eventMarker.setSize(new Dimension(20, y_offset_top));
            eventMarker.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
            // Convert the event timestamp into an x pixel position on the plot.
            int x = (int) (ev.getTimestamp() / processFrequency / x_resolution);
            // Center the 20px-wide marker on the computed x position.
            eventMarker.setLocation(x - 10, 0);
            eventMarker.setToolTipText(ev.getType() + " at " + ev.getTimestamp() + ": " + ev.getMessage());
            eventPanel.add(eventMarker);
            eventLabelList.add(eventMarker);
            eventPanel.repaint();
        }
    }
}
public class AdHocThrottleManager {

    /**
     * Returns the throttle in effect for the given HTTP method and path. This
     * method is always guaranteed to return a non-null throttle that is not
     * expired. If no throttle has been configured, or if the configured
     * throttle has expired, an unlimited throttle (with an effectively
     * infinite expiration date) is returned.
     *
     * @param endpoint the HTTP method/path pair to look up
     * @return the effective, non-expired throttle; never null
     */
    public AdHocThrottle getThrottle(AdHocThrottleEndpoint endpoint) {
        String key = endpoint.toString();
        AdHocThrottle throttle = _throttleMap.get(key);
        if (throttle == null) {
            // No throttle set, allow unlimited.
            throttle = AdHocThrottle.unlimitedInstance();
        } else if (throttle.getExpiration().isBefore(Instant.now())) {
            // Throttle is expired; remove it and allow unlimited. There is a
            // slight chance for a race condition here but since throttles are
            // rarely put in place this is extremely unlikely. To help avoid
            // this, wait 24 hours before removing.
            if (throttle.getExpiration().isBefore(Instant.now().minus(Duration.ofDays(1)))) {
                try {
                    _throttleMap.remove(key);
                } catch (Exception e) {
                    // Removal is best-effort; log and continue with unlimited.
                    _log.warn("Failed to remove expired throttle for {} {}", endpoint.getMethod(), endpoint.getPath(), e);
                }
            }
            throttle = AdHocThrottle.unlimitedInstance();
        }
        return throttle;
    }
}
public class TapProducer {

    /**
     * Dumps the given test set to the output file, using the charset declared
     * by a Tap13Representer when available, otherwise the platform default.
     *
     * {@inheritDoc}
     */
    @Override
    public void dump(TestSet testSet, File output) {
        Charset charset = null;
        if (representer instanceof Tap13Representer) {
            // TAP 13 representers carry their own charset configuration.
            charset = Charset.forName(((Tap13Representer) representer).getOptions().getCharset());
        } else {
            charset = Charset.defaultCharset();
        }
        // try-with-resources closes both the stream and the writer.
        try (FileOutputStream outputStream = new FileOutputStream(output);
             OutputStreamWriter writer = new OutputStreamWriter(outputStream, charset.newEncoder())) {
            this.dump(testSet, writer);
        } catch (IOException e) {
            throw new ProducerException(String.format("Failed to dump TAP Stream: %s", e.getMessage()), e);
        }
    }
}
public class CoalesceVariableNames {

    /**
     * In order to determine when it is appropriate to coalesce two variables,
     * we use a live variables analysis to make sure they are not alive at the
     * same time. We take every pairing of variables and, for every CFG node,
     * determine whether the two variables are alive at the same time. If two
     * variables are alive at the same time, we create an edge between them in
     * the interference graph. The interference graph is the input to a graph
     * coloring algorithm that ensures any interfering variables are marked in
     * different color groups, while variables that can safely be coalesced are
     * assigned the same color group.
     *
     * @param cfg control flow graph annotated with liveness state
     * @param escaped escaped variables, which must never be coalesced
     * @return graph with variable nodes and edges representing variable interference
     */
    private UndiGraph<Var, Void> computeVariableNamesInterferenceGraph(ControlFlowGraph<Node> cfg, Set<? extends Var> escaped) {
        UndiGraph<Var, Void> interferenceGraph = LinkedUndirectedGraph.create();
        // First create a node for each non-escaped variable. We add these nodes
        // in the order in which they appear in the code because we want the
        // names that appear earlier in the code to be used when coalescing to
        // variables that appear later in the code.
        List<Var> orderedVariables = liveness.getAllVariablesInOrder();
        for (Var v : orderedVariables) {
            if (escaped.contains(v)) {
                continue;
            }
            // NOTE(user): In theory, we CAN coalesce function names just like
            // any variables. Our liveness analysis captures this just as
            // described in the specification. However, we saw some zipped and
            // unzipped size increase after this, so for now we respect
            // function declarations and do not play around with them.
            if (v.getParentNode().isFunction()) {
                continue;
            }
            // NOTE: we skip class declarations for a combination of two reasons:
            // 1. they are block-scoped, so we would need to rewrite them as
            //    class expressions, e.g. `class C {}` -> `var C = class {}`,
            //    to avoid incorrect semantics
            //    (see testDontCoalesceClassDeclarationsWithDestructuringDeclaration).
            //    This is possible but increases pre-gzip code size and complexity.
            // 2. since function declaration coalescing seems to cause a size
            //    regression (as discussed above) we assume that coalescing
            //    class names may cause a similar size regression.
            if (v.getParentNode().isClass()) {
                continue;
            }
            // Skip lets and consts that have multiple variables declared in
            // them, otherwise this produces incorrect semantics (see test case
            // "testCapture"). Skipping vars technically isn't needed for
            // correct semantics, but works around a Safari bug for var
            // redeclarations (https://github.com/google/closure-compiler/issues/3164).
            if (isInMultipleLvalueDecl(v)) {
                continue;
            }
            interferenceGraph.createNode(v);
        }
        // Go through each variable pair and try to connect them.
        int v1Index = -1;
        for (Var v1 : orderedVariables) {
            v1Index++;
            int v2Index = -1;
            NEXT_VAR_PAIR:
            for (Var v2 : orderedVariables) {
                v2Index++;
                // Skip duplicate pairs (the graph is undirected).
                if (v1Index > v2Index) {
                    continue;
                }
                if (!interferenceGraph.hasNode(v1) || !interferenceGraph.hasNode(v2)) {
                    // Skip nodes that were not added. They are globals and
                    // escaped locals. Also avoid merging a variable with itself.
                    continue NEXT_VAR_PAIR;
                }
                // Parameters always interfere with each other.
                if (v1.isParam() && v2.isParam()) {
                    interferenceGraph.connectIfNotFound(v1, null, v2);
                    continue NEXT_VAR_PAIR;
                }
                // Go through every CFG node in the program and look at this
                // variable pair. If they are both live at the same time, add
                // an edge between them and continue to the next pair.
                NEXT_CROSS_CFG_NODE:
                for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) {
                    if (cfg.isImplicitReturn(cfgNode)) {
                        continue NEXT_CROSS_CFG_NODE;
                    }
                    FlowState<LiveVariableLattice> state = cfgNode.getAnnotation();
                    // Check the live states and add an edge when possible.
                    if ((state.getIn().isLive(v1Index) && state.getIn().isLive(v2Index))
                            || (state.getOut().isLive(v1Index) && state.getOut().isLive(v2Index))) {
                        interferenceGraph.connectIfNotFound(v1, null, v2);
                        continue NEXT_VAR_PAIR;
                    }
                }
                // v1 and v2 might not have an edge between them! woohoo.
                // There's one last check we have to do: look for a collision
                // *within* a single CFG node.
                NEXT_INTRA_CFG_NODE:
                for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) {
                    if (cfg.isImplicitReturn(cfgNode)) {
                        continue NEXT_INTRA_CFG_NODE;
                    }
                    FlowState<LiveVariableLattice> state = cfgNode.getAnnotation();
                    boolean v1OutLive = state.getOut().isLive(v1Index);
                    boolean v2OutLive = state.getOut().isLive(v2Index);
                    CombinedLiveRangeChecker checker = new CombinedLiveRangeChecker(
                        cfgNode.getValue(),
                        new LiveRangeChecker(v1, v2OutLive ? null : v2),
                        new LiveRangeChecker(v2, v1OutLive ? null : v1));
                    checker.check(cfgNode.getValue());
                    if (checker.connectIfCrossed(interferenceGraph)) {
                        continue NEXT_VAR_PAIR;
                    }
                }
            }
        }
        return interferenceGraph;
    }
}
public class CalendarUtil {

    /**
     * Determines the number of years between two dates so that a premium
     * duration can be established. A start date in December is counted as
     * belonging to the following year.
     *
     * @param startDate start date of a period (e.g. date of birth of the
     *        insured party)
     * @param endDate end date (e.g. premium end date of an insurance)
     * @return number of years between start and end
     */
    public int getDurationInYears(XMLGregorianCalendar startDate, XMLGregorianCalendar endDate) {
        final int lastMonthOfYear = 12;
        int effectiveStartYear = startDate.getYear();
        // A December start counts towards the next year.
        if (startDate.getMonth() == lastMonthOfYear) {
            effectiveStartYear++;
        }
        return endDate.getYear() - effectiveStartYear;
    }
}
public class XMLBuilder2 {

    /**
     * Serializes the document (or subtree) to the given writer, converting any
     * checked {@link TransformerException} from the superclass into an
     * unchecked {@code XMLBuilderRuntimeException}.
     *
     * @throws XMLBuilderRuntimeException to wrap {@link TransformerException}
     */
    @Override
    public void toWriter(boolean wholeDocument, Writer writer, Properties outputProperties) {
        try {
            super.toWriter(wholeDocument, writer, outputProperties);
        } catch (TransformerException e) {
            throw wrapExceptionAsRuntimeException(e);
        }
    }
}
public class HBCIUtils { /** * Gibt zu einer gegebenen Bankleitzahl die PIN / TAN - URL
* zurück .
* @ param blz Bankleitzahl der Bank
* @ return PIN / TAN - URL . Falls keine URL bekannt
* ist , wird ein leerer String zurückgegeben .
* @ deprecated Bitte { @ link HBCIUtils # getBankInfo ( String ) } verwenden . */
public static String getPinTanURLForBLZ ( String blz ) { } } | BankInfo info = getBankInfo ( blz ) ; if ( info == null ) return "" ; return info . getPinTanAddress ( ) != null ? info . getPinTanAddress ( ) : "" ; |
public class SocksServerSocket {

    /**
     * Gets the address assigned by the proxy to listen for incoming
     * connections, or the local machine address if doing a direct connection.
     * The resolved address is cached in {@code localIP} after the first call.
     *
     * @return the listening address, or null if the host name cannot be resolved
     */
    public InetAddress getInetAddress() {
        // Resolve lazily and cache the result.
        if (localIP == null) {
            try {
                localIP = InetAddress.getByName(localHost);
            } catch (UnknownHostException e) {
                // Deliberately signal resolution failure with null instead of throwing.
                return null;
            }
        }
        return localIP;
    }
}
public class SlicedFileConsumer {

    /**
     * Pushes a message through the pipe.
     *
     * RTMP messages are queued (sorted by time in a priority queue) and, once
     * a writer exists, flushed in slices. A reset message clears the start
     * timestamp; anything else is ignored.
     *
     * @param pipe the pipe the message arrived on
     * @param message the message to push
     * @throws IOException if the message could not be written
     */
    @SuppressWarnings("rawtypes")
    public void pushMessage(IPipe pipe, IMessage message) throws IOException {
        if (message instanceof RTMPMessage) {
            final IRTMPEvent msg = ((RTMPMessage) message).getBody();
            // Get the type and timestamp of the event.
            byte dataType = msg.getDataType();
            int timestamp = msg.getTimestamp();
            log.trace("Data type: {} timestamp: {}", dataType, timestamp);
            // If writes are delayed, queue the data sorted by time; create the
            // queue lazily on first use.
            if (queue == null) {
                // PriorityQueue requires a positive capacity; 11 is its default.
                queue = new PriorityQueue<QueuedMediaData>(queueThreshold <= 0 ? 11 : queueThreshold);
            }
            QueuedMediaData queued = null;
            if (msg instanceof IStreamData) {
                if (log.isTraceEnabled()) {
                    log.trace("Stream data, body saved. Data type: {} class type: {}", dataType, msg.getClass().getName());
                }
                // Ensure that our first video frame written is a key frame.
                if (msg instanceof VideoData) {
                    log.debug("pushMessage video - waitForVideoKeyframe: {} gotVideoKeyframe: {}", waitForVideoKeyframe, gotVideoKeyframe);
                    if (!gotVideoKeyframe) {
                        VideoData video = (VideoData) msg;
                        if (video.getFrameType() == FrameType.KEYFRAME) {
                            log.debug("Got our first keyframe");
                            gotVideoKeyframe = true;
                        }
                        if (waitForVideoKeyframe && !gotVideoKeyframe) {
                            // Skip this frame: no keyframe has been written yet.
                            log.debug("Skipping video data since keyframe has not been written yet");
                            return;
                        }
                    }
                }
                queued = new QueuedMediaData(timestamp, dataType, (IStreamData) msg);
            } else {
                // XXX what type of message are we saving that has no body data??
                if (log.isTraceEnabled()) {
                    log.trace("Non-stream data, body not saved. Data type: {} class type: {}", dataType, msg.getClass().getName());
                }
                queued = new QueuedMediaData(timestamp, dataType);
            }
            if (queued != null) {
                // Guard queue mutation with the write lock.
                writeLock.lock();
                try {
                    queue.add(queued);
                } finally {
                    writeLock.unlock();
                }
            }
            // Read the queue size under the read lock.
            int queueSize = 0;
            readLock.lock();
            try {
                queueSize = queue.size();
            } finally {
                readLock.unlock();
            }
            // Initialize a writer lazily and flush an initial slice when appropriate.
            if (writer == null) {
                init();
                if (msg instanceof VideoData) {
                    writeQueuedDataSlice(createTimestampLimitedSlice(msg.getTimestamp()));
                } else if (queueThreshold >= 0 && queueSize >= queueThreshold) {
                    writeQueuedDataSlice(createFixedLengthSlice(queueThreshold / (100 / percentage)));
                }
            }
        } else if (message instanceof ResetMessage) {
            // Reset: forget the start timestamp.
            startTimestamp = -1;
        } else if (log.isDebugEnabled()) {
            log.debug("Ignoring pushed message: {}", message);
        }
    }
}
public class AuthorizedValuesRule { /** * ( non - Javadoc )
* @ see javax . validation . ConstraintValidator # isValid ( java . lang . Object , javax . validation . ConstraintValidatorContext ) */
@ Override public boolean isValid ( Object value , ConstraintValidatorContext constraintContext ) { } } | // Si la liste des valeurs permisse est vide : true
if ( values == null || values . length == 0 ) return true ; // Si l ' objet est null : false
if ( value == null ) return false ; // Si l ' objet est une chaine
if ( value instanceof String ) { // On caste
String stringValue = ( ( String ) value ) . trim ( ) ; // Si la chaîne est vide : false
if ( stringValue . length ( ) == 0 ) return false ; // Si on compare avec casse
if ( caseSensitive ) { // On recherche la valeur
for ( String authValue : values ) { // Si la valeur est hors des valeurs prescrites
if ( ! stringValue . equals ( authValue . trim ( ) ) ) return false ; } } else { // On recherche la valeur
for ( String authValue : values ) { // Si la valeur est hors des valeurs prescrites
if ( ! stringValue . equalsIgnoreCase ( authValue . trim ( ) ) ) return false ; } } } else { // On recherche la valeur
for ( Object authValue : values ) { // Si la valeur est hors des valeurs prescrites
if ( ! value . equals ( authValue ) ) return false ; } } // On retourne false
return true ; |
public class AbstractDataServiceVisitor {

    /**
     * Returns whether the method has to be considered: it must be public, not
     * annotated with TransientDataService, not static, not inherited from
     * Object, and not already processed. If considered, its signature is
     * recorded in {@code methodProceeds}.
     *
     * @param methodProceeds signatures of methods already processed; updated on success
     * @param methodElement the method to examine
     * @return true if the method should be processed
     */
    boolean isConsiderateMethod(Collection<String> methodProceeds, ExecutableElement methodElement) {
        // int argNum methodElement . getParameters ( ) . size ( ) ;
        // NOTE(review): the "signature" is only the simple name (the
        // parameter-count suffix is commented out), so overloads collapse to
        // one entry and only the first overload is considered — confirm intended.
        String signature = methodElement.getSimpleName().toString(); // + " ( " + argNum + " ) " ;
        // Check if a method with the same signature has already been processed.
        if (methodProceeds.contains(signature)) {
            return false;
        }
        // Skip methods inherited from Object.
        TypeElement objectElement = environment.getElementUtils().getTypeElement(Object.class.getName());
        if (objectElement.getEnclosedElements().contains(methodElement)) {
            return false;
        }
        // Skip static and non-public methods.
        if (!methodElement.getModifiers().contains(Modifier.PUBLIC) || methodElement.getModifiers().contains(Modifier.STATIC)) {
            return false;
        }
        // Skip methods annotated with TransientDataService.
        List<? extends AnnotationMirror> annotationMirrors = methodElement.getAnnotationMirrors();
        for (AnnotationMirror annotationMirror : annotationMirrors) {
            if (annotationMirror.getAnnotationType().toString().equals(TransientDataService.class.getName())) {
                return false;
            }
        }
        methodProceeds.add(signature);
        return true;
    }
}
public class BeginSegmentImpl {

    /**
     * EMF-generated reflective getter. The SEGNAME feature is answered
     * directly; every other feature id is delegated to the superclass.
     *
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.BEGIN_SEGMENT__SEGNAME:
                return getSEGNAME();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
public class RecordSetsInner {

    /**
     * Lists all record sets in a DNS zone, asynchronously paging through the
     * results and reporting them through the given callback.
     *
     * @param resourceGroupName the name of the resource group
     * @param zoneName the name of the DNS zone (without a terminating dot)
     * @param top the maximum number of record sets to return; if not
     *        specified, returns up to 100 record sets
     * @param recordsetnamesuffix the suffix label used to filter the record
     *        set enumeration; if specified, only records ending with
     *        .&lt;recordSetNameSuffix&gt; are returned
     * @param serviceCallback the async ServiceCallback to handle successful
     *        and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<RecordSetInner>> listByDnsZoneAsync(final String resourceGroupName, final String zoneName, final Integer top, final String recordsetnamesuffix, final ListOperationCallback<RecordSetInner> serviceCallback) {
        // Fetch the first page, then follow nextPageLink for subsequent pages.
        return AzureServiceFuture.fromPageResponse(
            listByDnsZoneSinglePageAsync(resourceGroupName, zoneName, top, recordsetnamesuffix),
            new Func1<String, Observable<ServiceResponse<Page<RecordSetInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<RecordSetInner>>> call(String nextPageLink) {
                    return listByDnsZoneNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
}
public class Uris {
    /**
     * Returns a URI that has been truncated to its directory (i.e. everything up to,
     * and including, the last path segment separator).
     *
     * @param uri    the URI to resolve to the directory level
     * @param strict whether or not to do strict escaping
     * @return the resolved and normalized URI
     * @throws NormalizationException if there was a problem normalizing the URL
     */
    public static URI toDirectory(final URI uri, final boolean strict) throws NormalizationException {
        // extract the raw directory portion of the URI and resolve it back against
        // the original, normalizing in the process
        return resolve(uri, getRawDirectory(uri, strict), strict);
    }
}
public class CreateCoreDefinitionRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( CreateCoreDefinitionRequest createCoreDefinitionRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( createCoreDefinitionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createCoreDefinitionRequest . getAmznClientToken ( ) , AMZNCLIENTTOKEN_BINDING ) ; protocolMarshaller . marshall ( createCoreDefinitionRequest . getInitialVersion ( ) , INITIALVERSION_BINDING ) ; protocolMarshaller . marshall ( createCoreDefinitionRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createCoreDefinitionRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class URLToken {
    /**
     * Sets the current token, causing it to parse the parameters and the tokenId.
     *
     * <p>Parsing is a small state machine over the token characters: the id runs up to the
     * first '&amp;'; afterwards '&amp;'-separated entries are either bare keys (stored with an
     * empty value) or key=value pairs; values wrapped in single quotes may contain '&amp;'.
     *
     * @param completeToken the history token to be parsed, in the form:
     *        <pre>{@code tokenId&param1=value1&param2&param3='complex&value'}</pre>
     */
    public void setToken(String completeToken) {
        // reset previous state before parsing
        clearParameters();
        id = "";
        if (completeToken != null) {
            StringBuilder builder = new StringBuilder();
            char[] chs = completeToken.toCharArray();
            String currentKey = null;
            TokenParseState state = TokenParseState.PARSING_ID;
            for (char ch : chs) {
                switch (state) {
                    case PARSING_ID:
                        // everything before the first '&' is the token id
                        if (ch == '&') {
                            state = TokenParseState.PARSING_KEY;
                            id = builder.toString();
                            builder.delete(0, builder.length());
                        } else {
                            builder.append(ch);
                        }
                        break;
                    case PARSING_KEY:
                        if (ch == '=') {
                            // key finished; value follows
                            state = TokenParseState.PARSING_VALUE;
                            currentKey = builder.toString();
                            if (!currentKey.isEmpty()) {
                                // register the key immediately with an empty value;
                                // it is overwritten once the value is fully read
                                parameters.put(currentKey, "");
                                builder.delete(0, builder.length());
                            }
                        } else if (ch == '&') {
                            // bare key with no '=' — stored with an empty value
                            currentKey = builder.toString();
                            if (!currentKey.isEmpty()) {
                                parameters.put(currentKey, "");
                                builder.delete(0, builder.length());
                            }
                        } else {
                            builder.append(ch);
                        }
                        break;
                    case PARSING_VALUE:
                        if (ch == '\'') {
                            // opening quote: switch to complex-value mode where '&' is literal
                            state = TokenParseState.PARSING_COMPLEX_VALUE;
                        } else if (ch == '&') {
                            // value ends; store it and start the next key
                            state = TokenParseState.PARSING_KEY;
                            parameters.put(currentKey, builder.toString());
                            builder.delete(0, builder.length());
                        } else {
                            builder.append(ch);
                        }
                        break;
                    case PARSING_COMPLEX_VALUE:
                        if (ch == '\'') {
                            // closing quote: store the quoted value
                            state = TokenParseState.PARSED_COMPLEX_VALUE;
                            parameters.put(currentKey, builder.toString());
                            builder.delete(0, builder.length());
                        } else {
                            builder.append(ch);
                        }
                        break;
                    case PARSED_COMPLEX_VALUE:
                        // skip until the separator after a quoted value
                        if (ch == '&') {
                            state = TokenParseState.PARSING_KEY;
                        }
                        break;
                }
            }
            // flush whatever the final state left in the builder
            switch (state) {
                case PARSING_ID:
                    // token had no parameters at all
                    id = builder.toString();
                    break;
                case PARSING_KEY:
                    // trailing bare key
                    currentKey = builder.toString();
                    if (!currentKey.isEmpty()) {
                        parameters.put(currentKey, "");
                    }
                    break;
                case PARSING_COMPLEX_VALUE:
                case PARSING_VALUE:
                    // trailing (possibly unterminated quoted) value
                    String value = builder.toString();
                    if (!value.isEmpty() && currentKey != null) {
                        parameters.put(currentKey, value);
                    }
                    break;
                default:
                    break;
            }
        }
    }
}
public class TransformersLogger { /** * flushes log queue , this actually writes combined log message into system log */
void flushLogQueue ( ) { } } | Set < String > problems = new LinkedHashSet < String > ( ) ; synchronized ( messageQueue ) { Iterator < LogEntry > i = messageQueue . iterator ( ) ; while ( i . hasNext ( ) ) { problems . add ( "\t\t" + i . next ( ) . getMessage ( ) + "\n" ) ; i . remove ( ) ; } } if ( ! problems . isEmpty ( ) ) { logger . transformationWarnings ( target . getHostName ( ) , problems ) ; } |
public class ICalendar { /** * Sets the calendar ' s unique identifier .
* @ param uid the unique identifier or null to remove
* @ return the property object that was created
* @ see < a
* href = " http : / / tools . ietf . org / html / draft - ietf - calext - extensions - 01 # page - 6 " > draft - ietf - calext - extensions - 01
* p . 6 < / a > */
public Uid setUid ( String uid ) { } } | Uid property = ( uid == null ) ? null : new Uid ( uid ) ; setUid ( property ) ; return property ; |
public class CommerceShippingFixedOptionRelServiceBaseImpl {
    /**
     * Sets the commerce shipping fixed option remote service.
     *
     * @param commerceShippingFixedOptionService the commerce shipping fixed option remote service
     */
    public void setCommerceShippingFixedOptionService(com.liferay.commerce.shipping.engine.fixed.service.CommerceShippingFixedOptionService commerceShippingFixedOptionService) {
        // plain dependency injection setter; no validation by design (generated service base)
        this.commerceShippingFixedOptionService = commerceShippingFixedOptionService;
    }
}
public class IntStream { /** * Creates a lazily concatenated stream whose elements are all the
* elements of the first stream followed by all the elements of the
* second stream .
* < p > Example :
* < pre >
* stream a : [ 1 , 2 , 3 , 4]
* stream b : [ 5 , 6]
* result : [ 1 , 2 , 3 , 4 , 5 , 6]
* < / pre >
* @ param a the first stream
* @ param b the second stream
* @ return the concatenation of the two input streams
* @ throws NullPointerException if { @ code a } or { @ code b } is null */
@ NotNull public static IntStream concat ( @ NotNull final IntStream a , @ NotNull final IntStream b ) { } } | Objects . requireNonNull ( a ) ; Objects . requireNonNull ( b ) ; IntStream result = new IntStream ( new IntConcat ( a . iterator , b . iterator ) ) ; return result . onClose ( Compose . closeables ( a , b ) ) ; |
public class ElasticsearchClientUtil { /** * Try to find a client version in the classpath
* @ param config Path to FSCrawler configuration files ( elasticsearch templates )
* @ param settings FSCrawler settings . Can not be null .
* @ param version Version to load
* @ return A Client instance */
public static ElasticsearchClient getInstance ( Path config , FsSettings settings , int version ) throws ClassNotFoundException { } } | Objects . requireNonNull ( settings , "settings can not be null" ) ; Class < ElasticsearchClient > clazz = null ; try { clazz = findClass ( version ) ; } catch ( ClassNotFoundException e ) { logger . trace ( "ElasticsearchClient class not found for version {} in the classpath. Skipping..." , version ) ; } if ( clazz == null ) { throw new ClassNotFoundException ( "Can not find any ElasticsearchClient in the classpath. " + "Did you forget to add the elasticsearch client library?" ) ; } logger . trace ( "Found [{}] class as the elasticsearch client implementation." , clazz . getName ( ) ) ; try { Constructor < ? extends ElasticsearchClient > constructor = clazz . getConstructor ( Path . class , FsSettings . class ) ; return constructor . newInstance ( config , settings ) ; } catch ( NoSuchMethodException e ) { throw new IllegalArgumentException ( "Class " + clazz . getName ( ) + " does not have the expected ctor (Path, FsSettings)." , e ) ; } catch ( IllegalAccessException | InstantiationException | InvocationTargetException e ) { throw new IllegalArgumentException ( "Can not create an instance of " + clazz . getName ( ) , e ) ; } |
public class AtomSymbol { /** * Convenience function to center an atom symbol on a specified point . The
* centering depends on the symbol alignment .
* @ param x x - axis location
* @ param y y - axis location
* @ return the centered symbol ( new instance ) */
AtomSymbol center ( double x , double y ) { } } | Point2D center = getAlignmentCenter ( ) ; return translate ( x - center . getX ( ) , y - center . getY ( ) ) ; |
public class LocalUnitsManager { /** * Get local cached unit singleton .
* the same as { @ link # getLocalUnit ( String ) } */
public static Unit getLocalUnit ( String groupName , String unitName ) { } } | return getLocalUnit ( Unit . fullName ( groupName , unitName ) ) ; |
public class ListCompilationJobsResult { /** * An array of < a > CompilationJobSummary < / a > objects , each describing a model compilation job .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setCompilationJobSummaries ( java . util . Collection ) } or
* { @ link # withCompilationJobSummaries ( java . util . Collection ) } if you want to override the existing values .
* @ param compilationJobSummaries
* An array of < a > CompilationJobSummary < / a > objects , each describing a model compilation job .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListCompilationJobsResult withCompilationJobSummaries ( CompilationJobSummary ... compilationJobSummaries ) { } } | if ( this . compilationJobSummaries == null ) { setCompilationJobSummaries ( new java . util . ArrayList < CompilationJobSummary > ( compilationJobSummaries . length ) ) ; } for ( CompilationJobSummary ele : compilationJobSummaries ) { this . compilationJobSummaries . add ( ele ) ; } return this ; |
public class Router { /** * Specify a middleware that will be called for a matching HTTP POST
* @ param regex A regular expression
* @ param handlers The middleware to call */
public Router post ( @ NotNull final Pattern regex , @ NotNull final IMiddleware ... handlers ) { } } | addRegEx ( "POST" , regex , handlers , postBindings ) ; return this ; |
public class AbstractQueryProtocol {
    /**
     * Reads an OK_Packet from the server and records its statistics on the results object.
     *
     * <p>The reads below follow the OK_Packet wire layout exactly, so their order must
     * not change: header byte, affected-row count, last insert id, server status,
     * warning count, then optional session-state info.
     *
     * @param buffer  current buffer positioned at the start of the packet
     * @param results result object receiving the update statistics
     * @see <a href="https://mariadb.com/kb/en/mariadb/ok_packet/">OK_Packet</a>
     */
    private void readOkPacket(Buffer buffer, Results results) {
        buffer.skipByte(); // fieldCount
        // length-encoded: affected rows, then the last auto-generated insert id
        final long updateCount = buffer.getLengthEncodedNumeric();
        final long insertId = buffer.getLengthEncodedNumeric();
        serverStatus = buffer.readShort();
        // non-zero warning count flags the connection as having warnings
        hasWarnings = (buffer.readShort() > 0);
        // session-state info is only present when the server flagged a state change
        if ((serverStatus & ServerStatus.SERVER_SESSION_STATE_CHANGED) != 0) {
            handleStateChange(buffer, results);
        }
        results.addStats(updateCount, insertId, hasMoreResults());
    }
}
public class AuthCallsCredentialListMapping { /** * Create a AuthCallsCredentialListMappingDeleter to execute delete .
* @ param pathAccountSid The SID of the Account that created the resources to
* delete
* @ param pathDomainSid The SID of the SIP domain that contains the resource to
* delete
* @ param pathSid The unique string that identifies the resource
* @ return AuthCallsCredentialListMappingDeleter capable of executing the delete */
public static AuthCallsCredentialListMappingDeleter deleter ( final String pathAccountSid , final String pathDomainSid , final String pathSid ) { } } | return new AuthCallsCredentialListMappingDeleter ( pathAccountSid , pathDomainSid , pathSid ) ; |
public class OpenshiftAdapterSupport { /** * Checks if a custom URL for OpenShift has been used .
* @ param config The openshift configuration .
* @ return True if both master and openshift url have the same root . */
static boolean hasCustomOpenShiftUrl ( OpenShiftConfig config ) { } } | try { URI masterUri = new URI ( config . getMasterUrl ( ) ) . resolve ( "/" ) ; URI openshfitUri = new URI ( config . getOpenShiftUrl ( ) ) . resolve ( "/" ) ; return ! masterUri . equals ( openshfitUri ) ; } catch ( Exception e ) { throw KubernetesClientException . launderThrowable ( e ) ; } |
public class VisibleBufferedInputStream {
    /**
     * Reads more bytes into the buffer, compacting or growing it if the requested
     * amount would not otherwise fit after the current end index.
     *
     * @param wanted how much should be at least read
     * @return true if at least some bytes were read
     * @throws IOException if reading of the wrapped stream failed
     */
    private boolean readMore(int wanted) throws IOException {
        // buffer fully consumed: reset both cursors to reuse it from the start
        if (endIndex == index) {
            index = 0;
            endIndex = 0;
        }
        int canFit = buffer.length - endIndex;
        if (canFit < wanted) {
            // would the wanted bytes fit if we compacted the buffer
            // and still leave some slack
            if (index + canFit > wanted + MINIMUM_READ) {
                compact();
            } else {
                doubleBuffer();
            }
            // recompute after compacting/growing changed endIndex and/or buffer.length
            canFit = buffer.length - endIndex;
        }
        int read = wrapped.read(buffer, endIndex, canFit);
        if (read < 0) {
            // end of underlying stream: nothing was read
            return false;
        }
        endIndex += read;
        return true;
    }
}
public class TypeUtils { /** * Checks if two parameterized types are exactly equal , under the variable
* replacement described in the typeVarMap . */
private static boolean typeEquals ( ParameterizedType from , ParameterizedType to , Map < String , Type > typeVarMap ) { } } | if ( from . getRawType ( ) . equals ( to . getRawType ( ) ) ) { Type [ ] fromArgs = from . getActualTypeArguments ( ) ; Type [ ] toArgs = to . getActualTypeArguments ( ) ; for ( int i = 0 ; i < fromArgs . length ; i ++ ) { if ( ! matches ( fromArgs [ i ] , toArgs [ i ] , typeVarMap ) ) { return false ; } } return true ; } return false ; |
public class Graphics { /** * Informs the graphics system that the main framebuffer size has changed . The supplied size
* should be in physical pixels . */
protected void viewportChanged ( int pixelWidth , int pixelHeight ) { } } | viewPixelWidth = pixelWidth ; viewPixelHeight = pixelHeight ; viewSizeM . width = scale . invScaled ( pixelWidth ) ; viewSizeM . height = scale . invScaled ( pixelHeight ) ; plat . log ( ) . info ( "viewPortChanged " + pixelWidth + "x" + pixelHeight + " / " + scale . factor + " -> " + viewSize ) ; |
public class CmsTreeItem { /** * Returns the tree item with the given id . < p >
* @ param itemId the id of the item to retrieve
* @ return the tree item
* @ see org . opencms . gwt . client . ui . CmsList # getItem ( String ) */
public CmsTreeItem getChild ( String itemId ) { } } | CmsTreeItem result = m_children . getItem ( itemId ) ; return result ; |
public class Bucket { /** * Get < tt > size < / tt > number of files , starting at < tt > start < / tt > . Only get files containing the query . Leave the
* query empty to get all files .
* @ param query the query to filter for
* @ param limit the limit to apply
* @ return all files which contain the query */
public List < StoredObject > getObjects ( @ Nonnull String query , Limit limit ) { } } | try ( Stream < Path > stream = Files . list ( file . toPath ( ) ) ) { return stream . map ( Path :: toFile ) . filter ( currentFile -> isMatchingObject ( query , currentFile ) ) . filter ( limit . asPredicate ( ) ) . map ( StoredObject :: new ) . collect ( Collectors . toList ( ) ) ; } catch ( IOException e ) { throw Exceptions . handle ( e ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.