signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcCartesianTransformationOperator2D ( ) { } }
|
if ( ifcCartesianTransformationOperator2DEClass == null ) { ifcCartesianTransformationOperator2DEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 82 ) ; } return ifcCartesianTransformationOperator2DEClass ;
|
public class SymmetryHttpMessageConverter { private static Charset getCharset ( HttpOutputMessage outputMessage ) { } }
|
Charset charset = outputMessage . getHeaders ( ) . getContentType ( ) . getCharSet ( ) ; if ( charset == null ) { charset = DEFAULT_CHARSET ; } return charset ;
|
public class ConsumerSessionImpl { /** * Adds the bifurcated consumer to the list of associated consumers .
* @ param consumer */
protected void attachBifurcatedConsumer ( BifurcatedConsumerSessionImpl consumer ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "attachBifurcatedConsumer" , consumer ) ; // Create a bifurcated list if required
if ( _bifurcatedConsumers == null ) { synchronized ( this ) { if ( _bifurcatedConsumers == null ) _bifurcatedConsumers = new LinkedList < BifurcatedConsumerSessionImpl > ( ) ; } } synchronized ( _bifurcatedConsumers ) { _bifurcatedConsumers . add ( consumer ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "attachBifurcatedConsumer" ) ;
|
public class CollationIterator { /** * Returns the next code point and its local CE32 value .
* Returns Collation . FALLBACK _ CE32 at the end of the text ( c < 0)
* or when c ' s CE32 value is to be looked up in the base data ( fallback ) .
* The code point is used for fallbacks , context and implicit weights .
* It is ignored when the returned CE32 is not special ( e . g . , FFFD _ CE32 ) .
* Returns the code point in bits 63 . . 32 ( signed ) and the CE32 in bits 31 . . 0. */
protected long handleNextCE32 ( ) { } }
|
int c = nextCodePoint ( ) ; if ( c < 0 ) { return NO_CP_AND_CE32 ; } return makeCodePointAndCE32Pair ( c , data . getCE32 ( c ) ) ;
|
public class UniqueCountMap { /** * Returns total bytes used by all internal maps
* @ return total bytes used by all internal maps */
public long getMemoryUsageBytes ( ) { } }
|
long total = 0 ; for ( int i = 0 ; i < maps_ . length ; i ++ ) { if ( maps_ [ i ] != null ) { total += maps_ [ i ] . getMemoryUsageBytes ( ) ; } } return total ;
|
public class CmsSelectWidgetOption { /** * Returns a list of default options from the given list of select options . < p >
* If an element found in the given list is not of type
* < code > { @ link CmsSelectWidgetOption } < / code > , this is ignored . < p >
* @ param options the list of select options to get the default from
* @ return a list of < code > { @ link CmsSelectWidgetOption } < / code > objects */
public static List < CmsSelectWidgetOption > getDefaultOptions ( List < CmsSelectWidgetOption > options ) { } }
|
List < CmsSelectWidgetOption > defaults = new ArrayList < CmsSelectWidgetOption > ( ) ; if ( ( options == null ) || ( options . size ( ) == 0 ) ) { return defaults ; } for ( int i = 0 ; i < options . size ( ) ; i ++ ) { Object o = options . get ( i ) ; if ( o instanceof CmsSelectWidgetOption ) { CmsSelectWidgetOption option = ( CmsSelectWidgetOption ) o ; if ( option . isDefault ( ) ) { defaults . add ( option ) ; } } } return defaults ;
|
public class Expression { /** * Return the first node out of the nodeset , if this expression is
* a nodeset expression .
* @ param xctxt The XPath runtime context .
* @ return the first node out of the nodeset , or DTM . NULL .
* @ throws javax . xml . transform . TransformerException */
public int asNode ( XPathContext xctxt ) throws javax . xml . transform . TransformerException { } }
|
DTMIterator iter = execute ( xctxt ) . iter ( ) ; return iter . nextNode ( ) ;
|
public class MavenHelpers { /** * Updates the given maven property value if value is not null and returns true if the pom has been changed
* @ return true if the value changed and was non null or updated was true */
public static boolean updatePomProperty ( Properties properties , String name , Object value , boolean updated ) { } }
|
if ( value != null ) { Object oldValue = properties . get ( name ) ; if ( ! Objects . equal ( oldValue , value ) ) { getLOG ( ) . debug ( "Updating pom.xml property: " + name + " to " + value ) ; properties . put ( name , value ) ; return true ; } } return updated ;
|
public class GenMapAndTopicListModule { /** * Handle topic which are only conref sources from normal processing . */
private void handleConref ( ) { } }
|
// Get pure conref targets
final Set < URI > pureConrefTargets = new HashSet < > ( 128 ) ; for ( final URI target : conrefTargetSet ) { if ( ! nonConrefCopytoTargetSet . contains ( target ) ) { pureConrefTargets . add ( target ) ; } } conrefTargetSet = pureConrefTargets ; // Remove pure conref targets from fullTopicSet
fullTopicSet . removeAll ( pureConrefTargets ) ; // Treat pure conref targets same as resource - only
resourceOnlySet . addAll ( pureConrefTargets ) ;
|
public class LocalThreadObjectPool { /** * Gets an array of Objects from the pool . This method will return a varying
* number
* of elements based on the number of available objects ( it will always leave
* at
* least 1/2 the minimum pool size ) and the batch size configured for the
* pool .
* @ return Object [ ] The arrray of objects removed from the pool . */
protected Object [ ] getBatch ( ) { } }
|
Object [ ] objectArray = new Object [ batchSize ] ; objectArray [ 0 ] = get ( ) ; // always attempt to get at least 1 , get ( ) could
// return null
// if no factory was defined when instantiated .
int numElements = getCurrentNumElements ( ) ; for ( int i = 1 ; i <= numElements && i < batchSize ; i ++ ) { objectArray [ i ] = get ( ) ; } return objectArray ;
|
public class StringToTranscriptEffect { /** * Split the specified string into a list of effects .
* @ param s string to split
* @ return the specified string split into a list of effects */
List < String > splitEffects ( final String s ) { } }
|
return Splitter . on ( "&" ) . omitEmptyStrings ( ) . splitToList ( s ) ;
|
public class JdbcCpoXaAdapter { /** * Retrieves the bean from the datasource . The assumption is that the bean exists in the datasource .
* @ param name The filter name which tells the datasource which beans should be returned . The name also signifies what
* data in the bean will be populated .
* @ param criteria This is an bean that has been defined within the metadata of the datasource . If the class is not
* defined an exception will be thrown . If the bean does not exist in the datasource , an exception will be thrown .
* This bean is used to specify the parameters used to retrieve the collection of beans .
* @ return A collection of beans will be returned that meet the criteria specified by obj . The beans will be of the
* same type as the bean that was passed in . If no beans match the criteria , an empty collection will be returned
* @ throws CpoException Thrown if there are errors accessing the datasource */
@ Override public < C > List < C > retrieveBeans ( String name , C criteria ) throws CpoException { } }
|
return getCurrentResource ( ) . retrieveBeans ( name , criteria ) ;
|
public class Parameters { /** * Set a float value to the query parameter referenced by the given name . A query parameter
* is defined by using the Expression ' s parameter ( String name ) function .
* @ param name The parameter name .
* @ param value The float value .
* @ return The self object . */
@ NonNull public Parameters setFloat ( @ NonNull String name , float value ) { } }
|
return setValue ( name , value ) ;
|
public class ThumborUrlBuilder { /** * This filter permit to return an image sized exactly as requested wherever is its ratio by
* filling with chosen color the missing parts . Usually used with " fit - in " or " adaptive - fit - in "
* @ param color integer representation of color . */
public static String fill ( int color ) { } }
|
final String colorCode = Integer . toHexString ( color & 0xFFFFFF ) ; // Strip alpha
return FILTER_FILL + "(" + colorCode + ")" ;
|
public class ZipLoader { /** * Use this call to load a zip file using the
* { @ link ZipLoader } and apply the { @ link ZipEntryProcessor } to each entry . The result is a
* list of all processed entries obtained from the zip file .
* @ param gvrContext the GVRf context
* @ param zipFileName the name of the zip file . This must be a file in the assets folder .
* @ param processor the { @ link ZipEntryProcessor } to be applied to each zip entry in the file .
* @ return a list of processed zip file entries .
* @ throws IOException this function returns an { @ link IOException } if there are issues
* processing the provided zip file . */
public static < T > List < T > load ( GVRContext gvrContext , String zipFileName , ZipEntryProcessor < T > processor ) throws IOException { } }
|
Context context = gvrContext . getContext ( ) ; InputStream inputStream = context . getAssets ( ) . open ( zipFileName ) ; ZipInputStream zipInputStream = new ZipInputStream ( inputStream ) ; List < T > result = new ArrayList < T > ( ) ; try { ZipEntry zipEntry ; while ( ( zipEntry = zipInputStream . getNextEntry ( ) ) != null ) { ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; byte [ ] buffer = new byte [ 1024 ] ; int count ; while ( ( count = zipInputStream . read ( buffer ) ) != - 1 ) { baos . write ( buffer , 0 , count ) ; } byte [ ] bytes = baos . toByteArray ( ) ; InputStream resourceInputStream = new ByteArrayInputStream ( bytes ) ; GVRAndroidResource androidResource = new GVRAndroidResource ( zipEntry . getName ( ) , resourceInputStream ) ; T item = processor . getItem ( gvrContext , androidResource ) ; result . add ( item ) ; } } finally { zipInputStream . close ( ) ; } return result ;
|
public class KvStateLocation { /** * Registers a KvState instance for the given key group index .
* @ param keyGroupRange Key group range to unregister .
* @ throws IndexOutOfBoundsException If key group range start < 0 or key group range end > = Number of key groups
* @ throws IllegalArgumentException If no location information registered for a key group index in the range . */
void unregisterKvState ( KeyGroupRange keyGroupRange ) { } }
|
if ( keyGroupRange . getStartKeyGroup ( ) < 0 || keyGroupRange . getEndKeyGroup ( ) >= numKeyGroups ) { throw new IndexOutOfBoundsException ( "Key group index" ) ; } for ( int kgIdx = keyGroupRange . getStartKeyGroup ( ) ; kgIdx <= keyGroupRange . getEndKeyGroup ( ) ; ++ kgIdx ) { if ( kvStateIds [ kgIdx ] == null || kvStateAddresses [ kgIdx ] == null ) { throw new IllegalArgumentException ( "Not registered. Probably registration/unregistration race." ) ; } numRegisteredKeyGroups -- ; kvStateIds [ kgIdx ] = null ; kvStateAddresses [ kgIdx ] = null ; }
|
public class AcceptReservedInstancesExchangeQuoteRequest { /** * The configuration of the target Convertible Reserved Instance to exchange for your current Convertible Reserved
* Instances .
* @ param targetConfigurations
* The configuration of the target Convertible Reserved Instance to exchange for your current Convertible
* Reserved Instances . */
public void setTargetConfigurations ( java . util . Collection < TargetConfigurationRequest > targetConfigurations ) { } }
|
if ( targetConfigurations == null ) { this . targetConfigurations = null ; return ; } this . targetConfigurations = new com . amazonaws . internal . SdkInternalList < TargetConfigurationRequest > ( targetConfigurations ) ;
|
public class PolicyRestrictor { /** * { @ inheritDoc } */
public boolean isAttributeWriteAllowed ( ObjectName pName , String pAttribute ) { } }
|
return check ( RequestType . WRITE , pName , pAttribute ) ;
|
public class ResponseCreationSupport { /** * Create a { @ link URI } from a { @ link URL } . This is similar to calling
* ` url . toURI ( ) ` with the { @ link URISyntaxException }
* converted to a { @ link IllegalArgumentException } .
* @ param url the url
* @ return the uri
* @ throws IllegalArgumentException if the url violates RFC 2396 */
public static URI uriFromUrl ( URL url ) throws IllegalArgumentException { } }
|
try { return url . toURI ( ) ; } catch ( URISyntaxException e ) { throw new IllegalArgumentException ( e ) ; }
|
public class RocksUtils { /** * Generates a path to use for a RocksDB database .
* @ param baseDir the base directory path
* @ param dbName a name for the database
* @ return the generated database path */
public static String generateDbPath ( String baseDir , String dbName ) { } }
|
return PathUtils . concatPath ( baseDir , dbName ) ;
|
public class RulesController { /** * Ensure scheme has a double slash . eg replaces " http : / blah " with " http : / / blah " */
private String fixupSchemeSlashes ( String surt ) { } }
|
if ( surt . indexOf ( ":/(" ) == surt . indexOf ( ":" ) ) { int i = surt . indexOf ( ":" ) ; surt = surt . substring ( 0 , i + 1 ) + "/" + surt . substring ( i + 1 ) ; } return surt ;
|
public class FormatTables { /** * Retrieves a variants map for a given format name .
* @ param name
* The format name
* @ return a mutable map of variants */
public static Map < String , Format > get ( String name ) { } }
|
return instance ( ) . methods . get ( name ) ;
|
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getCTC ( ) { } }
|
if ( ctcEClass == null ) { ctcEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 231 ) ; } return ctcEClass ;
|
public class HTTPAdminListener { /** * Load a template for the admin page , fill it out and return the value .
* @ param params The key - value set of variables to replace in the template .
* @ return The completed template . */
String getHTMLForAdminPage ( Map < String , String > params ) { } }
|
try { String template = m_htmlTemplates . get ( "admintemplate.html" ) ; for ( Entry < String , String > e : params . entrySet ( ) ) { String key = e . getKey ( ) . toUpperCase ( ) ; String value = e . getValue ( ) ; if ( key == null ) continue ; if ( value == null ) value = "NULL" ; template = template . replace ( "#" + key + "#" , value ) ; } return template ; } catch ( Exception e ) { e . printStackTrace ( ) ; } return "<html><body>An unrecoverable error was encountered while generating this page.</body></html>" ;
|
public class SVBar { /** * Set the bar color . < br >
* < br >
* Its discouraged to use this method .
* @ param color */
public void setColor ( int color ) { } }
|
int x1 , y1 ; if ( mOrientation ) { x1 = ( mBarLength + mBarPointerHaloRadius ) ; y1 = mBarThickness ; } else { x1 = mBarThickness ; y1 = ( mBarLength + mBarPointerHaloRadius ) ; } Color . colorToHSV ( color , mHSVColor ) ; shader = new LinearGradient ( mBarPointerHaloRadius , 0 , x1 , y1 , new int [ ] { Color . WHITE , color , Color . BLACK } , null , Shader . TileMode . CLAMP ) ; mBarPaint . setShader ( shader ) ; calculateColor ( mBarPointerPosition ) ; mBarPointerPaint . setColor ( mColor ) ; if ( mPicker != null ) { mPicker . setNewCenterColor ( mColor ) ; if ( mPicker . hasOpacityBar ( ) ) mPicker . changeOpacityBarColor ( mColor ) ; } invalidate ( ) ;
|
public class AdminToolUtils { /** * Utility function that constructs AdminClient .
* @ param url URL pointing to the bootstrap node
* @ return Newly constructed AdminClient */
public static AdminClient getAdminClient ( String url ) { } }
|
ClientConfig config = new ClientConfig ( ) . setBootstrapUrls ( url ) . setConnectionTimeout ( 5 , TimeUnit . SECONDS ) ; AdminClientConfig adminConfig = new AdminClientConfig ( ) . setAdminSocketTimeoutSec ( 5 ) ; return new AdminClient ( adminConfig , config ) ;
|
public class Jenkins { /** * Accepts submission from the node configuration page . */
@ RequirePOST public synchronized void doConfigExecutorsSubmit ( StaplerRequest req , StaplerResponse rsp ) throws IOException , ServletException , FormException { } }
|
checkPermission ( ADMINISTER ) ; BulkChange bc = new BulkChange ( this ) ; try { JSONObject json = req . getSubmittedForm ( ) ; ExtensionList . lookupSingleton ( MasterBuildConfiguration . class ) . configure ( req , json ) ; getNodeProperties ( ) . rebuild ( req , json . optJSONObject ( "nodeProperties" ) , NodeProperty . all ( ) ) ; } finally { bc . commit ( ) ; } updateComputerList ( ) ; rsp . sendRedirect ( req . getContextPath ( ) + '/' + toComputer ( ) . getUrl ( ) ) ; // back to the computer page
|
public class Controller { /** * Returns the Controller with the given instance id or { @ code null } if no such Controller
* exists . May return the Controller itself or a matching descendant
* @ param instanceId The instance ID being searched for */
@ Nullable final Controller findController ( @ NonNull String instanceId ) { } }
|
if ( this . instanceId . equals ( instanceId ) ) { return this ; } for ( Router router : childRouters ) { Controller matchingChild = router . getControllerWithInstanceId ( instanceId ) ; if ( matchingChild != null ) { return matchingChild ; } } return null ;
|
public class PortComponentType { /** * { @ inheritDoc } */
@ Override public QName getWSDLService ( ) { } }
|
if ( wsdl_service != null ) { return new QName ( wsdl_service . getNamespaceURI ( ) , wsdl_service . getLocalPart ( ) ) ; } else return null ;
|
public class BlobStore { /** * Wrapper around readBlobTo which
* returns a ByteArray output stream .
* @ param key Key for the blob .
* the read privilege for the blob .
* @ return ByteArrayOutputStream
* @ throws IOException
* @ throws KeyNotFoundException */
public byte [ ] readBlob ( String key ) throws IOException , KeyNotFoundException { } }
|
ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; readBlobTo ( key , out ) ; byte [ ] bytes = out . toByteArray ( ) ; out . close ( ) ; return bytes ;
|
public class MetricCollectorSupport { /** * Stops this collector immediately , dropping all pending metrics in memory . */
@ Override public boolean stop ( ) { } }
|
synchronized ( MetricCollectorSupport . class ) { if ( uploaderThread != null ) { uploaderThread . cancel ( ) ; uploaderThread . interrupt ( ) ; uploaderThread = null ; if ( singleton == this ) { // defensive check
singleton = null ; } return true ; } } return false ;
|
public class XmlParser { /** * Parse File . */
public synchronized Node parse ( File file ) throws IOException , SAXException { } }
|
if ( log . isDebugEnabled ( ) ) log . debug ( "parse: " + file ) ; return parse ( new InputSource ( file . toURL ( ) . toString ( ) ) ) ;
|
public class SortedArrayList { /** * Searches for the first occurrence of the given argument , testing
* for equality using the < tt > equals < / tt > method .
* @ param elem an object .
* @ return the index of the first occurrence of the argument in this
* list ; returns < tt > - 1 < / tt > if the object is not found .
* @ see Object # equals ( Object ) */
@ Override public int indexOf ( Object elem ) { } }
|
int elemHash = elem . hashCode ( ) ; // Find the location to insert the object at
int elemHashPos = binarySearchHashCode ( elemHash ) ; // Not found
if ( elemHashPos < 0 ) return - 1 ; // Try backwards until found or different hashCode
int pos = elemHashPos ; while ( pos >= 0 ) { E T = get ( pos ) ; if ( T . hashCode ( ) != elemHash ) break ; if ( T . equals ( elem ) ) { // Found one , iterate backwards to the first one
while ( pos > 0 && get ( pos - 1 ) . equals ( elem ) ) pos -- ; return pos ; } pos -- ; } // Try forwards until found or different hashCode
pos = elemHashPos + 1 ; int size = size ( ) ; while ( pos < size ) { E T = get ( pos ) ; if ( T . hashCode ( ) != elemHash ) break ; if ( T . equals ( elem ) ) return pos ; pos ++ ; } // Not found
return - 1 ;
|
public class ConsumerDispatcher { /** * Checks to see if another update has occured to the ReceiveAllowedState .
* If it has it returns false to indicate that another run is required of the
* thread , otherwise it deletes this instance of the receiveAllowedThread by setting
* the reference to null */
private synchronized boolean deleteReceiveAllowedThread ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "deleteReceiveAllowedThread" ) ; if ( _receiveAllowedThread . isMarkedForUpdate ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deleteReceiveAllowedThread" , Boolean . FALSE ) ; return false ; } _receiveAllowedThread = null ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deleteReceiveAllowedThread" , Boolean . TRUE ) ; return true ;
|
public class Pays { /** * JS支付 ( 公众号支付 )
* @ param request 支付请求对象
* @ return JsPayResponse对象 , 或抛WepayException */
public JsPayResponse jsPay ( JsPayRequest request ) { } }
|
checkJsPayParams ( request ) ; Map < String , Object > respData = doJsPay ( request , TradeType . JSAPI ) ; return buildJsPayResp ( respData ) ;
|
public class SqlDocument { /** * Override for other languages */
protected boolean isQuoteDelimiter ( String character ) { } }
|
String quoteDelimiters = "\"'" ; if ( quoteDelimiters . indexOf ( character ) < 0 ) return false ; else return true ;
|
public class RequestUtil { /** * Unrafels a cookie string containing a username and password .
* @ param value
* The cookie value .
* @ return String [ ] containing the username at index 0 and the password at
* index 1 , or < code > { null , null } < / code > if cookieVal equals
* < code > null < / code > or the empty string . */
private static String [ ] decodePasswordCookie ( String cookieVal ) { } }
|
// check that the cookie value isn ' t null or zero - length
if ( cookieVal == null || cookieVal . length ( ) <= 0 ) { return null ; } // unrafel the cookie value
char [ ] chars = cookieVal . toCharArray ( ) ; byte [ ] bytes = new byte [ chars . length / 2 ] ; int b ; for ( int n = 0 , m = 0 ; n < bytes . length ; n ++ ) { b = chars [ m ++ ] - ENCODE_CHAR_OFFSET1 ; b |= ( chars [ m ++ ] - ENCODE_CHAR_OFFSET2 ) << 4 ; bytes [ n ] = ( byte ) ( b ^ ( ENCODE_XORMASK + n ) ) ; } cookieVal = new String ( bytes ) ; int pos = cookieVal . indexOf ( ENCODE_DELIMETER ) ; String username = ( pos < 0 ) ? "" : cookieVal . substring ( 0 , pos ) ; String password = ( pos < 0 ) ? "" : cookieVal . substring ( pos + 1 ) ; return new String [ ] { username , password } ;
|
public class CmsDefaultAuthorizationHandler { /** * Checks if the current request contains HTTP basic authentication information in
* the headers , if so the user is tried to log in with this data , and on success a
* session is generated . < p >
* @ param req the current HTTP request
* @ return the authenticated cms object , or < code > null < / code > if failed */
protected CmsObject checkBasicAuthorization ( HttpServletRequest req ) { } }
|
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Checking for basic authorization." ) ; } try { CmsObject cms = OpenCms . initCmsObject ( OpenCms . getDefaultUsers ( ) . getUserGuest ( ) ) ; if ( OpenCms . getSystemInfo ( ) . getHttpAuthenticationSettings ( ) . getBrowserBasedAuthenticationMechanism ( ) == null ) { // browser base authorization is not enabled , return Guest user CmsObject
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Browser based authorization not enabled." ) ; } return cms ; } // no user identified from the session and basic authentication is enabled
String auth = req . getHeader ( HEADER_AUTHORIZATION ) ; if ( ( auth == null ) || ! auth . toUpperCase ( ) . startsWith ( AUTHORIZATION_BASIC_PREFIX ) ) { // no authorization data is available
return cms ; } // get encoded user and password , following after " BASIC "
String base64Token = auth . substring ( 6 ) ; // decode it , using base 64 decoder
String token = new String ( Base64 . decodeBase64 ( base64Token . getBytes ( ) ) ) ; String username = null ; String password = null ; int pos = token . indexOf ( SEPARATOR_CREDENTIALS ) ; if ( pos != - 1 ) { username = token . substring ( 0 , pos ) ; password = token . substring ( pos + 1 ) ; } // authentication in the DB
cms . loginUser ( username , password ) ; // authorization was successful create a session
req . getSession ( true ) ; return cms ; } catch ( CmsException e ) { // authorization failed
return null ; }
|
public class ZonedDateTime { /** * Obtains an instance of { @ code ZonedDateTime } using seconds from the
* epoch of 1970-01-01T00:00:00Z .
* @ param epochSecond the number of seconds from the epoch of 1970-01-01T00:00:00Z
* @ param nanoOfSecond the nanosecond within the second , from 0 to 999,999,999
* @ param zone the time - zone , not null
* @ return the zoned date - time , not null
* @ throws DateTimeException if the result exceeds the supported range */
private static ZonedDateTime create ( long epochSecond , int nanoOfSecond , ZoneId zone ) { } }
|
ZoneRules rules = zone . getRules ( ) ; Instant instant = Instant . ofEpochSecond ( epochSecond , nanoOfSecond ) ; // TODO : rules should be queryable by epochSeconds
ZoneOffset offset = rules . getOffset ( instant ) ; LocalDateTime ldt = LocalDateTime . ofEpochSecond ( epochSecond , nanoOfSecond , offset ) ; return new ZonedDateTime ( ldt , offset , zone ) ;
|
public class CompactionAuditCountVerifier { /** * Verify a specific dataset by following below steps
* 1 ) Retrieve a tier - to - count mapping
* 2 ) Read count from { @ link CompactionAuditCountVerifier # gobblinTier }
* 3 ) Read count from all other { @ link CompactionAuditCountVerifier # referenceTiers }
* 4 ) Compare count retrieved from steps 2 ) and 3 ) , if any of ( gobblin / refenence ) > = threshold , return true , else return false
* @ param dataset Dataset needs to be verified
* @ return If verification is succeeded */
public Result verify ( FileSystemDataset dataset ) { } }
|
if ( auditCountClient == null ) { log . debug ( "No audit count client specified, skipped" ) ; return new Result ( true , "" ) ; } CompactionPathParser . CompactionParserResult result = new CompactionPathParser ( this . state ) . parse ( dataset ) ; DateTime startTime = result . getTime ( ) ; DateTime endTime = startTime . plusHours ( 1 ) ; String datasetName = result . getDatasetName ( ) ; try { Map < String , Long > countsByTier = auditCountClient . fetch ( datasetName , startTime . getMillis ( ) , endTime . getMillis ( ) ) ; for ( String tier : referenceTiers ) { Result rst = passed ( datasetName , countsByTier , tier ) ; if ( rst . isSuccessful ( ) ) { return new Result ( true , "" ) ; } } } catch ( IOException e ) { return new Result ( false , ExceptionUtils . getFullStackTrace ( e ) ) ; } return new Result ( false , String . format ( "%s data is not complete between %s and %s" , datasetName , startTime , endTime ) ) ;
|
public class Validate { /** * < p > Validate that the specified argument character sequence matches the specified regular
* expression pattern ; otherwise throwing an exception . < / p >
* < pre > Validate . matchesPattern ( " hi " , " [ a - z ] * " ) ; < / pre >
* < p > The syntax of the pattern is the one used in the { @ link Pattern } class . < / p >
* @ param input the character sequence to validate , not null
* @ param pattern the regular expression pattern , not null
* @ throws IllegalArgumentException if the character sequence does not match the pattern
* @ see # matchesPattern ( CharSequence , String , String , Object . . . )
* @ since 3.0 */
public static void matchesPattern ( final CharSequence input , final String pattern ) { } }
|
// TODO when breaking BC , consider returning input
if ( input == null || ! input . toString ( ) . matches ( pattern ) ) { throw new IllegalArgumentException ( StringUtils . simpleFormat ( DEFAULT_MATCHES_PATTERN_EX , input , pattern ) ) ; }
|
public class AmazonElastiCacheClient { /** * Makes a copy of an existing snapshot .
* < note >
* This operation is valid for Redis only .
* < / note > < important >
* Users or groups that have permissions to use the < code > CopySnapshot < / code > operation can create their own Amazon
* S3 buckets and copy snapshots to it . To control access to your snapshots , use an IAM policy to control who has
* the ability to use the < code > CopySnapshot < / code > operation . For more information about using IAM to control the
* use of ElastiCache operations , see < a
* href = " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html " > Exporting
* Snapshots < / a > and < a href = " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / IAM . html " > Authentication
* & amp ; Access Control < / a > .
* < / important >
* You could receive the following error messages .
* < p class = " title " >
* < b > Error Messages < / b >
* < ul >
* < li >
* < b > Error Message : < / b > The S3 bucket % s is outside of the region .
* < b > Solution : < / b > Create an Amazon S3 bucket in the same region as your snapshot . For more information , see < a
* href =
* " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html # Snapshots . Exporting . CreateBucket "
* > Step 1 : Create an Amazon S3 Bucket < / a > in the ElastiCache User Guide .
* < / li >
* < li >
* < b > Error Message : < / b > The S3 bucket % s does not exist .
* < b > Solution : < / b > Create an Amazon S3 bucket in the same region as your snapshot . For more information , see < a
* href =
* " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html # Snapshots . Exporting . CreateBucket "
* > Step 1 : Create an Amazon S3 Bucket < / a > in the ElastiCache User Guide .
* < / li >
* < li >
* < b > Error Message : < / b > The S3 bucket % s is not owned by the authenticated user .
* < b > Solution : < / b > Create an Amazon S3 bucket in the same region as your snapshot . For more information , see < a
* href =
* " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html # Snapshots . Exporting . CreateBucket "
* > Step 1 : Create an Amazon S3 Bucket < / a > in the ElastiCache User Guide .
* < / li >
* < li >
* < b > Error Message : < / b > The authenticated user does not have sufficient permissions to perform the desired
* activity .
* < b > Solution : < / b > Contact your system administrator to get the needed permissions .
* < / li >
* < li >
* < b > Error Message : < / b > The S3 bucket % s already contains an object with key % s .
* < b > Solution : < / b > Give the < code > TargetSnapshotName < / code > a new and unique value . If exporting a snapshot , you
* could alternatively create a new Amazon S3 bucket and use this same value for < code > TargetSnapshotName < / code > .
* < / li >
* < li >
* < b > Error Message : < / b > ElastiCache has not been granted READ permissions % s on the S3 Bucket .
* < b > Solution : < / b > Add List and Read permissions on the bucket . For more information , see < a href =
* " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html # Snapshots . Exporting . GrantAccess "
* > Step 2 : Grant ElastiCache Access to Your Amazon S3 Bucket < / a > in the ElastiCache User Guide .
* < / li >
* < li >
* < b > Error Message : < / b > ElastiCache has not been granted WRITE permissions % s on the S3 Bucket .
* < b > Solution : < / b > Add Upload / Delete permissions on the bucket . For more information , see < a href =
* " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html # Snapshots . Exporting . GrantAccess "
* > Step 2 : Grant ElastiCache Access to Your Amazon S3 Bucket < / a > in the ElastiCache User Guide .
* < / li >
* < li >
* < b > Error Message : < / b > ElastiCache has not been granted READ _ ACP permissions % s on the S3 Bucket .
* < b > Solution : < / b > Add View Permissions on the bucket . For more information , see < a href =
* " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / Snapshots . Exporting . html # Snapshots . Exporting . GrantAccess "
* > Step 2 : Grant ElastiCache Access to Your Amazon S3 Bucket < / a > in the ElastiCache User Guide .
* < / li >
* < / ul >
* @ param copySnapshotRequest
* Represents the input of a < code > CopySnapshotMessage < / code > operation .
* @ return Result of the CopySnapshot operation returned by the service .
* @ throws SnapshotAlreadyExistsException
* You already have a snapshot with the given name .
* @ throws SnapshotNotFoundException
* The requested snapshot name does not refer to an existing snapshot .
* @ throws SnapshotQuotaExceededException
* The request cannot be processed because it would exceed the maximum number of snapshots .
* @ throws InvalidSnapshotStateException
* The current state of the snapshot does not allow the requested operation to occur .
* @ throws InvalidParameterValueException
* The value for a parameter is invalid .
* @ throws InvalidParameterCombinationException
* Two or more incompatible parameters were specified .
* @ sample AmazonElastiCache . CopySnapshot
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticache - 2015-02-02 / CopySnapshot " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public Snapshot copySnapshot ( CopySnapshotRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCopySnapshot ( request ) ;
|
public class ValueTaglet { /** * { @ inheritDoc } */
public Content getTagletOutput ( Element holder , DocTree tag , TagletWriter writer ) { } }
|
Utils utils = writer . configuration ( ) . utils ; Messages messages = writer . configuration ( ) . getMessages ( ) ; VariableElement field = getVariableElement ( holder , writer . configuration ( ) , tag ) ; if ( field == null ) { if ( tag . toString ( ) . isEmpty ( ) ) { // Invalid use of @ value
messages . warning ( holder , "doclet.value_tag_invalid_use" ) ; } else { // Reference is unknown .
messages . warning ( holder , "doclet.value_tag_invalid_reference" , tag . toString ( ) ) ; } } else if ( field . getConstantValue ( ) != null ) { return writer . valueTagOutput ( field , utils . constantValueExpresion ( field ) , // TODO : investigate and cleanup
// in the j . l . m world , equals will not be accurate
// ! field . equals ( tag . holder ( ) )
! utils . elementsEqual ( field , holder ) ) ; } else { // Referenced field is not a constant .
messages . warning ( holder , "doclet.value_tag_invalid_constant" , utils . getSimpleName ( field ) ) ; } return writer . getOutputInstance ( ) ;
|
public class ObjectStructuredFieldOffsetImpl { /** * Sets the structured-field offset value and, when adapters are attached,
 * emits an EMF SET notification carrying both the old and new value.
 * @ param newSFOff the new offset value
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @ generated */
public void setSFOff ( Integer newSFOff ) { } }
|
// Capture the previous value first so the notification can report the transition.
Integer oldSFOff = sfOff ; sfOff = newSFOff ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . OBJECT_STRUCTURED_FIELD_OFFSET__SF_OFF , oldSFOff , sfOff ) ) ;
|
public class ArrayTransitionModel { /** * Called by CompositeTransitionModel */
void dump ( int size , Appendable buffer ) throws IOException { } }
|
for ( int i = 0 ; i < size ; i ++ ) { ZonalTransition transition = this . transitions [ i ] ; TransitionModel . dump ( transition , buffer ) ; }
|
public class JenkinsLogs { /** * Adds { @ link Jenkins # logRecords } ( from core ) into the support bundle .
* This is a small ring buffer that contains most recent log entries emitted from j . u . l logging .
* @ see WebAppMain # installLogger ( ) */
private void addMasterJulRingBuffer ( Container result ) { } }
|
result . add ( new LogRecordContent ( "nodes/master/logs/jenkins.log" ) { @ Override public Iterable < LogRecord > getLogRecords ( ) { return Lists . reverse ( new ArrayList < LogRecord > ( Jenkins . logRecords ) ) ; } } ) ;
|
public class ThreadPoolController { /** * Evaluate current poolSize against farthest poolSize to decide whether it makes sense
* to shrink . The final outcome is probabilistic , not deterministic .
* @ param smallerPoolSize - smaller poolSize for comparison
* @ param largerPoolSize - larger poolSize for comparison
* @ param smallerPoolTput - tput ( historical or expected ) of smaller poolSize
* @ param largerPoolTput - tput ( historical or expected ) of larger poolSize
* @ return - true if the ratios and coinFlips favor shrinking */
private boolean leanTowardShrinking ( Integer smallerPoolSize , int largerPoolSize , double smallerPoolTput , double largerPoolTput ) { } }
|
boolean shouldShrink = false ; double poolRatio = largerPoolSize / smallerPoolSize ; double tputRatio = largerPoolTput / smallerPoolTput ; double poolTputRatio = poolRatio / tputRatio ; // compare the poolSize ratio and tput ratio between current and largest poolSizes
// if tput no better at larger poolSize , or not much better , lean toward shrinking
if ( tputRatio < 1.0 ) { // much larger poolSize has smaller tput - lean strongly ( 75 % ) toward shrinking
shouldShrink = ( flipCoin ( ) && flipCoin ( ) ) ? false : true ; } else if ( poolTputRatio > poolTputRatioHigh ) { // poolSize ratio is much larger than tput ratio - lean strongly ( 75 % ) toward shrinking
shouldShrink = ( flipCoin ( ) && flipCoin ( ) ) ? false : true ; } else if ( poolTputRatio > poolTputRatioLow ) { // poolSize ratio is slightly larger than tput ratio - lean weakly ( 50 % ) toward shrinking
shouldShrink = ( flipCoin ( ) ) ? false : true ; } // Format an event level trace point with the key tput ratio data
if ( tc . isEventEnabled ( ) && shouldShrink ) Tr . event ( tc , "Tput ratio shrinkScore adjustment, larger poolSizes" , poolTputRatioData ( poolTputRatio , poolRatio , tputRatio , smallerPoolTput , largerPoolTput , smallerPoolSize , largerPoolSize ) ) ; return shouldShrink ;
|
public class AmazonECRClient { /** * Retrieves the results of the specified lifecycle policy preview request .
* @ param getLifecyclePolicyPreviewRequest
* @ return Result of the GetLifecyclePolicyPreview operation returned by the service .
* @ throws ServerException
* These errors are usually caused by a server - side issue .
* @ throws InvalidParameterException
* The specified parameter is invalid . Review the available parameters for the API request .
* @ throws RepositoryNotFoundException
* The specified repository could not be found . Check the spelling of the specified repository and ensure
* that you are performing operations on the correct registry .
* @ throws LifecyclePolicyPreviewNotFoundException
* There is no dry run for this repository .
* @ sample AmazonECR . GetLifecyclePolicyPreview
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ecr - 2015-09-21 / GetLifecyclePolicyPreview " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public GetLifecyclePolicyPreviewResult getLifecyclePolicyPreview ( GetLifecyclePolicyPreviewRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeGetLifecyclePolicyPreview ( request ) ;
|
public class ResilientActiveKeyValueStore { /** * @ param path
* @ param watcher
* @ return String
* @ throws InterruptedException
* @ throws KeeperException
* @ Description : 读数据
* @ author liaoqiqi
* @ date 2013-6-14 */
public String read ( String path , Watcher watcher , Stat stat ) throws InterruptedException , KeeperException { } }
|
byte [ ] data = zk . getData ( path , watcher , stat ) ; return new String ( data , CHARSET ) ;
|
public class Where { /** * Add a EXISTS clause with a sub - query inside of parenthesis .
* < b > NOTE : < / b > The sub - query will be prepared at the same time that the outside query is . */
public Where < T , ID > exists ( QueryBuilder < ? , ? > subQueryBuilder ) { } }
|
// we do this to turn off the automatic addition of the ID column in the select column list
subQueryBuilder . enableInnerQuery ( ) ; addClause ( new Exists ( new InternalQueryBuilderWrapper ( subQueryBuilder ) ) ) ; return this ;
|
public class CfgAlignmentModel { /** * Gets the { @ code beamSize } highest - scoring logical form derivations
* for { @ code example } according to this model . The returned list is
* approximate ( because it is produced by a beam search over CFG parses ) .
* @ param example
* @ param beamSize
* @ return */
public List < AlignedExpressionTree > getBestAlignments ( AlignmentExample example , int beamSize ) { } }
|
CfgParser parser = getCfgParser ( example ) ; ExpressionTree tree = example . getTree ( ) ; Factor rootFactor = getRootFactor ( tree , parser . getParentVariable ( ) ) ; List < CfgParseTree > parseTrees = parser . beamSearch ( example . getWords ( ) , beamSize ) ; List < AlignedExpressionTree > expressionTrees = Lists . newArrayList ( ) ; for ( CfgParseTree parseTree : parseTrees ) { if ( rootFactor . getUnnormalizedProbability ( parseTree . getRoot ( ) ) > 0 ) { expressionTrees . add ( decodeCfgParse ( parseTree , 0 ) ) ; } } return expressionTrees ;
|
public class MRCompactorJobPropCreator { /** * Check if inputFolder contains any files which have modification times which are more
* recent than the last compaction time as stored within outputFolder ; return any files
* which do . An empty list will be returned if all files are older than the last compaction time . */
private Set < Path > getNewDataInFolder ( Path inputFolder , Path outputFolder ) throws IOException { } }
|
Set < Path > newFiles = Sets . newHashSet ( ) ; if ( ! this . fs . exists ( inputFolder ) || ! this . fs . exists ( outputFolder ) ) { return newFiles ; } DateTime lastCompactionTime = new DateTime ( MRCompactor . readCompactionTimestamp ( this . fs , outputFolder ) ) ; for ( FileStatus fstat : FileListUtils . listFilesRecursively ( this . fs , inputFolder ) ) { DateTime fileModificationTime = new DateTime ( fstat . getModificationTime ( ) ) ; if ( fileModificationTime . isAfter ( lastCompactionTime ) ) { LOG . info ( "[" + fileModificationTime . getMillis ( ) + "] " + fstat . getPath ( ) + " is after " + lastCompactionTime . getMillis ( ) ) ; newFiles . add ( fstat . getPath ( ) ) ; } } if ( ! newFiles . isEmpty ( ) ) { LOG . info ( String . format ( "Found %d new files within folder %s which are more recent than the previous " + "compaction start time of %s." , newFiles . size ( ) , inputFolder , lastCompactionTime ) ) ; } return newFiles ;
|
public class Main { String expand ( String s ) { } }
|
int i1 = 0 ; int i2 = 0 ; while ( s != null ) { i1 = s . indexOf ( "$(" , i2 ) ; if ( i1 < 0 ) break ; i2 = s . indexOf ( ")" , i1 + 2 ) ; if ( i2 < 0 ) break ; String property = System . getProperty ( s . substring ( i1 + 2 , i2 ) , "" ) ; s = s . substring ( 0 , i1 ) + property + s . substring ( i2 + 1 ) ; } return s ;
|
public class FbBotMillMockMediator { /** * Forwards an envelope to the registered bots .
* @ param envelope
* the envelope to forward . */
public void forward ( MessageEnvelope envelope ) { } }
|
List < FbBot > bots = FbBotMillContext . getInstance ( ) . getRegisteredBots ( ) ; for ( FbBot b : bots ) { b . processMessage ( envelope ) ; }
|
public class CsvBindingErrors { /** * フィールドエラーを登録します 。
* @ param field フィールドパス 。
* @ param errorCode エラーコード 。
* @ param messageVariables メッセージ中の変数 。
* @ param defaultMessage 指定したエラーコードに対するメッセージが見つからないときに使用するメッセージです 。 指定しない場合はnullを設定します 。 */
public void rejectValue ( final String field , final String errorCode , final Map < String , Object > messageVariables , final String defaultMessage ) { } }
|
rejectValue ( field , null , errorCode , Collections . emptyMap ( ) , defaultMessage ) ;
|
public class JobOperations { /** * Adds a job to the Batch account .
* @ param jobId The ID of the job to be added .
* @ param poolInfo Specifies how a job should be assigned to a pool .
* @ param additionalBehaviors A collection of { @ link BatchClientBehavior } instances that are applied to the Batch service request .
* @ throws BatchErrorException Exception thrown when an error response is received from the Batch service .
* @ throws IOException Exception thrown when there is an error in serialization / deserialization of data sent to / received from the Batch service . */
public void createJob ( String jobId , PoolInformation poolInfo , Iterable < BatchClientBehavior > additionalBehaviors ) throws BatchErrorException , IOException { } }
|
JobAddParameter param = new JobAddParameter ( ) . withId ( jobId ) . withPoolInfo ( poolInfo ) ; createJob ( param , additionalBehaviors ) ;
|
public class User { /** * Called by tests in the JTH . Otherwise this shouldn ' t be called .
* Even in the tests this usage is questionable . */
@ Deprecated public static void clear ( ) { } }
|
if ( ExtensionList . lookup ( AllUsers . class ) . isEmpty ( ) ) { return ; } UserIdMapper . getInstance ( ) . clear ( ) ; AllUsers . clear ( ) ;
|
public class FSAConfiguration { /** * returns data of proxy url from command line parameter proxy */
public static String [ ] parseProxy ( String proxy , List < String > errors ) { } }
|
String [ ] parsedProxyInfo = new String [ 4 ] ; if ( proxy != null ) { try { URL proxyAsUrl = new URL ( proxy ) ; parsedProxyInfo [ 0 ] = proxyAsUrl . getHost ( ) ; parsedProxyInfo [ 1 ] = String . valueOf ( proxyAsUrl . getPort ( ) ) ; if ( proxyAsUrl . getUserInfo ( ) != null ) { String [ ] parsedCred = proxyAsUrl . getUserInfo ( ) . split ( COLON ) ; parsedProxyInfo [ 2 ] = parsedCred [ 0 ] ; if ( parsedCred . length > 1 ) { parsedProxyInfo [ 3 ] = parsedCred [ 1 ] ; } } } catch ( MalformedURLException e ) { errors . add ( "Malformed proxy url : {}" + e . getMessage ( ) ) ; } } return parsedProxyInfo ;
|
public class DescribeDeliveryChannelStatusRequest { /** * A list of delivery channel names .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDeliveryChannelNames ( java . util . Collection ) } or { @ link # withDeliveryChannelNames ( java . util . Collection ) }
* if you want to override the existing values .
* @ param deliveryChannelNames
* A list of delivery channel names .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeDeliveryChannelStatusRequest withDeliveryChannelNames ( String ... deliveryChannelNames ) { } }
|
if ( this . deliveryChannelNames == null ) { setDeliveryChannelNames ( new com . amazonaws . internal . SdkInternalList < String > ( deliveryChannelNames . length ) ) ; } for ( String ele : deliveryChannelNames ) { this . deliveryChannelNames . add ( ele ) ; } return this ;
|
public class CommonUtils { /** * 字符串转布尔
* @ param bool 数字
* @ param defaultInt 默认值
* @ return int */
public static boolean parseBoolean ( String bool , boolean defaultInt ) { } }
|
if ( bool == null ) { return defaultInt ; } else { return Boolean . parseBoolean ( bool ) ; }
|
public class BackendMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Backend backend , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( backend == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( backend . getVirtualService ( ) , VIRTUALSERVICE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class ArrayListIterate { /** * Mutates the internal array of the ArrayList by sorting it and then returns the same ArrayList . */
public static < T extends Comparable < ? super T > > ArrayList < T > sortThis ( ArrayList < T > list ) { } }
|
return ArrayListIterate . sortThis ( list , Comparators . naturalOrder ( ) ) ;
|
public class LineItemSummary { /** * Gets the startDateTimeType value for this LineItemSummary .
 * @ return startDateTimeType * Specifies whether to start serving to the { @ code LineItem }
 * right away , in
 * an hour , etc . This attribute is optional and defaults
 * to
 * { @ link StartDateTimeType # USE _ START _ DATE _ TIME } . */
public com . google . api . ads . admanager . axis . v201902 . StartDateTimeType getStartDateTimeType ( ) { } }
|
// Plain generated getter; returns the stored field with no side effects.
return startDateTimeType ;
|
public class ExceptionSoftening { /** * reduces the end pc based on the optional LocalVariableTable ' s exception
* register scope
* @ param infos the list of active catch blocks
* @ param pc the current pc
* @ param seen the currently parsed opcode */
private void updateEndPCsOnCatchRegScope ( List < CatchInfo > infos , int pc , int seen ) { } }
|
if ( lvt != null ) { for ( CatchInfo ci : infos ) { if ( ( ci . getStart ( ) == pc ) && OpcodeUtils . isAStore ( seen ) ) { int exReg = RegisterUtils . getAStoreReg ( this , seen ) ; LocalVariable lv = lvt . getLocalVariable ( exReg , pc + 1 ) ; if ( lv != null ) { ci . setFinish ( lv . getStartPC ( ) + lv . getLength ( ) ) ; } break ; } } }
|
public class CmsXmlContentDefinition { /** * Factory method that returns the XML content definition instance for a given resource . < p >
* @ param cms the cms - object
* @ param resource the resource
* @ return the XML content definition
* @ throws CmsException if something goes wrong */
public static CmsXmlContentDefinition getContentDefinitionForResource ( CmsObject cms , CmsResource resource ) throws CmsException { } }
|
CmsXmlContentDefinition contentDef = null ; I_CmsResourceType resType = OpenCms . getResourceManager ( ) . getResourceType ( resource . getTypeId ( ) ) ; String schema = resType . getConfiguration ( ) . get ( CmsResourceTypeXmlContent . CONFIGURATION_SCHEMA ) ; if ( schema != null ) { try { // this wont in most cases read the file content because of caching
contentDef = unmarshal ( cms , schema ) ; } catch ( CmsException e ) { // this should never happen , unless the configured schema is different than the schema in the XML
if ( ! LOG . isDebugEnabled ( ) ) { LOG . warn ( e ) ; } LOG . debug ( e . getLocalizedMessage ( ) , e ) ; } } if ( contentDef == null ) { // could still be empty since it is not mandatory to configure the resource type in the XML configuration
// try through the XSD relation
List < CmsRelation > relations = cms . getRelationsForResource ( resource , CmsRelationFilter . TARGETS . filterType ( CmsRelationType . XSD ) ) ; if ( ( relations != null ) && ! relations . isEmpty ( ) ) { CmsXmlEntityResolver entityResolver = new CmsXmlEntityResolver ( cms ) ; String xsd = cms . getSitePath ( relations . get ( 0 ) . getTarget ( cms , CmsResourceFilter . ALL ) ) ; contentDef = entityResolver . getCachedContentDefinition ( xsd ) ; } } if ( contentDef == null ) { // could still be empty if the XML content has been saved with an OpenCms before 8.0.0
// so , to unmarshal is the only possibility left
CmsXmlContent content = CmsXmlContentFactory . unmarshal ( cms , cms . readFile ( resource ) ) ; contentDef = content . getContentDefinition ( ) ; } return contentDef ;
|
public class ProjectImpl { /** * Returns this project's list of new services via the reflective EMF accessor .
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public EList < NewService > getNewServices ( ) { } }
|
// The boolean argument requests proxy resolution on access.
return ( EList < NewService > ) eGet ( StorePackage . Literals . PROJECT__NEW_SERVICES , true ) ;
|
public class Scene { /** * Moves the layer one level down in this scene .
* @ param layer */
@ Override public final Scene moveDown ( final Layer layer ) { } }
|
if ( ( null != layer ) && ( LienzoCore . IS_CANVAS_SUPPORTED ) ) { final int size = getElement ( ) . getChildCount ( ) ; if ( size < 2 ) { return this ; } final DivElement element = layer . getElement ( ) ; for ( int i = 0 ; i < size ; i ++ ) { final DivElement look = getElement ( ) . getChild ( i ) . cast ( ) ; if ( look == element ) { if ( i == 0 ) { // already at bottom
break ; } getElement ( ) . insertBefore ( element , getElement ( ) . getChild ( i - 1 ) ) ; break ; } } final NFastArrayList < Layer > layers = getChildNodes ( ) ; if ( null != layers ) { layers . moveDown ( layer ) ; } } return this ;
|
public class AssociateGreedy { /** * Associates the two sets objects against each other by minimizing fit score .
 * For each source feature the best - scoring destination is selected ; with backwards
 * validation enabled , a match is discarded when another source feature scores as
 * well or better against the same destination .
 * @ param src Source list .
 * @ param dst Destination list . */
@ Override public void associate ( FastQueue < D > src , FastQueue < D > dst ) { } }
|
// Reset outputs , then size pairs / fitQuality per source feature and the score
// work buffer as a src.size x dst.size table . The CONCURRENT _ * comments are
// build - time templates for generating the concurrent variant - do not alter .
fitQuality . reset ( ) ; pairs . reset ( ) ; workBuffer . reset ( ) ; pairs . resize ( src . size ) ; fitQuality . resize ( src . size ) ; workBuffer . resize ( src . size * dst . size ) ; // CONCURRENT _ BELOW BoofConcurrency . loopFor ( 0 , src . size , i - > {
// Forward pass : record every pairwise score ; per source , keep the best ( lowest )
// score not exceeding maxFitError ; bestIndex stays - 1 when nothing qualifies .
for ( int i = 0 ; i < src . size ; i ++ ) { D a = src . data [ i ] ; double bestScore = maxFitError ; int bestIndex = - 1 ; int workIdx = i * dst . size ; for ( int j = 0 ; j < dst . size ; j ++ ) { D b = dst . data [ j ] ; double fit = score . score ( a , b ) ; workBuffer . set ( workIdx + j , fit ) ; if ( fit <= bestScore ) { bestIndex = j ; bestScore = fit ; } } pairs . set ( i , bestIndex ) ; fitQuality . set ( i , bestScore ) ; } // CONCURRENT _ ABOVE } ) ;
if ( backwardsValidation ) { // CONCURRENT _ BELOW BoofConcurrency . loopFor ( 0 , src . size , i - > {
// Backward pass : walk the destination's column in the work buffer and drop a
// match when any other source feature ties or beats its score .
for ( int i = 0 ; i < src . size ; i ++ ) { int match = pairs . data [ i ] ; if ( match == - 1 ) // CONCURRENT _ BELOW return ;
continue ; double scoreToBeat = workBuffer . data [ i * dst . size + match ] ; for ( int j = 0 ; j < src . size ; j ++ , match += dst . size ) { if ( workBuffer . data [ match ] <= scoreToBeat && j != i ) { pairs . data [ i ] = - 1 ; fitQuality . data [ i ] = Double . MAX_VALUE ; break ; } } } // CONCURRENT _ ABOVE } ) ;
}
|
public class LCAGraphManager { /** Initializes the per - node working arrays before the LCA computation . */ private void initParams ( ) { } }
|
// NOTE(review): nbActives is set from the graph's node count while the loop bound
// is the separate nbNodes field - presumably nbNodes covers all array slots ;
// confirm the relationship upstream .
// Each slot starts unvisited : successors cached from the graph , DFS number ,
// father and A entry all reset to -1 .
nbActives = graph . getNodes ( ) . size ( ) ; for ( int i = 0 ; i < nbNodes ; i ++ ) { successors [ i ] = graph . getSuccOf ( i ) ; dfsNumberOfNode [ i ] = - 1 ; father [ i ] = - 1 ; A [ i ] = - 1 ; }
|
public class UniqueCountMap { /** * Returns total bytes used for key storage
* @ return total bytes used for key storage */
public long getKeyMemoryUsageBytes ( ) { } }
|
long total = 0 ; for ( int i = 0 ; i < maps_ . length ; i ++ ) { if ( maps_ [ i ] != null ) { total += ( long ) ( maps_ [ i ] . getActiveEntries ( ) ) * keySizeBytes_ ; } } return total ;
|
public class Streams { /** * Generic zip function . E . g . Zipping a Stream and a Sequence
* < pre >
* { @ code
* Stream < List < Integer > > zipped = Streams . zip ( Stream . of ( 1,2,3)
* , ReactiveSeq . of ( 2,3,4 ) ,
* ( a , b ) - > Arrays . asList ( a , b ) ) ;
* List < Integer > zip = zipped . collect ( CyclopsCollectors . toList ( ) ) . getValue ( 1 ) ;
* assertThat ( zip . getValue ( 0 ) , equalTo ( 2 ) ) ;
* assertThat ( zip . getValue ( 1 ) , equalTo ( 3 ) ) ;
* < / pre >
* @ param second
* Monad to zip with
* @ param zipper
* Zipping function
* @ return Stream zipping two Monads */
public final static < T , S , R > Stream < R > zipSequence ( final Stream < T > stream , final Stream < ? extends S > second , final BiFunction < ? super T , ? super S , ? extends R > zipper ) { } }
|
final Iterator < T > left = stream . iterator ( ) ; final Iterator < ? extends S > right = second . iterator ( ) ; return Streams . stream ( new Iterator < R > ( ) { @ Override public boolean hasNext ( ) { return left . hasNext ( ) && right . hasNext ( ) ; } @ Override public R next ( ) { return zipper . apply ( left . next ( ) , right . next ( ) ) ; } } ) ;
|
public class UIComponentClassicTagBase { /** * < p > Pop the top { @ link UIComponentTag } instance off of our component tag
* stack , deleting the stack if this was the last entry . < / p > */
private void popUIComponentClassicTagBase ( ) { } }
|
List list = ( List ) context . getAttributes ( ) . get ( COMPONENT_TAG_STACK_ATTR ) ; // if an exception occurred in a nested tag ,
// there could be a few tags left in the stack .
UIComponentClassicTagBase uic = null ; while ( list != null && uic != this ) { int idx = list . size ( ) - 1 ; uic = ( UIComponentClassicTagBase ) list . get ( idx ) ; list . remove ( idx ) ; if ( idx < 1 ) { context . getAttributes ( ) . remove ( COMPONENT_TAG_STACK_ATTR ) ; list = null ; } }
|
public class Normalizer { /** * Normalize a string .
* The string will be normalized according to the specified normalization
* mode and options .
* @ param source The char array to normalize .
* @ param target A char buffer to receive the normalized text .
* @ param mode The normalization mode ; one of Normalizer . NONE ,
* Normalizer . NFD , Normalizer . NFC , Normalizer . NFKC ,
* Normalizer . NFKD , Normalizer . DEFAULT
* @ param options The normalization options , ORed together ( 0 for no options ) .
* @ return int The total buffer size needed ; if greater than length of
* result , the output was truncated .
* @ exception IndexOutOfBoundsException if the target capacity is less
* than the required length
* @ deprecated ICU 56 Use { @ link Normalizer2 } instead .
* @ hide original deprecated declaration */
@ Deprecated public static int normalize ( char [ ] source , char [ ] target , Mode mode , int options ) { } }
|
return normalize ( source , 0 , source . length , target , 0 , target . length , mode , options ) ;
|
public class MergeRequestApi { /** * Creates a merge request and optionally assigns a reviewer to it .
* < pre > < code > GitLab Endpoint : POST / projects / : id / merge _ requests < / code > < / pre >
* @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance
* @ param sourceBranch the source branch , required
* @ param targetBranch the target branch , required
* @ param title the title for the merge request , required
* @ param description the description of the merge request
* @ param assigneeId the Assignee user ID , optional
* @ return the created MergeRequest instance
* @ throws GitLabApiException if any exception occurs */
public MergeRequest createMergeRequest ( Object projectIdOrPath , String sourceBranch , String targetBranch , String title , String description , Integer assigneeId ) throws GitLabApiException { } }
|
return createMergeRequest ( projectIdOrPath , sourceBranch , targetBranch , title , description , assigneeId , null , null , null , null ) ;
|
public class TomlWriter { /** * Write an Object in TOML to a { @ link File } . Output is encoded as UTF - 8.
* @ param from the object to be written
* @ param target the File to which the TOML will be written
* @ throws IOException if any file operations fail */
public void write ( Object from , File target ) throws IOException { } }
|
OutputStream outputStream = new FileOutputStream ( target ) ; try { write ( from , outputStream ) ; } finally { outputStream . close ( ) ; }
|
public class EJBMethodInfoImpl { /** * Return method name for this method , lazily derived from the signature
 * when not yet cached . < p > */
@ Override public String getMethodName ( ) { } }
|
// Generally , the method name is pretty static , but for internal
// CMR methods ( with index < 0 ) they are dynamic , and based on
// the signature . For performance , the substring operation is
// deferred until the method name is actually used . d154342.10
// The cached name is everything before the first ':' in the signature .
if ( methodName == null && methodSignature != null ) methodName = methodSignature . substring ( 0 , methodSignature . indexOf ( ":" ) ) ; return methodName ;
|
public class AbstractMethodTypeListener { /** * Allows traverse the input type hierarchy .
* @ param type encountered by Guice .
* @ param encounter the injection context . */
private < I > void hear ( Class < ? super I > type , TypeEncounter < I > encounter ) { } }
|
if ( type == null || type . getPackage ( ) . getName ( ) . startsWith ( JAVA_PACKAGE ) ) { return ; } for ( Method method : type . getDeclaredMethods ( ) ) { if ( method . isAnnotationPresent ( annotationType ) ) { if ( method . getParameterTypes ( ) . length != 0 ) { encounter . addError ( "Annotated methods with @%s must not accept any argument, found %s" , annotationType . getName ( ) , method ) ; } hear ( method , encounter ) ; } } hear ( type . getSuperclass ( ) , encounter ) ;
|
public class OQL { /** * FROM
* FROM foo . bar . Type Type */
private void _buildFrom ( final String alias , final StringBuilder stmt ) { } }
|
stmt . append ( _FROM_ ) . append ( getType ( ) . getName ( ) ) ; stmt . append ( _SPACE_ ) . append ( alias ) ;
|
public class ObjectMappedQuery { /** * Executes the query and returns the results as a list of objects .
 * @ param conn open database connection the select runs against
 * @ param object data object supplying the query parameters */
public List < T > getResults ( Connection conn , DataObject object ) throws Exception { } }
|
// Collect every mapped row into an ArrayList via the generic select executor .
return executeSelect ( conn , object , new ResultSetMapper < ArrayListCollector < T > > ( new ArrayListCollector < T > ( ) ) ) ;
|
public class Signatures { /** * Selects the best method for the given argument types .
* @ param methods
* @ param argTypes
* @ return method
* @ throws AmbiguousSignatureMatchException if multiple methods match equally */
public static Method bestMethod ( Method [ ] methods , Class < ? > [ ] argTypes ) throws AmbiguousMethodMatchException { } }
|
try { return best ( methods , collectSignatures ( methods ) , collectVarArgs ( methods ) , argTypes ) ; } catch ( AmbiguousSignatureMatchException e ) { throw new AmbiguousMethodMatchException ( e , methods ) ; }
|
public class LocaleSelectorImpl { /** * Reflective EMF setter : dispatches a feature id to the matching typed setter .
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } }
|
// Unhandled feature ids fall through to the superclass implementation at the end .
switch ( featureID ) { case AfplibPackage . LOCALE_SELECTOR__LOC_FLGS : setLocFlgs ( ( Integer ) newValue ) ; return ; case AfplibPackage . LOCALE_SELECTOR__LANG_CODE : setLangCode ( ( String ) newValue ) ; return ; case AfplibPackage . LOCALE_SELECTOR__SCRPT_CDE : setScrptCde ( ( String ) newValue ) ; return ; case AfplibPackage . LOCALE_SELECTOR__REG_CDE : setRegCde ( ( String ) newValue ) ; return ; case AfplibPackage . LOCALE_SELECTOR__RESERVED : setReserved ( ( byte [ ] ) newValue ) ; return ; case AfplibPackage . LOCALE_SELECTOR__VAR_CDE : setVarCde ( ( String ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
|
public class AWSOrganizationsClient { /** * Lists all of the organizational units ( OUs ) or accounts that are contained in the specified parent OU or root .
* This operation , along with < a > ListParents < / a > enables you to traverse the tree structure that makes up this root .
* < note >
* Always check the < code > NextToken < / code > response parameter for a < code > null < / code > value when calling a
* < code > List * < / code > operation . These operations can occasionally return an empty set of results even when there
* are more results available . The < code > NextToken < / code > response parameter value is < code > null < / code > < i > only < / i >
* when there are no more results to display .
* < / note >
* This operation can be called only from the organization ' s master account .
* @ param listChildrenRequest
* @ return Result of the ListChildren operation returned by the service .
* @ throws AccessDeniedException
* You don ' t have permissions to perform the requested operation . The user or role that is making the
* request must have at least one IAM permissions policy attached that grants the required permissions . For
* more information , see < a href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / access . html " > Access
* Management < / a > in the < i > IAM User Guide < / i > .
* @ throws AWSOrganizationsNotInUseException
* Your account isn ' t a member of an organization . To make this request , you must use the credentials of an
* account that belongs to an organization .
* @ throws InvalidInputException
* The requested operation failed because you provided invalid values for one or more of the request
* parameters . This exception includes a reason that contains additional information about the violated
* limit : < / p > < note >
* Some of the reasons in the following list might not be applicable to this specific API or operation :
* < / note >
* < ul >
* < li >
* IMMUTABLE _ POLICY : You specified a policy that is managed by AWS and can ' t be modified .
* < / li >
* < li >
* INPUT _ REQUIRED : You must include a value for all required parameters .
* < / li >
* < li >
* INVALID _ ENUM : You specified a value that isn ' t valid for that parameter .
* < / li >
* < li >
* INVALID _ FULL _ NAME _ TARGET : You specified a full name that contains invalid characters .
* < / li >
* < li >
* INVALID _ LIST _ MEMBER : You provided a list to a parameter that contains at least one invalid value .
* < / li >
* < li >
* INVALID _ PARTY _ TYPE _ TARGET : You specified the wrong type of entity ( account , organization , or email ) as a
* party .
* < / li >
* < li >
* INVALID _ PAGINATION _ TOKEN : Get the value for the < code > NextToken < / code > parameter from the response to a
* previous call of the operation .
* < / li >
* < li >
* INVALID _ PATTERN : You provided a value that doesn ' t match the required pattern .
* < / li >
* < li >
* INVALID _ PATTERN _ TARGET _ ID : You specified a policy target ID that doesn ' t match the required pattern .
* < / li >
* < li >
* INVALID _ ROLE _ NAME : You provided a role name that isn ' t valid . A role name can ' t begin with the reserved
* prefix < code > AWSServiceRoleFor < / code > .
* < / li >
* < li >
* INVALID _ SYNTAX _ ORGANIZATION _ ARN : You specified an invalid Amazon Resource Name ( ARN ) for the
* organization .
* < / li >
* < li >
* INVALID _ SYNTAX _ POLICY _ ID : You specified an invalid policy ID .
* < / li >
* < li >
* MAX _ FILTER _ LIMIT _ EXCEEDED : You can specify only one filter parameter for the operation .
* < / li >
* < li >
* MAX _ LENGTH _ EXCEEDED : You provided a string parameter that is longer than allowed .
* < / li >
* < li >
* MAX _ VALUE _ EXCEEDED : You provided a numeric parameter that has a larger value than allowed .
* < / li >
* < li >
* MIN _ LENGTH _ EXCEEDED : You provided a string parameter that is shorter than allowed .
* < / li >
* < li >
* MIN _ VALUE _ EXCEEDED : You provided a numeric parameter that has a smaller value than allowed .
* < / li >
* < li >
* MOVING _ ACCOUNT _ BETWEEN _ DIFFERENT _ ROOTS : You can move an account only between entities in the same root .
* < / li >
* @ throws ParentNotFoundException
* We can ' t find a root or OU with the < code > ParentId < / code > that you specified .
* @ throws ServiceException
* AWS Organizations can ' t complete your request because of an internal service error . Try again later .
* @ throws TooManyRequestsException
* You ' ve sent too many requests in too short a period of time . The limit helps protect against
* denial - of - service attacks . Try again later . < / p >
* For information on limits that affect Organizations , see < a
* href = " https : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ reference _ limits . html " > Limits of
* AWS Organizations < / a > in the < i > AWS Organizations User Guide < / i > .
* @ sample AWSOrganizations . ListChildren
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / organizations - 2016-11-28 / ListChildren " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListChildrenResult listChildren ( ListChildrenRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeListChildren ( request ) ;
|
public class JSONUtil { /** * 读取JSON
* @ param file JSON文件
* @ param charset 编码
* @ return JSON ( 包括JSONObject和JSONArray )
* @ throws IORuntimeException IO异常 */
public static JSON readJSON ( File file , Charset charset ) throws IORuntimeException { } }
|
return parse ( FileReader . create ( file , charset ) . readString ( ) ) ;
|
public class StringTransformerChain { /** * This method implements { @ link # transform ( String ) } recursively .
* @ param original is the original value .
* @ param state is the { @ link State } used to indicate if a { @ link StringTransformerRule rule } causes the chain to
* { @ link State # stop } .
* @ return the transformed result . */
private String transformRecursive ( String original , State state ) { } }
|
String value = original ; if ( this . parent != null ) { value = this . parent . transformRecursive ( original , state ) ; if ( state . stop ) { return value ; } } for ( StringTransformerRule rule : this . rules ) { String transformed = rule . transform ( value ) ; if ( ( transformed != value ) && ( rule . isStopOnMatch ( ) ) ) { state . stop = true ; return transformed ; } value = transformed ; } return value ;
|
public class PatternsImpl { /** * Deletes the patterns with the specified IDs .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param patternIds The patterns IDs .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the OperationStatus object */
public Observable < OperationStatus > deletePatternsAsync ( UUID appId , String versionId , List < UUID > patternIds ) { } }
|
return deletePatternsWithServiceResponseAsync ( appId , versionId , patternIds ) . map ( new Func1 < ServiceResponse < OperationStatus > , OperationStatus > ( ) { @ Override public OperationStatus call ( ServiceResponse < OperationStatus > response ) { return response . body ( ) ; } } ) ;
|
public class FieldErrorBuilder { /** * セルのアドレス情報を設定します 。
* < p > 値が存在する場合のみ設定されます 。 < / p >
* @ param address アドレス情報
* @ return 自身のインスタンス */
public FieldErrorBuilder address ( final Optional < CellPosition > address ) { } }
|
address . ifPresent ( a -> address ( a ) ) ; return this ;
|
public class JDBCBlobClient { /** * Writes the given array of bytes to the < code > BLOB < / code > value that
* this < code > Blob < / code > object represents , starting at position
* < code > pos < / code > , and returns the number of bytes written .
* @ param pos the position in the < code > BLOB < / code > object at which to
* start writing
* @ param bytes the array of bytes to be written to the
* < code > BLOB < / code > value that this < code > Blob < / code > object
* represents
* @ return the number of bytes written
* @ throws SQLException if there is an error accessing the
* < code > BLOB < / code > value */
public synchronized int setBytes ( long pos , byte [ ] bytes ) throws SQLException { } }
|
if ( ! isInLimits ( Long . MAX_VALUE , pos - 1 , bytes . length ) ) { throw Util . outOfRangeArgument ( ) ; } try { return blob . setBytes ( session , pos - 1 , bytes ) ; } catch ( HsqlException e ) { throw Util . sqlException ( e ) ; }
|
public class ParamGraphCanvas { /** * Sets the scatter graph .
* @ param measures
* information about the curves
* @ param measureStds
* standard deviation for the measures
* @ param variedParamValues
* values of the varied parameter
* @ param colors
* color encoding for the param array */
public void setGraph ( MeasureCollection [ ] measures , MeasureCollection [ ] measureStds , double [ ] variedParamValues , Color [ ] colors ) { } }
|
this . measures = measures ; this . variedParamValues = variedParamValues ; ( ( GraphScatter ) this . plotPanel ) . setGraph ( measures , measureStds , variedParamValues , colors ) ; updateCanvas ( false ) ;
|
public class VerticalViewPager { /** * Set a { @ link ViewPager . PageTransformer } that will be called for each attached page whenever
* the scroll position is changed . This allows the application to apply custom property
* transformations to each page , overriding the default sliding look and feel .
* < p > < em > Note : < / em > Prior to Android 3.0 the property animation APIs did not exist .
* As a result , setting a PageTransformer prior to Android 3.0 ( API 11 ) will have no effect . < / p >
* @ param reverseDrawingOrder true if the supplied PageTransformer requires page views
* to be drawn from last to first instead of first to last .
* @ param transformer PageTransformer that will modify each page ' s animation properties */
public void setPageTransformer ( boolean reverseDrawingOrder , ViewPager . PageTransformer transformer ) { } }
|
if ( Build . VERSION . SDK_INT >= 11 ) { final boolean hasTransformer = transformer != null ; final boolean needsPopulate = hasTransformer != ( mPageTransformer != null ) ; mPageTransformer = transformer ; setChildrenDrawingOrderEnabledCompat ( hasTransformer ) ; if ( hasTransformer ) { mDrawingOrder = reverseDrawingOrder ? DRAW_ORDER_REVERSE : DRAW_ORDER_FORWARD ; } else { mDrawingOrder = DRAW_ORDER_DEFAULT ; } if ( needsPopulate ) populate ( ) ; }
|
public class AccessScreen { /** * Add button ( s ) to the toolbar . */
public void addToolbarButtons ( ToolScreen toolScreen ) { } }
|
new SCannedBox ( toolScreen . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . SET_ANCHOR ) , toolScreen , null , ScreenConstants . DEFAULT_DISPLAY , null , "Delete All" , MenuConstants . DELETE , "Delete All" , null ) ;
|
public class SingleAdditionNeighbourhood { /** * Generates a list of all possible addition moves that add a single ID to the selection of a given
* subset solution . Possible fixed IDs are not considered to be added and the maximum subset size
* is taken into account . May return an empty list if no addition moves can be generated .
* @ param solution solution for which all possible addition moves are generated
* @ return list of all addition moves , may be empty */
@ Override public List < SubsetMove > getAllMoves ( SubsetSolution solution ) { } }
|
// check size limit
if ( maxSizeReached ( solution ) ) { return Collections . emptyList ( ) ; } // get set of candidate IDs for addition ( possibly fixed IDs are discarded )
Set < Integer > addCandidates = getAddCandidates ( solution ) ; // check if there are any candidates to be added
if ( addCandidates . isEmpty ( ) ) { return Collections . emptyList ( ) ; } // create addition move for all add candidates
return addCandidates . stream ( ) . map ( add -> new AdditionMove ( add ) ) . collect ( Collectors . toList ( ) ) ;
|
public class CsvEscapeUtil { /** * Perform an escape operation , based on a Reader , writing the results to a Writer .
 * Note this reader is going to be read char - by - char , so some kind of buffering might be appropriate if this
 * is an inconvenience for the specific Reader implementation .
 * A null reader produces no output at all ( not even empty quotes ) . Output is surrounded by double
 * quotes only when the input contains a character outside [ a - zA - Z0 - 9 ] , and any embedded double
 * quote is doubled , per the usual CSV convention .
 * @ param reader source of the text to escape ; read to exhaustion , not closed here
 * @ param writer destination for the escaped text ; not flushed or closed here
 * @ throws IOException if reading from the reader or writing to the writer fails */
static void escape ( final Reader reader , final Writer writer ) throws IOException { } }
|
// Null reader: nothing to do, nothing written.
if ( reader == null ) { return ; } /* * Escape in CSV requires using buffers because CSV escaped text might be surrounded by quotes or not
 * depending on whether they contain any non - alphanumeric chars or not , which is something we cannot
 * know until we find any . */
// Phase 1 (lookahead): accumulate input into a growing buffer until either a character outside
// [a-zA-Z0-9] is found (doQuote becomes 1) or the stream is exhausted. doQuote: -1 = undecided,
// 1 = quoting required. If no such character exists, the ENTIRE input ends up in the buffer.
// NOTE(review): any non-ASCII-alphanumeric char — spaces, commas, accented letters — triggers quoting.
int doQuote = - 1 ; int bufferSize = 0 ; char [ ] buffer = new char [ 10 ] ; int read = reader . read ( buffer , 0 , buffer . length ) ; if ( read < 0 ) { return ; } char cq ; while ( doQuote < 0 && read >= 0 ) { int i = bufferSize ; bufferSize += read ; while ( doQuote < 0 && i < bufferSize ) { cq = buffer [ i ++ ] ; if ( ! ( ( cq >= 'a' && cq <= 'z' ) || ( cq >= 'A' && cq <= 'Z' ) || ( cq >= '0' && cq <= '9' ) ) ) { doQuote = 1 ; // We must add quotes !
break ; } } if ( doQuote < 0 && read >= 0 ) { if ( bufferSize == buffer . length ) { // Actually , there is no room for reading more , so let ' s grow the buffer
// Grow by 50% and keep reading into the unchecked tail of the buffer.
final char [ ] newBuffer = new char [ buffer . length + ( buffer . length / 2 ) ] ; System . arraycopy ( buffer , 0 , newBuffer , 0 , buffer . length ) ; buffer = newBuffer ; } read = reader . read ( buffer , bufferSize , ( buffer . length - bufferSize ) ) ; } } doQuote = Math . max ( doQuote , 0 ) ; // 0 = no quote , 1 = quote
/* * Output initial quotes , if needed */
if ( doQuote == 1 ) { writer . write ( '"' ) ; } /* * First we will output the already - checked buffer , escaping quotes as needed */
// Phase 2: flush the buffered prefix, doubling any embedded double quote.
if ( bufferSize > 0 ) { char c ; for ( int i = 0 ; i < bufferSize ; i ++ ) { c = buffer [ i ] ; /* * Check whether the character is a double - quote ( in which case , we escape it ) */
if ( c == DOUBLE_QUOTE ) { writer . write ( TWO_DOUBLE_QUOTES ) ; } else { writer . write ( c ) ; } } } /* * Once the buffer has been processed , we will process the rest of the input by reading it on - the - fly */
// Phase 3: stream the remainder char-by-char (only reached when phase 1 stopped early,
// i.e. read >= 0), applying the same quote-doubling.
if ( read >= 0 ) { int c1 , c2 ; // c1 : current char , c2 : next char
c1 = - 1 ; c2 = reader . read ( ) ; while ( c2 >= 0 ) { c1 = c2 ; c2 = reader . read ( ) ; /* * Check whether the character is a double - quote ( in which case , we escape it ) */
if ( c1 == DOUBLE_QUOTE ) { writer . write ( TWO_DOUBLE_QUOTES ) ; } else { writer . write ( c1 ) ; } } } /* * Output ending quotes , if needed */
if ( doQuote == 1 ) { writer . write ( '"' ) ; }
|
public class ZipUtil { /** * Returns the unzipped contents of the zipped input stream in a byte array
* @ param in
* the input stream to unzip
* @ return the unzipped content in a byte array
* @ throws IOException */
static public byte [ ] unzip ( InputStream in ) throws IOException { } }
|
ByteArrayOutputStream bos = new ByteArrayOutputStream ( ) ; CopyUtil . copy ( new GZIPInputStream ( in ) , bos ) ; return bos . toByteArray ( ) ;
|
public class UndoHelper { /** * convenience method to be used if you have previously set a { @ link Snackbar } with { @ link # withSnackBar ( Snackbar , String ) }
* @ param positions the positions where the items were removed
* @ return the snackbar or null if { @ link # withSnackBar ( Snackbar , String ) } was not previously called */
public @ Nullable Snackbar remove ( Set < Integer > positions ) { } }
|
if ( mSnackBar == null ) { return null ; } View snackbarView = mSnackBar . getView ( ) ; TextView snackbarText = ( TextView ) snackbarView . findViewById ( com . google . android . material . R . id . snackbar_text ) ; return remove ( snackbarView , snackbarText . getText ( ) . toString ( ) , mSnackbarActionText , mSnackBar . getDuration ( ) , positions ) ;
|
public class IfcComplexNumberImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public EList < Double > getWrappedValue ( ) { } }
|
return ( EList < Double > ) eGet ( Ifc4Package . Literals . IFC_COMPLEX_NUMBER__WRAPPED_VALUE , true ) ;
|
public class ParallelMapIterate { /** * A parallel form of forEachKeyValue .
* @ see MapIterate # forEachKeyValue ( Map , Procedure2)
* @ see ParallelIterate */
public static < K , V > void forEachKeyValue ( Map < K , V > map , Procedure2 < ? super K , ? super V > procedure , Executor executor ) { } }
|
ParallelMapIterate . forEachKeyValue ( map , procedure , 2 , map . size ( ) , executor ) ;
|
public class NestedSerializersSnapshotDelegate { /** * Writes the composite snapshot of all the contained serializers . */
public final void writeNestedSerializerSnapshots ( DataOutputView out ) throws IOException { } }
|
out . writeInt ( MAGIC_NUMBER ) ; out . writeInt ( VERSION ) ; out . writeInt ( nestedSnapshots . length ) ; for ( TypeSerializerSnapshot < ? > snap : nestedSnapshots ) { TypeSerializerSnapshot . writeVersionedSnapshot ( out , snap ) ; }
|
public class Pattern { /** * Creates a mapped constraint with the given generative constraint and the indexes it applies .
* Also labels the last given index .
* @ param constr constraint to add
* @ param label a label for the last of the given indices */
public void add ( Constraint constr , String ... label ) { } }
|
checkLabels ( constr . canGenerate ( ) , label ) ; int [ ] ind = convertLabelsToInds ( label ) ; if ( ind . length != constr . getVariableSize ( ) ) { throw new IllegalArgumentException ( "Mapped elements do not match the constraint size." ) ; } // This will also increment lastIndex if necessary
add ( constr , ind ) ; if ( ! hasLabel ( label [ label . length - 1 ] ) && constr . canGenerate ( ) ) { label ( label [ label . length - 1 ] , lastIndex ) ; }
|
public class ViewSet { /** * Gets the set of all views ( except filtered views ) .
* @ return a Collection of View objects */
@ JsonIgnore public Collection < View > getViews ( ) { } }
|
HashSet < View > views = new HashSet < > ( ) ; views . addAll ( getSystemLandscapeViews ( ) ) ; views . addAll ( getSystemContextViews ( ) ) ; views . addAll ( getContainerViews ( ) ) ; views . addAll ( getComponentViews ( ) ) ; views . addAll ( getDynamicViews ( ) ) ; views . addAll ( getDeploymentViews ( ) ) ; return views ;
|
public class HostDirectives { /** * Change the user agent string used to crawl after initialization . This will
* reorder ( recreate ) the list of user agent directives for this host .
* @ param userAgent The new user agent to use . */
public void setUserAgent ( String userAgent ) { } }
|
this . userAgent = userAgent . toLowerCase ( ) ; // Re - order the set
Set < UserAgentDirectives > replace = new TreeSet < UserAgentDirectives > ( new UserAgentDirectives . UserAgentComparator ( this . userAgent ) ) ; replace . addAll ( rules ) ; rules = replace ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.