signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ListFleetsResult { /** * A list of fleet details meeting the request criteria .
* @ param fleetDetails
* A list of fleet details meeting the request criteria . */
public void setFleetDetails ( java . util . Collection < Fleet > fleetDetails ) { } } | if ( fleetDetails == null ) { this . fleetDetails = null ; return ; } this . fleetDetails = new java . util . ArrayList < Fleet > ( fleetDetails ) ; |
public class FastMoney { /** * Internal method to check for correct number parameter .
* @ param number the number to be checked , including null . .
* @ throws NullPointerException If the number is null
* @ throws java . lang . ArithmeticException If the number exceeds the capabilities of this class . */
protected void checkNumber ( Number number ) { } } | Objects . requireNonNull ( number , "Number is required." ) ; // numeric check for overflow . . .
if ( number . longValue ( ) > MAX_BD . longValue ( ) ) { throw new ArithmeticException ( "Value exceeds maximal value: " + MAX_BD ) ; } BigDecimal bd = MoneyUtils . getBigDecimal ( number ) ; if ( bd . precision ( ) > MAX_BD . precision ( ) ) { throw new ArithmeticException ( "Precision exceeds maximal precision: " + MAX_BD . precision ( ) ) ; } if ( bd . scale ( ) > SCALE ) { String val = MonetaryConfig . getConfig ( ) . get ( "org.javamoney.moneta.FastMoney.enforceScaleCompatibility" ) ; if ( val == null ) { val = "false" ; } if ( Boolean . parseBoolean ( val ) ) { throw new ArithmeticException ( "Scale of " + bd + " exceeds maximal scale: " + SCALE ) ; } else { if ( LOG . isLoggable ( Level . FINEST ) ) { LOG . finest ( "Scale exceeds maximal scale of FastMoney (" + SCALE + "), implicit rounding will be applied to " + number ) ; } } } |
public class GlobalizationPreferences { /** * Convenience function to get a ResourceBundle instance using
* the specified base name and class loader based on the language / locale
* priority list stored in this object .
* @ param baseName the base name of the resource bundle , a fully qualified
* class name
* @ param loader the class object from which to load the resource bundle
* @ return a resource bundle for the given base name and locale based on the
* language / locale priority list stored in this object
* @ hide draft / provisional / internal are hidden on Android */
public ResourceBundle getResourceBundle ( String baseName , ClassLoader loader ) { } } | UResourceBundle urb = null ; UResourceBundle candidate = null ; String actualLocaleName = null ; List < ULocale > fallbacks = getLocales ( ) ; for ( int i = 0 ; i < fallbacks . size ( ) ; i ++ ) { String localeName = ( fallbacks . get ( i ) ) . toString ( ) ; if ( actualLocaleName != null && localeName . equals ( actualLocaleName ) ) { // Actual locale name in the previous round may exactly matches
// with the next fallback locale
urb = candidate ; break ; } try { if ( loader == null ) { candidate = UResourceBundle . getBundleInstance ( baseName , localeName ) ; } else { candidate = UResourceBundle . getBundleInstance ( baseName , localeName , loader ) ; } if ( candidate != null ) { actualLocaleName = candidate . getULocale ( ) . getName ( ) ; if ( actualLocaleName . equals ( localeName ) ) { urb = candidate ; break ; } if ( urb == null ) { // Preserve the available bundle as the last resort
urb = candidate ; } } } catch ( MissingResourceException mre ) { actualLocaleName = null ; continue ; } } if ( urb == null ) { throw new MissingResourceException ( "Can't find bundle for base name " + baseName , baseName , "" ) ; } return urb ; |
public class DescribeStackProvisioningParametersResult { /** * An embedded object that contains the provisioning parameters .
* @ param parameters
* An embedded object that contains the provisioning parameters .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeStackProvisioningParametersResult withParameters ( java . util . Map < String , String > parameters ) { } } | setParameters ( parameters ) ; return this ; |
public class BuildStepsInner { /** * Creates a build step for a build task .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param buildTaskName The name of the container registry build task .
* @ param stepName The name of a build step for a container registry build task .
* @ param properties The properties of a build step .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < BuildStepInner > createAsync ( String resourceGroupName , String registryName , String buildTaskName , String stepName , BuildStepProperties properties ) { } } | return createWithServiceResponseAsync ( resourceGroupName , registryName , buildTaskName , stepName , properties ) . map ( new Func1 < ServiceResponse < BuildStepInner > , BuildStepInner > ( ) { @ Override public BuildStepInner call ( ServiceResponse < BuildStepInner > response ) { return response . body ( ) ; } } ) ; |
public class Filter { /** * Evaluates the filter , returns true if the supplied Task or Resource
* instance matches the filter criteria .
* @ param container Task or Resource instance
* @ param promptValues respose to prompts
* @ return boolean flag */
public boolean evaluate ( FieldContainer container , Map < GenericCriteriaPrompt , Object > promptValues ) { } } | boolean result = true ; if ( m_criteria != null ) { result = m_criteria . evaluate ( container , promptValues ) ; // If this row has failed , but it is a summary row , and we are
// including related summary rows , then we need to recursively test
// its children
if ( ! result && m_showRelatedSummaryRows && container instanceof Task ) { for ( Task task : ( ( Task ) container ) . getChildTasks ( ) ) { if ( evaluate ( task , promptValues ) ) { result = true ; break ; } } } } return ( result ) ; |
public class TypeExtractionUtils { /** * Extracts type from given index from lambda . It supports nested types .
* @ param baseClass SAM function that the lambda implements
* @ param exec lambda function to extract the type from
* @ param lambdaTypeArgumentIndices position of type to extract in type hierarchy
* @ param paramLen count of total parameters of the lambda ( including closure parameters )
* @ param baseParametersLen count of lambda interface parameters ( without closure parameters )
* @ return extracted type */
public static Type extractTypeFromLambda ( Class < ? > baseClass , LambdaExecutable exec , int [ ] lambdaTypeArgumentIndices , int paramLen , int baseParametersLen ) { } } | Type output = exec . getParameterTypes ( ) [ paramLen - baseParametersLen + lambdaTypeArgumentIndices [ 0 ] ] ; for ( int i = 1 ; i < lambdaTypeArgumentIndices . length ; i ++ ) { validateLambdaType ( baseClass , output ) ; output = extractTypeArgument ( output , lambdaTypeArgumentIndices [ i ] ) ; } validateLambdaType ( baseClass , output ) ; return output ; |
public class ReflectExtensions { /** * / * @ Nullable */
public Object invoke ( Object receiver , String methodName , /* @ Nullable */
Object ... args ) throws SecurityException , IllegalArgumentException , IllegalAccessException , InvocationTargetException , NoSuchMethodException { } } | Preconditions . checkNotNull ( receiver , "receiver" ) ; Preconditions . checkNotNull ( methodName , "methodName" ) ; final Object [ ] arguments = args == null ? new Object [ ] { null } : args ; Class < ? extends Object > clazz = receiver . getClass ( ) ; Method compatible = null ; do { for ( Method candidate : clazz . getDeclaredMethods ( ) ) { if ( candidate != null && ! candidate . isBridge ( ) && isCompatible ( candidate , methodName , arguments ) ) { if ( compatible != null ) throw new IllegalStateException ( "Ambiguous methods to invoke. Both " + compatible + " and " + candidate + " would be compatible choices." ) ; compatible = candidate ; } } } while ( compatible == null && ( clazz = clazz . getSuperclass ( ) ) != null ) ; if ( compatible != null ) { if ( ! compatible . isAccessible ( ) ) compatible . setAccessible ( true ) ; return compatible . invoke ( receiver , arguments ) ; } // not found provoke method not found exception
Class < ? > [ ] paramTypes = new Class < ? > [ arguments . length ] ; for ( int i = 0 ; i < arguments . length ; i ++ ) { paramTypes [ i ] = arguments [ i ] == null ? Object . class : arguments [ i ] . getClass ( ) ; } Method method = receiver . getClass ( ) . getMethod ( methodName , paramTypes ) ; return method . invoke ( receiver , arguments ) ; |
public class Messages { /** * Creates a message from the specified key and replacement
* arguments , localized to the given locale .
* @ param errorCode The key for the message text .
* @ param fResourceBundle The resource bundle to use .
* @ param msgKey The message key to use .
* @ param args The arguments to be used as replacement text
* in the message created .
* @ return The formatted message string .
* @ xsl . usage internal */
private final String createMsg ( ListResourceBundle fResourceBundle , String msgKey , Object args [ ] ) // throws Exception
{ } } | String fmsg = null ; boolean throwex = false ; String msg = null ; if ( msgKey != null ) msg = fResourceBundle . getString ( msgKey ) ; else msgKey = "" ; if ( msg == null ) { throwex = true ; /* The message is not in the bundle . . . this is bad ,
* so try to get the message that the message is not in the bundle */
try { msg = java . text . MessageFormat . format ( MsgKey . BAD_MSGKEY , new Object [ ] { msgKey , m_resourceBundleName } ) ; } catch ( Exception e ) { /* even the message that the message is not in the bundle is
* not there . . . this is really bad */
msg = "The message key '" + msgKey + "' is not in the message class '" + m_resourceBundleName + "'" ; } } else if ( args != null ) { try { // Do this to keep format from crying .
// This is better than making a bunch of conditional
// code all over the place .
int n = args . length ; for ( int i = 0 ; i < n ; i ++ ) { if ( null == args [ i ] ) args [ i ] = "" ; } fmsg = java . text . MessageFormat . format ( msg , args ) ; // if we get past the line above we have create the message . . . hurray !
} catch ( Exception e ) { throwex = true ; try { // Get the message that the format failed .
fmsg = java . text . MessageFormat . format ( MsgKey . BAD_MSGFORMAT , new Object [ ] { msgKey , m_resourceBundleName } ) ; fmsg += " " + msg ; } catch ( Exception formatfailed ) { // We couldn ' t even get the message that the format of
// the message failed . . . so fall back to English .
fmsg = "The format of message '" + msgKey + "' in message class '" + m_resourceBundleName + "' failed." ; } } } else fmsg = msg ; if ( throwex ) { throw new RuntimeException ( fmsg ) ; } return fmsg ; |
public class mail_profile { /** * < pre >
* Use this operation to delete mail profile . .
* < / pre > */
public static mail_profile delete ( nitro_service client , mail_profile resource ) throws Exception { } } | resource . validate ( "delete" ) ; return ( ( mail_profile [ ] ) resource . delete_resource ( client ) ) [ 0 ] ; |
public class AmazonCloudFrontClient { /** * Remove a field - level encryption profile .
* @ param deleteFieldLevelEncryptionProfileRequest
* @ return Result of the DeleteFieldLevelEncryptionProfile operation returned by the service .
* @ throws AccessDeniedException
* Access denied .
* @ throws InvalidIfMatchVersionException
* The < code > If - Match < / code > version is missing or not valid for the distribution .
* @ throws NoSuchFieldLevelEncryptionProfileException
* The specified profile for field - level encryption doesn ' t exist .
* @ throws PreconditionFailedException
* The precondition given in one or more of the request - header fields evaluated to < code > false < / code > .
* @ throws FieldLevelEncryptionProfileInUseException
* The specified profile for field - level encryption is in use .
* @ sample AmazonCloudFront . DeleteFieldLevelEncryptionProfile
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / DeleteFieldLevelEncryptionProfile "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeleteFieldLevelEncryptionProfileResult deleteFieldLevelEncryptionProfile ( DeleteFieldLevelEncryptionProfileRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteFieldLevelEncryptionProfile ( request ) ; |
public class ST_GraphAnalysis { /** * Calculate centrality indices on the nodes and edges of a graph
* constructed from the input table .
* @ param connection Connection
* @ param inputTable Input table
* @ param orientation Global orientation
* @ param weight Edge weight column name
* @ return True if the calculation was successful
* @ throws SQLException
* @ throws InvocationTargetException
* @ throws NoSuchMethodException
* @ throws InstantiationException
* @ throws IllegalAccessException */
public static boolean doGraphAnalysis ( Connection connection , String inputTable , String orientation , String weight ) throws SQLException , InvocationTargetException , NoSuchMethodException , InstantiationException , IllegalAccessException { } } | final TableLocation tableName = TableUtilities . parseInputTable ( connection , inputTable ) ; final TableLocation nodesName = TableUtilities . suffixTableLocation ( tableName , NODE_CENT_SUFFIX ) ; final TableLocation edgesName = TableUtilities . suffixTableLocation ( tableName , EDGE_CENT_SUFFIX ) ; try { createTables ( connection , nodesName , edgesName ) ; final KeyedGraph graph = doAnalysisAndReturnGraph ( connection , inputTable , orientation , weight ) ; final boolean previousAutoCommit = connection . getAutoCommit ( ) ; connection . setAutoCommit ( false ) ; storeNodeCentrality ( connection , nodesName , graph ) ; storeEdgeCentrality ( connection , edgesName , graph ) ; connection . setAutoCommit ( previousAutoCommit ) ; } catch ( SQLException e ) { LOGGER . error ( "Problem creating centrality tables." ) ; final Statement statement = connection . createStatement ( ) ; try { statement . execute ( "DROP TABLE IF EXISTS " + nodesName ) ; statement . execute ( "DROP TABLE IF EXISTS " + edgesName ) ; } finally { statement . close ( ) ; } return false ; } return true ; |
public class BidiagonalHelper_DDRB { /** * Performs a standard bidiagonal decomposition just on the outer blocks of the provided matrix
* @ param blockLength
* @ param A
* @ param gammasU */
public static boolean bidiagOuterBlocks ( final int blockLength , final DSubmatrixD1 A , final double gammasU [ ] , final double gammasV [ ] ) { } } | // System . out . println ( " - - - - - Orig " ) ;
// A . original . print ( ) ;
int width = Math . min ( blockLength , A . col1 - A . col0 ) ; int height = Math . min ( blockLength , A . row1 - A . row0 ) ; int min = Math . min ( width , height ) ; for ( int i = 0 ; i < min ; i ++ ) { // - - - Apply reflector to the column
// compute the householder vector
if ( ! computeHouseHolderCol ( blockLength , A , gammasU , i ) ) return false ; // apply to rest of the columns in the column block
rank1UpdateMultR_Col ( blockLength , A , i , gammasU [ A . col0 + i ] ) ; // apply to the top row block
rank1UpdateMultR_TopRow ( blockLength , A , i , gammasU [ A . col0 + i ] ) ; System . out . println ( "After column stuff" ) ; A . original . print ( ) ; // - - Apply reflector to the row
if ( ! computeHouseHolderRow ( blockLength , A , gammasV , i ) ) return false ; // apply to rest of the rows in the row block
rank1UpdateMultL_Row ( blockLength , A , i , i + 1 , gammasV [ A . row0 + i ] ) ; System . out . println ( "After update row" ) ; A . original . print ( ) ; // apply to the left column block
// TODO THIS WON ' T WORK ! ! ! ! !
// Needs the whole matrix to have been updated by the left reflector to compute the correct solution
// rank1UpdateMultL _ LeftCol ( blockLength , A , i , i + 1 , gammasV [ A . row0 + i ] ) ;
System . out . println ( "After row stuff" ) ; A . original . print ( ) ; } return true ; |
public class BeanHelper { /** * Removes pattern item from list
* @ param bean
* @ param property */
public static final Object removeList ( Object bean , String property ) { } } | return doFor ( bean , property , null , ( Object a , int i ) -> { throw new UnsupportedOperationException ( "not supported" ) ; } , ( List l , int i ) -> { return l . remove ( i ) ; } , ( Object b , Class c , String p ) -> { throw new UnsupportedOperationException ( "not supported" ) ; } , ( Object b , Method m ) -> { throw new UnsupportedOperationException ( "not supported" ) ; } ) ; |
public class PdfContext { /** * Draws the specified image with the first rectangle ' s bounds , clipping with the second one .
* @ param img image
* @ param rect rectangle
* @ param clipRect clipping bounds
* @ param opacity opacity of the image ( 1 = opaque , 0 = transparent ) */
public void drawImage ( Image img , Rectangle rect , Rectangle clipRect , float opacity ) { } } | try { template . saveState ( ) ; // opacity
PdfGState state = new PdfGState ( ) ; state . setFillOpacity ( opacity ) ; state . setBlendMode ( PdfGState . BM_NORMAL ) ; template . setGState ( state ) ; // clipping code
if ( clipRect != null ) { template . rectangle ( clipRect . getLeft ( ) + origX , clipRect . getBottom ( ) + origY , clipRect . getWidth ( ) , clipRect . getHeight ( ) ) ; template . clip ( ) ; template . newPath ( ) ; } template . addImage ( img , rect . getWidth ( ) , 0 , 0 , rect . getHeight ( ) , origX + rect . getLeft ( ) , origY + rect . getBottom ( ) ) ; } catch ( DocumentException e ) { log . warn ( "could not draw image" , e ) ; } finally { template . restoreState ( ) ; } |
public class HostProvisioner { /** * uploads the file or listed files in a directory */
private void upload ( String fileOrDir , String uploadRootDir ) throws Exception { } } | if ( uploadRootDir . isEmpty ( ) ) uploadRootDir = "." ; File origin = new File ( fileOrDir ) ; if ( fileOrDir . endsWith ( ".tar" ) || fileOrDir . endsWith ( ".tar.gz" ) ) { upload ( new File ( fileOrDir ) , uploadRootDir ) ; untar ( uploadRootDir ) ; } else if ( origin . isFile ( ) ) { upload ( new File ( fileOrDir ) , uploadRootDir ) ; } else { File [ ] childFiles = origin . listFiles ( ) ; if ( childFiles != null ) upload ( Arrays . asList ( childFiles ) , uploadRootDir ) ; } |
public class ZoneRegion { /** * Obtains an instance of { @ code ZoneId } from an identifier .
* @ param zoneId the time - zone ID , not null
* @ param checkAvailable whether to check if the zone ID is available
* @ return the zone ID , not null
* @ throws DateTimeException if the ID format is invalid
* @ throws ZoneRulesException if checking availability and the ID cannot be found */
static ZoneRegion ofId ( String zoneId , boolean checkAvailable ) { } } | Objects . requireNonNull ( zoneId , "zoneId" ) ; checkName ( zoneId ) ; ZoneRules rules = null ; try { // always attempt load for better behavior after deserialization
rules = ZoneRulesProvider . getRules ( zoneId , true ) ; } catch ( ZoneRulesException ex ) { if ( checkAvailable ) { throw ex ; } } return new ZoneRegion ( zoneId , rules ) ; |
public class HtmlFocusBaseTag { /** * Sets the onSelect javascript event .
* @ param onselect the onSelect event .
* @ jsptagref . attributedescription The onSelect JavaScript event .
* @ jsptagref . databindable false
* @ jsptagref . attributesyntaxvalue < i > string _ onSelect < / i >
* @ netui : attribute required = " false " rtexprvalue = " true "
* description = " Sets the onSelect javascript event . " */
public void setOnSelect ( String onselect ) { } } | AbstractHtmlState tsh = getState ( ) ; tsh . registerAttribute ( AbstractHtmlState . ATTR_JAVASCRIPT , ONSELECT , onselect ) ; |
public class UpdateTagsForDomainRequest { /** * A list of the tag keys and values that you want to add or update . If you specify a key that already exists , the
* corresponding value will be replaced .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTagsToUpdate ( java . util . Collection ) } or { @ link # withTagsToUpdate ( java . util . Collection ) } if you want to
* override the existing values .
* @ param tagsToUpdate
* A list of the tag keys and values that you want to add or update . If you specify a key that already
* exists , the corresponding value will be replaced .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UpdateTagsForDomainRequest withTagsToUpdate ( Tag ... tagsToUpdate ) { } } | if ( this . tagsToUpdate == null ) { setTagsToUpdate ( new com . amazonaws . internal . SdkInternalList < Tag > ( tagsToUpdate . length ) ) ; } for ( Tag ele : tagsToUpdate ) { this . tagsToUpdate . add ( ele ) ; } return this ; |
public class ApiOvhOrder { /** * Get prices and contracts information
* REST : GET / order / overTheBox / new / { duration }
* @ param offer [ required ] Offer name
* @ param deviceId [ required ] The id of the device
* @ param voucher [ required ] An optional voucher
* @ param duration [ required ] Duration */
public OvhOrder overTheBox_new_duration_GET ( String duration , String deviceId , String offer , String voucher ) throws IOException { } } | String qPath = "/order/overTheBox/new/{duration}" ; StringBuilder sb = path ( qPath , duration ) ; query ( sb , "deviceId" , deviceId ) ; query ( sb , "offer" , offer ) ; query ( sb , "voucher" , voucher ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOrder . class ) ; |
public class Transition { /** * Adds the Class of a target view that this Transition is interested in
* animating . By default , there are no targetTypes , and a Transition will
* listen for changes on every view in the hierarchy below the sceneRoot
* of the Scene being transitioned into . Setting targetTypes constrains
* the Transition to only listen for , and act on , views with these classes .
* Views with different classes will be ignored .
* < p > Note that any View that can be cast to targetType will be included , so
* if targetType is < code > View . class < / code > , all Views will be included . < / p >
* @ see # addTarget ( int )
* @ see # addTarget ( android . view . View )
* @ see # excludeTarget ( Class , boolean )
* @ see # excludeChildren ( Class , boolean )
* @ param targetType The type to include when running this transition .
* @ return The Transition to which the target class was added .
* Returning the same object makes it easier to chain calls during
* construction , such as
* < code > transitionSet . addTransitions ( new Fade ( ) ) . addTarget ( ImageView . class ) ; < / code > */
@ NonNull public Transition addTarget ( @ Nullable Class targetType ) { } } | if ( targetType != null ) { if ( mTargetTypes == null ) { mTargetTypes = new ArrayList < Class > ( ) ; } mTargetTypes . add ( targetType ) ; } return this ; |
public class MessageRunner { /** * Handles the { @ link IllegalArgumentException } associated with requesting
* a sequence larger than the { @ code tailSequence + 1 } .
* This may indicate that an entire partition or an entire ringbuffer was
* lost .
* @ param t the exception
* @ return if the exception was handled and the listener may continue reading */
private boolean handleIllegalArgumentException ( IllegalArgumentException t ) { } } | final long currentHeadSequence = ringbuffer . headSequence ( ) ; if ( listener . isLossTolerant ( ) ) { if ( logger . isFinestEnabled ( ) ) { logger . finest ( String . format ( "MessageListener %s on topic %s requested a too large sequence: %s. " + ". Jumping from old sequence: %s to sequence: %s" , listener , topicName , t . getMessage ( ) , sequence , currentHeadSequence ) ) ; } this . sequence = currentHeadSequence ; return true ; } logger . warning ( "Terminating MessageListener:" + listener + " on topic: " + topicName + ". " + "Reason: Underlying ring buffer data related to reliable topic is lost. " ) ; return false ; |
public class WSFederationValidateRequestController { /** * Handle federation request .
* @ param response the response
* @ param request the request
* @ throws Exception the exception */
@ GetMapping ( path = WSFederationConstants . ENDPOINT_FEDERATION_REQUEST ) protected void handleFederationRequest ( final HttpServletResponse response , final HttpServletRequest request ) throws Exception { } } | val fedRequest = WSFederationRequest . of ( request ) ; val wa = fedRequest . getWa ( ) ; if ( StringUtils . isBlank ( wa ) ) { throw new UnauthorizedAuthenticationException ( "Unable to determine the [WA] parameter" , new HashMap < > ( 0 ) ) ; } switch ( wa . toLowerCase ( ) ) { case WSFederationConstants . WSIGNOUT10 : case WSFederationConstants . WSIGNOUT_CLEANUP10 : handleLogoutRequest ( fedRequest , request , response ) ; break ; case WSFederationConstants . WSIGNIN10 : val targetService = getWsFederationRequestConfigurationContext ( ) . getWebApplicationServiceFactory ( ) . createService ( fedRequest . getWreply ( ) ) ; handleInitialAuthenticationRequest ( fedRequest , targetService , response , request ) ; break ; default : throw new UnauthorizedAuthenticationException ( "The authentication request is not recognized" , new HashMap < > ( 0 ) ) ; } |
public class CliPrinter { /** * Print information about a command . Called to display assistance information about a command , or if a parse
* error occurred while parsing the command ' s parameters .
* @ param info Command info to print . */
public void printCommandInfo ( CommandInfo info ) { } } | final PrintContext context = new PrintContext ( ) ; // Print command name : description
final CliCommand command = info . getCommand ( ) ; printIdentifiable ( context , command ) ; // Print bound params .
final BoundParams boundParams = info . getBoundParams ( ) ; printBoundParams ( context , command , boundParams ) ; |
public class SingularityClient { /** * Get all requests that their state is ACTIVE
* @ return
* All ACTIVE { @ link SingularityRequestParent } instances */
public Collection < SingularityRequestParent > getActiveSingularityRequests ( ) { } } | final Function < String , String > requestUri = ( host ) -> String . format ( REQUESTS_GET_ACTIVE_FORMAT , getApiBase ( host ) ) ; return getCollection ( requestUri , "ACTIVE requests" , REQUESTS_COLLECTION ) ; |
public class CollectSerIteratorFactory { /** * Creates an iterator wrapper for a meta - property value .
* @ param prop the meta - property defining the value , not null
* @ param beanClass the class of the bean , not the meta - property , for better generics , not null
* @ return the iterator , null if not a collection - like type */
@ Override public SerIterable createIterable ( final MetaProperty < ? > prop , Class < ? > beanClass ) { } } | if ( Grid . class . isAssignableFrom ( prop . propertyType ( ) ) ) { Class < ? > valueType = JodaBeanUtils . collectionType ( prop , beanClass ) ; List < Class < ? > > valueTypeTypes = JodaBeanUtils . collectionTypeTypes ( prop , beanClass ) ; return grid ( valueType , valueTypeTypes ) ; } return super . createIterable ( prop , beanClass ) ; |
public class ServerStats { /** * Gets the 98 - th percentile in the total amount of time spent handling a request , in milliseconds . */
@ Monitor ( name = "ResponseTimeMillis98Percentile" , type = DataSourceType . INFORMATIONAL , description = "98th percentile in total time to handle a request, in milliseconds" ) public double getResponseTime98thPercentile ( ) { } } | return getResponseTimePercentile ( Percent . NINETY_EIGHT ) ; |
public class BloomFilter { /** * Generates a digest based on the contents of an array of bytes .
* @ param data specifies input data .
* @ return digest as long . */
public static long createHash ( byte [ ] data ) { } } | long h = 0 ; byte [ ] res ; synchronized ( digestFunction ) { res = digestFunction . digest ( data ) ; } for ( int i = 0 ; i < 4 ; i ++ ) { h <<= 8 ; h |= ( ( int ) res [ i ] ) & 0xFF ; } return h ; |
public class ResourceLoader { /** * Start to load resources in a separate thread .
* @ throws LionEngineException If loader has already been started . */
public synchronized void start ( ) { } } | if ( started . get ( ) ) { throw new LionEngineException ( ERROR_STARTED ) ; } started . set ( true ) ; thread . start ( ) ; |
public class SourceHandler { /** * Set pipeline related properties . These should be set only once per connection . */
private void setPipeliningProperties ( ) { } } | if ( ctx . channel ( ) . attr ( Constants . MAX_RESPONSES_ALLOWED_TO_BE_QUEUED ) . get ( ) == null ) { ctx . channel ( ) . attr ( Constants . MAX_RESPONSES_ALLOWED_TO_BE_QUEUED ) . set ( pipeliningLimit ) ; } if ( ctx . channel ( ) . attr ( Constants . RESPONSE_QUEUE ) . get ( ) == null ) { ctx . channel ( ) . attr ( Constants . RESPONSE_QUEUE ) . set ( holdingQueue ) ; } if ( ctx . channel ( ) . attr ( Constants . NEXT_SEQUENCE_NUMBER ) . get ( ) == null ) { ctx . channel ( ) . attr ( Constants . NEXT_SEQUENCE_NUMBER ) . set ( EXPECTED_SEQUENCE_NUMBER ) ; } if ( ctx . channel ( ) . attr ( Constants . PIPELINING_EXECUTOR ) . get ( ) == null ) { ctx . channel ( ) . attr ( Constants . PIPELINING_EXECUTOR ) . set ( pipeliningGroup ) ; } |
public class JolokiaServer { /** * Start this server . If we manage an own HttpServer , then the HttpServer will
* be started as well . */
public void start ( ) { } } | // URL as configured takes precedence
String configUrl = NetworkUtil . replaceExpression ( config . getJolokiaConfig ( ) . get ( ConfigKey . DISCOVERY_AGENT_URL ) ) ; jolokiaHttpHandler . start ( lazy , configUrl != null ? configUrl : url , config . getAuthenticator ( ) != null ) ; if ( httpServer != null ) { // Starting our own server in an own thread group with a fixed name
// so that the cleanup thread can recognize it .
ThreadGroup threadGroup = new ThreadGroup ( "jolokia" ) ; threadGroup . setDaemon ( false ) ; Thread starterThread = new Thread ( threadGroup , new Runnable ( ) { @ Override public void run ( ) { httpServer . start ( ) ; } } ) ; starterThread . start ( ) ; cleaner = new CleanupThread ( httpServer , threadGroup ) ; cleaner . start ( ) ; } |
public class SpringUtils { /** * Gets a bean by its id and class .
* @ param appContext
* @ param id
* @ param clazz
* @ return */
public static < T > T getBean ( ApplicationContext appContext , String id , Class < T > clazz ) { } } | try { return appContext . getBean ( id , clazz ) ; } catch ( BeansException e ) { return null ; } |
public class DescribeSpotPriceHistoryResult { /** * The historical Spot prices .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSpotPriceHistory ( java . util . Collection ) } or { @ link # withSpotPriceHistory ( java . util . Collection ) } if you
* want to override the existing values .
* @ param spotPriceHistory
* The historical Spot prices .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeSpotPriceHistoryResult withSpotPriceHistory ( SpotPrice ... spotPriceHistory ) { } } | if ( this . spotPriceHistory == null ) { setSpotPriceHistory ( new com . amazonaws . internal . SdkInternalList < SpotPrice > ( spotPriceHistory . length ) ) ; } for ( SpotPrice ele : spotPriceHistory ) { this . spotPriceHistory . add ( ele ) ; } return this ; |
public class HistoryCleanupJobHandlerConfiguration { /** * The delay between two " empty " runs increases twice each time until it reaches { @ link HistoryCleanupJobHandlerConfiguration # MAX _ DELAY } value .
* @ param date date to count delay from
* @ return date with delay */
public Date getNextRunWithDelay ( Date date ) { } } | Date result = addSeconds ( date , Math . min ( ( int ) ( Math . pow ( 2. , ( double ) countEmptyRuns ) * START_DELAY ) , MAX_DELAY ) ) ; return result ; |
public class CliFrontend { /** * Gets the custom command - line for the arguments .
* @ param commandLine The input to the command - line .
* @ return custom command - line which is active ( may only be one at a time ) */
public CustomCommandLine < ? > getActiveCustomCommandLine ( CommandLine commandLine ) { } } | for ( CustomCommandLine < ? > cli : customCommandLines ) { if ( cli . isActive ( commandLine ) ) { return cli ; } } throw new IllegalStateException ( "No command-line ran." ) ; |
public class ModuleAggregate { /** * Remove SpdData from the aggregate data */
public synchronized boolean remove ( SpdData data ) { } } | if ( data == null ) return false ; SpdGroup aggregateData = ( SpdGroup ) dataTable . get ( new Integer ( data . getId ( ) ) ) ; if ( aggregateData == null ) return false ; return aggregateData . remove ( data ) ; |
public class ProviderHelper { /** * Compare two provider list , return add list and remove list
* @ param oldList old Provider list
* @ param newList new provider list
* @ param add provider list need add
* @ param remove provider list need remove */
public static void compareProviders ( List < ProviderInfo > oldList , List < ProviderInfo > newList , List < ProviderInfo > add , List < ProviderInfo > remove ) { } } | // 比较老列表和当前列表
if ( CommonUtils . isEmpty ( oldList ) ) { // 空变成非空
if ( CommonUtils . isNotEmpty ( newList ) ) { add . addAll ( newList ) ; } // 空到空 , 忽略
} else { // 非空变成空
if ( CommonUtils . isEmpty ( newList ) ) { remove . addAll ( oldList ) ; } else { // 非空变成非空 , 比较
if ( CommonUtils . isNotEmpty ( oldList ) ) { List < ProviderInfo > tmpList = new ArrayList < ProviderInfo > ( newList ) ; // 遍历老的
for ( ProviderInfo oldProvider : oldList ) { if ( tmpList . contains ( oldProvider ) ) { tmpList . remove ( oldProvider ) ; } else { // 新的没有 , 老的有 , 删掉
remove . add ( oldProvider ) ; } } add . addAll ( tmpList ) ; } } } |
public class Entity { /** * Sets the value of the provided property to the provided value .
* @ param propName
* allowed object is { @ link String }
* @ param value
* allowed object is { @ link Object } */
public void set ( String propName , Object value ) { } } | if ( propName . equals ( PROP_IDENTIFIER ) ) { setIdentifier ( ( ( IdentifierType ) value ) ) ; } if ( propName . equals ( PROP_VIEW_IDENTIFIERS ) ) { getViewIdentifiers ( ) . add ( ( ( com . ibm . wsspi . security . wim . model . ViewIdentifierType ) value ) ) ; } if ( propName . equals ( PROP_PARENT ) ) { setParent ( ( ( Entity ) value ) ) ; } if ( propName . equals ( PROP_CHILDREN ) ) { getChildren ( ) . add ( ( ( com . ibm . wsspi . security . wim . model . Entity ) value ) ) ; } if ( propName . equals ( PROP_GROUPS ) ) { getGroups ( ) . add ( ( ( com . ibm . wsspi . security . wim . model . Group ) value ) ) ; } if ( propName . equals ( PROP_CREATE_TIMESTAMP ) ) { setCreateTimestamp ( ( ( Date ) value ) ) ; } if ( propName . equals ( PROP_MODIFY_TIMESTAMP ) ) { setModifyTimestamp ( ( ( Date ) value ) ) ; } if ( propName . equals ( PROP_ENTITLEMENT_INFO ) ) { setEntitlementInfo ( ( ( EntitlementInfoType ) value ) ) ; } if ( propName . equals ( PROP_CHANGE_TYPE ) ) { setChangeType ( ( ( String ) value ) ) ; } |
public class NodeGroupClient { /** * Retrieves an aggregated list of node groups . Note : use nodeGroups . listNodes for more details
* about each group .
* < p > Sample code :
* < pre > < code >
* try ( NodeGroupClient nodeGroupClient = NodeGroupClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* for ( NodeGroupsScopedList element : nodeGroupClient . aggregatedListNodeGroups ( project ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final AggregatedListNodeGroupsPagedResponse aggregatedListNodeGroups ( ProjectName project ) { } } | AggregatedListNodeGroupsHttpRequest request = AggregatedListNodeGroupsHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return aggregatedListNodeGroups ( request ) ; |
/**
 * Creates values for the result file columns.
 *
 * Column layout: 0 = controller gene symbol, 1 = controller modifications,
 * 2 = changed gene symbol, 3 = gained modifications (delta index 0),
 * 4 = lost modifications (delta index 1). -- NOTE(review): gained/lost order is
 * presumed from the [0]/[1] indices; confirm against getDeltaModifications.
 *
 * @param m current match
 * @param col current column
 * @return value of the given match at the given column
 * @throws RuntimeException if the column number is out of range
 */
@Override
public String getValue(Match m, int col) {
    switch (col) {
        case 0: {
            return getGeneSymbol(m, "controller ER");
        }
        case 1: {
            return concat(getModifications(m, "controller simple PE", "controller PE"), " ");
        }
        case 2: {
            return getGeneSymbol(m, "changed ER");
        }
        case 3: {
            // First element of the delta between input and output modifications.
            return concat(getDeltaModifications(m, "input simple PE", "input PE", "output simple PE", "output PE")[0], " ");
        }
        case 4: {
            // Second element of the same delta (recomputed per call).
            return concat(getDeltaModifications(m, "input simple PE", "input PE", "output simple PE", "output PE")[1], " ");
        }
        default:
            throw new RuntimeException("Invalid col number: " + col);
    }
}
public class ThreadSafety { /** * Returns an { @ link Violation } explaining whether the type is threadsafe .
* @ param allowContainerTypeParameters true when checking the instantiation of an { @ code
* typeParameterAnnotation } - annotated type parameter ; indicates that { @ code
* containerTypeParameters } should be ignored
* @ param containerTypeParameters type parameters in enclosing elements ' containerOf
* specifications
* @ param type to check for thread - safety */
public Violation isThreadSafeType ( boolean allowContainerTypeParameters , Set < String > containerTypeParameters , Type type ) { } } | return type . accept ( new ThreadSafeTypeVisitor ( allowContainerTypeParameters , containerTypeParameters ) , null ) ; |
public class SslCipherSuites { /** * openssl / ssl / ssl _ ciph . c */
private static List < SslCipher > getCiphers ( String token ) { } } | List < SslCipher > ciphers ; SslCipher cipher = CIPHERS . get ( token ) ; if ( cipher != null ) { ciphers = new ArrayList < > ( 1 ) ; ciphers . add ( cipher ) ; return ciphers ; } cipher = CIPHER_NICKNAMES . get ( token ) ; if ( cipher != null ) { ciphers = new ArrayList < > ( 1 ) ; ciphers . add ( cipher ) ; return ciphers ; } ciphers = CIPHER_STRENGTHS . get ( token ) ; if ( ciphers != null ) { return ciphers ; } ciphers = CIPHER_KEY_EXCHANGES . get ( token ) ; if ( ciphers != null ) { return ciphers ; } ciphers = CIPHER_AUTHNS . get ( token ) ; if ( ciphers != null ) { return ciphers ; } ciphers = CIPHER_ENCRYPTS . get ( token ) ; if ( ciphers != null ) { return ciphers ; } ciphers = CIPHER_MACS . get ( token ) ; if ( ciphers != null ) { return ciphers ; } ciphers = CIPHER_PROTOCOLS . get ( token ) ; if ( ciphers != null ) { return ciphers ; } if ( token . equals ( "FIPS" ) ) { return CIPHER_FIPS ; } // If we reach this point , then we don ' t how to handle this
// token . Could be a custom ciphersuite name for some non - standard
// JSSE provider , so we leave it alone .
cipher = new OtherSslCipher ( token ) ; ciphers = new ArrayList < > ( 1 ) ; ciphers . add ( cipher ) ; return ciphers ; |
public class Lockable { /** * Write - lock & delete ' k ' . Will fail if ' k ' is locked by anybody other than ' job _ key ' */
public static void delete ( Key k , Key job_key ) { } } | if ( k == null ) return ; Value val = DKV . get ( k ) ; if ( val == null ) return ; // Or just nothing there to delete
if ( ! val . isLockable ( ) ) UKV . remove ( k ) ; // Simple things being deleted
else ( ( Lockable ) val . get ( ) ) . delete ( job_key , 0.0f ) ; // Lockable being deleted |
public class HtmlBuilder { /** * Build a String containing a HTML opening tag with given CSS class and / or
* style and concatenates the given content .
* @ param tag String name of HTML tag
* @ param clazz CSS class of the tag
* @ param style style for tag ( plain CSS )
* @ param contentIsHtml if false content is prepared with { @ link HtmlBuilder # htmlEncode ( String ) } .
* @ param content content string
* @ return HTML tag element as string */
private static String openTag ( String tag , String clazz , String style , boolean contentIsHtml , String ... content ) { } } | StringBuilder result = new StringBuilder ( "<" ) . append ( tag ) ; if ( clazz != null && ! "" . equals ( clazz ) ) { result . append ( " " ) . append ( Html . Attribute . CLASS ) . append ( "='" ) . append ( clazz ) . append ( "'" ) ; } if ( style != null && ! "" . equals ( style ) ) { result . append ( " " ) . append ( Html . Attribute . STYLE ) . append ( "='" ) . append ( style ) . append ( "'" ) ; } result . append ( ">" ) ; if ( content != null && content . length > 0 ) { for ( String c : content ) { if ( contentIsHtml ) { result . append ( c ) ; } else { result . append ( htmlEncode ( c ) ) ; } } } return result . toString ( ) ; |
public class syslog_generic { /** * < pre >
* Use this operation to delete generic syslog message details . .
* < / pre > */
public static syslog_generic delete ( nitro_service client , syslog_generic resource ) throws Exception { } } | resource . validate ( "delete" ) ; return ( ( syslog_generic [ ] ) resource . delete_resource ( client ) ) [ 0 ] ; |
public class AbstractRadial { /** * Returns the image of the pointer . This pointer is centered in the gauge .
* @ param WIDTH
* @ param POINTER _ TYPE
* @ param POINTER _ COLOR
* @ return the pointer image that is used in all gauges that have a centered pointer */
protected BufferedImage create_POINTER_Image ( final int WIDTH , final PointerType POINTER_TYPE , final ColorDef POINTER_COLOR ) { } } | return POINTER_FACTORY . createStandardPointer ( WIDTH , POINTER_TYPE , POINTER_COLOR , getBackgroundColor ( ) ) ; |
/**
 * Marshalls the given request's fields (status, platform id, requested-by,
 * max results, next token) into the protocol marshaller.
 *
 * @param listSigningJobsRequest request to marshall; must not be null
 * @param protocolMarshaller     target marshaller
 * @throws SdkClientException if the request is null or any field fails to marshall
 */
public void marshall(ListSigningJobsRequest listSigningJobsRequest, ProtocolMarshaller protocolMarshaller) {
    if (listSigningJobsRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Each getter/binding pair maps one request field to the wire format.
        protocolMarshaller.marshall(listSigningJobsRequest.getStatus(), STATUS_BINDING);
        protocolMarshaller.marshall(listSigningJobsRequest.getPlatformId(), PLATFORMID_BINDING);
        protocolMarshaller.marshall(listSigningJobsRequest.getRequestedBy(), REQUESTEDBY_BINDING);
        protocolMarshaller.marshall(listSigningJobsRequest.getMaxResults(), MAXRESULTS_BINDING);
        protocolMarshaller.marshall(listSigningJobsRequest.getNextToken(), NEXTTOKEN_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in an SdkClientException, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
public class GeneratePluginConfigMBean { /** * Required static reference : called before activate */
protected void setVirtualHostMgr ( DynamicVirtualHostManager vhostMgr ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "setVirtualHostMgr : DynamicVirtualHost set : " + vhostMgr . toString ( ) ) ; dynVhostMgr = vhostMgr ; |
/**
 * Parses the rule PathExpr according to the following production rule:
 * [25] PathExpr ::= ("/" RelativePathExpr?) | ("//" RelativePathExpr) | RelativePathExpr.
 *
 * A leading "/" anchors the expression at the document root; a leading "//"
 * additionally inserts a descendant-or-self step; otherwise the expression is
 * parsed relative to the current context.
 *
 * @throws TTXPathException if the path expression cannot be parsed
 */
private void parsePathExpr() throws TTXPathException {
    if (is(TokenType.SLASH, true)) {
        // path expression starts from the root
        mPipeBuilder.addStep(new DocumentNodeAxis(getTransaction()));
        final TokenType type = mToken.getType();
        if (type != TokenType.END && type != TokenType.COMMA) {
            // all immediately following keywords or '*' are nametests, not
            // operators
            // leading-lone-slash constrain
            parseRelativePathExpr();
        }
    } else if (is(TokenType.DESC_STEP, true)) {
        // path expression starts from the root with a descendant-or-self
        // step
        mPipeBuilder.addStep(new DocumentNodeAxis(getTransaction()));
        final AbsAxis mAxis = new DescendantAxis(getTransaction(), true);
        mPipeBuilder.addStep(mAxis);
        parseRelativePathExpr();
    } else {
        // no leading slash: plain relative path expression
        parseRelativePathExpr();
    }
}
public class FastAdapter { /** * adds new event hooks for an item
* NOTE : this has to be called before adding the first items , as this won ' t be called anymore after the ViewHolders were created
* @ param eventHooks the event hooks to be added for an item
* @ return this */
public FastAdapter < Item > withEventHooks ( @ Nullable Collection < ? extends EventHook < Item > > eventHooks ) { } } | if ( eventHooks == null ) { return this ; } if ( this . eventHooks == null ) { this . eventHooks = new LinkedList < > ( ) ; } this . eventHooks . addAll ( eventHooks ) ; return this ; |
public class SerializationUtils { /** * Serialize an object to byte array , with a custom class loader .
* This method uses FST lib .
* @ param obj
* @ param classLoader
* @ return
* @ since 0.6.0 */
public static byte [ ] toByteArrayFst ( final Object obj , final ClassLoader classLoader ) { } } | if ( obj == null ) { return null ; } ClassLoader oldClassLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( classLoader != null ) { Thread . currentThread ( ) . setContextClassLoader ( classLoader ) ; } try { FSTConfiguration conf = fstConf . get ( ) ; conf . setClassLoader ( classLoader != null ? classLoader : oldClassLoader ) ; return conf . asByteArray ( obj ) ; } catch ( Exception e ) { throw e instanceof SerializationException ? ( SerializationException ) e : new SerializationException ( e ) ; } finally { Thread . currentThread ( ) . setContextClassLoader ( oldClassLoader ) ; } |
public class FactoryImageDenoise { /** * Default wavelet transform used for denoising images . */
private static WaveletTransform createDefaultShrinkTransform ( ImageDataType imageType , int numLevels , double minPixelValue , double maxPixelValue ) { } } | WaveletTransform descTran ; if ( ! imageType . isInteger ( ) ) { WaveletDescription < WlCoef_F32 > waveletDesc_F32 = FactoryWaveletDaub . daubJ_F32 ( 4 ) ; descTran = FactoryWaveletTransform . create_F32 ( waveletDesc_F32 , numLevels , ( float ) minPixelValue , ( float ) maxPixelValue ) ; } else { WaveletDescription < WlCoef_I32 > waveletDesc_I32 = FactoryWaveletDaub . biorthogonal_I32 ( 5 , BorderType . REFLECT ) ; descTran = FactoryWaveletTransform . create_I ( waveletDesc_I32 , numLevels , ( int ) minPixelValue , ( int ) maxPixelValue , ImageType . getImageClass ( ImageType . Family . GRAY , imageType ) ) ; } return descTran ; |
public class JobHistory { /** * Parses history file and invokes Listener . handle ( ) for
* each line of history . It can be used for looking through history
* files for specific items without having to keep whole history in memory .
* @ param path path to history file
* @ param l Listener for history events
* @ param fs FileSystem where history file is present
* @ throws IOException */
public static void parseHistoryFromFS ( String path , Listener l , FileSystem fs ) throws IOException { } } | FSDataInputStream in = fs . open ( new Path ( path ) ) ; BufferedReader reader = new BufferedReader ( new InputStreamReader ( in ) ) ; try { String line = null ; StringBuffer buf = new StringBuffer ( ) ; // Read the meta - info line . Note that this might a jobinfo line for files
// written with older format
line = reader . readLine ( ) ; // Check if the file is empty
if ( line == null ) { return ; } // Get the information required for further processing
MetaInfoManager mgr = new MetaInfoManager ( line ) ; boolean isEscaped = mgr . isValueEscaped ( ) ; String lineDelim = String . valueOf ( mgr . getLineDelim ( ) ) ; String escapedLineDelim = StringUtils . escapeString ( lineDelim , StringUtils . ESCAPE_CHAR , mgr . getLineDelim ( ) ) ; do { buf . append ( line ) ; if ( ! line . trim ( ) . endsWith ( lineDelim ) || line . trim ( ) . endsWith ( escapedLineDelim ) ) { buf . append ( "\n" ) ; continue ; } parseLine ( buf . toString ( ) , l , isEscaped ) ; buf = new StringBuffer ( ) ; } while ( ( line = reader . readLine ( ) ) != null ) ; } finally { try { reader . close ( ) ; } catch ( IOException ex ) { } } |
public class INode { /** * Convert strings to byte arrays for path components . */
static byte [ ] [ ] getPathComponents ( String [ ] strings ) { } } | if ( strings . length == 0 ) { return new byte [ ] [ ] { null } ; } byte [ ] [ ] bytes = new byte [ strings . length ] [ ] ; for ( int i = 0 ; i < strings . length ; i ++ ) bytes [ i ] = DFSUtil . string2Bytes ( strings [ i ] ) ; return bytes ; |
public class Range { /** * Creates a new { @ link Range } with the specified inclusive end and the current start . */
public R endClosed ( @ Nonnull T end ) { } } | this . end = Preconditions . checkNotNull ( end , "End can't be null" ) ; this . endBound = BoundType . CLOSED ; return thisT ( ) ; |
/**
 * Returns the editable columns for the provided edit mode.
 *
 * @param mode the edit mode
 * @return the editable columns for the provided edit mode
 */
public List<TableProperty> getEditableColumns(CmsMessageBundleEditorTypes.EditMode mode) {
    // Delegates to the per-mode editor state; assumes m_editorState contains an
    // entry for every EditMode -- TODO confirm, otherwise this NPEs.
    return m_editorState.get(mode).getEditableColumns();
}
public class XmlStreamReaderUtils { /** * Returns the value of an attribute as a float . If the attribute is empty , this method returns
* the default value provided .
* @ param reader
* < code > XMLStreamReader < / code > that contains attribute values .
* @ param namespace
* String
* @ param localName
* local name of attribute ( the namespace is ignored ) .
* @ param defaultValue
* default value
* @ return value of attribute , or the default value if the attribute is empty . */
public static float optionalFloatAttribute ( final XMLStreamReader reader , final String namespace , final String localName , final float defaultValue ) { } } | final String value = reader . getAttributeValue ( namespace , localName ) ; if ( value != null ) { return Float . parseFloat ( value ) ; } return defaultValue ; |
public class ColumnMapperInet { /** * { @ inheritDoc } */
@ Override public String indexValue ( String name , Object value ) { } } | if ( value == null ) { return null ; } else if ( value instanceof InetAddress ) { InetAddress inetAddress = ( InetAddress ) value ; return inetAddress . getHostAddress ( ) ; } else if ( value instanceof String ) { String svalue = ( String ) value ; if ( IPV4_PATTERN . matcher ( svalue ) . matches ( ) || IPV6_PATTERN . matcher ( svalue ) . matches ( ) || IPV6_COMPRESSED_PATTERN . matcher ( svalue ) . matches ( ) ) { try { return InetAddress . getByName ( svalue ) . getHostAddress ( ) ; } catch ( UnknownHostException e ) { Log . error ( e , e . getMessage ( ) ) ; } } } throw new IllegalArgumentException ( String . format ( "Value '%s' cannot be cast to InetAddress" , value ) ) ; |
/**
 * Filters the batched event lists against each other before the batch is applied.
 * Because it's too complicated to keep all the state necessary for sorting these
 * events as they happen, all events are collected and reconciled here:
 * stale invalidations are dropped, push events superseded by invalidations are
 * dropped, and alias/external-cache-fragment events are dropped when their ids
 * or templates appear in the invalidation lists.
 *
 * @param mutated in place: invalidateIdEvents, pushEntryEvents, pushECFEvents,
 *        aliasEntryEvents may have entries removed; invalidateTemplateEvents is read-only here.
 */
private void cleanUpEventLists(DCache cache, HashMap invalidateIdEvents, HashMap invalidateTemplateEvents, ArrayList pushEntryEvents, ArrayList pushECFEvents, ArrayList aliasEntryEvents) {
    // 245015
    // Remove invalidateByIdEvent if the CacheEntry is newer than the invalidate request.
    Iterator it = invalidateIdEvents.values().iterator();
    while (it.hasNext()) {
        InvalidateByIdEvent invalidateByIdEvent = (InvalidateByIdEvent) it.next();
        Object id = invalidateByIdEvent.getId();
        CacheEntry cacheEntry = cache.getEntryFromMemory(id);
        // 245015
        // In PushPull mode, dmap.put() --> cache.invalidateAndSet() --> (1) get old value; (2) invalidate but not performing renounce id
        // in DRS's BatchUpdate. The not-InvalidateByIdEvent.INVOKE_DRS_RENOUNCE ('False') will be passed when creating InvalidateByEvent;
        // (3) put entry in memory and performing announce id in DRS's BatchUpdate. For this case, we need to keep both invalidate event
        // in the InvalidateIdEvents and id in the PushEntryEvents.
        if (cacheEntry != null && invalidateByIdEvent.getTimeStamp() < cacheEntry.getTimeStamp() && invalidateByIdEvent.isInvokeDRSRenounce() == true) { // 495487
            if (tc.isDebugEnabled()) {
                Tr.debug(tc, "cleanUpEventLists(): Filtered out InvalidateByIdEvent when cache entry is newer in memory cache. cacheName=" + cache.getCacheName() + " id=" + id);
            }
            it.remove();
        }
    }
    // check pushEntryEvents to see if id, data ids, or template are in invalidate lists
    it = pushEntryEvents.iterator();
    while (it.hasNext()) {
        boolean remove = false;
        CacheEntry cacheEntry = (CacheEntry) it.next();
        if (invalidateIdEvents.containsKey(cacheEntry.getIdObject())) {
            InvalidateByIdEvent invalidateByIdEvent = (InvalidateByIdEvent) invalidateIdEvents.get(cacheEntry.getIdObject());
            // In PushPull mode, dmap.put() --> cache.invalidateAndSet() --> (1) get old value; (2) invalidate but not performing renounce id
            // in DRS's BatchUpdate. The not-InvalidateByIdEvent.INVOKE_DRS_RENOUNCE ('False') will be passed when creating InvalidateByEvent;
            // (3) put entry in memory and performing announce id in DRS's BatchUpdate. For this case, we need to keep both invalidate event
            // in the InvalidateIdEvents and id in the PushEntryEvents.
            if (invalidateByIdEvent.isInvokeDRSRenounce() == true) { // 495487
                remove = true;
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "cleanUpEventLists(): Filtered out pushEntryEvents when id is n invalidation list. cacheName=" + cache.getCacheName() + " id=" + cacheEntry.getIdObject());
                }
            }
        }
        if (!remove) {
            // Drop the push if any of the entry's dependency ids are being invalidated.
            Enumeration e = cacheEntry.getDataIds();
            while (e.hasMoreElements()) {
                Object did = e.nextElement();
                if (invalidateIdEvents.containsKey(did)) {
                    remove = true;
                    if (tc.isDebugEnabled()) {
                        Tr.debug(tc, "cleanUpEventLists(): Filtered out pushEntryEvents when dependency id is in invalidation list. cacheName=" + cache.getCacheName() + " id=" + cacheEntry.getIdObject() + " depid=" + did);
                    }
                    break;
                }
            }
        }
        if (!remove) {
            // Drop the push if any of the entry's templates are being invalidated.
            Enumeration e = cacheEntry.getTemplates();
            while (e.hasMoreElements()) {
                Object template = e.nextElement();
                if (invalidateIdEvents.containsKey(template)) {
                    remove = true;
                    if (tc.isDebugEnabled()) {
                        Tr.debug(tc, "cleanUpEventLists(): Filtered out pushEntryEvents when template is in invalidation list. cacheName=" + cache.getCacheName() + " id=" + cacheEntry.getIdObject() + " template=" + template);
                    }
                    break;
                }
            }
        }
        // Remove PUSH event if the ce is not serializable
        if (!remove && !cacheEntry.prepareForSerialization()) {
            // a msg was logged during the above call
            remove = true;
        }
        if (remove) {
            it.remove();
        }
    }
    // check aliasEntryEvents to see if id is in invalidate lists or
    // the CacheEntry is newer than the add/remove alias request.
    it = aliasEntryEvents.iterator();
    while (it.hasNext()) {
        boolean remove = false;
        AliasEntry aliasEntry = (AliasEntry) it.next();
        if (invalidateIdEvents.containsKey(aliasEntry.id)) {
            remove = true;
        } else {
            Iterator it_ce = pushEntryEvents.iterator();
            while (it_ce.hasNext()) {
                CacheEntry cacheEntry = (CacheEntry) it_ce.next();
                if (cacheEntry.getIdObject().equals(aliasEntry.id)) {
                    if (cacheEntry.getTimeStamp() > aliasEntry.getTimeStamp()) {
                        remove = true;
                        if (tc.isDebugEnabled()) {
                            Tr.debug(tc, "cleanUpEventLists(): Filtered out aliasEntryEvents when cache entry is newer than alias request. cacheName=" + cache.getCacheName() + " id=" + cacheEntry.getIdObject());
                        }
                        break;
                    }
                }
            }
        }
        if (remove) {
            it.remove();
        }
    }
    // check pushECFEvents to make sure they should not be invalidated
    it = pushECFEvents.iterator();
    while (it.hasNext()) {
        boolean remove = false;
        ExternalInvalidation externalCacheFragment = (ExternalInvalidation) it.next();
        Enumeration enumeration = externalCacheFragment.getTemplates();
        while (!remove && enumeration.hasMoreElements()) {
            String template = (String) enumeration.nextElement();
            if (invalidateTemplateEvents.containsKey(template)) {
                remove = true;
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "cleanUpEventLists(): Filtered out pushECFEvents when template is in invalidation list. cacheName=" + cache.getCacheName() + " template=" + template);
                }
            }
        }
        enumeration = externalCacheFragment.getInvalidationIds();
        while (!remove && enumeration.hasMoreElements()) {
            String id = (String) enumeration.nextElement();
            if (invalidateIdEvents.containsKey(id)) {
                remove = true;
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "cleanUpEventLists(): Filtered out pushECFEvents when invalidation id is in invalidation list. cacheName=" + cache.getCacheName() + " ide=" + id);
                }
            }
        }
        if (remove)
            it.remove();
    }
}
public class ChronoHistory { /** * / * [ deutsch ]
* < p > Erzeugt eine Kopie dieser Instanz mit den angegebenen historischen julianischen Schaltjahren . < / p >
* < p > Diese Methode hat keine Auswirkung , wenn angewandt auf { @ code ChronoHistory . PROLEPTIC _ GREGORIAN }
* oder { @ code ChronoHistory . PROLEPTIC _ JULIAN } oder { @ code ChronoHistory . PROLEPTIC _ BYZANTINE } . < / p >
* @ param ancientJulianLeapYears sequence of historic julian leap years
* @ return new history which starts at first of January in year BC 45
* @ since 3.11/4.8 */
public ChronoHistory with ( AncientJulianLeapYears ancientJulianLeapYears ) { } } | if ( ancientJulianLeapYears == null ) { throw new NullPointerException ( "Missing ancient julian leap years." ) ; } else if ( ! this . hasGregorianCutOverDate ( ) ) { return this ; } return new ChronoHistory ( this . variant , this . events , ancientJulianLeapYears , this . nys , this . eraPreference ) ; |
public class WordUtils { /** * < p > Converts all the delimiter separated words in a String into capitalized words ,
* that is each word is made up of a titlecase character and then a series of
* lowercase characters . < / p >
* < p > The delimiters represent a set of characters understood to separate words .
* The first string character and the first non - delimiter character after a
* delimiter will be capitalized . < / p >
* < p > A < code > null < / code > input String returns < code > null < / code > .
* Capitalization uses the Unicode title case , normally equivalent to
* upper case . < / p >
* < pre >
* WordUtils . capitalizeFully ( null , * ) = null
* WordUtils . capitalizeFully ( " " , * ) = " "
* WordUtils . capitalizeFully ( * , null ) = *
* WordUtils . capitalizeFully ( * , new char [ 0 ] ) = *
* WordUtils . capitalizeFully ( " i aM . fine " , { ' . ' } ) = " I am . Fine "
* < / pre >
* @ param str the String to capitalize , may be null
* @ param delimiters set of characters to determine capitalization , null means whitespace
* @ return capitalized String , < code > null < / code > if null String input
* @ since 2.1 */
@ GwtIncompatible ( "incompatible method" ) public static String capitalizeFully ( String str , final char ... delimiters ) { } } | final int delimLen = delimiters == null ? - 1 : delimiters . length ; if ( StringUtils . isEmpty ( str ) || delimLen == 0 ) { return str ; } str = str . toLowerCase ( ) ; return capitalize ( str , delimiters ) ; |
public class AbstractGraph { /** * Gets the related wrapper for the given object , creates the wrapper if not created before .
* @ param obj Object to wrap
* @ return wrapper */
public GraphObject getGraphObject ( Object obj ) { } } | String key = getKey ( obj ) ; GraphObject go = objectMap . get ( key ) ; // if ( obj instanceof Conversion & & go = = null )
// go = objectMap . get ( key + ConversionWrapper . LEFT _ TO _ RIGHT ) ;
// if ( go = = null )
// go = objectMap . get ( key + ConversionWrapper . RIGHT _ TO _ LEFT ) ;
if ( go == null ) { Node node = wrap ( obj ) ; if ( node != null ) { objectMap . put ( key , node ) ; node . init ( ) ; } } return objectMap . get ( key ) ; |
public class BatchGetNamedQueryResult { /** * Information about the named query IDs submitted .
* @ param namedQueries
* Information about the named query IDs submitted . */
public void setNamedQueries ( java . util . Collection < NamedQuery > namedQueries ) { } } | if ( namedQueries == null ) { this . namedQueries = null ; return ; } this . namedQueries = new java . util . ArrayList < NamedQuery > ( namedQueries ) ; |
public class ReasonerQueryImpl { /** * Does id predicates - > answer conversion
* @ return substitution obtained from all id predicates ( including internal ) in the query */
public ConceptMap getSubstitution ( ) { } } | if ( substitution == null ) { Set < Variable > varNames = getVarNames ( ) ; Set < IdPredicate > predicates = getAtoms ( IsaAtomBase . class ) . map ( IsaAtomBase :: getTypePredicate ) . filter ( Objects :: nonNull ) . filter ( p -> varNames . contains ( p . getVarName ( ) ) ) . collect ( Collectors . toSet ( ) ) ; getAtoms ( IdPredicate . class ) . forEach ( predicates :: add ) ; HashMap < Variable , Concept > answerMap = new HashMap < > ( ) ; predicates . forEach ( p -> { Concept concept = tx ( ) . getConcept ( p . getPredicate ( ) ) ; if ( concept == null ) throw GraqlCheckedException . idNotFound ( p . getPredicate ( ) ) ; answerMap . put ( p . getVarName ( ) , concept ) ; } ) ; substitution = new ConceptMap ( answerMap ) ; } return substitution ; |
public class ListAssociationsRequest { /** * One or more filters . Use a filter to return a more specific list of results .
* @ param associationFilterList
* One or more filters . Use a filter to return a more specific list of results . */
public void setAssociationFilterList ( java . util . Collection < AssociationFilter > associationFilterList ) { } } | if ( associationFilterList == null ) { this . associationFilterList = null ; return ; } this . associationFilterList = new com . amazonaws . internal . SdkInternalList < AssociationFilter > ( associationFilterList ) ; |
public class VertexLabel { /** * TODO refactor to not use SchemaTable at this level */
Pair < Set < SchemaTable > , Set < SchemaTable > > getTableLabels ( ) { } } | Set < SchemaTable > inSchemaTables = new HashSet < > ( ) ; Set < SchemaTable > outSchemaTables = new HashSet < > ( ) ; for ( EdgeLabel inEdgeLabel : this . inEdgeLabels . values ( ) ) { inSchemaTables . add ( SchemaTable . of ( inEdgeLabel . getSchema ( ) . getName ( ) , EDGE_PREFIX + inEdgeLabel . getLabel ( ) ) ) ; } for ( EdgeLabel outEdgeLabel : this . outEdgeLabels . values ( ) ) { outSchemaTables . add ( SchemaTable . of ( outEdgeLabel . getSchema ( ) . getName ( ) , EDGE_PREFIX + outEdgeLabel . getLabel ( ) ) ) ; } if ( this . schema . getTopology ( ) . isSqlWriteLockHeldByCurrentThread ( ) ) { for ( EdgeLabel inEdgeLabel : this . uncommittedInEdgeLabels . values ( ) ) { inSchemaTables . add ( SchemaTable . of ( inEdgeLabel . getSchema ( ) . getName ( ) , EDGE_PREFIX + inEdgeLabel . getLabel ( ) ) ) ; } for ( EdgeLabel outEdgeLabel : this . uncommittedOutEdgeLabels . values ( ) ) { outSchemaTables . add ( SchemaTable . of ( outEdgeLabel . getSchema ( ) . getName ( ) , EDGE_PREFIX + outEdgeLabel . getLabel ( ) ) ) ; } } return Pair . of ( inSchemaTables , outSchemaTables ) ; |
public class CmsLoginController { /** * Returns the reset password link . < p >
* @ return the reset password link */
public String getResetPasswordLink ( ) { } } | return OpenCms . getLinkManager ( ) . substituteLinkForUnknownTarget ( CmsLoginUI . m_adminCms , CmsWorkplaceLoginHandler . LOGIN_HANDLER , false ) + "?" + CmsLoginHelper . PARAM_RESET_PASSWORD ; |
public class MavenConfigurationHelper {
    /**
     * Loads a settings.xml file and applies custom properties.
     *
     * @param settingsFile the Maven settings.xml file to load
     * @param props custom properties backing the configuration; note this map is
     *        mutated: the settings-file location is added under the service PID key
     * @return the Maven configuration backed by the given properties and settings
     * @throws Exception if the settings file cannot be read or parsed
     */
    public static MavenConfiguration getConfig(final File settingsFile, final Properties props) throws Exception {
        props.setProperty(ServiceConstants.PID + MavenConstants.PROPERTY_SETTINGS_FILE,
                settingsFile.toURI().toASCIIString());
        final MavenConfigurationImpl config =
                new MavenConfigurationImpl(new PropertiesPropertyResolver(props), ServiceConstants.PID);
        final MavenSettings settings = new MavenSettingsImpl(settingsFile.toURI().toURL());
        config.setSettings(settings);
        return config;
    }
}
public class RosettaData {
    /**
     * Loads this object's data from the given reader using the configured input
     * format, invalidating any cached XML/JSON renderings.
     *
     * @param rdr the source to parse
     * @return this instance, for call chaining
     * @throws APIException if parsing fails for any reason
     */
    public RosettaData<T> load(Reader rdr) throws APIException {
        Parse<Reader, ?> in = df.getIn(inType);
        TimeTaken tt = in.start(trans); // time the parse for the transaction trace
        try {
            saved.extract(rdr, (Writer) null, in);
            // Cached renderings are stale now that new data has been loaded.
            xml = json = null;
        } catch (Exception e) {
            throw new APIException(e);
        } finally {
            tt.done();
        }
        return this;
    }
}
public class OWLSubObjectPropertyOfAxiomImpl_CustomFieldSerializer {
    /**
     * Deserializes the content of the object from the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamReader}.
     * Delegates to the static {@code deserialize} helper.
     *
     * @param streamReader the {@link com.google.gwt.user.client.rpc.SerializationStreamReader}
     *        to read the object's content from
     * @param instance the object instance to deserialize into
     * @throws com.google.gwt.user.client.rpc.SerializationException if the
     *         deserialization operation is not successful
     */
    @Override
    public void deserializeInstance(SerializationStreamReader streamReader,
            OWLSubObjectPropertyOfAxiomImpl instance) throws SerializationException {
        deserialize(streamReader, instance);
    }
}
public class ESResponseWrapper {
    /**
     * Gets the select expression order.
     *
     * @param query the query
     * @return an iterable over the SELECT clause expressions in declaration order,
     *         or {@code null} if the query is not a SELECT statement
     */
    public ListIterable<Expression> getSelectExpressionOrder(KunderaQuery query) {
        if (!KunderaQueryUtils.isSelectStatement(query.getJpqlExpression())) {
            return null; // only SELECT statements carry a select-expression order
        }
        Expression selectExpression =
                ((SelectClause) (query.getSelectStatement()).getSelectClause()).getSelectExpression();
        List<Expression> list;
        if (!(selectExpression instanceof CollectionExpression)) {
            // Single expression: wrap it in a one-element list.
            list = new LinkedList<Expression>();
            list.add(selectExpression);
            return new SnapshotCloneListIterable<Expression>(list);
        } else {
            // Multiple expressions: the children already form the ordered collection.
            return selectExpression.children();
        }
    }
}
public class ClusterNodesParser {
    /**
     * Parses the output of CLUSTER NODES into node objects.
     *
     * @param nodes output of CLUSTER NODES
     * @return the parsed nodes, one per token of the input
     * @throws RedisException if any node line cannot be parsed
     */
    public static Collection<DisqueNode> parse(String nodes) {
        List<DisqueNode> result = new ArrayList<>();
        Iterator<String> iterator = TOKEN_PATTERN.splitAsStream(nodes).iterator();
        try {
            while (iterator.hasNext()) {
                String node = iterator.next();
                DisqueNode partition = parseNode(node);
                result.add(partition);
            }
        } catch (Exception e) {
            // Wrap with the raw input for easier diagnosis of malformed output.
            throw new RedisException("Cannot parse " + nodes, e);
        }
        return result;
    }
}
public class MultimapSubject {
    /**
     * Starts a method chain for a check in which the actual values (i.e. the values of the
     * {@link Multimap} under test) are compared to expected values using the given
     * {@link Correspondence}. The actual values must be of type {@code A}, and the expected
     * values must be of type {@code E}. The check is actually executed by continuing the
     * method chain. For example:
     * <pre>{@code
     * assertThat(actualMultimap)
     *     .comparingValuesUsing(correspondence)
     *     .containsEntry(expectedKey, expectedValue);
     * }</pre>
     * where {@code actualMultimap} is a {@code Multimap<?, A>} (or, more generally, a
     * {@code Multimap<?, ? extends A>}), {@code correspondence} is a
     * {@code Correspondence<A, E>}, and {@code expectedValue} is an {@code E}.
     *
     * <p>Note that keys will always be compared with regular object equality
     * ({@link Object#equals}).
     *
     * <p>Any of the methods on the returned object may throw {@link ClassCastException} if
     * they encounter an actual value that is not of type {@code A}.
     */
    public <A, E> UsingCorrespondence<A, E> comparingValuesUsing(
            Correspondence<? super A, ? super E> correspondence) {
        return new UsingCorrespondence<>(correspondence);
    }
}
public class MappingOptionImpl {
    /**
     * Resets the given feature to its default value; features not handled here
     * are delegated to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.MAPPING_OPTION__MAP_VALUE:
                setMapValue(MAP_VALUE_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class ExtendedBufferedReader {
    /**
     * Reads the next char from the input stream, maintaining a one-character
     * lookahead and counting newline characters.
     *
     * @return the next char or END_OF_STREAM if end of stream has been reached
     * @throws IOException if the underlying stream fails
     */
    @Override
    public int read() throws IOException {
        // Initialize the lookahead on the first call.
        if (lookaheadChar == UNDEFINED) {
            lookaheadChar = super.read();
        }
        lastChar = lookaheadChar;
        // Refill the lookahead only when more input is immediately available,
        // otherwise mark it undefined so the next call re-reads.
        if (super.ready()) {
            lookaheadChar = super.read();
        } else {
            lookaheadChar = UNDEFINED;
        }
        if (lastChar == '\n') {
            lineCounter++;
        }
        return lastChar;
    }
}
public class AverageSeeker {
    /**
     * Adds a new value with the given weight, updating the running average and
     * the observed min/max of the fast average, under the write lock.
     *
     * @param value the value to add
     * @param weight the weight of the value
     */
    @Override
    public void add(double value, double weight) {
        writeLock.lock();
        try {
            average.add(value, weight);
            double fast = average.fast();
            min.accept(fast);
            max.accept(fast);
            check();
        } finally {
            writeLock.unlock(); // always release, even if check() throws
        }
    }
}
public class Check {
    /**
     * Ensures that a passed class has an annotation of a specific type.
     *
     * @param clazz the class that must have a required annotation
     * @param annotation the type of annotation that is required on the class
     * @return the given annotation which is present on the checked class
     * @throws IllegalNullArgumentException if {@code clazz} or {@code annotation} is null
     * @throws IllegalMissingAnnotationException if the passed annotation is not
     *         annotated at the given class
     */
    @ArgumentsChecked
    @Throws({ IllegalNullArgumentException.class, IllegalMissingAnnotationException.class })
    public static Annotation hasAnnotation(@Nonnull final Class<?> clazz,
            @Nonnull final Class<? extends Annotation> annotation) {
        Check.notNull(clazz, "clazz");
        Check.notNull(annotation, "annotation");
        if (!clazz.isAnnotationPresent(annotation)) {
            throw new IllegalMissingAnnotationException(annotation, clazz);
        }
        return clazz.getAnnotation(annotation);
    }
}
public class PagingCollection { @ SuppressWarnings ( { } } | "rawtypes" , "unchecked" } ) public boolean addAll ( Collection < ? extends T > values ) { boolean wasAllAdded = true ; for ( Iterator iterator = values . iterator ( ) ; iterator . hasNext ( ) ; ) { if ( ! add ( ( T ) iterator . next ( ) ) ) wasAllAdded = false ; } return wasAllAdded ; |
public class Kmeans { /** * 类内方差
* @ param docs
* @ param centroid
* @ return */
private float calculateClusterQuality ( ArrayList < Instance > docs , HashSparseVector centroid ) { } } | float quality = 0.0f ; HashSparseVector c = centroid ; for ( int i = 0 ; i < docs . size ( ) ; ++ i ) { Instance doc = docs . get ( i ) ; quality += c . distanceEuclidean ( ( HashSparseVector ) doc . getData ( ) ) ; } return quality ; |
public class RawCursor {
    /**
     * Positions the cursor on the last entry within the configured start/end
     * bounds, for preserving key semantics.
     *
     * @return true if positioned on an in-bounds entry whose key matches the prefix
     * @throws FetchException if the underlying fetch fails
     */
    private boolean toBoundedLast() throws FetchException {
        if (mEndBound == null) {
            if (!toLast()) {
                return false;
            }
        } else {
            if (!toLast(mEndBound.clone())) {
                return false;
            }
            if (!mInclusiveEnd) {
                byte[] currentKey = getCurrentKey();
                if (currentKey == null) {
                    return false;
                }
                // Exclusive end bound: step off an exact match with the bound.
                if (compareKeysPartially(mEndBound, currentKey) == 0) {
                    if (!toPreviousKey()) {
                        return false;
                    }
                }
            }
        }
        if (mStartBound != null) {
            byte[] currentKey = getCurrentKey();
            if (currentKey == null) {
                return false;
            }
            int result = compareKeysPartially(currentKey, mStartBound);
            // Before the start bound, or exactly on it with an exclusive start: out of range.
            if (result <= 0) {
                if (result < 0 || !mInclusiveStart) {
                    return false;
                }
            }
        }
        return prefixMatches();
    }
}
public class JobOperations {
    /**
     * Gets the specified {@link CloudJob}.
     *
     * @param jobId the ID of the job to get
     * @param detailLevel a {@link DetailLevel} used for controlling which properties are
     *        retrieved from the service
     * @param additionalBehaviors a collection of {@link BatchClientBehavior} instances that
     *        are applied to the Batch service request
     * @return a {@link CloudJob} containing information about the specified Azure Batch job
     * @throws BatchErrorException when an error response is received from the Batch service
     * @throws IOException when there is an error in serialization/deserialization of data
     *         sent to/received from the Batch service
     */
    public CloudJob getJob(String jobId, DetailLevel detailLevel,
            Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
        JobGetOptions getJobOptions = new JobGetOptions();
        // Merge client-wide behaviors with the per-call ones, then fold the
        // requested detail level into the request options.
        BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors);
        bhMgr.appendDetailLevelToPerCallBehaviors(detailLevel);
        bhMgr.applyRequestBehaviors(getJobOptions);
        return this.parentBatchClient.protocolLayer().jobs().get(jobId, getJobOptions);
    }
}
public class XPathFactory { /** * < p > Get a new < code > XPathFactory < / code > instance using the default object model ,
* { @ link # DEFAULT _ OBJECT _ MODEL _ URI } ,
* the W3C DOM . < / p >
* < p > This method is functionally equivalent to : < / p >
* < pre >
* newInstance ( DEFAULT _ OBJECT _ MODEL _ URI )
* < / pre >
* < p > Since the implementation for the W3C DOM is always available , this method will never fail . < / p >
* @ return Instance of an < code > XPathFactory < / code > . */
public static final XPathFactory newInstance ( ) { } } | try { return newInstance ( DEFAULT_OBJECT_MODEL_URI ) ; } catch ( XPathFactoryConfigurationException xpathFactoryConfigurationException ) { throw new RuntimeException ( "XPathFactory#newInstance() failed to create an XPathFactory for the default object model: " + DEFAULT_OBJECT_MODEL_URI + " with the XPathFactoryConfigurationException: " + xpathFactoryConfigurationException . toString ( ) ) ; } |
public class LessCssGenerator {
    /**
     * Compiles the LESS source to a CSS source.
     *
     * @param bundle the bundle
     * @param content the resource content to compile
     * @param path the compiled resource path
     * @param context the generator context
     * @return the compiled CSS content
     * @throws BundlingProcessException if the LESS compiler fails on the resource
     */
    public String compile(JoinableResourceBundle bundle, String content, String path, GeneratorContext context) {
        JawrLessSource source = new JawrLessSource(bundle, content, path, rsHandler);
        try {
            CompilationResult result = compiler.compile(source, lessConfig);
            // Record resources pulled in by the compile so dependents are tracked.
            addLinkedResources(path, context, source.getLinkedResources());
            return result.getCss();
        } catch (Less4jException e) {
            throw new BundlingProcessException(
                    "Unable to generate content for resource path : '" + path + "'", e);
        }
    }
}
public class ASMMethod {
    /**
     * Returns a {@code Class} object that represents the formal return type of the
     * method represented by this {@code Method} object. The result is resolved
     * lazily from the textual type name and cached.
     *
     * @return the return type for the method this object represents, or {@code null}
     *         if the class could not be loaded
     */
    public Class<?> getReturnType() {
        if (returnType == null) {
            returnType = ClassUtil.loadClass(getReturnTypeAsString(), null);
            // NOTE(review): when loading fails, additional parameter info is
            // initialized as a fallback — presumably to aid later resolution; confirm.
            if (returnType == null)
                initAddionalParams();
        }
        return returnType;
    }
}
public class ORBConfigAdapter {
    /**
     * Translate client configuration into the property bundle necessary to
     * configure the client ORB instance.
     *
     * @param clientProps configuration properties
     * @param subsystemFactories configured subsystem factories
     * @return a property bundle that can be passed to ORB.init()
     * @throws ConfigException if configuration cannot be interpreted
     */
    private Properties translateToClientProps(Map<String, Object> clientProps,
            Collection<SubsystemFactory> subsystemFactories) throws ConfigException {
        Properties result = createYokoORBProperties();
        for (SubsystemFactory sf : subsystemFactories) {
            // NOTE(review): the boolean flag presumably selects client vs server
            // initializer registration — confirm against addInitializerPropertyForSubsystem.
            addInitializerPropertyForSubsystem(result, sf, false);
            sf.addClientORBInitProperties(result, clientProps);
        }
        return result;
    }
}
public class ExtendedPropertyUrl {
    /**
     * Get Resource Url for DeleteExtendedProperty.
     *
     * @param key key of the extended property to delete
     * @return String Resource Url
     */
    public static MozuUrl deleteExtendedPropertyUrl(String key) {
        UrlFormatter formatter = new UrlFormatter("/api/commerce/carts/current/extendedproperties/{key}");
        formatter.formatUrl("key", key);
        return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
    }
}
public class BaseGradleReleaseAction {
    /**
     * Get the root path where the build is located; the project may be checked out to
     * a sub-directory from the root workspace location.
     *
     * @param globalEnv EnvVars to take the workspace from; if the workspace is not found
     *        there, it is taken from project.getSomeWorkspace()
     * @return the location of the root of the Gradle build
     * @throws IOException
     * @throws InterruptedException
     * @throws IllegalStateException if no workspace exists for the project
     * @throws IllegalArgumentException if the project has no Gradle builder
     */
    public FilePath getModuleRoot(Map<String, String> globalEnv) throws IOException, InterruptedException {
        FilePath someWorkspace = project.getSomeWorkspace();
        if (someWorkspace == null) {
            throw new IllegalStateException("Couldn't find workspace");
        }
        Map<String, String> workspaceEnv = Maps.newHashMap();
        workspaceEnv.put("WORKSPACE", someWorkspace.getRemote());
        for (Builder builder : getBuilders()) {
            if (builder instanceof Gradle) {
                Gradle gradleBuilder = (Gradle) builder;
                String rootBuildScriptDir = gradleBuilder.getRootBuildScriptDir();
                if (rootBuildScriptDir != null && rootBuildScriptDir.trim().length() != 0) {
                    // Expand $WORKSPACE first, then any globally provided variables.
                    String rootBuildScriptNormalized = Util.replaceMacro(rootBuildScriptDir.trim(), workspaceEnv);
                    rootBuildScriptNormalized = Util.replaceMacro(rootBuildScriptNormalized, globalEnv);
                    return new FilePath(someWorkspace, rootBuildScriptNormalized);
                } else {
                    return someWorkspace;
                }
            }
        }
        throw new IllegalArgumentException("Couldn't find Gradle builder in the current builders list");
    }
}
public class FilterMenuLayout { /** * find the middle point of two intersect points in circle , only one point will be correct
* @ param center
* @ param a
* @ param b
* @ param area
* @ param radius
* @ return */
private static Point findMidnormalPoint ( Point center , Point a , Point b , Rect area , int radius ) { } } | if ( a . y == b . y ) { // top
if ( a . y < center . y ) { return new Point ( ( a . x + b . x ) / 2 , center . y + radius ) ; } // bottom
return new Point ( ( a . x + b . x ) / 2 , center . y - radius ) ; } if ( a . x == b . x ) { // left
if ( a . x < center . x ) { return new Point ( center . x + radius , ( a . y + b . y ) / 2 ) ; } // right
return new Point ( center . x - radius , ( a . y + b . y ) / 2 ) ; } // slope of line ab
double abSlope = ( a . y - b . y ) / ( a . x - b . x * 1.0 ) ; // slope of midnormal
double midnormalSlope = - 1.0 / abSlope ; double radian = Math . tan ( midnormalSlope ) ; int dy = ( int ) ( radius * Math . sin ( radian ) ) ; int dx = ( int ) ( radius * Math . cos ( radian ) ) ; Point point = new Point ( center . x + dx , center . y + dy ) ; if ( ! inArea ( point , area , 0 ) ) { point = new Point ( center . x - dx , center . y - dy ) ; } return point ; |
public class JavolutionTranscoder {
    /**
     * Gets the session attributes represented by the given serialized bytes.
     *
     * @param in the bytes to deserialize
     * @return the resulting attribute map
     */
    @Override
    public ConcurrentMap<String, Object> deserializeAttributes(final byte[] in) {
        if (LOG.isDebugEnabled()) {
            // NOTE(review): uses the platform default charset; acceptable here since
            // the string is only used for debug logging.
            LOG.debug("Reading serialized data:\n" + new String(in));
        }
        return doDeserialize(in, "attributes");
    }
}
public class WhiteboxImpl {
    /**
     * Set the value of a field using reflection. This method will traverse the
     * super class hierarchy until a field with name <tt>fieldName</tt> is found.
     *
     * @param object the object whose field to modify
     * @param fieldName the name of the field
     * @param value the new value of the field
     */
    public static void setInternalState(Object object, String fieldName, Object value) {
        Field foundField = findFieldInHierarchy(object, fieldName);
        setField(object, value, foundField);
    }
}
public class SQSConnection {
    /**
     * Creates a <code>QueueSession</code>.
     *
     * @param transacted only false is supported
     * @param acknowledgeMode legal values are <code>Session.AUTO_ACKNOWLEDGE</code>,
     *        <code>Session.CLIENT_ACKNOWLEDGE</code>, <code>Session.DUPS_OK_ACKNOWLEDGE</code>,
     *        and <code>SQSSession.UNORDERED_ACKNOWLEDGE</code>
     * @return a new queue session
     * @throws JMSException if the QueueConnection object fails to create a session due to
     *         some internal error or lack of support for the specific transaction and
     *         acknowledge mode
     */
    @Override
    public QueueSession createQueueSession(boolean transacted, int acknowledgeMode) throws JMSException {
        // Delegate to the generic session factory and narrow the result.
        return (QueueSession) createSession(transacted, acknowledgeMode);
    }
}
public class URLDecoder {
    /**
     * Decodes URL octets except %2f / %2F (i.e. the '/' character), which is kept
     * escaped so path separators survive the round trip.
     *
     * @param str string to decode (may be {@code null})
     * @return the decoded string, or the input unchanged if it is {@code null}
     *         or contains a malformed escape sequence
     */
    public static String decode(String str) {
        if (str == null) {
            return null;
        }
        try {
            StringBuilder sb = new StringBuilder();
            String rest = str;
            // Decode around each %2f occurrence (matched case-insensitively),
            // copying the escape itself through verbatim.
            int idx;
            while ((idx = rest.toLowerCase().indexOf("%2f")) >= 0) {
                sb.append(java.net.URLDecoder.decode(rest.substring(0, idx), "UTF-8"))
                        .append(rest, idx, idx + 3);
                rest = rest.substring(idx + 3);
            }
            sb.append(java.net.URLDecoder.decode(rest, "UTF-8"));
            return sb.toString();
        } catch (Exception ex) {
            // BUG FIX: the original returned a partially-consumed remainder when a
            // malformed escape appeared after a %2f, silently losing the prefix.
            // Preserve best-effort semantics by returning the original input unchanged.
            return str;
        }
    }
}
public class LogisticsCenter {
    /**
     * Method for the arouter-auto-register plugin to register interceptors.
     *
     * @param interceptorGroup IInterceptorGroup implementation class in the package:
     *        com.alibaba.android.arouter.core.routers
     * @author billy.qi <a href="mailto:qiyilike@163.com">Contact me.</a>
     * @since 2017-12-06
     */
    private static void registerInterceptor(IInterceptorGroup interceptorGroup) {
        // Mark that registration was performed by the build plugin.
        markRegisteredByPlugin();
        if (interceptorGroup != null) {
            interceptorGroup.loadInto(Warehouse.interceptorsIndex);
        }
    }
}
public class UTF8Reader {
    /**
     * Reads into a character buffer using the correct encoding: decodes the next
     * UTF-8 sequence from the underlying stream. Supplementary characters are
     * returned as a surrogate pair — the high surrogate now, with the low
     * surrogate buffered in {@code _peek} for the next call.
     *
     * @return the next UTF-16 code unit, a negative value at end of stream, or
     *         the result of {@code error(...)} on a malformed sequence
     * @throws IOException if the underlying stream fails
     */
    public int read() throws IOException {
        // A pending low surrogate from a previous 4-byte sequence?
        if (_peek >= 0) {
            int peek = _peek;
            _peek = -1;
            return peek;
        }
        InputStream is = _is;
        int ch1 = is.read();
        if (ch1 < 0x80) {
            // ASCII, or end of stream (negative).
            return ch1;
        }
        else if ((ch1 & 0xe0) == 0xc0) {
            // Two-byte sequence: 110xxxxx 10xxxxxx.
            int ch2 = is.read();
            if (ch2 < 0) {
                return error("unexpected end of file in utf8 character");
            }
            else if ((ch2 & 0xc0) != 0x80) {
                return error("utf-8 character conversion error for '{0}' because second byte is invalid at "
                        + String.format("0x%02x 0x%02x", ch1, ch2));
            }
            return ((ch1 & 0x1f) << 6) + (ch2 & 0x3f);
        }
        else if ((ch1 & 0xf0) == 0xe0) {
            // Three-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx.
            int ch2 = is.read();
            int ch3 = is.read();
            if (ch2 < 0)
                return error("unexpected end of file in utf8 character");
            else if ((ch2 & 0xc0) != 0x80) {
                return error("illegal utf8 encoding at "
                        + "\\x" + Integer.toHexString(ch1)
                        + "\\x" + Integer.toHexString(ch2)
                        + "\\x" + Integer.toHexString(ch3));
            }
            if (ch3 < 0)
                return error("unexpected end of file in utf8 character");
            else if ((ch3 & 0xc0) != 0x80)
                return error("illegal utf8 encoding at "
                        + "\\x" + Integer.toHexString(ch1)
                        + "\\x" + Integer.toHexString(ch2)
                        + "\\x" + Integer.toHexString(ch3));
            int ch = ((ch1 & 0x1f) << 12) + ((ch2 & 0x3f) << 6) + (ch3 & 0x3f);
            if (ch == 0xfeff) {
                // If byte-order-mark, skip it and read the next character.
                // server/1m00
                return read();
            }
            else
                return ch;
        }
        else if ((ch1 & 0xf0) == 0xf0) {
            // Four-byte sequence: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx.
            int ch2 = is.read();
            int ch3 = is.read();
            int ch4 = is.read();
            if (ch2 < 0)
                return error("unexpected end of file in utf8 character");
            else if ((ch2 & 0xc0) != 0x80)
                return error("illegal utf8 encoding at 0x" + Integer.toHexString(ch2));
            if (ch3 < 0)
                return error("unexpected end of file in utf8 character");
            else if ((ch3 & 0xc0) != 0x80)
                return error("illegal utf8 encoding at 0x" + Integer.toHexString(ch3));
            if (ch4 < 0)
                return error("unexpected end of file in utf8 character");
            else if ((ch4 & 0xc0) != 0x80)
                return error("illegal utf8 encoding at 0x" + Integer.toHexString(ch4));
            int ch = (((ch1 & 0xf) << 18) + ((ch2 & 0x3f) << 12) + ((ch3 & 0x3f) << 6) + ((ch4 & 0x3f)));
            // Split the supplementary character into a surrogate pair; buffer the
            // low surrogate so the next read() returns it.
            _peek = 0xdc00 + (ch & 0x3ff);
            return 0xd800 + ((ch - 0x10000) / 0x400);
        }
        else {
            return error("illegal utf8 encoding at (0x" + Integer.toHexString(ch1) + ")");
        }
    }
}
public class World {
    /**
     * Sets the {@code PrivateRootActor} instance as a {@code Stoppable}. (INTERNAL ONLY)
     * A private root may be assigned only once; a second non-null assignment stops
     * the offending actor and fails fast.
     *
     * @param privateRoot the {@code Stoppable} protocol backed by the {@code PrivateRootActor}
     * @throws IllegalStateException if a private root has already been set
     */
    synchronized void setPrivateRoot(final Stoppable privateRoot) {
        if (privateRoot != null && this.privateRoot != null) {
            // Stop the newcomer before rejecting it so it does not leak.
            privateRoot.stop();
            throw new IllegalStateException("Private root already exists.");
        }
        this.privateRoot = privateRoot;
    }
}
public class StatefulBeanReaper {
    /**
     * Returns the ids of all currently passivated stateful beans belonging to the
     * given home. d103404.1
     *
     * @param homeName the J2EE name of the home whose beans are wanted
     * @return an iterator over a snapshot list of the matching passivated bean ids
     */
    public synchronized Iterator<BeanId> getPassivatedStatefulBeanIds(J2EEName homeName) {
        ArrayList<BeanId> beanList = new ArrayList<BeanId>();
        for (Enumeration<TimeoutElement> e = ivStatefulBeanList.elements(); e.hasMoreElements();) {
            TimeoutElement elt = e.nextElement();
            if (homeName.equals(elt.beanId.getJ2EEName()) && (elt.passivated))
                beanList.add(elt.beanId);
        }
        return (beanList.iterator());
    }
}
public class ShakeDetector {
    /**
     * Starts listening for shakes on devices with appropriate hardware.
     *
     * @return true if the device supports shake detection (has an accelerometer)
     */
    public boolean start(SensorManager sensorManager) {
        // Already started?
        if (accelerometer != null) {
            return true;
        }
        accelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        // If this phone has an accelerometer, listen to it.
        if (accelerometer != null) {
            this.sensorManager = sensorManager;
            sensorManager.registerListener(this, accelerometer, SensorManager.SENSOR_DELAY_FASTEST);
        }
        return accelerometer != null;
    }
}
public class MergeSmallRegions {
    /**
     * Merges together smaller regions. The segmented image, region member count,
     * and region color are all updated in place.
     *
     * @param image input image, used to compute the color of each region
     * @param pixelToRegion (input/output) segmented image with the ID of each region; modified
     * @param regionMemberCount (input/output) number of members in each region; modified
     * @param regionColor (output) storage for colors of each region; contains the color
     *        of each region on output
     */
    public void process(T image, GrayS32 pixelToRegion, GrowQueue_I32 regionMemberCount,
            FastQueue<float[]> regionColor) {
        stopRequested = false;
        // Iterate until no more regions need to be merged together (or a stop is requested).
        while (!stopRequested) {
            // Update the color of each region.
            regionColor.resize(regionMemberCount.size);
            computeColor.process(image, pixelToRegion, regionMemberCount, regionColor);
            initializeMerge(regionMemberCount.size);
            // Create a list of regions which are to be pruned; done when none remain.
            if (!setupPruneList(regionMemberCount))
                break;
            // Scan the image and create a list of regions which the pruned regions connect to.
            findAdjacentRegions(pixelToRegion);
            // Select the closest match to merge into.
            for (int i = 0; i < pruneGraph.size; i++) {
                selectMerge(i, regionColor);
            }
            // Do the usual merge stuff.
            performMerge(pixelToRegion, regionMemberCount);
        }
    }
}
public class FormLayout {
    /**
     * Specifies whether the given component shall be taken into account for sizing and
     * positioning. This setting overrides the container-wide default. See
     * {@link #setHonorsVisibility(boolean)} for details.
     *
     * @param component the component that shall get an individual setting
     * @param b {@code Boolean.TRUE} to override the container default and honor the
     *        visibility for the given component, {@code Boolean.FALSE} to override the
     *        container default and ignore the visibility for the given component,
     *        {@code null} to use the container default value as specified by
     *        {@link #getHonorsVisibility()}
     * @since 1.2
     */
    public void setHonorsVisibility(Component component, Boolean b) {
        CellConstraints constraints = getConstraints0(component);
        // No change -> avoid an unnecessary invalidate/repaint cycle.
        if (Objects.equals(b, constraints.honorsVisibility)) {
            return;
        }
        constraints.honorsVisibility = b;
        invalidateAndRepaint(component.getParent());
    }
}
public class CxxAstScanner {
    /**
     * Concatenates two strings, including the intersection/overlap between the end
     * of {@code a} and the start of {@code b} only once.
     *
     * @param a left-hand string, whose suffix may overlap {@code b}
     * @param b right-hand string, whose prefix may overlap {@code a}
     * @return the merged string with the longest overlap collapsed
     */
    public static String intersectingConcatenate(String a, String b) {
        // The overlap can be at most as long as the shorter string; try the
        // longest candidate first so the match found is maximal.
        int longestPossible = Math.min(a.length(), b.length());
        for (int overlap = longestPossible; overlap > 0; overlap--) {
            if (a.regionMatches(a.length() - overlap, b, 0, overlap)) {
                return a + b.substring(overlap);
            }
        }
        // No intersection found: fall back to straight concatenation.
        return a + b;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.