signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TraceEventHelper { /** * Get status
* @ param data The data
* @ return The status */
public static TraceEventStatus mergeStatus ( Collection < TraceEventStatus > data ) { } } | TraceEventStatus result = TraceEventStatus . GREEN ; for ( TraceEventStatus tes : data ) { if ( tes == TraceEventStatus . YELLOW ) { result = TraceEventStatus . YELLOW ; } else if ( tes == TraceEventStatus . RED ) { return TraceEventStatus . RED ; } } return result ; |
public class AWSCognitoIdentityProviderClient { /** * Starts the user import .
* @ param startUserImportJobRequest
* Represents the request to start the user import job .
* @ return Result of the StartUserImportJob operation returned by the service .
* @ throws ResourceNotFoundException
* This exception is thrown when the Amazon Cognito service cannot find the requested resource .
* @ throws InvalidParameterException
* This exception is thrown when the Amazon Cognito service encounters an invalid parameter .
* @ throws TooManyRequestsException
* This exception is thrown when the user has made too many requests for a given operation .
* @ throws InternalErrorException
* This exception is thrown when Amazon Cognito encounters an internal error .
* @ throws PreconditionNotMetException
* This exception is thrown when a precondition is not met .
* @ throws NotAuthorizedException
* This exception is thrown when a user is not authorized .
* @ sample AWSCognitoIdentityProvider . StartUserImportJob
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / StartUserImportJob " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public StartUserImportJobResult startUserImportJob ( StartUserImportJobRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeStartUserImportJob ( request ) ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public PGPRGPgFlgs createPGPRGPgFlgsFromString ( EDataType eDataType , String initialValue ) { } } | PGPRGPgFlgs result = PGPRGPgFlgs . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class DiagnosticGroup { /** * Returns whether all of the types in the given group are in this group . */
boolean isSubGroup ( DiagnosticGroup group ) { } } | for ( DiagnosticType type : group . types ) { if ( ! matches ( type ) ) { return false ; } } return true ; |
public class HttpRequest { /** * Parses an http message from an input stream . The first line of input is
* save in the protected < tt > command < / tt > variable . The subsequent lines are
* put into a linked hash as field / value pairs . Input is parsed until a
* blank line is reached , after which any data should appear .
* @ param in An InputStream containing a valid HTTP message */
private void parse ( InputStream in ) throws IOException { } } | Pattern p = Pattern . compile ( ":" ) ; BufferedReader bin = new BufferedReader ( new InputStreamReader ( in ) , 1 ) ; String currLine = bin . readLine ( ) ; // command = currLine ;
// parse the command to get the request method
parseCommand ( currLine ) ; // parse headers
currLine = bin . readLine ( ) ; while ( currLine != null ) { if ( Utils . isEmpty ( currLine ) ) { break ; // we have reached the end of the headers
} // split the headers into name - value pairs
String [ ] split = currLine . split ( ": " ) ; if ( split . length < 2 ) { split = new String [ ] { split [ 0 ] , "" } ; } String headerName = Utils . trim ( split [ 0 ] ) ; String headerValue = Utils . trim ( split [ 1 ] ) ; headers . put ( headerName , headerValue ) ; currLine = bin . readLine ( ) ; } // parse the POST body , if there is one
if ( currLine != null ) { StringBuilder bodyBuilder = new StringBuilder ( ) ; while ( ( currLine = bin . readLine ( ) ) != null ) { bodyBuilder . append ( currLine ) . append ( "\n" ) ; } body = bodyBuilder . toString ( ) ; } |
public class ProjectsInner { /** * Get projects in a service .
* The project resource is a nested resource representing a stored migration project . This method returns a list of projects owned by a service resource .
* @ param groupName Name of the resource group
* @ param serviceName Name of the service
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ProjectInner & gt ; object */
public Observable < Page < ProjectInner > > listAsync ( final String groupName , final String serviceName ) { } } | return listWithServiceResponseAsync ( groupName , serviceName ) . map ( new Func1 < ServiceResponse < Page < ProjectInner > > , Page < ProjectInner > > ( ) { @ Override public Page < ProjectInner > call ( ServiceResponse < Page < ProjectInner > > response ) { return response . body ( ) ; } } ) ; |
public class ViewDragHelper { /** * Settle the captured view at the given ( left , top ) position .
* @ param finalLeft Target left position for the captured view
* @ param finalTop Target top position for the captured view
* @ param xvel Horizontal velocity
* @ param yvel Vertical velocity
* @ return true if animation should continue through { @ link # continueSettling ( boolean ) } calls */
private boolean forceSettleCapturedViewAt ( int finalLeft , int finalTop , int xvel , int yvel ) { } } | final int startLeft = mCapturedView . getLeft ( ) ; final int startTop = mCapturedView . getTop ( ) ; final int dx = finalLeft - startLeft ; final int dy = finalTop - startTop ; if ( dx == 0 && dy == 0 ) { // Nothing to do . Send callbacks , be done .
mScroller . abortAnimation ( ) ; setDragState ( STATE_IDLE ) ; return false ; } final int duration = computeSettleDuration ( mCapturedView , dx , dy , xvel , yvel ) ; mScroller . startScroll ( startLeft , startTop , dx , dy , duration ) ; setDragState ( STATE_SETTLING ) ; return true ; |
public class SwapFile {

    /**
     * Not applicable for this class; call {@code get(File, String)} instead.
     *
     * <p>This override exists only to block the inherited factory path —
     * it rejects every invocation unconditionally.
     *
     * @param prefix ignored
     * @param suffix ignored
     * @param directory ignored
     * @return never returns normally
     * @throws IOException always, directing callers to {@code get(File, String)}
     */
    public static SwapFile createTempFile(String prefix, String suffix, File directory) throws IOException {
        throw new IOException("Not applicable. Call get(File, String) method instead");
    }
}
public class ClientAsyncResult {

    /**
     * Custom serialization: writes this client-side async result to the stream.
     *
     * <p>Wire layout (order is part of the protocol — do not reorder):
     * default fields, eye-catcher bytes, platform short, version short, then
     * the server-side Future stub and the RMI-remote flag.
     *
     * @param out the object output stream to write to
     * @throws IOException if the stream write fails
     * @throws EJBException if there is no server-side Future object to
     *         communicate with ({@code ivServer == null})
     */
    private void writeObject(java.io.ObjectOutputStream out) throws IOException {
        if (svLogger.isLoggable(Level.FINER))
            svLogger.logp(Level.FINER, CLASS_NAME, "writeObject", toString());
        // Without a server-side Future object there is nothing meaningful to
        // serialize — fail fast rather than producing an unusable stub.
        if (ivServer == null) {
            throw new EJBException("No Server side Future object exists.");
        }
        out.defaultWriteObject();
        // Header: eye-catcher, platform, format version.
        out.write(Constants.CLIENT_ASYNC_RESULT_EYE_CATCHER);
        out.writeShort(Constants.PLATFORM_DISTRIBUTED);
        out.writeShort(Constants.CLIENT_ASYNC_RESULT_V1);
        // Payload: the remote Future reference and the remote-interface flag.
        out.writeObject(ivServer);
        out.writeBoolean(ivBusinessRmiRemote);
    }
}
public class RedisClusterStorage {

    /**
     * Store a trigger in Redis.
     *
     * <p>Persists the trigger hash plus its membership in the global trigger set,
     * its group set, its job's trigger set and (optionally) a calendar set and a
     * data-map hash, then initializes the trigger's state according to whether
     * its group or job is currently paused.
     *
     * @param trigger the trigger to be stored; only {@link SimpleTrigger} and
     *        {@link CronTrigger} implementations are supported
     * @param replaceExisting {@code true} if an existing trigger with the same
     *        identity should be replaced
     * @param jedis a thread-safe Redis cluster connection
     * @throws JobPersistenceException on persistence failures
     * @throws ObjectAlreadyExistsException if the trigger exists and
     *         {@code replaceExisting} is {@code false}
     * @throws UnsupportedOperationException for unsupported trigger types
     */
    @Override
    public void storeTrigger(OperableTrigger trigger, boolean replaceExisting, JedisCluster jedis) throws JobPersistenceException {
        final String triggerHashKey = redisSchema.triggerHashKey(trigger.getKey());
        final String triggerGroupSetKey = redisSchema.triggerGroupSetKey(trigger.getKey());
        final String jobTriggerSetKey = redisSchema.jobTriggersSetKey(trigger.getJobKey());
        if (!(trigger instanceof SimpleTrigger) && !(trigger instanceof CronTrigger)) {
            throw new UnsupportedOperationException("Only SimpleTrigger and CronTrigger are supported.");
        }
        final boolean exists = jedis.exists(triggerHashKey);
        if (exists && !replaceExisting) {
            throw new ObjectAlreadyExistsException(trigger);
        }
        // Flatten the trigger to a string map via Jackson and record its concrete class
        // so it can be reconstructed on load.
        Map<String, String> triggerMap = mapper.convertValue(trigger, new TypeReference<HashMap<String, String>>() { });
        triggerMap.put(TRIGGER_CLASS, trigger.getClass().getName());
        jedis.hmset(triggerHashKey, triggerMap);
        // Index memberships: all triggers, trigger groups, this group, and the owning job.
        jedis.sadd(redisSchema.triggersSet(), triggerHashKey);
        jedis.sadd(redisSchema.triggerGroupsSet(), triggerGroupSetKey);
        jedis.sadd(triggerGroupSetKey, triggerHashKey);
        jedis.sadd(jobTriggerSetKey, triggerHashKey);
        if (trigger.getCalendarName() != null && !trigger.getCalendarName().isEmpty()) {
            final String calendarTriggersSetKey = redisSchema.calendarTriggersSetKey(trigger.getCalendarName());
            jedis.sadd(calendarTriggersSetKey, triggerHashKey);
        }
        if (trigger.getJobDataMap() != null && !trigger.getJobDataMap().isEmpty()) {
            final String triggerDataMapHashKey = redisSchema.triggerDataMapHashKey(trigger.getKey());
            jedis.hmset(triggerDataMapHashKey, getStringDataMap(trigger.getJobDataMap()));
        }
        if (exists) {
            // We're overwriting a previously stored instance of this trigger,
            // so clear any existing trigger state.
            unsetTriggerState(triggerHashKey, jedis);
        }
        // Initial state depends on whether the trigger's group or its job's group is paused.
        Boolean triggerPausedResponse = jedis.sismember(redisSchema.pausedTriggerGroupsSet(), triggerGroupSetKey);
        Boolean jobPausedResponse = jedis.sismember(redisSchema.pausedJobGroupsSet(), redisSchema.jobGroupSetKey(trigger.getJobKey()));
        if (triggerPausedResponse || jobPausedResponse) {
            // Score the state set with the next fire time (-1 when none is scheduled).
            final long nextFireTime = trigger.getNextFireTime() != null ? trigger.getNextFireTime().getTime() : -1;
            final String jobHashKey = redisSchema.jobHashKey(trigger.getJobKey());
            if (isBlockedJob(jobHashKey, jedis)) {
                setTriggerState(RedisTriggerState.PAUSED_BLOCKED, (double) nextFireTime, triggerHashKey, jedis);
            } else {
                setTriggerState(RedisTriggerState.PAUSED, (double) nextFireTime, triggerHashKey, jedis);
            }
        } else if (trigger.getNextFireTime() != null) {
            setTriggerState(RedisTriggerState.WAITING, (double) trigger.getNextFireTime().getTime(), triggerHashKey, jedis);
        }
    }
}
public class DateParser {

    /**
     * Parses a Graphite-style URL date parameter into a {@link Date}.
     *
     * <p>Supported forms:
     * <ul>
     *   <li>empty/blank input — returns {@code defaultDate};</li>
     *   <li>{@code -<value><unit>} — a relative offset before "now", where the
     *       unit must appear in {@code UNIT_CONVERSION_TABLE} (e.g. s, min, h,
     *       d, w, mon, y);</li>
     *   <li>otherwise — an absolute date tried against each pattern in
     *       {@code DATE_PARSERS} in order.</li>
     * </ul>
     *
     * <p>TODO: Graphite additionally accepts forms not handled here, e.g.
     * {@code HH:MM_YYYYMMDD}, bare {@code YYYYMMDD} ranges, and natural-language
     * values such as "noon+yesterday", "6pm+today", "january+1", "monday".
     *
     * @param dateStr the raw parameter value; may be empty
     * @param defaultDate the value returned when {@code dateStr} is empty
     * @return the parsed date
     * @throws IllegalArgumentException if the value is neither a valid relative
     *         offset nor parseable by any configured absolute-date format
     */
    public static Date parseURLDate(String dateStr, Date defaultDate) {
        if (StringUtils.isEmpty(dateStr)) {
            return defaultDate;
        }
        if (dateStr.startsWith("-")) {
            // Relative time: "-<value><unit>", e.g. "-15min".
            // Cut off the leading minus sign, then parse value and unit.
            String parse = dateStr.substring(1).toLowerCase();
            Matcher matcher = RELATIVE_TIME_PATTERN.matcher(parse);
            if (!matcher.matches()) {
                throw new IllegalArgumentException("Could not parse relative time value " + parse + ", expected to match " + RELATIVE_TIME_PATTERN);
            }
            String value = matcher.group(1);
            String unit = matcher.group(2);
            // Look up the seconds-per-unit conversion factor for this unit.
            Integer factor = UNIT_CONVERSION_TABLE.get(unit);
            if (factor == null) {
                throw new IllegalArgumentException("Unknown unit: " + unit + " found while parsing relative time: " + parse);
            }
            // Subtract (value * factor) seconds from the current time.
            return DateUtils.addSeconds(new Date(), (-1) * Integer.parseInt(value) * factor);
        }
        // Absolute time: try each configured format in turn.
        for (FastDateFormat format : DATE_PARSERS) {
            try {
                return format.parse(dateStr);
            } catch (ParseException e) {
                // expected here if the format does not match — try the next one
            }
        }
        // Nothing matched: report all patterns that were attempted.
        StringBuilder string = new StringBuilder();
        for (FastDateFormat format : DATE_PARSERS) {
            string.append(format.getPattern()).append(", ");
        }
        throw new IllegalArgumentException("Could not parse absolute date " + dateStr + " via any of the available parsers: " + string.toString());
    }
}
public class CmsListTab { /** * Sets the clear list button enabled . < p >
* @ param enabled < code > true < / code > to enable the button */
public void setClearButtonEnabled ( boolean enabled ) { } } | if ( enabled ) { m_clearButton . enable ( ) ; } else { m_clearButton . disable ( Messages . get ( ) . key ( Messages . GUI_DISABLE_CLEAR_LIST_0 ) ) ; } |
public class DefaultBeanContext {

    /**
     * Starts the context: reads all bean configurations and bean definition
     * classes found on the classpath, publishes a {@link StartupEvent}, then
     * kicks off parallel beans and marks the context running.
     *
     * <p>Idempotent: does nothing if already running; the {@code initializing}
     * CAS guards against concurrent re-entry of the read/publish phase.
     *
     * @return this context
     */
    @Override
    public synchronized BeanContext start() {
        if (!isRunning()) {
            // Only the first caller flips initializing false->true and performs setup.
            if (initializing.compareAndSet(false, true)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Starting BeanContext");
                }
                readAllBeanConfigurations();
                readAllBeanDefinitionClasses();
                if (LOG.isDebugEnabled()) {
                    // Log which configurations are active in this environment.
                    String activeConfigurations = beanConfigurations.values().stream().filter(config -> config.isEnabled(this)).map(BeanConfiguration::getName).collect(Collectors.joining(","));
                    if (StringUtils.isNotEmpty(activeConfigurations)) {
                        LOG.debug("Loaded active configurations: {}", activeConfigurations);
                    }
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("BeanContext Started.");
                }
                publishEvent(new StartupEvent(this));
            }
            // Start thread for parallel beans, then flip the state flags.
            processParallelBeans();
            running.set(true);
            initializing.set(false);
        }
        return this;
    }
}
public class JarafeMETrainer {

    /**
     * Adds one labeled training instance to the underlying maxent trainer.
     *
     * @param label the "gold standard" label for this training instance
     * @param features the names of the features present for this training instance
     */
    public void addTrainingInstance(String label, List<String> features) {
        // Straight delegation — the maxent model accumulates instances until training.
        maxent.addInstance(label, features);
    }
}
public class FileRecyclerViewAdapter { /** * From the google examples , decodes a bitmap as a byte array and then resizes it for the required
* width and hieght .
* @ param picture the picture byte array
* @ param reqWidth the required width
* @ param reqHeight the required height
* @ return a Bitmap */
public static Bitmap decodeSampledBitmapFromByteArray ( byte [ ] picture , int reqWidth , int reqHeight ) { } } | // First decode with inJustDecodeBounds = true to check dimensions
BitmapFactory . Options options = new BitmapFactory . Options ( ) ; options . inJustDecodeBounds = true ; BitmapFactory . decodeByteArray ( picture , 0 , picture . length , options ) ; // Calculate inSampleSize
options . inSampleSize = calculateInSampleSize ( options , reqWidth , reqHeight ) ; // Decode bitmap with inSampleSize set
options . inJustDecodeBounds = false ; return BitmapFactory . decodeByteArray ( picture , 0 , picture . length , options ) ; |
public class SelfCalibrationGuessAndCheckFocus {

    /**
     * Given the focal lengths for the first two views, computes the
     * projective-to-metric homography H.
     *
     * <p>Both cameras are assumed to have zero skew, unit aspect ratio and the
     * principal point at the origin (only the focal length varies).
     *
     * @param f1 view 1 focal length
     * @param f2 view 2 focal length
     * @param P2 projective camera matrix for view 2
     * @param H (Output) homography
     * @return true if successful
     */
    boolean computeRectifyH(double f1, double f2, DMatrixRMaj P2, DMatrixRMaj H) {
        // Simplified pinhole models: fx = fy = f, skew = 0, principal point at (0,0).
        estimatePlaneInf.setCamera1(f1, f1, 0, 0, 0);
        estimatePlaneInf.setCamera2(f2, f2, 0, 0, 0);
        if (!estimatePlaneInf.estimatePlaneAtInfinity(P2, planeInf))
            return false;
        // TODO add a cost for distance from nominal and scale other cost by
        //      focal length fx for each view (a non-linear refinement step was
        //      prototyped here but is currently disabled).
        // Build K1 = diag(f1, f1, 1) and combine it with the plane at infinity
        // to form the rectifying homography.
        K1.zero();
        K1.set(0, 0, f1);
        K1.set(1, 1, f1);
        K1.set(2, 2, 1);
        MultiViewOps.createProjectiveToMetric(K1, planeInf.x, planeInf.y, planeInf.z, 1, H);
        return true;
    }
}
public class ComponentVertex { /** * ( non - Javadoc )
* @ see org . jgrapes . core . core . Manager # fire
* ( org . jgrapes . core . Event , org . jgrapes . core . Channel ) */
@ Override public < T > Event < T > fire ( Event < T > event , Channel ... channels ) { } } | if ( channels . length == 0 ) { channels = event . channels ( ) ; if ( channels . length == 0 ) { channels = new Channel [ ] { channel ( ) } ; } } event . setChannels ( channels ) ; tree ( ) . fire ( event , channels ) ; return event ; |
public class Assert { /** * Check that the parameter array has exactly the right number of elements .
* @ param parameterName
* The name of the user - supplied parameter that we are validating
* so that the user can easily find the error in their code .
* @ param actualLength
* The actual array length
* @ param expectedLength
* The expected array length */
public static void checkArrayLength ( String parameterName , int actualLength , int expectedLength ) { } } | if ( actualLength != expectedLength ) { throw Exceptions . IllegalArgument ( "Array %s should have %d elements, not %d" , parameterName , expectedLength , actualLength ) ; } |
public class SyntheticStorableReferenceBuilder {

    /**
     * Sets all the properties of the given index entry, using the applicable
     * properties of the given master.
     *
     * @param indexEntry index entry whose properties will be set
     * @param master source of property values
     * @throws FetchException if reading from the master fails
     * @deprecated call {@code getReferenceAccess()} and use its
     *             {@code copyFromMaster} directly
     */
    @Deprecated
    public void copyFromMaster(Storable indexEntry, S master) throws FetchException {
        // Deprecated shim — delegates to the replacement accessor.
        getReferenceAccess().copyFromMaster(indexEntry, master);
    }
}
public class Session { /** * Opens a session based on an existing Facebook access token , and also makes this session
* the currently active session . This method should be used
* only in instances where an application has previously obtained an access token and wishes
* to import it into the Session / TokenCachingStrategy - based session - management system . A primary
* example would be an application which previously did not use the Facebook SDK for Android
* and implemented its own session - management scheme , but wishes to implement an upgrade path
* for existing users so they do not need to log in again when upgrading to a version of
* the app that uses the SDK . In general , this method will be called only once , when the app
* detects that it has been upgraded - - after that , the usual Session lifecycle methods
* should be used to manage the session and its associated token .
* No validation is done that the token , token source , or permissions are actually valid .
* It is the caller ' s responsibility to ensure that these accurately reflect the state of
* the token that has been passed in , or calls to the Facebook API may fail .
* @ param context the Context to use for creation the session
* @ param accessToken the access token obtained from Facebook
* @ param callback a callback that will be called when the session status changes ; may be null
* @ return The new Session or null if one could not be created */
public static Session openActiveSessionWithAccessToken ( Context context , AccessToken accessToken , StatusCallback callback ) { } } | Session session = new Session ( context , null , null , false ) ; setActiveSession ( session ) ; session . open ( accessToken , callback ) ; return session ; |
public class DescribeTrailsRequest { /** * Specifies a list of trail names , trail ARNs , or both , of the trails to describe . The format of a trail ARN is :
* < code > arn : aws : cloudtrail : us - east - 2:123456789012 : trail / MyTrail < / code >
* If an empty list is specified , information for the trail in the current region is returned .
* < ul >
* < li >
* If an empty list is specified and < code > IncludeShadowTrails < / code > is false , then information for all trails in
* the current region is returned .
* < / li >
* < li >
* If an empty list is specified and IncludeShadowTrails is null or true , then information for all trails in the
* current region and any associated shadow trails in other regions is returned .
* < / li >
* < / ul >
* < note >
* If one or more trail names are specified , information is returned only if the names match the names of trails
* belonging only to the current region . To return information about a trail in another region , you must specify its
* trail ARN .
* < / note >
* @ param trailNameList
* Specifies a list of trail names , trail ARNs , or both , of the trails to describe . The format of a trail ARN
* is : < / p >
* < code > arn : aws : cloudtrail : us - east - 2:123456789012 : trail / MyTrail < / code >
* If an empty list is specified , information for the trail in the current region is returned .
* < ul >
* < li >
* If an empty list is specified and < code > IncludeShadowTrails < / code > is false , then information for all
* trails in the current region is returned .
* < / li >
* < li >
* If an empty list is specified and IncludeShadowTrails is null or true , then information for all trails in
* the current region and any associated shadow trails in other regions is returned .
* < / li >
* < / ul >
* < note >
* If one or more trail names are specified , information is returned only if the names match the names of
* trails belonging only to the current region . To return information about a trail in another region , you
* must specify its trail ARN . */
public void setTrailNameList ( java . util . Collection < String > trailNameList ) { } } | if ( trailNameList == null ) { this . trailNameList = null ; return ; } this . trailNameList = new com . amazonaws . internal . SdkInternalList < String > ( trailNameList ) ; |
public class XmlDataProviderImpl {

    /**
     * Generates a list of the declared type after parsing the XML file.
     *
     * @return a {@link List} of objects of declared type
     *         {@link XmlFileSystemResource#getCls()}
     * @throws IllegalArgumentException if the resource has no declared type
     * @throws DataProviderException if JAXB unmarshalling fails
     */
    private List<?> loadDataFromXmlFile() {
        logger.entering();
        // The declared element type is mandatory — JAXB needs it to bind the payload.
        Preconditions.checkArgument(resource.getCls() != null, "Please provide a valid type.");
        List<?> returned;
        try {
            // Bind both the generic Wrapper envelope and the payload type.
            JAXBContext context = JAXBContext.newInstance(Wrapper.class, resource.getCls());
            Unmarshaller unmarshaller = context.createUnmarshaller();
            StreamSource xmlStreamSource = new StreamSource(resource.getInputStream());
            Wrapper<?> wrapper = unmarshaller.unmarshal(xmlStreamSource, Wrapper.class).getValue();
            returned = wrapper.getList();
        } catch (JAXBException excp) {
            logger.exiting(excp.getMessage());
            // Preserve the JAXB cause for diagnosis.
            throw new DataProviderException("Error unmarshalling XML file.", excp);
        }
        logger.exiting(returned);
        return returned;
    }
}
public class ObjectPropertiesController { /** * Retrieve a single value property .
* @ param method method definition
* @ param object target object
* @ param map parameter values */
private void getSingleValue ( Method method , Object object , Map < String , String > map ) { } } | Object value ; try { value = filterValue ( method . invoke ( object ) ) ; } catch ( Exception ex ) { value = ex . toString ( ) ; } if ( value != null ) { map . put ( getPropertyName ( method ) , String . valueOf ( value ) ) ; } |
public class Axis { /** * Generates Axis with values and labels from given lists , both lists must have the same size . */
public static Axis generateAxisFromCollection ( List < Float > axisValues , List < String > axisValuesLabels ) { } } | if ( axisValues . size ( ) != axisValuesLabels . size ( ) ) { throw new IllegalArgumentException ( "Values and labels lists must have the same size!" ) ; } List < AxisValue > values = new ArrayList < AxisValue > ( ) ; int index = 0 ; for ( float value : axisValues ) { AxisValue axisValue = new AxisValue ( value ) . setLabel ( axisValuesLabels . get ( index ) ) ; values . add ( axisValue ) ; ++ index ; } Axis axis = new Axis ( values ) ; return axis ; |
public class IDValueSelect {

    /**
     * {@inheritDoc}
     *
     * <p>Appends the ID column to the select unless the parent select already
     * resolves a "type" value (in which case the ID is provided elsewhere).
     * Also records the column index and caches the type's ID attribute.
     *
     * @return the number of columns added (0 or 1)
     */
    @Override
    public int append2SQLSelect(final Type _type, final SQLSelect _select, final int _tableIndex, final int _colIndex) {
        int ret = 0;
        // Skip when nested under a "type" value select — it supplies the ID itself.
        if (getParent() == null || !"type".equals(getParent().getValueType())) {
            _select.column(_tableIndex, "ID");
            getColIndexs().add(_colIndex);
            ret++;
            this.attribute = _type.getAttribute("ID");
        }
        return ret;
    }
}
public class MobileCommand {

    /**
     * Forms a {@link java.util.Map} of parameters for the device unlocking.
     *
     * @return a key-value pair whose key is the unlock command name and whose
     *         value is an (empty) {@link java.util.Map} of command arguments
     */
    public static Map.Entry<String, Map<String, ?>> unlockDeviceCommand() {
        // Unlock takes no arguments — an immutable empty map is sufficient.
        return new AbstractMap.SimpleEntry<>(UNLOCK, ImmutableMap.of());
    }
}
public class LoggingConfigurator {

    /**
     * Configures logging with default behaviour, logging to stderr. If the
     * SPOTIFY_SYSLOG_HOST or SPOTIFY_SYSLOG_PORT environment variable is
     * defined, the syslog appender is used instead of the console appender.
     *
     * @param ident the logging identity
     * @param level the logging level to use
     * @param replaceNewLines configures new-line replacement in the messages
     */
    public static void configureDefaults(final String ident, final Level level, final ReplaceNewLines replaceNewLines) {
        // Call configureSyslogDefaults if the SPOTIFY_SYSLOG_HOST or
        // SPOTIFY_SYSLOG_PORT env var is set. If this causes a problem, a
        // configureConsoleDefaults method could be introduced for users to
        // call instead to avoid this behavior.
        final String syslogHost = getSyslogHost();
        final int syslogPort = getSyslogPort();
        if (syslogHost != null || syslogPort != -1) {
            configureSyslogDefaults(ident, level, syslogHost, syslogPort, replaceNewLines);
            return;
        }
        final Logger rootLogger = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
        // Setup context
        final LoggerContext context = setupLoggerContext(rootLogger, ident);
        // Setup stderr output
        rootLogger.addAppender(getStdErrAppender(context, replaceNewLines));
        // Setup logging level
        rootLogger.setLevel(level.logbackLevel);
        // Log uncaught exceptions
        UncaughtExceptionLogger.setDefaultUncaughtExceptionHandler();
    }
}
public class PickerSpinner {

    /**
     * Removes the specified item from the adapter while keeping the selection
     * sensible. Always call this method instead of {@code getAdapter().remove()}.
     *
     * <p>Three positional cases are handled: the temporary selection slot
     * (index == count), the footer (last adapter slot when a footer exists),
     * and ordinary items. If the currently selected item is removed, the next
     * appropriate item is reselected instead of creating a temporary item.
     *
     * @param index the index of the item to be removed
     */
    public void removeAdapterItemAt(int index) {
        PickerSpinnerAdapter adapter = (PickerSpinnerAdapter) getAdapter();
        int count = adapter.getCount();
        int selection = getSelectedItemPosition();
        // Check which item will be removed:
        if (index == count) // the temporary selection slot
            selectTemporary(null);
        else if (index == count - 1 && adapter.hasFooter()) { // the footer
            // If the temporary item was selected, shift the selection down first.
            if (selection == count) setSelectionQuietly(selection - 1);
            adapter.setFooter(null);
        } else { // a normal item
            // Keep the right selection in either of these cases:
            if (index == selection) { // we delete the selected item and
                if (index == getLastItemPosition()) // it is the last real item
                    setSelection(selection - 1);
                else {
                    // We need to reselect the current item (this is not
                    // guaranteed to fire a selection callback when multiple
                    // operations modify the dataset, so it is a lot better to
                    // first select the item you want to have selected, best by
                    // overriding this method in your subclass).
                    setSelectionQuietly(index == 0 && count > 1 ? 1 : 0);
                    setSelection(selection);
                }
            } else if (index < selection && selection != count) // we remove an item above it
                setSelectionQuietly(selection - 1);
            adapter.remove(adapter.getItem(index));
            if (selection == count) { // we have a temporary item selected
                // Removing a real item shifted the temporary slot down by one;
                // flag it for reselection and follow it quietly.
                reselectTemporaryItem = true;
                setSelectionQuietly(selection - 1);
            }
        }
    }
}
public class GingerbreadPurgeableDecoder { /** * Decodes a byteArray containing jpeg encoded bytes into a purgeable bitmap
* < p > Adds a JFIF End - Of - Image marker if needed before decoding .
* @ param bytesRef the byte buffer that contains the encoded bytes
* @ param length the length of bytes for decox
* @ param options the options passed to the BitmapFactory
* @ return the decoded bitmap */
@ Override protected Bitmap decodeJPEGByteArrayAsPurgeable ( CloseableReference < PooledByteBuffer > bytesRef , int length , BitmapFactory . Options options ) { } } | byte [ ] suffix = endsWithEOI ( bytesRef , length ) ? null : EOI ; return decodeFileDescriptorAsPurgeable ( bytesRef , length , suffix , options ) ; |
public class WARCRecordToSearchResultAdapter { /** * This just calls adaptInner , returning null if an Exception is thrown : */
public CaptureSearchResult adapt ( WARCRecord rec ) { } } | try { return adaptInner ( rec ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; return null ; } catch ( OutOfMemoryError e ) { e . printStackTrace ( ) ; return null ; } |
public class A_CmsStaticExportHandler {

    /**
     * Gets the exported container pages that should be purged when the content
     * with the given id is published.<p>
     *
     * <p>Follows relations pointing at the content: container pages are mapped
     * to their exported RFS files directly; element groups are followed
     * recursively (self-referencing groups are only logged).
     *
     * @param cms the current CMS context
     * @param targetId the structure id of the published content
     * @return the list of files to purge (empty on error)
     */
    private List<File> getContainerPagesToPurge(CmsObject cms, CmsUUID targetId) {
        try {
            List<File> purgePages = new ArrayList<File>();
            List<CmsRelation> relations = cms.readRelations(CmsRelationFilter.relationsToStructureId(targetId));
            for (CmsRelation relation : relations) {
                CmsResource source = null;
                try {
                    source = relation.getSource(cms, CmsResourceFilter.ALL);
                } catch (CmsPermissionViolationException e) {
                    // export user can't read the file — skip this relation
                    continue;
                }
                if (CmsResourceTypeXmlContainerPage.isContainerPage(source)) {
                    // purge pages directly containing the content:
                    // map the VFS path to the exported RFS file on disk
                    String vfsName = source.getRootPath();
                    String rfsName = OpenCms.getStaticExportManager().getRfsName(cms, vfsName);
                    String exportPath = CmsFileUtil.normalizePath(OpenCms.getStaticExportManager().getExportPath(vfsName));
                    String rfsExportFileName = exportPath + rfsName.substring(OpenCms.getStaticExportManager().getRfsPrefix(vfsName).length());
                    File file = new File(rfsExportFileName);
                    purgePages.add(file);
                } else if (targetId.equals(source.getStructureId()) && OpenCms.getResourceManager().getResourceType(source.getTypeId()).getTypeName().equals(CmsResourceTypeXmlContainerPage.GROUP_CONTAINER_TYPE_NAME)) {
                    // an element group referencing itself — warn, don't recurse
                    LOG.warn(Messages.get().getBundle().key(Messages.LOG_WARN_ELEMENT_GROUP_REFERENCES_SELF_1, source.getRootPath()));
                } else if (OpenCms.getResourceManager().getResourceType(source.getTypeId()).getTypeName().equals(CmsResourceTypeXmlContainerPage.GROUP_CONTAINER_TYPE_NAME)) {
                    // purge pages containing group containers containing the content
                    purgePages.addAll(getContainerPagesToPurge(cms, source.getStructureId()));
                }
            }
            return purgePages;
        } catch (CmsException e) {
            LOG.error(e.getLocalizedMessage(), e);
            return Collections.emptyList();
        }
    }
}
public class ManagementHttpRequestProcessor {

    /**
     * Marks this processor as closed. Called from HttpShutdownService#stop() to signal
     * that the server is about to stop.
     *
     * Sets the CLOSED bit on {@code state} via a CAS retry loop; a concurrent caller
     * that already set the bit makes this a no-op. If the resulting state contains
     * nothing but the CLOSED bit (presumably the remaining bits track active
     * requests — confirm against the rest of the class), listeners are notified
     * immediately instead of waiting for the last request to complete.
     */
    void prepareShutdown() {
        int oldState, newState;
        do {
            oldState = state;
            if ((oldState & CLOSED) != 0) {
                // another thread already initiated shutdown
                return;
            }
            newState = oldState | CLOSED;
        } while (!stateUpdater.compareAndSet(this, oldState, newState));
        // If there are no active requests notify listeners directly
        if (newState == CLOSED) {
            handleCompleted();
        }
    }
}
public class DolphinPlatformApplication { /** * This methods defines parts of the Dolphin Platform lifecyycle and is therefore defined as final .
* Use the { @ link DolphinPlatformApplication # start ( Stage , ClientContext ) } method instead .
* @ param primaryStage the primary stage
* @ throws Exception in case of an error */
@ Override public final void start ( final Stage primaryStage ) throws Exception { } } | Assert . requireNonNull ( primaryStage , "primaryStage" ) ; this . primaryStage = primaryStage ; if ( initializationException == null ) { if ( clientContext != null ) { try { start ( primaryStage , clientContext ) ; } catch ( Exception e ) { handleInitializationError ( primaryStage , new ClientInitializationException ( "Error in application start!" , e ) ) ; } } else { handleInitializationError ( primaryStage , new ClientInitializationException ( "No clientContext was created!" ) ) ; } } else { handleInitializationError ( primaryStage , initializationException ) ; } |
public class AbstractRepositoryBuilder { /** * Throw a configuration exception if the configuration is not filled out
* sufficiently and correctly such that a repository could be instantiated
* from it . */
public final void assertReady ( ) throws ConfigurationException { } } | ArrayList < String > messages = new ArrayList < String > ( ) ; errorCheck ( messages ) ; int size = messages . size ( ) ; if ( size == 0 ) { return ; } StringBuilder b = new StringBuilder ( ) ; if ( size > 1 ) { b . append ( "Multiple problems: " ) ; } for ( int i = 0 ; i < size ; i ++ ) { if ( i > 0 ) { b . append ( "; " ) ; } b . append ( messages . get ( i ) ) ; } throw new ConfigurationException ( b . toString ( ) ) ; |
public class LandmarkStorage {

    /**
     * Calculates the landmarks and the initial 'from'/'to' weightings for them.
     *
     * Steps, in order: (1) allocate and infinity-fill the weight storage, (2) estimate
     * the weight-scaling 'factor' from the graph bounds if not set, (3) split the graph
     * into strongly connected components (optionally cutting country borders via
     * {@code ruleLookup}), (4) pick landmarks per sufficiently large subnetwork,
     * (5) append the landmark-id mapping after the weight rows and write the header.
     * Must be called exactly once.
     */
    public void createLandmarks() {
        if (isInitialized())
            throw new IllegalStateException("Initialize the landmark storage only once!");

        // fill 'from' and 'to' weights with maximum value
        // one row of LM_ROW_LENGTH bytes per node
        long maxBytes = (long) graph.getNodes() * LM_ROW_LENGTH;
        this.landmarkWeightDA.create(2000);
        this.landmarkWeightDA.ensureCapacity(maxBytes);

        // each 4-byte cell packs a delta and a from-weight; initialize both to "infinity"
        for (long pointer = 0; pointer < maxBytes; pointer += 4) {
            landmarkWeightDA.setInt(pointer, (DELTA_INF << FROM_WEIGHT_BITS) | FROM_WEIGHT_INF);
        }

        String additionalInfo = "";
        // guess the factor
        if (factor <= 0) {
            // A 'factor' is necessary to store the weight in just a short value but without losing too much precision.
            // This factor is rather delicate to pick; we estimate it through the graph boundaries' maximum distance.
            // For small areas we use max_bounds_dist*X and otherwise we use a big fixed value for this distance.
            // If we picked the distance too big for small areas this could lead to (slightly) suboptimal routes as there
            // would be too big rounding errors. But picking it too small is dangerous regarding performance,
            // e.g. for Germany at least 1500km is very important, otherwise speed is at least twice as slow e.g. for just 1000km
            BBox bounds = graph.getBounds();
            double distanceInMeter = Helper.DIST_EARTH.calcDist(bounds.maxLat, bounds.maxLon, bounds.minLat, bounds.minLon) * 7;
            if (distanceInMeter > 50_000 * 7 || /* for tests and convenience we do for now: */ !bounds.isValid())
                distanceInMeter = 30_000_000;

            double maxWeight = weighting.getMinWeight(distanceInMeter);
            setMaximumWeight(maxWeight);
            additionalInfo = ", maxWeight:" + maxWeight + ", from max distance:" + distanceInMeter / 1000f + "km";
        }

        if (logDetails)
            LOGGER.info("init landmarks for subnetworks with node count greater than " + minimumNodes + " with factor:" + factor + additionalInfo);

        // slot 0 of landmarkIDs is a sentinel of unset landmark ids
        int[] empty = new int[landmarks];
        Arrays.fill(empty, UNSET_SUBNETWORK);
        landmarkIDs.add(empty);

        byte[] subnetworks = new byte[graph.getNodes()];
        Arrays.fill(subnetworks, (byte) UNSET_SUBNETWORK);
        EdgeFilter tarjanFilter = DefaultEdgeFilter.outEdges(encoder);
        IntHashSet blockedEdges = new IntHashSet();

        // the ruleLookup splits certain areas from each other but avoids making this a permanent
        // change so that other algorithms can still route through these regions.
        if (ruleLookup != null && ruleLookup.size() > 0) {
            StopWatch sw = new StopWatch().start();
            blockedEdges = findBorderEdgeIds(ruleLookup);
            tarjanFilter = new BlockedEdgesFilter(encoder.getAccessEnc(), false, true, blockedEdges);
            if (logDetails)
                LOGGER.info("Made " + blockedEdges.size() + " edges inaccessible. Calculated country cut in " + sw.stop().getSeconds() + "s, " + Helper.getMemInfo());
        }

        StopWatch sw = new StopWatch().start();

        // we cannot reuse the components calculated in PrepareRoutingSubnetworks as the edgeIds changed in between (called graph.optimize)
        // also calculating subnetworks from scratch makes bigger problems when working with many oneways
        TarjansSCCAlgorithm tarjanAlgo = new TarjansSCCAlgorithm(graph, tarjanFilter, true);
        List<IntArrayList> graphComponents = tarjanAlgo.findComponents();
        if (logDetails)
            LOGGER.info("Calculated " + graphComponents.size() + " subnetworks via tarjan in " + sw.stop().getSeconds() + "s, " + Helper.getMemInfo());

        EdgeExplorer tmpExplorer = graph.createEdgeExplorer(new RequireBothDirectionsEdgeFilter(encoder));
        int nodes = 0;
        for (IntArrayList subnetworkIds : graphComponents) {
            nodes += subnetworkIds.size();
            if (subnetworkIds.size() < minimumNodes)
                continue;

            int index = subnetworkIds.size() - 1;
            // ensure start node is reachable from both sides and no subnetwork is associated
            for (; index >= 0; index--) {
                int nextStartNode = subnetworkIds.get(index);
                if (subnetworks[nextStartNode] == UNSET_SUBNETWORK
                        && GHUtility.count(tmpExplorer.setBaseNode(nextStartNode)) > 0) {
                    GHPoint p = createPoint(graph, nextStartNode);
                    if (logDetails)
                        LOGGER.info("start node: " + nextStartNode + " (" + p + ") subnetwork size: " + subnetworkIds.size()
                                + ", " + Helper.getMemInfo() + ((ruleLookup == null) ? "" : " area:" + ruleLookup.lookupRule(p).getId()));

                    if (createLandmarksForSubnetwork(nextStartNode, subnetworks, blockedEdges))
                        break;
                }
            }
            if (index < 0)
                LOGGER.warn("next start node not found in big enough network of size " + subnetworkIds.size()
                        + ", first element is " + subnetworkIds.get(0) + ", " + createPoint(graph, subnetworkIds.get(0)));
        }

        int subnetworkCount = landmarkIDs.size();
        // store all landmark node IDs and one int for the factor itself.
        this.landmarkWeightDA.ensureCapacity(maxBytes /* landmark weights */
                + subnetworkCount * landmarks /* landmark mapping per subnetwork */);

        // calculate offset to point into landmark mapping
        long bytePos = maxBytes;
        for (int[] landmarks : landmarkIDs) {
            for (int lmNodeId : landmarks) {
                landmarkWeightDA.setInt(bytePos, lmNodeId);
                bytePos += 4L;
            }
        }

        // make backward incompatible to force rebuild (pre 0.11 releases had nodes count at 0)
        landmarkWeightDA.setHeader(0 * 4, getVersion());
        landmarkWeightDA.setHeader(1 * 4, landmarks);
        landmarkWeightDA.setHeader(2 * 4, subnetworkCount);
        if (factor * DOUBLE_MLTPL > Integer.MAX_VALUE)
            throw new UnsupportedOperationException("landmark weight factor cannot be bigger than Integer.MAX_VALUE " + factor * DOUBLE_MLTPL);

        // the factor is stored as a fixed-point int scaled by DOUBLE_MLTPL
        landmarkWeightDA.setHeader(3 * 4, (int) Math.round(factor * DOUBLE_MLTPL));
        landmarkWeightDA.setHeader(4 * 4, graph.getNodes());

        // serialize fast byte[] into DataAccess
        subnetworkStorage.create(graph.getNodes());
        for (int nodeId = 0; nodeId < subnetworks.length; nodeId++) {
            subnetworkStorage.setSubnetwork(nodeId, subnetworks[nodeId]);
        }

        if (logDetails)
            LOGGER.info("Finished landmark creation. Subnetwork node count sum " + nodes + " vs. nodes " + graph.getNodes());
        initialized = true;
    }
}
public class SRTServletRequest {

    /**
     * Returns the path info for this request, or {@code null} when it is absent or empty.
     *
     * NOTE(review): the original summary claimed this always returns null because there is
     * no concept of servlet mappings and would be overridden by the webapp layer — that does
     * not match this implementation, which computes the path info from the dispatch context,
     * caches it in the per-thread request data, and strips the session id from rewritten
     * URLs. Confirm which description is current.
     *
     * @return the (session-id-stripped) path info, or null if none
     */
    public String getPathInfo() {
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE) {
            checkRequestObjectInUse();
        }
        SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
        // Begin PK06988, strip session id off when url rewriting is enabled
        if (reqData.getPathInfo() == null) {
            // not cached yet for this thread; compute from the dispatch context
            String aPathInfo = ((WebAppDispatcherContext) this.getDispatchContext()).getPathInfo();
            if (aPathInfo == null)
                return null;
            else {
                // Do not strip based on ? again, it was already done and we don't want to strip '%3f's that have since been decoded to ?'s
                reqData.setPathInfo(WebGroup.stripURL(aPathInfo, false)); // 293696 ServletRequest.getPathInfo() fails WASCC.web.webcontainer
            }
        } // 321485
        String path = reqData.getPathInfo(); // PK28078
        if (path.equals("")) {
            // servlet spec: empty path info is reported as null
            if (TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
                logger.logp(Level.FINE, CLASS_NAME, "getPathInfo", " path is \"\", returning null");
            return null;
        } else {
            if (TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
                logger.logp(Level.FINE, CLASS_NAME, "getPathInfo", " path --> [" + path + "]");
            return path;
        }
        // return path;
        // End PK06988, strip session id off when url rewriting is enabled
    }
}
public class DFSClient {

    /**
     * Creates a remote iterator over the located file statuses of a directory,
     * fetching the listing from the namenode in batches (each batch includes
     * block locations).
     *
     * @param src the directory path to list
     * @return a lazy iterator over the directory's entries with block locations
     * @throws IOException if the namenode call fails
     * @throws FileNotFoundException if the directory does not exist (also possible
     *         later from hasNext() if it disappears between batches)
     */
    private RemoteIterator<LocatedFileStatus> iteratorListing(final String src) throws IOException {
        return new RemoteIterator<LocatedFileStatus>() {
            // the current batch of entries fetched from the namenode
            private LocatedDirectoryListing thisListing;
            // index of the next entry to return within the current batch
            private int i;

            { // initializer
                // fetch the first batch of entries in the directory
                thisListing = namenode.getLocatedPartialListing(src, HdfsFileStatus.EMPTY_NAME);
                if (thisListing == null) {
                    // the directory does not exist
                    throw new FileNotFoundException("File " + src + " does not exist.");
                }
            }

            @Override
            public boolean hasNext() throws IOException {
                if (i >= thisListing.getPartialListing().length && thisListing.hasMore()) {
                    // current listing is exhausted & fetch a new listing
                    thisListing = namenode.getLocatedPartialListing(src, thisListing.getLastName());
                    if (thisListing == null) {
                        // the directory vanished between batches
                        throw new FileNotFoundException("File " + src + " does not exist.");
                    }
                    i = 0;
                }
                return i < thisListing.getPartialListing().length;
            }

            @Override
            public LocatedFileStatus next() throws IOException {
                if (!hasNext()) {
                    throw new java.util.NoSuchElementException("No more entry in " + src);
                }
                // i is advanced as a side effect of the second array access
                return HdfsFileStatus.toLocatedFileStatus(thisListing.getPartialListing()[i],
                        thisListing.getBlockLocations()[i++], src);
            }
        };
    }
}
public class AbstractGitFlowMojo { /** * Initializes command line executables . */
private void initExecutables ( ) { } } | if ( StringUtils . isBlank ( cmdMvn . getExecutable ( ) ) ) { if ( StringUtils . isBlank ( mvnExecutable ) ) { mvnExecutable = "mvn" ; } cmdMvn . setExecutable ( mvnExecutable ) ; } if ( StringUtils . isBlank ( cmdGit . getExecutable ( ) ) ) { if ( StringUtils . isBlank ( gitExecutable ) ) { gitExecutable = "git" ; } cmdGit . setExecutable ( gitExecutable ) ; } |
public class TreeMap { /** * ( non - Javadoc )
* @ see com . ibm . ws . objectManager . Map # get ( java . lang . Object , com . ibm . ws . objectManager . Transaction ) */
public synchronized Token get ( Object key , Transaction transaction ) throws ObjectManagerException { } } | if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "get" , new Object [ ] { key , transaction } ) ; Entry entry = getEntry ( key , transaction ) ; Token value = null ; if ( entry != null ) value = entry . getValue ( ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "get" , new Object [ ] { entry , value } ) ; return value ; |
public class BitmapUtils { /** * Get width and height of the bitmap specified with the file path .
* @ param path the bitmap file path .
* @ return the size . */
public static Point getSize ( String path ) { } } | BitmapFactory . Options options = new BitmapFactory . Options ( ) ; options . inJustDecodeBounds = true ; BitmapFactory . decodeFile ( path , options ) ; int width = options . outWidth ; int height = options . outHeight ; return new Point ( width , height ) ; |
public class FieldType { /** * Return whether or not the field value passed in is the default value for the type of the field . Null will return
* true . */
public Object getJavaDefaultValueDefault ( ) { } } | if ( field . getType ( ) == boolean . class ) { return DEFAULT_VALUE_BOOLEAN ; } else if ( field . getType ( ) == byte . class || field . getType ( ) == Byte . class ) { return DEFAULT_VALUE_BYTE ; } else if ( field . getType ( ) == char . class || field . getType ( ) == Character . class ) { return DEFAULT_VALUE_CHAR ; } else if ( field . getType ( ) == short . class || field . getType ( ) == Short . class ) { return DEFAULT_VALUE_SHORT ; } else if ( field . getType ( ) == int . class || field . getType ( ) == Integer . class ) { return DEFAULT_VALUE_INT ; } else if ( field . getType ( ) == long . class || field . getType ( ) == Long . class ) { return DEFAULT_VALUE_LONG ; } else if ( field . getType ( ) == float . class || field . getType ( ) == Float . class ) { return DEFAULT_VALUE_FLOAT ; } else if ( field . getType ( ) == double . class || field . getType ( ) == Double . class ) { return DEFAULT_VALUE_DOUBLE ; } else { return null ; } |
public class LdapAuthenticationService { /** * Set the authorizations for the roles which may be defined . If the keys are DN values , the application role names
* are taken from the leftmost RDN value . Use { @ link LdapAuthenticationService # setNamedRoles ( Map ) } instead of this
* method to explicitly define application role names .
* @ param roles map with roles , keys are the values for { @ link # rolesAttribute } , probably DN values
* @ since 1.10.0 ( actually already from 1.9.0 but annotations was missing ) */
@ Api public void setRoles ( Map < String , List < AuthorizationInfo > > roles ) { } } | Map < String , List < NamedRoleInfo > > namedRoles = new HashMap < String , List < NamedRoleInfo > > ( ) ; for ( String ldapRole : roles . keySet ( ) ) { DN dn ; List < AuthorizationInfo > auth = roles . get ( ldapRole ) ; NamedRoleInfo role = new NamedRoleInfo ( ) ; role . setAuthorizations ( auth ) ; try { dn = new DN ( ldapRole ) ; role . setName ( dn . getRDN ( ) . getAttributeValues ( ) [ 0 ] ) ; } catch ( LDAPException e ) { role . setName ( ldapRole ) ; } namedRoles . put ( ldapRole , Collections . singletonList ( role ) ) ; } setNamedRoles ( namedRoles ) ; |
public class EJBFactoryImpl {

    /**
     * Returns a reference to the EJB object specified. <p>
     *
     * The combination of application name, module name, and bean name uniquely
     * identifies any EJB object. <p>
     *
     * This method is intended to be used when neither the specific module name
     * nor bean name is known. All modules in the application will be searched
     * for an EJB that implements the specified interface. If multiple EJB
     * objects in the application implement the specified interface, this is
     * considered an ambiguous reference, and an EJBException will be thrown. <p>
     *
     * The lookup itself is delegated entirely to {@code ejbLinkResolver}.
     *
     * @param application name of the application containing the EJB.
     * @param interfaceName component home or business interface of EJB.
     * @return a reference to the EJB object specified.
     * @exception EJBException is thrown when the specified EJB cannot
     *            be found or a failure occurs creating an instance.
     */
    @Override
    public Object findByInterface(String application, String interfaceName) throws EJBException, RemoteException {
        return ejbLinkResolver.findByInterface(application, interfaceName);
    }
}
public class BplusTree {

    /**
     * Returns the entry with the greatest key less than or equal to the given key,
     * or null if there is no such key.
     *
     * @param key the key
     * @return the entry with the greatest key less than or equal to key, or null
     *         if there is no such key
     */
    public synchronized TreeEntry<K, V> floorEntry(final K key) {
        // Delegate to the shared neighbor-search helper; the flags presumably select
        // "less-than" direction and "inclusive" matching — confirm against getRoundEntry.
        return getRoundEntry(key, false, true);
    }
}
public class FrequentlyUsedPolicy {

    /**
     * Moves the entry to the next higher frequency list, creating it if necessary.
     *
     * The frequency buckets form an ordered linked structure; an empty bucket left
     * behind by the move is unlinked.
     */
    private void onHit(Node node) {
        policyStats.recordHit();

        int newCount = node.freq.count + 1;
        // reuse the successor bucket if it already holds newCount,
        // otherwise splice a new bucket in after the current one
        FrequencyNode freqN = (node.freq.next.count == newCount)
                ? node.freq.next
                : new FrequencyNode(newCount, node.freq);
        // unlink the node from its current bucket before re-homing it
        node.remove();
        if (node.freq.isEmpty()) {
            // drop the now-empty frequency bucket
            node.freq.remove();
        }
        node.freq = freqN;
        node.append();
    }
}
public class TreeData { /** * 设置treeDatas值
* @ param treeDatas */
public TreeData children ( TreeData ... treeDatas ) { } } | if ( treeDatas == null || treeDatas . length == 0 ) { return this ; } this . children ( ) . addAll ( Arrays . asList ( treeDatas ) ) ; return this ; |
public class Card {

    /**
     * Setter for the card number. Note that mutating the number of this card object
     * invalidates the {@link #brand} and {@link #last4}, which are cleared here —
     * presumably they are recomputed lazily by their getters; confirm.
     *
     * @param number the new {@link #number}, may be null
     */
    @Deprecated
    public void setNumber(@Nullable String number) {
        this.number = number;
        // the derived fields are now stale; reset them so they are recomputed
        this.brand = null;
        this.last4 = null;
    }
}
public class ReadBuffer { /** * Reads the given amount of bytes from the file into the read buffer and resets the internal buffer position . If
* the capacity of the read buffer is too small , a larger one is created automatically .
* @ param length the amount of bytes to read from the file .
* @ return true if the whole data was read successfully , false otherwise .
* @ throws IOException if an error occurs while reading the file . */
public boolean readFromFile ( int length ) throws IOException { } } | // ensure that the read buffer is large enough
if ( this . bufferData == null || this . bufferData . length < length ) { // ensure that the read buffer is not too large
if ( length > Parameters . MAXIMUM_BUFFER_SIZE ) { LOGGER . warning ( "invalid read length: " + length ) ; return false ; } this . bufferData = new byte [ length ] ; this . bufferWrapper = ByteBuffer . wrap ( this . bufferData , 0 , length ) ; } // reset the buffer position and read the data into the buffer
this . bufferPosition = 0 ; this . bufferWrapper . clear ( ) ; return this . inputChannel . read ( this . bufferWrapper ) == length ; |
public class InternalLocaleBuilder { /** * Reset Builder ' s internal state with the given language tag */
public InternalLocaleBuilder setLanguageTag ( LanguageTag langtag ) { } } | clear ( ) ; if ( ! langtag . getExtlangs ( ) . isEmpty ( ) ) { language = langtag . getExtlangs ( ) . get ( 0 ) ; } else { String lang = langtag . getLanguage ( ) ; if ( ! lang . equals ( LanguageTag . UNDETERMINED ) ) { language = lang ; } } script = langtag . getScript ( ) ; region = langtag . getRegion ( ) ; List < String > bcpVariants = langtag . getVariants ( ) ; if ( ! bcpVariants . isEmpty ( ) ) { StringBuilder var = new StringBuilder ( bcpVariants . get ( 0 ) ) ; int size = bcpVariants . size ( ) ; for ( int i = 1 ; i < size ; i ++ ) { var . append ( BaseLocale . SEP ) . append ( bcpVariants . get ( i ) ) ; } variant = var . toString ( ) ; } setExtensions ( langtag . getExtensions ( ) , langtag . getPrivateuse ( ) ) ; return this ; |
public class ResizeManager { /** * Triggered
* @ param event Down event */
private boolean onDownEvent ( @ NonNull MotionEvent event ) { } } | if ( MotionEventCompat . getActionMasked ( event ) != MotionEvent . ACTION_DOWN ) { throw new IllegalStateException ( "Has to be down event!" ) ; } if ( mVelocityTracker == null ) { mVelocityTracker = VelocityTracker . obtain ( ) ; } else { mVelocityTracker . clear ( ) ; } mDownY = event . getY ( ) ; if ( ! mScroller . isFinished ( ) ) { mScroller . forceFinished ( true ) ; if ( mScroller . getFinalY ( ) == 0 ) { mDragStartY = mDownY + mScroller . getStartY ( ) - mScroller . getCurrY ( ) ; } else { mDragStartY = mDownY - mScroller . getCurrY ( ) ; } mState = State . DRAGGING ; return true ; } else { return false ; } |
public class StringUtils {

    /**
     * <p>Checks if the CharSequence contains only lowercase characters.</p>
     *
     * <p>{@code null} will return {@code false}.
     * An empty CharSequence (length() = 0) will return {@code false}.</p>
     *
     * <pre>
     * StringUtils.isAllLowerCase(null)   = false
     * StringUtils.isAllLowerCase("")     = false
     * StringUtils.isAllLowerCase("  ")   = false
     * StringUtils.isAllLowerCase("abc")  = true
     * StringUtils.isAllLowerCase("abC")  = false
     * StringUtils.isAllLowerCase("ab c") = false
     * StringUtils.isAllLowerCase("ab1c") = false
     * StringUtils.isAllLowerCase("ab/c") = false
     * </pre>
     *
     * @param cs the CharSequence to check, may be null
     * @return {@code true} if only contains lowercase characters, and is non-null
     * @since 2.5
     * @since 3.0 Changed signature from isAllLowerCase(String) to isAllLowerCase(CharSequence)
     */
    public static boolean isAllLowerCase(final CharSequence cs) {
        if (cs == null || cs.length() == 0) {
            return false;
        }
        for (int i = 0, sz = cs.length(); i < sz; i++) {
            if (!Character.isLowerCase(cs.charAt(i))) {
                return false;
            }
        }
        return true;
    }
}
public class WTree {

    /**
     * Override preparePaint to register an AJAX operation and refresh the tree's
     * internal state before rendering.
     *
     * @param request the request being responded to.
     */
    @Override
    protected void preparePaintComponent(final Request request) {
        super.preparePaintComponent(request);

        // If this is an internal AJAX action, set the action type.
        if (isCurrentAjaxTrigger()) {
            AjaxOperation operation = AjaxHelper.getCurrentOperation();
            if (operation.isInternalAjaxRequest()) {
                // Want to replace children in the target (Internal defaults to REPLACE target)
                operation.setAction(AjaxOperation.AjaxAction.IN);
            }
        }

        // Check if a custom tree needs the expanded rows checked
        TreeItemIdNode custom = getCustomTree();
        if (custom != null) {
            checkExpandedCustomNodes();
        }

        // Make sure the ID maps are up to date
        clearItemIdMaps();

        // For lazy expansion, reset the previously-expanded-rows bookkeeping on a full
        // page request, and accumulate the current expansions during an AJAX request.
        if (getExpandMode() == ExpandMode.LAZY) {
            if (AjaxHelper.getCurrentOperation() == null) {
                clearPrevExpandedRows();
            } else {
                addPrevExpandedCurrent();
            }
        }
    }
}
public class TypeValidator { /** * Expect the type to contain an object sometimes . If the expectation is not met , issue a warning
* at the provided node ' s source code position . */
void expectAnyObject ( Node n , JSType type , String msg ) { } } | JSType anyObjectType = getNativeType ( NO_OBJECT_TYPE ) ; if ( ! anyObjectType . isSubtypeOf ( type ) && ! type . isEmptyType ( ) ) { mismatch ( n , msg , type , anyObjectType ) ; } |
public class DefaultMonitorRegistry { /** * Gets the { @ link ObjectNameMapper } to use by looking at the
* { @ code com . netflix . servo . DefaultMonitorRegistry . jmxMapperClass }
* property . If not specified , then { @ link ObjectNameMapper # DEFAULT }
* is used .
* @ param props the properties
* @ return the mapper to use */
private static ObjectNameMapper getObjectNameMapper ( Properties props ) { } } | ObjectNameMapper mapper = ObjectNameMapper . DEFAULT ; final String jmxNameMapperClass = props . getProperty ( REGISTRY_JMX_NAME_PROP ) ; if ( jmxNameMapperClass != null ) { try { Class < ? > mapperClazz = Class . forName ( jmxNameMapperClass ) ; mapper = ( ObjectNameMapper ) mapperClazz . newInstance ( ) ; } catch ( Throwable t ) { LOG . error ( "failed to create the JMX ObjectNameMapper instance of class " + jmxNameMapperClass + ", using the default naming scheme" , t ) ; } } return mapper ; |
public class ArgParser { /** * Gets the name specified in @ Opt ( name = . . . ) if present , or the name of the field otherwise . */
private static String getName ( Opt option , Field field ) { } } | if ( option . name ( ) . equals ( Opt . DEFAULT_STRING ) ) { return field . getName ( ) ; } else { return option . name ( ) ; } |
public class PatternBox {

    /**
     * Finds the cases where a transcription relation is shown using a Conversion
     * instead of a TemplateReaction.
     *
     * The pattern walks: transcription-factor entity reference -> its physical
     * entity (possibly in a complex) -> a Control -> a Conversion whose single
     * right-hand participant maps back to the product's entity reference, and
     * requires TF and product to be different entity references.
     *
     * @return the pattern
     */
    public static Pattern controlsExpressionWithConversion() {
        Pattern p = new Pattern(SequenceEntityReference.class, "TF ER");
        // resolve generic/member entity references for the transcription factor
        p.add(linkedER(true), "TF ER", "TF generic ER");
        p.add(erToPE(), "TF generic ER", "TF SPE");
        // the TF may act as part of a complex
        p.add(linkToComplex(), "TF SPE", "TF PE");
        p.add(peToControl(), "TF PE", "Control");
        p.add(controlToConv(), "Control", "Conversion");
        // expression-like conversions produce exactly one output
        p.add(new Size(right(), 1, Size.Type.EQUAL), "Conversion");
        // left side must be empty, or be a single NucleicAcid translated into a Protein
        p.add(new OR(
            new MappedConst(new Empty(left()), 0),
            new MappedConst(new ConstraintAdapter(1) {
                @Override
                public boolean satisfies(Match match, int... ind) {
                    Conversion cnv = (Conversion) match.get(ind[0]);
                    Set<PhysicalEntity> left = cnv.getLeft();
                    if (left.size() > 1)
                        return false;
                    if (left.isEmpty())
                        return true;
                    PhysicalEntity pe = left.iterator().next();
                    if (pe instanceof NucleicAcid) {
                        // translation-style: nucleic acid in, protein out
                        PhysicalEntity rPE = cnv.getRight().iterator().next();
                        return rPE instanceof Protein;
                    }
                    return false;
                }
            }, 0)), "Conversion");
        // follow the product back to its entity reference
        p.add(right(), "Conversion", "right PE");
        p.add(linkToSpecific(), "right PE", "right SPE");
        p.add(new Type(SequenceEntity.class), "right SPE");
        p.add(peToER(), "right SPE", "product generic ER");
        p.add(linkedER(false), "product generic ER", "product ER");
        // exclude self-regulation: TF and product must differ
        p.add(equal(false), "TF ER", "product ER");
        return p;
    }
}
public class PointerHierarchyRepresentationBuilder {

    /**
     * Add an element to the pointer representation: record its parent, the link
     * distance, and the merge order.
     *
     * NOTE(review): the original javadoc promises a warning plus an adjusted
     * linking distance for non-monotone links, but this implementation only
     * asserts (when assertions are enabled) that the object was not already
     * linked — confirm whether the documented behavior lives in an overload.
     *
     * @param cur Current object
     * @param distance Link distance
     * @param par Parent
     */
    public void add(DBIDRef cur, double distance, DBIDRef par) {
        // this variant is for hierarchies without prototypes only
        assert prototypes == null;
        parent.putDBID(cur, par);
        // putDouble returns the previous value; POSITIVE_INFINITY marks "unlinked"
        double olddist = parentDistance.putDouble(cur, distance);
        assert (olddist == Double.POSITIVE_INFINITY) : "Object was already linked!";
        // record the merge sequence number so links can be replayed in order
        order.putInt(cur, mergecount);
        ++mergecount;
    }
}
public class PointLocationFormatter { /** * Formats a point location as an ISO 6709 string .
* @ param pointLocation
* Point location to format
* @ return Formatted string */
private static String formatISO6709Long ( final PointLocation pointLocation ) { } } | final Latitude latitude = pointLocation . getLatitude ( ) ; final Longitude longitude = pointLocation . getLongitude ( ) ; String string = formatLatitudeLong ( latitude ) + formatLongitudeLong ( longitude ) ; final double altitude = pointLocation . getAltitude ( ) ; string = string + formatAltitudeWithSign ( altitude ) ; final String crs = pointLocation . getCoordinateReferenceSystemIdentifier ( ) ; string = string + formatCoordinateReferenceSystemIdentifier ( crs ) ; return string + "/" ; |
public class GridList { /** * Get the record bookmark at this location .
* @ param iTargetPosition The logical position to retrieve .
* @ return The bookmark at this location . */
public Object elementAt ( int iTargetPosition ) { } } | int iArrayIndex ; Object bookmark = null ; if ( ! this . inRecordList ( iTargetPosition ) ) return null ; // Not here , you need to Move to < bookmark , and move next until you hit this one
iArrayIndex = this . listToArrayIndex ( iTargetPosition ) ; bookmark = m_aRecords [ iArrayIndex ] ; return bookmark ; |
public class BaseMigrationOperation { /** * Verifies that the cluster is active . */
private void verifyClusterState ( ) { } } | NodeEngineImpl nodeEngine = ( NodeEngineImpl ) getNodeEngine ( ) ; ClusterState clusterState = nodeEngine . getClusterService ( ) . getClusterState ( ) ; if ( ! clusterState . isMigrationAllowed ( ) ) { throw new IllegalStateException ( "Cluster state does not allow migrations! " + clusterState ) ; } |
public class A_CmsSearchIndex { /** * Initializes the search index . < p >
* @ throws CmsSearchException if the index source association failed or a configuration error occurred */
public void initialize ( ) throws CmsSearchException { } } | if ( ! isEnabled ( ) ) { // index is disabled , no initialization is required
return ; } String sourceName = null ; CmsSearchIndexSource indexSource = null ; List < String > searchIndexSourceDocumentTypes = null ; List < String > resourceNames = null ; String resourceName = null ; m_sources = new ArrayList < CmsSearchIndexSource > ( ) ; m_path = getPath ( ) ; for ( int i = 0 , n = m_sourceNames . size ( ) ; i < n ; i ++ ) { try { sourceName = m_sourceNames . get ( i ) ; indexSource = OpenCms . getSearchManager ( ) . getIndexSource ( sourceName ) ; m_sources . add ( indexSource ) ; resourceNames = indexSource . getResourcesNames ( ) ; searchIndexSourceDocumentTypes = indexSource . getDocumentTypes ( ) ; for ( int j = 0 , m = resourceNames . size ( ) ; j < m ; j ++ ) { resourceName = resourceNames . get ( j ) ; m_documenttypes . put ( resourceName , searchIndexSourceDocumentTypes ) ; } } catch ( Exception e ) { // mark this index as disabled
setEnabled ( false ) ; throw new CmsSearchException ( Messages . get ( ) . container ( Messages . ERR_INDEX_SOURCE_ASSOCIATION_1 , sourceName ) , e ) ; } } // initialize the search field configuration
if ( m_fieldConfigurationName == null ) { // if not set , use standard field configuration
m_fieldConfigurationName = CmsSearchFieldConfiguration . STR_STANDARD ; } m_fieldConfiguration = OpenCms . getSearchManager ( ) . getFieldConfiguration ( m_fieldConfigurationName ) ; if ( m_fieldConfiguration == null ) { // we must have a valid field configuration to continue
throw new CmsSearchException ( Messages . get ( ) . container ( Messages . ERR_FIELD_CONFIGURATION_UNKNOWN_2 , m_name , m_fieldConfigurationName ) ) ; } // initialize the index searcher instance
onIndexChanged ( true ) ; |
public class AbstractScope { /** * Returns < code > true < / code > if the given description { @ code input } from the parent scope is
* shadowed by local elements .
* @ return < code > true < / code > if the given description { @ code input } from the parent scope is
* shadowed by local elements . */
protected boolean isShadowed ( IEObjectDescription input ) { } } | final Iterable < IEObjectDescription > localElements = getLocalElementsByName ( input . getName ( ) ) ; final boolean isEmpty = isEmpty ( localElements ) ; return ! isEmpty ; |
public class A_CmsSearchIndex { /** * Returns the document type factory used for the given resource in this index , or < code > null < / code >
* in case the resource is not indexed by this index . < p >
* A resource is indexed if the following is all true : < ol >
* < li > The index contains at last one index source matching the root path of the given resource .
* < li > For this matching index source , the document type factory needed by the resource is also configured .
* < / ol >
* This default implementation uses the check as internal Solr indexes do . Overwrite it if necessary .
* @ param res the resource to check
* @ return the document type factory used for the given resource in this index , or < code > null < / code >
* in case the resource is not indexed by this index */
public I_CmsDocumentFactory getDocumentFactory ( CmsResource res ) { } } | if ( isIndexing ( res ) ) { if ( OpenCms . getResourceManager ( ) . getResourceType ( res ) instanceof CmsResourceTypeXmlContainerPage ) { return OpenCms . getSearchManager ( ) . getDocumentFactory ( CmsSolrDocumentContainerPage . TYPE_CONTAINERPAGE_SOLR , "text/html" ) ; } if ( CmsResourceTypeXmlContent . isXmlContent ( res ) ) { return OpenCms . getSearchManager ( ) . getDocumentFactory ( CmsSolrDocumentXmlContent . TYPE_XMLCONTENT_SOLR , "text/html" ) ; } return OpenCms . getSearchManager ( ) . getDocumentFactory ( res ) ; } return null ; |
public class EntryPointNode {

    /**
     * Retracts a fact object from this <code>RuleBase</code> and the specified
     * <code>WorkingMemory</code>. The deletion is not performed inline: it is
     * queued as a propagation entry to be processed by the working memory.
     *
     * @param handle the handle of the fact to retract
     * @param context the propagation context describing this deletion
     * @param objectTypeConf the object type configuration of the retracted fact
     * @param workingMemory the working memory session
     */
    public void retractObject(final InternalFactHandle handle, final PropagationContext context, final ObjectTypeConf objectTypeConf, final InternalWorkingMemory workingMemory) {
        if (log.isTraceEnabled()) {
            log.trace("Delete {}", handle.toString());
        }
        // Enqueue the delete so it is applied in proper propagation order.
        workingMemory.addPropagation(new PropagationEntry.Delete(this, handle, context, objectTypeConf));
    }
}
public class ReactionEngine { /** * Extract the mechanism necessary for this reaction .
* @ param entry The EntryReact object */
private void extractMechanism ( EntryReact entry ) { } } | String mechanismName = "org.openscience.cdk.reaction.mechanism." + entry . getMechanism ( ) ; try { mechanism = ( IReactionMechanism ) this . getClass ( ) . getClassLoader ( ) . loadClass ( mechanismName ) . newInstance ( ) ; logger . info ( "Loaded mechanism: " , mechanismName ) ; } catch ( ClassNotFoundException exception ) { logger . error ( "Could not find this IReactionMechanism: " , mechanismName ) ; logger . debug ( exception ) ; } catch ( InstantiationException | IllegalAccessException exception ) { logger . error ( "Could not load this IReactionMechanism: " , mechanismName ) ; logger . debug ( exception ) ; } |
public class PersistenceBrokerImpl {

    /**
     * Deletes the concrete representation of the specified object in the underlying
     * persistence system. This method is intended for use in top-level api or
     * by internal calls.
     *
     * @param obj the object to delete
     * @param ignoreReferences with this flag the automatic deletion/unlinking
     * of references can be suppressed (independent of the used auto-delete setting in metadata),
     * except {@link org.apache.ojb.broker.metadata.SuperReferenceDescriptor} --
     * these kind of reference (descriptor) will always be performed. If <em>true</em>
     * all "normal" referenced objects will be ignored, only the specified object is handled.
     * @throws PersistenceBrokerException if the delete operation fails
     */
    public void delete(Object obj, boolean ignoreReferences) throws PersistenceBrokerException {
        if (isTxCheck() && !isInTransaction()) {
            if (logger.isEnabledFor(Logger.ERROR)) {
                String msg = "No running PB-tx found. Please, only delete objects in context of a PB-transaction" + " to avoid side-effects - e.g. when rollback of complex objects.";
                try {
                    // Deliberate throw/catch: the Exception is created and immediately
                    // caught only so the log entry carries a stack trace showing the
                    // caller that deleted outside a PB transaction. Not a real failure.
                    throw new Exception("** Delete object without active PersistenceBroker transaction **");
                } catch (Exception e) {
                    logger.error(msg, e);
                }
            }
        }
        try {
            doDelete(obj, ignoreReferences);
        } finally {
            // Always reset the delete-tracking set, even if doDelete failed.
            markedForDelete.clear();
        }
    }
}
public class SystemUtil { /** * returns the Hoome Directory of the System
* @ return home directory */
public static Resource getHomeDirectory ( ) { } } | if ( homeFile != null ) return homeFile ; ResourceProvider frp = ResourcesImpl . getFileResourceProvider ( ) ; String homeStr = System . getProperty ( "user.home" ) ; if ( homeStr != null ) { homeFile = frp . getResource ( homeStr ) ; homeFile = ResourceUtil . getCanonicalResourceEL ( homeFile ) ; } return homeFile ; |
public class AppEngineDatastoreService {

    /**
     * Executes the specified query without an offset qualification.
     *
     * @param query the query to be executed
     * @return the query result as {@code List} view
     */
    public List<Entity> query(Query query) {
        // Delegate with an explicit zero offset so results start at the beginning.
        return query(query, FetchOptions.Builder.withOffset(0));
    }
}
public class StackTraceSampleCoordinator {

    /**
     * Triggers a stack trace sample to all tasks.
     *
     * @param tasksToSample tasks to sample
     * @param numSamples number of stack trace samples to collect
     * @param delayBetweenSamples delay between consecutive samples
     * @param maxStackTraceDepth maximum depth of the stack trace. 0 indicates
     * no maximum and keeps the complete stack trace.
     * @return a future of the completed stack trace sample
     */
    @SuppressWarnings("unchecked")
    public CompletableFuture<StackTraceSample> triggerStackTraceSample(ExecutionVertex[] tasksToSample, int numSamples, Time delayBetweenSamples, int maxStackTraceDepth) {
        checkNotNull(tasksToSample, "Tasks to sample");
        checkArgument(tasksToSample.length >= 1, "No tasks to sample");
        checkArgument(numSamples >= 1, "No number of samples");
        checkArgument(maxStackTraceDepth >= 0, "Negative maximum stack trace depth");
        // Execution IDs of running tasks
        ExecutionAttemptID[] triggerIds = new ExecutionAttemptID[tasksToSample.length];
        Execution[] executions = new Execution[tasksToSample.length];
        // Check that all tasks are RUNNING before triggering anything. The
        // triggering can still fail (this check is inherently racy: a task may
        // leave RUNNING between the check and the trigger below).
        for (int i = 0; i < triggerIds.length; i++) {
            Execution execution = tasksToSample[i].getCurrentExecutionAttempt();
            if (execution != null && execution.getState() == ExecutionState.RUNNING) {
                executions[i] = execution;
                triggerIds[i] = execution.getAttemptId();
            } else {
                // Fail fast: a single non-running task aborts the whole sample.
                return FutureUtils.completedExceptionally(new IllegalStateException("Task " + tasksToSample[i].getTaskNameWithSubtaskIndex() + " is not running."));
            }
        }
        synchronized (lock) {
            if (isShutDown) {
                return FutureUtils.completedExceptionally(new IllegalStateException("Shut down"));
            }
            final int sampleId = sampleIdCounter++;
            LOG.debug("Triggering stack trace sample {}", sampleId);
            final PendingStackTraceSample pending = new PendingStackTraceSample(sampleId, triggerIds);
            // Discard the sample if it takes too long. We don't send cancel
            // messages to the task managers, but only wait for the responses
            // and then ignore them.
            long expectedDuration = numSamples * delayBetweenSamples.toMilliseconds();
            Time timeout = Time.milliseconds(expectedDuration + sampleTimeout);
            // Add the pending sample before scheduling the discard task to
            // prevent races with removing it again.
            pendingSamples.put(sampleId, pending);
            // Trigger all samples; each response is collected (or the whole
            // sample cancelled on failure) asynchronously on `executor`.
            for (Execution execution : executions) {
                final CompletableFuture<StackTraceSampleResponse> stackTraceSampleFuture = execution.requestStackTraceSample(sampleId, numSamples, delayBetweenSamples, maxStackTraceDepth, timeout);
                stackTraceSampleFuture.handleAsync(
                    (StackTraceSampleResponse stackTraceSampleResponse, Throwable throwable) -> {
                        if (stackTraceSampleResponse != null) {
                            collectStackTraces(stackTraceSampleResponse.getSampleId(), stackTraceSampleResponse.getExecutionAttemptID(), stackTraceSampleResponse.getSamples());
                        } else {
                            // Any failed trigger cancels the whole pending sample.
                            cancelStackTraceSample(sampleId, throwable);
                        }
                        return null;
                    },
                    executor);
            }
            return pending.getStackTraceSampleFuture();
        }
    }
}
public class GodHandableAction { protected ActionResponse processHookBefore ( ActionHook hook ) { } } | if ( hook == null ) { return ActionResponse . undefined ( ) ; } showBefore ( runtime ) ; ActionResponse response = hook . godHandPrologue ( runtime ) ; if ( isUndefined ( response ) ) { response = hook . hookBefore ( runtime ) ; } if ( isDefined ( response ) ) { runtime . manageActionResponse ( response ) ; } redCardableAssist . assertAfterTxCommitHookNotSpecified ( "before" , response ) ; return response ; |
public class WSManUtils { /** * Validates a UUID value and throws a specific exception if UUID is invalid .
* @ param uuid The UUID value to validate .
* @ param uuidValueOf The property associated to the given UUID value .
* @ throws RuntimeException */
public static void validateUUID ( String uuid , String uuidValueOf ) throws RuntimeException { } } | if ( ! WSManUtils . isUUID ( uuid ) ) { throw new RuntimeException ( "The returned " + uuidValueOf + " is not a valid UUID value! " + uuidValueOf + ": " + uuid ) ; } |
public class StandardBullhornData {

    /**
     * Makes the api call to add files to an entity. Takes a MultipartFile.
     *
     * @param type the file entity type to attach the file to
     * @param entityId the id of the entity receiving the file
     * @param multipartFile the uploaded file content
     * @param externalId external identifier to associate with the file
     * @param params additional file parameters for the request
     * @param deleteFile whether to delete the temporary file after the call
     * @return the wrapper describing the stored file
     */
    protected FileWrapper handleAddFileWithMultipartFile(Class<? extends FileEntity> type, Integer entityId, MultipartFile multipartFile, String externalId, FileParams params, boolean deleteFile) {
        MultiValueMap<String, Object> multiValueMap = null;
        try {
            multiValueMap = restFileManager.addFileToMultiValueMap(multipartFile);
        } catch (IOException e) {
            // NOTE(review): on IOException multiValueMap stays null and the call
            // below still proceeds — confirm handleAddFile tolerates a null map,
            // otherwise this fails later with an NPE instead of a clear error.
            log.error("Error creating temp file", e);
        }
        Map<String, String> uriVariables = restUriVariablesFactory.getUriVariablesForAddFile(BullhornEntityInfo.getTypesRestEntityName(type), entityId, externalId, params);
        String url = restUrlFactory.assembleAddFileUrl(params);
        return this.handleAddFile(type, entityId, multiValueMap, url, uriVariables, multipartFile.getOriginalFilename(), deleteFile);
    }
}
public class KeystoreFactory { /** * Based on a public certificate , private key , alias and password , this method will load the certificate and
* private key as an entry into a newly created keystore , and it will set the provided alias and password to the
* keystore entry .
* @ param certResourceLocation
* @ param privateKeyResourceLocation
* @ param alias
* @ param keyPassword
* @ return */
@ SneakyThrows public KeyStore loadKeystore ( String certResourceLocation , String privateKeyResourceLocation , String alias , String keyPassword ) { } } | KeyStore keystore = createEmptyKeystore ( ) ; X509Certificate cert = loadCert ( certResourceLocation ) ; RSAPrivateKey privateKey = loadPrivateKey ( privateKeyResourceLocation ) ; addKeyToKeystore ( keystore , cert , privateKey , alias , keyPassword ) ; return keystore ; |
public class DoubleRangeValidator {

    /**
     * <p>Returns the specified attribute value, converted to a
     * <code>double</code>.</p>
     *
     * @param attributeValue the attribute value to be converted
     * @throws NumberFormatException if conversion is not possible
     */
    private static double doubleValue(Object attributeValue) throws NumberFormatException {
        // Numbers convert directly; anything else goes through its string form.
        return (attributeValue instanceof Number)
                ? ((Number) attributeValue).doubleValue()
                : Double.parseDouble(attributeValue.toString());
    }
}
public class Image { /** * Draw the image with a given scale
* @ param x The x position to draw the image at
* @ param y The y position to draw the image at
* @ param scale The scaling to apply */
public void draw ( float x , float y , float scale ) { } } | init ( ) ; draw ( x , y , width * scale , height * scale , Color . white ) ; |
public class EpicsApi { /** * Gets all epics of the requested group and its subgroups as a Stream .
* < pre > < code > GitLab Endpoint : GET / groups / : id / epics < / code > < / pre >
* @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path
* @ param authorId returns epics created by the given user id
* @ param labels return epics matching a comma separated list of labels names .
* Label names from the epic group or a parent group can be used
* @ param orderBy return epics ordered by CREATED _ AT or UPDATED _ AT . Default is CREATED _ AT
* @ param sortOrder return epics sorted in ASC or DESC order . Default is DESC
* @ param search search epics against their title and description
* @ return a Stream of matching epics of the requested group and its subgroups
* @ throws GitLabApiException if any exception occurs */
public Stream < Epic > getEpicsStream ( Object groupIdOrPath , Integer authorId , String labels , EpicOrderBy orderBy , SortOrder sortOrder , String search ) throws GitLabApiException { } } | return ( getEpics ( groupIdOrPath , authorId , labels , orderBy , sortOrder , search , getDefaultPerPage ( ) ) . stream ( ) ) ; |
public class CompositeELResolver { /** * For a given base and property , attempts to identify the most general type that is acceptable
* for an object to be passed as the value parameter in a future call to the
* { @ link # setValue ( ELContext , Object , Object , Object ) } method . The result is obtained by
* querying all component resolvers . If this resolver handles the given ( base , property ) pair ,
* the propertyResolved property of the ELContext object must be set to true by the resolver ,
* before returning . If this property is not true after this method is called , the caller should
* ignore the return value . First , propertyResolved is set to false on the provided ELContext .
* Next , for each component resolver in this composite :
* < ol >
* < li > The getType ( ) method is called , passing in the provided context , base and property . < / li >
* < li > If the ELContext ' s propertyResolved flag is false then iteration continues . < / li >
* < li > Otherwise , iteration stops and no more component resolvers are considered . The value
* returned by getType ( ) is returned by this method . < / li >
* < / ol >
* If none of the component resolvers were able to perform this operation , the value null is
* returned and the propertyResolved flag remains set to false . Any exception thrown by
* component resolvers during the iteration is propagated to the caller of this method .
* @ param context
* The context of this evaluation .
* @ param base
* The base object to return the most general property type for , or null to enumerate
* the set of top - level variables that this resolver can evaluate .
* @ param property
* The property or variable to return the acceptable type for .
* @ return If the propertyResolved property of ELContext was set to true , then the most general
* acceptable type ; otherwise undefined .
* @ throws NullPointerException
* if context is null
* @ throws PropertyNotFoundException
* if base is not null and the specified property does not exist or is not readable .
* @ throws ELException
* if an exception was thrown while performing the property or variable resolution .
* The thrown exception must be included as the cause property of this exception , if
* available . */
@ Override public Class < ? > getType ( ELContext context , Object base , Object property ) { } } | context . setPropertyResolved ( false ) ; for ( ELResolver resolver : resolvers ) { Class < ? > type = resolver . getType ( context , base , property ) ; if ( context . isPropertyResolved ( ) ) { return type ; } } return null ; |
public class CmsJspStandardContextBean { /** * Returns the subsite path for the currently requested URI . < p >
* @ return the subsite path */
public String getSubSitePath ( ) { } } | return m_cms . getRequestContext ( ) . removeSiteRoot ( OpenCms . getADEManager ( ) . getSubSiteRoot ( m_cms , m_cms . getRequestContext ( ) . getRootUri ( ) ) ) ; |
public class PredicatedImpl {

    /**
     * <!-- begin-user-doc -->
     * Releases the containment references for the predicate and element
     * features when the given object end is inverse-removed.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case SimpleAntlrPackage.PREDICATED__PREDICATE:
                // basicSet* clears the reference and chains the notifications.
                return basicSetPredicate(null, msgs);
            case SimpleAntlrPackage.PREDICATED__ELEMENT:
                return basicSetElement(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
}
public class JestClientFactory { /** * Extension point */
protected HttpClientConnectionManager getConnectionManager ( ) { } } | HttpClientConnectionManager retval ; Registry < ConnectionSocketFactory > registry = RegistryBuilder . < ConnectionSocketFactory > create ( ) . register ( "http" , httpClientConfig . getPlainSocketFactory ( ) ) . register ( "https" , httpClientConfig . getSslSocketFactory ( ) ) . build ( ) ; if ( httpClientConfig . isMultiThreaded ( ) ) { log . info ( "Using multi thread/connection supporting pooling connection manager" ) ; final PoolingHttpClientConnectionManager poolingConnMgr = new PoolingHttpClientConnectionManager ( registry ) ; final Integer maxTotal = httpClientConfig . getMaxTotalConnection ( ) ; if ( maxTotal != null ) { poolingConnMgr . setMaxTotal ( maxTotal ) ; } final Integer defaultMaxPerRoute = httpClientConfig . getDefaultMaxTotalConnectionPerRoute ( ) ; if ( defaultMaxPerRoute != null ) { poolingConnMgr . setDefaultMaxPerRoute ( defaultMaxPerRoute ) ; } final Map < HttpRoute , Integer > maxPerRoute = httpClientConfig . getMaxTotalConnectionPerRoute ( ) ; for ( Map . Entry < HttpRoute , Integer > entry : maxPerRoute . entrySet ( ) ) { poolingConnMgr . setMaxPerRoute ( entry . getKey ( ) , entry . getValue ( ) ) ; } retval = poolingConnMgr ; } else { log . info ( "Using single thread/connection supporting basic connection manager" ) ; retval = new BasicHttpClientConnectionManager ( registry ) ; } return retval ; |
public class MetadataService { /** * Adds the specified { @ link Token } to the specified { @ code projectName } . */
public CompletableFuture < Revision > addToken ( Author author , String projectName , Token token , ProjectRole role ) { } } | return addToken ( author , projectName , requireNonNull ( token , "token" ) . appId ( ) , role ) ; |
public class ObjectFactory {

    /**
     * Creates an instance of
     * {@link Project.Storepoints.Storepoint.Calendars.Calendar.NonWork}.
     *
     * @return a new, empty NonWork element
     */
    public Project.Storepoints.Storepoint.Calendars.Calendar.NonWork createProjectStorepointsStorepointCalendarsCalendarNonWork() {
        return new Project.Storepoints.Storepoint.Calendars.Calendar.NonWork();
    }
}
public class Model { /** * Import a context from the given configuration
* @ param ctx the context to import the context data to .
* @ param config the { @ code Configuration } containing the context data .
* @ throws ConfigurationException if an error occurred while reading the context data from the { @ code Configuration } .
* @ throws IllegalArgumentException ( since TODO add version ) if the given context or configuration is { @ code null } .
* @ since 2.4.0 */
public void importContext ( Context ctx , Configuration config ) throws ConfigurationException { } } | validateContextNotNull ( ctx ) ; validateConfigNotNull ( config ) ; for ( ContextDataFactory cdf : this . contextDataFactories ) { cdf . importContextData ( ctx , config ) ; } |
public class File { /** * Converts a String [ ] containing filenames to a File [ ] .
* Note that the filenames must not contain slashes .
* This method is to remove duplication in the implementation
* of File . list ' s overloads . */
private File [ ] filenamesToFiles ( String [ ] filenames ) { } } | if ( filenames == null ) { return null ; } int count = filenames . length ; File [ ] result = new File [ count ] ; for ( int i = 0 ; i < count ; ++ i ) { result [ i ] = new File ( this , filenames [ i ] ) ; } return result ; |
public class StringUtil { /** * Converts the specified array of bytes to a hex string .
* @ param bytes The array of bytes to convert to a string .
* @ return The hexadecimal representation of < code > bytes < / code > . */
public static String toHex ( byte [ ] bytes ) { } } | final char [ ] string = new char [ 2 * bytes . length ] ; int i = 0 ; for ( byte b : bytes ) { string [ i ++ ] = hexDigits [ ( b >> 4 ) & 0x0f ] ; string [ i ++ ] = hexDigits [ b & 0x0f ] ; } return new String ( string ) ; |
public class Property {

    /**
     * Sets the property value which this metamodel property instance
     * represents to the specified entity instance. The property path is walked
     * reflectively: each intermediate segment is read from its declared field,
     * then the final field receives the value.
     *
     * @param entity the entity instance into which you attempt to set the
     * property value. <b>The specified entity has to be a JavaBean so the
     * property value can be set into it.</b>
     * @param value the property value to be set into the entity instance
     */
    public void set(E entity, Object value) {
        Object object = entity;
        // Walk all intermediate path segments to reach the owner of the final field.
        for (int i = 0; i < path().size() - 1; i++) {
            try {
                Field field = object.getClass().getDeclaredField(path().get(i));
                field.setAccessible(true);
                object = field.get(object);
            } catch (Exception e) {
                // Any reflective failure (missing field, access denial) is rethrown unchecked.
                throw new UncheckedException(e);
            }
        }
        try {
            // NOTE(review): getDeclaredField only sees fields declared on the exact
            // runtime class, not inherited ones — confirm mapped fields are never inherited.
            Field field = object.getClass().getDeclaredField(name);
            field.setAccessible(true);
            field.set(object, value);
        } catch (Exception e) {
            throw new UncheckedException(e);
        }
    }
}
public class share {

    /**
     * Private method that handles facebook and twitter sharing: resolves the
     * ACTION_SEND intent against installed activities and launches the first
     * one whose activity class name contains the given identifier.
     *
     * @param message message to be delivered
     * @param activityInfoName substring identifying the target share activity
     */
    private static void shareMethod(String message, String activityInfoName) {
        Intent shareIntent = new Intent(Intent.ACTION_SEND);
        shareIntent.setType("text/plain");
        shareIntent.putExtra(Intent.EXTRA_TEXT, message);
        PackageManager pm = QuickUtils.getContext().getPackageManager();
        List<ResolveInfo> activityList = pm.queryIntentActivities(shareIntent, 0);
        for (final ResolveInfo app : activityList) {
            // Pick the first installed activity matching the requested app.
            if ((app.activityInfo.name).contains(activityInfoName)) {
                final ActivityInfo activity = app.activityInfo;
                final ComponentName name = new ComponentName(activity.applicationInfo.packageName, activity.name);
                shareIntent.addCategory(Intent.CATEGORY_LAUNCHER);
                // NEW_TASK is required when starting an activity from a non-activity context.
                shareIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
                shareIntent.setComponent(name);
                QuickUtils.getContext().startActivity(shareIntent);
                break;
            }
        }
    }
}
public class NodeLocatorHelper { /** * Returns all nodes known in the current config .
* @ return all currently known nodes . */
public List < InetAddress > nodes ( ) { } } | List < InetAddress > allNodes = new ArrayList < InetAddress > ( ) ; BucketConfig config = bucketConfig . get ( ) ; for ( NodeInfo nodeInfo : config . nodes ( ) ) { try { allNodes . add ( InetAddress . getByName ( nodeInfo . hostname ( ) . address ( ) ) ) ; } catch ( UnknownHostException e ) { throw new IllegalStateException ( e ) ; } } return allNodes ; |
public class SoundStore { /** * Play the specified buffer as a sound effect with the specified
* pitch and gain .
* @ param buffer The ID of the buffer to play
* @ param pitch The pitch to play at
* @ param gain The gain to play at
* @ param loop True if the sound should loop
* @ param x The x position to play the sound from
* @ param y The y position to play the sound from
* @ param z The z position to play the sound from
* @ return source The source that will be used */
int playAsSoundAt ( int buffer , float pitch , float gain , boolean loop , float x , float y , float z ) { } } | gain *= soundVolume ; if ( gain == 0 ) { gain = 0.001f ; } if ( soundWorks ) { if ( sounds ) { int nextSource = findFreeSource ( ) ; if ( nextSource == - 1 ) { return - 1 ; } AL10 . alSourceStop ( sources . get ( nextSource ) ) ; AL10 . alSourcei ( sources . get ( nextSource ) , AL10 . AL_BUFFER , buffer ) ; AL10 . alSourcef ( sources . get ( nextSource ) , AL10 . AL_PITCH , pitch ) ; AL10 . alSourcef ( sources . get ( nextSource ) , AL10 . AL_GAIN , gain ) ; AL10 . alSourcei ( sources . get ( nextSource ) , AL10 . AL_LOOPING , loop ? AL10 . AL_TRUE : AL10 . AL_FALSE ) ; sourcePos . clear ( ) ; sourceVel . clear ( ) ; sourceVel . put ( new float [ ] { 0 , 0 , 0 } ) ; sourcePos . put ( new float [ ] { x , y , z } ) ; sourcePos . flip ( ) ; sourceVel . flip ( ) ; AL10 . alSource ( sources . get ( nextSource ) , AL10 . AL_POSITION , sourcePos ) ; AL10 . alSource ( sources . get ( nextSource ) , AL10 . AL_VELOCITY , sourceVel ) ; AL10 . alSourcePlay ( sources . get ( nextSource ) ) ; return nextSource ; } } return - 1 ; |
public class SummaryComputer {

    /**
     * Computes the score of a single sentence by matching it against the
     * weighted keyword trie.
     *
     * @param sentence the sentence to score; its score field is updated in place
     * @param forest the keyword trie mapping words to their weights
     */
    private void computeScore(Sentence sentence, SmartForest<Double> forest) {
        SmartGetWord<Double> sgw = new SmartGetWord<>(forest, sentence.value);
        String name = null;
        // Accumulate the weight of every keyword found in the sentence text.
        while ((name = sgw.getFrontWords()) != null) {
            sentence.updateScore(name, sgw.getParam());
        }
        if (sentence.score == 0) {
            // No keyword hit: apply a small penalty proportional to length.
            sentence.score = sentence.value.length() * -0.005;
        } else {
            // Normalize by log-length so long sentences are not unfairly favored.
            sentence.score /= Math.log(sentence.value.length() + 3);
        }
    }
}
public class JumboEnumSet {

    /**
     * Adds the specified element to this set if it is not already present.
     *
     * @param e element to be added to this set
     * @return <tt>true</tt> if the set changed as a result of the call
     * @throws NullPointerException if <tt>e</tt> is null
     */
    public boolean add(E e) {
        typeCheck(e);
        int eOrdinal = e.ordinal();
        // Each long word holds 64 elements; word index is ordinal / 64.
        int eWordNum = eOrdinal >>> 6;
        long oldElements = elements[eWordNum];
        // The shift count of << is taken mod 64, so no explicit masking is needed.
        elements[eWordNum] |= (1L << eOrdinal);
        boolean result = (elements[eWordNum] != oldElements);
        if (result)
            size++;
        return result;
    }
}
public class SortAttributesOperation { /** * Sort attributes of each element */
@ Override public void processElement ( Wrapper < Element > elementWrapper ) { } } | Element element = elementWrapper . getContent ( ) ; element . setAttributes ( getSortedAttributes ( element ) ) ; |
public class backup_policy {

    /**
     * <pre>
     * Performs generic data validation for the operation to be performed
     * </pre>
     * @param operationType the operation (e.g. add/modify) being validated
     * @throws Exception if any field violates its declared constraints
     */
    protected void validate(String operationType) throws Exception {
        super.validate(operationType);
        // policy_name: 1..128 characters.
        MPSString policy_name_validator = new MPSString();
        policy_name_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 128);
        policy_name_validator.setConstraintMinStrLen(MPSConstants.GENERIC_CONSTRAINT, 1);
        policy_name_validator.validate(operationType, policy_name, "\"policy_name\"");
        // backup_to_retain: 1..30, and required for modify operations.
        MPSInt backup_to_retain_validator = new MPSInt();
        backup_to_retain_validator.setConstraintMinValue(MPSConstants.GENERIC_CONSTRAINT, 1);
        backup_to_retain_validator.setConstraintMaxValue(MPSConstants.GENERIC_CONSTRAINT, 30);
        backup_to_retain_validator.setConstraintIsReq(MPSConstants.MODIFY_CONSTRAINT, true);
        backup_to_retain_validator.validate(operationType, backup_to_retain, "\"backup_to_retain\"");
    }
}
public class RegressionDataSet {

    /**
     * Returns the i'th data point in the data set paired with its target
     * regressor value. Modifying the DataPointPair will affect the data set.
     *
     * @param i the index of the data point to obtain
     * @return the i'th DataPointPair
     */
    public DataPointPair<Double> getDataPointPair(int i) {
        return new DataPointPair<>(getDataPoint(i), targets.get(i));
    }
}
public class cmpglobal_binding { /** * Use this API to fetch a cmpglobal _ binding resource . */
public static cmpglobal_binding get ( nitro_service service ) throws Exception { } } | cmpglobal_binding obj = new cmpglobal_binding ( ) ; cmpglobal_binding response = ( cmpglobal_binding ) obj . get_resource ( service ) ; return response ; |
public class BindingSet { /** * True when this type ' s bindings use Resource directly instead of Context . */
private boolean hasResourceBindingsNeedingResource ( int sdk ) { } } | for ( ResourceBinding binding : resourceBindings ) { if ( binding . requiresResources ( sdk ) ) { return true ; } } return false ; |
public class AmazonInspectorClient { /** * Lists the ARNs of the assessment targets within this AWS account . For more information about assessment targets ,
* see < a href = " https : / / docs . aws . amazon . com / inspector / latest / userguide / inspector _ applications . html " > Amazon Inspector
* Assessment Targets < / a > .
* @ param listAssessmentTargetsRequest
* @ return Result of the ListAssessmentTargets operation returned by the service .
* @ throws InternalException
* Internal server error .
* @ throws InvalidInputException
* The request was rejected because an invalid or out - of - range value was supplied for an input parameter .
* @ throws AccessDeniedException
* You do not have required permissions to access the requested resource .
* @ sample AmazonInspector . ListAssessmentTargets
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / inspector - 2016-02-16 / ListAssessmentTargets "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListAssessmentTargetsResult listAssessmentTargets ( ListAssessmentTargetsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListAssessmentTargets ( request ) ; |
public class ConfigOptionBuilder {

    /**
     * Configures an option that takes no argument on the command line; the
     * given fixed value is inserted into the map instead of a parsed argument.
     *
     * @param commandLineOption specification of the command line option
     * @param value the value inserted into the map in place of the argument
     * @return this builder, for chaining
     */
    public ConfigOptionBuilder setCommandLineOptionWithoutArgument(CommandLineOption commandLineOption, Object value) {
        co.setCommandLineOption(commandLineOption);
        co.setValue(value);
        return this;
    }
}
public class JmxData {

    /**
     * Converts an object to a diagnostic string, guarding against toString()
     * implementations that throw.
     *
     * @param obj the object to render, may be null
     * @return "null" for null; otherwise the object's string form (or, if
     *         toString() threw, the error's description) followed by its runtime type
     */
    @SuppressWarnings("PMD.AvoidCatchingThrowable")
    static String mkString(Object obj) {
        if (obj == null) {
            return "null";
        }
        // getClass() cannot throw, so the type suffix is safe to build up front.
        String typeSuffix = " (type is " + obj.getClass() + ")";
        try {
            return obj.toString() + typeSuffix;
        } catch (Throwable t) {
            // Deliberately broad: a diagnostic helper must never propagate failures.
            return t.getClass().toString() + ": " + t.getMessage() + typeSuffix;
        }
    }
}
public class EnableEnhancedMonitoringResult {

    /**
     * Represents the list of all the metrics that would be in the enhanced state
     * after the operation.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDesiredShardLevelMetrics(java.util.Collection)} or
     * {@link #withDesiredShardLevelMetrics(java.util.Collection)} if you want to
     * override the existing values.
     *
     * @param desiredShardLevelMetrics the metrics that would be in the enhanced
     *        state after the operation
     * @return a reference to this object so that method calls can be chained together
     * @see MetricsName
     */
    public EnableEnhancedMonitoringResult withDesiredShardLevelMetrics(String... desiredShardLevelMetrics) {
        if (this.desiredShardLevelMetrics == null) {
            // Lazily create the backing list, presized to the incoming values.
            setDesiredShardLevelMetrics(new com.amazonaws.internal.SdkInternalList<String>(desiredShardLevelMetrics.length));
        }
        for (String ele : desiredShardLevelMetrics) {
            this.desiredShardLevelMetrics.add(ele);
        }
        return this;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.