signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class sslservice { /** * Use this API to unset the properties of sslservice resource . * Properties that need to be unset are specified in args array . */ public static base_response unset ( nitro_service client , sslservice resource , String [ ] args ) throws Exception { } }
sslservice unsetresource = new sslservice ( ) ; unsetresource . servicename = resource . servicename ; return unsetresource . unset_resource ( client , args ) ;
public class CollectionDescriptor {
    /**
     * Serializes this collection descriptor to its repository XML form:
     * an opening tag with attributes, nested foreign-key elements, and a
     * closing tag.
     *
     * @see XmlCapable#toXML()
     * @return the XML representation of this descriptor
     */
    public String toXML() {
        RepositoryTags tags = RepositoryTags.getInstance();
        String eol = SystemUtils.LINE_SEPARATOR;

        // write opening tag
        String result = " " + tags.getOpeningTagNonClosingById(COLLECTION_DESCRIPTOR) + eol;

        // write attributes
        // name
        result += " " + tags.getAttribute(FIELD_NAME, this.getAttributeName()) + eol;

        // collection class is optional
        if (getCollectionClassName() != null) {
            result += " " + tags.getAttribute(COLLECTION_CLASS, this.getCollectionClassName()) + eol;
        }

        // element-class-ref
        result += " " + tags.getAttribute(ITEMS_CLASS, this.getItemClassName()) + eol;

        // indirection-table is optional (only emitted for m:n relations)
        if (isMtoNRelation()) {
            result += " " + tags.getAttribute(INDIRECTION_TABLE, getIndirectionTable()) + eol;
        }

        // proxyReference is optional, disabled by default
        if (isLazy()) {
            result += " " + tags.getAttribute(PROXY_REFERENCE, "true") + eol;
            result += " " + tags.getAttribute(PROXY_PREFETCHING_LIMIT, "" + this.getProxyPrefetchingLimit()) + eol;
        }

        // reference refresh is optional, disabled by default
        if (isRefresh()) {
            result += " " + tags.getAttribute(REFRESH, "true") + eol;
        }

        // auto retrieve
        result += " " + tags.getAttribute(AUTO_RETRIEVE, "" + getCascadeRetrieve()) + eol;

        // auto update
        result += " " + tags.getAttribute(AUTO_UPDATE, getCascadeAsString(getCascadingStore())) + eol;

        // auto delete
        result += " " + tags.getAttribute(AUTO_DELETE, getCascadeAsString(getCascadingDelete())) + eol;

        // otm-dependent is optional, disabled by default
        if (getOtmDependent()) {
            result += " " + tags.getAttribute(OTM_DEPENDENT, "true") + eol;
        }

        // close opening tag
        result += " >" + eol;

        // write elements
        // inverse fk elements: entries are stored either as Integer field ids
        // or as String field names, and each form is serialized differently
        for (int i = 0; i < getForeignKeyFields().size(); i++) {
            Object obj = getForeignKeyFields().get(i);
            if (obj instanceof Integer) {
                String fkId = obj.toString();
                result += " " + tags.getOpeningTagNonClosingById(INVERSE_FK) + " ";
                result += tags.getAttribute(FIELD_ID_REF, fkId) + "/>" + eol;
            } else {
                String fk = (String) obj;
                result += " " + tags.getOpeningTagNonClosingById(INVERSE_FK) + " ";
                result += tags.getAttribute(FIELD_REF, fk) + "/>" + eol;
            }
        }

        // write optional M:N elements
        // m:n relationship settings, optional
        if (isMtoNRelation()) {
            // foreign keys to this class
            for (int i = 0; i < getFksToThisClass().length; i++) {
                String fkId = getFksToThisClass()[i];
                result += " " + tags.getOpeningTagNonClosingById(FK_POINTING_TO_THIS_CLASS) + " ";
                result += tags.getAttribute(COLUMN_NAME, fkId) + "/>" + eol;
            }
            // foreign keys to item class
            for (int i = 0; i < getFksToItemClass().length; i++) {
                String fkId = getFksToItemClass()[i];
                result += " " + tags.getOpeningTagNonClosingById(FK_POINTING_TO_ITEMS_CLASS) + " ";
                result += tags.getAttribute(COLUMN_NAME, fkId) + "/>" + eol;
            }
        }

        // closing tag
        result += " " + tags.getClosingTagById(COLLECTION_DESCRIPTOR) + eol;
        return result;
    }
}
public class PollTcpManagerNio { /** * Starts the manager . */ @ Override public boolean start ( ) { } }
if ( ! _lifecycle . toStarting ( ) ) { return false ; } log . isLoggable ( Level . FINER ) ; String name = "resin-nio-select-manager-" + _gId ++ ; _thread = new Thread ( this , name ) ; _thread . setDaemon ( true ) ; _thread . start ( ) ; _lifecycle . waitForActive ( 2000 ) ; if ( log . isLoggable ( Level . FINER ) ) log . finer ( this + " active" ) ; log . fine ( "Non-blocking keepalive enabled with max sockets = " + _selectMax ) ; return true ;
public class Utils { /** * Replies if the given annotation is an annotation from the SARL core library . * @ param type the type of the annotation * @ return < code > true < / code > if the given type is a SARL annotation . */ public static boolean isSARLAnnotation ( Class < ? > type ) { } }
return ( type != null && Annotation . class . isAssignableFrom ( type ) ) && isSARLAnnotation ( type . getPackage ( ) . getName ( ) ) ;
public class Executors { /** * Creates a work - stealing thread pool using the number of * { @ linkplain Runtime # availableProcessors available processors } * as its target parallelism level . * @ return the newly created thread pool * @ see # newWorkStealingPool ( int ) * @ since 1.8 */ public static ExecutorService newWorkStealingPool ( ) { } }
return new ForkJoinPool ( Runtime . getRuntime ( ) . availableProcessors ( ) , ForkJoinPool . defaultForkJoinWorkerThreadFactory , null , true ) ;
public class ErrorReporterImpl {
    /**
     * Routes an uncaught exception either into ACRA's report pipeline or,
     * when reporting is disabled or itself fails, to the platform default
     * exception handler.
     *
     * (non-Javadoc)
     * @see java.lang.Thread.UncaughtExceptionHandler#uncaughtException(java.lang.Thread, java.lang.Throwable)
     */
    @Override
    public void uncaughtException(@Nullable Thread t, @NonNull Throwable e) {
        // If we're not enabled then just pass the Exception on to the defaultExceptionHandler.
        if (!reportExecutor.isEnabled()) {
            reportExecutor.handReportToDefaultExceptionHandler(t, e);
            return;
        }
        try {
            ACRA.log.e(LOG_TAG, "ACRA caught a " + e.getClass().getSimpleName() + " for " + context.getPackageName(), e);
            if (ACRA.DEV_LOGGING) ACRA.log.d(LOG_TAG, "Building report");
            // Generate and send crash report
            new ReportBuilder()
                    .uncaughtExceptionThread(t)
                    .exception(e)
                    .customData(customData)
                    .endApplication()
                    .build(reportExecutor);
        } catch (Exception fatality) {
            // ACRA failed. Prevent any recursive call to ACRA.uncaughtException(), let the native reporter do its job.
            ACRA.log.e(LOG_TAG, "ACRA failed to capture the error - handing off to native error reporter", fatality);
            reportExecutor.handReportToDefaultExceptionHandler(t, e);
        }
    }
}
public class Chars { /** * 判断字符中每个字符的类型 * @ param str 字符串 * @ see Chars # getType ( char ) */ public static CharType [ ] getType ( String str ) { } }
CharType [ ] tag = new CharType [ str . length ( ) ] ; for ( int i = 0 ; i < str . length ( ) ; i ++ ) { char c = str . charAt ( i ) ; tag [ i ] = getType ( c ) ; } return tag ;
public class CATAsynchReadAheadReader {
    /**
     * Safely stop the consumer session feeding this read-ahead reader.
     * Any failure to stop is recorded via FFDC and debug trace rather than
     * propagated to the caller.
     */
    public void stopConsumer() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "stopConsumer");
        try {
            // lock the consumerSession to ensure visibility of update to started.
            synchronized (consumerSession) {
                consumerSession.getConsumerSession().stop();
                consumerSession.started = false;
            }
        } catch (Throwable t) {
            // NOTE(review): the FFDC probe cites ".consumeMessages" and the
            // CONSUME_MSGS_02 constant even though we are in stopConsumer() —
            // looks copy-pasted; confirm the intended probe id before changing.
            FFDCFilter.processException(t, CLASS_NAME + ".consumeMessages", CommsConstants.CATASYNCHRHREADER_CONSUME_MSGS_02, this);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Unable to stop consumer session due to Throwable: " + t);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "stopConsumer");
    }
}
public class N { /** * The present order is kept in the result list . * @ param a * @ param n * @ param cmp * @ return */ public static < T > List < T > topp ( final T [ ] a , final int n , final Comparator < ? super T > cmp ) { } }
return topp ( a , 0 , len ( a ) , n , cmp ) ;
public class UsersAPIClient { /** * Update the user _ metadata calling < a href = " https : / / auth0 . com / docs / api / management / v2 # ! / Users / patch _ users _ by _ id " > ' / api / v2 / users / : userId ' < / a > endpoint * Example usage : * < pre > * { @ code * client . updateMetadata ( " { user id } " , " { user metadata } " ) * . start ( new BaseCallback < UserProfile , ManagementException > ( ) { * { @ literal } Override * public void onSuccess ( UserProfile payload ) { } * { @ literal } Override * public void onFailure ( ManagementException error ) { } * < / pre > * @ param userId of the primary identity to unlink * @ param userMetadata to merge with the existing one * @ return a request to start */ @ SuppressWarnings ( "WeakerAccess" ) public Request < UserProfile , ManagementException > updateMetadata ( String userId , Map < String , Object > userMetadata ) { } }
HttpUrl url = HttpUrl . parse ( auth0 . getDomainUrl ( ) ) . newBuilder ( ) . addPathSegment ( API_PATH ) . addPathSegment ( V2_PATH ) . addPathSegment ( USERS_PATH ) . addPathSegment ( userId ) . build ( ) ; return factory . PATCH ( url , client , gson , UserProfile . class , mgmtErrorBuilder ) . addParameter ( USER_METADATA_KEY , userMetadata ) ;
public class LogFileHeader { /** * Return the firstRecordSequenceNumber field stored in the target header * @ return long The firstRecordSequenceNumber field . */ public long firstRecordSequenceNumber ( ) { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "firstRecordSequenceNumber" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "firstRecordSequenceNumber" , new Long ( _firstRecordSequenceNumber ) ) ; return _firstRecordSequenceNumber ;
public class MinioClient { /** * Creates a bucket with default region . * @ param bucketName Bucket name . * @ throws InvalidBucketNameException upon invalid bucket name is given * @ throws NoSuchAlgorithmException * upon requested algorithm was not found during signature calculation * @ throws IOException upon connection error * @ throws InvalidKeyException * upon an invalid access key or secret key * @ throws NoResponseException upon no response from server * @ throws XmlPullParserException upon parsing response xml * @ throws ErrorResponseException upon unsuccessful execution . * @ throws ErrorResponseException upon unsuccessful execution * @ throws InternalException upon internal library error * @ throws InsufficientDataException upon getting EOFException while reading given */ public void makeBucket ( String bucketName ) throws InvalidBucketNameException , RegionConflictException , NoSuchAlgorithmException , InsufficientDataException , IOException , InvalidKeyException , NoResponseException , XmlPullParserException , ErrorResponseException , InternalException { } }
this . makeBucket ( bucketName , null ) ;
public class HttpConnector { /** * Returns the system port for this connector . */ public int getPort ( ) { } }
if ( port != 0 ) { return port ; } else { final Connector connector = connectorHolder . get ( ) ; if ( connector != null ) { Preconditions . checkState ( connector . getLocalPort ( ) > 0 , "no port was set and the connector is not yet started!" ) ; return connector . getLocalPort ( ) ; } else { return 0 ; } }
public class PropertiesField {
    /**
     * Set this property in the user's property area.
     *
     * @param strProperty The property key.
     * @param strValue The property value; {@code null} deletes the key, the
     *                 empty string is stored as a (blank) value.
     * @param bDisplayOption If true, display the new field.
     * @param iMoveMode The move mode.
     * @return An error code (NORMAL_RETURN for success).
     */
    public int setProperty(String strProperty, String strValue, boolean bDisplayOption, int iMoveMode) {
        // Lazily populate the cache on first use.
        if (m_propertiesCache == null)
            m_propertiesCache = this.loadProperties();
        boolean bChanged = false;
        // If strValue == null, delete; if == '', it's okay (key='') [a blank property]!
        if (strValue == null) {
            if (m_propertiesCache.get(strProperty) != null) {
                m_propertiesCache.remove(strProperty);
                bChanged = true;
            }
        } else {
            if (!strValue.equals(m_propertiesCache.get(strProperty))) {
                m_propertiesCache.put(strProperty, strValue); // Add this param
                bChanged = true;
            }
        }
        int iErrorCode = DBConstants.NORMAL_RETURN;
        if (bChanged) {
            // Only change if there is a change of properties (not just a change in the #date line in properties)
            String strProperties = this.propertiesToInternalString(m_propertiesCache);
            Map<String, Object> propertiesSave = m_propertiesCache;
            iErrorCode = this.setString(strProperties, bDisplayOption, iMoveMode);
            m_propertiesCache = propertiesSave; // Zeroed out in setString; restore the cache
        }
        return iErrorCode;
    }
}
public class StoreVersionManager { /** * Inspects the specified versionDir to see if it has been marked as disabled * ( via a . disabled file in the directory ) . If the file is absent , the store is * assumed to be enabled . * @ param versionDir to inspect * @ return true if the specified version is enabled , false otherwise * @ throws IllegalArgumentException if the version does not exist */ private boolean isVersionEnabled ( File versionDir ) throws IllegalArgumentException { } }
if ( ! versionDir . exists ( ) ) { throw new IllegalArgumentException ( "The versionDir " + versionDir . getName ( ) + " does not exist." ) ; } File [ ] relevantFile = versionDir . listFiles ( new FileFilter ( ) { public boolean accept ( File pathName ) { return pathName . getName ( ) . equals ( DISABLED_MARKER_NAME ) ; } } ) ; return relevantFile . length == 0 ;
public class BufferedServletResponseWrapper { /** * headers relates methods */ @ Override public void sendError ( int sc , String msg ) throws IOException { } }
if ( isCommitted ( ) ) { throw new IllegalStateException ( "Cannot set error status - response is already committed" ) ; } notifyBeforeCommit ( ) ; super . sendError ( sc , msg ) ; setCommitted ( ) ;
public class BooleanExpressionParser { /** * Counterpart of { @ link # replaceBooleanStringByIntegerRepresentation } * Checks whether a String is the Integer representation of a Boolean value and replaces it with its Boolean representation * " 1 " - > " true " * " 0 " - > " false " * otherwise - > value * @ param value " 1 " , " 0 " or other string * @ return " true " , " false " or the input value */ private static String replaceIntegerStringByBooleanRepresentation ( final String value ) { } }
if ( value . equals ( "0" ) ) { return FALSE . toString ( ) ; } else if ( value . equals ( "1" ) ) { return TRUE . toString ( ) ; } return value ;
public class Channel {
    /**
     * Register a chaincode event listener. Both chaincodeId pattern AND eventName pattern
     * must match to invoke the chaincodeEventListener.
     *
     * @param chaincodeId Java pattern for chaincode identifier also known as chaincode name.
     * @param eventName Java pattern to match the event name.
     * @param chaincodeEventListener The listener to be invoked if both chaincodeId and eventName pattern matches.
     * @return Handle to be used to unregister the event listener {@link #unregisterChaincodeEventListener(String)}
     * @throws InvalidArgumentException if the channel is shut down or any argument is null
     */
    public String registerChaincodeEventListener(Pattern chaincodeId, Pattern eventName, ChaincodeEventListener chaincodeEventListener) throws InvalidArgumentException {
        if (shutdown) {
            throw new InvalidArgumentException(format("Channel %s has been shutdown.", name));
        }
        if (chaincodeId == null) {
            throw new InvalidArgumentException("The chaincodeId argument may not be null.");
        }
        if (eventName == null) {
            throw new InvalidArgumentException("The eventName argument may not be null.");
        }
        if (chaincodeEventListener == null) {
            throw new InvalidArgumentException("The chaincodeEventListener argument may not be null.");
        }
        ChaincodeEventListenerEntry chaincodeEventListenerEntry = new ChaincodeEventListenerEntry(chaincodeId, eventName, chaincodeEventListener);
        // Lazily start the block-listener processor the first time a chaincode
        // listener is registered; synchronized so only one processor is created.
        synchronized (this) {
            if (null == blh) {
                blh = registerChaincodeListenerProcessor();
            }
        }
        return chaincodeEventListenerEntry.handle;
    }
}
public class MVELEvalBuilder {
    /**
     * Builds and returns an Eval Conditional Element.
     *
     * @param context The current build context
     * @param descr The Eval Descriptor to build the eval conditional element from
     * @param prefixPattern the pattern preceding the eval, if any (not used here)
     * @return the Eval Conditional Element, or {@code null} when compilation
     *         failed (the error is added to the context)
     */
    public RuleConditionElement build(final RuleBuildContext context, final BaseDescr descr, final Pattern prefixPattern) {
        // Remember the type-safety mode so it can be restored in finally.
        boolean typesafe = context.isTypesafe();
        // it must be an EvalDescr
        final EvalDescr evalDescr = (EvalDescr) descr;
        try {
            MVELDialect dialect = (MVELDialect) context.getDialect("mvel");
            Map<String, Declaration> decls = context.getDeclarationResolver().getDeclarations(context.getRule());
            AnalysisResult analysis = context.getDialect().analyzeExpression(context,
                                                                             evalDescr,
                                                                             evalDescr.getContent(),
                                                                             new BoundIdentifiers(DeclarationScopeResolver.getDeclarationClasses(decls), context));
            final BoundIdentifiers usedIdentifiers = analysis.getBoundIdentifiers();
            // Collect the previously bound declarations used by the expression,
            // then sort them into a stable order.
            int i = usedIdentifiers.getDeclrClasses().keySet().size();
            Declaration[] previousDeclarations = new Declaration[i];
            i = 0;
            for (String id : usedIdentifiers.getDeclrClasses().keySet()) {
                previousDeclarations[i++] = decls.get(id);
            }
            Arrays.sort(previousDeclarations, SortDeclarations.instance);
            MVELCompilationUnit unit = dialect.getMVELCompilationUnit((String) evalDescr.getContent(),
                                                                     analysis,
                                                                     previousDeclarations,
                                                                     null,
                                                                     null,
                                                                     context,
                                                                     "drools",
                                                                     KnowledgeHelper.class,
                                                                     false,
                                                                     MVELCompilationUnit.Scope.EXPRESSION);
            final EvalCondition eval = new EvalCondition(previousDeclarations);
            MVELEvalExpression expr = new MVELEvalExpression(unit, dialect.getId());
            // Wrap in a policy-enforcing evaluator when a security policy is active.
            eval.setEvalExpression(KiePolicyHelper.isPolicyEnabled() ? new SafeEvalExpression(expr) : expr);
            MVELDialectRuntimeData data = (MVELDialectRuntimeData) context.getPkg().getDialectRuntimeRegistry().getDialectData("mvel");
            data.addCompileable(eval, expr);
            expr.compile(data, context.getRule());
            return eval;
        } catch (final Exception e) {
            copyErrorLocation(e, evalDescr);
            context.addError(new DescrBuildError(context.getParentDescr(), evalDescr, e, "Unable to build expression for 'eval':" + e.getMessage() + " '" + evalDescr.getContent() + "'"));
            return null;
        } finally {
            context.setTypesafe(typesafe);
        }
    }
}
public class ZipExtensions {
    /**
     * Extract a single zip entry into the given directory, creating any
     * missing parent directories for nested entry names.
     *
     * @param zipFile the zip file
     * @param target the entry to extract
     * @param toDirectory the directory to extract into
     * @throws IOException Signals that an I/O exception has occurred.
     */
    public static void extractZipEntry(final ZipFile zipFile, final ZipEntry target, final File toDirectory) throws IOException {
        final File fileToExtract = new File(toDirectory, target.getName());
        // Create intermediate directories for nested entries (e.g. "a/b/c.txt").
        // The original ignored the mkdirs() result and NPE'd for entries with
        // no parent component; fail explicitly instead.
        final File parent = fileToExtract.getParentFile();
        if (parent != null && !parent.exists() && !parent.mkdirs()) {
            throw new IOException("Could not create directory " + parent);
        }
        try (InputStream is = zipFile.getInputStream(target);
             BufferedInputStream bis = new BufferedInputStream(is);
             FileOutputStream fos = new FileOutputStream(fileToExtract);
             BufferedOutputStream bos = new BufferedOutputStream(fos)) {
            // Chunked copy replaces the byte-at-a-time loop; the redundant
            // catch-and-rethrow of IOException is removed.
            final byte[] buffer = new byte[8192];
            for (int read; (read = bis.read(buffer)) != -1; ) {
                bos.write(buffer, 0, read);
            }
            bos.flush();
        }
    }
}
public class JSON { /** * Write a reconfiguration plan . * @ param plan the plan to write * @ param a the stream to write on . * @ throws IllegalArgumentException if an error occurred while writing the json */ public static void write ( ReconfigurationPlan plan , Appendable a ) { } }
try { ReconfigurationPlanConverter c = new ReconfigurationPlanConverter ( ) ; c . toJSON ( plan ) . writeJSONString ( a ) ; } catch ( IOException | JSONConverterException e ) { throw new IllegalArgumentException ( e ) ; }
public class TitlePaneButtonForegroundPainter { /** * Paint the mouse - over state of the button foreground . * @ param g the Graphics2D context to paint with . * @ param c the button to paint . * @ param width the width to paint . * @ param height the height to paint . */ public void paintHover ( Graphics2D g , JComponent c , int width , int height ) { } }
paint ( g , c , width , height , hoverBorder , hoverCorner , hoverInterior ) ;
public class QueueBuffer { /** * Deletes a message from SQS . Does not return until a confirmation from SQS has been received * @ return never null */ public DeleteMessageResult deleteMessageSync ( DeleteMessageRequest request ) { } }
Future < DeleteMessageResult > future = deleteMessage ( request , null ) ; return waitForFuture ( future ) ;
public class WXBizMsgCrypt {
    /**
     * Encrypts and packages a reply message for the official-account platform.
     * <ol>
     * <li>AES-CBC encrypt the outgoing message</li>
     * <li>Generate the security signature</li>
     * <li>Package the ciphertext and signature into XML</li>
     * </ol>
     *
     * @param replyMsg the message to send back to the user, as an XML string
     * @param timeStamp timestamp; may be self-generated or taken from the URL
     *                  {@code timestamp} parameter (empty string → current time)
     * @param nonce random string; may be self-generated or taken from the URL
     *              {@code nonce} parameter
     * @return ciphertext XML ready to return to the user, containing
     *         msg_signature, timestamp, nonce and encrypt elements
     * @throws AesException on failure; see the exception's error code and message
     * @throws IOException on I/O failure
     */
    public String encryptMsg(String replyMsg, String timeStamp, String nonce) throws AesException, IOException {
        // Encrypt, prefixing the payload with a random string.
        String encrypt = encrypt(getRandomStr(), replyMsg);
        // Generate the security signature, defaulting the timestamp when empty.
        if (timeStamp.equals("")) {
            timeStamp = Long.toString(System.currentTimeMillis());
        }
        String signature = SHA1.getSHA1(token, timeStamp, nonce, encrypt);
        return XMLParser.generate(encrypt, signature, timeStamp, nonce);
    }
}
public class VForDefinition { /** * v - for on an array with just a loop variable and an index : " ( Item item , index ) in myArray " * @ param loopVariablesDefinition The variable definition ( " ( Item item , index ) " above ) * @ param context The context of the parser * @ return true if we managed the case , false otherwise */ private boolean vForVariableAndIndex ( String loopVariablesDefinition , TemplateParserContext context ) { } }
Matcher matcher = VFOR_VARIABLE_AND_INDEX . matcher ( loopVariablesDefinition ) ; if ( matcher . matches ( ) ) { initLoopVariable ( matcher . group ( 1 ) , matcher . group ( 2 ) , context ) ; initIndexVariable ( matcher . group ( 3 ) , context ) ; return true ; } return false ;
public class DepTreeNode { /** * Removes the specified child node * @ param child * The node to remove */ public void remove ( DepTreeNode child ) { } }
if ( children != null ) { DepTreeNode node = children . get ( child . getName ( ) ) ; if ( node == child ) { children . remove ( child . getName ( ) ) ; } } child . setParent ( null ) ;
public class ComputeNodeOperations { /** * Updates the specified user account on the specified compute node . * @ param poolId The ID of the pool that contains the compute node . * @ param nodeId The ID of the compute node where the user account will be updated . * @ param userName The name of the user account to update . * @ param sshPublicKey The SSH public key that can be used for remote login to the compute node . If null , the SSH public key is removed . * @ throws BatchErrorException Exception thrown when an error response is received from the Batch service . * @ throws IOException Exception thrown when there is an error in serialization / deserialization of data sent to / received from the Batch service . */ public void updateComputeNodeUser ( String poolId , String nodeId , String userName , String sshPublicKey ) throws BatchErrorException , IOException { } }
updateComputeNodeUser ( poolId , nodeId , userName , sshPublicKey , ( Iterable < BatchClientBehavior > ) null ) ;
public class CacheConfigurationBuilder { /** * Adds { @ link ExpiryPolicy } configuration to the returned builder . * { @ code ExpiryPolicy } is what controls data freshness in a cache . * @ param expiry the expiry to use * @ return a new builder with the added expiry */ public CacheConfigurationBuilder < K , V > withExpiry ( ExpiryPolicy < ? super K , ? super V > expiry ) { } }
if ( expiry == null ) { throw new NullPointerException ( "Null expiry" ) ; } CacheConfigurationBuilder < K , V > otherBuilder = new CacheConfigurationBuilder < > ( this ) ; otherBuilder . expiry = expiry ; return otherBuilder ;
public class GregorianCalendar {
    /**
     * Computes the fixed date under either the Gregorian or the
     * Julian calendar, using the given year and the specified calendar fields.
     *
     * @param cal the CalendarSystem to be used for the date calculation
     * @param year the normalized year number, with 0 indicating the
     * year 1 BCE, -1 indicating 2 BCE, etc.
     * @param fieldMask the calendar fields to be used for the date calculation
     * @return the fixed date
     * @see Calendar#selectFields
     */
    private long getFixedDate(BaseCalendar cal, int year, int fieldMask) {
        int month = JANUARY;
        if (isFieldSet(fieldMask, MONTH)) {
            // No need to check if MONTH has been set (no isSet(MONTH)
            // call) since its unset value happens to be JANUARY (0).
            month = internalGet(MONTH);

            // If the month is out of range, adjust it into range
            if (month > DECEMBER) {
                year += month / 12;
                month %= 12;
            } else if (month < JANUARY) {
                int[] rem = new int[1];
                year += CalendarUtils.floorDivide(month, 12, rem);
                month = rem[0];
            }
        }

        // Get the fixed date since Jan 1, 1 (Gregorian). We are on
        // the first day of either `month' or January in 'year'.
        long fixedDate = cal.getFixedDate(year, month + 1, 1, cal == gcal ? gdate : null);

        if (isFieldSet(fieldMask, MONTH)) {
            // Month-based calculations
            if (isFieldSet(fieldMask, DAY_OF_MONTH)) {
                // We are on the first day of the month. Just add the
                // offset if DAY_OF_MONTH is set. If the isSet call
                // returns false, that means DAY_OF_MONTH has been
                // selected just because of the selected
                // combination. We don't need to add any since the
                // default value is the 1st.
                if (isSet(DAY_OF_MONTH)) {
                    // To avoid underflow with DAY_OF_MONTH-1, add
                    // DAY_OF_MONTH, then subtract 1.
                    fixedDate += internalGet(DAY_OF_MONTH);
                    fixedDate--;
                }
            } else {
                if (isFieldSet(fieldMask, WEEK_OF_MONTH)) {
                    long firstDayOfWeek = BaseCalendar.getDayOfWeekDateOnOrBefore(fixedDate + 6, getFirstDayOfWeek());
                    // If we have enough days in the first week, then
                    // move to the previous week.
                    if ((firstDayOfWeek - fixedDate) >= getMinimalDaysInFirstWeek()) {
                        firstDayOfWeek -= 7;
                    }
                    if (isFieldSet(fieldMask, DAY_OF_WEEK)) {
                        firstDayOfWeek = BaseCalendar.getDayOfWeekDateOnOrBefore(firstDayOfWeek + 6, internalGet(DAY_OF_WEEK));
                    }
                    // In lenient mode, we treat days of the previous
                    // months as a part of the specified
                    // WEEK_OF_MONTH. See 4633646.
                    fixedDate = firstDayOfWeek + 7 * (internalGet(WEEK_OF_MONTH) - 1);
                } else {
                    int dayOfWeek;
                    if (isFieldSet(fieldMask, DAY_OF_WEEK)) {
                        dayOfWeek = internalGet(DAY_OF_WEEK);
                    } else {
                        dayOfWeek = getFirstDayOfWeek();
                    }
                    // We are basing this on the day-of-week-in-month. The only
                    // trickiness occurs if the day-of-week-in-month is
                    // negative.
                    int dowim;
                    if (isFieldSet(fieldMask, DAY_OF_WEEK_IN_MONTH)) {
                        dowim = internalGet(DAY_OF_WEEK_IN_MONTH);
                    } else {
                        dowim = 1;
                    }
                    if (dowim >= 0) {
                        fixedDate = BaseCalendar.getDayOfWeekDateOnOrBefore(fixedDate + (7 * dowim) - 1, dayOfWeek);
                    } else {
                        // Go to the first day of the next week of
                        // the specified week boundary.
                        int lastDate = monthLength(month, year) + (7 * (dowim + 1));
                        // Then, get the day of week date on or before the last date.
                        fixedDate = BaseCalendar.getDayOfWeekDateOnOrBefore(fixedDate + lastDate - 1, dayOfWeek);
                    }
                }
            }
        } else {
            // Year-based calculations (MONTH not selected).
            if (year == gregorianCutoverYear && cal == gcal
                && fixedDate < gregorianCutoverDate
                && gregorianCutoverYear != gregorianCutoverYearJulian) {
                // January 1 of the year doesn't exist. Use
                // gregorianCutoverDate as the first day of the
                // year.
                fixedDate = gregorianCutoverDate;
            }
            // We are on the first day of the year.
            if (isFieldSet(fieldMask, DAY_OF_YEAR)) {
                // Add the offset, then subtract 1. (Make sure to avoid underflow.)
                fixedDate += internalGet(DAY_OF_YEAR);
                fixedDate--;
            } else {
                long firstDayOfWeek = BaseCalendar.getDayOfWeekDateOnOrBefore(fixedDate + 6, getFirstDayOfWeek());
                // If we have enough days in the first week, then move
                // to the previous week.
                if ((firstDayOfWeek - fixedDate) >= getMinimalDaysInFirstWeek()) {
                    firstDayOfWeek -= 7;
                }
                if (isFieldSet(fieldMask, DAY_OF_WEEK)) {
                    int dayOfWeek = internalGet(DAY_OF_WEEK);
                    if (dayOfWeek != getFirstDayOfWeek()) {
                        firstDayOfWeek = BaseCalendar.getDayOfWeekDateOnOrBefore(firstDayOfWeek + 6, dayOfWeek);
                    }
                }
                fixedDate = firstDayOfWeek + 7 * ((long) internalGet(WEEK_OF_YEAR) - 1);
            }
        }

        return fixedDate;
    }
}
public class InputDescription { /** * Returns the in - application stream names that are mapped to the stream source . * @ param inAppStreamNames * Returns the in - application stream names that are mapped to the stream source . */ public void setInAppStreamNames ( java . util . Collection < String > inAppStreamNames ) { } }
if ( inAppStreamNames == null ) { this . inAppStreamNames = null ; return ; } this . inAppStreamNames = new java . util . ArrayList < String > ( inAppStreamNames ) ;
public class AbstractIntSet { /** * { @ inheritDoc } * < p > This implementation simply counts the elements in the interator . */ @ Override public int size ( ) { } }
// dumb implementation . You should override . int size = 0 ; for ( Interator it = interator ( ) ; ( size < Integer . MAX_VALUE ) && it . hasNext ( ) ; it . nextInt ( ) ) { size ++ ; } return size ;
public class DynamoDBMapperFieldModel { /** * Creates a condition which filters on the specified value . * @ param value The value . * @ return The condition . * @ see com . amazonaws . services . dynamodbv2 . model . ComparisonOperator # EQ * @ see com . amazonaws . services . dynamodbv2 . model . Condition */ public final Condition eq ( final V value ) { } }
return new Condition ( ) . withComparisonOperator ( EQ ) . withAttributeValueList ( convert ( value ) ) ;
public class XmlExporter { /** * Build XML list tag determined by fullname status and ending class phrase * @ return XML list tag * @ see # exportClassEnding * @ see # exportClassFullName */ private < T > String buildClassListTag ( final T t ) { } }
return ( exportClassFullName != null ) ? exportClassFullName : t . getClass ( ) . getSimpleName ( ) + exportClassEnding ;
public class FindDialog { /** * Get the FindDialog for the parent if there is one or creates and returns a new one . * @ param parent the parent Window ( or Frame ) for this FindDialog * @ param modal a boolean indicating whether the FindDialog should ( { @ code true } ) , * or shouldn ' t ( { @ code false } ) be modal . * @ return The existing FindDialog for the parent ( if there is one ) , or a new FindDialog . * @ throws IllegalArgumentException if the { @ code parent } is { @ code null } . * @ since 2.7.0 */ public static FindDialog getDialog ( Window parent , boolean modal ) { } }
if ( parent == null ) { throw new IllegalArgumentException ( "The parent must not be null." ) ; } FindDialog activeDialog = getParentsMap ( ) . get ( parent ) ; if ( activeDialog != null ) { activeDialog . getTxtFind ( ) . requestFocus ( ) ; return activeDialog ; } FindDialog newDialog = new FindDialog ( parent , modal ) ; getParentsMap ( ) . put ( parent , newDialog ) ; newDialog . addWindowListener ( new WindowAdapter ( ) { @ Override public void windowClosed ( WindowEvent e ) { getParentsMap ( ) . remove ( parent ) ; } } ) ; return newDialog ;
public class Mtp2 {
    /**
     * Handles received data: runs the HDLC receive state machine over the
     * buffer, assembling frames and dispatching complete, CRC-valid ones.
     *
     * @param buff the buffer which contains received data.
     * @param len the number of received bytes.
     */
    private void processRx(byte[] buff, int len) {
        int i = 0;
        // start HDLC alg
        while (i < len) {
            // Keep the decoder's bit reservoir topped up (at most 24 bits).
            while (rxState.bits <= 24 && i < len) {
                int b = buff[i++] & 0xff;
                hdlc.fasthdlc_rx_load_nocheck(rxState, b);
                if (rxState.state == 0) {
                    // octet counting mode
                    nCount = (nCount + 1) % 16;
                    if (nCount == 0) {
                        countError("on receive");
                    }
                }
            }
            int res = hdlc.fasthdlc_rx_run(rxState);
            switch (res) {
                case FastHDLC.RETURN_COMPLETE_FLAG:
                    // frame received and we count it
                    countFrame();
                    // checking length and CRC of the received frame
                    if (rxFrame.len == 0) {
                        // zero-length frame between flags: intentionally ignored
                    } else if (rxFrame.len < 5) {
                        // frame must be at least 5 bytes in length
                        countError("hdlc error, frame LI<5");
                    } else if (rxCRC == 0xF0B8) {
                        // good frame received
                        processFrame();
                    } else {
                        countError("hdlc complete, wrong terms.");
                    }
                    rxFrame.len = 0;
                    rxCRC = 0xffff;
                    break;
                case FastHDLC.RETURN_DISCARD_FLAG:
                    // looking for next flag
                    rxCRC = 0xffff;
                    rxFrame.len = 0;
                    // eCount = 0;
                    countFrame();
                    // "on receive, hdlc discard"
                    countError("hdlc discard.");
                    break;
                case FastHDLC.RETURN_EMPTY_FLAG:
                    rxFrame.len = 0;
                    break;
                default:
                    // res is a decoded data byte; cap the frame at 279 bytes.
                    if (rxFrame.len > 279) {
                        rxState.state = 0;
                        rxFrame.len = 0;
                        rxCRC = 0xffff;
                        eCount = 0;
                        countFrame();
                        countError("Overlong MTP frame, entering octet mode on link '" + name + "'");
                    } else {
                        rxFrame.frame[rxFrame.len++] = (byte) res;
                        rxCRC = PPP_FCS(rxCRC, res & 0xff);
                    }
            }
        }
    }
}
public class BeanUtils {
    /**
     * Get property value, loads nested properties.
     *
     * Walks the property path one segment at a time, resolving each segment
     * against the current object: map lookup, numeric index, collection/array
     * fan-out, or reflective field access.
     *
     * @param root       root object to start from
     * @param properties properties forming a path
     * @return value at path, or {@code null} if any intermediate value or field is missing
     */
    public static Object getPropertyValue(final Object root, final String... properties) {
        Object object = root;
        for (String property : properties) {
            // a null anywhere along the path short-circuits to null
            if (object == null) {
                return null;
            }
            // the special segment "this" only has an effect on maps that carry
            // an explicit "this" entry; otherwise it is skipped
            if (property.equals("this")) {
                if (!(object instanceof Map)) {
                    continue;
                } else {
                    Object aThis = ((Map) object).get("this");
                    if (aThis != null) {
                        object = aThis;
                        continue;
                    } else {
                        continue;
                    }
                }
            }
            // plain maps resolve the segment as a key lookup
            if (object instanceof Map) {
                object = ((Map) object).get(property);
                continue;
            }
            char c = property.charAt(0);
            if (CharScanner.isDigit(c)) {
                /* We can index numbers and names. */
                object = idx(object, StringScanner.parseInt(property));
            } else {
                // collections and arrays fan the segment out over their elements
                if (object instanceof Collection) {
                    object = _getFieldValuesFromCollectionOrArray(object, property);
                    continue;
                } else if (isArray(object)) {
                    // convert the array to a List first, then fan out
                    Iterator iter = Conversions.iterator(object);
                    List list = Lists.list(iter);
                    object = _getFieldValuesFromCollectionOrArray(list, property);
                    continue;
                }
                // fall back to reflective field/property access on a plain bean
                Map<String, FieldAccess> fields = getPropertyFieldAccessMap(object.getClass());
                FieldAccess field = fields.get(property);
                if (field == null) {
                    return null;
                }
                object = field.getValue(object);
            }
        }
        return object;
    }
}
public class ParserUtils { /** * Returns the block comment body stripped of leading whitespace and * in all but the first line , * and the block comments delimiters removed . * @ param blockComment a C - style block comment * @ return the comment ' s body with all but the first line specially trimmed */ public static String getTrimmedBlockCommentContent ( final String blockComment ) { } }
final String content = blockComment . substring ( 0 , blockComment . length ( ) - 2 ) ; return BLOCK_COMMENT_PROTO_STRIPPER . matcher ( content ) . replaceAll ( "" ) . substring ( 2 ) ;
public class HomographyInducedStereo3Pts { /** * Fill rows of M with observations from image 1 */ private void fillM ( Point2D_F64 x1 , Point2D_F64 x2 , Point2D_F64 x3 ) { } }
M . data [ 0 ] = x1 . x ; M . data [ 1 ] = x1 . y ; M . data [ 2 ] = 1 ; M . data [ 3 ] = x2 . x ; M . data [ 4 ] = x2 . y ; M . data [ 5 ] = 1 ; M . data [ 6 ] = x3 . x ; M . data [ 7 ] = x3 . y ; M . data [ 8 ] = 1 ;
public class UicStatsAsHtml {
    /**
     * Writes out the given statistics in HTML format.
     *
     * Emits a definition list with the overall UIC figures, then one
     * header/profile/footer table section per analysed component.
     *
     * @param writer the writer to write to.
     * @param stats  the stats to write.
     */
    public static void write(final PrintWriter writer, final UicStats stats) {
        // Overall statistics as a definition list.
        writer.println("<dl>");
        writer.print("<dt>Total root wcomponents found in UIC</dt>");
        writer.println("<dd>" + stats.getRootWCs().size() + "</dd>");
        writer.print("<dt>Size of UIC (by serialization)</dt>");
        writer.println("<dd>" + stats.getOverallSerializedSize() + "</dd>");
        writer.print("<dt>UI</dt>");
        writer.println("<dd>" + stats.getUI().getClass().getName() + "</dd>");
        writer.println("</dl>");
        // One table section per analysed component tree.
        for (Iterator<WComponent> it = stats.getWCsAnalysed(); it.hasNext();) {
            WComponent comp = it.next();
            Map<WComponent, UicStats.Stat> treeStats = stats.getWCTreeStats(comp);
            writer.println("<br /><strong>Analysed component:</strong> " + comp);
            writer.println("<br /><strong>Number of components in tree:</strong> " + treeStats.size());
            writeHeader(writer);
            writeProfileForTree(writer, treeStats);
            writeFooter(writer);
        }
    }
}
public class ExecutorsUtils {
    /**
     * Shutdown an {@link ExecutorService} gradually, first disabling new task submissions
     * and later cancelling existing tasks.
     *
     * The implementation is based on the implementation of Guava's
     * MoreExecutors.shutdownAndAwaitTermination, which is available since version 17.0.
     * We cannot use Guava version 17.0 or after directly, however, as it cannot be used
     * with Hadoop 2.6.0 or after due to the issue reported in HADOOP-10961.
     *
     * @param executorService the {@link ExecutorService} to shutdown
     * @param logger  an {@link Optional} wrapping the {@link Logger} that is used to log
     *                metadata of the executorService if it cannot shutdown all its threads
     * @param timeout the maximum time to wait for the {@code ExecutorService} to terminate
     * @param unit    the time unit of the timeout argument
     */
    public static void shutdownExecutorService(ExecutorService executorService, Optional<Logger> logger, long timeout, TimeUnit unit) {
        Preconditions.checkNotNull(unit);
        // Disable new tasks from being submitted
        executorService.shutdown();
        if (logger.isPresent()) {
            logger.get().info("Attempting to shutdown ExecutorService: " + executorService);
        }
        try {
            // Half the budget goes to a graceful wait, half to the forced phase.
            long halfTimeoutNanos = TimeUnit.NANOSECONDS.convert(timeout, unit) / 2;
            // Wait for half the duration of the timeout for existing tasks to terminate
            if (!executorService.awaitTermination(halfTimeoutNanos, TimeUnit.NANOSECONDS)) {
                // Cancel currently executing tasks
                executorService.shutdownNow();
                if (logger.isPresent()) {
                    logger.get().info("Shutdown un-successful, attempting shutdownNow of ExecutorService: " + executorService);
                }
                // Wait the other half of the timeout for tasks to respond to being cancelled
                if (!executorService.awaitTermination(halfTimeoutNanos, TimeUnit.NANOSECONDS) && logger.isPresent()) {
                    logger.get().error("Could not shutdown all threads in ExecutorService: " + executorService);
                }
            } else if (logger.isPresent()) {
                logger.get().info("Successfully shutdown ExecutorService: " + executorService);
            }
        } catch (InterruptedException ie) {
            // Preserve interrupt status
            Thread.currentThread().interrupt();
            // (Re-)Cancel if current thread also interrupted
            executorService.shutdownNow();
            if (logger.isPresent()) {
                logger.get().info("Attempting to shutdownNow ExecutorService: " + executorService);
            }
        }
    }
}
public class JSONUtils { /** * Transforms the string into a valid Java Identifier . < br > * The default strategy is JavaIdentifierTransformer . NOOP * @ throws JSONException if the string can not be transformed . */ public static String convertToJavaIdentifier ( String key , JsonConfig jsonConfig ) { } }
try { return jsonConfig . getJavaIdentifierTransformer ( ) . transformToJavaIdentifier ( key ) ; } catch ( JSONException jsone ) { throw jsone ; } catch ( Exception e ) { throw new JSONException ( e ) ; }
public class RRFedNonFedBudgetV1_1Generator {
    /**
     * This method gets KeyPersons details such as Name, ProjectRole, Compensation,
     * TotalFundForAttachedKeyPersons, TotalFundForKeyPersons and AttachedKeyPersons
     * based on BudgetPeriodInfo for the RRFedNonFedBudget.
     *
     * @param periodInfo (BudgetPeriodInfo) budget period entry.
     * @return KeyPersons details corresponding to the BudgetPeriodInfo object.
     */
    private KeyPersons getKeyPersons(BudgetPeriodDto periodInfo) {
        KeyPersons keyPersons = KeyPersons.Factory.newInstance();
        if (periodInfo != null) {
            if (periodInfo.getKeyPersons() != null) {
                // Collect the key persons to report: PD/PIs always, others only
                // when they have a personnel budget in this period.
                List<KeyPersonDataType> keyPersonList = new ArrayList<>();
                int keyPersonCount = 0;
                for (KeyPersonDto keyPerson : periodInfo.getKeyPersons()) {
                    if (keyPerson.getRole().equals(NID_PD_PI) || hasPersonnelBudget(keyPerson, periodInfo.getBudgetPeriod())) {
                        KeyPersonDataType keyPersonDataType = KeyPersonDataType.Factory.newInstance();
                        keyPersonDataType.setName(globLibV20Generator.getHumanNameDataType(keyPerson));
                        // prefer the explicit key-person role over the generic role
                        if (keyPerson.getKeyPersonRole() != null) {
                            keyPersonDataType.setProjectRole(keyPerson.getKeyPersonRole());
                        } else {
                            keyPersonDataType.setProjectRole(keyPerson.getRole());
                        }
                        keyPersonDataType.setCompensation(getCompensation(keyPerson, periodInfo.getBudgetPeriod()));
                        keyPersonList.add(keyPersonDataType);
                        keyPersonCount++;
                        LOG.info("keyPersonCount:" + keyPersonCount);
                    }
                }
                keyPersons.setKeyPersonArray(keyPersonList.toArray(new KeyPersonDataType[0]));
            }
            // Summary of federal / non-federal funds for key persons.
            // NOTE(review): federal-only totals (non-federal null) never set
            // TotalFedNonFedSummary here - presumably intended; confirm.
            SummaryDataType summary = SummaryDataType.Factory.newInstance();
            if (periodInfo.getTotalFundsKeyPersons() != null) {
                summary.setFederalSummary(periodInfo.getTotalFundsKeyPersons().bigDecimalValue());
            }
            if (periodInfo.getTotalNonFundsKeyPersons() != null) {
                summary.setNonFederalSummary(periodInfo.getTotalNonFundsKeyPersons().bigDecimalValue());
                if (periodInfo.getTotalFundsKeyPersons() != null) {
                    summary.setTotalFedNonFedSummary(periodInfo.getTotalFundsKeyPersons().add(periodInfo.getTotalNonFundsKeyPersons()).bigDecimalValue());
                } else {
                    summary.setTotalFedNonFedSummary(periodInfo.getTotalNonFundsKeyPersons().bigDecimalValue());
                }
            }
            keyPersons.setTotalFundForKeyPersons(summary);
            // Aggregate totals over the "extra" (attached) key persons.
            SummaryDataType summaryAttachedKey = SummaryDataType.Factory.newInstance();
            BigDecimal totalFederalSummary = BigDecimal.ZERO;
            BigDecimal totalNonFederalSummary = BigDecimal.ZERO;
            for (KeyPersonDto keyPersonInfo : periodInfo.getExtraKeyPersons()) {
                totalFederalSummary = totalFederalSummary.add(keyPersonInfo.getFundsRequested().bigDecimalValue());
                totalNonFederalSummary = totalNonFederalSummary.add(keyPersonInfo.getNonFundsRequested().bigDecimalValue());
            }
            summaryAttachedKey.setFederalSummary(totalFederalSummary);
            summaryAttachedKey.setNonFederalSummary(totalNonFederalSummary);
            summaryAttachedKey.setTotalFedNonFedSummary(totalFederalSummary.add(totalNonFederalSummary));
            keyPersons.setTotalFundForAttachedKeyPersons(summaryAttachedKey);
        }
        // Persist the extra key persons narrative and, if present, attach its file.
        NarrativeContract extraKeyPersonNarr = saveExtraKeyPersons(periodInfo);
        AttachedFileDataType attachedFileDataType = null;
        if (extraKeyPersonNarr != null) {
            attachedFileDataType = getAttachedFileType(extraKeyPersonNarr);
            if (attachedFileDataType != null) {
                keyPersons.setAttachedKeyPersons(attachedFileDataType);
            }
        }
        return keyPersons;
    }
}
public class JFapByteBuffer { /** * This method is called just before this buffer is due to be transmitted by the JFap channel . * When calling this method the underlying byte buffer is prepared by setting the correct limits * and the buffer is added to a List that is returned . Once this method is called the buffer * may not be used again and any attempt to modify the data will cause a RuntimeException to be * thrown . This method can only be called once . * @ return Returns a List containing one or more WsByteBuffer ' s with the data in it . */ public synchronized WsByteBuffer [ ] getBuffersForTransmission ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getBufferForTransmission" ) ; // Ensure the buffer has been prepared checkNotValid ( ) ; WsByteBuffer [ ] bufferArray = new WsByteBuffer [ dataList . size ( ) ] ; for ( int x = 0 ; x < dataList . size ( ) ; x ++ ) { bufferArray [ x ] = dataList . get ( x ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getBufferForTransmission" , dataList ) ; return bufferArray ;
public class CommunicationSeverityAnalyser { /** * This method evaluates the severity of nodes / links within a supplied set of * communication summary stats . * @ param nodes The nodes for the communication summary stats */ public void evaluateCommunicationSummarySeverity ( Collection < CommunicationSummaryStatistics > nodes ) { } }
long max = 0 ; Map < String , CommunicationSummaryStatistics > nodeMap = new HashMap < String , CommunicationSummaryStatistics > ( ) ; for ( CommunicationSummaryStatistics css : nodes ) { // Calculate maximum average duration over the list of nodes if ( css . getAverageDuration ( ) > max ) { max = css . getAverageDuration ( ) ; } nodeMap . put ( css . getId ( ) , css ) ; } for ( CommunicationSummaryStatistics css : nodes ) { deriveSeverity ( css , max , nodeMap ) ; }
public class MediaRow { /** * Set the data from an image with optional quality * @ param image * image * @ param imageFormat * image format * @ param quality * null or quality between 0.0 and 1.0 * @ throws IOException * upon failure * @ since 3.2.0 */ public void setData ( BufferedImage image , String imageFormat , Float quality ) throws IOException { } }
setData ( ImageUtils . writeImageToBytes ( image , imageFormat , quality ) ) ;
public class Statement { /** * Registers an input parameter into the statement . * @ param key parameter key . * @ param value parameter initial value ( optional ) . * @ param type parameter SQL type code ( optional ) . * @ throws SQLException if error occurs while registering parameter . * @ since v1.0 */ public void in ( Object key , Object value , Integer type ) throws SQLException { } }
if ( key instanceof Integer ) { if ( type == null ) { base . setObject ( ( Integer ) key , value ) ; } else { base . setObject ( ( Integer ) key , value , type ) ; } } else { if ( type == null ) { base . setObject ( ( String ) key , value ) ; } else { base . setObject ( ( String ) key , value , type ) ; } }
public class GraphicalModel { /** * The point here is to allow us to save a copy of the model with a current set of factors and metadata mappings , * which can come in super handy with gameplaying applications . The cloned model doesn ' t instantiate the feature * thunks inside factors , those are just taken over individually . * @ return a clone */ public GraphicalModel cloneModel ( ) { } }
GraphicalModel clone = new GraphicalModel ( ) ; clone . modelMetaData . putAll ( modelMetaData ) ; for ( int i = 0 ; i < variableMetaData . size ( ) ; i ++ ) { if ( variableMetaData . get ( i ) != null ) { clone . getVariableMetaDataByReference ( i ) . putAll ( variableMetaData . get ( i ) ) ; } } for ( Factor f : factors ) { clone . factors . add ( f . cloneFactor ( ) ) ; } return clone ;
public class ShareSheetStyle { /** * Include items from the ShareSheet by package name Array . If only " com . Slack " * is included , then only preferred sharing options + Slack * will be displayed , for example . * @ param packageName { @ link String [ ] } package name to be included . * @ return this Builder object to allow for chaining of calls to set methods . */ public ShareSheetStyle includeInShareSheet ( @ NonNull String [ ] packageName ) { } }
includeInShareSheet . addAll ( Arrays . asList ( packageName ) ) ; return this ;
public class ConnectionFactoryValidator {
    /**
     * Utility method that attempts to construct a ConnectionSpec impl of the specified name,
     * which might or might not exist in the resource adapter.
     *
     * Loads the class via the connection factory's own class loader and populates it
     * reflectively via setPassword/setUserName. Any failure (missing class, missing
     * setters, instantiation error) is swallowed deliberately - this is a best-effort
     * probe, hence the {@code @FFDCIgnore}.
     *
     * @param cciConFactory    the connection factory class
     * @param conSpecClassName possible connection spec impl class name to try
     * @param userName         user name to set on the connection spec
     * @param password         password to set on the connection spec
     * @return ConnectionSpec instance if successful. Otherwise null.
     */
    @FFDCIgnore(Throwable.class)
    private ConnectionSpec createConnectionSpec(ConnectionFactory cciConFactory, String conSpecClassName, String userName, @Sensitive String password) {
        try {
            // Use the adapter's class loader so adapter-private spec classes resolve.
            @SuppressWarnings("unchecked")
            Class<ConnectionSpec> conSpecClass = (Class<ConnectionSpec>) cciConFactory.getClass().getClassLoader().loadClass(conSpecClassName);
            ConnectionSpec conSpec = conSpecClass.newInstance();
            // Populate credentials reflectively; throws if the setters are absent.
            conSpecClass.getMethod("setPassword", String.class).invoke(conSpec, password);
            conSpecClass.getMethod("setUserName", String.class).invoke(conSpec, userName);
            return conSpec;
        } catch (Throwable x) {
            // Expected for adapters without this spec class; only trace the message
            // (not the stack) and report failure via null.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                Tr.debug(this, tc, "Unable to create or populate ConnectionSpec", x.getMessage());
            return null;
        }
    }
}
public class ComponentFilter {
    /**
     * Does the mapping respect the component grouping specified by the query.
     *
     * Builds a bidirectional correspondence between query components and target
     * components and rejects the mapping as soon as either direction maps one
     * component onto two different partners.
     *
     * @param mapping a permutation of the query vertices
     * @return the mapping preserves the specified grouping
     */
    @Override
    public boolean apply(final int[] mapping) {
        // no grouping required
        if (queryComponents == null)
            return true;

        // bidirectional map of query/target components, last index
        // of query components holds the count
        int[] usedBy = new int[targetComponents[targetComponents.length - 1] + 1];
        int[] usedIn = new int[queryComponents[queryComponents.length - 1] + 1];

        // verify we don't have any collisions
        for (int v = 0; v < mapping.length; v++) {
            // component 0 means "unconstrained" vertex - skip it
            if (queryComponents[v] == 0)
                continue;
            int w = mapping[v];
            int queryComponent = queryComponents[v];
            int targetComponent = targetComponents[w];
            // is the target component already used by a query component?
            if (usedBy[targetComponent] == 0)
                usedBy[targetComponent] = queryComponent;
            else if (usedBy[targetComponent] != queryComponent)
                return false;
            // is the query component already used in a target component?
            if (usedIn[queryComponent] == 0)
                usedIn[queryComponent] = targetComponent;
            else if (usedIn[queryComponent] != targetComponent)
                return false;
        }
        return true;
    }
}
public class CustomerAccountUrl { /** * Get Resource Url for ResetPassword * @ return String Resource Url */ public static MozuUrl resetPasswordUrl ( ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/customer/accounts/Reset-Password" ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class SystemPropertiesConfiguration { /** * Looks for System properties with the names defined in { @ link org . greencheek . related . util . config . ConfigurationConstants } * Parsing the resulting values in to appropriate types . If the system property is not defined an * entry in the return map ( with the same name as the constant in ConfigurationContants ) , is not created . * If a system property is defined an entry in the map will exist . * @ return */ public static Map < String , Object > parseSystemProperties ( ) { } }
Map < String , String > stringSystemProperties = readSystemProperties ( ) ; return parseProperties ( stringSystemProperties ) ;
public class ChromosomeMappingTools {
    /**
     * Converts the genetic coordinate to the position of the nucleotide on the mRNA
     * sequence for a gene living on the forward DNA strand.
     *
     * @param chromPos   The genetic coordinate on a chromosome
     * @param exonStarts The list holding the genetic coordinates pointing to the start
     *                   positions of the exons (including UTR regions)
     * @param exonEnds   The list holding the genetic coordinates pointing to the end
     *                   positions of the exons (including UTR regions)
     * @param cdsStart   The start position of a coding region
     * @param cdsEnd     The end position of a coding region
     * @return the position of the nucleotide base on the mRNA sequence corresponding to
     *         the input genetic coordinate (base 1), or -1 if the coordinate is not in
     *         a coding region
     * @author Yana Valasatava
     */
    public static int getCDSPosForward(int chromPos, List<Integer> exonStarts, List<Integer> exonEnds, int cdsStart, int cdsEnd) {
        // the genetic coordinate is not in a coding region
        if ((chromPos < (cdsStart + base)) || (chromPos > (cdsEnd + base))) {
            logger.debug("The " + format(chromPos) + " position is not in a coding region");
            return -1;
        }
        logger.debug("looking for CDS position for " + format(chromPos));
        // map the genetic coordinates of the coding region onto the forward strand
        List<Range<Integer>> cdsRegions = getCDSRegions(exonStarts, exonEnds, cdsStart, cdsEnd);
        // walk the CDS regions, accumulating coding length until chromPos is reached
        int codingLength = 0;
        int lengthExon = 0;
        for (Range<Integer> range : cdsRegions) {
            int start = range.lowerEndpoint();
            int end = range.upperEndpoint();
            lengthExon = end - start;
            // NOTE(review): the lower bound is shifted by 'base' but the upper bound
            // is not - presumably intentional for the coordinate convention; confirm.
            if (start + base <= chromPos && end >= chromPos) {
                // position falls inside this region: offset within it plus prior length
                return codingLength + (chromPos - start);
            } else {
                codingLength += lengthExon;
            }
        }
        return -1;
    }
}
public class Expressions { /** * Creates an StringIsLessThanOrEqual expression from the given expression and constant . * @ param left The left expression . * @ param constant The constant to compare to ( must be a String ) . * @ throws IllegalArgumentException If the constant is not a String * @ return A new is less than binary expression . */ public static StringIsLessThanOrEqual isLessThanOrEqual ( StringExpression left , Object constant ) { } }
if ( ! ( constant instanceof String ) ) throw new IllegalArgumentException ( "constant is not a String" ) ; return new StringIsLessThanOrEqual ( left , constant ( ( String ) constant ) ) ;
public class IdemixUtils { /** * ecpToBytes turns an ECP into a byte array * @ param e the ECP to turn into bytes * @ return a byte array representation of the ECP */ static byte [ ] ecpToBytes ( ECP e ) { } }
byte [ ] ret = new byte [ 2 * FIELD_BYTES + 1 ] ; e . toBytes ( ret , false ) ; return ret ;
public class FDistort { /** * All this does is set the references to the images . Nothing else is changed and its up to the * user to correctly update everything else . * If called the first time you need to do the following * < pre > * 1 ) specify the interpolation method * 2 ) specify the transform * 3 ) specify the border * < / pre > * If called again and the image shape has changed you need to do the following : * < pre > * 1 ) Update the transform * < / pre > */ public FDistort setRefs ( ImageBase input , ImageBase output ) { } }
this . input = input ; this . output = output ; inputType = input . getImageType ( ) ; return this ;
public class CmsXMLSearchConfigurationParser { /** * Helper to read an optional String value . * @ param path The XML path of the element to read . * @ return The String value stored in the XML , or < code > null < / code > if the value could not be read . */ protected String parseOptionalStringValue ( final String path ) { } }
final I_CmsXmlContentValue value = m_xml . getValue ( path , m_locale ) ; if ( value == null ) { return null ; } else { return value . getStringValue ( null ) ; }
public class AWSOpsWorksCMClient { /** * Lists all configuration management servers that are identified with your account . Only the stored results from * Amazon DynamoDB are returned . AWS OpsWorks CM does not query other services . * This operation is synchronous . * A < code > ResourceNotFoundException < / code > is thrown when the server does not exist . A * < code > ValidationException < / code > is raised when parameters of the request are not valid . * @ param describeServersRequest * @ return Result of the DescribeServers operation returned by the service . * @ throws ValidationException * One or more of the provided request parameters are not valid . * @ throws ResourceNotFoundException * The requested resource does not exist , or access was denied . * @ throws InvalidNextTokenException * This occurs when the provided nextToken is not valid . * @ sample AWSOpsWorksCM . DescribeServers * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworkscm - 2016-11-01 / DescribeServers " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeServersResult describeServers ( DescribeServersRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeServers ( request ) ;
public class RouteFiltersInner {
    /**
     * Gets all route filters in a resource group.
     *
     * Delegates to the ServiceResponse-returning variant and unwraps each emitted
     * response to its page body.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;RouteFilterInner&gt; object
     */
    public Observable<Page<RouteFilterInner>> listByResourceGroupAsync(final String resourceGroupName) {
        return listByResourceGroupWithServiceResponseAsync(resourceGroupName).map(new Func1<ServiceResponse<Page<RouteFilterInner>>, Page<RouteFilterInner>>() {
            @Override
            public Page<RouteFilterInner> call(ServiceResponse<Page<RouteFilterInner>> response) {
                // strip the service-response wrapper, keeping only the page payload
                return response.body();
            }
        });
    }
}
public class Messenger { /** * Starting phone auth * @ param phone phone for authentication * @ return promise of AuthStartRes */ @ NotNull @ ObjectiveCName ( "doStartAuthWithPhone:" ) public Promise < AuthStartRes > doStartPhoneAuth ( long phone ) { } }
return modules . getAuthModule ( ) . doStartPhoneAuth ( phone ) ;
public class PlaceVisitor { /** * Visit an Attributes . Look at Attributes to find Places . * @ see GedObjectVisitor # visit ( Attribute ) */ @ Override public void visit ( final Attribute attribute ) { } }
for ( final GedObject gob : attribute . getAttributes ( ) ) { gob . accept ( this ) ; }
public class RxInstrumentedWrappers { /** * Wrap a observer . * @ param downstream The downstream observer * @ param instrumentations The instrumentations * @ param < T > The type * @ return The wrapped subscriber */ static < T > MaybeObserver < T > wrap ( MaybeObserver < T > downstream , List < RunnableInstrumenter > instrumentations ) { } }
return new RxInstrumentedMaybeObserver < > ( downstream , instrumentations ) ;
public class EnvVars { /** * Takes a string that looks like " a = b " and adds that to this map . */ public void addLine ( String line ) { } }
int sep = line . indexOf ( '=' ) ; if ( sep > 0 ) { put ( line . substring ( 0 , sep ) , line . substring ( sep + 1 ) ) ; }
public class AmazonKinesisFirehoseClient { /** * Writes a single data record into an Amazon Kinesis Data Firehose delivery stream . To write multiple data records * into a delivery stream , use < a > PutRecordBatch < / a > . Applications using these operations are referred to as * producers . * By default , each delivery stream can take in up to 2,000 transactions per second , 5,000 records per second , or 5 * MB per second . If you use < a > PutRecord < / a > and < a > PutRecordBatch < / a > , the limits are an aggregate across these * two operations for each delivery stream . For more information about limits and how to request an increase , see < a * href = " http : / / docs . aws . amazon . com / firehose / latest / dev / limits . html " > Amazon Kinesis Data Firehose Limits < / a > . * You must specify the name of the delivery stream and the data record when using < a > PutRecord < / a > . The data record * consists of a data blob that can be up to 1,000 KB in size , and any kind of data . For example , it can be a * segment from a log file , geographic location data , website clickstream data , and so on . * Kinesis Data Firehose buffers records before delivering them to the destination . To disambiguate the data blobs * at the destination , a common solution is to use delimiters in the data , such as a newline ( < code > \ n < / code > ) or * some other character unique within the data . This allows the consumer application to parse individual data items * when reading the data from the destination . * The < code > PutRecord < / code > operation returns a < code > RecordId < / code > , which is a unique string assigned to each * record . Producer applications can use this ID for purposes such as auditability and investigation . * If the < code > PutRecord < / code > operation throws a < code > ServiceUnavailableException < / code > , back off and retry . 
If * the exception persists , it is possible that the throughput limits have been exceeded for the delivery stream . * Data records sent to Kinesis Data Firehose are stored for 24 hours from the time they are added to a delivery * stream as it tries to send the records to the destination . If the destination is unreachable for more than 24 * hours , the data is no longer available . * < important > * Don ' t concatenate two or more base64 strings to form the data fields of your records . Instead , concatenate the * raw data , then perform base64 encoding . * < / important > * @ param putRecordRequest * @ return Result of the PutRecord operation returned by the service . * @ throws ResourceNotFoundException * The specified resource could not be found . * @ throws InvalidArgumentException * The specified input parameter has a value that is not valid . * @ throws ServiceUnavailableException * The service is unavailable . Back off and retry the operation . If you continue to see the exception , * throughput limits for the delivery stream may have been exceeded . For more information about limits and * how to request an increase , see < a * href = " http : / / docs . aws . amazon . com / firehose / latest / dev / limits . html " > Amazon Kinesis Data Firehose * Limits < / a > . * @ sample AmazonKinesisFirehose . PutRecord * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / firehose - 2015-08-04 / PutRecord " target = " _ top " > AWS API * Documentation < / a > */ @ Override public PutRecordResult putRecord ( PutRecordRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutRecord ( request ) ;
public class DbRemoteConfigLoader { /** * 加载有变动的adapter配置 */ private void loadModifiedAdapterConfigs ( ) { } }
Map < String , ConfigItem > remoteConfigStatus = new HashMap < > ( ) ; String sql = "select id, category, name, modified_time from canal_adapter_config" ; try ( Connection conn = dataSource . getConnection ( ) ; Statement stmt = conn . createStatement ( ) ; ResultSet rs = stmt . executeQuery ( sql ) ) { while ( rs . next ( ) ) { ConfigItem configItem = new ConfigItem ( ) ; configItem . setId ( rs . getLong ( "id" ) ) ; configItem . setCategory ( rs . getString ( "category" ) ) ; configItem . setName ( rs . getString ( "name" ) ) ; configItem . setModifiedTime ( rs . getTimestamp ( "modified_time" ) . getTime ( ) ) ; remoteConfigStatus . put ( configItem . getCategory ( ) + "/" + configItem . getName ( ) , configItem ) ; } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } if ( ! remoteConfigStatus . isEmpty ( ) ) { List < Long > changedIds = new ArrayList < > ( ) ; for ( ConfigItem remoteConfigStat : remoteConfigStatus . values ( ) ) { ConfigItem currentConfig = remoteAdapterConfigs . get ( remoteConfigStat . getCategory ( ) + "/" + remoteConfigStat . getName ( ) ) ; if ( currentConfig == null ) { // 新增 changedIds . add ( remoteConfigStat . getId ( ) ) ; } else { // 修改 if ( currentConfig . getModifiedTime ( ) != remoteConfigStat . getModifiedTime ( ) ) { changedIds . add ( remoteConfigStat . getId ( ) ) ; } } } if ( ! changedIds . isEmpty ( ) ) { String contentsSql = "select id, category, name, content, modified_time from canal_adapter_config where id in (" + Joiner . on ( "," ) . join ( changedIds ) + ")" ; try ( Connection conn = dataSource . getConnection ( ) ; Statement stmt = conn . createStatement ( ) ; ResultSet rs = stmt . executeQuery ( contentsSql ) ) { while ( rs . next ( ) ) { ConfigItem configItemNew = new ConfigItem ( ) ; configItemNew . setId ( rs . getLong ( "id" ) ) ; configItemNew . setCategory ( rs . getString ( "category" ) ) ; configItemNew . setName ( rs . getString ( "name" ) ) ; configItemNew . setContent ( rs . 
getString ( "content" ) ) ; configItemNew . setModifiedTime ( rs . getTimestamp ( "modified_time" ) . getTime ( ) ) ; remoteAdapterConfigs . put ( configItemNew . getCategory ( ) + "/" + configItemNew . getName ( ) , configItemNew ) ; remoteAdapterMonitor . onModify ( configItemNew ) ; } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } } for ( ConfigItem configItem : remoteAdapterConfigs . values ( ) ) { if ( ! remoteConfigStatus . containsKey ( configItem . getCategory ( ) + "/" + configItem . getName ( ) ) ) { // 删除 remoteAdapterConfigs . remove ( configItem . getCategory ( ) + "/" + configItem . getName ( ) ) ; remoteAdapterMonitor . onDelete ( configItem . getCategory ( ) + "/" + configItem . getName ( ) ) ; } }
public class StatementAnnotationMapTable { /** * { @ inheritDoc } */ @ Override protected void _from ( ObjectInput in ) throws IOException , ClassNotFoundException { } }
final int size = in . readInt ( ) ; for ( int i = 0 ; i < size ; i ++ ) { final int statementIndex = in . readInt ( ) ; final int annotationPairsSize = in . readInt ( ) ; Set < AnnotationPair > annotationPairs = new HashSet < AnnotationPair > ( annotationPairsSize ) ; for ( int j = 0 ; j < annotationPairsSize ; ++ j ) { AnnotationPair pair = new AnnotationPair ( ) ; pair . readExternal ( in ) ; annotationPairs . add ( pair ) ; } addStatementAnnotation ( statementIndex , annotationPairs ) ; }
public class DfsTask { /** * Create the appropriate output properties with their respective output , * restore System . out , System . err and release any resources from created * ClassLoaders to aid garbage collection . */ protected void popContext ( ) { } }
// write output to property , if applicable if ( outprop != null && ! System . out . checkError ( ) ) getProject ( ) . setNewProperty ( outprop , out . toString ( ) ) ; if ( out != err && errprop != null && ! System . err . checkError ( ) ) getProject ( ) . setNewProperty ( errprop , err . toString ( ) ) ; System . setErr ( antErr ) ; System . setOut ( antOut ) ; confloader . cleanup ( ) ; confloader . setParent ( null ) ;
public class SeLionSelendroidDriver { /** * Scroll the screen to the right . The underlying application should have atleast one scroll view belonging to the * class ' android . widget . ScrollView ' . */ public void scrollRight ( ) { } }
logger . entering ( ) ; WebElement webElement = this . findElement ( By . className ( SCROLLVIEW_CLASS ) ) ; swipeRight ( webElement ) ; logger . exiting ( ) ;
public class TimeBaseProvider { /** * Creates a time base object which can subsequently be fetched by the client and used to send * delta times . * @ param timeBase the name of the time base to create . * @ return the created and registered time base object . */ public static TimeBaseObject createTimeBase ( String timeBase ) { } }
TimeBaseObject object = _omgr . registerObject ( new TimeBaseObject ( ) ) ; _timeBases . put ( timeBase , object ) ; return object ;
public class CmsUpdateBean {
    /**
     * Prepares step 5 of the update wizard: ensures the background workplace
     * update thread exists and is running.<p>
     */
    public void prepareUpdateStep5b() {
        if (!isInitialized()) {
            return;
        }
        addSubscriptionDriver();
        if ((m_workplaceUpdateThread != null) && (m_workplaceUpdateThread.isFinished())) {
            // update is already finished, just wait for client to collect final data
            return;
        }
        // Lazily create the worker thread on first entry.
        if (m_workplaceUpdateThread == null) {
            m_workplaceUpdateThread = new CmsUpdateThread(this);
        }
        // Start it only if not already running; repeated calls while alive are no-ops.
        if (!m_workplaceUpdateThread.isAlive()) {
            m_workplaceUpdateThread.start();
        }
    }
}
public class AbstractController { /** * Gets the source . * @ param event the event * @ param type the cls * @ param < T > the generic type * @ return the source */ protected < T > Optional < T > getSource ( Event event , Class < T > type ) { } }
return getValue ( event , event :: getSource , type ) ;
public class ReflectiveVisitorHelper {
    /**
     * Use reflection to call the appropriate <code>visit</code> method on the
     * provided visitor, passing in the specified argument.
     *
     * @param visitor the visitor encapsulating the logic to process the argument
     * @param argument the argument to dispatch; may be null
     * @return the result of the visit method, or null if no matching method was found
     * @throws IllegalArgumentException if the visitor parameter is null
     */
    public Object invokeVisit(Object visitor, Object argument) {
        Assert.notNull(visitor, "The visitor to visit is required");
        // Perform call back on the visitor through reflection.
        Method method = getMethod(visitor.getClass(), argument);
        if (method == null) {
            if (logger.isWarnEnabled()) {
                logger.warn("No method found by reflection for visitor class ["
                        + visitor.getClass().getName() + "] and argument of type ["
                        + (argument != null ? argument.getClass().getName() : "") + "]");
            }
            return null;
        }
        try {
            // A null argument selects the zero-arg visit method.
            Object[] args = null;
            if (argument != null) {
                args = new Object[] { argument };
            }
            // Allow invocation of non-public visit methods.
            if (!Modifier.isPublic(method.getModifiers()) && !method.isAccessible()) {
                method.setAccessible(true);
            }
            return method.invoke(visitor, args);
        } catch (Exception ex) {
            // Rethrows the reflection failure as an appropriate unchecked exception.
            ReflectionUtils.handleReflectionException(ex);
            throw new IllegalStateException("Should never get here");
        }
    }
}
public class JDK14Logger {
    /**
     * Infers the caller of a Logger method from the current stack trace. This can be used by
     * wrappers to provide the correct calling class and method information to their underlying
     * log implementation.
     *
     * Fix: removed leftover {@code System.err.println} debug statements — a logging class
     * must not write diagnostics directly to stderr on every call.
     *
     * @return a two element array containing { class name, method name } or { null, null }
     *         if the caller could not be inferred.
     */
    protected String[] inferCaller() {
        String self = getClass().getName();
        StackTraceElement[] stack = (new Throwable()).getStackTrace();
        // Locate our own frame in the call stack.
        int ii = 0;
        for (; ii < stack.length; ii++) {
            if (self.equals(stack[ii].getClassName())) {
                break;
            }
        }
        // The first subsequent frame that is not us is the caller.
        for (; ii < stack.length; ii++) {
            String cname = stack[ii].getClassName();
            if (!cname.equals(self)) {
                return new String[] { cname, stack[ii].getMethodName() };
            }
        }
        // We never found ourselves (or nothing follows us): caller unknown.
        return new String[] { null, null };
    }
}
public class Tracy { /** * Call before starting an operation you want to capture elapsed time for . < br > * You can nest before ( ) calls if you want to trace both caller and callee methods , * but make sure you call after ( ) for every before ( ) . * @ param label is the name you will see in the trace event report of graph node representation or timeline */ public static void before ( String label ) { } }
TracyThreadContext ctx = threadContext . get ( ) ; if ( isValidContext ( ctx ) ) { ctx . push ( label ) ; }
public class MutableByte { /** * Compares this mutable to another in ascending order . * @ param other the other mutable to compare to , not null * @ return negative if this is less , zero if equal , positive if greater */ @ Override public int compareTo ( final MutableByte other ) { } }
return ( this . value > other . value ) ? 1 : ( ( this . value == other . value ) ? 0 : - 1 ) ;
public class SolutionStackDescription { /** * The permitted file types allowed for a solution stack . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPermittedFileTypes ( java . util . Collection ) } or { @ link # withPermittedFileTypes ( java . util . Collection ) } if * you want to override the existing values . * @ param permittedFileTypes * The permitted file types allowed for a solution stack . * @ return Returns a reference to this object so that method calls can be chained together . */ public SolutionStackDescription withPermittedFileTypes ( String ... permittedFileTypes ) { } }
if ( this . permittedFileTypes == null ) { setPermittedFileTypes ( new com . amazonaws . internal . SdkInternalList < String > ( permittedFileTypes . length ) ) ; } for ( String ele : permittedFileTypes ) { this . permittedFileTypes . add ( ele ) ; } return this ;
public class MultitonKey { /** * Generate the string key for an object . * @ param object the object which is part of the global key * @ return the unique string for this object */ private String buildObjectKey ( final Object object ) { } }
String objectKey = null ; final Class < ? > objectClass = object . getClass ( ) ; final KeyGenerator typeGenerator = objectClass . getAnnotation ( KeyGenerator . class ) ; if ( typeGenerator == null ) { objectKey = generateAggregatedKey ( object ) ; } else { objectKey = generateTypeKey ( object , typeGenerator ) ; } // If no Type keyGenerator neither Method Generator were used , use the default toString method if ( objectKey == null ) { objectKey = object . toString ( ) ; } return objectKey ;
public class Template { /** * Merge this template . * @ param vars * @ param writer * @ return Context * @ throws ScriptRuntimeException * @ throws ParseException */ public Context merge ( final Vars vars , final Writer writer ) { } }
return merge ( vars , new WriterOut ( writer , engine ) ) ;
public class AWS4SignerRequestParams { /** * Returns the scope to be used for the signing . */ private String generateScope ( SignableRequest < ? > request , String dateStamp , String serviceName , String regionName ) { } }
final StringBuilder scopeBuilder = new StringBuilder ( ) ; return scopeBuilder . append ( dateStamp ) . append ( "/" ) . append ( regionName ) . append ( "/" ) . append ( serviceName ) . append ( "/" ) . append ( SignerConstants . AWS4_TERMINATOR ) . toString ( ) ;
public class DirectClustering {
    /**
     * Clusters {@link matrix} using the {@link RandomSeed} seeding algorithm
     * and the default kmeans {@link CriterionFunction}. The best scoring
     * solution out of {@code numRepetitions} will be returned.
     */
    public static Assignments cluster(Matrix matrix, int numClusters, int numRepetitions) {
        // Delegate with the default seeding (RandomSeed) and criterion (I1Function).
        return cluster(matrix, numClusters, numRepetitions, new RandomSeed(), new I1Function());
    }
}
public class DiffBase {
    /**
     * Crush the diff into an encoded string which describes the operations
     * required to transform text1 into text2.
     * E.g. "=3\t-2\t+ing" -&gt; Keep 3 chars, delete 2 chars, insert 'ing'.
     * Operations are tab-separated. Inserted text is escaped using %xx notation.
     *
     * @return Delta text.
     */
    public String toDelta() {
        StringBuilder text = new StringBuilder();
        for (Change aDiff : getChangeList()) {
            switch (aDiff.operation) {
            case INSERT:
                try {
                    // Inserted text travels verbatim, URL-encoded; '+' (the encoded
                    // space) is mapped back to a literal space for readability.
                    text.append("+").append(URLEncoder.encode(aDiff.text, "UTF-8").replace('+', ' ')).append("\t");
                } catch (UnsupportedEncodingException e) {
                    // Not likely on modern system.
                    throw new Error("This system does not support UTF-8.", e);
                }
                break;
            case DELETE:
                // Deletions and equalities are encoded by length only.
                text.append("-").append(aDiff.text.length()).append("\t");
                break;
            case EQUAL:
                text.append("=").append(aDiff.text.length()).append("\t");
                break;
            }
        }
        String delta = text.toString();
        if (delta.length() != 0) {
            // Strip off trailing tab character.
            delta = delta.substring(0, delta.length() - 1);
            delta = Strings.unescapeForEncodeUriCompatability(delta);
        }
        return delta;
    }
}
public class DefaultGroovyMethods {
    /**
     * Recursively iterates through this collection transforming each non-Collection value
     * into a new value using the closure as a transformer. Returns a potentially nested
     * list of transformed values.
     * <pre class="groovyTestCase">
     * assert [2, [4,6], [8], []] == [1, [2,3], [4], []].collectNested { it * 2 }
     * </pre>
     *
     * @param self a collection
     * @param transform the closure used to transform each item of the collection
     * @return the resultant collection
     * @since 1.8.1
     */
    public static List collectNested(Collection self, Closure transform) {
        // Delegate to the Iterable overload, pre-sizing the result list to the
        // top-level element count. Raw types kept for Groovy API compatibility.
        return (List) collectNested((Iterable) self, new ArrayList(self.size()), transform);
    }
}
public class RequestUtils { /** * This method returns a protocol of a request to web server if this container is fronted by one , such that * it sets a header < code > X - Forwarded - Proto < / code > on the request and forwards it to the Java container . * If such header is not present , than the { @ link # protocol ( ) } method is used . * @ return protocol of web server request if < code > X - Forwarded - Proto < / code > header is found , otherwise current * protocol . */ public static String getRequestProtocol ( ) { } }
String protocol = header ( "X-Forwarded-Proto" ) ; return Util . blank ( protocol ) ? protocol ( ) : protocol ;
public class Context {
    /**
     * Method to get a new Context.
     *
     * @see #begin(String, Locale, Map, Map, Map)
     * @param _userName name of the user the Context must be created for
     * @throws EFapsException on error
     * @return new Context
     */
    public static Context begin(final String _userName) throws EFapsException {
        // Delegate with the default (inheritable) inheritance mode.
        return Context.begin(_userName, Inheritance.Inheritable);
    }
}
public class FileGetFromTaskHeaders { /** * Set the file creation time . * @ param ocpCreationTime the ocpCreationTime value to set * @ return the FileGetFromTaskHeaders object itself . */ public FileGetFromTaskHeaders withOcpCreationTime ( DateTime ocpCreationTime ) { } }
if ( ocpCreationTime == null ) { this . ocpCreationTime = null ; } else { this . ocpCreationTime = new DateTimeRfc1123 ( ocpCreationTime ) ; } return this ;
public class AbstractRunMojo {
    /**
     * Copy the Alfresco Enterprise license to its correct place in the Platform WAR, if it
     * exists. It is not enough to have it on the test classpath, then it will start up as
     * Trial license...
     *
     * @throws MojoExecutionException when any problem appears copying the Alfresco license
     */
    protected void copyAlfrescoLicense() throws MojoExecutionException {
        final String warOutputDir = getWarOutputDir(PLATFORM_WAR_PREFIX_NAME);
        // The license must live inside the exploded platform WAR, not merely on the classpath.
        final String licDestDir = warOutputDir + "/WEB-INF/classes/alfresco/extension/license";
        getLog().info("Copying Alfresco Enterprise license to: " + licDestDir);
        // Invoke maven-resources-plugin:copy-resources to copy src/test/license/*.lic
        // into the WAR, with filtering disabled (binary license files must not be altered).
        executeMojo(
            plugin(groupId("org.apache.maven.plugins"),
                   artifactId("maven-resources-plugin"),
                   version(MAVEN_RESOURCE_PLUGIN_VERSION)),
            goal("copy-resources"),
            configuration(
                element(name("outputDirectory"), licDestDir),
                element(name("resources"),
                    element(name("resource"),
                        element(name("directory"), "src/test/license"),
                        element(name("includes"),
                            element(name("include"), "*.lic")),
                        element(name("filtering"), "false")))),
            execEnv);
    }
}
public class SparkQuery { /** * Parses the insert into query . * @ param query * the query * @ return the map */ private Map < String , Object > parseInsertIntoQuery ( String query ) { } }
Map < String , Object > persistDetails = new HashMap < String , Object > ( ) ; String insertReg = "(?i)^insert\\s+into\\s+(\\S+)\\s+(?:as\\s+(\\S+)\\s+)?FROM\\s+\\((.*)\\)$" ; Pattern r = Pattern . compile ( insertReg ) ; Matcher m = r . matcher ( query ) ; if ( m . find ( ) ) { try { parsePersistClause ( m . group ( 1 ) , persistDetails ) ; persistDetails . put ( "format" , m . group ( 2 ) ) ; persistDetails . put ( "fetchQuery" , m . group ( 3 ) ) ; } catch ( Exception e ) { throw new KunderaException ( "Invalid Query" ) ; } } else { throw new KunderaException ( "Invalid Query" ) ; } return persistDetails ;
public class KillBillHttpClient {
    /**
     * Issues an HTTP OPTIONS request for the given URI.
     *
     * @param uri the target URI
     * @param requestOptions per-request options
     * @return the HTTP response
     * @throws KillBillClientException on a client-side failure
     */
    public Response doOptions(final String uri, final RequestOptions requestOptions) throws KillBillClientException {
        // Delegate using the client's configured default request timeout.
        return doOptions(uri, requestOptions, this.requestTimeoutSec);
    }
}
public class ConcurrentServiceReferenceMap {
    /**
     * Associates the reference with the key but only if there is not an existing
     * reference associated with that key. It will only attempt to add the reference
     * to the map if <code>key</code> is not <code>null</code>.
     *
     * @param key Key associated with this reference
     * @param reference ServiceReference for the target service
     * @return The service reference that was previously associated with the key
     *         or <code>null</code> otherwise
     * @see ConcurrentMap#putIfAbsent(Object, Object)
     */
    public ServiceReference<V> putReferenceIfAbsent(K key, ServiceReference<V> reference) {
        // Without a key nothing can be stored or looked up.
        if (key == null) {
            return null;
        }
        // A null reference cannot be inserted, but still report any existing mapping.
        if (reference == null) {
            return getReference(key);
        }
        final ConcurrentServiceReferenceElement<V> candidate =
                new ConcurrentServiceReferenceElement<V>(referenceName, reference);
        final ConcurrentServiceReferenceElement<V> previous = elementMap.putIfAbsent(key, candidate);
        return (previous == null) ? null : previous.getReference();
    }
}
public class DefaultQuartzServiceImpl { /** * Pause the job with given name in given group * @ param jobName * the job name * @ param jobGroupName * the job group name * @ return < code > true < / code > if job was paused , < code > false < / code > otherwise * @ see QuartzService # pauseQuartzJob ( String , String ) */ public boolean pauseQuartzJob ( String jobName , String jobGroupName ) { } }
try { this . scheduler . pauseJob ( new JobKey ( jobName , jobGroupName ) ) ; return true ; } catch ( SchedulerException e ) { logger . error ( "error pausing job: " + jobName + " in group: " + jobGroupName , e ) ; } return false ;
public class ClassIdentifiers { /** * This method throws IOException because it is assumed that we got the id from network . * @ param id * @ return * @ throws IOException */ public Class < ? > getClass ( int id ) throws IOException { } }
if ( id < 0 || id > internalIdToClass . length ) { throw new IOException ( "Unknown class id " + id ) ; } Class < ? > clazz = internalIdToClass [ id ] ; if ( clazz == null ) { throw new IOException ( "Unknown class id " + id ) ; } return clazz ;
public class DeleteDirectoryConfigRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeleteDirectoryConfigRequest deleteDirectoryConfigRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteDirectoryConfigRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // DirectoryName is the only field bound for this request type.
            protocolMarshaller.marshall(deleteDirectoryConfigRequest.getDirectoryName(), DIRECTORYNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class EntryAction {
    /**
     * We have two exceptions in this case: one from the loader or the expiry policy, one from
     * the resilience policy. Propagate exception from the resilience policy and suppress
     * the other, since this is a general configuration problem.
     */
    @SuppressWarnings("unchecked")
    private void resiliencePolicyException(RuntimeException _ouch) {
        // Store the policy exception as the entry's value so readers observe it.
        newValueOrException = (V) new ExceptionWrapper<K>(key, _ouch, loadStartedTime, entry);
        // An expiry of 0 marks the entry as immediately expired.
        expiry = 0;
        expiryCalculated();
    }
}
public class FieldInfo {
    /**
     * Set this field back to the original value.
     *
     * @param bDisplayOption If true, display the data.
     * @return The error code.
     */
    public int initField(boolean bDisplayOption) // Init this field override for other value
    {
        // Null or String defaults go through setString; any other default type
        // is applied via setData.
        if ((this.getDefault() == null) || (this.getDefault() instanceof String))
            return this.setString((String) this.getDefault(), bDisplayOption, Constants.INIT_MOVE); // zero out the field
        return this.setData(this.getDefault(), bDisplayOption, Constants.INIT_MOVE);
    }
}
public class Channels {
    /**
     * Sends a {@code "shutdownInput"} request to the {@link ChannelDownstreamHandler}
     * which is placed in the closest downstream from the handler associated with the
     * specified {@link ChannelHandlerContext}.
     *
     * @param ctx the context
     * @param future the future which will be notified when the shutdownInput operation is done
     */
    public static void shutdownInput(ChannelHandlerContext ctx, ChannelFuture future) {
        // Fire a downstream event so the nearest downstream handler performs the shutdown.
        ctx.sendDownstream(new DownstreamShutdownInputEvent(ctx.getChannel(), future));
    }
}
public class Validation { /** * Cross validation of a classification model . * @ param < T > the data type of input objects . * @ param k k - fold cross validation . * @ param trainer a classifier trainer that is properly parameterized . * @ param x the test data set . * @ param y the test data labels . * @ param measure the performance measure of classification . * @ return the test results with the same size of order of measures */ public static < T > double cv ( int k , ClassifierTrainer < T > trainer , T [ ] x , int [ ] y , ClassificationMeasure measure ) { } }
if ( k < 2 ) { throw new IllegalArgumentException ( "Invalid k for k-fold cross validation: " + k ) ; } int n = x . length ; int [ ] predictions = new int [ n ] ; CrossValidation cv = new CrossValidation ( n , k ) ; for ( int i = 0 ; i < k ; i ++ ) { T [ ] trainx = Math . slice ( x , cv . train [ i ] ) ; int [ ] trainy = Math . slice ( y , cv . train [ i ] ) ; Classifier < T > classifier = trainer . train ( trainx , trainy ) ; for ( int j : cv . test [ i ] ) { predictions [ j ] = classifier . predict ( x [ j ] ) ; } } return measure . measure ( y , predictions ) ;
public class TreeCoreset {
    /**
     * Frees a tree of its storage: iteratively releases leaf nodes bottom-up,
     * detaching each from its parent, until only the root remains, then frees
     * the root itself.
     */
    void freeTree(treeNode root) {
        while (!treeFinished(root)) {
            if (root.lc == null && root.rc == null) {
                // Both children released: climb back up to the parent.
                root = root.parent;
            } else if (root.lc == null && root.rc != null) {
                // Check whether the right child is a leaf
                if (isLeaf(root.rc)) {
                    // Free the right child
                    root.rc.free();
                    root.rc = null;
                } else {
                    // Continue with the right child
                    root = root.rc;
                }
            } else if (root.lc != null) {
                // Same handling for the left child.
                if (isLeaf(root.lc)) {
                    root.lc.free();
                    root.lc = null;
                } else {
                    root = root.lc;
                }
            }
        }
        root.free();
    }
}
public class MetadataService { /** * Ensures that the specified { @ code appId } is a token of the specified { @ code project } . */ private static void ensureProjectToken ( ProjectMetadata project , String appId ) { } }
requireNonNull ( project , "project" ) ; requireNonNull ( appId , "appId" ) ; checkArgument ( project . tokens ( ) . containsKey ( appId ) , appId + " is not a token of the project " + project . name ( ) ) ;
public class OnExitScriptTypeImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the value of the EMF feature identified by {@code featureID},
     * delegating unknown features to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case DroolsPackage.ON_EXIT_SCRIPT_TYPE__SCRIPT:
                return getScript();
            case DroolsPackage.ON_EXIT_SCRIPT_TYPE__SCRIPT_FORMAT:
                return getScriptFormat();
        }
        // Unknown feature: defer to the generated superclass implementation.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class Angular { /** * Registers a module with Angular framework , with module dependency listed * in the < code > requires < / code > parameter . If a module by the same name * ( i . e . the same class name including the package name ) has already been * registered , then the previous registration is overridden . * @ param module An instance of this module . * @ param requires Optional list of other module names this module depends on . */ public static < M extends AbstractModule > M module ( M module , String ... requires ) { } }
return module ( module , null , requires == null ? EMPTY_STRING_ARRAY : requires ) ;