Dataset column schema:
  signature      — string, length 43 to 39.1k characters
  implementation — string, length 0 to 450k characters
public class MIDDCombiner { /** * Combine two external nodes following the algorithm in this . algo * @ param n1 * @ param n2 * @ return */ private ExternalNode3 combineExternalNodes ( ExternalNode3 n1 , ExternalNode3 n2 ) { } }
if ( n1 == null || n2 == null ) { throw new IllegalArgumentException ( "Input nodes must not be null" ) ; } DecisionType combinedDecision = algo . combine ( n1 . getDecision ( ) , n2 . getDecision ( ) ) ; ExternalNode3 n = new ExternalNode3 ( combinedDecision ) ; // only accept OE that match with combined decision . List < ObligationExpression > oes1 = getFulfilledObligationExpressions ( n1 . getObligationExpressions ( ) , combinedDecision ) ; List < ObligationExpression > oes2 = getFulfilledObligationExpressions ( n2 . getObligationExpressions ( ) , combinedDecision ) ; n . getObligationExpressions ( ) . addAll ( oes1 ) ; n . getObligationExpressions ( ) . addAll ( oes2 ) ; return n ;
public class GrafeasV1Beta1Client { /** * Deletes the specified occurrence . For example , use this method to delete an occurrence when the * occurrence is no longer applicable for the given resource . * < p > Sample code : * < pre > < code > * try ( GrafeasV1Beta1Client grafeasV1Beta1Client = GrafeasV1Beta1Client . create ( ) ) { * OccurrenceName name = OccurrenceName . of ( " [ PROJECT ] " , " [ OCCURRENCE ] " ) ; * grafeasV1Beta1Client . deleteOccurrence ( name ) ; * < / code > < / pre > * @ param name The name of the occurrence in the form of * ` projects / [ PROJECT _ ID ] / occurrences / [ OCCURRENCE _ ID ] ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void deleteOccurrence ( OccurrenceName name ) { } }
DeleteOccurrenceRequest request = DeleteOccurrenceRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; deleteOccurrence ( request ) ;
public class Bundler { /** * Inserts an array of Parcelable values into the mapping of the underlying Bundle , replacing any * existing value for the given key . Either key or value may be null . * @ param key a String , or null * @ param value an array of Parcelable objects , or null * @ return this bundler instance to chain method calls */ public Bundler put ( String key , Parcelable [ ] value ) { } }
delegate . putParcelableArray ( key , value ) ; return this ;
public class BigtableAsyncTable { /** * { @ inheritDoc } */ @ Override public CompletableFuture < List < Result > > scanAll ( Scan scan ) { } }
if ( AbstractBigtableTable . hasWhileMatchFilter ( scan . getFilter ( ) ) ) { throw new UnsupportedOperationException ( "scanAll with while match filter is not allowed" ) ; } return toCompletableFuture ( clientWrapper . readFlatRowsAsync ( hbaseAdapter . adapt ( scan ) ) ) . thenApply ( list -> map ( list , Adapters . FLAT_ROW_ADAPTER :: adaptResponse ) ) ;
public class BaseAsset {
    /**
     * Creates a conversation whose initial expression mentions this asset.
     *
     * @param author Author of conversation expression.
     * @param content Content of conversation expression.
     * @return Created conversation
     */
    public Conversation createConversation(Member author, String content) {
        // Build a new conversation seeded with a single expression.
        Conversation conversation = getInstance().create().conversation(author, content);
        // Attach this asset as a mention on that first expression.
        // NOTE(review): iterator.next() assumes the factory always returns a
        // conversation with at least one contained expression; it throws
        // NoSuchElementException otherwise — confirm that invariant holds.
        Iterator<Expression> iterator = conversation.getContainedExpressions().iterator();
        iterator.next().getMentions().add(this);
        // Persist the mention before handing the conversation back.
        conversation.save();
        return conversation;
    }
}
public class AdaptedAction { /** * Load the icon file . * @ param location * Path to the file . */ private ImageIcon retrieveIcon ( IconLocationType location ) { } }
try { return myIconProvider . retrieveIcon ( location ) ; } catch ( Exception _exception ) { System . err . println ( _exception . getMessage ( ) ) ; } return null ;
public class IdentityTemplateLibrary { /** * Store a template library to the provided output stream . * @ param out output stream * @ throws IOException low level IO error */ void store ( OutputStream out ) throws IOException { } }
BufferedWriter bw = new BufferedWriter ( new OutputStreamWriter ( out ) ) ; for ( Entry < String , Point2d [ ] > e : templateMap . entries ( ) ) { bw . write ( encodeEntry ( e ) ) ; bw . write ( '\n' ) ; } bw . close ( ) ;
public class EncryptionUtil { /** * Encrypts the given message with the specified key type ( either public or private ) . * @ param text * The message to encrypt . * @ param keyType * THe key type to be used . * @ return The encrypted message . * @ throws EncryptionException * When it was not possible to encrypt the message . */ public byte [ ] encrypt ( final String text , final KeyType keyType ) throws EncryptionException { } }
final Key key = keyType == KeyType . PRIVATE ? privateKey : publicKey ; try { // get an RSA cipher object and print the provider final Cipher cipher = Cipher . getInstance ( ALGORITHM ) ; // encrypt the plain text using the public key cipher . init ( Cipher . ENCRYPT_MODE , key ) ; final byte [ ] cipherText = cipher . doFinal ( text . getBytes ( DEFAULT_CHARSET ) ) ; return cipherText ; } catch ( NoSuchAlgorithmException | InvalidKeyException | NoSuchPaddingException | BadPaddingException | IllegalBlockSizeException e ) { throw new EncryptionException ( "Unable to encrypt message: " , e ) ; }
public class HttpServerHandlerBinder {
    /**
     * Configure the "get all requests" servlet that backs the {@link GuiceFilter}.
     * This servlet should log all requests, as legal requests should never hit it.
     *
     * @param binder the Guice binder to register the binding on
     * @return a binding builder for {@link Servlet}, annotated with
     *         {@code CATCHALL_NAMED}; the caller completes the binding
     */
    public static LinkedBindingBuilder<Servlet> bindCatchallServlet(final Binder binder) {
        // Bind Servlet under the catch-all name so callers can attach an implementation.
        return binder.bind(Servlet.class).annotatedWith(CATCHALL_NAMED);
    }
}
public class BaseFont {
    /**
     * Creates a new font.
     *
     * <p>The font can be one of the 14 built-in types, a Type1 font referred
     * to by an AFM or PFM file, a TrueType font (simple or collection) or a
     * CJK font from the Adobe Asian Font Pack. TrueType and CJK font names may
     * carry an optional style modifier appended to the name ("Bold", "Italic",
     * "BoldItalic"), e.g. "STSong-Light,Bold"; these modifiers do not work if
     * the font is embedded. Fonts in TrueType collections are addressed by
     * index such as "msgothic.ttc,1" — the second font, as indexes start at 0.
     *
     * <p>If the <CODE>byte</CODE> arrays are present, the font is read from
     * them instead of from the name; a name is still required to identify the
     * font type.
     *
     * <p>Besides the common encodings described by name, custom encodings can
     * be given for the single-byte Type1 and TrueType fonts. A custom encoding
     * starts with '#' followed by "simple" or "full". With "simple" there is a
     * decimal first character position and then a list of hex Unicode values
     * composing the encoding; "simple" is recommended for TrueType fonts, as
     * "full" risks not matching characters to the right glyph if not done with
     * care. "full" is aimed at Type1 fonts whose glyphs have non-standard
     * names (like the Tex math fonts): each group of three elements gives a
     * code position — the one-byte code in decimal or as 'x' (x cannot be the
     * space), the glyph name, and the Unicode character used to access the
     * glyph. The space must be assigned to character position 32, otherwise
     * text justification will not work.
     * Example of a "simple" encoding including space, A, B and ecyrillic:
     * <PRE>
     * "# simple 32 0020 0041 0042 0454"
     * </PRE>
     * Example of a "full" encoding for a Type1 Tex font:
     * <PRE>
     * "# full 'A' nottriangeqlleft 0041 'B' dividemultiply 0042 32 space 0020"
     * </PRE>
     *
     * @param name the name of the font or its location on file
     * @param encoding the encoding to be applied to this font
     * @param embedded true if the font is to be embedded in the PDF
     * @param cached true if the font comes from the cache or is added to the
     *        cache if new; false if the font is always created new
     * @param ttfAfm the TrueType font or the AFM in a byte array, or null
     * @param pfb the PFB in a byte array, or null
     * @return a new font; it may come from the cache only if {@code cached} is true
     * @throws DocumentException the font is invalid
     * @throws IOException the font file could not be read
     * @since iText 0.80
     */
    public static BaseFont createFont(String name, String encoding, boolean embedded, boolean cached, byte ttfAfm[], byte pfb[]) throws DocumentException, IOException {
        // Delegate to the seven-argument overload; the trailing 'false' is the
        // extra flag defined there — presumably "noThrow"; confirm against
        // that overload's documentation.
        return createFont(name, encoding, embedded, cached, ttfAfm, pfb, false);
    }
}
public class SerializedFormBuilder { /** * Build the field information . * @ param node the XML element that specifies which components to document * @ param fieldsContentTree content tree to which the documentation will be added */ public void buildFieldInfo ( XMLNode node , Content fieldsContentTree ) { } }
if ( configuration . nocomment ) { return ; } FieldDoc field = ( FieldDoc ) currentMember ; ClassDoc cd = field . containingClass ( ) ; // Process default Serializable field . if ( ( field . tags ( "serial" ) . length == 0 ) && ! field . isSynthetic ( ) && configuration . serialwarn ) { configuration . message . warning ( field . position ( ) , "doclet.MissingSerialTag" , cd . qualifiedName ( ) , field . name ( ) ) ; } fieldWriter . addMemberDescription ( field , fieldsContentTree ) ; fieldWriter . addMemberTags ( field , fieldsContentTree ) ;
public class MultiCollection { /** * Add a collection inside this multicollection . * @ param collection the collection to add . */ public void addCollection ( Collection < ? extends E > collection ) { } }
if ( collection != null && ! collection . isEmpty ( ) ) { this . collections . add ( collection ) ; }
public class JacobsthalLucas {
    /**
     * Returns the nth Jacobsthal-Lucas number.
     *
     * <p>Recurrence: J(n) = J(n-1) + 2*J(n-2) with J(0) = 2 and J(1) = 1,
     * giving the sequence 2, 1, 5, 7, 17, 31, ...
     *
     * <p>Examples:
     * <pre>
     * nthJacobsthalLucas(5) == 31
     * nthJacobsthalLucas(4) == 17
     * nthJacobsthalLucas(2) == 5
     * </pre>
     *
     * @param n the zero-based position in the Jacobsthal-Lucas series; must be &gt;= 0
     * @return the nth Jacobsthal-Lucas number
     * @throws IllegalArgumentException if {@code n} is negative
     */
    public static int nthJacobsthalLucas(int n) {
        if (n < 0) {
            throw new IllegalArgumentException("n must be non-negative: " + n);
        }
        // Explicit base cases. The previous version allocated new int[n + 1]
        // and unconditionally wrote series[1], which threw
        // ArrayIndexOutOfBoundsException for n == 0.
        if (n == 0) {
            return 2;
        }
        if (n == 1) {
            return 1;
        }
        // Iterate with two rolling values instead of an O(n) array.
        int prev2 = 2; // J(i - 2)
        int prev1 = 1; // J(i - 1)
        for (int i = 2; i <= n; i++) {
            int current = prev1 + 2 * prev2;
            prev2 = prev1;
            prev1 = current;
        }
        return prev1;
    }
}
public class CmsObject {
    /**
     * Returns all relations for the given resource matching the given filter.<p>
     *
     * You should have view/read permissions on the given resource.<p>
     *
     * You may obtain source and/or target paths to resources you do not have
     * view/read permissions on.<p>
     *
     * @param resourceName the name of the resource to retrieve the relations for
     * @param filter the filter to match the relation
     * @return a List containing all {@link org.opencms.relations.CmsRelation}
     *      objects for the given resource matching the given filter
     * @throws CmsException if something goes wrong
     * @see CmsSecurityManager#getRelationsForResource(CmsRequestContext, CmsResource, CmsRelationFilter)
     */
    public List<CmsRelation> getRelationsForResource(String resourceName, CmsRelationFilter filter) throws CmsException {
        // Resolve the path to a resource (ignoring visibility filters via ALL)
        // and delegate to the resource-based overload.
        return getRelationsForResource(readResource(resourceName, CmsResourceFilter.ALL), filter);
    }
}
public class EventMarshaller {
    /**
     * Marshall the given {@code Event} into the protocol representation by
     * emitting each field with its static binding descriptor.
     *
     * @param event the object to marshall; must not be null
     * @param protocolMarshaller receives each (value, binding) pair
     * @throws SdkClientException if {@code event} is null or any field fails to marshall
     */
    public void marshall(Event event, ProtocolMarshaller protocolMarshaller) {
        if (event == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per field; bindings describe the wire location/name.
            protocolMarshaller.marshall(event.getAppPackageName(), APPPACKAGENAME_BINDING);
            protocolMarshaller.marshall(event.getAppTitle(), APPTITLE_BINDING);
            protocolMarshaller.marshall(event.getAppVersionCode(), APPVERSIONCODE_BINDING);
            protocolMarshaller.marshall(event.getAttributes(), ATTRIBUTES_BINDING);
            protocolMarshaller.marshall(event.getClientSdkVersion(), CLIENTSDKVERSION_BINDING);
            protocolMarshaller.marshall(event.getEventType(), EVENTTYPE_BINDING);
            protocolMarshaller.marshall(event.getMetrics(), METRICS_BINDING);
            protocolMarshaller.marshall(event.getSdkName(), SDKNAME_BINDING);
            protocolMarshaller.marshall(event.getSession(), SESSION_BINDING);
            protocolMarshaller.marshall(event.getTimestamp(), TIMESTAMP_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client-side exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CollationBuilder {
    /**
     * Implements CollationRuleParser.Sink.
     *
     * Handles a reset rule ("&amp; str" or "&amp; [before strength] str"):
     * fills {@code ces}/{@code cesLength} with the collation elements of the
     * reset position and, for a before-reset, rewrites the last CE to a
     * temporary CE pointing at the node just before the reset position at the
     * given strength.
     */
    @Override
    void addReset(int strength, CharSequence str) {
        assert (str.length() != 0);
        if (str.charAt(0) == CollationRuleParser.POS_LEAD) {
            // Special reset position like [first primary ignorable].
            ces[0] = getSpecialResetPosition(str);
            cesLength = 1;
            assert ((ces[0] & Collation.CASE_AND_QUATERNARY_MASK) == 0);
        } else {
            // normal reset to a character or string
            String nfdString = nfd.normalize(str);
            cesLength = dataBuilder.getCEs(nfdString, ces, 0);
            if (cesLength > Collation.MAX_EXPANSION_LENGTH) {
                throw new IllegalArgumentException(
                        "reset position maps to too many collation elements (more than 31)");
            }
        }
        // simple reset-at-position: nothing more to do.
        if (strength == Collator.IDENTICAL) { return; }

        // &[before strength] position
        assert (Collator.PRIMARY <= strength && strength <= Collator.TERTIARY);
        int index = findOrInsertNodeForCEs(strength);

        long node = nodes.elementAti(index);
        // If the index is for a "weaker" node,
        // then skip backwards over this and further "weaker" nodes.
        while (strengthFromNode(node) > strength) {
            index = previousIndexFromNode(node);
            node = nodes.elementAti(index);
        }

        // Find or insert a node whose index we will put into a temporary CE.
        if (strengthFromNode(node) == strength && isTailoredNode(node)) {
            // Reset to just before this same-strength tailored node.
            index = previousIndexFromNode(node);
        } else if (strength == Collator.PRIMARY) {
            // root primary node (has no previous index)
            long p = weight32FromNode(node);
            if (p == 0) {
                throw new UnsupportedOperationException(
                        "reset primary-before ignorable not possible");
            }
            if (p <= rootElements.getFirstPrimary()) {
                // There is no primary gap between ignorables and the space-first-primary.
                throw new UnsupportedOperationException(
                        "reset primary-before first non-ignorable not supported");
            }
            if (p == Collation.FIRST_TRAILING_PRIMARY) {
                // We do not support tailoring to an unassigned-implicit CE.
                throw new UnsupportedOperationException(
                        "reset primary-before [first trailing] not supported");
            }
            p = rootElements.getPrimaryBefore(p, baseData.isCompressiblePrimary(p));
            index = findOrInsertNodeForPrimary(p);
            // Go to the last node in this list:
            // Tailor after the last node between adjacent root nodes.
            for (;;) {
                node = nodes.elementAti(index);
                int nextIndex = nextIndexFromNode(node);
                if (nextIndex == 0) { break; }
                index = nextIndex;
            }
        } else {
            // &[before 2] or &[before 3]
            index = findCommonNode(index, Collator.SECONDARY);
            if (strength >= Collator.TERTIARY) {
                index = findCommonNode(index, Collator.TERTIARY);
            }
            // findCommonNode() stayed on the stronger node or moved to
            // an explicit common-weight node of the reset-before strength.
            node = nodes.elementAti(index);
            if (strengthFromNode(node) == strength) {
                // Found a same-strength node with an explicit weight.
                int weight16 = weight16FromNode(node);
                if (weight16 == 0) {
                    throw new UnsupportedOperationException(
                            (strength == Collator.SECONDARY)
                                    ? "reset secondary-before secondary ignorable not possible"
                                    : "reset tertiary-before completely ignorable not possible");
                }
                assert (weight16 > Collation.BEFORE_WEIGHT16);
                // Reset to just before this node.
                // Insert the preceding same-level explicit weight if it is not there already.
                // Which explicit weight immediately precedes this one?
                weight16 = getWeight16Before(index, node, strength);
                // Does this preceding weight have a node?
                int previousWeight16;
                int previousIndex = previousIndexFromNode(node);
                for (int i = previousIndex;; i = previousIndexFromNode(node)) {
                    node = nodes.elementAti(i);
                    int previousStrength = strengthFromNode(node);
                    if (previousStrength < strength) {
                        assert (weight16 >= Collation.COMMON_WEIGHT16 || i == previousIndex);
                        // Either the reset element has an above-common weight and
                        // the parent node provides the implied common weight,
                        // or the reset element has a weight <= common in the node
                        // right after the parent, and we need to insert the preceding weight.
                        previousWeight16 = Collation.COMMON_WEIGHT16;
                        break;
                    } else if (previousStrength == strength && !isTailoredNode(node)) {
                        previousWeight16 = weight16FromNode(node);
                        break;
                    }
                    // Skip weaker nodes and same-level tailored nodes.
                }
                if (previousWeight16 == weight16) {
                    // The preceding weight has a node,
                    // maybe with following weaker or tailored nodes.
                    // Reset to the last of them.
                    index = previousIndex;
                } else {
                    // Insert a node with the preceding weight, reset to that.
                    node = nodeFromWeight16(weight16) | nodeFromStrength(strength);
                    index = insertNodeBetween(previousIndex, index, node);
                }
            } else {
                // Found a stronger node with implied strength-common weight.
                int weight16 = getWeight16Before(index, node, strength);
                index = findOrInsertWeakNode(index, weight16, strength);
            }
            // Strength of the temporary CE = strength of its reset position.
            // Code above raises an error if the before-strength is stronger.
            strength = ceStrength(ces[cesLength - 1]);
        }
        ces[cesLength - 1] = tempCEFromIndexAndStrength(index, strength);
    }
}
public class DateUtil { /** * Test to see if two dates are in the same week * @ param dateOne first date * @ param dateTwo second date * @ return true if the two dates are in the same week */ public static boolean sameWeek ( Date dateOne , Date dateTwo ) { } }
if ( ( dateOne == null ) || ( dateTwo == null ) ) { return false ; } Calendar cal = Calendar . getInstance ( ) ; cal . setTime ( dateOne ) ; int year = cal . get ( Calendar . YEAR ) ; int week = cal . get ( Calendar . WEEK_OF_YEAR ) ; cal . setTime ( dateTwo ) ; int year2 = cal . get ( Calendar . YEAR ) ; int week2 = cal . get ( Calendar . WEEK_OF_YEAR ) ; return ( ( year == year2 ) && ( week == week2 ) ) ;
public class AsyncSocketConnection {
    /**
     * Writes the buffer directly to the channel and returns a promise that is
     * settled when the write completes (originally for debugging, but now used
     * to reschedule).
     *
     * <p>Only one write may be in flight at a time: a pending
     * {@code writePromise} causes a RuntimeException.
     *
     * @param buf the data to write; partially-written buffers are kept in
     *            {@code writingBuffer} for a later retry
     * @return a promise resolved on completed write, or rejected on failure
     */
    protected IPromise directWrite(ByteBuffer buf) {
        checkThread();
        if (myActor == null)
            myActor = Actor.current();
        // Refuse overlapping writes; the connection state is included for diagnosis.
        if (writePromise != null)
            throw new RuntimeException("concurrent write con:" + chan.isConnected() + " open:" + chan.isOpen());
        writePromise = new Promise();
        writingBuffer = buf;
        Promise res = writePromise;
        try {
            int written = 0;
            written = chan.write(buf);
            if (written < 0) {
                // TODO: closed
                // Negative write indicates the peer closed the connection.
                writeFinished(new IOException("connection closed"));
            }
            if (buf.remaining() > 0) {
                // Partial write: leave the promise pending; the selector will
                // resume the write later.
                // key.interestOps(SelectionKey.OP_WRITE);
            } else {
                // Everything written; settle the promise successfully.
                writeFinished(null);
            }
        } catch (Exception e) {
            res.reject(e);
            FSTUtil.rethrow(e);
        }
        return res;
    }
}
public class ServiceManagerIndexRdf { /** * Creates a Service Description in the system . * Only needs to be fed with an MSM Service description . * After successfully adding a service , implementations of this method should raise a { @ code ServiceCreatedEvent } * @ param service the input service description in terms of MSM * @ return the URI this service description was saved to * @ throws ServiceException */ @ Override public URI addService ( Service service ) throws ServiceException { } }
URI serviceUri = super . addService ( service ) ; if ( serviceUri != null ) { // Index service this . indexService ( service ) ; } return service . getUri ( ) ;
public class ClusterJoinManager { /** * Respond to a join request by sending the master address in a { @ link MasterResponseOp } . This happens when current node * receives a join request but is not the cluster ' s master . * @ param target the node receiving the master answer */ private void sendMasterAnswer ( Address target ) { } }
Address masterAddress = clusterService . getMasterAddress ( ) ; if ( masterAddress == null ) { logger . info ( format ( "Cannot send master answer to %s since master node is not known yet" , target ) ) ; return ; } if ( masterAddress . equals ( node . getThisAddress ( ) ) && node . getNodeExtension ( ) . getInternalHotRestartService ( ) . isMemberExcluded ( masterAddress , clusterService . getThisUuid ( ) ) ) { // I already know that I will do a force - start so I will not allow target to join me logger . info ( "Cannot send master answer because " + target + " should not join to this master node." ) ; return ; } if ( masterAddress . equals ( target ) ) { logger . fine ( "Cannot send master answer to " + target + " since it is the known master" ) ; return ; } MasterResponseOp op = new MasterResponseOp ( masterAddress ) ; nodeEngine . getOperationService ( ) . send ( op , target ) ;
public class CmsVaadinUtils { /** * Gets list of resource types . < p > * @ return List */ public static List < I_CmsResourceType > getResourceTypes ( ) { } }
List < I_CmsResourceType > res = new ArrayList < I_CmsResourceType > ( ) ; for ( I_CmsResourceType type : OpenCms . getResourceManager ( ) . getResourceTypes ( ) ) { CmsExplorerTypeSettings typeSetting = OpenCms . getWorkplaceManager ( ) . getExplorerTypeSetting ( type . getTypeName ( ) ) ; if ( typeSetting != null ) { res . add ( type ) ; } } return res ;
public class UsersInner {
    /**
     * Create or replace an existing User.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labAccountName The name of the lab Account.
     * @param labName The name of the lab.
     * @param userName The name of the user.
     * @param user The User registered to a lab
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the UserInner object if successful.
     */
    public UserInner createOrUpdate(String resourceGroupName, String labAccountName, String labName, String userName, UserInner user) {
        // Synchronous wrapper: run the async call, block until the single
        // ServiceResponse is emitted, and unwrap its body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, labAccountName, labName, userName, user).toBlocking().single().body();
    }
}
public class IntegralImageOps {
    /**
     * Computes the value of a block inside an integral image and treats pixels
     * outside of the image as zero. The block is defined as follows:
     * x0 &lt; x &le; x1 and y0 &lt; y &le; y1.
     *
     * @param integral Integral image.
     * @param x0 Lower bound of the block. Exclusive.
     * @param y0 Lower bound of the block. Exclusive.
     * @param x1 Upper bound of the block. Inclusive.
     * @param y1 Upper bound of the block. Inclusive.
     * @return Value inside the block.
     */
    public static int block_zero(GrayS32 integral, int x0, int y0, int x1, int y1) {
        // Public facade; the actual arithmetic lives in the generated impl class.
        return ImplIntegralImageOps.block_zero(integral, x0, y0, x1, y1);
    }
}
public class XPathScanner {
    /**
     * Checks whether the given character may appear in a name token.
     *
     * <p>Note: despite the method name, this accepts more than letters — ASCII
     * digits and the punctuation characters '_', '-' and '.' also pass.
     *
     * @param paramInput the character to check
     * @return true if the character is an ASCII letter, an ASCII digit,
     *         '_', '-' or '.'
     */
    private boolean isLetter(final char paramInput) {
        if (paramInput >= '0' && paramInput <= '9') {
            return true;
        }
        if (paramInput >= 'a' && paramInput <= 'z') {
            return true;
        }
        if (paramInput >= 'A' && paramInput <= 'Z') {
            return true;
        }
        return paramInput == '_' || paramInput == '-' || paramInput == '.';
    }
}
public class DeviceStore { /** * Removes the given device from store so that it cannot be any longer be used for testing . This * can happen if e . g . the hardware device gets unplugged from the computer . * @ param device the device to remove . * @ throws DeviceStoreException when parameter is not type of ' DefaultHardwareDevice ' . */ public void removeAndroidDevice ( AndroidDevice device ) throws DeviceStoreException { } }
if ( device == null ) { return ; } boolean hardwareDevice = device instanceof DefaultHardwareDevice ; if ( hardwareDevice == false ) { throw new DeviceStoreException ( "Only devices of type 'DefaultHardwareDevice' can be removed." ) ; } release ( device , null ) ; DeviceTargetPlatform apiLevel = device . getTargetPlatform ( ) ; if ( androidDevices . containsKey ( apiLevel ) ) { log . info ( "Removing: " + device ) ; androidDevices . get ( apiLevel ) . remove ( device ) ; if ( androidDevices . get ( apiLevel ) . isEmpty ( ) ) { androidDevices . remove ( apiLevel ) ; } } else { for ( List < AndroidDevice > targetDevices : androidDevices . values ( ) ) { if ( targetDevices . contains ( device ) ) { log . warning ( "Device in devicestore" ) ; } } log . warning ( "The target platform version of the device is not found in device store." ) ; log . warning ( "The device was propably already removed." ) ; }
public class PooledExecutionServiceConfigurationBuilder { /** * Builds the { @ link PooledExecutionServiceConfiguration } * @ return the built configuration */ @ Override public PooledExecutionServiceConfiguration build ( ) { } }
PooledExecutionServiceConfiguration config = new PooledExecutionServiceConfiguration ( ) ; if ( defaultPool != null ) { config . addDefaultPool ( defaultPool . alias , defaultPool . minSize , defaultPool . maxSize ) ; } for ( Pool pool : pools ) { config . addPool ( pool . alias , pool . minSize , pool . maxSize ) ; } return config ;
public class CmsSerialDateView {
    /**
     * Initialize the ui elements for the management part.
     */
    private void initManagementPart() {
        // Localized caption for the "manage exceptions" button.
        m_manageExceptionsButton.setText(Messages.get().key(Messages.GUI_SERIALDATE_BUTTON_MANAGE_EXCEPTIONS_0));
        // Float the button to the right edge of the management row.
        m_manageExceptionsButton.getElement().getStyle().setFloat(Style.Float.RIGHT);
    }
}
public class MatrixPrinter {
    /**
     * Prints the matrix using default control arguments. The matrix elements
     * are formatted using the specified <code>NumberFormat</code>.
     *
     * @param m the matrix to print
     * @param format the NumberFormat used to convert the matrix elements to string
     * @throws NullPointerException if either argument is <code>null</code>
     */
    public static void print(Matrix m, NumberFormat format) {
        // Wrap the NumberFormat in the library's formatter adapter and
        // delegate to the fully-parameterized overload.
        print(m, DEFAULT_CONTROL, new NumberFormatNumberFormatter(format));
    }
}
public class DBService { /** * Get { @ link DRow } object for the given row key . * the object will be returned even if the row has no columns or does not exist * @ param storeName Name of physical store to query . * @ return { @ link DRow } object . May be empty but not null . */ public Iterable < DRow > getAllRows ( String storeName ) { } }
return new SequenceIterable < DRow > ( new RowSequence ( m_tenant , storeName , 65536 ) ) ;
public class ObjectArrayList { /** * Applies a procedure to each element of the receiver , if any . * Starts at index 0 , moving rightwards . * @ param procedure the procedure to be applied . Stops iteration if the procedure returns < tt > false < / tt > , otherwise continues . * @ return < tt > false < / tt > if the procedure stopped before all elements where iterated over , < tt > true < / tt > otherwise . */ public boolean forEach ( ObjectProcedure procedure ) { } }
Object [ ] theElements = elements ; int theSize = size ; for ( int i = 0 ; i < theSize ; ) if ( ! procedure . apply ( theElements [ i ++ ] ) ) return false ; return true ;
public class ProxyImpl { /** * / * ( non - Javadoc ) * @ see javax . servlet . sip . Proxy # createProxyBranches ( java . util . List ) */ public List < ProxyBranch > createProxyBranches ( List < ? extends URI > targets ) { } }
ArrayList < ProxyBranch > list = new ArrayList < ProxyBranch > ( ) ; for ( URI target : targets ) { if ( target == null ) { throw new NullPointerException ( "URI can't be null" ) ; } if ( ! JainSipUtils . checkScheme ( target . toString ( ) ) ) { throw new IllegalArgumentException ( "Scheme " + target . getScheme ( ) + " is not supported" ) ; } if ( logger . isDebugEnabled ( ) ) { logger . debug ( "createProxyBranches with proxyTimerService=" + proxyTimerService ) ; } ProxyBranchImpl branch = new ProxyBranchImpl ( target , this ) ; branch . setRecordRoute ( recordRoutingEnabled ) ; branch . setRecurse ( recurse ) ; list . add ( branch ) ; this . proxyBranches . put ( target , branch ) ; } return list ;
public class AuditableSupport {
    /**
     * Assert the given {@link Object value} is not {@literal null}.
     *
     * @param <T> {@link Class type} of the {@link Object value}.
     * @param value {@link Object} to evaluate.
     * @param message {@link Supplier} containing the message used in the {@link IllegalArgumentException}.
     * @return the given {@link Object value} if not {@literal null}.
     * @throws IllegalArgumentException with the {@link Supplier message} if the {@link Object value} is {@literal null}.
     * @see java.util.function.Supplier
     */
    private <T> T assertNotNull(T value, Supplier<String> message) {
        // Assert.notNull throws IllegalArgumentException when value is null;
        // the message is built lazily from the supplier only on failure.
        Assert.notNull(value, message);
        return value;
    }
}
public class DocumentBlock {
    /**
     * getter for hasManyFontsizes - gets
     * @generated
     * @return value of the feature
     */
    public boolean getHasManyFontsizes() {
        // Generated UIMA accessor: verify the feature exists in the type
        // system before reading.
        if (DocumentBlock_Type.featOkTst && ((DocumentBlock_Type) jcasType).casFeat_hasManyFontsizes == null)
            jcasType.jcas.throwFeatMissing("hasManyFontsizes", "ch.epfl.bbp.uima.types.DocumentBlock");
        // Read the boolean straight from the low-level CAS at this annotation's address.
        return jcasType.ll_cas.ll_getBooleanValue(addr, ((DocumentBlock_Type) jcasType).casFeatCode_hasManyFontsizes);
    }
}
public class ConfigImpl {
    /**
     * Resolves a module id by applying alias resolution and has! loader plugin
     * resolution.
     *
     * @param name
     *            The module name to map. May specify plugins.
     * @param features
     *            Features that are defined in the request.
     * @param dependentFeatures
     *            Output - Set of feature names that the returned value is
     *            conditioned on. Used for cache management.
     * @param resolveAliases
     *            If true, then module name aliases will be resolved.
     * @param evaluateHasPluginConditionals
     *            If true, then attempt to evaluate the has plugin conditionals
     *            using the features provided in <code>features</code>,
     *            potentially eliminating the has! loader plugin from the
     *            returned value if all features can be resolved. If false, the
     *            conditionals are retained in the result, although the
     *            expression may change if new conditionals are introduced by
     *            alias resolution.
     * @param recursionCount
     *            Counter used to guard against runaway recursion.
     * @param sb
     *            If not null, a string buffer the resolver can use to emit
     *            debug/diagnostic information about the alias resolution, e.g.
     *            that resolution was skipped due to a missing required feature.
     * @return The resolved module id.
     */
    protected String _resolve(String name, Features features, Set<String> dependentFeatures, boolean resolveAliases, boolean evaluateHasPluginConditionals, int recursionCount, StringBuffer sb) {
        final String sourceMethod = "_resolve"; //$NON-NLS-1$
        boolean isTraceLogging = log.isLoggable(Level.FINER);
        if (isTraceLogging) {
            log.entering(ConfigImpl.class.getName(), sourceMethod, new Object[]{name, features, dependentFeatures, resolveAliases, evaluateHasPluginConditionals, recursionCount, sb});
        }
        String result = name;
        if (name != null && name.length() != 0) {
            // Hard stop for cyclic alias definitions.
            if (recursionCount >= MAX_RECURSION_COUNT) {
                throw new IllegalStateException("Excessive recursion in resolver: " + name); //$NON-NLS-1$
            }
            int idx = name.indexOf("!"); //$NON-NLS-1$
            if (idx != -1 && HAS_PATTERN.matcher(name.substring(0, idx)).find()) {
                // has! loader plugin: resolve the expression after the '!'.
                result = resolveHasPlugin(name.substring(idx + 1), features, dependentFeatures, resolveAliases, evaluateHasPluginConditionals, recursionCount + 1, sb);
                // Keep the "has!" prefix only while conditionals ('?') remain.
                result = result.contains("?") ? (name.substring(0, idx + 1) + result) : result; //$NON-NLS-1$
            } else if (resolveAliases && getAliases() != null) {
                if (idx != -1) { // non-has plugin
                    // If the module id specifies a plugin, then process each part individually.
                    Matcher m = plugins2.matcher(name);
                    StringBuffer sbResult = new StringBuffer();
                    while (m.find()) {
                        String replacement = _resolve(m.group(0), features, dependentFeatures, true, evaluateHasPluginConditionals, recursionCount + 1, sb);
                        m.appendReplacement(sbResult, replacement);
                    }
                    m.appendTail(sbResult);
                    result = sbResult.toString();
                }
                // Apply alias mappings to the (original) name and re-resolve if changed.
                String candidate = resolveAliases(name, features, dependentFeatures, sb);
                if (candidate != null && candidate.length() > 0 && !candidate.equals(name)) {
                    if (sb != null) {
                        sb.append(", ").append(MessageFormat.format( //$NON-NLS-1$
                                Messages.ConfigImpl_6, new Object[]{name + " --> " + candidate} //$NON-NLS-1$
                        ));
                    }
                    result = _resolve(candidate, features, dependentFeatures, true, evaluateHasPluginConditionals, recursionCount + 1, sb);
                }
            }
        }
        if (isTraceLogging) {
            log.exiting(ConfigImpl.class.getName(), sourceMethod, result);
        }
        return result;
    }
}
public class ConfigureJMeterMojo {
    /**
     * Copy an Artifact to a directory.
     *
     * @param artifactToCopy       Artifact that needs to be copied.
     * @param destinationDirectory Directory to copy the artifact to
     * @return true if artifact copied, false if artifact not copied
     * @throws MojoExecutionException Unable to copy file or resolve dependency, or unable to find artifact or unable to parse Artifact version
     */
    private boolean copyArtifactIfRequired(Artifact artifactToCopy, Path destinationDirectory) throws MojoExecutionException {
        // Skip artifacts whose file name matches an entry in the ignore list.
        for (String ignoredArtifact : ignoredArtifacts) {
            Artifact artifactToIgnore = getArtifactResult(new DefaultArtifact(ignoredArtifact));
            if (artifactToCopy.getFile().getName().equals(artifactToIgnore.getFile().getName())) {
                getLog().debug(artifactToCopy.getFile().getName() + " has not been copied over because it is in the ignore list.");
                return false;
            }
        }
        try {
            // If a matching artifact was copied earlier, keep whichever is newer.
            for (Iterator<Artifact> iterator = copiedArtifacts.iterator(); iterator.hasNext(); ) {
                Artifact alreadyCopiedArtifact = iterator.next();
                if (artifactsAreMatchingTypes(alreadyCopiedArtifact, artifactToCopy)) {
                    if (isArtifactIsOlderThanArtifact(alreadyCopiedArtifact, artifactToCopy)) {
                        Path artifactToDelete = Paths.get(destinationDirectory.toString(), alreadyCopiedArtifact.getFile().getName());
                        getLog().debug(String.format("Deleting file:'%s'", artifactToDelete));
                        // We delete the old artifact and remove it from the list of copied artifacts, the new artifact will be copied below
                        Files.deleteIfExists(artifactToDelete);
                        iterator.remove();
                        break;
                    } else {
                        // An equal or newer copy is already in place; nothing to do.
                        return false;
                    }
                }
            }
            Path desiredArtifact = Paths.get(destinationDirectory.toString(), artifactToCopy.getFile().getName());
            // Only copy if the target file is not already present.
            if (!desiredArtifact.toFile().exists()) {
                getLog().debug(String.format("Copying: %s to %s", desiredArtifact.toString(), destinationDirectory.toString()));
                Files.copy(Paths.get(artifactToCopy.getFile().getAbsolutePath()), desiredArtifact);
            }
        } catch (IOException | InvalidVersionSpecificationException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        }
        copiedArtifacts.add(artifactToCopy);
        return true;
    }
}
public class CATConsumer {
    /**
     * This method should be handled by the appropriate subclass.
     * Reaching this base implementation means the peer sent unlockAll to a
     * consumer type that does not support it, which is a protocol error.
     *
     * @param requestNumber the request number of the incoming unlockAll call
     */
    public void unlockAll(int requestNumber) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "unlockAll", requestNumber);
        // Build the protocol-error exception, record it via FFDC and log it
        // before exiting the trace scope.
        SIErrorException e = new SIErrorException(nls.getFormattedMessage("PROTOCOL_ERROR_SICO2003", null, null));
        FFDCFilter.processException(e, CLASS_NAME + ".unlockAll", CommsConstants.CATCONSUMER_UNLOCKALL_01, this);
        SibTr.error(tc, "PROTOCOL_ERROR_SICO2003", e);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "unlockAll");
        // Re-throw this exception so that the client will be informed if required
        throw e;
    }
}
public class PrefixedProperties { /** * Gets the prefixed key and parse it to an int [ ] < br > * Each comma - separated list can be used . If the key couldn ' t get found , the * default will be used . * @ param key * the key * @ param def * default value * @ return int [ ] */ public int [ ] getIntArray ( final String key , final int [ ] def ) { } }
try { final int [ ] result = getIntArray ( key ) ; return result != null ? result : def ; } catch ( final NumberFormatException nfe ) { return def ; }
public class CommunicationHandlerFactory { /** * Helper to return an instance of the { @ link ApacheCoreCommunicationHandler } . * If the { @ link ApacheCoreCommunicationHandler } cannot be initialized , return * a { @ link JavaCommunicationHandler } instance . * @ param url * @ param user * @ param pass * @ param sslSocketFactory * @ param verifier * @ param initialConnectionTimeout the initial connection timeout in milliseconds * @ return * @ throws InitializationException */ private static CommunicationHandler newHandlerAuto ( String url , String user , String pass , SSLSocketFactory sslSocketFactory , HostnameVerifier verifier , int initialConnectionTimeout ) throws InitializationException { } }
try { return new ApacheCoreCommunicationHandler ( url , user , pass , sslSocketFactory , verifier , initialConnectionTimeout ) ; } catch ( NoClassDefFoundError e ) { return new JavaCommunicationHandler ( url , user , pass , sslSocketFactory , verifier , initialConnectionTimeout ) ; }
import java . util . * ; class SortGivenArray { /** * Given a list of non - negative integers , returns a sorted copy of the original list . * The sort order depends on the sum of the first and last elements in the list : * - If the sum is odd , the array will be sorted in ascending order . * - If the sum is even , the array will be sorted in descending order . * Note : * * No mutation on the original list . * Examples : * * sort _ given _ array ( [ ] ) = > [ ] * * sort _ given _ array ( [ 5 ] ) = > [ 5] * * sort _ given _ array ( [ 2 , 4 , 3 , 0 , 1 , 5 ] ) = > [ 0 , 1 , 2 , 3 , 4 , 5] * * sort _ given _ array ( [ 2 , 4 , 3 , 0 , 1 , 5 , 6 ] ) = > [ 6 , 5 , 4 , 3 , 2 , 1 , 0] */ public static List < Integer > sortGivenArray ( List < Integer > nums ) { } }
if ( nums . size ( ) == 0 ) { return new ArrayList < > ( ) ; } else { if ( ( nums . get ( 0 ) + nums . get ( nums . size ( ) - 1 ) ) % 2 == 0 ) { Collections . sort ( nums , Collections . reverseOrder ( ) ) ; } else { Collections . sort ( nums ) ; } return nums ; }
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcValveTypeEnum() {
        // Lazily resolve the EEnum from the globally registered package; index
        // 925 is the generated position of IfcValveTypeEnum in the classifier list.
        if (ifcValveTypeEnumEEnum == null) {
            ifcValveTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(925);
        }
        return ifcValveTypeEnumEEnum;
    }
}
public class ThriftDataResultHelper { /** * Translates into thrift row . * @ param coscResultMap the cosc result map * @ param isCounterType the is counter type * @ param columnFamilyType the column family type * @ param row the row * @ return the thrift row */ public ThriftRow translateToThriftRow ( Map < ByteBuffer , List < ColumnOrSuperColumn > > coscResultMap , boolean isCounterType , Type columnFamilyType , ThriftRow row ) { } }
ColumnFamilyType columnType = ColumnFamilyType . COLUMN ; if ( isCounterType ) { if ( columnFamilyType . equals ( Type . SUPER_COLUMN_FAMILY ) ) { columnType = ColumnFamilyType . COUNTER_SUPER_COLUMN ; } else { columnType = ColumnFamilyType . COUNTER_COLUMN ; } } else if ( columnFamilyType . equals ( Type . SUPER_COLUMN_FAMILY ) ) { columnType = ColumnFamilyType . SUPER_COLUMN ; } transformThriftResultAndAddToList ( coscResultMap , columnType , row ) ; return row ;
public class LZ4DecompressorWithLength { /** * Returns the decompressed length of compressed data in < code > src [ srcOff : ] < / code > . * @ param src the compressed data * @ param srcOff the start offset in src * @ return the decompressed length */ public static int getDecompressedLength ( ByteBuffer src , int srcOff ) { } }
return ( src . get ( srcOff ) & 0xFF ) | ( src . get ( srcOff + 1 ) & 0xFF ) << 8 | ( src . get ( srcOff + 2 ) & 0xFF ) << 16 | src . get ( srcOff + 3 ) << 24 ;
public class Video { /** * Adds a comment * @ param comment the comment that should be added * @ return true if the comment was added , false otherwise * @ deprecated work on the { @ code Comments } object directly instead of using this method */ @ Deprecated public boolean addComment ( Comment comment ) { } }
if ( getComments ( ) != null ) { return getComments ( ) . addData ( comment ) ; } return false ;
public class XFunctionTypeRefImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<JvmTypeReference> getParamTypes() {
        // Standard EMF lazy initialization of a containment list for the
        // PARAM_TYPES feature.
        if (paramTypes == null) {
            paramTypes = new EObjectContainmentEList<JvmTypeReference>(JvmTypeReference.class, this, XtypePackage.XFUNCTION_TYPE_REF__PARAM_TYPES);
        }
        return paramTypes;
    }
}
public class TransactionException { /** * Thrown when { @ code type } has { @ code attributeType } as a Type # key ( AttributeType ) and a Type # has ( AttributeType ) */ public static TransactionException duplicateHas ( Type type , AttributeType attributeType ) { } }
return create ( ErrorMessage . CANNOT_BE_KEY_AND_ATTRIBUTE . getMessage ( type . label ( ) , attributeType . label ( ) ) ) ;
public class Batch { /** * The actual implementation of this , after we ' ve converted to proper Jackson JavaType */ private < T > GraphRequest < T > graph ( String object , JavaType type , Param ... params ) { } }
this . checkForBatchExecution ( ) ; // The data is transformed through a chain of wrappers GraphRequest < T > req = new GraphRequest < T > ( object , params , this . mapper , this . < T > createMappingChain ( type ) ) ; this . graphRequests . add ( req ) ; return req ;
public class PyGenerator {
    /**
     * Generate the given enumeration declaration.
     *
     * @param enumeration the enumeration.
     * @param it the receiver of the generated code.
     * @param context the context.
     * @return {@code true} if a declaration was generated. {@code false} if no enumeration was generated.
     * @since 0.8
     */
    protected boolean generateEnumerationDeclaration(SarlEnumeration enumeration, PyAppendable it, IExtraLanguageGeneratorContext context) {
        if (!Strings.isEmpty(enumeration.getName())) {
            // Emit the Python class header: class <Name>(<base>):
            it.append("class ").append(enumeration.getName()); //$NON-NLS-1$
            it.append("(Enum"); //$NON-NLS-1$
            // NOTE(review): both the literal "(Enum" above and the resolved type
            // for "enum.Enum" below are appended -- confirm this does not emit the
            // base class twice in the generated header.
            it.append(newType("enum.Enum")); //$NON-NLS-1$
            it.append("):"); //$NON-NLS-1$
            it.increaseIndentation().newLine();
            generateDocString(getTypeBuilder().getDocumentation(enumeration), it);
            // Assign each literal a sequential integer value.
            int i = 0;
            for (final XtendMember item : enumeration.getMembers()) {
                if (context.getCancelIndicator().isCanceled()) {
                    return false;
                }
                if (item instanceof XtendEnumLiteral) {
                    final XtendEnumLiteral literal = (XtendEnumLiteral) item;
                    it.append(literal.getName()).append(" = "); //$NON-NLS-1$
                    it.append(Integer.toString(i));
                    it.newLine();
                    ++i;
                }
            }
            it.decreaseIndentation().newLine().newLine();
            return true;
        }
        return false;
    }
}
public class BaseMessageRecordDesc { /** * Add the data description for this param . */ public MessageFieldDesc addMessageFieldDesc ( String strParam , Class < ? > classRawObject , boolean bRequired , Object objRawDefault ) { } }
return new MessageFieldDesc ( this , strParam , classRawObject , bRequired , objRawDefault ) ;
public class Presser { /** * Presses the soft keyboard search / next button . * @ param imeAction the action to be performed */ public void pressSoftKeyboard ( final int imeAction ) { } }
final EditText freshestEditText = viewFetcher . getFreshestView ( viewFetcher . getCurrentViews ( EditText . class , true ) ) ; if ( freshestEditText != null ) { inst . runOnMainSync ( new Runnable ( ) { public void run ( ) { freshestEditText . onEditorAction ( imeAction ) ; } } ) ; }
public class MPD9AbstractReader {
    /**
     * Reads the project properties from an MPD database row and copies each
     * recognised column onto the project's {@code ProjectProperties}.
     *
     * @param row project properties data
     */
    protected void processProjectProperties(Row row) {
        ProjectProperties properties = m_project.getProjectProperties();
        // Currency settings
        properties.setCurrencySymbol(row.getString("PROJ_OPT_CURRENCY_SYMBOL"));
        properties.setSymbolPosition(MPDUtility.getSymbolPosition(row.getInt("PROJ_OPT_CURRENCY_POSITION")));
        properties.setCurrencyDigits(row.getInteger("PROJ_OPT_CURRENCY_DIGITS"));
        //properties.setThousandsSeparator();
        //properties.setDecimalSeparator();
        // Duration/work entry defaults
        properties.setDefaultDurationUnits(MPDUtility.getDurationTimeUnits(row.getInt("PROJ_OPT_DUR_ENTRY_FMT")));
        //properties.setDefaultDurationIsFixed();
        properties.setDefaultWorkUnits(MPDUtility.getDurationTimeUnits(row.getInt("PROJ_OPT_WORK_ENTRY_FMT")));
        properties.setMinutesPerDay(row.getInteger("PROJ_OPT_MINUTES_PER_DAY"));
        properties.setMinutesPerWeek(row.getInteger("PROJ_OPT_MINUTES_PER_WEEK"));
        // Default rates are stored as hourly values.
        properties.setDefaultStandardRate(new Rate(row.getDouble("PROJ_OPT_DEF_STD_RATE"), TimeUnit.HOURS));
        properties.setDefaultOvertimeRate(new Rate(row.getDouble("PROJ_OPT_DEF_OVT_RATE"), TimeUnit.HOURS));
        properties.setUpdatingTaskStatusUpdatesResourceStatus(row.getBoolean("PROJ_OPT_TASK_UPDATES_RES"));
        properties.setSplitInProgressTasks(row.getBoolean("PROJ_OPT_SPLIT_IN_PROGRESS"));
        //properties.setDateOrder();
        //properties.setTimeFormat();
        properties.setDefaultStartTime(row.getDate("PROJ_OPT_DEF_START_TIME"));
        //properties.setDateSeparator();
        //properties.setTimeSeparator();
        //properties.setAmText();
        //properties.setPmText();
        //properties.setDateFormat();
        //properties.setBarTextDateFormat();
        // Summary / metadata properties
        properties.setProjectTitle(row.getString("PROJ_PROP_TITLE"));
        properties.setCompany(row.getString("PROJ_PROP_COMPANY"));
        properties.setManager(row.getString("PROJ_PROP_MANAGER"));
        properties.setDefaultCalendarName(row.getString("PROJ_INFO_CAL_NAME"));
        properties.setStartDate(row.getDate("PROJ_INFO_START_DATE"));
        properties.setFinishDate(row.getDate("PROJ_INFO_FINISH_DATE"));
        // The stored flag is inverted (1 - value) before the ScheduleFrom lookup.
        properties.setScheduleFrom(ScheduleFrom.getInstance(1 - row.getInt("PROJ_INFO_SCHED_FROM")));
        properties.setCurrentDate(row.getDate("PROJ_INFO_CURRENT_DATE"));
        //properties.setComments();
        //properties.setCost();
        //properties.setBaselineCost();
        //properties.setActualCost();
        //properties.setWork();
        //properties.setBaselineWork();
        //properties.setActualWork();
        //properties.setWork2();
        //properties.setDuration();
        //properties.setBaselineDuration();
        //properties.setActualDuration();
        //properties.setPercentageComplete();
        //properties.setBaselineStart();
        //properties.setBaselineFinish();
        //properties.setActualStart();
        //properties.setActualFinish();
        //properties.setStartVariance();
        //properties.setFinishVariance();
        properties.setSubject(row.getString("PROJ_PROP_SUBJECT"));
        properties.setAuthor(row.getString("PROJ_PROP_AUTHOR"));
        properties.setKeywords(row.getString("PROJ_PROP_KEYWORDS"));
        properties.setDefaultEndTime(row.getDate("PROJ_OPT_DEF_FINISH_TIME"));
        properties.setProjectExternallyEdited(row.getBoolean("PROJ_EXT_EDITED_FLAG"));
        properties.setCategory(row.getString("PROJ_PROP_CATEGORY"));
        properties.setDaysPerMonth(row.getInteger("PROJ_OPT_DAYS_PER_MONTH"));
        properties.setFiscalYearStart(row.getBoolean("PROJ_OPT_FY_USE_START_YR"));
        //properties.setDefaultTaskEarnedValueMethod();
        //properties.setRemoveFileProperties();
        //properties.setMoveCompletedEndsBack();
        properties.setNewTasksEstimated(row.getBoolean("PROJ_OPT_NEW_TASK_EST"));
        properties.setSpreadActualCost(row.getBoolean("PROJ_OPT_SPREAD_ACT_COSTS"));
        properties.setMultipleCriticalPaths(row.getBoolean("PROJ_OPT_MULT_CRITICAL_PATHS"));
        //properties.setAutoAddNewResourcesAndTasks();
        properties.setLastSaved(row.getDate("PROJ_LAST_SAVED"));
        properties.setStatusDate(row.getDate("PROJ_INFO_STATUS_DATE"));
        //properties.setMoveRemainingStartsBack();
        //properties.setAutolink();
        //properties.setMicrosoftProjectServerURL();
        properties.setHonorConstraints(row.getBoolean("PROJ_OPT_HONOR_CONSTRAINTS"));
        //properties.setAdminProject(row.getInt("PROJ_ADMINPROJECT") != 0); // Not in MPP9 MPD?
        //properties.setInsertedProjectsLikeSummary();
        properties.setName(row.getString("PROJ_NAME"));
        properties.setSpreadPercentComplete(row.getBoolean("PROJ_OPT_SPREAD_PCT_COMP"));
        //properties.setMoveCompletedEndsForward();
        //properties.setEditableActualCosts();
        //properties.setUniqueID();
        //properties.setRevision();
        properties.setNewTasksEffortDriven(row.getBoolean("PROJ_OPT_NEW_ARE_EFFORT_DRIVEN"));
        //properties.setMoveRemainingStartsForward();
        //properties.setActualsInSync(row.getInt("PROJ_ACTUALS_SYNCH") != 0); // Not in MPP9 MPD?
        properties.setDefaultTaskType(TaskType.getInstance(row.getInt("PROJ_OPT_DEF_TASK_TYPE")));
        //properties.setEarnedValueMethod();
        properties.setCreationDate(row.getDate("PROJ_CREATION_DATE"));
        //properties.setExtendedCreationDate(row.getDate("PROJ_CREATION_DATE_EX")); // Not in MPP9 MPD?
        properties.setDefaultFixedCostAccrual(AccrueType.getInstance(row.getInt("PROJ_OPT_DEF_FIX_COST_ACCRUAL")));
        properties.setCriticalSlackLimit(row.getInteger("PROJ_OPT_CRITICAL_SLACK_LIMIT"));
        //properties.setBaselineForEarnedValue;
        properties.setFiscalYearStartMonth(row.getInteger("PROJ_OPT_FY_START_MONTH"));
        //properties.setNewTaskStartIsProjectStart();
        // The stored day index is shifted by one before the Day lookup.
        properties.setWeekStartDay(Day.getInstance(row.getInt("PROJ_OPT_WEEK_START_DAY") + 1));
        //properties.setCalculateMultipleCriticalPaths();
        // NOTE(review): setMultipleCriticalPaths is called a second time here with
        // the same column as earlier -- confirm whether one call is redundant.
        properties.setMultipleCriticalPaths(row.getBoolean("PROJ_OPT_MULT_CRITICAL_PATHS"));

        // Unused attributes:
        // PROJ_OPT_CALC_ACT_COSTS
        // PROJ_POOL_ATTACHED_TO
        // PROJ_IS_RES_POOL
        // PROJ_OPT_CALC_SUB_AS_SUMMARY
        // PROJ_OPT_SHOW_EST_DUR
        // PROJ_OPT_EXPAND_TIMEPHASED
        // PROJ_PROJECT
        // PROJ_VERSION
        // PROJ_ENT_LIST_SEPARATOR
        // PROJ_EXT_EDITED_DUR
        // PROJ_EXT_EDITED_NUM
        // PROJ_EXT_EDITED_FLAG
        // PROJ_EXT_EDITED_CODE
        // PROJ_EXT_EDITED_TEXT
        // PROJ_IGNORE_FRONT_END
        // PROJ_EXT_EDITED
        // PROJ_DATA_SOURCE
        // PROJ_READ_ONLY
        // PROJ_READ_WRITE
        // PROJ_READ_COUNT
        // PROJ_LOCKED
        // PROJ_MACHINE_ID
        // PROJ_TYPE
        // PROJ_CHECKEDOUT
        // PROJ_CHECKEDOUTBY
        // PROJ_CHECKEDOUTDATE
        // RESERVED_BINARY_DATA
    }
}
public class AbstractTreeNode {
    /**
     * Builds the parents of node up to and including the root node, where the original node is the last element in the
     * returned array. The length of the returned array gives the node's depth in the tree.
     *
     * @param aNode the DefaultTreeNode to get the path for
     * @param depth an int giving the number of steps already taken towards the root (on recursive calls), used to size
     *              the returned array
     * @return an array of TreeNodes giving the path from the root to the specified node
     */
    private TreeNode[] getPathToRoot(final TreeNode aNode, final int depth) {
        TreeNode[] retNodes;
        // This method recurses, traversing towards the root in order to
        // size the array. On the way back, it fills in the nodes,
        // starting from the root and working back to the original node.
        // Check for null, in case someone passed in a null node, or they passed
        // in an element that isn't rooted at root.
        if (aNode == null) {
            if (depth == 0) {
                // Null start node with no steps taken: there is no path.
                return null;
            } else {
                // Walked past the root: allocate the result array sized to the
                // number of nodes visited.
                retNodes = new TreeNode[depth];
            }
        } else {
            int newDepth = depth + 1;
            retNodes = getPathToRoot(aNode.getParent(), newDepth);
            // Fill this node into its slot, counting back from the array end so
            // the root ends up first and the original node last.
            retNodes[retNodes.length - newDepth] = aNode;
        }
        return retNodes;
    }
}
public class RelativePath { /** * test the function */ public static void main ( String args [ ] ) { } }
String home = "/home/user1/content/myfolder" ; String file = "/home/user1/figures/fig.png" ; System . out . println ( "home = " + home ) ; System . out . println ( "file = " + file ) ; System . out . println ( "path = " + getRelativePath ( new File ( home ) , new File ( file ) ) ) ;
public class Hex { /** * Convert hex to bytes */ public static byte [ ] toBytes ( String hex ) { } }
if ( hex == null ) return null ; int len = hex . length ( ) ; byte [ ] bytes = new byte [ len / 2 ] ; int k = 0 ; for ( int i = 0 ; i < len ; i += 2 ) { int digit = 0 ; char ch = hex . charAt ( i ) ; if ( '0' <= ch && ch <= '9' ) digit = ch - '0' ; else if ( 'a' <= ch && ch <= 'f' ) digit = ch - 'a' + 10 ; else if ( 'A' <= ch && ch <= 'F' ) digit = ch - 'A' + 10 ; ch = hex . charAt ( i + 1 ) ; if ( '0' <= ch && ch <= '9' ) digit = 16 * digit + ch - '0' ; else if ( 'a' <= ch && ch <= 'f' ) digit = 16 * digit + ch - 'a' + 10 ; else if ( 'A' <= ch && ch <= 'F' ) digit = 16 * digit + ch - 'A' + 10 ; bytes [ k ++ ] = ( byte ) digit ; } return bytes ;
public class FSInputChecker { /** * / * verify checksum for the chunk . * @ throws ChecksumException if there is a mismatch */ private void verifySum ( long errPos ) throws ChecksumException { } }
long crc = getChecksum ( ) ; long sumValue = sum . getValue ( ) ; sum . reset ( ) ; if ( crc != sumValue ) { throw new ChecksumException ( "Checksum error: " + file + " at " + errPos , errPos ) ; }
public class CmsBeanTableBuilder { /** * Builds a table and uses the given beans to fill its rows . < p > * @ param beans the beans to display in the table * @ return the finished table */ public Table buildTable ( List < T > beans ) { } }
Table table = new Table ( ) ; buildTable ( table , beans ) ; return table ;
public class LToLongBiFuncMemento {
    /**
     * <editor-fold desc="object">
     * Null-safe structural equality between a memento and an arbitrary object.
     */
    public static boolean argEquals(LToLongBiFuncMemento the, Object that) {
        // Null.equals handles null/identity cases; the lambda performs the
        // type-specific comparison when both sides are non-null.
        return Null.<LToLongBiFuncMemento>equals(the, that, (one, two) -> {
            // Exact class match required: subclasses are never equal.
            if (one.getClass() != two.getClass()) {
                return false;
            }
            LToLongBiFuncMemento other = (LToLongBiFuncMemento) two;
            // Compare both the wrapped function and the memoized last value.
            return LObjLongPair.argEquals(one.function, one.lastValue(), other.function, other.lastValue());
        });
    }
}
public class MDRImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.MDR__RG:
                // Unsetting the RG feature clears its list contents.
                getRG().clear();
                return;
        }
        // All other features are handled by the superclass.
        super.eUnset(featureID);
    }
}
public class TermOfUsePanel { /** * Factory method for creating the new { @ link Component } for the liability . This method is * invoked in the constructor from the derived classes and can be overridden so users can * provide their own version of a new { @ link Component } for the liability . * @ param id * the id * @ param model * the model * @ return the new { @ link Component } for the liability */ protected Component newLiabilityPanel ( final String id , final IModel < HeaderContentListModelBean > model ) { } }
return new LiabilityPanel ( id , Model . of ( model . getObject ( ) ) ) ;
public class Reflecter { /** * Populate the JavaBeans properties of this delegate object , based on the specified name / value pairs * @ param properties * @ return */ public < V > Reflecter < T > populate ( Map < String , V > properties ) { } }
return populate ( properties , new String [ ] { } ) ;
public class ReceiptTemplate {
    /**
     * Build and get message as a string.
     * Assembles the Messenger receipt-template JSON payload by hand from the
     * previously configured fields; optional fields are emitted only when set.
     *
     * NOTE(review): field values are concatenated into the JSON without any
     * escaping -- quotes or backslashes in user-supplied values would produce
     * invalid JSON; confirm inputs are sanitised upstream.
     *
     * @return String the final message body
     */
    public String build() {
        this.message_string = "{";
        if (this.recipient_id != null) {
            this.message_string += "\"recipient\": {\"id\": \"" + this.recipient_id + "\"},";
        }
        // Fixed envelope: message -> attachment -> template payload of type "receipt".
        this.message_string += "\"message\": {";
        this.message_string += "\"attachment\":{";
        this.message_string += "\"type\":\"template\",";
        this.message_string += "\"payload\":{";
        this.message_string += "\"template_type\":\"receipt\",";
        // Optional top-level receipt fields, emitted only when configured.
        if (this.recipient_name != null) {
            this.message_string += "\"recipient_name\":\"" + this.recipient_name + "\",";
        }
        if (this.order_number != null) {
            this.message_string += "\"order_number\":\"" + this.order_number + "\",";
        }
        if (this.currency != null) {
            this.message_string += "\"currency\":\"" + this.currency + "\",";
        }
        if (this.payment_method != null) {
            this.message_string += "\"payment_method\":\"" + this.payment_method + "\",";
        }
        if (this.order_url != null) {
            this.message_string += "\"order_url\":\"" + this.order_url + "\",";
        }
        if (this.timestamp != null) {
            this.message_string += "\"timestamp\":\"" + this.timestamp + "\",";
        }
        // Line items. The replaceAll(",$", "") calls strip the trailing comma
        // left by the last emitted field/object before closing a scope.
        if (!this.elements.isEmpty()) {
            this.message_string += "\"elements\":[";
            for (int j = 0; j < this.elements.size(); j++) {
                HashMap<String, String> element = this.elements.get(j);
                this.message_string += "{";
                if (!element.get("title").equals("")) {
                    this.message_string += "\"title\":\"" + element.get("title") + "\",";
                }
                if (!element.get("subtitle").equals("")) {
                    this.message_string += "\"subtitle\":\"" + element.get("subtitle") + "\",";
                }
                // quantity and price are emitted unquoted (JSON numbers).
                if (!element.get("quantity").equals("")) {
                    this.message_string += "\"quantity\":" + element.get("quantity") + ",";
                }
                if (!element.get("price").equals("")) {
                    this.message_string += "\"price\":" + element.get("price") + ",";
                }
                if (!element.get("currency").equals("")) {
                    this.message_string += "\"currency\":\"" + element.get("currency") + "\",";
                }
                if (!element.get("image_url").equals("")) {
                    this.message_string += "\"image_url\":\"" + element.get("image_url") + "\",";
                }
                this.message_string = this.message_string.replaceAll(",$", "");
                this.message_string += "},";
            }
            this.message_string = this.message_string.replaceAll(",$", "");
            this.message_string += "],";
        }
        // Optional shipping address object.
        if (!this.address.isEmpty()) {
            this.message_string += "\"address\":{";
            if (!this.address.get("street_1").equals("")) {
                this.message_string += "\"street_1\":\"" + this.address.get("street_1") + "\",";
            }
            if (!this.address.get("street_2").equals("")) {
                this.message_string += "\"street_2\":\"" + this.address.get("street_2") + "\",";
            }
            if (!this.address.get("city").equals("")) {
                this.message_string += "\"city\":\"" + this.address.get("city") + "\",";
            }
            if (!this.address.get("postal_code").equals("")) {
                this.message_string += "\"postal_code\":\"" + this.address.get("postal_code") + "\",";
            }
            if (!this.address.get("state").equals("")) {
                this.message_string += "\"state\":\"" + this.address.get("state") + "\",";
            }
            if (!this.address.get("country").equals("")) {
                this.message_string += "\"country\":\"" + this.address.get("country") + "\",";
            }
            this.message_string = this.message_string.replaceAll(",$", "");
            this.message_string += "},";
        }
        // Optional payment summary object (all numeric, unquoted).
        if (!this.summary.isEmpty()) {
            this.message_string += "\"summary\":{";
            if (!this.summary.get("subtotal").equals("")) {
                this.message_string += "\"subtotal\":" + this.summary.get("subtotal") + ",";
            }
            if (!this.summary.get("shipping_cost").equals("")) {
                this.message_string += "\"shipping_cost\":" + this.summary.get("shipping_cost") + ",";
            }
            if (!this.summary.get("total_tax").equals("")) {
                this.message_string += "\"total_tax\":" + this.summary.get("total_tax") + ",";
            }
            if (!this.summary.get("total_cost").equals("")) {
                this.message_string += "\"total_cost\":" + this.summary.get("total_cost") + ",";
            }
            this.message_string = this.message_string.replaceAll(",$", "");
            this.message_string += "},";
        }
        // Optional adjustments array (name quoted, amount numeric).
        if (!this.adjustments.isEmpty()) {
            this.message_string += "\"adjustments\":[";
            for (int j = 0; j < this.adjustments.size(); j++) {
                HashMap<String, String> adjustment = this.adjustments.get(j);
                this.message_string += "{";
                if (!adjustment.get("name").equals("")) {
                    this.message_string += "\"name\":\"" + adjustment.get("name") + "\",";
                }
                if (!adjustment.get("amount").equals("")) {
                    this.message_string += "\"amount\":" + adjustment.get("amount") + ",";
                }
                this.message_string = this.message_string.replaceAll(",$", "");
                this.message_string += "},";
            }
            this.message_string = this.message_string.replaceAll(",$", "");
            this.message_string += "],";
        }
        // Strip any trailing comma, then close payload, attachment, message, root.
        this.message_string = this.message_string.replaceAll(",$", "");
        this.message_string += "}";
        this.message_string += "}";
        this.message_string += "}";
        this.message_string += "}";
        return this.message_string;
    }
}
public class IndexCache {
    /**
     * This method gets the index information for the given mapId and reduce.
     * It reads the index file into cache if it is not already present.
     *
     * @param mapId
     * @param reduce
     * @param fileName The file to read the index information from if it is not
     *                 already present in the cache
     * @return The Index Information
     * @throws IOException
     */
    public IndexRecord getIndexInformation(String mapId, int reduce, Path fileName) throws IOException {
        IndexInformation info = cache.get(mapId);
        if (info == null) {
            // Cache miss: load the index file and publish it in the cache.
            info = readIndexFileToCache(fileName, mapId);
        } else {
            // Another thread may still be loading this entry; block until its
            // spill record has been published (set under the same monitor).
            synchronized (info) {
                while (null == info.mapSpillRecord) {
                    try {
                        info.wait();
                    } catch (InterruptedException e) {
                        throw new IOException("Interrupted waiting for construction", e);
                    }
                }
            }
            LOG.debug("IndexCache HIT: MapId " + mapId + " found");
        }
        // NOTE(review): this bounds check uses '<' -- when size() == reduce the
        // request passes validation even though 'reduce' would be one past the
        // last zero-based entry; confirm whether '<=' was intended.
        if (info.mapSpillRecord.size() == 0 || info.mapSpillRecord.size() < reduce) {
            throw new IOException("Invalid request " + " Map Id = " + mapId + " Reducer = " + reduce + " Index Info Length = " + info.mapSpillRecord.size());
        }
        return info.mapSpillRecord.getIndex(reduce);
    }
}
public class PeriodDuration { /** * Gets the value of the requested unit . * This returns a value for the supported units - { @ link ChronoUnit # YEARS } , * { @ link ChronoUnit # MONTHS } , { @ link ChronoUnit # DAYS } , { @ link ChronoUnit # SECONDS } * and { @ link ChronoUnit # NANOS } . * All other units throw an exception . * Note that hours and minutes throw an exception . * @ param unit the { @ code TemporalUnit } for which to return the value * @ return the long value of the unit * @ throws UnsupportedTemporalTypeException if the unit is not supported */ @ Override public long get ( TemporalUnit unit ) { } }
if ( unit instanceof ChronoUnit ) { switch ( ( ChronoUnit ) unit ) { case YEARS : return period . getYears ( ) ; case MONTHS : return period . getMonths ( ) ; case DAYS : return period . getDays ( ) ; case SECONDS : return duration . getSeconds ( ) ; case NANOS : return duration . getNano ( ) ; default : break ; } } throw new UnsupportedTemporalTypeException ( "Unsupported unit: " + unit ) ;
public class EventsHandler { /** * Executes the event using a deferred command . */ private < H extends EventHandler > void scheduleEvent ( final DomEvent < H > event ) { } }
DeferredCommand . addCommand ( new Command ( ) { public void execute ( ) { onEvent ( event ) ; } } ) ;
public class ByteUtilities { /** * Encodes an integer into up to 4 bytes in network byte order . * @ param num the int to convert to a byte array * @ param count the number of reserved bytes for the write operation * @ return the resulting byte array */ public static byte [ ] intToNetworkByteOrder ( int num , int count ) { } }
byte [ ] buf = new byte [ count ] ; intToNetworkByteOrder ( num , buf , 0 , count ) ; return buf ;
public class Responder { /** * Read the KeyStore information supplied from the responder configuration file . * @ return the KeyStore object * @ throws KeyStoreException if there is an issue reading the keystore * @ throws NoSuchAlgorithmException if the truststore type is incorrect * @ throws CertificateException if there is some issues reading in the certs * @ throws IOException if there is an issue reading the keystore */ private KeyStore getClientTruststore ( ) throws KeyStoreException , NoSuchAlgorithmException , CertificateException , IOException { } }
// Instantiate a KeyStore of the configured type (falling back to the JVM
// default type), then load it from the configured truststore file.
KeyStore ks;
String truststoreType = _config.getTruststoreType();
if (truststoreType != null && !truststoreType.isEmpty()) {
    try {
        ks = KeyStore.getInstance(truststoreType);
    } catch (KeyStoreException e) {
        // Surface the misconfiguration to the caller, but leave a hint in the log.
        LOGGER.warn("The specified truststore type [" + truststoreType + "] didn't work.", e);
        throw e;
    }
} else {
    ks = KeyStore.getInstance(KeyStore.getDefaultType());
}
// get user password and file input stream
char[] password = _config.getTruststorePassword().toCharArray();
// try-with-resources guarantees the stream is closed even when load()
// fails, without an IOException from close() masking the original failure
// (the previous finally-based close could do exactly that).
try (java.io.FileInputStream fis = new java.io.FileInputStream(_config.geTruststoreFilename())) {
    ks.load(fis, password);
}
return ks;
public class ResultHandler { /** * Primary method that will continually check for new IO events * to process . */ public void runEventProcessingLoop ( ) { } }
// Main I/O completion loop: repeatedly pulls completed async I/O events
// from the native AIO provider for this completion port and completes the
// corresponding futures, until shutdown or the port becomes invalid.
final long port = this.completionPort;
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
    Tr.entry(tc, "runEventProcessingLoop: " + port + " " + Thread.currentThread());
}
final IAsyncProvider provider = AsyncLibrary.getInstance();
final int size = this.batchSize;
final int jitSize = 8192;
final int timeout = AsyncProperties.completionTimeout;
final WsByteBufferPoolManager wsByteBufferManager = ChannelFrameworkFactory.getBufferManager();
final boolean jitCapable = provider.hasCapability(IAsyncProvider.CAP_JIT_BUFFERS);
final boolean batchCapable = (1 < size);
long[] compKeyAddrs = new long[size];
long[] jitBufferAddressBatch = new long[size];
WsByteBuffer[] wsBBBatch = new WsByteBuffer[size];
int keysReady = 0;
CompletionKey completionKey = null;
AsyncFuture future;
int rc = 0;
long numBytes = 0L;
// Acquire (or create) one CompletionKey per batch slot and record its
// native address for the provider calls below.
CompletionKey[] compKeys = new CompletionKey[size];
for (int i = 0; i < size; i++) {
    compKeys[i] = (CompletionKey) AsyncLibrary.completionKeyPool.get();
    if (compKeys[i] != null) {
        // Initialize the IOCB obtained from the pool
        compKeys[i].initializePoolEntry(0, 0);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Handler got CompletionKey from Pool:" + compKeys[i]);
        }
    } else {
        compKeys[i] = new CompletionKey();
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Handler newed up CompletionKey:" + compKeys[i]);
        }
    }
    compKeyAddrs[i] = compKeys[i].getAddress();
}
try {
    // loop until we should not do any more
    while (true) {
        // see if we are shutting down, access directly for better performance
        if (AsyncLibrary.aioInitialized == AsyncLibrary.AIO_SHUTDOWN) {
            // leave immediately if we are shutting down
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "AIO library is shutdown, ending loop");
            }
            break;
        }
        future = null;
        keysReady = 0;
        this.handlersWaiting.getAndIncrement();
        // get the completion keys ready for use
        if (jitCapable) {
            for (int i = 0; i < size; i++) {
                // reset the completionKey for next request. Commenting this out
                // for performance reasons. It is driving 4 put calls per IOCD.
                // The native code will now reset() between calls.
                // compKeys[i].reset();
                // see if we need to allocate a jit buffer
                if (jitBufferAddressBatch[i] == 0) {
                    wsBBBatch[i] = wsByteBufferManager.allocateDirect(jitSize);
                    ByteBuffer bb = wsBBBatch[i].getWrappedByteBufferNonSafe();
                    jitBufferAddressBatch[i] = AbstractAsyncChannel.getBufAddress(bb);
                    compKeys[i].setJITBufferUsed();
                    compKeys[i].setBuffer(jitBufferAddressBatch[i], jitSize, 0);
                }
            }
        }
        // Blocks until an async call completes, or the timeout occurs
        try {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "waiting for native event port=" + port);
            }
            if (batchCapable) {
                keysReady = provider.getCompletionData3(compKeyAddrs, size, timeout, port);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "(batch) events: " + keysReady);
                }
            } else {
                boolean gotData = provider.getCompletionData2(compKeyAddrs[0], timeout, port);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "(no batch) events: " + gotData);
                }
                if (gotData) {
                    keysReady = 1;
                    // Not used below... Commenting out for performance
                    // compKeys[0].setReturnStatus(1);
                } else {
                    keysReady = 0;
                    // Not used below... Commenting out for performance
                    // compKeys[0].setReturnStatus(0);
                }
            }
        } catch (AsyncException exception) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Error getting IO completion event: " + exception);
            }
            FFDCFilter.processException(exception, getClass().getName() + ".runEventProcessingLoop", "331", this);
        } finally {
            this.handlersWaiting.getAndDecrement();
        }
        // if we didn't get data, go back to the top of the loop and try again
        if (keysReady == 0) {
            // make sure this completion port didn't get closed, and then AIO
            // restarted while this thread was in the native AIO code
            if ((AsyncLibrary.aioInitialized != AsyncLibrary.AIO_SHUTDOWN)) {
                if (!provider.isCompletionPortValid(port)) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Completion port not valid");
                    }
                    break;
                }
            } else {
                // AIO shutdown, so break out now
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "AIO Shutdown detected, break out");
                }
                break;
            }
            // Timeout path. Just loop back.
            continue;
        } // end-no-events-received
        else if (this.handlersWaiting.get() == 0) {
            // if we have work to process and nobody else is currently
            // calling into the native code, start an extra handler now
            // (if allowed)
            startHandler();
        }
        this.numItemsFromNative += keysReady;
        // Process each completed event in the batch.
        for (int j = keysReady - 1; j >= 0; j--) {
            completionKey = compKeys[j];
            // if we get here (no timeout and no exception), we should have a
            // valid completion key
            completionKey.postNativePrep();
            long callid = completionKey.getCallIdentifier();
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "batch index: " + j + " call id: " + Long.toHexString(callid) + " channel id: " + completionKey.getChannelIdentifier());
                Tr.debug(tc, "completionKey: " + completionKey);
            }
            // The call identifier packs the channel index in the low 32 bits
            // and the future index in the high 32 bits.
            int channelIndex = (int) (callid & 0x00000000FFFFFFFF);
            int futureIndex = (int) (callid >> 32);
            AbstractAsyncChannel theChannel = AbstractAsyncChannel.getChannelFromIndex(channelIndex);
            if (theChannel == null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Could not find channel, possibly closed " + completionKey);
                }
                continue;
            }
            future = theChannel.getFutureFromIndex(futureIndex);
            if (future == null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Completion event could not find future " + completionKey);
                }
                AsyncException exception = new AsyncException("Future not found");
                FFDCFilter.processException(exception, getClass().getName() + ".runEventProcessingLoop", "142", this);
                continue;
            }
            if (jitCapable && completionKey.wasJITBufferUsed()) {
                // set the JIT buffer value in the future
                future.setJITBuffer(wsBBBatch[j]);
                jitBufferAddressBatch[j] = 0; // reset so new jit buffer will be allocated for next call
            } else {
                future.setJITBuffer(null);
            }
            rc = completionKey.getReturnCode();
            numBytes = completionKey.getBytesAffected();
            // check the event to see if it is for synchronous request
            if (futureIndex == AbstractAsyncChannel.SYNC_READ_FUTURE_INDEX || futureIndex == AbstractAsyncChannel.SYNC_WRITE_FUTURE_INDEX) {
                // notify waiting sync request
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Processing Sync Request rc: " + rc);
                }
                if (rc == 0) {
                    // Defend against erroneous combination of values returned from
                    // native
                    if (numBytes == 0L) {
                        IOException ioe = new IOException("Async IO operation failed, internal error");
                        future.completed(ioe);
                        continue;
                    }
                    // Mark the future as completed
                    future.setCancelInProgress(0);
                    future.completed(numBytes);
                } else {
                    if (future.getCancelInProgress() == 1) {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Cancel is in progress, ignoring");
                        }
                        future.setCancelInProgress(0);
                    } else {
                        future.completed(AsyncLibrary.getIOException("Async IO operation failed (3), reason: ", rc));
                    }
                }
                continue; // sync response has been processed
            }
            // signal the IO completion
            // Check to see if the IO operation succeeded
            if (rc == 0) {
                future.setCancelInProgress(0);
                future.completed(numBytes);
            } else {
                if (future.getCancelInProgress() == 1) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Cancel is in progress, ignoring");
                    }
                    future.setCancelInProgress(0);
                } else {
                    future.completed(AsyncLibrary.getIOException("Async IO operation failed (2), reason: ", rc));
                }
            }
        } // end the batch loop
    } // end of "while (true) {"
} catch (Throwable t) {
    if (AsyncLibrary.aioInitialized != AsyncLibrary.AIO_SHUTDOWN) {
        FFDCFilter.processException(t, getClass().getName() + ".runEventProcessingLoop", "792", this);
        throw new RuntimeException(t);
    }
} finally {
    // Decrement the number of in flight handlers, since this one is exiting
    this.numHandlersInFlight.decrementInt();
    // The completion processing thread is being shut down or failed
    // if jit buffer was allocated, release it
    for (int k = 0; k < size; k++) {
        if (jitBufferAddressBatch[k] != 0) {
            wsBBBatch[k].release();
        }
    }
    // Put the CompletionKeys into the pool for reuse. They contain a DirectByteBuffer (WsByteBuffer).
    for (int i = 0; i < size; i++) {
        if (compKeys[i] != null) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Handler ending, pooling CompletionKey:\n" + compKeys[i]);
            }
            AsyncLibrary.completionKeyPool.put(compKeys[i]);
        }
    }
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
    Tr.exit(tc, "runEventProcessingLoop: " + port + " " + Thread.currentThread());
}
public class StylesheetHandler { /** * Push the current stylesheet being constructed . If no other stylesheets * have been pushed onto the stack , assume the argument is a stylesheet * root , and also set the stylesheet root member . * @ param s non - null reference to a stylesheet . */ public void pushStylesheet ( Stylesheet s ) { } }
if ( m_stylesheets . size ( ) == 0 ) m_stylesheetRoot = ( StylesheetRoot ) s ; m_stylesheets . push ( s ) ;
public class TypeQualifierDataflowAnalysis { /** * Get the set of SourceSinkInfo objects representing sources / sinks at a * given Location . * @ param location * a Location * @ return Set of SourceSinkInfo objects representing sources / sinks at the * Location */ public Set < SourceSinkInfo > getSourceSinkInfoSet ( Location location ) { } }
// Look up the sources/sinks recorded for this location; an absent entry
// means there are none, reported as the shared empty set.
Set<SourceSinkInfo> infoSet = sourceSinkMap.get(location);
if (infoSet == null) {
    return Collections.<SourceSinkInfo>emptySet();
}
return infoSet;
public class DiagonalMatrix { /** * { @ inheritDoc } * Note that any values are not on the diagonal are ignored . */ public void setRow ( int row , DoubleVector vector ) { } }
checkIndices ( row , vector . length ( ) - 1 ) ; values [ row ] = vector . get ( row ) ;
public class SqlBuilder { /** * 限制查询条数的SQL * @ param query SQL语句 * @ param offset SQL查询偏移记录数 * @ param length SQL目标数量 * @ param dbType 数据库类型 oralce , mysql * @ return 限制查询条数的SQL */ public static String getLimitSQL ( String query , long offset , long length , String dbType ) { } }
// A null query stays null; everything else is trimmed first.
if (query == null) {
    return null;
}
query = query.trim();
// Only SELECT statements can be wrapped with pagination; anything else is
// returned unchanged. regionMatches(true, ...) is a case-insensitive,
// stdlib-only prefix test (replaces commons-lang StringUtils).
if (!query.regionMatches(true, 0, "select", 0, 6)) {
    return query;
}
boolean isForUpdate = false;
if (query.toLowerCase().endsWith(" for update")) {
    // Strip the trailing " for update" (11 chars); it is re-appended after
    // the wrapping subquery so the lock applies to the outer statement.
    query = query.substring(0, query.length() - 11);
    isForUpdate = true;
}
StringBuilder pagingSelect = new StringBuilder(query.length() + 100);
if (dbType.equalsIgnoreCase("oracle")) {
    // Oracle: emulate OFFSET/LIMIT with ROWNUM; a second nesting level is
    // only needed when an offset is requested.
    if (offset > 0) {
        pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( ");
        pagingSelect.append(query);
        pagingSelect.append(String.format(" ) row_ where rownum <= %d) where rownum_ > %d", length + offset, offset));
    } else {
        pagingSelect.append("select * from ( ");
        pagingSelect.append(query);
        pagingSelect.append(String.format(" ) where rownum <= %d", length));
    }
} else if (dbType.equalsIgnoreCase("mysql")) {
    pagingSelect.append("select * from ( ");
    pagingSelect.append(query);
    pagingSelect.append(String.format(" ) t limit %d, %d", offset, length));
} else {
    // Unknown database type: previously this fell through and returned an
    // empty (or " for update"-only) string. Return the original query
    // unpaginated instead.
    pagingSelect.append(query);
}
if (isForUpdate) {
    pagingSelect.append(" for update");
}
return pagingSelect.toString();
public class TreeBin { /** * Checks invariants recursively for the tree of Nodes rooted at t . */ static < K , V > boolean checkInvariants ( TreeNode < K , V > t ) { } }
// Verify the doubly-linked-list, binary-search-tree and red-black
// structural invariants for the subtree rooted at t.
TreeNode<K, V> parent = t.parent;
TreeNode<K, V> left = t.left;
TreeNode<K, V> right = t.right;
TreeNode<K, V> prev = t.prev;
TreeNode<K, V> next = (TreeNode<K, V>) t.next;
// prev/next list links must be mutually consistent
if (prev != null && prev.next != t) {
    return false;
}
if (next != null && next.prev != t) {
    return false;
}
// t must actually be a child of its recorded parent
if (parent != null && t != parent.left && t != parent.right) {
    return false;
}
// children must point back to t and respect the hash ordering
if (left != null && (left.parent != t || left.hash > t.hash)) {
    return false;
}
if (right != null && (right.parent != t || right.hash < t.hash)) {
    return false;
}
// a red node must not have two red children
if (t.red && left != null && left.red && right != null && right.red) {
    return false;
}
// recurse into both subtrees
if (left != null && !checkInvariants(left)) {
    return false;
}
if (right != null && !checkInvariants(right)) {
    return false;
}
return true;
public class JCRAPIAspect { /** * Gives the corresponding statistics for the given target class and AspectJ signature * @ param target the target { @ link Class } * @ param signature the AspectJ signature * @ return the related { @ link Statistics } or < code > null < / code > if it cannot be found */ private static Statistics getStatistics ( Class < ? > target , String signature ) { } }
// Lazily initialize the statistics registry on first use.
initIfNeeded();
// Fast path: the signature has already been resolved. MAPPING is an
// immutable snapshot, so reading it without a lock is safe.
Statistics statistics = MAPPING.get(signature);
if (statistics == null) {
    // Slow path: resolve the signature once under a lock, then publish a
    // new immutable snapshot of the cache (copy-on-write).
    synchronized (JCRAPIAspect.class) {
        Class<?> interfaceClass = findInterface(target);
        if (interfaceClass != null) {
            Map<String, Statistics> allStatistics = ALL_STATISTICS.get(interfaceClass.getSimpleName());
            if (allStatistics != null) {
                // Extract "methodName(args...)" from the fully-qualified
                // AspectJ signature by cutting at the last '.' before '('.
                int index1 = signature.indexOf('(');
                int index = signature.substring(0, index1).lastIndexOf('.');
                String name = signature.substring(index + 1);
                statistics = allStatistics.get(name);
            }
        }
        // Cache a sentinel for unknown signatures so they are not
        // re-resolved on every call.
        if (statistics == null) {
            statistics = UNKNOWN;
        }
        Map<String, Statistics> tempMapping = new HashMap<String, Statistics>(MAPPING);
        tempMapping.put(signature, statistics);
        MAPPING = Collections.unmodifiableMap(tempMapping);
    }
}
// The UNKNOWN sentinel is internal only; callers see null instead.
if (statistics == UNKNOWN) // NOSONAR
{
    return null;
}
return statistics;
public class LRSigner { /** * Encodes the provided message with the private key and pass phrase set in configuration * @ param message Message to encode * @ return Encoded message * @ throws LRException SIGNING _ FAILED if the document cannot be signed , NO _ KEY if the key cannot be obtained */ private String signEnvelopeData ( String message ) throws LRException { } }
// Throw an exception if any of the required fields are null
if (passPhrase == null || publicKeyLocation == null || privateKey == null) {
    throw new LRException(LRException.NULL_FIELD);
}
// Get an InputStream for the private key
InputStream privateKeyStream = getPrivateKeyStream(privateKey);
// Get an OutputStream for the result (ASCII-armored clear-signed output)
ByteArrayOutputStream result = new ByteArrayOutputStream();
ArmoredOutputStream aOut = new ArmoredOutputStream(result);
// Get the pass phrase
char[] privateKeyPassword = passPhrase.toCharArray();
try {
    // Get the private key from the InputStream and decrypt it with the
    // pass phrase.
    PGPSecretKey sk = readSecretKey(privateKeyStream);
    PGPPrivateKey pk = sk.extractPrivateKey(new JcePBESecretKeyDecryptorBuilder().setProvider("BC").build(privateKeyPassword));
    // SHA-256 content signer matching the secret key's own algorithm.
    PGPSignatureGenerator sGen = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(sk.getPublicKey().getAlgorithm(), PGPUtil.SHA256).setProvider("BC"));
    PGPSignatureSubpacketGenerator spGen = new PGPSignatureSubpacketGenerator();
    // Clear sign the message, recording the key's first user id (if any)
    // as the signer.
    java.util.Iterator it = sk.getPublicKey().getUserIDs();
    if (it.hasNext()) {
        spGen.setSignerUserID(false, (String) it.next());
        sGen.setHashedSubpackets(spGen.generate());
    }
    aOut.beginClearText(PGPUtil.SHA256);
    sGen.init(PGPSignature.CANONICAL_TEXT_DOCUMENT, pk);
    // NOTE(review): message.getBytes() uses the platform default charset,
    // while the result is decoded as UTF-8 below — assumed to match;
    // confirm the JVM runs with a UTF-8 default charset.
    byte[] msg = message.getBytes();
    sGen.update(msg, 0, msg.length);
    aOut.write(msg, 0, msg.length);
    BCPGOutputStream bOut = new BCPGOutputStream(aOut);
    aOut.endClearText();
    sGen.generate().encode(bOut);
    aOut.close();
    String strResult = result.toString("utf8");
    // for whatever reason, bouncycastle is failing to put a linebreak
    // before "-----BEGIN PGP SIGNATURE-----", so patch it in by hand
    strResult = strResult.replaceAll("([a-z0-9])-----BEGIN PGP SIGNATURE-----", "$1\n-----BEGIN PGP SIGNATURE-----");
    return strResult;
} catch (Exception e) {
    // Any failure in the PGP pipeline is surfaced as a single signing error.
    throw new LRException(LRException.SIGNING_FAILED);
} finally {
    try {
        if (privateKeyStream != null) {
            privateKeyStream.close();
        }
        result.close();
    } catch (IOException e) {
        // Could not close the streams
    }
}
public class ApplicationHealthIndicator { /** * { @ inheritDoc } */ @ Override public final Health health ( ) { } }
// Report the application as UP, attaching the build version and
// geocoding-cache statistics as health details.
final Builder healthBuilder = Health.up();
healthBuilder.withDetail("version", appInfo.getVersion());
final Map<String, Object> cacheDetails = new HashMap<>();
cacheDetails.put("size", gcc.size());
cacheDetails.put("geocoded", gcc.size() - gcc.countNotFound());
healthBuilder.withDetail("cache", cacheDetails);
return healthBuilder.build();
public class BlockBigArray { /** * Sets the element of this big array at specified index . * @ param index a position in this big array . */ public void set ( long index , Block value ) { } }
// Replace the element at the given index while keeping sizeOfBlocks (the
// total retained size of all referenced block parts) accurate via
// reference counting in trackedObjects.
Block currentValue = array.get(index);
if (currentValue != null) {
    // Release the outgoing value's contribution to the tracked size.
    currentValue.retainedBytesForEachPart((object, size) -> {
        if (currentValue == object) {
            // track instance size separately as the reference count for an instance is always 1
            sizeOfBlocks -= size;
            return;
        }
        if (trackedObjects.decrementAndGet(object) == 0) {
            // decrement the size only when it is the last reference
            sizeOfBlocks -= size;
        }
    });
}
if (value != null) {
    // Account for the incoming value's parts.
    value.retainedBytesForEachPart((object, size) -> {
        if (value == object) {
            // track instance size separately as the reference count for an instance is always 1
            sizeOfBlocks += size;
            return;
        }
        if (trackedObjects.incrementAndGet(object) == 1) {
            // increment the size only when it is the first reference
            sizeOfBlocks += size;
        }
    });
}
array.set(index, value);
public class JDKLogger { /** * returns an array ( class , method ) of the caller before our logger class in the stack */ protected String [ ] getCaller ( ) { } }
// Capture the current stack and locate the first frame belonging to this
// logger class; the first non-logger frame after it is the real caller.
final StackTraceElement[] frames = (new Throwable()).getStackTrace();
final String loggerClassname = getClass().getName();
int idx = 0;
// advance to the first frame inside the logger itself
while (idx < frames.length && !frames[idx].getClassName().equals(loggerClassname)) {
    idx++;
}
// skip an extra frame... we call ourselves
idx++;
// skip any remaining logger frames; the next one is the caller
for (; idx < frames.length; idx++) {
    final StackTraceElement frame = frames[idx];
    final String frameClass = frame.getClassName();
    if (!frameClass.equals(loggerClassname)) {
        return new String[] { frameClass, frame.getMethodName() };
    }
}
// no caller outside the logger was found
return new String[] { "", "" };
public class SimpleCacheImpl { /** * as we ' ll replace the old value when it ' s expired */ private boolean isNull ( InternalCacheEntry < K , V > entry ) { } }
// Treat a missing entry as null; an expired entry is also treated as null
// (the caller will replace the old value when it's expired), after
// notifying listeners of the expiration.
if (entry == null) {
    return true;
} else if (entry.canExpire()) {
    if (entry.isExpired(timeService.wallClockTime())) {
        // Fire the expiration event synchronously (join) so listeners run
        // before the caller overwrites the entry.
        if (cacheNotifier.hasListener(CacheEntryExpired.class)) {
            CompletionStages.join(cacheNotifier.notifyCacheEntryExpired(entry.getKey(), entry.getValue(), entry.getMetadata(), ImmutableContext.INSTANCE));
        }
        return true;
    }
}
return false;
public class CLIContext { /** * Loads properties from the given stream . * This will close the stream . * @ param stream The stream to load from . * @ param path The path represented by the stream . */ private void loadProperties ( InputStream stream , String path ) { } }
// Nothing to do when the resource could not be opened.
if (stream == null) {
    return;
}
try {
    Properties props = new Properties();
    props.load(stream);
    // Copy every loaded entry into the context's property map.
    // stringPropertyNames/getProperty replaces the raw keySet iteration;
    // values loaded from a stream are always strings.
    for (String key : props.stringPropertyNames()) {
        _properties.put(key, props.getProperty(key));
    }
} catch (Exception e) {
    Console.warn("Unable to load properties file [" + path + "].");
} finally {
    // stream is known non-null here (checked above), so close it
    // unconditionally; failure to close is only worth a warning.
    try {
        stream.close();
    } catch (Exception e) {
        Console.warn("Unable to close properties file [" + path + "].");
    }
}
public class VueGWTTools { /** * Proxy a method call to be warned when it called . This requires the function to be JsInterop * ( name shouldn ' t change at runtime ) . This used to observe Java Collections / Map . It won ' t be * necessary in future versions of Vue . js based on ES6 proxies . * @ param object The object to observe * @ param methodName The name of the method to proxify * @ param afterMethodCall A callback called each time after the method has been executed * @ param < T > Type of the object the we Proxy */ public static < T > void wrapMethod ( T object , String methodName , AfterMethodCall < T > afterMethodCall ) { } }
// Replace the named method on the object with a wrapper that invokes the
// original implementation and then notifies the callback with the result.
JsPropertyMap objectMap = (JsPropertyMap) object;
final Function originalMethod = (Function) objectMap.get(methodName);
WrappingFunction proxy = args -> {
    Object callResult = originalMethod.apply(object, args);
    afterMethodCall.execute(object, methodName, callResult, args);
    return callResult;
};
objectMap.set(methodName, proxy);
public class OStringSerializerAnyRuntime { /** * Re - Create any object if the class has a public constructor that accepts a String as unique parameter . */ public Object fromStream ( final String iStream ) { } }
if (iStream == null || iStream.length() == 0)
    // NULL VALUE
    return null;
// The stream is expected to be "<class-name><SEPARATOR><payload>".
int pos = iStream.indexOf(OStreamSerializerHelper.SEPARATOR);
if (pos < 0)
    // NOTE(review): this appears to rely on OLogManager.error(...) throwing
    // the given OSerializationException; if it only logged, the substring
    // below would fail with pos == -1 — confirm OLogManager semantics.
    OLogManager.instance().error(this, "Class signature not found in ANY element: " + iStream, OSerializationException.class);
final String className = iStream.substring(0, pos);
try {
    // Re-create the object via its public single-String constructor.
    Class<?> clazz = Class.forName(className);
    return clazz.getDeclaredConstructor(String.class).newInstance(iStream.substring(pos + 1));
} catch (Exception e) {
    OLogManager.instance().error(this, "Error on unmarshalling content. Class: " + className, e, OSerializationException.class);
}
// Reached only if construction failed (or error() did not throw).
return null;
public class JavacElements { /** * Returns the tree for an annotation given the annotated element * and the element ' s own tree . Returns null if the tree cannot be found . */ private JCTree matchAnnoToTree ( AnnotationMirror findme , Element e , JCTree tree ) { } }
// Collect the annotation list from the element's own tree node, then match
// the requested mirror against the symbol's raw attributes.
Symbol sym = cast(Symbol.class, e);
// Visitor that records the package/modifier annotations of whichever tree
// node kind it visits; other node kinds leave result null.
class AnnotationCollector extends JCTree.Visitor {
    List<JCAnnotation> result = null;

    public void visitTopLevel(JCCompilationUnit tree) {
        result = tree.packageAnnotations;
    }

    public void visitClassDef(JCClassDecl tree) {
        result = tree.mods.annotations;
    }

    public void visitMethodDef(JCMethodDecl tree) {
        result = tree.mods.annotations;
    }

    public void visitVarDef(JCVariableDecl tree) {
        result = tree.mods.annotations;
    }
}
AnnotationCollector collector = new AnnotationCollector();
tree.accept(collector);
if (collector.result == null) {
    return null;
}
List<Attribute.Compound> annos = sym.getRawAttributes();
return matchAnnoToTree(cast(Attribute.Compound.class, findme), annos, collector.result);
public class CmsLuceneField { /** * Returns the String value state of this field if it is indexed ( and possibly tokenized ) in the Lucene index . < p > * @ return the String value state of this field if it is indexed ( and possibly tokenized ) in the Lucene index * @ see # isTokenizedAndIndexed ( ) * @ see # isIndexed ( ) */ @ Override public String getIndexed ( ) { } }
// Tokenized-and-indexed fields report "true"; indexed-but-untokenized
// fields report the special untokenized marker; unindexed fields "false".
if (isTokenizedAndIndexed()) {
    return Boolean.toString(true);
}
if (isIndexed()) {
    return STR_UN_TOKENIZED;
}
return Boolean.toString(false);
public class DialogUtils { /** * Show a model dialog box . The < code > android . app . AlertDialog < / code > object is returned so that * you can specify an OnDismissListener ( or other listeners ) if required . * < b > Note : < / b > show ( ) is already called on the AlertDialog being returned . * @ param context The current Context or Activity that this method is called from . * @ param message Message to display in the dialog . * @ return AlertDialog that is being displayed . */ public static AlertDialog quickDialog ( final Activity context , final String message ) { } }
final SpannableString s = new SpannableString ( message ) ; // Make links clickable Linkify . addLinks ( s , Linkify . ALL ) ; final Builder builder = new AlertDialog . Builder ( context ) ; builder . setMessage ( s ) ; builder . setPositiveButton ( android . R . string . ok , closeDialogListener ( ) ) ; AlertDialog dialog = builder . create ( ) ; if ( ! context . isFinishing ( ) ) { dialog . show ( ) ; final TextView textView = ( TextView ) dialog . findViewById ( android . R . id . message ) ; if ( textView != null ) { textView . setMovementMethod ( LinkMovementMethod . getInstance ( ) ) ; // Make links clickable } } return dialog ;
public class CPRuleAssetCategoryRelPersistenceImpl { /** * Returns a range of all the cp rule asset category rels . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPRuleAssetCategoryRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of cp rule asset category rels * @ param end the upper bound of the range of cp rule asset category rels ( not inclusive ) * @ return the range of cp rule asset category rels */ @ Override public List < CPRuleAssetCategoryRel > findAll ( int start , int end ) { } }
// Delegate to the three-argument overload with no ORDER BY comparator;
// the result set then falls back to primary-key ascending order.
return findAll(start, end, null);
public class PHPDriverHelper { /** * < p > copyFile . < / p > * @ param is a { @ link java . io . InputStream } object . * @ param deleteOnExit a boolean . * @ return a { @ link java . io . File } object . * @ throws java . io . IOException if any . */ public static File copyFile ( InputStream is , boolean deleteOnExit ) throws IOException { } }
File f = File . createTempFile ( "php" , ".php" ) ; if ( deleteOnExit ) { f . deleteOnExit ( ) ; } FileWriter fw = new FileWriter ( f ) ; copyFile ( new BufferedReader ( new InputStreamReader ( is ) ) , new BufferedWriter ( fw ) ) ; is . close ( ) ; fw . close ( ) ; return f ;
public class DeleteTapeArchiveRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteTapeArchiveRequest deleteTapeArchiveRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Guard against a null request before serializing its fields.
if (deleteTapeArchiveRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Only the tape ARN is carried by this request.
    protocolMarshaller.marshall(deleteTapeArchiveRequest.getTapeARN(), TAPEARN_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure in the SDK's client exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class SourceNode { /** * Walk over the tree of JS snippets in this node and its children . The walking function is called once for each snippet of JS and is passed that * snippet and the its original associated source ' s line / column location . * @ param aFn * The traversal function . */ public void walk ( ChuckWalker walker ) { } }
// Visit every chunk in order: recurse into child SourceNodes and hand each
// non-empty string snippet to the walker along with this node's original
// source position.
final int childCount = this.children.size();
for (int index = 0; index < childCount; index++) {
    final Object chunk = this.children.get(index);
    if (chunk instanceof SourceNode) {
        ((SourceNode) chunk).walk(walker);
    } else {
        final String snippet = (String) chunk;
        if (snippet.length() != 0) {
            walker.walk(snippet, new OriginalPosition(this.line, this.column, this.source, this.name));
        }
    }
}
public class RecommendationsInner { /** * Get past recommendations for an app , optionally specified by the time range . * Get past recommendations for an app , optionally specified by the time range . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; RecommendationInner & gt ; object */ public Observable < Page < RecommendationInner > > listHistoryForWebAppNextAsync ( final String nextPageLink ) { } }
// Unwrap each service response so callers see only the page body.
final Func1<ServiceResponse<Page<RecommendationInner>>, Page<RecommendationInner>> extractBody =
    new Func1<ServiceResponse<Page<RecommendationInner>>, Page<RecommendationInner>>() {
        @Override
        public Page<RecommendationInner> call(ServiceResponse<Page<RecommendationInner>> response) {
            return response.body();
        }
    };
return listHistoryForWebAppNextWithServiceResponseAsync(nextPageLink).map(extractBody);
public class VirtualNetworkGatewaysInner { /** * The Set VpnclientIpsecParameters operation sets the vpnclient ipsec policy for P2S client of virtual network gateway in the specified resource group through Network resource provider . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayName The name of the virtual network gateway . * @ param vpnclientIpsecParams Parameters supplied to the Begin Set vpnclient ipsec parameters of Virtual Network Gateway P2S client operation through Network resource provider . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ServiceResponse < VpnClientIPsecParametersInner > > setVpnclientIpsecParametersWithServiceResponseAsync ( String resourceGroupName , String virtualNetworkGatewayName , VpnClientIPsecParametersInner vpnclientIpsecParams ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( virtualNetworkGatewayName == null ) { throw new IllegalArgumentException ( "Parameter virtualNetworkGatewayName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( vpnclientIpsecParams == null ) { throw new IllegalArgumentException ( "Parameter vpnclientIpsecParams is required and cannot be null." ) ; } Validator . validate ( vpnclientIpsecParams ) ; final String apiVersion = "2018-08-01" ; Observable < Response < ResponseBody > > observable = service . setVpnclientIpsecParameters ( resourceGroupName , virtualNetworkGatewayName , this . client . subscriptionId ( ) , vpnclientIpsecParams , apiVersion , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPostOrDeleteResultAsync ( observable , new TypeToken < VpnClientIPsecParametersInner > ( ) { } . getType ( ) ) ;
public class XmlParser {

    /**
     * Parse an element-content model.
     * <pre>
     * [47] elements ::= (choice | seq) ('?' | '*' | '+')?
     * [49] choice   ::= '(' S? cp ( S? '|' S? cp )+ S? ')'
     * [50] seq      ::= '(' S? cp ( S? ',' S? cp )* S? ')'
     * </pre>
     * NOTE: the opening '(' and S have already been read.
     *
     * @param saved buffer for the entity that should contain the terminal ')';
     *              used to validate proper group/parameter-entity nesting
     */
    private void parseElements(char[] saved) throws Exception {
        char c;
        char sep;

        // Parse the first content particle.
        skipWhitespace();
        parseCp();

        // Check for end of the group or for a separator.
        skipWhitespace();
        c = readCh();
        switch (c) {
        case ')':
            // Single-particle group closed immediately.
            // VC: Proper Group/PE Nesting — the ')' must come from the same
            // buffer that supplied the '(' (i.e. the same entity).
            if (readBuffer != saved) {
                handler.verror("Illegal Group/PE nesting");
            }
            dataBufferAppend(')');
            // Optional occurrence indicator directly after the group.
            c = readCh();
            switch (c) {
            case '*':
            case '+':
            case '?':
                dataBufferAppend(c);
                break;
            default:
                // Not an indicator; push the character back for the caller.
                unread(c);
            }
            return;
        case ',':  // Register the separator (seq).
        case '|':  // Register the separator (choice).
            sep = c;
            dataBufferAppend(c);
            break;
        default:
            fatal("bad separator in content model", c, null);
            return;
        }

        // Parse the rest of the content model; every particle must be
        // followed by the same separator recorded above, or the closing ')'.
        while (true) {
            skipWhitespace();
            parseCp();
            skipWhitespace();
            c = readCh();
            if (c == ')') {
                // VC: Proper Group/PE Nesting (same check as above).
                if (readBuffer != saved) {
                    handler.verror("Illegal Group/PE nesting");
                }
                dataBufferAppend(')');
                break;
            } else if (c != sep) {
                // Mixing ',' and '|' within one group is illegal.
                fatal("bad separator in content model", c, null);
                return;
            } else {
                dataBufferAppend(c);
            }
        }

        // Check for the occurrence indicator after the closing ')'.
        c = readCh();
        switch (c) {
        case '?':
        case '*':
        case '+':
            dataBufferAppend(c);
            return;
        default:
            // No indicator; return the character to the input stream.
            unread(c);
            return;
        }
    }
}
public class ZMQ { /** * Sending functions . */ public static int send ( SocketBase s , String str , int flags ) { } }
byte [ ] data = str . getBytes ( CHARSET ) ; return send ( s , data , data . length , flags ) ;
public class AbstractMetric { /** * Auto reports zero if the metric hasn ' t been modified */ protected void autoReportZero ( ) { } }
MetricCollector collector = MetricManager . getCollector ( ) ; if ( collector != null ) { collector . autoReportZero ( identity ) ; }
public class Iso8601Format {

    /**
     * Interprets the given ISO-8601-compatible date text in either the
     * <i>basic</i> format or the <i>extended</i> format.
     *
     * <p>The variant (calendar / ordinal / week date, basic / extended) is
     * detected by scanning for hyphens and a 'W' marker, then the matching
     * pre-built parser is dispatched to.</p>
     *
     * @param iso  text like &quot;20160101&quot;, &quot;2016001&quot;, &quot;2016W011&quot;,
     *             &quot;2016-01-01&quot;, &quot;2016-001&quot; or &quot;2016-W01-1&quot;
     * @param plog new mutable instance of {@code ParseLog}
     * @return PlainDate or {@code null} in case of error
     * @throws IndexOutOfBoundsException if the start position is at end of text or even behind
     * @see ParseLog#isError()
     * @since 3.22/4.18
     */
    public static PlainDate parseDate(CharSequence iso, ParseLog plog) {
        int hyphens = 0;
        int n = iso.length();
        int start = plog.getPosition();
        int len = n - start;

        // Shortest valid form is 7 chars (e.g. "2016001").
        if (len < 7) {
            plog.setError(n, "Too short to be compatible with ISO-8601: " + iso.subSequence(start, n));
            return null;
        }

        // Scan for format markers. Starting at start+1 deliberately skips a
        // leading '+'/'-' sign so it is not counted as a separator hyphen.
        LOOP:
        for (int i = start + 1; i < n; i++) {
            switch (iso.charAt(i)) {
            case '-':
                // leading sign is ignored, see initial loop index
                hyphens++;
                break;
            case 'W':
                // Week date: hyphens decide basic vs. extended variant.
                return ((hyphens > 0) ? EXTENDED_WEEK_DATE.parse(iso, plog) : BASIC_WEEK_DATE.parse(iso, plog));
            case 'T':
            case '/':
                // Time or interval part follows; only the date prefix counts.
                len = i - start;
                break LOOP;
            default:
                // continue
            }
        }

        if (hyphens == 0) {
            // Basic format: distinguish ordinal vs. calendar date by the
            // number of digits remaining after the year.
            len -= 4; // year length is usually 4 digits
            char c = iso.charAt(start);
            if ((c == '+') || (c == '-')) {
                len -= 2; // concerns years with at least 5 digits or more
            }
            return ((len == 3) ? BASIC_ORDINAL_DATE.parse(iso, plog) : BASIC_CALENDAR_DATE.parse(iso, plog));
        } else if (hyphens == 1) {
            // One separator => extended ordinal date (yyyy-ddd).
            return EXTENDED_ORDINAL_DATE.parse(iso, plog);
        } else {
            // Two separators => extended calendar date (yyyy-mm-dd).
            return EXTENDED_CALENDAR_DATE.parse(iso, plog);
        }
    }
}
public class AstUtil { /** * Tells you if the given ASTNode is a VariableExpression with the given name . * @ param expression * any AST Node * @ param pattern * a string pattern to match * @ return * true if the node is a variable with the specified name */ public static boolean isVariable ( ASTNode expression , String pattern ) { } }
return ( expression instanceof VariableExpression && ( ( VariableExpression ) expression ) . getName ( ) . matches ( pattern ) ) ;
public class FisExporter { /** * Returns a string representation of the ` [ Output ] ` configuration * @ param engine is the engine * @ return a string representation of the ` [ Output ] ` configuration */ public String exportOutputs ( Engine engine ) { } }
StringBuilder result = new StringBuilder ( ) ; for ( int i = 0 ; i < engine . numberOfOutputVariables ( ) ; ++ i ) { OutputVariable outputVariable = engine . getOutputVariable ( i ) ; result . append ( String . format ( "[Output%d]\n" , i + 1 ) ) ; result . append ( String . format ( "Name='%s'\n" , outputVariable . getName ( ) ) ) ; result . append ( String . format ( "Range=[%s %s]\n" , Op . str ( outputVariable . getMinimum ( ) ) , Op . str ( outputVariable . getMaximum ( ) ) ) ) ; result . append ( String . format ( "NumMFs=%d\n" , outputVariable . numberOfTerms ( ) ) ) ; for ( int t = 0 ; t < outputVariable . numberOfTerms ( ) ; ++ t ) { Term term = outputVariable . getTerm ( t ) ; result . append ( String . format ( "MF%d=%s\n" , t + 1 , toString ( term ) ) ) ; } result . append ( "\n" ) ; } return result . toString ( ) ;
public class DatabaseMetaData { /** * { @ inheritDoc } */ public ResultSet getColumnPrivileges ( final String catalog , final String schema , final String table , final String columnNamePattern ) throws SQLException { } }
return RowLists . rowList8 ( String . class , String . class , String . class , String . class , String . class , String . class , String . class , String . class ) . withLabel ( 1 , "TABLE_CAT" ) . withLabel ( 2 , "TABLE_SCHEM" ) . withLabel ( 3 , "TABLE_NAME" ) . withLabel ( 4 , "COLUMN_NAME" ) . withLabel ( 5 , "GRANTOR" ) . withLabel ( 6 , "GRANTEE" ) . withLabel ( 7 , "PRIVILEGE" ) . withLabel ( 8 , "IS_GRANTABLE" ) . resultSet ( ) ;
public class AbstractResource { /** * Add a link with the given reference . * @ param ref The reference * @ param link The link * @ return This JsonError */ public Impl link ( @ Nullable CharSequence ref , @ Nullable Link link ) { } }
if ( StringUtils . isNotEmpty ( ref ) && link != null ) { List < Link > links = this . linkMap . computeIfAbsent ( ref , charSequence -> new ArrayList < > ( ) ) ; links . add ( link ) ; } return ( Impl ) this ;
public class MethodReflectUtil { /** * 根据 { @ link net . jueb . util4j . proxy . methodProxy . AnnotationTag } 属性匹配方法 * @ param target * @ param tag * @ return */ public static Method findMethodByAnnotationTag ( Object target , String tag ) { } }
Method [ ] methods = target . getClass ( ) . getDeclaredMethods ( ) ; for ( Method m : methods ) { AnnotationTag function = m . getAnnotation ( AnnotationTag . class ) ; if ( function != null && function . tag ( ) . equals ( tag ) ) { return m ; } } return null ;
public class SuperPositionQCP { /** * Superposition coords2 onto coords1 - - in other words , coords2 is rotated , * coords1 is held fixed */ private void calcTransformation ( ) { } }
// transformation . set ( rotmat , new Vector3d ( 0,0,0 ) , 1 ) ; transformation . set ( rotmat ) ; // long t2 = System . nanoTime ( ) ; // System . out . println ( " create transformation : " + ( t2 - t1 ) ) ; // System . out . println ( " m3d - > m4d " ) ; // System . out . println ( transformation ) ; // combine with x - > origin translation Matrix4d trans = new Matrix4d ( ) ; trans . setIdentity ( ) ; trans . setTranslation ( new Vector3d ( xtrans ) ) ; transformation . mul ( transformation , trans ) ; // System . out . println ( " setting xtrans " ) ; // System . out . println ( transformation ) ; // combine with origin - > y translation ytrans . negate ( ) ; Matrix4d transInverse = new Matrix4d ( ) ; transInverse . setIdentity ( ) ; transInverse . setTranslation ( new Vector3d ( ytrans ) ) ; transformation . mul ( transInverse , transformation ) ; // System . out . println ( " setting ytrans " ) ; // System . out . println ( transformation ) ;
public class FOPServlet {

    /**
     * Renders an XML file into a PDF file by applying a stylesheet that
     * converts the XML to XSL-FO, writing the result to the HTTP response.
     *
     * @param xml      the XML file
     * @param xslt     the XSLT file
     * @param response HTTP response object
     * @throws FOPException         if an error occurs during the rendering of the XSL-FO
     * @throws TransformerException if an error occurs during XSL transformation
     * @throws IOException          in case of an I/O problem
     */
    protected void renderXML(String xml, String xslt, HttpServletResponse response) throws FOPException, TransformerException, IOException {
        // Turn both inputs into javax.xml.transform sources.
        final Source xmlSource = convertString2Source(xml);
        final Source xsltSource = convertString2Source(xslt);

        // Build the transformer from the stylesheet and hook up URI resolution.
        final Transformer transformer = this.transFactory.newTransformer(xsltSource);
        transformer.setURIResolver(this.uriResolver);

        // Run transformation + FOP rendering into the response.
        render(xmlSource, transformer, response);
    }
}