signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class InternalPartitionServiceImpl { /** * Called on the master node to publish the current partition state to all cluster nodes . It will not publish the partition * state if the partitions have not yet been initialized , there is ongoing repartitioning or a node is joining the cluster . */ @ SuppressWarnings ( "checkstyle:npathcomplexity" ) void publishPartitionRuntimeState ( ) { } }
if ( ! partitionStateManager . isInitialized ( ) ) { // do not send partition state until initialized ! return ; } if ( ! node . isMaster ( ) ) { return ; } if ( ! areMigrationTasksAllowed ( ) ) { // migration is disabled because of a member leave , wait till enabled ! return ; } PartitionRuntimeState partitionState = createPartitionStateInternal ( ) ; if ( partitionState == null ) { return ; } if ( logger . isFineEnabled ( ) ) { logger . fine ( "Publishing partition state, version: " + partitionState . getVersion ( ) ) ; } PartitionStateOperation op = new PartitionStateOperation ( partitionState , false ) ; OperationService operationService = nodeEngine . getOperationService ( ) ; Collection < Member > members = node . clusterService . getMembers ( ) ; for ( Member member : members ) { if ( ! member . localMember ( ) ) { try { operationService . send ( op , member . getAddress ( ) ) ; } catch ( Exception e ) { logger . finest ( e ) ; } } }
public class UrlHelper { /** * Returns the baseUri of the request : http : / / www . example . com : 8080 / example * @ param request the current HttpServletRequest * @ return base uri including protocol , host , port and context path */ public static String baseUriOf ( final HttpServletRequest request ) { } }
final StringBuffer requestUrl = request . getRequestURL ( ) ; return requestUrl != null ? requestUrl . substring ( 0 , requestUrl . indexOf ( request . getServletPath ( ) ) ) : "" ;
public class Streams { /** * Repeat in a Stream until specified predicate holds * < pre > * { @ code * count = 0; * assertThat ( Streams . cycleUntil ( Stream . of ( 1,2,2,3) * , next - > count + + > 10 ) * . collect ( CyclopsCollectors . toList ( ) ) , equalTo ( Arrays . asList ( 1 , 2 , 2 , 3 , 1 , 2 , 2 , 3 , 1 , 2 , 2 ) ) ) ; * < / pre > * @ param predicate * repeat while true * @ return Repeating Stream */ public final static < T > Stream < T > cycleUntil ( final Stream < T > stream , final Predicate < ? super T > predicate ) { } }
return Streams . takeUntil ( Streams . cycle ( stream ) , predicate ) ;
public class AVIMFileMessage { /** * 判断是不是通过外部设置 url 来的 AVFile * @ param avFile * @ return */ static boolean isExternalAVFile ( AVFile avFile ) { } }
return null != avFile && null != avFile . getMetaData ( ) && avFile . getMetaData ( ) . containsKey ( "__source" ) && avFile . getMetaData ( ) . get ( "__source" ) . equals ( "external" ) ;
public class RedisURI { /** * Create a Redis URI from host and port . * @ param host the host * @ param port the port * @ return An instance of { @ link RedisURI } containing details from the { @ code host } and { @ code port } . */ public static RedisURI create ( String host , int port ) { } }
return Builder . redis ( host , port ) . build ( ) ;
public class JpaAttribute { public String getGeneratedValueAnnotation ( ) { } }
if ( useSequenceNameShortcut ( ) ) { return getGeneratedValueForSequenceNameByConfiguration ( ) ; } if ( attribute . getColumnConfig ( ) . useConfigForIdGenerator ( ) ) { // TODO : explain why we do not directly check attribute . getColumnConfig ( ) . hasGeneratedValue ( ) in the if statement above . return getGeneratedValueAnnotationByConfiguration ( ) ; } else if ( attribute . getAutoIncrement ( ) == TRUE && ! attribute . isSimpleFk ( ) ) { // the jdbc driver supports IS _ AUTOINCREMENT metadata , great ! // if it is an fk , we do not want @ GeneratedValue because we use instead @ MapsId on the association . . . addImport ( "javax.persistence.GeneratedValue" ) ; addImport ( "static javax.persistence.GenerationType.IDENTITY" ) ; return "@GeneratedValue(strategy = IDENTITY)" ; } else if ( attribute . getAutoIncrement ( ) == FALSE && /* 32 length string are special for us */ ! attribute . isString ( ) ) { // the jdbc driver supports IS _ AUTOINCREMENT metadata , great ! return "" ; } else { // the jdbc driver does not support IS _ AUTOINCREMENT // fall back to convention return getGeneratedValueAnnotationByConvention ( ) ; }
public class Objects2 { /** * Performs emptiness and nullness check . * @ see # checkNotEmpty ( Object , String , Object . . . ) */ public static < T > T checkNotEmpty ( T reference , Object errorMessage ) { } }
checkNotNull ( reference , "Expected not null object, got '%s'" , reference ) ; checkNotEmpty ( reference , String . valueOf ( errorMessage ) , Arrays2 . EMPTY_ARRAY ) ; return reference ;
public class SettingsPack { /** * Sets the maximum number of bytes a connection may have pending in the disk * write queue before its download rate is being throttled . This prevents * fast downloads to slow medias to allocate more memory indefinitely . * This should be set to at least 16 kB to not completely disrupt normal * downloads . If it ' s set to 0 , you will be starving the disk thread and * nothing will be written to disk . this is a per session setting . * When this limit is reached , the peer connections will stop reading * data from their sockets , until the disk thread catches up . Setting * this too low will severely limit your download rate . * @ param value */ public SettingsPack maxQueuedDiskBytes ( int value ) { } }
sp . set_int ( settings_pack . int_types . max_queued_disk_bytes . swigValue ( ) , value ) ; return this ;
public class MultipartProcessor { /** * Adds the final boundary to the multipart message and closes streams . */ public void finish ( ) throws IOException { } }
writer . append ( "--" + boundary + "--" ) . append ( LINE_BREAK ) ; writer . flush ( ) ; writer . close ( ) ; outputStream . flush ( ) ; outputStream . close ( ) ;
public class FibonacciHeap { /** * Decrease the key of a node . */ private void decreaseKeyWithComparator ( Node < K , V > n , K newKey ) { } }
int c = comparator . compare ( newKey , n . key ) ; if ( c > 0 ) { throw new IllegalArgumentException ( "Keys can only be decreased!" ) ; } n . key = newKey ; if ( c == 0 ) { return ; } if ( n . next == null ) { throw new IllegalArgumentException ( "Invalid handle!" ) ; } // if not root and heap order violation Node < K , V > y = n . parent ; if ( y != null && comparator . compare ( n . key , y . key ) < 0 ) { cut ( n , y ) ; cascadingCut ( y ) ; } // update minimum root if ( comparator . compare ( n . key , minRoot . key ) < 0 ) { minRoot = n ; }
public class JsonObject { /** * Returns the value mapped by { @ code name } if it exists and is a { @ code * JsonArray } , or null otherwise . */ public JsonArray optJsonArray ( String name ) { } }
JsonElement el = null ; try { el = get ( name ) ; } catch ( JsonException e ) { return null ; } if ( ! el . isJsonArray ( ) ) { return null ; } return el . asJsonArray ( ) ;
public class AbstractDatabase { /** * The main save method . */ private boolean save ( Document document , boolean deletion , ConcurrencyControl concurrencyControl ) throws CouchbaseLiteException { } }
if ( deletion && ! document . exists ( ) ) { throw new CouchbaseLiteException ( "Cannot delete a document that has not yet been saved." , CBLError . Domain . CBLITE , CBLError . Code . NOT_FOUND ) ; } C4Document curDoc = null ; C4Document newDoc = null ; synchronized ( lock ) { mustBeOpen ( ) ; prepareDocument ( document ) ; boolean commit = false ; beginTransaction ( ) ; try { try { newDoc = save ( document , null , deletion ) ; commit = true ; } catch ( LiteCoreException e ) { if ( ! ( e . domain == C4Constants . ErrorDomain . LITE_CORE && e . code == C4Constants . LiteCoreError . CONFLICT ) ) { throw CBLStatus . convertException ( e ) ; } } if ( newDoc == null ) { // Handle conflict : if ( concurrencyControl . equals ( ConcurrencyControl . FAIL_ON_CONFLICT ) ) { return false ; // document is conflicted and return false because of OPTIMISTIC } try { curDoc = getC4Database ( ) . get ( document . getId ( ) , true ) ; } catch ( LiteCoreException e ) { if ( deletion && e . domain == C4Constants . ErrorDomain . LITE_CORE && e . code == C4Constants . LiteCoreError . NOT_FOUND ) { return true ; } else { throw CBLStatus . convertException ( e ) ; } } if ( deletion && curDoc . deleted ( ) ) { document . replaceC4Document ( curDoc ) ; curDoc = null ; // NOTE : prevent to call curDoc . free ( ) in finally block return true ; } // Save changes on the current branch : // NOTE : curDoc null check is done in prev try - catch blcok try { newDoc = save ( document , curDoc , deletion ) ; } catch ( LiteCoreException e ) { throw CBLStatus . convertException ( e ) ; } } document . replaceC4Document ( newDoc ) ; commit = true ; } finally { if ( curDoc != null ) { curDoc . retain ( ) ; curDoc . release ( ) ; // curDoc is not retained } try { endTransaction ( commit ) ; // true : commit the transaction , false : abort the transaction } catch ( CouchbaseLiteException e ) { if ( newDoc != null ) { newDoc . release ( ) ; // newDoc is already retained } throw e ; } } return true ; }
public class CmsEditSiteForm { /** * Are the aliase valid ? < p > * @ return true if ok */ boolean isValidAliase ( ) { } }
boolean ret = true ; for ( I_CmsEditableGroupRow row : m_aliasGroup . getRows ( ) ) { FormLayout layout = ( FormLayout ) ( row . getComponent ( ) ) ; TextField field = ( TextField ) layout . getComponent ( 0 ) ; ret = ret & field . isValid ( ) ; } return ret ;
public class BasePostprocessor { /** * Copies the content of { @ code sourceBitmap } to { @ code destBitmap } . Both bitmaps must have the * same width and height . If their { @ link Bitmap . Config } are identical , the memory is directly * copied . Otherwise , the { @ code sourceBitmap } is drawn into { @ code destBitmap } . */ private static void internalCopyBitmap ( Bitmap destBitmap , Bitmap sourceBitmap ) { } }
if ( destBitmap . getConfig ( ) == sourceBitmap . getConfig ( ) ) { Bitmaps . copyBitmap ( destBitmap , sourceBitmap ) ; } else { // The bitmap configurations might be different when the source bitmap ' s configuration is // null , because it uses an internal configuration and the destination bitmap ' s configuration // is the FALLBACK _ BITMAP _ CONFIGURATION . This is the case for static images for animated GIFs . Canvas canvas = new Canvas ( destBitmap ) ; canvas . drawBitmap ( sourceBitmap , 0 , 0 , null ) ; }
public class JobInstanceService { /** * This method exists only so that we can scope the transaction properly */ private void internalUpdateJobStateAndResult ( final JobInstance job ) { } }
transactionTemplate . execute ( new TransactionCallbackWithoutResult ( ) { protected void doInTransactionWithoutResult ( TransactionStatus status ) { jobInstanceDao . updateStateAndResult ( job ) ; if ( job . isCompleted ( ) ) { buildPropertiesService . saveCruiseProperties ( job ) ; } } } ) ;
public class DeviceFinder { /** * Send a device lost announcement to all registered listeners . * @ param announcement the last message received from the vanished device */ private void deliverLostAnnouncement ( final DeviceAnnouncement announcement ) { } }
for ( final DeviceAnnouncementListener listener : getDeviceAnnouncementListeners ( ) ) { SwingUtilities . invokeLater ( new Runnable ( ) { @ Override public void run ( ) { try { listener . deviceLost ( announcement ) ; } catch ( Throwable t ) { logger . warn ( "Problem delivering device lost announcement to listener" , t ) ; } } } ) ; }
public class Matrix4d { /** * Set only the translation components < code > ( m30 , m31 , m32 ) < / code > of this matrix to the given values < code > ( x , y , z ) < / code > . * To build a translation matrix instead , use { @ link # translation ( double , double , double ) } . * To apply a translation , use { @ link # translate ( double , double , double ) } . * @ see # translation ( double , double , double ) * @ see # translate ( double , double , double ) * @ param x * the units to translate in x * @ param y * the units to translate in y * @ param z * the units to translate in z * @ return this */ public Matrix4d setTranslation ( double x , double y , double z ) { } }
m30 = x ; m31 = y ; m32 = z ; properties &= ~ ( PROPERTY_PERSPECTIVE | PROPERTY_IDENTITY ) ; return this ;
public class BitcoinBlockReader {

    /**
     * Reads an identifier from a raw Bitcoin block. Note: does not change the ByteBuffer
     * position (uses mark/reset).
     *
     * @param rawByteBuffer ByteBuffer as read by readRawBlock, positioned at the block start
     * @return byte array containing hashMerkleRoot (first 32 bytes) followed by hashPrevBlock (last 32 bytes)
     */
    public byte[] getKeyFromRawBlock(ByteBuffer rawByteBuffer) {
        rawByteBuffer.mark();
        byte[] magicNo = new byte[4];
        byte[] hashMerkleRoot = new byte[32];
        byte[] hashPrevBlock = new byte[32];
        // magic no (skip)
        rawByteBuffer.get(magicNo, 0, 4);
        // blocksize (skip)
        rawByteBuffer.getInt();
        // version (skip)
        rawByteBuffer.getInt();
        // hashPrevBlock
        rawByteBuffer.get(hashPrevBlock, 0, 32);
        // hashMerkleRoot
        rawByteBuffer.get(hashMerkleRoot, 0, 32);
        // Concatenate merkle root + previous block hash; System.arraycopy replaces the
        // original element-by-element copy loops.
        byte[] result = new byte[hashMerkleRoot.length + hashPrevBlock.length];
        System.arraycopy(hashMerkleRoot, 0, result, 0, hashMerkleRoot.length);
        System.arraycopy(hashPrevBlock, 0, result, hashMerkleRoot.length, hashPrevBlock.length);
        rawByteBuffer.reset();
        return result;
    }
}
public class CsvBindingErrors { /** * フィールドエラーを登録します 。 * @ param field フィールドパス 。 * @ param errorCode エラーコード 。 */ public void rejectValue ( final String field , final String errorCode ) { } }
rejectValue ( field , null , errorCode , Collections . emptyMap ( ) , null ) ;
public class Benchmark { /** * Converts the time to different scales * @ param time is the time to convert * @ param from is the units of the time to convert from * @ param to is the units of the time to convert to * @ return the time in the units specified */ public double convert ( double time , TimeUnit from , TimeUnit to ) { } }
return time * factorOf ( to ) / factorOf ( from ) ;
public class WaitPageInterceptor { /** * Copy errors from a context to another context . * @ param source source containing errors to copy * @ param destination where errors will be copied */ protected void copyErrors ( ActionBeanContext source , ActionBeanContext destination ) { } }
destination . getValidationErrors ( ) . putAll ( source . getValidationErrors ( ) ) ;
public class Preconditions { /** * Ensures a collection is neither null nor empty . * @ param collection collection to be validated . * @ param errorMessage error message to be thrown if collection is null or empty . * @ param < T > type of the collection item . * @ return collection passed in . */ @ Nonnull public static < T > Collection < T > checkNotEmpty ( Collection < T > collection , @ Nonnull String errorMessage ) { } }
checkState ( collection != null && ! collection . isEmpty ( ) , errorMessage ) ; return collection ;
public class TaskEmailNotifier { /** * Creates the mime message for the e - mail . * @ param taskInstance the task instance * @ param outcome the resulting task status or state * @ return the message */ protected void sendEmail ( TaskRuntimeContext context , String outcome ) throws ObserverException { } }
TaskInstance taskInstance = context . getTaskInstance ( ) ; TemplatedEmail templatedEmail = new TemplatedEmail ( ) ; templatedEmail . setFromAddress ( getFromAddress ( ) ) ; templatedEmail . setSubject ( getSubject ( taskInstance , outcome ) ) ; templatedEmail . setHtml ( true ) ; templatedEmail . setTemplateAssetVerSpec ( getTemplateSpec ( ) ) ; templatedEmail . setRuntimeContext ( context ) ; try { List < Address > recipients = getRecipientAddresses ( outcome ) ; List < Address > ccRecipients = getCcRecipientAddresses ( outcome ) ; if ( templatedEmail . getTemplateBody ( ) . contains ( "${taskActionUrl}" ) ) { // send individual e - mails for ( Address recip : recipients ) { String cuid = recip . toString ( ) . substring ( 0 , recip . toString ( ) . indexOf ( '@' ) ) ; String userIdentifier = MiniCrypter . encrypt ( cuid ) ; taskInstance . setUserIdentifier ( userIdentifier ) ; templatedEmail . setRecipients ( new Address [ ] { recip } ) ; templatedEmail . setCcRecipients ( new Address [ 0 ] ) ; try { templatedEmail . sendEmail ( ) ; } catch ( MessagingException ex ) { logger . severeException ( ex . getMessage ( ) , ex ) ; // do not rethrow } } if ( ccRecipients != null ) { for ( Address ccRecip : ccRecipients ) { String cuid = ccRecip . toString ( ) . substring ( 0 , ccRecip . toString ( ) . indexOf ( '@' ) ) ; String userIdentifier = MiniCrypter . encrypt ( cuid ) ; taskInstance . setUserIdentifier ( userIdentifier ) ; templatedEmail . setRecipients ( new Address [ 0 ] ) ; templatedEmail . setCcRecipients ( new Address [ ] { ccRecip } ) ; try { templatedEmail . sendEmail ( ) ; } catch ( MessagingException ex ) { logger . severeException ( ex . getMessage ( ) , ex ) ; // do not rethrow } } } } else { if ( ( recipients != null && ! recipients . isEmpty ( ) ) || ( ccRecipients != null && ! ccRecipients . isEmpty ( ) ) ) { if ( recipients != null ) templatedEmail . setRecipients ( recipients . 
toArray ( new Address [ 0 ] ) ) ; if ( ccRecipients != null ) templatedEmail . setCcRecipients ( ccRecipients . toArray ( new Address [ 0 ] ) ) ; try { templatedEmail . sendEmail ( ) ; } catch ( MessagingException ex ) { logger . severeException ( ex . getMessage ( ) , ex ) ; // do not rethrow } } else { logger . warn ( "WARNING: No email recipients for task " + context . getTaskInstanceId ( ) + " " + outcome ) ; } } } catch ( Exception ex ) { logger . severeException ( ex . getMessage ( ) , ex ) ; throw new ObserverException ( - 1 , ex . getMessage ( ) , ex ) ; }
public class UcsApi { /** * Identify the contact for the interaction * @ param id id of the Interaction ( required ) * @ param identifyContactData ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > identifyContactWithHttpInfo ( String id , IdentifyContactData identifyContactData ) throws ApiException { } }
com . squareup . okhttp . Call call = identifyContactValidateBeforeCall ( id , identifyContactData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class CombinationGenerator { private void moveIndexesToNextCombination ( ) { } }
for ( int i = currentIndexes . length - 1 , j = list . size ( ) - 1 ; i >= 0 ; i -- , j -- ) { if ( currentIndexes [ i ] != j ) { currentIndexes [ i ] ++ ; for ( int k = i + 1 ; k < currentIndexes . length ; k ++ ) { currentIndexes [ k ] = currentIndexes [ k - 1 ] + 1 ; } return ; } } // otherwise , we are all done : currentIndexes = null ;
public class PropertiesConfigNodeFactory { /** * Load a given { @ link java . util . Properties } into a { @ link ConfigNode } . * @ param input The properties to load . * @ return The loaded { @ code ConfigNode } . */ public static ConfigNode load ( Properties input ) { } }
ConfigNode config = new ConfigNode ( ) ; load ( config , input ) ; return config ;
public class ID3v2ExtendedHeader { /** * A helper function for the getBytes method that returns a byte array * representing the extended flags field of the extended header . * @ return the extended flags field of the extended header */ private byte [ ] getFlagBytes ( ) { } }
byte [ ] b = new byte [ numFlagBytes ] ; int bytesCopied = 1 ; b [ 0 ] = 0 ; if ( update ) { b [ 0 ] |= 0x80 ; b [ bytesCopied ++ ] = 0 ; } if ( crced ) { b [ 0 ] |= 0x40 ; b [ bytesCopied ++ ] = ( byte ) crc . length ; System . arraycopy ( crc , 0 , b , bytesCopied , crc . length ) ; bytesCopied += crc . length ; } if ( ( maxTagSize != - 1 ) || textEncode || ( maxTextSize != - 1 ) || imageEncode || ( imageRestrict != - 1 ) ) { b [ 0 ] |= 0x20 ; b [ bytesCopied ++ ] = 0x01 ; byte restrict = 0 ; if ( maxTagSize != - 1 ) restrict |= ( byte ) ( ( maxTagSize & 0x3 ) << 6 ) ; if ( textEncode ) restrict |= 0x20 ; if ( maxTextSize != - 1 ) restrict |= ( byte ) ( ( maxTextSize & 0x3 ) << 3 ) ; if ( imageEncode ) restrict |= 0x04 ; if ( imageRestrict != - 1 ) restrict |= ( byte ) ( imageRestrict & 0x3 ) ; b [ bytesCopied ++ ] = restrict ; } return b ;
public class ModelControllerClientServerDeploymentManager { /** * { @ inheritDoc } */ @ Override protected Future < ModelNode > executeOperation ( Operation operation ) { } }
return client . executeAsync ( operation , null ) ;
public class PhysicalTable { /** * Read the record given the ID to this persistent object . * Note : You can ' t use an OBJECT _ ID handle , as these tables are non - persistent . * @ param objectID java . lang . Object The handle to lookup . * @ return true if found . * @ exception DBException File exception . */ public boolean doSetHandle ( Object bookmark , int iHandleType ) throws DBException { } }
try { if ( iHandleType == DBConstants . OBJECT_ID_HANDLE ) { return super . doSetHandle ( bookmark , iHandleType ) ; // ? throw new DBException ( " Object IDs are not supported for PhysicalTables ( non persistent ) " ) ; } else return super . doSetHandle ( bookmark , iHandleType ) ; } catch ( DBException ex ) { throw DatabaseException . toDatabaseException ( ex ) ; }
public class SFImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case BasePackage . SF__NUMBER : setNumber ( NUMBER_EDEFAULT ) ; return ; case BasePackage . SF__OFFSET : setOffset ( OFFSET_EDEFAULT ) ; return ; case BasePackage . SF__ID : setId ( ID_EDEFAULT ) ; return ; case BasePackage . SF__LENGTH : setLength ( LENGTH_EDEFAULT ) ; return ; case BasePackage . SF__CHILDREN : getChildren ( ) . clear ( ) ; return ; case BasePackage . SF__RAW_DATA : setRawData ( RAW_DATA_EDEFAULT ) ; return ; case BasePackage . SF__CHARSET : setCharset ( CHARSET_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ;
public class RgbaColor {

    /**
     * HSL helper per http://www.w3.org/TR/css3-color/#hsl-color: maps a hue offset
     * {@code h} onto the piecewise-linear ramp between {@code m1} and {@code m2}.
     */
    private static float hue2rgb(float m1, float m2, float h) {
        // Wrap the hue into [0, 1].
        if (h < 0) {
            h += 1;
        }
        if (h > 1) {
            h -= 1;
        }
        if (h * 6f < 1) {
            return m1 + (m2 - m1) * 6f * h;
        }
        if (h * 2f < 1) {
            return m2;
        }
        if (h * 3f < 2) {
            return m1 + (m2 - m1) * (2 / 3f - h) * 6f;
        }
        return m1;
    }
}
public class RequestEvent { /** * < p > getExceptionCause . < / p > * @ return a { @ link org . glassfish . jersey . server . monitoring . RequestEvent . ExceptionCause } object . */ public org . glassfish . jersey . server . monitoring . RequestEvent . ExceptionCause getExceptionCause ( ) { } }
return event . getExceptionCause ( ) ;
public class OAuth2Credentials { /** * Indicates if the access token has expired * @ return true if expired , false otherwise */ public boolean hasExpired ( ) { } }
if ( expiresAt == null ) { LOGGER . debug ( "hasExpired - token is not expirable" ) ; return false ; } long now = System . currentTimeMillis ( ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "hasExpired? - now: {} expiredAt: {} " , new Date ( now ) , expiresAt ) ; } return now > expiresAt . getTime ( ) ;
public class ViewDragHelper { /** * The result of a call to this method is equivalent to * { @ link # processTouchEvent ( android . view . MotionEvent ) } receiving an ACTION _ CANCEL event . */ public void cancel ( ) { } }
mActivePointerId = INVALID_POINTER ; clearMotionHistory ( ) ; if ( mVelocityTracker != null ) { mVelocityTracker . recycle ( ) ; mVelocityTracker = null ; }
public class JMonthChooser { /** * Enable or disable the JMonthChooser . * @ param enabled * the new enabled value */ public void setEnabled ( boolean enabled ) { } }
super . setEnabled ( enabled ) ; comboBox . setEnabled ( enabled ) ; if ( spinner != null ) { spinner . setEnabled ( enabled ) ; }
public class RocksDbWrapper { /** * Gets a value from a column family , specifying read options . * @ param cfName * @ param readOptions * @ param key * @ return * @ throws RocksDbException */ public byte [ ] get ( String cfName , ReadOptions readOptions , String key ) throws RocksDbException { } }
if ( cfName == null ) { cfName = DEFAULT_COLUMN_FAMILY ; } ColumnFamilyHandle cfh = columnFamilyHandles . get ( cfName ) ; if ( cfh == null ) { throw new RocksDbException . ColumnFamilyNotExists ( cfName ) ; } return get ( cfh , readOptions , key . getBytes ( StandardCharsets . UTF_8 ) ) ;
public class FunctionInjector { /** * If required , rewrite the statement containing the call expression . * @ see ExpressionDecomposer # canExposeExpression */ void maybePrepareCall ( Reference ref ) { } }
CallSiteType callSiteType = classifyCallSite ( ref ) ; callSiteType . prepare ( this , ref ) ;
public class AddAdGroupBidModifier {

    /**
     * Runs the example: adds a mobile-platform bid modifier to the given ad group.
     *
     * @param adWordsServices the services factory
     * @param session the session
     * @param adGroupId the ID of the ad group where bid modifiers will be added
     * @throws ApiException if the API request failed with one or more service errors
     * @throws RemoteException if the API request failed due to other errors
     */
    public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session, Long adGroupId)
            throws RemoteException {
        // Get the AdGroupBidModifierService.
        AdGroupBidModifierServiceInterface adGroupBidModifierService =
                adWordsServices.get(session, AdGroupBidModifierServiceInterface.class);

        // Create mobile platform. The ID can be found in the documentation:
        // https://developers.google.com/adwords/api/docs/appendix/platforms
        Platform mobile = new Platform();
        mobile.setId(30001L);

        AdGroupBidModifier adGroupBidModifier = new AdGroupBidModifier();
        adGroupBidModifier.setAdGroupId(adGroupId);
        adGroupBidModifier.setBidModifier(BID_MODIFIER);
        adGroupBidModifier.setCriterion(mobile);

        // Create ADD operation. Use 'ADD' to add a new modifier and 'SET' to update an
        // existing one. A modifier can be removed with the 'REMOVE' operator.
        AdGroupBidModifierOperation operation = new AdGroupBidModifierOperation();
        operation.setOperand(adGroupBidModifier);
        operation.setOperator(Operator.ADD);

        // Update ad group bid modifier.
        AdGroupBidModifierReturnValue result =
                adGroupBidModifierService.mutate(new AdGroupBidModifierOperation[] {operation});
        for (AdGroupBidModifier bidModifierResult : result.getValue()) {
            System.out.printf("Campaign ID %d, ad group ID %d was updated with ad group level modifier: %.4f%n",
                    bidModifierResult.getCampaignId(), bidModifierResult.getAdGroupId(),
                    bidModifierResult.getBidModifier());
        }
    }
}
public class MetricCollectingService { /** * Returns a new { @ link Service } decorator that tracks request stats using { @ link MeterRegistry } . */ public static < I extends Request , O extends Response > Function < Service < I , O > , MetricCollectingService < I , O > > newDecorator ( MeterIdPrefixFunction meterIdPrefixFunction ) { } }
requireNonNull ( meterIdPrefixFunction , "meterIdPrefixFunction" ) ; return delegate -> new MetricCollectingService < > ( delegate , meterIdPrefixFunction ) ;
public class HostVsanInternalSystem { /** * Query information about VSAN DOM objects that are currently syncing data . Instead of returning all objects , * only such objects are returned that are currently resyncing any stale components or syncing fresh replicas . * The API returns the same output format as queryVsanObjects ( ) . It retrieves information about syncing all objects , * or retricts the search for syncing objects to the UUID list provided . In order to make this API efficient , the * output of this API contains the found DOM _ OBJECT , and referenced LSOM _ OBJECT and DISK entries . * @ param uuids List of VSAN / DOM object UUIDs to restrict search to . * @ return JSON string with the results * @ throws RuntimeFault * @ throws RemoteException * @ since 6.0 */ public String querySyncingVsanObjects ( String [ ] uuids ) throws RuntimeFault , RemoteException { } }
return getVimService ( ) . querySyncingVsanObjects ( getMOR ( ) , uuids ) ;
public class ArrayUtils { /** * Create an array starting with first element and followed by others . * This can be useful when handling vararg parameters and when you want to * force to have at least one value . * @ param first * the first element * @ param others * the other elements * @ param < T > * the type of each element in the array * @ return the combined array */ public static < T > T [ ] concat ( T first , T [ ] others ) { } }
@ SuppressWarnings ( "unchecked" ) T [ ] arr = ( T [ ] ) Array . newInstance ( first . getClass ( ) , 1 ) ; arr [ 0 ] = first ; return concat ( arr , others ) ;
public class DisasterRecoveryConfigurationsInner { /** * Fails over from the current primary server to this server . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param disasterRecoveryConfigurationName The name of the disaster recovery configuration to failover . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > beginFailoverAsync ( String resourceGroupName , String serverName , String disasterRecoveryConfigurationName , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginFailoverWithServiceResponseAsync ( resourceGroupName , serverName , disasterRecoveryConfigurationName ) , serviceCallback ) ;
public class SinglePageDocument { /** * Prepare the document before rendering . * @ param outputStream output stream to render to , null if only for layout * @ param format format * @ throws DocumentException oops * @ throws IOException oops * @ throws PrintingException oops */ private void prepare ( ) throws IOException , DocumentException , PrintingException { } }
if ( baos == null ) { baos = new ByteArrayOutputStream ( ) ; // let it grow as much as needed } baos . reset ( ) ; boolean resize = false ; if ( page . getConstraint ( ) . getWidth ( ) == 0 || page . getConstraint ( ) . getHeight ( ) == 0 ) { resize = true ; } // Create a document in the requested ISO scale . Document document = new Document ( page . getBounds ( ) , 0 , 0 , 0 , 0 ) ; PdfWriter writer ; writer = PdfWriter . getInstance ( document , baos ) ; // Render in correct colors for transparent rasters writer . setRgbTransparencyBlending ( true ) ; // The mapView is not scaled to the document , we assume the mapView // has the right ratio . // Write document title and metadata document . open ( ) ; PdfContext context = new PdfContext ( writer ) ; context . initSize ( page . getBounds ( ) ) ; // first pass of all children to calculate size page . calculateSize ( context ) ; if ( resize ) { // we now know the bounds of the document // round ' m up and restart with a new document int width = ( int ) Math . ceil ( page . getBounds ( ) . getWidth ( ) ) ; int height = ( int ) Math . ceil ( page . getBounds ( ) . getHeight ( ) ) ; page . getConstraint ( ) . setWidth ( width ) ; page . getConstraint ( ) . setHeight ( height ) ; document = new Document ( new Rectangle ( width , height ) , 0 , 0 , 0 , 0 ) ; writer = PdfWriter . getInstance ( document , baos ) ; // Render in correct colors for transparent rasters writer . setRgbTransparencyBlending ( true ) ; document . open ( ) ; baos . reset ( ) ; context = new PdfContext ( writer ) ; context . initSize ( page . getBounds ( ) ) ; } // int compressionLevel = writer . getCompressionLevel ( ) ; / / For testing // writer . setCompressionLevel ( 0 ) ; // Actual drawing document . addTitle ( "Geomajas" ) ; // second pass to layout page . layout ( context ) ; // finally render ( uses baos ) page . render ( context ) ; document . add ( context . getImage ( ) ) ; // Now close the document document . close ( ) ;
public class PoolEvaluateAutoScaleHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the PoolEvaluateAutoScaleHeaders object itself . */ public PoolEvaluateAutoScaleHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class EvalCacheImpl { public Object getExprValue ( int id ) { } }
if ( tc . isEntryEnabled ( ) ) tc . entry ( this , cclass , "getExprValue" , "id: " + new Integer ( id ) ) ; Object result = null ; if ( cacheTag [ id ] == generation ) result = cacheValue [ id ] ; if ( tc . isEntryEnabled ( ) ) tc . exit ( this , cclass , "getExprValue" , "result: " + result ) ; return result ;
public class SpringApplication { /** * Called to log active profile information . * @ param context the application context */ protected void logStartupProfileInfo ( ConfigurableApplicationContext context ) { } }
Log log = getApplicationLog ( ) ; if ( log . isInfoEnabled ( ) ) { String [ ] activeProfiles = context . getEnvironment ( ) . getActiveProfiles ( ) ; if ( ObjectUtils . isEmpty ( activeProfiles ) ) { String [ ] defaultProfiles = context . getEnvironment ( ) . getDefaultProfiles ( ) ; log . info ( "No active profile set, falling back to default profiles: " + StringUtils . arrayToCommaDelimitedString ( defaultProfiles ) ) ; } else { log . info ( "The following profiles are active: " + StringUtils . arrayToCommaDelimitedString ( activeProfiles ) ) ; } }
public class LocalVariableAnnotationNode { /** * Makes the given visitor visit this type annotation . * @ param mv * the visitor that must visit this annotation . * @ param visible * < tt > true < / tt > if the annotation is visible at runtime . */ public void accept ( final MethodVisitor mv , boolean visible ) { } }
Label [ ] start = new Label [ this . start . size ( ) ] ; Label [ ] end = new Label [ this . end . size ( ) ] ; int [ ] index = new int [ this . index . size ( ) ] ; for ( int i = 0 ; i < start . length ; ++ i ) { start [ i ] = this . start . get ( i ) . getLabel ( ) ; end [ i ] = this . end . get ( i ) . getLabel ( ) ; index [ i ] = this . index . get ( i ) ; } accept ( mv . visitLocalVariableAnnotation ( typeRef , typePath , start , end , index , desc , true ) ) ;
public class PagedList { /** * Removes a previously added callback . * @ param callback Callback , previously added . * @ see # addWeakCallback ( List , Callback ) */ @ SuppressWarnings ( "WeakerAccess" ) public void removeWeakCallback ( @ NonNull Callback callback ) { } }
for ( int i = mCallbacks . size ( ) - 1 ; i >= 0 ; i -- ) { Callback currentCallback = mCallbacks . get ( i ) . get ( ) ; if ( currentCallback == null || currentCallback == callback ) { // found callback , or empty weak ref mCallbacks . remove ( i ) ; } }
public class HttpsURLConnection { /** * Returns the server ' s principal which was established as part of * defining the session . * Note : Subclasses should override this method . If not overridden , it * will default to returning the X500Principal of the server ' s end - entity * certificate for certificate - based ciphersuites , or throw an * SSLPeerUnverifiedException for non - certificate based ciphersuites , * such as Kerberos . * @ return the server ' s principal . Returns an X500Principal of the * end - entity certiticate for X509 - based cipher suites , and * KerberosPrincipal for Kerberos cipher suites . * @ throws SSLPeerUnverifiedException if the peer was not verified * @ throws IllegalStateException if this method is called before * the connection has been established . * @ see # getServerCertificates ( ) * @ see # getLocalPrincipal ( ) * @ since 1.5 */ public Principal getPeerPrincipal ( ) throws SSLPeerUnverifiedException { } }
java . security . cert . Certificate [ ] certs = getServerCertificates ( ) ; return ( ( X500Principal ) ( ( X509Certificate ) certs [ 0 ] ) . getSubjectX500Principal ( ) ) ;
public class ScopeContext {

    /**
     * Removes expired in-memory session/client scopes.
     *
     * Walks every application context of the requested scope type, releases
     * each expired {@link MemoryScope}, fires the session-end listener for
     * session scopes, and finally drops application entries that became empty.
     *
     * @param cfmlFactory factory providing config, listener and logging
     * @param type scope type, either {@code Scope.SCOPE_CLIENT} or
     *        {@code Scope.SCOPE_SESSION}
     */
    private void clearUnusedMemoryScope(CFMLFactoryImpl cfmlFactory, int type) {
        Map<String, Map<String, Scope>> contexts = type == Scope.SCOPE_CLIENT ? cfClientContexts : cfSessionContexts;
        if (contexts.size() == 0) return;
        // Snapshot the key sets as arrays so removal below does not disturb iteration.
        Object[] arrContexts = contexts.keySet().toArray();
        ApplicationListener listener = cfmlFactory.getConfig().getApplicationListener();
        Object applicationName, cfid, o;
        Map<String, Scope> fhm;
        for (int i = 0; i < arrContexts.length; i++) {
            applicationName = arrContexts[i];
            fhm = contexts.get(applicationName);
            if (fhm.size() > 0) {
                Object[] cfids = fhm.keySet().toArray();
                // Track remaining entries so an emptied application map can be removed.
                int count = cfids.length;
                for (int y = 0; y < cfids.length; y++) {
                    cfid = cfids[y];
                    o = fhm.get(cfid);
                    // Only memory-based scopes are handled here.
                    if (!(o instanceof MemoryScope)) continue;
                    MemoryScope scope = (MemoryScope) o;
                    // close
                    if (scope.isExpired()) {
                        // TODO does this make sense? isn't this a leftover copy?
                        // Preserve the application's lastAccess across the listener
                        // call: touch() for the callback, restore it in finally.
                        ApplicationImpl application = (ApplicationImpl) applicationContexts.get(applicationName);
                        long appLastAccess = 0;
                        if (application != null) {
                            appLastAccess = application.getLastAccess();
                            application.touch();
                        }
                        scope.touch();
                        try {
                            if (type == Scope.SCOPE_SESSION)
                                listener.onSessionEnd(cfmlFactory, (String) applicationName, (String) cfid);
                        }
                        catch (Throwable t) {
                            // Errors/interrupts are rethrown; listener failures are only logged
                            // so one bad listener cannot block cleanup of remaining scopes.
                            ExceptionUtil.rethrowIfNecessary(t);
                            ExceptionHandler.log(cfmlFactory.getConfig(), Caster.toPageException(t));
                        }
                        finally {
                            // Release the scope even when the listener failed.
                            if (application != null) application.setLastAccess(appLastAccess);
                            fhm.remove(cfids[y]);
                            scope.release(ThreadLocalPageContext.get());
                            getLog().log(Log.LEVEL_INFO, "scope-context",
                                    "remove memory based " + VariableInterpreter.scopeInt2String(type)
                                            + " scope for " + applicationName + "/" + cfid);
                            count--;
                        }
                    }
                }
                // All scopes of this application were removed: drop the application entry too.
                if (count == 0) contexts.remove(arrContexts[i]);
            }
        }
    }
}
public class ClustersInner {

    /**
     * Rotate disk encryption key of the specified HDInsight cluster.
     *
     * Blocking wrapper: delegates to the async variant and waits for the first
     * (single) response; the body is ignored because the operation returns no
     * payload.
     *
     * @param resourceGroupName The name of the resource group.
     * @param clusterName The name of the cluster.
     * @param parameters The parameters for the disk encryption operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginRotateDiskEncryptionKey(String resourceGroupName, String clusterName,
            ClusterDiskEncryptionParameters parameters) {
        // Block on the observable; single() asserts exactly one emitted response.
        beginRotateDiskEncryptionKeyWithServiceResponseAsync(resourceGroupName, clusterName, parameters)
                .toBlocking().single().body();
    }
}
public class ExceptionFormatter { /** * Returns a formatted stack trace for an exception . * < p > This method provides a full ( non - truncated ) trace delimited by * { @ link # DELIMITER } . Currently it doesn ' t make any use of Java 7 ' s < a * href = " http : / / docs . oracle . com / javase / tutorial / essential / exceptions / tryResourceClose . html # suppressed - exceptions " > exception * suppression < / a > . < / p > * @ param error an { @ link IThrowableProxy } object * @ return stack trace string */ public static String formatException ( IThrowableProxy error ) { } }
String ex = "" ; ex += formatTopLevelError ( error ) ; ex += formatStackTraceElements ( error . getStackTraceElementProxyArray ( ) ) ; IThrowableProxy cause = error . getCause ( ) ; ex += DELIMITER ; while ( cause != null ) { ex += formatTopLevelError ( cause ) ; StackTraceElementProxy [ ] arr = cause . getStackTraceElementProxyArray ( ) ; ex += formatStackTraceElements ( arr ) ; ex += DELIMITER ; cause = cause . getCause ( ) ; } return ex ;
public class ControlClientAnnotationProcessor {

    /**
     * Enforces the VersionRequired annotation for control fields.
     *
     * Compares the {@code @VersionRequired} major/minor on the field against the
     * {@code @Version} declared on the control interface, and reports an error
     * when the requirement is not satisfied. A negative required major means
     * "no requirement".
     */
    private void enforceVersionRequired(FieldDeclaration f, InterfaceDeclaration controlIntf) {
        VersionRequired versionRequired = f.getAnnotation(VersionRequired.class);
        Version versionPresent = controlIntf.getAnnotation(Version.class);
        if (versionRequired != null) {
            int majorRequired = -1;
            try {
                majorRequired = versionRequired.major();
            }
            catch (NullPointerException ignore) {
                /* The major version annotation member is required; if unspecified it
                   throws an NPE when queried. That error is caught during syntactic
                   validation performed by javac, so ignore an NPE caught here. */
                return;
            }
            int minorRequired = versionRequired.minor();
            /* No version requirement, so return. */
            if (majorRequired < 0)
                return;
            int majorPresent = -1;
            int minorPresent = -1;
            if (versionPresent != null) {
                try {
                    majorPresent = versionPresent.major();
                }
                catch (NullPointerException ignore) {
                    /* The major version annotation member is required; if unspecified it
                       throws an NPE when queried. That error is caught during syntactic
                       validation performed by javac, so ignore an NPE caught here. */
                }
                minorPresent = versionPresent.minor();
                // A negative minorRequired means "any minor within the major is fine".
                if (majorRequired <= majorPresent && (minorRequired < 0 || minorRequired <= minorPresent)) {
                    // Version requirement is satisfied.
                    return;
                }
            }
            // Version requirement failed.
            printError(f, "control.field.bad.version", f.getSimpleName(), majorRequired, minorRequired,
                    majorPresent, minorPresent);
        }
    }
}
public class HttpClientBuilder {

    /**
     * Map the parameters in {@link HttpClientConfiguration} to configuration on a
     * {@link org.apache.http.impl.client.HttpClientBuilder} instance.
     *
     * Order matters: the proxy route planner set from the configuration may be
     * overridden by an explicitly supplied {@code routePlanner} further down.
     *
     * @param builder the Apache builder to configure
     * @param manager the instrumented connection manager to install
     * @param name client name, used for metrics and the User-Agent
     * @return the configured {@link CloseableHttpClient} paired with its request config
     */
    protected ConfiguredCloseableHttpClient createClient(
            final org.apache.http.impl.client.HttpClientBuilder builder,
            final InstrumentedHttpClientConnectionManager manager,
            final String name) {
        final String cookiePolicy = configuration.isCookiesEnabled() ? CookieSpecs.DEFAULT : CookieSpecs.IGNORE_COOKIES;
        final Integer timeout = (int) configuration.getTimeout().toMilliseconds();
        final Integer connectionTimeout = (int) configuration.getConnectionTimeout().toMilliseconds();
        final Integer connectionRequestTimeout = (int) configuration.getConnectionRequestTimeout().toMilliseconds();
        final long keepAlive = configuration.getKeepAlive().toMilliseconds();
        // keepAlive == 0 disables connection reuse entirely.
        final ConnectionReuseStrategy reuseStrategy = keepAlive == 0
                ? new NoConnectionReuseStrategy()
                : new DefaultConnectionReuseStrategy();
        // Retries: 0 -> never retry; otherwise use the supplied handler or a default one.
        final HttpRequestRetryHandler retryHandler = configuration.getRetries() == 0
                ? NO_RETRIES
                : (httpRequestRetryHandler == null
                        ? new DefaultHttpRequestRetryHandler(configuration.getRetries(), false)
                        : httpRequestRetryHandler);
        final RequestConfig requestConfig = RequestConfig.custom()
                .setCookieSpec(cookiePolicy)
                .setSocketTimeout(timeout)
                .setConnectTimeout(connectionTimeout)
                .setConnectionRequestTimeout(connectionRequestTimeout)
                .build();
        final SocketConfig socketConfig = SocketConfig.custom()
                .setTcpNoDelay(true)
                .setSoTimeout(timeout)
                .build();
        customizeBuilder(builder)
                .setRequestExecutor(new InstrumentedHttpRequestExecutor(metricRegistry, metricNameStrategy, name))
                .setConnectionManager(manager)
                .setDefaultRequestConfig(requestConfig)
                .setDefaultSocketConfig(socketConfig)
                .setConnectionReuseStrategy(reuseStrategy)
                .setRetryHandler(retryHandler)
                .setUserAgent(createUserAgent(name));
        if (keepAlive != 0) {
            // Either keep alive based on response header Keep-Alive,
            // or if the server can keep a persistent connection (-1), then override
            // based on the client's configuration.
            builder.setKeepAliveStrategy(new DefaultConnectionKeepAliveStrategy() {
                @Override
                public long getKeepAliveDuration(HttpResponse response, HttpContext context) {
                    final long duration = super.getKeepAliveDuration(response, context);
                    return (duration == -1) ? keepAlive : duration;
                }
            });
        }
        // Create a tunnel through a proxy host if it's specified in the config.
        final ProxyConfiguration proxy = configuration.getProxyConfiguration();
        if (proxy != null) {
            final HttpHost httpHost = new HttpHost(proxy.getHost(), proxy.getPort(), proxy.getScheme());
            builder.setRoutePlanner(new NonProxyListProxyRoutePlanner(httpHost, proxy.getNonProxyHosts()));
            // If the proxy host requires authentication then add the host
            // credentials to the credentials provider.
            final AuthConfiguration auth = proxy.getAuth();
            if (auth != null) {
                if (credentialsProvider == null) {
                    credentialsProvider = new BasicCredentialsProvider();
                }
                // Set the AuthScope.
                AuthScope authScope = new AuthScope(httpHost, auth.getRealm(), auth.getAuthScheme());
                // Set the credentials type.
                Credentials credentials = configureCredentials(auth);
                credentialsProvider.setCredentials(authScope, credentials);
            }
        }
        if (credentialsProvider != null) {
            builder.setDefaultCredentialsProvider(credentialsProvider);
        }
        // NOTE(review): an explicit routePlanner replaces the proxy planner set above.
        if (routePlanner != null) {
            builder.setRoutePlanner(routePlanner);
        }
        if (disableContentCompression) {
            builder.disableContentCompression();
        }
        if (redirectStrategy != null) {
            builder.setRedirectStrategy(redirectStrategy);
        }
        if (defaultHeaders != null) {
            builder.setDefaultHeaders(defaultHeaders);
        }
        if (verifier != null) {
            builder.setSSLHostnameVerifier(verifier);
        }
        if (httpProcessor != null) {
            builder.setHttpProcessor(httpProcessor);
        }
        if (serviceUnavailableRetryStrategy != null) {
            builder.setServiceUnavailableRetryStrategy(serviceUnavailableRetryStrategy);
        }
        return new ConfiguredCloseableHttpClient(builder.build(), requestConfig);
    }
}
public class dnspolicy64 { /** * Use this API to fetch filtered set of dnspolicy64 resources . * set the filter parameter values in filtervalue object . */ public static dnspolicy64 [ ] get_filtered ( nitro_service service , filtervalue [ ] filter ) throws Exception { } }
dnspolicy64 obj = new dnspolicy64 ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; dnspolicy64 [ ] response = ( dnspolicy64 [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class CommonOps_DDF3 { /** * < p > Performs the following operation : < br > * < br > * c = a - b < br > * c < sub > i < / sub > = a < sub > i < / sub > - b < sub > i < / sub > < br > * Vector C can be the same instance as Vector A and / or B . * @ param a A Vector . Not modified . * @ param b A Vector . Not modified . * @ param c A Vector where the results are stored . Modified . */ public static void subtract ( DMatrix3 a , DMatrix3 b , DMatrix3 c ) { } }
c . a1 = a . a1 - b . a1 ; c . a2 = a . a2 - b . a2 ; c . a3 = a . a3 - b . a3 ;
public class DatePicker { /** * This function return the current date of a date picker in the format of " MM / dd / yyyy " * @ return the current date */ public String getDate ( ) { } }
SimpleDateFormat formatter = new SimpleDateFormat ( "MM/dd/yyyy" ) ; String date = formatter . format ( calendar . getTime ( ) ) ; return date ;
public class PrintStreamOutput {

    /**
     * Prints text to the output stream, replacing parameter start and end
     * placeholders.
     *
     * Table sections delimited by the table placeholders are transformed first;
     * then the value start/end/newline placeholders are substituted with their
     * configured replacements.
     *
     * @param text the String to print
     */
    protected void print(String text) {
        // format(key, defaultValue): resolves the configured pattern for a
        // placeholder, falling back to the given default.
        String tableStart = format(PARAMETER_TABLE_START, PARAMETER_TABLE_START);
        String tableEnd = format(PARAMETER_TABLE_END, PARAMETER_TABLE_END);
        // Only transform tables when both delimiters are present.
        boolean containsTable = text.contains(tableStart) && text.contains(tableEnd);
        String textToPrint = containsTable ? transformPrintingTable(text, tableStart, tableEnd) : text;
        print(output, textToPrint
                // Value delimiters default to being stripped (EMPTY) ...
                .replace(format(PARAMETER_VALUE_START, PARAMETER_VALUE_START), format("parameterValueStart", EMPTY))
                .replace(format(PARAMETER_VALUE_END, PARAMETER_VALUE_END), format("parameterValueEnd", EMPTY))
                // ... while value newlines default to an actual line break (NL).
                .replace(format(PARAMETER_VALUE_NEWLINE, PARAMETER_VALUE_NEWLINE), format("parameterValueNewline", NL)));
    }
}
public class CreateSyntheticOverheadViewS {

    /**
     * Computes overhead view of input image. All pixels in input image are assumed
     * to be on the ground plane.
     *
     * Each overhead pixel looks up its precomputed camera-image coordinate in
     * {@code mapPixels} and samples the input via the interpolator; overhead
     * pixels with no camera mapping ({@code null} entry) are left untouched.
     *
     * @param input (Input) Camera image.
     * @param output (Output) Image containing overhead view.
     */
    public void process(T input, T output) {
        // Re-wrap the output so the generic accessor targets this image's storage.
        this.output = FactoryGImageGray.wrap(output, this.output);
        interp.setImage(input);
        // mapPixels is laid out row-major over the overhead image; indexMap walks it linearly.
        int indexMap = 0;
        for (int i = 0; i < output.height; i++) {
            // Raw index of the first pixel of row i in the output's backing array.
            int indexOut = output.startIndex + i * output.stride;
            for (int j = 0; j < output.width; j++, indexOut++, indexMap++) {
                Point2D_F32 p = mapPixels[indexMap];
                if (p != null) {
                    // Interpolated sample of the camera image at the mapped location.
                    this.output.set(indexOut, interp.get(p.x, p.y));
                }
            }
        }
    }
}
public class Util {

    /**
     * Computes the raw delta area between two quantile sketches for the
     * {@link #kolmogorovSmirnovTest(DoubleSketch, DoubleSketch, double)
     * Kolmogorov-Smirnov Test} method.
     *
     * Merges the two sketches' sorted sample arrays with a two-pointer walk and
     * tracks the maximum difference between their normalized cumulative weights.
     *
     * @param sketch1 Input DoubleSketch 1
     * @param sketch2 Input DoubleSketch 2
     * @return the raw delta area between two quantile sketches
     */
    public static double computeKSDelta(final DoublesSketch sketch1, final DoublesSketch sketch2) {
        final DoublesAuxiliary p = new DoublesAuxiliary(sketch1);
        final DoublesAuxiliary q = new DoublesAuxiliary(sketch2);
        final double[] pSamplesArr = p.auxSamplesArr_;
        final double[] qSamplesArr = q.auxSamplesArr_;
        final long[] pCumWtsArr = p.auxCumWtsArr_;
        final long[] qCumWtsArr = q.auxCumWtsArr_;
        final int pSamplesArrLen = pSamplesArr.length;
        final int qSamplesArrLen = qSamplesArr.length;
        final double n1 = sketch1.getN();
        final double n2 = sketch2.getN();
        // Compute D from the two distributions.
        double deltaArea = 0.0;
        // getNextIndex skips duplicate sample values; start before the first entry.
        int i = getNextIndex(pSamplesArr, -1);
        int j = getNextIndex(qSamplesArr, -1);
        // We're done if either array reaches the end.
        while ((i < pSamplesArrLen) && (j < qSamplesArrLen)) {
            final double pSample = pSamplesArr[i];
            final double qSample = qSamplesArr[j];
            final long pWt = pCumWtsArr[i];
            final long qWt = qCumWtsArr[j];
            // Normalize cumulative weights to empirical CDF values.
            final double pNormWt = pWt / n1;
            final double qNormWt = qWt / n2;
            final double pMinusQ = Math.abs(pNormWt - qNormWt);
            final double curD = deltaArea;
            deltaArea = Math.max(curD, pMinusQ);
            // Increment i or j or both (both when the sample values tie).
            if (pSample == qSample) {
                i = getNextIndex(pSamplesArr, i);
                j = getNextIndex(qSamplesArr, j);
            } else if (pSample < qSample) {
                i = getNextIndex(pSamplesArr, i);
            } else {
                j = getNextIndex(qSamplesArr, j);
            }
        }
        // This is D, the delta difference in area of the two distributions.
        // NOTE(review): indexing pCumWtsArr[i]/qCumWtsArr[j] after the loop assumes the
        // cumulative-weight arrays carry a trailing total-weight entry beyond the
        // sample arrays' length — TODO confirm against DoublesAuxiliary.
        deltaArea = Math.max(deltaArea, Math.abs((pCumWtsArr[i] / n1) - (qCumWtsArr[j] / n2)));
        return deltaArea;
    }
}
public class CreateDeploymentConfigRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * Writes each request field through the protocol marshaller using its
     * pre-declared binding; field order follows the bindings' declaration.
     *
     * @param createDeploymentConfigRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateDeploymentConfigRequest createDeploymentConfigRequest, ProtocolMarshaller protocolMarshaller) {
        if (createDeploymentConfigRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createDeploymentConfigRequest.getDeploymentConfigName(), DEPLOYMENTCONFIGNAME_BINDING);
            protocolMarshaller.marshall(createDeploymentConfigRequest.getMinimumHealthyHosts(), MINIMUMHEALTHYHOSTS_BINDING);
            protocolMarshaller.marshall(createDeploymentConfigRequest.getTrafficRoutingConfig(), TRAFFICROUTINGCONFIG_BINDING);
            protocolMarshaller.marshall(createDeploymentConfigRequest.getComputePlatform(), COMPUTEPLATFORM_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Engine { /** * Gets the output variable of the given name after iterating the output * variables . The cost of this method is O ( n ) , where n is the number of output * variables in the engine . For performance , please get the variables by * index . * @ param name is the name of the output variable * @ return output variable of the given name * @ throws RuntimeException if there is no variable with the given name */ public OutputVariable getOutputVariable ( String name ) { } }
for ( OutputVariable outputVariable : this . outputVariables ) { if ( outputVariable . getName ( ) . equals ( name ) ) { return outputVariable ; } } throw new RuntimeException ( String . format ( "[engine error] no output variable by name <%s>" , name ) ) ;
public class MySqlDdlParser {

    /**
     * {@inheritDoc}
     *
     * Dispatches MySQL-specific CREATE statement variants to the generic
     * {@code parseStatement} with the matching start phrase and node type,
     * falling back to the standard parser when none matches. The match order
     * is significant (e.g. plain INDEX before UNIQUE INDEX as written).
     *
     * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseCreateStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
     *      org.modeshape.sequencer.ddl.node.AstNode)
     */
    @Override
    protected AstNode parseCreateStatement(DdlTokenStream tokens, AstNode parentNode) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        if (tokens.matches(STMT_CREATE_INDEX)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_INDEX, parentNode, TYPE_CREATE_INDEX_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_UNIQUE_INDEX)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_UNIQUE_INDEX, parentNode, TYPE_CREATE_INDEX_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_FUNCTION)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_FUNCTION, parentNode, TYPE_CREATE_FUNCTION_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_PROCEDURE)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_PROCEDURE, parentNode, TYPE_CREATE_PROCEDURE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_SERVER)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_SERVER, parentNode, TYPE_CREATE_SERVER_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TRIGGER)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_TRIGGER, parentNode, TYPE_CREATE_TRIGGER_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_EVENT)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_EVENT, parentNode, TYPE_CREATE_EVENT_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TABLESPACE)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_TABLESPACE, parentNode, TYPE_CREATE_TABLESPACE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_DEFINER)) {
            return parseStatement(tokens, MySqlStatementStartPhrases.STMT_CREATE_DEFINER, parentNode, TYPE_CREATE_DEFINER_STATEMENT);
        }
        // Not a MySQL-specific variant: defer to the standard DDL parser.
        return super.parseCreateStatement(tokens, parentNode);
    }
}
public class OffheapIncrementalIndex { /** * NOTE : This is NOT thread - safe with add . . . so make sure all the adding is DONE before closing */ @ Override public void close ( ) { } }
super . close ( ) ; facts . clear ( ) ; indexAndOffsets . clear ( ) ; if ( selectors != null ) { selectors . clear ( ) ; } Closer c = Closer . create ( ) ; aggBuffers . forEach ( c :: register ) ; try { c . close ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } aggBuffers . clear ( ) ;
public class GracefulShutdownService { /** * Register a shutdown hook with the service . * @ param shutdownHook a class that implements { @ link GracefulShutdownHook } * @ throws IllegalStateException if the server shutdown is already in progress and the hook cannot be registered * @ throws NullPointerException if the shutdown hook argument is null */ public void register ( GracefulShutdownHook shutdownHook ) { } }
if ( isShuttingDown . get ( ) ) { // Avoid any changes to the shutdown hooks set when the shutdown is already in progress throw new IllegalStateException ( "Couldn't register shutdown hook because shutdown is already in progress" ) ; } shutdownHooks . add ( requireNonNull ( shutdownHook , "shutdownHook cannot be null" ) ) ;
public class OkCoinTradeServiceRaw { /** * 批量下单 * @ param symbol * @ param type 限价单 ( buy / sell ) * @ param ordersData " [ { price : 3 , amount : 5 , type : ' sell ' } , { price : 3 , amount : 3 , type : ' buy ' } ] " * 最终买卖类型由orders _ data 中type 为准 , 如orders _ data不设定type 则由上面type设置为准 。 若 , 上面type没有设置 , orderData * 必须设置type * @ return * @ throws IOException */ public OkCoinMoreTradeResult batchTrade ( String symbol , String type , String ordersData ) throws IOException { } }
OkCoinMoreTradeResult tradeResult = okCoin . batchTrade ( apikey , symbol , type , ordersData , signatureCreator ( ) ) ; return returnOrThrow ( tradeResult ) ;
public class SnowflakeResultChunk { /** * Checks that all data has been added after parsing . * @ throws SnowflakeSQLException when rows are not all downloaded */ public final void ensureRowsComplete ( ) throws SnowflakeSQLException { } }
// Check that all the rows have been decoded , raise an error if not if ( rowCount != currentRow ) { throw new SnowflakeSQLException ( SqlState . INTERNAL_ERROR , ErrorCode . INTERNAL_ERROR . getMessageCode ( ) , "Exception: expected " + rowCount + " rows and received " + currentRow ) ; }
public class DSClient { /** * Iterate and return . * @ param rSet * the r set * @ return the list */ private List iterateAndReturn ( ResultSet rSet ) { } }
Iterator < Row > rowIter = rSet . iterator ( ) ; List results = new ArrayList ( ) ; while ( rowIter . hasNext ( ) ) { Row row = rowIter . next ( ) ; ColumnDefinitions columnDefs = row . getColumnDefinitions ( ) ; Iterator < Definition > columnDefIter = columnDefs . iterator ( ) ; Map rowData = new HashMap ( ) ; while ( columnDefIter . hasNext ( ) ) { Definition columnDef = columnDefIter . next ( ) ; rowData . put ( columnDef . getName ( ) , DSClientUtilities . assign ( row , null , null , columnDef . getType ( ) . getName ( ) , null , columnDef . getName ( ) , null , null ) ) ; } results . add ( rowData ) ; } return results ;
public class HostMessenger { /** * Synchronization protects m _ knownFailedHosts and ensures that every failed host is only reported * once */ @ Override public synchronized void reportForeignHostFailed ( int hostId ) { } }
long initiatorSiteId = CoreUtils . getHSIdFromHostAndSite ( hostId , AGREEMENT_SITE_ID ) ; m_agreementSite . reportFault ( initiatorSiteId ) ; if ( ! m_shuttingDown ) { // should be the single console message a user sees when another node fails networkLog . warn ( String . format ( "Host %d failed. Cluster remains operational." , hostId ) ) ; }
public class StreamRemoteConnector { /** * Register for connection change events , when there is a change in connectivity status * on the underlying transport . * @ param listener the listener */ @ Override public void registerConnectionChangeListener ( ConnectionChangeListener listener ) { } }
connectionListeners . add ( listener ) ; executor . execute ( ( ) -> listener . connectionChange ( this , isConnected ( ) ) ) ;
public class CmsPathMap {

    /**
     * Converts a path into list form.<p>
     *
     * Empty segments (leading, trailing, or doubled slashes) are skipped.
     *
     * @param path the path to convert
     * @return the list of the path elements
     */
    private List<String> splitPath(String path) {
        List<String> segments = new ArrayList<String>();
        for (String segment : path.split("/")) {
            if (!segment.isEmpty()) {
                segments.add(segment);
            }
        }
        return segments;
    }
}
public class JvmGenericArrayTypeReferenceImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective setter: routes the COMPONENT_TYPE feature to
     * {@link #setComponentType} and delegates every other feature to the
     * superclass. Do not edit by hand.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE:
                setComponentType((JvmTypeReference) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class DecimalFormat {

    /**
     * Matches a string at text[pos] and returns the index of the next character
     * upon success. Returns -1 on failure. Matches a run of white space in str
     * with a run of white space in text; bidi marks in str are skipped.
     */
    static final int match(String text, int pos, String str) {
        // Iterate str by code point; stop early once a sub-match fails (pos < 0).
        for (int i = 0; i < str.length() && pos >= 0;) {
            int ch = UTF16.charAt(str, i);
            i += UTF16.getCharCount(ch);
            // Bidi control marks carry no visible content: skip without consuming text.
            if (isBidiMark(ch)) {
                continue;
            }
            // Match a single code point (returns the next position, or -1 on failure).
            pos = match(text, pos, ch);
            // One whitespace code point in str matches a whole whitespace run:
            // collapse the remainder of the run in str.
            if (PatternProps.isWhiteSpace(ch)) {
                i = skipPatternWhiteSpace(str, i);
            }
        }
        return pos;
    }
}
public class EjbDescriptors { /** * Adds an EJB descriptor to the maps * @ param ejbDescriptor The EJB descriptor to add */ private < T > void add ( EjbDescriptor < T > ejbDescriptor ) { } }
InternalEjbDescriptor < T > internalEjbDescriptor = InternalEjbDescriptor . of ( ejbDescriptor ) ; ejbByName . put ( ejbDescriptor . getEjbName ( ) , internalEjbDescriptor ) ; ejbByClass . put ( ejbDescriptor . getBeanClass ( ) , internalEjbDescriptor . getEjbName ( ) ) ;
public class CodeAttr { /** * Returns the length ( in bytes ) of this object in the class file . */ public int getLength ( ) { } }
int length = 12 ; if ( mCodeBuffer != null ) { length += mCodeBuffer . getByteCodes ( ) . length ; ExceptionHandler [ ] handlers = mCodeBuffer . getExceptionHandlers ( ) ; if ( handlers != null ) { length += 8 * handlers . length ; } } int size = mAttributes . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { length += mAttributes . get ( i ) . getLength ( ) ; length += 6 ; // attributes have an intial 6 byte length } return length ;
public class AsynchronousRequest { /** * For more info on TokenInfo API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / tokeninfo " > here < / a > < br / > * Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions * @ param API API key * @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) } * @ throws GuildWars2Exception invalid API key * @ throws NullPointerException if given { @ link Callback } is null * @ see TokenInfo API info */ public void getAPIInfo ( String API , Callback < TokenInfo > callback ) throws GuildWars2Exception , NullPointerException { } }
isParamValid ( new ParamChecker ( ParamType . API , API ) ) ; gw2API . getAPIInfo ( API ) . enqueue ( callback ) ;
public class Country { /** * Compares two ISOCountry objects for ordering . * Returns the value < code > zero < / code > if the two countries have equal * country codes . Returns a value less than < code > zero < / code > if this * country code is before the country argument and a value greater than * < code > zero < / code > if this country is after the country argument . Will * result in alphabetical sorting . */ public int compareTo ( Country country ) { } }
String thisCode = this . getCode ( ) ; String otherCode = country . getCode ( ) ; return thisCode . compareTo ( otherCode ) ;
public class ForkJoinPool {

    /**
     * Runs tasks until {@code isQuiescent()}. We piggyback on active count ctl
     * maintenance, but rather than blocking when tasks cannot be found, we
     * rescan until all others cannot find tasks either.
     *
     * The worker toggles between "active" (counted in ctl's AC field) and
     * "inactive" while draining its own queue and stealing from others; the
     * loop exits only when it is inactive and re-activating would make the
     * pool's active count non-positive impossible (i.e. quiescence).
     */
    final void helpQuiescePool(WorkQueue w) {
        ForkJoinTask<?> ps = w.currentSteal; // save context
        int wc = w.config;
        for (boolean active = true;;) {
            long c; WorkQueue q; ForkJoinTask<?> t;
            // wc >= 0 means LIFO mode: drain our own queue first.
            if (wc >= 0 && (t = w.pop()) != null) { // run locals if LIFO
                (w.currentSteal = t).doExec();
                w.currentSteal = ps; // restore saved context after each task
            }
            else if ((q = findNonEmptyStealQueue()) != null) {
                if (!active) { // re-establish active count
                    active = true;
                    U.getAndAddLong(this, CTL, AC_UNIT);
                }
                if ((t = q.pollAt(q.base)) != null) {
                    (w.currentSteal = t).doExec();
                    w.currentSteal = ps;
                    // nsteals overflowed: fold the local count into the pool total.
                    if (++w.nsteals < 0)
                        w.transferStealCount(this);
                }
            }
            else if (active) { // decrement active count without queuing
                long nc = (AC_MASK & ((c = ctl) - AC_UNIT)) | (~AC_MASK & c);
                if (U.compareAndSwapLong(this, CTL, c, nc))
                    active = false;
            }
            // Inactive and no work anywhere: if the pool's active count is at or
            // below zero, CAS our activation back in and exit — quiescent.
            else if ((int)((c = ctl) >> AC_SHIFT) + (config & SMASK) <= 0 &&
                     U.compareAndSwapLong(this, CTL, c, c + AC_UNIT))
                break;
        }
    }
}
public class FrameOutputWriter { /** * Add the FRAME tag for the frame that lists all packages . * @ param contentTree the content tree to which the information will be added */ private void addAllPackagesFrameTag ( Content contentTree ) { } }
HtmlTree frame = HtmlTree . FRAME ( DocPaths . OVERVIEW_FRAME . getPath ( ) , "packageListFrame" , configuration . getText ( "doclet.All_Packages" ) ) ; contentTree . addContent ( frame ) ;
public class EJSDeployedSupport { /** * Returns the context data associated with this method invocation . */ public Map < String , Object > getContextData ( ) { } }
if ( ivContextData == null ) { ivContextData = new HashMap < String , Object > ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getContextData: created empty" ) ; } return ivContextData ;
public class SObject { /** * Load an sobject from classpath by given url path * This method will call { @ link Class # getResource ( String ) } method to open * an inputstream to the resource and then construct an SObject with the * inputstream * @ param url the resource url path * @ return the sobject instance if loaded successfully or ` null ` if cannot load resource from the url */ public static SObject loadResource ( String url ) { } }
InputStream is = SObject . class . getResourceAsStream ( url ) ; if ( null == is ) { return null ; } String filename = S . afterLast ( url , "/" ) ; if ( S . blank ( filename ) ) { filename = url ; } return of ( randomKey ( ) , is , ATTR_FILE_NAME , filename ) ;
public class TypedControllerConfiguration { /** * / * - - - - - GET ROUTES - - - - - */ protected final < REQ , RESP > Route get ( RouteAction < Request < REQ > , Response < RESP > > action , RequestType < REQ > req , ResponseType < RESP > resp ) { } }
return createEntityRoute ( HttpMethod . GET , "" , action , req , resp ) ;
public class UniqueId { /** * Attempts to find suggestions of names given a search term . * @ param search The search term ( possibly empty ) . * @ return A list of known valid names that have UIDs that sort of match * the search term . If the search term is empty , returns the first few * terms . * @ throws HBaseException if there was a problem getting suggestions from * HBase . * @ since 1.1 */ public Deferred < List < String > > suggestAsync ( final String search , final int max_results ) { } }
return new SuggestCB ( search , max_results ) . search ( ) ;
public class Mediawiki { /** * show a usage */ public void usage ( String msg ) { } }
System . err . println ( msg ) ; showVersion ( ) ; System . err . println ( " usage: java com.bitplan.mediawiki.japi.Mediawiki" ) ; parser . printUsage ( System . err ) ; exitCode = 1 ;
public class AESUtils { /** * Decrypt data using AES . * @ param keyData * @ param iv * initial vector . If { @ code null } or empty , { @ link # DEFAULT _ IV } * will be used . * @ param cipherTransformation * cipher - transformation to use . If empty , { @ link # DEFAULT _ CIPHER _ TRANSFORMATION } * will be used . * @ param encryptedData * encrypted data will be read from this input stream . This method will not close the * input stream ! * @ param output * output data will be written to this output stream . This method will not close the * output stream ! * @ throws NoSuchAlgorithmException * @ throws NoSuchPaddingException * @ throws InvalidKeyException * @ throws IllegalBlockSizeException * @ throws BadPaddingException * @ throws InvalidAlgorithmParameterException * @ throws IOException * @ since 0.9.2 */ public static void decrypt ( byte [ ] keyData , byte [ ] iv , String cipherTransformation , InputStream encryptedData , OutputStream output ) throws NoSuchAlgorithmException , NoSuchPaddingException , InvalidKeyException , IllegalBlockSizeException , BadPaddingException , InvalidAlgorithmParameterException , IOException { } }
Cipher cipher = createCipher ( Cipher . DECRYPT_MODE , keyData , iv , cipherTransformation ) ; try ( DdthCipherInputStream cis = new DdthCipherInputStream ( encryptedData , cipher , false ) ) { IOUtils . copy ( cis , output , 1024 ) ; output . flush ( ) ; }
public class DateTime { /** * Parse the timezone offset from the input , returning its millisecond value . */ private static Integer parseTzOffsetMs ( Input s , boolean strict ) { } }
if ( s . index < s . len ) { char c = s . getChar ( ) ; s . index ++ ; int sign ; if ( c == 'Z' ) { return 0 ; } else if ( c == '+' ) { sign = 1 ; } else if ( c == '-' ) { sign = - 1 ; } else { throw new DateFormatException ( "unexpected character, expected one of [Z+-]" , s . str , s . index - 1 ) ; } int tzHours = parseField ( "timezone hours" , s , TIME_SEP , 2 , 2 , strict ) ; if ( strict && tzHours > 14 ) throw new DateFormatException ( "timezone offset hours out of range [0..14]" , s . str , s . index - 2 ) ; int tzMin = parseField ( "timezone minutes" , s , null , 2 , 2 , strict ) ; if ( strict && tzMin > 59 ) throw new DateFormatException ( "timezone offset hours out of range [0..59]" , s . str , s . index - 1 ) ; if ( strict && tzHours == 14 && tzMin > 0 ) throw new DateFormatException ( "timezone offset may not be greater than 14 hours" , s . str , s . index - 1 ) ; return sign * ( tzHours * MINS_IN_HOUR + tzMin ) * MS_IN_MIN ; } // Reached the end of input with no timezone specified . return null ;
public class AbstractProject { /** * List of necessary resources to perform the build of this project . */ public ResourceList getResourceList ( ) { } }
final Set < ResourceActivity > resourceActivities = getResourceActivities ( ) ; final List < ResourceList > resourceLists = new ArrayList < ResourceList > ( 1 + resourceActivities . size ( ) ) ; for ( ResourceActivity activity : resourceActivities ) { if ( activity != this && activity != null ) { // defensive infinite recursion and null check resourceLists . add ( activity . getResourceList ( ) ) ; } } return ResourceList . union ( resourceLists ) ;
public class CreateKeyRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateKeyRequest createKeyRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( createKeyRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createKeyRequest . getPolicy ( ) , POLICY_BINDING ) ; protocolMarshaller . marshall ( createKeyRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( createKeyRequest . getKeyUsage ( ) , KEYUSAGE_BINDING ) ; protocolMarshaller . marshall ( createKeyRequest . getOrigin ( ) , ORIGIN_BINDING ) ; protocolMarshaller . marshall ( createKeyRequest . getCustomKeyStoreId ( ) , CUSTOMKEYSTOREID_BINDING ) ; protocolMarshaller . marshall ( createKeyRequest . getBypassPolicyLockoutSafetyCheck ( ) , BYPASSPOLICYLOCKOUTSAFETYCHECK_BINDING ) ; protocolMarshaller . marshall ( createKeyRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SecurityActions { /** * Load a class using the provided classloader * @ param name * @ return * @ throws PrivilegedActionException */ static Class < ? > loadClass ( final ClassLoader cl , final String name ) throws PrivilegedActionException , ClassNotFoundException { } }
SecurityManager sm = System . getSecurityManager ( ) ; if ( sm == null ) { return cl . loadClass ( name ) ; } else { return AccessController . doPrivileged ( new PrivilegedExceptionAction < Class < ? > > ( ) { public Class < ? > run ( ) throws PrivilegedActionException { try { return cl . loadClass ( name ) ; } catch ( Exception e ) { throw new PrivilegedActionException ( e ) ; } } } ) ; }
public class WebACFilter { /** * Add URIs to collect permissions information for . * @ param httpRequest the request . * @ param uri the uri to check . */ private void addURIToAuthorize ( final HttpServletRequest httpRequest , final URI uri ) { } }
@ SuppressWarnings ( "unchecked" ) Set < URI > targetURIs = ( Set < URI > ) httpRequest . getAttribute ( URIS_TO_AUTHORIZE ) ; if ( targetURIs == null ) { targetURIs = new HashSet < > ( ) ; httpRequest . setAttribute ( URIS_TO_AUTHORIZE , targetURIs ) ; } targetURIs . add ( uri ) ;
public class Fn {
    /**
     * Split this stream at where {@code maxWindowSize} or {@code maxDuration} reaches first.
     *
     * @param maxWindowSize maximum number of elements per window
     * @param maxDuration maximum duration per window
     * @param startTime supplier of the stream's logical start time (millis)
     * @param collectionSupplier factory for each window's collection
     * @return a function that windows a timed stream into collections
     * @see #window(Duration, long, LongSupplier, Supplier)
     */
    public static <T, C extends Collection<Timed<T>>> Function<Stream<Timed<T>>, Stream<C>> window(final int maxWindowSize, final Duration maxDuration, final LongSupplier startTime, final Supplier<? extends C> collectionSupplier) {
        return new Function<Stream<Timed<T>>, Stream<C>>() {
            @Override
            public Stream<C> apply(final Stream<Timed<T>> s) {
                final ObjIterator<C> iter = new ObjIteratorEx<C>() {
                    // Lazily initialized state (see init()): window length in millis,
                    // source iterator, one-element lookahead, and current window bounds.
                    private long maxDurationInMillis;
                    private ObjIterator<Timed<T>> iter;
                    private Timed<T> next = null;   // lookahead element not yet emitted
                    private long fromTime;
                    private long endTime;           // exclusive end of the current window
                    private boolean initialized = false;

                    @Override
                    public boolean hasNext() {
                        if (initialized == false) {
                            init();
                        }
                        // Advance the lookahead until an element at/after the current
                        // window end is found (or the source is exhausted).
                        // NOTE(review): elements with timestamps before endTime are skipped
                        // here rather than buffered — confirm this is the intended contract.
                        while ((next == null || next.timestamp() < endTime) && iter.hasNext()) {
                            next = iter.next();
                        }
                        return next != null && next.timestamp() >= endTime;
                    }

                    @Override
                    public C next() {
                        if (hasNext() == false) {
                            throw new NoSuchElementException();
                        }
                        // Slide the window forward by one full duration.
                        fromTime = endTime;
                        endTime = fromTime + maxDurationInMillis;
                        int cnt = 0;
                        final C result = collectionSupplier.get();
                        // Consume the lookahead first if it falls inside the new window.
                        if (next != null && next.timestamp() < endTime) {
                            result.add(next);
                            next = null;
                            cnt++;
                        }
                        if (next == null) {
                            // Fill until the size cap is hit or an element beyond the
                            // window end is seen (kept as the next lookahead).
                            while (cnt < maxWindowSize && iter.hasNext()) {
                                next = iter.next();
                                if (next.timestamp() < endTime) {
                                    result.add(next);
                                    next = null;
                                    cnt++;
                                } else {
                                    break;
                                }
                            }
                        }
                        // Clamp endTime so the next window starts no later than the
                        // first unconsumed element (or "now" when the source dried up).
                        endTime = N.min(endTime, next == null ? System.currentTimeMillis() : next.timestamp());
                        return result;
                    }

                    // One-time setup deferred to first use so argument validation
                    // happens when the pipeline is consumed, not when it is built.
                    private void init() {
                        if (initialized == false) {
                            initialized = true;
                            N.checkArgNotNull(maxDuration, "maxDuration");
                            N.checkArgPositive(maxDuration.toMillis(), "maxDuration");
                            N.checkArgPositive(maxWindowSize, "maxWindowSize");
                            N.checkArgNotNull(startTime, "startTime");
                            N.checkArgNotNull(collectionSupplier, "collectionSupplier");
                            iter = s.iterator();
                            maxDurationInMillis = maxDuration.toMillis();
                            // Start one window-length before startTime so the first
                            // computed window ends exactly at startTime.
                            fromTime = startTime.getAsLong() - maxDurationInMillis;
                            endTime = fromTime + maxDurationInMillis;
                        }
                    }
                };
                // Propagate close() to the source stream.
                return Stream.of(iter).onClose(new Runnable() {
                    @Override
                    public void run() {
                        s.close();
                    }
                });
            }
        };
    }
}
public class WriterConfig { /** * Public API */ @ Override public Object getProperty ( int id ) { } }
switch ( id ) { // First , Stax 1.0 properties : case PROP_AUTOMATIC_NS : return automaticNamespacesEnabled ( ) ? Boolean . TRUE : Boolean . FALSE ; // Then Stax2 properties : // First , properties common to input / output factories : case PROP_ENABLE_NS : return willSupportNamespaces ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_PROBLEM_REPORTER : return getProblemReporter ( ) ; // Then output - specific properties : case PROP_AUTOMATIC_EMPTY_ELEMENTS : return automaticEmptyElementsEnabled ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_AUTO_CLOSE_OUTPUT : return willAutoCloseOutput ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_AUTOMATIC_NS_PREFIX : return getAutomaticNsPrefix ( ) ; case PROP_TEXT_ESCAPER : return getTextEscaperFactory ( ) ; case PROP_ATTR_VALUE_ESCAPER : return getAttrValueEscaperFactory ( ) ; // / / / / Then Woodstox - specific properties : case PROP_USE_DOUBLE_QUOTES_IN_XML_DECL : return willUseDoubleQuotesInXmlDecl ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_OUTPUT_CDATA_AS_TEXT : return willOutputCDataAsText ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_COPY_DEFAULT_ATTRS : return willCopyDefaultAttrs ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_ESCAPE_CR : return willEscapeCr ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_ADD_SPACE_AFTER_EMPTY_ELEM : return willAddSpaceAfterEmptyElem ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_AUTOMATIC_END_ELEMENTS : return automaticEndElementsEnabled ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_VALIDATE_STRUCTURE : return willValidateStructure ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_VALIDATE_CONTENT : return willValidateContent ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_VALIDATE_ATTR : return willValidateAttributes ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_VALIDATE_NAMES : return willValidateNames ( ) ? Boolean . TRUE : Boolean . FALSE ; case PROP_FIX_CONTENT : return willFixContent ( ) ? Boolean . TRUE : Boolean . 
FALSE ; case PROP_OUTPUT_INVALID_CHAR_HANDLER : return getInvalidCharHandler ( ) ; case PROP_OUTPUT_EMPTY_ELEMENT_HANDLER : return getEmptyElementHandler ( ) ; // And then per - instance properties : not valid via config object case PROP_UNDERLYING_STREAM : case PROP_UNDERLYING_WRITER : throw new IllegalStateException ( "Can not access per-stream-writer properties via factory" ) ; } throw new IllegalStateException ( "Internal error: no handler for property with internal id " + id + "." ) ;
public class ServiceTools { /** * Get instancename from Dataservice Class * @ param cls * @ return */ public String getInstanceNameFromDataservice ( Class cls ) { } }
DataService dataService = ( DataService ) cls . getAnnotation ( DataService . class ) ; String clsName = dataService . name ( ) ; if ( clsName . isEmpty ( ) ) { clsName = cls . getSimpleName ( ) ; } return getInstanceName ( clsName ) ;
public class CouchDbUtil { /** * create a HTTP POST request . * @ return { @ link HttpConnection } */ public static HttpConnection createPost ( URI uri , String body , String contentType ) { } }
HttpConnection connection = Http . POST ( uri , "application/json" ) ; if ( body != null ) { setEntity ( connection , body , contentType ) ; } return connection ;
public class FileUtils { /** * Sticky bit can be set primarily on directories in UNIX / Linux . * If the sticky bit of is enabled on a directory , only the owner and the root user can delete / * rename the files or directories within that directory . No one else can delete other users data * in this directory ( Where sticky bit is set ) . * This is a security measure to avoid deletion of folders and their content ( sub - folders and * files ) , though other users have full permissions . * Setting the sticky bit of a file is a no - op . * @ param dir absolute dir path to set the sticky bit */ public static void setLocalDirStickyBit ( String dir ) { } }
try { // Support for sticky bit is platform specific . Check if the path starts with " / " and if so , // assume that the host supports the chmod command . if ( dir . startsWith ( AlluxioURI . SEPARATOR ) ) { // TODO ( peis ) : This is very slow . Consider removing this . Runtime . getRuntime ( ) . exec ( "chmod +t " + dir ) ; } } catch ( IOException e ) { LOG . info ( "Can not set the sticky bit of the directory: {}" , dir , e ) ; }
public class Reflection {
    /**
     * Get the parameter name of a ({@linkplain Serializable serializable}) lambda with a
     * single parameter.
     * Getting the parameter requires the source to be compiled with the {@code -parameters}
     * flag passed to {@code javac} and JDK {@code 1.8.0_60} or newer.
     *
     * @param lambda the ({@linkplain Serializable serializable}) lambda to get the parameter name from.
     * @return the name of the sole parameter of the lambda.
     * @throws IllegalArgumentException for unsupported method kinds or a parameter count other than one
     * @throws IllegalStateException when parameter names were not compiled in
     */
    public static String lambdaParameterName(Serializable lambda) {
        SerializedLambda serialized = serializedLambda(lambda);
        Parameter[] parameters = lambdaMethod(serialized).getParameters();
        // 'bound' = number of leading synthetic parameters captured by the lambda,
        // which must be skipped to reach the lambda's own declared parameter.
        int bound;
        switch (serialized.getImplMethodKind()) {
            case REF_invokeStatic:
                bound = serialized.getCapturedArgCount();
                break;
            case REF_invokeSpecial:
                // Instance methods capture the receiver as the first captured arg;
                // it is not part of the parameter array, so exclude it from the bound.
                bound = serialized.getCapturedArgCount() - 1;
                break;
            default:
                throw new IllegalArgumentException("Unsupported method kind: " + serialized.getImplMethodKind());
        }
        // Exactly one non-captured parameter must remain.
        if (parameters == null || (parameters.length - bound) != 1) {
            throw new IllegalArgumentException("Must have exactly one parameter, not " + (parameters == null ? 0 : parameters.length) + "; " + Arrays.toString(parameters) + ", bound: " + bound);
        }
        Parameter parameter = parameters[bound];
        // Parameter names are only present when compiled with '-parameters'.
        if (!parameter.isNamePresent()) {
            throw new IllegalStateException("No parameter name present, compile with '-parameters', and use JDK 1.8.0_60 or newer. " + "Your JDK version is " + System.getProperty("java.version"));
        }
        return parameter.getName();
    }
}
public class JsonArray { /** * Adds the specified element to self . * @ param element the element that needs to be added to the array . */ public void add ( JsonElement element ) { } }
if ( element == null ) { element = JsonNull . INSTANCE ; } elements . add ( element ) ;
public class RelatedArticleList {
    /**
     * Getter for the relatedArticles feature of this annotation.
     * Generated by the UIMA JCas tooling; do not edit the body by hand.
     *
     * @generated
     * @return value of the feature as an {@link FSArray}
     */
    public FSArray getRelatedArticles() {
        // Type-system sanity check: fail with a descriptive error if the feature
        // is missing from the loaded type system.
        if (RelatedArticleList_Type.featOkTst && ((RelatedArticleList_Type) jcasType).casFeat_relatedArticles == null)
            jcasType.jcas.throwFeatMissing("relatedArticles", "de.julielab.jules.types.RelatedArticleList");
        // Resolve the feature reference from the low-level CAS into an FSArray.
        return (FSArray) (jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((RelatedArticleList_Type) jcasType).casFeatCode_relatedArticles)));
    }
}
public class KrakenImpl {
    /**
     * Query implementation for multiple result with the parsed query.
     * Any failure is reported through {@code result.fail(e)} rather than thrown.
     *
     * @param query  the parsed query
     * @param args   query arguments used to fill the key
     * @param result sink receiving cursors, completion, or failure
     */
    private void findStream(QueryKraken query, Object[] args, ResultStream<Cursor> result) {
        try {
            TableKraken table = query.table();
            TableKelp tableKelp = table.getTableKelp();
            TablePod tablePod = table.getTablePod();
            if (query.isStaticNode()) {
                RowCursor cursor = tableKelp.cursor();
                query.fillKey(cursor, args);
                int hash = query.calculateHash(cursor);
                // ShardPod node = tablePod.getPodNode(hash);
                // NOTE(review): the "|| true" forces the local path unconditionally,
                // making the else branch dead code. Together with the commented-out
                // remote lookup below this looks like work-in-progress — confirm
                // whether remote dispatch should be re-enabled.
                if (tablePod.getNode(hash).isSelfCopy() || true) {
                    query.findStream(result, args);
                    return;
                } else {
                    // tablePod.get(cursor.getKey(), new FindGetResult(result, query, args));
                    result.ok();
                    return;
                }
            }
            // Non-static node: stream directly from the local query.
            query.findStream(result, args);
        } catch (Exception e) {
            // Deliver failures through the result stream instead of propagating.
            result.fail(e);
        }
    }
}
public class RenderAny {
    /**
     * Renders the action result in whatever representation fits the request:
     * a template when one exists, otherwise JSON/XML built from render args,
     * a missing-template response for HTML/TXT/CSV, or a binary download for
     * document formats.
     * TODO: Allow plugin to support rendering pdf, xls or other binary types
     *
     * @param context the current action context
     */
    public void apply(ActionContext context) {
        // Template available: delegate rendering entirely to the template engine.
        Boolean hasTemplate = context.hasTemplate();
        if (null != hasTemplate && hasTemplate) {
            RenderTemplate.get(context.successStatus()).apply(context);
            return;
        }
        H.Format fmt = context.accept();
        if (fmt == UNKNOWN) {
            H.Request req = context.req();
            // Old IE does not send a useful Accept header; assume HTML.
            if (req.userAgent().isIE9Down()) {
                fmt = HTML;
            } else {
                H.Method method = req.method();
                String methodInfo = S.concat(method.name(), " method to ");
                String acceptHeader = req.header(H.Header.Names.ACCEPT);
                throw E.unsupport(S.concat("Unknown accept content type(", acceptHeader, "): ", methodInfo, req.url()));
            }
        }
        Result result = null;
        if (JSON == fmt) {
            // JSON response: start from all render args, then overlay the
            // app-declared render-arg names (which take precedence).
            List<String> varNames = context.__appRenderArgNames();
            Map<String, Object> map = new HashMap<>(context.renderArgs());
            if (null != varNames && !varNames.isEmpty()) {
                for (String name : varNames) {
                    map.put(name, context.renderArg(name));
                }
            }
            result = new RenderJSON(map);
        } else if (XML == fmt) {
            // XML response: only the app-declared render args are serialized
            // (unlike JSON, the full renderArgs map is not included).
            List<String> varNames = context.__appRenderArgNames();
            Map<String, Object> map = new HashMap<>();
            if (null != varNames && !varNames.isEmpty()) {
                for (String name : varNames) {
                    map.put(name, context.renderArg(name));
                }
            }
            result = new FilteredRenderXML(map, null, context);
        } else if (HTML == fmt || TXT == fmt || CSV == fmt) {
            // Text formats need a template; either fail or emit the configured
            // null-value result when missing templates are tolerated.
            if (!ignoreMissingTemplate) {
                throw E.unsupport("Template[%s] not found", context.templatePath());
            }
            context.nullValueResultIgnoreRenderArgs().apply(context.req(), context.prepareRespForResultEvaluation());
            return;
        } else if (PDF == fmt || XLS == fmt || XLSX == fmt || DOC == fmt || DOCX == fmt) {
            // Binary document formats: the first render arg supplies the payload
            // (File, InputStream or ISObject); the action name (text after the
            // last '.') is used as the download name.
            List<String> varNames = context.__appRenderArgNames();
            if (null != varNames && !varNames.isEmpty()) {
                Object firstVar = context.renderArg(varNames.get(0));
                String action = S.str(context.actionPath()).afterLast(".").toString();
                if (firstVar instanceof File) {
                    File file = (File) firstVar;
                    result = new RenderBinary(file, action);
                } else if (firstVar instanceof InputStream) {
                    InputStream is = (InputStream) firstVar;
                    result = new RenderBinary(is, action);
                } else if (firstVar instanceof ISObject) {
                    ISObject sobj = (ISObject) firstVar;
                    result = new RenderBinary(sobj.asInputStream(), action);
                }
                if (null == result) {
                    throw E.unsupport("Unknown render arg type [%s] for binary response", firstVar.getClass());
                }
            } else {
                throw E.unexpected("No render arg found for binary response");
            }
        }
        if (null != result) {
            ActResponse<?> resp = context.prepareRespForResultEvaluation();
            result.status(context.successStatus()).apply(context.req(), resp);
        } else {
            // Accepted format fell through every branch above.
            throw E.unexpected("Unknown accept content type: %s", fmt.contentType());
        }
    }
}