signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Container { /** * Initializes { @ code Container } instance . * Sets up built - in { @ link Singleton } , { @ link Application } , { @ link Thread } , * { @ link Session } , { @ link Request } and { @ link Prototype } contexts and makes * { @ code Container } instance available in application . This method is * invoked by { @ link Jaguar } in bootstrap . */ @ SuppressWarnings ( "unchecked" ) public void initialize ( ) { } }
Class < ? extends Context > context = Singleton . class . getAnnotation ( Scope . class ) . value ( ) ; install ( context ) ; contexts . put ( Singleton . class , ( Descriptor < ? extends Context > ) components . get ( context ) . get ( 0 ) ) ; SingletonContext singleton = new SingletonContext ( ) ; singleton . put ( ( Descriptor < SingletonContext > ) contexts . get ( Singleton . class ) , singleton ) ; instances . put ( singleton , contexts . get ( Singleton . class ) ) ; this . singleton = singleton ; install ( Container . class ) ; Descriptor < Container > container = ( Descriptor < Container > ) components . get ( Container . class ) . get ( 0 ) ; singleton . put ( container , this ) ; instances . put ( this , container ) ; context = Application . class . getAnnotation ( Scope . class ) . value ( ) ; install ( context ) ; contexts . put ( Application . class , ( Descriptor < ? extends Context > ) components . get ( context ) . get ( 0 ) ) ; context = Thread . class . getAnnotation ( Scope . class ) . value ( ) ; install ( context ) ; contexts . put ( Thread . class , ( Descriptor < ? extends Context > ) components . get ( context ) . get ( 0 ) ) ; context = Session . class . getAnnotation ( Scope . class ) . value ( ) ; install ( context ) ; contexts . put ( Session . class , ( Descriptor < ? extends Context > ) components . get ( context ) . get ( 0 ) ) ; context = Request . class . getAnnotation ( Scope . class ) . value ( ) ; install ( context ) ; contexts . put ( Request . class , ( Descriptor < ? extends Context > ) components . get ( context ) . get ( 0 ) ) ; context = Prototype . class . getAnnotation ( Scope . class ) . value ( ) ; install ( context ) ; contexts . put ( Prototype . class , ( Descriptor < ? extends Context > ) components . get ( context ) . get ( 0 ) ) ; install ( LazyProvider . class ) ; logger . info ( "Container started" ) ;
public class Jersey2Module { /** * When HK2 management for jersey extensions is enabled by default , then guice bridge must be enabled . * Without it guice beans could not be used in resources and other jersey extensions . If this is * expected then guice support is not needed at all . */ private void checkHkFirstMode ( ) { } }
final boolean guiceyFirstMode = context . option ( JerseyExtensionsManagedByGuice ) ; if ( ! guiceyFirstMode ) { Preconditions . checkState ( context . option ( UseHkBridge ) , "HK2 management for jersey extensions is enabled by default " + "(InstallersOptions.JerseyExtensionsManagedByGuice), but HK2-guice bridge is not " + "enabled. Use GuiceyOptions.UseHkBridge option to enable bridge " + "(extra dependency is required)" ) ; }
public class FlowNode { /** * Gets all surrounding { @ link FlowNode nodes } that < b > DO < / b > flow into this node . * @ return the nodes that flow into this node . */ public List < FlowNode > getEnteringNodes ( ) { } }
if ( enteringNodes == null ) { enteringNodes = new ArrayList < FlowNode > ( ) ; Direction [ ] orderedDirs = Direction . getOrderedDirs ( ) ; for ( Direction direction : orderedDirs ) { switch ( direction ) { case E : if ( eFlow == Direction . E . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; case N : if ( nFlow == Direction . N . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; case W : if ( wFlow == Direction . W . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; case S : if ( sFlow == Direction . S . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; case EN : if ( enFlow == Direction . EN . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; case NW : if ( nwFlow == Direction . NW . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; case WS : if ( wsFlow == Direction . WS . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . 
add ( node ) ; } break ; case SE : if ( seFlow == Direction . SE . getEnteringFlow ( ) ) { int newCol = col + direction . col ; int newRow = row + direction . row ; FlowNode node = new FlowNode ( gridIter , cols , rows , newCol , newRow ) ; enteringNodes . add ( node ) ; } break ; default : throw new IllegalArgumentException ( ) ; } } } return enteringNodes ;
public class BitfinexApiCallbackListeners { /** * registers listener for subscribe events * @ param listener of event * @ return hook of this listener */ public Closeable onSubscribeChannelEvent ( final Consumer < BitfinexStreamSymbol > listener ) { } }
subscribeChannelConsumers . offer ( listener ) ; return ( ) -> subscribeChannelConsumers . remove ( listener ) ;
public class AuditEncryptionImpl { /** * The < code > getInstance < / code > method returns initializes the AuditEncryption implementation * @ param String representing the non - fully qualified keystore name * @ param String representing the path to the keystore * @ param String representing the keystore type * @ param String representing the keystore provider * @ param String representing the password for the keystore * @ param String representing the alias for the keystore entry * @ return instance of the < code > AuditEncryption < / code > object */ public static AuditEncryptionImpl getInstance ( String keyStoreName , String keyStorePath , String keyStoreType , String keyStoreProvider , String keyStorePassword , String keyAlias ) throws AuditEncryptionException { } }
try { if ( ae == null ) ae = new AuditEncryptionImpl ( keyStoreName , keyStorePath , keyStoreType , keyStoreProvider , keyStorePassword , keyAlias ) ; return ae ; } catch ( AuditEncryptionException e ) { throw new AuditEncryptionException ( e ) ; }
public class RulesConfigurationTypeMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RulesConfigurationType rulesConfigurationType , ProtocolMarshaller protocolMarshaller ) { } }
if ( rulesConfigurationType == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( rulesConfigurationType . getRules ( ) , RULES_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class VisualizeBinaryData { /** * Draws contours . Internal and external contours are different user specified colors . * @ param contours List of contours * @ param colorExternal RGB color * @ param colorInternal RGB color * @ param width Image width * @ param height Image height * @ param out ( Optional ) storage for output image * @ return Rendered contours */ public static BufferedImage renderContours ( List < Contour > contours , int colorExternal , int colorInternal , int width , int height , BufferedImage out ) { } }
if ( out == null ) { out = new BufferedImage ( width , height , BufferedImage . TYPE_INT_RGB ) ; } else { Graphics2D g2 = out . createGraphics ( ) ; g2 . setColor ( Color . BLACK ) ; g2 . fillRect ( 0 , 0 , width , height ) ; } for ( Contour c : contours ) { for ( Point2D_I32 p : c . external ) { out . setRGB ( p . x , p . y , colorExternal ) ; } for ( List < Point2D_I32 > l : c . internal ) { for ( Point2D_I32 p : l ) { out . setRGB ( p . x , p . y , colorInternal ) ; } } } return out ;
public class MessageRouterConfigurator { /** * Compare the incoming list of message IDs to be associated with the given handler , * adding and removing as needed to match the new config . * @ param msgIds * @ param handlerId */ public void updateMessageListForHandler ( String msgIds , String handlerId ) { } }
if ( msgIds == null ) { return ; // Should never happen , but avoid NPEs } String [ ] msgStr = msgIds . split ( "," ) ; ArrayList < String > newMsgList = new ArrayList < String > ( Arrays . asList ( msgStr ) ) ; newMsgList . remove ( "" ) ; // Ignore blank value // Get the list of existing handlers mapped to this message ID . ArrayList < String > prevMsgList = previousHandlerMessageMap . get ( handlerId ) ; { if ( prevMsgList == null ) { prevMsgList = new ArrayList < String > ( ) ; } } if ( prevMsgList . equals ( newMsgList ) ) { return ; // No change = nothing to do here , skip issuing message } getMessageRouter ( ) ; // Ensure router is not null // Check for new messages and add them to the list for the handler ArrayList < String > addedMsgs = new ArrayList < String > ( newMsgList ) ; addedMsgs . removeAll ( prevMsgList ) ; for ( String msgId : addedMsgs ) { msgRouter . addMsgToLogHandler ( msgId , handlerId ) ; } // Check for removed messages and remove them from the list for the handler ArrayList < String > removedMsgs = new ArrayList < String > ( prevMsgList ) ; removedMsgs . removeAll ( newMsgList ) ; for ( String msgId : removedMsgs ) { msgRouter . removeMsgFromLogHandler ( msgId , handlerId ) ; } // Save the new map for the next config change previousHandlerMessageMap . put ( handlerId , newMsgList ) ; // Notify that there was a change in this handler ' s messages String setOfIds = "" ; for ( String msg : newMsgList ) { if ( ! setOfIds . equals ( "" ) ) { setOfIds = setOfIds . concat ( " " ) ; } setOfIds = setOfIds . concat ( msg ) ; } Tr . info ( tc , "MSG_ROUTER_UPDATED" , new Object [ ] { handlerId , setOfIds } ) ;
public class SQLMultiScopeRecoveryLog { /** * Called when logs fail . Provides more comprehensive FFDC - PI45254. * this is NOT synchronized to avoid deadlocks . */ @ Override public void provideServiceability ( ) { } }
Exception e = new Exception ( ) ; try { FFDCFilter . processException ( e , "com.ibm.ws.recoverylog.custom.jdbc.impl.SQLMultiScopeRecoveryLog.provideServiceability" , "3624" , this ) ; HashMap < Long , RecoverableUnit > rus = _recoverableUnits ; if ( rus != null ) FFDCFilter . processException ( e , "com.ibm.ws.recoverylog.custom.jdbc.impl.SQLMultiScopeRecoveryLog.provideServiceability" , "3628" , rus ) ; } catch ( Exception ex ) { // Do nothing }
public class LeastRecentlyUsedCache { /** * Fetches an object from the cache . Synchronization at cache level is kept to minimum . The provider is called upon cache miss . * @ param key the identity of the object * @ param functor a functor to provide the object upon cache miss * @ param argument the sole argument to pass to the functor * @ return the object requested * @ throws Exception if the provider fails to create a new object */ public < T > V fetch ( K key , Functor < V , T > functor , T argument ) throws Exception { } }
return locate ( key ) . get ( functor , argument ) ;
public class JDBC4CallableStatement { /** * Retrieves the value of a JDBC DATE parameter as a java . sql . Date object , using the given Calendar object to construct the date . */ @ Override public Date getDate ( String parameterName , Calendar cal ) throws SQLException { } }
checkClosed ( ) ; throw SQLError . noSupport ( ) ;
public class MediaHttpDownloader { /** * Sets the content range of the next download request . Eg : bytes = firstBytePos - lastBytePos . * < p > If a download was aborted mid - way due to a connection failure then users can resume the * download from the point where it left off . * < p > Use { @ link # setBytesDownloaded } if you only need to specify the first byte position . * @ param firstBytePos The first byte position in the content range string * @ param lastBytePos The last byte position in the content range string . * @ since 1.24 */ public MediaHttpDownloader setContentRange ( long firstBytePos , long lastBytePos ) { } }
Preconditions . checkArgument ( lastBytePos >= firstBytePos ) ; setBytesDownloaded ( firstBytePos ) ; this . lastBytePos = lastBytePos ; return this ;
public class QueryLexer { /** * $ ANTLR start " FLOAT " */ public final void mFLOAT ( ) throws RecognitionException { } }
try { int _type = FLOAT ; int _channel = DEFAULT_TOKEN_CHANNEL ; // src / riemann / Query . g : 92:5 : ( ( ' - ' ) ? ( ' 0 ' . . ' 9 ' ) + ( ' . ' ( ' 0 ' . . ' 9 ' ) * ) ? ( EXPONENT ) ? ) // src / riemann / Query . g : 92:9 : ( ' - ' ) ? ( ' 0 ' . . ' 9 ' ) + ( ' . ' ( ' 0 ' . . ' 9 ' ) * ) ? ( EXPONENT ) ? { // src / riemann / Query . g : 92:9 : ( ' - ' ) ? int alt4 = 2 ; int LA4_0 = input . LA ( 1 ) ; if ( ( LA4_0 == '-' ) ) { alt4 = 1 ; } switch ( alt4 ) { case 1 : // src / riemann / Query . g : 92:9 : ' - ' { match ( '-' ) ; } break ; } // src / riemann / Query . g : 92:14 : ( ' 0 ' . . ' 9 ' ) + int cnt5 = 0 ; loop5 : do { int alt5 = 2 ; int LA5_0 = input . LA ( 1 ) ; if ( ( ( LA5_0 >= '0' && LA5_0 <= '9' ) ) ) { alt5 = 1 ; } switch ( alt5 ) { case 1 : // src / riemann / Query . g : 92:15 : ' 0 ' . . ' 9' { matchRange ( '0' , '9' ) ; } break ; default : if ( cnt5 >= 1 ) break loop5 ; EarlyExitException eee = new EarlyExitException ( 5 , input ) ; throw eee ; } cnt5 ++ ; } while ( true ) ; // src / riemann / Query . g : 92:26 : ( ' . ' ( ' 0 ' . . ' 9 ' ) * ) ? int alt7 = 2 ; int LA7_0 = input . LA ( 1 ) ; if ( ( LA7_0 == '.' ) ) { alt7 = 1 ; } switch ( alt7 ) { case 1 : // src / riemann / Query . g : 92:27 : ' . ' ( ' 0 ' . . ' 9 ' ) * { match ( '.' ) ; // src / riemann / Query . g : 92:31 : ( ' 0 ' . . ' 9 ' ) * loop6 : do { int alt6 = 2 ; int LA6_0 = input . LA ( 1 ) ; if ( ( ( LA6_0 >= '0' && LA6_0 <= '9' ) ) ) { alt6 = 1 ; } switch ( alt6 ) { case 1 : // src / riemann / Query . g : 92:32 : ' 0 ' . . ' 9' { matchRange ( '0' , '9' ) ; } break ; default : break loop6 ; } } while ( true ) ; } break ; } // src / riemann / Query . g : 92:45 : ( EXPONENT ) ? int alt8 = 2 ; int LA8_0 = input . LA ( 1 ) ; if ( ( LA8_0 == 'E' || LA8_0 == 'e' ) ) { alt8 = 1 ; } switch ( alt8 ) { case 1 : // src / riemann / Query . g : 92:45 : EXPONENT { mEXPONENT ( ) ; } break ; } } state . type = _type ; state . channel = _channel ; } finally { }
public class StringTools { /** * Case - aware version of startsWith ( ) */ public static int indexOf ( String text , String needle , int fromIndex , boolean ignoreCase ) { } }
if ( ignoreCase ) return indexOfIgnoreCase ( text , needle , fromIndex ) ; else return text . indexOf ( needle , fromIndex ) ;
public class StitchingFromMotion2D { /** * Sets the current image to be the origin of the stitched coordinate system . The background is filled * with a value of 0. * Must be called after { @ link # process ( boofcv . struct . image . ImageBase ) } . */ public void setOriginToCurrent ( ) { } }
IT currToWorld = ( IT ) worldToCurr . invert ( null ) ; IT oldWorldToNewWorld = ( IT ) worldToInit . concat ( currToWorld , null ) ; PixelTransform < Point2D_F32 > newToOld = converter . convertPixel ( oldWorldToNewWorld , null ) ; // fill in the background color GImageMiscOps . fill ( workImage , 0 ) ; // render the transform distorter . setModel ( newToOld ) ; distorter . apply ( stitchedImage , workImage ) ; // swap the two images I s = workImage ; workImage = stitchedImage ; stitchedImage = s ; // have motion estimates be relative to this frame motion . setToFirst ( ) ; first = true ; computeCurrToInit_PixelTran ( ) ;
public class Wagging { /** * Sets the weak learner used for classification . If it also supports * regressions that will be set as well . * @ param weakL the weak learner to use */ public void setWeakLearner ( Classifier weakL ) { } }
if ( weakL == null ) throw new NullPointerException ( ) ; this . weakL = weakL ; if ( weakL instanceof Regressor ) this . weakR = ( Regressor ) weakL ;
public class DoubleClickCrypto { /** * Encodes data , from binary form to string . * The default implementation performs websafe - base64 encoding ( RFC 3548 ) . */ @ Nullable protected String encode ( @ Nullable byte [ ] data ) { } }
return data == null ? null : Base64 . getUrlEncoder ( ) . encodeToString ( data ) ;
public class Parser { /** * a FunctionCallExpression . Token passed in must be an identifier . */ private FunctionCallExpression parseFunctionCallExpression ( Token token ) throws IOException { } }
Token next = peek ( ) ; if ( next . getID ( ) != Token . LPAREN ) { return null ; } SourceInfo info = token . getSourceInfo ( ) ; Name target = new Name ( info , token . getStringValue ( ) ) ; // parse remainder of call expression return parseCallExpression ( FunctionCallExpression . class , null , target , info ) ;
public class ExternalServiceAlertConditionService { /** * Creates the given external service alert condition . * @ param policyId The id of the policy to add the alert condition to * @ param condition The alert condition to create * @ return The alert condition that was created */ public Optional < ExternalServiceAlertCondition > create ( long policyId , ExternalServiceAlertCondition condition ) { } }
return HTTP . POST ( String . format ( "/v2/alerts_external_service_conditions/policies/%d.json" , policyId ) , condition , EXTERNAL_SERVICE_ALERT_CONDITION ) ;
public class Materialize { /** * a helper method to enable the keyboardUtil for a specific activity * or disable it . note this will cause some frame drops because of the * listener . * @ param activity * @ param enable */ public void keyboardSupportEnabled ( Activity activity , boolean enable ) { } }
if ( getContent ( ) != null && getContent ( ) . getChildCount ( ) > 0 ) { if ( mKeyboardUtil == null ) { mKeyboardUtil = new KeyboardUtil ( activity , getContent ( ) . getChildAt ( 0 ) ) ; mKeyboardUtil . disable ( ) ; } if ( enable ) { mKeyboardUtil . enable ( ) ; } else { mKeyboardUtil . disable ( ) ; } }
public class RedisCounterFactory { /** * { @ inheritDoc } */ @ Override protected ICounter createCounter ( String name ) { } }
RedisCounter counter = new RedisCounter ( name , ttlSeconds ) ; counter . setCounterFactory ( this ) . init ( ) ; return counter ;
public class DRConsumerDrIdTracker { /** * Truncate the tracker to the given safe point . After truncation , the new * safe point will be the first DrId of the tracker . If the new safe point * is before the first DrId of the tracker , it ' s a no - op . * @ param newTruncationPoint New safe point */ public void truncate ( long newTruncationPoint ) { } }
if ( newTruncationPoint < getFirstDrId ( ) ) { return ; } final Iterator < Range < Long > > iter = m_map . asRanges ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { final Range < Long > next = iter . next ( ) ; if ( end ( next ) < newTruncationPoint ) { iter . remove ( ) ; } else if ( next . contains ( newTruncationPoint ) ) { iter . remove ( ) ; m_map . add ( range ( newTruncationPoint , end ( next ) ) ) ; return ; } else { break ; } } m_map . add ( range ( newTruncationPoint , newTruncationPoint ) ) ;
public class Matrix4f { /** * Set this matrix to a rotation transformation about the Z axis . * When used with a right - handed coordinate system , the produced rotation will rotate a vector * counter - clockwise around the rotation axis , when viewing along the negative axis direction towards the origin . * When used with a left - handed coordinate system , the rotation is clockwise . * Reference : < a href = " http : / / en . wikipedia . org / wiki / Rotation _ matrix # Basic _ rotations " > http : / / en . wikipedia . org < / a > * @ param ang * the angle in radians * @ return this */ public Matrix4f rotationZ ( float ang ) { } }
float sin , cos ; sin = ( float ) Math . sin ( ang ) ; cos = ( float ) Math . cosFromSin ( sin , ang ) ; if ( ( properties & PROPERTY_IDENTITY ) == 0 ) MemUtil . INSTANCE . identity ( this ) ; this . _m00 ( cos ) ; this . _m01 ( sin ) ; this . _m10 ( - sin ) ; this . _m11 ( cos ) ; _properties ( PROPERTY_AFFINE | PROPERTY_ORTHONORMAL ) ; return this ;
public class HttpURL { /** * Gets the last URL path segment without the query string . * If there are segment to return , * an empty string will be returned instead . * @ return the last URL path segment */ public String getLastPathSegment ( ) { } }
if ( StringUtils . isBlank ( path ) ) { return StringUtils . EMPTY ; } String segment = path ; segment = StringUtils . substringAfterLast ( segment , "/" ) ; return segment ;
public class BitsUtil { /** * NOTAND o onto v in - place , i . e . v & amp ; = ~ o * @ param v Primary object * @ param o data to and * @ return v */ public static long [ ] nandI ( long [ ] v , long [ ] o ) { } }
int i = 0 ; for ( ; i < o . length ; i ++ ) { v [ i ] &= ~ o [ i ] ; } return v ;
public class AbstractQueryHandler { /** * This default implementation calls the individual { @ link # deleteNode ( String ) } * and { @ link # addNode ( NodeData ) } methods * for each entry in the iterators . First the nodes to remove are processed * then the nodes to add . * @ param remove uuids of nodes to remove . * @ param add NodeStates to add . * @ throws RepositoryException if an error occurs while indexing a node . * @ throws IOException if an error occurs while updating the index . */ public void updateNodes ( Iterator < String > remove , Iterator < NodeData > add ) throws RepositoryException , IOException { } }
while ( remove . hasNext ( ) ) { deleteNode ( remove . next ( ) ) ; } while ( add . hasNext ( ) ) { addNode ( add . next ( ) ) ; }
public class SQLiteExecutor { /** * Insert multiple records into data store . * @ param table * @ param records * @ param withTransaction * @ return */ @ Deprecated < T > long [ ] insert ( String table , T [ ] records , boolean withTransaction ) { } }
if ( N . isNullOrEmpty ( records ) ) { return N . EMPTY_LONG_ARRAY ; } final long [ ] ret = new long [ records . length ] ; table = formatName ( table ) ; if ( withTransaction ) { beginTransaction ( ) ; } try { for ( int i = 0 , len = records . length ; i < len ; i ++ ) { ret [ i ] = insert ( table , records [ i ] ) ; } if ( withTransaction ) { sqliteDB . setTransactionSuccessful ( ) ; } } finally { if ( withTransaction ) { endTransaction ( ) ; } } return ret ;
public class InternalJSONUtil { /** * 默认情况下是否忽略null值的策略选择 < br > * JavaBean默认忽略null值 , 其它对象不忽略 * @ param obj 需要检查的对象 * @ return 是否忽略null值 * @ since 4.3.1 */ protected static boolean defaultIgnoreNullValue ( Object obj ) { } }
if ( obj instanceof CharSequence || obj instanceof JSONTokener || obj instanceof Map ) { return false ; } return true ;
public class PagedBitMap { /** * Bitwise < br / > * < code > this = this | that < / code > */ public void add ( PagedBitMap that ) { } }
LongArray ta = that . array ; long n = 0 ; while ( true ) { n = ta . seekNext ( n ) ; if ( n < 0 ) { break ; } long v = array . get ( n ) | ta . get ( n ) ; array . set ( n , v ) ; ++ n ; }
public class NoiseInstance { /** * Compute turbulence using Perlin noise . * @ param x the x value * @ param y the y value * @ param octaves number of octaves of turbulence * @ return turbulence value at ( x , y ) */ public float turbulence3 ( float x , float y , float z , float octaves ) { } }
float t = 0.0f ; for ( float f = 1.0f ; f <= octaves ; f *= 2 ) t += Math . abs ( noise3 ( f * x , f * y , f * z ) ) / f ; return t ;
public class Period { /** * Returns a new period plus the specified number of weeks added . * This period instance is immutable and unaffected by this method call . * @ param weeks the amount of weeks to add , may be negative * @ return the new period plus the increased weeks * @ throws UnsupportedOperationException if the field is not supported */ public Period plusWeeks ( int weeks ) { } }
if ( weeks == 0 ) { return this ; } int [ ] values = getValues ( ) ; // cloned getPeriodType ( ) . addIndexedField ( this , PeriodType . WEEK_INDEX , values , weeks ) ; return new Period ( values , getPeriodType ( ) ) ;
public class FamilyVisitor { /** * Visit a Family . This is the primary focus of the visitation . From * here , interesting information is gathered from the attributes . * @ see GedObjectVisitor # visit ( Family ) */ @ Override public void visit ( final Family family ) { } }
for ( final GedObject gob : family . getAttributes ( ) ) { gob . accept ( this ) ; }
public class SessionDataManager { /** * Returns true if the item with < code > identifier < / code > is a new item , meaning that it exists * only in transient storage on the Session and has not yet been saved . Within a transaction , * isNew on an Item may return false ( because the item has been saved ) even if that Item is not in * persistent storage ( because the transaction has not yet been committed ) . * @ param identifier * of the item * @ return boolean */ public boolean isNew ( String identifier ) { } }
ItemState lastState = changesLog . getItemState ( identifier ) ; if ( lastState == null || lastState . isDeleted ( ) ) { return false ; } return changesLog . getItemState ( identifier , ItemState . ADDED ) != null ;
public class Sc68Format { /** * Get the library , or void format if not found . * @ return The audio format . */ public static AudioFormat getFailsafe ( ) { } }
try { return new Sc68Format ( ) ; } catch ( final LionEngineException exception ) { Verbose . exception ( exception , ERROR_LOAD_LIBRARY ) ; return new AudioVoidFormat ( FORMATS ) ; }
public class GPARCImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setMFR ( Integer newMFR ) { } }
Integer oldMFR = mfr ; mfr = newMFR ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . GPARC__MFR , oldMFR , mfr ) ) ;
public class ScoredValue { /** * Returns a { @ link ScoredValue } consisting of the results of applying the given function to the score of this element . * Mapping is performed only if a { @ link # hasValue ( ) value is present } . * @ param mapper a stateless function to apply to each element * @ return the new { @ link ScoredValue } */ @ SuppressWarnings ( "unchecked" ) public ScoredValue < V > mapScore ( Function < ? super Number , ? extends Number > mapper ) { } }
LettuceAssert . notNull ( mapper , "Mapper function must not be null" ) ; if ( hasValue ( ) ) { return new ScoredValue < V > ( mapper . apply ( score ) . doubleValue ( ) , getValue ( ) ) ; } return this ;
public class CachedRemoteTable { /** * Move or get this record and cache multiple records if possible . * @ param iRowOrRelative For get the row to retrieve , for move the relative row to retrieve . * @ param iRowCount The number of rows to retrieve ( Used only by EjbCachedTable ) . * @ param iAbsoluteRow The absolute row of the first row to retrieve ( or - 1 if unknown ) . * @ param bGet If true get , if false move . * @ return The record or an error code as an Integer . * @ exception Exception File exception . * @ exception RemoteException RMI exception . */ public Object cacheGetMove ( int iRowOrRelative , int iRowCount , int iAbsoluteRow , boolean bGet ) throws DBException , RemoteException { } }
m_bhtGet = bGet ; m_objCurrentPhysicalRecord = NONE ; m_objCurrentCacheRecord = NONE ; m_iCurrentLogicalPosition = iAbsoluteRow ; if ( ( m_iPhysicalLastRecordPlusOne != - 1 ) && ( m_iCurrentLogicalPosition >= m_iPhysicalLastRecordPlusOne ) ) return FieldTable . EOF_RECORD ; try { Object objData = null ; if ( ( m_mapCache == null ) && ( m_htCache == null ) ) m_mapCache = new ArrayCache ( ) ; if ( m_mapCache != null ) if ( iAbsoluteRow != - 1 ) objData = m_mapCache . get ( iAbsoluteRow ) ; // Try to find this in the cache if ( objData == NONE ) { objData = FieldTable . DELETED_RECORD ; // Deleted record } else if ( objData != null ) { m_objCurrentCacheRecord = new Integer ( iAbsoluteRow ) ; } else if ( objData == null ) { m_objCurrentLockedRecord = NONE ; // You lose your lock on physical move . if ( m_mapCache != null ) if ( iAbsoluteRow == m_mapCache . getEndIndex ( ) + 1 ) iRowCount = MULTIPLE_READ_COUNT ; // If you are adding to the end of the cache , try reading a bunch at once . if ( bGet ) objData = m_tableRemote . get ( iRowOrRelative , iRowCount ) ; else objData = m_tableRemote . doMove ( iRowOrRelative , iRowCount ) ; if ( objData instanceof Vector ) if ( ( ( Vector ) objData ) . size ( ) > 1 ) if ( ! ( ( ( Vector ) objData ) . get ( 0 ) instanceof Vector ) ) iRowCount = 1 ; // Multiple read not supported ( This vector is a record ) . if ( objData instanceof Vector ) { if ( m_mapCache != null ) { m_objCurrentCacheRecord = new Integer ( m_iCurrentLogicalPosition ) ; if ( iRowCount == 1 ) { m_mapCache . set ( m_iCurrentLogicalPosition , objData ) ; m_objCurrentPhysicalRecord = m_objCurrentCacheRecord ; } else { Vector < Object > objectVector = ( Vector ) objData ; for ( int i = objectVector . size ( ) - 1 ; i >= 0 ; i -- ) { // I go in reverse , so the objData object will be the object at the iAbsoluteRow . objData = objectVector . get ( i ) ; if ( objData instanceof Vector ) { if ( iAbsoluteRow != - 1 ) { m_mapCache . 
set ( iAbsoluteRow + i , objData ) ; if ( i == objectVector . size ( ) - 1 ) m_objCurrentPhysicalRecord = new Integer ( iAbsoluteRow + i ) ; } } else if ( ( m_iPhysicalLastRecordPlusOne == - 1 ) || ( m_iPhysicalLastRecordPlusOne <= m_iCurrentLogicalPosition + i ) ) m_iPhysicalLastRecordPlusOne = m_iCurrentLogicalPosition + i ; // Recordstatus = EOF else objData = FieldTable . DELETED_RECORD ; // Deleted record } } } } else { if ( ( m_iPhysicalLastRecordPlusOne == - 1 ) || ( m_iPhysicalLastRecordPlusOne <= m_iCurrentLogicalPosition ) ) m_iPhysicalLastRecordPlusOne = m_iCurrentLogicalPosition ; else { if ( ( ! FieldTable . DELETED_RECORD . equals ( objData ) ) && ( ! FieldTable . EOF_RECORD . equals ( objData ) ) ) objData = FieldTable . DELETED_RECORD ; // Deleted record ( if not one that I recognize ) } } } return objData ; } catch ( RemoteException ex ) { throw ex ; }
public class WsByteBufferUtils { /** * Expand an existing wsbb [ ] to include a new wsbb [ ] * @ param oldList * @ param newBuffers * @ return WsByteBuffer [ ] */ public static WsByteBuffer [ ] expandBufferArray ( WsByteBuffer [ ] oldList , WsByteBuffer [ ] newBuffers ) { } }
if ( null == oldList && null == newBuffers ) { // if both are null then just exit return null ; } int oldLen = ( null != oldList ? oldList . length : 0 ) ; int newLen = ( null != newBuffers ? newBuffers . length : 0 ) ; WsByteBuffer [ ] bb = new WsByteBuffer [ oldLen + newLen ] ; if ( 0 < oldLen ) { System . arraycopy ( oldList , 0 , bb , 0 , oldLen ) ; } if ( 0 < newLen ) { System . arraycopy ( newBuffers , 0 , bb , oldLen , newLen ) ; } return bb ;
public class TokenService { /** * GET / tokens * < p > Returns the list of the tokens generated before . */ @ Get ( "/tokens" ) public CompletableFuture < Collection < Token > > listTokens ( User loginUser ) { } }
if ( loginUser . isAdmin ( ) ) { return mds . getTokens ( ) . thenApply ( tokens -> tokens . appIds ( ) . values ( ) ) ; } else { return mds . getTokens ( ) . thenApply ( Tokens :: withoutSecret ) . thenApply ( tokens -> tokens . appIds ( ) . values ( ) ) ; }
public class RythmConfiguration { /** * Return { @ link RythmConfigurationKey # ENGINE _ LOAD _ PRECOMPILED _ ENABLED } * without lookup * @ return true if load precompiled */ public boolean loadPrecompiled ( ) { } }
if ( null == _loadPrecompiled ) { Boolean b = get ( ENGINE_LOAD_PRECOMPILED_ENABLED ) ; _loadPrecompiled = b || gae ( ) ; } return _loadPrecompiled ;
public class TaskManagerService { /** * Attempt to start the given task by creating claim and see if we win it . */ private void attemptToExecuteTask ( ApplicationDefinition appDef , Task task , TaskRecord taskRecord ) { } }
Tenant tenant = Tenant . getTenant ( appDef ) ; String taskID = taskRecord . getTaskID ( ) ; String claimID = "_claim/" + taskID ; long claimStamp = System . currentTimeMillis ( ) ; writeTaskClaim ( tenant , claimID , claimStamp ) ; if ( taskClaimedByUs ( tenant , claimID ) ) { startTask ( appDef , task , taskRecord ) ; } else { m_logger . info ( "Will not start task: it was claimed by another service" ) ; }
public class ApiOvhIpLoadbalancing { /** * Get this object properties * REST : GET / ipLoadbalancing / { serviceName } / tcp / frontend / { frontendId } * @ param serviceName [ required ] The internal name of your IP load balancing * @ param frontendId [ required ] Id of your frontend */ public OvhFrontendTcp serviceName_tcp_frontend_frontendId_GET ( String serviceName , Long frontendId ) throws IOException { } }
String qPath = "/ipLoadbalancing/{serviceName}/tcp/frontend/{frontendId}" ; StringBuilder sb = path ( qPath , serviceName , frontendId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFrontendTcp . class ) ;
public class ReservoirItemsUnion { /** * Union the given Memory image of the sketch . * < p > This method can be repeatedly called . If the given sketch is null it is interpreted as an * empty sketch . < / p > * @ param mem Memory image of sketch to be merged * @ param serDe An instance of ArrayOfItemsSerDe */ public void update ( final Memory mem , final ArrayOfItemsSerDe < T > serDe ) { } }
if ( mem == null ) { return ; } ReservoirItemsSketch < T > ris = ReservoirItemsSketch . heapify ( mem , serDe ) ; ris = ( ris . getK ( ) <= maxK_ ? ris : ris . downsampledCopy ( maxK_ ) ) ; if ( gadget_ == null ) { createNewGadget ( ris , true ) ; } else { twoWayMergeInternal ( ris , true ) ; }
public class VdmEvaluationContextManager {
    /**
     * Removes an evaluation context for the given page, and determines if any valid execution
     * contexts remain. When the last page's context is removed, the debugger capability system
     * properties are reset so UI contributions keyed on them are disabled.
     *
     * @param page the workbench page whose evaluation context is being removed
     */
    private void removeContext(IWorkbenchPage page) {
        // Removed: a stale commented-out copy of this same logic that had drifted
        // out of sync with the live implementation below.
        if (fContextsByPage == null) {
            return;
        }
        fContextsByPage.remove(page);
        if (fContextsByPage.isEmpty()) {
            // No pages hold a context any more: clear every capability flag.
            System.setProperty(DEBUGGER_ACTIVE, "false"); //$NON-NLS-1$
            System.setProperty(INSTANCE_OF_IJAVA_STACK_FRAME, "false"); //$NON-NLS-1$
            System.setProperty(SUPPORTS_FORCE_RETURN, "false"); //$NON-NLS-1$
            System.setProperty(SUPPORTS_INSTANCE_RETRIEVAL, "false"); //$NON-NLS-1$
        }
    }
}
public class InputLine { /** * Read line from terminal . * @ param initial The initial ( default ) value . * @ return The resulting line . */ public String readLine ( String initial ) { } }
this . before = initial == null ? "" : initial ; this . after = "" ; this . printedError = null ; if ( initial != null && initial . length ( ) > 0 && ! lineValidator . validate ( initial , e -> { } ) ) { throw new IllegalArgumentException ( "Invalid initial value: " + initial ) ; } terminal . formatln ( "%s: %s" , message , before ) ; try { for ( ; ; ) { Char c = terminal . read ( ) ; if ( c == null ) { throw new IOException ( "End of input." ) ; } int ch = c . asInteger ( ) ; if ( ch == Char . CR || ch == Char . LF ) { String line = before + after ; if ( lineValidator . validate ( line , this :: printAbove ) ) { return line ; } continue ; } handleInterrupt ( ch , c ) ; if ( handleTab ( ch ) || handleBackSpace ( ch ) || handleControl ( c ) ) { continue ; } if ( charValidator . validate ( c , this :: printAbove ) ) { before = before + c . toString ( ) ; printInputLine ( ) ; } } } catch ( IOException e ) { throw new UncheckedIOException ( e ) ; }
public class DefaultNodeManager { /** * Checks if a module is installed . */ private void doInstalled ( final Message < JsonObject > message ) { } }
String moduleName = message . body ( ) . getString ( "module" ) ; if ( moduleName == null ) { message . reply ( new JsonObject ( ) . putString ( "status" , "error" ) . putString ( "message" , "No module specified." ) ) ; return ; } platform . getModuleInfo ( moduleName , new Handler < AsyncResult < ModuleInfo > > ( ) { @ Override public void handle ( AsyncResult < ModuleInfo > result ) { if ( result . failed ( ) || result . result ( ) == null ) { message . reply ( new JsonObject ( ) . putString ( "status" , "ok" ) . putBoolean ( "result" , false ) ) ; } else { message . reply ( new JsonObject ( ) . putString ( "status" , "ok" ) . putBoolean ( "result" , true ) ) ; } } } ) ;
public class Install {
    /**
     * Reads all XML update files and parses them.
     *
     * Populates {@code this.cache} (identifier -&gt; list of updates) from {@code this.files},
     * once only; subsequent calls are no-ops because {@code initialised} is set first.
     *
     * @see #initialised
     * @throws InstallationException on error
     */
    protected void initialise() throws InstallationException {
        if (!this.initialised) {
            // Mark initialised up front so a failure part-way through is not retried.
            this.initialised = true;
            this.cache.clear();
            AppDependency.initialise();
            for (final FileType fileType : FileType.values()) {
                if (fileType == FileType.XML) {
                    // XML files are parsed through a SAX handler.
                    for (final InstallFile file : this.files) {
                        if (file.getType() == fileType) {
                            final SaxHandler handler = new SaxHandler();
                            try {
                                final IUpdate elem = handler.parse(file);
                                // Group updates by identifier, creating the bucket on first use.
                                final List<IUpdate> list;
                                if (this.cache.containsKey(elem.getIdentifier())) {
                                    list = this.cache.get(elem.getIdentifier());
                                } else {
                                    list = new ArrayList<>();
                                    this.cache.put(elem.getIdentifier(), list);
                                }
                                list.add(handler.getUpdate());
                            } catch (final SAXException e) {
                                throw new InstallationException("initialise()", e);
                            } catch (final IOException e) {
                                throw new InstallationException("initialise()", e);
                            }
                        }
                    }
                } else {
                    // Non-XML file types are read via a static readFile(InstallFile)
                    // method looked up reflectively on each registered update class.
                    for (final Class<? extends IUpdate> updateClass : fileType.getClazzes()) {
                        Method method = null;
                        try {
                            method = updateClass.getMethod("readFile", InstallFile.class);
                        } catch (final SecurityException e) {
                            throw new InstallationException("initialise()", e);
                        } catch (final NoSuchMethodException e) {
                            throw new InstallationException("initialise()", e);
                        }
                        for (final InstallFile file : this.files) {
                            if (file.getType() == fileType) {
                                Object obj = null;
                                try {
                                    obj = method.invoke(null, file);
                                } catch (final IllegalArgumentException e) {
                                    throw new InstallationException("initialise()", e);
                                } catch (final IllegalAccessException e) {
                                    throw new InstallationException("initialise()", e);
                                } catch (final InvocationTargetException e) {
                                    throw new InstallationException("initialise()", e);
                                }
                                // readFile may legitimately return null or a non-IUpdate; skip those.
                                if (obj != null && obj instanceof IUpdate) {
                                    final IUpdate iUpdate = (IUpdate) obj;
                                    // Same identifier-bucketing as the XML branch above.
                                    final List<IUpdate> list;
                                    if (this.cache.containsKey(iUpdate.getIdentifier())) {
                                        list = this.cache.get(iUpdate.getIdentifier());
                                    } else {
                                        list = new ArrayList<>();
                                        this.cache.put(iUpdate.getIdentifier(), list);
                                    }
                                    list.add(iUpdate);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
public class ClusterStreamManagerImpl { /** * TODO : we could have this method return a Stream etc . so it doesn ' t have to iterate upon keys multiple times ( helps rehash and tx ) */ private Set < K > determineExcludedKeys ( IntFunction < Set < K > > keysToExclude , IntSet segmentsToUse ) { } }
if ( keysToExclude == null ) { return Collections . emptySet ( ) ; } // Special map only supports get operations return segmentsToUse . intStream ( ) . mapToObj ( s -> { Set < K > keysForSegment = keysToExclude . apply ( s ) ; if ( keysForSegment != null ) { return keysForSegment . stream ( ) ; } return null ; } ) . flatMap ( Function . identity ( ) ) . collect ( Collectors . toSet ( ) ) ;
public class AbstractCreator {
    /**
     * Build a {@link JDeserializerType} that instantiate a {@link JsonSerializer} for the given type. If the type is a bean,
     * the implementation of {@link AbstractBeanJsonSerializer} will be created.
     *
     * @param type type
     * @param subtype true if the deserializer is for a subtype
     * @return the {@link JDeserializerType}. Examples:
     * <ul>
     * <li>ctx.getIntegerDeserializer()</li>
     * <li>new org.PersonBeanJsonDeserializer()</li>
     * </ul>
     * @throws com.google.gwt.core.ext.UnableToCompleteException if any.
     * @throws com.github.nmorel.gwtjackson.rebind.exception.UnsupportedTypeException if any.
     */
    protected final JDeserializerType getJsonDeserializerFromType(JType type, boolean subtype) throws UnableToCompleteException, UnsupportedTypeException {
        JDeserializerType.Builder builder = new JDeserializerType.Builder().type(type);
        // Normalise wildcard and raw types down to their base type first.
        if (null != type.isWildcard()) {
            // For wildcard type, we use the base type to find the deserializer.
            type = type.isWildcard().getBaseType();
        }
        if (null != type.isRawType()) {
            // For raw type, we use the base type to find the deserializer.
            type = type.isRawType().getBaseType();
        }
        JTypeParameter typeParameter = type.isTypeParameter();
        if (null != typeParameter) {
            // It's a type parameter like T in 'MyClass<T>'
            if (!subtype || typeParameter.getDeclaringClass() == getMapperInfo().get().getType()) {
                // The deserializer is created for the main type so we use the deserializer field declared for this type.
                return builder.instance(CodeBlock.builder()
                        .add(String.format(TYPE_PARAMETER_DESERIALIZER_FIELD_NAME, typeParameter.getOrdinal()))
                        .add(".json()")
                        .build()).build();
            } else {
                // There is no declared deserializer so we use the base type to find a deserializer.
                type = typeParameter.getBaseType();
            }
        }
        if (typeOracle.isEnumSupertype(type)) {
            String message = "Type java.lang.Enum is not supported by deserialization";
            logger.log(TreeLogger.Type.WARN, message);
            throw new UnsupportedTypeException(message);
        }
        Optional<MapperInstance> configuredDeserializer = configuration.getDeserializer(type);
        if (configuredDeserializer.isPresent()) {
            // The type is configured in AbstractConfiguration.
            if (null != type.isParameterized() || null != type.isGenericType()) {
                JClassType[] typeArgs;
                if (null != type.isGenericType()) {
                    typeArgs = type.isGenericType().asParameterizedByWildcards().getTypeArgs();
                } else {
                    typeArgs = type.isParameterized().getTypeArgs();
                }
                // Build a deserializer for each type argument; KEY_DESERIALIZER slots
                // (e.g. map keys) use the key-deserializer path instead.
                ImmutableList.Builder<JDeserializerType> parametersDeserializerBuilder = ImmutableList.builder();
                for (int i = 0; i < typeArgs.length; i++) {
                    JDeserializerType parameterDeserializerType;
                    if (MapperType.KEY_DESERIALIZER == configuredDeserializer.get().getParameters()[i]) {
                        parameterDeserializerType = getKeyDeserializerFromType(typeArgs[i]);
                    } else {
                        parameterDeserializerType = getJsonDeserializerFromType(typeArgs[i], subtype);
                    }
                    parametersDeserializerBuilder.add(parameterDeserializerType);
                }
                ImmutableList<JDeserializerType> parametersDeserializer = parametersDeserializerBuilder.build();
                builder.parameters(parametersDeserializer);
                builder.instance(methodCallCodeWithJMapperTypeParameters(configuredDeserializer.get(), parametersDeserializer));
            } else {
                // The deserializer has no parameters.
                builder.instance(methodCallCode(configuredDeserializer.get()));
            }
            return builder.build();
        }
        if (typeOracle.isJavaScriptObject(type)) {
            // It's a JSO and the user didn't give a custom deserializer. We use the default one.
            configuredDeserializer = configuration.getDeserializer(typeOracle.getJavaScriptObject());
            return builder.instance(methodCallCode(configuredDeserializer.get())).build();
        }
        if (typeOracle.isEnum(type)) {
            // Enums use the shared enum deserializer parameterised by the enum class.
            configuredDeserializer = configuration.getDeserializer(typeOracle.getEnum());
            return builder.instance(methodCallCodeWithClassParameters(configuredDeserializer.get(), ImmutableList.of(type))).build();
        }
        JArrayType arrayType = type.isArray();
        if (null != arrayType) {
            // Arrays need an anonymous ArrayCreator plus a deserializer for the leaf type.
            TypeSpec arrayCreator;
            Class arrayDeserializer;
            JType leafType = arrayType.getLeafType();
            if (arrayType.getRank() == 1) {
                // One dimension array
                arrayCreator = TypeSpec.anonymousClassBuilder("")
                        .addSuperinterface(parameterizedName(ArrayCreator.class, leafType))
                        .addMethod(MethodSpec.methodBuilder("create")
                                .addAnnotation(Override.class)
                                .addModifiers(Modifier.PUBLIC)
                                .addParameter(int.class, "length")
                                .addStatement("return new $T[$N]", rawName(leafType), "length")
                                .returns(typeName(arrayType))
                                .build())
                        .build();
                arrayDeserializer = ArrayJsonDeserializer.class;
            } else if (arrayType.getRank() == 2) {
                // Two dimensions array
                arrayCreator = TypeSpec.anonymousClassBuilder("")
                        .addSuperinterface(parameterizedName(Array2dCreator.class, leafType))
                        .addMethod(MethodSpec.methodBuilder("create")
                                .addAnnotation(Override.class)
                                .addModifiers(Modifier.PUBLIC)
                                .addParameter(int.class, "first")
                                .addParameter(int.class, "second")
                                .addStatement("return new $T[$N][$N]", rawName(leafType), "first", "second")
                                .returns(typeName(arrayType))
                                .build())
                        .build();
                arrayDeserializer = Array2dJsonDeserializer.class;
            } else {
                // More dimensions are not supported
                String message = "Arrays with 3 or more dimensions are not supported";
                logger.log(TreeLogger.Type.WARN, message);
                throw new UnsupportedTypeException(message);
            }
            JDeserializerType parameterDeserializerType = getJsonDeserializerFromType(leafType, subtype);
            builder.parameters(ImmutableList.of(parameterDeserializerType));
            builder.instance(CodeBlock.builder()
                    .add("$T.newInstance($L, $L)", arrayDeserializer, parameterDeserializerType.getInstance(), arrayCreator)
                    .build());
            return builder.build();
        }
        if (null != type.isAnnotation()) {
            String message = "Annotations are not supported";
            logger.log(TreeLogger.Type.WARN, message);
            throw new UnsupportedTypeException(message);
        }
        JClassType classType = type.isClassOrInterface();
        if (null != classType) {
            // The type is a class or interface and has no default deserializer. We generate one.
            JClassType baseClassType = classType;
            JParameterizedType parameterizedType = classType.isParameterized();
            if (null != parameterizedType) {
                // It's a bean with generics, we create a deserializer based on generic type.
                baseClassType = parameterizedType.getBaseType();
            }
            BeanJsonDeserializerCreator beanJsonDeserializerCreator = new BeanJsonDeserializerCreator(
                    logger.branch(Type.DEBUG, "Creating deserializer for " + baseClassType.getQualifiedSourceName()),
                    context, configuration, typeOracle, baseClassType);
            BeanJsonMapperInfo mapperInfo = beanJsonDeserializerCreator.create();
            // Generics and parameterized types deserializers have no default constructor. They need deserializers for each parameter.
            ImmutableList<? extends JType> typeParameters = getTypeParameters(classType, subtype);
            ImmutableList.Builder<JParameterizedDeserializer> parametersDeserializerBuilder = ImmutableList.builder();
            ImmutableList.Builder<JDeserializerType> parametersJsonDeserializerBuilder = ImmutableList.builder();
            for (JType argType : typeParameters) {
                JDeserializerType jsonDeserializer = getJsonDeserializerFromType(argType, subtype);
                parametersDeserializerBuilder.add(new JParameterizedDeserializer(getKeyDeserializerFromType(argType, subtype, true), jsonDeserializer));
                parametersJsonDeserializerBuilder.add(jsonDeserializer);
            }
            builder.parameters(parametersJsonDeserializerBuilder.build());
            builder.beanMapper(true);
            builder.instance(constructorCallCode(ClassName.get(mapperInfo.getPackageName(), mapperInfo.getSimpleDeserializerClassName()), parametersDeserializerBuilder.build()));
            return builder.build();
        }
        // No rule matched: refuse the type.
        String message = "Type '" + type.getQualifiedSourceName() + "' is not supported";
        logger.log(TreeLogger.Type.WARN, message);
        throw new UnsupportedTypeException(message);
    }
}
public class IntCounter { /** * Returns the set of keys whose counts are at or above the given threshold . * This set may have 0 elements but will not be null . */ public Set < E > keysAbove ( int countThreshold ) { } }
Set < E > keys = new HashSet < E > ( ) ; for ( E key : map . keySet ( ) ) { if ( getIntCount ( key ) >= countThreshold ) { keys . add ( key ) ; } } return keys ;
public class CPTaxCategoryServiceBaseImpl {
    /**
     * Sets the cp option remote service.
     *
     * @param cpOptionService the cp option remote service
     */
    public void setCPOptionService(com.liferay.commerce.product.service.CPOptionService cpOptionService) {
        // Plain dependency-injection setter; no validation or side effects.
        this.cpOptionService = cpOptionService;
    }
}
public class KeyVaultClientBaseImpl {
    /**
     * Updates the specified attributes associated with the given certificate.
     * The UpdateCertificate operation applies the specified update on the given certificate; the only elements updated are the certificate's attributes. This operation requires the certificates/update permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param certificateName The name of the certificate in the given key vault.
     * @param certificateVersion The version of the certificate.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CertificateBundle> updateCertificateAsync(String vaultBaseUrl, String certificateName, String certificateVersion, final ServiceCallback<CertificateBundle> serviceCallback) {
        // Thin async adapter: delegates to the ServiceResponse variant and wires the callback.
        return ServiceFuture.fromResponse(updateCertificateWithServiceResponseAsync(vaultBaseUrl, certificateName, certificateVersion), serviceCallback);
    }
}
public class MtasDataDoubleAdvanced { /** * ( non - Javadoc ) * @ see * mtas . codec . util . DataCollector . MtasDataCollector # minimumForComputingSegment ( */ @ Override protected Double lastForComputingSegment ( ) throws IOException { } }
if ( segmentRegistration . equals ( SEGMENT_SORT_ASC ) || segmentRegistration . equals ( SEGMENT_BOUNDARY_ASC ) ) { return Collections . max ( segmentValueTopList ) ; } else if ( segmentRegistration . equals ( SEGMENT_SORT_DESC ) || segmentRegistration . equals ( SEGMENT_BOUNDARY_DESC ) ) { return Collections . min ( segmentValueTopList ) ; } else { throw new IOException ( "can't compute last for segmentRegistration " + segmentRegistration ) ; }
public class WAttribute {
    /**
     * Extracts the new set-value from the CORBA Any, checks it against the configured
     * min/max limits where applicable, and stores it in the type-specific value field,
     * saving the previous value first.
     *
     * NOTE(review): the limit checks use &lt;= / &gt;= so values exactly equal to
     * min_value / max_value are rejected — confirm that boundary exclusion is intended.
     *
     * @param any the CORBA Any carrying the value written by the client
     * @throws DevFailed if the Any holds an incompatible type or the value is outside limits
     */
    void set_write_value(Any any) throws DevFailed {
        switch (data_type) {
            case Tango_DEV_STATE:
                // Check data type inside the any
                DevState[] st = null;
                try {
                    st = DevVarStateArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    // NOTE(review): message says Tango_DevBoolean in the DevState case —
                    // looks like a copy/paste slip; confirm before changing user-visible text.
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevBoolean",
                            "WAttribute.set_write_value()");
                }
                // No min max value check for boolean !
                state_val = st[0];
                break;

            case Tango_DEV_BOOLEAN:
                // Check data type inside the any
                boolean[] bl_ptr = null;
                try {
                    bl_ptr = DevVarBooleanArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevBoolean",
                            "WAttribute.set_write_value()");
                }
                // No min max value check for boolean !
                old_bool_val = bool_val;
                bool_val = bl_ptr[0];
                break;

            case Tango_DEV_SHORT:
                // Check data type inside the any
                short[] sh_ptr = null;
                try {
                    sh_ptr = DevVarShortArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevShort",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                if (check_min_value == true) {
                    if (sh_ptr[0] <= min_value.sh) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (sh_ptr[0] >= max_value.sh) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_short_val = short_val;
                short_val = sh_ptr[0];
                break;

            case Tango_DEV_USHORT:
                // Check data type inside the any
                short[] ush_ptr = null;
                try {
                    ush_ptr = DevVarUShortArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevUShort",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                // Widen the unsigned 16-bit value into a non-negative int.
                int ush_val = (0xFFFF & ush_ptr[0]);
                if (check_min_value == true) {
                    if (ush_val <= min_value.sh) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (ush_val >= max_value.sh) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_long_val = long_val;
                long_val = ush_val;
                break;

            case Tango_DEV_LONG:
                // Check data type inside the any
                int[] lg_ptr = null;
                try {
                    lg_ptr = DevVarLongArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevLong",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                if (check_min_value == true) {
                    if (lg_ptr[0] <= min_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (lg_ptr[0] >= max_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_long_val = long_val;
                long_val = lg_ptr[0];
                break;

            case Tango_DEV_ULONG:
                // Check data type inside the any
                int[] ulg_ptr = null;
                try {
                    ulg_ptr = DevVarULongArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevULong",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                // Widen the unsigned 32-bit value into a non-negative long.
                long mask = (long) 0x7fffffff;
                mask += ((long) 1 << 31);
                long ulg_val = (mask & (long) ulg_ptr[0]);
                if (check_min_value == true) {
                    if (ulg_val <= min_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (ulg_val >= max_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_long64_val = long64_val;
                long64_val = ulg_val;
                break;

            case Tango_DEV_LONG64:
                // Check data type inside the any
                long[] lg64_ptr = null;
                try {
                    lg64_ptr = DevVarLong64ArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevLong",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                if (check_min_value == true) {
                    if (lg64_ptr[0] <= min_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (lg64_ptr[0] >= max_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_long64_val = long64_val;
                long64_val = lg64_ptr[0];
                break;

            case Tango_DEV_ULONG64:
                // Check data type inside the any
                long[] ulg64_ptr = null;
                try {
                    ulg64_ptr = DevVarLong64ArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevLong",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                if (check_min_value == true) {
                    if (ulg64_ptr[0] <= min_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (ulg64_ptr[0] >= max_value.lg) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_long64_val = long64_val;
                long64_val = ulg64_ptr[0];
                break;

            case Tango_DEV_DOUBLE:
                // Check data type inside the any
                double[] db_ptr = null;
                try {
                    db_ptr = DevVarDoubleArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevDouble",
                            "WAttribute.set_write_value()");
                }
                // Check the incoming value
                if (check_min_value == true) {
                    if (db_ptr[0] <= min_value.db) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is below the minimum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                if (check_max_value == true) {
                    if (db_ptr[0] >= max_value.db) {
                        StringBuffer o = new StringBuffer("Set value for attribute ");
                        o.append(name);
                        o.append(" is above the maximum authorized");
                        Except.throw_exception("API_WAttrOutsideLimit", o.toString(),
                                "WAttribute.set_write_value()");
                    }
                }
                old_double_val = double_val;
                double_val = db_ptr[0];
                break;

            case Tango_DEV_STRING:
                // No limit checks for strings.
                String[] str_ptr = null;
                try {
                    str_ptr = DevVarStringArrayHelper.extract(any);
                } catch (BAD_OPERATION ex) {
                    Except.throw_exception("API_IncompatibleAttrDataType",
                            "Incompatible attribute type, expected type is : Tango_DevString",
                            "WAttribute.set_write_value()");
                }
                old_str_val = str_val;
                str_val = str_ptr[0];
                break;
        }
    }
}
public class BaseLogger { /** * Creates a new instance of the { @ link BaseLogger Logger } . * @ param loggerClass the type of the logger * @ param projectCode the unique code for a complete project . * @ param name the name of the slf4j logger to use . * @ param componentId the unique id of the component . */ public static < T extends BaseLogger > T createLogger ( Class < T > loggerClass , String projectCode , String name , String componentId ) { } }
try { T logger = loggerClass . newInstance ( ) ; logger . projectCode = projectCode ; logger . componentId = componentId ; logger . delegateLogger = LoggerFactory . getLogger ( name ) ; return logger ; } catch ( InstantiationException e ) { throw new RuntimeException ( "Unable to instantiate logger '" + loggerClass . getName ( ) + "'" , e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Unable to instantiate logger '" + loggerClass . getName ( ) + "'" , e ) ; }
public class DefaultEquationSupport { /** * { @ inheritDoc } */ @ Override public void clear ( ) { } }
final Enumeration < Variable > varIter = getVariables ( ) . elements ( ) ; while ( varIter . hasMoreElements ( ) ) { final Variable var = varIter . nextElement ( ) ; if ( var . systemGenerated ) continue ; removeVariable ( var . name ) ; }
public class KdTreeSearchNStandard {
    /**
     * Recursive step for finding the closest point.
     *
     * Descends the tree toward the target, collecting candidate neighbors, and
     * only explores the far branch when the splitting plane is close enough
     * that it could still hold a better match.
     */
    private void stepClosest(KdTree.Node node, FastQueue<KdTreeResult> neighbors) {
        if (node == null)
            return;

        // Consider the current node itself as a candidate neighbor.
        checkBestDistance(node, neighbors);
        if (node.isLeaf()) {
            return;
        }

        // select the most promising branch to investigate first
        KdTree.Node nearer, further;
        double splitValue = distance.valueAt((P) node.point, node.split);
        double targetAtSplit = distance.valueAt(target, node.split);
        if (targetAtSplit <= splitValue) {
            nearer = node.left;
            further = node.right;
        } else {
            nearer = node.right;
            further = node.left;
        }
        stepClosest(nearer, neighbors);

        // See if it is possible for 'further' to contain a better node
        // Or if N matches have yet to be find, if it is possible to meet the maximum distance requirement
        double dx = splitValue - targetAtSplit;
        if (dx * dx <= mostDistantNeighborSq) {
            // Strict inequality prunes ties once the neighbor list is full.
            if (neighbors.size() < searchN || dx * dx < mostDistantNeighborSq) {
                stepClosest(further, neighbors);
            }
        }
    }
}
public class NormOps_DDRM { /** * This implementation of the Frobenius norm is a straight forward implementation and can * be susceptible for overflow / underflow issues . A more resilient implementation is * { @ link # normF } . * @ param a The matrix whose norm is computed . Not modified . */ public static double fastNormF ( DMatrixD1 a ) { } }
double total = 0 ; int size = a . getNumElements ( ) ; for ( int i = 0 ; i < size ; i ++ ) { double val = a . get ( i ) ; total += val * val ; } return Math . sqrt ( total ) ;
public class JobListFromJobScheduleOptions { /** * Set the time the request was issued . Client libraries typically set this to the current system clock time ; set it explicitly if you are calling the REST API directly . * @ param ocpDate the ocpDate value to set * @ return the JobListFromJobScheduleOptions object itself . */ public JobListFromJobScheduleOptions withOcpDate ( DateTime ocpDate ) { } }
if ( ocpDate == null ) { this . ocpDate = null ; } else { this . ocpDate = new DateTimeRfc1123 ( ocpDate ) ; } return this ;
public class Application { /** * This option is for advanced users only . This is meta information about third - party applications that third - party * vendors use for testing purposes . * @ param additionalInfo * This option is for advanced users only . This is meta information about third - party applications that * third - party vendors use for testing purposes . * @ return Returns a reference to this object so that method calls can be chained together . */ public Application withAdditionalInfo ( java . util . Map < String , String > additionalInfo ) { } }
setAdditionalInfo ( additionalInfo ) ; return this ;
public class HttpRequestFactory { /** * Builds a { @ code PUT } request for the given URL and content . * @ param url HTTP request URL or { @ code null } for none * @ param content HTTP request content or { @ code null } for none * @ return new HTTP request */ public HttpRequest buildPutRequest ( GenericUrl url , HttpContent content ) throws IOException { } }
return buildRequest ( HttpMethods . PUT , url , content ) ;
public class PolymerPassStaticUtils {
    /**
     * Gets the JSTypeExpression for a given property using its "type" key.
     *
     * Preference order: an explicit JSDoc type on the property wins; otherwise
     * the Polymer "type" constructor name (e.g. {@code String}, {@code Array})
     * is mapped to the corresponding Closure type. Reports
     * POLYMER_INVALID_PROPERTY and returns null when the type cannot be
     * determined.
     *
     * @see https://github.com/Polymer/polymer/blob/0.8-preview/PRIMER.md#configuring-properties
     */
    static JSTypeExpression getTypeFromProperty( MemberDefinition property, AbstractCompiler compiler ) {
        // Explicit JSDoc type annotation takes precedence over the Polymer "type" key.
        if (property.info != null && property.info.hasType()) {
            return property.info.getType();
        }
        String typeString;
        if (property.value.isObjectLit()) {
            // Long form: { type: String, value: ... } — pull the "type" entry.
            Node typeValue = NodeUtil.getFirstPropMatchingKey(property.value, "type");
            if (typeValue == null || !typeValue.isName()) {
                compiler.report(JSError.make(property.name, PolymerPassErrors.POLYMER_INVALID_PROPERTY));
                return null;
            }
            typeString = typeValue.getString();
        } else if (property.value.isName()) {
            // Short form: the value itself is the type constructor name.
            typeString = property.value.getString();
        } else {
            // Anything else is invalid; the empty string falls to the default case below.
            typeString = "";
        }
        Node typeNode;
        switch (typeString) {
            // Primitives map to their lowercase Closure type names.
            case "Boolean":
            case "String":
            case "Number":
                typeNode = IR.string(typeString.toLowerCase());
                break;
            // Object types are non-nullable references (! prefix).
            case "Array":
            case "Function":
            case "Object":
            case "Date":
                typeNode = new Node(Token.BANG, IR.string(typeString));
                break;
            default:
                compiler.report(JSError.make(property.name, PolymerPassErrors.POLYMER_INVALID_PROPERTY));
                return null;
        }
        return new JSTypeExpression(typeNode, VIRTUAL_FILE);
    }
}
public class GenericOAuth20ProfileDefinition { /** * Add an attribute as a primary one and its converter . * @ param name name of the attribute * @ param converter converter */ public void profileAttribute ( final String name , final AttributeConverter < ? extends Object > converter ) { } }
profileAttribute ( name , name , converter ) ;
public class CompletionKey { /** * Cleanup resources held by CompletionKey */ protected void destroy ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "CompletionKey::destroy entered for:" + this ) ; } // Free existing ByteBuffer objects . if ( this . rawData != null ) { if ( this . wsByteBuf != null ) { this . wsByteBuf . release ( ) ; this . wsByteBuf = null ; } this . rawData = null ; this . stagingByteBuffer = null ; }
public class CouchDBClientFactory { /** * ( non - Javadoc ) * @ see * com . impetus . kundera . loader . GenericClientFactory # initialize ( java . util . Map ) */ @ Override public void initialize ( Map < String , Object > externalProperty ) { } }
reader = new CouchDBEntityReader ( kunderaMetadata ) ; initializePropertyReader ( ) ; setExternalProperties ( externalProperty ) ;
public class DataSet { /** * Writes a DataSet to the standard output stream ( stdout ) . * < p > For each element of the DataSet the result of { @ link Object # toString ( ) } is written . * @ param sinkIdentifier The string to prefix the output with . * @ return The DataSink that writes the DataSet . * @ deprecated Use { @ link # printOnTaskManager ( String ) } instead . */ @ Deprecated @ PublicEvolving public DataSink < T > print ( String sinkIdentifier ) { } }
return output ( new PrintingOutputFormat < T > ( sinkIdentifier , false ) ) ;
public class SessionApi { /** * Get configuration settings * Get all configuration items needed by the user interface . This includes action codes , business attributes , transactions , and settings . * @ param types A comma delimited list of types used to specify what content should be returned . If not specified the default is & # x60 ; actionCodes , agentGroups , settings & # x60 ; . Valid values are & # x60 ; actionCodes & # x60 ; , & # x60 ; agentGroups & # x60 ; , & # x60 ; settings & # x60 ; , & # x60 ; workspaceTransactions & # x60 ; , and & # x60 ; businessAttributes & # x60 ; . ( optional ) * @ return ApiResponse & lt ; ConfigResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ConfigResponse > getConfigurationWithHttpInfo ( String types ) throws ApiException { } }
com . squareup . okhttp . Call call = getConfigurationValidateBeforeCall ( types , null , null ) ; Type localVarReturnType = new TypeToken < ConfigResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class ChildWorkflowExecutionFailedEventAttributesMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ChildWorkflowExecutionFailedEventAttributes childWorkflowExecutionFailedEventAttributes , ProtocolMarshaller protocolMarshaller ) { } }
if ( childWorkflowExecutionFailedEventAttributes == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( childWorkflowExecutionFailedEventAttributes . getWorkflowExecution ( ) , WORKFLOWEXECUTION_BINDING ) ; protocolMarshaller . marshall ( childWorkflowExecutionFailedEventAttributes . getWorkflowType ( ) , WORKFLOWTYPE_BINDING ) ; protocolMarshaller . marshall ( childWorkflowExecutionFailedEventAttributes . getReason ( ) , REASON_BINDING ) ; protocolMarshaller . marshall ( childWorkflowExecutionFailedEventAttributes . getDetails ( ) , DETAILS_BINDING ) ; protocolMarshaller . marshall ( childWorkflowExecutionFailedEventAttributes . getInitiatedEventId ( ) , INITIATEDEVENTID_BINDING ) ; protocolMarshaller . marshall ( childWorkflowExecutionFailedEventAttributes . getStartedEventId ( ) , STARTEDEVENTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsHdrsImpl {
    /**
     * Helper method used by the JMO to rewrite any transient data back into the
     * underlying JMF message.
     * Package level visibility as used by the JMO.
     *
     * The superclass write-back runs first, then the local flags, then any
     * cached message wait time is flushed to the header field.
     *
     * @param why The reason why updateDataFields is being called
     */
    @Override
    void updateDataFields(int why) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "updateDataFields");
        // Let the superclass flush its own transients before writing ours.
        super.updateDataFields(why);
        setFlags();
        /* If the cachedMessageWaitTime transient has ever been set, write it back */
        if (cachedMessageWaitTime != null) {
            getHdr2().setField(JsHdr2Access.MESSAGEWAITTIME, cachedMessageWaitTime);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "updateDataFields");
    }
}
public class RecurlyClient { /** * Lookup an account ' s transactions history * Returns the account ' s transaction history * @ param accountCode recurly account id * @ return the transaction history associated with this account on success , null otherwise */ public Transactions getAccountTransactions ( final String accountCode ) { } }
return doGET ( Accounts . ACCOUNTS_RESOURCE + "/" + accountCode + Transactions . TRANSACTIONS_RESOURCE , Transactions . class , new QueryParams ( ) ) ;
public class Vector3f { /** * Set the x , y and z components to match the supplied vector . * @ param v * contains the values of x , y and z to set * @ return this */ public Vector3f set ( Vector3ic v ) { } }
return set ( v . x ( ) , v . y ( ) , v . z ( ) ) ;
public class Path3dfx {
    /**
     * Replies the path length property.
     *
     * The property is created lazily on first access and bound to a computed
     * binding that re-evaluates the path length whenever the element types or
     * coordinates of the path change.
     *
     * @return the length property.
     */
    public DoubleProperty lengthProperty() {
        if (this.length == null) {
            this.length = new ReadOnlyDoubleWrapper();
            // Recompute from a fresh path iterator whenever the underlying geometry changes.
            this.length.bind(Bindings.createDoubleBinding(
                    () -> Path3afp.computeLength(getPathIterator()),
                    innerTypesProperty(), innerCoordinatesProperty()));
        }
        return this.length;
    }
}
public class EAN13Reader { /** * Based on pattern of odd - even ( ' L ' and ' G ' ) patterns used to encoded the explicitly - encoded * digits in a barcode , determines the implicitly encoded first digit and adds it to the * result string . * @ param resultString string to insert decoded first digit into * @ param lgPatternFound int whose bits indicates the pattern of odd / even L / G patterns used to * encode digits * @ throws NotFoundException if first digit cannot be determined */ private static void determineFirstDigit ( StringBuilder resultString , int lgPatternFound ) throws NotFoundException { } }
for ( int d = 0 ; d < 10 ; d ++ ) { if ( lgPatternFound == FIRST_DIGIT_ENCODINGS [ d ] ) { resultString . insert ( 0 , ( char ) ( '0' + d ) ) ; return ; } } throw NotFoundException . getNotFoundInstance ( ) ;
public class ColorValidator {
    /**
     * <!-- begin-user-doc -->
     * Validates the document root by applying every default EMF constraint.
     * Do not hand-edit: this method is regenerated by the EMF tooling.
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean validateDocumentRoot(DocumentRoot documentRoot, DiagnosticChain diagnostics, Map<Object, Object> context) {
        return validate_EveryDefaultConstraint(documentRoot, diagnostics, context);
    }
}
public class CardPayment { /** * Primary account number that uniquely identifies this card . * @ return pan */ @ ApiModelProperty ( required = true , value = "Primary account number that uniquely identifies this card." ) @ JsonProperty ( "pan" ) @ NotNull @ Pattern ( regexp = "[0-9]{1,19}" ) @ Masked public String getPan ( ) { } }
return pan ;
public class SemanticHighlightingRegistry { /** * Returns with the internal scope index for the argument . Returns { @ code - 1 } if the scopes * argument is < code > null < / code > , the { @ link SemanticHighlightingRegistry # UNKNOWN _ SCOPES unknown scopes } * or is not registered to this manager . */ public int getIndex ( final List < String > scopes ) { } }
this . checkInitialized ( ) ; boolean _isNullOrUnknown = this . isNullOrUnknown ( scopes ) ; if ( _isNullOrUnknown ) { return ( - 1 ) ; } final Integer index = this . scopes . inverse ( ) . get ( scopes ) ; Integer _xifexpression = null ; if ( ( index == null ) ) { _xifexpression = Integer . valueOf ( ( - 1 ) ) ; } else { _xifexpression = index ; } return ( _xifexpression ) . intValue ( ) ;
public class DefaultApplicationObjectConfigurer { /** * Sets the { @ link CommandButtonLabelInfo } of the given object . The label * info is created after loading the encoded label string from this * instance ' s { @ link MessageSource } using a message code in the format * < pre > * & lt ; objectName & gt ; . label * < / pre > * @ param configurable The object to be configured . Must not be null . * @ param objectName The name of the configurable object , unique within the * application . Must not be null . * @ throws IllegalArgumentException if either argument is null . */ protected void configureCommandLabel ( CommandLabelConfigurable configurable , String objectName ) { } }
Assert . notNull ( configurable , "configurable" ) ; Assert . notNull ( objectName , "objectName" ) ; String labelStr = loadMessage ( objectName + "." + LABEL_KEY ) ; if ( StringUtils . hasText ( labelStr ) ) { CommandButtonLabelInfo labelInfo = CommandButtonLabelInfo . valueOf ( labelStr ) ; configurable . setLabelInfo ( labelInfo ) ; }
public class RequestBuilder { /** * Adds an ContentBody object . */ public RequestBuilder < Resource > set ( final String name , final ContentBody contentBody ) { } }
if ( contentBody != null ) files . put ( name , contentBody ) ; else files . remove ( name ) ; return this ;
public class SimplifySpanBuild {
    /**
     * Build.
     *
     * Assembles the final {@link SpannableStringBuilder}: first merges the
     * "before" text (shifting all recorded span start positions by its length),
     * then applies every queued special unit (text / image / label / clickable /
     * raw spans) at its recorded positions.
     *
     * @return SpannableStringBuilder, or {@code null} when no text was appended
     */
    public SpannableStringBuilder build() {
        // "before" text is inserted at position 0, so every previously recorded
        // start position must be shifted right by its length.
        if (mBeforeStringBuilder.length() > 0) {
            mStringBuilder.insert(0, mBeforeStringBuilder);
            // reset SpecialUnit start pos
            if (!mFinalSpecialUnit.isEmpty()) {
                for (BaseSpecialUnit specialUnit : mFinalSpecialUnit) {
                    int[] tempStartPoss = specialUnit.getStartPoss();
                    if (null == tempStartPoss || tempStartPoss.length == 0) continue;
                    for (int i = 0; i < tempStartPoss.length; i++) {
                        int oldStartPos = tempStartPoss[i];
                        tempStartPoss[i] = oldStartPos + mBeforeStringBuilder.length();
                    }
                }
            }
            // reset Cache SpecialClickableUnit start pos
            if (!mCacheSpecialClickableUnitMap.isEmpty()) {
                for (Map.Entry<SpecialClickableUnit, PositionInfo> cm : mCacheSpecialClickableUnitMap.entrySet()) {
                    cm.getValue().startPos += mBeforeStringBuilder.length();
                }
            }
        }
        // Merge the "before" unit collections into the final ones.
        if (!mBeforeCacheSpecialClickableUnitMap.isEmpty()) {
            mCacheSpecialClickableUnitMap.putAll(mBeforeCacheSpecialClickableUnitMap);
        }
        if (!mBeforeSpecialUnit.isEmpty()) {
            mFinalSpecialUnit.addAll(mBeforeSpecialUnit);
        }
        // No text at all -> nothing to build; no special units -> plain text result.
        if (mStringBuilder.length() == 0) return null;
        if (mFinalSpecialUnit.isEmpty()) return new SpannableStringBuilder(mStringBuilder.toString());
        if (mNormalSizeText.length() == 0) mNormalSizeText.append(mStringBuilder);
        String normalSizeText = mNormalSizeText.toString();
        SpannableStringBuilder spannableStringBuilder = new SpannableStringBuilder(mStringBuilder);
        // The link movement method only needs to be installed on the TextView once.
        boolean isInitClickListener = false;
        for (BaseSpecialUnit st : mFinalSpecialUnit) {
            String specialText = st.getText();
            int[] startPoss = st.getStartPoss();
            if (TextUtils.isEmpty(specialText) || null == startPoss || startPoss.length == 0) continue;
            int specialTextLength = specialText.length();
            if (st instanceof SpecialTextUnit) { // text span
                SpecialTextUnit specialTextUnit = (SpecialTextUnit) st;
                final SpecialClickableUnit internalSpecialClickableUnit = specialTextUnit.getSpecialClickableUnit();
                // Clickable text inherits the text/background colors when it has none of its own.
                if (null != internalSpecialClickableUnit) {
                    if (internalSpecialClickableUnit.getNormalTextColor() == 0) {
                        internalSpecialClickableUnit.setNormalTextColor(specialTextUnit.getTextColor());
                    }
                    if (internalSpecialClickableUnit.getNormalBgColor() == 0) {
                        internalSpecialClickableUnit.setNormalBgColor(specialTextUnit.getTextBackgroundColor());
                    }
                }
                for (int startPos : startPoss) {
                    // Set Text Color
                    if (specialTextUnit.getTextColor() != 0) {
                        spannableStringBuilder.setSpan(new ForegroundColorSpan(specialTextUnit.getTextColor()),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Set Text Background Color (the clickable span draws its own background)
                    if (specialTextUnit.getTextBackgroundColor() != 0 && null == internalSpecialClickableUnit) {
                        spannableStringBuilder.setSpan(new BackgroundColorSpan(specialTextUnit.getTextBackgroundColor()),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Add Underline
                    if (specialTextUnit.isShowUnderline()) {
                        spannableStringBuilder.setSpan(new UnderlineSpan(),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Add StrikeThrough
                    if (specialTextUnit.isShowStrikeThrough()) {
                        spannableStringBuilder.setSpan(new StrikethroughSpan(),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Set Text Bold
                    if (specialTextUnit.isTextBold()) {
                        spannableStringBuilder.setSpan(new StyleSpan(Typeface.BOLD),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Set Text Italic
                    if (specialTextUnit.isTextItalic()) {
                        spannableStringBuilder.setSpan(new StyleSpan(Typeface.ITALIC),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Set Text Style
                    if (specialTextUnit.getTextStyle() != Typeface.NORMAL) {
                        spannableStringBuilder.setSpan(new StyleSpan(specialTextUnit.getTextStyle()),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    // Set Text Size (custom span aligns non-bottom gravity against the TextView)
                    if (specialTextUnit.getTextSize() > 0) {
                        TextView curTextView = specialTextUnit.getCurTextView();
                        int gravity = specialTextUnit.getGravity();
                        if (gravity != SpecialGravity.BOTTOM && null != curTextView) {
                            spannableStringBuilder.setSpan(new CustomAbsoluteSizeSpan(normalSizeText,
                                    specialTextUnit.getText(), Math.round(specialTextUnit.getTextSize()),
                                    curTextView, gravity),
                                    startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                        } else {
                            spannableStringBuilder.setSpan(new AbsoluteSizeSpan(Math.round(specialTextUnit.getTextSize()), true),
                                    startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                        }
                    }
                    // set clickable
                    if (null != internalSpecialClickableUnit) {
                        if (!isInitClickListener) {
                            isInitClickListener = true;
                            TextView curTextView = internalSpecialClickableUnit.getCurTextView();
                            if (null != curTextView) {
                                curTextView.setMovementMethod(CustomLinkMovementMethod.getInstance());
                            }
                        }
                        spannableStringBuilder.setSpan(new CustomClickableSpan(internalSpecialClickableUnit),
                                startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                }
            } else if (st instanceof SpecialImageUnit) { // image Span
                SpecialImageUnit specialImageUnit = (SpecialImageUnit) st;
                Bitmap bitmap = specialImageUnit.getBitmap();
                int imgWidth = specialImageUnit.getWidth();
                int imgHeight = specialImageUnit.getHeight();
                // Downscale to the requested size only when strictly smaller than the source.
                if (imgWidth > 0 && imgHeight > 0) {
                    int bitWidth = bitmap.getWidth();
                    int bitHeight = bitmap.getHeight();
                    if (imgWidth < bitWidth && imgHeight < bitHeight) {
                        Bitmap newBitmap = ThumbnailUtils.extractThumbnail(bitmap, imgWidth, imgHeight);
                        if (null != newBitmap) {
                            bitmap.recycle();
                            specialImageUnit.setBitmap(newBitmap);
                        }
                    }
                }
                for (int startPos : startPoss) {
                    CustomImageSpan curCustomImageSpan = new CustomImageSpan(normalSizeText, specialImageUnit);
                    spannableStringBuilder.setSpan(curCustomImageSpan,
                            startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    if (specialImageUnit.isClickable()) {
                        addClickStateChangeListeners(startPos, startPos + specialTextLength, curCustomImageSpan);
                    }
                }
            } else if (st instanceof SpecialLabelUnit) { // label span
                SpecialLabelUnit specialLabelUnit = (SpecialLabelUnit) st;
                for (int startPos : startPoss) {
                    CustomLabelSpan curCustomLabelSpan = new CustomLabelSpan(normalSizeText, specialLabelUnit);
                    spannableStringBuilder.setSpan(curCustomLabelSpan,
                            startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    if (specialLabelUnit.isClickable()) {
                        addClickStateChangeListeners(startPos, startPos + specialTextLength, curCustomLabelSpan);
                    }
                }
            } else if (st instanceof SpecialClickableUnit) { // clickable span
                SpecialClickableUnit specialClickableUnit = (SpecialClickableUnit) st;
                if (!isInitClickListener) {
                    isInitClickListener = true;
                    TextView curTextView = specialClickableUnit.getCurTextView();
                    if (null != curTextView) {
                        curTextView.setMovementMethod(CustomLinkMovementMethod.getInstance());
                    }
                }
                // NOTE(review): only the first recorded position is used here — confirm intended.
                int startPos = startPoss[0];
                spannableStringBuilder.setSpan(new CustomClickableSpan(specialClickableUnit),
                        startPos, startPos + specialTextLength, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            } else if (st instanceof SpecialRawSpanUnit) { // raw span
                SpecialRawSpanUnit specialRawSpanUnit = (SpecialRawSpanUnit) st;
                int startPos = startPoss[0];
                spannableStringBuilder.setSpan(specialRawSpanUnit.getSpanObj(),
                        startPos, startPos + specialTextLength, specialRawSpanUnit.getFlags());
                // Temporarily unable to support all
            }
        }
        return spannableStringBuilder;
    }
}
public class FFmpegUtils { /** * Convert the duration to " hh : mm : ss " timecode representation , where ss ( seconds ) can be decimal . * @ param duration the duration . * @ param units the unit the duration is in . * @ return the timecode representation . */ public static String toTimecode ( long duration , TimeUnit units ) { } }
// FIXME Negative durations are also supported . // https : / / www . ffmpeg . org / ffmpeg - utils . html # Time - duration checkArgument ( duration >= 0 , "duration must be positive" ) ; long nanoseconds = units . toNanos ( duration ) ; // TODO This will clip at Long . MAX _ VALUE long seconds = units . toSeconds ( duration ) ; long ns = nanoseconds - SECONDS . toNanos ( seconds ) ; long minutes = SECONDS . toMinutes ( seconds ) ; seconds -= MINUTES . toSeconds ( minutes ) ; long hours = MINUTES . toHours ( minutes ) ; minutes -= HOURS . toMinutes ( hours ) ; if ( ns == 0 ) { return String . format ( "%02d:%02d:%02d" , hours , minutes , seconds ) ; } return ZERO . trimTrailingFrom ( String . format ( "%02d:%02d:%02d.%09d" , hours , minutes , seconds , ns ) ) ;
public class CreateNetworkAclRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < CreateNetworkAclRequest > getDryRunRequest ( ) { } }
Request < CreateNetworkAclRequest > request = new CreateNetworkAclRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class SimonUtils { /** * method is extracted , so the stack trace index is always right */ private static String generatePrivate ( String suffix , boolean includeMethodName ) { } }
StackTraceElement stackElement = Thread . currentThread ( ) . getStackTrace ( ) [ CLIENT_CODE_STACK_INDEX ] ; StringBuilder nameBuilder = new StringBuilder ( stackElement . getClassName ( ) ) ; if ( includeMethodName ) { nameBuilder . append ( '.' ) . append ( stackElement . getMethodName ( ) ) ; } if ( suffix != null ) { nameBuilder . append ( suffix ) ; } return nameBuilder . toString ( ) ;
public class Classification { /** * classify an observation . * @ param cps contains a list of context predicates * @ return label */ public String classify ( String cps ) { } }
// cps contains a list of context predicates String modelLabel = "" ; int i ; intCps . clear ( ) ; StringTokenizer strTok = new StringTokenizer ( cps , " \t\r\n" ) ; int count = strTok . countTokens ( ) ; for ( i = 0 ; i < count ; i ++ ) { String cpStr = strTok . nextToken ( ) ; Integer cpInt = ( Integer ) data . cpStr2Int . get ( cpStr ) ; if ( cpInt != null ) { intCps . add ( cpInt ) ; } } Observation obsr = new Observation ( intCps ) ; // classify inference . classify ( obsr ) ; String lbStr = ( String ) data . lbInt2Str . get ( new Integer ( obsr . modelLabel ) ) ; if ( lbStr != null ) { modelLabel = lbStr ; } return modelLabel ;
public class AnimatorModel { /** * Animator */ @ Override public void play ( Animation anim ) { } }
Check . notNull ( anim ) ; final int firstFrame = anim . getFirst ( ) ; final int lastFrame = anim . getLast ( ) ; final double animSpeed = anim . getSpeed ( ) ; final boolean animReverse = anim . hasReverse ( ) ; final boolean animRepeat = anim . hasRepeat ( ) ; first = firstFrame ; last = lastFrame ; speed = animSpeed ; reverse = animReverse ; repeat = animRepeat ; current = first ; state = AnimState . PLAYING ;
public class JWSHeader { /** * Sets the array listing the header parameter names that define extensions that are used in the * JWS header that MUST be understood and processed or { @ code null } for none . * Overriding is only supported for the purpose of calling the super implementation and changing * the return type , but nothing else . */ public JWSHeader setCritical ( List < String > critical ) { } }
this . critical = critical ; this . put ( HeaderConstants . CRITICAL , critical ) ; return this ;
public class DruidLeaderClient { /** * Executes the request object aimed at the leader and process the response with given handler * Note : this method doesn ' t do retrying on errors or handle leader changes occurred during communication */ public < Intermediate , Final > ListenableFuture < Final > goAsync ( final Request request , final HttpResponseHandler < Intermediate , Final > handler ) { } }
return httpClient . go ( request , handler ) ;
public class WebhooksInner {
    /**
     * Create the webhook identified by webhook name.
     *
     * Blocks on the async service call and unwraps the response body; exactly
     * one response is expected from the observable.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param webhookName The webhook name.
     * @param parameters The create or update parameters for webhook.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WebhookInner object if successful.
     */
    public WebhookInner createOrUpdate(String resourceGroupName, String automationAccountName, String webhookName, WebhookCreateOrUpdateParameters parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, automationAccountName, webhookName, parameters).toBlocking().single().body();
    }
}
public class AccSessionFactoryImpl { @ Override public void doAccRequestEvent ( ServerAccSession appSession , AccountRequest acr ) throws InternalException , IllegalDiameterStateException , RouteException , OverloadException { } }
logger . info ( "Diameter Base AccountingSessionFactory :: doAccRequestEvent :: appSession[" + appSession + "], Request[" + acr + "]" ) ;
public class PtoPXmitMsgsItemStream {
    /**
     * Complete recovery of an ItemStream retrieved from the MessageStore.
     *
     * Delegates the bulk of the recovery to the superclass, then refreshes the
     * cached high message threshold from the destination's message processor.
     *
     * @param destinationHandler to use in reconstitution
     * @throws SIResourceException if the underlying reconstitution fails
     */
    public void reconstitute(BaseDestinationHandler destinationHandler) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "reconstitute", destinationHandler);
        super.reconstitute(destinationHandler);
        // Re-read the threshold; it is not persisted with the stream itself.
        _destHighMsgs = destinationHandler.getMessageProcessor().getHighMessageThreshold();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.exit(tc, "reconstitute");
        }
    }
}
public class FindBugsWorker { /** * Updates given outputFiles map with class name patterns matching given * java source names * @ param resources * java sources * @ param outLocations * key is src root , value is output location this directory * @ param fbProject */ private void collectClassFiles ( List < WorkItem > resources , Map < IPath , IPath > outLocations , Project fbProject ) { } }
for ( WorkItem workItem : resources ) { workItem . addFilesToProject ( fbProject , outLocations ) ; }
public class DDataSource {
    /**
     * Get an in-memory representation of the broken-down SQL query. This may
     * require contacting druid for resolving dimensions vs metrics for SELECT
     * queries, hence it also optionally accepts HTTP request headers to be
     * sent out.
     *
     * @param sqlQuery raw SQL text, possibly containing named parameters
     * @param namedParams values substituted during preprocessing
     * @param reqHeaders headers forwarded to the coordinator lookup
     * @return the compiled program of statement metadata
     * @throws java.lang.Exception when compilation fails or datasource info cannot be loaded
     */
    public Program<BaseStatementMeta> getCompiledAST(String sqlQuery, NamedParameters namedParams, Map<String, String> reqHeaders) throws Exception {
        Program<BaseStatementMeta> pgm = DCompiler.compileSql(preprocessSqlQuery(sqlQuery, namedParams));
        for (BaseStatementMeta stmnt : pgm.getAllStmnts()) {
            if (stmnt instanceof QueryMeta) {
                QueryMeta query = (QueryMeta) stmnt;
                if (query.queryType == RequestType.SELECT) {
                    // classifyColumnsToDimAndMetrics
                    // SELECTs need the datasource's dimension/metric split, fetched
                    // from the coordinator (Either: left = error, right = (dims, metrics)).
                    Either<String, Tuple2<List<String>, List<String>>> dataSourceDescRes =
                            coordinator.aboutDataSource(stmnt.dataSource, reqHeaders);
                    if (dataSourceDescRes.isLeft()) {
                        throw new Exception("Datasource info either not available (or)could not be loaded ."
                                + dataSourceDescRes.left().get());
                    } else {
                        ((SelectQueryMeta) query).postProcess(dataSourceDescRes.right().get());
                    }
                }
            } else if (stmnt instanceof InsertMeta) {
                // TODO : Handle this .
            } else if (stmnt instanceof DeleteMeta) {
                // TODO : Handle this .
            } else if (stmnt instanceof DropMeta) {
                // TODO : Handle this .
            }
        }
        // TODO : Do something if pgm is invalid ! ! !
        pgm.isValid();
        return pgm;
    }
}
public class RangeMember { /** * / * ( non - Javadoc ) * @ see org . archive . wayback . resourceindex . RemoteResourceIndex # query ( org . archive . wayback . core . WaybackRequest ) */ public SearchResults query ( WaybackRequest wbRequest ) throws ResourceIndexNotAvailableException , ResourceNotInArchiveException , BadQueryException , AccessControlException { } }
return index . query ( wbRequest ) ;
public class CachingTemplate { /** * Clears the entire contents of the { @ link Cache } . * The { @ link Cache } operation acquires a write lock . * @ param lock { @ link ReadWriteLock } used to coordinate the { @ link Cache } clear operation * with possibly other concurrent { @ link Cache } operations . * @ see java . util . concurrent . locks . ReadWriteLock # writeLock ( ) * @ see # getCache ( ) */ protected void clear ( ReadWriteLock lock ) { } }
Lock writeLock = lock . writeLock ( ) ; try { writeLock . lock ( ) ; getCache ( ) . clear ( ) ; } finally { writeLock . unlock ( ) ; }
public class HamcrestMatchers { /** * Creates a matcher of { @ link Comparable } object that matches when the examined object is before the given < code > * value < / code > , as reported by the < code > compareTo < / code > method of the < b > examined < / b > object . * < p > E . g . : < code > Date past ; Date now ; assertThat ( past , before ( now ) ) ; < / code > * < p > The matcher renames the Hamcrest matcher obtained with { @ linkplain org . hamcrest . Matchers # lessThan ( Comparable ) } . */ public static < T extends Comparable < T > > Matcher < T > before ( final T value ) { } }
return Matchers . lessThan ( value ) ;
public class InternalXtextParser {
    /**
     * InternalXtext.g:895:1: entryRuleAssignment : ruleAssignment EOF ;
     *
     * ANTLR-generated entry rule: parses a complete Assignment followed by EOF,
     * with standard ANTLR error report/recover handling. Do not hand-edit;
     * regenerate from the grammar instead.
     */
    public final void entryRuleAssignment() throws RecognitionException {
        try {
            // InternalXtext.g:896:1: ( ruleAssignment EOF )
            // InternalXtext.g:897:1: ruleAssignment EOF
            {
                before(grammarAccess.getAssignmentRule());
                pushFollow(FollowSets000.FOLLOW_1);
                ruleAssignment();
                state._fsp--;
                after(grammarAccess.getAssignmentRule());
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
public class Exceptions {
    /**
     * Runs a throwable and, sneakily, re-throws any exceptions it encounters.
     *
     * No wrapping occurs: the argument is returned as-is.
     * NOTE(review): this compiles only if {@code ThrowingRunnable<E>} is itself
     * a {@code Runnable} — presumably its {@code run()} sneaky-throws the
     * checked exception; confirm against the ThrowingRunnable declaration.
     *
     * @param runnable the runnable
     * @param <E> the exception type
     * @return the runnable
     */
    public static <E extends Throwable> @NonNull Runnable rethrowRunnable(final @NonNull ThrowingRunnable<E> runnable) {
        return runnable;
    }
}
public class CPDefinitionOptionValueRelPersistenceImpl {

    /**
     * Returns the cp definition option value rel where
     * CPDefinitionOptionRelId = &#63; and key = &#63; or returns
     * <code>null</code> if it could not be found, optionally using the finder
     * cache.
     * <p>
     * NOTE(review): Liferay ServiceBuilder-generated finder. A cache miss that
     * yields no rows stores the empty {@code List} itself in the finder cache
     * as a negative-result marker; the trailing {@code instanceof List<?>}
     * check translates that marker back into {@code null} for the caller.
     *
     * @param CPDefinitionOptionRelId the cp definition option rel ID
     * @param key the key
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the matching cp definition option value rel, or
     *         <code>null</code> if a matching cp definition option value rel
     *         could not be found
     */
    @Override
    public CPDefinitionOptionValueRel fetchByC_K(long CPDefinitionOptionRelId, String key, boolean retrieveFromCache) {
        Object[] finderArgs = new Object[] { CPDefinitionOptionRelId, key };

        Object result = null;

        if (retrieveFromCache) {
            result = finderCache.getResult(FINDER_PATH_FETCH_BY_C_K, finderArgs, this);
        }

        // Guard against hash collisions / stale entries: verify the cached
        // entity actually matches both lookup columns before trusting it.
        if (result instanceof CPDefinitionOptionValueRel) {
            CPDefinitionOptionValueRel cpDefinitionOptionValueRel = (CPDefinitionOptionValueRel) result;

            if ((CPDefinitionOptionRelId != cpDefinitionOptionValueRel.getCPDefinitionOptionRelId()) ||
                !Objects.equals(key, cpDefinitionOptionValueRel.getKey())) {
                result = null;
            }
        }

        if (result == null) {
            StringBundler query = new StringBundler(4);

            query.append(_SQL_SELECT_CPDEFINITIONOPTIONVALUEREL_WHERE);

            query.append(_FINDER_COLUMN_C_K_CPDEFINITIONOPTIONRELID_2);

            // Three SQL variants for the key column: IS NULL, empty string,
            // or a bound parameter. bindKey tracks whether a bind is needed.
            boolean bindKey = false;

            if (key == null) {
                query.append(_FINDER_COLUMN_C_K_KEY_1);
            }
            else if (key.equals("")) {
                query.append(_FINDER_COLUMN_C_K_KEY_3);
            }
            else {
                bindKey = true;

                query.append(_FINDER_COLUMN_C_K_KEY_2);
            }

            String sql = query.toString();

            Session session = null;

            try {
                session = openSession();

                Query q = session.createQuery(sql);

                QueryPos qPos = QueryPos.getInstance(q);

                qPos.add(CPDefinitionOptionRelId);

                if (bindKey) {
                    qPos.add(key);
                }

                List<CPDefinitionOptionValueRel> list = q.list();

                if (list.isEmpty()) {
                    // Cache the empty list as a negative-result marker so
                    // repeated misses skip the database round trip.
                    finderCache.putResult(FINDER_PATH_FETCH_BY_C_K, finderArgs, list);
                }
                else {
                    CPDefinitionOptionValueRel cpDefinitionOptionValueRel = list.get(0);

                    result = cpDefinitionOptionValueRel;

                    cacheResult(cpDefinitionOptionValueRel);
                }
            }
            catch (Exception e) {
                // On failure, drop any (possibly stale) cached entry for
                // these args before propagating the wrapped exception.
                finderCache.removeResult(FINDER_PATH_FETCH_BY_C_K, finderArgs);

                throw processException(e);
            }
            finally {
                closeSession(session);
            }
        }

        // A List here is the cached negative-result marker -> no match.
        if (result instanceof List<?>) {
            return null;
        }
        else {
            return (CPDefinitionOptionValueRel) result;
        }
    }
}
public class N1qlQuery { /** * Create a new query with positional parameters . Note that the { @ link JsonArray } * should not be mutated until { @ link # n1ql ( ) } is called since it backs the * creation of the query string . * Positional parameters have the form of ` $ n ` , where the ` n ` represents the position , starting * with 1 . The following two examples are equivalent and compare the { @ link # simple ( Statement ) } * vs the positional { @ link # parameterized ( Statement , JsonArray ) } approach : * Simple : * N1qlQuery . simple ( " SELECT * FROM ` travel - sample ` WHERE type = ' airline ' and name like ' A % ' " ) * Positional Params : * N1qlQuery . parameterized ( * " SELECT * FROM ` travel - sample ` WHERE type = $ 1 and name like $ 2 " , * JsonArray . from ( " airline " , " A % " ) * Using parameterized statements combined with non - adhoc queries ( which is configurable through * the { @ link N1qlParams } ) can provide better performance even when the actual arguments change * at execution time . * @ param statement the { @ link Statement } to execute ( containing positional placeholders ) * @ param positionalParams the values for the positional placeholders in statement */ public static ParameterizedN1qlQuery parameterized ( Statement statement , JsonArray positionalParams ) { } }
return new ParameterizedN1qlQuery ( statement , positionalParams , null ) ;
public class CPDefinitionOptionValueRelUtil { /** * Returns the last cp definition option value rel in the ordered set where groupId = & # 63 ; . * @ param groupId the group ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition option value rel , or < code > null < / code > if a matching cp definition option value rel could not be found */ public static CPDefinitionOptionValueRel fetchByGroupId_Last ( long groupId , OrderByComparator < CPDefinitionOptionValueRel > orderByComparator ) { } }
return getPersistence ( ) . fetchByGroupId_Last ( groupId , orderByComparator ) ;
public class GVRTextureCapturer {

    /**
     * Starts or stops capturing.
     * <p>
     * Updates the local {@code capturing} flag first, then forwards the
     * request to the native capturer. Note the ordering: if the native call
     * were to fail, the Java-side flag would already reflect the requested
     * state — NOTE(review): confirm whether that is intentional.
     *
     * @param capture If true, capturing is started. If false, it is stopped.
     * @param fps Capturing FPS (frames per second).
     */
    public void setCapture(boolean capture, float fps) {
        capturing = capture;
        NativeTextureCapturer.setCapture(getNative(), capture, fps);
    }
}