signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ConnectionCache {
    /**
     * The main loop of the cache, which checks for expirations.
     *
     * <p>Entries whose collect time has passed are demoted from the hard store to
     * the weak store, making their connections eligible for garbage collection
     * once no longer referenced elsewhere.
     *
     * <p>NOTE(review): this method calls {@code wait(10000)}, so the caller must
     * hold this object's monitor (i.e. invoke from a synchronized context) —
     * confirm against the run loop that drives this method.
     *
     * @return true if the thread should be stopped.
     * @throws InterruptedException if the thread was interrupted while blocking.
     */
    private boolean mainLoop() throws InterruptedException {
        long now = System.currentTimeMillis();
        // Sweep the hard store for expired entries.
        Iterator list = m_hardStore.values().iterator();
        while (list.hasNext()) {
            Entry entry = (Entry) list.next();
            if (entry.m_collectTime < now) {
                // Demote: keep only a weak reference; DUMMY is a placeholder value.
                m_weakStore.put(entry.m_connection, DUMMY);
                // Remove via the iterator to avoid ConcurrentModificationException.
                list.remove();
            }
        }
        if (m_hardStore.size() == 0) {
            m_thread = null; // mark to start a new thread next time.
            return true;
        }
        // Block up to 10 seconds (or until notified) before the next sweep.
        wait(10000);
        return false;
    }
}
public class VaultsInner { /** * The List operation gets information about the vaults associated with the subscription . * @ param top Maximum number of results to return . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; Resource & gt ; object */ public Observable < Page < Resource > > listAsync ( final Integer top ) { } }
return listWithServiceResponseAsync ( top ) . map ( new Func1 < ServiceResponse < Page < Resource > > , Page < Resource > > ( ) { @ Override public Page < Resource > call ( ServiceResponse < Page < Resource > > response ) { return response . body ( ) ; } } ) ;
public class Widgets { /** * Converts from { @ link com . google . gwt . dom . client . Element } & rarr ; { @ link HTMLElement } . */ public static HTMLElement element ( com . google . gwt . dom . client . Element element ) { } }
return Js . cast ( element ) ;
public class TrustedIdProvidersInner { /** * Gets the specified Data Lake Store trusted identity provider . * @ param resourceGroupName The name of the Azure resource group . * @ param accountName The name of the Data Lake Store account . * @ param trustedIdProviderName The name of the trusted identity provider to retrieve . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < TrustedIdProviderInner > getAsync ( String resourceGroupName , String accountName , String trustedIdProviderName , final ServiceCallback < TrustedIdProviderInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , accountName , trustedIdProviderName ) , serviceCallback ) ;
public class Matchers { /** * Returns a Matcher that returns true only if all of the provided * matchers match . */ public static Matcher allOf ( final Matcher ... matchers ) { } }
return new Matcher ( ) { @ Override public boolean matches ( Node node , NodeMetadata metadata ) { for ( Matcher m : matchers ) { if ( ! m . matches ( node , metadata ) ) { return false ; } } return true ; } } ;
public class Beans { /** * Invokes a bean method by choosing the best method that matches the given name and the list of arguments . * @ param bean the invocation target * @ param name the name of the method to invoke * @ param args the arguments to pass to the method * @ return the return value from the method * @ throws NoSuchMethodException if an appropriate method cannot be found * @ throws IllegalAccessException if an appropriate method cannot be accessed * @ throws InvocationTargetException if errors occur while invoking the method */ public static Object invoke ( Object bean , String name , Object ... args ) throws NoSuchMethodException , IllegalAccessException , InvocationTargetException { } }
Class < ? > [ ] argumentTypes = new Class < ? > [ args . length ] ; for ( int i = 0 ; i < args . length ; i ++ ) { argumentTypes [ i ] = args [ i ] . getClass ( ) ; } return choose ( bean . getClass ( ) . getMethods ( ) , new MethodParameterExtractor ( ) , name , argumentTypes ) . invoke ( bean , args ) ;
public class MasterJournalContext {
    /**
     * Waits for the flush counter to be flushed to the journal. If the counter is
     * {@link #INVALID_FLUSH_COUNTER}, this is a noop.
     *
     * <p>The flush is retried until it succeeds or the retry policy times out.
     * IOExceptions are treated as transient; a closed journal is surfaced to the
     * caller; any other Throwable terminates the process.
     *
     * @throws UnavailableException if the journal was closed while flushing
     */
    private void waitForJournalFlush() throws UnavailableException {
        if (mFlushCounter == INVALID_FLUSH_COUNTER) {
            // Check this before the precondition.
            return;
        }
        RetryPolicy retry = new TimeoutRetry(FLUSH_RETRY_TIMEOUT_MS, FLUSH_RETRY_INTERVAL_MS);
        while (retry.attempt()) {
            try {
                mAsyncJournalWriter.flush(mFlushCounter);
                return;
            } catch (IOException e) {
                // Transient failure: log and let the retry policy decide.
                LOG.warn("Journal flush failed. retrying...", e);
            } catch (JournalClosedException e) {
                // The journal will not come back; report unavailability to the caller.
                throw new UnavailableException(String.format("Failed to complete request: %s", e.getMessage()), e);
            } catch (Throwable e) {
                // Anything else is unrecoverable; kill the process deliberately.
                ProcessUtils.fatalError(LOG, e, "Journal flush failed");
            }
        }
        // Retries exhausted without success: the master cannot safely continue.
        ProcessUtils.fatalError(LOG, "Journal flush failed after %d attempts", retry.getAttemptCount());
    }
}
public class ApiOvhXdsl { /** * Alter this object properties * REST : PUT / xdsl / templateModem / { name } * @ param body [ required ] New object properties * @ param name [ required ] Name of the Modem Template */ public void templateModem_name_PUT ( String name , OvhTemplateModem body ) throws IOException { } }
String qPath = "/xdsl/templateModem/{name}" ; StringBuilder sb = path ( qPath , name ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class ServerInterceptors { /** * Create a new { @ code ServerServiceDefinition } whose { @ link ServerCallHandler } s will call * { @ code interceptors } before calling the pre - existing { @ code ServerCallHandler } . The last * interceptor will have its { @ link ServerInterceptor # interceptCall } called first . * @ param serviceDef the service definition for which to intercept all its methods . * @ param interceptors array of interceptors to apply to the service . * @ return a wrapped version of { @ code serviceDef } with the interceptors applied . */ public static ServerServiceDefinition intercept ( ServerServiceDefinition serviceDef , ServerInterceptor ... interceptors ) { } }
return intercept ( serviceDef , Arrays . asList ( interceptors ) ) ;
public class AbstractJaxb {
    /**
     * delete element that has specified attribute within descendant element
     * of oneself. you can't delete oneself.
     *
     * <p>If the target has no id yet, a random unique id is assigned first so
     * the removal utility can locate it.
     *
     * @param target the descendant element to remove
     * @return if success to delete, return true. if no hit, return false.
     */
    public <T extends AbstractJaxb> boolean remove(T target) {
        String id = target.getId();
        if (id == null) {
            // Generate a UUID not already present in this tree, then stamp it on the target.
            // NOTE(review): if all 256 candidates collided (practically impossible with
            // random UUIDs), the last candidate would be used without setId() having been
            // called on the target — confirm this edge case is acceptable.
            for (int i = 0; i < 256; i++) {
                id = UUID.randomUUID().toString();
                if (this.getById(id) == null) {
                    target.setId(id);
                    break;
                }
            }
        }
        return RemoveByIdUtil.removeById(id, this);
    }
}
public class JsonAssert { /** * Asserts that given node is present and is not null . * @ return */ @ Override public JsonAssert isNotNull ( ) { } }
isPresent ( "not null" ) ; Node node = getNode ( actual , "" ) ; if ( node . getNodeType ( ) == NULL ) { failOnType ( node , "not null" ) ; } return this ;
public class TagletWriterImpl { /** * { @ inheritDoc } */ protected Content codeTagOutput ( Element element , DocTree tag ) { } }
CommentHelper ch = utils . getCommentHelper ( element ) ; StringContent content = new StringContent ( utils . normalizeNewlines ( ch . getText ( tag ) ) ) ; Content result = HtmlTree . CODE ( content ) ; return result ;
public class Deadline { /** * Determines the amount of time left ( in a custom unit ) until the deadline ; if the deadline has been reached or passed this * method returns 0 * @ param unit * a unit of time * @ return the amount of time left before this deadline expires , converted ( using < code > TimeUnit . convert < / code > ) into the * given Time Unit */ public long getTimeLeft ( final TimeUnit unit ) { } }
final long left = unit . convert ( getTimeLeft ( ) , TimeUnit . MILLISECONDS ) ; if ( left > 0 ) return left ; else return 0 ;
public class GroupDefImpl { /** * / * ( non - Javadoc ) * @ see org . jboss . arquillian . impl . configuration . api . GroupDef # getGroupContainers ( ) */ @ Override public List < ContainerDef > getGroupContainers ( ) { } }
List < ContainerDef > containers = new ArrayList < ContainerDef > ( ) ; for ( Node container : group . get ( "container" ) ) { containers . add ( new GroupContainerDefImpl ( getDescriptorName ( ) , getRootNode ( ) , group , container ) ) ; } return containers ;
public class DirContextPooledObjectFactory {
    /**
     * Validates a pooled {@code DirContext} before it is handed out or returned.
     *
     * <p>Preconditions are checked first (validator configured, key and pooled
     * object of the expected types); any exception thrown by the validator itself
     * is logged and treated as a validation failure rather than propagated.
     *
     * @see BaseKeyedPooledObjectFactory#validateObject(Object, PooledObject)
     */
    @Override
    public boolean validateObject(Object key, PooledObject<Object> pooledObject) {
        Assert.notNull(this.dirContextValidator, "DirContextValidator may not be null");
        Assert.isTrue(key instanceof DirContextType, "key must be a DirContextType");
        Assert.notNull(pooledObject, "The Object to validate must not be null");
        Assert.isTrue(pooledObject.getObject() instanceof DirContext, "The Object to validate must be of type '" + DirContext.class + "'");
        try {
            final DirContextType contextType = (DirContextType) key;
            final DirContext dirContext = (DirContext) pooledObject.getObject();
            // Delegate the actual liveness check to the configured validator.
            return this.dirContextValidator.validateDirContext(contextType, dirContext);
        } catch (Exception e) {
            // A throwing validator means the context cannot be trusted: fail validation.
            this.logger.warn("Failed to validate '" + pooledObject.getObject() + "' due to an unexpected exception.", e);
            return false;
        }
    }
}
public class ValidationExecutorSetRegistry { /** * Find the validation executor set with the specified ID . * @ param aID * The ID to search . May be < code > null < / code > . * @ return < code > null < / code > if no such validation executor set is registered . */ @ Nullable public IValidationExecutorSet getOfID ( @ Nullable final VESID aID ) { } }
if ( aID == null ) return null ; return m_aRWLock . readLocked ( ( ) -> m_aMap . get ( aID ) ) ;
public class MetaBucket { /** * Putting an entry to the map . * @ param pKey * to be stored . * @ param pVal * to be stored . * @ return if entry already existing , return that one . * @ see ConcurrentHashMap # put ( Object , Object ) */ public IMetaEntry put ( final IMetaEntry pKey , final IMetaEntry pVal ) { } }
return mMetaMap . put ( pKey , pVal ) ;
public class ElementFilter { /** * Returns a list of { @ code requires } directives in { @ code directives } . * @ return a list of { @ code requires } directives in { @ code directives } * @ param directives the directives to filter * @ since 9 * @ spec JPMS */ public static List < RequiresDirective > requiresIn ( Iterable < ? extends Directive > directives ) { } }
return listFilter ( directives , DirectiveKind . REQUIRES , RequiresDirective . class ) ;
public class NuCharsetEncoder {
    /**
     * Implementation of {@link NuProcessHandler#onStdinReady(ByteBuffer)} which
     * calls {@link handler} with a string buffer then encodes it to bytes and
     * feeds it to the process's stdin.
     *
     * <p>Flow: ask the handler to fill {@code charBuffer}, encode as much as fits
     * into {@code buffer}, then flip/compact so leftovers survive to the next call.
     * Encoder errors are reported to the handler but do not abort the stream.
     *
     * @param buffer The {@link ByteBuffer} passed to
     *     {@link NuProcessHandler#onStdinReady(ByteBuffer)}
     * @return true if more data needs to be passed to stdin, false otherwise
     */
    public boolean onStdinReady(ByteBuffer buffer) {
        // TODO: Should we avoid invoking onStdinReady() when it returned false previously?
        // handler returning false means no more characters will be produced.
        boolean endOfInput = !this.handler.onStdinReady(charBuffer);
        CoderResult encoderResult = encoder.encode(charBuffer, buffer, endOfInput);
        // Prepare the byte buffer for reading; retain any un-encoded chars.
        buffer.flip();
        charBuffer.compact();
        if (encoderResult.isError()) {
            this.handler.onEncoderError(encoderResult);
        }
        if (encoderResult.isOverflow()) {
            // Output buffer filled up: more bytes remain to be written next round.
            return true;
        } else if (endOfInput) {
            // Drain the encoder's internal state; keep going only if flush overflowed.
            CoderResult flushResult = encoder.flush(buffer);
            return flushResult.isOverflow();
        } else {
            // Handler may still produce more characters.
            return true;
        }
    }
}
public class DeviceProxy {
    /**
     * Checks the reply of an asynchronous request, delegating to the DAO layer.
     *
     * @param request the pending asynchronous request to check
     * @param id the asynchronous call identifier
     * @param idl_cmd the IDL command name associated with the request
     * @throws DevFailed if the device reported a failure
     * @throws AsynReplyNotArrived if the reply is not yet available
     */
    protected void check_asynch_reply(Request request, int id, String idl_cmd) throws DevFailed, AsynReplyNotArrived {
        // Pure delegation: the DAO performs the actual reply inspection for this proxy.
        deviceProxyDAO.check_asynch_reply(this, request, id, idl_cmd);
    }
}
public class DocumentUrl { /** * Get Resource Url for DeleteDocument * @ param documentId Unique identifier for a document , used by content and document calls . Document IDs are associated with document types , document type lists , sites , and tenants . * @ param documentListName Name of content documentListName to delete * @ return String Resource Url */ public static MozuUrl deleteDocumentUrl ( String documentId , String documentListName ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/content/documentlists/{documentListName}/documents/{documentId}" ) ; formatter . formatUrl ( "documentId" , documentId ) ; formatter . formatUrl ( "documentListName" , documentListName ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class CoreUtils { /** * Create a bounded thread pool executor . The work queue is synchronous and can cause * RejectedExecutionException if there is no available thread to take a new task . * @ param maxPoolSize : the maximum number of threads to allow in the pool . * @ param keepAliveTime : when the number of threads is greater than the core , this is the maximum * time that excess idle threads will wait for new tasks before terminating . * @ param unit : the time unit for the keepAliveTime argument . * @ param threadFactory : the factory to use when the executor creates a new thread . */ public static ThreadPoolExecutor getBoundedThreadPoolExecutor ( int maxPoolSize , long keepAliveTime , TimeUnit unit , ThreadFactory tFactory ) { } }
return new ThreadPoolExecutor ( 0 , maxPoolSize , keepAliveTime , unit , new SynchronousQueue < Runnable > ( ) , tFactory ) ;
public class CnvTfsLong { /** * < p > Convert from string . < / p > * @ param pAddParam additional params , e . g . IRequestData * to fill owner itsVersion . * @ param pStrVal string representation * @ return Long value * @ throws Exception - an exception */ @ Override public final Long fromString ( final Map < String , Object > pAddParam , final String pStrVal ) throws Exception { } }
if ( pStrVal == null || "" . equals ( pStrVal ) ) { return null ; } if ( pAddParam != null ) { String dgsep = ( String ) pAddParam . get ( "decGrSepv" ) ; if ( dgsep != null ) { String strVal = pStrVal . replace ( dgsep , "" ) ; return Long . valueOf ( strVal ) ; } } return Long . valueOf ( pStrVal ) ;
public class PortalPreAuthenticatedProcessingFilter {
    /**
     * Get the values represented by each token from the request and load them into a HashMap that
     * is returned.
     *
     * <p>Request attributes take precedence over request parameters; passwords are
     * never trimmed, all other values are. Missing values become the empty string.
     *
     * @param tokens map of context name -&gt; request attribute/parameter name
     * @param request the current HTTP request
     * @return HashMap of properties keyed by (shortened) context name
     */
    private HashMap<String, String> getPropertyFromRequest(HashMap<String, String> tokens, HttpServletRequest request) {
        // Iterate through all of the other property keys looking for the first property
        // named like propname that has a value in the request
        HashMap<String, String> retHash = new HashMap<>();
        for (Map.Entry<String, String> entry : tokens.entrySet()) {
            String contextName = entry.getKey();
            String parmName = entry.getValue();
            String parmValue;
            if (request.getAttribute(parmName) != null) {
                // Upstream components (like servlet filters) may supply information
                // for the authentication process using request attributes.
                try {
                    parmValue = (String) request.getAttribute(parmName);
                } catch (ClassCastException cce) {
                    String msg = "The request attribute '" + parmName + "' must be a String.";
                    throw new RuntimeException(msg, cce);
                }
            } else {
                // If a configured parameter isn't provided by a request attribute,
                // check request parameters (i.e. querystring, form fields).
                parmValue = request.getParameter(parmName);
            }
            // null value causes exception in context.authentication
            // alternately we could just not set parm if value is null
            if ("password".equals(parmName)) {
                // make sure we don't trim passwords, since they might have
                // leading or trailing spaces
                parmValue = (parmValue == null ? "" : parmValue);
            } else {
                parmValue = (parmValue == null ? "" : parmValue).trim();
            }
            // The relationship between the way the properties are stored and the way
            // the subcontexts are named has to be closely looked at to make this work.
            // The keys are either "root" or the subcontext name that follows "root.". As
            // as example, the contexts ["root", "root.simple", "root.cas"] are represented
            // as ["root", "simple", "cas"].
            String key = (contextName.startsWith("root.") ? contextName.substring(5) : contextName);
            retHash.put(key, parmValue);
        }
        return (retHash);
    }
}
public class HarFileSystem {
    /**
     * Fix offset and length of block locations.
     * Note that this method modifies the original array.
     *
     * <p>Each location's offset/length are rewritten from har-part coordinates to
     * coordinates relative to the contained file, clipped to [start, start+len).
     *
     * @param locations block locations of har part file
     * @param start the start of the desired range in the contained file
     * @param len the length of the desired range
     * @param fileOffsetInHar the offset of the desired file in the har part file
     * @return block locations with fixed offset and length (same array instance)
     */
    static BlockLocation[] fixBlockLocations(BlockLocation[] locations, long start, long len, long fileOffsetInHar) {
        // offset 1 past last byte of desired range
        long end = start + len;
        for (BlockLocation location : locations) {
            // offset of part block relative to beginning of desired file
            // (may be negative if file starts in this part block)
            long harBlockStart = location.getOffset() - fileOffsetInHar;
            // offset 1 past last byte of har block relative to beginning of
            // desired file
            long harBlockEnd = harBlockStart + location.getLength();
            if (start > harBlockStart) {
                // desired range starts after beginning of this har block
                // fix offset to beginning of relevant range (relative to desired file)
                location.setOffset(start);
                // fix length to relevant portion of har block
                location.setLength(location.getLength() - (start - harBlockStart));
            } else {
                // desired range includes beginning of this har block
                location.setOffset(harBlockStart);
            }
            if (harBlockEnd > end) {
                // range ends before end of this har block
                // fix length to remove irrelevant portion at the end
                location.setLength(location.getLength() - (harBlockEnd - end));
            }
        }
        return locations;
    }
}
public class Dom { /** * TODO : replace by iterator ? */ public static List < Element > getAllChildElements ( Element parent ) { } }
List < Element > result ; NodeList nodes ; int i ; int max ; Node node ; result = new ArrayList < > ( ) ; nodes = parent . getChildNodes ( ) ; max = nodes . getLength ( ) ; for ( i = 0 ; i < max ; i ++ ) { node = nodes . item ( i ) ; if ( node instanceof Element ) { result . add ( ( Element ) node ) ; } } return result ;
public class BlockDrawingHelper {
    /**
     * Draw a filled cuboid of Minecraft blocks of a single type.
     *
     * <p>Coordinates are normalised so either corner may be given first; entities
     * inside the (inclusive) region are cleared before the blocks are set.
     *
     * @param c Contains information about the cuboid to be drawn.
     * @param w The world in which to draw.
     * @throws Exception Throws an exception if the block type is not recognised.
     */
    private void DrawPrimitive(DrawCuboid c, World w) throws Exception {
        XMLBlockState blockType = new XMLBlockState(c.getType(), c.getColour(), c.getFace(), c.getVariant());
        if (!blockType.isValid())
            throw new Exception("Unrecogised item type: " + c.getType().value());
        // Normalise the two corners so (x1,y1,z1) <= (x2,y2,z2) on every axis.
        int x1 = Math.min(c.getX1(), c.getX2());
        int x2 = Math.max(c.getX1(), c.getX2());
        int y1 = Math.min(c.getY1(), c.getY2());
        int y2 = Math.max(c.getY1(), c.getY2());
        int z1 = Math.min(c.getZ1(), c.getZ2());
        int z2 = Math.max(c.getZ1(), c.getZ2());
        // The +1s make the exclusive clear-region cover the inclusive block region.
        clearEntities(w, x1, y1, z1, x2 + 1, y2 + 1, z2 + 1);
        for (int x = x1; x <= x2; x++) {
            for (int y = y1; y <= y2; y++) {
                for (int z = z1; z <= z2; z++) {
                    BlockPos pos = new BlockPos(x, y, z);
                    setBlockState(w, pos, blockType);
                }
            }
        }
    }
}
public class PhotosApi { /** * Get information about a photo . The calling user must have permission to view the photo . * < br > * This method does not require authentication . * @ param photoId Required . The id of the photo to get information for . * @ param secret Optional . The secret for the photo . If the correct secret is passed then permissions checking is skipped . * This enables the ' sharing ' of individual photos by passing around the id and secret . * @ return object with available information for the photo . * @ throws JinxException if required parameters are null or empty , or if there are errors . * @ see < a href = " https : / / www . flickr . com / services / api / flickr . photos . getInfo . html " > flickr . photos . getInfo < / a > */ public PhotoInfo getInfo ( String photoId , String secret ) throws JinxException { } }
JinxUtils . validateParams ( photoId ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photos.getInfo" ) ; params . put ( "photo_id" , photoId ) ; if ( ! JinxUtils . isNullOrEmpty ( secret ) ) { params . put ( "secret" , secret ) ; } return jinx . flickrGet ( params , PhotoInfo . class ) ; // sometimes Flickr sends back responses with " machine _ tag " : false rather than " machine _ tag " : 0 // so we need to work around this by fixing up the response // if the response is not fixed up , Gson cannot parse it // String json = jinx . callFlickr ( params , JinxConstants . Method . GET , true ) ; // json = json . replace ( " : false " , " : 0 " ) ; // return jinx . jsonToClass ( json , PhotoInfo . class ) ;
public class RemoteFieldTable {
    /**
     * Does this list have a next record?
     *
     * <p>If the cursor is at or beyond the highest record accessed so far, the
     * answer is assumed true without probing. Otherwise the next record is read
     * ahead and the accessed counter rolled back so the peek does not count.
     *
     * <p>NOTE(review): the read-ahead via {@code this.next()} advances the
     * underlying cursor as a side effect — confirm callers expect peek-like
     * semantics here.
     *
     * @return true if there is a next record to read.
     * @throws DBException on database access failure
     */
    public boolean hasNext() throws DBException {
        if (m_iCurrentRecord >= m_iRecordsAccessed)
            return true;
        // Probe by actually fetching the next record.
        Object record = this.next();
        if (record == null)
            return false;
        else {
            m_iRecordsAccessed--; // Officially this record has not been accessed
            return true;
        }
    }
}
public class AOStreamIterator {
    /**
     * Returns the next transmit-message request in the stream, or {@code null}
     * when the underlying message iterator is exhausted.
     *
     * <p>Entry/exit trace records are emitted when tracing is enabled.
     *
     * @see java.util.Iterator#next()
     */
    public Object next() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "next");
        }
        Object result;
        if (msgIterator.hasNext()) {
            // The iterator yields value stamps (tick numbers) identifying messages.
            Long valueStamp = (Long) msgIterator.next();
            result = new TransmitMessageRequest(valueStamp.longValue(), aoStream.itemStream, aoStream, messageProcessor, aoh.getDestinationHandler());
        } else {
            // NOTE(review): returns null rather than throwing NoSuchElementException,
            // deviating from the Iterator contract — confirm callers rely on this.
            result = null;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.exit(tc, "next", result);
        }
        return result;
    }
}
public class ClasspathOrder { /** * Add a system classpath entry . * @ param pathEntry * the system classpath entry - - the path string should already have been run through * FastPathResolver . resolve ( FileUtils . CURR _ DIR _ PATH , path * @ param classLoader * the classloader * @ return true , if added and unique */ boolean addSystemClasspathEntry ( final String pathEntry , final ClassLoader classLoader ) { } }
if ( classpathEntryUniqueResolvedPaths . add ( pathEntry ) ) { order . add ( new SimpleEntry < > ( pathEntry , classLoader ) ) ; return true ; } return false ;
public class ProgressBarDrawable { /** * Gets the progress bar padding . */ @ Override public boolean getPadding ( Rect padding ) { } }
padding . set ( mPadding , mPadding , mPadding , mPadding ) ; return mPadding != 0 ;
public class Command {
    /**
     * Executes the command. Before execution is validates Parameters args using the
     * defined schema. The command execution intercepts ApplicationException raised
     * by the called function and throws them.
     *
     * @param correlationId optional transaction id to trace calls across
     *                      components.
     * @param args          the parameters (arguments) to pass to this command for
     *                      execution.
     * @return execution result.
     * @throws ApplicationException when execution fails for whatever reason.
     * @see Parameters
     */
    @Override
    public Object execute(String correlationId, Parameters args) throws ApplicationException {
        // Validate first so bad input fails before the function is ever invoked.
        if (_schema != null)
            _schema.validateAndThrowException(correlationId, args);
        try {
            return _function.execute(correlationId, args);
        } catch (Throwable ex) {
            // Wrap any failure (including Errors) in an InvocationException,
            // preserving the original cause via wrap().
            throw new InvocationException(correlationId, "EXEC_FAILED", "Execution " + _name + " failed: " + ex).withDetails("command", _name).wrap(ex);
        }
    }
}
public class Widgets { /** * Creates an image with the supplied resource and style . */ public static Image newImage ( ImageResource image , String ... styles ) { } }
return setStyleNames ( new Image ( image ) , styles ) ;
public class CmsFlexBucketConfiguration { /** * Computes the bucket set for a set of paths based on this configuration . < p > * The resulting bucket set contains all buckets for which one of the given paths is below the * configured roots of that bucket . * @ param paths a list of root paths * @ return the bucket set for the input paths */ public BucketSet getBucketSet ( Iterable < String > paths ) { } }
Set < String > bucketNames = Sets . newHashSet ( ) ; for ( String path : paths ) { bucketNames . addAll ( getBucketsForPath ( path ) ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Determined bucket set " + bucketNames . toString ( ) + " for path set " + paths ) ; } return new BucketSet ( bucketNames ) ;
public class InstanceStateChangeReasonMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * <p>Writes the code and message fields through the protocol marshaller using
     * their binding descriptors. A null input or any marshalling failure is
     * reported as an {@code SdkClientException}.
     *
     * @param instanceStateChangeReason the model object to marshall; must not be null
     * @param protocolMarshaller the protocol-level marshaller to write into
     */
    public void marshall(InstanceStateChangeReason instanceStateChangeReason, ProtocolMarshaller protocolMarshaller) {
        if (instanceStateChangeReason == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(instanceStateChangeReason.getCode(), CODE_BINDING);
            protocolMarshaller.marshall(instanceStateChangeReason.getMessage(), MESSAGE_BINDING);
        } catch (Exception e) {
            // Re-wrap with context, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class JsonDiff { /** * Computes the difference of two JSON strings , and returns the differences */ public List < JsonDelta > computeDiff ( String node1 , String node2 ) throws IOException { } }
ObjectMapper mapper = new ObjectMapper ( ) ; return computeDiff ( mapper . readTree ( node1 ) , mapper . readTree ( node2 ) ) ;
public class CmsAliasList { /** * Creates the button used for adding new aliases . < p > * @ return the new button */ protected PushButton createAddButton ( ) { } }
PushButton button = createIconButton ( I_CmsButton . ADD_SMALL ) ; button . setTitle ( aliasMessages . addAlias ( ) ) ; return button ;
public class MtasDataDoubleBasic {
    /**
     * Adds an aggregated value (sum over N observations) to this collector.
     *
     * <p>The parent {@code add(boolean)} allocates/advances the current position;
     * the value is then stored at that position. NOTE(review): relies on the
     * fields {@code newCurrentPosition}/{@code newCurrentExisting} being updated
     * by the {@code add(false)} call — confirm against the base class contract.
     *
     * @param valueSum the sum of the observed values
     * @param valueN the number of observations in the sum
     * @return the data collector the value was added to
     * @throws IOException on collector failure
     * @see mtas.codec.util.DataCollector.MtasDataCollector#add(double, long)
     */
    @Override
    public MtasDataCollector<?, ?> add(double valueSum, long valueN) throws IOException {
        MtasDataCollector<?, ?> dataCollector = add(false);
        setValue(newCurrentPosition, valueSum, valueN, newCurrentExisting);
        return dataCollector;
    }
}
public class ReposFragment { /** * Get items to load per page onScroll . * @ param context { @ link Context } * @ return int of num of items that can be loaded onto the screen with scroll enabled */ private int getPerPage ( Context context ) { } }
// fixed item size in recyclerview . Adding 3 enables recyclerview scrolling . return ( context . getResources ( ) . getDisplayMetrics ( ) . heightPixels / context . getResources ( ) . getDimensionPixelSize ( R . dimen . repo_item_height ) ) + 3 ;
public class EstimateSceneCalibrated {
    /**
     * An edge has been declared as defining a good stereo pair. All associated feature will now be
     * triangulated. It is assumed that there is no global coordinate system at this point.
     *
     * <p>For each associated observation pair the feature is triangulated in the
     * local frame of view A; candidates are rejected if the view angle is too
     * small, triangulation fails, the point lies behind the camera, or the
     * reprojection error is excessive.
     */
    void triangulateStereoEdges(Motion edge) {
        View viewA = edge.viewSrc;
        View viewB = edge.viewDst;
        triangulationError.configure(viewA.camera.pinhole, viewB.camera.pinhole);
        for (int i = 0; i < edge.associated.size(); i++) {
            AssociatedIndex f = edge.associated.get(i);
            Point2D_F64 normA = viewA.observationNorm.get(f.src);
            Point2D_F64 normB = viewB.observationNorm.get(f.dst);
            // A small angle between the two rays makes depth poorly constrained.
            double angle = triangulationAngle(normA, normB, edge.a_to_b);
            if (angle < TRIANGULATE_MIN_ANGLE)
                continue;
            Feature3D feature3D = new Feature3D();
            if (!triangulate.triangulate(normA, normB, edge.a_to_b, feature3D.worldPt)) {
                continue;
            }
            // must be in front of the camera
            if (feature3D.worldPt.z <= 0)
                continue;
            // can't have an excessively large reprojection error either
            // (error is compared squared against the squared pixel threshold)
            double error = triangulationError.process(normA, normB, edge.a_to_b, feature3D.worldPt);
            if (error > maxPixelError * maxPixelError)
                continue;
            // Record which views observed the feature and at which observation indices.
            feature3D.views.add(viewA);
            feature3D.views.add(viewB);
            feature3D.obsIdx.add(f.src);
            feature3D.obsIdx.add(f.dst);
            feature3D.triangulationAngle = angle;
            edge.stereoTriangulations.add(feature3D);
        }
    }
}
public class FeatureMate { /** * Check for cover . * @ param geometry the geometry to check against . * @ param usePrepared use prepared geometry . * @ return true if the current geometries covers the supplied one . */ public boolean covers ( Geometry geometry , boolean usePrepared ) { } }
if ( ! getEnvelope ( ) . covers ( geometry . getEnvelopeInternal ( ) ) ) { return false ; } if ( usePrepared ) { if ( preparedGeometry == null ) { preparedGeometry = PreparedGeometryFactory . prepare ( getGeometry ( ) ) ; } return preparedGeometry . covers ( geometry ) ; } else { return getGeometry ( ) . covers ( geometry ) ; }
public class Applications { /** * Gets the list of secure < em > instances < / em > associated to a virtual host * name . * @ param secureVirtualHostName * the virtual hostname for which the secure instances need to be * returned . * @ return list of < em > instances < / em > . */ public List < InstanceInfo > getInstancesBySecureVirtualHostName ( String secureVirtualHostName ) { } }
return Optional . ofNullable ( this . secureVirtualHostNameAppMap . get ( secureVirtualHostName . toUpperCase ( Locale . ROOT ) ) ) . map ( VipIndexSupport :: getVipList ) . map ( AtomicReference :: get ) . orElseGet ( Collections :: emptyList ) ;
public class SARLQuickfixProvider {
    /**
     * Quick fix for "Duplicate field".
     *
     * <p>Delegates to {@code MemberRemoveModification}, which registers a
     * resolution that removes the duplicated member.
     *
     * @param issue the issue.
     * @param acceptor the quick fix acceptor.
     */
    @Fix(IssueCodes.DUPLICATE_FIELD)
    public void fixDuplicateAttribute(Issue issue, IssueResolutionAcceptor acceptor) {
        MemberRemoveModification.accept(this, issue, acceptor);
    }
}
public class Single {

    /**
     * Concatenates a Publisher sequence of SingleSources eagerly into a single stream of values.
     * <p>
     * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the
     * emitted source Publishers as they are observed. The operator buffers the values emitted by these
     * Publishers and then drains them in order, each one after the previous one completes.
     * <dl>
     *  <dt><b>Backpressure:</b></dt>
     *  <dd>Backpressure is honored towards the downstream and the outer Publisher is
     *  expected to support backpressure. Violating this assumption, the operator will
     *  signal {@link io.reactivex.exceptions.MissingBackpressureException}.</dd>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>This method does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <T> the value type
     * @param sources a sequence of Publishers that need to be eagerly concatenated
     * @return the new Publisher instance with the specified concatenation behavior
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public static <T> Flowable<T> concatEager(Publisher<? extends SingleSource<? extends T>> sources) {
        // Each SingleSource is first exposed as a Flowable; concatMapEager
        // subscribes to every inner source immediately and drains results in order.
        return Flowable.fromPublisher(sources).concatMapEager(SingleInternalHelper.<T>toFlowable());
    }
}
public class ComponentCollision { /** * Check elements in group . * @ param current The elements in group . */ private void checkGroup ( Entry < Point , Set < Collidable > > current ) { } }
final Set < Collidable > elements = current . getValue ( ) ; for ( final Collidable objectA : elements ) { checkOthers ( objectA , current ) ; }
public class GraphEditorWindow {

    /**
     * Indicate that the first property should be displayed.
     */
    public void setFirstProperty() {
        // Nothing to select when the property list is empty.
        if (properties.getItemCount() > 0) {
            properties.setSelectedIndex(0);
            // The values map stores one interpolator per property item.
            LinearInterpolator currentValue = (LinearInterpolator) values.get(properties.getSelectedItem());
            panel.setInterpolator(currentValue);
        }
    }
}
public class CmsSendPopupGroupsDialog { /** * Returns a list of all members of the selected groups . < p > * @ return a list of user objects */ private List < CmsUser > getUsers ( ) { } }
if ( m_users == null ) { m_users = new ArrayList < CmsUser > ( ) ; List < CmsUser > manageableUsers = new ArrayList < CmsUser > ( ) ; try { manageableUsers = OpenCms . getRoleManager ( ) . getManageableUsers ( getCms ( ) , "" , true ) ; } catch ( CmsException e ) { if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } Iterator < String > itGroups = getGroups ( ) . iterator ( ) ; while ( itGroups . hasNext ( ) ) { String groupName = itGroups . next ( ) ; try { Iterator < CmsUser > itUsers = getCms ( ) . getUsersOfGroup ( groupName , true ) . iterator ( ) ; while ( itUsers . hasNext ( ) ) { CmsUser user = itUsers . next ( ) ; if ( OpenCms . getSessionManager ( ) . getSessionInfos ( user . getId ( ) ) . isEmpty ( ) ) { continue ; } if ( ! manageableUsers . contains ( user ) ) { continue ; } m_users . add ( user ) ; } } catch ( CmsException e ) { // should never happen } } } return m_users ;
public class HFCAAffiliation {

    /**
     * Creates a child affiliation object under this affiliation.
     *
     * <p>NOTE(review): the method name keeps its historical spelling
     * ("Decendent") for API compatibility.
     *
     * @param name Name of the child affiliation
     * @return The requested child affiliation
     * @throws InvalidArgumentException if the name is not a valid affiliation name
     * @throws AffiliationException if this affiliation has already been deleted
     */
    public HFCAAffiliation createDecendent(String name) throws InvalidArgumentException, AffiliationException {
        // A deleted affiliation must not spawn children.
        if (this.deleted) {
            throw new AffiliationException("Affiliation has been deleted");
        }
        validateAffiliationNames(name);
        // Child names are dot-separated paths rooted at this affiliation's name.
        return new HFCAAffiliation(this.name + "." + name, this.client);
    }
}
public class UpdateSite {

    /**
     * Let sub-classes of UpdateSite provide their own signature validator.
     *
     * @param name the name for the JSON signature validator object;
     *     if {@code name} is null, a default name is derived from the
     *     configured prefix and this update site's id
     * @return the signature validator.
     * @since 2.21
     */
    @Nonnull
    protected JSONSignatureValidator getJsonSignatureValidator(@CheckForNull String name) {
        if (name == null) {
            name = signatureValidatorPrefix + " '" + id + "'";
        }
        return new JSONSignatureValidator(name);
    }
}
public class BinaryGrailsPlugin { /** * Resolves a static resource contained within this binary plugin * @ param path The relative path to the static resource * @ return The resource or null if it doesn ' t exist */ public Resource getResource ( String path ) { } }
final Resource descriptorResource = descriptor . getResource ( ) ; try { Resource resource = descriptorResource . createRelative ( "static" + path ) ; if ( resource . exists ( ) ) { return resource ; } } catch ( IOException e ) { return null ; } return null ;
public class Resources {

    /**
     * Constructs a resource from a string representation. Defaults to a file based resource if no schema is present.
     *
     * @param resource The string representation of the resource
     * @return A resource representing the string representation
     */
    public static Resource from(String resource) {
        // Blank input yields an empty in-memory resource.
        if (StringUtils.isNullOrBlank(resource)) {
            return new StringResource();
        }
        Matcher matcher = protocolPattern.matcher(resource);
        if (matcher.find()) {
            String schema = matcher.group("PROTOCOL");
            String options = matcher.group("OPTIONS");
            String path = matcher.group("PATH");
            // Options are bracketed ("[...]"); strip the surrounding brackets.
            if (StringUtils.isNullOrBlank(options)) {
                options = "";
            } else {
                options = options.replaceFirst("\\[", "").replaceFirst("\\]$", "");
            }
            // Providers are registered by lower-cased schema name.
            ResourceProvider provider = resourceProviders.get(schema.toLowerCase());
            if (provider == null) {
                // Unknown schema: fall back to a generic URI resource if the string parses.
                try {
                    return new URIResource(new URI(resource));
                } catch (URISyntaxException e) {
                    throw new IllegalStateException(schema + " is an unknown protocol.");
                }
            }
            // Some providers need the schema prefix re-attached to the path.
            if (provider.requiresProtocol()) {
                path = schema + ":" + path;
            }
            return provider.createResource(path, Val.of(options).asMap(String.class, String.class));
        }
        // No schema present: treat the whole string as a file path.
        return new FileResource(resource);
    }
}
public class AbstractSubscriptionPublisher { /** * Adds multiple broadcast filters to the provider . * @ param filters the filters to add . * @ see AbstractSubscriptionPublisher # addBroadcastFilter ( BroadcastFilterImpl filter ) */ @ Override public void addBroadcastFilter ( BroadcastFilterImpl ... filters ) { } }
List < BroadcastFilterImpl > filtersList = Arrays . asList ( filters ) ; for ( BroadcastFilterImpl filter : filtersList ) { addBroadcastFilter ( filter ) ; }
public class Interval { /** * Test if an interval is between two others . * @ param that interval * @ param other interval * @ return true if this interval is after that and before the other * @ see # before ( org . nmdp . ngs . fca . Interval ) * @ see # after ( org . nmdp . ngs . fca . Interval ) */ public boolean between ( final Interval < C > that , final Interval < C > other ) { } }
checkNotNull ( this . range , that . range ) ; checkNotNull ( other . range ) ; return this . after ( that ) && this . before ( other ) ;
public class TransactionHelper {

    /**
     * Begin a transaction if transaction handling is enabled.
     *
     * <p>If no transaction is currently associated with the thread, a new user
     * transaction is started and this helper marks itself as the initiator
     * (responsible for the eventual completion of the transaction).
     *
     * @throws HandlerException if the transaction could not be started
     */
    public void begin() throws HandlerException {
        if (_enabled) {
            try {
                _userTx = TransactionManagerLocator.INSTANCE.getUserTransaction();
                // Only start a new transaction when none is active; otherwise we
                // simply participate in the caller's transaction without initiating.
                if (_userTx.getStatus() == Status.STATUS_NO_TRANSACTION) {
                    _userTx.begin();
                    _isInitiator = true;
                }
            } catch (SystemException se) {
                throw CommonKnowledgeMessages.MESSAGES.userTransactionBeginFailedSystem(se);
            } catch (NotSupportedException nse) {
                throw CommonKnowledgeMessages.MESSAGES.userTransactionBeginFailedNSE(nse);
            }
        }
    }
}
public class FilterAdapterContext {

    /**
     * Starts tracking a {@link FilterList} while its members are adapted.
     *
     * <p>The list is pushed on an internal stack; closing the returned
     * {@link ContextCloseable} pops it again, asserting that pushes and pops
     * stayed balanced in between.
     *
     * @param lst a {@link org.apache.hadoop.hbase.filter.FilterList} object; must not be null.
     * @return a {@link com.google.cloud.bigtable.hbase.adapters.filters.FilterAdapterContext.ContextCloseable}
     *     that pops the list when closed.
     */
    public ContextCloseable beginFilterList(final FilterList lst) {
        Preconditions.checkNotNull(lst);
        filterListStack.push(lst);
        return new ContextCloseable() {
            @Override
            public void close() {
                // Guard against unbalanced or interleaved begin/close calls.
                Preconditions.checkState(filterListStack.peek().equals(lst));
                filterListStack.pop();
            }
        };
    }
}
public class GedAppender { /** * { @ inheritDoc } */ @ Override public void appendString ( final String appendage ) { } }
owner . setString ( owner . getString ( ) . concat ( appendage ) ) ;
public class PrimaveraConvert {

    /**
     * Process a single project: read it from the Primavera database and write
     * it to the requested output format, reporting elapsed times to stdout.
     *
     * @param reader Primavera reader
     * @param projectID required project ID
     * @param outputFile output file name
     */
    private void processProject(PrimaveraDatabaseReader reader, int projectID, String outputFile) throws Exception {
        long start = System.currentTimeMillis();
        reader.setProjectID(projectID);
        ProjectFile projectFile = reader.read();
        long elapsed = System.currentTimeMillis() - start;
        System.out.println("Reading database completed in " + elapsed + "ms.");

        System.out.println("Writing output file started.");
        start = System.currentTimeMillis();
        // The writer implementation is chosen from the output file name.
        ProjectWriter writer = ProjectWriterUtility.getProjectWriter(outputFile);
        writer.write(projectFile, outputFile);
        elapsed = System.currentTimeMillis() - start;
        System.out.println("Writing output completed in " + elapsed + "ms.");
    }
}
public class JobFlowInstancesConfig {

    /**
     * Applies to clusters that use the instance fleet configuration. When multiple EC2 subnet IDs are specified,
     * Amazon EMR evaluates them and launches instances in the optimal subnet.
     * <note>
     * The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x
     * versions.
     * </note>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEc2SubnetIds(java.util.Collection)} or {@link #withEc2SubnetIds(java.util.Collection)} if you want to
     * override the existing values.
     *
     * @param ec2SubnetIds the EC2 subnet IDs to append
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public JobFlowInstancesConfig withEc2SubnetIds(String... ec2SubnetIds) {
        // Lazily create the backing list, pre-sized for the incoming values.
        if (this.ec2SubnetIds == null) {
            setEc2SubnetIds(new com.amazonaws.internal.SdkInternalList<String>(ec2SubnetIds.length));
        }
        for (String ele : ec2SubnetIds) {
            this.ec2SubnetIds.add(ele);
        }
        return this;
    }
}
public class TreeNode {

    /**
     * Finds a child TreeNode based on its path.
     *
     * Searches the child nodes for the first element, then that
     * node's children for the second element, etc.
     *
     * @param path the sequence of values to follow from this node
     * @return the node reached by following the path
     * @throws IllegalArgumentException if no such node exists
     */
    @SuppressWarnings("unchecked")
    public TreeNode<T> findByPath(T... path) {
        // Delegates to the List-based overload.
        return findByPath(Arrays.asList(path));
    }
}
public class Translation { /** * Calculates the transformation . * @ param transformable the transformable * @ param comp the comp */ @ Override protected void doTransform ( ITransformable . Translate transformable , float comp ) { } }
float fromX = reversed ? this . toX : this . fromX ; float toX = reversed ? this . fromX : this . toX ; float fromY = reversed ? this . toY : this . fromY ; float toY = reversed ? this . fromY : this . toY ; float fromZ = reversed ? this . toZ : this . fromZ ; float toZ = reversed ? this . fromZ : this . toZ ; transformable . translate ( fromX + ( toX - fromX ) * comp , fromY + ( toY - fromY ) * comp , fromZ + ( toZ - fromZ ) * comp ) ;
public class ASTIsTRUE {

    /**
     * to make sure fcn get bound at each new context
     */
    @Override
    void apply(Env env, int argcnt, ASTApply apply) {
        // res is 1 iff the top of the environment is a double equal to 1.0.
        // Note the short-circuit: popDbl() is only invoked when isDbl() is true.
        double res = env.isDbl() && env.popDbl() == 1.0 ? 1 : 0;
        // NOTE(review): env.pop() runs unconditionally even though popDbl() may
        // already have consumed the top value — confirm the Env stack contract
        // before changing this sequence.
        env.pop();
        env.poppush(res);
    }
}
public class NoClientBindProtocolSocketFactory {

    /**
     * Attempts to get a new socket connection to the given host within the given time limit.
     * As the class name implies, the local address/port arguments are deliberately ignored:
     * this factory never binds the client side of the socket.
     *
     * @param host the host name/IP
     * @param port the port on the host
     * @param localAddress the local host name/IP to bind the socket to, ignored
     * @param localPort the port on the local machine, ignored
     * @param params {@link HttpConnectionParams Http connection parameters}
     * @return Socket a new socket
     * @throws IOException if an I/O error occurs while creating the socket
     * @throws UnknownHostException if the IP address of the host cannot be determined
     * @throws ConnectTimeoutException if socket cannot be connected within the given time limit
     * @since 3.0
     */
    public Socket createSocket(String host, int port, InetAddress localAddress, int localPort, HttpConnectionParams params) throws IOException, UnknownHostException, ConnectTimeoutException {
        if (params == null) {
            throw new IllegalArgumentException("Parameters may not be null");
        }
        int timeout = params.getConnectionTimeout();
        if (timeout == 0) {
            // ignore the local address/port for binding
            return createSocket(host, port);
        } else {
            // Connect with an explicit timeout (Socket#connect, JRE 1.4+).
            Socket s = new Socket();
            s.connect(new InetSocketAddress(host, port), timeout);
            return s;
        }
    }
}
public class ChannelFrameworkImpl {

    /**
     * Registers a channel factory class under the given name, then triggers
     * processing of any configuration that was waiting for this factory type.
     *
     * @see com.ibm.wsspi.channelfw.ChannelFramework#registerFactory(java.lang.String, java.lang.Class)
     */
    @Override
    public void registerFactory(String name, Class<? extends ChannelFactory> factory) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "registerFactory: " + name + "; " + factory);
        }
        synchronized (this.factories) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                // Warn when a different factory silently replaces an existing one.
                Class<? extends ChannelFactory> prevFactory = this.factories.get(name);
                if (null != prevFactory && factory != prevFactory) {
                    Tr.event(tc, "WARNING: overlaying existing factory: " + prevFactory);
                }
            }
            this.factories.put(name, factory);
        } // end-sync
        // now that we have a new factory type, tell ChannelUtils to process
        // any delayed config that might be waiting for it
        ChannelUtils.loadConfig(null);
    }
}
public class KafkaHelper {

    /**
     * Create a producer that can write to this broker.
     *
     * @param keySerializer Key serializer class
     * @param valueSerializer Value serializer class
     * @param overrideConfig Producer config to override. Pass null if there aren't any.
     * @param <K> Type of Key
     * @param <V> Type of Value
     * @return KafkaProducer
     */
    public <K, V> KafkaProducer<K, V> createProducer(Serializer<K> keySerializer, Serializer<V> valueSerializer, Properties overrideConfig) {
        // Thin delegation to the embedded broker.
        return broker.createProducer(keySerializer, valueSerializer, overrideConfig);
    }
}
public class AmazonApiGatewayClient {

    /**
     * Gets the <a>RequestValidators</a> collection of a given <a>RestApi</a>.
     *
     * @param request the request identifying the RestApi whose validators are fetched
     * @return Result of the GetRequestValidators operation returned by the service.
     * @throws BadRequestException The submitted request is not valid, for example, the input is incomplete or
     *         incorrect. See the accompanying error message for details.
     * @throws UnauthorizedException The request is denied because the caller has insufficient permissions.
     * @throws NotFoundException The requested resource is not found. Make sure that the request URI is correct.
     * @throws TooManyRequestsException The request has reached its throttling limit. Retry after the specified time
     *         period.
     * @sample AmazonApiGateway.GetRequestValidators
     */
    @Override
    public GetRequestValidatorsResult getRequestValidators(GetRequestValidatorsRequest request) {
        // Apply registered request handlers/customizations before dispatching.
        request = beforeClientExecution(request);
        return executeGetRequestValidators(request);
    }
}
public class DemoThreeViewStereoApp { /** * Select two views which are the closest to an idea stereo pair . Little rotation and little translation along * z - axis */ private int [ ] selectBestPair ( SceneStructureMetric structure ) { } }
Se3_F64 w_to_0 = structure . views [ 0 ] . worldToView ; Se3_F64 w_to_1 = structure . views [ 1 ] . worldToView ; Se3_F64 w_to_2 = structure . views [ 2 ] . worldToView ; Se3_F64 view0_to_1 = w_to_0 . invert ( null ) . concat ( w_to_1 , null ) ; Se3_F64 view0_to_2 = w_to_0 . invert ( null ) . concat ( w_to_2 , null ) ; Se3_F64 view1_to_2 = w_to_1 . invert ( null ) . concat ( w_to_2 , null ) ; Se3_F64 candidates [ ] = new Se3_F64 [ ] { view0_to_1 , view0_to_2 , view1_to_2 } ; int best = - 1 ; double bestScore = Double . MAX_VALUE ; for ( int i = 0 ; i < candidates . length ; i ++ ) { double s = score ( candidates [ i ] ) ; System . out . println ( "stereo score[" + i + "] = " + s ) ; if ( s < bestScore ) { bestScore = s ; best = i ; } } switch ( best ) { case 0 : return new int [ ] { 0 , 1 } ; case 1 : return new int [ ] { 0 , 2 } ; case 2 : return new int [ ] { 1 , 2 } ; } throw new RuntimeException ( "BUG!" ) ;
public class SpiderTransaction { /** * Add the following row deletion . */ private void deleteRow ( String storeName , String rowKey ) { } }
List < String > rowKeys = m_rowDeletes . get ( storeName ) ; if ( rowKeys == null ) { rowKeys = new ArrayList < > ( ) ; m_rowDeletes . put ( storeName , rowKeys ) ; } rowKeys . add ( rowKey ) ; m_totalUpdates ++ ;
public class StringConcatenation {

    /**
     * Return a list of segments where each segment is either the content of a line in the given text or a line-break
     * according to the configured delimiter. Existing line-breaks in the text will be replaced by this instance's
     * delimiter.
     *
     * @param text the to-be-splitted text. May be <code>null</code>.
     * @return a list of segments. Is never <code>null</code>.
     */
    protected List<String> splitLinesAndNewLines(String text) {
        if (text == null)
            return Collections.emptyList();
        int idx = initialSegmentSize(text);
        // Fast path: the initial segment spans the whole text, so there is
        // nothing to split and the text itself is the single segment.
        if (idx == text.length()) {
            return Collections.singletonList(text);
        }
        return continueSplitting(text, idx);
    }
}
public class JvmPauseMonitor {

    /**
     * Stops the JVM pause monitor.
     *
     * <p>Interrupts the monitor thread, waits for it to terminate, and then
     * resets internal state. If the join itself is interrupted, the current
     * thread's interrupt flag is restored.
     *
     * @throws IllegalStateException if the monitor was never started
     */
    public void stop() {
        Preconditions.checkState(mJvmMonitorThread != null, "JVM monitor thread does not start");
        mJvmMonitorThread.interrupt();
        try {
            mJvmMonitorThread.join();
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers.
            Thread.currentThread().interrupt();
        }
        reset();
    }
}
public class MisoUtil {

    /**
     * Convert the given full coordinates to screen-based pixel coordinates.
     * Converted coordinates are placed in the given point object.
     *
     * @param metrics the scene metrics used for the conversion.
     * @param x the x-position full coordinate.
     * @param y the y-position full coordinate.
     * @param spos the point object to place coordinates in.
     * @return the point passed in to receive the coordinates.
     */
    public static Point fullToScreen(MisoSceneMetrics metrics, int x, int y, Point spos) {
        // get the tile screen position
        int tx = fullToTile(x), ty = fullToTile(y);
        Point tspos = tileToScreen(metrics, tx, ty, new Point());

        // get the pixel position of the fine coords within the tile
        Point ppos = new Point();
        int fx = x - (tx * FULL_TILE_FACTOR), fy = y - (ty * FULL_TILE_FACTOR);
        fineToPixel(metrics, fx, fy, ppos);

        // final position is tile position offset by fine position
        spos.x = tspos.x + ppos.x;
        spos.y = tspos.y + ppos.y;
        return spos;
    }
}
public class InternalXtextParser {

    /**
     * InternalXtext.g:270:1: entryRuleParameter : ruleParameter EOF ;
     *
     * <p>ANTLR-generated entry rule; do not edit by hand.
     */
    public final void entryRuleParameter() throws RecognitionException {
        try {
            // InternalXtext.g:271:1: ( ruleParameter EOF )
            // InternalXtext.g:272:1: ruleParameter EOF
            {
                before(grammarAccess.getParameterRule());
                pushFollow(FollowSets000.FOLLOW_1);
                ruleParameter();
                state._fsp--;
                after(grammarAccess.getParameterRule());
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
public class UpgradeInputByteBufferUtil {

    /**
     * This method will call the synchronous or asynchronous read depending on
     * how the channel is configured.
     *
     * @param amountToRead number of bytes requested from the TCP channel
     * @return If we have read any data or not
     * @throws IOException if the underlying read fails
     */
    private boolean doRead(int amountToRead) throws IOException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "doRead, Current buffer, " + _buffer + ", reading from the TCP Channel, readLine : " + _isReadLine);
        }
        try {
            // Async path only applies when a callback is registered and we are
            // not in read-line mode; otherwise fall back to the sync read.
            if (_tcpChannelCallback != null && !_isReadLine) {
                // async read logic
                return immediateRead(amountToRead);
            } else {
                return syncRead(amountToRead);
            }
        } catch (IOException e) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "doRead, we encountered an exception during the read : " + e);
            }
            // NOTE(review): a second IOException after one has already been
            // recorded is swallowed (returns false) rather than rethrown —
            // confirm this "first error wins" contract is intended.
            if (_error != null) {
                return false;
            }
            _error = e;
            throw e;
        }
    }
}
public class RepositoryConfiguration {

    /**
     * Resolve the supplied URL to a JSON document, read the contents, and parse into a {@link RepositoryConfiguration}.
     *
     * @param url the URL; may not be null
     * @return the parsed repository configuration; never null
     * @throws ParsingException if the content could not be parsed as a valid JSON document
     */
    public static RepositoryConfiguration read(URL url) throws ParsingException {
        CheckArg.isNotNull(url, "url");
        Document doc = Json.read(url);
        // The configuration name is the URL's file name without its extension.
        return new RepositoryConfiguration(doc, withoutExtension(url.getFile()));
    }
}
public class Matrix4d {

    /**
     * Set this matrix to a rotation transformation to make <code>-z</code> point along <code>dir</code>.
     * <p>
     * This is equivalent to calling {@link #setLookAt(Vector3dc, Vector3dc, Vector3dc) setLookAt()}
     * with <code>eye = (0, 0, 0)</code> and <code>center = dir</code>.
     * In order to apply the lookalong transformation to any previous existing transformation,
     * use {@link #lookAlong(Vector3dc, Vector3dc)}.
     *
     * @see #setLookAlong(Vector3dc, Vector3dc)
     * @see #lookAlong(Vector3dc, Vector3dc)
     *
     * @param dir the direction in space to look along
     * @param up the direction of 'up'
     * @return this
     */
    public Matrix4d setLookAlong(Vector3dc dir, Vector3dc up) {
        // Delegate to the component-wise overload.
        return setLookAlong(dir.x(), dir.y(), dir.z(), up.x(), up.y(), up.z());
    }
}
public class ChecksumPhrase { /** * Public API . */ public Formula getFormula ( ) { } }
Reagent [ ] reagents = new Reagent [ ] { ResourceHelper . CONTEXT_SOURCE , ResourceHelper . LOCATION_PHRASE } ; return new SimpleFormula ( getClass ( ) , reagents ) ;
public class MergeRequestApi {

    /**
     * Get a Stream of participants of a merge request.
     *
     * <pre><code>GitLab Endpoint: GET /projects/:id/merge_requests/:merge_request_iid/participants</code></pre>
     *
     * @param projectIdOrPath the project in the form of an Integer (ID), String (path), or Project instance
     * @param mergeRequestIid the IID of the merge request to get
     * @return a Stream containing all participants for the specified merge request
     * @throws GitLabApiException if any exception occurs
     */
    public Stream<Participant> getParticipantsStream(Object projectIdOrPath, Integer mergeRequestIid) throws GitLabApiException {
        // Fetch using the default page size and expose the result as a Stream.
        return (getParticipants(projectIdOrPath, mergeRequestIid, getDefaultPerPage()).stream());
    }
}
public class XMLUtil {

    /**
     * Write the given node tree into a XML file.
     *
     * @param fragment is the object that contains the node tree
     * @param writer is the target stream
     * @throws IOException if the stream cannot be written.
     */
    public static void writeXML(DocumentFragment fragment, Writer writer) throws IOException {
        assert fragment != null : AssertMessages.notNullParameter(0);
        assert writer != null : AssertMessages.notNullParameter(1);
        writeNode(fragment, writer);
    }
}
public class WSRdbOnePhaseXaResourceImpl {

    /**
     * Logs a warning for the given XAException (error code string plus message)
     * and returns the same exception so callers can rethrow it inline.
     *
     * @param xae the exception to trace
     * @param callerClass the class reporting the exception
     *     (NOTE(review): currently unused by this implementation — confirm whether
     *     it should be included in the trace)
     * @return the exception passed in
     */
    public static final XAException traceXAException(XAException xae, Class<?> callerClass) {
        Tr.warning(tc, "THROW_XAEXCEPTION",
                new Object[] { AdapterUtil.getXAExceptionCodeString(xae.errorCode), xae.getMessage() });
        return xae;
    }
}
public class PasswordPolicyService {

    /**
     * Records the password that was associated with the given user at the time
     * the user was queried, such that future attempts to set that same password
     * for that user will be denied. The number of passwords remembered for each
     * user is limited by the password policy.
     *
     * @param user The user whose password should be recorded within the password history.
     * @throws GuacamoleException If the password policy cannot be parsed.
     */
    public void recordPassword(ModeledUser user) throws GuacamoleException {
        final PasswordPolicy policy = environment.getPasswordPolicy();
        final int historySize = policy.getHistorySize();
        // Only record when history tracking is enabled by the policy.
        if (historySize > 0) {
            passwordRecordMapper.insert(user.getPasswordRecord(), historySize);
        }
    }
}
public class FbBotMillBean { /** * Retrieves the sender from an envelope . It never returns null . * @ param envelope * the message envelope . * @ return a { @ link User } containing the sender if found , empty otherwise . * It never returns null . */ protected User safeGetSender ( MessageEnvelope envelope ) { } }
if ( envelope != null && envelope . getSender ( ) != null && envelope . getSender ( ) . getId ( ) != null ) { return envelope . getSender ( ) ; } return new User ( ) ;
public class StatementParameter {

    /**
     * Registers a Long-typed statement parameter.
     *
     * @param value the parameter value; rejected by checkNull when null
     */
    public void setLong(Long value) {
        this.checkNull(value);
        // Values and their declared types are tracked in parallel lists.
        list.add(value);
        type.add(Long.class);
    }
}
public class JarTemplateSource { private ITemplateSourceEntry addJarEntry ( Map < String , ITemplateSourceEntry > entries , JarEntry jarEntry ) { } }
String name = jarEntry . getName ( ) . substring ( basePathLength ) ; if ( name . isEmpty ( ) ) { name = "/" ; } ITemplateSourceEntry entry = new JarTemplateSourceEntry ( name , jarEntry ) ; entries . put ( name , entry ) ; return entry ;
public class BundleScriptEngineResolver {

    /**
     * Reads from the input, skipping whole-line comments (lines starting with
     * '#') and stripping a trailing '#' comment from the first content line.
     *
     * @param in the reader to consume; must not be null
     * @return the first non-comment line with any trailing comment removed and
     *     surrounding whitespace trimmed, or "" if the input contains no such line
     * @throws IOException if reading fails
     */
    private String removeCommentsFromInput(BufferedReader in) throws IOException {
        String line = in.readLine();
        // Bug fix: readLine() returns null at end of stream; the previous code
        // dereferenced it unconditionally and threw NullPointerException for an
        // empty input or one consisting only of comment lines.
        while (line != null && line.startsWith("#")) {
            line = in.readLine();
        }
        if (line == null) {
            return "";
        }
        // Strip an inline trailing comment, if any.
        int commentIdx = line.indexOf('#');
        if (commentIdx >= 0) {
            line = line.substring(0, commentIdx);
        }
        return line.trim();
    }
}
public class ExecutorServiceImpl {

    /**
     * This is private, so handling both interceptors and wrapping in this method for simplicity.
     *
     * <p>Each task is first passed through {@code wrap(Callable)}; unless the
     * server is stopping, it is additionally wrapped in a CallableWrapper.
     *
     * @param tasks the tasks to wrap; must not be null
     * @return a new list of wrapped tasks, in iteration order
     */
    private <T> Collection<? extends Callable<T>> wrap(Collection<? extends Callable<T>> tasks) {
        List<Callable<T>> wrappedTasks = new ArrayList<Callable<T>>();
        Iterator<? extends Callable<T>> i = tasks.iterator();
        while (i.hasNext()) {
            Callable<T> c = wrap(i.next());
            // NOTE(review): during server stop the extra CallableWrapper layer is
            // skipped — confirm this is intentional before changing.
            if (serverStopping)
                wrappedTasks.add(c);
            else
                wrappedTasks.add(new CallableWrapper<T>(c));
        }
        return wrappedTasks;
    }
}
public class DefaultEndpoint {

    /**
     * Reset the writer state. Queued commands will be canceled and the internal state will be reset. This is useful when the
     * internal state machine gets out of sync with the connection.
     */
    @Override
    public void reset() {
        if (debugEnabled) {
            logger.debug("{} reset()", logPrefix());
        }
        // Notify the pipeline first so handlers can discard in-flight state,
        // then cancel anything still buffered locally.
        if (channel != null) {
            channel.pipeline().fireUserEventTriggered(new ConnectionEvents.Reset());
        }
        cancelBufferedCommands("Reset");
    }
}
public class Publishers { /** * Is the given type a Publisher or convertible to a publisher . * @ param type The type to check * @ return True if it is */ public static boolean isConvertibleToPublisher ( Class < ? > type ) { } }
if ( Publisher . class . isAssignableFrom ( type ) ) { return true ; } else { for ( Class < ? > reactiveType : REACTIVE_TYPES ) { if ( reactiveType . isAssignableFrom ( type ) ) { return true ; } } return false ; }
public class AccountsInner {

    /**
     * Gets the SAS token associated with the specified Data Lake Analytics and Azure Storage account and container
     * combination.
     *
     * @param resourceGroupName The name of the Azure resource group that contains the Data Lake Analytics account.
     * @param accountName The name of the Data Lake Analytics account from which an Azure Storage account's SAS token is being requested.
     * @param storageAccountName The name of the Azure storage account for which the SAS token is being requested.
     * @param containerName The name of the Azure storage container for which the SAS token is being requested.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;SasTokenInfoInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<SasTokenInfoInner>>> listSasTokensSinglePageAsync(final String resourceGroupName, final String accountName, final String storageAccountName, final String containerName) {
        // Validate all required parameters before issuing the request.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (storageAccountName == null) {
            throw new IllegalArgumentException("Parameter storageAccountName is required and cannot be null.");
        }
        if (containerName == null) {
            throw new IllegalArgumentException("Parameter containerName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the request and unwrap one page of results from the raw response.
        return service.listSasTokens(resourceGroupName, accountName, storageAccountName, containerName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<SasTokenInfoInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<SasTokenInfoInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<SasTokenInfoInner>> result = listSasTokensDelegate(response);
                        return Observable.just(new ServiceResponse<Page<SasTokenInfoInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class SnapshotsInner { /** * Revokes access to a snapshot . * @ param resourceGroupName The name of the resource group . * @ param snapshotName The name of the snapshot that is being created . The name can ' t be changed after the snapshot is created . Supported characters for the name are a - z , A - Z , 0-9 and _ . The max name length is 80 characters . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatusResponseInner > beginRevokeAccessAsync ( String resourceGroupName , String snapshotName , final ServiceCallback < OperationStatusResponseInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginRevokeAccessWithServiceResponseAsync ( resourceGroupName , snapshotName ) , serviceCallback ) ;
public class ObjectsApi { /** * Get DNs or agent groups . * Get DNs ( directory numbers ) or agent groups from Configuration Server with the specified filters . * @ param objectType The type of object . Possible values are dns , skills , dn - groups or agent - groups . ( required ) * @ param dnType If the object _ type is & # 39 ; dns & # 39 ; , then you need to specify the DN type ( for example , CFGRoutingPoint ) . For possible values , see [ CfgDNType ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgDNType ) in the Platform SDK documentation . ( optional ) * @ param dnGroups If the object _ type is & # 39 ; dns & # 39 ; , may contain a list of DN group names to filter DNs . ( optional ) * @ param groupType If the object _ type is & # 39 ; agent - groups & # 39 ; , then you need to specify the agent group type . ( optional ) * @ param limit The number of objects the Provisioning API should return . ( optional ) * @ param offset The number of matches the Provisioning API should skip in the returned objects . ( optional ) * @ param searchTerm The term that you want to search for in the object keys . The Provisioning API searches for the this term in the value of the key you specify in & # 39 ; search _ key & # 39 ; . ( optional ) * @ param searchKey The key you want the Provisioning API to use when searching for the term you specified in & # 39 ; search _ term & # 39 ; . You can find valid key names in the Platform SDK documentation for [ CfgDN ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgDN ) and [ CfgAgentGroup ] ( https : / / docs . genesys . com / Documentation / PSDK / latest / ConfigLayerRef / CfgAgentGroup ) . ( optional ) * @ param matchMethod The method the Provisioning API should use to match the & # 39 ; search _ term & # 39 ; . Possible values are includes , startsWith , endsWith , and isEqual . 
( optional , default to includes ) * @ param sortKey A key in [ CfgDN ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgDN ) , [ CfgSkill ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgSkill ) or [ CfgAgentGroup ] ( https : / / docs . genesys . com / Documentation / PSDK / latest / ConfigLayerRef / CfgAgentGroup ) to sort the search results . ( optional ) * @ param sortAscending Specifies whether to sort the search results in ascending or descending order . ( optional , default to true ) * @ param sortMethod Specifies the sort method . Possible values are caseSensitive , caseInsensitive or numeric . ( optional , default to caseSensitive ) * @ param dbids Comma - separated list of DNs to be fetched . ( optional ) * @ param inUse Specifies whether to return only skills actually assigned to agents . ( optional , default to false ) * @ return GetObjectsSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public GetObjectsSuccessResponse getObject ( String objectType , String dnType , List < String > dnGroups , String groupType , Integer limit , Integer offset , String searchTerm , String searchKey , String matchMethod , String sortKey , Boolean sortAscending , String sortMethod , String dbids , Boolean inUse ) throws ApiException { } }
ApiResponse < GetObjectsSuccessResponse > resp = getObjectWithHttpInfo ( objectType , dnType , dnGroups , groupType , limit , offset , searchTerm , searchKey , matchMethod , sortKey , sortAscending , sortMethod , dbids , inUse ) ; return resp . getData ( ) ;
public class JingleSession { /** * Validate the state changes . */ public void setSessionState ( JingleSessionState stateIs ) { } }
LOGGER . fine ( "Session state change: " + sessionState + "->" + stateIs ) ; stateIs . enter ( ) ; sessionState = stateIs ;
public class InternalXtextParser {

    /**
     * ANTLR-generated entry point for the AbstractTerminal rule.
     * InternalXtext.g:495:1: entryRuleAbstractTerminal : ruleAbstractTerminal EOF ;
     *
     * NOTE(review): generated parser code — do not hand-edit the logic; regenerate
     * from the grammar instead.
     */
    public final void entryRuleAbstractTerminal() throws RecognitionException {
        try {
            // InternalXtext.g:496:1: ( ruleAbstractTerminal EOF )
            // InternalXtext.g:497:1: ruleAbstractTerminal EOF
            {
                // Notify grammar access listeners before entering the rule.
                before(grammarAccess.getAbstractTerminalRule());
                pushFollow(FollowSets000.FOLLOW_1);
                ruleAbstractTerminal();
                // Pop the follow-set stack frame pushed above.
                state._fsp--;
                after(grammarAccess.getAbstractTerminalRule());
                // Entry rules must consume the whole input, ending at EOF.
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        }
        catch (RecognitionException re) {
            // Standard ANTLR error handling: report, then resynchronize.
            reportError(re);
            recover(input, re);
        }
        finally {
        }
        return;
    }
}
public class BsonParser { /** * Reads a string that consists of a integer denoting the number of bytes , * the bytes ( including a terminating 0 byte ) * @ return the string * @ throws IOException if the string could not be read */ protected String readString ( ) throws IOException { } }
// read number of bytes int bytes = _in . readInt ( ) ; if ( bytes <= 0 ) { throw new IOException ( "Invalid number of string bytes" ) ; } String s ; if ( bytes > 1 ) { s = _in . readUTF ( bytes - 1 ) ; } else { s = "" ; } // read terminating zero _in . readByte ( ) ; return s ;
public class HtmlWriter {

    /**
     * Writes the representation of a <CODE>Font</CODE> as an inline CSS
     * <CODE>style</CODE> attribute: first any caller-supplied style properties,
     * then the font family, size, color and style flags derived from the font.
     * Does nothing when the font is null or is the document's default font.
     *
     * @param font a <CODE>Font</CODE>
     * @param styleAttributes the style of the font (may be null)
     * @throws IOException if writing to the underlying stream fails
     */
    protected void write(Font font, Properties styleAttributes) throws IOException {
        // Nothing to emit for a missing or default font.
        if (font == null || !isOtherFont(font) /* || styleAttributes == null */) return;
        // Open the style="..." attribute.
        write(" ");
        write(HtmlTags.STYLE);
        write("=\"");
        if (styleAttributes != null) {
            // Caller-supplied CSS properties are written first.
            String key;
            for (Enumeration e = styleAttributes.propertyNames(); e.hasMoreElements();) {
                key = (String) e.nextElement();
                writeCssProperty(key, styleAttributes.getProperty(key));
            }
        }
        if (isOtherFont(font)) {
            writeCssProperty(Markup.CSS_KEY_FONTFAMILY, font.getFamilyname());
            if (font.getSize() != Font.UNDEFINED) {
                writeCssProperty(Markup.CSS_KEY_FONTSIZE, font.getSize() + "pt");
            }
            if (font.getColor() != null) {
                writeCssProperty(Markup.CSS_KEY_COLOR, HtmlEncoder.encode(font.getColor()));
            }
            int fontstyle = font.getStyle();
            BaseFont bf = font.getBaseFont();
            if (bf != null) {
                // Infer bold/italic from the PostScript name when the font carries
                // an embedded base font (e.g. "Helvetica-BoldOblique").
                String ps = bf.getPostscriptFontName().toLowerCase();
                if (ps.indexOf("bold") >= 0) {
                    if (fontstyle == Font.UNDEFINED) fontstyle = 0;
                    fontstyle |= Font.BOLD;
                }
                if (ps.indexOf("italic") >= 0 || ps.indexOf("oblique") >= 0) {
                    if (fontstyle == Font.UNDEFINED) fontstyle = 0;
                    fontstyle |= Font.ITALIC;
                }
            }
            if (fontstyle != Font.UNDEFINED && fontstyle != Font.NORMAL) {
                // Map the bold/italic bits onto font-weight / font-style.
                switch (fontstyle & Font.BOLDITALIC) {
                    case Font.BOLD:
                        writeCssProperty(Markup.CSS_KEY_FONTWEIGHT, Markup.CSS_VALUE_BOLD);
                        break;
                    case Font.ITALIC:
                        writeCssProperty(Markup.CSS_KEY_FONTSTYLE, Markup.CSS_VALUE_ITALIC);
                        break;
                    case Font.BOLDITALIC:
                        writeCssProperty(Markup.CSS_KEY_FONTWEIGHT, Markup.CSS_VALUE_BOLD);
                        writeCssProperty(Markup.CSS_KEY_FONTSTYLE, Markup.CSS_VALUE_ITALIC);
                        break;
                }
                // CSS only supports one decoration tag so if both are specified
                // only one of the two will display
                if ((fontstyle & Font.UNDERLINE) > 0) {
                    writeCssProperty(Markup.CSS_KEY_TEXTDECORATION, Markup.CSS_VALUE_UNDERLINE);
                }
                if ((fontstyle & Font.STRIKETHRU) > 0) {
                    writeCssProperty(Markup.CSS_KEY_TEXTDECORATION, Markup.CSS_VALUE_LINETHROUGH);
                }
            }
        }
        // Close the style attribute.
        write("\"");
    }
}
public class Ginv { /** * Swap components in the two rows . * @ param matrix * the matrix to modify * @ param row1 * the first row * @ param row2 * the second row */ public static void swapRows ( Matrix matrix , long row1 , long row2 ) { } }
double temp = 0 ; long cols = matrix . getColumnCount ( ) ; for ( long col = 0 ; col < cols ; col ++ ) { temp = matrix . getAsDouble ( row1 , col ) ; matrix . setAsDouble ( matrix . getAsDouble ( row2 , col ) , row1 , col ) ; matrix . setAsDouble ( temp , row2 , col ) ; }
public class NameSpace {

    /**
     * Resolves a method against this namespace's imports: object imports are
     * searched first (yielding a bound method), then static imports (yielding
     * an unbound method).
     *
     * @param name the method name
     * @param sig the argument signature
     * @return the matching method, or null if no import provides one
     * @throws UtilEvalError on evaluation error
     */
    protected BshMethod getImportedMethod(final String name, final Class<?>[] sig) throws UtilEvalError {
        // Object imports take precedence over static imports.
        for (final Object imported : this.importedObjects) {
            final Invocable candidate = Reflect.resolveJavaMethod(imported.getClass(), name, sig, false /* onlyStatic */);
            if (candidate == null) {
                continue;
            }
            // Bind the method to the imported instance.
            return new BshMethod(candidate, imported);
        }
        for (final Class<?> importedClass : this.importedStatic) {
            final Invocable candidate = Reflect.resolveJavaMethod(importedClass, name, sig, true /* onlyStatic */);
            if (candidate == null) {
                continue;
            }
            // Static methods need no receiver object.
            return new BshMethod(candidate, null /* object */);
        }
        return null;
    }
}
public class ServerAttribute { /** * Do the applyChange with enforced creation of commands that are sent to the client */ protected void verbosely ( final Runnable applyChange ) { } }
boolean temp = notifyClient ; notifyClient = true ; try { applyChange . run ( ) ; } finally { notifyClient = temp ; }
public class JsGeometrySplitService { /** * Register a { @ link GeometrySplitStartHandler } to listen to events that signal the splitting process has started . * @ param handler * The { @ link GeometrySplitStartHandler } to add as listener . * @ return The registration of the handler . */ public JsHandlerRegistration addGeometrySplitStartHandler ( final GeometrySplitStartHandler handler ) { } }
org . geomajas . plugin . editing . client . split . event . GeometrySplitStartHandler h ; h = new org . geomajas . plugin . editing . client . split . event . GeometrySplitStartHandler ( ) { public void onGeometrySplitStart ( GeometrySplitStartEvent event ) { Geometry geometry = event . getGeometry ( ) ; org . geomajas . plugin . editing . jsapi . client . split . event . GeometrySplitStartEvent e ; e = new org . geomajas . plugin . editing . jsapi . client . split . event . GeometrySplitStartEvent ( geometry ) ; handler . onGeometrySplitStart ( e ) ; } } ; HandlerRegistration registration = delegate . addGeometrySplitStartHandler ( h ) ; return new JsHandlerRegistration ( new HandlerRegistration [ ] { registration } ) ;
public class LazyArray { /** * protected LazyArray ( LazyNode root , char [ ] source ) { * super ( root , source , null ) ; */ protected String serializeElementToString ( ) { } }
StringBuilder buf = new StringBuilder ( ) ; buf . append ( "[" ) ; LazyNode pointer = root . child ; boolean first = true ; while ( pointer != null ) { if ( first ) { first = false ; } else { buf . append ( "," ) ; } if ( pointer . type == LazyNode . OBJECT ) { buf . append ( new LazyObject ( pointer ) . toString ( ) ) ; } else if ( pointer . type == LazyNode . ARRAY ) { buf . append ( new LazyArray ( pointer ) . toString ( ) ) ; } else if ( pointer . type == LazyNode . VALUE_STRING ) { buf . append ( "\"" ) ; buf . append ( pointer . getStringValue ( ) ) ; buf . append ( "\"" ) ; } else if ( pointer . type == LazyNode . VALUE_ESTRING ) { buf . append ( "\"" ) ; buf . append ( pointer . getRawStringValue ( ) ) ; buf . append ( "\"" ) ; } else if ( pointer . type == LazyNode . VALUE_TRUE ) { buf . append ( "true" ) ; } else if ( pointer . type == LazyNode . VALUE_FALSE ) { buf . append ( "false" ) ; } else if ( pointer . type == LazyNode . VALUE_NULL ) { buf . append ( "null" ) ; } else { buf . append ( pointer . getStringValue ( ) ) ; } pointer = pointer . next ; } buf . append ( "]" ) ; return buf . toString ( ) ;
public class clusterinstance { /** * Use this API to add clusterinstance . */ public static base_response add ( nitro_service client , clusterinstance resource ) throws Exception { } }
clusterinstance addresource = new clusterinstance ( ) ; addresource . clid = resource . clid ; addresource . deadinterval = resource . deadinterval ; addresource . hellointerval = resource . hellointerval ; addresource . preemption = resource . preemption ; return addresource . add_resource ( client ) ;