signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ElementsExceptionsFactory { /** * Constructs and initializes a new { @ link ResourceNotFoundException } with the given { @ link Throwable cause } * and { @ link String message } formatted with the given { @ link Object [ ] arguments } . * @ param cause { @ link Throwable } identified as the reason this { @ link ResourceNotFoundException } was thrown . * @ param message { @ link String } describing the { @ link ResourceNotFoundException exception } . * @ param args { @ link Object [ ] arguments } used to replace format placeholders in the { @ link String message } . * @ return a new { @ link ResourceNotFoundException } with the given { @ link Throwable cause } and { @ link String message } . * @ see org . cp . elements . lang . ResourceNotFoundException */ public static ResourceNotFoundException newResourceNotFoundException ( Throwable cause , String message , Object ... args ) { } }
return new ResourceNotFoundException ( format ( message , args ) , cause ) ;
public class AbstractConfigFile { /** * Stores contents if url is a file otherwise throws IllegalArgumentException * @ throws IOException */ public void store ( ) throws IOException { } }
try { File file = new File ( url . toURI ( ) ) ; storeAs ( file ) ; } catch ( URISyntaxException ex ) { throw new RuntimeException ( ex ) ; }
public class TransformerImpl {

  /**
   * Reset parameters that the user specified for the transformation.
   * Called during transformer.reset() after we have cleared the
   * variable stack. We need to make sure that user params are
   * reset so that the transformer object can be reused.
   */
  private void resetUserParameters() {
    try {
      // Nothing to do when no user parameters were ever recorded.
      if (null == m_userParams)
        return;

      int n = m_userParams.size();
      // Iterate from the end toward the front, re-applying each recorded Arg via setParameter.
      for (int i = n - 1; i >= 0; i--) {
        Arg arg = (Arg) m_userParams.elementAt(i);
        QName name = arg.getQName();
        // The first string might be the namespace, or it might be
        // the local name, if the namespace is null.
        String s1 = name.getNamespace();
        String s2 = name.getLocalPart();
        setParameter(s2, s1, arg.getVal().object());
      }
    } catch (java.util.NoSuchElementException nsee) {
      // Should throw some sort of an error.
      // NOTE(review): exception is deliberately swallowed here (pre-existing behavior);
      // reset proceeds best-effort rather than failing the whole transformer reuse.
    }
  }
}
public class MOP4 { /** * Evaluate ( ) method */ public void evaluate ( DoubleSolution solution ) { } }
double [ ] f = new double [ getNumberOfObjectives ( ) ] ; double g = this . evalG ( solution ) ; f [ 0 ] = ( 1 + g ) * solution . getVariableValue ( 0 ) ; f [ 1 ] = ( 1 + g ) * ( 1 - Math . sqrt ( solution . getVariableValue ( 0 ) ) * Math . pow ( Math . cos ( solution . getVariableValue ( 0 ) * Math . PI * 2 ) , 2 ) ) ; solution . setObjective ( 0 , f [ 0 ] ) ; solution . setObjective ( 1 , f [ 1 ] ) ;
public class Solver {

  /**
   * Runs a grid search for the maximum value of a univariate function.
   * NOTE(review): the original param/return doc said "minimize"/"minimum", but the code
   * below keeps the LARGEST fn value (likelihood &gt; likMax), so this brackets a maximum —
   * confirm the intended wording with the author.
   *
   * @param fn the likelihood function to evaluate on the grid
   * @param start lower bound of the interval to search
   * @param end upper bound of the interval to search
   * @param step grid step size
   * @return an Interval bracketing the maximizing abscissa
   */
  static Interval gridSearch(UnivariateFunction fn, double start, double end, double step) {
    double lowMax = start; // lower bound on interval surrounding alphaMax
    double alphaMax = start - step; // sentinel below start: "no maximum recorded yet"
    double likMax = 0.0;
    double lastAlpha = start;
    double alpha = start;
    while (alpha < end) {
      double likelihood = fn.value(alpha);
      // First evaluation always wins (alphaMax < start); afterwards require strict improvement.
      if (alphaMax < start || likelihood > likMax) {
        lowMax = lastAlpha;
        alphaMax = alpha;
        likMax = likelihood;
      }
      lastAlpha = alpha;
      alpha += step;
    }
    // make sure we've checked the rightmost endpoint (won't happen if
    // end - start is not an integer multiple of step, because of roundoff
    // errors, etc)
    double likelihood = fn.value(end);
    if (likelihood > likMax) {
      lowMax = lastAlpha;
      alphaMax = end;
      likMax = likelihood;
    }
    // Clamp the upper bracket to end so the returned interval stays inside [start, end].
    return new Interval(lowMax, Math.min(end, alphaMax + step));
  }
}
public class Props { /** * load this Prop Object from a @ Properties formatted InputStream * @ param inputStream inputStream for loading Properties Object * @ throws IOException read exception */ private void loadFrom ( final InputStream inputStream ) throws IOException { } }
final Properties properties = new Properties ( ) ; properties . load ( inputStream ) ; this . put ( properties ) ;
public class Field { /** * Returns the list of user defined attribute names . * @ return the list of user defined attribute names , if there are none it returns an empty set . */ public Set < String > getAttributeNames ( ) { } }
if ( attributes == null ) { return Collections . emptySet ( ) ; } else { return Collections . unmodifiableSet ( attributes . keySet ( ) ) ; }
public class HammerTime { /** * Change the initial settings of hammer Gwt . * @ param option { @ link org . geomajas . hammergwt . client . option . GestureOption } * @ param value T look at { @ link org . geomajas . hammergwt . client . option . GestureOptions } * interface for all possible types * @ param < T > * @ since 1.0.0 */ @ Api public < T > void setOption ( GestureOption < T > option , T value ) { } }
if ( value == null ) { throw new IllegalArgumentException ( "Null value passed." ) ; } if ( value instanceof Boolean ) { setOption ( this , ( Boolean ) value , option . getName ( ) ) ; } else if ( value instanceof Integer ) { setOption ( this , ( Integer ) value , option . getName ( ) ) ; } else if ( value instanceof Double ) { setOption ( this , ( Double ) value , option . getName ( ) ) ; } else if ( value instanceof String ) { setOption ( this , String . valueOf ( value ) , option . getName ( ) ) ; }
public class AmazonMTurkClient { /** * The < code > CreateHIT < / code > operation creates a new Human Intelligence Task ( HIT ) . The new HIT is made available * for Workers to find and accept on the Amazon Mechanical Turk website . * This operation allows you to specify a new HIT by passing in values for the properties of the HIT , such as its * title , reward amount and number of assignments . When you pass these values to < code > CreateHIT < / code > , a new HIT * is created for you , with a new < code > HITTypeID < / code > . The HITTypeID can be used to create additional HITs in the * future without needing to specify common parameters such as the title , description and reward amount each time . * An alternative way to create HITs is to first generate a HITTypeID using the < code > CreateHITType < / code > operation * and then call the < code > CreateHITWithHITType < / code > operation . This is the recommended best practice for * Requesters who are creating large numbers of HITs . * CreateHIT also supports several ways to provide question data : by providing a value for the < code > Question < / code > * parameter that fully specifies the contents of the HIT , or by providing a < code > HitLayoutId < / code > and associated * < code > HitLayoutParameters < / code > . * < note > * If a HIT is created with 10 or more maximum assignments , there is an additional fee . For more information , see < a * href = " https : / / requester . mturk . com / pricing " > Amazon Mechanical Turk Pricing < / a > . * < / note > * @ param createHITRequest * @ return Result of the CreateHIT operation returned by the service . * @ throws ServiceException * Amazon Mechanical Turk is temporarily unable to process your request . Try your call again . * @ throws RequestErrorException * Your request is invalid . * @ sample AmazonMTurk . CreateHIT * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / mturk - requester - 2017-01-17 / CreateHIT " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateHITResult createHIT ( CreateHITRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateHIT ( request ) ;
public class Grid { /** * Adds a set of coordinates , subsequent call to { @ link # getIndicesContacts ( ) } will produce the * contacts , i . e . the set of points within distance cutoff . * The bounds calculated elsewhere can be passed , or if null they are computed . * Subsequent calls to method { @ link # getAtomContacts ( ) } will produce a NullPointerException * since this only adds coordinates and no atom information . * @ param atoms * @ param bounds */ public void addCoords ( Point3d [ ] atoms , BoundingBox bounds ) { } }
this . iAtoms = atoms ; this . iAtomObjects = null ; if ( bounds != null ) { this . ibounds = bounds ; } else { this . ibounds = new BoundingBox ( iAtoms ) ; } this . jAtoms = null ; this . jAtomObjects = null ; this . jbounds = null ; fillGrid ( ) ;
public class ArrayUtils {

  /**
   * Removes the element at the specified index from the array and returns a new array containing
   * the remaining elements. If <tt>index</tt> is invalid, returns <tt>array</tt> unchanged;
   * a null input yields null. Uses reflection to determine the runtime component type of the
   * array and returns an array of that same type.
   *
   * @param array the source array; may be null
   * @param index the index of the element to remove
   * @return a new array one element shorter, or the original array/null as described above
   */
  public static Object[] removeAt(Object[] array, int index) {
    if (array == null) {
      return null;
    }
    if (index < 0 || index >= array.length) {
      return array;
    }
    // Bug fix: use the array's declared component type instead of array[0].getClass(),
    // which threw NullPointerException when element 0 was null and produced a too-narrow
    // array type when element 0 was a subtype of the component type.
    Object[] retVal =
        (Object[]) Array.newInstance(array.getClass().getComponentType(), array.length - 1);
    // Copy the two halves around the removed index in bulk.
    System.arraycopy(array, 0, retVal, 0, index);
    System.arraycopy(array, index + 1, retVal, index, array.length - index - 1);
    return retVal;
  }
}
public class ExportHandler {

  /**
   * Takes the output and transforms it into a csv file.
   *
   * @param out Output stream the CSV is written to.
   * @param rows Rows of data from reporting-core; each row maps column name to value.
   * @param columns Columns to list on report; when null, the first row's keys become the header.
   */
  public void generateCsv(OutputStream out, List<Map<String, Object>> rows,
      List<ColumnDef> columns) {
    ICsvMapWriter csvWriter = null;
    try {
      csvWriter =
          new CsvMapWriter(new OutputStreamWriter(out), CsvPreference.STANDARD_PREFERENCE);
      // the header elements are used to map the bean values to each column (names must match)
      String[] header = new String[] {};
      CellProcessor[] processors = new CellProcessor[] {};
      if (columns != null) {
        // Explicit column definitions: header comes from each ColumnDef's name.
        header = (String[]) CollectionUtils.collect(columns, new Transformer() {
          @Override
          public Object transform(Object input) {
            ColumnDef column = (ColumnDef) input;
            return column.getName();
          }
        }).toArray(new String[0]);
        // One Optional processor per column: no validation or conversion applied.
        processors = (CellProcessor[]) CollectionUtils.collect(columns, new Transformer() {
          @Override
          public Object transform(Object input) {
            return new Optional();
          }
        }).toArray(new CellProcessor[0]);
      } else if (rows.size() > 0) {
        // No column definitions: derive the header from the first row's key set.
        // NOTE(review): rows is dereferenced here (and above via rows.size()) before the
        // null check further down — NPE risk when rows is null and columns is null.
        header = new ArrayList<String>(rows.get(0).keySet()).toArray(new String[0]);
        processors =
            (CellProcessor[]) CollectionUtils.collect(rows.get(0).keySet(), new Transformer() {
          @Override
          public Object transform(Object input) {
            return new Optional();
          }
        }).toArray(new CellProcessor[0]);
      }
      if (header.length > 0)
        csvWriter.writeHeader(header);
      if (rows != null)
        for (Map<String, Object> row : rows) {
          csvWriter.write(row, header, processors);
        }
    } catch (IOException e) {
      e.printStackTrace(); // To change body of catch statement use File | Settings | File Templates.
    } finally {
      // Always release the writer; closing also flushes buffered output.
      if (csvWriter != null) {
        try {
          csvWriter.close();
        } catch (IOException e) {
          e.printStackTrace(); // To change body of catch statement use File | Settings | File Templates.
        }
      }
    }
  }
}
public class TurfMisc {

  /**
   * Takes a line, a start {@link Point}, and a stop point and returns the line in between those
   * points.
   *
   * @param startPt used for calculating the lineSlice
   * @param stopPt used for calculating the lineSlice
   * @param line geometry that should be sliced
   * @return a sliced {@link LineString}
   * @see <a href="http://turfjs.org/docs/#lineslice">Turf Line slice documentation</a>
   * @since 1.2.0
   */
  @NonNull
  public static LineString lineSlice(@NonNull Point startPt, @NonNull Point stopPt,
      @NonNull LineString line) {
    List<Point> coords = line.coordinates();
    if (coords.size() < 2) {
      throw new TurfException("Turf lineSlice requires a LineString made up of at least 2 "
          + "coordinates.");
    } else if (startPt.equals(stopPt)) {
      throw new TurfException("Start and stop points in Turf lineSlice cannot equal each other.");
    }
    // Snap both endpoints to their nearest vertices on the line.
    Feature startVertex = nearestPointOnLine(startPt, coords);
    Feature stopVertex = nearestPointOnLine(stopPt, coords);
    // Order the two snapped vertices by their index along the line so slicing runs forward.
    List<Feature> ends = new ArrayList<>();
    if ((int) startVertex.getNumberProperty(INDEX_KEY)
        <= (int) stopVertex.getNumberProperty(INDEX_KEY)) {
      ends.add(startVertex);
      ends.add(stopVertex);
    } else {
      ends.add(stopVertex);
      ends.add(startVertex);
    }
    // Build the slice: first snapped point, all interior vertices, then the last snapped point.
    List<Point> points = new ArrayList<>();
    points.add((Point) ends.get(0).geometry());
    for (int i = (int) ends.get(0).getNumberProperty(INDEX_KEY) + 1;
        i < (int) ends.get(1).getNumberProperty(INDEX_KEY) + 1; i++) {
      points.add(coords.get(i));
    }
    points.add((Point) ends.get(1).geometry());
    return LineString.fromLngLats(points);
  }
}
public class XMLUtils { /** * The a " value " from an XML file using XPath . * Uses the system encoding for reading the file . * @ param xpath The XPath expression to select the value . * @ param file The file to read . * @ return The data value . An empty { @ link String } is returned when the expression does not evaluate * to anything in the document . * @ throws IOException Error reading from the file . * @ throws SAXException Error parsing the XML file data e . g . badly formed XML . * @ throws XPathExpressionException Invalid XPath expression . * @ since 2.0 */ public static @ Nonnull String getValue ( @ Nonnull String xpath , @ Nonnull File file ) throws IOException , SAXException , XPathExpressionException { } }
return getValue ( xpath , file , Charset . defaultCharset ( ) . toString ( ) ) ;
public class ShapeProcessor { /** * Method resize the given { @ code Shape } with respect of its original aspect ratio but independent of its original size . * Note : Out of performance reasons the given object will directly be manipulated . * @ param shape the shape to scale . * @ param size the size of the new shape . * @ return the scaled { @ code Shape } instance . */ public static < S extends Shape > S resize ( final S shape , final double size ) { } }
return resize ( shape , size , size ) ;
public class StorageSnippets {

  /**
   * Example snippet: updates the content type of two blobs in a single batch call.
   *
   * @param bucketName the bucket holding both blobs
   * @param blobName1 name of the first blob
   * @param blobName2 name of the second blob
   * @return the updated blobs
   */
  public List<Blob> batchUpdate(String bucketName, String blobName1, String blobName2) {
    // [START batchUpdate]
    Blob blobA = storage.get(bucketName, blobName1);
    Blob blobB = storage.get(bucketName, blobName2);
    List<Blob> updatedBlobs =
        storage.update(
            blobA.toBuilder().setContentType("text/plain").build(),
            blobB.toBuilder().setContentType("text/plain").build());
    // [END batchUpdate]
    return updatedBlobs;
  }
}
public class SipPhone { /** * Gets the subscription object ( s ) associated with the given dialog ID . The * returned object ( s ) contains subscription state , received requests ( NOTIFY ' s ) and * REFER / SUBSCRIBE responses , etc . for outbound REFER subscription ( s ) associated with the dialog . * @ param dialogId the dialog ID of interest * @ return ReferSubscriber object ( s ) associated with the dialog , or an empty list if there was * never a refer subscription associated with that dialog . */ public List < ReferSubscriber > getRefererInfoByDialog ( String dialogId ) { } }
List < ReferSubscriber > list = new ArrayList < > ( ) ; synchronized ( refererList ) { for ( ReferSubscriber s : refererList ) { if ( s . getDialogId ( ) . equals ( dialogId ) ) { list . add ( s ) ; } } } return list ;
public class ApplicationsImpl { /** * Lists all of the applications available in the specified account . * This operation returns only applications and versions that are available for use on compute nodes ; that is , that can be used in an application package reference . For administrator information about applications and versions that are not yet available to compute nodes , use the Azure portal or the Azure Resource Manager API . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; ApplicationSummary & gt ; object */ public Observable < Page < ApplicationSummary > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponseWithHeaders < Page < ApplicationSummary > , ApplicationListHeaders > , Page < ApplicationSummary > > ( ) { @ Override public Page < ApplicationSummary > call ( ServiceResponseWithHeaders < Page < ApplicationSummary > , ApplicationListHeaders > response ) { return response . body ( ) ; } } ) ;
public class SessionFacade { /** * Returns names of current attributes as a list . * @ return names of current attributes as a list . */ public String [ ] names ( ) { } }
List < String > namesList = new ArrayList < > ( ) ; Enumeration names = RequestContext . getHttpRequest ( ) . getSession ( true ) . getAttributeNames ( ) ; while ( names . hasMoreElements ( ) ) { Object o = names . nextElement ( ) ; namesList . add ( o . toString ( ) ) ; } return namesList . toArray ( new String [ namesList . size ( ) ] ) ;
public class DruidQuery {

  /**
   * Return this query as a Scan query, or null if this query is not compatible with Scan.
   *
   * @return query or null
   */
  @Nullable
  public ScanQuery toScanQuery() {
    if (grouping != null) {
      // Scan cannot GROUP BY.
      return null;
    }
    // Only a single ORDER BY column is tolerated, and only when it is the time column.
    if (limitSpec != null
        && (limitSpec.getColumns().size() > 1
            || (limitSpec.getColumns().size() == 1
                && !Iterables.getOnlyElement(limitSpec.getColumns()).getDimension()
                    .equals(ColumnHolder.TIME_COLUMN_NAME)))) {
      // Scan cannot ORDER BY non-time columns.
      return null;
    }
    if (outputRowSignature.getRowOrder().isEmpty()) {
      // Should never do a scan query without any columns that we're interested in. This is
      // probably a planner bug.
      throw new ISE("WTF?! Attempting to convert to Scan query without any columns?");
    }
    final Filtration filtration = Filtration.create(filter).optimize(sourceQuerySignature);
    // DefaultLimitSpec (which we use to "remember" limits) is int typed, and
    // Integer.MAX_VALUE means "no limit".
    final long scanLimit =
        limitSpec == null || limitSpec.getLimit() == Integer.MAX_VALUE
            ? 0L
            : (long) limitSpec.getLimit();
    // Translate the (at most one, time-only) ORDER BY direction into a Scan ordering.
    ScanQuery.Order order;
    if (limitSpec == null || limitSpec.getColumns().size() == 0) {
      order = ScanQuery.Order.NONE;
    } else if (limitSpec.getColumns().get(0).getDirection()
        == OrderByColumnSpec.Direction.ASCENDING) {
      order = ScanQuery.Order.ASCENDING;
    } else {
      order = ScanQuery.Order.DESCENDING;
    }
    return new ScanQuery(
        dataSource,
        filtration.getQuerySegmentSpec(),
        getVirtualColumns(true),
        ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST,
        0,
        scanLimit,
        order, // Will default to "none"
        filtration.getDimFilter(),
        // Deduplicate and sort the projected column names deterministically.
        Ordering.natural().sortedCopy(ImmutableSet.copyOf(outputRowSignature.getRowOrder())),
        false,
        ImmutableSortedMap.copyOf(plannerContext.getQueryContext()));
  }
}
public class SystemInputDocWriter {

  /**
   * Writes the given variable input value definition as a VALUE element.
   *
   * @param value the value definition to serialize
   */
  protected void writeValue(VarValueDef value) {
    ConditionWriter conditionWriter = new ConditionWriter(value.getCondition());
    xmlWriter_
        .element(VALUE_TAG)
        .attribute(NAME_ATR, String.valueOf(value.getName()))
        // Flag failure/once values with their respective boolean attributes.
        .attributeIf(value.getType() == FAILURE, FAILURE_ATR, "true")
        .attributeIf(value.getType() == ONCE, ONCE_ATR, "true")
        .attributeIf(WHEN_ATR, conditionWriter.getWhenAttribute())
        .attributeIf(PROPERTY_ATR, propertyList(toStream(value.getProperties())))
        // Emit child content only when annotations or a nested when-element exist.
        .contentIf(value.getAnnotationCount() > 0 || conditionWriter.hasWhenElement(), () -> {
          writeAnnotations(value);
          conditionWriter.writeWhenElement();
        })
        .write();
  }
}
public class ObjectQueryService {

  /**
   * Converts a given list of LDAP entries to a {@link Map} of Guacamole objects stored by their
   * identifiers. Entries the mapper cannot convert (mapper returns null) are skipped; when two
   * entries map to the same identifier, the first wins and a warning is logged.
   *
   * @param <ObjectType> The type of object to store within the {@link Map}.
   * @param entries A list of LDAP entries to convert to Guacamole objects.
   * @param mapper A mapping function converting an LDAP entry to its corresponding Guacamole
   *        object, or null when the entry cannot be converted.
   * @return A new {@link Map} containing the converted objects keyed by identifier.
   */
  public <ObjectType extends Identifiable> Map<String, ObjectType> asMap(
      List<LDAPEntry> entries, Function<LDAPEntry, ObjectType> mapper) {

    Map<String, ObjectType> objects = new HashMap<>(entries.size());
    for (LDAPEntry entry : entries) {

      ObjectType converted = mapper.apply(entry);
      if (converted == null) {
        logger.debug("Ignoring object \"{}\".", entry.getDN());
        continue;
      }

      // First mapping for an identifier wins; duplicates are logged and dropped.
      String identifier = converted.getIdentifier();
      ObjectType previous = objects.putIfAbsent(identifier, converted);
      if (previous != null)
        logger.warn("Multiple objects ambiguously map to the "
            + "same identifier (\"{}\"). Ignoring \"{}\" as "
            + "a duplicate.", identifier, entry.getDN());
    }
    return objects;
  }
}
public class QueryBuilder {

  /**
   * Perform a full outer join between the already defined source and the "__ALL_NODES" table
   * using the supplied alias.
   *
   * @param alias the alias for the "__ALL_NODES" table; may not be null
   * @return the component that must be used to complete the join specification; never null
   */
  public JoinClause fullOuterJoinAllNodesAs(String alias) {
    // Expect there to be a source already...
    String aliasedTable = AllNodes.ALL_NODES_NAME + " AS " + alias;
    return new JoinClause(namedSelector(aliasedTable), JoinType.FULL_OUTER);
  }
}
public class RuntimeClock { /** * Register event handlers for the given event class . * @ param eventClass Event type to handle . Must be derived from Time . * @ param handlers One or many event handlers that can process given event type . * @ param < T > Event type - must be derived from class Time . ( i . e . contain a timestamp ) . */ @ SuppressWarnings ( "checkstyle:hiddenfield" ) private < T extends Time > void subscribe ( final Class < T > eventClass , final Set < EventHandler < T > > handlers ) { } }
for ( final EventHandler < T > handler : handlers ) { LOG . log ( Level . FINEST , "Subscribe: event {0} handler {1}" , new Object [ ] { eventClass . getName ( ) , handler } ) ; this . handlers . subscribe ( eventClass , handler ) ; }
public class RegularPactTask { /** * The main work method . */ @ Override public void invoke ( ) throws Exception { } }
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( formatLogString ( "Start task code." ) ) ; } // whatever happens in this scope , make sure that the local strategies are cleaned up ! // note that the initialization of the local strategies is in the try - finally block as well , // so that the thread that creates them catches its own errors that may happen in that process . // this is especially important , since there may be asynchronous closes ( such as through canceling ) . try { // initialize the serializers ( one per channel ) of the record writers initOutputWriters ( this . eventualOutputs ) ; // initialize the remaining data structures on the input and trigger the local processing // the local processing includes building the dams / caches try { int numInputs = driver . getNumberOfInputs ( ) ; int numBroadcastInputs = this . config . getNumBroadcastInputs ( ) ; initInputsSerializersAndComparators ( numInputs ) ; initBroadcastInputsSerializers ( numBroadcastInputs ) ; // set the iterative status for inputs and broadcast inputs { List < Integer > iterativeInputs = new ArrayList < Integer > ( ) ; for ( int i = 0 ; i < numInputs ; i ++ ) { final int numberOfEventsUntilInterrupt = getTaskConfig ( ) . getNumberOfEventsUntilInterruptInIterativeGate ( i ) ; if ( numberOfEventsUntilInterrupt < 0 ) { throw new IllegalArgumentException ( ) ; } else if ( numberOfEventsUntilInterrupt > 0 ) { this . inputReaders [ i ] . setIterative ( numberOfEventsUntilInterrupt ) ; iterativeInputs . add ( i ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( formatLogString ( "Input [" + i + "] reads in supersteps with [" + + numberOfEventsUntilInterrupt + "] event(s) till next superstep." ) ) ; } } } this . iterativeInputs = asArray ( iterativeInputs ) ; } { List < Integer > iterativeBcInputs = new ArrayList < Integer > ( ) ; for ( int i = 0 ; i < numBroadcastInputs ; i ++ ) { final int numberOfEventsUntilInterrupt = getTaskConfig ( ) . 
getNumberOfEventsUntilInterruptInIterativeBroadcastGate ( i ) ; if ( numberOfEventsUntilInterrupt < 0 ) { throw new IllegalArgumentException ( ) ; } else if ( numberOfEventsUntilInterrupt > 0 ) { this . broadcastInputReaders [ i ] . setIterative ( numberOfEventsUntilInterrupt ) ; iterativeBcInputs . add ( i ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( formatLogString ( "Broadcast input [" + i + "] reads in supersteps with [" + + numberOfEventsUntilInterrupt + "] event(s) till next superstep." ) ) ; } } } this . iterativeBroadcastInputs = asArray ( iterativeBcInputs ) ; } initLocalStrategies ( numInputs ) ; } catch ( Exception e ) { throw new RuntimeException ( "Initializing the input processing failed" + e . getMessage ( ) == null ? "." : ": " + e . getMessage ( ) , e ) ; } if ( ! this . running ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( formatLogString ( "Task cancelled before task code was started." ) ) ; } return ; } // pre main - function initialization initialize ( ) ; // read the broadcast variables for ( int i = 0 ; i < this . config . getNumBroadcastInputs ( ) ; i ++ ) { final String name = this . config . getBroadcastInputName ( i ) ; readAndSetBroadcastInput ( i , name , this . runtimeUdfContext ) ; } // the work goes here run ( ) ; } finally { // clean up in any case ! closeLocalStrategiesAndCaches ( ) ; } if ( this . running ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( formatLogString ( "Finished task code." ) ) ; } } else { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( formatLogString ( "Task code cancelled." ) ) ; } }
public class OkHttpChannelBuilder { /** * For secure connection , provides a ConnectionSpec to specify Cipher suite and * TLS versions . * < p > By default a modern , HTTP / 2 - compatible spec will be used . * < p > This method is only used when building a secure connection . For plaintext * connection , use { @ link # usePlaintext ( ) } instead . * @ throws IllegalArgumentException * If { @ code connectionSpec } is not with TLS */ public final OkHttpChannelBuilder connectionSpec ( com . squareup . okhttp . ConnectionSpec connectionSpec ) { } }
Preconditions . checkArgument ( connectionSpec . isTls ( ) , "plaintext ConnectionSpec is not accepted" ) ; this . connectionSpec = Utils . convertSpec ( connectionSpec ) ; return this ;
public class JPAPuId { /** * Compute and cache the current hashCode . */ private void reComputeHashCode ( ) { } }
ivCurHashCode = ( ivAppName != null ? ivAppName . hashCode ( ) : 0 ) // d437828 + ( ivModJarName != null ? ivModJarName . hashCode ( ) : 0 ) + ( ivPuName != null ? ivPuName . hashCode ( ) : 0 ) ;
public class RouteClient { /** * Creates a Route resource in the specified project using the data included in the request . * < p > Sample code : * < pre > < code > * try ( RouteClient routeClient = RouteClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * Route routeResource = Route . newBuilder ( ) . build ( ) ; * Operation response = routeClient . insertRoute ( project . toString ( ) , routeResource ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ param routeResource Represents a Route resource . A route specifies how certain packets should * be handled by the network . Routes are associated with instances by tags and the set of * routes for a particular instance is called its routing table . * < p > For each packet leaving an instance , the system searches that instance ' s routing table * for a single best matching route . Routes match packets by destination IP address , * preferring smaller or more specific ranges over larger ones . If there is a tie , the system * selects the route with the smallest priority value . If there is still a tie , it uses the * layer three and four packet headers to select just one of the remaining matching routes . * The packet is then forwarded as specified by the nextHop field of the winning route - * either to another instance destination , an instance gateway , or a Google Compute * Engine - operated gateway . * < p > Packets that do not match any route in the sending instance ' s routing table are dropped . * ( = = resource _ for beta . routes = = ) ( = = resource _ for v1 . routes = = ) * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation insertRoute ( String project , Route routeResource ) { } }
InsertRouteHttpRequest request = InsertRouteHttpRequest . newBuilder ( ) . setProject ( project ) . setRouteResource ( routeResource ) . build ( ) ; return insertRoute ( request ) ;
public class Collectors { /** * Returns a concurrent { @ code Collector } implementing a " group by " * operation on input elements of type { @ code T } , grouping elements * according to a classification function . * < p > This is a { @ link Collector . Characteristics # CONCURRENT concurrent } and * { @ link Collector . Characteristics # UNORDERED unordered } Collector . * < p > The classification function maps elements to some key type { @ code K } . * The collector produces a { @ code ConcurrentMap < K , List < T > > } whose keys are the * values resulting from applying the classification function to the input * elements , and whose corresponding values are { @ code List } s containing the * input elements which map to the associated key under the classification * function . * < p > There are no guarantees on the type , mutability , or serializability * of the { @ code Map } or { @ code List } objects returned , or of the * thread - safety of the { @ code List } objects returned . * @ implSpec * This produces a result similar to : * < pre > { @ code * groupingByConcurrent ( classifier , toList ( ) ) ; * } < / pre > * @ param < T > the type of the input elements * @ param < K > the type of the keys * @ param classifier a classifier function mapping input elements to keys * @ return a concurrent , unordered { @ code Collector } implementing the group - by operation * @ see # groupingBy ( Function ) * @ see # groupingByConcurrent ( Function , Collector ) * @ see # groupingByConcurrent ( Function , Supplier , Collector ) */ public static < T , K > Collector < T , ? , ConcurrentMap < K , List < T > > > groupingByConcurrent ( Function < ? super T , ? extends K > classifier ) { } }
return groupingByConcurrent ( classifier , ConcurrentHashMap :: new , toList ( ) ) ;
public class InApplicationMonitor { /** * < p > Increase the specified counter by a variable amount . < / p > * @ param name * the name of the { @ code Counter } to increase * @ param increment * the added to add */ public void incrementCounter ( String name , int increment ) { } }
if ( monitorActive ) { String escapedName = keyHandler . handle ( name ) ; for ( MonitorPlugin p : getPlugins ( ) ) { p . incrementCounter ( escapedName , increment ) ; } }
public class CPInstancePersistenceImpl { /** * Returns the last cp instance in the ordered set where CPDefinitionId = & # 63 ; . * @ param CPDefinitionId the cp definition ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp instance * @ throws NoSuchCPInstanceException if a matching cp instance could not be found */ @ Override public CPInstance findByCPDefinitionId_Last ( long CPDefinitionId , OrderByComparator < CPInstance > orderByComparator ) throws NoSuchCPInstanceException { } }
CPInstance cpInstance = fetchByCPDefinitionId_Last ( CPDefinitionId , orderByComparator ) ; if ( cpInstance != null ) { return cpInstance ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPDefinitionId=" ) ; msg . append ( CPDefinitionId ) ; msg . append ( "}" ) ; throw new NoSuchCPInstanceException ( msg . toString ( ) ) ;
public class IntegralImageOps { /** * Computes the value of a block inside an integral image without bounds checking . The block is * defined as follows : x0 & lt ; x & le ; x1 and y0 & lt ; y & le ; y1. * @ param integral Integral image . * @ param x0 Lower bound of the block . Exclusive . * @ param y0 Lower bound of the block . Exclusive . * @ param x1 Upper bound of the block . Inclusive . * @ param y1 Upper bound of the block . Inclusive . * @ return Value inside the block . */ public static long block_unsafe ( GrayS64 integral , int x0 , int y0 , int x1 , int y1 ) { } }
return ImplIntegralImageOps . block_unsafe ( integral , x0 , y0 , x1 , y1 ) ;
public class MeetmeRoom { /** * returns true if the channel was added to the list of channels in this * meetme . if the channel is already in the meetme , returns false */ synchronized public boolean addChannel ( final Channel channel ) { } }
boolean newChannel = false ; if ( ! this . channels . contains ( channel ) ) { this . channels . add ( channel ) ; this . channelCount ++ ; newChannel = true ; } else MeetmeRoom . logger . error ( "rejecting " + channel + " already in meetme." ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ return newChannel ;
public class Cluster {
  /**
   * Registers this node with Zookeeper on startup, retrying until it succeeds.
   * This retry logic is important in that a node which restarts before Zookeeper
   * detects the previous disconnect could prohibit the node from properly launching.
   *
   * @throws InterruptedException if the inter-retry sleep is interrupted
   * @throws IOException if the node info cannot be serialized or deserialized
   */
  private void joinCluster() throws InterruptedException, IOException {
    while (true) {
      NodeInfo myInfo;
      try {
        // Advertise a fresh node state tied to the current ZK session id.
        myInfo = new NodeInfo(NodeState.Fresh.toString(), zk.get().getSessionId());
      } catch (ZooKeeperConnectionException e) {
        throw ZKException.from(e);
      }
      byte[] encoded = JsonUtil.asJSONBytes(myInfo);
      // Ephemeral node: it disappears automatically when our session ends.
      if (ZKUtils.createEphemeral(zk, "/" + name + "/nodes/" + myNodeID, encoded)) {
        return;
      } else {
        // Creation failed, so a node with our id already exists. If it carries
        // our own session id, a previous attempt already registered us.
        Stat stat = new Stat();
        try {
          byte[] bytes = zk.get().getData("/" + name + "/nodes/" + myNodeID, false, stat);
          NodeInfo nodeInfo = JsonUtil.fromJSON(bytes, NodeInfo.class);
          if (nodeInfo.connectionID == zk.get().getSessionId()) {
            // As it turns out, our session is already registered!
            return;
          }
        } catch (ZooKeeperConnectionException e) {
          throw ZKException.from(e);
        } catch (KeeperException e) {
          throw ZKException.from(e);
        }
      }
      // Stale registration from another (possibly dying) session: wait for ZK
      // to expire it, then retry.
      LOG.warn("Unable to register with Zookeeper on launch. "
          + "Is {} already running on this host? Retrying in 1 second...", name);
      Thread.sleep(1000);
    }
  }
}
public class StatsAgent {
  /**
   * Produce PROCEDURE aggregation of PROCEDURE subselector.
   * Basically it leaves out the rows that were not labeled as "&lt;ALL&gt;".
   *
   * @param baseStats per-statement procedure statistics; expected to contain exactly one table
   * @return a single-table array containing only the "&lt;ALL&gt;" roll-up rows, or
   *     {@code baseStats} unchanged when it is null or not a single table
   */
  private VoltTable[] aggregateProcedureStats(VoltTable[] baseStats) {
    // Only a single-table result can be aggregated; anything else passes through.
    if (baseStats == null || baseStats.length != 1) {
      return baseStats;
    }
    // Result schema mirrors the per-statement stats, minus the STATEMENT column.
    VoltTable result = new VoltTable(
        new ColumnInfo("TIMESTAMP", VoltType.BIGINT),
        new ColumnInfo(VoltSystemProcedure.CNAME_HOST_ID, VoltSystemProcedure.CTYPE_ID),
        new ColumnInfo("HOSTNAME", VoltType.STRING),
        new ColumnInfo(VoltSystemProcedure.CNAME_SITE_ID, VoltSystemProcedure.CTYPE_ID),
        new ColumnInfo("PARTITION_ID", VoltType.INTEGER),
        new ColumnInfo("PROCEDURE", VoltType.STRING),
        new ColumnInfo("INVOCATIONS", VoltType.BIGINT),
        new ColumnInfo("TIMED_INVOCATIONS", VoltType.BIGINT),
        new ColumnInfo("MIN_EXECUTION_TIME", VoltType.BIGINT),
        new ColumnInfo("MAX_EXECUTION_TIME", VoltType.BIGINT),
        new ColumnInfo("AVG_EXECUTION_TIME", VoltType.BIGINT),
        new ColumnInfo("MIN_RESULT_SIZE", VoltType.INTEGER),
        new ColumnInfo("MAX_RESULT_SIZE", VoltType.INTEGER),
        new ColumnInfo("AVG_RESULT_SIZE", VoltType.INTEGER),
        new ColumnInfo("MIN_PARAMETER_SET_SIZE", VoltType.INTEGER),
        new ColumnInfo("MAX_PARAMETER_SET_SIZE", VoltType.INTEGER),
        new ColumnInfo("AVG_PARAMETER_SET_SIZE", VoltType.INTEGER),
        new ColumnInfo("ABORTS", VoltType.BIGINT),
        new ColumnInfo("FAILURES", VoltType.BIGINT),
        new ColumnInfo("TRANSACTIONAL", VoltType.TINYINT));
    baseStats[0].resetRowPosition();
    while (baseStats[0].advanceRow()) {
      // Keep only the roll-up rows labeled "<ALL>"; per-statement rows are dropped.
      if (baseStats[0].getString("STATEMENT").equalsIgnoreCase("<ALL>")) {
        result.addRow(
            baseStats[0].getLong("TIMESTAMP"),
            baseStats[0].getLong(VoltSystemProcedure.CNAME_HOST_ID),
            baseStats[0].getString("HOSTNAME"),
            baseStats[0].getLong(VoltSystemProcedure.CNAME_SITE_ID),
            baseStats[0].getLong("PARTITION_ID"),
            baseStats[0].getString("PROCEDURE"),
            baseStats[0].getLong("INVOCATIONS"),
            baseStats[0].getLong("TIMED_INVOCATIONS"),
            baseStats[0].getLong("MIN_EXECUTION_TIME"),
            baseStats[0].getLong("MAX_EXECUTION_TIME"),
            baseStats[0].getLong("AVG_EXECUTION_TIME"),
            baseStats[0].getLong("MIN_RESULT_SIZE"),
            baseStats[0].getLong("MAX_RESULT_SIZE"),
            baseStats[0].getLong("AVG_RESULT_SIZE"),
            baseStats[0].getLong("MIN_PARAMETER_SET_SIZE"),
            baseStats[0].getLong("MAX_PARAMETER_SET_SIZE"),
            baseStats[0].getLong("AVG_PARAMETER_SET_SIZE"),
            baseStats[0].getLong("ABORTS"),
            baseStats[0].getLong("FAILURES"),
            (byte) baseStats[0].getLong("TRANSACTIONAL"));
      }
    }
    return new VoltTable[] { result };
  }
}
public class ptp { /** * Use this API to update ptp . */ public static base_response update ( nitro_service client , ptp resource ) throws Exception { } }
ptp updateresource = new ptp ( ) ; updateresource . state = resource . state ; return updateresource . update_resource ( client ) ;
public class JsonLdApi { /** * Compaction Algorithm * http : / / json - ld . org / spec / latest / json - ld - api / # compaction - algorithm * @ param activeCtx * The Active Context * @ param activeProperty * The Active Property * @ param element * The current element * @ return The compacted JSON - LD object . * @ throws JsonLdError * If there was an error during compaction . */ public Object compact ( Context activeCtx , String activeProperty , Object element ) throws JsonLdError { } }
return compact ( activeCtx , activeProperty , element , JsonLdOptions . DEFAULT_COMPACT_ARRAYS ) ;
public class JmsManagedConnectionFactoryImpl { /** * ( non - Javadoc ) * @ see com . ibm . websphere . sib . api . jms . JmsManagedConnectionFactory # getReadAhead ( ) */ @ Override public String getReadAhead ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getReadAhead" ) ; String ra = jcaConnectionFactory . getReadAhead ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getReadAhead" , ra ) ; return ra ;
public class Validator {
  /**
   * Cleans the object key: trims whitespace, strips disallowed characters, and
   * truncates keys longer than {@code Constants.MAX_KEY_LENGTH} (recording an
   * error code/description when truncation occurs).
   *
   * @param name Name of the object key
   * @return The {@link ValidationResult} object containing the cleaned key,
   *     and the error code (if any)
   */
  ValidationResult cleanObjectKey(String name) {
    ValidationResult vr = new ValidationResult();
    name = name.trim();
    // Strip every disallowed character sequence from the key.
    for (String x : objectKeyCharsNotAllowed)
      name = name.replace(x, "");
    if (name.length() > Constants.MAX_KEY_LENGTH) {
      // NOTE(review): substring(0, MAX_KEY_LENGTH - 1) keeps one character FEWER
      // than the documented limit — confirm whether MAX_KEY_LENGTH was intended.
      name = name.substring(0, Constants.MAX_KEY_LENGTH - 1);
      vr.setErrorDesc(name.trim() + "... exceeds the limit of " + Constants.MAX_KEY_LENGTH + " characters. Trimmed");
      // 520 = key-too-long error code used by this validator.
      vr.setErrorCode(520);
    }
    vr.setObject(name.trim());
    return vr;
  }
}
public class SftpSubsystemChannel { /** * Send an extension message and return the response . This is for advanced * use only . * @ param request * String * @ param requestData * byte [ ] * @ return SftpMessage * @ throws SshException * @ throws SftpStatusException */ public SftpMessage sendExtensionMessage ( String request , byte [ ] requestData ) throws SshException , SftpStatusException { } }
try { UnsignedInteger32 id = nextRequestId ( ) ; Packet packet = createPacket ( ) ; packet . write ( SSH_FXP_EXTENDED ) ; packet . writeUINT32 ( id ) ; packet . writeString ( request ) ; sendMessage ( packet ) ; return getResponse ( id ) ; } catch ( IOException ex ) { throw new SshException ( SshException . INTERNAL_ERROR , ex ) ; }
public class SARLJvmModelInferrer {
  /**
   * Transform the uses of SARL capacities.
   *
   * <p>Resolving the calls to the capacities' functions is done in
   * {@link SARLReentrantTypeResolver}.
   *
   * <p>For each used capacity this generates (1) a private transient buffer field
   * holding a {@code ClearableReference<Skill>}, and (2) a private accessor method
   * that lazily fetches the skill and casts it to the capacity type.
   *
   * @param source the feature to transform.
   * @param container the target container of the transformation result.
   */
  protected void transform(SarlCapacityUses source, JvmGenericType container) {
    final GenerationContext context = getContext(container);
    if (context == null) {
      return;
    }
    for (final JvmTypeReference capacityType : source.getCapacities()) {
      final JvmType type = capacityType.getType();
      // Generate at most once per capacity identifier on this container.
      if (type instanceof JvmGenericType
          /*&& this.inheritanceHelper.isSubTypeOf(capacityType, Capacity.class, SarlCapacity.class)*/
          && !context.getGeneratedCapacityUseFields().contains(capacityType.getIdentifier())) {
        // Generate the buffer field
        final String fieldName = Utils.createNameForHiddenCapacityImplementationAttribute(capacityType.getIdentifier());
        final JvmField field = this.typesFactory.createJvmField();
        container.getMembers().add(field);
        field.setVisibility(JvmVisibility.PRIVATE);
        field.setSimpleName(fieldName);
        // Transient: the cached skill reference must not be serialized.
        field.setTransient(true);
        final JvmType clearableReferenceType = this.typeReferences.findDeclaredType(ClearableReference.class, container);
        final JvmTypeReference skillClearableReference = this.typeReferences.createTypeRef(
            clearableReferenceType,
            this.typeReferences.createTypeRef(this.typeReferences.findDeclaredType(Skill.class, container)));
        field.setType(skillClearableReference);
        this.associator.associatePrimary(source, field);
        addAnnotationSafe(field, Extension.class);
        field.getAnnotations().add(annotationClassRef(ImportedCapacityFeature.class,
            Collections.singletonList(capacityType)));
        appendGeneratedAnnotation(field, getContext(container));
        // Generate the calling function
        final String methodName = Utils.createNameForHiddenCapacityImplementationCallingMethodFromFieldName(fieldName);
        final JvmOperation operation = this.typesFactory.createJvmOperation();
        container.getMembers().add(operation);
        operation.setVisibility(JvmVisibility.PRIVATE);
        operation.setReturnType(cloneWithTypeParametersAndProxies(capacityType, operation));
        operation.setSimpleName(methodName);
        this.associator.associatePrimary(source, operation);
        // Body: lazily resolve and cache the skill, then cast it to the capacity type.
        setBody(operation, it -> {
          it.append("if (this.").append(fieldName).append(" == null || this."); //$NON-NLS-1$ //$NON-NLS-2$
          it.append(fieldName).append(".get() == null) {"); //$NON-NLS-1$
          it.increaseIndentation();
          it.newLine();
          it.append("this.").append(fieldName).append(" = ") //$NON-NLS-1$ //$NON-NLS-2$
              .append(Utils.HIDDEN_MEMBER_CHARACTER).append("getSkill("); //$NON-NLS-1$
          it.append(capacityType.getType()).append(".class);"); //$NON-NLS-1$
          it.decreaseIndentation();
          it.newLine();
          it.append("}"); //$NON-NLS-1$
          it.newLine();
          it.append("return ").append(Utils.HIDDEN_MEMBER_CHARACTER) //$NON-NLS-1$
              .append("castSkill(").append(capacityType.getType()).append(".class, this.") //$NON-NLS-1$ //$NON-NLS-2$
              .append(fieldName).append(");"); //$NON-NLS-1$
        });
        // Add the annotation dedicated to this particular method
        if (context.isAtLeastJava8()) {
          // Deferred until post-finalization so the inline expression sees final names.
          context.getPostFinalizationElements().add(() -> {
            final String inlineExpression = Utils.HIDDEN_MEMBER_CHARACTER
                + "castSkill(" + capacityType.getSimpleName() //$NON-NLS-1$
                + ".class, ($0" + fieldName //$NON-NLS-1$
                + " == null || $0" + fieldName //$NON-NLS-1$
                + ".get() == null) ? ($0" + fieldName //$NON-NLS-1$
                + " = $0" + Utils.HIDDEN_MEMBER_CHARACTER + "getSkill(" //$NON-NLS-1$ //$NON-NLS-2$
                + capacityType.getSimpleName() + ".class)) : $0" + fieldName + ")"; //$NON-NLS-1$ //$NON-NLS-2$
            this.inlineExpressionCompiler.appendInlineAnnotation(operation,
                source.eResource().getResourceSet(), inlineExpression, capacityType);
          });
        }
        appendGeneratedAnnotation(operation, context);
        if (context.getGeneratorConfig2().isGeneratePureAnnotation()) {
          addAnnotationSafe(operation, Pure.class);
        }
        // Record the generation so the serial version reflects capacity changes.
        context.addGeneratedCapacityUseField(capacityType.getIdentifier());
        context.incrementSerial(capacityType.getIdentifier().hashCode());
      }
    }
  }
}
public class ThreadPoolExecutor {
  /**
   * Executes the given task sometime in the future. The task
   * may execute in a new thread or in an existing pooled thread.
   *
   * If the task cannot be submitted for execution, either because this
   * executor has been shutdown or because its capacity has been reached,
   * the task is handled by the current {@code RejectedExecutionHandler}.
   *
   * @param command the task to execute
   * @throws RejectedExecutionException at discretion of
   *         {@code RejectedExecutionHandler}, if the task
   *         cannot be accepted for execution
   * @throws NullPointerException if {@code command} is null
   */
  public void execute(Runnable command) {
    if (command == null)
      throw new NullPointerException();
    /*
     * Proceed in 3 steps:
     *
     * 1. If fewer than corePoolSize threads are running, try to
     * start a new thread with the given command as its first
     * task. The call to addWorker atomically checks runState and
     * workerCount, and so prevents false alarms that would add
     * threads when it shouldn't, by returning false.
     *
     * 2. If a task can be successfully queued, then we still need
     * to double-check whether we should have added a thread
     * (because existing ones died since last checking) or that
     * the pool shut down since entry into this method. So we
     * recheck state and if necessary roll back the enqueuing if
     * stopped, or start a new thread if there are none.
     *
     * 3. If we cannot queue task, then we try to add a new
     * thread. If it fails, we know we are shut down or saturated
     * and so reject the task.
     */
    int c = ctl.get();
    if (workerCountOf(c) < corePoolSize) {
      if (addWorker(command, true))
        return;
      c = ctl.get();
    }
    if (isRunning(c) && workQueue.offer(command)) {
      int recheck = ctl.get();
      // Pool stopped after we queued: undo the enqueue and reject.
      if (!isRunning(recheck) && remove(command))
        reject(command);
      // Ensure at least one worker exists to drain the queue.
      else if (workerCountOf(recheck) == 0)
        addWorker(null, false);
    } else if (!addWorker(command, false))
      reject(command);
  }
}
public class EnvLoader {
  /**
   * Starts the current environment: walks up the classloader chain and starts
   * the first {@link EnvironmentClassLoader} found; otherwise initializes the
   * global environment and notifies the global listeners.
   *
   * @param loader the classloader whose environment should be started
   */
  public static void start(ClassLoader loader) {
    // Search the chain for an environment-aware classloader.
    for (; loader != null; loader = loader.getParent()) {
      if (loader instanceof EnvironmentClassLoader) {
        ((EnvironmentClassLoader) loader).start();
        return;
      }
    }
    // NOTE(review): the loop exhausts the loop variable, so 'loader' is null
    // here and init(null) is invoked — confirm this is intentional.
    init(loader);
    // Indexed loop (not iterator) over the listener list.
    for (int i = 0; i < _globalEnvironmentListeners.size(); i++) {
      EnvLoaderListener listener = _globalEnvironmentListeners.get(i);
      listener.environmentStart(null);
    }
  }
}
public class Proxy {
  /**
   * Retrieves all of the headers from the servlet request and sets them on
   * the proxy request.
   *
   * @param httpServletRequest The request object representing the client's request to the
   *     servlet engine
   * @param httpMethodProxyRequest The request that we are about to send to the proxy host
   */
  @SuppressWarnings("unchecked")
  private void setProxyRequestHeaders(HttpServletRequest httpServletRequest,
      HttpMethod httpMethodProxyRequest) throws Exception {
    RequestInformation requestInfo = requestInformation.get();
    String hostName = HttpUtilities.getHostNameFromURL(httpServletRequest.getRequestURL().toString());
    // Get an Enumeration of all of the header names sent by the client
    Boolean stripTransferEncoding = false;
    Enumeration<String> enumerationOfHeaderNames = httpServletRequest.getHeaderNames();
    while (enumerationOfHeaderNames.hasMoreElements()) {
      String stringHeaderName = enumerationOfHeaderNames.nextElement();
      if (stringHeaderName.equalsIgnoreCase(STRING_CONTENT_LENGTH_HEADER_NAME)) {
        // don't add this header
        continue;
      }
      // The forwarding proxy may supply a POST encoding hint in ODO-POST-TYPE
      if (stringHeaderName.equalsIgnoreCase("ODO-POST-TYPE")
          && httpServletRequest.getHeader("ODO-POST-TYPE").startsWith("content-length:")) {
        stripTransferEncoding = true;
      }
      logger.info("Current header: {}", stringHeaderName);
      // As per the Java Servlet API 2.5 documentation:
      // Some headers, such as Accept-Language can be sent by clients
      // as several headers each with a different value rather than
      // sending the header as a comma separated list.
      // Thus, we get an Enumeration of the header values sent by the client
      Enumeration<String> enumerationOfHeaderValues = httpServletRequest.getHeaders(stringHeaderName);
      while (enumerationOfHeaderValues.hasMoreElements()) {
        String stringHeaderValue = enumerationOfHeaderValues.nextElement();
        // In case the proxy host is running multiple virtual servers,
        // rewrite the Host header to ensure that we get content from
        // the correct virtual server
        if (stringHeaderName.equalsIgnoreCase(STRING_HOST_HEADER_NAME) && requestInfo.handle) {
          String hostValue = getHostHeaderForHost(hostName);
          if (hostValue != null) {
            stringHeaderValue = hostValue;
          }
        }
        Header header = new Header(stringHeaderName, stringHeaderValue);
        // Set the same header on the proxy request
        httpMethodProxyRequest.addRequestHeader(header);
      }
    }
    // this strips transfer encoding headers and adds in the appropriate content-length header
    // based on the hint provided in the ODO-POST-TYPE header (sent from BrowserMobProxyHandler)
    if (stripTransferEncoding) {
      httpMethodProxyRequest.removeRequestHeader("transfer-encoding");
      // add content length back in based on the ODO information
      String contentLengthHint = httpServletRequest.getHeader("ODO-POST-TYPE");
      String[] contentLengthParts = contentLengthHint.split(":");
      httpMethodProxyRequest.addRequestHeader("content-length", contentLengthParts[1]);
      // remove the odo-post-type header
      httpMethodProxyRequest.removeRequestHeader("ODO-POST-TYPE");
    }
    // bail if we aren't fully handling this request
    if (!requestInfo.handle) {
      return;
    }
    // deal with header overrides for the request
    processRequestHeaderOverrides(httpMethodProxyRequest);
  }
}
public class Assembly { /** * Does the actual uploading of files ( when tus is enabled ) . * @ throws IOException when there ' s a failure with file retrieval . * @ throws ProtocolException when there ' s a failure with tus upload . */ protected void uploadTusFiles ( ) throws IOException , ProtocolException { } }
while ( uploads . size ( ) > 0 ) { final TusUploader tusUploader = tusClient . resumeOrCreateUpload ( uploads . get ( 0 ) ) ; TusExecutor tusExecutor = new TusExecutor ( ) { @ Override protected void makeAttempt ( ) throws ProtocolException , IOException { int uploadedChunk = 0 ; while ( uploadedChunk > - 1 ) { uploadedChunk = tusUploader . uploadChunk ( ) ; } tusUploader . finish ( ) ; } } ; tusExecutor . makeAttempts ( ) ; // remove upload instance from list uploads . remove ( 0 ) ; }
public class ManagedCloudSdk { /** * TODO : fix passthrough for useragent and client side usage reporting */ public SdkInstaller newInstaller ( ) { } }
String userAgentString = "google-cloud-tools-java" ; return SdkInstaller . newInstaller ( managedSdkDirectory , version , osInfo , userAgentString , false ) ;
public class MethodInfoList { /** * Returns a list of all methods matching a given name . ( There may be more than one method with a given name , * due to overloading . ) * @ param methodName * The name of a method . * @ return A list of { @ link MethodInfo } objects in the list with the given name ( there may be more than one * method with a given name , due to overloading ) . Returns the empty list if no method had a matching * name . */ public MethodInfoList get ( final String methodName ) { } }
boolean hasMethodWithName = false ; for ( final MethodInfo mi : this ) { if ( mi . getName ( ) . equals ( methodName ) ) { hasMethodWithName = true ; break ; } } if ( ! hasMethodWithName ) { return EMPTY_LIST ; } else { final MethodInfoList matchingMethods = new MethodInfoList ( 2 ) ; for ( final MethodInfo mi : this ) { if ( mi . getName ( ) . equals ( methodName ) ) { matchingMethods . add ( mi ) ; } } return matchingMethods ; }
public class EntryValueLong {
  /**
   * Writes this EntryValue at a given position of a data writer.
   *
   * @param writer the destination writer
   * @param position the byte offset at which to write this entry's long value
   * @throws IOException if the write fails
   */
  @Override
  public void updateArrayFile(DataWriter writer, long position) throws IOException {
    // Persist the single long payload of this entry at the given offset.
    writer.writeLong(position, val);
  }
}
public class Collectors { /** * Returns a { @ code Collector } that performs additional transformation . * @ param < T > the type of the input elements * @ param < A > the accumulation type * @ param < IR > the input type of the transformation function * @ param < OR > the output type of the transformation function * @ param c the input { @ code Collector } * @ param finisher the final transformation function * @ return a { @ code Collector } */ @ NotNull public static < T , A , IR , OR > Collector < T , A , OR > collectingAndThen ( @ NotNull Collector < T , A , IR > c , @ NotNull Function < IR , OR > finisher ) { } }
Objects . requireNonNull ( c ) ; Objects . requireNonNull ( finisher ) ; return new CollectorsImpl < T , A , OR > ( c . supplier ( ) , c . accumulator ( ) , Function . Util . andThen ( c . finisher ( ) , finisher ) ) ;
public class GetSnapshotLimitsRequestMarshaller {
  /**
   * Marshall the given parameter object.
   *
   * @param getSnapshotLimitsRequest the request to marshall; must not be null
   * @param protocolMarshaller the marshaller that receives the bound fields
   * @throws SdkClientException if the request is null or marshalling fails
   */
  public void marshall(GetSnapshotLimitsRequest getSnapshotLimitsRequest, ProtocolMarshaller protocolMarshaller) {
    if (getSnapshotLimitsRequest == null) {
      throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
      // The only payload field for this request is the directory id.
      protocolMarshaller.marshall(getSnapshotLimitsRequest.getDirectoryId(), DIRECTORYID_BINDING);
    } catch (Exception e) {
      // Wrap any marshalling failure, preserving the cause.
      throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
  }
}
public class JOGLTypeConversions {
  /**
   * Convert stencil ops to GL constants.
   *
   * @param op The op.
   * @return The resulting GL constant.
   */
  public static int stencilOperationToGL(final JCGLStencilOperation op) {
    // One-to-one mapping from the abstract stencil operation to the GL enum value.
    switch (op) {
      case STENCIL_OP_DECREMENT:
        return GL.GL_DECR;
      case STENCIL_OP_DECREMENT_WRAP:
        return GL.GL_DECR_WRAP;
      case STENCIL_OP_INCREMENT:
        return GL.GL_INCR;
      case STENCIL_OP_INCREMENT_WRAP:
        return GL.GL_INCR_WRAP;
      case STENCIL_OP_INVERT:
        return GL.GL_INVERT;
      case STENCIL_OP_KEEP:
        return GL.GL_KEEP;
      case STENCIL_OP_REPLACE:
        return GL.GL_REPLACE;
      case STENCIL_OP_ZERO:
        return GL.GL_ZERO;
    }
    // The switch above is exhaustive over the enum; reaching here is impossible.
    throw new UnreachableCodeException();
  }
}
public class Convert { /** * Imports an openfst text format . You pass in the base path that can be loaded off of the classpath * For example if you had classpath location data with files data / mymodel . fst . txt , data / mymodel . input . syms , * and data / mymodel . output . syms then you would pass " data / mymodel " to this method * @ param basename the files ' base name * @ param semiring the fst ' s semiring */ public static MutableFst importFst ( String basename , Semiring semiring ) { } }
Optional < MutableSymbolTable > maybeInputs = importSymbols ( basename + INPUT_SYMS ) ; Optional < MutableSymbolTable > maybeOutputs = importSymbols ( basename + OUTPUT_SYMS ) ; Optional < MutableSymbolTable > maybeStates = importSymbols ( basename + STATES_SYMS ) ; CharSource cs = asCharSource ( Resources . getResource ( basename + FST_TXT ) , Charsets . UTF_8 ) ; return convertFrom ( cs , maybeInputs , maybeOutputs , maybeStates , semiring ) ;
public class AWSOpsWorksClient { /** * Detaches a specified Elastic Load Balancing instance from its layer . * < b > Required Permissions < / b > : To use this action , an IAM user must have a Manage permissions level for the stack , * or an attached policy that explicitly grants permissions . For more information on user permissions , see < a * href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / opsworks - security - users . html " > Managing User * Permissions < / a > . * @ param detachElasticLoadBalancerRequest * @ return Result of the DetachElasticLoadBalancer operation returned by the service . * @ throws ResourceNotFoundException * Indicates that a resource was not found . * @ sample AWSOpsWorks . DetachElasticLoadBalancer * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworks - 2013-02-18 / DetachElasticLoadBalancer " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DetachElasticLoadBalancerResult detachElasticLoadBalancer ( DetachElasticLoadBalancerRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDetachElasticLoadBalancer ( request ) ;
public class AsynchronousRequest {
  /**
   * For more info on emblem API go <a href="https://wiki.guildwars2.com/wiki/API:2/emblem">here</a><br/>
   * Give user the access to {@link Callback#onResponse(Call, Response)} and
   * {@link Callback#onFailure(Call, Throwable)} methods for custom interactions.
   *
   * @param callback callback that is going to be used for {@link Call#enqueue(Callback)}
   * @throws NullPointerException if given {@link Callback} is empty
   * @see Emblem Emblem info
   */
  public void getAllEmblemType(Callback<List<String>> callback) throws NullPointerException {
    // Fire the asynchronous REST call; results are delivered through the callback.
    gw2API.getAllEmblemType().enqueue(callback);
  }
}
public class JSONTranscoder {
  /**
   * Return the deserialized map.
   *
   * @param in bytes to deserialize
   * @return map of deserialized objects
   * @throws TranscoderDeserializationException if the JSON cannot be parsed
   */
  @Override
  public ConcurrentMap<String, Object> deserializeAttributes(final byte[] in) {
    // NOTE(review): this reader uses the platform default charset — confirm the
    // bytes were produced with the same charset (UTF-8 would be explicit/safer).
    final InputStreamReader inputStream = new InputStreamReader(new ByteArrayInputStream(in));
    if (LOG.isDebugEnabled()) {
      LOG.debug("deserialize the stream");
    }
    try {
      // Populate a fresh concurrent map directly from the JSON stream.
      return deserializer.deserializeInto(inputStream, new ConcurrentHashMap<String, Object>());
    } catch (final RuntimeException e) {
      // Re-wrap parser failures in the transcoder's exception type, keeping the cause.
      LOG.warn("Caught Exception deserializing JSON " + e);
      throw new TranscoderDeserializationException(e);
    }
  }
}
public class JSONNavi {
  /**
   * Set current value as Json Object. You can also skip this call, Objects can
   * be created automatically.
   *
   * @return this navigator (for chaining); on error the internal failure flag is set
   */
  @SuppressWarnings("unchecked")
  public JSONNavi<T> object() {
    // Once a failure is recorded, every subsequent call is a no-op.
    if (failure)
      return this;
    if (current == null && readonly)
      failure("Can not create Object child in readonly", null);
    if (current != null) {
      // Already positioned on a value: it must already be an object.
      if (isObject())
        return this;
      if (isArray())
        failure("can not use Object feature on Array.", null);
      // NOTE(review): "possition" typo below is a runtime error message — left
      // unchanged to avoid breaking callers that match on it.
      failure("Can not use current possition as Object", null);
    } else {
      current = mapper.createObject();
    }
    // First materialized value becomes the root; otherwise attach to the parent.
    if (root == null)
      root = (T) current;
    else
      store();
    return this;
  }
}
public class ArtifactCollector { /** * Retrieve the paths of artifacts that were stored in the indicated test result . * @ return ( optional ) list of artifact paths */ public Optional < List < Path > > retrieveArtifactPaths ( ) { } }
if ( artifactPaths . isEmpty ( ) ) { return Optional . absent ( ) ; } else { return Optional . of ( artifactPaths ) ; }
public class Job { /** * Set the outercontrol . * @ param control control */ public void setOutterControl ( final String control ) { } }
prop . put ( PROPERTY_OUTER_CONTROL , OutterControl . valueOf ( control . toUpperCase ( ) ) . toString ( ) ) ;
public class ClassPathTraversal { /** * Analyzes the MANIFEST . MF file of a jar whether additional jars are * listed in the " Class - Path " key . * @ param manifestthe manifest to analyze * @ param state the traversal state */ protected void traverseManifest ( Manifest manifest , TraversalState state ) { } }
Attributes atts ; String cp ; String [ ] parts ; if ( manifest == null ) return ; atts = manifest . getMainAttributes ( ) ; cp = atts . getValue ( "Class-Path" ) ; if ( cp == null ) return ; parts = cp . split ( " " ) ; for ( String part : parts ) { if ( part . trim ( ) . length ( ) == 0 ) return ; if ( part . toLowerCase ( ) . endsWith ( ".jar" ) || ! part . equals ( "." ) ) traverseClasspathPart ( part , state ) ; }
public class Histogram_F64 { /** * Creates an exact copy of " this " histogram */ public Histogram_F64 copy ( ) { } }
Histogram_F64 out = newInstance ( ) ; System . arraycopy ( value , 0 , out . value , 0 , length . length ) ; return out ;
public class ValueType { /** * / * ( non - Javadoc ) * @ see org . kie . base . ValueTypeInterface # isNumber ( ) */ public boolean isNumber ( ) { } }
return ( this . simpleType == SimpleValueType . INTEGER || this . simpleType == SimpleValueType . DECIMAL || this . simpleType == SimpleValueType . CHAR || this . simpleType == SimpleValueType . NUMBER ) ;
public class SecurityContextImpl { /** * Restore the subjects that were previously on the thread prior to applying this * security context . */ @ Override public void taskStopping ( ) { } }
final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . entry ( this , tc , "taskStopping" , "restore caller/invocation subjects" , prevCallerSubject , prevInvocationSubject ) ; subjectManager . setCallerSubject ( prevCallerSubject ) ; subjectManager . setInvocationSubject ( prevInvocationSubject ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "taskStopping" ) ;
public class DecodingOptions { /** * Enables given option . * Note : Some options ( e . g . INCLUDE _ SCHEMA _ ID ) will only take effect if the * EXI options document is set to encode options in general ( see * INCLUDE _ OPTIONS ) . * @ param key * referring to a specific option * @ throws UnsupportedOption * if option is not supported */ public void setOption ( String key ) throws UnsupportedOption { } }
if ( key . equals ( IGNORE_SCHEMA_ID ) ) { options . add ( key ) ; } else { throw new UnsupportedOption ( "DecodingOption '" + key + "' is unknown!" ) ; }
public class ResourceManager { /** * Transform the path into a locale - specific one , or return null . */ protected String getLocalePath ( String path ) { } }
return ( _localeHandler == null ) ? null : _localeHandler . getLocalePath ( path ) ;
public class ProtoLexer {
    /**
     * $ANTLR start "SERVICE" -- generated lexer rule matching the literal
     * keyword "service". Generated code; left untouched apart from comments.
     */
    public final void mSERVICE() throws RecognitionException {
    }
}
try {
    int _type = SERVICE;
    int _channel = DEFAULT_TOKEN_CHANNEL;
    // com/dyuproject/protostuff/parser/ProtoLexer.g:115:5: ( 'service' )
    // com/dyuproject/protostuff/parser/ProtoLexer.g:115:9: 'service'
    {
        match("service");
    }
    // Publish the matched token type and channel into the shared lexer state.
    state.type = _type;
    state.channel = _channel;
} finally {
}
public class FogOfWar {
    /**
     * Check if the tiled is currently fully visible.
     *
     * @param tiled The tiled to check.
     * @return <code>true</code> if every tile covered by the tiled is visited
     *         and not fogged, <code>false</code> otherwise.
     *         (The previous javadoc stated the meaning inverted.)
     */
    public boolean isVisible(Tiled tiled) {
    }
}
// Scan the full footprint of the tiled (width/height inclusive bounds).
final int tx = tiled.getInTileX();
final int ty = tiled.getInTileY();
final int tw = tiled.getInTileWidth() - 1;
final int th = tiled.getInTileHeight() - 1;
for (int ctx = tx; ctx <= tx + tw; ctx++) {
    for (int cty = ty; cty <= ty + th; cty++) {
        // A single fogged or unvisited tile makes the whole tiled not visible.
        if (isFogged(ctx, cty) || !isVisited(ctx, cty)) {
            return false;
        }
    }
}
return true;
public class CreateVpcLinkRequestMarshaller {
    /**
     * Marshalls the given request object onto the protocol marshaller.
     * (AWS SDK generated marshaller; code left untouched.)
     */
    public void marshall(CreateVpcLinkRequest createVpcLinkRequest, ProtocolMarshaller protocolMarshaller) {
    }
}
if (createVpcLinkRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Bind each request field to its wire representation.
    protocolMarshaller.marshall(createVpcLinkRequest.getName(), NAME_BINDING);
    protocolMarshaller.marshall(createVpcLinkRequest.getDescription(), DESCRIPTION_BINDING);
    protocolMarshaller.marshall(createVpcLinkRequest.getTargetArns(), TARGETARNS_BINDING);
} catch (Exception e) {
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class ArrayOfDoublesUpdatableSketchBuilder { /** * Returns an ArrayOfDoublesUpdatableSketch with the current configuration of this Builder . * @ param dstMem instance of Memory to be used by the sketch * @ return an ArrayOfDoublesUpdatableSketch */ public ArrayOfDoublesUpdatableSketch build ( final WritableMemory dstMem ) { } }
return new DirectArrayOfDoublesQuickSelectSketch ( nomEntries_ , resizeFactor_ . lg ( ) , samplingProbability_ , numValues_ , seed_ , dstMem ) ;
public class ChangeVerifier {
    /**
     * Given an AST and its copy, maps the root node of each change scope of
     * the main tree to the corresponding root node of the clone.
     */
    private void associateClones(Node n, Node snapshot) {
        // TODO(johnlenz): determine if MODULE_BODY is useful here.
        if (n.isRoot() || NodeUtil.isChangeScopeRoot(n)) {
            clonesByCurrent.put(n, snapshot);
        }
        // Walk both trees in lock step; the snapshot is assumed to mirror the
        // shape of the original exactly.
        for (Node child = n.getFirstChild(), snapshotChild = snapshot.getFirstChild();
                child != null;
                child = child.getNext(), snapshotChild = snapshotChild.getNext()) {
            associateClones(child, snapshotChild);
        }
    }
}
public class SegmentAggregator {
    /**
     * Flushes all Append Operations that can be flushed at the given moment
     * (until the entire Aggregator is emptied out or until a
     * StreamSegmentSealOperation or MergeSegmentOperation is encountered).
     *
     * @param timer Timer for the operation.
     * @return A CompletableFuture that, when completed, will contain the result from the flush operation.
     */
    private CompletableFuture<WriterFlushResult> flushFully(TimeoutTimer timer) {
    }
}
long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "flushFully");
WriterFlushResult result = new WriterFlushResult();
// Async loop: while canContinueFlushingFully() holds, flush pending appends
// and then pending truncates, folding each iteration's partial result into
// the shared accumulator via withFlushResult. Each step re-reads the timer so
// later iterations see the reduced remaining timeout.
return Futures.loop(
        this::canContinueFlushingFully,
        () -> flushPendingAppends(timer.getRemaining())
                .thenCompose(flushResult -> flushPendingTruncate(flushResult, timer.getRemaining())),
        result::withFlushResult,
        this.executor)
        .thenApply(v -> {
            // Trace exit only after the whole loop finished; return the accumulator.
            LoggerHelpers.traceLeave(log, this.traceObjectId, "flushFully", traceId, result);
            return result;
        });
public class CheckMysql { /** * Get slave statuses . * @ param conn * The database connection * @ return The slave status info * @ throws SQLException */ private Map < String , Integer > getSlaveStatus ( final Connection conn ) throws SQLException { } }
Map < String , Integer > map = new HashMap < String , Integer > ( ) ; String query = SLAVE_STATUS_QRY ; Statement statement = null ; ResultSet rs = null ; try { if ( conn != null ) { statement = conn . createStatement ( ) ; rs = statement . executeQuery ( query ) ; while ( rs . next ( ) ) { map . put ( "Slave_IO_Running" , rs . getInt ( "Slave_IO_Running" ) ) ; map . put ( "Slave_SQL_Running" , rs . getInt ( "Slave_SQL_Running" ) ) ; map . put ( "Seconds_Behind_Master" , rs . getInt ( "Seconds_Behind_Master" ) ) ; } } } finally { DBUtils . closeQuietly ( rs ) ; DBUtils . closeQuietly ( statement ) ; } return map ;
public class SAAJMetaFactory { /** * Creates a new instance of a concrete < code > SAAJMetaFactory < / code > object . * The SAAJMetaFactory is an SPI , it pulls the creation of the other factories together into a * single place . Changing out the SAAJMetaFactory has the effect of changing out the entire SAAJ * implementation . Service providers provide the name of their < code > SAAJMetaFactory < / code > * implementation . * This method uses the following ordered lookup procedure to determine the SAAJMetaFactory implementation class to load : * < UL > * < LI > Use the javax . xml . soap . MetaFactory system property . * < LI > Use the properties file " lib / jaxm . properties " in the JRE directory . This configuration file is in standard * java . util . Properties format and contains the fully qualified name of the implementation class with the key being the * system property defined above . * < LI > Use the Services API ( as detailed in the JAR specification ) , if available , to determine the classname . The Services API * will look for a classname in the file META - INF / services / javax . xml . soap . MetaFactory in jars available to the runtime . * < LI > Default to com . sun . xml . messaging . saaj . soap . SAAJMetaFactoryImpl . * < / UL > * @ return a concrete < code > SAAJMetaFactory < / code > object * @ exception SOAPException if there is an error in creating the < code > SAAJMetaFactory < / code > */ static synchronized SAAJMetaFactory getInstance ( ) throws SOAPException { } }
if ( instance == null ) { try { instance = ( SAAJMetaFactory ) FactoryFinder . find ( META_FACTORY_CLASS_PROPERTY , DEFAULT_META_FACTORY_CLASS ) ; } catch ( Exception e ) { throw new SOAPException ( "Unable to create SAAJ meta-factory" + e . getMessage ( ) ) ; } } return instance ;
public class WonderPushRestClient {
    /**
     * Runs the specified request, ensuring a valid access token is fetched
     * beforehand if necessary, or afterwards (re-running the request) if the
     * request fails for auth reasons.
     */
    protected static void requestAuthenticated(final Request request) {
    }
}
if (null == request) {
    return;
}
// SDK not initialized yet: defer the whole call and retry in 100ms.
if (!WonderPush.isInitialized()) {
    WonderPush.safeDefer(new Runnable() {
        @Override
        public void run() {
            requestAuthenticated(request);
        }
    }, 100);
    return;
}
String accessToken = WonderPushConfiguration.getAccessTokenForUserId(request.getUserId());
if (accessToken == null) {
    // User is not authenticated, request a token
    fetchAnonymousAccessTokenAndRunRequest(request);
    return;
}
// Add the access token to the params
RequestParams params = request.getParams();
if (null == params) {
    params = new RequestParams();
    request.setParams(params);
}
params.remove("accessToken");
params.put("accessToken", accessToken);
// Wrap the request handler with our own so auth failures can be intercepted
// and the request transparently retried after re-authentication.
ResponseHandler wrapperHandler = new ResponseHandler() {
    @Override
    public void onSuccess(int status, Response response) {
        WonderPush.logDebug("Request successful: (" + status + ") " + response + " (for " + request + ")");
        if (request.getHandler() != null) {
            request.getHandler().onSuccess(status, response);
        }
    }

    @Override
    public void onFailure(Throwable e, Response errorResponse) {
        WonderPush.logError("Request failed: " + errorResponse, e);
        if (errorResponse != null && ERROR_INVALID_ACCESS_TOKEN == errorResponse.getErrorCode()) {
            // null out the access token
            WonderPushConfiguration.invalidateCredentials();
            // retry later now
            WonderPush.safeDefer(new Runnable() {
                @Override
                public void run() {
                    requestAuthenticated(request);
                }
            }, RETRY_INTERVAL_BAD_AUTH);
        } else {
            // Not an auth problem: forward the failure to the caller's handler.
            if (request.getHandler() != null) {
                request.getHandler().onFailure(e, errorResponse);
            }
        }
    }

    @Override
    public void onSuccess(Response response) {
        WonderPush.logDebug("Request successful: " + response + " (for " + request + ")");
        if (request.getHandler() != null) {
            request.getHandler().onSuccess(response);
        }
    }
};
// Clone the request so the original handler is preserved on it while the
// clone carries the wrapper, then fire it.
Request wrapperRequest = (Request) request.clone();
wrapperRequest.setHandler(wrapperHandler);
// Perform request
request(wrapperRequest);
public class AttributesConnection { /** * { @ inheritDoc } */ @ Override protected AttributesResultSet createResult ( ResultSet resultSet , int count ) { } }
return new AttributesResultSet ( table , resultSet , count ) ;
public class RegionCommitmentClient { /** * Retrieves a list of commitments contained within the specified region . * < p > Sample code : * < pre > < code > * try ( RegionCommitmentClient regionCommitmentClient = RegionCommitmentClient . create ( ) ) { * ProjectRegionName region = ProjectRegionName . of ( " [ PROJECT ] " , " [ REGION ] " ) ; * for ( Commitment element : regionCommitmentClient . listRegionCommitments ( region . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param region Name of the region for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final ListRegionCommitmentsPagedResponse listRegionCommitments ( String region ) { } }
ListRegionCommitmentsHttpRequest request = ListRegionCommitmentsHttpRequest . newBuilder ( ) . setRegion ( region ) . build ( ) ; return listRegionCommitments ( request ) ;
public class Stream {
    /**
     * Reduces the elements using the provided identity value and the
     * associative, index-aware accumulation function.
     * <p>This is a terminal operation.
     * <p>Example:
     * <pre>
     * identity: 10
     * accumulator: (index, a, b) -&gt; index + a + b
     * stream: [1, 2, 3, 4, 5]
     * index:  [0, 1, 2, 3, 4]
     * result: 10 + 1 + 3 + 5 + 7 + 9 = 35
     * </pre>
     *
     * @param <R> the type of the result
     * @param identity the initial value
     * @param accumulator the accumulation function
     * @return the result of the reduction
     * @since 1.1.6
     */
    @Nullable
    public <R> R reduceIndexed(@Nullable R identity, @NotNull IndexedBiFunction<? super R, ? super T, ? extends R> accumulator) {
    }
}
// Delegates to the (from, step) overload: indices start at 0 and advance by 1.
return reduceIndexed(0, 1, identity, accumulator);
public class CacheHelper { /** * Retrieves the given resource from the cache and translate it to a byte * array ; if missing tries to retrieve it using the ( optional ) provided set * of handlers . * @ param cache * the cache that stores the resource . * @ param resource * the name of the resource to be retrieved . * @ param handlers * the ( optional ) set of handlers that will attempt to retrieve the resource * if missing from the cache . * @ return * the resource as an array of bytes , or { @ code null } if it cannot be * retrieved . * @ throws CacheException */ public static byte [ ] getIntoByteArray ( Cache cache , String resource , CacheMissHandler ... handlers ) throws CacheException { } }
if ( cache == null ) { logger . error ( "cache reference must not be null" ) ; throw new CacheException ( "invalid cache" ) ; } InputStream input = null ; ByteArrayOutputStream output = null ; try { input = cache . get ( resource , handlers ) ; if ( input != null ) { output = new ByteArrayOutputStream ( ) ; long copied = Streams . copy ( input , output ) ; logger . trace ( "copied {} bytes from cache" , copied ) ; return output . toByteArray ( ) ; } } catch ( IOException e ) { logger . error ( "error copying data from cache to byte array" , e ) ; throw new CacheException ( "error copying data from cache to byte array" , e ) ; } finally { Streams . safelyClose ( input ) ; Streams . safelyClose ( output ) ; } return null ;
public class Multiplexing { /** * Creates an infinite iterator that issues elements from the parameter cyclicly . * @ param < E > the element type * @ param iterable the iterable to cycle * @ return an iterator that cyclicly returns the elements from the argument */ public static < E > Iterator < E > cycle ( Iterable < E > iterable ) { } }
dbc . precondition ( iterable != null , "cannot cycle a null iterable" ) ; return new CyclicIterator < E > ( iterable . iterator ( ) ) ;
public class JKObjectUtil { public static Object copy ( Object source ) { } }
try { ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; ObjectOutputStream oos = new ObjectOutputStream ( baos ) ; oos . writeObject ( source ) ; ByteArrayInputStream bais = new ByteArrayInputStream ( baos . toByteArray ( ) ) ; ObjectInputStream ois = new ObjectInputStream ( bais ) ; Object deepCopy = ois . readObject ( ) ; return deepCopy ; } catch ( Exception e ) { throw new JKException ( e ) ; }
public class DefaultFeatureTiles {
    /**
     * Draw the geometry onto the feature tile graphics.
     *
     * @param simplifyTolerance simplify tolerance in meters
     * @param boundingBox bounding box
     * @param transform projection transform
     * @param graphics feature tile graphics
     * @param featureRow feature row
     * @param geometry geometry
     * @return true if drawn
     */
    private boolean drawGeometry(double simplifyTolerance, BoundingBox boundingBox, ProjectionTransform transform, FeatureTileGraphics graphics, FeatureRow featureRow, Geometry geometry) {
    }
}
boolean drawn = false;
GeometryType geometryType = geometry.getGeometryType();
// Style is resolved once per geometry from the feature row and geometry type.
FeatureStyle featureStyle = getFeatureStyle(featureRow, geometryType);
switch (geometryType) {
    case POINT:
        Point point = (Point) geometry;
        drawn = drawPoint(boundingBox, transform, graphics, point, featureStyle);
        break;
    case LINESTRING:
        LineString lineString = (LineString) geometry;
        drawn = drawLineString(simplifyTolerance, boundingBox, transform, graphics, lineString, featureStyle);
        break;
    case POLYGON:
        Polygon polygon = (Polygon) geometry;
        drawn = drawPolygon(simplifyTolerance, boundingBox, transform, graphics, polygon, featureStyle);
        break;
    // For multi/surface/collection types, drawn is true when ANY member was
    // drawn; note "draw(...) || drawn" keeps drawing every member (no short
    // circuit on the draw call itself).
    case MULTIPOINT:
        MultiPoint multiPoint = (MultiPoint) geometry;
        for (Point p : multiPoint.getPoints()) {
            drawn = drawPoint(boundingBox, transform, graphics, p, featureStyle) || drawn;
        }
        break;
    case MULTILINESTRING:
        MultiLineString multiLineString = (MultiLineString) geometry;
        for (LineString ls : multiLineString.getLineStrings()) {
            drawn = drawLineString(simplifyTolerance, boundingBox, transform, graphics, ls, featureStyle) || drawn;
        }
        break;
    case MULTIPOLYGON:
        MultiPolygon multiPolygon = (MultiPolygon) geometry;
        for (Polygon p : multiPolygon.getPolygons()) {
            drawn = drawPolygon(simplifyTolerance, boundingBox, transform, graphics, p, featureStyle) || drawn;
        }
        break;
    case CIRCULARSTRING:
        // Circular strings are rendered as line strings.
        CircularString circularString = (CircularString) geometry;
        drawn = drawLineString(simplifyTolerance, boundingBox, transform, graphics, circularString, featureStyle);
        break;
    case COMPOUNDCURVE:
        CompoundCurve compoundCurve = (CompoundCurve) geometry;
        for (LineString ls : compoundCurve.getLineStrings()) {
            drawn = drawLineString(simplifyTolerance, boundingBox, transform, graphics, ls, featureStyle) || drawn;
        }
        break;
    case POLYHEDRALSURFACE:
        PolyhedralSurface polyhedralSurface = (PolyhedralSurface) geometry;
        for (Polygon p : polyhedralSurface.getPolygons()) {
            drawn = drawPolygon(simplifyTolerance, boundingBox, transform, graphics, p, featureStyle) || drawn;
        }
        break;
    case TIN:
        TIN tin = (TIN) geometry;
        for (Polygon p : tin.getPolygons()) {
            drawn = drawPolygon(simplifyTolerance, boundingBox, transform, graphics, p, featureStyle) || drawn;
        }
        break;
    case TRIANGLE:
        Triangle triangle = (Triangle) geometry;
        drawn = drawPolygon(simplifyTolerance, boundingBox, transform, graphics, triangle, featureStyle);
        break;
    case GEOMETRYCOLLECTION:
        // Recurse into each member geometry, re-resolving style per member.
        @SuppressWarnings("unchecked")
        GeometryCollection<Geometry> geometryCollection = (GeometryCollection<Geometry>) geometry;
        for (Geometry g : geometryCollection.getGeometries()) {
            drawn = drawGeometry(simplifyTolerance, boundingBox, transform, graphics, featureRow, g) || drawn;
        }
        break;
    default:
        throw new GeoPackageException("Unsupported Geometry Type: " + geometry.getGeometryType().getName());
}
return drawn;
public class Medias { /** * Get all media by extension found in the direct JAR path ( does not search in sub folders ) . * @ param jar The JAR file ( must not be < code > null < / code > ) . * @ param fullPath The full path in JAR ( must not be < code > null < / code > ) . * @ param prefixLength The prefix length in JAR ( must not be < code > null < / code > ) . * @ param extension The extension without dot ; eg : png ( must not be < code > null < / code > ) . * @ return The medias found . * @ throws LionEngineException If invalid parameters . */ public static synchronized List < Media > getByExtension ( File jar , String fullPath , int prefixLength , String extension ) { } }
if ( jar . isDirectory ( ) ) { return UtilFile . getFilesByExtension ( new File ( jar , fullPath ) , extension ) . stream ( ) . map ( file -> Medias . create ( file . getName ( ) ) ) . collect ( Collectors . toList ( ) ) ; } final Collection < ZipEntry > entries = UtilZip . getEntriesByExtension ( jar , fullPath , extension ) ; final List < Media > medias = new ArrayList < > ( entries . size ( ) ) ; for ( final ZipEntry entry : entries ) { final Media media = create ( entry . getName ( ) . substring ( prefixLength ) ) ; medias . add ( media ) ; } return medias ;
public class InternalSARLParser {
    /**
     * ANTLR-generated entry rule (InternalSARL.g:8533:1):
     * entryRuleXSwitchExpression returns [EObject current=null]:
     *   iv_ruleXSwitchExpression=ruleXSwitchExpression EOF;
     * Generated code; left untouched apart from comments.
     */
    public final EObject entryRuleXSwitchExpression() throws RecognitionException {
    }
}
EObject current = null;
EObject iv_ruleXSwitchExpression = null;
try {
    // InternalSARL.g:8533:58: ( iv_ruleXSwitchExpression = ruleXSwitchExpression EOF )
    // InternalSARL.g:8534:2: iv_ruleXSwitchExpression = ruleXSwitchExpression EOF
    {
        if (state.backtracking == 0) {
            newCompositeNode(grammarAccess.getXSwitchExpressionRule());
        }
        pushFollow(FOLLOW_1);
        iv_ruleXSwitchExpression = ruleXSwitchExpression();
        state._fsp--;
        if (state.failed) return current;
        if (state.backtracking == 0) {
            current = iv_ruleXSwitchExpression;
        }
        // The rule must consume the whole input.
        match(input, EOF, FOLLOW_2);
        if (state.failed) return current;
    }
} catch (RecognitionException re) {
    // Standard ANTLR error recovery: resynchronize and keep the skipped tokens.
    recover(input, re);
    appendSkippedTokens();
} finally {
}
return current;
public class IntTrieBuilder { /** * Serializes the build table with 32 bit data * @ param datamanipulate builder raw fold method implementation * @ param triedatamanipulate result trie fold method * @ return a new trie */ public IntTrie serialize ( TrieBuilder . DataManipulate datamanipulate , Trie . DataManipulate triedatamanipulate ) { } }
if ( datamanipulate == null ) { throw new IllegalArgumentException ( "Parameters can not be null" ) ; } // fold and compact if necessary , also checks that indexLength is // within limits if ( ! m_isCompacted_ ) { // compact once without overlap to improve folding compact ( false ) ; // fold the supplementary part of the index array fold ( datamanipulate ) ; // compact again with overlap for minimum data array length compact ( true ) ; m_isCompacted_ = true ; } // is dataLength within limits ? if ( m_dataLength_ >= MAX_DATA_LENGTH_ ) { throw new ArrayIndexOutOfBoundsException ( "Data length too small" ) ; } char index [ ] = new char [ m_indexLength_ ] ; int data [ ] = new int [ m_dataLength_ ] ; // write the index ( stage 1 ) array and the 32 - bit data ( stage 2 ) array // write 16 - bit index values shifted right by INDEX _ SHIFT _ for ( int i = 0 ; i < m_indexLength_ ; i ++ ) { index [ i ] = ( char ) ( m_index_ [ i ] >>> INDEX_SHIFT_ ) ; } // write 32 - bit data values System . arraycopy ( m_data_ , 0 , data , 0 , m_dataLength_ ) ; int options = SHIFT_ | ( INDEX_SHIFT_ << OPTIONS_INDEX_SHIFT_ ) ; options |= OPTIONS_DATA_IS_32_BIT_ ; if ( m_isLatin1Linear_ ) { options |= OPTIONS_LATIN1_IS_LINEAR_ ; } return new IntTrie ( index , data , m_initialValue_ , options , triedatamanipulate ) ;
public class ContainerBase { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . api . Archive # add ( org . jboss . shrinkwrap . api . Archive , java . lang . String , java . lang . Class ) */ @ Override public T add ( final Archive < ? > archive , final String path , final Class < ? extends StreamExporter > exporter ) { } }
this . getArchive ( ) . add ( archive , path , exporter ) ; return covarientReturn ( ) ;
public class BaseClient {
    /**
     * Loads local session state.
     *
     * @param initialised whether client initialisation has completed.
     * @return Returns local session state (possibly null when not initialised
     *         or no stored session exists).
     */
    private Observable<SessionData> loadSession(final Boolean initialised) {
    }
}
if (initialised) {
    final SessionData session = dataMgr.getSessionDAO().session();
    if (session != null) {
        if (session.getExpiresOn() > System.currentTimeMillis()) {
            // Stored session still valid: promote the global state to active.
            state.compareAndSet(GlobalState.INITIALISED, GlobalState.SESSION_ACTIVE);
        } else {
            // Session expired: mark it off and re-authenticate; on failure fall
            // back to the stale session so callers still receive a value.
            state.compareAndSet(GlobalState.INITIALISED, GlobalState.SESSION_OFF);
            return service.reAuthenticate().onErrorReturn(throwable -> {
                log.w("Authentication failure during init.");
                return session;
            });
        }
    }
    // Emit the stored session (may be null) and complete.
    return Observable.create(sub -> {
        sub.onNext(session);
        sub.onCompleted();
    });
}
// Not initialised: nothing to load.
return Observable.just(null);
public class TypeUtils { /** * Checks if a { @ link TypeRef } is a { @ link java . util . OptionalInt } . * @ param type The type to check . * @ return True if its a { @ link java . util . OptionalInt } . */ public static boolean isOptionalInt ( TypeRef type ) { } }
if ( ! ( type instanceof ClassRef ) ) { return false ; } return JAVA_UTIL_OPTIONAL_INT . equals ( ( ( ClassRef ) type ) . getDefinition ( ) . getFullyQualifiedName ( ) ) ;
public class Ifc4PackageImpl {
    /**
     * Lazily resolves the IfcContextDependentMeasure EClass from the globally
     * registered Ifc4 package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcContextDependentMeasure() {
    }
}
if (ifcContextDependentMeasureEClass == null) {
    // Classifier index 787 is fixed by the generated package layout.
    ifcContextDependentMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(787);
}
return ifcContextDependentMeasureEClass;
public class Bond { /** * Returns the accrued interest of the bond for a given time . * @ param time The time of interest as double . * @ param model The model under which the product is valued . * @ return The accrued interest . */ public double getAccruedInterest ( double time , AnalyticModel model ) { } }
LocalDate date = FloatingpointDate . getDateFromFloatingPointDate ( schedule . getReferenceDate ( ) , time ) ; return getAccruedInterest ( date , model ) ;
public class ListSamples { protected Intent browseIntent ( String path ) { } }
Intent result = new Intent ( ) ; result . setClass ( this , ListSamples . class ) ; result . putExtra ( intentPath , path ) ; return result ;
public class BatchedMessageListenerContainer { /** * A batched variant of { @ link DefaultMessageListenerContainer # doExecuteListener ( Session , Message ) } . * @ param session The session * @ param messages A list of messages * @ throws JMSException Indicates a problem during processing */ protected void doExecuteListener ( Session session , List < Message > messages ) throws JMSException { } }
if ( ! isAcceptMessagesWhileStopping ( ) && ! isRunning ( ) ) { if ( logger . isWarnEnabled ( ) ) { logger . warn ( "Rejecting received messages because of the listener container " + "having been stopped in the meantime: " + messages ) ; } rollbackIfNecessary ( session ) ; throw new MessageRejectedWhileStoppingException ( ) ; } try { for ( Message message : messages ) { invokeListener ( session , message ) ; } } catch ( JMSException ex ) { rollbackOnExceptionIfNecessary ( session , ex ) ; throw ex ; } catch ( RuntimeException ex ) { rollbackOnExceptionIfNecessary ( session , ex ) ; throw ex ; } catch ( Error err ) { rollbackOnExceptionIfNecessary ( session , err ) ; throw err ; } commitIfNecessary ( session , messages ) ;
public class DefaultGrailsDomainClass { /** * / * ( non - Javadoc ) * @ see grails . core . GrailsDomainClass # getConstraints ( ) */ @ SuppressWarnings ( "unchecked" ) @ Override public Map getConstrainedProperties ( ) { } }
verifyContextIsInitialized ( ) ; if ( constrainedProperties == null ) { ConstrainedDiscovery constrainedDiscovery = GrailsFactoriesLoader . loadFactory ( ConstrainedDiscovery . class ) ; if ( constrainedDiscovery == null ) { constrainedProperties = Collections . emptyMap ( ) ; } else { constrainedProperties = constrainedDiscovery . findConstrainedProperties ( persistentEntity ) ; } } return constrainedProperties ;
public class RRFedNonFedBudget10V1_1Generator {
    /**
     * Sets the CumulativeTrainee details on the budget summary:
     * CumulativeTotalFundsRequestedTraineeCosts,
     * CumulativeTraineeTuitionFeesHealthInsurance, CumulativeTraineeStipends,
     * CumulativeTraineeTravel, CumulativeTraineeSubsistence,
     * CumulativeOtherTraineeCost and CumulativeNoofTrainees, based on the
     * budget summary data.
     *
     * @param budgetSummary the XML bean being populated.
     * @param budgetSummaryData the budget summary entry (may be null).
     */
    private void setCumulativeTrainee(BudgetSummary budgetSummary, BudgetSummaryDto budgetSummaryData) {
    }
}
SummaryDataType summaryTraineeCosts = SummaryDataType.Factory.newInstance();
if (budgetSummaryData != null) {
    // Federal summary = other + stipend + travel + subsistence + tuition;
    // only set when ALL five components are present.
    if (budgetSummaryData.getpartOtherCost() != null && budgetSummaryData.getpartStipendCost() != null
            && budgetSummaryData.getpartTravelCost() != null && budgetSummaryData.getPartSubsistence() != null
            && budgetSummaryData.getPartTuition() != null) {
        summaryTraineeCosts.setFederalSummary(budgetSummaryData.getpartOtherCost()
                .add(budgetSummaryData.getpartStipendCost()
                        .add(budgetSummaryData.getpartTravelCost()
                                .add(budgetSummaryData.getPartSubsistence()
                                        .add(budgetSummaryData.getPartTuition()))))
                .bigDecimalValue());
    }
    // Non-federal (cost sharing) summary, same five components.
    if (budgetSummaryData.getPartOtherCostSharing() != null && budgetSummaryData.getPartStipendCostSharing() != null
            && budgetSummaryData.getPartTravelCostSharing() != null
            && budgetSummaryData.getPartSubsistenceCostSharing() != null
            && budgetSummaryData.getPartTuitionCostSharing() != null) {
        summaryTraineeCosts.setNonFederalSummary(budgetSummaryData.getPartOtherCostSharing()
                .add(budgetSummaryData.getPartStipendCostSharing()
                        .add(budgetSummaryData.getPartTravelCostSharing()
                                .add(budgetSummaryData.getPartSubsistenceCostSharing()
                                        .add(budgetSummaryData.getPartTuitionCostSharing()))))
                .bigDecimalValue());
    }
    // Combined fed + non-fed total (only when a non-federal summary exists).
    if (summaryTraineeCosts.getNonFederalSummary() != null) {
        if (summaryTraineeCosts.getFederalSummary() != null) {
            summaryTraineeCosts.setTotalFedNonFedSummary(
                    summaryTraineeCosts.getFederalSummary().add(summaryTraineeCosts.getNonFederalSummary()));
        } else {
            summaryTraineeCosts.setTotalFedNonFedSummary(summaryTraineeCosts.getNonFederalSummary());
        }
    }
    // --- Tuition / fees / health insurance ---
    TotalDataType totalTuition = TotalDataType.Factory.newInstance();
    if (budgetSummaryData.getPartTuition() != null) {
        totalTuition.setFederal(budgetSummaryData.getPartTuition().bigDecimalValue());
    }
    if (budgetSummaryData.getPartTuitionCostSharing() != null) {
        totalTuition.setNonFederal(budgetSummaryData.getPartTuitionCostSharing().bigDecimalValue());
        if (budgetSummaryData.getPartTuition() != null) {
            totalTuition.setTotalFedNonFed(budgetSummaryData.getPartTuition()
                    .add(budgetSummaryData.getPartTuitionCostSharing()).bigDecimalValue());
        } else {
            totalTuition.setTotalFedNonFed(budgetSummaryData.getPartTuitionCostSharing().bigDecimalValue());
        }
    }
    budgetSummary.setCumulativeTraineeTuitionFeesHealthInsurance(totalTuition);
    // --- Stipends ---
    TotalDataType totalStipends = TotalDataType.Factory.newInstance();
    if (budgetSummaryData.getpartStipendCost() != null) {
        totalStipends.setFederal(budgetSummaryData.getpartStipendCost().bigDecimalValue());
    }
    if (budgetSummaryData.getPartStipendCostSharing() != null) {
        totalStipends.setNonFederal(budgetSummaryData.getPartStipendCostSharing().bigDecimalValue());
        if (budgetSummaryData.getpartStipendCost() != null) {
            totalStipends.setTotalFedNonFed(budgetSummaryData.getpartStipendCost()
                    .add(budgetSummaryData.getPartStipendCostSharing()).bigDecimalValue());
        } else {
            totalStipends.setTotalFedNonFed(budgetSummaryData.getPartStipendCostSharing().bigDecimalValue());
        }
    }
    budgetSummary.setCumulativeTraineeStipends(totalStipends);
    // --- Travel ---
    TotalDataType totalTravel = TotalDataType.Factory.newInstance();
    if (budgetSummaryData.getpartTravelCost() != null) {
        totalTravel.setFederal(budgetSummaryData.getpartTravelCost().bigDecimalValue());
    }
    if (budgetSummaryData.getPartTravelCostSharing() != null) {
        totalTravel.setNonFederal(budgetSummaryData.getPartTravelCostSharing().bigDecimalValue());
        if (budgetSummaryData.getpartTravelCost() != null) {
            totalTravel.setTotalFedNonFed(budgetSummaryData.getpartTravelCost()
                    .add(budgetSummaryData.getPartTravelCostSharing()).bigDecimalValue());
        } else {
            totalTravel.setTotalFedNonFed(budgetSummaryData.getPartTravelCostSharing().bigDecimalValue());
        }
    }
    budgetSummary.setCumulativeTraineeTravel(totalTravel);
    // --- Subsistence ---
    TotalDataType totalSubsistence = TotalDataType.Factory.newInstance();
    if (budgetSummaryData.getPartSubsistence() != null) {
        totalSubsistence.setFederal(budgetSummaryData.getPartSubsistence().bigDecimalValue());
    }
    if (budgetSummaryData.getPartSubsistenceCostSharing() != null) {
        totalSubsistence.setNonFederal(budgetSummaryData.getPartSubsistenceCostSharing().bigDecimalValue());
        if (budgetSummaryData.getPartSubsistence() != null) {
            totalSubsistence.setTotalFedNonFed(budgetSummaryData.getPartSubsistence()
                    .add(budgetSummaryData.getPartSubsistenceCostSharing()).bigDecimalValue());
        } else {
            totalSubsistence.setTotalFedNonFed(budgetSummaryData.getPartSubsistenceCostSharing().bigDecimalValue());
        }
    }
    budgetSummary.setCumulativeTraineeSubsistence(totalSubsistence);
    // --- Other trainee costs ---
    TotalDataType totalOtherTrainee = TotalDataType.Factory.newInstance();
    if (budgetSummaryData.getpartOtherCost() != null) {
        totalOtherTrainee.setFederal(budgetSummaryData.getpartOtherCost().bigDecimalValue());
    }
    if (budgetSummaryData.getPartOtherCostSharing() != null) {
        totalOtherTrainee.setNonFederal(budgetSummaryData.getPartOtherCostSharing().bigDecimalValue());
        if (budgetSummaryData.getpartOtherCost() != null) {
            totalOtherTrainee.setTotalFedNonFed(budgetSummaryData.getpartOtherCost()
                    .add(budgetSummaryData.getPartOtherCostSharing()).bigDecimalValue());
        } else {
            totalOtherTrainee.setTotalFedNonFed(budgetSummaryData.getPartOtherCostSharing().bigDecimalValue());
        }
    }
    budgetSummary.setCumulativeOtherTraineeCost(totalOtherTrainee);
    // --- Head count ---
    budgetSummary.setCumulativeNoofTrainees(budgetSummaryData.getparticipantCount());
}
// The summary is always set, even when budgetSummaryData was null (empty instance).
budgetSummary.setCumulativeTotalFundsRequestedTraineeCosts(summaryTraineeCosts);
public class Session { /** * checks that active / passive sides are correctly set */ protected void compareServerMode ( Session other ) throws ClientException { } }
if ( serverMode == SERVER_DEFAULT && other . serverMode == SERVER_DEFAULT ) { // this is OK } else { // active and passive side had already been set ; // make sure that it has been done correctly : // either server can be active // providing that the other is passive // if this server mode has been defined , // but the other has not , we can ' t proceed if ( this . serverMode == SERVER_DEFAULT || other . serverMode == SERVER_DEFAULT ) { throw new ClientException ( ClientException . BAD_SERVER_MODE , "Only one server has been defined as active or passive" ) ; } // both servers cannot have the same mode if ( other . serverMode == this . serverMode ) { String modeStr = ( this . serverMode == SERVER_PASSIVE ) ? "passive" : "active" ; throw new ClientException ( ClientException . BAD_SERVER_MODE , "Both servers are " + modeStr ) ; } }
public class BaseServletContextResourceReader { /** * Checks if the resource should be accessible * @ param resourceName * the resource name * @ return true if the resource should be accessible */ protected boolean isAccessPermitted ( String resourceName ) { } }
return ! resourceName . startsWith ( JawrConstant . WEB_INF_DIR_PREFIX ) && ! resourceName . startsWith ( JawrConstant . META_INF_DIR_PREFIX ) ;
public class VirtualWANsInner {

    /**
     * Updates a VirtualWAN's tags.
     *
     * @param resourceGroupName The resource group name of the VirtualWan.
     * @param virtualWANName The name of the VirtualWAN being updated.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the VirtualWANInner object if successful.
     */
    public VirtualWANInner updateTags(String resourceGroupName, String virtualWANName) {
        // Blocks on the async long-running operation and takes the LAST emitted
        // response (the terminal state of the operation), then unwraps its body.
        return updateTagsWithServiceResponseAsync(resourceGroupName, virtualWANName)
                .toBlocking()
                .last()
                .body();
    }
}
public class IBANCountryData { /** * This method is used to create an instance of this class from a string * representation . * @ param sCountryCode * Country code to use . Neither < code > null < / code > nor empty . * @ param nExpectedLength * The expected length having only validation purpose . * @ param sLayout * < code > null < / code > or the layout descriptor * @ param sFixedCheckDigits * < code > null < / code > or fixed check digits ( of length 2) * @ param aValidFrom * Validity start date . May be < code > null < / code > . * @ param aValidTo * Validity end date . May be < code > null < / code > . * @ param sDesc * The string description of this country data . May not be * < code > null < / code > . * @ return The parsed county data . */ @ Nonnull public static IBANCountryData createFromString ( @ Nonnull @ Nonempty final String sCountryCode , @ Nonnegative final int nExpectedLength , @ Nullable final String sLayout , @ Nullable final String sFixedCheckDigits , @ Nullable final LocalDate aValidFrom , @ Nullable final LocalDate aValidTo , @ Nonnull final String sDesc ) { } }
ValueEnforcer . notEmpty ( sDesc , "Desc" ) ; if ( sDesc . length ( ) < 4 ) throw new IllegalArgumentException ( "Cannot converted passed string because it is too short!" ) ; final ICommonsList < IBANElement > aList = _parseElements ( sDesc ) ; final Pattern aPattern = _parseLayout ( sCountryCode , nExpectedLength , sFixedCheckDigits , sLayout ) ; // And we ' re done try { return new IBANCountryData ( nExpectedLength , aPattern , sFixedCheckDigits , aValidFrom , aValidTo , aList ) ; } catch ( final IllegalArgumentException ex ) { throw new IllegalArgumentException ( "Failed to parse '" + sDesc + "': " + ex . getMessage ( ) ) ; }
public class CompareHelper { /** * < code > a > b < / code > * @ param < T > * @ param a * @ param b * @ return true if a > b */ public static < T > boolean gt ( Comparable < T > a , T b ) { } }
return gt ( a . compareTo ( b ) ) ;
public class Post { /** * An immutable list of categories associated with this post . * @ return The list of categories . */ public final ImmutableList < TaxonomyTerm > categories ( ) { } }
ImmutableList < TaxonomyTerm > categories = taxonomyTerms . get ( CATEGORY_TAXONOMY ) ; return categories != null ? categories : ImmutableList . of ( ) ;
public class BatchCmd { /** * / * TODO read args from command line */ public static void main ( String [ ] args ) { } }
RandomRBFGeneratorEvents stream = new RandomRBFGeneratorEvents ( ) ; AbstractClusterer clusterer = new WithKmeans ( ) ; boolean [ ] measureCollection = { true , true , true , true , true , true , true , true } ; int amountInstances = 20000 ; String testfile = "d:\\data\\test.csv" ; runBatch ( stream , clusterer , measureCollection , amountInstances , testfile ) ;
public class HeaderPositionCalculator {

    /**
     * Determines if an item in the list should have a header that is different than the item in the
     * list that immediately precedes it. Items with no headers will always return false.
     *
     * @param position of the list item in question
     * @param isReverseLayout TRUE if layout manager has flag isReverseLayout
     * @return true if this item has a different header than the previous item in the list
     * @see StickyRecyclerHeadersAdapter#getHeaderId(int)
     */
    public boolean hasNewHeader(int position, boolean isReverseLayout) {
        if (indexOutOfBounds(position)) {
            return false;
        }

        // Map the (possibly ad-inflated) list position back to the wrapped
        // adapter's position; a negative result means this row is an injected
        // ad, which never carries a content header.
        int originalPosition = moPubRecyclerAdapter.getOriginalPosition(position);
        if (originalPosition < 0) {
            return false;
        }

        // A negative header id means "this item has no header".
        long headerId = mAdapter.getHeaderId(originalPosition);
        if (headerId < 0) {
            return false;
        }

        // The item that visually precedes this one: next index in a reverse
        // layout, previous index otherwise. -1 is a sentinel meaning "no
        // preceding item / no header", guaranteed to differ from valid ids.
        long nextItemHeaderId = -1;
        int nextItemPosition = originalPosition + (isReverseLayout ? 1 : -1);
        // NOTE(review): nextItemPosition is in the wrapped adapter's coordinate
        // space, but indexOutOfBounds presumably checks against the outer
        // (ad-inflated) adapter — confirm both adapters agree on bounds here.
        if (!indexOutOfBounds(nextItemPosition)) {
            nextItemHeaderId = mAdapter.getHeaderId(nextItemPosition);
        }

        // The first visible item always starts a new header group; otherwise a
        // new header appears whenever the id differs from the preceding item's.
        int firstItemPosition = isReverseLayout ? moPubRecyclerAdapter.getItemCount() - 1 : 0;
        return originalPosition == firstItemPosition || headerId != nextItemHeaderId;
    }
}