signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class RecurlyClient { /** * Get Account Adjustments
* @ param accountCode recurly account id
* @ param type { @ link com . ning . billing . recurly . model . Adjustments . AdjustmentType }
* @ param state { @ link com . ning . billing . recurly . model . Adjustments . AdjustmentState }
* @ param params { @ link QueryParams }
* @ return the adjustments on the account */
public Adjustments getAccountAdjustments ( final String accountCode , final Adjustments . AdjustmentType type , final Adjustments . AdjustmentState state , final QueryParams params ) { } }
|
final String url = Account . ACCOUNT_RESOURCE + "/" + accountCode + Adjustments . ADJUSTMENTS_RESOURCE ; if ( type != null ) params . put ( "type" , type . getType ( ) ) ; if ( state != null ) params . put ( "state" , state . getState ( ) ) ; return doGET ( url , Adjustments . class , params ) ;
|
public class HadoopFileSystem { /** * entry . */
public OutputStream newOutputStream ( org . apache . hadoop . fs . Path path , Set < ? extends OpenOption > options , FileAttribute < ? > ... attrs ) throws IOException { } }
|
checkWritable ( ) ; boolean hasCreateNew = false ; boolean hasCreate = false ; boolean hasAppend = false ; for ( OpenOption opt : options ) { if ( opt == READ ) throw new IllegalArgumentException ( "READ not allowed" ) ; if ( opt == CREATE_NEW ) hasCreateNew = true ; if ( opt == CREATE ) hasCreate = true ; if ( opt == APPEND ) hasAppend = true ; } // beginRead ( ) ; / / only need a readlock , the " update ( ) " will
/* try { / / try to obtain a writelock when the os is
ensureOpen ( ) ; / / being closed .
/ / Entry e = getEntry0 ( path ) ;
HadoopFileAttributes e = path . getAttributes ( ) ;
if ( e ! = null ) {
if ( e . isDirectory ( ) | | hasCreateNew )
throw new FileAlreadyExistsException ( path . toString ( ) ) ;
if ( hasAppend ) {
InputStream is = getInputStream ( e ) ;
OutputStream os = getOutputStream ( new Entry ( e , Entry . NEW ) ) ;
copyStream ( is , os ) ;
is . close ( ) ;
return os ;
return getOutputStream ( new Entry ( e , Entry . NEW ) ) ;
} else {
if ( ! hasCreate & & ! hasCreateNew )
throw new NoSuchFileException ( path . toString ( ) ) ;
checkParents ( path ) ;
return getOutputStream ( new Entry ( path , Entry . NEW ) ) ;
} finally {
/ / endRead ( ) ; */
FSDataOutputStream outputStream = this . fs . create ( path ) ; /* for ( int i = 0 ; i < attrs . length ; i + + ) {
FileAttribute < ? > item = attrs [ i ] ;
if ( item . value ( ) . getClass ( ) = = PosixFilePermissions . class ) {
Set < PosixFilePermission > itemPs = ( Set < PosixFilePermission > ) item . value ( ) ;
FsPermission p = FsPermission . valueOf ( " - " + PosixFilePermissions . toString ( itemPs ) ) ;
this . fs . setPermission ( path , p ) ;
break ;
System . out . println ( item . getClass ( ) ) ; */
return outputStream ;
|
public class Quaterniond { /** * / * ( non - Javadoc )
* @ see org . joml . Quaterniondc # add ( org . joml . Quaterniondc , org . joml . Quaterniond ) */
public Quaterniond add ( Quaterniondc q2 , Quaterniond dest ) { } }
|
dest . x = x + q2 . x ( ) ; dest . y = y + q2 . y ( ) ; dest . z = z + q2 . z ( ) ; dest . w = w + q2 . w ( ) ; return dest ;
|
public class TrmRouteDataImpl { /** * Summary information for TRM messages */
public void getTraceSummaryLine ( StringBuilder buff ) { } }
|
super . getTraceSummaryLine ( buff ) ; buff . append ( ",originator=" ) ; buff . append ( getOriginator ( ) ) ; appendList ( buff , "cellules" , getCellules ( ) ) ; appendList ( buff , "costs" , getCosts ( ) ) ;
|
public class MTProto { /** * Switch mode of mtproto
* @ param mode new mode */
public void switchMode ( int mode ) { } }
|
if ( this . mode != mode ) { this . mode = mode ; switch ( mode ) { case MODE_GENERAL : case MODE_PUSH : transportPool . switchMode ( TransportPool . MODE_DEFAULT ) ; break ; case MODE_GENERAL_LOW_MODE : case MODE_FILE : transportPool . switchMode ( TransportPool . MODE_LOWMODE ) ; break ; } this . actionsActor . sendOnce ( new RequestPingDelay ( ) ) ; }
|
public class Jose4jRsaJWK { /** * generate a new JWK with the specified parameters
* @ param size
* @ param alg
* @ param use
* @ param type
* @ return */
public static Jose4jRsaJWK getInstance ( int size , String alg , String use , String type ) { } }
|
String kid = RandomUtils . getRandomAlphaNumeric ( KID_LENGTH ) ; KeyPairGenerator keyGenerator = null ; try { keyGenerator = KeyPairGenerator . getInstance ( "RSA" ) ; } catch ( NoSuchAlgorithmException e ) { // This should not happen , since we hardcoded as " RSA "
if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Caught unexpected exception: " + e . getLocalizedMessage ( ) , e ) ; } return null ; } keyGenerator . initialize ( size ) ; KeyPair keypair = keyGenerator . generateKeyPair ( ) ; RSAPublicKey pubKey = ( RSAPublicKey ) keypair . getPublic ( ) ; RSAPrivateKey priKey = ( RSAPrivateKey ) keypair . getPrivate ( ) ; Jose4jRsaJWK jwk = new Jose4jRsaJWK ( pubKey ) ; jwk . setPrivateKey ( priKey ) ; jwk . setAlgorithm ( alg ) ; jwk . setKeyId ( kid ) ; jwk . setUse ( ( use == null ) ? JwkConstants . sig : use ) ; return jwk ;
|
public class TypeExtractionUtils { /** * Returns the raw class of both parameterized types and generic arrays .
* Returns java . lang . Object for all other types . */
public static Class < ? > getRawClass ( Type t ) { } }
|
if ( isClassType ( t ) ) { return typeToClass ( t ) ; } else if ( t instanceof GenericArrayType ) { Type component = ( ( GenericArrayType ) t ) . getGenericComponentType ( ) ; return Array . newInstance ( getRawClass ( component ) , 0 ) . getClass ( ) ; } return Object . class ;
|
public class YamadaParser { /** * 加载模型
* 以序列化方式加载模型
* @ param modelfile
* 模型路径
* @ throws IOException
* @ throws ClassNotFoundException */
public void loadModel ( String modelfile ) throws IOException , ClassNotFoundException { } }
|
ObjectInputStream instream = new ObjectInputStream ( new GZIPInputStream ( new FileInputStream ( modelfile ) ) ) ; factory = ( AlphabetFactory ) instream . readObject ( ) ; models = ( Linear [ ] ) instream . readObject ( ) ; instream . close ( ) ; IFeatureAlphabet features = factory . DefaultFeatureAlphabet ( ) ; features . setStopIncrement ( true ) ;
|
public class StreamEx { /** * Returns a { @ link SortedMap } whose keys and values are the result of
* applying the provided mapping functions to the input elements .
* This is a < a href = " package - summary . html # StreamOps " > terminal < / a >
* operation .
* If the mapped keys contains duplicates ( according to
* { @ link Object # equals ( Object ) } ) , the value mapping function is applied to
* each equal element , and the results are merged using the provided merging
* function .
* Returned { @ code SortedMap } is guaranteed to be modifiable .
* @ param < K > the output type of the key mapping function
* @ param < V > the output type of the value mapping function
* @ param keyMapper a mapping function to produce keys
* @ param valMapper a mapping function to produce values
* @ param mergeFunction a merge function , used to resolve collisions between
* values associated with the same key , as supplied to
* { @ link Map # merge ( Object , Object , BiFunction ) }
* @ return a { @ code SortedMap } whose keys are the result of applying a key
* mapping function to the input elements , and whose values are the
* result of applying a value mapping function to all input elements
* equal to the key and combining them using the merge function
* @ see Collectors # toMap ( Function , Function , BinaryOperator )
* @ see # toSortedMap ( Function , Function )
* @ see # toNavigableMap ( Function , Function , BinaryOperator )
* @ since 0.1.0 */
public < K , V > SortedMap < K , V > toSortedMap ( Function < ? super T , ? extends K > keyMapper , Function < ? super T , ? extends V > valMapper , BinaryOperator < V > mergeFunction ) { } }
|
return rawCollect ( Collectors . toMap ( keyMapper , valMapper , mergeFunction , TreeMap :: new ) ) ;
|
public class SipDeploymentService { /** * { @ inheritDoc } */
public synchronized SIPWebContext getValue ( ) throws IllegalStateException { } }
|
final SIPWebContext sipContext = this . sipContext ; if ( sipContext == null ) { throw new IllegalStateException ( ) ; } return sipContext ;
|
public class ClassPropertiesCache { /** * Get the method / field - < code > Map < / code > by class .
* @ param pvClass The class where the information are saved .
* @ return The Map with reflection informations . */
@ SuppressWarnings ( "unchecked" ) public Map < Object , Object > getClassPropertiesMapByClass ( Class < ? > pvClass ) { } }
|
return ( Map < Object , Object > ) classPropertiesMap . get ( pvClass ) ;
|
public class Strings { /** * Escape XML entities and illegal characters in the given string . This
* enhances the functionality of
* org . apache . commons . lang . StringEscapeUtils . escapeXml by escaping
* low - valued unprintable characters , which are not permitted by the W3C XML
* 1.0 specification .
* @ param s
* a string
* @ return the same string with characters not permitted by the XML
* specification escaped
* @ see < a href = " http : / / www . w3 . org / TR / REC - xml / # charsets " > Extensible Markup
* Language ( XML ) 1.0 ( Fifth Edition ) < / a >
* @ see < a
* href = " http : / / commons . apache . org / lang / api / org / apache / commons / lang / StringEscapeUtils . html # escapeXml ( java . lang . String ) " > org . apache . commons . lang . StringEscapeUtils
* javadoc < / a > */
public static String escapeXml ( String s ) { } }
|
initializeEscapeMap ( ) ; if ( s == null || s . length ( ) == 0 ) { return s ; } char [ ] sChars = s . toCharArray ( ) ; StringBuilder sb = new StringBuilder ( ) ; int lastReplacement = 0 ; for ( int i = 0 ; i < sChars . length ; i ++ ) { if ( isInvalidXMLCharacter ( sChars [ i ] ) ) { // append intermediate string to string builder
sb . append ( sChars , lastReplacement , i - lastReplacement ) ; // substitute control character with escape sequence
sb . append ( sChars [ i ] == 0xFFFE ? "\\ufffe" : xmlLowValueEscapeStrings [ sChars [ i ] ] ) ; // advance last pointer past this character
lastReplacement = i + 1 ; } } if ( lastReplacement < sChars . length ) { sb . append ( sChars , lastReplacement , sChars . length - lastReplacement ) ; } return StringEscapeUtils . escapeXml ( sb . toString ( ) ) ;
|
public class DSUtil { /** * Computes , in linear time , the length of an
* < code > Iterable < / code > . This is done by iterating over all
* elements from < code > itrbl < / code > and counting their number . */
public static < E > int iterableSize ( Iterable < E > itrbl ) { } }
|
int size = 0 ; for ( Iterator < E > iter = itrbl . iterator ( ) ; iter . hasNext ( ) ; ) { iter . next ( ) ; size ++ ; } return size ;
|
public class DataSet { /** * Gets a copy of example i
* @ param i the example to getFromOrigin
* @ return the example at i ( one example ) */
@ Override public DataSet get ( int i ) { } }
|
if ( i >= numExamples ( ) || i < 0 ) throw new IllegalArgumentException ( "invalid example number: must be 0 to " + ( numExamples ( ) - 1 ) + ", got " + i ) ; if ( i == 0 && numExamples ( ) == 1 ) return this ; return new DataSet ( getHelper ( features , i ) , getHelper ( labels , i ) , getHelper ( featuresMask , i ) , getHelper ( labelsMask , i ) ) ;
|
public class CmsAttributeChoiceWidget { /** * Shows the choice menu . < p > */
public void show ( ) { } }
|
addStyleName ( I_CmsLayoutBundle . INSTANCE . attributeChoice ( ) . hovering ( ) ) ; if ( displayAbove ( ) ) { addStyleName ( I_CmsLayoutBundle . INSTANCE . attributeChoice ( ) . displayAbove ( ) ) ; } else { removeStyleName ( I_CmsLayoutBundle . INSTANCE . attributeChoice ( ) . displayAbove ( ) ) ; }
|
public class MongodbQueue { /** * Setter for { @ link # mongoClient } .
* @ param mongoClient
* @ param setMyOwnMongoClient
* @ return */
protected MongodbQueue < ID , DATA > setMongoClient ( MongoClient mongoClient , boolean setMyOwnMongoClient ) { } }
|
if ( myOwnMongoClient && this . mongoClient != null ) { this . mongoClient . close ( ) ; } this . mongoClient = mongoClient ; myOwnMongoClient = setMyOwnMongoClient ; return this ;
|
public class Scanners { /** * A scanner that succeeds and consumes the current character if it equals to any character in
* { @ code chars } . */
public static Parser < Void > among ( String chars ) { } }
|
if ( chars . length ( ) == 0 ) return isChar ( CharPredicates . NEVER ) ; if ( chars . length ( ) == 1 ) return isChar ( chars . charAt ( 0 ) ) ; return isChar ( CharPredicates . among ( chars ) ) ;
|
public class GetAttributeValuesResult { /** * The list of values for an attribute . For example , < code > Throughput Optimized HDD < / code > and
* < code > Provisioned IOPS < / code > are two available values for the < code > AmazonEC2 < / code > < code > volumeType < / code > .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAttributeValues ( java . util . Collection ) } or { @ link # withAttributeValues ( java . util . Collection ) } if you
* want to override the existing values .
* @ param attributeValues
* The list of values for an attribute . For example , < code > Throughput Optimized HDD < / code > and
* < code > Provisioned IOPS < / code > are two available values for the < code > AmazonEC2 < / code >
* < code > volumeType < / code > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetAttributeValuesResult withAttributeValues ( AttributeValue ... attributeValues ) { } }
|
if ( this . attributeValues == null ) { setAttributeValues ( new java . util . ArrayList < AttributeValue > ( attributeValues . length ) ) ; } for ( AttributeValue ele : attributeValues ) { this . attributeValues . add ( ele ) ; } return this ;
|
public class PseudoContainsSpecifierChecker { /** * { @ inheritDoc } */
@ Override public Collection < Node > check ( final Collection < Node > nodes ) { } }
|
Assert . notNull ( nodes , "nodes is null!" ) ; this . nodes = nodes ; this . result = new LinkedHashSet < Node > ( ) ; String value = specifier . getValue ( ) ; for ( Node node : nodes ) { String content = helper . getValue ( node ) ; if ( content != null && content . contains ( value ) ) { result . add ( node ) ; } } return this . result ;
|
public class VMath { /** * Computes component - wise v1 * s1 - v2.
* @ param v1 first vector
* @ param s1 the scaling factor for v1
* @ param v2 the vector to be subtracted from this vector
* @ return v1 * s1 - v2 */
public static double [ ] timesMinus ( final double [ ] v1 , final double s1 , final double [ ] v2 ) { } }
|
assert v1 . length == v2 . length : ERR_VEC_DIMENSIONS ; final double [ ] sub = new double [ v1 . length ] ; for ( int i = 0 ; i < v1 . length ; i ++ ) { sub [ i ] = v1 [ i ] * s1 - v2 [ i ] ; } return sub ;
|
public class ClassPathTraversal { /** * Fills the class cache with classes in the specified directory .
* @ param dirthe directory to search
* @ param state the traversal state */
protected void traverseDir ( File dir , TraversalState state ) { } }
|
if ( isLoggingEnabled ( ) ) getLogger ( ) . log ( Level . INFO , "Analyzing directory: " + dir ) ; traverseDir ( null , dir , state ) ;
|
public class AggregationHandler { /** * dplyr - like */
@ Override public DDF groupBy ( List < String > groupedColumns , List < String > aggregateFunctions ) throws DDFException { } }
|
mGroupedColumns = groupedColumns ; return agg ( aggregateFunctions ) ;
|
public class Composite { /** * { @ link CodeChunk # getCode } serializes both the chunk ' s initial statements and its output
* expression . When a composite is the only chunk being serialized , and its value is a variable
* reference , this leads to a redundant trailing expression ( the variable name ) . Override the
* superclass implementation to omit it .
* < p > This heuristic appears to only be depended on by unit tests */
@ Override String getCode ( int startingIndent ) { } }
|
return value ( ) instanceof VariableReference ? new FormattingContext ( startingIndent ) . appendInitialStatements ( this ) . toString ( ) : super . getCode ( startingIndent ) ;
|
public class Machine { /** * Try to inline all states that contain only action and state shift . */
public void contractStates ( ) { } }
|
boolean contracted ; do { contracted = false ; for ( Iterator < State > iterator = states . iterator ( ) ; iterator . hasNext ( ) ; ) { State s = iterator . next ( ) ; if ( s . links . size ( ) == 1 && s . links . get ( 0 ) . type == LinkType . BACKTRACK ) { Link l = s . links . get ( 0 ) ; boolean contractedNow = false ; for ( State s2 : states ) { for ( Link l2 : s2 . links ) { if ( l2 . next == s && l2 . type != LinkType . SENTINEL ) { l2 . code = l2 . code + "\n" + l . code ; l2 . next = l . next ; contractedNow = true ; } } } if ( contractedNow ) { iterator . remove ( ) ; contracted = true ; } } } } while ( contracted ) ; // renumber
for ( int i = 0 ; i < states . size ( ) ; ++ i ) { states . get ( i ) . id = i ; }
|
public class Logger { /** * Log a message at the INFO level .
* @ param message the message string to be logged
* @ param throwable the exception ( throwable ) to log
* @ since 1.0.0 */
public void info ( final String message , final Throwable throwable ) { } }
|
log . info ( sanitize ( message ) , throwable ) ;
|
public class GVRWorld { /** * Remove a { @ link GVRRigidBody } from this physics world .
* @ param gvrBody the { @ link GVRRigidBody } to remove . */
public void removeBody ( final GVRRigidBody gvrBody ) { } }
|
mPhysicsContext . runOnPhysicsThread ( new Runnable ( ) { @ Override public void run ( ) { if ( contains ( gvrBody ) ) { NativePhysics3DWorld . removeRigidBody ( getNative ( ) , gvrBody . getNative ( ) ) ; mPhysicsObject . remove ( gvrBody . getNative ( ) ) ; getGVRContext ( ) . getEventManager ( ) . sendEvent ( GVRWorld . this , IPhysicsEvents . class , "onRemoveRigidBody" , GVRWorld . this , gvrBody ) ; } } } ) ;
|
public class NumberSimplex { /** * . initForSolve ( ) , then . solve ( ) , then . getAnswer ( ) */
public void initForSolve ( PrintStream out ) { } }
|
final int width = matrix . getWidth ( ) ; final int height = matrix . getHeight ( ) ; mTop = fillInTopVariables ( width - 1 ) ; mSide = fillInConstants ( height ) ; // System . out . println ( " INIT - SOLVE matrix . h = " + matrix . getHeight ( ) +
// " matrix . w = " + matrix . getWidth ( ) + " m . array . length = " +
// matrix . getArray ( ) . length + " m . array [ 0 ] . length = " + matrix . getArray ( ) [ 0 ] . length ) ;
mArrayhide = fillInArray ( matrix . getArray ( ) , constantsInThisColumnIndex ) ; nullPrintln ( out , "INIT-SOLVE, TOP.length = " + mTop . length + " matrix.height=" + height + " matrix.w=" + width ) ; printTopArraySide ( out ) ;
|
public class DescriptorMergeStrategyResolverChain { /** * Returns the first non - null strategy returned by a { @ link DescriptorMergeStrategyResolver } of the chain .
* If there a no resolvers in the chain , or non of resolvers returns a { @ link DescriptorMergeStrategy } , a
* default strategy is returned .
* @ param descriptorUrl the URL that identifies the descriptor
* @ param descriptorDom the XML DOM of the descriptor
* @ return the first non - null strategy returned by a { @ link DescriptorMergeStrategyResolver } of the chain ,
* or a default one if all the resolvers returned null . */
public DescriptorMergeStrategy getStrategy ( String descriptorUrl , Document descriptorDom ) { } }
|
DescriptorMergeStrategy strategy ; if ( CollectionUtils . isNotEmpty ( resolvers ) ) { for ( DescriptorMergeStrategyResolver resolver : resolvers ) { strategy = resolver . getStrategy ( descriptorUrl , descriptorDom ) ; if ( strategy != null ) { return strategy ; } } } return defaultStrategy ;
|
public class FeatureOverlay { /** * Ignore drawing tiles if they exist in the tile table represented by the tile dao
* @ param tileDao tile data access object
* @ since 1.2.6 */
public void ignoreTileDao ( TileDao tileDao ) { } }
|
GeoPackageOverlay tileOverlay = new GeoPackageOverlay ( tileDao ) ; linkedOverlays . add ( tileOverlay ) ;
|
public class User { /** * Sets a preferred currency . Default is " EUR " .
* @ param currency a 3 - letter currency code */
public void setCurrency ( String currency ) { } }
|
currency = StringUtils . upperCase ( currency ) ; if ( ! CurrencyUtils . getInstance ( ) . isValidCurrency ( currency ) ) { currency = "EUR" ; } this . currency = currency ;
|
public class ResolvedTypes { /** * / * @ Nullable */
protected List < TypeData > doGetTypeData ( XExpression expression ) { } }
|
List < TypeData > result = basicGetExpressionTypes ( ) . get ( expression ) ; return result ;
|
public class AmazonPinpointClient { /** * Deletes the event stream for an app .
* @ param deleteEventStreamRequest
* @ return Result of the DeleteEventStream operation returned by the service .
* @ throws BadRequestException
* 400 response
* @ throws InternalServerErrorException
* 500 response
* @ throws ForbiddenException
* 403 response
* @ throws NotFoundException
* 404 response
* @ throws MethodNotAllowedException
* 405 response
* @ throws TooManyRequestsException
* 429 response
* @ sample AmazonPinpoint . DeleteEventStream
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - 2016-12-01 / DeleteEventStream " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DeleteEventStreamResult deleteEventStream ( DeleteEventStreamRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDeleteEventStream ( request ) ;
|
public class TypedLinkFacetAttributeUpdateMarshaller { /** * Marshall the given parameter object . */
public void marshall ( TypedLinkFacetAttributeUpdate typedLinkFacetAttributeUpdate , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( typedLinkFacetAttributeUpdate == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( typedLinkFacetAttributeUpdate . getAttribute ( ) , ATTRIBUTE_BINDING ) ; protocolMarshaller . marshall ( typedLinkFacetAttributeUpdate . getAction ( ) , ACTION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class PlainChangesLogImpl { /** * { @ inheritDoc } */
public String dump ( ) { } }
|
StringBuilder str = new StringBuilder ( "ChangesLog: \n" ) ; for ( int i = 0 ; i < items . size ( ) ; i ++ ) { str . append ( " " ) . append ( ItemState . nameFromValue ( items . get ( i ) . getState ( ) ) ) . append ( "\t" ) . append ( items . get ( i ) . getData ( ) . getIdentifier ( ) ) ; str . append ( "\t" ) . append ( "isPersisted=" ) . append ( items . get ( i ) . isPersisted ( ) ) . append ( "\t" ) . append ( "isEventFire=" ) ; str . append ( items . get ( i ) . isEventFire ( ) ) . append ( "\t" ) . append ( "isInternallyCreated=" ) . append ( items . get ( i ) . isInternallyCreated ( ) ) . append ( "\t" ) ; str . append ( items . get ( i ) . getData ( ) . getQPath ( ) . getAsString ( ) ) . append ( "\n" ) ; } return str . toString ( ) ;
|
public class ScannerReportWriter { /** * Metadata is mandatory */
public File writeMetadata ( ScannerReport . Metadata metadata ) { } }
|
Protobuf . write ( metadata , fileStructure . metadataFile ( ) ) ; return fileStructure . metadataFile ( ) ;
|
public class PrefixedProperties { /** * ( non - Javadoc )
* @ see java . util . Properties # load ( java . io . InputStream ) */
@ Override public void load ( final InputStream inStream ) throws IOException { } }
|
lock . writeLock ( ) . lock ( ) ; try { properties . load ( inStream ) ; } finally { lock . writeLock ( ) . unlock ( ) ; }
|
public class ReferenceDataAnalysisJobWindowImplListeners { /** * Method used to change the reference data ( String Patterns , Dictionaries ,
* Synonyms ) components ' values in components
* @ param oldReferenceData
* @ param newReferenceData
* @ param referenceDataClass */
private void changeReferenceDataValuesInComponents ( final ReferenceData oldReferenceData , final ReferenceData newReferenceData , final Class < ? > referenceDataClass ) { } }
|
final Collection < ComponentBuilder > componentBuilders = _analysisJobBuilder . getComponentBuilders ( ) ; for ( final ComponentBuilder componentBuilder : componentBuilders ) { final Map < ConfiguredPropertyDescriptor , Object > configuredProperties = componentBuilder . getConfiguredProperties ( ) ; for ( final Map . Entry < ConfiguredPropertyDescriptor , Object > entry : configuredProperties . entrySet ( ) ) { final ConfiguredPropertyDescriptor propertyDescriptor = entry . getKey ( ) ; if ( referenceDataClass . isAssignableFrom ( propertyDescriptor . getBaseType ( ) ) ) { final Object valueObject = entry . getValue ( ) ; // In some cases the configured property is an array
if ( valueObject . getClass ( ) . isArray ( ) ) { final Object [ ] values = ( Object [ ] ) valueObject ; for ( int i = 0 ; i < values . length ; i ++ ) { if ( oldReferenceData . equals ( values [ i ] ) ) { // change the old value of the pattern in the
// array with the new value
values [ i ] = newReferenceData ; } } } else { if ( oldReferenceData . equals ( valueObject ) ) { componentBuilder . setConfiguredProperty ( propertyDescriptor , newReferenceData ) ; } } } } }
|
public class GelfTcpAppender { /** * Send message to socket ' s output stream .
* @ param messageToSend message to send .
* @ return { @ code true } if message was sent successfully , { @ code false } otherwise . */
@ SuppressWarnings ( "checkstyle:illegalcatch" ) private boolean sendMessage ( final byte [ ] messageToSend ) { } }
|
try { connectionPool . execute ( new PooledObjectConsumer < TcpConnection > ( ) { @ Override public void accept ( final TcpConnection tcpConnection ) throws IOException { tcpConnection . write ( messageToSend ) ; } } ) ; } catch ( final Exception e ) { addError ( String . format ( "Error sending message via tcp://%s:%s" , getGraylogHost ( ) , getGraylogPort ( ) ) , e ) ; return false ; } return true ;
|
public class CronTrigger { /** * NOT YET IMPLEMENTED : Returns the final time at which the
* < code > CronTrigger < / code > will fire .
* Note that the return time * may * be in the past . and the date returned is
* not validated against { @ link ICalendar } */
@ Override public Date getFinalFireTime ( ) { } }
|
Date resultTime ; if ( getEndTime ( ) != null ) { resultTime = getTimeBefore ( new Date ( getEndTime ( ) . getTime ( ) + 1000l ) ) ; } else { resultTime = ( m_aCronEx == null ) ? null : m_aCronEx . getFinalFireTime ( ) ; } if ( ( resultTime != null ) && ( getStartTime ( ) != null ) && ( resultTime . before ( getStartTime ( ) ) ) ) { return null ; } return resultTime ;
|
public class PluginProtoCompiler { /** * Get the file extension of the provided stg resource . */
public static String getFileExtension ( String resource ) { } }
|
// E . g uf foo . bar . java . stg , it is the . before " java "
int secondToTheLastDot = resource . lastIndexOf ( '.' , resource . length ( ) - 5 ) ; if ( secondToTheLastDot == - 1 ) { throw new IllegalArgumentException ( "The resource must be named like: 'foo.type.stg' " + "where '.type' will be the file extension of the output files." ) ; } String extension = resource . substring ( secondToTheLastDot , resource . length ( ) - 4 ) ; // to protected against resources like " foo . . stg "
if ( extension . length ( ) < 2 ) { throw new IllegalArgumentException ( "The resource must be named like: 'foo.type.stg' " + "where '.type' will be the file extension of the output files." ) ; } return extension ;
|
public class UpdatePreferencesServlet { /** * Change the number of columns on a specified tab . In the event that the user is decreasing the
* number of columns , extra columns will be stripped from the right - hand side . Any channels in
* these columns will be moved to the bottom of the last preserved column .
* @ param widths array of column widths
* @ param deleted array of deleted column IDs
* @ param acceptor not sure what this is
* @ param request HttpRequest
* @ param response HttpResponse
* @ throws IOException
* @ throws PortalException */
@ RequestMapping ( method = RequestMethod . POST , params = "action=changeColumns" ) public ModelAndView changeColumns ( HttpServletRequest request , HttpServletResponse response , @ RequestParam ( "tabId" ) String tabId , @ RequestParam ( "widths[]" ) String [ ] widths , @ RequestParam ( value = "deleted[]" , required = false ) String [ ] deleted , @ RequestParam ( value = "acceptor" , required = false ) String acceptor ) throws IOException , PortalException { } }
|
final IUserInstance ui = userInstanceManager . getUserInstance ( request ) ; final IPerson person = getPerson ( ui , response ) ; final UserPreferencesManager upm = ( UserPreferencesManager ) ui . getPreferencesManager ( ) ; final IUserLayoutManager ulm = upm . getUserLayoutManager ( ) ; final int newColumnCount = widths . length ; // build a list of the current columns for this tab
final Enumeration < String > columns = ulm . getChildIds ( tabId ) ; final List < String > columnList = new ArrayList < > ( ) ; while ( columns . hasMoreElements ( ) ) { columnList . add ( columns . nextElement ( ) ) ; } final int oldColumnCount = columnList . size ( ) ; final Map < String , Object > model = new HashMap < > ( ) ; // if the new layout has more columns
if ( newColumnCount > oldColumnCount ) { final List < String > newColumnIds = new ArrayList < > ( ) ; for ( int i = columnList . size ( ) ; i < newColumnCount ; i ++ ) { // create new column element
final IUserLayoutFolderDescription newColumn = new UserLayoutFolderDescription ( ) ; newColumn . setName ( "Column" ) ; newColumn . setId ( "tbd" ) ; newColumn . setFolderType ( IUserLayoutFolderDescription . REGULAR_TYPE ) ; newColumn . setHidden ( false ) ; newColumn . setUnremovable ( false ) ; newColumn . setImmutable ( false ) ; // add the column to our layout
final IUserLayoutNodeDescription node = ulm . addNode ( newColumn , tabId , null ) ; newColumnIds . add ( node . getId ( ) ) ; model . put ( "newColumnIds" , newColumnIds ) ; columnList . add ( node . getId ( ) ) ; } } // if the new layout has fewer columns
else if ( deleted != null && deleted . length > 0 ) { for ( String columnId : deleted ) { // move all channels in the current column to the last valid column
final Enumeration channels = ulm . getChildIds ( columnId ) ; while ( channels . hasMoreElements ( ) ) { ulm . addNode ( ulm . getNode ( ( String ) channels . nextElement ( ) ) , acceptor , null ) ; } // delete the column from the user ' s layout
ulm . deleteNode ( columnId ) ; columnList . remove ( columnId ) ; } } /* * Now that the number of columns is established , the appearance and behavior of the columns
* on the page is determined by Structure Stylesheet User Preferences . " Classic " uPortal
* columns use a ' width ' attribute , while the new ( er ) columns based on CSS Flex use a
* ' flexColumns ' attribute . */
resetColumnStylesheetUserPreferences ( request , person , ulm , columnList ) ; // Clear previous selections
// Choose a column layout strategy . . .
boolean useFlexStrategy = false ; // default is " classic "
if ( columnList . size ( ) == 1 ) { int firstColumnWidthAsInt = Integer . parseInt ( widths [ 0 ] ) ; if ( firstColumnWidthAsInt > 100 ) { // A single column with a width parameter creater than 100 ( % )
// signals that we want the flexColumns strategy .
useFlexStrategy = true ; } } // Update Structure Stylesheet User Preferences based on the selected strategy
if ( useFlexStrategy ) { updateColumnStylesheetUserPreferencesFlex ( request , person , ulm , columnList , widths ) ; } else { updateColumnStylesheetUserPreferencesClassic ( request , person , ulm , columnList , widths ) ; } try { ulm . saveUserLayout ( ) ; } catch ( PortalException e ) { logger . warn ( "Error saving layout" , e ) ; } return new ModelAndView ( "jsonView" , model ) ;
|
public class EvaluationErrorPrinter { /** * Outputs falseNegatives and falsePositives spans from the references and
* predictions list .
* @ param references
* @ param predictions
* @ param falseNegatives
* [ out ] the false negatives list
* @ param falsePositives
* [ out ] the false positives list */
private void findErrors ( final Span references [ ] , final Span predictions [ ] , final List < Span > falseNegatives , final List < Span > falsePositives ) { } }
|
// Start by assuming every reference span is missed (false negative) and every
// predicted span is wrong (false positive); then discharge each exact match
// from both error lists.
falseNegatives . addAll ( Arrays . asList ( references ) ) ;
falsePositives . addAll ( Arrays . asList ( predictions ) ) ;
for ( final Span reference : references ) {
    for ( final Span predicted : predictions ) {
        if ( reference . equals ( predicted ) ) {
            // Matched: this span is neither a false negative nor a false positive.
            falseNegatives . remove ( reference ) ;
            falsePositives . remove ( predicted ) ;
        }
    }
}
|
public class DeploymentBuilder { /** * Build
* @ return The deployment */
public Deployment build ( ) { } }
|
return new DeploymentImpl ( identifier , name , archive , classLoader , metadata , activation , resourceAdapter , getConnectionFactories ( ) , getAdminObjects ( ) , classLoaderPlugin ) ;
|
public class Ix { /** * Merges items from an Iterable sequence of Iterable sequences , picking
* the smallest item ( according to a custom comparator ) from all those inner
* Iterables until all sources complete .
* @ param < T > the value type
* @ param sources the Iterable sequence of Iterables
* @ param comparator the comparator to compare items and pick the one that returns negative will be picked
* @ return the new Ix instance
* @ since 1.0 */
public static < T > Ix < T > orderedMerge ( Iterable < ? extends Iterable < ? extends T > > sources , Comparator < ? super T > comparator ) { } }
|
return new IxOrderedMergeIterable < T > ( nullCheck ( sources , "sources is null" ) , nullCheck ( comparator , "comparator is null" ) ) ;
|
public class GroupElement { /** * Converts the group element to the P3 representation .
* @ return The group element in the P3 representation . */
public org . mariadb . jdbc . internal . com . send . authentication . ed25519 . math . GroupElement toP3 ( ) { } }
|
return toRep ( Representation . P3 ) ;
|
public class SARLOperationHelper { /** * Test if the given expression has side effects .
* @ param expression the expression .
* @ param context the list of context expressions .
* @ return { @ code true } if the expression has side effects . */
protected Boolean _hasSideEffects ( XTryCatchFinallyExpression expression , ISideEffectContext context ) { } }
|
final List < Map < String , List < XExpression > > > buffers = new ArrayList < > ( ) ; Map < String , List < XExpression > > buffer = context . createVariableAssignmentBufferForBranch ( ) ; if ( hasSideEffects ( expression . getExpression ( ) , context . branch ( buffer ) ) ) { return true ; } buffers . add ( buffer ) ; for ( final XCatchClause clause : expression . getCatchClauses ( ) ) { context . open ( ) ; buffer = context . createVariableAssignmentBufferForBranch ( ) ; if ( hasSideEffects ( clause . getExpression ( ) , context . branch ( buffer ) ) ) { return true ; } buffers . add ( buffer ) ; context . close ( ) ; } context . mergeBranchVariableAssignments ( buffers ) ; if ( hasSideEffects ( expression . getFinallyExpression ( ) , context ) ) { return true ; } return false ;
|
public class IngestionRecordCountProvider { /** * The record count should be the last component before the filename extension . */
@ Override public long getRecordCount ( Path filepath ) { } }
|
String [ ] components = filepath . getName ( ) . split ( Pattern . quote ( SEPARATOR ) ) ; Preconditions . checkArgument ( components . length >= 2 && StringUtils . isNumeric ( components [ components . length - 2 ] ) , String . format ( "Filename %s does not follow the pattern: FILENAME.RECORDCOUNT.EXTENSION" , filepath ) ) ; return Long . parseLong ( components [ components . length - 2 ] ) ;
|
public class EventQueue { public synchronized long getLastEventDate ( ) throws DevFailed { } }
|
if ( events . size ( ) == 0 ) Except . throw_exception ( "BUFFER_EMPTY" , "Event queue is empty." , "EventQueu.getNextEvent()" ) ; EventData event = events . get ( events . size ( ) - 1 ) ; return event . date ;
|
public class Codec { /** * Get all supported sample formats by this { @ code Codec } . If this { @ code Codec }
* is not an audio codec , then { @ code null } is returned .
* @ return all supported sample formats by this { @ code Codec } . */
public SampleFormat [ ] getSupportedSampleFormats ( ) { } }
|
IntPointer sampleFormatsPointer = avCodec . sample_fmts ( ) ; if ( getType ( ) != MediaType . AUDIO || sampleFormatsPointer == null ) return null ; List < SampleFormat > sampleFormats = new ArrayList < SampleFormat > ( ) ; int format ; int index = 0 ; while ( ( format = sampleFormatsPointer . get ( index ++ ) ) != - 1 ) sampleFormats . add ( SampleFormat . byId ( format ) ) ; return sampleFormats . toArray ( new SampleFormat [ 0 ] ) ;
|
public class JwtHelper { /** * Verify JWT token format and signature . If ignoreExpiry is true , skip expiry verification , otherwise
* verify the expiry before signature verification .
* In most cases , we need to verify the expiry of the jwt token . The only time we need to ignore expiry
* verification is in SPA middleware handlers which need to verify csrf token in jwt against the csrf
* token in the request header to renew the expired token .
* @ param jwt String of Json web token
* @ param ignoreExpiry If true , don ' t verify if the token is expired .
* @ param isToken True if the jwt is an OAuth 2.0 access token
* @ return JwtClaims object
* @ throws InvalidJwtException InvalidJwtException
* @ throws ExpiredTokenException ExpiredTokenException */
public static JwtClaims verifyJwt ( String jwt , boolean ignoreExpiry , boolean isToken ) throws InvalidJwtException , ExpiredTokenException { } }
|
// Verification pipeline:
//  1. Cache fast-path: a cached claims object is already signature-verified,
//     so only expiry (with clock skew) needs re-checking.
//  2. Otherwise parse the JWT WITHOUT validation to extract the claims and the
//     "kid" header, check expiry manually (unless ignoreExpiry), resolve the
//     signing certificate by kid, then re-process the JWT with signature
//     verification enabled. Successful results are cached.
JwtClaims claims ; if ( Boolean . TRUE . equals ( enableJwtCache ) ) { claims = cache . getIfPresent ( jwt ) ; if ( claims != null ) { if ( ! ignoreExpiry ) { try { // if using our own client module , the jwt token should be renewed automatically
// and it will never expired here . However , we need to handle other clients .
if ( ( NumericDate . now ( ) . getValue ( ) - secondsOfAllowedClockSkew ) >= claims . getExpirationTime ( ) . getValue ( ) ) { logger . info ( "Cached jwt token is expired!" ) ; throw new ExpiredTokenException ( "Token is expired" ) ; } } catch ( MalformedClaimException e ) { // This is cached token and it is impossible to have this exception
logger . error ( "MalformedClaimException:" , e ) ; } } // this claims object is signature verified already
return claims ; } } JwtConsumer consumer = new JwtConsumerBuilder ( ) . setSkipAllValidators ( ) . setDisableRequireSignature ( ) . setSkipSignatureVerification ( ) . build ( ) ; JwtContext jwtContext = consumer . process ( jwt ) ; claims = jwtContext . getJwtClaims ( ) ; JsonWebStructure structure = jwtContext . getJoseObjects ( ) . get ( 0 ) ; // need this kid to load public key certificate for signature verification
String kid = structure . getKeyIdHeaderValue ( ) ; // so we do expiration check here manually as we have the claim already for kid
// if ignoreExpiry is false , verify expiration of the token
if ( ! ignoreExpiry ) { try { if ( ( NumericDate . now ( ) . getValue ( ) - secondsOfAllowedClockSkew ) >= claims . getExpirationTime ( ) . getValue ( ) ) { logger . info ( "jwt token is expired!" ) ; throw new ExpiredTokenException ( "Token is expired" ) ; } } catch ( MalformedClaimException e ) { logger . error ( "MalformedClaimException:" , e ) ; throw new InvalidJwtException ( "MalformedClaimException" , new ErrorCodeValidator . Error ( ErrorCodes . MALFORMED_CLAIM , "Invalid ExpirationTime Format" ) , e , jwtContext ) ; } } // get the public key certificate from the cache that is loaded from security . yml if it is not there ,
// go to OAuth2 server / oauth2 / key endpoint to get the public key certificate with kid as parameter .
X509Certificate certificate = certMap == null ? null : certMap . get ( kid ) ; if ( certificate == null ) { certificate = isToken ? getCertForToken ( kid ) : getCertForSign ( kid ) ; if ( certMap == null ) certMap = new HashMap < > ( ) ; // null if bootstrapFromKeyService is true
certMap . put ( kid , certificate ) ; } X509VerificationKeyResolver x509VerificationKeyResolver = new X509VerificationKeyResolver ( certificate ) ; x509VerificationKeyResolver . setTryAllOnNoThumbHeader ( true ) ; consumer = new JwtConsumerBuilder ( ) . setRequireExpirationTime ( ) . setAllowedClockSkewInSeconds ( 315360000 ) // use seconds of 10 years to skip expiration validation as we need skip it in some cases .
. setSkipDefaultAudienceValidation ( ) . setVerificationKeyResolver ( x509VerificationKeyResolver ) . build ( ) ; // Validate the JWT and process it to the Claims
jwtContext = consumer . process ( jwt ) ; claims = jwtContext . getJwtClaims ( ) ; if ( Boolean . TRUE . equals ( enableJwtCache ) ) { cache . put ( jwt , claims ) ; } return claims ;
|
public class GeoPackageCoreConnection { /** * Get the application id
* @ return application id
* @ since 1.2.1 */
public String getApplicationId ( ) { } }
|
String applicationId = null ; Integer applicationIdObject = querySingleTypedResult ( "PRAGMA application_id" , null , GeoPackageDataType . MEDIUMINT ) ; if ( applicationIdObject != null ) { try { applicationId = new String ( ByteBuffer . allocate ( 4 ) . putInt ( applicationIdObject ) . array ( ) , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { throw new GeoPackageException ( "Unexpected application id character encoding" , e ) ; } } return applicationId ;
|
public class MAPDialogLsmImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . map . api . service . lsm . MAPDialogLsm # addSendRoutingInforForLCSRequestIndication
* ( org . restcomm . protocols . ss7 . map . api . primitives . ISDNAddressString ,
* org . restcomm . protocols . ss7 . map . api . service . lsm . SubscriberIdentity ,
* org . restcomm . protocols . ss7 . map . api . primitives . MAPExtensionContainer ) */
public Long addSendRoutingInfoForLCSRequest ( ISDNAddressString mlcNumber , SubscriberIdentity targetMS , MAPExtensionContainer extensionContainer ) throws MAPException { } }
|
return this . addSendRoutingInfoForLCSRequest ( _Timer_Default , mlcNumber , targetMS , extensionContainer ) ;
|
public class StringUtils { /** * Recursively get every file in a directory and add them to a list .
* @ param inputPath
* the input directory
* @ return the list containing all the files */
public static List < File > getFilesInDir ( final File inputPath ) { } }
|
final List < File > fileList = new ArrayList < File > ( ) ; for ( final File aFile : Files . fileTreeTraverser ( ) . preOrderTraversal ( inputPath ) ) { if ( aFile . isFile ( ) ) { fileList . add ( aFile ) ; } } return fileList ;
|
public class FileChunker { /** * This method defines the returned contentId as the path of the arg file
* minus the path of the arg baseDir , in which the file was found .
* @ param baseDir dir that contained the arg file or one of its parents
* @ param file for which contentId is to be found
* @ return contentId of arg file */
private String getContentId ( File baseDir , File file ) { } }
|
String filePath = file . getPath ( ) ;
String basePath = baseDir . getPath ( ) ;
// The content id is the file path relative to the base directory. Require the
// file path to actually START with the base path; the previous indexOf() check
// would also accept the base path occurring in the middle of an unrelated path
// (e.g. base "/tmp" matching "/home/tmp/x") and produce a bogus id.
if ( ! filePath . startsWith ( basePath ) ) {
    StringBuilder sb = new StringBuilder ( "Invalid basePath for file: " ) ;
    sb . append ( "b: '" + basePath + "', " ) ;
    sb . append ( "f: '" + filePath + "'" ) ;
    throw new DuraCloudRuntimeException ( sb . toString ( ) ) ;
}
String contentId = filePath . substring ( basePath . length ( ) ) ;
// Drop a leading separator so the id is relative, not absolute.
if ( contentId . startsWith ( File . separator ) ) {
    contentId = contentId . substring ( 1 ) ;
}
// Replace backslash ( \ ) with forward slash ( / ) for all content IDs
contentId = contentId . replaceAll ( "\\\\" , "/" ) ;
return contentId ;
|
public class BabySitter { /** * Create a new BabySitter and make sure it reads the initial children list .
* Use the provided ExecutorService to queue events to , rather than
* creating a private ExecutorService . */
public static BabySitter nonblockingFactory ( ZooKeeper zk , String dir , Callback cb , ExecutorService es ) throws InterruptedException , ExecutionException { } }
|
BabySitter bs = new BabySitter ( zk , dir , cb , es ) ; bs . m_es . submit ( bs . m_eventHandler ) ; return bs ;
|
public class MorcBuilder { /** * Add a set of predicates to validate an incoming exchange - all predicates in a single call
* apply ONLY to a single message , add consecutive calls to addPredicates in order to handle further messages
* @ param predicates A list of predicates that will validate a separate exchange ( in order ) */
public Builder addPredicates ( Predicate ... predicates ) { } }
|
this . predicates . add ( new ArrayList < > ( Arrays . asList ( predicates ) ) ) ; return self ( ) ;
|
public class RuntimeTransformer { /** * ( non - Javadoc )
* @ see net . roboconf . doc . generator . internal . transformers . AbstractRoboconfTransformer
* # getConfiguredLayout ( ) */
@ Override public Layout < AbstractType , String > getConfiguredLayout ( ) { } }
|
return new StaticLayout < AbstractType , String > ( this . graph , this , getGraphDimension ( ) ) ;
|
public class WebProviderAuthenticatorProxy { /** * Remove LTPA token if this is not a FORM login and the JASPI provider has not committed the response . */
private void attemptToRemoveLtpaToken ( WebRequest webRequest , HashMap < String , Object > props ) { } }
|
SSOCookieHelper ssoCh = webAppSecurityConfig . createSSOCookieHelper ( ) ; if ( ! isFormLogin ( props ) ) { HttpServletResponse res = webRequest . getHttpServletResponse ( ) ; if ( ! res . isCommitted ( ) ) { ssoCh . removeSSOCookieFromResponse ( res ) ; } }
|
public class Models { /** * Score a frame with the given model . */
protected static Response scoreOne ( Frame frame , Model score_model , boolean adapt ) { } }
|
return Frames . scoreOne ( frame , score_model ) ;
|
public class ApiTokenClient { /** * Performs a REST API invocation . This is wrapper function for { @ link # invoke ( String , String , String ,
* ApiTokenClient . QueryClosure , ApiTokenClient . RequestClosure ,
* ApiTokenClient . ResponseClosure ) }
* @ param operation operation name , such as < code > tests < / code > , < code > data - stores < / code > , etc
* @ param id resource ID ( if fetching a specific resource )
* @ param requestClosure closure to create the request , such as GET , POST , PUT , DELETE
* @ param responseClosure closure to convert the response JSON into a value - object
* @ param < JsonType > JSON type , such as JsonObject , JsonArray
* @ param < ValueType > value - type , such as { @ link com . loadimpact . resource . Test } , { @ link
* com . loadimpact . resource . UserScenario } , etc
* @ return a single value - object or a list of it
* @ throws com . loadimpact . exception . ApiException if anything goes wrong , such as the server returns HTTP status not being 20x */
protected < JsonType extends JsonStructure , ValueType > ValueType invoke ( String operation , int id , RequestClosure < JsonType > requestClosure , ResponseClosure < JsonType , ValueType > responseClosure ) { } }
|
return invoke ( operation , Integer . toString ( id ) , null , null , requestClosure , responseClosure ) ;
|
public class FluentIterableWrapper { /** * Returns a fluent iterable that wraps { @ code iterable } , or { @ code iterable } itself if it
* is already a { @ code FluentIterable } . */
public static < E > FluentIterableWrapper < E > from ( final Iterable < E > iterable ) { } }
|
return ( iterable instanceof FluentIterableWrapper ) ? ( FluentIterableWrapper < E > ) iterable : new FluentIterableWrapper < E > ( iterable ) { } ;
|
public class LoggerWrapper { /** * Log a method return .
* This is a convenience method that can be used to
* log returning from a method . A LogRecord with message " RETURN " , log level
* FINER , and the given sourceMethod and sourceClass is logged . */
public void exiting ( ) { } }
|
StackTraceElement caller = StackTraceUtils . getCallerStackTraceElement ( ) ; logger . exiting ( caller . getClassName ( ) , caller . getMethodName ( ) ) ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcAppliedValueSelect ( ) { } }
|
if ( ifcAppliedValueSelectEClass == null ) { ifcAppliedValueSelectEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1116 ) ; } return ifcAppliedValueSelectEClass ;
|
public class Utils { /** * Construct a Mesos URI . */
public static Protos . CommandInfo . URI uri ( MesosArtifactResolver resolver , ContainerSpecification . Artifact artifact ) { } }
|
checkNotNull ( resolver ) ; checkNotNull ( artifact ) ; Option < URL > url = resolver . resolve ( artifact . dest ) ; if ( url . isEmpty ( ) ) { throw new IllegalArgumentException ( "Unresolvable artifact: " + artifact . dest ) ; } return Protos . CommandInfo . URI . newBuilder ( ) . setValue ( url . get ( ) . toExternalForm ( ) ) . setOutputFile ( artifact . dest . toString ( ) ) . setExtract ( artifact . extract ) . setCache ( artifact . cachable ) . setExecutable ( artifact . executable ) . build ( ) ;
|
public class SoyTreeUtils { /** * Runs the visitor on all nodes ( including { @ link ExprNode expr nodes } ) reachable from the given
* node . The order of visiting is breadth first .
* < p > If the visitor return { @ code false } from { @ link NodeVisitor # exec ( Node ) } we will short
* circuit visiting . */
public static void visitAllNodes ( Node node , NodeVisitor < ? super Node , VisitDirective > visitor ) { } }
|
ArrayDeque < Node > queue = new ArrayDeque < > ( ) ; queue . add ( node ) ; Node current ; while ( ( current = queue . poll ( ) ) != null ) { switch ( visitor . exec ( current ) ) { case ABORT : return ; case CONTINUE : if ( current instanceof ParentNode < ? > ) { queue . addAll ( ( ( ParentNode < ? > ) current ) . getChildren ( ) ) ; } if ( current instanceof ExprHolderNode ) { queue . addAll ( ( ( ExprHolderNode ) current ) . getExprList ( ) ) ; } continue ; case SKIP_CHILDREN : continue ; } }
|
public class DateUtils { /** * Attempts to parse an HTTP date .
* @ param date The date to parse
* @ return The parsed date , or null if parsing failed */
public static Date parseDate ( final String date ) { } }
|
/* IE9 sends a superfluous length parameter after the date in the
   If-Modified-Since header, which needs to be stripped before parsing. */
final int semicolonIndex = date . indexOf ( ';' ) ;
final String trimmedDate = semicolonIndex >= 0 ? date . substring ( 0 , semicolonIndex ) : date ;
// Fast path: the thread-cached RFC 1123 formatter covers the common case.
ParsePosition pp = new ParsePosition ( 0 ) ;
SimpleDateFormat dateFormat = RFC1123_PATTERN_FORMAT . get ( ) ;
dateFormat . setTimeZone ( GMT_ZONE ) ;
Date val = dateFormat . parse ( trimmedDate , pp ) ;
if ( val != null && pp . getIndex ( ) == trimmedDate . length ( ) ) {
    return val ;
}
// Fall back to the older HTTP date formats, in the same order as before; the
// previously copy-pasted attempt blocks are deduplicated into one loop.
final String [ ] fallbackPatterns = { RFC1036_PATTERN , ASCITIME_PATTERN , OLD_COOKIE_PATTERN } ;
for ( final String pattern : fallbackPatterns ) {
    pp = new ParsePosition ( 0 ) ;
    dateFormat = new SimpleDateFormat ( pattern , LOCALE_US ) ;
    dateFormat . setTimeZone ( GMT_ZONE ) ;
    val = dateFormat . parse ( trimmedDate , pp ) ;
    // Accept only a parse that consumed the entire string.
    if ( val != null && pp . getIndex ( ) == trimmedDate . length ( ) ) {
        return val ;
    }
}
return null ;
|
public class UserCoreDao { /** * Query for the row where all fields match their values
* @ param fieldValues
* field values
* @ return result */
public TResult queryForFieldValues ( Map < String , Object > fieldValues ) { } }
|
String where = buildWhere ( fieldValues . entrySet ( ) ) ; String [ ] whereArgs = buildWhereArgs ( fieldValues . values ( ) ) ; TResult result = userDb . query ( getTableName ( ) , table . getColumnNames ( ) , where , whereArgs , null , null , null ) ; prepareResult ( result ) ; return result ;
|
public class systemsession { /** * Use this API to fetch filtered set of systemsession resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static systemsession [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
|
systemsession obj = new systemsession ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; systemsession [ ] response = ( systemsession [ ] ) obj . getfiltered ( service , option ) ; return response ;
|
public class Config { /** * Retrieves a configuration value with the given key
* @ param key The key of the configuration value ( e . g . application . name )
* @ return The configured value as boolean or false if the key is not configured */
public boolean getBoolean ( String key ) { } }
|
final String value = this . props . getValue ( key ) ; if ( StringUtils . isBlank ( value ) ) { return false ; } return Boolean . parseBoolean ( value ) ;
|
public class StartBuildRequest { /** * An array of < code > ProjectSource < / code > objects .
* @ param secondarySourcesOverride
* An array of < code > ProjectSource < / code > objects . */
public void setSecondarySourcesOverride ( java . util . Collection < ProjectSource > secondarySourcesOverride ) { } }
|
if ( secondarySourcesOverride == null ) { this . secondarySourcesOverride = null ; return ; } this . secondarySourcesOverride = new java . util . ArrayList < ProjectSource > ( secondarySourcesOverride ) ;
|
public class LongBestFitAllocator { /** * Check properties of any chunk , whether free , inuse , mmapped etc */
private void checkAnyChunk ( long p ) { } }
|
if ( VALIDATING ) { if ( ! isAligned ( chunkToMem ( p ) ) ) throw new AssertionError ( "Chunk address [mem:" + p + "=>chunk:" + chunkToMem ( p ) + "] is incorrectly aligned" ) ; if ( ! okAddress ( p ) ) throw new AssertionError ( "Memory address " + p + " is invalid" ) ; }
|
public class DeleteVirtualRouterRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteVirtualRouterRequest deleteVirtualRouterRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( deleteVirtualRouterRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteVirtualRouterRequest . getMeshName ( ) , MESHNAME_BINDING ) ; protocolMarshaller . marshall ( deleteVirtualRouterRequest . getVirtualRouterName ( ) , VIRTUALROUTERNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class SAXParser { /** * < p > Reset this < code > SAXParser < / code > to its original configuration . < / p >
* < p > < code > SAXParser < / code > is reset to the same state as when it was created with
* { @ link SAXParserFactory # newSAXParser ( ) } .
* < code > reset ( ) < / code > is designed to allow the reuse of existing < code > SAXParser < / code > s
* thus saving resources associated with the creation of new < code > SAXParser < / code > s . < / p >
* < p > The reset < code > SAXParser < / code > is not guaranteed to have the same { @ link Schema }
* < code > Object < / code > , e . g . { @ link Object # equals ( Object obj ) } . It is guaranteed to have a functionally equal
* < code > Schema < / code > . < / p >
* @ since 1.5 */
public void reset ( ) { } }
|
// implementors should override this method
// Default behavior per the JAXP contract: a parser that does not support
// reuse reports itself (class, spec title, spec version) in the exception.
throw new UnsupportedOperationException ( "This SAXParser, \"" + this . getClass ( ) . getName ( ) + "\", does not support the reset functionality." + " Specification \"" + this . getClass ( ) . getPackage ( ) . getSpecificationTitle ( ) + "\"" + " version \"" + this . getClass ( ) . getPackage ( ) . getSpecificationVersion ( ) + "\"" ) ;
|
public class ReplicationsInner { /** * Updates a replication for a container registry with the specified parameters .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param replicationName The name of the replication .
* @ param tags The tags for the replication .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ReplicationInner > updateAsync ( String resourceGroupName , String registryName , String replicationName , Map < String , String > tags , final ServiceCallback < ReplicationInner > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( updateWithServiceResponseAsync ( resourceGroupName , registryName , replicationName , tags ) , serviceCallback ) ;
|
public class UriUtils { /** * Encodes the value , preserving all reserved characters . Values that are already pct - encoded are
* ignored .
* @ param value inspect .
* @ param type identifying which uri fragment rules to apply .
* @ param charset to use .
* @ return a new String with the reserved characters preserved . */
public static String encodeReserved ( String value , FragmentType type , Charset charset ) { } }
|
// Scan for already-pct-encoded runs (e.g. "%2F") and copy them through
// untouched, encoding only the plain-text gaps between them; if nothing is
// pre-encoded, encode the whole value in one shot.
/* value is encoded , we need to split it up and skip the parts that are already encoded */
Matcher matcher = PCT_ENCODED_PATTERN . matcher ( value ) ; if ( ! matcher . find ( ) ) { return encodeChunk ( value , type , charset ) ; } int length = value . length ( ) ; StringBuilder encoded = new StringBuilder ( length + 8 ) ; int index = 0 ; do { /* split out the value before the encoded value */
String before = value . substring ( index , matcher . start ( ) ) ; /* encode it */
encoded . append ( encodeChunk ( before , type , charset ) ) ; /* append the encoded value */
encoded . append ( matcher . group ( ) ) ; /* update the string search index */
index = matcher . end ( ) ; } while ( matcher . find ( ) ) ; /* append the rest of the string */
String tail = value . substring ( index , length ) ; encoded . append ( encodeChunk ( tail , type , charset ) ) ; return encoded . toString ( ) ;
|
public class AnnotationUtils { /** * Determine if the supplied { @ link Annotation } is defined in the core JDK
* { @ code java . lang . annotation } package .
* @ param annotation the annotation to check ( never { @ code null } )
* @ return { @ code true } if the annotation is in the { @ code java . lang . annotation } package */
public static boolean isInJavaLangAnnotationPackage ( Annotation annotation ) { } }
|
Assert . notNull ( annotation , "Annotation must not be null" ) ; return annotation . annotationType ( ) . getName ( ) . startsWith ( "java.lang.annotation" ) ;
|
public class PatchMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Patch patch , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( patch == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( patch . getId ( ) , ID_BINDING ) ; protocolMarshaller . marshall ( patch . getReleaseDate ( ) , RELEASEDATE_BINDING ) ; protocolMarshaller . marshall ( patch . getTitle ( ) , TITLE_BINDING ) ; protocolMarshaller . marshall ( patch . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( patch . getContentUrl ( ) , CONTENTURL_BINDING ) ; protocolMarshaller . marshall ( patch . getVendor ( ) , VENDOR_BINDING ) ; protocolMarshaller . marshall ( patch . getProductFamily ( ) , PRODUCTFAMILY_BINDING ) ; protocolMarshaller . marshall ( patch . getProduct ( ) , PRODUCT_BINDING ) ; protocolMarshaller . marshall ( patch . getClassification ( ) , CLASSIFICATION_BINDING ) ; protocolMarshaller . marshall ( patch . getMsrcSeverity ( ) , MSRCSEVERITY_BINDING ) ; protocolMarshaller . marshall ( patch . getKbNumber ( ) , KBNUMBER_BINDING ) ; protocolMarshaller . marshall ( patch . getMsrcNumber ( ) , MSRCNUMBER_BINDING ) ; protocolMarshaller . marshall ( patch . getLanguage ( ) , LANGUAGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class ExecutionGroupVertex { /** * Checks if this vertex is an input vertex in its stage , i . e . has either no
* incoming connections or only incoming connections to group vertices in a lower stage .
* @ return < code > true < / code > if this vertex is an input vertex , < code > false < / code > otherwise */
public boolean isInputVertex ( ) { } }
|
if ( this . backwardLinks . size ( ) == 0 ) { return true ; } final Iterator < ExecutionGroupEdge > it = this . backwardLinks . iterator ( ) ; while ( it . hasNext ( ) ) { if ( it . next ( ) . getSourceVertex ( ) . getStageNumber ( ) == this . getStageNumber ( ) ) { return false ; } } return true ;
|
public class AllureNamingUtils { /** * Replace bad XML characters in the given array with spaces
* @ param cbuf buffer to replace in
* @ param off Offset from which to start reading characters
* @ param len Number of characters to be replaced */
public static void replaceBadXmlCharactersBySpace ( char [ ] cbuf , int off , int len ) { } }
|
for ( int i = off ; i < off + len ; i ++ ) { if ( isBadXmlCharacter ( cbuf [ i ] ) ) { cbuf [ i ] = '\u0020' ; } }
|
public class Engine { /** * Create a Engine with given configPath and extra - parameters .
* @ param configPath config path
* @ param parameters parameters map
* @ return Engine
* @ since 1.5.0 */
public static Engine create ( final String configPath , final Map < String , Object > parameters ) { } }
|
return create ( createConfigProps ( configPath ) , parameters ) ;
|
public class Document { /** * indexed getter for entities - gets an indexed value -
* @ generated
* @ param i index in the array to get
* @ return value of the element at index i */
public Entity getEntities ( int i ) { } }
|
if ( Document_Type . featOkTst && ( ( Document_Type ) jcasType ) . casFeat_entities == null ) jcasType . jcas . throwFeatMissing ( "entities" , "de.julielab.jules.types.ace.Document" ) ; jcasType . jcas . checkArrayBounds ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Document_Type ) jcasType ) . casFeatCode_entities ) , i ) ; return ( Entity ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefArrayValue ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Document_Type ) jcasType ) . casFeatCode_entities ) , i ) ) ) ;
|
public class ExpressionUtils { /** * Create a new Path expression
* @ param type type of expression
* @ param parent parent path
* @ param property property name
* @ return property path */
public static < T > Path < T > path ( Class < ? extends T > type , Path < ? > parent , String property ) { } }
|
return new PathImpl < T > ( type , parent , property ) ;
|
public class Directory { /** * Creates a new DirectoryService instance for the test rule . Initialization of the service is done in the
* apply Statement phase by invoking the setupService method . */
private DirectoryService createDirectoryService ( ) { } }
|
final DirectoryServiceFactory serviceFactory = new DefaultDirectoryServiceFactory ( ) ;
try {
    // "scribble" is the instance name used for the embedded test directory.
    serviceFactory . init ( "scribble" ) ;
    return serviceFactory . getDirectoryService ( ) ;
} catch ( Exception e ) { // NOSONAR
    // Rule setup failure is a test-infrastructure error, so fail hard.
    throw new AssertionError ( "Unable to create directory service" , e ) ;
}
|
public class AVQuery { /** * Add a constraint to the query that requires a particular key ' s value does not match any value
* for a key in the results of another AVQuery .
* @ param key The key whose value is being checked and excluded
* @ param keyInQuery The key in the objects from the sub query to look in
* @ param query The sub query to run
* @ return Returns the query so you can chain this call . */
public AVQuery < T > whereDoesNotMatchKeyInQuery ( String key , String keyInQuery , AVQuery < ? > query ) { } }
|
Map < String , Object > map = AVUtils . createMap ( "className" , query . className ) ; map . put ( "where" , query . conditions . compileWhereOperationMap ( ) ) ; Map < String , Object > queryMap = AVUtils . createMap ( "query" , map ) ; queryMap . put ( "key" , keyInQuery ) ; addWhereItem ( key , "$dontSelect" , queryMap ) ; return this ;
|
public class DocServiceBuilder { /** * Adds the example { @ link HttpHeaders } for the service with the specified type . This method is
* a shortcut to :
* < pre > { @ code
* exampleHttpHeaders ( serviceType . getName ( ) , exampleHttpHeaders ) ;
* } < / pre > */
public DocServiceBuilder exampleHttpHeaders ( Class < ? > serviceType , HttpHeaders ... exampleHttpHeaders ) { } }
|
requireNonNull ( serviceType , "serviceType" ) ; return exampleHttpHeaders ( serviceType . getName ( ) , exampleHttpHeaders ) ;
|
public class JodaBeanSer { /** * Returns a copy of this serializer with the short types flag set .
* @ param shortTypes whether to use short types , not null
* @ return a copy of this object with the short types flag changed , not null */
public JodaBeanSer withShortTypes ( boolean shortTypes ) { } }
|
return new JodaBeanSer ( indent , newLine , converter , iteratorFactory , shortTypes , deserializers , includeDerived ) ;
|
public class BZip2CompressorOutputStream { /** * Writes the current byte to the buffer , run - length encoding it
* if it has been repeated at least four times ( the first step
* RLEs sequences of four identical bytes ) .
* < p > Flushes the current block before writing data if it is
* full . < / p >
* < p > " write to the buffer " means adding to data . buffer starting
* two steps " after " this . last - initially starting at index 1
* ( not 0 ) - and updating this . last to point to the last index
* written minus 1 . < / p > */
private void writeRun ( ) throws IOException { } }
|
// When the block still has room (last < allowableBlockSize): mark currentChar as used,
// fold runLength occurrences of it into the CRC, then emit the run:
//   - runs of 1, 2 or 3 bytes are written out literally;
//   - runs of 4 or more are written as exactly four copies of the byte followed by a
//     count byte holding (runLength - 4); that count value is also flagged in inUse.
// Writes start at block[last + 2] per the "two steps after this.last" convention above.
// When the block is full: close it (endBlock), open a fresh one (initBlock) and retry
// this same run recursively.
final int lastShadow = this . last ; if ( lastShadow < this . allowableBlockSize ) { final int currentCharShadow = this . currentChar ; final Data dataShadow = this . data ; dataShadow . inUse [ currentCharShadow ] = true ; final byte ch = ( byte ) currentCharShadow ; int runLengthShadow = this . runLength ; this . crc . updateCRC ( currentCharShadow , runLengthShadow ) ; switch ( runLengthShadow ) { case 1 : dataShadow . block [ lastShadow + 2 ] = ch ; this . last = lastShadow + 1 ; break ; case 2 : dataShadow . block [ lastShadow + 2 ] = ch ; dataShadow . block [ lastShadow + 3 ] = ch ; this . last = lastShadow + 2 ; break ; case 3 : { final byte [ ] block = dataShadow . block ; block [ lastShadow + 2 ] = ch ; block [ lastShadow + 3 ] = ch ; block [ lastShadow + 4 ] = ch ; this . last = lastShadow + 3 ; } break ; default : { runLengthShadow -= 4 ; dataShadow . inUse [ runLengthShadow ] = true ; final byte [ ] block = dataShadow . block ; block [ lastShadow + 2 ] = ch ; block [ lastShadow + 3 ] = ch ; block [ lastShadow + 4 ] = ch ; block [ lastShadow + 5 ] = ch ; block [ lastShadow + 6 ] = ( byte ) runLengthShadow ; this . last = lastShadow + 5 ; } break ; } } else { endBlock ( ) ; initBlock ( ) ; writeRun ( ) ; }
|
public class Signature { /** * PackageSpecifier :
* Identifier / PackageSpecifier *
* @ param sb
* @ param type */
private static void packageSpecifier ( Result sb , TypeMirror type ) throws IOException { } }
|
switch ( type . getKind ( ) ) { case DECLARED : DeclaredType dt = ( DeclaredType ) type ; PackageElement pkg = El . getPackageOf ( dt . asElement ( ) ) ; sb . append ( pkg . getQualifiedName ( ) . toString ( ) . replace ( '.' , '/' ) ) ; sb . append ( '/' ) ; break ; default : throw new UnsupportedOperationException ( type . getKind ( ) + " unsupported" ) ; }
|
public class SessionContext { /** * appends filter name and status to the filter execution history for the
* current request */
public void addFilterExecutionSummary ( String name , String status , long time ) { } }
|
StringBuilder sb = getFilterExecutionSummary ( ) ; if ( sb . length ( ) > 0 ) sb . append ( ", " ) ; sb . append ( name ) . append ( '[' ) . append ( status ) . append ( ']' ) . append ( '[' ) . append ( time ) . append ( "ms]" ) ;
|
public class Listener { /** * Sets the velocity of the listener . */
public void setVelocity ( float x , float y , float z ) { } }
|
if ( _vx != x || _vy != y || _vz != z ) { AL10 . alListener3f ( AL10 . AL_VELOCITY , _vx = x , _vy = y , _vz = z ) ; }
|
public class EREtoEDT { /** * lightERE offsets are indexed into the document , including text inside tags EDT offsets are
* TODO use the mapping in bue - common - open */
private ImmutableMap < Integer , Integer > lightEREOffsetToEDTOffset ( String document ) { } }
|
final ImmutableMap . Builder < Integer , Integer > offsetMap = ImmutableMap . builder ( ) ; int EDT = 0 ; // lightERE treats these as one , not two ( as an XML parser would )
document = document . replaceAll ( "\\r\\n" , "\n" ) ; for ( int i = 0 ; i < document . length ( ) ; i ++ ) { final String c = document . substring ( i , i + 1 ) ; // skip < tags >
if ( c . equals ( "<" ) ) { i = document . indexOf ( '>' , i ) ; continue ; } offsetMap . put ( i , EDT ) ; EDT ++ ; } return offsetMap . build ( ) ;
|
public class AmazonEC2Client { /** * Deletes the specified ingress or egress entry ( rule ) from the specified network ACL .
* @ param deleteNetworkAclEntryRequest
* @ return Result of the DeleteNetworkAclEntry operation returned by the service .
* @ sample AmazonEC2 . DeleteNetworkAclEntry
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DeleteNetworkAclEntry " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DeleteNetworkAclEntryResult deleteNetworkAclEntry ( DeleteNetworkAclEntryRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDeleteNetworkAclEntry ( request ) ;
|
public class PropertiesUtils {
    /**
     * Loads a double-valued property. If the key is not present, returns {@code defaultValue}.
     *
     * @param props        the properties to read from
     * @param key          the property name
     * @param defaultValue the value returned when the key is absent
     * @return the parsed property value, or {@code defaultValue} if the key is missing
     */
    public static double getDouble(Properties props, String key, double defaultValue) {
        final String raw = props.getProperty(key);
        if (raw == null) {
            return defaultValue;
        }
        return Double.parseDouble(raw);
    }
}
public class WarsApi { /** * List kills for a war ( asynchronously ) Return a list of kills related to a
* war - - - This route is cached for up to 3600 seconds
* @ param warId
* A valid war ID ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param page
* Which page of results to return ( optional , default to 1)
* @ param callback
* The callback to be executed when the API call finishes
* @ return The request call
* @ throws ApiException
* If fail to process the API call , e . g . serializing the request
* body object */
public com . squareup . okhttp . Call getWarsWarIdKillmailsAsync ( Integer warId , String datasource , String ifNoneMatch , Integer page , final ApiCallback < List < WarKillmailsResponse > > callback ) throws ApiException { } }
|
com . squareup . okhttp . Call call = getWarsWarIdKillmailsValidateBeforeCall ( warId , datasource , ifNoneMatch , page , callback ) ; Type localVarReturnType = new TypeToken < List < WarKillmailsResponse > > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
|
public class GaloisField { /** * Compute the remainder of a dividend and divisor pair . The index in the
* array corresponds to the power of the entry . For example p [ 0 ] is the
* constant term of the polynomial p .
* @ param dividend dividend polynomial , the remainder will be placed here when return
* @ param divisor divisor polynomial */
public void remainder ( int [ ] dividend , int [ ] divisor ) { } }
|
// Polynomial long division over the Galois field, highest degree first. For each shift i
// at which the divisor fits under the dividend, compute the field quotient ("ratio") of
// the two aligned leading coefficients via divTable, then subtract ratio * divisor from
// the dividend at that shift; subtraction in GF(2^w) is XOR, and products come from
// mulTable. On return the low-order entries of dividend (below divisor.length - 1 degree)
// hold the remainder; higher entries have been reduced.
for ( int i = dividend . length - divisor . length ; i >= 0 ; i -- ) { int ratio = divTable [ dividend [ i + divisor . length - 1 ] ] [ divisor [ divisor . length - 1 ] ] ; for ( int j = 0 ; j < divisor . length ; j ++ ) { int k = j + i ; dividend [ k ] = dividend [ k ] ^ mulTable [ ratio ] [ divisor [ j ] ] ; } }
|
public class DefaultJobManager { /** * Execute one job . */
public void runJob ( ) { } }
|
try { this . currentJob = this . jobQueue . take ( ) ; // Create a clean Execution Context
ExecutionContext context = new ExecutionContext ( ) ; try { this . executionContextManager . initialize ( context ) ; } catch ( ExecutionContextException e ) { throw new RuntimeException ( "Failed to initialize Job " + this . currentJob + " execution context" , e ) ; } this . currentJob . run ( ) ; } catch ( InterruptedException e ) { // Thread has been stopped
} finally { this . execution . removeContext ( ) ; }
|
public class WindowsRegistry { /** * Read value ( s ) and value name ( s ) form given key
* @ param hk the HKEY
* @ param key the key
* @ return the value name ( s ) plus the value ( s )
* @ throws RegistryException when something is not right */
public Map < String , String > readStringValues ( HKey hk , String key ) throws RegistryException { } }
|
return readStringValues ( hk , key , null ) ;
|
public class ScopeController { /** * REST endpoint for retrieving all features for a given sprint and team
* ( the sprint is derived )
* @ param scopeId
* A given scope - owner ' s source - system ID
* @ return A data response list of type Feature containing all features for
* the given team and current sprint */
@ RequestMapping ( value = "/scope/{scopeId}" , method = GET , produces = APPLICATION_JSON_VALUE ) public DataResponse < List < Scope > > scope ( @ RequestParam ( value = "component" , required = true ) String cId , @ PathVariable String scopeId ) { } }
|
ObjectId componentId = new ObjectId ( cId ) ; return this . scopeService . getScope ( componentId , scopeId ) ;
|
public class MonitoringFilter { /** * cette méthode est protected pour pouvoir être surchargée dans une classe définie par l ' application */
protected boolean isAllowed ( HttpServletRequest httpRequest , HttpServletResponse httpResponse ) throws IOException { } }
|
return httpAuth . isAllowed ( httpRequest , httpResponse ) ;
|
public class FacesConfigOrderingTypeImpl { /** * If not already created , a new < code > before < / code > element with the given value will be created .
* Otherwise , the existing < code > before < / code > element will be returned .
* @ return a new or existing instance of < code > FacesConfigOrderingOrderingType < FacesConfigOrderingType < T > > < / code > */
public FacesConfigOrderingOrderingType < FacesConfigOrderingType < T > > getOrCreateBefore ( ) { } }
|
Node node = childNode . getOrCreate ( "before" ) ; FacesConfigOrderingOrderingType < FacesConfigOrderingType < T > > before = new FacesConfigOrderingOrderingTypeImpl < FacesConfigOrderingType < T > > ( this , "before" , childNode , node ) ; return before ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.