signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class UnitResponse { /** * 将unitResponse . data适配为指定的类型 , 请放心大胆做转换 */
public < T > T dataToType ( Class < T > type ) { } } | return Reflection . toType ( data , type ) ; |
public class Stream { /** * Zip together the " a " , " b " and " c " iterators until all of them runs out of values .
* Each triple of values is combined into a single value using the supplied zipFunction function .
* @ param a
* @ param b
* @ param c
* @ param valueForNoneA value to fill if " a " runs out of values .
* @ param valueForNoneB value to fill if " b " runs out of values .
* @ param valueForNoneC value to fill if " c " runs out of values .
* @ param zipFunction
* @ return */
public static < R > Stream < R > zip ( final DoubleStream a , final DoubleStream b , final DoubleStream c , final double valueForNoneA , final double valueForNoneB , final double valueForNoneC , final DoubleTriFunction < R > zipFunction ) { } } | return zip ( a . iteratorEx ( ) , b . iteratorEx ( ) , c . iteratorEx ( ) , valueForNoneA , valueForNoneB , valueForNoneC , zipFunction ) . onClose ( newCloseHandler ( N . asList ( a , b , c ) ) ) ; |
public class ImageArchiveUtil { /** * Build a map of entries by id from an iterable of entries .
* @ param entries
* @ return a map of entries by id */
public static Map < String , ImageArchiveManifestEntry > mapEntriesById ( Iterable < ImageArchiveManifestEntry > entries ) { } } | Map < String , ImageArchiveManifestEntry > mapped = new LinkedHashMap < > ( ) ; for ( ImageArchiveManifestEntry entry : entries ) { mapped . put ( entry . getId ( ) , entry ) ; } return mapped ; |
public class FileExecutor { /** * 扫描文件夹下面所有文件
* @ param folder 文件夹
* @ return 文件路径列表 */
public static String [ ] scanFolderAsArray ( String folder ) { } } | List < String > list = scanFolder ( folder ) ; String [ ] arrays = new String [ list . size ( ) ] ; int i = 0 ; for ( String item : list ) { arrays [ i ++ ] = item ; } return arrays ; |
public class TrialMeter { /** * Calculates the test values for all parameters . The length of the
* resulting { @ code double [ ] } array must be { @ link # dataSize ( ) } .
* @ param function the test function */
public void sample ( final Function < T , double [ ] > function ) { } } | _params . values ( ) . subSeq ( _dataSet . nextParamIndex ( ) ) . forEach ( p -> _dataSet . add ( function . apply ( p ) ) ) ; |
public class CompletableFuture { /** * If not already completed , causes invocations of { @ link # get ( ) } and related methods to throw the
* given exception .
* @ param ex the exception
* @ return { @ code true } if this invocation caused this CompletableFuture to transition to a
* completed state , else { @ code false } */
public boolean completeExceptionally ( Throwable ex ) { } } | boolean triggered = internalComplete ( new AltResult ( Objects . requireNonNull ( ex ) ) ) ; postComplete ( ) ; return triggered ; |
public class A_CmsClipboardTab { /** * Replaces the item with the same id if present . < p >
* @ param item the new item */
public void replaceItem ( CmsListItem item ) { } } | CmsListItem oldItem = getList ( ) . getItem ( item . getId ( ) ) ; if ( oldItem != null ) { int index = getList ( ) . getWidgetIndex ( oldItem ) ; getList ( ) . removeItem ( oldItem ) ; if ( index >= getList ( ) . getWidgetCount ( ) ) { getList ( ) . addItem ( item ) ; } else { getList ( ) . insertItem ( item , index ) ; } } |
public class PrettyTime { /** * Format the given { @ link Calendar } object . If the given { @ link Calendar } is < code > null < / code > , the current value of
* { @ link System # currentTimeMillis ( ) } will be used instead .
* @ param then the { @ link Calendar } whose date is to be formatted
* @ return A formatted string representing { @ code then } */
public String format ( Calendar then ) { } } | if ( then == null ) return format ( now ( ) ) ; return format ( then . getTime ( ) ) ; |
public class ErrorMessage { /** * < p > parseDescription . < / p >
* @ param status a int .
* @ return a { @ link java . lang . String } object . */
public static String parseDescription ( int status ) { } } | String desc = null ; if ( status < 500 ) { if ( status == 402 || ( status > 417 && status < 421 ) || status > 424 ) { desc = ErrorMessage . getLocaleDescription ( 400 ) ; } else { desc = ErrorMessage . getLocaleDescription ( status ) ; } } else { switch ( status ) { case 501 : desc = ErrorMessage . getLocaleDescription ( status ) ; break ; } } if ( desc == null ) { desc = ErrorMessage . getLocaleDescription ( ) ; } return desc ; |
public class Annotate { /** * Enqueue tree for scanning of type annotations , attaching to the Symbol sym . */
public void queueScanTreeAndTypeAnnotate ( JCTree tree , Env < AttrContext > env , Symbol sym , DiagnosticPosition deferPos ) { } } | Assert . checkNonNull ( sym ) ; normal ( ( ) -> tree . accept ( new TypeAnnotate ( env , sym , deferPos ) ) ) ; |
public class RecorderFileSink { /** * Writes samples to file following WAVE format .
* @ param file Recording where to write the header
* @ throws IOException */
private static void writeHeader ( Path file ) throws IOException { } } | try ( FileChannel fout = FileChannel . open ( file , StandardOpenOption . WRITE ) ) { long size = fout . size ( ) ; int sampleSize = ( int ) size - 44 ; if ( logger . isInfoEnabled ( ) ) { logger . info ( "Size " + sampleSize + " of recording file " + file ) ; } ByteBuffer headerBuffer = ByteBuffer . allocateDirect ( 44 ) ; headerBuffer . clear ( ) ; // RIFF
headerBuffer . put ( ( byte ) 0x52 ) ; headerBuffer . put ( ( byte ) 0x49 ) ; headerBuffer . put ( ( byte ) 0x46 ) ; headerBuffer . put ( ( byte ) 0x46 ) ; int length = sampleSize + 36 ; // Length
headerBuffer . put ( ( byte ) ( length ) ) ; headerBuffer . put ( ( byte ) ( length >> 8 ) ) ; headerBuffer . put ( ( byte ) ( length >> 16 ) ) ; headerBuffer . put ( ( byte ) ( length >> 24 ) ) ; // WAVE
headerBuffer . put ( ( byte ) 0x57 ) ; headerBuffer . put ( ( byte ) 0x41 ) ; headerBuffer . put ( ( byte ) 0x56 ) ; headerBuffer . put ( ( byte ) 0x45 ) ; // fmt
headerBuffer . put ( ( byte ) 0x66 ) ; headerBuffer . put ( ( byte ) 0x6d ) ; headerBuffer . put ( ( byte ) 0x74 ) ; headerBuffer . put ( ( byte ) 0x20 ) ; headerBuffer . put ( ( byte ) 0x10 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // format - PCM
headerBuffer . put ( ( byte ) 0x01 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // format - MONO
headerBuffer . put ( ( byte ) 0x01 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // sample rate : 8000
headerBuffer . put ( ( byte ) 0x40 ) ; headerBuffer . put ( ( byte ) 0x1F ) ; headerBuffer . put ( ( byte ) 0x00 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // byte rate
headerBuffer . put ( ( byte ) 0x80 ) ; headerBuffer . put ( ( byte ) 0x3E ) ; headerBuffer . put ( ( byte ) 0x00 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // Block align
headerBuffer . put ( ( byte ) 0x02 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // Bits per sample : 16
headerBuffer . put ( ( byte ) 0x10 ) ; headerBuffer . put ( ( byte ) 0x00 ) ; // " data "
headerBuffer . put ( ( byte ) 0x64 ) ; headerBuffer . put ( ( byte ) 0x61 ) ; headerBuffer . put ( ( byte ) 0x74 ) ; headerBuffer . put ( ( byte ) 0x61 ) ; // len
headerBuffer . put ( ( byte ) ( sampleSize ) ) ; headerBuffer . put ( ( byte ) ( sampleSize >> 8 ) ) ; headerBuffer . put ( ( byte ) ( sampleSize >> 16 ) ) ; headerBuffer . put ( ( byte ) ( sampleSize >> 24 ) ) ; headerBuffer . rewind ( ) ; // lets write header
fout . position ( 0 ) ; fout . write ( headerBuffer ) ; } |
public class ContainerAliasResolver { /** * Looks up container id for given alias that is associated with process instance
* @ param alias container alias
* @ param processInstanceId unique process instance id
* @ return
* @ throws IllegalArgumentException in case there are no containers for given alias */
public String forProcessInstance ( String alias , long processInstanceId ) { } } | return registry . getContainerId ( alias , new ByProcessInstanceIdContainerLocator ( processInstanceId ) ) ; |
public class Char { /** * Eats a String of the form " % xx " from a string , where
* xx is a hexadecimal code . If xx is a UTF8 code start ,
* tries to complete the UTF8 - code and decodes it . */
public static char eatPercentage ( String a , int [ ] n ) { } } | // Length 0
if ( ! a . startsWith ( "%" ) || a . length ( ) < 3 ) { n [ 0 ] = 0 ; return ( ( char ) 0 ) ; } char c ; // Try to parse first char
try { c = ( char ) Integer . parseInt ( a . substring ( 1 , 3 ) , 16 ) ; } catch ( Exception e ) { n [ 0 ] = - 1 ; return ( ( char ) 0 ) ; } // For non - UTF8 , return the char
int len = Utf8Length ( c ) ; n [ 0 ] = 3 ; if ( len <= 1 ) return ( c ) ; // Else collect the UTF8
String dec = "" + c ; for ( int i = 1 ; i < len ; i ++ ) { try { dec += ( char ) Integer . parseInt ( a . substring ( 1 + i * 3 , 3 + i * 3 ) , 16 ) ; } catch ( Exception e ) { return ( c ) ; } } // Try to decode the UTF8
int [ ] eatLength = new int [ 1 ] ; char utf8 = eatUtf8 ( dec , eatLength ) ; if ( eatLength [ 0 ] != len ) return ( c ) ; n [ 0 ] = len * 3 ; return ( utf8 ) ; |
public class DrlConstraintParser { /** * Parses the Java code contained in a resource and returns a
* { @ link Expression } that represents it . < br >
* @ param path path to a resource containing Java source code . As resource is accessed through a class loader , a
* leading " / " is not allowed in pathToResource
* @ param encoding encoding of the source code
* @ return Expression representing the Java source code
* @ throws ParseProblemException if the source code has parser errors
* @ throws IOException the path could not be accessed */
public static Expression parseResource ( final String path , Charset encoding ) throws IOException { } } | return simplifiedParse ( EXPRESSION , resourceProvider ( path , encoding ) ) ; |
public class SvdlibcSparseBinaryMatrixBuilder { /** * { @ inheritDoc } */
public synchronized int addColumn ( double [ ] column ) { } } | if ( isFinished ) throw new IllegalStateException ( "Cannot add columns to a MatrixBuilder that is finished" ) ; // Update the size of the matrix based on the size of the array
if ( column . length > numRows ) numRows = column . length ; // Identify how many non - zero values are present in the column
int nonZero = 0 ; for ( int i = 0 ; i < column . length ; ++ i ) { if ( column [ i ] != 0d ) nonZero ++ ; } // Update the total number of non - zero values for the entire matrix
nonZeroValues += nonZero ; // Write the column to file
try { matrixDos . writeInt ( nonZero ) ; for ( int i = 0 ; i < column . length ; ++ i ) { if ( column [ i ] != 0d ) { matrixDos . writeInt ( i ) ; // write the row index
matrixDos . writeFloat ( ( float ) column [ i ] ) ; } } } catch ( IOException ioe ) { throw new IOError ( ioe ) ; } return ++ curCol ; |
public class ArgumentSimplifiedTokenBIOAnnotator { /** * Selects the main argument component from a list of components that are present in the
* sentence ; currently the longest
* @ param argumentComponents list of argument components
* @ return argument component */
protected ArgumentComponent selectMainArgumentComponent ( List < ArgumentComponent > argumentComponents ) { } } | ArgumentComponent result = null ; int maxLength = Integer . MIN_VALUE ; for ( ArgumentComponent argumentComponent : argumentComponents ) { int length = argumentComponent . getEnd ( ) - argumentComponent . getBegin ( ) ; if ( length > maxLength ) { maxLength = length ; result = argumentComponent ; } } if ( result == null ) { throw new IllegalStateException ( "Couldn't find maximum arg. component" ) ; } return result ; |
public class BatchTransactionServiceImpl { /** * ( non - Javadoc )
* @ see com . ibm . jbatch . container . services . IBatchServiceBase # shutdown ( ) */
@ Override public void shutdown ( ) throws BatchContainerServiceException { } } | logger . entering ( CLASSNAME , "shutdown" ) ; logger . fine ( "do nothing" ) ; logger . exiting ( CLASSNAME , "shutdown" ) ; |
public class Calc { /** * Perform linear transformation s * X + B , and store the result in b
* @ param s
* Amount to scale x
* @ param x
* Input coordinate
* @ param b
* Vector to translate ( will be modified )
* @ return b , after modification */
public static Atom scaleAdd ( double s , Atom x , Atom b ) { } } | double xc = s * x . getX ( ) + b . getX ( ) ; double yc = s * x . getY ( ) + b . getY ( ) ; double zc = s * x . getZ ( ) + b . getZ ( ) ; // Atom a = new AtomImpl ( ) ;
b . setX ( xc ) ; b . setY ( yc ) ; b . setZ ( zc ) ; return b ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcInterceptorType ( ) { } } | if ( ifcInterceptorTypeEClass == null ) { ifcInterceptorTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 325 ) ; } return ifcInterceptorTypeEClass ; |
public class EventKeyFormat { /** * Combine multiple EventStore event IDs into a single Databus event key . To get the most compact encoded string ,
* sort the event ID list before encoding it . */
static String encode ( List < String > eventIds ) { } } | checkArgument ( ! eventIds . isEmpty ( ) , "Empty event ID list." ) ; if ( eventIds . size ( ) == 1 ) { return checkValid ( eventIds . get ( 0 ) ) ; } // Concatenate the event IDs using a simple scheme that efficiently encodes events that are the same length and
// share a common prefix .
StringBuilder buf = new StringBuilder ( ) ; String prevId = null ; for ( String eventId : eventIds ) { checkValid ( eventId ) ; int commonPrefixLength ; if ( prevId == null ) { // First event ID
buf . append ( eventId ) ; } else if ( prevId . length ( ) == eventId . length ( ) && ( commonPrefixLength = getCommonPrefixLength ( prevId , eventId ) ) > 0 ) { // Event ID is same length and shares a common prefix with the previous . Just add the part that ' s different .
buf . append ( DELIM_SHARED_PREFIX ) . append ( eventId . substring ( commonPrefixLength ) ) ; } else { buf . append ( DELIM_REGULAR ) . append ( eventId ) ; } prevId = eventId ; } return buf . toString ( ) ; |
public class SparkComputationGraph { /** * Score the examples individually , using the default batch size { @ link # DEFAULT _ EVAL _ SCORE _ BATCH _ SIZE } . Unlike { @ link # calculateScore ( JavaRDD , boolean ) } ,
* this method returns a score for each example separately < br >
* Note : The provided JavaPairRDD has a key that is associated with each example and returned score . < br >
* < b > Note : < / b > The DataSet objects passed in must have exactly one example in them ( otherwise : can ' t have a 1:1 association
* between keys and data sets to score )
* @ param data Data to score
* @ param includeRegularizationTerms If true : include the l1 / l2 regularization terms with the score ( if any )
* @ param < K > Key type
* @ return A { @ code JavaPairRDD < K , Double > } containing the scores of each example
* @ see MultiLayerNetwork # scoreExamples ( DataSet , boolean ) */
public < K > JavaPairRDD < K , Double > scoreExamplesMultiDataSet ( JavaPairRDD < K , MultiDataSet > data , boolean includeRegularizationTerms ) { } } | return scoreExamplesMultiDataSet ( data , includeRegularizationTerms , DEFAULT_EVAL_SCORE_BATCH_SIZE ) ; |
public class ReflectiveInterceptor { /** * If clazz is reloadable < b > and < / b > has been reloaded at least once then return the ReloadableType instance for it ,
* otherwise return null .
* @ param clazz the type which may or may not be reloadable
* @ return the reloadable type or null */
private static ReloadableType getReloadableTypeIfHasBeenReloaded ( Class < ? > clazz ) { } } | if ( TypeRegistry . nothingReloaded ) { return null ; } ReloadableType rtype = getRType ( clazz ) ; if ( rtype != null && rtype . hasBeenReloaded ( ) ) { return rtype ; } else { return null ; } |
public class SpearmansRankCorrelation { /** * Computes the correlation between two datasets .
* @ param list1 The first dataset as a list .
* @ param list2 The second dataset as a list .
* @ return The correlation between the two datasets . */
public static double computeCorrelation ( List < Double > list1 , List < Double > list2 ) { } } | double [ ] l1 = new double [ list1 . size ( ) ] ; double [ ] l2 = new double [ list2 . size ( ) ] ; for ( int i = 0 ; i < list1 . size ( ) ; i ++ ) { l1 [ i ] = list1 . get ( i ) ; } for ( int i = 0 ; i < list2 . size ( ) ; i ++ ) { l2 [ i ] = list2 . get ( i ) ; } SpearmansCorrelation sc = new SpearmansCorrelation ( ) ; return sc . correlation ( l1 , l2 ) ; |
public class VdmModel { /** * ( non - Javadoc )
* @ see org . overture . ide . core . ast . IVdmElement # getClassList ( ) */
public synchronized ClassList getClassList ( ) throws NotAllowedException { } } | ClassList classes = new ClassList ( ) ; for ( Object definition : getRootElementList ( ) ) { if ( definition instanceof SClassDefinition ) classes . add ( ( SClassDefinition ) definition ) ; else throw new NotAllowedException ( "Other definition than ClassDefinition is found: " + definition . getClass ( ) . getName ( ) ) ; } return classes ; |
public class FTPClient { /** * Starts local server in active server mode . */
public void setLocalActive ( ) throws ClientException , IOException { } } | if ( session . serverAddress == null ) { throw new ClientException ( ClientException . CALL_PASSIVE_FIRST ) ; } try { localServer . setActive ( session . serverAddress ) ; } catch ( java . net . UnknownHostException e ) { throw new ClientException ( ClientException . UNKNOWN_HOST ) ; } |
public class Transformers { /** * Buffers the elements into continuous , non - overlapping Lists where the
* boundary is determined by a predicate receiving each item , after being
* buffered , and returns true to indicate a new buffer should start .
* The operator won ' t return an empty first or last buffer .
* < dl >
* < dt > < b > Backpressure Support : < / b > < / dt >
* < dd > This operator supports backpressure . < / dd >
* < dt > < b > Scheduler : < / b > < / dt >
* < dd > This operator does not operate by default on a particular
* { @ link Scheduler } . < / dd >
* < / dl >
* @ param < T >
* the input value type
* @ param predicate
* the Func1 that receives each item , after being buffered , and
* should return true to indicate a new buffer has to start .
* @ param capacityHint
* the expected number of items in each buffer
* @ return the new Observable instance
* @ see # bufferWhile ( Func1)
* @ since ( if this graduates from Experimental / Beta to supported , replace
* this parenthetical with the release number ) */
public static final < T > Transformer < T , List < T > > toListUntil ( Func1 < ? super T , Boolean > predicate , int capacityHint ) { } } | return bufferUntil ( predicate , capacityHint ) ; |
public class ConditionNumberFormatter { /** * 記号用の項の中で 、 指定した記号を含むかどうか 。
* @ param symbol
* @ return */
public boolean containsSymbolTerm ( Token . Symbol symbol ) { } } | for ( Term < FormattedNumber > term : terms ) { if ( ! ( term instanceof NumberTerm . SymbolTerm ) ) { continue ; } final NumberTerm . SymbolTerm symbolTerm = ( NumberTerm . SymbolTerm ) term ; if ( symbolTerm . getToken ( ) . equals ( symbol ) ) { return true ; } } return false ; |
public class RenderScopedContext { /** * Destroy the current context since Render Response has completed . */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" , "unused" } ) public void afterPhase ( final PhaseEvent event ) { if ( PhaseId . RENDER_RESPONSE . equals ( event . getPhaseId ( ) ) ) { RenderContext contextInstance = getContextInstance ( ) ; if ( contextInstance != null ) { Integer id = contextInstance . getId ( ) ; RenderContext removed = getRenderContextMap ( ) . remove ( id ) ; Map < Contextual < ? > , Object > componentInstanceMap = getComponentInstanceMap ( ) ; Map < Contextual < ? > , CreationalContext < ? > > creationalContextMap = getCreationalContextMap ( ) ; if ( ( componentInstanceMap != null ) && ( creationalContextMap != null ) ) { for ( Entry < Contextual < ? > , Object > componentEntry : componentInstanceMap . entrySet ( ) ) { Contextual contextual = componentEntry . getKey ( ) ; Object instance = componentEntry . getValue ( ) ; CreationalContext creational = creationalContextMap . get ( contextual ) ; contextual . destroy ( instance , creational ) ; } } } } |
public class NodeGroupClient { /** * Creates a NodeGroup resource in the specified project using the data included in the request .
* < p > Sample code :
* < pre > < code >
* try ( NodeGroupClient nodeGroupClient = NodeGroupClient . create ( ) ) {
* Integer initialNodeCount = 0;
* ProjectZoneName zone = ProjectZoneName . of ( " [ PROJECT ] " , " [ ZONE ] " ) ;
* NodeGroup nodeGroupResource = NodeGroup . newBuilder ( ) . build ( ) ;
* Operation response = nodeGroupClient . insertNodeGroup ( initialNodeCount , zone . toString ( ) , nodeGroupResource ) ;
* < / code > < / pre >
* @ param initialNodeCount Initial count of nodes in the node group .
* @ param zone The name of the zone for this request .
* @ param nodeGroupResource A NodeGroup resource . To create a node group , you must first create a
* node templates . To learn more about node groups and sole - tenant nodes , read the Sole - tenant
* nodes documentation . ( = = resource _ for beta . nodeGroups = = ) ( = = resource _ for v1 . nodeGroups
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation insertNodeGroup ( Integer initialNodeCount , String zone , NodeGroup nodeGroupResource ) { } } | InsertNodeGroupHttpRequest request = InsertNodeGroupHttpRequest . newBuilder ( ) . setInitialNodeCount ( initialNodeCount ) . setZone ( zone ) . setNodeGroupResource ( nodeGroupResource ) . build ( ) ; return insertNodeGroup ( request ) ; |
public class MediaFormatResolver { /** * Add on - the - fly generated media formats if required for responsive image handling
* via image sizes or picture sources .
* @ param mediaArgs Media args
* @ return true if resolution was successful */
private boolean addResponsiveImageMediaFormats ( MediaArgs mediaArgs ) { } } | Map < String , MediaFormat > additionalMediaFormats = new LinkedHashMap < > ( ) ; // check if additional on - the - fly generated media formats needs to be added for responsive image handling
if ( ! resolveForImageSizes ( mediaArgs , additionalMediaFormats ) ) { return false ; } if ( ! resolveForResponsivePictureSources ( mediaArgs , additionalMediaFormats ) ) { return false ; } // if additional media formats where found add them to the media format list in media args
if ( ! additionalMediaFormats . isEmpty ( ) ) { List < MediaFormat > allMediaFormats = new ArrayList < > ( ) ; if ( mediaArgs . getMediaFormats ( ) != null ) { allMediaFormats . addAll ( Arrays . asList ( mediaArgs . getMediaFormats ( ) ) ) ; } allMediaFormats . addAll ( additionalMediaFormats . values ( ) ) ; mediaArgs . mediaFormats ( allMediaFormats . toArray ( new MediaFormat [ allMediaFormats . size ( ) ] ) ) ; } return true ; |
public class JMS390FloatSupport { /** * To convert from S390 to IEEE we use the fomula :
* m . 16 ^ x = m . 2 ^ 4x , and then normalise by shifting the mantissa up to three
* places left */
protected static final float intS390BitsToFloat ( int floatBits ) throws IOException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "intS390BitsToFloat" , floatBits ) ; // Test the sign bit ( 0 = positive , 1 = negative )
boolean positive = ( ( floatBits & FLOAT_SIGN_MASK ) == 0 ) ; // Deal with zero straight away . . .
if ( ( floatBits & 0x7fffffff ) == 0 ) { // + or - 0.0
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "zero" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "intS390BitsToFloat" ) ; if ( positive ) { return 0.0F ; } else { return - ( 0.0F ) ; } } int mantissa = floatBits & S390_FLOAT_MANTISSA_MASK ; int exponent = floatBits & S390_FLOAT_EXPONENT_MASK ; // move the exponent into the LSB
exponent = exponent >> 24 ; // subtract the bias
exponent = exponent - S390_FLOAT_BIAS ; // caculate the IEEE exponent
int ieeeExponent = exponent * 4 ; // Normalise the mantissa
int ieeeMantissa = mantissa ; // Deal with exponents < = - FLOAT _ BIAS
if ( ieeeExponent <= - ( FLOAT_BIAS ) ) { // ieeeMantissa is one place to the right since there is no implicit bit set
ieeeMantissa = ieeeMantissa >> 1 ; // now increase the exponent until it reaches - FLOAT _ BIAS , shifting right one
// place at each stage to compensate
while ( ieeeExponent < - ( FLOAT_BIAS ) ) { ieeeExponent = ieeeExponent + 1 ; ieeeMantissa = ieeeMantissa >> 1 ; } } // Deal with exponents greater than - FLOAT _ BIAS
while ( ( ieeeMantissa != 0 ) && ( ( ieeeMantissa & FLOAT_MANTISSA_MSB_MASK ) == 0 ) && ( ieeeExponent > - ( FLOAT_BIAS ) ) ) { ieeeMantissa = ieeeMantissa << 1 ; ieeeExponent = ieeeExponent - 1 ; } // s390 has a wider range than IEEE , so deal with over and underflows
if ( ieeeExponent < - 149 ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "underflow, returning zero" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "intS390BitsToFloat" , 0.0F ) ; return 0.0F ; // underflow
} else if ( ieeeExponent > 128 ) { if ( positive ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "overflow, returning +INFINITY" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "intS390BitsToFloat" , "+infinity" ) ; return ( Float . MAX_VALUE * 2 ) ; // + infinity
} else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "overflow, returning -INFINITY" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "intS390BitsToFloat" , "-infinity" ) ; return - ( Float . MAX_VALUE * 2 ) ; // - infinity
} } // Build the IEEE float
int ieeeBits = 0 ; if ( ! positive ) { ieeeBits = ieeeBits | FLOAT_SIGN_MASK ; } // add the bias to the exponent
ieeeExponent = ieeeExponent + FLOAT_BIAS ; // move it to the IEEE exponent position
ieeeExponent = ieeeExponent << 23 ; // add to the result
ieeeBits = ieeeBits | ieeeExponent ; // mask the top bit of the mantissa ( implicit in IEEE )
ieeeMantissa = ieeeMantissa & FLOAT_MANTISSA_MASK ; // add to the result
ieeeBits = ieeeBits | ieeeMantissa ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "IEEE Bit pattern = " + Integer . toString ( ieeeBits , 16 ) ) ; float result = Float . intBitsToFloat ( ieeeBits ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "intS390BitsToFloat" , result ) ; return result ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link WindowType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link WindowType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/tunnel/2.0" , name = "Window" , substitutionHeadNamespace = "http://www.opengis.net/citygml/tunnel/2.0" , substitutionHeadName = "_Opening" ) public JAXBElement < WindowType > createWindow ( WindowType value ) { } } | return new JAXBElement < WindowType > ( _Window_QNAME , WindowType . class , null , value ) ; |
public class Stack { /** * Removes the object at the top of this stack and returns that
* object as the value of this function .
* @ return The object at the top of this stack ( the last item
* of the < tt > Vector < / tt > object ) .
* @ throws EmptyStackException if this stack is empty . */
public synchronized E pop ( ) { } } | E obj ; int len = size ( ) ; obj = peek ( ) ; removeElementAt ( len - 1 ) ; return obj ; |
public class CommercePaymentMethodGroupRelLocalServiceBaseImpl { /** * Adds the commerce payment method group rel to the database . Also notifies the appropriate model listeners .
* @ param commercePaymentMethodGroupRel the commerce payment method group rel
* @ return the commerce payment method group rel that was added */
@ Indexable ( type = IndexableType . REINDEX ) @ Override public CommercePaymentMethodGroupRel addCommercePaymentMethodGroupRel ( CommercePaymentMethodGroupRel commercePaymentMethodGroupRel ) { } } | commercePaymentMethodGroupRel . setNew ( true ) ; return commercePaymentMethodGroupRelPersistence . update ( commercePaymentMethodGroupRel ) ; |
public class PrecisionScaleRoundedOperator { /** * Creates the rounded Operator from scale and roundingMode
* @ param mathContext the math context , not null .
* @ return the { @ link MonetaryOperator } using the scale and { @ link RoundingMode } used in parameter
* @ throws NullPointerException when the { @ link MathContext } is null
* @ throws IllegalArgumentException if { @ link MathContext # getPrecision ( ) } is lesser than zero
* @ throws IllegalArgumentException if { @ link MathContext # getRoundingMode ( ) } is { @ link RoundingMode # UNNECESSARY }
* @ see RoundingMode */
public static PrecisionScaleRoundedOperator of ( int scale , MathContext mathContext ) { } } | requireNonNull ( mathContext ) ; if ( RoundingMode . UNNECESSARY . equals ( mathContext . getRoundingMode ( ) ) ) { throw new IllegalArgumentException ( "To create the ScaleRoundedOperator you cannot use the RoundingMode.UNNECESSARY on MathContext" ) ; } if ( mathContext . getPrecision ( ) <= 0 ) { throw new IllegalArgumentException ( "To create the ScaleRoundedOperator you cannot use the zero precision on MathContext" ) ; } return new PrecisionScaleRoundedOperator ( scale , mathContext ) ; |
public class IntermediateCatchEventActivityBehavior { /** * Specific leave method for intermediate events : does a normal leave ( ) , except
* when behind an event based gateway . In that case , the other events are cancelled
* ( we ' re only supporting the exclusive event based gateway type currently ) .
* and the process instance is continued through the triggered event . */
public void leaveIntermediateCatchEvent ( DelegateExecution execution ) { } } | EventGateway eventGateway = getPrecedingEventBasedGateway ( execution ) ; if ( eventGateway != null ) { deleteOtherEventsRelatedToEventBasedGateway ( execution , eventGateway ) ; } leave ( execution ) ; // Normal leave |
public class DurationEvaluators { /** * Evaluate the duration of given action on a given element .
* @ param mo the model to consider
* @ param a the action ' class
* @ param e the element identifier
* @ return a positive number if the evaluation succeeded . A negative number otherwise */
public int evaluate ( Model mo , Class < ? extends Action > a , Element e ) throws SchedulerException { } } | ActionDurationEvaluator < Element > ev = durations . get ( a ) ; if ( ev == null ) { throw new SchedulerModelingException ( null , "Unable to estimate the duration of action '" + a . getSimpleName ( ) + "' related to '" + e + "'" ) ; } int d = ev . evaluate ( mo , e ) ; if ( d <= 0 ) { throw new SchedulerModelingException ( null , "The duration for action " + a . getSimpleName ( ) + " over '" + e + "' has been evaluated to a negative value (" + d + "). Unsupported" ) ; } return d ; |
public class ComputerSet {

    /**
     * Really creates a new agent (node) from the submitted form and redirects
     * back to the agent list page.
     *
     * @param req the Stapler request carrying the submitted form
     * @param rsp the Stapler response used for the redirect
     * @param name the requested node name (trimmed; empty becomes null)
     * @param type key used to look up the {@code NodeDescriptor} that instantiates the node
     */
    @RequirePOST
    public synchronized void doDoCreateItem(StaplerRequest req, StaplerResponse rsp, @QueryParameter String name, @QueryParameter String type) throws IOException, ServletException, FormException {
        // NOTE(review): Jenkins.getInstance() can return null during startup/shutdown — TODO confirm NPE risk here.
        final Jenkins app = Jenkins.getInstance();
        app.checkPermission(Computer.CREATE);
        String fixedName = Util.fixEmptyAndTrim(name);
        checkName(fixedName); // rejects blank/illegal/duplicate names
        JSONObject formData = req.getSubmittedForm();
        formData.put("name", fixedName);
        // TODO type is probably NodeDescriptor.id but confirm
        // NOTE(review): find(type) returns null for an unknown type — would NPE on newInstance; verify callers.
        Node result = NodeDescriptor.all().find(type).newInstance(req, formData);
        app.addNode(result);
        // take the user back to the agent list top page
        rsp.sendRedirect2(".");
    }
}
public class MetadataCache {

    /**
     * Finish the process of copying a list of tracks to a metadata cache, once they have been listed.
     * This code is shared between the implementations that work with the full track list and with
     * playlists, and invoked by the {@link MetadataFinder}.
     *
     * Writes a zip file containing a format entry, a details entry, and one entry per unique track.
     * Duplicate track ids (possible in playlists) are copied only once. The listener, if supplied,
     * can cancel the operation between tracks, in which case the partial cache file is deleted.
     *
     * @param trackListEntries the list of menu items identifying which tracks need to be copied
     * @param playlistId the id of playlist being cached, or 0 if all tracks are being cached
     * @param client the connection to the dbserver on the player whose metadata is being cached
     * @param slot the slot in which the media to be cached can be found
     * @param cache the file into which the metadata cache should be written
     * @param listener informed after each track is added; may cancel the process
     * @throws IOException if there is a problem communicating with the player or writing the cache file
     * @throws TimeoutException if we are unable to lock the client for menu operations
     */
    static void copyTracksToCache(List<Message> trackListEntries, int playlistId, Client client, SlotReference slot, File cache, MetadataCacheCreationListener listener) throws IOException, TimeoutException {
        FileOutputStream fos = null;
        BufferedOutputStream bos = null;
        ZipOutputStream zos = null;
        WritableByteChannel channel = null;
        final Set<Integer> tracksAdded = new HashSet<Integer>();   // track ids already written, to skip duplicates
        final Set<Integer> artworkAdded = new HashSet<Integer>();  // artwork ids already written, shared across tracks
        try {
            fos = new FileOutputStream(cache);
            bos = new BufferedOutputStream(fos);
            zos = new ZipOutputStream(bos);
            zos.setMethod(ZipOutputStream.DEFLATED);
            addCacheFormatEntry(trackListEntries, playlistId, zos);
            channel = Channels.newChannel(zos);
            addCacheDetailsEntry(slot, zos, channel);
            // Write the actual metadata entries
            final int totalToCopy = trackListEntries.size();
            TrackMetadata lastTrackAdded = null;
            int tracksCopied = 0;
            for (Message entry : trackListEntries) {
                if (entry.getMenuItemType() == Message.MenuItemType.UNKNOWN) {
                    logger.warn("Encountered unrecognized track list entry item type: {}", entry);
                }
                int rekordboxId = (int) ((NumberField) entry.arguments.get(1)).getValue();
                if (!tracksAdded.contains(rekordboxId)) {
                    // Ignore extra copies of a track present on a playlist.
                    lastTrackAdded = copyTrackToCache(client, slot, zos, channel, artworkAdded, rekordboxId);
                    tracksAdded.add(rekordboxId);
                }
                if (listener != null) {
                    // Progress counter advances even for skipped duplicates, so it reaches totalToCopy.
                    if (!listener.cacheCreationContinuing(lastTrackAdded, ++tracksCopied, totalToCopy)) {
                        logger.info("Track metadata cache creation canceled by listener");
                        if (!cache.delete()) {
                            logger.warn("Unable to delete metadata cache file, {}", cache);
                        }
                        return;
                    }
                }
                // Pause between tracks to avoid overwhelming the player's dbserver.
                Thread.sleep(getCachePauseInterval());
            }
        } catch (InterruptedException e) {
            // Treat interruption as cancellation: discard the partial file.
            logger.warn("Interrupted while building metadata cache file, aborting", e);
            if (!cache.delete()) {
                logger.warn("Unable to delete metadata cache file, {}", cache);
            }
        } finally {
            // Close each layer independently so a failure in one does not leak the others.
            try {
                if (channel != null) {
                    channel.close();
                }
            } catch (Exception e) {
                logger.error("Problem closing byte channel for writing to metadata cache", e);
            }
            try {
                if (zos != null) {
                    zos.close();
                }
            } catch (Exception e) {
                logger.error("Problem closing Zip Output Stream of metadata cache", e);
            }
            try {
                if (bos != null) {
                    bos.close();
                }
            } catch (Exception e) {
                logger.error("Problem closing Buffered Output Stream of metadata cache", e);
            }
            try {
                if (fos != null) {
                    fos.close();
                }
            } catch (Exception e) {
                logger.error("Problem closing File Output Stream of metadata cache", e);
            }
        }
    }
}
public class UvaStdImgTripleGenerator_1 { /** * { @ inheritDoc } */
public Set < Triple > getTriplesForObject ( DOReader reader ) throws ResourceIndexException { } } | try { Triple triple = new SimpleTriple ( new SimpleURIReference ( new URI ( Constants . FEDORA . uri . concat ( reader . GetObjectPID ( ) ) ) ) , new SimpleURIReference ( PREDICATE ) , new SimpleLiteral ( "true" ) ) ; return Collections . singleton ( triple ) ; } catch ( ServerException e ) { throw new ResourceIndexException ( e . getLocalizedMessage ( ) , e ) ; } catch ( URISyntaxException e ) { throw new ResourceIndexException ( e . getLocalizedMessage ( ) , e ) ; } |
public class CommerceWarehouseItemPersistenceImpl {

    /**
     * Returns all the commerce warehouse items where CProductId = &#63; and CPInstanceUuid = &#63;.
     *
     * Convenience overload that delegates to the ranged finder with no pagination
     * bounds (QueryUtil.ALL_POS) and no ordering comparator.
     *
     * @param CProductId the c product ID
     * @param CPInstanceUuid the cp instance uuid
     * @return the matching commerce warehouse items
     */
    @Override
    public List<CommerceWarehouseItem> findByCPI_CPIU(long CProductId, String CPInstanceUuid) {
        return findByCPI_CPIU(CProductId, CPInstanceUuid, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class CompositeIdentifierGenerator { /** * Create a new < code > CompositeIdentifierGenerator < / code > that concatenates
* the results of the provided collection of generators . Order is
* determined by the < code > iterator ( ) < / code > method on the collection .
* @ param generators a collection of string identifier generators to
* concatenate
* @ return the composite identifier generator
* @ throws IllegalArgumentException if the generators collection is null ,
* empty , or contains nulls */
public static StringIdentifierGenerator getInstance ( Collection generators ) { } } | if ( generators == null ) { throw new IllegalArgumentException ( "Generator collection must not be null" ) ; } if ( generators . size ( ) == 0 ) { throw new IllegalArgumentException ( "Generator collection must not be empty" ) ; } StringIdentifierGenerator [ ] generatorsCopy = new StringIdentifierGenerator [ generators . size ( ) ] ; int i = 0 ; Iterator it = generators . iterator ( ) ; while ( it . hasNext ( ) ) { generatorsCopy [ i ] = ( StringIdentifierGenerator ) it . next ( ) ; if ( generatorsCopy [ i ] == null ) { throw new IllegalArgumentException ( "Generators must not be null" ) ; } i ++ ; } return new CompositeIdentifierGenerator ( generatorsCopy ) ; |
public class SOABaseDialog {

    /**
     * Extends the inherited left-hand button row with the "show context" button.
     *
     * @return the parent's left-hand buttons plus the show-context button appended at the end
     * @throws Exception propagated from the superclass or button construction
     */
    @Override
    protected List<JButton> getButtonsLefthand() throws Exception {
        List<JButton> lhb = super.getButtonsLefthand();
        lhb.add(getButtonShowContext());
        return lhb;
    }
}
public class Zendesk {

    /**
     * Create upload article with inline false.
     *
     * Convenience overload delegating to the three-argument variant with
     * {@code inline = false} (attachment is offered as a download, not inlined).
     *
     * @param articleId id of the article to attach to
     * @param file the file to upload
     * @return the created article attachments
     * @throws IOException if reading the file or the upload fails
     */
    public ArticleAttachments createUploadArticle(long articleId, File file) throws IOException {
        return createUploadArticle(articleId, file, false);
    }
}
public class DefaultGroovyMethods {

    /**
     * Returns an iterator equivalent to this iterator but with all duplicated items
     * removed where duplicate (equal) items are deduced by calling the supplied Closure condition.
     *
     * If the supplied Closure takes a single parameter, the argument passed will be each element,
     * and the closure should return a value used for comparison (either using
     * {@link java.lang.Comparable#compareTo(java.lang.Object)} or {@link java.lang.Object#equals(java.lang.Object)}).
     * If the closure takes two parameters, two items from the Iterator
     * will be passed as arguments, and the closure should return an
     * int value (with 0 indicating the items are not unique).
     * <pre class="groovyTestCase">
     * def items = "Hello".toList() + [null, null] + "there".toList()
     * def toLower = { it == null ? null : it.toLowerCase() }
     * def noDups = items.iterator().toUnique(toLower).toList()
     * assert noDups == ['H', 'e', 'l', 'o', null, 't', 'r']
     * </pre>
     * <pre class="groovyTestCase">assert [1,4] == [1,3,4,5].toUnique { it % 2 }</pre>
     * <pre class="groovyTestCase">assert [2,3,4] == [2,3,3,4].toUnique { a, b -> a <=> b }</pre>
     *
     * @param self an Iterator
     * @param condition a Closure used to determine unique items
     * @return an Iterator with no duplicate items
     * @since 2.4.0
     */
    public static <T> Iterator<T> toUnique(Iterator<T> self, @ClosureParams(value = FromString.class, options = {"T", "T,T"}) Closure condition) {
        // Arity decides the strategy: 1-arg closures act as a key extractor (OrderBy),
        // 2-arg closures act as a direct comparator (ClosureComparator).
        return new UniqueIterator<T>(self, condition.getMaximumNumberOfParameters() == 1
                ? new OrderBy<T>(condition, true)
                : new ClosureComparator<T>(condition));
    }
}
public class CommonSteps {

    /**
     * Click on html element if all 'expected' parameters equal 'actual' parameters in conditions.
     *
     * @param page
     *            The concerned page of toClick
     * @param toClick
     *            html element key (looked up on the page as "-" + toClick).
     * @param conditions
     *            list of 'expected' values condition and 'actual' values
     *            ({@link com.github.noraui.gherkin.GherkinStepCondition}); evaluated by the
     *            {@code @Conditioned} aspect before this body runs.
     * @throws TechnicalException
     *             is thrown if you have a technical error (format, configuration, data, ...) in NoraUi.
     *             Exception with {@value com.github.noraui.utils.Messages#FAIL_MESSAGE_UNABLE_TO_OPEN_ON_CLICK} message (with screenshot, no exception)
     * @throws FailureException
     *             if the scenario encounters a functional error
     */
    @Conditioned
    @Quand("Je clique sur '(.*)-(.*)'[\\.|\\?]")
    @When("I click on '(.*)-(.*)'[\\.|\\?]")
    public void clickOn(String page, String toClick, List<GherkinStepCondition> conditions) throws TechnicalException, FailureException {
        logger.debug("{} clickOn: {}", page, toClick);
        clickOn(Page.getInstance(page).getPageElementByKey('-' + toClick));
    }
}
public class PolicyNodeImpl { /** * Returns all nodes at the specified depth in the tree .
* @ param depth an int representing the depth of the desired nodes
* @ return a < code > Set < / code > of all nodes at the specified depth */
Set < PolicyNodeImpl > getPolicyNodes ( int depth ) { } } | Set < PolicyNodeImpl > set = new HashSet < > ( ) ; getPolicyNodes ( depth , set ) ; return set ; |
public class Ifc4FactoryImpl {

    /**
     * Converts the literal {@code initialValue} into the corresponding
     * {@link IfcVoidingFeatureTypeEnum} constant.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public IfcVoidingFeatureTypeEnum createIfcVoidingFeatureTypeEnumFromString(EDataType eDataType, String initialValue) {
        IfcVoidingFeatureTypeEnum result = IfcVoidingFeatureTypeEnum.get(initialValue);
        // get() returns null for unknown literals; surface that as an explicit error.
        if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class JobApi { /** * Download the job artifacts file for the specified job ID . The artifacts file will be saved in the
* specified directory with the following name pattern : job - { jobid } - artifacts . zip . If the file already
* exists in the directory it will be overwritten .
* < pre > < code > GitLab Endpoint : GET / projects / : id / jobs / : job _ id / artifacts < / code > < / pre >
* @ param projectIdOrPath id , path of the project , or a Project instance holding the project ID or path
* @ param jobId the job ID to get the artifacts for
* @ param directory the File instance of the directory to save the file to , if null will use " java . io . tmpdir "
* @ return a File instance pointing to the download of the specified job artifacts file
* @ throws GitLabApiException if any exception occurs */
public File downloadArtifactsFile ( Object projectIdOrPath , Integer jobId , File directory ) throws GitLabApiException { } } | Response response = getWithAccepts ( Response . Status . OK , null , MediaType . MEDIA_TYPE_WILDCARD , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "jobs" , jobId , "artifacts" ) ; try { if ( directory == null ) directory = new File ( System . getProperty ( "java.io.tmpdir" ) ) ; String filename = "job-" + jobId + "-artifacts.zip" ; File file = new File ( directory , filename ) ; InputStream in = response . readEntity ( InputStream . class ) ; Files . copy ( in , file . toPath ( ) , StandardCopyOption . REPLACE_EXISTING ) ; return ( file ) ; } catch ( IOException ioe ) { throw new GitLabApiException ( ioe ) ; } |
public class PlatformDependent0 { /** * Package - private for testing only */
static int majorVersion ( final String javaSpecVersion ) { } } | final String [ ] components = javaSpecVersion . split ( "\\." ) ; final int [ ] version = new int [ components . length ] ; for ( int i = 0 ; i < components . length ; i ++ ) { version [ i ] = Integer . parseInt ( components [ i ] ) ; } if ( version [ 0 ] == 1 ) { assert version [ 1 ] >= 6 ; return version [ 1 ] ; } else { return version [ 0 ] ; } |
public class XmlRecord {

    /**
     * Add this field in the Record's field sequence.
     *
     * Only the main field is materialized here (an "ID" counter field);
     * any other sequence number yields {@code null}.
     *
     * @param iFieldSeq position of the field in the record's sequence
     * @return the newly created field for {@code MAIN_FIELD}, otherwise {@code null}
     */
    public BaseField setupField(int iFieldSeq) {
        BaseField field = null;
        if (iFieldSeq == DBConstants.MAIN_FIELD)
            field = new CounterField(this, "ID", Constants.DEFAULT_FIELD_LENGTH, null, null);
        return field;
    }
}
public class EnhancerHelper {

    /**
     * Creates a new dirtyableDBObject Enhancer for the given class. If the class already implements
     * {@link DirtyableDBObject}, then a special "no-op" enhancer will be returned that
     * doesn't do any special enhancement. Otherwise, a byte-code enhancer is returned.
     *
     * Results are cached per base class; the cache is consulted once without
     * synchronization for the fast path, then re-checked inside the lock before creating.
     *
     * @param baseClass class for which to create an enhancer
     * @return new enhancer
     */
    @SuppressWarnings({"unchecked"})
    public static <T> Enhancer<T> getDirtyableDBObjectEnhancer(Class<T> baseClass) {
        // Fast path: unsynchronized cache hit.
        if (dirtyableDBObjectEnhancers.containsKey(baseClass)) {
            return (Enhancer<T>) dirtyableDBObjectEnhancers.get(baseClass);
        }
        synchronized (dirtyableDBObjectEnhancers) {
            Enhancer<T> enhancer;
            // Re-check under the lock: another thread may have populated the entry meanwhile.
            if (dirtyableDBObjectEnhancers.get(baseClass) != null) {
                enhancer = (Enhancer<T>) dirtyableDBObjectEnhancers.get(baseClass);
            } else if (DirtyableDBObject.class.isAssignableFrom(baseClass)) {
                // Already dirty-tracking: no byte-code work needed.
                enhancer = new NoOpEnhancer<T>(baseClass);
                dirtyableDBObjectEnhancers.put(baseClass, enhancer);
            } else {
                enhancer = new EntityVelocityEnhancer<T>(baseClass) {
                    // Implementation - Enhancer
                    @Override
                    public boolean needsEnhancement(Object object) {
                        return object != null && !(object instanceof DirtyableDBObject);
                    }

                    // Implementation - VelocityEnhancer
                    @Override
                    protected String getTemplateLocation() {
                        return "org/iternine/jeppetto/dao/mongodb/enhance/dirtyableDBObject.vm";
                    }
                };
                dirtyableDBObjectEnhancers.put(baseClass, enhancer);
            }
            return enhancer;
        }
    }
}
public class ZipFileSliceReader { /** * Get a short from a byte array .
* @ param arr
* the byte array
* @ param off
* the offset to start reading from
* @ return the short
* @ throws IndexOutOfBoundsException
* the index out of bounds exception */
static int getShort ( final byte [ ] arr , final long off ) throws IndexOutOfBoundsException { } } | final int ioff = ( int ) off ; if ( ioff < 0 || ioff > arr . length - 2 ) { throw new IndexOutOfBoundsException ( ) ; } return ( ( arr [ ioff + 1 ] & 0xff ) << 8 ) | ( arr [ ioff ] & 0xff ) ; |
public class HourRanges {

    /**
     * Determines if this instance is "open" at the given time range.<br>
     * <br>
     * It is only allowed to call this method if the hour ranges represent only one day. This means a value like '18:00-03:00' will lead
     * to an error. To avoid this, call the {@link #normalize()} function before this one and pass the result per day as an argument to
     * this method.
     *
     * @param range
     *            Time range to verify.
     * @return {@literal true} if open else {@literal false} if not open.
     */
    public final boolean openAt(@NotNull final HourRange range) {
        Contract.requireArgNotNull("range", range);
        ensureSingleDayOnly("this", this);
        // Open iff every minute of the requested range is contained in this
        // instance's minutes: AND-ing with our bitset must leave the range unchanged.
        final BitSet original = range.toMinutes();
        final BitSet anded = range.toMinutes();
        anded.and(this.toMinutes());
        return anded.equals(original);
    }
}
public class GetLifecyclePolicyPreviewRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * Each request field is written through its protocol binding; any failure is
     * wrapped in an {@link SdkClientException}.
     *
     * @param getLifecyclePolicyPreviewRequest the request to serialize; must not be null
     * @param protocolMarshaller the protocol-level marshaller receiving each field
     */
    public void marshall(GetLifecyclePolicyPreviewRequest getLifecyclePolicyPreviewRequest, ProtocolMarshaller protocolMarshaller) {
        if (getLifecyclePolicyPreviewRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getLifecyclePolicyPreviewRequest.getRegistryId(), REGISTRYID_BINDING);
            protocolMarshaller.marshall(getLifecyclePolicyPreviewRequest.getRepositoryName(), REPOSITORYNAME_BINDING);
            protocolMarshaller.marshall(getLifecyclePolicyPreviewRequest.getImageIds(), IMAGEIDS_BINDING);
            protocolMarshaller.marshall(getLifecyclePolicyPreviewRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(getLifecyclePolicyPreviewRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(getLifecyclePolicyPreviewRequest.getFilter(), FILTER_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ResourcesInner {

    /**
     * Deletes a resource.
     *
     * Blocking wrapper over the async service call: waits for the single response
     * and discards the (void) body.
     *
     * @param resourceGroupName The name of the resource group that contains the resource to delete. The name is case insensitive.
     * @param resourceProviderNamespace The namespace of the resource provider.
     * @param parentResourcePath The parent resource identity.
     * @param resourceType The resource type.
     * @param resourceName The name of the resource to delete.
     * @param apiVersion The API version to use for the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String resourceProviderNamespace, String parentResourcePath, String resourceType, String resourceName, String apiVersion) {
        beginDeleteWithServiceResponseAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, apiVersion).toBlocking().single().body();
    }
}
public class AbstractConcurrentSet { /** * Inserts a new element at the head of the set .
* Note : This method is expected to be synchronized by the calling code */
private boolean insert ( T element ) { } } | if ( ! entries . containsKey ( element ) ) { head = createEntry ( element , head ) ; entries . put ( element , head ) ; return true ; } return false ; |
public class MPPTimephasedBaselineCostNormaliser {

    /**
     * This method merges together assignment data for the same day.
     *
     * Walks the list in order; whenever two consecutive assignments start on the
     * same calendar day, they are collapsed into one assignment spanning both and
     * carrying the summed cost. The input list is replaced in place with the
     * merged result.
     *
     * @param list assignment data, modified in place
     */
    private void mergeSameDay(LinkedList<TimephasedCost> list) {
        LinkedList<TimephasedCost> result = new LinkedList<TimephasedCost>();
        TimephasedCost previousAssignment = null;
        for (TimephasedCost assignment : list) {
            if (previousAssignment == null) {
                // First entry: nothing to merge with yet.
                assignment.setAmountPerDay(assignment.getTotalAmount());
                result.add(assignment);
            } else {
                Date previousAssignmentStart = previousAssignment.getStart();
                Date previousAssignmentStartDay = DateHelper.getDayStartDate(previousAssignmentStart);
                Date assignmentStart = assignment.getStart();
                Date assignmentStartDay = DateHelper.getDayStartDate(assignmentStart);
                if (previousAssignmentStartDay.getTime() == assignmentStartDay.getTime()) {
                    // Same day: replace the previous entry with a merged one that
                    // spans previous.start .. current.finish and sums the costs.
                    result.removeLast();
                    double cost = previousAssignment.getTotalAmount().doubleValue();
                    cost += assignment.getTotalAmount().doubleValue();
                    TimephasedCost merged = new TimephasedCost();
                    merged.setStart(previousAssignment.getStart());
                    merged.setFinish(assignment.getFinish());
                    merged.setTotalAmount(Double.valueOf(cost));
                    assignment = merged;
                }
                assignment.setAmountPerDay(assignment.getTotalAmount());
                result.add(assignment);
            }
            // NOTE: tracks the (possibly merged) entry so chains of same-day items keep collapsing.
            previousAssignment = assignment;
        }
        list.clear();
        list.addAll(result);
    }
}
public class MapEntryLite {

    /**
     * Compute serialized size of one map entry: the encoded size of the key
     * field plus the encoded size of the value field.
     *
     * @param <K> the key type
     * @param <V> the value type
     * @param metadata the metadata describing key/value wire types
     * @param key the key
     * @param value the value
     * @return the total serialized size in bytes
     */
    static <K, V> int computeSerializedSize(Metadata<K, V> metadata, K key, V value) {
        return CodedConstant.computeElementSize(metadata.keyType, KEY_FIELD_NUMBER, key)
                + CodedConstant.computeElementSize(metadata.valueType, VALUE_FIELD_NUMBER, value);
    }
}
public class A_CmsEditSearchIndexDialog {

    /**
     * Creates a "dummy" search index that is not linked to the search manager and has
     * a <code>null</code> name property that will be used for being filled with
     * the widget bean technology.<p>
     *
     * The index defaults to English locale, the "Online" project and automatic
     * rebuild mode, and references the first available index source (creating a
     * dummy source if none exist).
     *
     * @return a "dummy" search index that is not linked to the search manager and has
     *         a <code>null</code> name property that will be used for being filled with
     *         the widget bean technology
     */
    private CmsSearchIndex createDummySearchIndex() {
        CmsSearchIndex result = new CmsSearchIndex();
        result.setLocale(Locale.ENGLISH);
        result.setProject("Online");
        result.setRebuildMode("auto");
        // find default source
        Map<String, CmsSearchIndexSource> sources = m_searchManager.getSearchIndexSources();
        if (sources.isEmpty()) {
            // No configured sources: fabricate one so the index has something to reference.
            CmsSearchIndexSource source = createDummyIndexSource();
            sources.put(source.getName(), source);
        }
        result.addSourceName(sources.keySet().iterator().next());
        return result;
    }
}
public class CommerceCountryModelImpl { /** * Converts the soap model instances into normal model instances .
* @ param soapModels the soap model instances to convert
* @ return the normal model instances */
public static List < CommerceCountry > toModels ( CommerceCountrySoap [ ] soapModels ) { } } | if ( soapModels == null ) { return null ; } List < CommerceCountry > models = new ArrayList < CommerceCountry > ( soapModels . length ) ; for ( CommerceCountrySoap soapModel : soapModels ) { models . add ( toModel ( soapModel ) ) ; } return models ; |
public class CBADao {

    /**
     * PERF: Compute the CBA directly in the database (faster than re-constructing all invoices).
     *
     * @param entitySqlDaoWrapperFactory factory used to obtain the invoice-item SQL DAO
     * @param context tenant context scoping the account lookup
     * @return the account's credit balance adjustment as computed by the database
     */
    public BigDecimal getAccountCBAFromTransaction(final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
        final InvoiceItemSqlDao invoiceItemSqlDao = entitySqlDaoWrapperFactory.become(InvoiceItemSqlDao.class);
        return invoiceItemSqlDao.getAccountCBA(context);
    }
}
public class RobotiumUtils {

    /**
     * Orders Views by their location on-screen, in place.
     *
     * @param views The views to sort
     * @param yAxisFirst Whether the y-axis should be compared before the x-axis
     * @see ViewLocationComparator
     */
    public static void sortViewsByLocationOnScreen(List<? extends View> views, boolean yAxisFirst) {
        Collections.sort(views, new ViewLocationComparator(yAxisFirst));
    }
}
public class ThymeleafTemplateCollector {

    /**
     * Stops the collector. This method clears all registered
     * {@link org.wisdom.api.templates.Template} services.
     *
     * Unregistration failures are deliberately swallowed: a registration may
     * already be gone when the bundle is shutting down.
     */
    @Invalidate
    public void stop() {
        for (ServiceRegistration<Template> reg : registrations.values()) {
            try {
                reg.unregister();
            } catch (Exception e) { // NOSONAR
                // Ignore it.
            }
        }
        registrations.clear();
    }
}
public class W3CSchemaFactory {

    /**
     * Non-public methods
     *
     * Parses the given source into a W3C schema grammar, collecting parse errors
     * through a dedicated controller so they can be surfaced in the exception message.
     *
     * @param src the schema document to parse
     * @param sysRef system reference used only for error reporting
     * @return the loaded schema wrapper
     * @throws XMLStreamException if the grammar cannot be parsed
     */
    @Override
    protected XMLValidationSchema loadSchema(InputSource src, Object sysRef) throws XMLStreamException {
        /* 26-Oct-2007, TSa: Are sax parser factories safe to share?
         *   If not, should just create new instances for each
         *   parsed schema.
         */
        SAXParserFactory saxFactory = getSaxFactory();
        MyGrammarController ctrl = new MyGrammarController();
        XMLSchemaGrammar grammar = XMLSchemaReader.parse(src, saxFactory, ctrl);
        if (grammar == null) {
            // Parse failed: include any collected error message in the exception.
            String msg = "Failed to load W3C Schema from '" + sysRef + "'";
            String emsg = ctrl.mErrorMsg;
            if (emsg != null) {
                msg = msg + ": " + emsg;
            }
            throw new XMLStreamException(msg);
        }
        return new W3CSchema(grammar);
    }
}
import java . util . List ; class CalculateSumInRange { /** * A Java function to calculate the sum of elements in list for a specific index range .
* Examples :
* calculateSumInRange ( Arrays . asList ( 2 , 1 , 5 , 6 , 8 , 3 , 4 , 9 , 10 , 11 , 8 , 12 ) , 8 , 10 ) - > 29
* calculateSumInRange ( Arrays . asList ( 1 , 2 , 3 , 4 , 5 ) , 1 , 2 ) - > 5
* calculateSumInRange ( Arrays . asList ( 1 , 0 , 1 , 2 , 5 , 6 ) , 4 , 5 ) - > 11
* Args :
* numbers ( List & lt ; Integer & gt ; ) : A list of integers
* start ( int ) : Starting index for the range
* end ( int ) : Ending index for the range
* Returns :
* Total sum of the elements in the specified range */
public static int calculateSumInRange ( List < Integer > numbers , int start , int end ) { } } | int total = 0 ; for ( int index = start ; index <= end ; index ++ ) { total += numbers . get ( index ) ; } return total ; |
public class PaginationToken {

    /**
     * Generate a PageMetadata object for the page represented by the specified pagination token.
     *
     * @param paginationToken opaque pagination token
     * @param initialParameters the initial view query parameters (i.e. for the page 1 request).
     * @param <K> the view key type
     * @param <V> the view value type
     * @return PageMetadata object for the given page
     */
    static <K, V> PageMetadata<K, V> mergeTokenAndQueryParameters(String paginationToken, final ViewQueryParameters<K, V> initialParameters) {
        // Decode the base64 token into JSON
        String json = new String(Base64.decodeBase64(paginationToken), Charset.forName("UTF-8"));
        // Get a suitable Gson, we need any adapter registered for the K key type
        Gson paginationTokenGson = getGsonWithKeyAdapter(initialParameters);
        // Deserialize the pagination token JSON, using the appropriate K, V types
        PaginationToken token = paginationTokenGson.fromJson(json, PaginationToken.class);
        // Create new query parameters using the initial ViewQueryParameters as a starting point.
        ViewQueryParameters<K, V> tokenPageParameters = initialParameters.copy();
        // Merge the values from the token into the new query parameters
        tokenPageParameters.descending = token.descending;
        tokenPageParameters.endkey = token.endkey;
        tokenPageParameters.endkey_docid = token.endkey_docid;
        tokenPageParameters.inclusive_end = token.inclusive_end;
        tokenPageParameters.startkey = token.startkey;
        tokenPageParameters.startkey_docid = token.startkey_docid;
        return new PageMetadata<K, V>(token.direction, token.pageNumber, tokenPageParameters);
    }
}
public class Ifc2x3tc1PackageImpl {

    /**
     * Lazily resolves the {@code IfcNullStyleEnum} EEnum from the registered
     * package (classifier index 860) and caches it.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcNullStyleEnum() {
        if (ifcNullStyleEnumEEnum == null) {
            // First access: look the classifier up in the global EPackage registry.
            ifcNullStyleEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(860);
        }
        return ifcNullStyleEnumEEnum;
    }
}
public class LObjIntFltPredicateBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */
@ Nonnull public static < T > LObjIntFltPredicateBuilder < T > objIntFltPredicate ( Consumer < LObjIntFltPredicate < T > > consumer ) { } } | return new LObjIntFltPredicateBuilder ( consumer ) ; |
public class _SharedRendererUtils {

    /**
     * This method is different in the two versions of _SharedRendererUtils.
     *
     * Logs the message and exception through the container's external context log.
     *
     * @param context current faces context providing the external context
     * @param msg message to log
     * @param e associated exception
     */
    private static void log(FacesContext context, String msg, Exception e) {
        context.getExternalContext().log(msg, e);
    }
}
public class StreamAPI {

    /**
     * Returns the stream for the app. Is identical to the global stream, but
     * only returns objects in the app.
     *
     * NOTE(review): the javadoc mentions dateFrom/dateTo parameters but this
     * overload does not accept them — it always passes null for both; confirm
     * whether a date-filtered overload was intended.
     *
     * @param appId id of the app whose stream is requested
     * @param limit How many objects should be returned, defaults to 10
     * @param offset How far should the objects be offset, defaults to 0
     * @return The list of stream objects
     */
    public List<StreamObjectV2> getAppStream(int appId, Integer limit, Integer offset) {
        return getStreamV2("/stream/app/" + appId + "/", limit, offset, null, null);
    }
}
public class Matrix { /** * Set a submatrix .
* @ param r Array of row indices .
* @ param c Array of column indices .
* @ param X A ( r ( : ) , c ( : ) )
* @ throws ArrayIndexOutOfBoundsException Submatrix indices */
public void setMatrix ( int [ ] r , int [ ] c , Matrix X ) { } } | try { for ( int i = 0 ; i < r . length ; i ++ ) { for ( int j = 0 ; j < c . length ; j ++ ) { A [ r [ i ] ] [ c [ j ] ] = X . get ( i , j ) ; } } } catch ( ArrayIndexOutOfBoundsException e ) { throw new ArrayIndexOutOfBoundsException ( "Submatrix indices" ) ; } |
public class MetadataContext {

    /**
     * Invokes {@link #getSchemas(java.lang.String, java.lang.String)} on given {@code context}
     * with given {@code catalog}.
     *
     * When no schemas are reported and {@code nonempty} is set, a single virtual
     * schema (empty name) is fabricated and populated with the catalog's functions,
     * procedures, tables and UDTs, unless suppressed by the context's suppression paths.
     *
     * @param context the context
     * @param catalog the value for the first parameter of {@link #getSchemas(java.lang.String, java.lang.String)}.
     * @param nonempty a flag for non empty
     * @return a list of schemas
     * @throws SQLException if a database error occurs.
     */
    public static List<Schema> getSchemas(@NonNull final MetadataContext context, final String catalog, final boolean nonempty) throws SQLException {
        final List<Schema> schemas = context.getSchemas(catalog, null);
        if (schemas.isEmpty() && nonempty) {
            // Fabricate a catalog-level virtual schema so callers always get at least one entry.
            final Schema schema = new Schema();
            schema.virtual = true;
            schema.setTableCatalog(catalog);
            schema.setTableSchem("");
            if (!context.isSuppressionPath("schema/functions")) {
                schema.getFunctions().addAll(context.getFunctions(schema.getTableCatalog(), schema.getTableSchem(), null));
            }
            if (!context.isSuppressionPath("schema/procedures")) {
                schema.getProcedures().addAll(context.getProcedures(schema.getTableCatalog(), schema.getTableSchem(), null));
            }
            if (!context.isSuppressionPath("schema/tables")) {
                schema.getTables().addAll(context.getTables(schema.getTableCatalog(), schema.getTableSchem(), null, null));
            }
            if (!context.isSuppressionPath("schema/UDTs")) {
                schema.getUDTs().addAll(context.getUDTs(schema.getTableCatalog(), schema.getTableSchem(), null, null));
            }
            schemas.add(schema);
        }
        return schemas;
    }
}
public class ApiUtilDAODefaultImpl {

    /**
     * Returns the Database object for the given host and port, creating the ORB,
     * the database cache list, and the Database instance on first use. Cached
     * instances (the default database or a previously created one) are reused
     * when their tango_host matches "host:port".
     *
     * @param host database host name
     * @param port database port number (as string)
     * @return the Database object for host:port
     * @throws DevFailed propagated from ORB or Database construction
     */
    public Database get_db_obj(final String host, final String port) throws DevFailed {
        if (ApiUtil.getOrb() == null) {
            create_orb();
        }
        // If first time, create vector
        if (db_list == null) {
            db_list = new ArrayList<Database>();
        }
        // Build tango_host string
        final String tango_host = host + ":" + port;
        // Search if database object already created for this host and port
        if (defaultDatabase != null) {
            if (defaultDatabase.get_tango_host().equals(tango_host)) {
                return defaultDatabase;
            }
        }
        for (final Database dbase : db_list) {
            if (dbase.get_tango_host().equals(tango_host)) {
                return dbase;
            }
        }
        // Else, create a new database object
        final Database dbase = new Database(host, port);
        db_list.add(dbase);
        return dbase;
    }
}
public class ModelsImpl {
    /**
     * Deletes a regex entity model from the application.
     *
     * @param appId the application ID
     * @param versionId the version ID
     * @param regexEntityId the regex entity extractor ID
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the OperationStatus object if successful
     */
    public OperationStatus deleteRegexEntityModel(UUID appId, String versionId, UUID regexEntityId) {
        // Blocks on the async service call and unwraps the single response body.
        return deleteRegexEntityModelWithServiceResponseAsync(appId, versionId, regexEntityId).toBlocking().single().body();
    }
}
public class BlockingStateOrdering {
    /**
     * Extracts the previous and next events in the stream of events for the
     * given target subscription, relative to {@code insertionEvent}.
     *
     * @param events the full ordered event stream to scan
     * @param targetEntitlementId the entitlement whose neighbouring events are wanted
     * @param insertionEvent the reference event; when null, prev is null and next is the first event (if any)
     * @return a two-element array: [0] = previous event (or null), [1] = next event (or null)
     */
    private SubscriptionEvent[] findPrevNext(final List<SubscriptionEvent> events, final UUID targetEntitlementId, final SubscriptionEvent insertionEvent) {
        // Find prev/next event for the same entitlement
        final SubscriptionEvent[] result = new DefaultSubscriptionEvent[2];
        if (insertionEvent == null) {
            // No reference point: nothing precedes, the stream head (if any) follows.
            result[0] = null;
            result[1] = !events.isEmpty() ? events.get(0) : null;
            return result;
        }
        final Iterator<SubscriptionEvent> it = events.iterator();
        DefaultSubscriptionEvent prev = null;
        DefaultSubscriptionEvent next = null;
        boolean foundCur = false;
        while (it.hasNext()) {
            final DefaultSubscriptionEvent tmp = (DefaultSubscriptionEvent) it.next();
            if (tmp.getEntitlementId().equals(targetEntitlementId)) {
                // Before the insertion event is seen, keep updating prev;
                // the first match after it is the next event.
                if (!foundCur) {
                    prev = tmp;
                } else {
                    next = tmp;
                    break;
                }
            }
            // Check both the id and the event type because of multiplexing
            if (tmp.getId().equals(insertionEvent.getId()) && tmp.getSubscriptionEventType().equals(insertionEvent.getSubscriptionEventType())) {
                foundCur = true;
            }
        }
        result[0] = prev;
        result[1] = next;
        return result;
    }
}
public class NodeTypeClient {
    /**
     * Retrieves an aggregated list of node types.
     *
     * <p>Sample code:
     * <pre><code>
     * try (NodeTypeClient nodeTypeClient = NodeTypeClient.create()) {
     *   ProjectName project = ProjectName.of("[PROJECT]");
     *   for (NodeTypesScopedList element : nodeTypeClient.aggregatedListNodeTypes(project).iterateAll()) {
     *     // doThingsWith(element);
     *   }
     * }
     * </code></pre>
     *
     * @param project Project ID for this request.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final AggregatedListNodeTypesPagedResponse aggregatedListNodeTypes(ProjectName project) {
        // Guard against a null project before formatting it into the request,
        // then delegate to the request-based overload.
        AggregatedListNodeTypesHttpRequest request = AggregatedListNodeTypesHttpRequest.newBuilder().setProject(project == null ? null : project.toString()).build();
        return aggregatedListNodeTypes(request);
    }
}
public class JavaTokenizer {
    /**
     * Build a map for translating between line numbers and
     * positions in the input.
     *
     * @return a LineMap over the reader's raw character buffer
     */
    public Position.LineMap getLineMap() {
        // Third argument presumably controls tab handling in the line map —
        // TODO confirm against Position.makeLineMap's documentation.
        return Position.makeLineMap(reader.getRawCharacters(), reader.buflen, false);
    }
}
public class CommonOps_DDF3 { /** * Transposes matrix ' a ' and stores the results in ' b ' : < br >
* < br >
* b < sub > ij < / sub > = a < sub > ji < / sub > < br >
* where ' b ' is the transpose of ' a ' .
* @ param input The original matrix . Not modified .
* @ param output Where the transpose is stored . If null a new matrix is created . Modified .
* @ return The transposed matrix . */
public static DMatrix3x3 transpose ( DMatrix3x3 input , DMatrix3x3 output ) { } } | if ( input == null ) input = new DMatrix3x3 ( ) ; output . a11 = input . a11 ; output . a12 = input . a21 ; output . a13 = input . a31 ; output . a21 = input . a12 ; output . a22 = input . a22 ; output . a23 = input . a32 ; output . a31 = input . a13 ; output . a32 = input . a23 ; output . a33 = input . a33 ; return output ; |
public class UcumEssenceService {
    /**
     * Given a unit, return a formal description of what the units stand for using
     * full names.
     *
     * @param unit the unit code (javadoc previously said "units"; the parameter is "unit")
     * @return formal description; "(unity)" for a null/empty unit
     * @throws UcumException if the unit expression cannot be parsed
     */
    @Override
    public String analyse(String unit) throws UcumException {
        // Empty/null input denotes the dimensionless unit.
        if (Utilities.noString(unit))
            return "(unity)";
        assert checkStringParam(unit) : paramError("analyse", "unit", "must not be null or empty");
        Term term = new ExpressionParser(model).parse(unit);
        return new FormalStructureComposer().compose(term);
    }
}
public class JarafeMEDecoder { /** * @ param features - A list of strings representing the features for a classification instance
* @ return labelposteriorpair list - A list of pairs that include each label and a posterior probability mass */
public List < StringDoublePair > classifyInstanceDistribution ( List < String > features ) { } } | List < scala . Tuple2 < String , Double > > r = maxEnt . decodeInstanceAsDistribution ( features ) ; List < StringDoublePair > res = new ArrayList < StringDoublePair > ( ) ; for ( scala . Tuple2 < String , Double > el : r ) { res . add ( new StringDoublePair ( el . _1 , el . _2 ) ) ; } return res ; |
public class DiscreteFactor { /** * Prints out this factor as a comma - separated values file , suitable for
* reading using { @ link TableFactor # fromDelimitedFile } .
* @ return */
public String toCsv ( ) { } } | StringBuilder sb = new StringBuilder ( ) ; Iterator < Outcome > iter = outcomeIterator ( ) ; while ( iter . hasNext ( ) ) { Outcome outcome = iter . next ( ) ; sb . append ( outcome . toCsv ( ) ) ; if ( iter . hasNext ( ) ) { sb . append ( "\n" ) ; } } return sb . toString ( ) ; |
public class MentionOf {
    /**
     * Gets the value of the bundle property.
     *
     * @return possible object is {@link org.openprovenance.prov.sql.IDRef}
     */
    @ManyToOne(targetEntity = org.openprovenance.prov.sql.QualifiedName.class, cascade = { CascadeType.ALL })
    @JoinColumn(name = "BUNDLE")
    public org.openprovenance.prov.model.QualifiedName getBundle() {
        return bundle;
    }
}
public class NBTIO {
    /**
     * Writes an NBT tag.
     *
     * @param out Data output to write to.
     * @param tag Tag to write.
     * @throws java.io.IOException If an I/O error occurs.
     */
    public static void writeTag(DataOutput out, Tag tag) throws IOException {
        // Wire layout as written here: 1-byte type id, then the tag name
        // (modified UTF-8), then the tag's own payload.
        out.writeByte(TagRegistry.getIdFor(tag.getClass()));
        out.writeUTF(tag.getName());
        tag.write(out);
    }
}
public class JSONArray {
    /**
     * Same as {@link #put}, with added validity checks.
     */
    void checkedPut(Object value) throws JSONException {
        // Reject NaN / infinite numeric values before storing them.
        if (value instanceof Number) {
            JSON.checkDouble(((Number) value).doubleValue());
        }
        put(value);
    }
}
public class StudioModel {
    /**
     * Sets name of DB |user|.
     *
     * @param user User name
     * @see #getUser
     */
    public void setUser(final String user) {
        final String old = this.user;
        this.user = user;
        // NOTE(review): a wall-clock timestamp is stored into `connectionConfig` —
        // presumably a "config last changed" marker; confirm the field's intended
        // meaning, as the name suggests configuration rather than a time.
        this.connectionConfig = System.currentTimeMillis();
        // Changing the user invalidates any previously validated connection.
        this.connectionValidated = false;
        this.pcs.firePropertyChange("user", old, this.user);
    }
}
public class LevenbergMarquardtMethod {
    /**
     * Compute new chi-squared error.
     * This function also modifies the alpha and beta matrixes!
     *
     * @param curparams Parameters to use in computation.
     * @return new chi squared
     */
    private double simulateParameters(double[] curparams) {
        // Initialize alpha, beta
        for (int i = 0; i < numfit; i++) {
            Arrays.fill(alpha[i], 0.);
        }
        Arrays.fill(beta, 0.);
        double newchisq = 0.0;
        // Simulation loop over all data
        for (int di = 0; di < x.length; di++) {
            FittingFunctionResult res = func.eval(x[di], curparams);
            // compute inverse squared standard deviation of the point (confidence?)
            double sigma2inv = 1.0 / (s[di] * s[di]);
            // Residual between observed and simulated value at this point.
            double deltay = y[di] - res.y;
            // i2 and j2 are the indices that only count the params with dofit true!
            for (int i = 0, i2 = 0; i < numfit; i++) {
                if (dofit[i]) {
                    double wt = res.gradients[i] * sigma2inv;
                    // fill only half of the matrix, use symmetry below to complete the
                    // remainder.
                    for (int j = 0, j2 = 0; j <= i; j++) {
                        if (dofit[j]) {
                            alpha[i2][j2++] += wt * res.gradients[j];
                        }
                    }
                    beta[i2++] += deltay * wt;
                }
            }
            // Accumulate the weighted squared residual into chi-squared.
            newchisq += deltay * deltay * sigma2inv;
        }
        // fill symmetric side of matrix
        // NOTE(review): this mirrors over the full numfit range while the
        // accumulation above used compacted (dofit-only) indices; when some
        // dofit[] entries are false this copies zero-filled slots — presumably
        // harmless, but confirm against the solver that consumes alpha.
        for (int i = 1; i < numfit; i++) {
            for (int j = i + 1; j < numfit; j++) {
                alpha[i][j] = alpha[j][i];
            }
        }
        return newchisq;
    }
}
public class SerializableChecker {
    /**
     * Dump with indentation.
     *
     * @param type the type that couldn't be serialized
     * @return A very pretty dump
     */
    private String toPrettyPrintedStack(String type) {
        StringBuilder result = new StringBuilder();
        // Grows by one indent unit per stack level to show field nesting depth.
        StringBuilder spaces = new StringBuilder();
        result.append("Unable to serialize class: ");
        result.append(type);
        result.append("\nField hierarchy is:");
        for (Iterator<TraceSlot> i = traceStack.listIterator(); i.hasNext(); ) {
            spaces.append(" ");
            TraceSlot slot = i.next();
            result.append("\n").append(spaces).append(slot.fieldDescription);
            result.append(" [class=").append(slot.object.getClass().getName());
            result.append("]");
        }
        // Marks the deepest (last-printed) field as the offender.
        result.append(" <----- field that is not serializable");
        return result.toString();
    }
}
public class FreesoundClient {
    /**
     * Retrieve the next page of results for a {@link PagingQuery}.
     *
     * @param <I> The data type of items returned by the query
     * @param query The {@link PagingQuery} being run
     * @return The results of the query
     * @throws FreesoundClientException If it is not possible to retrieve the next page
     */
    public <I extends Object> PagingResponse<I> nextPage(final PagingQuery<?, I> query) throws FreesoundClientException {
        // Mutates the query's page counter before re-executing it.
        final int currentPage = query.getPage();
        query.setPage(currentPage + 1);
        // Unchecked cast: executeQuery returns the raw response type.
        return (PagingResponse<I>) executeQuery(query);
    }
}
public class MulticastUtil {
    /**
     * Send out a message to Jolokia's multicast group over all network interfaces
     * supporting multicast requests (and no logging is used).
     *
     * @param pOutMsg the message to send
     * @param pTimeout timeout used for how long to wait for discovery messages
     * @return list of received answers, never null
     * @throws IOException if something fails during the discovery request
     */
    public static List<DiscoveryIncomingMessage> sendQueryAndCollectAnswers(DiscoveryOutgoingMessage pOutMsg, int pTimeout) throws IOException {
        // Delegates to the logging-aware overload with a no-op log handler.
        return sendQueryAndCollectAnswers(pOutMsg, pTimeout, new QuietLogHandler());
    }
}
public class AmazonQuickSightClient {
    /**
     * Creates an Amazon QuickSight user, whose identity is associated with the AWS
     * Identity and Access Management (IAM) identity or role specified in the request.
     *
     * <p>The permission resource is
     * {@code arn:aws:quicksight:us-east-1:<aws-account-id>:user/default/<user-name>}.
     * The condition resource is the Amazon Resource Name (ARN) for the IAM user or
     * role, and the session name. The condition keys are {@code quicksight:IamArn}
     * and {@code quicksight:SessionName}.
     *
     * <p><b>CLI Sample:</b>
     * {@code aws quicksight register-user --aws-account-id=111122223333 --namespace=default
     * --email=pat@example.com --identity-type=IAM --user-role=AUTHOR
     * --iam-arn=arn:aws:iam::111122223333:user/Pat}
     *
     * @param request the RegisterUser request (javadoc previously named this "registerUserRequest")
     * @return Result of the RegisterUser operation returned by the service.
     * @throws AccessDeniedException if the provided credentials couldn't be validated or lack authorization
     * @throws InvalidParameterValueException if one or more parameters don't have a valid value
     * @throws ResourceNotFoundException if one or more resources can't be found
     * @throws ThrottlingException if access is throttled
     * @throws LimitExceededException if a limit is exceeded
     * @throws ResourceExistsException if the resource already exists
     * @throws PreconditionNotMetException if one or more preconditions aren't met
     * @throws InternalFailureException if an internal failure occurred
     * @throws ResourceUnavailableException if this resource is currently unavailable
     * @sample AmazonQuickSight.RegisterUser
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/quicksight-2018-04-01/RegisterUser"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public RegisterUserResult registerUser(RegisterUserRequest request) {
        // Run the SDK's pre-execution hooks (which may return a mutated request)
        // before dispatching the call.
        request = beforeClientExecution(request);
        return executeRegisterUser(request);
    }
}
public class InternalXbaseParser {
    /**
     * InternalXbase.g:1217:1: ruleFeatureCallID : ( ( rule__FeatureCallID__Alternatives ) ) ;
     *
     * NOTE: ANTLR-generated parser code — the grammar-coordinate comments below
     * are emitted by the generator; do not hand-edit the logic.
     */
    public final void ruleFeatureCallID() throws RecognitionException {
        int stackSize = keepStackSize();
        try {
            // InternalXbase.g:1221:2: ( ( ( rule__FeatureCallID__Alternatives ) ) )
            // InternalXbase.g:1222:2: ( ( rule__FeatureCallID__Alternatives ) )
            {
                // InternalXbase.g:1222:2: ( ( rule__FeatureCallID__Alternatives ) )
                // InternalXbase.g:1223:3: ( rule__FeatureCallID__Alternatives )
                {
                    if (state.backtracking == 0) {
                        before(grammarAccess.getFeatureCallIDAccess().getAlternatives());
                    }
                    // InternalXbase.g:1224:3: ( rule__FeatureCallID__Alternatives )
                    // InternalXbase.g:1224:4: rule__FeatureCallID__Alternatives
                    {
                        pushFollow(FOLLOW_2);
                        rule__FeatureCallID__Alternatives();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    if (state.backtracking == 0) {
                        after(grammarAccess.getFeatureCallIDAccess().getAlternatives());
                    }
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            restoreStackSize(stackSize);
        }
        return;
    }
}
public class CreateSiteMessageData { /** * Init Method . */
public void init ( MessageDataParent messageDataParent , String strKey ) { } } | if ( strKey == null ) strKey = CREATE_SITE ; super . init ( messageDataParent , strKey ) ; |
public class WorkflowsInner {
    /**
     * Creates or updates a workflow.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param workflow The workflow.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WorkflowInner object if successful.
     */
    public WorkflowInner createOrUpdate(String resourceGroupName, String workflowName, WorkflowInner workflow) {
        // Blocks on the async service call and unwraps the single response body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, workflowName, workflow).toBlocking().single().body();
    }
}
public class RunInventoryReport {
    /**
     * Runs the example: builds a network-wide inventory report, waits for it to
     * complete, and downloads it as a gzipped CSV to a temp file.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     * @throws IOException if the report's contents could not be written to a temp file.
     * @throws InterruptedException if the thread was interrupted while waiting for the report to be ready.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session) throws IOException, InterruptedException {
        // Get the ReportService.
        ReportServiceInterface reportService = adManagerServices.get(session, ReportServiceInterface.class);
        // Get the NetworkService.
        NetworkServiceInterface networkService = adManagerServices.get(session, NetworkServiceInterface.class);
        // Get the root ad unit ID to filter on.
        String rootAdUnitId = networkService.getCurrentNetwork().getEffectiveRootAdUnitId();
        // Create statement to filter on a parent ad unit with the root ad unit ID to include all
        // ad units in the network.
        StatementBuilder statementBuilder = new StatementBuilder().where("PARENT_AD_UNIT_ID = :parentAdUnitId").withBindVariableValue("parentAdUnitId", Long.parseLong(rootAdUnitId));
        // Create report query.
        ReportQuery reportQuery = new ReportQuery();
        reportQuery.setDimensions(new Dimension[] { Dimension.AD_UNIT_ID, Dimension.AD_UNIT_NAME });
        reportQuery.setColumns(new Column[] { Column.AD_SERVER_IMPRESSIONS, Column.AD_SERVER_CLICKS, Column.DYNAMIC_ALLOCATION_INVENTORY_LEVEL_IMPRESSIONS, Column.DYNAMIC_ALLOCATION_INVENTORY_LEVEL_CLICKS, Column.TOTAL_INVENTORY_LEVEL_IMPRESSIONS, Column.TOTAL_INVENTORY_LEVEL_CPM_AND_CPC_REVENUE });
        // Set the filter statement.
        reportQuery.setStatement(statementBuilder.toStatement());
        // Set the ad unit view to hierarchical.
        reportQuery.setAdUnitView(ReportQueryAdUnitView.HIERARCHICAL);
        // Set the dynamic date range type or a custom start and end date.
        reportQuery.setDateRangeType(DateRangeType.YESTERDAY);
        // Create report job.
        ReportJob reportJob = new ReportJob();
        reportJob.setReportQuery(reportQuery);
        // Run report job.
        reportJob = reportService.runReportJob(reportJob);
        // Create report downloader.
        ReportDownloader reportDownloader = new ReportDownloader(reportService, reportJob.getId());
        // Wait for the report to be ready.
        reportDownloader.waitForReportReady();
        // Change to your file location.
        File file = File.createTempFile("inventory-report-", ".csv.gz");
        System.out.printf("Downloading report to %s ...", file.toString());
        // Download the report.
        ReportDownloadOptions options = new ReportDownloadOptions();
        options.setExportFormat(ExportFormat.CSV_DUMP);
        options.setUseGzipCompression(true);
        URL url = reportDownloader.getDownloadUrl(options);
        Resources.asByteSource(url).copyTo(Files.asByteSink(file));
        System.out.println("done.");
    }
}
public class PreferenceFragment { /** * Returns the text of the example dialog ' s positive button .
* @ return The text of the positive button */
private String getPositiveButtonText ( ) { } } | SharedPreferences sharedPreferences = PreferenceManager . getDefaultSharedPreferences ( getActivity ( ) ) ; String key = getString ( R . string . positive_button_text_preference_key ) ; String defaultValue = getString ( R . string . positive_button_text_preference_default_value ) ; return sharedPreferences . getString ( key , defaultValue ) ; |
public class ResourceConverter {
    /**
     * Serializes provided {@link JSONAPIDocument} into JSON API Spec compatible byte representation.
     *
     * @param documentCollection {@link JSONAPIDocument} document collection to serialize
     * @param serializationSettings {@link SerializationSettings} settings that override global serialization settings
     * @return serialized content in bytes
     * @throws DocumentSerializationException thrown in case serialization fails
     */
    public byte[] writeDocumentCollection(JSONAPIDocument<? extends Iterable<?>> documentCollection, SerializationSettings serializationSettings) throws DocumentSerializationException {
        try {
            // The resource cache tracks already-serialized resources for the
            // duration of this call; it is always cleared in the finally block.
            resourceCache.init();
            ArrayNode results = objectMapper.createArrayNode();
            // LinkedHashMap preserves insertion order of included resources.
            Map<String, ObjectNode> includedDataMap = new LinkedHashMap<>();
            for (Object object : documentCollection.get()) {
                results.add(getDataNode(object, includedDataMap, serializationSettings));
            }
            ObjectNode result = objectMapper.createObjectNode();
            result.set(DATA, results);
            // Handle global links and meta
            serializeMeta(documentCollection, result, serializationSettings);
            serializeLinks(documentCollection, result, serializationSettings);
            result = addIncludedSection(result, includedDataMap);
            return objectMapper.writeValueAsBytes(result);
        } catch (Exception e) {
            // Wrap any failure in the domain-specific serialization exception.
            throw new DocumentSerializationException(e);
        } finally {
            resourceCache.clear();
        }
    }
}
public class CmsPersistentLoginTokenHandler {
    /**
     * Validates a token and returns the matching user for which the token is valid.<p>
     *
     * Returns null if no user matching the token is found, or if the token for the user is expired.
     *
     * @param tokenString the token for which to find the matching user
     * @return the matching user for the token, or null if no matching user was found or the token is expired
     */
    public CmsUser validateToken(String tokenString) {
        if (CmsStringUtil.isEmpty(tokenString)) {
            return null;
        }
        Token token = new Token(tokenString);
        if (!token.isValid()) {
            LOG.warn("Invalid token: " + tokenString);
            return null;
        }
        String name = token.getName();
        String key = token.getKey();
        // Prefix for all log messages so failures can be traced to a user/key pair.
        String logContext = "[user=" + name + ",key=" + key + "] ";
        try {
            CmsUser user = m_adminCms.readUser(name);
            String infoKey = token.getAdditionalInfoKey();
            String addInfoValue = (String) user.getAdditionalInfo().get(infoKey);
            logContext = logContext + "[value=" + addInfoValue + "]";
            if (addInfoValue == null) {
                LOG.warn(logContext + " no matching additional info value found");
                return null;
            }
            try {
                // The additional-info value stores the token's expiration time in millis.
                long expirationDate = Long.parseLong(addInfoValue);
                if (System.currentTimeMillis() > expirationDate) {
                    LOG.warn(logContext + "Login token expired");
                    // Remove the stale token so it cannot be replayed later;
                    // a write failure is logged but does not change the result.
                    user.getAdditionalInfo().remove(infoKey);
                    try {
                        m_adminCms.writeUser(user);
                    } catch (Exception e) {
                        LOG.error(e.getLocalizedMessage(), e);
                    }
                    return null;
                }
            } catch (NumberFormatException e) {
                LOG.warn(logContext + "Invalid format for login token additional info");
                return null;
            }
            return user;
        } catch (Exception e) {
            // Any lookup failure (e.g. unknown user) is treated as an invalid token.
            LOG.warn(logContext + "error validating token", e);
            return null;
        }
    }
}
public class Seq {
    /**
     * Returns a List produced by iterative application of an accumulation function
     * to an initial element and the next element of the current sequence.
     * (Javadoc previously said "Stream"; this method materializes a List.)
     *
     * Produces a list consisting of {@code value1}, {@code acc(value1, value2)},
     * {@code acc(acc(value1, value2), value3)}, etc.
     *
     * <p>Example:
     * <pre>
     * <code>
     * Seq.of(new Integer[0]).scan((a, b) -&gt; a + b) =&gt; []
     * Seq.of(1).scan((a, b) -&gt; a + b) =&gt; [1]
     * Seq.of(1, 2).scan((a, b) -&gt; a + b) =&gt; [1, 3]
     * Seq.of(1, 2, 3).scan((a, b) -&gt; a + b) =&gt; [1, 3, 6]
     * Seq.of(1, 2, 3, 3, 2, 1).scan((a, b) -&gt; a + b) =&gt; [1, 3, 6, 9, 11, 12]
     * </code>
     * </pre>
     *
     * @param accumulator the accumulation function; must not be null
     * @return the list of running accumulations; empty when the sequence is empty
     * @throws E if the accumulator throws
     */
    public <E extends Exception> List<T> scan(final Try.BiFunction<? super T, ? super T, T, E> accumulator) throws E {
        N.checkArgNotNull(accumulator);
        final List<T> result = new ArrayList<>();
        final Iterator<T> iter = iterator();
        // `next` carries the running accumulation; the first element seeds it.
        T next = null;
        if (iter.hasNext()) {
            result.add((next = iter.next()));
        }
        while (iter.hasNext()) {
            result.add((next = accumulator.apply(next, iter.next())));
        }
        return result;
    }
}
public class PGPoolingDataSource {
    /**
     * Initializes this DataSource. If the initialConnections is greater than zero, that number of
     * connections will be created. After this method is called, the DataSource properties cannot be
     * changed. If you do not call this explicitly, it will be called the first time you get a
     * connection from the DataSource.
     *
     * @throws SQLException Occurs when the initialConnections is greater than zero, but the
     *         DataSource is not able to create enough physical connections.
     */
    public void initialize() throws SQLException {
        // All pool state mutations happen under the shared lock.
        synchronized (lock) {
            source = createConnectionPool();
            try {
                // Copy this DataSource's connection properties onto the pool source.
                source.initializeFrom(this);
            } catch (Exception e) {
                throw new PSQLException(GT.tr("Failed to setup DataSource."), PSQLState.UNEXPECTED_ERROR, e);
            }
            // Pre-create physical connections up to the configured initial count.
            while (available.size() < initialConnections) {
                available.push(source.getPooledConnection());
            }
            initialized = true;
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.