signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NonplanarBonds { /** * Locates double bonds to mark as unspecified stereochemistry . * @ return set of double bonds */ private List < IBond > findUnspecifiedDoubleBonds ( int [ ] [ ] adjList ) { } }
List < IBond > unspecifiedDoubleBonds = new ArrayList < > ( ) ; for ( IBond bond : container . bonds ( ) ) { // non - double bond , ignore it if ( bond . getOrder ( ) != DOUBLE ) continue ; final IAtom aBeg = bond . getBegin ( ) ; final IAtom aEnd = bond . getEnd ( ) ; final int beg = atomToIndex . get ( aBeg ) ; final int end = atomToIndex . get ( aEnd ) ; // cyclic bond , ignore it ( FIXME may be a cis / trans bond in macro cycle | V | > 7) if ( ringSearch . cyclic ( beg , end ) ) continue ; // stereo bond , ignore it depiction is correct if ( ( doubleBondElements [ beg ] != null && doubleBondElements [ beg ] . getStereoBond ( ) . equals ( bond ) ) || ( doubleBondElements [ end ] != null && doubleBondElements [ end ] . getStereoBond ( ) . equals ( bond ) ) ) continue ; // is actually a tetrahedral centre if ( tetrahedralElements [ beg ] != null || tetrahedralElements [ end ] != null ) continue ; if ( ! isCisTransEndPoint ( beg ) || ! isCisTransEndPoint ( end ) ) continue ; if ( ! hasOnlyPlainBonds ( beg , bond ) || ! hasOnlyPlainBonds ( end , bond ) ) continue ; if ( hasLinearEqualPaths ( adjList , beg , end ) || hasLinearEqualPaths ( adjList , end , beg ) ) continue ; unspecifiedDoubleBonds . add ( bond ) ; } return unspecifiedDoubleBonds ;
public class LargeBlockManager { /** * Release all the blocks that are on disk , and delete them from the * map that tracks them . * @ throws IOException */ private void releaseAllBlocks ( ) throws IOException { } }
synchronized ( m_accessLock ) { Set < Map . Entry < BlockId , Path > > entries = m_blockPathMap . entrySet ( ) ; while ( ! entries . isEmpty ( ) ) { Map . Entry < BlockId , Path > entry = entries . iterator ( ) . next ( ) ; Files . delete ( entry . getValue ( ) ) ; m_blockPathMap . remove ( entry . getKey ( ) ) ; entries = m_blockPathMap . entrySet ( ) ; } }
public class OrthologizedKam { /** * Wrap a { @ link KamEdge } as an { @ link OrthologousEdge } to allow conversion * of the edge label by the { @ link SpeciesDialect } . The edge ' s * { @ link KamNode } s are also wrapped . * @ param kamEdge { @ link KamEdge } * @ return the wrapped kam edge , * < ol > * < li > { @ code null } if { @ code kamEdge } input is { @ code null } < / li > * < li > { @ link OrthologousEdge } if { @ code kamEdge } has orthologized < / li > * < li > the original { @ code kamEdge } input if it has not orthologized < / li > * < / ol > */ private KamEdge wrapEdge ( KamEdge kamEdge ) { } }
if ( kamEdge == null ) { return null ; } TermParameter param = etp . get ( kamEdge . getId ( ) ) ; if ( param != null ) { return new OrthologousEdge ( kamEdge , param ) ; } return kamEdge ;
public class FnBoolean { /** * Takes a boolean function ( < tt > Function & lt ; ? , Boolean & gt ; < / tt > ) as a * parameter and returns another one which returns true if the specified function * returns false , and false if the function returns true . * @ param function the function to be negated * @ return a function returning true if the specified function returns false , and false if * it returns true . */ public static final < T > Function < T , Boolean > not ( final IFunction < T , Boolean > function ) { } }
return new Not < T > ( function ) ;
public class ListUtils { /** * Returns a random element of the given list . * @ param < T > * Type of list elements * @ param list * List * @ return Random element of < code > list < / code > */ public static < T > T getRandomItem ( List < T > list ) { } }
return list . get ( rand . nextInt ( list . size ( ) ) ) ;
public class WebAppConfigurator { /** * Obtain an attribute value as a mapping . Create and return a new empty mapping * if one is not yet present . * Once obtained as a mapping , the attribute value must always be obtained * as a mapping . A subsequent attempt to obtain the value as a set will fail * with a class cast exception . * @ param key The key of the attribute value . * @ return The attribute value as a mapping . */ @ SuppressWarnings ( "unchecked" ) public < T > Map < String , ConfigItem < T > > getConfigItemMap ( String key ) { } }
Map < String , ConfigItem < T > > configItemMap = ( Map < String , ConfigItem < T > > ) attributes . get ( key ) ; if ( configItemMap == null ) { configItemMap = new HashMap < String , ConfigItem < T > > ( ) ; attributes . put ( key , configItemMap ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "ConfigContext create map instance for {0}" , key ) ; } } return configItemMap ;
public class ObservationTree { /** * Initialize the observation tree with initial hypothesis state . Usually used during { @ link * de . learnlib . api . algorithm . LearningAlgorithm # startLearning ( ) } * @ param state * the initial state of the hypothesis */ public void initialize ( final S state ) { } }
final FastMealyState < O > init = this . observationTree . addInitialState ( ) ; this . nodeToObservationMap . put ( state , init ) ;
public class OllieProvider { /** * Create a Uri for a model row . * @ param type The model type . * @ param id The row Id . * @ return The Uri for the model row . */ public static Uri createUri ( Class < ? extends Model > type , Long id ) { } }
final StringBuilder uri = new StringBuilder ( ) ; uri . append ( "content://" ) ; uri . append ( sAuthority ) ; uri . append ( "/" ) ; uri . append ( Ollie . getTableName ( type ) . toLowerCase ( ) ) ; if ( id != null ) { uri . append ( "/" ) ; uri . append ( id . toString ( ) ) ; } return Uri . parse ( uri . toString ( ) ) ;
public class InputRenderer { /** * Adds " aria - invalid " if the component is invalid . * @ param context the { @ link FacesContext } * @ param component the { @ link UIInput } component to add attributes for * @ throws IOException if any error occurs writing the response */ protected void renderARIAInvalid ( FacesContext context , UIInput component ) throws IOException { } }
if ( ! component . isValid ( ) ) { ResponseWriter writer = context . getResponseWriter ( ) ; writer . writeAttribute ( HTML . ARIA_INVALID , "true" , null ) ; }
public class SimpleULogger { /** * This is our internal implementation for logging regular ( non - parameterized ) * log messages . * @ param level level * @ param message message * @ param t throwable */ private void log ( final String level , final String message , final Throwable t ) { } }
StringBuffer buf = new StringBuffer ( ) ; long millis = System . currentTimeMillis ( ) ; buf . append ( millis - startTime ) ; buf . append ( " [" ) ; buf . append ( Thread . currentThread ( ) . getName ( ) ) ; buf . append ( "] " ) ; buf . append ( level ) ; buf . append ( " " ) ; buf . append ( loggerName ) ; buf . append ( " - " ) ; buf . append ( message ) ; buf . append ( LINE_SEPARATOR ) ; System . out . print ( buf . toString ( ) ) ; if ( t != null ) { t . printStackTrace ( System . out ) ; } System . out . flush ( ) ;
public class S3FileSystem { /** * FileStatus for S3 file systems . */ @ Override public FileStatus getFileStatus ( Path f ) throws IOException { } }
INode inode = store . retrieveINode ( makeAbsolute ( f ) ) ; if ( inode == null ) { throw new FileNotFoundException ( f + ": No such file or directory." ) ; } return new S3FileStatus ( f . makeQualified ( this ) , inode ) ;
public class XMLMaskHelper { /** * Get the entity reference for the specified character . This returns e . g . * & amp ; lt ; for ' & lt ; ' etc . This method has special handling for & lt ; , & gt ; , * & amp ; , & quot ; and ' . All other chars are encoded by their numeric value ( e . g . * & amp ; # 200 ; ) * @ param c * Character to use . * @ return The entity reference string . Never < code > null < / code > nor empty . */ @ Nonnull @ Nonempty public static String getXML10EntityReferenceString ( final char c ) { } }
if ( c == LT ) return "&lt;" ; if ( c == GT ) return "&gt;" ; if ( c == AMPERSAND ) return "&amp;" ; if ( c == DOUBLE_QUOTE ) return "&quot;" ; if ( c == APOS ) return "&apos;" ; return getXMLNumericReference ( c ) ;
public class AnalysisScreen { /** * Move the source key fields to the destinataion keys . * @ param mxKeyFields The key fields to move . */ public void setupSummaryKey ( BaseField [ ] [ ] mxKeyFields ) { } }
for ( int i = 0 ; i < mxKeyFields . length ; i ++ ) { mxKeyFields [ i ] [ SUMMARY ] . moveFieldToThis ( mxKeyFields [ i ] [ BASIS ] ) ; }
public class DefaultRetryClient { /** * On timeout , retry the request until the maximum number of allowed retries * is reached . * @ param face the { @ link Face } on which to retry requests * @ param interest the { @ link Interest } to retry * @ param onData the application ' s success callback * @ param onTimeout the application ' s failure callback * @ throws IOException when the client cannot perform the necessary network IO */ @ Override public void retry ( Face face , Interest interest , OnData onData , OnTimeout onTimeout ) throws IOException { } }
RetryContext context = new RetryContext ( face , interest , onData , onTimeout ) ; retryInterest ( context ) ;
public class ProtoLexer { /** * $ ANTLR start " UINT64" */ public final void mUINT64 ( ) throws RecognitionException { } }
try { int _type = UINT64 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // com / dyuproject / protostuff / parser / ProtoLexer . g : 167:5 : ( ' uint64 ' ) // com / dyuproject / protostuff / parser / ProtoLexer . g : 167:9 : ' uint64' { match ( "uint64" ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class DateUtils { /** * Converts an ISO 8601 formatted Date String to a Java Date ISO 8601 format : * yyyy - MM - dd ' T ' HH : mm : ss * @ param iso8601FormattedDate * @ return Date * @ throws com . fasterxml . jackson . databind . exc . InvalidFormatException */ public static Date fromISO8601DateString ( String iso8601FormattedDate ) throws com . fasterxml . jackson . databind . exc . InvalidFormatException { } }
SimpleDateFormat iso8601Format = new SimpleDateFormat ( "yyyy-MM-dd'T'HH:mm:ss" ) ; // set UTC time zone iso8601Format . setTimeZone ( TimeZone . getTimeZone ( "UTC" ) ) ; try { return iso8601Format . parse ( iso8601FormattedDate ) ; } catch ( ParseException e ) { throw new InvalidFormatException ( "Error parsing as date" , iso8601FormattedDate , Date . class ) ; }
public class DwgUtil { /** * Read a double value from a group of unsigned bytes * @ param data Array of unsigned bytes obtained from the DWG binary file * @ param offset The current bit offset where the value begins * @ throws Exception If an unexpected bit value is found in the DWG file . Occurs * when we are looking for LwPolylines . * @ return Vector This vector has two parts . First is an int value that represents * the new offset , and second is the double value */ public static Vector getBitDouble ( int [ ] data , int offset ) throws Exception { } }
Vector v = new Vector ( ) ; int type = ( ( Integer ) getBits ( data , 2 , offset ) ) . intValue ( ) ; int read = 2 ; double val = 0.0 ; if ( type == 0x00 ) { byte [ ] bytes = ( byte [ ] ) getBits ( data , 64 , ( offset + 2 ) ) ; ByteBuffer bb = ByteBuffer . wrap ( bytes ) ; bb . order ( ByteOrder . LITTLE_ENDIAN ) ; val = bb . getDouble ( ) ; read = 66 ; } else if ( type == 0x01 ) { val = 1.0 ; } else if ( type == 0x02 ) { val = 0.0 ; } else { System . out . println ( "Bad type at bit offset: " + offset ) ; } v . add ( new Integer ( offset + read ) ) ; v . add ( new Double ( val ) ) ; return v ;
public class TagsApi { /** * Get the tag list for a given photo . * This method does not require authentication . * @ return tags list for the photo . Required . * @ throws JinxException if required parameter is missing , or if there are any errors . * @ see < a href = " https : / / www . flickr . com / services / api / flickr . tags . getListPhoto . html " > flickr . tags . getListPhoto < / a > */ public PhotoTagList getListPhoto ( String photoId ) throws JinxException { } }
JinxUtils . validateParams ( photoId ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.tags.getListPhoto" ) ; params . put ( "photo_id" , photoId ) ; return jinx . flickrGet ( params , PhotoTagList . class , false ) ;
public class ArgumentsBuilder { /** * Converts boot classpath library options into corresponding arguments ( - - bcp / a , - - bcp / p ) . * @ param libraries boot classpath libraries * @ return converted Pax Runner collection of arguments */ private Collection < String > extractArguments ( final BootClasspathLibraryOption [ ] libraries ) { } }
final List < String > arguments = new ArrayList < String > ( ) ; for ( BootClasspathLibraryOption library : libraries ) { if ( library . isBeforeFramework ( ) ) { arguments . add ( "--bcp/p=" + library . getLibraryUrl ( ) . getURL ( ) ) ; } else { arguments . add ( "--bcp/a=" + library . getLibraryUrl ( ) . getURL ( ) ) ; } } return arguments ;
public class IDDImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EList < Triplet > getSDFS ( ) { } }
if ( sdfs == null ) { sdfs = new EObjectContainmentEList . Resolving < Triplet > ( Triplet . class , this , AfplibPackage . IDD__SDFS ) ; } return sdfs ;
public class JsonArray { /** * Convenient method providing a few alternate ways of extracting elements * from a JsonArray . * @ param label label * @ return the first element in the array matching the label or the n - th * element if the label is an integer and the element an object or * an array . */ public JsonElement get ( String label ) { } }
int i = 0 ; try { for ( JsonElement e : this ) { if ( e . isPrimitive ( ) && e . asPrimitive ( ) . asString ( ) . equals ( label ) ) { return e ; } else if ( ( e . isObject ( ) || e . isArray ( ) ) && Integer . valueOf ( label ) . equals ( i ) ) { return e ; } i ++ ; } } catch ( NumberFormatException e ) { // fail gracefully return null ; } // the element was not found return null ;
public class Response { /** * Converts the given Java object to JSON object using Jackson ObjectMapper , * and responds it . * If you just want to respond a text with " application / json " as content type , * use respondJsonText ( text ) . * Content - Type header is set to " application / json " . * " text / json " would make the browser download instead of displaying the content . * It makes debugging a pain . */ public ChannelFuture respondJson ( Object ref ) throws Exception { } }
final String json = jsonObjectMapper . writeValueAsString ( ref ) ; return respondText ( json , "application/json" ) ;
public class ContentsDao { /** * { @ inheritDoc } * Verify optional tables have been created */ @ Override public Contents createIfNotExists ( Contents contents ) throws SQLException { } }
verifyCreate ( contents ) ; return super . createIfNotExists ( contents ) ;
public class SensorEvent { /** * Use this method to return a { @ link SensorEvent } for use . * @ return the { @ link SensorEvent } object . */ static SensorEvent obtain ( ) { } }
final SensorEvent event ; synchronized ( recyclerLock ) { event = recyclerTop ; if ( event == null ) { return new SensorEvent ( ) ; } recyclerTop = event . next ; recyclerUsed -= 1 ; } event . next = null ; return event ;
public class JScoreComponent { /** * Highlights the given elements in the score . * If item ( s ) was previously selected , it is unselected . * @ param elements A collection of score element to be highlighted in the * score . < TT > null < / TT > or empty collection can be specified to remove * highlighting . */ public void setSelectedItems ( Collection elements ) { } }
if ( m_selectedItems != null ) { for ( Object m_selectedItem : m_selectedItems ) { ( ( JScoreElement ) m_selectedItem ) . setColor ( null ) ; } m_selectedItems = null ; } if ( ( elements != null ) && ( elements . size ( ) > 0 ) ) { m_selectedItems = elements ; for ( Object m_selectedItem : m_selectedItems ) { ( ( JScoreElement ) m_selectedItem ) . setColor ( SELECTED_ITEM_COLOR ) ; } }
public class PeanoSpatialSorter {

    /**
     * Sort by Peano curve: recursively three-way partitions the range
     * [start, end) along one dimension per level, mutating {@code mms}
     * (min/max per dimension) and {@code bits} (inversion flags) in place and
     * restoring them before returning.
     *
     * @param objs  objects to sort in place
     * @param start start index (inclusive)
     * @param end   end index (exclusive)
     * @param mms   minmax values, two entries (min, max) per dimension
     * @param dims  dimensions index, or {@code null} to use all dimensions
     * @param depth current dimension/recursion level
     * @param bits  bit set of per-dimension inversions
     * @param desc  current ordering direction
     */
    protected void peanoSort(List<? extends SpatialComparable> objs, int start, int end, double[] mms, int[] dims, int depth, long[] bits, boolean desc) {
        final int numdim = (dims != null) ? dims.length : (mms.length >> 1);
        final int edim = (dims != null) ? dims[depth] : depth;
        // Find the splitting points.
        final double min = mms[2 * edim], max = mms[2 * edim + 1];
        final double tfirst = (min + min + max) / 3.;
        final double tsecond = (min + max + max) / 3.;
        // Safeguard against duplicate points: if this dimension has (nearly)
        // collapsed, only recurse if some other dimension still has extent.
        if (max - tsecond < 1E-10 || tsecond - tfirst < 1E-10 || tfirst - min < 1E-10) {
            boolean ok = false;
            for (int d = 0; d < numdim; d++) {
                int d2 = ((dims != null) ? dims[d] : d) << 1;
                if (mms[d2 + 1] - mms[d2] >= 1E-10) {
                    ok = true;
                    break;
                }
            }
            if (!ok) {
                return;
            }
        }
        // Effective inversion: stored per-dimension flag XOR current direction.
        final boolean inv = BitsUtil.get(bits, edim) ^ desc;
        // Split the data set into three parts
        int fsplit, ssplit;
        if (!inv) {
            fsplit = pivotizeList1D(objs, start, end, edim, tfirst, false);
            ssplit = (fsplit < end - 1) ? pivotizeList1D(objs, fsplit, end, edim, tsecond, false) : fsplit;
        } else {
            fsplit = pivotizeList1D(objs, start, end, edim, tsecond, true);
            ssplit = (fsplit < end - 1) ? pivotizeList1D(objs, fsplit, end, edim, tfirst, true) : fsplit;
        }
        int nextdim = (depth + 1) % numdim;
        // Do we need to update the min/max values?
        if (start < fsplit - 1) {
            mms[2 * edim] = !inv ? min : tsecond;
            mms[2 * edim + 1] = !inv ? tfirst : max;
            peanoSort(objs, start, fsplit, mms, dims, nextdim, bits, desc);
        }
        if (fsplit < ssplit - 1) {
            BitsUtil.flipI(bits, edim); // set (all but dim: we also flip "desc")
            mms[2 * edim] = tfirst;
            mms[2 * edim + 1] = tsecond;
            // Middle third runs in the opposite direction on the Peano curve.
            peanoSort(objs, fsplit, ssplit, mms, dims, nextdim, bits, !desc);
            BitsUtil.flipI(bits, edim);
        }
        if (ssplit < end - 1) {
            mms[2 * edim] = !inv ? tsecond : min;
            mms[2 * edim + 1] = !inv ? max : tfirst;
            peanoSort(objs, ssplit, end, mms, dims, nextdim, bits, desc);
        }
        // Restore ranges
        mms[2 * edim] = min;
        mms[2 * edim + 1] = max;
    }
}
public class FileSystemUtils { /** * Convenience method for { @ code # persistAndWait ( fs , uri , - 1 ) } . i . e . wait for an indefinite period * of time to persist . This will block for an indefinite period of time if the path is never * persisted . Use with care . * @ param fs { @ link FileSystem } to carry out Alluxio operations * @ param uri the uri of the file to persist */ public static void persistAndWait ( final FileSystem fs , final AlluxioURI uri ) throws FileDoesNotExistException , IOException , AlluxioException , TimeoutException , InterruptedException { } }
persistAndWait ( fs , uri , - 1 ) ;
public class TokenManagerImpl { /** * { @ inheritDoc } */ public Token createToken ( String tokenType , Map < String , Object > tokenData ) throws TokenCreationFailedException { } }
try { TokenService tokenService = getTokenServiceForType ( tokenType ) ; return tokenService . createToken ( tokenData ) ; } catch ( IllegalArgumentException e ) { throw new TokenCreationFailedException ( e . getMessage ( ) , e ) ; }
public class Base64Encoder { /** * base64编码 * @ param source 被编码的base64字符串 * @ param charset 字符集 * @ return 被加密后的字符串 */ public static String encode ( byte [ ] source , String charset ) { } }
return StrUtil . str ( encode ( source , false ) , charset ) ;
public class HudsonHomeDiskUsageMonitor { /** * Depending on whether the user said " yes " or " no " , send him to the right place . */ @ RequirePOST public HttpResponse doAct ( @ QueryParameter String no ) throws IOException { } }
if ( no != null ) { disable ( true ) ; return HttpResponses . redirectViaContextPath ( "/manage" ) ; } else { return HttpResponses . redirectToDot ( ) ; }
public class TupleToAvroRecordConverter {

    /**
     * Moves data between a Tuple and an Avro Record: copies every field of
     * {@code tuple} into {@code reuse} (or a new Record when {@code reuse} is
     * {@code null}), converting per the Pangool field type.
     *
     * @param tuple source tuple; no field may be null
     * @param reuse record to fill, or {@code null} to allocate a new one
     * @return the filled record
     * @throws IOException on schema mismatch, a null field, or an
     *                     unconvertible field type
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public Record toRecord(ITuple tuple, Record reuse) throws IOException {
        Record record = reuse;
        if (record == null) {
            record = new Record(avroSchema);
        }
        // Optional strict check that the tuple's schema matches ours.
        if (schemaValidation && !tuple.getSchema().equals(pangoolSchema)) {
            throw new IOException("Tuple '" + tuple + "' " + "contains schema not expected." + "Expected schema '" + pangoolSchema + " and actual: " + tuple.getSchema());
        }
        for (int i = 0; i < pangoolSchema.getFields().size(); i++) {
            Object obj = tuple.get(i);
            Field field = pangoolSchema.getField(i);
            if (obj == null) {
                throw new IOException("Field '" + field.getName() + "' can't be null in tuple:" + tuple);
            }
            switch (field.getType()) {
            case INT:
            case LONG:
            case FLOAT:
            case BOOLEAN:
            case DOUBLE:
            case BYTES:
                // Primitive-compatible types are passed through unchanged.
                record.put(i, obj); // optimistic
                break;
            case OBJECT:
                // Serialize the object into this field's reusable buffer,
                // preferring a field-specific custom serializer when present.
                Serializer customSer = customSerializers[i];
                DataOutputBuffer buffer = buffers[i];
                buffer.reset();
                if (customSer != null) {
                    customSer.open(buffer);
                    customSer.serialize(obj);
                    customSer.close(); // TODO is this safe ?
                } else {
                    hadoopSer.ser(obj, buffer);
                }
                // TODO this byteBuffer instances should be cached and reused
                ByteBuffer byteBuffer = ByteBuffer.wrap(buffer.getData(), 0, buffer.getLength());
                record.put(i, byteBuffer);
                break;
            case ENUM:
                record.put(i, obj.toString());
                break;
            case STRING:
                record.put(i, new Utf8(obj.toString())); // could be directly String ?
                break;
            default:
                throw new IOException("Not correspondence to Avro type from Pangool type " + field.getType());
            }
        }
        return record;
    }
}
public class AbstractRegionPainter { /** * Decodes and returns a color , which is derived from a offset between two * other colors . * @ param color1 The first color * @ param color2 The second color * @ param midPoint The offset between color 1 and color 2 , a value of 0.0 * is color 1 and 1.0 is color 2; * @ return The derived color */ protected final Color decodeColor ( Color color1 , Color color2 , float midPoint ) { } }
return new Color ( deriveARGB ( color1 , color2 , midPoint ) ) ;
public class GroovyScript2RestLoader { /** * Check is specified source < code > script < / code > contains valid Groovy source * code . * @ param name script name . This name will be used by GroovyClassLoader to * identify script , e . g . specified name will be used in error * message in compilation of Groovy fails . If this parameter is * < code > null < / code > then GroovyClassLoader will use automatically * generated name * @ param script Groovy source stream * @ param src set of folders that contains Groovy source files that should be * add in class - path when validate < code > script < / code > , see * { @ link SourceFolder # getPath ( ) } . < b > NOTE < / b > To be able load * Groovy source files from specified folders the following rules * must be observed : * < ul > * < li > Groovy source files must be located in folder with respect * to package structure < / li > * < li > Name of Groovy source files must be the same as name of * class located in file < / li > * < li > Groovy source file must have extension ' . groovy ' < / li > * < / ul > * @ param files set of groovy source files that should be add in class - path * when validate < code > script < / code > . Each item must point directly * to file that contains Groovy source , see * { @ link SourceFile # getPath ( ) } . Source file can have any name and * extension * @ throws MalformedScriptException if < code > script < / code > contains not valid * source code * @ LevelAPI Provisional */ public void validateScript ( String name , InputStream script , SourceFolder [ ] src , SourceFile [ ] files ) throws MalformedScriptException { } }
if ( name != null && name . length ( ) > 0 && name . startsWith ( "/" ) ) { name = name . substring ( 1 ) ; } groovyPublisher . validateResource ( script , name , src , files ) ;
public class CodeWriter { /** * Returns the class named { @ code simpleName } when nested in the class at { @ code stackDepth } . */ private ClassName stackClassName ( int stackDepth , String simpleName ) { } }
ClassName className = ClassName . get ( packageName , typeSpecStack . get ( 0 ) . name ) ; for ( int i = 1 ; i <= stackDepth ; i ++ ) { className = className . nestedClass ( typeSpecStack . get ( i ) . name ) ; } return className . nestedClass ( simpleName ) ;
public class AbstractProfileProfileAligner { /** * Sets the target { @ link Profile } . * @ param target the second { @ link Profile } of the pair to align */ public void setTarget ( Profile < S , C > target ) { } }
this . target = target ; targetFuture = null ; reset ( ) ;
public class BasicEquivalencer { /** * { @ code equivalent } relies on { @ link java . lang . Object # equals ( Object ) equals } * to tell whether two objects are equivalent or not . When both arguments are * { @ code null } , it returns { @ code true } . When one is { @ code null } and the * other is not { @ code null } , it returns { @ code false } . Otherwise , it invokes * { @ link java . lang . Object # equals ( Object ) equals } on the * < strong > first < / strong > object , passing the second object as argument . * @ param x * The first object of the comparison . * @ param y * The second object of the comparison . * @ return { @ code true } when the arguments are equivalent . { @ code false } * otherwise . * @ since 1.0 */ @ Override public boolean equivalent ( X x , Y y ) { } }
if ( x == null && y == null ) return true ; if ( x == null || y == null ) return false ; return x . equals ( y ) ;
public class CloudTasksClient { /** * Pauses the queue . * < p > If a queue is paused then the system will stop dispatching tasks until the queue is resumed * via [ ResumeQueue ] [ google . cloud . tasks . v2 . CloudTasks . ResumeQueue ] . Tasks can still be added when * the queue is paused . A queue is paused if its [ state ] [ google . cloud . tasks . v2 . Queue . state ] is * [ PAUSED ] [ google . cloud . tasks . v2 . Queue . State . PAUSED ] . * < p > Sample code : * < pre > < code > * try ( CloudTasksClient cloudTasksClient = CloudTasksClient . create ( ) ) { * QueueName name = QueueName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ QUEUE ] " ) ; * Queue response = cloudTasksClient . pauseQueue ( name ) ; * < / code > < / pre > * @ param name Required . * < p > The queue name . For example : ` projects / PROJECT _ ID / location / LOCATION _ ID / queues / QUEUE _ ID ` * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Queue pauseQueue ( QueueName name ) { } }
PauseQueueRequest request = PauseQueueRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return pauseQueue ( request ) ;
public class PosixAPI {

    /**
     * Load the JNR implementation of the POSIX APIs for the current platform,
     * caching it lazily (the method is synchronized, so initialization is
     * thread-safe). Runtime errors surface as {@link PosixException};
     * {@link IllegalStateException} is thrown for methods not implemented on
     * this platform.
     *
     * @return some implementation (even on Windows or unsupported Unix)
     * @since 1.518
     */
    public static synchronized POSIX jnr() {
        if (posix == null) {
            posix = POSIXFactory.getPOSIX(new DefaultPOSIXHandler() {

                // Translate native errors into PosixException instead of the
                // default handler behavior.
                @Override
                public void error(Errno error, String extraData) {
                    throw new PosixException("native error " + error.description() + " " + extraData, convert(error));
                }

                @Override
                public void error(Errno error, String methodName, String extraData) {
                    throw new PosixException("native error calling " + methodName + ": " + error.description() + " " + extraData, convert(error));
                }

                // Map a JNR Errno to the jruby-posix ERRORS enum by name,
                // falling back to EIO for unknown codes.
                private org.jruby.ext.posix.POSIX.ERRORS convert(Errno error) {
                    try {
                        return org.jruby.ext.posix.POSIX.ERRORS.valueOf(error.name());
                    } catch (IllegalArgumentException x) {
                        return org.jruby.ext.posix.POSIX.ERRORS.EIO; // PosixException.message has real error anyway
                    }
                }
            }, true);
        }
        return posix;
    }
}
public class TmdbMovies { /** * This method is used to retrieve a list of the available translations for a specific movie . * @ param movieId * @ return * @ throws MovieDbException */ public ResultList < Translation > getMovieTranslations ( int movieId ) throws MovieDbException { } }
TmdbParameters parameters = new TmdbParameters ( ) ; parameters . add ( Param . ID , movieId ) ; URL url = new ApiUrl ( apiKey , MethodBase . MOVIE ) . subMethod ( MethodSub . TRANSLATIONS ) . buildUrl ( parameters ) ; String webpage = httpTools . getRequest ( url ) ; try { WrapperTranslations wrapper = MAPPER . readValue ( webpage , WrapperTranslations . class ) ; ResultList < Translation > results = new ResultList < > ( wrapper . getTranslations ( ) ) ; wrapper . setResultProperties ( results ) ; return results ; } catch ( IOException ex ) { throw new MovieDbException ( ApiExceptionType . MAPPING_FAILED , "Failed to get translations" , url , ex ) ; }
public class PathOverrideService { /** * Called right now when we add an entry to the path _ profile table * Then we update the table to add a new string that contains the old groups * and the new group ( followed by a comma ) * @ param profileId ID of profile * @ param pathId ID of path * @ param groupNum Group to add */ public void AddGroupByNumber ( int profileId , int pathId , int groupNum ) { } }
logger . info ( "adding group_id={}, to pathId={}" , groupNum , pathId ) ; String oldGroups = getGroupIdsInPathProfile ( profileId , pathId ) ; // make sure the old groups does not contain the current group we want // to add if ( ! intArrayContains ( Utils . arrayFromStringOfIntegers ( oldGroups ) , groupNum ) ) { if ( ! oldGroups . endsWith ( "," ) && ! oldGroups . isEmpty ( ) ) { oldGroups += "," ; } String newGroups = ( oldGroups + groupNum ) ; EditService . updatePathTable ( Constants . PATH_PROFILE_GROUP_IDS , newGroups , pathId ) ; } else { logger . info ( "that group is already contained in for this uuid/path" ) ; }
public class PipelinePauseStateSettingsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PipelinePauseStateSettings pipelinePauseStateSettings , ProtocolMarshaller protocolMarshaller ) { } }
if ( pipelinePauseStateSettings == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( pipelinePauseStateSettings . getPipelineId ( ) , PIPELINEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DualMessageQueue { /** * Get the message sender . */ public BaseMessageSender createMessageSender ( ) { } }
RemoteTask server = ( RemoteTask ) ( ( Application ) this . getMessageManager ( ) . getApplication ( ) ) . getRemoteTask ( null ) ; try { return new DualMessageSender ( server , this ) ; } catch ( RemoteException ex ) { ex . printStackTrace ( ) ; } return null ;
public class LoggingOutputStream { /** * Writes the specified byte to this output stream . The general contract for * < code > write < / code > is that one byte is written to the output stream . The * byte to be written is the eight low - order bits of the argument * < code > b < / code > . The 24 high - order bits of < code > b < / code > are ignored . * @ param b * the < code > byte < / code > to write */ @ Override public void write ( final int b ) throws IOException { } }
if ( hasBeenClosed ) { throw new IOException ( "The stream has been closed." ) ; } byte [ ] bytes = new byte [ 1 ] ; bytes [ 0 ] = ( byte ) ( b & 0xff ) ; String s = new String ( bytes ) ; if ( s . equals ( "\n" ) ) { flush ( ) ; } else { buffer . append ( s ) ; }
public class _SharedRendererUtils {
    /**
     * Iterates through the SelectItems with the given Iterator and tries to obtain
     * a by-class-converter based on the Class of SelectItem.getValue().
     *
     * Items whose value is null are skipped: a null value carries no class to
     * look a converter up by, and dereferencing it would throw a
     * NullPointerException (e.g. for "please select" placeholder items).
     *
     * @param iterator     iterator over the SelectItems to inspect
     * @param facesContext current faces context, used to create converters
     * @return The first suitable Converter for the given SelectItems or null.
     */
    static Converter getSelectItemsValueConverter(Iterator<SelectItem> iterator, FacesContext facesContext) {
        // Attention!
        // This code is duplicated in jsfapi component package.
        // If you change something here please do the same in the other class!
        Converter converter = null;
        while (converter == null && iterator.hasNext()) {
            SelectItem item = iterator.next();
            if (item instanceof SelectItemGroup) {
                // Recurse into the group's nested items.
                Iterator<SelectItem> groupIterator = Arrays.asList(((SelectItemGroup) item).getSelectItems()).iterator();
                converter = getSelectItemsValueConverter(groupIterator, facesContext);
            } else {
                Object itemValue = item.getValue();
                // FIX: previously getValue() was dereferenced unconditionally,
                // throwing a NullPointerException for null-valued items.
                if (itemValue == null) {
                    continue;
                }
                Class<?> selectItemsType = itemValue.getClass();
                // optimization: no conversion for String values
                if (String.class.equals(selectItemsType)) {
                    return null;
                }
                try {
                    converter = facesContext.getApplication().createConverter(selectItemsType);
                } catch (FacesException e) {
                    // nothing - try again with the next item
                }
            }
        }
        return converter;
    }
}
public class TrieNode { /** * Gets children . * @ return the children */ public Stream < ? extends TrieNode > getChildren ( ) { } }
if ( getData ( ) . firstChildIndex >= 0 ) { return IntStream . range ( 0 , getData ( ) . numberOfChildren ) . mapToObj ( i -> new TrieNode ( this . trie , getData ( ) . firstChildIndex + i , TrieNode . this ) ) ; } else { return Stream . empty ( ) ; }
public class ApiOvhDedicatedserver { /** * Get this object properties * REST : GET / dedicated / server / { serviceName } / serviceMonitoring / { monitoringId } / alert / email / { alertId } * @ param serviceName [ required ] The internal name of your dedicated server * @ param monitoringId [ required ] This monitoring id * @ param alertId [ required ] This monitoring id */ public OvhEmailAlert serviceName_serviceMonitoring_monitoringId_alert_email_alertId_GET ( String serviceName , Long monitoringId , Long alertId ) throws IOException { } }
String qPath = "/dedicated/server/{serviceName}/serviceMonitoring/{monitoringId}/alert/email/{alertId}" ; StringBuilder sb = path ( qPath , serviceName , monitoringId , alertId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhEmailAlert . class ) ;
public class PhaseOneApplication { /** * Stage one validation of the file , returning the converted document or * { @ code null } . * @ param file XBEL file * @ return Document */ private Document stage1 ( final File file ) { } }
beginStage ( PHASE1_STAGE1_HDR , "1" , NUM_PHASES ) ; final StringBuilder bldr = new StringBuilder ( ) ; stageOutput ( "Validating " + file ) ; long t1 = currentTimeMillis ( ) ; final Stage1Output output ; if ( isBELScript ( file ) ) { output = p1 . stage1BELValidation ( file ) ; } else { output = p1 . stage1XBELValidation ( file ) ; } if ( output . hasValidationErrors ( ) ) { for ( final ValidationError error : output . getValidationErrors ( ) ) { stageError ( error . getUserFacingMessage ( ) ) ; } return null ; } if ( output . hasConversionError ( ) ) { stageError ( output . getConversionError ( ) . getUserFacingMessage ( ) ) ; return null ; } if ( output . getSymbolWarning ( ) != null ) { stageWarning ( output . getSymbolWarning ( ) . getUserFacingMessage ( ) ) ; } long t2 = currentTimeMillis ( ) ; markTime ( bldr , t1 , t2 ) ; markEndStage ( bldr ) ; stageOutput ( bldr . toString ( ) ) ; return output . getDocument ( ) ;
public class JtsBinaryParser { /** * Return the { @ link org . postgis . binary . ValueGetter } for the endian from the given * { @ link org . postgis . binary . ByteGetter } . * @ param bytes { @ link org . postgis . binary . ByteGetter } to read . * @ return The { @ link org . postgis . binary . ValueGetter } for the endian */ public static ValueGetter valueGetterForEndian ( ByteGetter bytes ) { } }
if ( bytes . get ( 0 ) == 0 ) { return new ValueGetter . XDR ( bytes ) ; } else if ( bytes . get ( 0 ) == 1 ) { return new ValueGetter . NDR ( bytes ) ; } else { throw new IllegalArgumentException ( "Unknown Endian type:" + bytes . get ( 0 ) ) ; }
public class WindowsRegistry { /** * Checks if a given key exists . * @ param keyName Key name to check for existence . * @ return < code > true < / code > if the key exists , otherwise * < code > false < / code > . * @ throws RegistryException */ public static boolean existsKey ( String keyName ) throws RegistryException { } }
String [ ] keyNameParts = keyName . split ( REG_PATH_SEPARATOR_REGEX ) ; // first part must be valid hive if ( Hive . getHive ( keyNameParts [ 0 ] ) == null ) { return false ; } for ( int i = 1 ; i < keyNameParts . length ; i ++ ) { // build path StringBuilder path = new StringBuilder ( ) ; for ( int j = 0 ; j < i ; j ++ ) { path . append ( keyNameParts [ j ] ) ; if ( j < i ) { path . append ( REG_PATH_SEPARATOR ) ; } } // check if next element in path exists List < String > subkeys = readSubkeys ( path . toString ( ) ) ; if ( ! subkeys . contains ( keyNameParts [ i ] ) ) { return false ; } } return true ;
public class DateUtils {
    /**
     * Get how many days between two date.
     *
     * @param date1 date to be tested.
     * @param date2 date to be tested.
     * @return how many days between two date.
     */
    public static long subDays(final Date date1, final Date date2) {
        // Delegate to the generic period-difference helper with a day-sized unit.
        return subTime(date1, date2, DatePeriod.DAY);
    }
}
public class Edge { /** * Get numeric edge type from string representation . */ public static @ Type int stringToEdgeType ( String s ) { } }
s = s . toUpperCase ( Locale . ENGLISH ) ; if ( "FALL_THROUGH" . equals ( s ) ) { return FALL_THROUGH_EDGE ; } else if ( "IFCMP" . equals ( s ) ) { return IFCMP_EDGE ; } else if ( "SWITCH" . equals ( s ) ) { return SWITCH_EDGE ; } else if ( "SWITCH_DEFAULT" . equals ( s ) ) { return SWITCH_DEFAULT_EDGE ; } else if ( "JSR" . equals ( s ) ) { return JSR_EDGE ; } else if ( "RET" . equals ( s ) ) { return RET_EDGE ; } else if ( "GOTO" . equals ( s ) ) { return GOTO_EDGE ; } else if ( "RETURN" . equals ( s ) ) { return RETURN_EDGE ; } else if ( "UNHANDLED_EXCEPTION" . equals ( s ) ) { return UNHANDLED_EXCEPTION_EDGE ; } else if ( "HANDLED_EXCEPTION" . equals ( s ) ) { return HANDLED_EXCEPTION_EDGE ; } else if ( "START" . equals ( s ) ) { return START_EDGE ; } else if ( "BACKEDGE_TARGET_EDGE" . equals ( s ) ) { return BACKEDGE_TARGET_EDGE ; } else if ( "BACKEDGE_SOURCE_EDGE" . equals ( s ) ) { return BACKEDGE_SOURCE_EDGE ; } else if ( "EXIT_EDGE" . equals ( s ) ) { return EXIT_EDGE ; } else { throw new IllegalArgumentException ( "Unknown edge type: " + s ) ; }
public class ServletUtil { /** * Extract the parameters and file items allowing for multi part form fields . * @ param request the request being processed * @ param parameters the map to store non - file request parameters in . * @ param files the map to store the uploaded file parameters in . */ public static void extractParameterMap ( final HttpServletRequest request , final Map < String , String [ ] > parameters , final Map < String , FileItem [ ] > files ) { } }
if ( isMultipart ( request ) ) { ServletFileUpload upload = new ServletFileUpload ( ) ; upload . setFileItemFactory ( new DiskFileItemFactory ( ) ) ; try { List fileItems = upload . parseRequest ( request ) ; uploadFileItems ( fileItems , parameters , files ) ; } catch ( FileUploadException ex ) { throw new SystemException ( ex ) ; } // Include Query String Parameters ( only if parameters were not included in the form fields ) for ( Object entry : request . getParameterMap ( ) . entrySet ( ) ) { Map . Entry < String , String [ ] > param = ( Map . Entry < String , String [ ] > ) entry ; if ( ! parameters . containsKey ( param . getKey ( ) ) ) { parameters . put ( param . getKey ( ) , param . getValue ( ) ) ; } } } else { parameters . putAll ( request . getParameterMap ( ) ) ; }
public class ReflectionUtils {
    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields.
     *
     * @param clazz the target class to analyze
     * @param fc the callback to invoke for each field
     * @throws IllegalArgumentException propagated from the three-argument overload
     */
    public static void doWithFields(Class<?> clazz, FieldCallback fc) throws IllegalArgumentException {
        // Convenience overload delegating with a null field filter — presumably
        // meaning "apply to all fields"; see the three-argument overload.
        doWithFields(clazz, fc, null);
    }
}
public class Transition { /** * Force the transition to move to its end state , ending all the animators . */ void forceToEnd ( @ Nullable ViewGroup sceneRoot ) { } }
ArrayMap < Animator , AnimationInfo > runningAnimators = getRunningAnimators ( ) ; int numOldAnims = runningAnimators . size ( ) ; if ( sceneRoot != null ) { Object windowId = ViewUtils . getWindowId ( sceneRoot ) ; for ( int i = numOldAnims - 1 ; i >= 0 ; i -- ) { AnimationInfo info = runningAnimators . valueAt ( i ) ; if ( info . view != null && windowId != null && windowId . equals ( info . windowId ) ) { Animator anim = runningAnimators . keyAt ( i ) ; anim . end ( ) ; } } }
public class BitsyTransactionContext {
    /**
     * This method is called to remove an edge through the IEdgeRemover.
     *
     * @param edgeId id of the edge to remove from this transaction's changes
     * @return the removed edge
     */
    private IEdge removeEdgeOnVertexDelete(UUID edgeId) throws BitsyException {
        // Invoked while clearing adjMap, so the edge was added in this Tx and
        // must therefore be present in changedEdges.
        final BitsyEdge removed = changedEdges.remove(edgeId);
        // Only an edge that is present in this Tx can be removed by the IEdgeRemover.
        assert removed != null;
        return removed;
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link AbstractBridgeType}{@code >}
     *
     * @param value Java instance representing xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link AbstractBridgeType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/citygml/bridge/2.0", name = "_AbstractBridge", substitutionHeadNamespace = "http://www.opengis.net/citygml/2.0", substitutionHeadName = "_Site")
    public JAXBElement<AbstractBridgeType> create_AbstractBridge(AbstractBridgeType value) {
        // Wrap the value with the pre-built qualified name; the null argument
        // is the JAXBElement scope (no declared scope).
        return new JAXBElement<AbstractBridgeType>(__AbstractBridge_QNAME, AbstractBridgeType.class, null, value);
    }
}
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcConstraintEnum createIfcConstraintEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcConstraintEnum result = IfcConstraintEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class PolyLabel { /** * Re - projection functions */ private static double latitudeToY ( double latitude ) { } }
double sinLatitude = Math . sin ( latitude * ( Math . PI / 180 ) ) ; return 0.5 - Math . log ( ( 1 + sinLatitude ) / ( 1 - sinLatitude ) ) / ( 4 * Math . PI ) ;
public class LdapIdentityStoreDefinitionWrapper { /** * Evaluate and return the groupMemberOfAttribute . * @ param immediateOnly If true , only return a non - null value if the setting is either an * immediate EL expression or not set by an EL expression . If false , return the * value regardless of where it is evaluated . * @ return The groupMemberOfAttribute or null if immediateOnly = = true AND the value is not evaluated * from a deferred EL expression . */ @ FFDCIgnore ( IllegalArgumentException . class ) private String evaluateGroupMemberOfAttribute ( boolean immediateOnly ) { } }
try { return elHelper . processString ( "groupMemberOfAttribute" , this . idStoreDefinition . groupMemberOfAttribute ( ) , immediateOnly ) ; } catch ( IllegalArgumentException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isWarningEnabled ( ) ) { Tr . warning ( tc , "JAVAEESEC_WARNING_IDSTORE_CONFIG" , new Object [ ] { "groupMemberOfAttribute" , "memberOf" } ) ; } return "memberOf" ; /* Default value from spec . */ }
public class GoogleCloudStorageImpl {
    /**
     * Helper for converting a StorageResourceId + StorageObject into a GoogleCloudStorageItemInfo.
     *
     * The precondition checks verify the two arguments describe the same
     * object (matching bucket and object name) before any conversion happens.
     *
     * @param resourceId non-null id referring to a StorageObject, matching {@code object}
     * @param object     non-null API-level object metadata
     * @return the combined item info
     * @throws IllegalArgumentException when any precondition fails
     */
    public static GoogleCloudStorageItemInfo createItemInfoForStorageObject(StorageResourceId resourceId, StorageObject object) {
        Preconditions.checkArgument(resourceId != null, "resourceId must not be null");
        Preconditions.checkArgument(object != null, "object must not be null");
        Preconditions.checkArgument(resourceId.isStorageObject(), "resourceId must be a StorageObject. resourceId: %s", resourceId);
        Preconditions.checkArgument(resourceId.getBucketName().equals(object.getBucket()), "resourceId.getBucketName() must equal object.getBucket(): '%s' vs '%s'", resourceId.getBucketName(), object.getBucket());
        Preconditions.checkArgument(resourceId.getObjectName().equals(object.getName()), "resourceId.getObjectName() must equal object.getName(): '%s' vs '%s'", resourceId.getObjectName(), object.getName());
        // User metadata is only decoded when present; null stays null.
        Map<String, byte[]> decodedMetadata = object.getMetadata() == null ? null : decodeMetadata(object.getMetadata());
        byte[] md5Hash = null;
        byte[] crc32c = null;
        // Checksums arrive base64-encoded from the API; decode only non-empty values.
        if (!Strings.isNullOrEmpty(object.getCrc32c())) {
            crc32c = BaseEncoding.base64().decode(object.getCrc32c());
        }
        if (!Strings.isNullOrEmpty(object.getMd5Hash())) {
            md5Hash = BaseEncoding.base64().decode(object.getMd5Hash());
        }
        // GCS API does not make available location and storage class at object level at present
        // (it is same for all objects in a bucket). Further, we do not use the values for objects.
        // The GoogleCloudStorageItemInfo thus has 'null' for location and storage class.
        return new GoogleCloudStorageItemInfo(resourceId, object.getUpdated().getValue(), object.getSize().longValue(), /* location= */ null, /* storageClass= */ null, object.getContentType(), object.getContentEncoding(), decodedMetadata, object.getGeneration(), object.getMetageneration(), new VerificationAttributes(md5Hash, crc32c));
    }
}
public class UserDataManager {
    /**
     * Get the URL to the passed UDO object.
     *
     * @param aRequestScope
     *        The request web scope to be used. Required for cookie-less handling.
     *        May not be <code>null</code>.
     * @param aUDO
     *        The UDO object to get the URL from.
     * @return The URL to the user data object, including the context path. Always
     *         starting with a "/". E.g. <code>/context/user/file.txt</code> if
     *         this object points to <code>/file.txt</code> and the user data path
     *         is <code>/user</code>.
     */
    @Nonnull
    @Nonempty
    public static SimpleURL getURL(@Nonnull final IRequestWebScopeWithoutResponse aRequestScope, @Nonnull final IUserDataObject aUDO) {
        // Thin wrapper: path resolution lives in getURLPath.
        return new SimpleURL(getURLPath(aRequestScope, aUDO));
    }
}
public class RelationMetadataProcessorFactory { /** * Gets the relation metadata processor . * @ param relationField * the relation field * @ return the relation metadata processor */ public static RelationMetadataProcessor getRelationMetadataProcessor ( Field relationField , KunderaMetadata kunderaMetadata ) { } }
RelationMetadataProcessor relProcessor = null ; // OneToOne if ( relationField . isAnnotationPresent ( OneToOne . class ) ) { relProcessor = new OneToOneRelationMetadataProcessor ( kunderaMetadata ) ; } // OneToMany else if ( relationField . isAnnotationPresent ( OneToMany . class ) ) { relProcessor = new OneToManyRelationMetadataProcessor ( kunderaMetadata ) ; } // ManyToOne else if ( relationField . isAnnotationPresent ( ManyToOne . class ) ) { relProcessor = new ManyToOneRelationMetadataProcessor ( kunderaMetadata ) ; } // ManyToMany else if ( relationField . isAnnotationPresent ( ManyToMany . class ) ) { relProcessor = new ManyToManyRelationMetadataProcessor ( kunderaMetadata ) ; } return relProcessor ;
public class BaseSelectProvider { /** * 查询总数 * @ param ms * @ return */ public String selectCount ( MappedStatement ms ) { } }
Class < ? > entityClass = getEntityClass ( ms ) ; StringBuilder sql = new StringBuilder ( ) ; sql . append ( SqlHelper . selectCount ( entityClass ) ) ; sql . append ( SqlHelper . fromTable ( entityClass , tableName ( entityClass ) ) ) ; sql . append ( SqlHelper . whereAllIfColumns ( entityClass , isNotEmpty ( ) ) ) ; return sql . toString ( ) ;
public class DecodingResource { /** * Returns a DecodingResource that wraps an existing resource with a decompressor for the given * Content - Encoding . * @ param contentEncoding the value of the Content - Encoding header * @ param source the resource to wrap * @ return the new resource or null if the contentEncoding is not supported */ public static DecodingResource forEncoding ( String contentEncoding , Resource source ) throws IOException { } }
InputStream stream = decodingStream ( contentEncoding , source ) ; if ( stream == null ) { return null ; } return new DecodingResource ( source , stream ) ;
public class EnvironmentsInner { /** * Resets the user password on an environment This operation can take a while to complete . * @ param resourceGroupName The name of the resource group . * @ param labAccountName The name of the lab Account . * @ param labName The name of the lab . * @ param environmentSettingName The name of the environment Setting . * @ param environmentName The name of the environment . * @ param resetPasswordPayload Represents the payload for resetting passwords . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > beginResetPasswordAsync ( String resourceGroupName , String labAccountName , String labName , String environmentSettingName , String environmentName , ResetPasswordPayload resetPasswordPayload ) { } }
return beginResetPasswordWithServiceResponseAsync ( resourceGroupName , labAccountName , labName , environmentSettingName , environmentName , resetPasswordPayload ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class RRFedNonFedBudget10V1_1Generator {
    /**
     * This method gets ParticipantTraineeSupportCosts details in BudgetYearDataType such as TuitionFeeHealthInsurance,
     * Stipends, Subsistence, Travel, Other, ParticipantTraineeNumber and TotalCost based on the BudgetPeriodInfo for the
     * RRFedNonFedBudget.
     *
     * Each cost category follows the same pattern: set the federal amount when
     * present, set the non-federal (cost-sharing) amount when present, and
     * compute the fed+non-fed total from whichever parts exist.
     *
     * @param periodInfo (BudgetPeriodInfo) budget period entry; a null argument yields an empty instance.
     * @return ParticipantTraineeSupportCosts corresponding to the BudgetPeriodInfo object.
     */
    private ParticipantTraineeSupportCosts getParticipantTraineeSupportCosts(BudgetPeriodDto periodInfo) {
        ParticipantTraineeSupportCosts traineeSupportCosts = ParticipantTraineeSupportCosts.Factory.newInstance();
        if (periodInfo != null) {
            // --- Tuition / fees / health insurance ---
            TotalDataType totalTution = TotalDataType.Factory.newInstance();
            if (periodInfo.getPartTuition() != null) {
                totalTution.setFederal(periodInfo.getPartTuition().bigDecimalValue());
            }
            if (periodInfo.getPartTuitionCostSharing() != null) {
                totalTution.setNonFederal(periodInfo.getPartTuitionCostSharing().bigDecimalValue());
                if (periodInfo.getPartTuition() != null) {
                    totalTution.setTotalFedNonFed(periodInfo.getPartTuition().add(periodInfo.getPartTuitionCostSharing()).bigDecimalValue());
                } else {
                    totalTution.setTotalFedNonFed(periodInfo.getPartTuitionCostSharing().bigDecimalValue());
                }
            }
            traineeSupportCosts.setTuitionFeeHealthInsurance(totalTution);
            // --- Stipends ---
            TotalDataType totalStipends = TotalDataType.Factory.newInstance();
            if (periodInfo.getpartStipendCost() != null) {
                totalStipends.setFederal(periodInfo.getpartStipendCost().bigDecimalValue());
            }
            if (periodInfo.getPartStipendCostSharing() != null) {
                totalStipends.setNonFederal(periodInfo.getPartStipendCostSharing().bigDecimalValue());
                if (periodInfo.getpartStipendCost() != null) {
                    totalStipends.setTotalFedNonFed(periodInfo.getpartStipendCost().add(periodInfo.getPartStipendCostSharing()).bigDecimalValue());
                } else {
                    totalStipends.setTotalFedNonFed(periodInfo.getPartStipendCostSharing().bigDecimalValue());
                }
            }
            traineeSupportCosts.setStipends(totalStipends);
            // --- Travel ---
            TotalDataType totalTravel = TotalDataType.Factory.newInstance();
            if (periodInfo.getpartTravelCost() != null) {
                totalTravel.setFederal(periodInfo.getpartTravelCost().bigDecimalValue());
            }
            if (periodInfo.getPartTravelCostSharing() != null) {
                totalTravel.setNonFederal(periodInfo.getPartTravelCostSharing().bigDecimalValue());
                if (periodInfo.getpartTravelCost() != null) {
                    totalTravel.setTotalFedNonFed(periodInfo.getpartTravelCost().add(periodInfo.getPartTravelCostSharing()).bigDecimalValue());
                } else {
                    totalTravel.setTotalFedNonFed(periodInfo.getPartTravelCostSharing().bigDecimalValue());
                }
            }
            traineeSupportCosts.setParticipantTravel(totalTravel);
            // --- Subsistence ---
            TotalDataType totalSubsistence = TotalDataType.Factory.newInstance();
            if (periodInfo.getPartSubsistence() != null) {
                totalSubsistence.setFederal(periodInfo.getPartSubsistence().bigDecimalValue());
            }
            if (periodInfo.getPartSubsistenceCostSharing() != null) {
                totalSubsistence.setNonFederal(periodInfo.getPartSubsistenceCostSharing().bigDecimalValue());
                if (periodInfo.getPartSubsistence() != null) {
                    totalSubsistence.setTotalFedNonFed(periodInfo.getPartSubsistence().add(periodInfo.getPartSubsistenceCostSharing()).bigDecimalValue());
                } else {
                    totalSubsistence.setTotalFedNonFed(periodInfo.getPartSubsistenceCostSharing().bigDecimalValue());
                }
            }
            traineeSupportCosts.setSubsistence(totalSubsistence);
            traineeSupportCosts.setOther(getOtherPTSupportCosts(periodInfo));
            traineeSupportCosts.setParticipantTraineeNumber(periodInfo.getparticipantCount());
            // --- Summary totals across all categories ---
            // NOTE(review): unlike the per-category code above, these chained
            // add() calls do NOT null-check the individual cost getters; if any
            // of them can return null here, this throws a NullPointerException.
            // Confirm callers guarantee non-null costs at this point.
            SummaryDataType summary = SummaryDataType.Factory.newInstance();
            summary.setFederalSummary(periodInfo.getpartOtherCost().add(periodInfo.getpartStipendCost().add(periodInfo.getpartTravelCost().add(periodInfo.getPartSubsistence().add(periodInfo.getPartTuition())))).bigDecimalValue());
            summary.setNonFederalSummary(periodInfo.getPartOtherCostSharing().add(periodInfo.getPartStipendCostSharing().add(periodInfo.getPartTravelCostSharing().add(periodInfo.getPartSubsistenceCostSharing().add(periodInfo.getPartTuitionCostSharing())))).bigDecimalValue());
            if (summary.getNonFederalSummary() != null) {
                if (summary.getFederalSummary() != null) {
                    summary.setTotalFedNonFedSummary(summary.getFederalSummary().add(summary.getNonFederalSummary()));
                } else {
                    summary.setTotalFedNonFedSummary(summary.getNonFederalSummary());
                }
            }
            traineeSupportCosts.setTotalCost(summary);
        }
        return traineeSupportCosts;
    }
}
public class LatexStringRepresentation { /** * Returns the latex string for a variable name * @ param name the name * @ return the matching UTF8 symbol */ private static String latexName ( final String name ) { } }
final Matcher matcher = pattern . matcher ( name ) ; if ( ! matcher . matches ( ) ) return name ; if ( matcher . group ( 2 ) . isEmpty ( ) ) return matcher . group ( 1 ) ; return matcher . group ( 1 ) + "_{" + matcher . group ( 2 ) + "}" ;
public class AbstractPipe { /** * Connect provider to this pipe . Doesn ' t allow to connect one provider twice . Does register event listeners if instance of IPipeConnectionListener is given . * @ param provider * Provider * @ param paramMap * Parameters passed with connection , used in concrete pipe implementations * @ return true if provider was added , false otherwise */ public boolean subscribe ( IProvider provider , Map < String , Object > paramMap ) { } }
boolean success = providers . addIfAbsent ( provider ) ; // register event listener if given and just added if ( success && provider instanceof IPipeConnectionListener ) { listeners . addIfAbsent ( ( IPipeConnectionListener ) provider ) ; } return success ;
public class AddPrimaryKeyGeneratorMSSQL {
    /**
     * The extension's implementation is essentially a copy/paste of the default implementation, with the following changes:
     * 1) Removed other database platform specific logic other than MSSQL (purely to simplify)
     * 2) Added support for setting fillFactor
     *
     * @param statement the ADD PRIMARY KEY statement carrying the MSSQL-specific fill factor
     * @param database target database, used for identifier escaping
     * @param sqlGeneratorChain generator chain (unused here)
     * @return a single-element array holding the generated ALTER TABLE SQL
     */
    private Sql[] generateMSSQLSql(AddPrimaryKeyStatementMSSQL statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        String sql;
        if (statement.getConstraintName() == null) {
            // Unnamed constraint: let the database pick the constraint name.
            sql = "ALTER TABLE " + database.escapeTableName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName()) + " ADD PRIMARY KEY (" + database.escapeColumnNameList(statement.getColumnNames()) + ")";
        } else {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName()) + " ADD CONSTRAINT " + database.escapeConstraintName(statement.getConstraintName()) + " PRIMARY KEY";
            if (!statement.isClustered()) {
                sql += " NONCLUSTERED";
            }
            sql += " (" + database.escapeColumnNameList(statement.getColumnNames()) + ")";
        }
        // the only new feature being added is support for fillFactor
        // NOTE(review): FILLFACTOR is appended unconditionally — confirm the
        // statement always carries a fill factor, otherwise this emits
        // "FILLFACTOR = null".
        sql += " WITH (FILLFACTOR = " + statement.getFillFactor() + ")";
        if (StringUtils.trimToNull(statement.getTablespace()) != null && database.supportsTablespaces()) {
            sql += " ON " + statement.getTablespace();
        }
        if (statement.getForIndexName() != null) {
            sql += " USING INDEX " + database.escapeObjectName(statement.getForIndexCatalogName(), statement.getForIndexSchemaName(), statement.getForIndexName(), Index.class);
        }
        return new Sql[] { new UnparsedSql(sql, getAffectedPrimaryKey(statement)) };
    }
}
public class sslservice { /** * Use this API to fetch filtered set of sslservice resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static sslservice [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
sslservice obj = new sslservice ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; sslservice [ ] response = ( sslservice [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class AssetFileDef {
    /**
     * <pre>
     * The tensor to bind the asset filename to.
     * </pre>
     *
     * <code>optional .tensorflow.TensorInfo tensor_info = 1;</code>
     */
    public org.tensorflow.framework.TensorInfo getTensorInfo() {
        // Protobuf-generated accessor: return the default instance when the
        // field has not been set, never null.
        return tensorInfo_ == null ? org.tensorflow.framework.TensorInfo.getDefaultInstance() : tensorInfo_;
    }
}
public class JKAbstractCacheManager { /** * ( non - Javadoc ) * @ see com . jk . cache . CacheManager # clear ( java . lang . Class ) */ @ Override public void clear ( final Class < ? > clas ) { } }
this . logger . debug ( "@clear:" . concat ( clas . getName ( ) ) ) ; getCachableMap ( clas ) . clear ( ) ;
public class AWSIotClient { /** * Lists the targets ( thing groups ) associated with a given Device Defender security profile . * @ param listTargetsForSecurityProfileRequest * @ return Result of the ListTargetsForSecurityProfile operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws ResourceNotFoundException * The specified resource does not exist . * @ throws ThrottlingException * The rate exceeds the limit . * @ throws InternalFailureException * An unexpected error has occurred . * @ sample AWSIot . ListTargetsForSecurityProfile */ @ Override public ListTargetsForSecurityProfileResult listTargetsForSecurityProfile ( ListTargetsForSecurityProfileRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListTargetsForSecurityProfile ( request ) ;
public class ApiOvhMsServices { /** * Get active licenses for specific period of time * REST : GET / msServices / { serviceName } / sync / license * @ param period [ required ] Period of time used to determine sync account license statistics * @ param license [ required ] License type * @ param serviceName [ required ] The internal name of your Active Directory organization * API beta */ public ArrayList < OvhSyncDailyLicense > serviceName_sync_license_GET ( String serviceName , OvhSyncLicenseEnum license , OvhLicensePeriodEnum period ) throws IOException { } }
String qPath = "/msServices/{serviceName}/sync/license" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "license" , license ) ; query ( sb , "period" , period ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t4 ) ;
public class SimpleTimeZone { /** * { @ inheritDoc } */ @ Override public void setID ( String ID ) { } }
if ( isFrozen ( ) ) { throw new UnsupportedOperationException ( "Attempt to modify a frozen SimpleTimeZone instance." ) ; } super . setID ( ID ) ; transitionRulesInitialized = false ;
public class DateUtils { /** * Convert an Object of type Class to an Object . */ public static Object toObject ( Class < ? > clazz , Object value ) throws ParseException { } }
if ( value == null ) { return null ; } if ( clazz == null ) { return value ; } if ( java . sql . Date . class . isAssignableFrom ( clazz ) ) { return toDate ( value ) ; } if ( java . sql . Time . class . isAssignableFrom ( clazz ) ) { return toTime ( value ) ; } if ( java . sql . Timestamp . class . isAssignableFrom ( clazz ) ) { return toTimestamp ( value ) ; } if ( java . util . Date . class . isAssignableFrom ( clazz ) ) { return toDateTime ( value ) ; } return value ;
public class SearchResult { /** * A list of < code > SearchResult < / code > objects . * @ param results * A list of < code > SearchResult < / code > objects . */ public void setResults ( java . util . Collection < SearchRecord > results ) { } }
if ( results == null ) { this . results = null ; return ; } this . results = new java . util . ArrayList < SearchRecord > ( results ) ;
public class HistogramAggregationIterator { /** * Cotr . * @ param spans The spans that join the aggregation * @ param start _ time Any data point strictly before this timestamp will be ignored . * @ param end _ time Any data point strictly after this timestamp will be ignored . * @ param aggregation The aggregation will be applied on the spans * @ param downsampler The downsamper will be applied on each span * @ param query _ start The start time of the actual query * @ param query _ end The end time of the actual query * @ param is _ rollup Whether we are handling the rollup data points * @ return */ public static HistogramAggregationIterator create ( final List < HistogramSpan > spans , final long start_time , final long end_time , final HistogramAggregation aggregation , final DownsamplingSpecification downsampler , final long query_start , final long query_end , final boolean is_rollup ) { } }
final int size = spans . size ( ) ; final HistogramSeekableView [ ] iterators = new HistogramSeekableView [ size ] ; for ( int i = 0 ; i < size ; i ++ ) { HistogramSeekableView it ; if ( downsampler == DownsamplingSpecification . NO_DOWNSAMPLER ) { it = spans . get ( i ) . spanIterator ( ) ; } else { it = spans . get ( i ) . downsampler ( start_time , end_time , downsampler , is_rollup , query_start , query_end ) ; } iterators [ i ] = it ; } return new HistogramAggregationIterator ( iterators , start_time , end_time , aggregation ) ;
public class CommerceDiscountUsageEntryPersistenceImpl { /** * Creates a new commerce discount usage entry with the primary key . Does not add the commerce discount usage entry to the database . * @ param commerceDiscountUsageEntryId the primary key for the new commerce discount usage entry * @ return the new commerce discount usage entry */ @ Override public CommerceDiscountUsageEntry create ( long commerceDiscountUsageEntryId ) { } }
CommerceDiscountUsageEntry commerceDiscountUsageEntry = new CommerceDiscountUsageEntryImpl ( ) ; commerceDiscountUsageEntry . setNew ( true ) ; commerceDiscountUsageEntry . setPrimaryKey ( commerceDiscountUsageEntryId ) ; commerceDiscountUsageEntry . setCompanyId ( companyProvider . getCompanyId ( ) ) ; return commerceDiscountUsageEntry ;
public class PrimaveraXERFileReader { /** * Process tasks . */ private void processTasks ( ) { } }
List < Row > wbs = getRows ( "projwbs" , "proj_id" , m_projectID ) ; List < Row > tasks = getRows ( "task" , "proj_id" , m_projectID ) ; // List < Row > wbsmemos = getRows ( " wbsmemo " , " proj _ id " , m _ projectID ) ; // List < Row > taskmemos = getRows ( " taskmemo " , " proj _ id " , m _ projectID ) ; Collections . sort ( wbs , WBS_ROW_COMPARATOR ) ; m_reader . processTasks ( wbs , tasks /* , wbsmemos , taskmemos */ ) ;
public class GitConfigMonitor { /** * check whether the file has the proper naming and hierarchy * @ param configFilePath the relative path from the repo root * @ return false if the file does not conform */ private boolean checkConfigFilePath ( String configFilePath ) { } }
// The config needs to stored at configDir / flowGroup / flowName . ( pull | job | json | conf ) Path configFile = new Path ( configFilePath ) ; String fileExtension = Files . getFileExtension ( configFile . getName ( ) ) ; if ( configFile . depth ( ) != CONFIG_FILE_DEPTH || ! configFile . getParent ( ) . getParent ( ) . getName ( ) . equals ( folderName ) || ! ( PullFileLoader . DEFAULT_JAVA_PROPS_PULL_FILE_EXTENSIONS . contains ( fileExtension ) || PullFileLoader . DEFAULT_JAVA_PROPS_PULL_FILE_EXTENSIONS . contains ( fileExtension ) ) ) { log . warn ( "Changed file does not conform to directory structure and file name format, skipping: " + configFilePath ) ; return false ; } return true ;
public class ClassParser {
    /**
     * Reads a single constant-pool entry from the input stream.
     *
     * The entry's tag byte selects a format string from
     * {@code CONSTANT_FORMAT_MAP}; each character of that format dictates
     * how the next field is decoded from the stream, in order.
     *
     * @return a {@code Constant} holding the tag and its decoded fields
     * @throws InvalidClassFileFormatException if the tag is out of range or unmapped
     * @throws IOException if reading from the underlying stream fails
     */
    private Constant readConstant() throws InvalidClassFileFormatException, IOException {
        int tag = in.readUnsignedByte();
        // Reject tags outside the table (the < 0 check is defensive:
        // readUnsignedByte cannot return a negative value).
        if (tag < 0 || tag >= CONSTANT_FORMAT_MAP.length) {
            throw new InvalidClassFileFormatException(expectedClassDescriptor, codeBaseEntry);
        }
        String format = CONSTANT_FORMAT_MAP[tag];
        // A null slot means the tag value is not a valid constant kind.
        if (format == null) {
            throw new InvalidClassFileFormatException(expectedClassDescriptor, codeBaseEntry);
        }
        // One decoded value per format character, read strictly in order.
        Object[] data = new Object[format.length()];
        for (int i = 0; i < format.length(); i++) {
            char spec = format.charAt(i);
            switch (spec) {
            case '8':  // modified-UTF8 string
                data[i] = in.readUTF();
                break;
            case 'I':  // 32-bit int
                data[i] = in.readInt();
                break;
            case 'F':  // 32-bit float
                data[i] = Float.valueOf(in.readFloat());
                break;
            case 'L':  // 64-bit long
                data[i] = in.readLong();
                break;
            case 'D':  // 64-bit double
                data[i] = Double.valueOf(in.readDouble());
                break;
            case 'i':  // unsigned 16-bit index
                data[i] = in.readUnsignedShort();
                break;
            case 'b':  // unsigned 8-bit value
                data[i] = in.readUnsignedByte();
                break;
            default:
                // Unreachable unless CONSTANT_FORMAT_MAP contains a bad spec.
                throw new IllegalStateException();
            }
        }
        return new Constant(tag, data);
    }
}
public class AbstractConnectProtocol { /** * Set ssl socket cipher according to options . * @ param sslSocket current ssl socket * @ throws SQLException if a cipher isn ' t known */ private void enabledSslCipherSuites ( SSLSocket sslSocket ) throws SQLException { } }
if ( options . enabledSslCipherSuites != null ) { List < String > possibleCiphers = Arrays . asList ( sslSocket . getSupportedCipherSuites ( ) ) ; String [ ] ciphers = options . enabledSslCipherSuites . split ( "[,;\\s]+" ) ; for ( String cipher : ciphers ) { if ( ! possibleCiphers . contains ( cipher ) ) { throw new SQLException ( "Unsupported SSL cipher '" + cipher + "'. Supported ciphers : " + possibleCiphers . toString ( ) . replace ( "[" , "" ) . replace ( "]" , "" ) ) ; } } sslSocket . setEnabledCipherSuites ( ciphers ) ; }
public class CheckedExceptionsFactory {
    /**
     * Constructs and initializes a new {@link IOException} with the given
     * {@link String message} formatted with the given {@link Object[] arguments}.
     *
     * @param message {@link String} describing the {@link IOException exception}.
     * @param args {@link Object[] arguments} used to replace format placeholders in the {@link String message}.
     * @return a new {@link IOException} with the given {@link String message}.
     * @see #newIOException(Throwable, String, Object...)
     * @see java.io.IOException
     */
    public static IOException newIOException(String message, Object... args) {
        // Delegate to the cause-accepting overload with no cause.
        return newIOException(null, message, args);
    }
}
public class OTxSegment {
    /**
     * Appends a transaction log entry to the segment file.
     *
     * Layout (in write order): status byte, operation byte, tx id (int),
     * cluster id (short), cluster offset (long), record type (byte),
     * record version (int), data segment id (int), content size (int),
     * then the raw record content. The write order defines the on-disk
     * format, so it must not be changed.
     *
     * @throws IOException if allocating space or writing to the file fails
     */
    public void addLog(final byte iOperation, final int iTxId, final int iClusterId, final long iClusterOffset, final byte iRecordType, final int iRecordVersion, final byte[] iRecordContent, int dataSegmentId) throws IOException {
        // Content may be null; it is recorded as a zero-length payload.
        final int contentSize = iRecordContent != null ? iRecordContent.length : 0;
        final int size = OFFSET_RECORD_CONTENT + contentSize;
        lock.acquireExclusiveLock();
        try {
            // Reserve space for the whole entry, then write field by field,
            // advancing the offset by each field's serialized size.
            int offset = file.allocateSpace(size);
            // Entry starts in COMMITTING state.
            file.writeByte(offset, STATUS_COMMITTING);
            offset += OBinaryProtocol.SIZE_BYTE;
            file.writeByte(offset, iOperation);
            offset += OBinaryProtocol.SIZE_BYTE;
            file.writeInt(offset, iTxId);
            offset += OBinaryProtocol.SIZE_INT;
            file.writeShort(offset, (short) iClusterId);
            offset += OBinaryProtocol.SIZE_SHORT;
            file.writeLong(offset, iClusterOffset);
            offset += OBinaryProtocol.SIZE_LONG;
            file.writeByte(offset, iRecordType);
            offset += OBinaryProtocol.SIZE_BYTE;
            file.writeInt(offset, iRecordVersion);
            offset += OBinaryProtocol.SIZE_INT;
            file.writeInt(offset, dataSegmentId);
            offset += OBinaryProtocol.SIZE_INT;
            file.writeInt(offset, contentSize);
            offset += OBinaryProtocol.SIZE_INT;
            // NOTE(review): iRecordContent may be null here; presumably
            // file.write tolerates a null source — confirm.
            file.write(offset, iRecordContent);
            offset += contentSize;
            // Force the data to disk when synchronous writes are enabled.
            if (synchEnabled)
                file.synch();
        } finally {
            lock.releaseExclusiveLock();
        }
    }
}
public class IntArrayFunctionsND { /** * Applies the given unary operator to the elements from the given array , * and stores the result in the given result array . < br > * < br > * If the given result array is < code > null < / code > , then a new array * will be created and returned . < br > * < br > * The source array and the target array may be identical . * @ param a0 The array * @ param op The operator to apply * @ param result The array that will store the result * @ return The result * @ throws IllegalArgumentException If the given arrays do not have * equal sizes . */ public static MutableIntArrayND apply ( IntArrayND a0 , IntUnaryOperator op , MutableIntArrayND result ) { } }
MutableIntArrayND finalResult = validate ( a0 , result ) ; finalResult . coordinates ( ) . parallel ( ) . forEach ( t -> { int operand0 = a0 . get ( t ) ; int r = op . applyAsInt ( operand0 ) ; finalResult . set ( t , r ) ; } ) ; return finalResult ;
public class MapModel {
    /**
     * Paintable implementation. First lets the PainterVisitor paint this
     * object, then, if recursive is true, paints the layers in order, and
     * finally paints the feature currently being edited (if any).
     */
    public void accept(PainterVisitor visitor, Object group, Bbox bounds, boolean recursive) {
        // Paint the MapModel itself (see MapModelPainter):
        visitor.visit(this, group);
        // Paint the layers:
        if (recursive) {
            for (Layer<?> layer : layers) {
                // A showing layer is painted recursively; a hidden layer is
                // still visited non-recursively so its top part is painted
                // and the map order is not lost (JDM). Since we are inside
                // the recursive branch, passing isShowing() is equivalent to
                // the original showing/non-showing split.
                layer.accept(visitor, group, bounds, layer.isShowing());
            }
        }
        // Paint the editing of a feature (if a feature is being edited):
        if (featureEditor.getFeatureTransaction() != null) {
            featureEditor.getFeatureTransaction().accept(visitor, group, bounds, recursive);
        }
    }
}
public class JCusparse { /** * If the given result is < strong > equal < / strong > to * cusparseStatus . JCUSPARSE _ STATUS _ INTERNAL _ ERROR * and exceptions have been enabled , this method will throw a * CudaException with an error message that corresponds to the * given result code . Otherwise , the given result is simply * returned . < br / > * < br / > * This method is used for the functions that do not return * an error code , but a constant value , like a cusparseFillMode . * The respective functions may still return internal errors * from the JNI part . * @ param result The result to check * @ return The result that was given as the parameter * @ throws CudaException If exceptions have been enabled and * the given result code is cusparseStatus . JCUSPARSE _ STATUS _ INTERNAL _ ERROR */ private static int checkForError ( int result ) { } }
if ( exceptionsEnabled && result == cusparseStatus . JCUSPARSE_STATUS_INTERNAL_ERROR ) { throw new CudaException ( cusparseStatus . stringFor ( result ) ) ; } return result ;
public class ProjectCalendar { /** * This private method allows the caller to determine if a given date is a * working day . This method takes account of calendar exceptions . It assumes * that the caller has already calculated the day of the week on which * the given day falls . * @ param date Date to be tested * @ param day Day of the week for the date under test * @ return boolean flag */ private boolean isWorkingDate ( Date date , Day day ) { } }
ProjectCalendarDateRanges ranges = getRanges ( date , null , day ) ; return ranges . getRangeCount ( ) != 0 ;
public class DynamicAccessImpl {
    /**
     * Maybe these methods' exposure needs to be re-thought?
     *
     * Stub implementation: unconditionally returns {@code null} and ignores
     * all parameters. Callers must be prepared for a null stream.
     */
    public MIMETypedStream getDatastreamDissemination(Context context, String PID, String dsID, Date asOfDateTime) throws ServerException {
        // Not implemented.
        return null;
    }
}
public class OutHttpApp {
    /**
     * Writes a chunk of bytes to the stream.
     *
     * Copies the input into the internal buffer {@code _buffer}, flushing
     * the buffer each time it fills (capacity {@code SIZE}) until the whole
     * chunk has been consumed. The buffer fill position is tracked in the
     * field {@code _offset}, which must be kept in sync with the local
     * {@code byteLength} around each flush because flushByteBuffer reads
     * and resets it.
     */
    @Override
    public void write(byte[] buffer, int offset, int length) {
        // No output for closed streams or HEAD responses.
        if (isClosed() || isHead()) {
            return;
        }
        // Local copy of the buffer fill position.
        int byteLength = _offset;
        while (true) {
            // Copy as much as fits in the remaining buffer space.
            int sublen = Math.min(length, SIZE - byteLength);
            System.arraycopy(buffer, offset, _buffer, byteLength, sublen);
            offset += sublen;
            length -= sublen;
            byteLength += sublen;
            // Done once the whole chunk has been consumed.
            if (length <= 0) {
                break;
            }
            // Buffer is full: publish the position, flush, and re-read the
            // position flushByteBuffer left behind.
            _offset = byteLength;
            flushByteBuffer();
            byteLength = _offset;
        }
        // Publish the final fill position.
        _offset = byteLength;
    }
}
public class EcodInstallation { /** * Set an alternate download location for files * @ param cacheLocation */ public void setCacheLocation ( String cacheLocation ) { } }
if ( cacheLocation . equals ( this . cacheLocation ) ) { return ; // no change } // update location domainsFileLock . writeLock ( ) . lock ( ) ; logger . trace ( "LOCK writelock" ) ; this . cacheLocation = cacheLocation ; logger . trace ( "UNLOCK writelock" ) ; domainsFileLock . writeLock ( ) . unlock ( ) ;
public class MemberSummaryBuilder { /** * Build the summary for the fields . */ public void buildPropertiesSummary ( XMLNode node , Content memberSummaryTree ) { } }
MemberSummaryWriter writer = memberSummaryWriters [ VisibleMemberMap . PROPERTIES ] ; VisibleMemberMap visibleMemberMap = visibleMemberMaps [ VisibleMemberMap . PROPERTIES ] ; addSummary ( writer , visibleMemberMap , true , memberSummaryTree ) ;
public class PrimaryBackupServiceContext {
    /**
     * Resets the current index to the given index and timestamp.
     *
     * Updates the operation type, operation/current indexes, current
     * timestamp and commit index, then advances the service's wall clock
     * to the new timestamp.
     *
     * @param index the index to which to reset the current index
     * @param timestamp the timestamp to which to reset the current timestamp
     */
    public void resetIndex(long index, long timestamp) {
        // Mark the in-flight operation type as COMMAND for the reset.
        currentOperation = OperationType.COMMAND;
        operationIndex = index;
        currentIndex = index;
        currentTimestamp = timestamp;
        // The commit index is moved in lock-step with the current index.
        setCommitIndex(index);
        // Notify the service of the new wall-clock time.
        service.tick(new WallClockTimestamp(currentTimestamp));
    }
}
public class CPDefinitionVirtualSettingPersistenceImpl { /** * Returns the last cp definition virtual setting in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition virtual setting * @ throws NoSuchCPDefinitionVirtualSettingException if a matching cp definition virtual setting could not be found */ @ Override public CPDefinitionVirtualSetting findByUuid_Last ( String uuid , OrderByComparator < CPDefinitionVirtualSetting > orderByComparator ) throws NoSuchCPDefinitionVirtualSettingException { } }
CPDefinitionVirtualSetting cpDefinitionVirtualSetting = fetchByUuid_Last ( uuid , orderByComparator ) ; if ( cpDefinitionVirtualSetting != null ) { return cpDefinitionVirtualSetting ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionVirtualSettingException ( msg . toString ( ) ) ;
public class LogGammaDistribution { /** * Compute probit ( inverse cdf ) for LogGamma distributions . * @ param p Probability * @ param k k , alpha aka . " shape " parameter * @ param theta Theta = 1.0 / Beta aka . " scaling " parameter * @ param shift Shift parameter * @ return Probit for Gamma distribution */ public static double quantile ( double p , double k , double theta , double shift ) { } }
return FastMath . exp ( GammaDistribution . quantile ( p , k , theta ) ) + shift ;
public class CommerceShippingFixedOptionRelUtil {
    /**
     * Returns an ordered range of all the commerce shipping fixed option rels where commerceShippingFixedOptionId = &#63;.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances. <code>start</code> and <code>end</code> are not primary keys, they are indexes in the result set; <code>0</code> refers to the first result. Setting both to {@link QueryUtil#ALL_POS} returns the full result set. If <code>orderByComparator</code> is specified, the query uses the given ORDER BY logic; if absent and pagination is required, the default ORDER BY from {@link CommerceShippingFixedOptionRelModelImpl} applies; if both are absent, results are sorted by primary key ascending.
     *
     * @param commerceShippingFixedOptionId the commerce shipping fixed option ID
     * @param start the lower bound of the range of commerce shipping fixed option rels
     * @param end the upper bound of the range of commerce shipping fixed option rels (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching commerce shipping fixed option rels
     */
    public static List<CommerceShippingFixedOptionRel> findByCommerceShippingFixedOptionId(long commerceShippingFixedOptionId, int start, int end, OrderByComparator<CommerceShippingFixedOptionRel> orderByComparator) {
        // Static facade: delegates directly to the persistence implementation.
        return getPersistence().findByCommerceShippingFixedOptionId(commerceShippingFixedOptionId, start, end, orderByComparator);
    }
}
public class WebAgentFeatureCache { /** * A testing hook to set the cached suites to have predictable keys */ public void setSuiteIdsUsingZeroBasedIndex ( ) { } }
synchronized ( cachedSuites ) { List < WebAgentTestSuite > s = new ArrayList < > ( cachedSuites . values ( ) ) ; cachedSuites . clear ( ) ; for ( int index = 0 ; index < s . size ( ) ; index ++ ) { WebAgentTestSuite suite = s . get ( index ) ; cachedSuites . put ( suite . getTestSuiteName ( ) + "-" + index , suite ) ; } }
public class SupportVectorLearner { /** * Performs a kernel evaluation of the a ' th and b ' th vectors in the * { @ link # vecs } array . * @ param a the first vector index * @ param b the second vector index * @ return the kernel evaluation of k ( a , b ) */ protected double kEval ( int a , int b ) { } }
if ( cacheMode == CacheMode . FULL ) { if ( a > b ) { int tmp = a ; a = b ; b = tmp ; } double val = fullCache [ a ] [ b - a ] ; if ( Double . isNaN ( val ) ) // lazy init return fullCache [ a ] [ b - a ] = k ( a , b ) ; return val ; } else if ( cacheMode == CacheMode . ROWS ) { double [ ] cache ; if ( specific_row_cache_row == a ) cache = specific_row_cache_values ; else cache = partialCache . get ( a ) ; if ( cache == null ) // not present { // make a row cache = new double [ vecs . size ( ) ] ; Arrays . fill ( cache , Double . NaN ) ; double [ ] cache_missed = partialCache . putIfAbsentAndGet ( a , cache ) ; if ( cache_missed != null ) cache = cache_missed ; if ( Double . isNaN ( cache [ b ] ) ) return cache [ b ] = k ( a , b ) ; else return cache [ b ] ; } } return k ( a , b ) ;
public class DeliveryDelayManager {
    /**
     * Adds a DeliveryDelayable reference for an item to the deliveryDelay
     * index, ordered by the delivery delay time carried by the
     * DeliveryDelayable. Once added, the item becomes eligible for unlock
     * processing at the designated time.
     *
     * @param deliveryDelayable the DeliveryDelayable item for which a
     *        reference is to be added to the deliveryDelay index.
     * @return true if the reference was added to the index, false otherwise.
     *         False may be returned if the item is not in the message store,
     *         or if the DeliveryDelayManager has not been started
     *         (addEnabled is false), or if the argument is null.
     * @throws SevereMessageStoreException if a duplicate entry is detected
     *         in the index (the manager is also stopped in that case).
     */
    public final boolean addDeliveryDelayable(DeliveryDelayable deliveryDelayable) throws SevereMessageStoreException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "addDeliveryDelayable", "objId=" + (deliveryDelayable == null ? "null" : String.valueOf(deliveryDelayable.deliveryDelayableGetID())) + " addEnabled=" + addEnabled);
        }
        boolean reply = false;
        // Ignore this entry if the deliveryDelayManager has ended or the given entry is null
        if (addEnabled && deliveryDelayable != null) {
            // Wrap the item in a reference carrying its delivery-delay time,
            // which determines its position in the index.
            long deliveryDelayTime = deliveryDelayable.deliveryDelayableGetDeliveryDelayTime();
            DeliveryDelayableReference delayedDeliverableRef = new DeliveryDelayableReference(deliveryDelayable);
            delayedDeliverableRef.setDeliveryDelayTime(deliveryDelayTime);
            // Ignore this entry if the referenced item has already gone from the message store.
            if (deliveryDelayable.deliveryDelayableIsInStore()) {
                // Index mutation and alarm scheduling happen under the shared lock.
                synchronized (lockObject) {
                    // Add the deliveryDelayable to the deliveryDelay index
                    reply = deliveryDelayIndex.put(delayedDeliverableRef);
                    if (reply) {
                        boolean scheduled = false;
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            SibTr.debug(this, tc, "Before scheduleAlarm", "deliveryDelayIndexSize=" + deliveryDelayIndex.size() + " runEnabled" + runEnabled);
                        }
                        // We just added the first entry, so kick off the alarm
                        // that will process the index.
                        if (runEnabled && deliveryDelayIndex.size() == 1) // We just added the first entry
                        {
                            scheduleAlarm(interval);
                            scheduled = true;
                        }
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            SibTr.debug(tc, "Added: DDT=" + delayedDeliverableRef.getDeliveryDelayTime() + ", objId=" + delayedDeliverableRef.getID() + ", scheduled=" + scheduled);
                        }
                    } else {
                        // Duplicate detected: record the failure, stop the
                        // manager and surface a severe exception.
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            SibTr.debug(tc, "Duplicate deliveyDelayable: DDT=" + delayedDeliverableRef.getDeliveryDelayTime() + ", objId=" + delayedDeliverableRef.getID());
                        }
                        runEnabled = false; // End the DeliveryDelayManagerLock daemon thread
                        Object[] o = { delayedDeliverableRef.getDeliveryDelayTime() + " : " + delayedDeliverableRef.getID() };
                        SevereMessageStoreException e = new SevereMessageStoreException("DUPLICATE_DELIVERYDELAYABLE_SIMS2010", o);
                        lastException = e;
                        lastExceptionTime = timeNow();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                            SibTr.exit(tc, "addDeliveryDelayable");
                        throw e;
                    }
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "addDeliveryDelayable", "reply=" + reply);
        return reply;
    }
}