signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class THttpService { /** * Creates a new multiplexed { @ link THttpService } with the specified service implementations , supporting * only the formats specified and defaulting to the specified { @ code defaultSerializationFormat } when the * client doesn ' t specify one . * < p > Currently , the only way to specify a serialization format is by using the HTTP session protocol and * setting the Content - Type header to the appropriate { @ link SerializationFormat # mediaType ( ) } . * @ param implementations a { @ link Map } whose key is service name and value is the implementation of * { @ code * . Iface } or { @ code * . AsyncIface } service interface generated by * the Apache Thrift compiler * @ param defaultSerializationFormat the default serialization format to use when not specified by the * client * @ param otherAllowedSerializationFormats other serialization formats that should be supported by this * service in addition to the default */ public static THttpService ofFormats ( Map < String , ? > implementations , SerializationFormat defaultSerializationFormat , SerializationFormat ... otherAllowedSerializationFormats ) { } }
requireNonNull ( otherAllowedSerializationFormats , "otherAllowedSerializationFormats" ) ; return ofFormats ( implementations , defaultSerializationFormat , Arrays . asList ( otherAllowedSerializationFormats ) ) ;
public class DTDValidator { /** * Method called to update information about the newly encountered ( start ) * element . At this point namespace information has been resolved , but * no DTD validation has been done . Validator is to do these validations , * including checking for attribute value ( and existence ) compatibility . */ @ Override public void validateElementStart ( String localName , String uri , String prefix ) throws XMLStreamException { } }
/* Ok , need to find the element definition ; if not found ( or * only implicitly defined ) , need to throw the exception . */ mTmpKey . reset ( prefix , localName ) ; DTDElement elem = mElemSpecs . get ( mTmpKey ) ; /* Let ' s add the entry in ( even if it ' s a null ) ; this is necessary * to keep things in - sync if allowing graceful handling of validity * errors */ int elemCount = mElemCount ++ ; if ( elemCount >= mElems . length ) { mElems = ( DTDElement [ ] ) DataUtil . growArrayBy50Pct ( mElems ) ; mValidators = ( StructValidator [ ] ) DataUtil . growArrayBy50Pct ( mValidators ) ; } mElems [ elemCount ] = mCurrElem = elem ; if ( elem == null || ! elem . isDefined ( ) ) { reportValidationProblem ( ErrorConsts . ERR_VLD_UNKNOWN_ELEM , mTmpKey . toString ( ) ) ; } // Is this element legal under the parent element ? StructValidator pv = ( elemCount > 0 ) ? mValidators [ elemCount - 1 ] : null ; if ( pv != null && elem != null ) { String msg = pv . tryToValidate ( elem . getName ( ) ) ; if ( msg != null ) { int ix = msg . indexOf ( "$END" ) ; String pname = mElems [ elemCount - 1 ] . toString ( ) ; if ( ix >= 0 ) { msg = msg . substring ( 0 , ix ) + "</" + pname + ">" + msg . substring ( ix + 4 ) ; } reportValidationProblem ( "Validation error, encountered element <" + elem . getName ( ) + "> as a child of <" + pname + ">: " + msg ) ; } } mAttrCount = 0 ; mIdAttrIndex = - 2 ; // -2 as a " don ' t know yet " marker // Ok , need to get the child validator , then : if ( elem == null ) { mValidators [ elemCount ] = null ; mCurrAttrDefs = NO_ATTRS ; mCurrHasAnyFixed = false ; mCurrSpecialAttrs = null ; } else { mValidators [ elemCount ] = elem . getValidator ( ) ; mCurrAttrDefs = elem . getAttributes ( ) ; if ( mCurrAttrDefs == null ) { mCurrAttrDefs = NO_ATTRS ; } mCurrHasAnyFixed = elem . hasFixedAttrs ( ) ; int specCount = elem . 
getSpecialCount ( ) ; if ( specCount == 0 ) { mCurrSpecialAttrs = null ; } else { BitSet bs = mTmpSpecialAttrs ; if ( bs == null ) { mTmpSpecialAttrs = bs = new BitSet ( specCount ) ; } else { bs . clear ( ) ; } mCurrSpecialAttrs = bs ; } }
public class SAX2DTM2 { /** * Override the processingInstruction ( ) interface in SAX2DTM2. * % OPT % This one is different from SAX2DTM . processingInstruction ( ) * in that we do not use extended types for PI nodes . The name of * the PI is saved in the DTMStringPool . * Receive notification of a processing instruction . * @ param target The processing instruction target . * @ param data The processing instruction data , or null if * none is supplied . * @ throws SAXException Any SAX exception , possibly * wrapping another exception . * @ see org . xml . sax . ContentHandler # processingInstruction */ public void processingInstruction ( String target , String data ) throws SAXException { } }
charactersFlush ( ) ; int dataIndex = m_data . size ( ) ; m_previous = addNode ( DTM . PROCESSING_INSTRUCTION_NODE , DTM . PROCESSING_INSTRUCTION_NODE , m_parents . peek ( ) , m_previous , - dataIndex , false ) ; m_data . addElement ( m_valuesOrPrefixes . stringToIndex ( target ) ) ; m_values . addElement ( data ) ; m_data . addElement ( m_valueIndex ++ ) ;
public class Expression { /** * Resolves the expression * @ return the value of the expression */ public String getValue ( ) { } }
return StringUtils . isEmptyTrimmed ( resolvedValue ) ? ( StringUtils . isEmptyTrimmed ( defaultValue ) ? "" : defaultValue ) : resolvedValue ;
public class StringUtils { /** * Return whether the given string has non whitespace characters . * @ param str The string * @ return True if is */ public static boolean hasText ( @ Nullable CharSequence str ) { } }
if ( isEmpty ( str ) ) { return false ; } int strLen = str . length ( ) ; for ( int i = 0 ; i < strLen ; i ++ ) { if ( ! Character . isWhitespace ( str . charAt ( i ) ) ) { return true ; } } return false ;
public class ThesisScale1DQueryReportPage { /** * Appends query page comment to request . * @ param requestContext request contract * @ param queryPage query page */ private void appendQueryPageComments ( RequestContext requestContext , final QueryPage queryPage ) { } }
PanelStamp panelStamp = RequestUtils . getActiveStamp ( requestContext ) ; Map < Long , Map < String , String > > answers = getRequestAnswerMap ( requestContext ) ; ReportPageCommentProcessor sorter = new Scale1DReportPageCommentProcessor ( panelStamp , queryPage , answers ) ; appendQueryPageComments ( requestContext , queryPage , sorter ) ;
public class Permission { /** * The private CA operations that can be performed by the designated AWS service . * @ param actions * The private CA operations that can be performed by the designated AWS service . * @ return Returns a reference to this object so that method calls can be chained together . * @ see ActionType */ public Permission withActions ( ActionType ... actions ) { } }
java . util . ArrayList < String > actionsCopy = new java . util . ArrayList < String > ( actions . length ) ; for ( ActionType value : actions ) { actionsCopy . add ( value . toString ( ) ) ; } if ( getActions ( ) == null ) { setActions ( actionsCopy ) ; } else { getActions ( ) . addAll ( actionsCopy ) ; } return this ;
public class ChunkerFeatureExtractor { /** * Feats from http : / / www . aclweb . org / anthology / P10-1040 */ public List < String > extractFeatSingle ( int i , final String [ ] tokens , final String [ ] pos ) { } }
List < String > currentFeats = new ArrayList < > ( ) ; for ( int index = Math . max ( 0 , i - 2 ) ; index < Math . min ( i + 3 , tokens . length ) ; index ++ ) { // [ - 2,2] IFeatureExtractor . addFeat ( currentFeats , "w" + ( index - i ) , tokens [ index ] ) ; IFeatureExtractor . addFeat ( currentFeats , "pos" + ( index - i ) , pos [ index ] ) ; if ( index == i - 1 ) { IFeatureExtractor . addFeat ( currentFeats , "w-10" , tokens [ i - 1 ] , tokens [ i ] ) ; IFeatureExtractor . addFeat ( currentFeats , "pos-10" , pos [ i - 1 ] , pos [ i ] ) ; } else if ( index == i + 1 ) { IFeatureExtractor . addFeat ( currentFeats , "w0+1" , tokens [ i ] , tokens [ i + 1 ] ) ; IFeatureExtractor . addFeat ( currentFeats , "pos0+1" , pos [ i ] , pos [ i + 1 ] ) ; } else if ( index == i - 2 ) { IFeatureExtractor . addFeat ( currentFeats , "pos-2-1" , pos [ i - 2 ] , pos [ i - 1 ] ) ; IFeatureExtractor . addFeat ( currentFeats , "pos-2-10" , pos [ i - 2 ] , pos [ i - 1 ] , pos [ i ] ) ; } else if ( index == i + 2 ) { IFeatureExtractor . addFeat ( currentFeats , "pos+1+2" , pos [ i + 1 ] , pos [ i + 2 ] ) ; } if ( index == i - 1 && i < tokens . length - 1 ) { IFeatureExtractor . addFeat ( currentFeats , "pos-10+1" , pos [ i - 1 ] , pos [ i ] , pos [ i + 1 ] ) ; } if ( index == i + 2 ) { IFeatureExtractor . addFeat ( currentFeats , "pos0+1+2" , pos [ i ] , pos [ i + 1 ] , pos [ i + 2 ] ) ; } } currentFeats . addAll ( BrownFeatExtractor . extractBrownFeat ( i , - 2 , 2 , tokens ) ) ; return currentFeats ;
public class ImageSlideView { /** * { @ inheritDoc } */ @ Override protected void initView ( ) { } }
node ( ) . getStyleClass ( ) . add ( "ImageSlide" ) ; if ( model ( ) . getSlide ( ) . getStyle ( ) != null ) { node ( ) . getStyleClass ( ) . add ( model ( ) . getSlide ( ) . getStyle ( ) ) ; } this . image = Resources . create ( new RelImage ( model ( ) . getImage ( ) ) ) . get ( ) ; if ( model ( ) . getSlide ( ) . getShowAnimation ( ) == null || ! "TileIn" . equalsIgnoreCase ( model ( ) . getSlide ( ) . getShowAnimation ( ) . value ( ) ) && ! "TileIn60k" . equalsIgnoreCase ( model ( ) . getSlide ( ) . getShowAnimation ( ) . value ( ) ) ) { node ( ) . getChildren ( ) . add ( ImageViewBuilder . create ( ) . image ( this . image ) . layoutX ( 0 ) . layoutY ( 0 ) . fitWidth ( this . image . getWidth ( ) ) . fitHeight ( this . image . getHeight ( ) ) . build ( ) ) ; // getRootNode ( ) . setOpacity ( 0 ) ; } if ( model ( ) . getSlide ( ) . getShowAnimation ( ) != null && AnimationType . TILE_IN == model ( ) . getSlide ( ) . getShowAnimation ( ) ) { this . tilePerRow = 5 ; } else if ( model ( ) . getSlide ( ) . getShowAnimation ( ) != null && AnimationType . TILE_IN_60_K == model ( ) . getSlide ( ) . getShowAnimation ( ) ) { this . tilePerRow = 50 ; } getTileTransition ( ) ; getFadeTransition ( ) ; if ( model ( ) . getTitle ( ) != null ) { this . slideLabel = new Label ( ) ; this . slideLabel . setPrefSize ( 1200 , 80 ) ; this . slideLabel . setAlignment ( Pos . CENTER ) ; this . slideLabel . getStyleClass ( ) . add ( "ImageTitle" ) ; this . slideLabel . setText ( model ( ) . getTitle ( ) ) ; node ( ) . getChildren ( ) . add ( this . slideLabel ) ; this . slideLabel . setOpacity ( 0.0 ) ; StackPane . setAlignment ( this . slideLabel , Pos . BOTTOM_CENTER ) ; StackPane . setMargin ( this . slideLabel , new Insets ( 0 , 0 , 100 , 0 ) ) ; }
public class Key { /** * { @ inheritDoc } */ @ Override public void write ( DataOutput out ) throws IOException { } }
super . write ( out ) ; out . writeInt ( sortMap . size ( ) ) ; for ( Entry < Integer , SortWritable > entry : sortMap . entrySet ( ) ) { entry . getValue ( ) . write ( out ) ; }
public class SymbolizerWrapper { /** * Set the { @ link ExternalGraphic } ' s path . * < p > Currently one { @ link ExternalGraphic } per { @ link Symbolizer } is supported . * < p > This is used for point styles . * @ param externalGraphicPath the path to set . * @ throws MalformedURLException */ public void setExternalGraphicPath ( String externalGraphicPath ) throws MalformedURLException { } }
if ( externalGraphicPath == null ) { PointSymbolizerWrapper pointSymbolizerWrapper = adapt ( PointSymbolizerWrapper . class ) ; if ( pointSymbolizerWrapper != null ) { Graphic graphic = pointSymbolizerWrapper . getGraphic ( ) ; graphic . graphicalSymbols ( ) . clear ( ) ; externalGraphic = null ; } } else { PointSymbolizerWrapper pointSymbolizerWrapper = adapt ( PointSymbolizerWrapper . class ) ; if ( pointSymbolizerWrapper != null ) { Graphic graphic = pointSymbolizerWrapper . getGraphic ( ) ; graphic . graphicalSymbols ( ) . clear ( ) ; String urlStr = externalGraphicPath ; if ( ! externalGraphicPath . startsWith ( "http:" ) && ! externalGraphicPath . startsWith ( "file:" ) ) { // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ urlStr = "file:" + externalGraphicPath ; // $ NON - NLS - 1 $ } else if ( externalGraphicPath . startsWith ( "./" ) ) { urlStr = "file:" + externalGraphicPath ; // $ NON - NLS - 1 $ } externalGraphic = sb . createExternalGraphic ( new URL ( urlStr ) , getFormat ( externalGraphicPath ) ) ; graphic . graphicalSymbols ( ) . add ( externalGraphic ) ; } setExternalGraphicPath ( externalGraphicPath , externalGraphic ) ; }
public class Node { /** * 计算节点期望 * @ param expected 输出期望 * @ param Z 规范化因子 * @ param size 标签个数 */ public void calcExpectation ( double [ ] expected , double Z , int size ) { } }
double c = Math . exp ( alpha + beta - cost - Z ) ; for ( int i = 0 ; fVector . get ( i ) != - 1 ; i ++ ) { int idx = fVector . get ( i ) + y ; expected [ idx ] += c ; } for ( Path p : lpath ) { p . calcExpectation ( expected , Z , size ) ; }
public class DefaultSearchHandler { /** * Executed when a search command returns . Fills the { @ link FeatureListGrid } , and then calls the * < code > afterSearch < / code > method . */ public void onSearchDone ( SearchEvent event ) { } }
if ( featureListTable != null ) { featureListTable . setLayer ( event . getLayer ( ) ) ; for ( Feature feature : event . getFeatures ( ) ) { featureListTable . addFeature ( feature ) ; } } afterSearch ( ) ;
public class JspRuntimeLibrary { /** * URL encodes a string , based on the supplied character encoding . * This performs the same function as java . next . URLEncode . encode * in J2SDK1.4 , and should be removed if the only platform supported * is 1.4 or higher . * @ param s The String to be URL encoded . * @ param enc The character encoding * @ return The URL encoded String */ public static String URLEncode ( String s , String enc ) { } }
if ( s == null ) { return "null" ; } if ( enc == null ) { enc = "ISO-8859-1" ; // Is this right ? } StringBuffer out = new StringBuffer ( s . length ( ) ) ; ByteArrayOutputStream buf = new ByteArrayOutputStream ( ) ; OutputStreamWriter writer = null ; try { writer = new OutputStreamWriter ( buf , enc ) ; } catch ( java . io . UnsupportedEncodingException ex ) { // Use the default encoding ? writer = new OutputStreamWriter ( buf ) ; } for ( int i = 0 ; i < s . length ( ) ; i ++ ) { int c = s . charAt ( i ) ; if ( c == ' ' ) { out . append ( '+' ) ; } else if ( isSafeChar ( c ) ) { out . append ( ( char ) c ) ; } else { // convert to external encoding before hex conversion try { writer . write ( c ) ; writer . flush ( ) ; } catch ( IOException e ) { buf . reset ( ) ; continue ; } byte [ ] ba = buf . toByteArray ( ) ; for ( int j = 0 ; j < ba . length ; j ++ ) { out . append ( '%' ) ; // Converting each byte in the buffer out . append ( Character . forDigit ( ( ba [ j ] >> 4 ) & 0xf , 16 ) ) ; out . append ( Character . forDigit ( ba [ j ] & 0xf , 16 ) ) ; } buf . reset ( ) ; } } return out . toString ( ) ;
public class Table { /** * Update record in the table using table ' s primary key to locate record in * the table and values of fields of specified object < I > obj < / I > to alter * record fields . * @ param obj object specifying value of primary key and new values of * updated record fields * @ return number of objects actually updated */ public int update ( Connection conn , T obj ) throws SQLException { } }
return update ( conn , obj , null ) ;
public class Driver { /** * Registers a connection handler . * @ param id Handler unique ID * @ param handler Connection handler * @ return Handler previously registered for | id | , or null if none * @ throws IllegalArgumentException if | id | or | handler | is null ( or empty ) * @ see # unregister */ public static ConnectionHandler register ( final String id , final ConnectionHandler handler ) { } }
if ( id == null || id . length ( ) == 0 ) { throw new IllegalArgumentException ( "Invalid ID: " + id ) ; } // end of if if ( handler == null ) { throw new IllegalArgumentException ( "Invalid handler: " + handler ) ; } // end of if return handlers . put ( id , handler ) ;
public class LssClient { /** * Pause your app stream by app name and stream name * @ param app app name * @ param stream stream name * @ return the response */ public PauseAppStreamResponse pauseAppStream ( String app , String stream ) { } }
PauseAppStreamRequest pauseAppStreamRequest = new PauseAppStreamRequest ( ) ; pauseAppStreamRequest . setApp ( app ) ; pauseAppStreamRequest . setStream ( stream ) ; return pauseAppStream ( pauseAppStreamRequest ) ;
public class SraReader { /** * Read an experiment from the specified URL . * @ param url URL , must not be null * @ return an experiment read from the specified URL * @ throws IOException if an I / O error occurs */ public static Experiment readExperiment ( final URL url ) throws IOException { } }
checkNotNull ( url ) ; try ( BufferedReader reader = Resources . asCharSource ( url , Charsets . UTF_8 ) . openBufferedStream ( ) ) { return readExperiment ( reader ) ; }
public class MetricsFileSystemInstrumentation { /** * Add timer metrics to { @ link DistributedFileSystem # listStatus ( Path ) } */ public FileStatus [ ] listStatus ( Path path ) throws IOException { } }
try ( TimerContextWithLog context = new TimerContextWithLog ( listStatusTimer . time ( ) , "listStatus" , path ) ) { FileStatus [ ] statuses = super . listStatus ( path ) ; context . setResult ( statuses ) ; return statuses ; }
public class SimpleMutableDateTime { /** * Creates SimpleMutableDateTime and initializes it to given ZoneId . * @ param zoneId * @ return */ public static final SimpleMutableDateTime now ( ZoneId zoneId ) { } }
ZonedDateTime zdt = ZonedDateTime . now ( zoneId ) ; SimpleMutableDateTime smt = SimpleMutableDateTime . from ( zdt ) ; return smt ;
public class WindowsJNIFaxClientSpi { /** * This function will suspend an existing fax job . * @ param serverName * The fax server name * @ param faxJobID * The fax job ID */ private void winSuspendFaxJob ( String serverName , int faxJobID ) { } }
synchronized ( WindowsFaxClientSpiHelper . NATIVE_LOCK ) { // pre native call this . preNativeCall ( ) ; // invoke native WindowsJNIFaxClientSpi . suspendFaxJobNative ( serverName , faxJobID ) ; }
public class AbstractInjectionEngine { /** * { @ inheritDoc } */ @ Override public void registerObjectFactory ( Class < ? extends Annotation > annotation , Class < ? > type , Class < ? extends ObjectFactory > objectFactory , boolean allowOverride ) // F623-841.1 throws InjectionException { } }
registerObjectFactory ( annotation , type , objectFactory , allowOverride , null , true ) ;
public class RmpAppirater { /** * Reset saved conditions if app version changed . * @ param context Context */ public static void resetIfAppVersionChanged ( Context context ) { } }
SharedPreferences prefs = getSharedPreferences ( context ) ; int appVersionCode = Integer . MIN_VALUE ; final int previousAppVersionCode = prefs . getInt ( PREF_KEY_APP_VERSION_CODE , Integer . MIN_VALUE ) ; try { appVersionCode = context . getPackageManager ( ) . getPackageInfo ( context . getPackageName ( ) , 0 ) . versionCode ; } catch ( PackageManager . NameNotFoundException e ) { Log . w ( TAG , "Occurred PackageManager.NameNotFoundException" , e ) ; } if ( previousAppVersionCode != appVersionCode ) { SharedPreferences . Editor prefsEditor = prefs . edit ( ) ; prefsEditor . putLong ( PREF_KEY_APP_LAUNCH_COUNT , 0 ) ; prefsEditor . putLong ( PREF_KEY_APP_THIS_VERSION_CODE_LAUNCH_COUNT , 0 ) ; prefsEditor . putLong ( PREF_KEY_APP_FIRST_LAUNCHED_DATE , 0 ) ; prefsEditor . putInt ( PREF_KEY_APP_VERSION_CODE , Integer . MIN_VALUE ) ; prefsEditor . putLong ( PREF_KEY_RATE_CLICK_DATE , 0 ) ; prefsEditor . putLong ( PREF_KEY_REMINDER_CLICK_DATE , 0 ) ; prefsEditor . putBoolean ( PREF_KEY_DO_NOT_SHOW_AGAIN , false ) ; prefsEditor . commit ( ) ; }
public class SpoutSpec { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */ public boolean isSet ( _Fields field ) { } }
if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case SPOUT_OBJECT : return is_set_spout_object ( ) ; case COMMON : return is_set_common ( ) ; } throw new IllegalStateException ( ) ;
public class JawrApplicationConfigManager { /** * ( non - Javadoc ) * @ see net . jawr . web . config . jmx . JawrApplicationConfigManagerMBean # * rebuildDirtyBundles ( ) */ @ Override public void rebuildDirtyBundles ( ) { } }
if ( jsMBean != null ) { jsMBean . rebuildDirtyBundles ( ) ; } if ( cssMBean != null ) { cssMBean . rebuildDirtyBundles ( ) ; } if ( binaryMBean != null ) { binaryMBean . rebuildDirtyBundles ( ) ; }
public class RejectVpcEndpointConnectionsRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < RejectVpcEndpointConnectionsRequest > getDryRunRequest ( ) { } }
Request < RejectVpcEndpointConnectionsRequest > request = new RejectVpcEndpointConnectionsRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class BtcFormat { /** * Return a new microcoin - denominated formatter for the given locale with the specified * fractional decimal placing . The returned object will format the fractional part of * numbers with the given minimum number of fractional decimal places . Optionally , * repeating integer arguments can be passed , each indicating the size of an additional * group of fractional decimal places to be used as necessary to avoid rounding , to a * limiting precision of satoshis . */ public static BtcFormat getMicroInstance ( Locale locale , int scale , int ... groups ) { } }
return getInstance ( MICROCOIN_SCALE , locale , scale , boxAsList ( groups ) ) ;
public class MailPublisher { /** * Sends a MimeMessage . * @ param from The mail sender address . * @ param replyTo The reply to address . * @ param to A list of mail recipient addresses . * @ param cc A list of carbon copy mail recipient addresses . * @ param bcc A list of blind carbon copy mail recipient addresses . * @ param subject The mail subject . * @ param msg The mail message text . * @ param html Whether to apply content type " text / html " or the default * content type ( " text / plain " ) . * @ param attachmentFilename The attachment file name . * @ param attachmentFile The file resource to be applied to the mail . * @ throws MessagingException */ public void sendMimeMail ( String from , String replyTo , String [ ] to , String [ ] cc , String [ ] bcc , String subject , String msg , Boolean html , String attachmentFilename , File attachmentFile ) throws MessagingException , MailException { } }
Boolean multipart = false ; // if a attachment file is required , we have to use a multipart massage if ( attachmentFilename != null && attachmentFile != null ) { multipart = true ; } MimeMessage mimeMailMessage = mailSender . createMimeMessage ( ) ; MimeMessageHelper mimeHelper = new MimeMessageHelper ( mimeMailMessage , multipart ) ; // fallback to default mail sender if ( from == null || from . isEmpty ( ) ) { from = defaultMailSender ; } // set minimal configuration mimeHelper . setFrom ( from ) ; mimeHelper . setTo ( to ) ; mimeHelper . setSubject ( subject ) ; mimeHelper . setText ( msg , html ) ; // add replyTo address if set if ( replyTo != null && ! replyTo . isEmpty ( ) ) { mimeHelper . setReplyTo ( replyTo ) ; } // add bcc address ( es ) if set if ( bcc != null && bcc . length > 0 ) { mimeHelper . setBcc ( bcc ) ; } // add cc address ( es ) if set if ( cc != null && cc . length > 0 ) { mimeHelper . setCc ( cc ) ; } // add attachment file if set if ( attachmentFilename != null && attachmentFile != null ) { mimeHelper . addAttachment ( attachmentFilename , attachmentFile ) ; } sendMail ( mimeMailMessage ) ;
public class PaxWicketPageFactory { /** * < p > add . < / p > * @ param pageSource a { @ link org . ops4j . pax . wicket . api . PageFactory } object . * @ throws java . lang . IllegalArgumentException if any . */ public void add ( PageFactory < ? extends IRequestablePage > pageSource ) throws IllegalArgumentException { } }
validateNotNull ( pageSource , "pageSource" ) ; Class < ? extends IRequestablePage > pageClass = pageSource . getPageClass ( ) ; validateNotNull ( pageSource , "pageClass" ) ; synchronized ( this ) { contents . put ( pageClass , pageSource ) ; }
public class TraceOptions { /** * Returns a { @ code TraceOptions } whose representation is copied from the { @ code src } beginning at * the { @ code srcOffset } offset . * @ param src the buffer where the representation of the { @ code TraceOptions } is copied . * @ param srcOffset the offset in the buffer where the representation of the { @ code TraceOptions } * begins . * @ return a { @ code TraceOptions } whose representation is copied from the buffer . * @ throws NullPointerException if { @ code src } is null . * @ throws IndexOutOfBoundsException if { @ code srcOffset + TraceOptions . SIZE } is greater than { @ code * src . length } . * @ since 0.5 * @ deprecated use { @ link # fromByte ( byte ) } . */ @ Deprecated public static TraceOptions fromBytes ( byte [ ] src , int srcOffset ) { } }
Utils . checkIndex ( srcOffset , src . length ) ; return fromByte ( src [ srcOffset ] ) ;
public class SoftDictionary { /** * Insert a string into the dictionary . */ public void put ( String string , Object value ) { } }
put ( ( String ) null , new MyWrapper ( string ) , value ) ;
public class BasicScope { /** * { @ inheritDoc } */ public void setSecurityHandlers ( Set < IScopeSecurityHandler > handlers ) { } }
if ( securityHandlers == null ) { securityHandlers = new CopyOnWriteArraySet < > ( ) ; } // add the specified set of security handlers securityHandlers . addAll ( handlers ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "securityHandlers: {}" , securityHandlers ) ; }
public class Helper { /** * < p > parseArray . < / p > * @ param s a { @ link java . lang . String } object . * @ return an array of { @ link java . lang . String } objects . */ public static String [ ] parseArray ( String s ) { } }
// a : 2 : { i : 0 ; s : 12 : " PHPSUD _ OBJ _ 2 " ; i : 1 ; s : 12 : " PHPSUD _ OBJ _ 3 " ; } String [ ] t = s . split ( ":" ) ; int z = Integer . parseInt ( t [ 1 ] ) ; String [ ] res = new String [ z ] ; for ( int i = 0 ; i < z ; i ++ ) { String a = t [ 5 + 3 * i ] . split ( ";" ) [ 0 ] ; res [ i ] = a . substring ( 1 ) . substring ( 0 , a . length ( ) - 2 ) ; } return res ;
public class MkAppTree { /** * Adjusts the knn distance in the subtree of the specified root entry . * @ param entry the root entry of the current subtree * @ param knnLists a map of knn lists for each leaf entry */ private void adjustApproximatedKNNDistances ( MkAppEntry entry , Map < DBID , KNNList > knnLists ) { } }
MkAppTreeNode < O > node = getNode ( entry ) ; if ( node . isLeaf ( ) ) { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { MkAppLeafEntry leafEntry = ( MkAppLeafEntry ) node . getEntry ( i ) ; // approximateKnnDistances ( leafEntry , // getKNNList ( leafEntry . getRoutingObjectID ( ) , knnLists ) ) ; PolynomialApproximation approx = approximateKnnDistances ( getMeanKNNList ( leafEntry . getDBID ( ) , knnLists ) ) ; leafEntry . setKnnDistanceApproximation ( approx ) ; } } else { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { MkAppEntry dirEntry = node . getEntry ( i ) ; adjustApproximatedKNNDistances ( dirEntry , knnLists ) ; } } // PolynomialApproximation approx1 = node . knnDistanceApproximation ( ) ; ArrayModifiableDBIDs ids = DBIDUtil . newArray ( ) ; leafEntryIDs ( node , ids ) ; PolynomialApproximation approx = approximateKnnDistances ( getMeanKNNList ( ids , knnLists ) ) ; entry . setKnnDistanceApproximation ( approx ) ;
public class Es6ToEs3Util { /** * Warns the user that the given ES6 feature cannot be converted to ES3 * because the transpilation is not yet implemented . A call to this method * is essentially a " TODO ( tbreisacher ) : Implement { @ code feature } " comment . */ static void cannotConvertYet ( AbstractCompiler compiler , Node n , String feature ) { } }
compiler . report ( JSError . make ( n , CANNOT_CONVERT_YET , feature ) ) ;
public class FacesConfigFlowDefinitionFlowCallTypeImpl { /** * If not already created , a new < code > flow - reference < / code > element with the given value will be created . * Otherwise , the existing < code > flow - reference < / code > element will be returned . * @ return a new or existing instance of < code > FacesConfigFlowDefinitionFlowCallFlowReferenceType < FacesConfigFlowDefinitionFlowCallType < T > > < / code > */ public FacesConfigFlowDefinitionFlowCallFlowReferenceType < FacesConfigFlowDefinitionFlowCallType < T > > getOrCreateFlowReference ( ) { } }
Node node = childNode . getOrCreate ( "flow-reference" ) ; FacesConfigFlowDefinitionFlowCallFlowReferenceType < FacesConfigFlowDefinitionFlowCallType < T > > flowReference = new FacesConfigFlowDefinitionFlowCallFlowReferenceTypeImpl < FacesConfigFlowDefinitionFlowCallType < T > > ( this , "flow-reference" , childNode , node ) ; return flowReference ;
public class ConsList { /** * for kryo */ @ Override public void add ( int index , Object element ) { } }
if ( index == 0 ) { _first = element ; } else { _elems . add ( index - 1 , element ) ; }
public class ConfigurationHooksSupport { /** * Called just after manual application configuration ( in application class ) . * Could be used in tests to disable configuration items and ( probably ) replace them . * @ param builder just created builder * @ return used hooks */ public static Set < GuiceyConfigurationHook > run ( final GuiceBundle . Builder builder ) { } }
final Set < GuiceyConfigurationHook > hooks = HOOKS . get ( ) ; if ( hooks != null ) { hooks . forEach ( l -> l . configure ( builder ) ) ; } // clear hooks just after init reset ( ) ; return hooks ;
public class VarOptItemsSketch { /** * Internal implementation of update ( ) which requires the user to know if an item is * marked as coming from the reservoir region of a sketch . The marks are used only in * merging . * @ param item an item of the set being sampled from * @ param weight a strictly positive weight associated with the item * @ param mark true if an item comes from a sketch ' s reservoir region */ void update ( final T item , final double weight , final boolean mark ) { } }
if ( item == null ) { return ; } if ( weight <= 0.0 ) { throw new SketchesArgumentException ( "Item weights must be strictly positive: " + weight + ", for item " + item . toString ( ) ) ; } ++ n_ ; if ( r_ == 0 ) { // exact mode updateWarmupPhase ( item , weight , mark ) ; } else { // sketch is in estimation mode , so we can make the following check assert ( h_ == 0 ) || ( peekMin ( ) >= getTau ( ) ) ; // what tau would be if deletion candidates turn out to be R plus the new item // note : ( r _ + 1 ) - 1 is intentional final double hypotheticalTau = ( weight + totalWtR_ ) / ( ( r_ + 1 ) - 1 ) ; // is new item ' s turn to be considered for reservoir ? final boolean condition1 = ( h_ == 0 ) || ( weight <= peekMin ( ) ) ; // is new item light enough for reservoir ? final boolean condition2 = weight < hypotheticalTau ; if ( condition1 && condition2 ) { updateLight ( item , weight , mark ) ; } else if ( r_ == 1 ) { updateHeavyREq1 ( item , weight , mark ) ; } else { updateHeavyGeneral ( item , weight , mark ) ; } }
public class EncodingGroovyMethods {
    /**
     * Produce a Writable object which writes the Base64 encoding of the byte array.
     * Calling toString() on the result returns the encoding as a String. See RFC 4648
     * for Base64 encoding and chunking.
     *
     * @param data byte array to be encoded
     * @param chunked whether or not the Base64 encoded data should be MIME chunked
     * @return object which will write the Base64 encoding of the byte array
     * @since 1.5.7
     */
    public static Writable encodeBase64(final byte[] data, final boolean chunked) {
        return new Writable() {
            public Writer writeTo(final Writer writer) throws IOException {
                int charCount = 0;
                // process complete 3-byte groups; any 1- or 2-byte tail is handled below
                final int dLimit = (data.length / 3) * 3;
                for (int dIndex = 0; dIndex != dLimit; dIndex += 3) {
                    // pack three bytes into a 24-bit value, then emit four 6-bit symbols
                    int d = ((data[dIndex] & 0XFF) << 16) | ((data[dIndex + 1] & 0XFF) << 8) | (data[dIndex + 2] & 0XFF);
                    writer.write(T_TABLE[d >> 18]);
                    writer.write(T_TABLE[(d >> 12) & 0X3F]);
                    writer.write(T_TABLE[(d >> 6) & 0X3F]);
                    writer.write(T_TABLE[d & 0X3F]);
                    // 19 quads = 76 output characters, the MIME line limit
                    if (chunked && ++charCount == 19) {
                        writer.write(CHUNK_SEPARATOR);
                        charCount = 0;
                    }
                }
                if (dLimit != data.length) {
                    // 1 or 2 trailing bytes: pad the 24-bit group and emit '=' padding
                    int d = (data[dLimit] & 0XFF) << 16;
                    if (dLimit + 1 != data.length) {
                        d |= (data[dLimit + 1] & 0XFF) << 8;
                    }
                    writer.write(T_TABLE[d >> 18]);
                    writer.write(T_TABLE[(d >> 12) & 0X3F]);
                    writer.write((dLimit + 1 < data.length) ? T_TABLE[(d >> 6) & 0X3F] : '=');
                    writer.write('=');
                    if (chunked && charCount != 0) {
                        writer.write(CHUNK_SEPARATOR);
                    }
                }
                return writer;
            }

            public String toString() {
                StringWriter buffer = new StringWriter();
                try {
                    writeTo(buffer);
                } catch (IOException e) {
                    // a StringWriter cannot really fail; re-wrap to satisfy Writable's contract
                    throw new StringWriterIOException(e);
                }
                return buffer.toString();
            }
        };
    }
}
public class HttpOutputStreamImpl { /** * ( non - Javadoc ) * @ see com . ibm . ws . http . channel . internal . outbound . HttpOutputStream # clear ( ) */ @ Override public void clear ( ) { } }
if ( null != this . output ) { for ( int i = 0 ; i < this . output . length ; i ++ ) { if ( null != this . output [ i ] ) { this . output [ i ] . release ( ) ; this . output [ i ] = null ; } } } this . outputIndex = 0 ; this . bufferedCount = 0 ; this . bytesWritten = 0L ; this . isClosing = false ;
public class Common { /** * Reload the default settings . */ public void loadDefaultSettings ( ) { } }
String value = getStorageHandler ( ) . getStorageEngine ( ) . getConfigEntry ( "longmode" ) ; if ( value != null ) { displayFormat = DisplayFormat . valueOf ( value . toUpperCase ( ) ) ; } else { getStorageHandler ( ) . getStorageEngine ( ) . setConfigEntry ( "longmode" , "long" ) ; displayFormat = DisplayFormat . LONG ; } addMetricsGraph ( "Display Format" , displayFormat . toString ( ) ) ; value = getStorageHandler ( ) . getStorageEngine ( ) . getConfigEntry ( "holdings" ) ; if ( value != null && Tools . isValidDouble ( value ) ) { holdings = Double . parseDouble ( value ) ; } else { getStorageHandler ( ) . getStorageEngine ( ) . setConfigEntry ( "holdings" , 100.0 + "" ) ; sendConsoleMessage ( Level . SEVERE , "No default value was set for account creation or was invalid! Defaulting to 100." ) ; holdings = 100.0 ; } value = getStorageHandler ( ) . getStorageEngine ( ) . getConfigEntry ( "bankprice" ) ; if ( value != null && Tools . isValidDouble ( value ) ) { bankPrice = Double . parseDouble ( value ) ; } else { getStorageHandler ( ) . getStorageEngine ( ) . setConfigEntry ( "bankprice" , 100.0 + "" ) ; sendConsoleMessage ( Level . SEVERE , "No default value was set for bank creation or was invalid! Defaulting to 100." ) ; bankPrice = 100.0 ; }
public class Application { /** * Lame method , since android doesn ' t have awt / applet support . * @ param obj * @ param text */ public String getParameterByReflection ( Object obj , String param ) { } }
Object value = null ; try { java . lang . reflect . Method method = obj . getClass ( ) . getMethod ( "getParameter" , String . class ) ; if ( method != null ) value = method . invoke ( obj , param ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } if ( value == null ) return null ; else return value . toString ( ) ;
public class MultiPointerGestureDetector { /** * Restarts the current gesture ( if any ) . */ public void restartGesture ( ) { } }
if ( ! mGestureInProgress ) { return ; } stopGesture ( ) ; for ( int i = 0 ; i < MAX_POINTERS ; i ++ ) { mStartX [ i ] = mCurrentX [ i ] ; mStartY [ i ] = mCurrentY [ i ] ; } startGesture ( ) ;
public class DatasourceConfigReader { /** * read master datasource configuration from xian config . * please refer to { @ link XianDataSource # url } to see database configuration format * @ return datasource configuration url */ public static String getWriteUrl ( ) { } }
String writeUrl = XianConfig . get ( DB_URL_KEY ) ; LOG . info ( "db_url = " + writeUrl ) ; return writeUrl ;
public class PKITools { /** * Private methods */ private byte [ ] getEncryptedKey ( final String fileName , final int keyNum ) throws PKIException { } }
byte [ ] keys = getKeys ( fileName ) ; int foundKeys = 0 ; int keyStart = 0 ; for ( int i = 0 ; i < keys . length ; i ++ ) { if ( keys [ i ] != '\n' ) { continue ; } if ( keyNum != foundKeys ) { keyStart = i + 1 ; foundKeys ++ ; continue ; } // At end of our key int keyLen = i - keyStart ; byte [ ] key = new byte [ keyLen ] ; System . arraycopy ( keys , keyStart , key , 0 , keyLen ) ; return key ; } throw new PKIException ( "Invalid key number" ) ;
public class AnimaQuery { /** * Set the where parameter according to model , * and generate sql like where where age = ? and name = ? * @ param model * @ return AnimaQuery */ public AnimaQuery < T > where ( T model ) { } }
Field [ ] declaredFields = model . getClass ( ) . getDeclaredFields ( ) ; for ( Field declaredField : declaredFields ) { Object value = AnimaUtils . invokeMethod ( model , getGetterName ( declaredField . getName ( ) ) , AnimaUtils . EMPTY_ARG ) ; if ( null == value ) { continue ; } if ( declaredField . getType ( ) . equals ( String . class ) && AnimaUtils . isEmpty ( value . toString ( ) ) ) { continue ; } String columnName = AnimaCache . getColumnName ( declaredField ) ; this . where ( columnName , value ) ; } return this ;
public class TSTraversal {
    /**
     * Traverses the given transition system in a breadth-first fashion, steered by the
     * specified visitor. Exploration stops enqueuing new states once {@code limit}
     * states have been scheduled.
     *
     * @param ts the transition system
     * @param limit maximum number of states to enqueue for exploration
     * @param inputs the input alphabet
     * @param vis the visitor
     * @return {@code true} if the traversal completed, {@code false} if it was cut
     *         short by reaching the state limit
     */
    public static <S, I, T, D> boolean breadthFirst(TransitionSystem<S, ? super I, T> ts,
            int limit,
            Collection<? extends I> inputs,
            TSTraversalVisitor<S, I, T, D> vis) {
        Deque<BFSRecord<S, D>> bfsQueue = new ArrayDeque<>();
        // setting the following to false means that the traversal had to be aborted
        // due to reaching the limit
        boolean complete = true;
        int stateCount = 0;
        Holder<D> dataHolder = new Holder<>();
        // seed the queue with the initial states the visitor asks to explore
        for (S initS : ts.getInitialStates()) {
            dataHolder.value = null;
            TSTraversalAction act = vis.processInitial(initS, dataHolder);
            switch (act) {
                case ABORT_TRAVERSAL:
                    return complete;
                case EXPLORE:
                    if (stateCount != limit) {
                        bfsQueue.offer(new BFSRecord<>(initS, dataHolder.value));
                        stateCount++;
                    } else {
                        complete = false;
                    }
                    break;
                default:
                    // case ABORT_INPUT: case ABORT_STATE: case IGNORE:
            }
        }
        while (!bfsQueue.isEmpty()) {
            BFSRecord<S, D> current = bfsQueue.poll();
            S state = current.state;
            D data = current.data;
            if (!vis.startExploration(state, data)) {
                continue;
            }
            // labeled loop so the visitor can abort a single input (continue) or the
            // whole state (break) from inside the inner transition loop
            inputs_loop:
            for (I input : inputs) {
                Collection<T> transitions = ts.getTransitions(state, input);
                for (T trans : transitions) {
                    S succ = ts.getSuccessor(trans);
                    dataHolder.value = null;
                    TSTraversalAction act = vis.processTransition(state, data, input, trans, succ, dataHolder);
                    switch (act) {
                        case ABORT_INPUT:
                            continue inputs_loop;
                        case ABORT_STATE:
                            break inputs_loop;
                        case ABORT_TRAVERSAL:
                            return complete;
                        case EXPLORE:
                            if (stateCount != limit) {
                                bfsQueue.offer(new BFSRecord<>(succ, dataHolder.value));
                                stateCount++;
                            } else {
                                complete = false;
                            }
                            break;
                        case IGNORE:
                            break;
                        default:
                            throw new IllegalStateException("Unknown action " + act);
                    }
                }
            }
        }
        return complete;
    }
}
public class ModuleBundleFileWrapperFactoryHook { /** * { @ inheritDoc } */ @ Override public BundleActivator createActivator ( ) { } }
return new BundleActivator ( ) { @ Override public void stop ( BundleContext context ) throws Exception { frameworkStop ( context ) ; } @ Override public void start ( BundleContext context ) throws Exception { frameworkStart ( context ) ; } } ;
public class PhaseOneApplication { /** * Runs stages 2 - 7 which remain static regardless of the BEL document * input . * @ param pedantic the flag for pedantic - ness * @ param document the { @ link Document BEL document } */ private void runCommonStages ( boolean pedantic , Document document ) { } }
if ( ! stage2 ( document ) ) { if ( pedantic ) { bail ( NAMESPACE_RESOLUTION_FAILURE ) ; } } if ( ! stage3 ( document ) ) { if ( pedantic ) { bail ( SYMBOL_VERIFICATION_FAILURE ) ; } } if ( ! stage4 ( document ) ) { if ( pedantic ) { bail ( SEMANTIC_VERIFICATION_FAILURE ) ; } } ProtoNetwork pn = stage5 ( document ) ; if ( ! stage6 ( document , pn ) ) { if ( pedantic ) { bail ( STATEMENT_EXPANSION_FAILURE ) ; } } if ( ! stage7 ( pn , document ) ) { if ( pedantic ) { bail ( PROTO_NETWORK_SAVE_FAILURE ) ; } }
public class AESUtil { /** * Decrypt . * @ param data the data * @ param key the key * @ return the string */ public static String decrypt ( String data , byte [ ] key ) { } }
try { Key k = toSecretKey ( key ) ; Cipher cipher = Cipher . getInstance ( CIPHER_ALGORITHM ) ; cipher . init ( Cipher . DECRYPT_MODE , k ) ; byte [ ] toByteArray = Hex . decodeHex ( data . toCharArray ( ) ) ; byte [ ] decrypted = cipher . doFinal ( toByteArray ) ; return new String ( decrypted , "UTF-8" ) ; } catch ( GeneralSecurityException e ) { throw new RuntimeException ( "Failed to decrypt." , e ) ; } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( e ) ; } catch ( DecoderException e ) { throw new RuntimeException ( e ) ; }
public class DefaultJiraClient {
    /**
     * Copies Epic data onto the given feature, updating the passed-in feature in place.
     *
     * @param feature the feature to update
     * @param epic the source epic
     */
    protected static void processEpicData(Feature feature, Epic epic) {
        feature.setsEpicID(epic.getId());
        // NOTE(review): always "false" here — deletion state is presumably handled elsewhere
        feature.setsEpicIsDeleted("false");
        feature.setsEpicName(epic.getName());
        feature.setsEpicNumber(epic.getNumber());
        // the epic type is intentionally left blank
        feature.setsEpicType("");
        feature.setsEpicAssetState(epic.getStatus());
        feature.setsEpicBeginDate(epic.getBeginDate());
        feature.setsEpicChangeDate(epic.getChangeDate());
        feature.setsEpicEndDate(epic.getEndDate());
        feature.setsEpicUrl(epic.getUrl());
    }
}
public class DeploymentBuilder { /** * Get connection factories * @ return The value */ public Collection < ConnectionFactory > getConnectionFactories ( ) { } }
if ( connectionFactories == null ) return Collections . emptyList ( ) ; return Collections . unmodifiableCollection ( connectionFactories ) ;
public class Command {
    /**
     * Create a CORBA Any object and insert an int array in it.
     *
     * @param data the int array to be inserted into the Any object
     * @return the populated Any object
     * @throws DevFailed if the Any object creation failed
     */
    public Any insert(int[] data) throws DevFailed {
        Any out_any = alloc_any();
        // DevVarLongArray is the IDL-generated type for int[] payloads
        DevVarLongArrayHelper.insert(out_any, data);
        return out_any;
    }
}
public class Matrix4x3d {
    /**
     * Stores this matrix, expanded to 4x4 form, into the given buffer starting at
     * the given byte index; the buffer position is not modified.
     *
     * @see org.joml.Matrix4x3dc#get4x4(int, java.nio.ByteBuffer)
     * @param index the absolute byte position at which to store the matrix
     * @param buffer the destination buffer
     * @return the passed-in buffer, for chaining
     */
    public ByteBuffer get4x4(int index, ByteBuffer buffer) {
        MemUtil.INSTANCE.put4x4(this, index, buffer);
        return buffer;
    }
}
public class CsvReader { /** * Configures which fields of the CSV file should be included and which should be skipped . The * positions in the string ( read from position 0 to its length ) define whether the field at * the corresponding position in the CSV schema should be included . * parser will look at the first { @ code n } fields , where { @ code n } is the length of the mask string * The parser will skip over all fields where the character at the corresponding position * in the string is { @ code ' 0 ' } , { @ code ' F ' } , or { @ code ' f ' } ( representing the value * { @ code false } ) . The result contains the fields where the corresponding position in * the boolean array is { @ code ' 1 ' } , { @ code ' T ' } , or { @ code ' t ' } ( representing the value { @ code true } ) . * @ param mask The string mask defining which fields to include and which to skip . * @ return The CSV reader instance itself , to allow for fluent function chaining . */ public CsvReader includeFields ( String mask ) { } }
boolean [ ] includedMask = new boolean [ mask . length ( ) ] ; for ( int i = 0 ; i < mask . length ( ) ; i ++ ) { char c = mask . charAt ( i ) ; if ( c == '1' || c == 'T' || c == 't' ) { includedMask [ i ] = true ; } else if ( c != '0' && c != 'F' && c != 'f' ) { throw new IllegalArgumentException ( "Mask string may contain only '0' and '1'." ) ; } } return includeFields ( includedMask ) ;
public class ConfigImpl {
    /**
     * Returns the mapping used for script execution (JSR 223), lazily creating it
     * on first access and backing it with a "jsr223" directory under the config dir.
     *
     * @return the script mapping (never null after first call)
     */
    public Mapping getScriptMapping() {
        if (scriptMapping == null) {
            // Physical resource TODO make in RAM
            Resource physical = getConfigDir().getRealResource("jsr223");
            if (!physical.exists()) physical.mkdirs();
            // NOTE(review): the boolean flags follow the MappingImpl constructor order —
            // verify against its signature before changing any of them
            this.scriptMapping = new MappingImpl(this, "/mapping-script/", physical.getAbsolutePath(),
                    null, ConfigImpl.INSPECT_NEVER, true, true, true, true, false, true, null, -1, -1);
        }
        return scriptMapping;
    }
}
public class Snowflake {
    /**
     * Generates the next snowflake ID. Synchronized so the sequence/lastTimestamp
     * state is updated atomically.
     *
     * @return the next unique ID
     * @throws IllegalStateException if the system clock moved backwards
     */
    public synchronized long nextId() {
        long timestamp = genTime();
        if (timestamp < lastTimestamp) {
            // clock moved backwards on this machine; refuse to generate ids to avoid duplicates
            throw new IllegalStateException(StrUtil.format("Clock moved backwards. Refusing to generate id for {}ms", lastTimestamp - timestamp));
        }
        if (lastTimestamp == timestamp) {
            // same millisecond: bump the sequence; a rollover to 0 forces a wait for the next ms
            sequence = (sequence + 1) & sequenceMask;
            if (sequence == 0) {
                timestamp = tilNextMillis(lastTimestamp);
            }
        } else {
            sequence = 0L;
        }
        lastTimestamp = timestamp;
        // compose the id: [timestamp since epoch | datacenter | worker | sequence]
        return ((timestamp - twepoch) << timestampLeftShift) | (datacenterId << datacenterIdShift) | (workerId << workerIdShift) | sequence;
    }
}
public class ExpressRouteCircuitsInner {
    /**
     * Updates an express route circuit's tags.
     *
     * @param resourceGroupName the name of the resource group
     * @param circuitName the name of the circuit
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ExpressRouteCircuitInner object if successful
     */
    public ExpressRouteCircuitInner beginUpdateTags(String resourceGroupName, String circuitName) {
        // synchronous wrapper: block on the async call and unwrap the service response body
        return beginUpdateTagsWithServiceResponseAsync(resourceGroupName, circuitName).toBlocking().single().body();
    }
}
public class NamespaceTable {
    /**
     * {@inheritDoc}
     *
     * Reads the namespace table from the stream: a count of table namespaces, then
     * for each namespace its serialized form followed by the indices of the
     * documents that reference it.
     */
    @Override
    protected void _from(ObjectInput in) throws IOException, ClassNotFoundException {
        // read number of table namespaces
        final int size = in.readInt();
        for (int i = 0; i < size; i++) {
            final TableNamespace ns = new TableNamespace();
            // read the table namespace itself
            ns.readExternal(in);
            // read number of documents referencing this namespace
            final int documentsSize = in.readInt();
            for (int j = 0; j < documentsSize; j++) {
                // read each document index and register the namespace for it
                addNamespace(ns, in.readInt());
            }
        }
    }
}
public class QueueContainer {
    /**
     * Reserves an ID for a future queue item and associates it with the given
     * {@code transactionId}. The item is not yet visible in the queue; it is just
     * reserved for future insertion.
     *
     * @param itemId the ID of the item being reserved
     * @param transactionId the ID of the transaction offering this item
     */
    public void txnOfferBackupReserve(long itemId, String transactionId) {
        TxQueueItem o = txnOfferReserveInternal(itemId, transactionId);
        if (o != null) {
            // a non-null return means a reservation already existed in the tx map for this id
            logger.severe("txnOfferBackupReserve operation-> Item exists already at txMap for itemId: " + itemId);
        }
    }
}
public class Schemas { /** * Checks if a constraint exists for a given label and a list of properties * This method checks for constraints on node * @ param labelName * @ param propertyNames * @ return true if the constraint exists otherwise it returns false */ private Boolean constraintsExists ( String labelName , List < String > propertyNames ) { } }
Schema schema = db . schema ( ) ; for ( ConstraintDefinition constraintDefinition : Iterables . asList ( schema . getConstraints ( Label . label ( labelName ) ) ) ) { List < String > properties = Iterables . asList ( constraintDefinition . getPropertyKeys ( ) ) ; if ( properties . equals ( propertyNames ) ) { return true ; } } return false ;
public class RottenTomatoesApi {
    /**
     * Returns movies similar to the given movie.
     *
     * @param movieId RT movie ID
     * @param limit limit on the number of returned movies
     * @return the similar movies, or an empty list when the API returns nothing
     * @throws RottenTomatoesException on API failure
     */
    public List<RTMovie> getMoviesSimilar(int movieId, int limit) throws RottenTomatoesException {
        // NOTE(review): `properties` is a shared field reset per call — looks single-threaded; verify
        properties.clear();
        properties.put(ApiBuilder.PROPERTY_ID, String.valueOf(movieId));
        properties.put(ApiBuilder.PROPERTY_URL, URL_MOVIES_SIMILAR);
        properties.put(ApiBuilder.PROPERTY_LIMIT, ApiBuilder.validateLimit(limit));
        WrapperLists wrapper = response.getResponse(WrapperLists.class, properties);
        if (wrapper != null && wrapper.getMovies() != null) {
            return wrapper.getMovies();
        } else {
            return Collections.emptyList();
        }
    }
}
public class GetOfferingStatusResult {
    /**
     * When specified, gets the offering status for the current period.
     *
     * @param current when specified, gets the offering status for the current period
     * @return a reference to this object so that method calls can be chained together
     */
    public GetOfferingStatusResult withCurrent(java.util.Map<String, OfferingStatus> current) {
        setCurrent(current);
        return this;
    }
}
public class CssSkinGenerator { /** * Update the skin variants from the directory path given in parameter * @ param rsBrowser * the resource browser * @ param path * the skin path * @ param skinVariants * the set of skin variants to update */ private void updateSkinVariants ( ResourceBrowser rsBrowser , String path , Set < String > skinVariants ) { } }
Set < String > skinPaths = rsBrowser . getResourceNames ( path ) ; for ( Iterator < String > itSkinPath = skinPaths . iterator ( ) ; itSkinPath . hasNext ( ) ; ) { String skinPath = path + itSkinPath . next ( ) ; if ( rsBrowser . isDirectory ( skinPath ) ) { String skinDirName = PathNormalizer . getPathName ( skinPath ) ; skinVariants . add ( skinDirName ) ; } }
public class StringUtils { /** * < p > Checks if a String { @ code str } contains Unicode digits , * if yes then concatenate all the digits in { @ code str } and return it as a String . < / p > * < p > An empty ( " " ) String will be returned if no digits found in { @ code str } . < / p > * < pre > * StringUtils . getDigits ( null ) = null * StringUtils . getDigits ( " " ) = " " * StringUtils . getDigits ( " abc " ) = " " * StringUtils . getDigits ( " 1000 $ " ) = " 1000" * StringUtils . getDigits ( " 1123 ~ 45 " ) = " 12345" * StringUtils . getDigits ( " ( 541 ) 754-3010 " ) = " 5417543010" * StringUtils . getDigits ( " \ u0967 \ u0968 \ u0969 " ) = " \ u0967 \ u0968 \ u0969" * < / pre > * @ param str the String to extract digits from , may be null * @ return String with only digits , * or an empty ( " " ) String if no digits found , * or { @ code null } String if { @ code str } is null * @ since 3.6 */ public static String getDigits ( final String str ) { } }
if ( isEmpty ( str ) ) { return str ; } final int sz = str . length ( ) ; final StringBuilder strDigits = new StringBuilder ( sz ) ; for ( int i = 0 ; i < sz ; i ++ ) { final char tempChar = str . charAt ( i ) ; if ( Character . isDigit ( tempChar ) ) { strDigits . append ( tempChar ) ; } } return strDigits . toString ( ) ;
public class ModuleSpaces {
    /**
     * Create a Space.
     *
     * @param space CMASpace to create
     * @return {@link CMASpace} result instance
     * @throws IllegalArgumentException if space is null
     */
    public CMASpace create(CMASpace space) {
        assertNotNull(space, "space");
        // strip system metadata for the request and restore it afterwards
        // (presumably the server populates/rejects it — TODO confirm)
        final CMASystem system = space.getSystem();
        space.setSystem(null);
        try {
            return service.create(space).blockingFirst();
        } finally {
            space.setSystem(system);
        }
    }
}
public class ReadWriteLockDataStore { /** * Deserializes an { @ code Optional < Object > } from the given stream . * The given stream will not be close . * @ param input the serialized object input stream , must not be null * @ return the serialized object * @ throws IOException in case the load operation failed */ private static Optional < Object > deserialize ( InputStream input ) throws IOException { } }
try { BufferedInputStream bis = new BufferedInputStream ( input ) ; ObjectInputStream ois = new ObjectInputStream ( bis ) ; return Optional . of ( ois . readObject ( ) ) ; } catch ( ClassNotFoundException ex ) { LOG . log ( Level . SEVERE , "An error occurred during deserialization an object." , ex ) ; } return Optional . empty ( ) ;
public class SarlLinkFactory {
    /**
     * Build the link for a wildcard type: appends "?" followed by its
     * extends/super bounds, comma-separated.
     *
     * @param link the link content to append to
     * @param linkInfo the information on the link
     * @param type the wildcard type
     */
    protected void getLinkForWildcard(Content link, LinkInfo linkInfo, Type type) {
        linkInfo.isTypeBound = true;
        link.addContent("?"); //$NON-NLS-1$
        final WildcardType wildcardType = type.asWildcardType();
        final Type[] extendsBounds = wildcardType.extendsBounds();
        final SARLFeatureAccess kw = Utils.getKeywords();
        // upper bounds: renders "? extends A, B"
        for (int i = 0; i < extendsBounds.length; i++) {
            link.addContent(i > 0 ? kw.getCommaKeyword() + " " //$NON-NLS-1$
                : " " + kw.getExtendsKeyword() + " "); //$NON-NLS-1$ //$NON-NLS-2$
            setBoundsLinkInfo(linkInfo, extendsBounds[i]);
            link.addContent(getLink(linkInfo));
        }
        final Type[] superBounds = wildcardType.superBounds();
        // lower bounds: renders "? super A, B"
        for (int i = 0; i < superBounds.length; i++) {
            link.addContent(i > 0 ? kw.getCommaKeyword() + " " //$NON-NLS-1$
                : " " + kw.getSuperKeyword() + " "); //$NON-NLS-1$ //$NON-NLS-2$
            setBoundsLinkInfo(linkInfo, superBounds[i]);
            link.addContent(getLink(linkInfo));
        }
    }
}
public class JavaClassProcessor {
    /**
     * Applies the modifiers found in the access flags of visitInnerClass(..).
     * Only applies them when the reported inner type is the class currently being
     * built, since visitInnerClass reports other nested classes too.
     *
     * @param innerTypeName the inner class name reported by the class visitor
     * @param access the ASM access flags for that inner class
     */
    private void correctModifiersForNestedClass(String innerTypeName, int access) {
        if (innerTypeName.equals(className)) {
            javaClassBuilder.withModifiers(JavaModifier.getModifiersForClass(access));
        }
    }
}
public class VersionFactory { /** * Join this set of ranges together . This could result in a set , or in a * logical range . */ public IVersionRange getRange ( String [ ] versions ) throws InvalidRangeException { } }
if ( versions == null ) { return null ; } IVersionRange results = null ; for ( String version : versions ) { IVersionRange range = getRange ( version ) ; if ( results == null ) { results = range ; } else { results = new OrRange ( results , range ) ; } } return results ;
public class CreateAppProfileRequest {
    /**
     * Sets the optional long form description of the use case for the AppProfile.
     *
     * @param description the description to set (must not be null)
     * @return this request, for chaining
     */
    @SuppressWarnings("WeakerAccess")
    public CreateAppProfileRequest setDescription(@Nonnull String description) {
        // write through to the underlying protobuf builder
        proto.getAppProfileBuilder().setDescription(description);
        return this;
    }
}
public class Fiat {
    /**
     * <p>Parses an amount expressed in the way humans are used to. The amount is cut
     * to 4 digits after the comma.</p>
     * <p>This takes a string in a format understood by {@link BigDecimal#BigDecimal(String)},
     * for example "0", "1", "0.10", "1.23E3", "1234.5E-5".</p>
     *
     * @throws IllegalArgumentException if you try to specify a value out of range
     */
    public static Fiat parseFiatInexact(final String currencyCode, final String str) {
        try {
            // shift the decimal point to express the value in the smallest currency unit;
            // longValue() drops any remaining fractional precision (hence "inexact")
            long val = new BigDecimal(str).movePointRight(SMALLEST_UNIT_EXPONENT).longValue();
            return Fiat.valueOf(currencyCode, val);
        } catch (ArithmeticException e) {
            throw new IllegalArgumentException(e);
        }
    }
}
public class CommerceTaxFixedRateAddressRelPersistenceImpl {
    /**
     * Returns all the commerce tax fixed rate address rels.
     *
     * @return the commerce tax fixed rate address rels
     */
    @Override
    public List<CommerceTaxFixedRateAddressRel> findAll() {
        // ALL_POS/ALL_POS disables range limits; null comparator uses the default ordering
        return findAll(QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class CSVAppender {
    /**
     * Returns the value rendered as a String according to the first matching schema
     * type, or null. Note that the result may be null even if the schema does not
     * allow the value to be null.
     *
     * @param value a value
     * @param schema a Schema
     * @return a String representation of the value according to the Schema type
     * @throws DatasetOperationException for unsupported schema types
     */
    private static String valueString(Object value, Schema schema) {
        if (value == null || schema.getType() == Schema.Type.NULL) {
            return null;
        }
        switch (schema.getType()) {
            case BOOLEAN:
            case FLOAT:
            case DOUBLE:
            case INT:
            case LONG:
            case STRING:
                // scalar types render via their natural toString form
                return value.toString();
            case ENUM:
                // serialize as the ordinal from the schema
                return String.valueOf(schema.getEnumOrdinal(value.toString()));
            case UNION:
                // recurse into the union branch the value actually matches
                int index = ReflectData.get().resolveUnion(schema, value);
                return valueString(value, schema.getTypes().get(index));
            default:
                // FIXED, BYTES, MAP, ARRAY, RECORD are not supported
                throw new DatasetOperationException("Unsupported field type:" + schema.getType());
        }
    }
}
public class HtmlWriter { /** * Signals that a < CODE > String < / CODE > was added to the < CODE > Document < / CODE > . * @ param string a String to add to the HTML * @ return < CODE > true < / CODE > if the string was added , < CODE > false < / CODE > if not . */ public boolean add ( String string ) { } }
if ( pause ) { return false ; } try { write ( string ) ; return true ; } catch ( IOException ioe ) { throw new ExceptionConverter ( ioe ) ; }
public class Provenance { /** * syntactic sugar */ public Provenance addReason ( Coding t ) { } }
if ( t == null ) return this ; if ( this . reason == null ) this . reason = new ArrayList < Coding > ( ) ; this . reason . add ( t ) ; return this ;
public class DbWebServlet { /** * Return a list of order specifiers found in request * @ param request http request containing a ( possibly empty ) set of ' orderBy ' parms . * @ return List of order specifiers given by those orderBy parameters * @ throws Exception */ private List < OrderSpecifier > getOrderByList ( HttpServletRequest request ) throws Exception { } }
String [ ] orderByStrings = request . getParameterValues ( "orderBy" ) ; if ( null == orderByStrings ) { return null ; } if ( 0 == orderByStrings . length ) { return null ; } List < OrderSpecifier > result = new Vector < OrderSpecifier > ( ) ; for ( String orderByString : orderByStrings ) { OrderSpecifier orderSpecifier = parseOrderSpecifier ( orderByString ) ; result . add ( orderSpecifier ) ; } return result ;
public class ThreadExecutorMap { /** * Decorate the given { @ link ThreadFactory } and ensure { @ link # currentExecutor ( ) } will return { @ code eventExecutor } * when called from within the { @ link Runnable } during execution . */ public static ThreadFactory apply ( final ThreadFactory threadFactory , final EventExecutor eventExecutor ) { } }
ObjectUtil . checkNotNull ( threadFactory , "command" ) ; ObjectUtil . checkNotNull ( eventExecutor , "eventExecutor" ) ; return new ThreadFactory ( ) { @ Override public Thread newThread ( Runnable r ) { return threadFactory . newThread ( apply ( r , eventExecutor ) ) ; } } ;
public class FileStatusTaskStatusHandler {
    /**
     * Writes a JSON string with the status of the task (pending, successful, failed,
     * etc) and the URL for hosts, e.g.:
     * {"taskStatus":"completed","propertiesURL":"url","hostsURL":"url"}
     *
     * @param request the REST request carrying the required task-id parameter
     * @param response the REST response the JSON is written to
     */
    private void taskStatus(RESTRequest request, RESTResponse response) {
        String taskID = RESTHelper.getRequiredParam(request, APIConstants.PARAM_TASK_ID);
        String taskStatusJson = getMultipleRoutingHelper().getStatus(taskID);
        OutputHelper.writeJsonOutput(response, taskStatusJson);
    }
}
public class TransactionWriteRequest {
    /**
     * Adds an update operation (to be executed on the object) to the list of
     * transaction write operations. {@code transactionWriteExpression} is used to
     * conditionally update the object.
     *
     * @param object the object to update
     * @param transactionWriteExpression the condition for the update
     * @return this request, for chaining
     */
    public TransactionWriteRequest addUpdate(Object object, DynamoDBTransactionWriteExpression transactionWriteExpression) {
        // delegate with no return-values-on-condition-check-failure setting
        return addUpdate(object, transactionWriteExpression, null /* returnValuesOnConditionCheckFailure */);
    }
}
public class QuartzScheduler {
    /**
     * Pause all of the <code>{@link com.helger.quartz.IJobDetail}s</code> in the
     * matching groups - by pausing all of their <code>Trigger</code>s.
     *
     * @param groupMatcher matcher selecting the job groups to pause
     * @throws SchedulerException if the scheduler is in an invalid state
     */
    public void pauseJobs(final GroupMatcher<JobKey> groupMatcher) throws SchedulerException {
        validateState();
        final ICommonsCollection<String> pausedGroups = m_aResources.getJobStore().pauseJobs(_getOrDefault(groupMatcher));
        // wake the scheduler thread immediately so the pause takes effect
        notifySchedulerThread(0L);
        for (final String pausedGroup : pausedGroups) {
            notifySchedulerListenersPausedJobs(pausedGroup);
        }
    }
}
public class CodedConstant { /** * write list to { @ link CodedOutputStream } object . * @ param out target output stream to write * @ param order field order * @ param type field type * @ param list target list object to be serialized */ public static void writeToList ( CodedOutputStream out , int order , FieldType type , List list ) throws IOException { } }
if ( list == null ) { return ; } for ( Object object : list ) { writeObject ( out , order , type , object , true ) ; }
public class MarvinColorModelConverter { /** * Converts an image in RGB mode to BINARY mode * @ param img image * @ param threshold grays cale threshold * @ return new MarvinImage instance in BINARY mode */ public static MarvinImage rgbToBinary ( MarvinImage img , int threshold ) { } }
MarvinImage resultImage = new MarvinImage ( img . getWidth ( ) , img . getHeight ( ) , MarvinImage . COLOR_MODEL_BINARY ) ; for ( int y = 0 ; y < img . getHeight ( ) ; y ++ ) { for ( int x = 0 ; x < img . getWidth ( ) ; x ++ ) { int gray = ( int ) ( ( img . getIntComponent0 ( x , y ) * 0.3 ) + ( img . getIntComponent1 ( x , y ) * 0.59 ) + ( img . getIntComponent2 ( x , y ) * 0.11 ) ) ; if ( gray <= threshold ) { resultImage . setBinaryColor ( x , y , true ) ; } else { resultImage . setBinaryColor ( x , y , false ) ; } } } return resultImage ;
public class TrafficSplit { /** * < pre > * Mapping from version IDs within the service to fractional * ( 0.000 , 1 ] allocations of traffic for that version . Each version can * be specified only once , but some versions in the service may not * have any traffic allocation . Services that have traffic allocated * cannot be deleted until either the service is deleted or * their traffic allocation is removed . Allocations must sum to 1. * Up to two decimal place precision is supported for IP - based splits and * up to three decimal places is supported for cookie - based splits . * < / pre > * < code > map & lt ; string , double & gt ; allocations = 2 ; < / code > */ public double getAllocationsOrDefault ( java . lang . String key , double defaultValue ) { } }
if ( key == null ) { throw new java . lang . NullPointerException ( ) ; } java . util . Map < java . lang . String , java . lang . Double > map = internalGetAllocations ( ) . getMap ( ) ; return map . containsKey ( key ) ? map . get ( key ) : defaultValue ;
public class BufferUtil {
    /**
     * Bounds check the access range and throw an {@link IndexOutOfBoundsException} if exceeded.
     *
     * @param buffer to be checked.
     * @param index  at which the access will begin.
     * @param length of the range accessed.
     * @throws IndexOutOfBoundsException if the range falls outside the buffer
     *         or {@code length} is negative.
     */
    public static void boundsCheck (final byte[] buffer, final long index, final int length) {
        final int capacity = buffer.length;
        // Compute the end position in long arithmetic to avoid overflow.
        final long resultingPosition = index + (long) length;
        // length < 0 must be rejected explicitly: a negative length can make
        // resultingPosition <= capacity and slip through the end-position check.
        if (index < 0 || length < 0 || resultingPosition > capacity) {
            throw new IndexOutOfBoundsException (
                "index=" + index + " length=" + length + " capacity=" + capacity);
        }
    }
}
public class KernelPoints { /** * Computes the Euclidean distance in the kernel space between the * { @ code k } ' th KernelPoint and the given vector * @ param k the index of the KernelPoint in this set to contribute to the * dot product * @ param x the point to get the Euclidean distance to * @ param qi the query information for the vector , or { @ code null } only if * the kernel in use does not support acceleration . * @ return the Euclidean distance between the { @ code k } ' th KernelPoint and * { @ code x } in the kernel space */ public double dist ( int k , Vec x , List < Double > qi ) { } }
return points . get ( k ) . dist ( x , qi ) ;
public class Time { /** * Parse a duration * @ param duration 3h , 2mn , 7s * @ return The number of seconds */ public static int parseDuration ( String duration ) { } }
if ( duration == null ) { return 60 * 60 * 24 * 30 ; } Integer toAdd = null ; if ( days . matcher ( duration ) . matches ( ) ) { Matcher matcher = days . matcher ( duration ) ; matcher . matches ( ) ; toAdd = Integer . parseInt ( matcher . group ( 1 ) ) * ( 60 * 60 ) * 24 ; } else if ( hours . matcher ( duration ) . matches ( ) ) { Matcher matcher = hours . matcher ( duration ) ; matcher . matches ( ) ; toAdd = Integer . parseInt ( matcher . group ( 1 ) ) * ( 60 * 60 ) ; } else if ( minutes . matcher ( duration ) . matches ( ) ) { Matcher matcher = minutes . matcher ( duration ) ; matcher . matches ( ) ; toAdd = Integer . parseInt ( matcher . group ( 1 ) ) * ( 60 ) ; } else if ( seconds . matcher ( duration ) . matches ( ) ) { Matcher matcher = seconds . matcher ( duration ) ; matcher . matches ( ) ; toAdd = Integer . parseInt ( matcher . group ( 1 ) ) ; } else if ( "forever" . equals ( duration ) ) { toAdd = - 1 ; } if ( toAdd == null ) { throw new IllegalArgumentException ( "Invalid duration pattern : " + duration ) ; } return toAdd ;
public class WeakManagedBeanCache { /** * Add a ManagedBean instance ( held in a BeanO ) to this weak reference * cache , keyed by the wrapper instance associated with it . < p > * @ param wrapper object representing a wrapper for the managed bean * @ param bean ManagedBean context , which holds the ManagedBean instance */ public void add ( EJSWrapperBase wrapper , BeanO bean ) { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "add(" + Util . identity ( wrapper ) + ", " + bean + ")" ) ; poll ( ) ; WeakReference < EJSWrapperBase > key = new WeakReference < EJSWrapperBase > ( wrapper , ivRefQueue ) ; synchronized ( ivCache ) { ivCache . put ( key , bean ) ; } if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "add : key = " + key ) ;
public class LongStreamEx { /** * does not add overhead as it appears in bytecode anyways as bridge method */ @ Override public < U > U chain ( Function < ? super LongStreamEx , U > mapper ) { } }
return mapper . apply ( this ) ;
public class ICPImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setXFilSize ( Integer newXFilSize ) { } }
Integer oldXFilSize = xFilSize ; xFilSize = newXFilSize ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . ICP__XFIL_SIZE , oldXFilSize , xFilSize ) ) ;
public class NameUtils { /** * Calculate the name for a getter method to retrieve the specified property . * @ param propertyName The property name * @ return The name for the getter method for this property , if it were to exist , i . e . getConstraints */ public static String getGetterName ( String propertyName ) { } }
final String suffix = getSuffixForGetterOrSetter ( propertyName ) ; return PROPERTY_GET_PREFIX + suffix ;
public class BdbStoreShutdownHook { /** * Cleanly shuts down all databases in the provided environment . * @ throws IOException if a database delete attempt failed . * @ throws DatabaseException if an error occurs while closing the class * catalog database . */ void shutdown ( ) throws IOException { } }
synchronized ( this ) { for ( BdbEnvironmentInfo envInfo : this . envInfos ) { try { envInfo . getClassCatalog ( ) . close ( ) ; } catch ( DatabaseException ignore ) { LOGGER . log ( Level . SEVERE , "Failure closing class catalog" , ignore ) ; } envInfo . closeAndRemoveAllDatabaseHandles ( ) ; try ( Environment env = envInfo . getEnvironment ( ) ) { if ( this . deleteOnExit ) { FileUtil . deleteDirectory ( env . getHome ( ) ) ; } } } this . envInfos . clear ( ) ; }
public class PreferenceActivity { /** * Returns the char sequence , which is specified by a specific intent extra . The char sequence * can either be specified as a string or as a resource id . * @ param intent * The intent , which specifies the char sequence , as an instance of the class { @ link * Intent } . The intent may not be null * @ param name * The name of the intent extra , which specifies the char sequence , as a { @ link String } . * The name may not be null * @ return The char sequence , which is specified by the given intent , as an instance of the * class { @ link CharSequence } or null , if the intent does not specify a char sequence with the * given name */ private CharSequence getCharSequenceFromIntent ( @ NonNull final Intent intent , @ NonNull final String name ) { } }
CharSequence charSequence = intent . getCharSequenceExtra ( name ) ; if ( charSequence == null ) { int resourceId = intent . getIntExtra ( name , 0 ) ; if ( resourceId != 0 ) { charSequence = getText ( resourceId ) ; } } return charSequence ;
public class WDataTable { /** * Retrieves the starting row index for the current page . Will always return zero for tables which are not * paginated . * @ return the starting row index for the current page . */ private int getCurrentPageStartRow ( ) { } }
int startRow = 0 ; if ( getPaginationMode ( ) != PaginationMode . NONE ) { int rowsPerPage = getRowsPerPage ( ) ; TableDataModel model = getDataModel ( ) ; if ( model instanceof TreeTableDataModel ) { // For tree tables , pagination only occurs on first - level nodes ( ie . those // underneath the root node ) , however they might not be consecutively // numbered . Therefore , the start and end row indices need to be adjusted . TreeTableDataModel treeModel = ( TreeTableDataModel ) model ; TreeNode root = treeModel . getNodeAtLine ( 0 ) . getRoot ( ) ; int startNode = getCurrentPage ( ) * rowsPerPage ; startRow = ( ( TableTreeNode ) root . getChildAt ( startNode ) ) . getRowIndex ( ) - 1 ; // -1 as the root is not included in the table } else { startRow = getCurrentPage ( ) * rowsPerPage ; } } return startRow ;
public class CollectionHelper { /** * Retrieve the tickets and compile them from the different sources * @ param basedTickets A list of based tickets * @ param methodAnnotation The method annotation that could contain tickets * @ param classAnnotation The class annotation that could contain tickets * @ return The tickets from the different sources */ public static Set < String > getTickets ( Set < String > basedTickets , RoxableTest methodAnnotation , RoxableTestClass classAnnotation ) { } }
Set < String > tickets ; if ( basedTickets == null ) { tickets = new HashSet < > ( ) ; } else { tickets = populateTickets ( basedTickets , new HashSet < String > ( ) ) ; } if ( classAnnotation != null && classAnnotation . tickets ( ) != null ) { tickets = populateTickets ( new HashSet < > ( Arrays . asList ( classAnnotation . tickets ( ) ) ) , tickets ) ; } if ( methodAnnotation != null && methodAnnotation . tickets ( ) != null ) { tickets = populateTickets ( new HashSet < > ( Arrays . asList ( methodAnnotation . tickets ( ) ) ) , tickets ) ; } return tickets ;
public class JmxClient { /** * Return an array of the attributes associated with the bean name . */ public MBeanAttributeInfo [ ] getAttributesInfo ( ObjectName name ) throws JMException { } }
checkClientConnected ( ) ; try { return mbeanConn . getMBeanInfo ( name ) . getAttributes ( ) ; } catch ( Exception e ) { throw createJmException ( "Problems getting bean information from " + name , e ) ; }
public class OMVRBTree {
    /**
     * Remove a node from the tree.
     *
     * @param p Node to remove
     * @return Node that was removed. Passed and removed nodes may be different in case node to remove contains two children. In this
     *         case node successor will be found and removed but it's content will be copied to the node that was passed in method.
     */
    protected OMVRBTreeEntry<K, V> removeNode(OMVRBTreeEntry<K, V> p) {
        // Structural modification: invalidate any live iterators (fail-fast).
        modCount++;

        // If strictly internal (two children), copy successor's element to p
        // and then make p point to successor. The successor has at most one
        // child, reducing the problem to the single-child / leaf cases below.
        if (p.getLeft() != null && p.getRight() != null) {
            OMVRBTreeEntry<K, V> s = next(p);
            p.copyFrom(s);
            p = s;
        } // p has 2 children

        // Start fixup at replacement node, if it exists. At this point p has
        // at most one child; that child (if any) takes p's place.
        final OMVRBTreeEntry<K, V> replacement = (p.getLeft() != null ? p.getLeft() : p.getRight());

        if (replacement != null) {
            // Link replacement to parent, handling the root specially.
            replacement.setParent(p.getParent());
            if (p.getParent() == null)
                setRoot(replacement);
            else if (p == p.getParent().getLeft())
                p.getParent().setLeft(replacement);
            else
                p.getParent().setRight(replacement);

            // Null out links so they are OK to use by fixAfterDeletion.
            p.setLeft(null);
            p.setRight(null);
            p.setParent(null);

            // Fix replacement: removing a BLACK node may violate the
            // equal-black-height invariant, so rebalance from the replacement.
            if (p.getColor() == BLACK)
                fixAfterDeletion(replacement);
        } else if (p.getParent() == null) { // return if we are the only node.
            clear();
        } else { // No children. Use self as phantom replacement and unlink.
            // Rebalance BEFORE unlinking, so fixAfterDeletion can still walk
            // up from p through its parent chain.
            if (p.getColor() == BLACK)
                fixAfterDeletion(p);

            if (p.getParent() != null) {
                if (p == p.getParent().getLeft())
                    p.getParent().setLeft(null);
                else if (p == p.getParent().getRight())
                    p.getParent().setRight(null);
                p.setParent(null);
            }
        }
        return p;
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcPositiveRatioMeasure ( ) { } }
if ( ifcPositiveRatioMeasureEClass == null ) { ifcPositiveRatioMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 903 ) ; } return ifcPositiveRatioMeasureEClass ;
public class Features { /** * Get a feature from its class or interface . * @ param < C > The custom feature type . * @ param feature The feature class or interface . * @ return The feature instance . * @ throws LionEngineException If the feature was not found . */ public < C extends Feature > C get ( Class < C > feature ) { } }
final Feature found = typeToFeature . get ( feature ) ; if ( found != null ) { return feature . cast ( found ) ; } throw new LionEngineException ( ERROR_FEATURE_NOT_FOUND + feature . getName ( ) ) ;