signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class BlockTemplate { /** * If the tree is a { @ link JCBlock } , returns a list of disjoint matches corresponding to the exact
* list of template statements found consecutively ; otherwise , returns an empty list . */
@ Override public Iterable < BlockTemplateMatch > match ( JCTree tree , Context context ) { } } | // TODO ( lowasser ) : consider nonconsecutive matches ?
if ( tree instanceof JCBlock ) { JCBlock block = ( JCBlock ) tree ; ImmutableList < JCStatement > targetStatements = ImmutableList . copyOf ( block . getStatements ( ) ) ; return matchesStartingAnywhere ( block , 0 , targetStatements , context ) . first ( ) . or ( List . < BlockTemplateMatch > nil ( ) ) ; } return ImmutableList . of ( ) ; |
public class FNCImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setFNNRGLen ( Integer newFNNRGLen ) { } } | Integer oldFNNRGLen = fnnrgLen ; fnnrgLen = newFNNRGLen ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . FNC__FNNRG_LEN , oldFNNRGLen , fnnrgLen ) ) ; |
public class SegmentationHelper { /** * Read all of the bytes in an input stream .
* @ param bytes the { @ link InputStream } of bytes to read
* @ return an array of all bytes retrieved from the stream
* @ throws IOException if the stream fails */
public static byte [ ] readAll ( InputStream bytes ) throws IOException { } } | ByteArrayOutputStream builder = new ByteArrayOutputStream ( ) ; int read = bytes . read ( ) ; while ( read != - 1 ) { builder . write ( read ) ; read = bytes . read ( ) ; } builder . flush ( ) ; bytes . close ( ) ; return builder . toByteArray ( ) ; |
public class FacesServlet { /** * < p class = " changed _ modified _ 2_0 " > < span
* class = " changed _ modified _ 2_2 " > Process < / span > an incoming request ,
* and create the corresponding response according to the following
* specification . < / p >
* < div class = " changed _ modified _ 2_0 " >
* < p > If the < code > request < / code > and < code > response < / code >
* arguments to this method are not instances of
* < code > HttpServletRequest < / code > and
* < code > HttpServletResponse < / code > , respectively , the results of
* invoking this method are undefined . < / p >
* < p > This method must respond to requests that < span
* class = " changed _ modified _ 2_2 " > contain < / span > the following
* strings by invoking the < code > sendError < / code > method on the
* response argument ( cast to < code > HttpServletResponse < / code > ) ,
* passing the code < code > HttpServletResponse . SC _ NOT _ FOUND < / code > as
* the argument . < / p >
* < ul >
* < pre > < code >
* / WEB - INF /
* / WEB - INF
* / META - INF /
* / META - INF
* < / code > < / pre >
* < / ul >
* < p > If none of the cases described above in the specification for
* this method apply to the servicing of this request , the following
* action must be taken to service the request . < / p >
* < p > Acquire a { @ link FacesContext } instance for this request . < / p >
* < p > Acquire the < code > ResourceHandler < / code > for this request by
* calling { @ link
* javax . faces . application . Application # getResourceHandler } . Call
* { @ link
* javax . faces . application . ResourceHandler # isResourceRequest } . If
* this returns < code > true < / code > call { @ link
* javax . faces . application . ResourceHandler # handleResourceRequest } .
* If this returns < code > false < / code > , < span
* class = " changed _ added _ 2_2 " > call { @ link
* javax . faces . lifecycle . Lifecycle # attachWindow } followed by < / span >
* { @ link javax . faces . lifecycle . Lifecycle # execute } followed by
* { @ link javax . faces . lifecycle . Lifecycle # render } . If a { @ link
* javax . faces . FacesException } is thrown in either case , extract the
* cause from the < code > FacesException < / code > . If the cause is
* < code > null < / code > extract the message from the
* < code > FacesException < / code > , put it inside of a new
* < code > ServletException < / code > instance , and pass the
* < code > FacesException < / code > instance as the root cause , then
* rethrow the < code > ServletException < / code > instance . If the cause
* is an instance of < code > ServletException < / code > , rethrow the
* cause . If the cause is an instance of < code > IOException < / code > ,
* rethrow the cause . Otherwise , create a new
* < code > ServletException < / code > instance , passing the message from
* the cause , as the first argument , and the cause itself as the
* second argument . < / p >
* < p class = " changed _ modified _ 2_0 _ rev _ a " > The implementation must
* make it so { @ link javax . faces . context . FacesContext # release } is
* called within a finally block as late as possible in the
* processing for the JSF related portion of this request . < / p >
* < / div >
* @ param req The servlet request we are processing
* @ param resp The servlet response we are creating
* @ throws IOException if an input / output error occurs during processing
* @ throws ServletException if a servlet error occurs during processing */
@ Override public void service ( ServletRequest req , ServletResponse resp ) throws IOException , ServletException { } } | HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) resp ; requestStart ( request . getRequestURI ( ) ) ; // V3 Probe hook
if ( ! isHttpMethodValid ( request ) ) { response . sendError ( HttpServletResponse . SC_BAD_REQUEST ) ; return ; } if ( Thread . currentThread ( ) . isInterrupted ( ) ) { if ( LOGGER . isLoggable ( Level . FINER ) ) { LOGGER . log ( Level . FINE , "Thread {0} given to FacesServlet.service() in interrupted state" , Thread . currentThread ( ) . getName ( ) ) ; } } // If prefix mapped , then ensure requests for / WEB - INF are
// not processed .
String pathInfo = request . getPathInfo ( ) ; if ( pathInfo != null ) { pathInfo = pathInfo . toUpperCase ( ) ; if ( pathInfo . contains ( "/WEB-INF/" ) || pathInfo . contains ( "/WEB-INF" ) || pathInfo . contains ( "/META-INF/" ) || pathInfo . contains ( "/META-INF" ) ) { response . sendError ( HttpServletResponse . SC_NOT_FOUND ) ; return ; } } if ( ! initFacesContextReleased ) { FacesContext initFacesContext = FacesContext . getCurrentInstance ( ) ; if ( null != initFacesContext ) { initFacesContext . release ( ) ; } // Bug 20458755 : ensure the special factory is removed , so as not
// to incur an additional performance penalty at request processing
// time .
FactoryFinder . getFactory ( "com.sun.faces.ServletContextFacesContextFactory_Removal" ) ; initFacesContextReleased = true ; } // Acquire the FacesContext instance for this request
FacesContext context = facesContextFactory . getFacesContext ( servletConfig . getServletContext ( ) , request , response , lifecycle ) ; // Execute the request processing lifecycle for this request
try { ResourceHandler handler = context . getApplication ( ) . getResourceHandler ( ) ; if ( handler . isResourceRequest ( context ) ) { handler . handleResourceRequest ( context ) ; } else { lifecycle . attachWindow ( context ) ; lifecycle . execute ( context ) ; lifecycle . render ( context ) ; } } catch ( FacesException e ) { Throwable t = e . getCause ( ) ; if ( t == null ) { throw new ServletException ( e . getMessage ( ) , e ) ; } else { if ( t instanceof ServletException ) { throw ( ( ServletException ) t ) ; } else if ( t instanceof IOException ) { throw ( ( IOException ) t ) ; } else { throw new ServletException ( t . getMessage ( ) , t ) ; } } } finally { // Release the FacesContext instance for this request
context . release ( ) ; } requestEnd ( ) ; // V3 Probe hook |
public class Derulo { /** * Get a java object from a JSON value
* @ param jsonString a JSON value in string form
* @ return a Number , Boolean , JsonObject , JsonArray or JsonNull
* @ throws JsonException */
public static Object fromJSON ( String jsonString ) { } } | List < Token > tokens = Derulo . toTokens ( jsonString ) ; return fromJSON ( tokens ) ; |
public class CmsTouch { /** * Rewrites the content of the given file . < p >
* @ param resource the resource to rewrite the content for
* @ throws CmsException if something goes wrong */
private static void hardTouch ( CmsObject cms , CmsResource resource ) throws CmsException { } } | CmsFile file = cms . readFile ( resource ) ; file . setContents ( file . getContents ( ) ) ; cms . writeFile ( file ) ; |
public class EditManager { /** * Evaluate whether attribute changes exist in the ilfChild and if so apply them . Returns true
* if some changes existed . If changes existed but matched those in the original node then they
* are not applicable , are removed from the editSet , and false is returned . */
public static boolean applyEditSet ( Element plfChild , Element original ) { } } | // first get edit set if it exists
Element editSet = null ; try { editSet = getEditSet ( plfChild , null , null , false ) ; } catch ( Exception e ) { // should never occur unless problem during create in getEditSet
// and we are telling it not to create .
return false ; } if ( editSet == null || editSet . getChildNodes ( ) . getLength ( ) == 0 ) return false ; if ( original . getAttribute ( Constants . ATT_EDIT_ALLOWED ) . equals ( "false" ) ) { // can ' t change anymore so discard changes
plfChild . removeChild ( editSet ) ; return false ; } Document ilf = original . getOwnerDocument ( ) ; boolean attributeChanged = false ; Element edit = ( Element ) editSet . getFirstChild ( ) ; while ( edit != null ) { String attribName = edit . getAttribute ( Constants . ATT_NAME ) ; Attr attr = plfChild . getAttributeNode ( attribName ) ; // preferences are only updated at preference storage time so
// if a preference change exists in the edit set assume it is
// still valid so that the node being edited will persist in
// the PLF .
if ( edit . getNodeName ( ) . equals ( Constants . ELM_PREF ) ) attributeChanged = true ; else if ( attr == null ) { // attribute removed . See if needs removing in original .
attr = original . getAttributeNode ( attribName ) ; if ( attr == null ) // edit irrelevant ,
editSet . removeChild ( edit ) ; else { // edit differs , apply to original
original . removeAttribute ( attribName ) ; attributeChanged = true ; } } else { // attribute there , see if original is also there
Attr origAttr = original . getAttributeNode ( attribName ) ; if ( origAttr == null ) { // original attribute isn ' t defined so need to add
origAttr = ( Attr ) ilf . importNode ( attr , true ) ; original . setAttributeNode ( origAttr ) ; attributeChanged = true ; } else { // original attrib found , see if different
if ( attr . getValue ( ) . equals ( origAttr . getValue ( ) ) ) { // they are the same , edit irrelevant
editSet . removeChild ( edit ) ; } else { // edit differs , apply to original
origAttr . setValue ( attr . getValue ( ) ) ; attributeChanged = true ; } } } edit = ( Element ) edit . getNextSibling ( ) ; } return attributeChanged ; |
public class JsonParser { /** * Main parsing routine
* @ throws JsonParseException
* In case a parse error occurs . */
public void parse ( ) throws JsonParseException { } } | _readValue ( ) ; // Check for trailing whitespaces
_skipSpaces ( ) ; final IJsonParsePosition aStartPos = m_aPos . getClone ( ) ; // Check for expected end of input
final int c = _readChar ( ) ; if ( c != EOI ) throw _parseEx ( aStartPos , "Invalid character " + _getPrintableChar ( c ) + " after JSON root object" ) ; |
public class fe_copy { /** * h = f */
public static void fe_copy ( int [ ] h , int [ ] f ) { } } | int f0 = f [ 0 ] ; int f1 = f [ 1 ] ; int f2 = f [ 2 ] ; int f3 = f [ 3 ] ; int f4 = f [ 4 ] ; int f5 = f [ 5 ] ; int f6 = f [ 6 ] ; int f7 = f [ 7 ] ; int f8 = f [ 8 ] ; int f9 = f [ 9 ] ; h [ 0 ] = f0 ; h [ 1 ] = f1 ; h [ 2 ] = f2 ; h [ 3 ] = f3 ; h [ 4 ] = f4 ; h [ 5 ] = f5 ; h [ 6 ] = f6 ; h [ 7 ] = f7 ; h [ 8 ] = f8 ; h [ 9 ] = f9 ; |
public class SelectOneMenuRenderer { /** * Renders the optional label . This method is protected in order to allow
* third - party frameworks to derive from it .
* @ param rw
* the response writer
* @ param clientId
* the id used by the label to refernce the input field
* @ throws IOException
* may be thrown by the response writer */
protected void addLabel ( ResponseWriter rw , String clientId , SelectOneMenu menu , String outerClientId ) throws IOException { } } | String label = menu . getLabel ( ) ; { if ( ! menu . isRenderLabel ( ) ) { label = null ; } } if ( label != null ) { rw . startElement ( "label" , menu ) ; rw . writeAttribute ( "for" , clientId , "for" ) ; generateErrorAndRequiredClass ( menu , rw , outerClientId , menu . getLabelStyleClass ( ) , Responsive . getResponsiveLabelClass ( menu ) , "control-label" ) ; writeAttribute ( rw , "style" , menu . getLabelStyle ( ) ) ; rw . writeText ( label , null ) ; rw . endElement ( "label" ) ; } |
public class StaticTypeCheckingVisitor { /** * A helper method which determines which receiver class should be used in error messages when a field or attribute
* is not found . The returned type class depends on whether we have temporary type information available ( due to
* instanceof checks ) and whether there is a single candidate in that case .
* @ param expr the expression for which an unknown field has been found
* @ param type the type of the expression ( used as fallback type )
* @ return if temporary information is available and there ' s only one type , returns the temporary type class
* otherwise falls back to the provided type class . */
protected ClassNode findCurrentInstanceOfClass ( final Expression expr , final ClassNode type ) { } } | if ( ! typeCheckingContext . temporaryIfBranchTypeInformation . empty ( ) ) { List < ClassNode > nodes = getTemporaryTypesForExpression ( expr ) ; if ( nodes != null && nodes . size ( ) == 1 ) return nodes . get ( 0 ) ; } return type ; |
public class ArtifactoryServer { /** * To populate the dropdown list from the jelly */
public List < Integer > getConnectionRetries ( ) { } } | List < Integer > items = new ArrayList < Integer > ( ) ; for ( int i = 0 ; i < 10 ; i ++ ) { items . add ( i ) ; } return items ; |
public class NioGroovyMethods { /** * Create a new ObjectOutputStream for this path and then pass it to the
* closure . This method ensures the stream is closed after the closure
* returns .
* @ param self a Path
* @ param closure a closure
* @ return the value returned by the closure
* @ throws java . io . IOException if an IOException occurs .
* @ see IOGroovyMethods # withStream ( java . io . OutputStream , groovy . lang . Closure )
* @ since 2.3.0 */
public static < T > T withObjectOutputStream ( Path self , @ ClosureParams ( value = SimpleType . class , options = "java.io.ObjectOutputStream" ) Closure < T > closure ) throws IOException { } } | return IOGroovyMethods . withStream ( newObjectOutputStream ( self ) , closure ) ; |
public class JdbcUtil { /** * Imports the data from < code > DataSet < / code > to database .
* @ param dataset
* @ param selectColumnNames
* @ param offset
* @ param count
* @ param stmt the column order in the sql must be consistent with the column order in the DataSet .
* @ return
* @ throws UncheckedSQLException */
public static int importData ( final DataSet dataset , final Collection < String > selectColumnNames , final int offset , final int count , final PreparedStatement stmt , final int batchSize , final int batchInterval ) throws UncheckedSQLException { } } | return importData ( dataset , selectColumnNames , offset , count , Fn . alwaysTrue ( ) , stmt , batchSize , batchInterval ) ; |
public class JKLogger { /** * Trace .
* @ param format the format
* @ param msg the msg */
public void trace ( Object format , Object ... msg ) { } } | if ( logger . isTraceEnabled ( ) ) { if ( format . toString ( ) . contains ( "{" ) ) { logger . trace ( format . toString ( ) , msg ) ; } else { logger . trace ( format . toString ( ) . concat ( " " ) . concat ( JKStringUtil . concat ( msg ) ) ) ; } } |
public class MailMessageConverter { /** * Construct base64 body part from image data .
* @ param image
* @ param contentType
* @ return
* @ throws IOException */
protected BodyPart handleImageBinaryPart ( MimePart image , String contentType ) throws IOException , MessagingException { } } | ByteArrayOutputStream bos = new ByteArrayOutputStream ( ) ; FileCopyUtils . copy ( image . getInputStream ( ) , bos ) ; String base64 = Base64 . encodeBase64String ( bos . toByteArray ( ) ) ; return new BodyPart ( base64 , contentType ) ; |
public class ZipUtils { /** * APDPlat中的重要打包机制
* 将jar文件中的某个文件夹里面的内容复制到某个文件夹
* @ param jar 包含静态资源的jar包
* @ param subDir jar中包含待复制静态资源的文件夹名称
* @ param loc 静态资源复制到的目标文件夹
* @ param force 目标静态资源存在的时候是否强制覆盖 */
public static void unZip ( String jar , String subDir , String loc , boolean force ) { } } | try { File base = new File ( loc ) ; if ( ! base . exists ( ) ) { base . mkdirs ( ) ; } ZipFile zip = new ZipFile ( new File ( jar ) ) ; Enumeration < ? extends ZipEntry > entrys = zip . entries ( ) ; while ( entrys . hasMoreElements ( ) ) { ZipEntry entry = entrys . nextElement ( ) ; String name = entry . getName ( ) ; if ( ! name . startsWith ( subDir ) ) { continue ; } // 去掉subDir
name = name . replace ( subDir , "" ) . trim ( ) ; if ( name . length ( ) < 2 ) { LOG . debug ( name + " 长度 < 2" ) ; continue ; } if ( entry . isDirectory ( ) ) { File dir = new File ( base , name ) ; if ( ! dir . exists ( ) ) { dir . mkdirs ( ) ; LOG . debug ( "创建目录" ) ; } else { LOG . debug ( "目录已经存在" ) ; } LOG . debug ( name + " 是目录" ) ; } else { File file = new File ( base , name ) ; if ( file . exists ( ) && force ) { file . delete ( ) ; } if ( ! file . exists ( ) ) { InputStream in = zip . getInputStream ( entry ) ; Files . copy ( in , file . toPath ( ) ) ; LOG . debug ( "创建文件" ) ; } else { LOG . debug ( "文件已经存在" ) ; } LOG . debug ( name + " 不是目录" ) ; } } } catch ( ZipException ex ) { LOG . error ( "文件解压失败" , ex ) ; } catch ( IOException ex ) { LOG . error ( "文件操作失败" , ex ) ; } |
public class ExpiresFilter { /** * Parse configuration lines like '
* < tt > access plus 1 month 15 days 2 hours < / tt > ' or '
* < tt > modification 1 day 2 hours 5 seconds < / tt > '
* @ param line */
protected ExpiresConfiguration parseExpiresConfiguration ( String line ) { } } | line = line . trim ( ) ; StringTokenizer tokenizer = new StringTokenizer ( line , " " ) ; String currentToken ; try { currentToken = tokenizer . nextToken ( ) ; } catch ( NoSuchElementException e ) { throw new IllegalStateException ( "Starting point (access|now|modification|a<seconds>|m<seconds>) not found in directive '" + line + "'" ) ; } StartingPoint startingPoint ; if ( "access" . equalsIgnoreCase ( currentToken ) || "now" . equalsIgnoreCase ( currentToken ) ) { startingPoint = StartingPoint . ACCESS_TIME ; } else if ( "modification" . equalsIgnoreCase ( currentToken ) ) { startingPoint = StartingPoint . LAST_MODIFICATION_TIME ; } else if ( ! tokenizer . hasMoreTokens ( ) && startsWithIgnoreCase ( currentToken , "a" ) ) { startingPoint = StartingPoint . ACCESS_TIME ; // trick : convert duration configuration from old to new style
tokenizer = new StringTokenizer ( currentToken . substring ( 1 ) + " seconds" , " " ) ; } else if ( ! tokenizer . hasMoreTokens ( ) && startsWithIgnoreCase ( currentToken , "m" ) ) { startingPoint = StartingPoint . LAST_MODIFICATION_TIME ; // trick : convert duration configuration from old to new style
tokenizer = new StringTokenizer ( currentToken . substring ( 1 ) + " seconds" , " " ) ; } else { throw new IllegalStateException ( "Invalid starting point (access|now|modification|a<seconds>|m<seconds>) '" + currentToken + "' in directive '" + line + "'" ) ; } try { currentToken = tokenizer . nextToken ( ) ; } catch ( NoSuchElementException e ) { throw new IllegalStateException ( "Duration not found in directive '" + line + "'" ) ; } if ( "plus" . equalsIgnoreCase ( currentToken ) ) { // skip
try { currentToken = tokenizer . nextToken ( ) ; } catch ( NoSuchElementException e ) { throw new IllegalStateException ( "Duration not found in directive '" + line + "'" ) ; } } List < Duration > durations = new ArrayList < Duration > ( ) ; while ( currentToken != null ) { int amount ; try { amount = Integer . parseInt ( currentToken ) ; } catch ( NumberFormatException e ) { throw new IllegalStateException ( "Invalid duration (number) '" + currentToken + "' in directive '" + line + "'" ) ; } try { currentToken = tokenizer . nextToken ( ) ; } catch ( NoSuchElementException e ) { throw new IllegalStateException ( "Duration unit not found after amount " + amount + " in directive '" + line + "'" ) ; } DurationUnit durationUnit ; if ( "year" . equalsIgnoreCase ( currentToken ) || "years" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . YEAR ; } else if ( "month" . equalsIgnoreCase ( currentToken ) || "months" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . MONTH ; } else if ( "week" . equalsIgnoreCase ( currentToken ) || "weeks" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . WEEK ; } else if ( "day" . equalsIgnoreCase ( currentToken ) || "days" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . DAY ; } else if ( "hour" . equalsIgnoreCase ( currentToken ) || "hours" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . HOUR ; } else if ( "minute" . equalsIgnoreCase ( currentToken ) || "minutes" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . MINUTE ; } else if ( "second" . equalsIgnoreCase ( currentToken ) || "seconds" . equalsIgnoreCase ( currentToken ) ) { durationUnit = DurationUnit . SECOND ; } else { throw new IllegalStateException ( "Invalid duration unit (years|months|weeks|days|hours|minutes|seconds) '" + currentToken + "' in directive '" + line + "'" ) ; } Duration duration = new Duration ( amount , durationUnit ) ; durations . 
add ( duration ) ; if ( tokenizer . hasMoreTokens ( ) ) { currentToken = tokenizer . nextToken ( ) ; } else { currentToken = null ; } } return new ExpiresConfiguration ( startingPoint , durations ) ; |
public class LogPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getSettingsSaved ( ) { } } | if ( settingsSavedEClass == null ) { settingsSavedEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( LogPackage . eNS_URI ) . getEClassifiers ( ) . get ( 12 ) ; } return settingsSavedEClass ; |
public class CorporationApi { /** * Get corporation shareholders Return the current shareholders of a
* corporation . - - - This route is cached for up to 3600 seconds - - - Requires
* one of the following EVE corporation role ( s ) : Director SSO Scope :
* esi - wallet . read _ corporation _ wallets . v1
* @ param corporationId
* An EVE corporation ID ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param page
* Which page of results to return ( optional , default to 1)
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return List & lt ; CorporationShareholdersResponse & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < CorporationShareholdersResponse > getCorporationsCorporationIdShareholders ( Integer corporationId , String datasource , String ifNoneMatch , Integer page , String token ) throws ApiException { } } | ApiResponse < List < CorporationShareholdersResponse > > resp = getCorporationsCorporationIdShareholdersWithHttpInfo ( corporationId , datasource , ifNoneMatch , page , token ) ; return resp . getData ( ) ; |
public class VersionImpl { /** * { @ inheritDoc } */
public Version [ ] getPredecessors ( ) throws RepositoryException { } } | checkValid ( ) ; PropertyData predecessorsData = ( PropertyData ) dataManager . getItemData ( nodeData ( ) , new QPathEntry ( Constants . JCR_PREDECESSORS , 0 ) , ItemType . PROPERTY ) ; if ( predecessorsData == null ) { return new Version [ 0 ] ; } List < ValueData > predecessorsValues = predecessorsData . getValues ( ) ; Version [ ] predecessors = new Version [ predecessorsValues . size ( ) ] ; for ( int i = 0 ; i < predecessorsValues . size ( ) ; i ++ ) { String videntifier = ValueDataUtil . getString ( predecessorsValues . get ( i ) ) ; VersionImpl version = ( VersionImpl ) dataManager . getItemByIdentifier ( videntifier , false , false ) ; if ( version != null ) { predecessors [ i ] = version ; } else { throw new RepositoryException ( "Predecessor version is not found " + videntifier + ", this version " + getPath ( ) ) ; } } return predecessors ; |
public class ToLongFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */
@ Nonnull public static < T > ToLongFunctionBuilder < T > toLongFunction ( Consumer < ToLongFunction < T > > consumer ) { } } | return new ToLongFunctionBuilder ( consumer ) ; |
public class CommerceTaxMethodWrapper { /** * Sets the localized names of this commerce tax method from the map of locales and localized names , and sets the default locale .
* @ param nameMap the locales and localized names of this commerce tax method
* @ param defaultLocale the default locale */
@ Override public void setNameMap ( Map < java . util . Locale , String > nameMap , java . util . Locale defaultLocale ) { } } | _commerceTaxMethod . setNameMap ( nameMap , defaultLocale ) ; |
public class UniverseApi { /** * Get constellation information ( asynchronously ) Get information on a
* constellation - - - This route expires daily at 11:05
* @ param constellationId
* constellation _ id integer ( required )
* @ param acceptLanguage
* Language to use in the response ( optional , default to en - us )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param language
* Language to use in the response , takes precedence over
* Accept - Language ( optional , default to en - us )
* @ param callback
* The callback to be executed when the API call finishes
* @ return The request call
* @ throws ApiException
* If fail to process the API call , e . g . serializing the request
* body object */
public com . squareup . okhttp . Call getUniverseConstellationsConstellationIdAsync ( Integer constellationId , String acceptLanguage , String datasource , String ifNoneMatch , String language , final ApiCallback < ConstellationResponse > callback ) throws ApiException { } } | com . squareup . okhttp . Call call = getUniverseConstellationsConstellationIdValidateBeforeCall ( constellationId , acceptLanguage , datasource , ifNoneMatch , language , callback ) ; Type localVarReturnType = new TypeToken < ConstellationResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ; |
public class ZipUtil { /** * Compresses the given directory and all of its sub - directories into the passed in
* stream . It is the responsibility of the caller to close the passed in
* stream properly .
* @ param sourceDir
* root directory .
* @ param os
* output stream ( will be buffered in this method ) .
* @ param compressionLevel
* compression level
* @ since 1.10 */
public static void pack ( File sourceDir , OutputStream os , int compressionLevel ) { } } | pack ( sourceDir , os , IdentityNameMapper . INSTANCE , compressionLevel ) ; |
public class MarkLogicClientImpl { /** * as we use mergeGraphs , baseURI is always file . toURI */
public void performAdd ( File file , String baseURI , RDFFormat dataFormat , Transaction tx , Resource ... contexts ) throws RDFParseException { } } | try { graphManager . setDefaultMimetype ( dataFormat . getDefaultMIMEType ( ) ) ; if ( dataFormat . equals ( RDFFormat . NQUADS ) || dataFormat . equals ( RDFFormat . TRIG ) ) { graphManager . mergeGraphs ( new FileHandle ( file ) , tx ) ; } else { if ( notNull ( contexts ) && contexts . length > 0 ) { for ( int i = 0 ; i < contexts . length ; i ++ ) { if ( notNull ( contexts [ i ] ) ) { graphManager . mergeAs ( contexts [ i ] . toString ( ) , new FileHandle ( file ) , getGraphPerms ( ) , tx ) ; } else { graphManager . mergeAs ( DEFAULT_GRAPH_URI , new FileHandle ( file ) , getGraphPerms ( ) , tx ) ; } } } else { graphManager . mergeAs ( DEFAULT_GRAPH_URI , new FileHandle ( file ) , getGraphPerms ( ) , tx ) ; } } } catch ( FailedRequestException e ) { logger . error ( e . getLocalizedMessage ( ) ) ; throw new RDFParseException ( "Request to MarkLogic server failed, check file and format." ) ; } |
public class AutoValueOrOneOfProcessor { /** * Returns the contents of the { @ code AutoValue . CopyAnnotations . exclude } element , as a set of
* { @ code TypeMirror } where each type is an annotation type . */
private Set < TypeMirror > getExcludedAnnotationTypes ( Element element ) { } } | Optional < AnnotationMirror > maybeAnnotation = getAnnotationMirror ( element , COPY_ANNOTATIONS_NAME ) ; if ( ! maybeAnnotation . isPresent ( ) ) { return ImmutableSet . of ( ) ; } @ SuppressWarnings ( "unchecked" ) List < AnnotationValue > excludedClasses = ( List < AnnotationValue > ) getAnnotationValue ( maybeAnnotation . get ( ) , "exclude" ) . getValue ( ) ; return excludedClasses . stream ( ) . map ( annotationValue -> ( DeclaredType ) annotationValue . getValue ( ) ) . collect ( toCollection ( TypeMirrorSet :: new ) ) ; |
public class LinkedListNodeImpl { /** * Inserts newElement after currentElement in the list defined by this
* LinkedListManager .
* @ param currentElement the reference element
* @ param newElement the new element */
@ Override public void listInsertAfter ( final T currentElement , final T newElement ) throws FrameworkException { } } | if ( currentElement . getUuid ( ) . equals ( newElement . getUuid ( ) ) ) { throw new IllegalStateException ( "Cannot link a node to itself!" ) ; } final T next = listGetNext ( currentElement ) ; if ( next == null ) { linkNodes ( getSiblingLinkType ( ) , currentElement , newElement ) ; } else { // unlink predecessor and successor
unlinkNodes ( getSiblingLinkType ( ) , currentElement , next ) ; // link predecessor to new element
linkNodes ( getSiblingLinkType ( ) , currentElement , newElement ) ; // dont create self link
if ( ! newElement . getUuid ( ) . equals ( next . getUuid ( ) ) ) { // link new element to successor
linkNodes ( getSiblingLinkType ( ) , newElement , next ) ; } } |
public class RTMP { /** * Returns channel information for a given channel id .
* @ param channelId
* @ return channel info */
private ChannelInfo getChannelInfo ( int channelId ) { } } | ChannelInfo info = channels . putIfAbsent ( channelId , new ChannelInfo ( ) ) ; if ( info == null ) { info = channels . get ( channelId ) ; } return info ; |
public class XSLTElementProcessor { /** * Set the properties of an object from the given attribute list .
* @ param handler The stylesheet ' s Content handler , needed for
* error reporting .
* @ param rawName The raw name of the owner element , needed for
* error reporting .
* @ param attributes The list of attributes .
* @ param target The target element where the properties will be set .
* @ param throwError True if it should throw an error if an
* attribute is not defined .
* @ return the attributes not allowed on this element .
* @ throws TransformerException */
Attributes setPropertiesFromAttributes ( StylesheetHandler handler , String rawName , Attributes attributes , ElemTemplateElement target , boolean throwError ) throws org . xml . sax . SAXException { } } | XSLTElementDef def = getElemDef ( ) ; AttributesImpl undefines = null ; boolean isCompatibleMode = ( ( null != handler . getStylesheet ( ) && handler . getStylesheet ( ) . getCompatibleMode ( ) ) || ! throwError ) ; if ( isCompatibleMode ) undefines = new AttributesImpl ( ) ; // Keep track of which XSLTAttributeDefs have been processed , so
// I can see which default values need to be set .
List processedDefs = new ArrayList ( ) ; // Keep track of XSLTAttributeDefs that were invalid
List errorDefs = new ArrayList ( ) ; int nAttrs = attributes . getLength ( ) ; for ( int i = 0 ; i < nAttrs ; i ++ ) { String attrUri = attributes . getURI ( i ) ; // Hack for Crimson . - sb
if ( ( null != attrUri ) && ( attrUri . length ( ) == 0 ) && ( attributes . getQName ( i ) . startsWith ( "xmlns:" ) || attributes . getQName ( i ) . equals ( "xmlns" ) ) ) { attrUri = org . apache . xalan . templates . Constants . S_XMLNAMESPACEURI ; } String attrLocalName = attributes . getLocalName ( i ) ; XSLTAttributeDef attrDef = def . getAttributeDef ( attrUri , attrLocalName ) ; if ( null == attrDef ) { if ( ! isCompatibleMode ) { // Then barf , because this element does not allow this attribute .
handler . error ( XSLTErrorResources . ER_ATTR_NOT_ALLOWED , new Object [ ] { attributes . getQName ( i ) , rawName } , null ) ; // " \ " " + attributes . getQName ( i ) + " \ " "
// + " attribute is not allowed on the " + rawName
// + " element ! " , null ) ;
} else { undefines . addAttribute ( attrUri , attrLocalName , attributes . getQName ( i ) , attributes . getType ( i ) , attributes . getValue ( i ) ) ; } } else { // handle secure processing
if ( handler . getStylesheetProcessor ( ) == null ) System . out . println ( "stylesheet processor null" ) ; if ( attrDef . getName ( ) . compareTo ( "*" ) == 0 && handler . getStylesheetProcessor ( ) . isSecureProcessing ( ) ) { // foreign attributes are not allowed in secure processing mode
// Then barf , because this element does not allow this attribute .
handler . error ( XSLTErrorResources . ER_ATTR_NOT_ALLOWED , new Object [ ] { attributes . getQName ( i ) , rawName } , null ) ; // " \ " " + attributes . getQName ( i ) + " \ " "
// + " attribute is not allowed on the " + rawName
// + " element ! " , null ) ;
} else { boolean success = attrDef . setAttrValue ( handler , attrUri , attrLocalName , attributes . getQName ( i ) , attributes . getValue ( i ) , target ) ; // Now we only add the element if it passed a validation check
if ( success ) processedDefs . add ( attrDef ) ; else errorDefs . add ( attrDef ) ; } } } XSLTAttributeDef [ ] attrDefs = def . getAttributes ( ) ; int nAttrDefs = attrDefs . length ; for ( int i = 0 ; i < nAttrDefs ; i ++ ) { XSLTAttributeDef attrDef = attrDefs [ i ] ; String defVal = attrDef . getDefault ( ) ; if ( null != defVal ) { if ( ! processedDefs . contains ( attrDef ) ) { attrDef . setDefAttrValue ( handler , target ) ; } } if ( attrDef . getRequired ( ) ) { if ( ( ! processedDefs . contains ( attrDef ) ) && ( ! errorDefs . contains ( attrDef ) ) ) handler . error ( XSLMessages . createMessage ( XSLTErrorResources . ER_REQUIRES_ATTRIB , new Object [ ] { rawName , attrDef . getName ( ) } ) , null ) ; } } return undefines ; |
public class POContextStack { /** * Pop a non - stateful context from the Stack . Stateful contexts can be removed with { @ link # clearStateContexts ( ) } */
@ Override public synchronized IPOContext pop ( ) { } } | IPOContext obj = peek ( ) ; int len = size ( ) ; for ( int i = len - 1 ; i >= 0 ; i -- ) { if ( ! this . get ( i ) . isStateful ( ) ) { removeElementAt ( i ) ; return obj ; } } return obj ; |
public class ClusterManager { /** * Allows the management thread to passively take part in the cluster
* operations .
* Other cluster members will not be made aware of this instance . */
public void launchAuto ( boolean active ) { } } | killAuto ( ) ; if ( mSock != null ) { try { mAuto = new AutomaticClusterManagementThread ( this , mCluster . getClusterName ( ) , active ) ; } catch ( Exception e ) { mAuto = new AutomaticClusterManagementThread ( this , active ) ; } if ( mAuto != null ) { mAuto . start ( ) ; } } |
public class XAbstractWhileExpressionImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setPredicate ( XExpression newPredicate ) { } } | if ( newPredicate != predicate ) { NotificationChain msgs = null ; if ( predicate != null ) msgs = ( ( InternalEObject ) predicate ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XABSTRACT_WHILE_EXPRESSION__PREDICATE , null , msgs ) ; if ( newPredicate != null ) msgs = ( ( InternalEObject ) newPredicate ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XABSTRACT_WHILE_EXPRESSION__PREDICATE , null , msgs ) ; msgs = basicSetPredicate ( newPredicate , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XbasePackage . XABSTRACT_WHILE_EXPRESSION__PREDICATE , newPredicate , newPredicate ) ) ; |
public class ShellCommandParser { /** * Parses arguments of a shell command line
* @ param args the arguments to parse
* @ param excluded a collection of commands that should not be parsed
* @ return the parser result
* @ throws IntrospectionException if a { @ link de . undercouch . citeproc . tool . CSLToolCommand }
* could not be introspected
* @ throws InvalidOptionException if the command line contains an
* option ( only commands are allowed in the interactive shell ) */
public static Result parse ( String [ ] args , Collection < Class < ? extends Command > > excluded ) throws IntrospectionException , InvalidOptionException { } } | List < Class < ? extends Command > > classes = new ArrayList < > ( ) ; return getCommandClass ( args , 0 , classes , new HashSet < > ( excluded ) ) ; |
public class ListDialogDecorator { /** * Adapts the color of the dialog ' s list items . */
private void adaptItemColor ( ) { } } | if ( adapter != null ) { RecyclerView . Adapter < ? > wrappedAdapter = adapter . getWrappedAdapter ( ) ; if ( wrappedAdapter instanceof ArrayRecyclerViewAdapter ) { ( ( ArrayRecyclerViewAdapter ) wrappedAdapter ) . setItemColor ( itemColor ) ; } } |
public class ServerConfiguration { /** * Gets the value for the given key as a list .
* @ param key the key to get the value for
* @ param delimiter the delimiter to split the values
* @ return the list of values for the given key */
public static List < String > getList ( PropertyKey key , String delimiter ) { } } | return sConf . getList ( key , delimiter ) ; |
public class DebugRingSet { /** * { @ inheritDoc } */
@ Override public boolean contains ( IAtom atom ) { } } | logger . debug ( "Contains atom: " , super . contains ( atom ) ) ; return super . contains ( atom ) ; |
public class Bagging { /** * Creates a new data set from the given sample counts . Points sampled
* multiple times will have multiple entries in the data set .
* @ param dataSet the data set that was sampled from
* @ param sampledCounts the sampling values obtained from
* { @ link # sampleWithReplacement ( int [ ] , int , java . util . Random ) }
* @ return a new sampled classification data set */
public static ClassificationDataSet getSampledDataSet ( ClassificationDataSet dataSet , int [ ] sampledCounts ) { } } | ClassificationDataSet destination = new ClassificationDataSet ( dataSet . getNumNumericalVars ( ) , dataSet . getCategories ( ) , dataSet . getPredicting ( ) ) ; for ( int i = 0 ; i < sampledCounts . length ; i ++ ) for ( int j = 0 ; j < sampledCounts [ i ] ; j ++ ) { DataPoint dp = dataSet . getDataPoint ( i ) ; destination . addDataPoint ( dp . getNumericalValues ( ) , dp . getCategoricalValues ( ) , dataSet . getDataPointCategory ( i ) ) ; } return destination ; |
public class MetaLocale { /** * Render this locale to a string in compact or expanded form . */
private String render ( boolean compact ) { } } | StringBuilder buf = new StringBuilder ( ) ; render ( buf , LANGUAGE , UNDEF_LANGUAGE , compact ) ; render ( buf , SCRIPT , UNDEF_SCRIPT , compact ) ; render ( buf , TERRITORY , UNDEF_TERRITORY , compact ) ; render ( buf , VARIANT , UNDEF_VARIANT , compact ) ; return buf . toString ( ) ; |
public class EnumerableType { /** * Returns a new Enumerable ( unmodifiable when possible )
* with items from sourceEnumerable mapped by mapFunction . */
public Object map ( Object sourceEnumerable , Function mapFunction ) { } } | return map ( sourceEnumerable , mapFunction , false ) ; |
public class RepositoryResolver { /** * Create the install lists for the resources which we were asked to resolve
* @ return the install lists */
List < List < RepositoryResource > > createInstallLists ( ) { } } | List < List < RepositoryResource > > installLists = new ArrayList < > ( ) ; // Create install list for each sample
for ( SampleResource sample : samplesToInstall ) { installLists . add ( createInstallList ( sample ) ) ; } // Create install list for each requested feature
for ( String featureName : requestedFeatureNames ) { List < RepositoryResource > installList = createInstallList ( featureName ) ; // May get an empty list if the requested feature is already installed
if ( ! installList . isEmpty ( ) ) { installLists . add ( installList ) ; } } // Create install list for each autofeature which wasn ' t explicitly requested ( otherwise we ' d have covered it above ) and isn ' t already installed
for ( ProvisioningFeatureDefinition feature : resolvedFeatures . values ( ) ) { if ( feature . isAutoFeature ( ) && ! requestedFeatureNames . contains ( feature . getSymbolicName ( ) ) && feature instanceof KernelResolverEsa ) { installLists . add ( createInstallList ( feature . getSymbolicName ( ) ) ) ; } } return installLists ; |
public class LifecycleManager { /** * Unregister query related MBeans for a cache , primarily the statistics , but also all other MBeans from the same
* related group . */
private void unregisterQueryMBeans ( ComponentRegistry cr , String cacheName ) { } } | if ( mbeanServer != null ) { try { InfinispanQueryStatisticsInfo stats = cr . getComponent ( InfinispanQueryStatisticsInfo . class ) ; if ( stats != null ) { GlobalJmxStatisticsConfiguration jmxConfig = cr . getGlobalComponentRegistry ( ) . getGlobalConfiguration ( ) . globalJmxStatistics ( ) ; String queryGroupName = getQueryGroupName ( jmxConfig . cacheManagerName ( ) , cacheName ) ; String queryMBeanFilter = stats . getObjectName ( ) . getDomain ( ) + ":" + queryGroupName + ",*" ; JmxUtil . unregisterMBeans ( queryMBeanFilter , mbeanServer ) ; } } catch ( Exception e ) { throw new CacheException ( "Unable to unregister query MBeans" , e ) ; } } |
public class gslbvserver { /** * Use this API to fetch filtered set of gslbvserver resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static gslbvserver [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | gslbvserver obj = new gslbvserver ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; gslbvserver [ ] response = ( gslbvserver [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class JCudaDriver { /** * Allocates pitched device memory .
* < pre >
* CUresult cuMemAllocPitch (
* CUdeviceptr * dptr ,
* size _ t * pPitch ,
* size _ t WidthInBytes ,
* size _ t Height ,
* unsigned int ElementSizeBytes )
* < / pre >
* < div >
* < p > Allocates pitched device memory .
* Allocates at least < tt > WidthInBytes < / tt > * < tt > Height < / tt > bytes of
* linear memory on the device and returns in < tt > * dptr < / tt > a pointer
* to the allocated memory . The function may pad the allocation to ensure
* that corresponding pointers in any given
* row will continue to meet the alignment
* requirements for coalescing as the address is updated from row to row .
* < tt > ElementSizeBytes < / tt > specifies the size of the largest reads and
* writes that will be performed on the memory range . < tt > ElementSizeBytes < / tt > may be 4 , 8 or 16 ( since coalesced memory
* transactions are not possible on other data sizes ) . If < tt > ElementSizeBytes < / tt > is smaller than the actual read / write size of a
* kernel , the kernel will run correctly , but possibly at reduced speed .
* The
* pitch returned in < tt > * pPitch < / tt > by
* cuMemAllocPitch ( ) is the width in bytes of the allocation . The intended
* usage of pitch is as a separate parameter of the allocation , used to
* compute addresses within the 2D array .
* Given the row and column of an array element of type < strong > T < / strong > ,
* the address is computed as :
* < pre > T * pElement = ( T * ) ( ( char * ) BaseAddress
* + Row * Pitch ) + Column ; < / pre >
* < p > The pitch returned by cuMemAllocPitch ( )
* is guaranteed to work with cuMemcpy2D ( ) under all circumstances . For
* allocations of 2D arrays , it is recommended that programmers consider
* performing pitch allocations
* using cuMemAllocPitch ( ) . Due to alignment
* restrictions in the hardware , this is especially true if the application
* will be performing 2D memory copies
* between different regions of device
* memory ( whether linear memory or CUDA arrays ) .
* < p > The byte alignment of the pitch returned
* by cuMemAllocPitch ( ) is guaranteed to match or exceed the alignment
* requirement for texture binding with cuTexRefSetAddress2D ( ) .
* < div >
* < span > Note : < / span >
* < p > Note that this
* function may also return error codes from previous , asynchronous
* launches .
* < / div >
* < / div >
* @ param dptr Returned device pointer
* @ param pPitch Returned pitch of allocation in bytes
* @ param WidthInBytes Requested allocation width in bytes
* @ param Height Requested allocation height in rows
* @ param ElementSizeBytes Size of largest reads / writes for range
* @ return CUDA _ SUCCESS , CUDA _ ERROR _ DEINITIALIZED , CUDA _ ERROR _ NOT _ INITIALIZED ,
* CUDA _ ERROR _ INVALID _ CONTEXT , CUDA _ ERROR _ INVALID _ VALUE ,
* CUDA _ ERROR _ OUT _ OF _ MEMORY
* @ see JCudaDriver # cuArray3DCreate
* @ see JCudaDriver # cuArray3DGetDescriptor
* @ see JCudaDriver # cuArrayCreate
* @ see JCudaDriver # cuArrayDestroy
* @ see JCudaDriver # cuArrayGetDescriptor
* @ see JCudaDriver # cuMemAlloc
* @ see JCudaDriver # cuMemAllocHost
* @ see JCudaDriver # cuMemcpy2D
* @ see JCudaDriver # cuMemcpy2DAsync
* @ see JCudaDriver # cuMemcpy2DUnaligned
* @ see JCudaDriver # cuMemcpy3D
* @ see JCudaDriver # cuMemcpy3DAsync
* @ see JCudaDriver # cuMemcpyAtoA
* @ see JCudaDriver # cuMemcpyAtoD
* @ see JCudaDriver # cuMemcpyAtoH
* @ see JCudaDriver # cuMemcpyAtoHAsync
* @ see JCudaDriver # cuMemcpyDtoA
* @ see JCudaDriver # cuMemcpyDtoD
* @ see JCudaDriver # cuMemcpyDtoDAsync
* @ see JCudaDriver # cuMemcpyDtoH
* @ see JCudaDriver # cuMemcpyDtoHAsync
* @ see JCudaDriver # cuMemcpyHtoA
* @ see JCudaDriver # cuMemcpyHtoAAsync
* @ see JCudaDriver # cuMemcpyHtoD
* @ see JCudaDriver # cuMemcpyHtoDAsync
* @ see JCudaDriver # cuMemFree
* @ see JCudaDriver # cuMemFreeHost
* @ see JCudaDriver # cuMemGetAddressRange
* @ see JCudaDriver # cuMemGetInfo
* @ see JCudaDriver # cuMemHostAlloc
* @ see JCudaDriver # cuMemHostGetDevicePointer
* @ see JCudaDriver # cuMemsetD2D8
* @ see JCudaDriver # cuMemsetD2D16
* @ see JCudaDriver # cuMemsetD2D32
* @ see JCudaDriver # cuMemsetD8
* @ see JCudaDriver # cuMemsetD16
* @ see JCudaDriver # cuMemsetD32 */
public static int cuMemAllocPitch ( CUdeviceptr dptr , long pPitch [ ] , long WidthInBytes , long Height , int ElementSizeBytes ) { } } | return checkResult ( cuMemAllocPitchNative ( dptr , pPitch , WidthInBytes , Height , ElementSizeBytes ) ) ; |
public class HttpResponse { /** * 将结果对象输出
* @ param obj 输出对象 */
@ SuppressWarnings ( "unchecked" ) public void finish ( final Object obj ) { } } | finish ( request . getJsonConvert ( ) , ( Type ) null , obj ) ; |
public class SimpleGapFunction { /** * Executes the gap function . If the first interval ( < code > lhs < / code > ) is
* before the second ( < code > rhs < / code > ) and the distance between them is
* less than or equal to the < code > maximumGap < / code > , then
* this method returns < code > true < / code > , otherwise it returns
* < code > false < / code > .
* @ param lhs the first { @ link Interval } .
* @ param rhs the second { @ link Interval } .
* @ return < code > true < / code > or < code > false < / code > . */
@ Override public boolean execute ( Interval lhs , Interval rhs ) { } } | if ( this . relation == null ) { return false ; } else { return this . relation . hasRelation ( lhs , rhs ) ; } |
public class PopupController { /** * init fxml when loaded . */
@ PostConstruct public void init ( ) { } } | try { popup = new JFXPopup ( FXMLLoader . load ( getClass ( ) . getResource ( "/fxml/ui/popup/DemoPopup.fxml" ) ) ) ; } catch ( IOException ioExc ) { ioExc . printStackTrace ( ) ; } burger1 . setOnMouseClicked ( ( e ) -> popup . show ( rippler1 , PopupVPosition . TOP , PopupHPosition . LEFT ) ) ; burger2 . setOnMouseClicked ( ( e ) -> popup . show ( rippler2 , PopupVPosition . TOP , PopupHPosition . RIGHT ) ) ; burger3 . setOnMouseClicked ( ( e ) -> popup . show ( rippler3 , PopupVPosition . BOTTOM , PopupHPosition . LEFT ) ) ; burger4 . setOnMouseClicked ( ( e ) -> popup . show ( rippler4 , PopupVPosition . BOTTOM , PopupHPosition . RIGHT ) ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EEnum getOBPRGLength ( ) { } } | if ( obprgLengthEEnum == null ) { obprgLengthEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 53 ) ; } return obprgLengthEEnum ; |
public class CrowdingDistance { /** * Assigns crowding distances to all solutions in a < code > SolutionSet < / code > .
* @ param solutionList The < code > SolutionSet < / code > .
* @ throws org . uma . jmetal . util . JMetalException */
@ Override public void computeDensityEstimator ( List < S > solutionList ) { } } | int size = solutionList . size ( ) ; if ( size == 0 ) { return ; } if ( size == 1 ) { solutionList . get ( 0 ) . setAttribute ( getAttributeIdentifier ( ) , Double . POSITIVE_INFINITY ) ; return ; } if ( size == 2 ) { solutionList . get ( 0 ) . setAttribute ( getAttributeIdentifier ( ) , Double . POSITIVE_INFINITY ) ; solutionList . get ( 1 ) . setAttribute ( getAttributeIdentifier ( ) , Double . POSITIVE_INFINITY ) ; return ; } // Use a new SolutionSet to avoid altering the original solutionSet
List < S > front = new ArrayList < > ( size ) ; for ( S solution : solutionList ) { front . add ( solution ) ; } for ( int i = 0 ; i < size ; i ++ ) { front . get ( i ) . setAttribute ( getAttributeIdentifier ( ) , 0.0 ) ; } double objetiveMaxn ; double objetiveMinn ; double distance ; int numberOfObjectives = solutionList . get ( 0 ) . getNumberOfObjectives ( ) ; for ( int i = 0 ; i < numberOfObjectives ; i ++ ) { // Sort the population by Obj n
Collections . sort ( front , new ObjectiveComparator < S > ( i ) ) ; objetiveMinn = front . get ( 0 ) . getObjective ( i ) ; objetiveMaxn = front . get ( front . size ( ) - 1 ) . getObjective ( i ) ; // Set de crowding distance
front . get ( 0 ) . setAttribute ( getAttributeIdentifier ( ) , Double . POSITIVE_INFINITY ) ; front . get ( size - 1 ) . setAttribute ( getAttributeIdentifier ( ) , Double . POSITIVE_INFINITY ) ; for ( int j = 1 ; j < size - 1 ; j ++ ) { distance = front . get ( j + 1 ) . getObjective ( i ) - front . get ( j - 1 ) . getObjective ( i ) ; distance = distance / ( objetiveMaxn - objetiveMinn ) ; distance += ( double ) front . get ( j ) . getAttribute ( getAttributeIdentifier ( ) ) ; front . get ( j ) . setAttribute ( getAttributeIdentifier ( ) , distance ) ; } } |
public class SVGPlot { /** * Create a SVG rectangle
* @ param x X coordinate
* @ param y Y coordinate
* @ param w Width
* @ param h Height
* @ return new element */
public Element svgRect ( double x , double y , double w , double h ) { } } | return SVGUtil . svgRect ( document , x , y , w , h ) ; |
public class NetworkTopology { /** * Check if two nodes are on the same rack
* @ param node1 one node
* @ param node2 another node
* @ return true if node1 and node2 are pm the same rack ; false otherwise
* @ exception IllegalArgumentException when either node1 or node2 is null , or
* node1 or node2 do not belong to the cluster */
public boolean isOnSameRack ( Node node1 , Node node2 ) { } } | if ( node1 == null || node2 == null ) { return false ; } netlock . readLock ( ) . lock ( ) ; try { return node1 . getParent ( ) == node2 . getParent ( ) ; } finally { netlock . readLock ( ) . unlock ( ) ; } |
public class DB { /** * Selects user metadata with a specified key .
* @ param userId The user id .
* @ param key The metadata key .
* @ return The list of values .
* @ throws SQLException on database error . */
public List < Meta > userMetadata ( final long userId , final String key ) throws SQLException { } } | Connection conn = null ; PreparedStatement stmt = null ; ResultSet rs = null ; List < Meta > meta = Lists . newArrayListWithExpectedSize ( 16 ) ; Timer . Context ctx = metrics . userMetadataTimer . time ( ) ; try { conn = connectionSupplier . getConnection ( ) ; stmt = conn . prepareStatement ( selectUserMetaKeySQL ) ; stmt . setLong ( 1 , userId ) ; stmt . setString ( 2 , key ) ; rs = stmt . executeQuery ( ) ; while ( rs . next ( ) ) { meta . add ( new Meta ( rs . getLong ( 1 ) , rs . getString ( 2 ) , rs . getString ( 3 ) ) ) ; } return meta ; } finally { ctx . stop ( ) ; SQLUtil . closeQuietly ( conn , stmt , rs ) ; } |
public class PlannerReader { /** * This method extracts data for a single calendar from a Planner file .
* @ param plannerCalendar Calendar data
* @ param parentMpxjCalendar parent of derived calendar */
private void readCalendar ( net . sf . mpxj . planner . schema . Calendar plannerCalendar , ProjectCalendar parentMpxjCalendar ) throws MPXJException { } } | // Create a calendar instance
ProjectCalendar mpxjCalendar = m_projectFile . addCalendar ( ) ; // Populate basic details
mpxjCalendar . setUniqueID ( getInteger ( plannerCalendar . getId ( ) ) ) ; mpxjCalendar . setName ( plannerCalendar . getName ( ) ) ; mpxjCalendar . setParent ( parentMpxjCalendar ) ; // Set working and non working days
DefaultWeek dw = plannerCalendar . getDefaultWeek ( ) ; setWorkingDay ( mpxjCalendar , Day . MONDAY , dw . getMon ( ) ) ; setWorkingDay ( mpxjCalendar , Day . TUESDAY , dw . getTue ( ) ) ; setWorkingDay ( mpxjCalendar , Day . WEDNESDAY , dw . getWed ( ) ) ; setWorkingDay ( mpxjCalendar , Day . THURSDAY , dw . getThu ( ) ) ; setWorkingDay ( mpxjCalendar , Day . FRIDAY , dw . getFri ( ) ) ; setWorkingDay ( mpxjCalendar , Day . SATURDAY , dw . getSat ( ) ) ; setWorkingDay ( mpxjCalendar , Day . SUNDAY , dw . getSun ( ) ) ; // Set working hours
processWorkingHours ( mpxjCalendar , plannerCalendar ) ; // Process exception days
processExceptionDays ( mpxjCalendar , plannerCalendar ) ; m_eventManager . fireCalendarReadEvent ( mpxjCalendar ) ; // Process any derived calendars
List < net . sf . mpxj . planner . schema . Calendar > calendarList = plannerCalendar . getCalendar ( ) ; for ( net . sf . mpxj . planner . schema . Calendar cal : calendarList ) { readCalendar ( cal , mpxjCalendar ) ; } |
public class FacebookAlbumListFragment { /** * Asynchronously requests the user name associated with the linked account . Tries to finish the
* { @ link FacebookSettingsActivity } when completed . */
private void requestAccountName ( ) { } } | GraphUserCallback callback = new GraphUserCallback ( ) { @ Override public void onCompleted ( GraphUser user , Response response ) { FacebookSettingsActivity activity = ( FacebookSettingsActivity ) getActivity ( ) ; if ( activity == null || activity . isFinishing ( ) ) { return ; } if ( response != null && response . getError ( ) == null && user != null ) { String accountName = user . getFirstName ( ) + " " + user . getLastName ( ) ; if ( accountName != null && accountName . length ( ) > 0 ) { activity . mAccountName = accountName ; } else { activity . mHasErrorOccurred = true ; } } else { activity . mHasErrorOccurred = true ; } activity . tryFinish ( ) ; } } ; mFacebookEndpoint . requestAccountName ( callback ) ; |
public class SentenceDetectorME { /** * Detect the position of the first words of sentences in a String .
* @ param s The string to be processed .
* @ return A integer array containing the positions of the end index of
* every sentence */
public int [ ] sentPosDetect ( String s ) { } } | double sentProb = 1 ; sentProbs . clear ( ) ; StringBuffer sb = new StringBuffer ( s ) ; List enders = scanner . getPositions ( s ) ; List positions = new ArrayList ( enders . size ( ) ) ; for ( int i = 0 , end = enders . size ( ) , index = 0 ; i < end ; i ++ ) { Integer candidate = ( Integer ) enders . get ( i ) ; int cint = candidate . intValue ( ) ; // skip over the leading parts of non - token final delimiters
int fws = getFirstWS ( s , cint + 1 ) ; if ( ( ( i + 1 ) < end ) && ( ( ( Integer ) enders . get ( i + 1 ) ) . intValue ( ) < fws ) ) { continue ; } Pair pair = new Pair ( sb , candidate ) ; double [ ] probs = model . eval ( cgen . getContext ( pair ) ) ; String bestOutcome = model . getBestOutcome ( probs ) ; sentProb *= probs [ model . getIndex ( bestOutcome ) ] ; if ( bestOutcome . equals ( "T" ) && isAcceptableBreak ( s , index , cint ) ) { if ( index != cint ) { positions . add ( INT_POOL . get ( getFirstNonWS ( s , getFirstWS ( s , cint + 1 ) ) ) ) ; sentProbs . add ( new Double ( probs [ model . getIndex ( bestOutcome ) ] ) ) ; } index = cint + 1 ; } } int [ ] sentPositions = new int [ positions . size ( ) ] ; for ( int i = 0 ; i < sentPositions . length ; i ++ ) { sentPositions [ i ] = ( ( Integer ) positions . get ( i ) ) . intValue ( ) ; } return sentPositions ; |
public class GVRAccessibilitySpeech { /** * Start speech recognizer .
* @ param speechListener */
public void start ( GVRAccessibilitySpeechListener speechListener ) { } } | mTts . setSpeechListener ( speechListener ) ; mTts . getSpeechRecognizer ( ) . startListening ( mTts . getSpeechRecognizerIntent ( ) ) ; |
public class BackupClientImpl { /** * failureProcessing .
* @ param data
* response data
* @ return String
* result
* @ throws BackupExecuteException
* will be generated BackupExecuteException */
private String failureProcessing ( BackupAgentResponse response ) throws BackupExecuteException { } } | try { String result = "\nFailure :\n" + "\tstatus code : " + response . getStatus ( ) + "\n" + "\tmessage : " + new String ( response . getResponseData ( ) , "UTF-8" ) + "\n\n" ; return result ; } catch ( UnsupportedEncodingException e ) { throw new BackupExecuteException ( "Can not encoded the responce : " + e . getMessage ( ) , e ) ; } |
public class SubscribableQueue { /** * Adds a collection of objects to the queue , targeting only the subscriber identified by the provided identifier object , unless said object is null . The added objects will only be visible to the
* subscriber .
* @ param c The collection of objects to add
* @ param identifier The identifier object , can be null
* @ return True if this queue changed as a result of the call
* @ see # addAll ( java . util . Collection ) */
public boolean addAll ( Collection < ? extends T > c , Object identifier ) { } } | checkNotNullArgument ( c ) ; if ( isPublisherThread ( ) ) { boolean changed = false ; if ( identifier != null ) { final Long id = subscriberIdentifiers . get ( identifier ) ; checkNotNullIdentifier ( id ) ; changed = queues . get ( id ) . addAll ( c ) ; } else { for ( Queue < T > queue : queues . values ( ) ) { if ( queue . addAll ( c ) ) { changed = true ; } } } return changed ; } return getCurrentThreadQueue ( ) . addAll ( c ) ; |
public class EntityTagResourceProvider { /** * todo : response for case mixed case where some subset of creations fail */
@ Override public Collection < String > createResources ( Request request ) throws InvalidQueryException , ResourceNotFoundException , ResourceAlreadyExistsException { } } | Collection < String > relativeUrls = new ArrayList < > ( ) ; AtlasQuery atlasQuery = queryFactory . createEntityQuery ( request ) ; Collection < String > guids = new ArrayList < > ( ) ; for ( Map < String , Object > entityMap : atlasQuery . execute ( ) ) { guids . add ( String . valueOf ( entityMap . get ( "id" ) ) ) ; } Collection < Map < String , String > > tagMaps = request . getProperty ( "tags" ) ; for ( Map < String , String > tagMap : tagMaps ) { Result termResult = getTermQueryResult ( tagMap . get ( "name" ) ) ; relativeUrls . addAll ( tagEntities ( guids , termResult . getPropertyMaps ( ) . iterator ( ) . next ( ) ) ) ; } return relativeUrls ; |
public class InputSettings { /** * Used to select the audio stream to decode for inputs that have multiple available .
* @ param audioSelectors
* Used to select the audio stream to decode for inputs that have multiple available . */
public void setAudioSelectors ( java . util . Collection < AudioSelector > audioSelectors ) { } } | if ( audioSelectors == null ) { this . audioSelectors = null ; return ; } this . audioSelectors = new java . util . ArrayList < AudioSelector > ( audioSelectors ) ; |
public class CoordinatesResolver { /** * { @ inheritDoc } */
public final File resolve ( String value ) throws Exception { } } | MavenGAV mavenGAV = resolveCoordinates ( value ) ; logger . info ( String . format ( "Using maven coordinates '%s' as project" , mavenGAV ) ) ; resolveTemporaryProjectFile ( ) ; return resolveProjectFile ( ) ; |
public class AmfView { /** * { @ inheritDoc } */
@ Override protected void renderMergedOutputModel ( Map < String , Object > model , HttpServletRequest request , HttpServletResponse response ) throws Exception { } } | Object value = filterModel ( model ) ; try { AmfTrace trace = null ; if ( log . isDebugEnabled ( ) ) { trace = new AmfTrace ( ) ; } ByteArrayOutputStream outBuffer = new ByteArrayOutputStream ( ) ; SerializationContext context = new SerializationContext ( ) ; Amf3Output out = new Amf3Output ( context ) ; if ( trace != null ) { out . setDebugTrace ( trace ) ; } out . setOutputStream ( outBuffer ) ; out . writeObject ( value ) ; out . flush ( ) ; outBuffer . flush ( ) ; response . setContentLength ( outBuffer . size ( ) ) ; outBuffer . writeTo ( response . getOutputStream ( ) ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Wrote AMF message:\n" + trace ) ; } } finally { FlexContext . clearThreadLocalObjects ( ) ; SerializationContext . clearThreadLocalObjects ( ) ; } |
public class EmailTemplate { /** * Process a PluginMessage and creates email content based on templates .
* @ param msg the PluginMessage to be processed
* @ return a Map with following entries :
* - " emailSubject " : Subject of the email
* - " emailBodyPlain " : Content for plain text email
* - " emailBodyHtml " : Content for html email
* @ throws Exception on any problem */
public Map < String , String > processTemplate ( ActionMessage msg ) throws Exception { } } | Map < String , String > emailProcessed = new HashMap < > ( ) ; PluginMessageDescription pmDesc = new PluginMessageDescription ( msg ) ; // Prepare emailSubject directly from PluginMessageDescription class
emailProcessed . put ( "emailSubject" , pmDesc . getEmailSubject ( ) ) ; // Check if templates are defined in properties
String plain ; String html ; String templateLocale = pmDesc . getProps ( ) != null ? pmDesc . getProps ( ) . get ( EmailPlugin . PROP_TEMPLATE_LOCALE ) : null ; if ( templateLocale != null ) { plain = pmDesc . getProps ( ) . get ( EmailPlugin . PROP_TEMPLATE_PLAIN + "." + templateLocale ) ; html = pmDesc . getProps ( ) . get ( EmailPlugin . PROP_TEMPLATE_HTML + "." + templateLocale ) ; } else { plain = pmDesc . getProps ( ) != null ? pmDesc . getProps ( ) . get ( EmailPlugin . PROP_TEMPLATE_PLAIN ) : null ; html = pmDesc . getProps ( ) != null ? pmDesc . getProps ( ) . get ( EmailPlugin . PROP_TEMPLATE_HTML ) : null ; } /* Invoke freemarker template with PluginMessageDescription as root object for dynamic data .
PluginMessageDescription fields are accessible within . ftl templates . */
StringWriter writerPlain = new StringWriter ( ) ; StringWriter writerHtml = new StringWriter ( ) ; if ( ! isEmpty ( plain ) ) { StringReader plainReader = new StringReader ( plain ) ; ftlTemplate = new Template ( "plainTemplate" , plainReader , ftlCfg ) ; ftlTemplate . process ( pmDesc , writerPlain ) ; } else { ftlTemplatePlain . process ( pmDesc , writerPlain ) ; } if ( ! isEmpty ( html ) ) { StringReader htmlReader = new StringReader ( html ) ; ftlTemplate = new Template ( "htmlTemplate" , htmlReader , ftlCfg ) ; ftlTemplate . process ( pmDesc , writerHtml ) ; } else { ftlTemplateHtml . process ( pmDesc , writerHtml ) ; } writerPlain . flush ( ) ; writerPlain . close ( ) ; emailProcessed . put ( "emailBodyPlain" , writerPlain . toString ( ) ) ; writerHtml . flush ( ) ; writerHtml . close ( ) ; emailProcessed . put ( "emailBodyHtml" , writerHtml . toString ( ) ) ; return emailProcessed ; |
public class BaseHashMap { /** * clear all the key / value data in a range . */
private void clearElementArrays ( final int from , final int to ) { } } | if ( isIntKey ) { int counter = to ; while ( -- counter >= from ) { intKeyTable [ counter ] = 0 ; } } else if ( isLongKey ) { int counter = to ; while ( -- counter >= from ) { longKeyTable [ counter ] = 0 ; } } else if ( isObjectKey ) { int counter = to ; while ( -- counter >= from ) { objectKeyTable [ counter ] = null ; } } if ( isIntValue ) { int counter = to ; while ( -- counter >= from ) { intValueTable [ counter ] = 0 ; } } else if ( isLongValue ) { int counter = to ; while ( -- counter >= from ) { longValueTable [ counter ] = 0 ; } } else if ( isObjectValue ) { int counter = to ; while ( -- counter >= from ) { objectValueTable [ counter ] = null ; } } if ( accessTable != null ) { int counter = to ; while ( -- counter >= from ) { accessTable [ counter ] = 0 ; } } if ( multiValueTable != null ) { int counter = to ; while ( -- counter >= from ) { multiValueTable [ counter ] = false ; } } |
public class SVGParser { /** * < view > element */
private void view ( Attributes attributes ) throws SVGParseException { } } | debug ( "<view>" ) ; if ( currentElement == null ) throw new SVGParseException ( "Invalid document. Root element must be <svg>" ) ; SVG . View obj = new SVG . View ( ) ; obj . document = svgDocument ; obj . parent = currentElement ; parseAttributesCore ( obj , attributes ) ; parseAttributesConditional ( obj , attributes ) ; parseAttributesViewBox ( obj , attributes ) ; currentElement . addChild ( obj ) ; currentElement = obj ; |
public class GroovyMain { /** * package - level visibility for testing purposes ( just usage / errors at this stage ) */
static void processArgs ( String [ ] args , final PrintStream out , final PrintStream err ) { } } | GroovyCommand groovyCommand = new GroovyCommand ( ) ; CommandLine parser = new CommandLine ( groovyCommand ) . setUnmatchedArgumentsAllowed ( true ) . setStopAtUnmatched ( true ) ; try { List < CommandLine > result = parser . parse ( args ) ; if ( CommandLine . printHelpIfRequested ( result , out , err , Help . Ansi . AUTO ) ) { return ; } // TODO : pass printstream ( s ) down through process
if ( ! groovyCommand . process ( parser ) ) { // If we fail , then exit with an error so scripting frameworks can catch it .
System . exit ( 1 ) ; } } catch ( ParameterException ex ) { // command line arguments could not be parsed
err . println ( ex . getMessage ( ) ) ; ex . getCommandLine ( ) . usage ( err ) ; } catch ( IOException ioe ) { err . println ( "error: " + ioe . getMessage ( ) ) ; } |
public class SeaGlassTabbedPaneUI { /** * Update the Synth styles when something changes .
* @ param c the component . */
private void updateStyle ( JTabbedPane c ) { } } | SeaGlassContext context = getContext ( c , ENABLED ) ; SynthStyle oldStyle = style ; style = SeaGlassLookAndFeel . updateStyle ( context , this ) ; tabPlacement = tabPane . getTabPlacement ( ) ; orientation = ControlOrientation . getOrientation ( tabPlacement == LEFT || tabPlacement == RIGHT ? VERTICAL : HORIZONTAL ) ; closeButtonArmedIndex = - 1 ; Object o = c . getClientProperty ( "JTabbedPane.closeButton" ) ; if ( o != null && "left" . equals ( o ) ) { tabCloseButtonPlacement = LEFT ; } else if ( o != null && "right" . equals ( o ) ) { tabCloseButtonPlacement = RIGHT ; } else { tabCloseButtonPlacement = CENTER ; } closeButtonSize = style . getInt ( context , "closeButtonSize" , 6 ) ; closeButtonInsets = ( Insets ) style . get ( context , "closeButtonInsets" ) ; if ( closeButtonInsets == null ) { closeButtonInsets = new Insets ( 2 , 2 , 2 , 2 ) ; } o = c . getClientProperty ( "JTabbedPane.closeListener" ) ; if ( o != null && o instanceof SeaGlassTabCloseListener ) { if ( tabCloseListener == null ) { tabCloseListener = ( SeaGlassTabCloseListener ) o ; } } // Add properties other than JComponent colors , Borders and
// opacity settings here :
if ( style != oldStyle ) { tabRunOverlay = 0 ; textIconGap = style . getInt ( context , "TabbedPane.textIconGap" , 0 ) ; selectedTabPadInsets = ( Insets ) style . get ( context , "TabbedPane.selectedTabPadInsets" ) ; if ( selectedTabPadInsets == null ) { selectedTabPadInsets = new Insets ( 0 , 0 , 0 , 0 ) ; } if ( oldStyle != null ) { uninstallKeyboardActions ( ) ; installKeyboardActions ( ) ; } } context . dispose ( ) ; if ( tabContext != null ) { tabContext . dispose ( ) ; } tabContext = getContext ( c , Region . TABBED_PANE_TAB , ENABLED ) ; this . tabStyle = SeaGlassLookAndFeel . updateStyle ( tabContext , this ) ; tabInsets = tabStyle . getInsets ( tabContext , null ) ; if ( tabCloseContext != null ) { tabCloseContext . dispose ( ) ; } tabCloseContext = getContext ( c , SeaGlassRegion . TABBED_PANE_TAB_CLOSE_BUTTON , ENABLED ) ; this . tabCloseStyle = SeaGlassLookAndFeel . updateStyle ( tabCloseContext , this ) ; if ( tabAreaContext != null ) { tabAreaContext . dispose ( ) ; } tabAreaContext = getContext ( c , Region . TABBED_PANE_TAB_AREA , ENABLED ) ; this . tabAreaStyle = SeaGlassLookAndFeel . updateStyle ( tabAreaContext , this ) ; tabAreaInsets = tabAreaStyle . getInsets ( tabAreaContext , null ) ; if ( tabContentContext != null ) { tabContentContext . dispose ( ) ; } tabContentContext = getContext ( c , Region . TABBED_PANE_CONTENT , ENABLED ) ; this . tabContentStyle = SeaGlassLookAndFeel . updateStyle ( tabContentContext , this ) ; contentBorderInsets = tabContentStyle . getInsets ( tabContentContext , null ) ; |
public class Atom10Generator { /** * Utility method to serialize an entry to writer . */
public static void serializeEntry ( final Entry entry , final Writer writer ) throws IllegalArgumentException , FeedException , IOException { } } | // Build a feed containing only the entry
final List < Entry > entries = new ArrayList < Entry > ( ) ; entries . add ( entry ) ; final Feed feed1 = new Feed ( ) ; feed1 . setFeedType ( "atom_1.0" ) ; feed1 . setEntries ( entries ) ; // Get Rome to output feed as a JDOM document
final WireFeedOutput wireFeedOutput = new WireFeedOutput ( ) ; final Document feedDoc = wireFeedOutput . outputJDom ( feed1 ) ; // Grab entry element from feed and get JDOM to serialize it
final Element entryElement = feedDoc . getRootElement ( ) . getChildren ( ) . get ( 0 ) ; final XMLOutputter outputter = new XMLOutputter ( ) ; outputter . output ( entryElement , writer ) ; |
public class TSDB { /** * Collects the stats and metrics tracked by this instance .
* @ param collector The collector to use . */
public void collectStats ( final StatsCollector collector ) { } } | final byte [ ] [ ] kinds = { METRICS_QUAL . getBytes ( CHARSET ) , TAG_NAME_QUAL . getBytes ( CHARSET ) , TAG_VALUE_QUAL . getBytes ( CHARSET ) } ; try { final Map < String , Long > used_uids = UniqueId . getUsedUIDs ( this , kinds ) . joinUninterruptibly ( ) ; collectUidStats ( metrics , collector ) ; if ( config . getBoolean ( "tsd.core.uid.random_metrics" ) ) { collector . record ( "uid.ids-used" , 0 , "kind=" + METRICS_QUAL ) ; collector . record ( "uid.ids-available" , 0 , "kind=" + METRICS_QUAL ) ; } else { collector . record ( "uid.ids-used" , used_uids . get ( METRICS_QUAL ) , "kind=" + METRICS_QUAL ) ; collector . record ( "uid.ids-available" , ( Internal . getMaxUnsignedValueOnBytes ( metrics . width ( ) ) - used_uids . get ( METRICS_QUAL ) ) , "kind=" + METRICS_QUAL ) ; } collectUidStats ( tag_names , collector ) ; collector . record ( "uid.ids-used" , used_uids . get ( TAG_NAME_QUAL ) , "kind=" + TAG_NAME_QUAL ) ; collector . record ( "uid.ids-available" , ( Internal . getMaxUnsignedValueOnBytes ( tag_names . width ( ) ) - used_uids . get ( TAG_NAME_QUAL ) ) , "kind=" + TAG_NAME_QUAL ) ; collectUidStats ( tag_values , collector ) ; collector . record ( "uid.ids-used" , used_uids . get ( TAG_VALUE_QUAL ) , "kind=" + TAG_VALUE_QUAL ) ; collector . record ( "uid.ids-available" , ( Internal . getMaxUnsignedValueOnBytes ( tag_values . width ( ) ) - used_uids . get ( TAG_VALUE_QUAL ) ) , "kind=" + TAG_VALUE_QUAL ) ; } catch ( Exception e ) { throw new RuntimeException ( "Shouldn't be here" , e ) ; } collector . record ( "uid.filter.rejected" , rejected_dps . get ( ) , "kind=raw" ) ; collector . record ( "uid.filter.rejected" , rejected_aggregate_dps . get ( ) , "kind=aggregate" ) ; { final Runtime runtime = Runtime . getRuntime ( ) ; collector . record ( "jvm.ramfree" , runtime . freeMemory ( ) ) ; collector . record ( "jvm.ramused" , runtime . totalMemory ( ) ) ; } collector . 
addExtraTag ( "class" , "IncomingDataPoints" ) ; try { collector . record ( "hbase.latency" , IncomingDataPoints . putlatency , "method=put" ) ; } finally { collector . clearExtraTag ( "class" ) ; } collector . addExtraTag ( "class" , "TSDB" ) ; try { collector . record ( "datapoints.added" , datapoints_added , "type=all" ) ; } finally { collector . clearExtraTag ( "class" ) ; } collector . addExtraTag ( "class" , "TsdbQuery" ) ; try { collector . record ( "hbase.latency" , TsdbQuery . scanlatency , "method=scan" ) ; } finally { collector . clearExtraTag ( "class" ) ; } final ClientStats stats = client . stats ( ) ; collector . record ( "hbase.root_lookups" , stats . rootLookups ( ) ) ; collector . record ( "hbase.meta_lookups" , stats . uncontendedMetaLookups ( ) , "type=uncontended" ) ; collector . record ( "hbase.meta_lookups" , stats . contendedMetaLookups ( ) , "type=contended" ) ; collector . record ( "hbase.rpcs" , stats . atomicIncrements ( ) , "type=increment" ) ; collector . record ( "hbase.rpcs" , stats . deletes ( ) , "type=delete" ) ; collector . record ( "hbase.rpcs" , stats . gets ( ) , "type=get" ) ; collector . record ( "hbase.rpcs" , stats . puts ( ) , "type=put" ) ; collector . record ( "hbase.rpcs" , stats . appends ( ) , "type=append" ) ; collector . record ( "hbase.rpcs" , stats . rowLocks ( ) , "type=rowLock" ) ; collector . record ( "hbase.rpcs" , stats . scannersOpened ( ) , "type=openScanner" ) ; collector . record ( "hbase.rpcs" , stats . scans ( ) , "type=scan" ) ; collector . record ( "hbase.rpcs.batched" , stats . numBatchedRpcSent ( ) ) ; collector . record ( "hbase.flushes" , stats . flushes ( ) ) ; collector . record ( "hbase.connections.created" , stats . connectionsCreated ( ) ) ; collector . record ( "hbase.connections.idle_closed" , stats . idleConnectionsClosed ( ) ) ; collector . record ( "hbase.nsre" , stats . noSuchRegionExceptions ( ) ) ; collector . record ( "hbase.nsre.rpcs_delayed" , stats . 
numRpcDelayedDueToNSRE ( ) ) ; collector . record ( "hbase.region_clients.open" , stats . regionClients ( ) ) ; collector . record ( "hbase.region_clients.idle_closed" , stats . idleConnectionsClosed ( ) ) ; compactionq . collectStats ( collector ) ; // Collect Stats from Plugins
if ( startup != null ) { try { collector . addExtraTag ( "plugin" , "startup" ) ; startup . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } if ( rt_publisher != null ) { try { collector . addExtraTag ( "plugin" , "publish" ) ; rt_publisher . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } if ( authentication != null ) { try { collector . addExtraTag ( "plugin" , "authentication" ) ; authentication . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } if ( search != null ) { try { collector . addExtraTag ( "plugin" , "search" ) ; search . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } if ( storage_exception_handler != null ) { try { collector . addExtraTag ( "plugin" , "storageExceptionHandler" ) ; storage_exception_handler . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } if ( ts_filter != null ) { try { collector . addExtraTag ( "plugin" , "timeseriesFilter" ) ; ts_filter . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } if ( uid_filter != null ) { try { collector . addExtraTag ( "plugin" , "uidFilter" ) ; uid_filter . collectStats ( collector ) ; } finally { collector . clearExtraTag ( "plugin" ) ; } } |
public class AgigaSentenceReader { /** * Assumes the position of vn is at a " sent " tag
* @ return */
private List < AgigaTypedDependency > parseDependencies ( VTDNav vn , DependencyForm form ) throws NavException , PilotException { } } | require ( vn . matchElement ( AgigaConstants . SENTENCE ) ) ; // Move to the < basic - deps > tag
require ( vn . toElement ( VTDNav . FC , form . getXmlTag ( ) ) ) ; List < AgigaTypedDependency > agigaDeps = new ArrayList < AgigaTypedDependency > ( ) ; // Loop through the dep tags
AutoPilot basicDepRelAp = new AutoPilot ( vn ) ; basicDepRelAp . selectElement ( AgigaConstants . DEP ) ; while ( basicDepRelAp . iterate ( ) ) { // Read the type , governor , and dependent
String type = vn . toString ( vn . getAttrVal ( AgigaConstants . DEP_TYPE ) ) ; require ( vn . toElement ( VTDNav . FC , AgigaConstants . GOVERNOR ) ) ; int governorId = vn . parseInt ( vn . getText ( ) ) ; require ( vn . toElement ( VTDNav . NS , AgigaConstants . DEPENDENT ) ) ; int dependentId = vn . parseInt ( vn . getText ( ) ) ; log . finer ( String . format ( "\tdep type=%s\t%d-->%d" , type , governorId , dependentId ) ) ; // Subtract one , since the tokens are one - indexed in the XML but
// zero - indexed in this API
AgigaTypedDependency agigaDep = new AgigaTypedDependency ( type , governorId - 1 , dependentId - 1 ) ; agigaDeps . add ( agigaDep ) ; } return agigaDeps ; |
public class WebUtils { /** * Converts the given params into a query string started with ?
* @ param params The params
* @ param encoding The encoding to use
* @ return The query string
* @ throws UnsupportedEncodingException If the given encoding is not supported */
@ SuppressWarnings ( "rawtypes" ) public static String toQueryString ( Map params , String encoding ) throws UnsupportedEncodingException { } } | if ( encoding == null ) encoding = "UTF-8" ; StringBuilder queryString = new StringBuilder ( "?" ) ; for ( Iterator i = params . entrySet ( ) . iterator ( ) ; i . hasNext ( ) ; ) { Map . Entry entry = ( Map . Entry ) i . next ( ) ; boolean hasMore = i . hasNext ( ) ; boolean wasAppended = appendEntry ( entry , queryString , encoding , "" ) ; if ( hasMore && wasAppended ) queryString . append ( '&' ) ; } return queryString . toString ( ) ; |
public class DataUtils { public static < T > List < T > getList ( final Cursor cursor , final Class < T > klass ) { } } | try { return getObjectsFromCursor ( cursor , klass ) ; } catch ( final Exception e ) { Logger . ex ( e ) ; return null ; } |
public class VirtualMachineScaleSetsInner { /** * Gets a list of SKUs available for your VM scale set , including the minimum and maximum VM instances allowed for each SKU .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; VirtualMachineScaleSetSkuInner & gt ; object */
public Observable < Page < VirtualMachineScaleSetSkuInner > > listSkusAsync ( final String resourceGroupName , final String vmScaleSetName ) { } } | return listSkusWithServiceResponseAsync ( resourceGroupName , vmScaleSetName ) . map ( new Func1 < ServiceResponse < Page < VirtualMachineScaleSetSkuInner > > , Page < VirtualMachineScaleSetSkuInner > > ( ) { @ Override public Page < VirtualMachineScaleSetSkuInner > call ( ServiceResponse < Page < VirtualMachineScaleSetSkuInner > > response ) { return response . body ( ) ; } } ) ; |
public class OldDataMonitor { /** * Inform monitor that some data in a deprecated format has been loaded ,
* and converted in - memory to a new structure .
* @ param obj Saveable object ; calling save ( ) on this object will persist
* the data in its new format to disk .
* @ param version Hudson release when the data structure changed . */
public static void report ( Saveable obj , String version ) { } } | OldDataMonitor odm = get ( Jenkins . getInstance ( ) ) ; try { SaveableReference ref = referTo ( obj ) ; while ( true ) { VersionRange vr = odm . data . get ( ref ) ; if ( vr != null && odm . data . replace ( ref , vr , new VersionRange ( vr , version , null ) ) ) { break ; } else if ( odm . data . putIfAbsent ( ref , new VersionRange ( null , version , null ) ) == null ) { break ; } } } catch ( IllegalArgumentException ex ) { LOGGER . log ( Level . WARNING , "Bad parameter given to OldDataMonitor" , ex ) ; } |
public class CmdLine { /** * Prints the help on the command line
* @ param options Options object from commons - cli */
public static void printHelp ( final Options options ) { } } | Collection < Option > c = options . getOptions ( ) ; System . out . println ( "Command line options are:" ) ; int longestLongOption = 0 ; for ( Option op : c ) { if ( op . getLongOpt ( ) . length ( ) > longestLongOption ) { longestLongOption = op . getLongOpt ( ) . length ( ) ; } } longestLongOption += 2 ; String spaces = StringUtils . repeat ( " " , longestLongOption ) ; for ( Option op : c ) { System . out . print ( "\t-" + op . getOpt ( ) + " --" + op . getLongOpt ( ) ) ; if ( op . getLongOpt ( ) . length ( ) < spaces . length ( ) ) { System . out . print ( spaces . substring ( op . getLongOpt ( ) . length ( ) ) ) ; } else { System . out . print ( " " ) ; } System . out . println ( op . getDescription ( ) ) ; } |
public class ContextServiceImpl { /** * Adds each thread context configuration from this - the base instance - to a another context service
* if the thread context configuration is not already present on the context service .
* Precondition : invoker must have a write lock on the contextSvc parameter .
* @ param contextSvc ContextService that is using this instance as a base instance . */
private void addComplementaryThreadContextConfigurationsTo ( ContextServiceImpl contextSvc ) { } } | final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; lock . writeLock ( ) . lock ( ) ; try { // Detect and stop infinite recursion from baseContextRef
if ( lock . getWriteHoldCount ( ) > 1 ) { IllegalArgumentException x = ignoreWarnOrFail ( null , IllegalArgumentException . class , "CWWKC1020.baseContextRef.infinite" , name ) ; if ( x == null ) return ; else throw x ; } else if ( threadContextConfigurations == null ) init ( ) ; modificationListeners . add ( contextSvc ) ; for ( Map . Entry < String , Map < String , ? > > threadContextConfig : threadContextConfigurations . entrySet ( ) ) { String name = threadContextConfig . getKey ( ) ; if ( ! contextSvc . threadContextConfigurations . containsKey ( name ) ) { contextSvc . threadContextConfigurations . put ( name , threadContextConfig . getValue ( ) ) ; if ( trace && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "adding " + name , threadContextConfig . getValue ( ) ) ; } } } finally { lock . writeLock ( ) . unlock ( ) ; } |
public class CmsGalleryService { /** * Returns a map with the available locales . < p >
* The map entry key is the current locale and the value the localized nice name . < p >
* @ return the map representation of all available locales */
private Map < String , String > buildLocalesMap ( ) { } } | TreeMap < String , String > localesMap = new TreeMap < String , String > ( ) ; Iterator < Locale > it = OpenCms . getLocaleManager ( ) . getAvailableLocales ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { Locale locale = it . next ( ) ; localesMap . put ( locale . toString ( ) , locale . getDisplayName ( getWorkplaceLocale ( ) ) ) ; } return localesMap ; |
public class GlobalCluster { /** * The list of cluster IDs for secondary clusters within the global database cluster . Currently limited to 1 item .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setGlobalClusterMembers ( java . util . Collection ) } or { @ link # withGlobalClusterMembers ( java . util . Collection ) }
* if you want to override the existing values .
* @ param globalClusterMembers
* The list of cluster IDs for secondary clusters within the global database cluster . Currently limited to 1
* item .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GlobalCluster withGlobalClusterMembers ( GlobalClusterMember ... globalClusterMembers ) { } } | if ( this . globalClusterMembers == null ) { setGlobalClusterMembers ( new com . amazonaws . internal . SdkInternalList < GlobalClusterMember > ( globalClusterMembers . length ) ) ; } for ( GlobalClusterMember ele : globalClusterMembers ) { this . globalClusterMembers . add ( ele ) ; } return this ; |
public class RNAUtils { /** * / * To Do check for other modified phosphates ? */
private static boolean hasPhosphat ( MonomerNotationUnitRNA monomerNotationUnitRNA ) { } } | if ( monomerNotationUnitRNA . getContents ( ) . get ( monomerNotationUnitRNA . getContents ( ) . size ( ) - 1 ) . getUnit ( ) . endsWith ( "P" ) ) { LOG . info ( "MonomerNotationUnitRNA " + monomerNotationUnitRNA . getUnit ( ) + " has a phosphate" ) ; return true ; } LOG . info ( "MonomerNotationUnitRNA " + monomerNotationUnitRNA . getUnit ( ) + " has no phosphate" ) ; return false ; |
public class WrappingUtils { /** * Wraps the parent ' s child with a ScaleTypeDrawable . */
static ScaleTypeDrawable wrapChildWithScaleType ( DrawableParent parent , ScalingUtils . ScaleType scaleType ) { } } | Drawable child = parent . setDrawable ( sEmptyDrawable ) ; child = maybeWrapWithScaleType ( child , scaleType ) ; parent . setDrawable ( child ) ; Preconditions . checkNotNull ( child , "Parent has no child drawable!" ) ; return ( ScaleTypeDrawable ) child ; |
public class AbstractGeometryIndexController { /** * Get the real world location of the event , while making sure it is within the maximum bounds . If no maximum bounds
* have been set , the original event location in world space is returned .
* @ param event
* The event to extract the location from .
* @ return The location within maximum bounds . */
public Coordinate getLocationWithinMaxBounds ( HumanInputEvent < ? > event ) { } } | Coordinate location = getLocation ( event , RenderSpace . WORLD ) ; location = getLocationWithinMaxBounds ( location ) ; return location ; |
public class HystrixTimer { /** * Clears all listeners .
* NOTE : This will result in race conditions if { @ link # addTimerListener ( com . netflix . hystrix . util . HystrixTimer . TimerListener ) } is being concurrently called . */
public static void reset ( ) { } } | ScheduledExecutor ex = INSTANCE . executor . getAndSet ( null ) ; if ( ex != null && ex . getThreadPool ( ) != null ) { ex . getThreadPool ( ) . shutdownNow ( ) ; } |
public class BatchGetDevEndpointsRequest { /** * The list of DevEndpoint names , which may be the names returned from the < code > ListDevEndpoint < / code > operation .
* @ param devEndpointNames
* The list of DevEndpoint names , which may be the names returned from the < code > ListDevEndpoint < / code >
* operation . */
public void setDevEndpointNames ( java . util . Collection < String > devEndpointNames ) { } } | if ( devEndpointNames == null ) { this . devEndpointNames = null ; return ; } this . devEndpointNames = new java . util . ArrayList < String > ( devEndpointNames ) ; |
public class DatabaseTableMetrics { /** * Record the row count for an individual database table .
* @ param registry The registry to bind metrics to .
* @ param tableName The name of the table to report table size for .
* @ param dataSourceName Will be used to tag metrics with " db " .
* @ param dataSource The data source to use to run the row count query .
* @ param tags Tags to apply to all recorded metrics . Must be an even number of arguments representing key / value pairs of tags . */
public static void monitor ( MeterRegistry registry , String tableName , String dataSourceName , DataSource dataSource , String ... tags ) { } } | monitor ( registry , dataSource , dataSourceName , tableName , Tags . of ( tags ) ) ; |
public class StatusCounter { /** * Increments finalStatus counter by single value .
* @ param status
* finalStatus for which the counter should be incremented . */
public void incrementFor ( Status status ) { } } | final int statusCounter = getValueFor ( status ) + 1 ; this . counter . put ( status , statusCounter ) ; size ++ ; if ( finalStatus == Status . PASSED && status != Status . PASSED ) { finalStatus = Status . FAILED ; } |
public class InJvmContainerExecutor { /** * Overrides the parent method while still invoking it . Since
* { @ link # isContainerActive ( ContainerId ) } method is also overridden here and
* always returns ' false ' the super . launchContainer ( . . ) will only go through
* the prep routine ( e . g . , creating temp dirs etc . ) while never launching the
* actual container via the launch script . This will ensure that all the
* expectations of the container to be launched ( e . g . , symlinks etc . ) are
* satisfied . The actual launch will be performed by invoking
* { @ link # doLaunch ( Container , Path ) } method . */
public int launchContainer ( ContainerStartContext containerStartContext ) throws IOException { } } | Container container = containerStartContext . getContainer ( ) ; Path containerWorkDir = containerStartContext . getContainerWorkDir ( ) ; super . launchContainer ( containerStartContext ) ; int exitCode = 0 ; if ( container . getLaunchContext ( ) . getCommands ( ) . toString ( ) . contains ( "bin/java" ) ) { ExecJavaCliParser result = this . createExecCommandParser ( containerWorkDir . toString ( ) ) ; try { exitCode = this . doLaunch ( container , containerWorkDir ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( ( "Returned: " + exitCode ) ) ; } } catch ( Exception e ) { e . printStackTrace ( ) ; } } else { String cmd = container . getLaunchContext ( ) . getCommands ( ) . get ( 0 ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( "Running Command: " + cmd ) ; } ExecShellCliParser execShellCliParser = new ExecShellCliParser ( cmd ) ; try { exitCode = execShellCliParser . runCommand ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } if ( logger . isInfoEnabled ( ) ) { logger . info ( ( "Returned: " + exitCode ) ) ; } } return exitCode ; |
public class CreateFleetRequest { /** * Range of IP addresses and port settings that permit inbound traffic to access game sessions that running on the
* fleet . For fleets using a custom game build , this parameter is required before game sessions running on the fleet
* can accept connections . For Realtime Servers fleets , Amazon GameLift automatically sets TCP and UDP ranges for
* use by the Realtime servers . You can specify multiple permission settings or add more by updating the fleet .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setEC2InboundPermissions ( java . util . Collection ) } or
* { @ link # withEC2InboundPermissions ( java . util . Collection ) } if you want to override the existing values .
* @ param eC2InboundPermissions
* Range of IP addresses and port settings that permit inbound traffic to access game sessions that running
* on the fleet . For fleets using a custom game build , this parameter is required before game sessions
* running on the fleet can accept connections . For Realtime Servers fleets , Amazon GameLift automatically
* sets TCP and UDP ranges for use by the Realtime servers . You can specify multiple permission settings or
* add more by updating the fleet .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateFleetRequest withEC2InboundPermissions ( IpPermission ... eC2InboundPermissions ) { } } | if ( this . eC2InboundPermissions == null ) { setEC2InboundPermissions ( new java . util . ArrayList < IpPermission > ( eC2InboundPermissions . length ) ) ; } for ( IpPermission ele : eC2InboundPermissions ) { this . eC2InboundPermissions . add ( ele ) ; } return this ; |
public class ConvertImage { /** * Converts a { @ link InterleavedF64 } into a { @ link GrayF64 } by computing the average value of each pixel
* across all the bands .
* @ param input ( Input ) The ImageInterleaved that is being converted . Not modified .
* @ param output ( Optional ) The single band output image . If null a new image is created . Modified .
* @ return Converted image . */
public static GrayF64 average ( InterleavedF64 input , GrayF64 output ) { } } | if ( output == null ) { output = new GrayF64 ( input . width , input . height ) ; } else { output . reshape ( input . width , input . height ) ; } if ( BoofConcurrency . USE_CONCURRENT ) { ConvertInterleavedToSingle_MT . average ( input , output ) ; } else { ConvertInterleavedToSingle . average ( input , output ) ; } return output ; |
public class LogRepositoryComponent { /** * Dumps trace records stored in the memory buffer to disk . This action
* happens only if trace destination was set to memory . */
public static synchronized void dumpTraceMemory ( ) { } } | LogRepositoryWriter writer = getBinaryHandler ( ) . getTraceWriter ( ) ; if ( writer instanceof LogRepositoryWriterCBuffImpl ) { ( ( LogRepositoryWriterCBuffImpl ) writer ) . dumpItems ( ) ; } |
public class TranspilationPasses { /** * Process transpilations if the input language needs transpilation from certain features , on any
* JS file that has features not present in the compiler ' s output language mode .
* @ param compiler An AbstractCompiler
* @ param combinedRoot The combined root for all JS files .
* @ param featureSet The features which this pass helps transpile .
* @ param callbacks The callbacks that should be invoked if a file has ES6 features . */
static void processTranspile ( AbstractCompiler compiler , Node combinedRoot , FeatureSet featureSet , Callback ... callbacks ) { } } | if ( compiler . getOptions ( ) . needsTranspilationFrom ( featureSet ) ) { FeatureSet languageOutFeatures = compiler . getOptions ( ) . getOutputFeatureSet ( ) ; for ( Node singleRoot : combinedRoot . children ( ) ) { // Only run the transpilation if this file has features not in the compiler ' s target output
// language . For example , if this file is purely ES6 and the output language is ES6 , don ' t
// run any transpilation passes on it .
// TODO ( lharker ) : We could save time by being more selective about what files we transpile .
// e . g . if a file has async functions but not ` * * ` , don ' t run ` * * ` transpilation on it .
// Right now we know what features were in a file at parse time , but not what features were
// added to that file by other transpilation passes .
if ( doesScriptHaveUnsupportedFeatures ( singleRoot , languageOutFeatures ) ) { for ( Callback callback : callbacks ) { singleRoot . putBooleanProp ( Node . TRANSPILED , true ) ; NodeTraversal . traverse ( compiler , singleRoot , callback ) ; } } } } |
public class URLUtils { /** * Retrieve the element ID from the path
* @ param relativePath path
* @ return element ID , may be { @ code null } */
public static String getElementID ( final String relativePath ) { } } | final String fragment = FileUtils . getFragment ( relativePath ) ; if ( fragment != null ) { if ( fragment . lastIndexOf ( SLASH ) != - 1 ) { final String id = fragment . substring ( fragment . lastIndexOf ( SLASH ) + 1 ) ; return id . isEmpty ( ) ? null : id ; } } return null ; |
public class Calc { /** * Convert an array of atoms into an array of vecmath points
* @ param atoms
* list of atoms
* @ return list of Point3ds storing the x , y , z coordinates of each atom */
public static Point3d [ ] atomsToPoints ( Atom [ ] atoms ) { } } | Point3d [ ] points = new Point3d [ atoms . length ] ; for ( int i = 0 ; i < atoms . length ; i ++ ) { points [ i ] = atoms [ i ] . getCoordsAsPoint3d ( ) ; } return points ; |
public class IndexGenerator { /** * Starts index generation using the database credentials in the
* properties file specified in args [ 0 ] . < br >
* The properties file should have the following structure :
* < ul > < li > host = dbhost < / li >
* < li > db = revisiondb < / li >
* < li > user = username < / li >
* < li > password = pwd < / li >
* < li > output = outputFile < / li >
* < li > writeDirectlyToDB = true | false ( optional ) < / li >
* < li > charset = UTF8 ( or others ) ( optional ) < / li >
* < li > buffer = 15000 ( optional ) < / li >
* < li > maxAllowedPackets = 16760832 ( optional ) < / li > < / ul >
* < br >
* @ param args allows only one entry that contains the path to the config file */
public static void main ( String [ ] args ) { } } | if ( args == null || args . length != 1 ) { System . out . println ( ( "You need to specify the database configuration file. \n" + "It should contain the access credentials to you revision database in the following format: \n" + " host=dbhost \n" + " db=revisiondb \n" + " user=username \n" + " password=pwd \n" + " output=outputFile \n" + " outputDatabase=true|false (optional)\n" + " outputDatafile=true|false (optional)\n" + " charset=UTF8 (optional)\n" + " buffer=15000 (optional)\n" + " maxAllowedPackets=16760832 (optional)\n\n" + " The default output mode is SQL Dump" ) ) ; throw new IllegalArgumentException ( ) ; } else { Properties props = load ( args [ 0 ] ) ; RevisionAPIConfiguration config = new RevisionAPIConfiguration ( ) ; config . setHost ( props . getProperty ( "host" ) ) ; config . setDatabase ( props . getProperty ( "db" ) ) ; config . setUser ( props . getProperty ( "user" ) ) ; config . setPassword ( props . getProperty ( "password" ) ) ; String charset = props . getProperty ( "charset" ) ; String buffer = props . getProperty ( "buffer" ) ; String maxAllowedPackets = props . getProperty ( "maxAllowedPackets" ) ; if ( charset != null ) { config . setCharacterSet ( charset ) ; } else { config . setCharacterSet ( "UTF-8" ) ; } if ( buffer != null ) { config . setBufferSize ( Integer . parseInt ( buffer ) ) ; } else { config . setBufferSize ( 15000 ) ; } if ( maxAllowedPackets != null ) { config . setMaxAllowedPacket ( Long . parseLong ( maxAllowedPackets ) ) ; } else { config . setMaxAllowedPacket ( 16 * 1024 * 1023 ) ; } if ( props . getProperty ( "outputDatabase" ) != null && Boolean . parseBoolean ( props . getProperty ( "outputDatabase" ) ) ) { config . setOutputType ( OutputTypes . DATABASE ) ; } else if ( props . getProperty ( "outputDatafile" ) != null && Boolean . parseBoolean ( props . getProperty ( "outputDatafile" ) ) ) { config . setOutputType ( OutputTypes . DATAFILE ) ; } else { config . 
setOutputType ( OutputTypes . SQL ) ; } String output = props . getProperty ( "output" ) ; File outfile = new File ( output ) ; if ( outfile . isDirectory ( ) ) { config . setOutputPath ( output ) ; } else { config . setOutputPath ( outfile . getParentFile ( ) . getPath ( ) ) ; } try { new IndexGenerator ( config ) . generate ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } System . out . println ( "TERMINATED" ) ; } |
public class CustomCreative {
    /**
     * Gets the customCreativeAssets value for this CustomCreative.
     *
     * @return customCreativeAssets A list of file assets that are associated with this
     *         creative, and can be referenced in the snippet.
     */
    public com.google.api.ads.admanager.axis.v201902.CustomCreativeAsset[] getCustomCreativeAssets() {
        // NOTE(review): returns the internal array directly — callers share the live
        // reference, so external mutation is visible to this object. This is the usual
        // shape for generated Axis accessors, so presumably intentional; confirm before
        // adding defensive copying.
        return customCreativeAssets;
    }
}
public class HttpResponseMessageImpl {
    /**
     * Method to update the caching related headers for a response message. This
     * will configure the message such that if Set-Cookie(2) information is
     * present then additional headers will be added to ensure that the message
     * is not cached on any intermediate caches.
     */
    private void updateCacheControl() {
        // regular HTTP path, the Set-Cookie values are already put into BNFHdrs
        // but localhttp they might still be in the cookie cache
        boolean addSet1 = containsHeader(HttpHeaderKeys.HDR_SET_COOKIE)
                || isCookieCacheDirty(HttpHeaderKeys.HDR_SET_COOKIE);
        boolean addSet2 = containsHeader(HttpHeaderKeys.HDR_SET_COOKIE2)
                || isCookieCacheDirty(HttpHeaderKeys.HDR_SET_COOKIE2);
        if (!addSet1 && !addSet2) {
            // set-cookie(2) does not exist, nothing to do
            return;
        }
        // make sure the Expires header exists; LONG_AGO forces immediate expiry
        if (!containsHeader(HttpHeaderKeys.HDR_EXPIRES)) {
            // add the Expires header
            setSpecialHeader(HttpHeaderKeys.HDR_EXPIRES, LONG_AGO);
        }
        // check whether we need to update an existing Cache-Control header
        // or simply add one
        if (containsHeader(HttpHeaderKeys.HDR_CACHE_CONTROL)) {
            // need to update the existing value; a blanket "no-cache" directive
            // already covers both cookie headers, so only act when it is absent
            HeaderHandler handler = new HeaderHandler(this, ',', HttpHeaderKeys.HDR_CACHE_CONTROL);
            if (!handler.contains("no-cache")) {
                boolean updated = false;
                if (addSet1) {
                    updated = handler.add("no-cache", "set-cookie");
                }
                if (addSet2) {
                    // non-short-circuit | is deliberate: the add() must run even
                    // when the set-cookie branch already flagged an update
                    updated = handler.add("no-cache", "set-cookie2") | updated;
                }
                if (updated) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                        Tr.event(tc, "Updating Cache-Control for Set-Cookie");
                    }
                    // re-marshal the merged directive list back into the header
                    setSpecialHeader(HttpHeaderKeys.HDR_CACHE_CONTROL, handler.marshall());
                }
            }
        } else {
            // not present, just write what we want
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                Tr.event(tc, "Adding Cache-Control due to Set-Cookie");
            }
            setSpecialHeader(HttpHeaderKeys.HDR_CACHE_CONTROL, NOCACHE_VALUE);
        }
    }
}
public class RoomInfoImpl { /** * / * ( non - Javadoc )
* @ see com . tvd12 . ezyfox . core . command . RoomInfo # room ( com . tvd12 . ezyfox . core . entities . ApiRoom ) */
@ Override public RoomInfo room ( ApiRoom room ) { } } | this . apiRoom = room ; this . room = CommandUtil . getSfsRoom ( room , extension ) ; return this ; |
public class Strings { /** * Initialize regular expressions used in unescaping . This method will be
* invoked automatically the first time a string is unescaped . */
public static boolean initializeUnescapePattern ( ) { } } | if ( paternIsInitialized == true ) { return true ; } synchronized ( unescapeInitLockObject ) { if ( paternIsInitialized == true ) { return true ; } try { unescapePattern = Pattern . compile ( unicodeUnescapeMatchExpression ) ; } catch ( PatternSyntaxException pse ) { /* * the pattern is compiled from a final string , so this
* exception should never be thrown */
System . err . println ( "Imposible error: " + "static final regular expression pattern " + "failed to compile. Exception: " + pse . toString ( ) ) ; return false ; } paternIsInitialized = true ; } return true ; |
public class FlowTypeCheck { /** * check type information in a flow - sensitive fashion through a block of
* statements , whilst type checking each statement and expression .
* @ param block
* Block of statements to flow sensitively type check
* @ param environment
* Determines the type of all variables immediately going into this
* block
* @ return */
private Environment checkBlock ( Stmt . Block block , Environment environment , EnclosingScope scope ) { } } | for ( int i = 0 ; i != block . size ( ) ; ++ i ) { Stmt stmt = block . get ( i ) ; environment = checkStatement ( stmt , environment , scope ) ; } return environment ; |
public class StrBuilder { /** * Advanced search and replaces within the builder using a matcher .
* Matchers can be used to perform advanced behaviour .
* For example you could write a matcher to delete all occurrences
* where the character ' a ' is followed by a number .
* @ param matcher the matcher to use to find the deletion , null causes no action
* @ param replaceStr the string to replace the match with , null is a delete
* @ param startIndex the start index , inclusive , must be valid
* @ param endIndex the end index , exclusive , must be valid except
* that if too large it is treated as end of string
* @ param replaceCount the number of times to replace , - 1 for replace all
* @ return this , to enable chaining
* @ throws IndexOutOfBoundsException if start index is invalid */
public StrBuilder replace ( final StrMatcher matcher , final String replaceStr , final int startIndex , int endIndex , final int replaceCount ) { } } | endIndex = validateRange ( startIndex , endIndex ) ; return replaceImpl ( matcher , replaceStr , startIndex , endIndex , replaceCount ) ; |
public class LoggingFilter {
    /**
     * {@inheritDoc}
     *
     * Logs the outbound response: its headers plus, when an entity-logging
     * stream was installed earlier in the chain, the buffered entity body.
     */
    @Override
    public void aroundWriteTo(final WriterInterceptorContext writerInterceptorContext)
            throws IOException, WebApplicationException {
        // Stream captured earlier (if entity logging is enabled); may be null.
        final LoggingStream stream =
                (LoggingStream) writerInterceptorContext.getProperty(ENTITY_LOGGER_PROPERTY);
        // Let the rest of the interceptor chain (and the writer) run first so the
        // entity is fully written before we log it.
        writerInterceptorContext.proceed();
        // Correlate with the request's logging id when present; otherwise mint a new one.
        final Object requestId = Requests.getProperty(LOGGING_ID_PROPERTY);
        final long id = requestId != null ? (Long) requestId : _id.incrementAndGet();
        // Lazily create the per-exchange log buffer stored on the context.
        StringBuilder b =
                (StringBuilder) writerInterceptorContext.getProperty(LOGGER_BUFFER_PROPERTY);
        if (b == null) {
            b = new StringBuilder();
            writerInterceptorContext.setProperty(LOGGER_BUFFER_PROPERTY, b);
        }
        printPrefixedHeaders(b, id, RESPONSE_PREFIX,
                HeaderUtils.asStringHeaders(writerInterceptorContext.getHeaders()));
        if (stream != null) {
            // Entity logging enabled: the stream's builder already contains the
            // headers plus the captured entity, decoded with the response charset.
            log(stream.getStringBuilder(
                    MessageUtils.getCharset(writerInterceptorContext.getMediaType())));
        } else {
            // Headers only.
            log(b);
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.