signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class DefaultWardenService { /** * Starts the scheduled executor service . */
private void _startScheduledExecutorService ( ) { } } | DisableWardenAlertsThread disableWardenAlertThread = new DisableWardenAlertsThread ( ) ; _scheduledExecutorService . scheduleAtFixedRate ( disableWardenAlertThread , 0L , TIME_BETWEEN_WARDEN_ALERT_DISABLEMENT_MILLIS , TimeUnit . MILLISECONDS ) ; |
public class DocumentLine { /** * setter for endings - sets
* @ generated
* @ param v value to set into the feature */
public void setEndings ( FloatArray v ) { } } | if ( DocumentLine_Type . featOkTst && ( ( DocumentLine_Type ) jcasType ) . casFeat_endings == null ) jcasType . jcas . throwFeatMissing ( "endings" , "ch.epfl.bbp.uima.types.DocumentLine" ) ; jcasType . ll_cas . ll_setRefValue ( addr , ( ( DocumentLine_Type ) jcasType ) . casFeatCode_endings , jcasType . ll_cas . ll_getFSRef ( v ) ) ; |
public class InteropFramework { /** * Determines whether this format received as argument is an input format .
* @ param format a { @ link ProvFormat }
* @ return true if format is an input format */
public Boolean isInputFormat ( ProvFormat format ) { } } | ProvFormatType t = provTypeMap . get ( format ) ; return ( t . equals ( ProvFormatType . INPUT ) || t . equals ( ProvFormatType . INPUTOUTPUT ) ) ; |
public class MappedValueExpression { /** * ( non - Javadoc )
* @ see javax . el . ValueExpression # getValue ( javax . el . ELContext ) */
public Object getValue ( ELContext context ) { } } | Object base = this . orig . getValue ( context ) ; if ( base != null ) { context . setPropertyResolved ( true ) ; return new Entry ( ( Map ) base , key ) ; } return null ; |
public class ns_ns_timeout { /** * < pre >
* Use this operation to set timeout on NetScaler Instance .
* < / pre > */
public static ns_ns_timeout set ( nitro_service client , ns_ns_timeout resource ) throws Exception { } } | return ( ( ns_ns_timeout [ ] ) resource . update_resource ( client ) ) [ 0 ] ; |
public class CmsPublishManager { /** * Returns a publish list with all the given resources , filtered only by state . < p >
* @ param cms the cms request context
* @ param directPublishResources the { @ link CmsResource } objects which will be directly published
* @ param directPublishSiblings < code > true < / code > , if all eventual siblings of the direct
* published resources should also get published
* @ param isUserPublishList if true , the publish list consists of resources directly selected by the user to publish
* @ return a publish list
* @ throws CmsException if something goes wrong */
public CmsPublishList getPublishListAll ( CmsObject cms , List < CmsResource > directPublishResources , boolean directPublishSiblings , boolean isUserPublishList ) throws CmsException { } } | CmsPublishList pubList = new CmsPublishList ( true , directPublishResources , directPublishSiblings ) ; pubList . setUserPublishList ( isUserPublishList ) ; return m_securityManager . fillPublishList ( cms . getRequestContext ( ) , pubList ) ; |
public class SubnetworkInfo { /** * Returns a builder for a { @ code SubnetworkInfo } object given the identity of the subnetwork , the
* identity of the network this subnetwork belongs to and the range of IPv4 addresses owned by
* this subnetwork . { @ code ipRange } must be a CIDR specification , for example : { @ code
* 192.168.0.0/16 } .
* @ see < a href = " https : / / wikipedia . org / wiki / Classless _ Inter - Domain _ Routing " > CIDR < / a > */
public static Builder newBuilder ( SubnetworkId subnetworkId , NetworkId network , String ipRange ) { } } | return new BuilderImpl ( subnetworkId , network , ipRange ) ; |
public class PortletCookieServiceImpl { /** * Check the { @ link HttpSession } for the ID of the Portal Cookie . This is useful if the customer
* does not wish to accept cookies .
* @ param session
* @ return */
protected IPortalCookie locatePortalCookieInSession ( HttpSession session ) { } } | synchronized ( WebUtils . getSessionMutex ( session ) ) { final String portalCookieId = ( String ) session . getAttribute ( SESSION_ATTRIBUTE__PORTAL_COOKIE_ID ) ; if ( portalCookieId == null ) { return null ; } IPortalCookie portalCookie = this . portletCookieDao . getPortalCookie ( portalCookieId ) ; return portalCookie ; } |
public class NonBlockingStringWriter { /** * Appends a subsequence of the specified character sequence to this writer .
* An invocation of this method of the form < tt > out . append ( csq , start ,
* end ) < / tt > when < tt > csq < / tt > is not < tt > null < / tt > , behaves in exactly the same
* way as the invocation
* < pre >
* out . write ( csq . subSequence ( start , end ) . toString ( ) )
* < / pre >
* @ param aCS
* The character sequence from which a subsequence will be appended . If
* < tt > csq < / tt > is < tt > null < / tt > , then characters will be appended as if
* < tt > csq < / tt > contained the four characters < tt > " null " < / tt > .
* @ param nStart
* The index of the first character in the subsequence
* @ param nEnd
* The index of the character following the last character in the
* subsequence
* @ return This writer
* @ throws IndexOutOfBoundsException
* If < tt > start < / tt > or < tt > end < / tt > are negative , < tt > start < / tt > is
* greater than < tt > end < / tt > , or < tt > end < / tt > is greater than
* < tt > csq . length ( ) < / tt > */
@ Override public NonBlockingStringWriter append ( final CharSequence aCS , final int nStart , final int nEnd ) { } } | final CharSequence cs = ( aCS == null ? "null" : aCS ) ; write ( cs . subSequence ( nStart , nEnd ) . toString ( ) ) ; return this ; |
public class ThreadSafeJOptionPane { /** * Shows a question - message dialog requesting input from the user parented
* to < code > parentComponent < / code > . The dialog is displayed on top of the
* < code > Component < / code > ' s frame , and is usually positioned below the
* < code > Component < / code > .
* @ param parentComponent
* the parent < code > Component < / code > for the dialog
* @ param message
* the < code > Object < / code > to display
* @ return user ' s input , or < code > null < / code > meaning the user canceled the
* input */
public static String showInputDialog ( final Component parentComponent , final Object message ) { } } | return execute ( new StringOptionPane ( ) { public void show ( final StringResult result ) { result . setResult ( JOptionPane . showInputDialog ( parentComponent , message ) ) ; } } ) ; |
public class YamlConfigReader { /** * Reads configuration from a file , parameterize it with given values and
* returns a new ConfigParams object .
* @ param correlationId ( optional ) transaction id to trace execution through
* call chain .
* @ param path a path to configuration file .
* @ param parameters values to parameters the configuration or null to skip
* parameterization .
* @ return ConfigParams configuration .
* @ throws ApplicationException when error occured . */
public static ConfigParams readConfig ( String correlationId , String path , ConfigParams parameters ) throws ApplicationException { } } | return new YamlConfigReader ( path ) . readConfig ( correlationId , parameters ) ; |
public class Transformers { /** * Returns the source { @ link Observable } merged with all of the other
* observables using the given { @ link Comparator } for order . A precondition
* is that the source and other are already ordered . This transformer
* supports backpressure and its inputs must also support backpressure .
* < img src =
* " https : / / github . com / davidmoten / rxjava - extras / blob / master / src / docs / orderedMerge . png ? raw = true "
* alt = " marble diagram " >
* @ param others
* a collection of already ordered observables to merge with
* @ param comparator
* the ordering to use
* @ param < T >
* the generic type of the objects being compared
* @ return merged and ordered observable */
public static final < T > Transformer < T , T > orderedMergeWith ( final Collection < Observable < T > > others , final Comparator < ? super T > comparator ) { } } | return new Transformer < T , T > ( ) { @ Override public Observable < T > call ( Observable < T > source ) { List < Observable < T > > collection = new ArrayList < Observable < T > > ( ) ; collection . add ( source ) ; collection . addAll ( others ) ; return OrderedMerge . < T > create ( collection , comparator , false , RxRingBuffer . SIZE ) ; } } ; |
public class RestTracerAdapter { /** * 适配服务端serverReceived */
public static void serverReceived ( NettyHttpRequest request ) { } } | try { SofaRequest sofaRequest = new SofaRequest ( ) ; HttpHeaders headers = request . getHttpHeaders ( ) ; String rpcTraceContext = headers . getHeaderString ( RemotingConstants . NEW_RPC_TRACE_NAME ) ; if ( StringUtils . isNotBlank ( rpcTraceContext ) ) { // 新格式
sofaRequest . addRequestProp ( RemotingConstants . NEW_RPC_TRACE_NAME , rpcTraceContext ) ; } else { String traceIdKey = headers . getHeaderString ( RemotingConstants . HTTP_HEADER_TRACE_ID_KEY ) ; String rpcIdKey = headers . getHeaderString ( RemotingConstants . HTTP_HEADER_RPC_ID_KEY ) ; if ( StringUtils . isEmpty ( rpcIdKey ) ) { rpcIdKey = request . getUri ( ) . getQueryParameters ( ) . getFirst ( RemotingConstants . RPC_ID_KEY ) ; } if ( StringUtils . isEmpty ( traceIdKey ) ) { traceIdKey = request . getUri ( ) . getQueryParameters ( ) . getFirst ( RemotingConstants . TRACE_ID_KEY ) ; } if ( StringUtils . isNotEmpty ( traceIdKey ) && StringUtils . isNotEmpty ( rpcIdKey ) ) { Map < String , String > map = new HashMap < String , String > ( ) ; map . put ( RemotingConstants . TRACE_ID_KEY , traceIdKey ) ; map . put ( RemotingConstants . RPC_ID_KEY , rpcIdKey ) ; String penAttrs = headers . getHeaderString ( RemotingConstants . PEN_ATTRS_KEY ) ; map . put ( RemotingConstants . PEN_ATTRS_KEY , penAttrs ) ; sofaRequest . addRequestProp ( RemotingConstants . RPC_TRACE_NAME , map ) ; } } Tracers . serverReceived ( sofaRequest ) ; } catch ( Throwable t ) { if ( LOGGER . isWarnEnabled ( ) ) { LOGGER . warn ( "the process of rest tracer server receive occur error " , t ) ; } } |
public class MultiPath { /** * Inserts a path from an array of 2D Points .
* @ param pathIndex
* The path index of the multipath to place the new path .
* @ param points
* The array of points defining the new path .
* @ param pointsOffset
* The offset into the array to start reading .
* @ param count
* The number of points to insert into the new path .
* @ param bForward
* When FALSE , the points are inserted in reverse order . */
void insertPath ( int pathIndex , Point2D [ ] points , int pointsOffset , int count , boolean bForward ) { } } | m_impl . insertPath ( pathIndex , points , pointsOffset , count , bForward ) ; |
public class Helper { /** * Encodes a String so it can be used as path param .
* @ param v
* @ return */
public static String encodePath ( String v ) { } } | String encoded = urlPathEscape . escape ( v ) ; return encoded ; |
public class CollectionHelpers { /** * Returns an unmodifiable Set View made up of the given Sets while translating the items into a common type . The returned
* Set View does not copy any of the data from any of the given Sets , therefore any changes in the two Sets will be
* reflected in the View .
* NOTE : The iterator of this SetView will not de - duplicate items , so if the two Sets are not disjoint , of if the converter
* functions yield the same value for different inputs , the same item may appear multiple times .
* @ param set1 The first Set , which contains items of type Type1.
* @ param converter1 A Function that translates from Type1 to OutputType .
* @ param set2 The second Set , which contains items of type Type2.
* @ param converter2 A Function that translates from Type2 to OutputType .
* @ param < OutputType > The type of the items in the returned Set View .
* @ param < Type1 > The type of the items in Set 1.
* @ param < Type2 > The type of the items in Set 2.
* @ return A new Set View made up of the two Collections , with translation applied . */
public static < OutputType , Type1 , Type2 > Set < OutputType > joinSets ( Set < Type1 > set1 , Function < Type1 , OutputType > converter1 , Set < Type2 > set2 , Function < Type2 , OutputType > converter2 ) { } } | return new ConvertedSetView < > ( set1 , converter1 , set2 , converter2 ) ; |
public class NativeDate { /** * # string _ id _ map # */
@ Override protected int findPrototypeId ( String s ) { } } | int id ; // # generated # Last update : 2009-07-22 05:44:02 EST
L0 : { id = 0 ; String X = null ; int c ; L : switch ( s . length ( ) ) { case 6 : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getDay" ; id = Id_getDay ; } else if ( c == 't' ) { X = "toJSON" ; id = Id_toJSON ; } break L ; case 7 : switch ( s . charAt ( 3 ) ) { case 'D' : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getDate" ; id = Id_getDate ; } else if ( c == 's' ) { X = "setDate" ; id = Id_setDate ; } break L ; case 'T' : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getTime" ; id = Id_getTime ; } else if ( c == 's' ) { X = "setTime" ; id = Id_setTime ; } break L ; case 'Y' : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getYear" ; id = Id_getYear ; } else if ( c == 's' ) { X = "setYear" ; id = Id_setYear ; } break L ; case 'u' : X = "valueOf" ; id = Id_valueOf ; break L ; } break L ; case 8 : switch ( s . charAt ( 3 ) ) { case 'H' : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getHours" ; id = Id_getHours ; } else if ( c == 's' ) { X = "setHours" ; id = Id_setHours ; } break L ; case 'M' : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getMonth" ; id = Id_getMonth ; } else if ( c == 's' ) { X = "setMonth" ; id = Id_setMonth ; } break L ; case 'o' : X = "toSource" ; id = Id_toSource ; break L ; case 't' : X = "toString" ; id = Id_toString ; break L ; } break L ; case 9 : X = "getUTCDay" ; id = Id_getUTCDay ; break L ; case 10 : c = s . charAt ( 3 ) ; if ( c == 'M' ) { c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getMinutes" ; id = Id_getMinutes ; } else if ( c == 's' ) { X = "setMinutes" ; id = Id_setMinutes ; } } else if ( c == 'S' ) { c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getSeconds" ; id = Id_getSeconds ; } else if ( c == 's' ) { X = "setSeconds" ; id = Id_setSeconds ; } } else if ( c == 'U' ) { c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getUTCDate" ; id = Id_getUTCDate ; } else if ( c == 's' ) { X = "setUTCDate" ; id = Id_setUTCDate ; } } break L ; case 11 : switch ( s . charAt ( 3 ) ) { case 'F' : c = s . 
charAt ( 0 ) ; if ( c == 'g' ) { X = "getFullYear" ; id = Id_getFullYear ; } else if ( c == 's' ) { X = "setFullYear" ; id = Id_setFullYear ; } break L ; case 'M' : X = "toGMTString" ; id = Id_toGMTString ; break L ; case 'S' : X = "toISOString" ; id = Id_toISOString ; break L ; case 'T' : X = "toUTCString" ; id = Id_toUTCString ; break L ; case 'U' : c = s . charAt ( 0 ) ; if ( c == 'g' ) { c = s . charAt ( 9 ) ; if ( c == 'r' ) { X = "getUTCHours" ; id = Id_getUTCHours ; } else if ( c == 't' ) { X = "getUTCMonth" ; id = Id_getUTCMonth ; } } else if ( c == 's' ) { c = s . charAt ( 9 ) ; if ( c == 'r' ) { X = "setUTCHours" ; id = Id_setUTCHours ; } else if ( c == 't' ) { X = "setUTCMonth" ; id = Id_setUTCMonth ; } } break L ; case 's' : X = "constructor" ; id = Id_constructor ; break L ; } break L ; case 12 : c = s . charAt ( 2 ) ; if ( c == 'D' ) { X = "toDateString" ; id = Id_toDateString ; } else if ( c == 'T' ) { X = "toTimeString" ; id = Id_toTimeString ; } break L ; case 13 : c = s . charAt ( 0 ) ; if ( c == 'g' ) { c = s . charAt ( 6 ) ; if ( c == 'M' ) { X = "getUTCMinutes" ; id = Id_getUTCMinutes ; } else if ( c == 'S' ) { X = "getUTCSeconds" ; id = Id_getUTCSeconds ; } } else if ( c == 's' ) { c = s . charAt ( 6 ) ; if ( c == 'M' ) { X = "setUTCMinutes" ; id = Id_setUTCMinutes ; } else if ( c == 'S' ) { X = "setUTCSeconds" ; id = Id_setUTCSeconds ; } } break L ; case 14 : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getUTCFullYear" ; id = Id_getUTCFullYear ; } else if ( c == 's' ) { X = "setUTCFullYear" ; id = Id_setUTCFullYear ; } else if ( c == 't' ) { X = "toLocaleString" ; id = Id_toLocaleString ; } break L ; case 15 : c = s . charAt ( 0 ) ; if ( c == 'g' ) { X = "getMilliseconds" ; id = Id_getMilliseconds ; } else if ( c == 's' ) { X = "setMilliseconds" ; id = Id_setMilliseconds ; } break L ; case 17 : X = "getTimezoneOffset" ; id = Id_getTimezoneOffset ; break L ; case 18 : c = s . 
charAt ( 0 ) ; if ( c == 'g' ) { X = "getUTCMilliseconds" ; id = Id_getUTCMilliseconds ; } else if ( c == 's' ) { X = "setUTCMilliseconds" ; id = Id_setUTCMilliseconds ; } else if ( c == 't' ) { c = s . charAt ( 8 ) ; if ( c == 'D' ) { X = "toLocaleDateString" ; id = Id_toLocaleDateString ; } else if ( c == 'T' ) { X = "toLocaleTimeString" ; id = Id_toLocaleTimeString ; } } break L ; } if ( X != null && X != s && ! X . equals ( s ) ) id = 0 ; break L0 ; } // # / generated #
return id ; |
public class AmazonNeptuneClient { /** * Returns the default engine and system parameter information for the specified database engine .
* @ param describeEngineDefaultParametersRequest
* @ return Result of the DescribeEngineDefaultParameters operation returned by the service .
* @ sample AmazonNeptune . DescribeEngineDefaultParameters
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / neptune - 2014-10-31 / DescribeEngineDefaultParameters "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public EngineDefaults describeEngineDefaultParameters ( DescribeEngineDefaultParametersRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeEngineDefaultParameters ( request ) ; |
public class ErrorBuilder { /** * create an error description for a nullability warning
* @ param errorMessage the error message object .
* @ param suggestTree the location at which a fix suggestion should be made
* @ param descriptionBuilder the description builder for the error .
* @ return the error description */
public Description createErrorDescription ( ErrorMessage errorMessage , @ Nullable Tree suggestTree , Description . Builder descriptionBuilder ) { } } | Description . Builder builder = descriptionBuilder . setMessage ( errorMessage . message ) ; if ( config . suggestSuppressions ( ) && suggestTree != null ) { switch ( errorMessage . messageType ) { case DEREFERENCE_NULLABLE : case RETURN_NULLABLE : case PASS_NULLABLE : case ASSIGN_FIELD_NULLABLE : case SWITCH_EXPRESSION_NULLABLE : if ( config . getCastToNonNullMethod ( ) != null ) { builder = addCastToNonNullFix ( suggestTree , builder ) ; } else { builder = addSuppressWarningsFix ( suggestTree , builder , suppressionName ) ; } break ; case CAST_TO_NONNULL_ARG_NONNULL : builder = removeCastToNonNullFix ( suggestTree , builder ) ; break ; case WRONG_OVERRIDE_RETURN : builder = addSuppressWarningsFix ( suggestTree , builder , suppressionName ) ; break ; case WRONG_OVERRIDE_PARAM : builder = addSuppressWarningsFix ( suggestTree , builder , suppressionName ) ; break ; case METHOD_NO_INIT : case FIELD_NO_INIT : builder = addSuppressWarningsFix ( suggestTree , builder , INITIALIZATION_CHECK_NAME ) ; break ; case ANNOTATION_VALUE_INVALID : break ; default : builder = addSuppressWarningsFix ( suggestTree , builder , suppressionName ) ; } } // # letbuildersbuild
return builder . build ( ) ; |
public class A_CmsConfiguredHtmlParser { /** * Returns the result of subsequent parsing to the & lt ; cms : parse & lt ; tag implementation . < p >
* @ param encoding the encoding to use for parsing
* @ param html the html content to parse
* @ param noAutoCloseTags a list of upper case tag names for which parsing / visiting should not correct missing closing tags .
* @ return the result of subsequent parsing to the & lt ; cms : parse & lt ; tag implementation
* @ throws ParserException if something goes wrong at parsing
* @ throws CmsException if something goes wrong at accessing OpenCms core functionality */
public String doParse ( String html , String encoding , List < String > noAutoCloseTags ) throws ParserException , CmsException { } } | m_visitor = createVisitorInstance ( ) ; m_visitor . setNoAutoCloseTags ( noAutoCloseTags ) ; String result = "" ; m_visitor . process ( html , encoding ) ; result = m_visitor . getResult ( ) ; return result ; |
public class DocumentCursorTools { /** * Returns all Paragraphs of Document without footnotes etc .
* Returns null if it fails */
@ Nullable List < String > getAllTextParagraphs ( ) { } } | try { List < String > allParas = new ArrayList < > ( ) ; if ( xPCursor == null ) { return null ; } xPCursor . gotoStart ( false ) ; xPCursor . gotoStartOfParagraph ( false ) ; xPCursor . gotoEndOfParagraph ( true ) ; allParas . add ( xPCursor . getString ( ) ) ; while ( xPCursor . gotoNextParagraph ( false ) ) { xPCursor . gotoStartOfParagraph ( false ) ; xPCursor . gotoEndOfParagraph ( true ) ; allParas . add ( xPCursor . getString ( ) ) ; } return allParas ; } catch ( Throwable t ) { MessageHandler . printException ( t ) ; // all Exceptions thrown by UnoRuntime . queryInterface are caught
return null ; // Return null as method failed
} |
public class Interpreter { /** * Evaluate the inputstream in this interpreter ' s global namespace . */
public Object eval ( Reader in ) throws EvalError { } } | return eval ( in , globalNameSpace , null == sourceFileInfo ? "eval stream" : sourceFileInfo ) ; |
public class UtilizationByTime { /** * The groups that this utilization result uses .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setGroups ( java . util . Collection ) } or { @ link # withGroups ( java . util . Collection ) } if you want to override the
* existing values .
* @ param groups
* The groups that this utilization result uses .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UtilizationByTime withGroups ( ReservationUtilizationGroup ... groups ) { } } | if ( this . groups == null ) { setGroups ( new java . util . ArrayList < ReservationUtilizationGroup > ( groups . length ) ) ; } for ( ReservationUtilizationGroup ele : groups ) { this . groups . add ( ele ) ; } return this ; |
public class HttpContext { /** * / * response - thread local set - cookies - > client */
public void writeTo ( HttpServletResponse response ) { } } | long time = System . currentTimeMillis ( ) ; long processTime = time - contextTsCurrent . get ( ) ; long processTimeTotal = contextMsProcess . get ( ) + processTime ; long networkTimeTotal = contextMsNetwork . get ( ) ; StringBuilder cookiesDump = new StringBuilder ( ) ; for ( Entry < String , Cookie > entry : responseCookies . get ( ) . entrySet ( ) ) { javax . servlet . http . Cookie c = buildServletCookie ( entry . getKey ( ) , entry . getValue ( ) ) ; response . addCookie ( c ) ; cookiesDump . append ( c . toString ( ) ) . append ( ";" ) ; } if ( verbose ) log . info ( "<<< Time total " + String . valueOf ( time - contextTsStart . get ( ) ) + " (+" + processTime + "p " + processTimeTotal + "p " + networkTimeTotal + "n)" + " [" + contextUrl . get ( ) + "]" + ". Set-Cookie to client: " + cookiesDump . toString ( ) ) ; |
public class StreamUtil { /** * Recursively copy a file or directory to a directory . */
public static void copy ( Path fileOrDirectory , Path toDir ) { } } | Path copy = PathUtil . create ( toDir , PathUtil . getName ( fileOrDirectory ) ) ; if ( PathUtil . isDirectory ( fileOrDirectory ) ) { // noinspection ResultOfMethodCallIgnored
try { PathUtil . mkdir ( copy ) ; Files . list ( fileOrDirectory ) . forEach ( child -> copy ( child , copy ) ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } else { // noinspection ResultOfMethodCallIgnored
try ( InputStream is = new BufferedInputStream ( Files . newInputStream ( fileOrDirectory ) ) ; OutputStream os = new BufferedOutputStream ( Files . newOutputStream ( copy ) ) ) { StreamUtil . copy ( is , os ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } |
public class TreeMultiMap { /** * { @ inheritDoc } */
public boolean put ( K key , V value ) { } } | Set < V > values = map . get ( key ) ; if ( values == null ) { values = new HashSet < V > ( ) ; map . put ( key , values ) ; } boolean added = values . add ( value ) ; if ( added ) { range ++ ; if ( parent != null ) { parent . updateParentRange ( 1 ) ; } } return added ; |
public class TableFactoryUtil { /** * Returns a table sink matching the descriptor . */
public static < T > TableSink < T > findAndCreateTableSink ( Descriptor descriptor ) { } } | Map < String , String > properties = descriptor . toProperties ( ) ; TableSink tableSink ; try { tableSink = TableFactoryService . find ( TableSinkFactory . class , properties ) . createTableSink ( properties ) ; } catch ( Throwable t ) { throw new TableException ( "findAndCreateTableSink failed." , t ) ; } return tableSink ; |
public class ListSecurityProfilesResult { /** * An array of < code > SecurityProfileSummary < / code > objects .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSecurityProfileSummaryList ( java . util . Collection ) } or
* { @ link # withSecurityProfileSummaryList ( java . util . Collection ) } if you want to override the existing values .
* @ param securityProfileSummaryList
* An array of < code > SecurityProfileSummary < / code > objects .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListSecurityProfilesResult withSecurityProfileSummaryList ( SecurityProfileSummary ... securityProfileSummaryList ) { } } | if ( this . securityProfileSummaryList == null ) { setSecurityProfileSummaryList ( new java . util . ArrayList < SecurityProfileSummary > ( securityProfileSummaryList . length ) ) ; } for ( SecurityProfileSummary ele : securityProfileSummaryList ) { this . securityProfileSummaryList . add ( ele ) ; } return this ; |
public class ServerDnsAliasesInner { /** * Gets a server DNS alias .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server that the alias is pointing to .
* @ param dnsAliasName The name of the server DNS alias .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ServerDnsAliasInner object */
public Observable < ServerDnsAliasInner > getAsync ( String resourceGroupName , String serverName , String dnsAliasName ) { } } | return getWithServiceResponseAsync ( resourceGroupName , serverName , dnsAliasName ) . map ( new Func1 < ServiceResponse < ServerDnsAliasInner > , ServerDnsAliasInner > ( ) { @ Override public ServerDnsAliasInner call ( ServiceResponse < ServerDnsAliasInner > response ) { return response . body ( ) ; } } ) ; |
public class CmsSelectWidgetOption { /** * Returns the default option from the given list of select options ,
* or < code > null < / code > in case there is no default option in the given list . < p >
* If an element found in the given list is not of type
* < code > { @ link CmsSelectWidgetOption } < / code > , this is ignored . < p >
* @ param options the list of select options to get the default from
* @ return the default option from the given list of select options , or < code > null < / code > in case there is no default option */
public static CmsSelectWidgetOption getDefaultOption ( List < CmsSelectWidgetOption > options ) { } } | if ( ( options == null ) || ( options . size ( ) == 0 ) ) { return null ; } for ( int i = 0 ; i < options . size ( ) ; i ++ ) { Object o = options . get ( i ) ; if ( o instanceof CmsSelectWidgetOption ) { CmsSelectWidgetOption option = ( CmsSelectWidgetOption ) o ; if ( option . isDefault ( ) ) { return option ; } } } return null ; |
public class Operand { /** * Gets the adGroupCriterionLabel value for this Operand .
* @ return adGroupCriterionLabel */
public com . google . api . ads . adwords . axis . v201809 . cm . AdGroupCriterionLabel getAdGroupCriterionLabel ( ) { } } | return adGroupCriterionLabel ; |
public class FatCatAligner { /** * run AFP chaining allowing up to maxTra flexible regions .
* Input is original coordinates . */
private static Group [ ] chainAfp ( FatCatParameters params , AFPChain afpChain , Atom [ ] ca1 , Atom [ ] ca2 ) throws StructureException { } } | // we don ; t want to rotate input atoms , do we ?
Atom [ ] ca2clone = StructureTools . cloneAtomArray ( ca2 ) ; List < AFP > afpSet = afpChain . getAfpSet ( ) ; if ( debug ) System . out . println ( "entering chainAfp" ) ; int afpNum = afpSet . size ( ) ; if ( afpNum < 1 ) return new Group [ 0 ] ; long bgtime = System . currentTimeMillis ( ) ; if ( debug ) { System . out . println ( String . format ( "total AFP %d\n" , afpNum ) ) ; } // run AFP chaining
AFPChainer . doChainAfp ( params , afpChain , ca1 , ca2 ) ; int afpChainLen = afpChain . getAfpChainLen ( ) ; if ( afpChainLen < 1 ) { afpChain . setShortAlign ( true ) ; return new Group [ 0 ] ; } // very short alignment
long chaintime = System . currentTimeMillis ( ) ; if ( debug ) { System . out . println ( "Afp chaining: time " + ( chaintime - bgtime ) ) ; } // do post processing
AFPPostProcessor . postProcess ( params , afpChain , ca1 , ca2 ) ; // Optimize the final alignment
AFPOptimizer . optimizeAln ( params , afpChain , ca1 , ca2 ) ; AFPOptimizer . blockInfo ( afpChain ) ; AFPOptimizer . updateScore ( params , afpChain ) ; AFPAlignmentDisplay . getAlign ( afpChain , ca1 , ca2 ) ; Group [ ] twistedPDB = AFPTwister . twistPDB ( afpChain , ca1 , ca2clone ) ; SigEva sig = new SigEva ( ) ; double probability = sig . calSigAll ( params , afpChain ) ; afpChain . setProbability ( probability ) ; double normAlignScore = sig . calNS ( params , afpChain ) ; afpChain . setNormAlignScore ( normAlignScore ) ; double tmScore = AFPChainScorer . getTMScore ( afpChain , ca1 , ca2 , false ) ; afpChain . setTMScore ( tmScore ) ; /* SIGEVA sig ;
probability = sig . calSigAll ( maxTra , sparse , pro1Len , pro2Len , alignScore , totalRmsdOpt , optLength , blockNum - 1 ) ;
normAlignScore = sig . calNS ( pro1Len , pro2Len , alignScore , totalRmsdOpt , optLength , blockNum - 1 ) ; */
// if ( maxTra = = 0 ) probability = sig . calSigRigid ( pro1Len , pro2Len , alignScore , totalRmsdOpt , optLength ) ;
// else probability = sig . calSigFlexi ( pro1Len , pro2Len , alignScore , totalRmsdOpt , optLength , blockNum - 1 ) ;
if ( debug ) { long nowtime = System . currentTimeMillis ( ) ; long diff = nowtime - chaintime ; System . out . println ( "Alignment optimization: time " + diff ) ; System . out . println ( "score: " + afpChain . getAlignScore ( ) ) ; System . out . println ( "opt length: " + afpChain . getOptLength ( ) ) ; System . out . println ( "opt rmsd: " + afpChain . getTotalRmsdOpt ( ) ) ; } return twistedPDB ; |
public class JoinNode { /** * Apply implied transitive constant filter to join expressions
* outer . partkey = ? and outer . partkey = inner . partkey is equivalent to
* outer . partkey = ? and inner . partkey = ?
* @ param innerTableExprs inner table expressions
* @ param outerTableExprs outer table expressions
* @ param innerOuterTableExprs inner - outer tables expressions */
protected static void applyTransitiveEquivalence ( List < AbstractExpression > outerTableExprs , List < AbstractExpression > innerTableExprs , List < AbstractExpression > innerOuterTableExprs ) { } } | List < AbstractExpression > simplifiedOuterExprs = applyTransitiveEquivalence ( innerTableExprs , innerOuterTableExprs ) ; List < AbstractExpression > simplifiedInnerExprs = applyTransitiveEquivalence ( outerTableExprs , innerOuterTableExprs ) ; outerTableExprs . addAll ( simplifiedOuterExprs ) ; innerTableExprs . addAll ( simplifiedInnerExprs ) ; |
public class InboundNatRulesInner { /** * Gets all the inbound nat rules in a load balancer .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; InboundNatRuleInner & gt ; object */
public Observable < Page < InboundNatRuleInner > > listNextAsync ( final String nextPageLink ) { } } | return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < InboundNatRuleInner > > , Page < InboundNatRuleInner > > ( ) { @ Override public Page < InboundNatRuleInner > call ( ServiceResponse < Page < InboundNatRuleInner > > response ) { return response . body ( ) ; } } ) ; |
public class VirtualNetworkGatewayConnectionsInner { /** * Updates a virtual network gateway connection tags .
* @ param resourceGroupName The name of the resource group .
* @ param virtualNetworkGatewayConnectionName The name of the virtual network gateway connection .
* @ param tags Resource tags .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the VirtualNetworkGatewayConnectionListEntityInner object if successful . */
public VirtualNetworkGatewayConnectionListEntityInner beginUpdateTags ( String resourceGroupName , String virtualNetworkGatewayConnectionName , Map < String , String > tags ) { } } | return beginUpdateTagsWithServiceResponseAsync ( resourceGroupName , virtualNetworkGatewayConnectionName , tags ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class ResponseMessage { /** * @ see javax . servlet . ServletResponse # setLocale ( java . util . Locale ) */
@ Override public void setLocale ( Locale inLocale ) { } } | if ( isCommitted ( ) || null == inLocale ) { return ; } this . locale = inLocale ; if ( null != this . outWriter || null != this . encoding ) { return ; } EncodingUtils encodingUtils = connection . getEncodingUtils ( ) ; this . encoding = encodingUtils . getEncodingFromLocale ( inLocale ) ; if ( null == this . encoding ) { this . encoding = encodingUtils . getDefaultEncoding ( ) ; } if ( null != this . contentType ) { int index = this . contentType . indexOf ( "charset=" ) ; StringBuilder sb = new StringBuilder ( ) ; if ( - 1 != index ) { sb . append ( this . contentType . substring ( 0 , index - 1 ) . trim ( ) ) ; } if ( this . encoding != null ) { sb . append ( ";charset=" ) . append ( this . encoding ) ; } this . response . setHeader ( "Content-Type" , sb . toString ( ) ) ; } |
public class JavaCompiler { /** * Parses a list of files . */
public List < JCCompilationUnit > parseFiles ( Iterable < JavaFileObject > fileObjects ) { } } | if ( shouldStop ( CompileState . PARSE ) ) return List . nil ( ) ; // parse all files
ListBuffer < JCCompilationUnit > trees = new ListBuffer < > ( ) ; Set < JavaFileObject > filesSoFar = new HashSet < > ( ) ; for ( JavaFileObject fileObject : fileObjects ) { if ( ! filesSoFar . contains ( fileObject ) ) { filesSoFar . add ( fileObject ) ; trees . append ( parse ( fileObject ) ) ; } } return trees . toList ( ) ; |
public class StreamsUtils { /** * < p > Generates a stream by taking one element at a time from each of the provided streams , and transforming them
* using the provided bifunction .
* < p > Example : < / p >
* < pre > { @ code
* Stream < String > stream0 = Stream . of ( " a " , " b " , " c " , " d " ) ;
* Stream < Integer > stream1 = Stream . of ( 0 , 1 , 2 , 3 ) ;
* Bifunction < String , Integer , String > zipper = ( s , i ) - > s + " - " + i ;
* Stream < String > zippingStream = StreamsUtils . zip ( stream0 , stream1 , zipper ) ;
* List < String > collect = zippingStream . collect ( Collectors . toList ( ) ) ;
* / / The collect list is [ " a - 0 " , " b - 1 " , " c - 2 " , " d - 3 " ]
* } < / pre >
* < p > The characteristics of the returned spliterator is the bitwise < code > AND < / code > of the characteristics of
* the provided streams . Those streams should have the same characteristics , so there will be no change on
* this point . < / p >
* < p > The returned stream will stop producing elements as soon as one of the provided stream stops to do so .
* So some of the elements of the provided streams might not be consumed . < / p >
* < p > A < code > NullPointerException < / code > will be thrown if the < code > zipper < / code > generates a null value . So
* the returned stream is guaranteed not to have null values . < / p >
* < p > In case you cannot be sure that your zipper returns < code > null < / code > , then you can provide a
* < code > zipper < / code > than wraps its result in an < code > Optional < / code > ( using the
* < code > Optional . ofNullable ( ) < / code > factory method ) , and flat map the returned stream . Your nulls will then
* be silently removed from the stream . < / p >
* @ param stream1 The first stream to be zipped . Will throw a < code > NullPointerException < / code > if < code > null < / code > .
* @ param stream2 The second stream to be zipped . Will throw a < code > NullPointerException < / code > if < code > null < / code > .
* @ param zipper The bifunction used to transform the elements of the two streams .
* Will throw a < code > NullPointerException < / code > if < code > null < / code > .
* @ param < E1 > The type of the elements of the first provided stream .
* @ param < E2 > The type of the elements of the second provided stream .
* @ param < R > The type of the elements of the returned stream .
* @ return A zipped stream . */
public static < E1 , E2 , R > Stream < R > zip ( Stream < E1 > stream1 , Stream < E2 > stream2 , BiFunction < ? super E1 , ? super E2 , ? extends R > zipper ) { } } | Objects . requireNonNull ( stream1 ) ; Objects . requireNonNull ( stream2 ) ; Objects . requireNonNull ( zipper ) ; ZippingSpliterator . Builder < E1 , E2 , R > builder = new ZippingSpliterator . Builder < > ( ) ; ZippingSpliterator < E1 , E2 , R > spliterator = builder . with ( stream1 . spliterator ( ) ) . and ( stream2 . spliterator ( ) ) . mergedBy ( zipper ) . build ( ) ; return StreamSupport . stream ( spliterator , stream1 . isParallel ( ) && stream2 . isParallel ( ) ) . onClose ( ( ) -> { stream1 . close ( ) ; stream2 . close ( ) ; } ) ; |
public class Pcg32 {
    /**
     * Returns the next pseudorandom, approximately uniformly distributed {@code int}
     * value from this random number generator's sequence. The general contract of
     * {@code nextInt} is that one {@code int} value is pseudorandomly generated and
     * returned. All 2<sup>32</sup> possible {@code int} values are produced with
     * (approximately) equal probability.
     *
     * @see java.util.Random#nextInt()
     */
    @Override
    public int nextInt() {
        // Advance the internal state with a plain LCG step; the output below is
        // derived from the PRE-advance state.
        long oldState = state;
        state = oldState * MULTIPLIER + inc;
        // Output permutation: xorshift of the high bits, then a data-dependent
        // rotation. The shift constants (18, 27, 59) match the PCG-XSH-RR output
        // function of the reference pcg32 generator.
        int xorShifted = (int) (((oldState >>> 18) ^ oldState) >>> 27);
        // Top 5 bits of the old state select the rotation amount (0-31).
        int rot = (int) (oldState >>> 59);
        return Integer.rotateRight(xorShifted, rot);
    }
}
public class Bits { /** * A { @ link BitReader } that sources its bits from an array of bytes . Bits are
* read from the byte array starting at index zero . Within each byte , the
* most significant bits are read first .
* @ param bytes
* the source bytes
* @ param size
* the number of bits that may be read , not negative and no
* greater than the number of bits supplied by the array
* @ return a bit reader over the bytes */
public static BitReader readerFrom ( byte [ ] bytes , long size ) { } } | if ( bytes == null ) throw new IllegalArgumentException ( "null bytes" ) ; checkSize ( size , ( ( long ) bytes . length ) << 3 ) ; return new ByteArrayBitReader ( bytes , size ) ; |
public class QuadTreeNode {
    /**
     * Set the second child of this node (the north-east zone).
     *
     * @param newChild is the new child for the second zone
     * @return <code>true</code> on success, otherwise <code>false</code>
     */
    public boolean setSecondChild(N newChild) {
        final N oldChild = this.nNorthEast;
        // Same reference: nothing to do, report failure as per contract.
        if (oldChild == newChild) {
            return false;
        }
        // Detach the old child first: clear its parent link, keep the non-null
        // child counter in sync, and notify listeners of the removal.
        if (oldChild != null) {
            oldChild.setParentNodeReference(null, true);
            --this.notNullChildCount;
            firePropertyChildRemoved(1, oldChild);
        }
        // If the new child currently belongs to another parent, detach it there
        // before adopting it here.
        if (newChild != null) {
            final N oldParent = newChild.getParentNode();
            if (oldParent != this) {
                newChild.removeFromParent();
            }
        }
        this.nNorthEast = newChild;
        // Attach the new child: set its parent link, update the counter, and
        // notify listeners of the addition (index 1 = second zone).
        if (newChild != null) {
            newChild.setParentNodeReference(toN(), true);
            ++this.notNullChildCount;
            firePropertyChildAdded(1, newChild);
        }
        return true;
    }
}
public class ImageExtensions { /** * Gets the buffered image from the given byte array quietly .
* @ param byteArray
* the byte array
* @ return the buffered image or null if the read process failed . */
public static BufferedImage readQuietly ( final byte [ ] byteArray ) { } } | BufferedImage img = null ; try { img = read ( byteArray ) ; } catch ( IOException e ) { log . log ( Level . SEVERE , "Reading image failed." , e ) ; } return img ; |
public class RelationalOperations { /** * Returns true if the relation holds */
private static boolean polygonRelateMultiPoint_ ( Polygon polygon_a , MultiPoint multipoint_b , double tolerance , int relation , ProgressTracker progress_tracker ) { } } | switch ( relation ) { case Relation . disjoint : return polygonDisjointMultiPoint_ ( polygon_a , multipoint_b , tolerance , true , progress_tracker ) ; case Relation . contains : return polygonContainsMultiPoint_ ( polygon_a , multipoint_b , tolerance , progress_tracker ) ; case Relation . touches : return polygonTouchesMultiPoint_ ( polygon_a , multipoint_b , tolerance , progress_tracker ) ; case Relation . crosses : return polygonCrossesMultiPoint_ ( polygon_a , multipoint_b , tolerance , progress_tracker ) ; default : break ; // warning fix
} return false ; |
public class DataSetClientServices {
    /**
     * Process the specified data set lookup request. The lookup is resolved locally when
     * the data set is already present in the client; otherwise it is resolved remotely,
     * optionally pushing the whole data set to the client first.
     *
     * @param request The data set lookup request
     * @param listener Receives the resulting data set, a not-found notification, or an error
     * @throws Exception If there is an unexpected error trying to execute the lookup request.
     */
    public void lookupDataSet(final DataSetLookup request, final DataSetReadyCallback listener) throws Exception {
        // Look always into the client data set manager first.
        if (clientDataSetManager.getDataSet(request.getDataSetUUID()) != null) {
            DataSet dataSet = clientDataSetManager.lookupDataSet(request);
            listener.callback(dataSet);
        }
        // If the data set is not in the client, then look it up remotely
        // (only if the remote access is available).
        else if (dataSetLookupServices != null) {
            // First of all, get the target data set's estimated size.
            fetchMetadata(request.getDataSetUUID(), new DataSetMetadataCallback() {
                public void callback(DataSetMetadata metatada) {
                    // Push the data set to the client if and only if the push feature is
                    // enabled, the data set is pushable, and the data set is smaller than
                    // the configured max push size.
                    DataSetDef dsetDef = metatada.getDefinition();
                    // NOTE(review): division by 1000 suggests the estimated size is in
                    // bytes and the push max size in KB — confirm against DataSetDef.
                    int estimatedSize = metatada.getEstimatedSize() / 1000;
                    boolean isPushable = dsetDef != null && dsetDef.isPushEnabled()
                            && estimatedSize < dsetDef.getPushMaxSize();
                    if (pushRemoteDataSetEnabled && isPushable) {
                        // Check if a push is already in progress.
                        // (Necessary to avoid repeating multiple push requests over the
                        // same data set.)
                        DataSetPushHandler pushHandler = pushRequestMap.get(request.getDataSetUUID());
                        if (pushHandler == null) {
                            // Create a push handler and send the full-data-set lookup
                            // request to the server.
                            pushHandler = new DataSetPushHandler(metatada);
                            DataSetLookup lookupSourceDataSet = new DataSetLookup(request.getDataSetUUID());
                            _lookupDataSet(lookupSourceDataSet, pushHandler);
                        }
                        // Register the original lookup request into the current handler
                        // so it is answered once the push completes.
                        pushHandler.registerLookup(request, listener);
                    }
                    // Lookup the remote data set directly otherwise.
                    else {
                        _lookupDataSet(request, listener);
                    }
                }

                // Data set metadata not found on the server.
                public void notFound() {
                    listener.notFound();
                }

                @Override
                public boolean onError(final ClientRuntimeError error) {
                    return listener.onError(error);
                }
            });
        }
        // Data set not found on the client and no remote services available.
        else {
            listener.notFound();
        }
    }
}
public class FileDownloadSerialQueue { /** * Attempts to stop the working task , halts the processing of waiting tasks , and returns a list
* of the tasks that were awaiting execution . These tasks are drained ( removed ) from the task
* queue upon return from this method . */
public List < BaseDownloadTask > shutdown ( ) { } } | DownloadTask [ ] tasks = serialQueue . shutdown ( ) ; List < BaseDownloadTask > notRunningTasks = new ArrayList < > ( ) ; for ( DownloadTask task : tasks ) { final DownloadTaskAdapter notRunningTask = FileDownloadUtils . findDownloadTaskAdapter ( task ) ; if ( notRunningTask != null ) { notRunningTasks . add ( notRunningTask ) ; FileDownloadList . getImpl ( ) . remove ( notRunningTask ) ; } } return notRunningTasks ; |
public class BaseFilterQueryBuilder {
    /**
     * Add a Field Search Condition that will search a field for a specified value using
     * the following SQL logic: {@code field = 'value'}.
     *
     * @param propertyName The name of the field as defined in the Entity mapping class.
     * @param value The value to search against.
     */
    protected void addEqualsCondition(final String propertyName, final String value) {
        // Resolve the attribute path from the query root, view it as a String
        // expression, and delegate to the expression-based overload.
        addEqualsCondition(getRootPath().get(propertyName).as(String.class), value);
    }
}
public class PDTWebDateHelper { /** * Parses a Date out of a String with a date in W3C date - time format or in a
* RFC822 format .
* @ param sDate
* string to parse for a date .
* @ return the Date represented by the given W3C date - time string . It returns
* < b > null < / b > if it was not possible to parse the given string into a
* Date . */
@ Nullable public static LocalDateTime getLocalDateTimeFromW3COrRFC822 ( @ Nullable final String sDate ) { } } | final ZonedDateTime aDateTime = getDateTimeFromW3COrRFC822 ( sDate ) ; return aDateTime == null ? null : aDateTime . toLocalDateTime ( ) ; |
public class FeaturePack { /** * Retrieves all subsystems included in the feature pack config files .
* @ return
* @ throws IOException
* @ throws XMLStreamException */
public Set < String > getSubsystems ( ) throws IOException , XMLStreamException { } } | final Set < String > result = new HashSet < > ( ) ; for ( ConfigFile configFile : description . getConfig ( ) . getDomainConfigFiles ( ) ) { for ( Map < String , SubsystemConfig > subsystems : configFile . getSubsystemConfigs ( featurePackFile ) . values ( ) ) { result . addAll ( subsystems . keySet ( ) ) ; } } for ( ConfigFile configFile : description . getConfig ( ) . getStandaloneConfigFiles ( ) ) { for ( Map < String , SubsystemConfig > subsystems : configFile . getSubsystemConfigs ( featurePackFile ) . values ( ) ) { result . addAll ( subsystems . keySet ( ) ) ; } } for ( ConfigFile configFile : description . getConfig ( ) . getHostConfigFiles ( ) ) { for ( Map < String , SubsystemConfig > subsystems : configFile . getSubsystemConfigs ( featurePackFile ) . values ( ) ) { result . addAll ( subsystems . keySet ( ) ) ; } } return result ; |
public class RetrievalWorker { /** * Checks to see if the checksums of the local file and remote file match */
protected boolean checksumsMatch ( File localFile , String remoteChecksum ) throws IOException { } } | if ( remoteChecksum == null || "" . equals ( remoteChecksum ) ) { if ( contentStream != null ) { remoteChecksum = contentStream . getChecksum ( ) ; } else { remoteChecksum = source . getSourceChecksum ( contentItem ) ; } } String localChecksum = getChecksum ( localFile ) ; return localChecksum . equals ( remoteChecksum ) ; |
public class AWSSimpleSystemsManagementClient { /** * Stop an Automation that is currently running .
* @ param stopAutomationExecutionRequest
* @ return Result of the StopAutomationExecution operation returned by the service .
* @ throws AutomationExecutionNotFoundException
* There is no automation execution information for the requested automation execution ID .
* @ throws InvalidAutomationStatusUpdateException
* The specified update status operation is not valid .
* @ throws InternalServerErrorException
* An error occurred on the server side .
* @ sample AWSSimpleSystemsManagement . StopAutomationExecution
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ssm - 2014-11-06 / StopAutomationExecution " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public StopAutomationExecutionResult stopAutomationExecution ( StopAutomationExecutionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeStopAutomationExecution ( request ) ; |
public class CollUtil {
    /**
     * Creates a new LinkedHashSet containing the given elements (insertion order preserved).
     *
     * @param <T> element type of the set
     * @param ts elements to add (varargs)
     * @return a new LinkedHashSet holding the elements
     * @since 4.1.10
     */
    @SafeVarargs
    public static <T> LinkedHashSet<T> newLinkedHashSet(T... ts) {
        // Delegate to the shared factory; the boolean flag requests the sorted/linked
        // variant, so the cast to LinkedHashSet is safe here.
        return (LinkedHashSet<T>) newHashSet(true, ts);
    }
}
public class EclipselinkIntrospection { /** * ClassDescriptor */
private void processClassDescriptorCollection ( final Object classDescriptorCollection ) { } } | if ( classDescriptorCollection == null || ! isCastable ( "java.util.Collection" , classDescriptorCollection . getClass ( ) ) ) { return ; } final Collection < ? > c = ( Collection < ? > ) classDescriptorCollection ; for ( Object descriptor : c ) { processClassDescriptorObject ( descriptor ) ; } |
public class EC2TagSetMarshaller { /** * Marshall the given parameter object . */
public void marshall ( EC2TagSet eC2TagSet , ProtocolMarshaller protocolMarshaller ) { } } | if ( eC2TagSet == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( eC2TagSet . getEc2TagSetList ( ) , EC2TAGSETLIST_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class LengthValidator { /** * The validate method */
@ Override public ValidationResult validate ( PMContext ctx ) { } } | final ValidationResult res = new ValidationResult ( ) ; final Object object = ctx . getFieldValue ( ) ; final String fieldId = ctx . getField ( ) . getId ( ) ; if ( object instanceof String ) { String fieldvalue = ( String ) object ; res . setSuccessful ( true ) ; Integer len = fieldvalue . length ( ) ; Integer maxl = getInt ( "max-length" ) ; if ( maxl != null ) { if ( len > maxl ) { res . setSuccessful ( false ) ; res . getMessages ( ) . add ( MessageFactory . error ( ctx . getEntity ( ) , ctx . getField ( ) , get ( "max-length-msg" , "pm_core.validator.toolong" ) , fieldId , len . toString ( ) , maxl . toString ( ) ) ) ; } } Integer minl = getInt ( "min-length" ) ; if ( minl != null ) { if ( len < minl ) { res . setSuccessful ( false ) ; res . getMessages ( ) . add ( MessageFactory . error ( ctx . getEntity ( ) , ctx . getField ( ) , get ( "min-length-msg" , "pm_core.validator.tooshort" ) , fieldId , len . toString ( ) , minl . toString ( ) ) ) ; } } } else { res . setSuccessful ( false ) ; res . getMessages ( ) . add ( MessageFactory . error ( ctx . getEntity ( ) , ctx . getField ( ) , "pm_core.validator.fieldnotstring" , fieldId ) ) ; } return res ; |
public class BaseLayer { /** * Get the updater for the given parameter . Typically the same updater will be used for all updaters , but this is
* not necessarily the case
* @ param paramName Parameter name
* @ return IUpdater for the parameter */
@ Override public IUpdater getUpdaterByParam ( String paramName ) { } } | if ( biasUpdater != null && initializer ( ) . isBiasParam ( this , paramName ) ) { return biasUpdater ; } return iUpdater ; |
public class PBS { /** * PBS tracejob command .
* Equivalent to tracejob - n [ numberOfDays ] [ jobId ]
* @ param jobId job id
* @ param numberOfDays number of days to look for the job
* @ param quiet quiet mode flag
* @ return tracejob output */
public static CommandOutput traceJob ( String jobId , int numberOfDays , boolean quiet ) { } } | final CommandLine cmdLine = new CommandLine ( COMMAND_TRACEJOB ) ; cmdLine . addArgument ( PARAMETER_NUMBER_OF_DAYS ) ; cmdLine . addArgument ( Integer . toString ( numberOfDays ) ) ; if ( quiet ) { cmdLine . addArgument ( PARAMETER_QUIET_MODE ) ; } cmdLine . addArgument ( jobId ) ; final OutputStream out = new ByteArrayOutputStream ( ) ; final OutputStream err = new ByteArrayOutputStream ( ) ; DefaultExecuteResultHandler resultHandler ; try { resultHandler = execute ( cmdLine , null , out , err ) ; resultHandler . waitFor ( DEFAULT_TIMEOUT ) ; } catch ( ExecuteException e ) { throw new PBSException ( "Failed to execute tracejob command: " + e . getMessage ( ) , e ) ; } catch ( IOException e ) { throw new PBSException ( "Failed to execute tracejob command: " + e . getMessage ( ) , e ) ; } catch ( InterruptedException e ) { throw new PBSException ( "Failed to execute tracejob command: " + e . getMessage ( ) , e ) ; } final int exitValue = resultHandler . getExitValue ( ) ; LOGGER . info ( "tracejob exit value: " + exitValue ) ; LOGGER . fine ( "tracejob output: " + out . toString ( ) ) ; return new CommandOutput ( out . toString ( ) , err . toString ( ) ) ; |
public class HivePartitionFinder { /** * Will find all datasets according to whitelist , except the backup , trash and staging tables . */
@ Override public List < HivePartitionDataset > findDatasets ( ) throws IOException { } } | List < HivePartitionDataset > list = new ArrayList < > ( ) ; for ( HiveDataset hiveDataset : this . hiveDatasets ) { for ( Partition partition : hiveDataset . getPartitionsFromDataset ( ) ) { list . add ( new HivePartitionDataset ( partition ) ) ; } } String selectionPolicyString = this . state . getProp ( ComplianceConfigurationKeys . DATASET_SELECTION_POLICY_CLASS , ComplianceConfigurationKeys . DEFAULT_DATASET_SELECTION_POLICY_CLASS ) ; Policy < HivePartitionDataset > selectionPolicy = GobblinConstructorUtils . invokeConstructor ( Policy . class , selectionPolicyString ) ; return selectionPolicy . selectedList ( list ) ; |
public class vpnvserver_binding { /** * Use this API to fetch vpnvserver _ binding resource of given name . */
public static vpnvserver_binding get ( nitro_service service , String name ) throws Exception { } } | vpnvserver_binding obj = new vpnvserver_binding ( ) ; obj . set_name ( name ) ; vpnvserver_binding response = ( vpnvserver_binding ) obj . get_resource ( service ) ; return response ; |
public class HtmlSanitizerUtil { /** * Create a Policy from a named local resource .
* @ param resourceName the path to AntiSamy policy file
* @ return the AntiSamy Policy */
public static Policy createPolicy ( final String resourceName ) { } } | if ( StringUtils . isBlank ( resourceName ) ) { throw new SystemException ( "AntiSamy Policy resourceName cannot be null " ) ; } URL resource = HtmlSanitizerUtil . class . getClassLoader ( ) . getResource ( resourceName ) ; if ( resource == null ) { throw new SystemException ( "Could not find AntiSamy Policy XML resource." ) ; } try { return Policy . getInstance ( resource ) ; } catch ( PolicyException ex ) { throw new SystemException ( "Could not create AntiSamy Policy" + ex . getMessage ( ) , ex ) ; } |
public class ObjToIntMap {
    /**
     * If the table already contains a key that equals keyArg, return that key while
     * setting its value to zero; otherwise add keyArg with value 0 to the table and
     * return it.
     *
     * @param keyArg the key to intern; null is supported via an internal sentinel
     * @return the interned key, or null if keyArg was null
     */
    public Object intern(Object keyArg) {
        // Null keys are represented internally by a unique sentinel object so the
        // hash table never stores an actual null.
        boolean nullKey = false;
        if (keyArg == null) {
            nullKey = true;
            keyArg = UniqueTag.NULL_VALUE;
        }
        // ensureIndex locates the existing slot or allocates a new one for the key.
        int index = ensureIndex(keyArg);
        values[index] = 0;
        // Translate the sentinel back to null for the caller.
        return (nullKey) ? null : keys[index];
    }
}
public class PlayRecordContext { /** * Played when all data entry attempts have succeeded .
* Consists of one or more audio segments . No default .
* @ return The array of audio prompts . Array will be empty if none is specified . */
private String [ ] getSuccessAnnouncementSegments ( ) { } } | String value = Optional . fromNullable ( getParameter ( SignalParameters . SUCCESS_ANNOUNCEMENT . symbol ( ) ) ) . or ( "" ) ; return value . isEmpty ( ) ? new String [ 0 ] : value . split ( "," ) ; |
public class Gen {
    /**
     * Generate code for a class definition.
     *
     * @param env The attribution environment that belongs to the outermost class
     *            containing this class definition. We need this for resolving some
     *            additional symbols.
     * @param cdef The tree representing the class definition.
     * @return True if code is generated with no errors.
     */
    public boolean genClass(Env<AttrContext> env, JCClassDecl cdef) {
        try {
            attrEnv = env;
            ClassSymbol c = cdef.sym;
            this.toplevel = env.toplevel;
            this.endPosTable = toplevel.endPositions;
            c.pool = pool;
            pool.reset();
            /* method normalizeDefs() can add references to external classes into the constant pool */
            cdef.defs = normalizeDefs(cdef.defs, c);
            generateReferencesToPrunedTree(c, pool);
            // Local code-generation environment rooted at this class definition.
            Env<GenContext> localEnv = new Env<>(cdef, new GenContext());
            localEnv.toplevel = env.toplevel;
            localEnv.enclClass = cdef;
            // Generate code for every member definition of the class.
            for (List<JCTree> l = cdef.defs; l.nonEmpty(); l = l.tail) {
                genDef(l.head, localEnv);
            }
            // The class file format caps the constant pool size; report overflow.
            if (pool.numEntries() > Pool.MAX_ENTRIES) {
                log.error(cdef.pos(), "limit.pool");
                nerrs++;
            }
            if (nerrs != 0) {
                // if errors, discard the generated code of every method
                for (List<JCTree> l = cdef.defs; l.nonEmpty(); l = l.tail) {
                    if (l.head.hasTag(METHODDEF))
                        ((JCMethodDecl) l.head).sym.code = null;
                }
            }
            cdef.defs = List.nil(); // discard trees
            return nerrs == 0;
        } finally {
            // note: this method does NOT support recursion.
            // Reset all per-invocation state so a later call starts clean.
            attrEnv = null;
            this.env = null;
            toplevel = null;
            endPosTable = null;
            nerrs = 0;
        }
    }
}
public class MRCompactorJobRunner { /** * Data should be published if : ( 1 ) this . policy = = { @ link Policy # DO _ PUBLISH _ DATA } ; ( 2 ) either
* compaction . abort . upon . new . data = false , or no new data is found in the input folder since jobStartTime . */
private boolean shouldPublishData ( DateTime jobStartTime ) throws IOException { } } | if ( this . policy != Policy . DO_PUBLISH_DATA ) { return false ; } if ( ! this . dataset . jobProps ( ) . getPropAsBoolean ( COMPACTION_JOB_ABORT_UPON_NEW_DATA , DEFAULT_COMPACTION_JOB_ABORT_UPON_NEW_DATA ) ) { return true ; } for ( Path inputPath : getInputPaths ( ) ) { if ( findNewDataSinceCompactionStarted ( inputPath , jobStartTime ) ) { return false ; } } return true ; |
public class VaultConfig {
    /**
     * In the 3.0 xsd the vault configuration and its options are part of the vault xsd.
     *
     * @param reader the reader positioned at the vault element
     * @param expectedNs the namespace
     * @return the vault configuration
     * @throws XMLStreamException if an unexpected attribute is found, if 'module' is
     *         given without 'code', or if the nested options cannot be parsed
     */
    static VaultConfig readVaultElement_3_0(XMLExtendedStreamReader reader, Namespace expectedNs) throws XMLStreamException {
        final VaultConfig config = new VaultConfig();
        // Read the optional 'code' and 'module' attributes; anything else is an error.
        final int count = reader.getAttributeCount();
        for (int i = 0; i < count; i++) {
            final String value = reader.getAttributeValue(i);
            String name = reader.getAttributeLocalName(i);
            if (name.equals(CODE)) {
                config.code = value;
            } else if (name.equals(MODULE)) {
                config.module = value;
            } else {
                unexpectedVaultAttribute(reader.getAttributeLocalName(i), reader);
            }
        }
        // 'module' only makes sense together with 'code' (the class to load from it).
        if (config.code == null && config.module != null) {
            throw new XMLStreamException("Attribute 'module' was specified without an attribute" + " 'code' for element '" + VAULT + "' at " + reader.getLocation());
        }
        // The nested vault-option elements are read into the same config object.
        readVaultOptions(reader, config);
        return config;
    }
}
public class RequestCollapser {
    /**
     * Called from RequestVariable.shutdown() to unschedule the collapser's task.
     * Shuts down any in-flight batch and clears the timer listener registration.
     */
    public void shutdown() {
        // Atomically detach the current batch so no new requests join it, then
        // shut it down if one existed.
        RequestBatch<BatchReturnType, ResponseType, RequestArgumentType> currentBatch = batch.getAndSet(null);
        if (currentBatch != null) {
            currentBatch.shutdown();
        }
        if (timerListenerReference.get() != null) {
            // if the timer was started we'll clear it so it stops ticking
            // NOTE(review): get() is called twice; if the reference could be cleared
            // concurrently between the calls this would NPE — confirm whether shutdown
            // is single-threaded here.
            timerListenerReference.get().clear();
        }
    }
}
public class BitmapUtils { /** * Gets a { @ link Bitmap } from a { @ link Uri } . Resizes the image to a determined width and height .
* @ param uri The { @ link Uri } from which the image is obtained .
* @ param maxWidth The maximum width of the image used to scale it . If null , the image won ' t be scaled
* @ param maxHeight The maximum height of the image used to scale it . If null , the image won ' t be scaled
* @ return { @ link Bitmap } The resized image . */
public static Bitmap toBitmap ( Uri uri , Integer maxWidth , Integer maxHeight ) { } } | try { Context context = AbstractApplication . get ( ) ; // First decode with inJustDecodeBounds = true to check dimensions
Options options = new Options ( ) ; options . inJustDecodeBounds = true ; InputStream openInputStream = context . getContentResolver ( ) . openInputStream ( uri ) ; BitmapFactory . decodeStream ( openInputStream , null , options ) ; openInputStream . close ( ) ; // Calculate inSampleSize
if ( ( maxWidth != null ) && ( maxHeight != null ) ) { float scale = Math . min ( maxWidth . floatValue ( ) / options . outWidth , maxHeight . floatValue ( ) / options . outHeight ) ; options . inSampleSize = Math . round ( 1 / scale ) ; } // Decode bitmap with inSampleSize set
openInputStream = context . getContentResolver ( ) . openInputStream ( uri ) ; options . inJustDecodeBounds = false ; Bitmap result = BitmapFactory . decodeStream ( openInputStream , null , options ) ; openInputStream . close ( ) ; return result ; } catch ( Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; return null ; } |
public class Util { /** * Creates a new buffer and fills it with bytes from the
* provided channel . The amount of data to read is specified
* in the arguments .
* @ param in the channel to read from
* @ param size the number of bytes to read into a new buffer
* @ return a new buffer containing the bytes read
* @ throws IOException if an IO error occurs */
public static ByteBuffer fill ( ReadableByteChannel in , int size ) throws IOException { } } | return fill ( in , ByteBuffer . allocate ( size ) ) ; |
public class UndoRedoStack {
    /**
     * Pops the last command from the redo stack, puts it on the undo stack and calls its
     * {@link Command#redo()}-method. If no command is found on the redo stack, the last
     * command from the undo stack is redone instead.
     *
     * @return The command that was redone. <b>Its redo method was already called!</b>
     */
    public Command redoNext() {
        Command nextCommand;
        if (redoStack.size() > 0) {
            nextCommand = redoStack.pop();
        } else {
            // redo the last command that was done
            nextCommand = undoStack.pop();
            // put it again on the stack
            undoStack.push(nextCommand);
            // NOTE(review): combined with the unconditional push below, this branch
            // leaves the command on the undo stack TWICE — presumably intentional so
            // the repeated execution can be undone separately; TODO confirm.
        }
        undoStack.push(nextCommand);
        nextCommand.redo();
        return nextCommand;
    }
}
public class AggregationQueryAction { /** * 判断某个字段名称是否是别名 */
private boolean isAliasFiled ( String filedName ) { } } | if ( select . getFields ( ) . size ( ) > 0 ) { for ( Field field : select . getFields ( ) ) { if ( null != field . getAlias ( ) && field . getAlias ( ) . equals ( filedName ) ) { return true ; } } } return false ; |
public class ConnectionPropertyFileScannerPlugin { /** * Load a class , e . g . the JDBC driver .
* @ param driver
* The class name . */
private void loadDriver ( String driver ) throws IOException { } } | if ( driver != null ) { try { Class . forName ( driver ) ; } catch ( ClassNotFoundException e ) { throw new IOException ( driver + " cannot be loaded, skipping scan of schema." , e ) ; } } |
public class XMLConfiguration { /** * Fetches the specified property . This task is delegated to the associated
* expression engine .
* @ param key the key to be looked up
* @ return the found value */
@ Override public List < Object > getProperty ( String key ) { } } | List < ? > nodes = fetchNodeList ( key ) ; if ( nodes . size ( ) == 0 ) { return null ; } else { List < Object > list = new ArrayList < > ( ) ; for ( Object node : nodes ) { ConfigurationNode configurationNode = ( ConfigurationNode ) node ; if ( configurationNode . getValue ( ) != null ) { list . add ( configurationNode . getValue ( ) ) ; } } if ( list . size ( ) < 1 ) { return null ; } else { return list ; } } |
public class AccountACL { /** * Checks if a player can deposit money
* @ param name The player name
* @ return True if the player can deposit money , else false */
public boolean canDeposit ( String name ) { } } | if ( getParent ( ) . ignoreACL ( ) ) { return true ; } String newName = name . toLowerCase ( ) ; boolean result = false ; if ( aclList . containsKey ( newName ) ) { result = aclList . get ( newName ) . canDeposit ( ) ; } return result ; |
public class DatagraphEnvironment { /** * Sets the specified instance for the given key ( instance type ) .
* @ param type instance type
* @ param value instance
* @ param < T > type of the instance
* @ return this environment */
@ SuppressWarnings ( "unchecked" ) public < T > DatagraphEnvironment setInstance ( Class < T > type , T value ) { } } | ( ( Map < Class < T > , T > ) instances ) . put ( type , value ) ; return this ; |
public class Props { /** * Recursive Clone function of Props
* @ param source the source Props object
* @ return the cloned Props object */
private static Props copyNext ( final Props source ) { } } | Props priorNodeCopy = null ; if ( source . getParent ( ) != null ) { priorNodeCopy = copyNext ( source . getParent ( ) ) ; } final Props dest = new Props ( priorNodeCopy ) ; for ( final String key : source . localKeySet ( ) ) { dest . put ( key , source . get ( key ) ) ; } return dest ; |
public class ManagedDatabaseVulnerabilityAssessmentScansInner { /** * Convert an existing scan result to a human readable format . If already exists nothing happens .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param managedInstanceName The name of the managed instance .
* @ param databaseName The name of the scanned database .
* @ param scanId The vulnerability assessment scan Id .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < DatabaseVulnerabilityAssessmentScansExportInner > exportAsync ( String resourceGroupName , String managedInstanceName , String databaseName , String scanId , final ServiceCallback < DatabaseVulnerabilityAssessmentScansExportInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( exportWithServiceResponseAsync ( resourceGroupName , managedInstanceName , databaseName , scanId ) , serviceCallback ) ; |
public class CassandraClient { /** * Execute cql query asynchronously
* @ param query CQL query
* @ return ResultSetFuture */
public ResultSetFuture executeAsync ( @ NotNull String keyspace , @ NotNull String query ) { } } | Timer time = getMetricsFactory ( ) . getTimer ( MetricsType . CASSANDRA_EXECUTE_ASYNC . name ( ) ) ; getMetricsFactory ( ) . getCounter ( MetricsType . CASSANDRA_PROCESSING_QUERIES . name ( ) ) . inc ( ) ; ResultSetFuture resultSetFuture = getOrCreateSession ( keyspace ) . executeAsync ( query ) ; Futures . addCallback ( resultSetFuture , new StatementExecutionCallback ( keyspace , query ) ) ; monitorFuture ( time , resultSetFuture ) ; return resultSetFuture ; |
public class DomainValidationMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param domainValidation   the model object to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each field
     */
    public void marshall(DomainValidation domainValidation, ProtocolMarshaller protocolMarshaller) {
        if (domainValidation == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each member of the model under its protocol binding.
            protocolMarshaller.marshall(domainValidation.getDomainName(), DOMAINNAME_BINDING);
            protocolMarshaller.marshall(domainValidation.getValidationEmails(), VALIDATIONEMAILS_BINDING);
            protocolMarshaller.marshall(domainValidation.getValidationDomain(), VALIDATIONDOMAIN_BINDING);
            protocolMarshaller.marshall(domainValidation.getValidationStatus(), VALIDATIONSTATUS_BINDING);
            protocolMarshaller.marshall(domainValidation.getResourceRecord(), RESOURCERECORD_BINDING);
            protocolMarshaller.marshall(domainValidation.getValidationMethod(), VALIDATIONMETHOD_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original cause for diagnosis.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MessageItemReference {
    /*
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.interfaces.ControllableResource#createControlAdapter()
     *
     * Creates the control adapter for this message reference when it belongs
     * to a pub/sub message item stream. Any failure is FFDC-logged and
     * swallowed (best effort); the adapter is then simply not created.
     */
    @Override
    public void createControlAdapter() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createControlAdapter");
        DestinationHandler dh = null;
        try {
            ReferenceStream rs = getReferenceStream();
            // Only message reference streams backed by a pub/sub item stream
            // get a QueuedMessage control adapter.
            if (rs instanceof MessageReferenceStream) {
                ItemStream is = rs.getItemStream();
                if (is instanceof PubSubMessageItemStream) {
                    dh = ((PubSubMessageItemStream) is).getDestinationHandler();
                    controlAdapter = new QueuedMessage(this, dh, rs);
                }
            }
        } catch (Exception e) {
            // FFDC: record the failure for serviceability, then continue.
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.store.items.MessageItemReference.createControlAdapter", "1:1266:1.147", this);
            SibTr.exception(tc, e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createControlAdapter");
    }
}
public class SingleDbJDBCConnection {
    /**
     * {@inheritDoc}
     *
     * Executes the workspace-data-size query for this container. The prepared
     * statement is created lazily on first use and cached in a field for
     * subsequent calls.
     */
    protected ResultSet findWorkspaceDataSize() throws SQLException {
        if (findWorkspaceDataSize == null) {
            findWorkspaceDataSize = dbConnection.prepareStatement(FIND_WORKSPACE_DATA_SIZE);
        }
        // Single bind parameter: the workspace (container) name.
        findWorkspaceDataSize.setString(1, containerConfig.containerName);
        return findWorkspaceDataSize.executeQuery();
    }
}
public class Force { /** * Set directions .
* @ param fh The horizontal direction .
* @ param fv The vertical direction . */
public void setDirection ( double fh , double fv ) { } } | fhLast = fh ; fvLast = fv ; this . fh = fh ; this . fv = fv ; fixForce ( ) ; |
public class MiniSatBackbone { /** * Adds an arbitrary formula to the solver .
* @ param formula the formula */
public void add ( final Formula formula ) { } } | final Formula cnf = formula . cnf ( ) ; switch ( cnf . type ( ) ) { case TRUE : break ; case FALSE : case LITERAL : case OR : this . addClause ( generateClauseVector ( cnf ) , null ) ; break ; case AND : for ( final Formula op : cnf ) { this . addClause ( generateClauseVector ( op ) , null ) ; } break ; default : throw new IllegalStateException ( "Unexpected formula type in CNF: " + cnf . type ( ) ) ; } |
public class ManualDescriptor {
    /**
     * Setter for geneSymbolList - sets GeneSymbolList in PubMed.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setGeneSymbolList(StringArray v) {
        // Generated UIMA JCas boilerplate: verify the feature exists in the
        // type system before touching the low-level CAS.
        if (ManualDescriptor_Type.featOkTst && ((ManualDescriptor_Type) jcasType).casFeat_geneSymbolList == null)
            jcasType.jcas.throwFeatMissing("geneSymbolList", "de.julielab.jules.types.pubmed.ManualDescriptor");
        // Store the FS reference for the array into this annotation's feature slot.
        jcasType.ll_cas.ll_setRefValue(addr, ((ManualDescriptor_Type) jcasType).casFeatCode_geneSymbolList, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class BoundedBuffer { /** * F743-12896 - Start */
protected synchronized boolean cancel ( Object x ) { } } | // First check the expedited buffer
synchronized ( lock ) { if ( expeditedPutIndex > expeditedTakeIndex ) { for ( int i = expeditedTakeIndex ; i < expeditedPutIndex ; i ++ ) { if ( expeditedBuffer [ i ] == x ) { System . arraycopy ( expeditedBuffer , i + 1 , expeditedBuffer , i , expeditedPutIndex - i - 1 ) ; expeditedPutIndex -- ; expeditedBuffer [ expeditedPutIndex ] = null ; numberOfUsedExpeditedSlots . getAndDecrement ( ) ; // D615053
return true ; } } } else if ( expeditedPutIndex != expeditedTakeIndex || expeditedBuffer [ expeditedTakeIndex ] != null ) { for ( int i = expeditedTakeIndex ; i < buffer . length ; i ++ ) { if ( expeditedBuffer [ i ] == x ) { if ( i != expeditedBuffer . length - 1 ) { System . arraycopy ( expeditedBuffer , i + 1 , expeditedBuffer , i , expeditedBuffer . length - i - 1 ) ; } if ( expeditedPutIndex != 0 ) { expeditedBuffer [ expeditedBuffer . length - 1 ] = expeditedBuffer [ 0 ] ; System . arraycopy ( expeditedBuffer , 1 , expeditedBuffer , 0 , expeditedPutIndex - 1 ) ; expeditedPutIndex -- ; } else { expeditedPutIndex = expeditedBuffer . length - 1 ; } expeditedBuffer [ expeditedPutIndex ] = null ; numberOfUsedExpeditedSlots . getAndDecrement ( ) ; // D615053
return true ; } } // D610567 - Scan first section of expedited BoundedBuffer
for ( int i = 0 ; i < expeditedPutIndex ; i ++ ) { if ( expeditedBuffer [ i ] == x ) { System . arraycopy ( expeditedBuffer , i + 1 , expeditedBuffer , i , expeditedPutIndex - i - 1 ) ; expeditedPutIndex -- ; expeditedBuffer [ expeditedPutIndex ] = null ; numberOfUsedExpeditedSlots . getAndDecrement ( ) ; // D615053
return true ; } } } // Next check the main buffer
if ( putIndex > takeIndex ) { for ( int i = takeIndex ; i < putIndex ; i ++ ) { if ( buffer [ i ] == x ) { System . arraycopy ( buffer , i + 1 , buffer , i , putIndex - i - 1 ) ; putIndex -- ; buffer [ putIndex ] = null ; numberOfUsedSlots . getAndDecrement ( ) ; // D615053
return true ; } } } else if ( putIndex != takeIndex || buffer [ takeIndex ] != null ) { for ( int i = takeIndex ; i < buffer . length ; i ++ ) { if ( buffer [ i ] == x ) { if ( i != buffer . length - 1 ) { System . arraycopy ( buffer , i + 1 , buffer , i , buffer . length - i - 1 ) ; } if ( putIndex != 0 ) { buffer [ buffer . length - 1 ] = buffer [ 0 ] ; System . arraycopy ( buffer , 1 , buffer , 0 , putIndex - 1 ) ; putIndex -- ; } else { putIndex = buffer . length - 1 ; } buffer [ putIndex ] = null ; numberOfUsedSlots . getAndDecrement ( ) ; // D615053
return true ; } } // D610567 - Scan first section of BoundedBuffer
for ( int i = 0 ; i < putIndex ; i ++ ) { if ( buffer [ i ] == x ) { System . arraycopy ( buffer , i + 1 , buffer , i , putIndex - i - 1 ) ; putIndex -- ; buffer [ putIndex ] = null ; numberOfUsedSlots . getAndDecrement ( ) ; // D615053
return true ; } } } } return false ; |
public class Util { /** * Grows the file to the specified number of bytes . This only happenes if
* the current file position is sufficiently close ( less than 4K ) to end of
* file .
* @ param f output stream to pad
* @ param currentSize application keeps track of the cuurent file size
* @ param preAllocSize how many bytes to pad
* @ return the new file size . It can be the same as currentSize if no
* padding was done .
* @ throws IOException */
public static long padLogFile ( FileOutputStream f , long currentSize , long preAllocSize ) throws IOException { } } | long position = f . getChannel ( ) . position ( ) ; if ( position + 4096 >= currentSize ) { currentSize = currentSize + preAllocSize ; fill . position ( 0 ) ; f . getChannel ( ) . write ( fill , currentSize - fill . remaining ( ) ) ; } return currentSize ; |
public class Stitch { /** * Initializes an app client for Stitch to use when using { @ link Stitch # getAppClient ( String ) } } .
* Can only be called once per client app id .
* @ param clientAppId the client app id to initialize an app client for .
* @ return the app client that was just initialized . */
public static StitchAppClient initializeAppClient ( @ Nonnull final String clientAppId ) { } } | return initializeAppClient ( clientAppId , new StitchAppClientConfiguration . Builder ( ) . build ( ) ) ; |
public class DelegatingDbSession { @ Override public < T > Cursor < T > selectCursor ( String statement ) { } } | return delegate . selectCursor ( statement ) ; |
public class AFactoryAppBeans { /** * < p > Get UtilXml in lazy mode . < / p >
* @ return UtilXml - UtilXml
* @ throws Exception - an exception */
public final UtilXml lazyGetUtilXml ( ) throws Exception { } } | String beanName = getUtilXmlName ( ) ; UtilXml utilXml = ( UtilXml ) this . beansMap . get ( beanName ) ; if ( utilXml == null ) { utilXml = new UtilXml ( ) ; this . beansMap . put ( beanName , utilXml ) ; lazyGetLogger ( ) . info ( null , AFactoryAppBeans . class , beanName + " has been created." ) ; } return utilXml ; |
public class MatchingImpl { /** * Create a concrete instance of a LikeOperator */
public Operator createLikeOperator ( Selector ar , String pattern , boolean escaped , char escape ) { } } | Object parsed = Pattern . parsePattern ( pattern , escaped , escape ) ; if ( parsed == null ) return null ; else if ( parsed == Pattern . matchMany ) return createOperator ( Selector . NOT , createOperator ( Selector . ISNULL , ar ) ) ; else if ( parsed instanceof String ) return createOperator ( Selector . EQ , ar , createLiteral ( parsed ) ) ; else return new LikeOperatorImpl ( Selector . LIKE , ar , ( Pattern ) parsed , pattern , escaped , escape ) ; |
public class CalendarPicker { /** * / * [ deutsch ]
* < p > Erzeugt einen neuen { @ code CalendarPicker } f & uuml ; r den islamischen Kalender . < / p >
* @ param variantSource the variant of the underlying islamic calendar
* @ param locale the language and country configuration
* @ param todaySupplier determines the current calendar date
* @ return CalendarPicker
* @ return CalendarPicker */
public static CalendarPicker < HijriCalendar > hijri ( VariantSource variantSource , Locale locale , Supplier < HijriCalendar > todaySupplier ) { } } | return CalendarPicker . create ( HijriCalendar . family ( ) , new FXCalendarSystemHijri ( variantSource . getVariant ( ) ) , locale , todaySupplier ) ; |
public class RemoteDomainConnectionService { /** * Resolve the subsystem versions .
* @ param extensions the extensions to install
* @ return the subsystem versions */
private ModelNode resolveSubsystems ( final List < ModelNode > extensions ) { } } | HostControllerLogger . ROOT_LOGGER . debug ( "Applying extensions provided by master" ) ; final ModelNode result = operationExecutor . installSlaveExtensions ( extensions ) ; if ( ! SUCCESS . equals ( result . get ( OUTCOME ) . asString ( ) ) ) { throw HostControllerLogger . ROOT_LOGGER . failedToAddExtensions ( result . get ( FAILURE_DESCRIPTION ) ) ; } final ModelNode subsystems = new ModelNode ( ) ; for ( final ModelNode extension : extensions ) { extensionRegistry . recordSubsystemVersions ( extension . asString ( ) , subsystems ) ; } return subsystems ; |
public class PebbleDictionary { /** * Associate the specified byte array with the provided key in the dictionary . If another key - value pair with the
* same key is already present in the dictionary , it will be replaced .
* @ param key
* key with which the specified value is associated
* @ param bytes
* value to be associated with the specified key */
public void addBytes ( int key , byte [ ] bytes ) { } } | PebbleTuple t = PebbleTuple . create ( key , PebbleTuple . TupleType . BYTES , PebbleTuple . Width . NONE , bytes ) ; addTuple ( t ) ; |
public class ArffParser {
    /**
     * Parse the "@attribute" section of the ARFF file, accumulating attribute
     * names and types until the "@data" statement is reached.
     *
     * <p>Note: the patterns (ARFF_COMMENT, EMPTY, ...) are shared Matcher
     * instances that are re-targeted per line via {@code reset(line)}, so this
     * method is not safe for concurrent use.
     *
     * @param br Input
     * @param names List (to fill) of attribute names
     * @param types List (to fill) of attribute types
     * @throws IOException on read errors; AbortException on malformed input
     */
    private void parseAttributeStatements(BufferedReader br, ArrayList<String> names, ArrayList<String> types) throws IOException {
        String line;
        // Load attribute metadata
        while (true) {
            line = br.readLine();
            // EOF before @data means the header is malformed.
            if (line == null) {
                throw new AbortException(ARFF_HEADER_DATA + " not found in file.");
            }
            // Skip comments and empty lines
            if (ARFF_COMMENT.reset(line).matches() || EMPTY.reset(line).matches()) {
                continue;
            }
            // Break on data statement to continue
            if (ARFF_HEADER_DATA.reset(line).matches()) {
                break;
            }
            // Expect an attribute specification
            Matcher matcher = ARFF_HEADER_ATTRIBUTE.reset(line);
            if (matcher.matches()) {
                String name = matcher.group(1);
                // Strip matching single or double quotes around the name.
                if (name.charAt(0) == '\'' && name.charAt(name.length() - 1) == '\'') {
                    name = name.substring(1, name.length() - 1);
                } else if (name.charAt(0) == '"' && name.charAt(name.length() - 1) == '"') {
                    name = name.substring(1, name.length() - 1);
                }
                String type = matcher.group(2);
                names.add(name);
                types.add(type);
                continue;
            }
            throw new AbortException("Unrecognized line: " + line);
        }
        // Names and types are appended in lockstep above.
        assert (names.size() == types.size());
    }
}
public class VariableInterpreter {
    /**
     * Sets a variable to page context, resolving a dotted variable path
     * (e.g. {@code scope.a.b}) segment by segment.
     *
     * @param pc pagecontext of the new variable
     * @param var String of variable definition
     * @param value value to set to variable
     * @return value setted
     * @throws PageException if the variable name is invalid or resolution fails
     */
    public static Object setVariable(PageContext pc, String var, Object value) throws PageException {
        // Parse the dotted name into an ordered segment list.
        StringList list = parse(pc, new ParserString(var), false);
        if (list == null) throw new InterpreterException("invalid variable name declaration [" + var + "]");
        // Single segment: plain variable in the undefined scope.
        if (list.size() == 1) {
            return pc.undefinedScope().set(list.next(), value);
        }
        // min 2 elements
        // First segment may name a scope; otherwise resolve it in the
        // undefined scope (note: next() consumes it, current() re-reads it).
        int scope = scopeString2Int(pc.ignoreScopes(), list.next());
        Object coll;
        if (scope == Scope.SCOPE_UNDEFINED) {
            coll = pc.touch(pc.undefinedScope(), KeyImpl.init(list.current()));
        } else {
            coll = VariableInterpreter.scope(pc, scope, true);
            // coll = pc.scope(scope);
        }
        // Walk/create all intermediate segments, leaving the final one
        // to be set with the value.
        while (list.hasNextNext()) {
            coll = pc.touch(coll, KeyImpl.init(list.next()));
        }
        return pc.set(coll, KeyImpl.init(list.next()), value);
    }
}
public class BasicScope { /** * Dispatches event ( notifies all listeners )
* @ param event Event to dispatch */
public void dispatchEvent ( IEvent event ) { } } | for ( IEventListener listener : listeners ) { if ( event . getSource ( ) == null || event . getSource ( ) != listener ) { listener . notifyEvent ( event ) ; } } |
public class WFieldSet { /** * { @ inheritDoc } */
@ Override protected void validateComponent ( final List < Diagnostic > diags ) { } } | super . validateComponent ( diags ) ; if ( isMandatory ( ) && ! hasInputWithValue ( ) ) { diags . add ( createMandatoryDiagnostic ( ) ) ; } |
public class SemanticHeadFinder { /** * This looks to see whether any of the children is a preterminal headed by a word
* which is within the set verbalSet ( which in practice is either
* auxiliary or copula verbs ) . It only returns true if it ' s a preterminal head , since
* you don ' t want to pick things up in phrasal daughters . That is an error .
* @ param kids The child trees
* @ param verbalSet The set of words
* @ return Returns true if one of the child trees is a preterminal verb headed
* by a word in verbalSet */
private boolean hasVerbalAuxiliary ( Tree [ ] kids , HashSet < String > verbalSet ) { } } | if ( DEBUG ) { System . err . println ( "Checking for verbal auxiliary" ) ; } for ( Tree kid : kids ) { if ( DEBUG ) { System . err . println ( " checking in " + kid ) ; } if ( kid . isPreTerminal ( ) ) { Label kidLabel = kid . label ( ) ; String tag = null ; if ( kidLabel instanceof HasTag ) { tag = ( ( HasTag ) kidLabel ) . tag ( ) ; } if ( tag == null ) { tag = kid . value ( ) ; } Label wordLabel = kid . firstChild ( ) . label ( ) ; String word = null ; if ( wordLabel instanceof HasWord ) { word = ( ( HasWord ) wordLabel ) . word ( ) ; } if ( word == null ) { word = wordLabel . value ( ) ; } if ( DEBUG ) { System . err . println ( "Checking " + kid . value ( ) + " head is " + word + '/' + tag ) ; } String lcWord = word . toLowerCase ( ) ; if ( verbalTags . contains ( tag ) && verbalSet . contains ( lcWord ) ) { if ( DEBUG ) { System . err . println ( "hasVerbalAuxiliary returns true" ) ; } return true ; } } } if ( DEBUG ) { System . err . println ( "hasVerbalAuxiliary returns false" ) ; } return false ; |
public class CharsetUtil { /** * ' utf8 ' COLLATE ' utf8 _ general _ ci '
* @ param charset
* @ return */
public static final String collateCharset ( String charset ) { } } | String [ ] output = StringUtils . split ( charset , "COLLATE" ) ; return output [ 0 ] . replace ( '\'' , ' ' ) . trim ( ) ; |
public class FileConvert { /** * Prints the content of an Atom object as a PDB formatted line .
* @ param a
* @ return */
public static String toPDB ( Atom a ) { } } | StringBuffer w = new StringBuffer ( ) ; toPDB ( a , w ) ; return w . toString ( ) ; |
public class BranchController { /** * Bulk update for a branch . */
@ RequestMapping ( value = "branches/{branchId}/update/bulk" , method = RequestMethod . PUT ) public Branch bulkUpdate ( @ PathVariable ID branchId , @ RequestBody BranchBulkUpdateRequest request ) { } } | // Gets the branch
Branch branch = structureService . getBranch ( branchId ) ; // Performs the update
return copyService . update ( branch , request ) ; |
public class AlphanumComparator { /** * Length of string is passed in for improved efficiency ( only need to calculate it once ) * */
private final String getChunk ( String s , int slength , int marker ) { } } | StringBuilder chunk = new StringBuilder ( ) ; char c = s . charAt ( marker ) ; chunk . append ( c ) ; marker ++ ; if ( isDigit ( c ) ) { while ( marker < slength ) { c = s . charAt ( marker ) ; if ( ! isDigit ( c ) ) break ; chunk . append ( c ) ; marker ++ ; } } else { while ( marker < slength ) { c = s . charAt ( marker ) ; if ( isDigit ( c ) ) break ; chunk . append ( c ) ; marker ++ ; } } return chunk . toString ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.