signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CmsDriverManager {

    /**
     * Returns the access control list of a given resource.<p>
     *
     * If <code>inheritedOnly</code> is set, only inherited access control entries
     * are returned.<p>
     *
     * Note: For file resources, *all* permissions set at the immediate parent folder
     * are inherited, not only these marked to inherit.
     *
     * @param dbc the current database context
     * @param resource the resource
     * @param inheritedOnly skip non-inherited entries if set
     * @return the access control list of the resource
     * @throws CmsException if something goes wrong
     */
    public CmsAccessControlList getAccessControlList(CmsDbContext dbc, CmsResource resource, boolean inheritedOnly)
        throws CmsException {

        // Delegates to the extended overload: folders pass their entries on
        // (resource.isFolder()) and the traversal depth starts at 0.
        return getAccessControlList(dbc, resource, inheritedOnly, resource.isFolder(), 0);
    }
}
public class JobOperations {

    /**
     * Lists the {@link CloudJob jobs} in the Batch account.
     *
     * @param detailLevel A {@link DetailLevel} used for filtering the list and for controlling
     *        which properties are retrieved from the service.
     * @return A list of {@link CloudJob} objects.
     * @throws BatchErrorException Exception thrown when an error response is received from the Batch service.
     * @throws IOException Exception thrown when there is an error in serialization/deserialization
     *         of data sent to/received from the Batch service.
     */
    public PagedList<CloudJob> listJobs(DetailLevel detailLevel) throws BatchErrorException, IOException {
        // Delegates to the two-argument overload with no additional behaviors (null).
        return listJobs(detailLevel, null);
    }
}
public class DeployerProxy {

    /**
     * Creates a proxied HTTP GET request to Apache Brooklyn to retrieve the Application's Entities.
     *
     * @param brooklynId of the desired application to fetch. This ID may differ from SeaClouds Application ID
     * @return List&lt;EntitySummary&gt; with all the children entities of the application
     * @throws IOException if an I/O error occurs during the request
     */
    public List<EntitySummary> getEntitiesFromApplication(String brooklynId) throws IOException {
        // Build a GET against /v1/applications/{id}/entities on the configured endpoint.
        Invocation invocation = getJerseyClient()
            .target(getEndpoint() + "/v1/applications/" + brooklynId + "/entities")
            .request()
            .buildGet();
        // GenericType keeps the List<EntitySummary> element type through deserialization.
        return invocation.invoke().readEntity(new GenericType<List<EntitySummary>>() {});
    }
}
public class CmsLocationController {

    /**
     * Called on longitude value change.<p>
     *
     * Stores the new longitude on the edited value, then refreshes the marker
     * position and the displayed address.
     *
     * @param longitude the longitude
     */
    protected void onLongitudeChange(String longitude) {
        try {
            m_editValue.setLongitude(longitude);
            updateMarkerPosition();
            updateAddress();
        } catch (Throwable t) {
            // Catch Throwable so any failure is shown in the error dialog
            // instead of propagating out of the change handler.
            CmsErrorDialog.handleException(t);
        }
    }
}
public class TemplateProcessorMojo {

    /**
     * Apply templates to <code>.ktype</code> files (single-argument).
     *
     * Files whose name contains "KType" but not "VType" are expanded once per
     * key type; files containing "KTypeVType" are expanded over the full cross
     * product of key and value types.
     *
     * @param inputs the template files to process
     * @return the generated output files
     * @throws IOException if template generation fails
     */
    private List<OutputFile> processTemplates(List<TemplateFile> inputs) throws IOException {
        List<OutputFile> outputs = new ArrayList<>();
        for (TemplateFile f : inputs) {
            String fileName = f.getFileName();
            // Single-parameter templates: KType only. A "KTypeVType" name also
            // contains "VType", so it is excluded here and handled below.
            if (!fileName.contains("VType") && fileName.contains("KType")) {
                for (Type ktype : Type.values()) {
                    TemplateOptions options = new TemplateOptions(ktype);
                    options.templateFile = f.path;
                    generate(f, outputs, options);
                }
            }
            // Two-parameter templates: every (ktype, vtype) combination.
            if (fileName.contains("KTypeVType")) {
                for (Type ktype : Type.values()) {
                    for (Type vtype : Type.values()) {
                        TemplateOptions options = new TemplateOptions(ktype, vtype);
                        options.templateFile = f.path;
                        generate(f, outputs, options);
                    }
                }
            }
        }
        return outputs;
    }
}
public class CmsDefaultLinkSubstitutionHandler {

    /**
     * Generates the cache key for Online links.
     *
     * @param cms the current CmsObject
     * @param targetSiteRoot the target site root
     * @param detailPagePart the detail page part
     * @param absoluteLink the absolute (site-relative) link to the resource
     * @return the cache key
     */
    protected String generateCacheKey(CmsObject cms, String targetSiteRoot, String detailPagePart, String absoluteLink) {
        // NOTE(review): there is no separator between detailPagePart and absoluteLink,
        // so distinct (detailPagePart, absoluteLink) pairs could in principle produce
        // the same key — confirm the detail page part always ends with a delimiter.
        return cms.getRequestContext().getSiteRoot() + ":" + targetSiteRoot + ":" + detailPagePart + absoluteLink;
    }
}
public class Window {

    /**
     * Removes a WindowListener if it is present.
     *
     * The listener is wrapped in an UnwrappedWeakReference for the removal,
     * presumably so equality matches the weakly-referenced entries stored in
     * the listener collection — confirm against UnwrappedWeakReference.equals.
     *
     * @param listener The listener to remove
     */
    public void removeListener(WindowListener<K, R, P> listener) {
        this.listeners.remove(new UnwrappedWeakReference<WindowListener<K, R, P>>(listener));
    }
}
public class XmlDocumentReader {

    /**
     * Parses XML to a W3C Document object with activated or deactivated validation.
     *
     * In case of validation, a correct header is needed. Therefore, we try to fix the
     * header before we try to parse the input. Also, an error handler is activated
     * that throws {@link SAXException} if an error occurs during the validation or
     * parsing process.
     * In case of deactivated validation, no SAXException will occur!
     *
     * @param xml in string format
     * @param validation turn validation on or off
     * @return Document of XML object.
     * @throws IOException if an IO error occurs
     * @throws SAXException only occurs when validation is on and an error during parsing the document occurred.
     * @throws IllegalArgumentException if the argument is null
     */
    public static Document parse(String xml, boolean validation) throws IOException, SAXException, IllegalArgumentException {
        // if validation is on, we need to add headers
        if (validation) {
            xml = MathDoc.fixingHeaderAndNS(xml);
        }
        InputSource src = stringToSource(xml);
        // Choose the parser matching the requested validation mode.
        if (validation) {
            return ValidationBuilder.parse(src);
        } else {
            return NoValidationBuilder.parse(src);
        }
    }
}
public class AddressMapping { /** * Parses the address declaration for tokens
* that need to be resolved against the statement context .
* @ return */
public Map < String , Integer > getRequiredStatements ( ) { } } | Map < String , Integer > required = new HashMap < String , Integer > ( ) ; for ( Token token : address ) { if ( ! token . hasKey ( ) ) { // a single token or token expression
// These are currently skipped : See asResource ( ) parsing logic
continue ; } else { // a value expression . key and value of the expression might be resolved
// TODO : keys are not supported : Do we actually need that ?
String value_ref = token . getValue ( ) ; if ( value_ref . startsWith ( "{" ) ) { value_ref = value_ref . substring ( 1 , value_ref . length ( ) - 1 ) ; if ( ! required . containsKey ( value_ref ) ) { required . put ( value_ref , 1 ) ; } else { Integer count = required . get ( value_ref ) ; ++ count ; required . put ( value_ref , count ) ; } } } } return required ; |
public class AIStream { /** * Turn a tick to L / R . Used to :
* ( 1 ) Reject a Q / G that timed out
* ( 2 ) Implement AIH . reject invoked by the RCD
* ( 3 ) Implement AIH . rolledback invoked by a dirty accepted item */
public void updateToRejected ( long tick , SendDispatcher sendDispatcher ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "updateToRejected" , Long . valueOf ( tick ) ) ; _targetStream . setCursor ( tick ) ; TickRange tr = _targetStream . getNext ( ) ; if ( tr . type == TickRange . Requested ) { writeRejected ( tick , tick , 0 ) ; sendDispatcher . sendReject ( tick , tick , 0L , false ) ; } if ( tr . type == TickRange . Value ) { long unlockCount = ( ( AIValueTick ) ( tr . value ) ) . getRMEUnlockCount ( ) ; writeRejected ( tick , tick , unlockCount ) ; sendDispatcher . sendReject ( tick , tick , unlockCount , false ) ; } else { // Can only process requested and value ticks
SIErrorException e1 = new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.gd.AIStream" , "1:972:1.108.2.18" } , null ) ) ; // FFDC
FFDCFilter . processException ( e1 , "com.ibm.ws.sib.processor.gd.AIStream.updateToRejected" , "1:979:1.108.2.18" , this ) ; SibTr . exception ( tc , e1 ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.gd.AIStream" , "1:988:1.108.2.18" } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "updateToRejected" , null ) ; throw e1 ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "updateToRejected" ) ; |
public class CreateFileExtensions { /** * Creates the parent directories from the given file .
* @ param file
* the file
* @ deprecated use instead the method
* { @ linkplain CreateFileExtensions # newDirectories ( Path , FileAttribute . . . ) } < br >
* < br >
* Note : will be removed in the next minor release */
@ Deprecated public static void newParentDirectories ( final File file ) { } } | if ( ! file . exists ( ) ) { final File parent = file . getParentFile ( ) ; if ( parent != null && ! parent . exists ( ) ) { parent . mkdirs ( ) ; } } |
public class BaseServiceMessageTransport {

    /**
     * This is the application code for handling the message. Once the message
     * is received the application can retrieve the soap part, the attachment
     * part if there are any, or any other information from the message.
     *
     * @param message The incoming message to process.
     * @return the raw external reply payload on success, or {@code null} when the
     *         internal processing returned a non-normal error code or an exception occurred
     */
    public Object processMessage(Object message) {
        Utility.getLogger().info("processMessage called in service message");
        BaseMessage msgReplyInternal = null;
        try {
            // Wrap the transport-level message into the internal representation.
            BaseMessage messageIn = new TreeMessage(null, null);
            new ServiceTrxMessageIn(messageIn, message);
            msgReplyInternal = this.processIncomingMessage(messageIn, null);
            Utility.getLogger().info("msgReplyInternal: " + msgReplyInternal);
            // Convert the internal reply to its external form; iErrorCode signals success.
            int iErrorCode = this.convertToExternal(msgReplyInternal, null);
            Utility.getLogger().info("externalMessageReply: " + msgReplyInternal);
            Object msg = null; // fac . createMessage ( ) ;
            if (iErrorCode == DBConstants.NORMAL_RETURN) {
                msg = msgReplyInternal.getExternalMessage().getRawData();
                String strTrxID = (String) msgReplyInternal.getMessageHeader().get(TrxMessageHeader.LOG_TRX_ID);
                this.logMessage(strTrxID, msgReplyInternal, MessageInfoTypeModel.REPLY,
                    MessageTypeModel.MESSAGE_OUT, MessageStatusModel.SENTOK, null, null); // Sent ( no reply required )
            }
            return msg;
        } catch (Throwable ex) {
            // NOTE(review): printStackTrace plus a null return swallows the failure;
            // consider attaching the throwable to the logger call instead.
            ex.printStackTrace();
            String strError = "Error in processing or replying to a message";
            Utility.getLogger().warning(strError);
            if (msgReplyInternal != null) {
                String strTrxID = (String) msgReplyInternal.getMessageHeader().get(TrxMessageHeader.LOG_TRX_ID);
                this.logMessage(strTrxID, msgReplyInternal, MessageInfoTypeModel.REPLY,
                    MessageTypeModel.MESSAGE_OUT, MessageStatusModel.ERROR, strError, null);
            }
            return null;
        }
    }
}
public class BigramExtractor { /** * Returns the & Chi ; < sup > 2 < / sup > score of the contingency table */
private double chiSq ( int [ ] contingencyTable ) { } } | // Rename for short - hand convenience
int [ ] t = contingencyTable ; int col1sum = t [ 0 ] + t [ 2 ] ; int col2sum = t [ 1 ] + t [ 3 ] ; int row1sum = t [ 0 ] + t [ 1 ] ; int row2sum = t [ 2 ] + t [ 3 ] ; double sum = row1sum + row2sum ; // Calculate the expected values for a , b , c , d
double aExp = ( row1sum / sum ) * col1sum ; double bExp = ( row1sum / sum ) * col2sum ; double cExp = ( row2sum / sum ) * col1sum ; double dExp = ( row2sum / sum ) * col2sum ; // Chi - squared is ( Observed - Expected ) ^ 2 / Expected
return ( ( t [ 0 ] - aExp ) * ( t [ 0 ] - aExp ) / aExp ) + ( ( t [ 1 ] - bExp ) * ( t [ 1 ] - bExp ) / bExp ) + ( ( t [ 2 ] - cExp ) * ( t [ 2 ] - cExp ) / cExp ) + ( ( t [ 3 ] - dExp ) * ( t [ 3 ] - dExp ) / dExp ) ; |
public class EntityManagerFactory {

    /**
     * Creates and return a new {@link EntityManager} using the provided JSON formatted credentials.
     *
     * @param projectId the project ID
     * @param jsonCredentialsFile the JSON formatted credentials file for the target Cloud project.
     * @return a new {@link EntityManager}
     */
    public EntityManager createEntityManager(String projectId, String jsonCredentialsFile) {
        // Delegates to the three-argument overload; the third argument is
        // left null (its meaning is defined by that overload).
        return createEntityManager(projectId, jsonCredentialsFile, null);
    }
}
public class ListOfELongImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * EMF-generated accessor: reads the LIST_OF_ELONG__LIST feature via the
     * reflective eGet API with proxy resolution enabled; the unchecked cast
     * is inherent to that API.
     *
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<Long> getList() {
        return (EList<Long>) eGet(Ifc4Package.Literals.LIST_OF_ELONG__LIST, true);
    }
}
public class BeanUtil {

    /**
     * Checks whether the class has a setter method.<br>
     * A class qualifies if it declares a public one-parameter method whose
     * name starts with "set".
     *
     * @param clazz the class under test
     * @return whether the class qualifies as a bean
     * @since 4.2.2
     */
    public static boolean hasSetter(Class<?> clazz) {
        if (ClassUtil.isNormalClass(clazz)) {
            final Method[] methods = clazz.getMethods();
            for (Method method : methods) {
                if (method.getParameterTypes().length == 1 && method.getName().startsWith("set")) {
                    // Any standard setXXX method marks the class as a JavaBean.
                    return true;
                }
            }
        }
        return false;
    }
}
public class FairSchedulerServlet {

    /**
     * Print the administration form for the MemBasedLoadManager.
     *
     * No-op unless the configured load manager is a MemBasedLoadManager.
     *
     * @param out the writer receiving the generated HTML
     * @param advancedView whether the "advanced" query parameter is propagated in links
     */
    private void showAdminFormMemBasedLoadMgr(PrintWriter out, boolean advancedView) {
        if (!(loadMgr instanceof MemBasedLoadManager)) {
            return;
        }
        out.print("<h2>Memory Based Scheduling</h2>\n");
        MemBasedLoadManager memLoadMgr = (MemBasedLoadManager) loadMgr;
        Collection<String> possibleThresholds = Arrays.asList(("0,1,2,3,4,5,6,7,8,9,10,1000").split(","));
        // Divide by 1024 and round half-up — presumably MB to GB; confirm the
        // unit of getReservedPhysicalMemoryOnTT().
        long reservedMemGB = (long) (memLoadMgr.getReservedPhysicalMemoryOnTT() / 1024D + 0.5);
        out.printf("<p>Reserve %s GB memory on one node.",
            generateSelect(possibleThresholds, "" + reservedMemGB,
                "/fairscheduler?setTtThreshold=<CHOICE>" + (advancedView ? "&advanced" : "")));
    }
}
public class XMLFilterImpl { /** * Set the value of a feature .
* < p > This will always fail if the parent is null . < / p >
* @ param name The feature name .
* @ param value The requested feature value .
* @ exception org . xml . sax . SAXNotRecognizedException If the feature
* value can ' t be assigned or retrieved from the parent .
* @ exception org . xml . sax . SAXNotSupportedException When the
* parent recognizes the feature name but
* cannot set the requested value . */
public void setFeature ( String name , boolean value ) throws SAXNotRecognizedException , SAXNotSupportedException { } } | if ( parent != null ) { parent . setFeature ( name , value ) ; } else { throw new SAXNotRecognizedException ( "Feature: " + name ) ; } |
public class DenseD2Matrix64F { @ Override public void reshape ( int numRows , int numCols ) { } } | if ( numRows <= data . length ) { this . numRows = numRows ; } else { throw new IllegalArgumentException ( "Requested number of rows is too great." ) ; } if ( numCols <= data [ 0 ] . length ) { this . numCols = numCols ; } else { throw new IllegalArgumentException ( "Requested number of columns is too great." ) ; } |
public class Database {

    /**
     * Unregisters a service instance for the given device.
     *
     * @param serviceName the service name
     * @param instanceName the service instance name
     * @param devname the device name
     * @throws DevFailed if the underlying database operation fails
     */
    public void unregisterService(String serviceName, String instanceName, String devname) throws DevFailed {
        // Delegates to the DAO layer, passing this Database as context.
        databaseDAO.unregisterService(this, serviceName, instanceName, devname);
    }
}
public class JMTimeUtil {

    /**
     * Change iso timestamp to long (epoch milliseconds).
     *
     * @param isoTimestamp the iso timestamp
     * @return the epoch-millisecond value
     */
    public static long changeIsoTimestampToLong(String isoTimestamp) {
        try {
            // Fast path: the timestamp carries an explicit zone or offset.
            return ZonedDateTime.parse(isoTimestamp).toInstant().toEpochMilli();
        } catch (Exception e) {
            // Parsing failed (e.g. no zone information): retry via the overload
            // that applies the default zone.
            return changeIsoTimestampToLong(isoTimestamp, DEFAULT_ZONE_ID);
        }
    }
}
public class SqlServerParser {

    /**
     * Copies an OrderByElement, replacing its sort expression.
     *
     * @param orig the original OrderByElement
     * @param expression the sort expression for the new OrderByElement
     * @return the copied OrderByElement
     */
    protected OrderByElement cloneOrderByElement(OrderByElement orig, Expression expression) {
        OrderByElement element = new OrderByElement();
        // Copy the ordering attributes, then install the new expression.
        element.setAsc(orig.isAsc());
        element.setAscDescPresent(orig.isAscDescPresent());
        element.setNullOrdering(orig.getNullOrdering());
        element.setExpression(expression);
        return element;
    }
}
public class CERTConverter {

    /**
     * Builds a CERT record from a Certificate.
     *
     * @param name the record owner name
     * @param dclass the record class
     * @param ttl the record TTL
     * @param cert the certificate to encode
     * @return the CERT record; the two trailing numeric arguments of the
     *         delegate are passed as 0
     */
    public static CERTRecord buildRecord(Name name, int dclass, long ttl, Certificate cert) {
        return buildRecord(name, dclass, ttl, cert, 0, 0);
    }
}
public class JSR310DeserializerBase {

    /**
     * Helper method used to peel off spurious wrappings of DateTimeException.
     *
     * Walks the cause chain as long as the cause is itself a DateTimeException.
     * The original explicit null check was redundant: {@code instanceof} is
     * already false for {@code null}.
     *
     * @param e DateTimeException to peel
     * @return DateTimeException that does not have another DateTimeException as its cause.
     */
    protected DateTimeException _peelDTE(DateTimeException e) {
        for (Throwable t = e.getCause(); t instanceof DateTimeException; t = e.getCause()) {
            e = (DateTimeException) t;
        }
        return e;
    }
}
public class TensorUtils {

    /**
     * Gets a tensor in the s semiring, where the input values are assumed to be in the reals.
     *
     * @param s the target algebra / semiring
     * @param values the real-valued vector entries
     * @return a vector tensor built in the real algebra and converted into s
     */
    public static Tensor getVectorFromReals(Algebra s, double... values) {
        // Build in the real algebra first, then convert into the requested algebra.
        Tensor t0 = getVectorFromValues(RealAlgebra.getInstance(), values);
        return t0.copyAndConvertAlgebra(s);
    }
}
public class LogManagementPluginFactory {

    /**
     * Get implementation for log management plugin.
     *
     * Inspects the SLF4J static binding's factory class name to pick between
     * the Log4j and Logback implementations.
     *
     * @return The implementation of log management plugin.
     * @throws UnsupportedOperationException if the bound logger factory is
     *         neither Log4j nor Logback
     */
    public static LogManagementPlugin getLogManagementPlugin() {
        final String loggerImpl = org.slf4j.impl.StaticLoggerBinder.getSingleton().getLoggerFactoryClassStr();
        // Compare against the simple (unqualified) class name of the bound factory.
        if ("Log4jLoggerFactory".equals(loggerImpl.substring(loggerImpl.lastIndexOf(".") + 1))) {
            return new LogManagementPluginLog4jImpl();
        } else if (loggerImpl.indexOf("logback.") > 1
            && "ContextSelectorStaticBinder".equals(loggerImpl.substring(loggerImpl.lastIndexOf(".") + 1))) {
            // NOTE(review): 'indexOf("logback.") > 1' would miss a match at index
            // 0 or 1 — confirm '>= 0' was not intended.
            return new LogManagementPluginLogbackImpl();
        }
        throw new UnsupportedOperationException();
    }
}
public class ApiOvhHostingprivateDatabase {

    /**
     * Get this object properties.
     *
     * REST: GET /hosting/privateDatabase/{serviceName}/whitelist/{ip}
     *
     * @param serviceName [required] The internal name of your private database
     * @param ip [required] The whitelisted IP in your instance
     * @return the whitelist entry for the given IP
     * @throws IOException if the HTTP call or the response conversion fails
     */
    public OvhWhitelist serviceName_whitelist_ip_GET(String serviceName, String ip) throws IOException {
        String qPath = "/hosting/privateDatabase/{serviceName}/whitelist/{ip}";
        // Substitute the template parameters, execute the GET, convert the JSON body.
        StringBuilder sb = path(qPath, serviceName, ip);
        String resp = exec(qPath, "GET", sb.toString(), null);
        return convertTo(resp, OvhWhitelist.class);
    }
}
public class IOUtils { /** * Get bytes from given input stream .
* @ param sourceInputStream Source inputStream object to be handled .
* @ return bytes from given input stream .
* @ throws IOException */
public static byte [ ] getBytes ( final InputStream sourceInputStream ) throws IOException { } } | ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream ( ) ; byte [ ] buffer = new byte [ BUFFER_SIZE ] ; for ( int len = 0 ; ( len = sourceInputStream . read ( buffer ) ) != - 1 ; ) { byteArrayOutputStream . write ( buffer , 0 , len ) ; } byte [ ] arrayOfByte = byteArrayOutputStream . toByteArray ( ) ; return arrayOfByte ; |
public class BsonDataOutput {

    /**
     * Write all content to the supplied channel.
     *
     * Buffers are drained in order; the final (partial) buffer only contributes
     * the remaining bytes. The buffer list is cleared afterwards.
     *
     * @param channel the channel to which the content is to be written.
     * @throws IOException if there is a problem writing to the supplied stream
     */
    public void writeTo(WritableByteChannel channel) throws IOException {
        int numberOfBytesToWrite = size;
        for (ByteBuffer buffer : buffers) {
            if (buffer == null) {
                // already flushed
                continue;
            }
            // Full buffers hold bufferSize bytes; the last holds the remainder.
            int numBytesInBuffer = Math.min(numberOfBytesToWrite, bufferSize);
            // position + flip establishes [0, numBytesInBuffer) as the write window.
            buffer.position(numBytesInBuffer);
            buffer.flip();
            // NOTE(review): WritableByteChannel.write may write fewer bytes than
            // remaining; this code does not loop until drained — confirm only
            // blocking channels are used here.
            channel.write(buffer);
            numberOfBytesToWrite -= numBytesInBuffer;
        }
        buffers.clear();
    }
}
public class ConfigDrivenComponentRegistry {

    /**
     * Creates the component and registers it in the registry.
     *
     * @param componentType the component type
     * @return the component
     * @throws ComponentNotFoundException when a policy tries to get a component from
     *         the context but the component doesn't exist or is otherwise not available.
     */
    public <T extends IComponent> T createAndRegisterComponent(Class<T> componentType) throws ComponentNotFoundException {
        try {
            synchronized (components) {
                // Resolve the implementation class and its configuration.
                Class<? extends T> componentClass = engineConfig.getComponentClass(componentType, pluginRegistry);
                Map<String, String> componentConfig = engineConfig.getComponentConfig(componentType);
                T component = create(componentClass, componentConfig);
                components.put(componentType, component);
                // Because components are lazily created, we need to initialize them here
                // if necessary.
                DependsOnComponents annotation = componentClass.getAnnotation(DependsOnComponents.class);
                if (annotation != null) {
                    // Inject each declared dependency through a matching setter, when present.
                    Class<? extends IComponent>[] value = annotation.value();
                    for (Class<? extends IComponent> theC : value) {
                        Method setter = ReflectionUtils.findSetter(componentClass, theC);
                        if (setter != null) {
                            IComponent injectedComponent = getComponent(theC);
                            try {
                                setter.invoke(component, new Object[] { injectedComponent });
                            } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
                                throw new RuntimeException(e);
                            }
                        }
                    }
                }
                if (component instanceof IRequiresInitialization) {
                    ((IRequiresInitialization) component).initialize();
                }
                return component;
            }
        } catch (Exception e) {
            // NOTE(review): the original cause 'e' is discarded here; if
            // ComponentNotFoundException accepts a cause, it should be attached.
            throw new ComponentNotFoundException(componentType.getName());
        }
    }
}
public class AbstractX509FileSystemStore {

    /**
     * Write a PEM like header.
     *
     * Emits the PEM_BEGIN constant, the type, and the DASHES constant on one
     * line, followed by a line break.
     *
     * @param out the output buffered writer to write to.
     * @param type the type to be written in the header.
     * @throws IOException on error.
     */
    private static void writeHeader(BufferedWriter out, String type) throws IOException {
        out.write(PEM_BEGIN + type + DASHES);
        out.newLine();
    }
}
public class ProcessInitiator { /** * Sequentially invokes the required Streaming Engine processes . */
@ Override protected void startUp ( ) { } } | try { startRTS ( ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Cannot initiate RTS. Missing or bad arguments" ) ; } try { startHFTA ( ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Cannot initiate HFTA processes. Missing or bad arguments" ) ; } try { startGSEXIT ( ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Cannot initiate GSEXIT processes. Missing or bad arguments" ) ; } |
public class SecureHash { /** * Computes the sha1 value for the given string .
* @ param string a non - null string
* @ return the SHA1 value for the given string . */
public static String sha1 ( String string ) { } } | try { byte [ ] sha1 = SecureHash . getHash ( SecureHash . Algorithm . SHA_1 , string . getBytes ( ) ) ; return SecureHash . asHexString ( sha1 ) ; } catch ( NoSuchAlgorithmException e ) { throw new SystemFailureException ( e ) ; } |
public class EndpointGroupRegistry { /** * Get the { @ link EndpointGroup } for the specified case - insensitive { @ code groupName } .
* @ return the { @ link EndpointSelector } , or { @ code null } if { @ code groupName } has not been registered yet . */
@ Nullable public static EndpointGroup get ( String groupName ) { } } | groupName = normalizeGroupName ( groupName ) ; final EndpointSelector endpointSelector = serverGroups . get ( groupName ) ; if ( endpointSelector == null ) { return null ; } return endpointSelector . group ( ) ; |
public class Request {

    /**
     * Get an Integer request parameter or null. Emit error for non-null and
     * non integer.
     *
     * @param name name of parameter
     * @param errProp error to emit
     * @return Integer value or null
     * @throws Throwable on error
     */
    public Integer getIntReqPar(final String name, final String errProp) throws Throwable {
        try {
            return super.getIntReqPar(name);
        } catch (final Throwable t) {
            // Parsing failed: report the raw parameter value under errProp and
            // treat the parameter as absent.
            getErr().emit(errProp, getReqPar(name));
            return null;
        }
    }
}
public class SceneStructureMetric { /** * Returns the number of view with parameters that are not fixed
* @ return non - fixed view count */
public int getUnknownViewCount ( ) { } } | int total = 0 ; for ( int i = 0 ; i < views . length ; i ++ ) { if ( ! views [ i ] . known ) { total ++ ; } } return total ; |
public class HealthCheckServlet {

    /** {@inheritDoc} */
    @Override
    protected void service(final HttpServletRequest request, final HttpServletResponse response)
        throws ServletException, IOException {
        // NOTE(review): the return value of findHealthService(request) is ignored;
        // presumably the call (re)initializes the healthService field used on the
        // next line — confirm, otherwise the call is dead.
        findHealthService(request);
        healthService.performHealthCheck(request, response);
    }
}
public class PurgeJmsQueuesAction {

    /**
     * Purges a queue destination.
     *
     * @param queue the queue to purge
     * @param session the JMS session used for the purge
     * @throws JMSException if reading the queue name or purging fails
     */
    private void purgeQueue(Queue queue, Session session) throws JMSException {
        // Delegates to the generic destination purge, keyed by the queue's own name.
        purgeDestination(queue, session, queue.getQueueName());
    }
}
public class Parsers {

    /**
     * We always convert {@link Iterable} to an array to avoid the cost of creating
     * a new {@link java.util.Iterator} object each time the parser runs.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Private
    static <T> Parser<T>[] toArray(Iterable<? extends Parser<? extends T>> parsers) {
        // A Collection knows its size, so the array can be allocated directly.
        if (parsers instanceof Collection<?>) {
            return toArray((Collection) parsers);
        }
        // Otherwise fall back to iterating and buffering.
        return toArrayWithIteration(parsers);
    }
}
public class AbstractIncrementalGenerator {

    /**
     * This method determines whether a {@link CachedGeneratorResult} is obsolete or can be reused.
     *
     * @param cachedGeneratorResult is the {@link CachedGeneratorResult}.
     * @param typeName is the full-qualified name of the {@link Class} to generate.
     * @return {@code true} if the {@link CachedGeneratorResult} is obsolete and has to be re-generated,
     *         {@code false} otherwise (if it can be reused).
     */
    protected boolean isCachedResultObsolete(CachedGeneratorResult cachedGeneratorResult, String typeName) {
        try {
            // Locate the .java source on the classpath.
            // NOTE(review): getResource may return null for a missing source file,
            // which would NPE below — confirm callers guarantee the source exists.
            URL javaFileUrl = Thread.currentThread().getContextClassLoader()
                .getResource(typeName.replace('.', '/') + ".java");
            String protocol = javaFileUrl.getProtocol().toLowerCase();
            if ("file".equals(protocol)) {
                // Compare the source file's modification time against the
                // timestamp of the cached generation.
                String urlString = URLDecoder.decode(javaFileUrl.getFile(), "UTF-8");
                File javaFile = new File(urlString);
                long lastModified = javaFile.lastModified();
                long timeGenerated = cachedGeneratorResult.getTimeGenerated();
                return (lastModified > timeGenerated);
            } else {
                // Only file: URLs are supported (e.g. sources packed in jars are not).
                throw new IllegalCaseException(protocol);
            }
        } catch (UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the platform; this cannot realistically happen.
            throw new IllegalStateException(e);
        }
    }
}
public class TreeView {

    /**
     * Returns {@link Indenter} that has the fixed indentation width.
     * Used for assisting view rendering.
     *
     * @param d the fixed nesting depth, given as a decimal string
     * @return an Indenter that reports the same nest level for every job
     */
    public Indenter createFixedIndenter(String d) {
        final int depth = Integer.parseInt(d);
        // Anonymous Indenter ignoring the job and always answering the fixed depth.
        return new Indenter() {
            protected int getNestLevel(Job job) {
                return depth;
            }
        };
    }
}
public class WebDriverHelper {

    /**
     * Checks for presence of the text in a html page.
     *
     * @param text the text to be searched for
     * @return true if the text is present within the page or false otherwise
     */
    public boolean isTextPresentInPage(final String text) {
        // Search the visible text of the whole <body> element (case-sensitive).
        WebElement body = driver.findElement(By.tagName("body"));
        return body.getText().contains(text);
    }
}
public class ColumnLayoutExample {

    /**
     * Add a column layout which will change its rendering on small screens.
     *
     * Builds a three-column panel (left/center/right alignment, 33% each,
     * hgap 12, vgap 18) and applies the theme's responsive HTML class.
     */
    private void addResponsiveExample() {
        add(new WHeading(HeadingLevel.H2, "Default responsive design"));
        add(new ExplanatoryText("This example applies the theme's default responsive design rules for ColumnLayout.\n "
            + "The columns have width and alignment and there is also a hgap and a vgap."));
        WPanel panel = new WPanel();
        panel.setLayout(new ColumnLayout(new int[] { 33, 33, 33 },
            new Alignment[] { Alignment.LEFT, Alignment.CENTER, Alignment.RIGHT }, 12, 18));
        // RESPOND enables the theme's responsive behavior for this panel.
        panel.setHtmlClass(HtmlClassProperties.RESPOND);
        add(panel);
        // Two rows of three boxes to demonstrate the layout.
        panel.add(new BoxComponent("Left"));
        panel.add(new BoxComponent("Center"));
        panel.add(new BoxComponent("Right"));
        panel.add(new BoxComponent("Left"));
        panel.add(new BoxComponent("Center"));
        panel.add(new BoxComponent("Right"));
    }
}
public class CoverTree { /** * Returns the max distance of the reference point p in current node to
* it ' s children nodes .
* @ param v the stack of DistanceNode objects .
* @ return the distance of the furthest child . */
private double max ( ArrayList < DistanceSet > v ) { } } | double max = 0.0 ; for ( DistanceSet n : v ) { if ( max < n . dist . get ( n . dist . size ( ) - 1 ) ) { max = n . dist . get ( n . dist . size ( ) - 1 ) ; } } return max ; |
public class CmsGroup {

    /**
     * Removes the prefix if the given String starts with {@link I_CmsPrincipal#PRINCIPAL_GROUP} followed by a dot.<p>
     *
     * <ul>
     * <li>Works if the given String is <code>null</code>.
     * <li>If the given String does not start with {@link I_CmsPrincipal#PRINCIPAL_GROUP} followed by a dot it is returned unchanged.
     * <li>Removes white spaces around the group name.
     * <li>Also works with prefixes not being in upper case.
     * <li>Does not check if the group after the prefix actually exists.
     * </ul>
     *
     * @param principalName the group name to remove the prefix from
     * @return the given String with the prefix {@link I_CmsPrincipal#PRINCIPAL_GROUP} with the following dot removed
     */
    public static String removePrefix(String principalName) {
        String result = principalName;
        // Null or blank input is returned unchanged.
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(principalName)) {
            if (hasPrefix(principalName)) {
                // Strip "<PRINCIPAL_GROUP>." — the prefix plus the dot — after trimming.
                result = principalName.trim().substring(I_CmsPrincipal.PRINCIPAL_GROUP.length() + 1);
            }
        }
        return result;
    }
}
public class AbstractTransitionBuilder {

    /**
     * Builds a {@link Transition}; the created object will not be modified when
     * the builder's modifier methods are called afterwards.
     *
     * @return the fully configured transition
     */
    public final S build() {
        S vt = createTransition();
        // Freeze the instance against subsequent builder mutations.
        markObjectAsModifiabilityFlag(vt);
        vt.setId(mId);
        if (mInterpolator != null) {
            vt.setInterpolator(mInterpolator);
        }
        if (mReverse) {
            vt.reverse();
        }
        return vt;
    }
}
public class LogOutputSpecFactory {

    /**
     * Creates the log output specification for a container.
     *
     * @param containerId the container the logs belong to
     * @param imageConfiguration the image configuration providing the log settings
     * @return the assembled LogOutputSpec
     */
    public LogOutputSpec createSpec(String containerId, ImageConfiguration imageConfiguration) {
        LogOutputSpec.Builder builder = new LogOutputSpec.Builder();
        LogConfiguration logConfig = extractLogConfiguration(imageConfiguration);
        // Apply format and prefix derived from the image's log configuration.
        addLogFormat(builder, logConfig);
        addPrefix(builder, logConfig.getPrefix(), imageConfiguration, containerId);
        builder.file(logConfig.getFileLocation())
            .useColor(useColor)
            .logStdout(logStdout)
            .color(logConfig.getColor());
        return builder.build();
    }
}
public class SearchExpression {

    /**
     * A list of search expression objects.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSubExpressions(java.util.Collection)} or {@link #withSubExpressions(java.util.Collection)} if you want
     * to override the existing values.
     *
     * @param subExpressions A list of search expression objects.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchExpression withSubExpressions(SearchExpression... subExpressions) {
        // Lazily create the backing list, presized for the incoming elements.
        if (this.subExpressions == null) {
            setSubExpressions(new java.util.ArrayList<SearchExpression>(subExpressions.length));
        }
        for (SearchExpression ele : subExpressions) {
            this.subExpressions.add(ele);
        }
        return this;
    }
}
public class AwsSecurityFindingFilters { /** * The principal that created a note .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setNoteUpdatedBy ( java . util . Collection ) } or { @ link # withNoteUpdatedBy ( java . util . Collection ) } if you want
* to override the existing values .
* @ param noteUpdatedBy
* The principal that created a note .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withNoteUpdatedBy ( StringFilter ... noteUpdatedBy ) { } } | if ( this . noteUpdatedBy == null ) { setNoteUpdatedBy ( new java . util . ArrayList < StringFilter > ( noteUpdatedBy . length ) ) ; } for ( StringFilter ele : noteUpdatedBy ) { this . noteUpdatedBy . add ( ele ) ; } return this ; |
public class SSLChannel { /** * @ see com . ibm . wsspi . channelfw . Channel # init ( ) */
@ Override public void init ( ) throws ChannelException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "init" ) ; } // Prevent duplicate initialization .
if ( this . isInitialized ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "init" ) ; } return ; } // Extract the channel properties .
try { Properties channelProps = getConfig ( ) . getProperties ( ) ; // Handle a potentially null property map .
if ( channelProps != null ) { this . alias = channelProps . getProperty ( SSLChannelData . ALIAS_KEY ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { if ( this . alias != null ) { Tr . debug ( tc , "Found alias in SSL properties, " + this . alias ) ; } else { Tr . debug ( tc , "No alias found in SSL properties" ) ; } } // PI52696 - Timeout value for which the SSL closing handshake loop will attempt to complete final handshake
// write before giving up .
String timeoutValueInSSLClosingHandshake = channelProps . getProperty ( SSLChannelConstants . TIMEOUT_VALUE_IN_SSL_CLOSING_HANDSHAKE ) ; if ( timeoutValueInSSLClosingHandshake != null ) { this . timeoutValueInSSLClosingHandshake = Integer . parseInt ( timeoutValueInSSLClosingHandshake ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Found timeoutValueInSSLClosingHandshake in SSL properties, " + this . timeoutValueInSSLClosingHandshake ) ; } } // Check for system property so all SSL Channels can have the property enabled
// APAR PI70332 - Add Java custom property to allow this property to be set on all inbound / outbound channels
// without need of SSL configuration .
String timeoutValueSystemProperty = AccessController . doPrivileged ( new java . security . PrivilegedAction < String > ( ) { @ Override public String run ( ) { return ( System . getProperty ( SSLChannelConstants . TIMEOUT_VALUE_IN_SSL_CLOSING_HANDSHAKE ) ) ; } } ) ; if ( timeoutValueSystemProperty != null ) { this . timeoutValueInSSLClosingHandshake = Integer . parseInt ( timeoutValueSystemProperty ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Found timeoutValueInSSLClosingHandshake in SSL system properties, " + this . timeoutValueInSSLClosingHandshake ) ; } } String protocolVersion = channelProps . getProperty ( SSLChannelConstants . PROPNAME_PROTOCOL_VERSION ) ; if ( protocolVersion != null ) { if ( SSLChannelConstants . PROTOCOL_VERSION_11 . equalsIgnoreCase ( protocolVersion ) ) { this . useH2ProtocolAttribute = Boolean . FALSE ; } else if ( SSLChannelConstants . PROTOCOL_VERSION_2 . equalsIgnoreCase ( protocolVersion ) ) { this . useH2ProtocolAttribute = Boolean . TRUE ; } if ( ( TraceComponent . isAnyTracingEnabled ( ) ) && ( tc . isEventEnabled ( ) ) && useH2ProtocolAttribute != null ) { Tr . event ( tc , "SSL Channel Config: versionProtocolOption has been set to " + protocolVersion . toLowerCase ( Locale . ENGLISH ) ) ; } } } } catch ( Exception e ) { // no FFDC required
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "init received exception handling properties; " + e ) ; } throw new ChannelException ( e ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "jsseProvider=" + this . jsseProvider ) ; } // Indicate that initialization is complete .
this . isInitialized = true ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "init" ) ; } |
public class Similarity { /** * Computes < a href = " http : / / en . wikipedia . org / wiki / Kendall % 27s _ tau " > Kendall ' s
* tau < / a > of the values in the two vectors . This method uses tau - b , which
* is suitable for vectors with duplicate values .
* @ throws IllegalArgumentException when the length of the two vectors are
* not the same . */
public static double kendallsTau ( DoubleVector a , DoubleVector b ) { } } | check ( a , b ) ; // NOTE : slow n ^ 2 version . Needs to be replaced at some point with the
// n - log - n method and to take into account sparse vectors . - jurgens
int length = a . length ( ) ; double numerator = 0 ; // For both a and b , keep track of how many times each position i tied
// with some other position for rank .
SparseIntegerVector tiesInA = new CompactSparseIntegerVector ( length ) ; SparseIntegerVector tiesInB = new CompactSparseIntegerVector ( length ) ; boolean foundTies = false ; int concordant = 0 ; int discordant = 0 ; // For all pairs , track how many pairs satisfy the ordering
for ( int i = 0 ; i < length ; ++ i ) { for ( int j = i + 1 ; j < length ; ++ j ) { // NOTE : this value will be 1 if there exists an match or
// " concordance " in the ordering of the two pairs . Otherwise
// it , will be a - 1 of the pairs are not matched or are
// " discordant .
double ai = a . get ( i ) ; double aj = a . get ( j ) ; double bi = b . get ( i ) ; double bj = b . get ( j ) ; // Check for ties
boolean atie = ai == aj ; if ( ai == aj ) { tiesInA . add ( i , 1 ) ; foundTies = true ; } if ( bi == bj ) { tiesInB . add ( i , 1 ) ; foundTies = true ; } // If there was a tied rank , don ' t count the comparisons towards
// the concordance totals
if ( ai != aj && bi != bj ) { if ( ( ai < aj && bi < bj ) || ( ai > aj && bi > bj ) ) concordant ++ ; else discordant ++ ; } } } int n = concordant - discordant ; double d = ( .5 * ( length * ( length - 1 ) ) ) ; if ( foundTies ) { // IMPORTANT NOTE : for the summations , add 1 to the number of ties ,
// rather than subtract 1 . All the online pseudo code has ( ties *
// ( ties - 1 ) ) / 2 , which assumes that for a tied rank , ties will
// always have a value of 2 or more . I think they ' re double
// counting ties somehow , so we add 1 to account for this . Most
// importantly , adding 1 causes all the online Kendall ' s tau
// calculators to agree with our result .
double aSum = 0 ; for ( int i : tiesInA . getNonZeroIndices ( ) ) { int ties = tiesInA . get ( i ) ; aSum += ( ties * ( ties + 1 ) * .5 ) ; } double bSum = 0 ; for ( int i : tiesInB . getNonZeroIndices ( ) ) { int ties = tiesInB . get ( i ) ; bSum += ( ties * ( ties + 1 ) * .5 ) ; } return n / Math . sqrt ( ( d - aSum ) * ( d - bSum ) ) ; } else return n / d ; |
public class DropwizardExports { /** * Export a histogram snapshot as a prometheus SUMMARY .
* @ param dropwizardName metric name .
* @ param snapshot the histogram snapshot .
* @ param count the total sample count for this snapshot .
* @ param factor a factor to apply to histogram values . */
MetricFamilySamples fromSnapshotAndCount ( String dropwizardName , Snapshot snapshot , long count , double factor , String helpMessage ) { } } | List < MetricFamilySamples . Sample > samples = Arrays . asList ( sampleBuilder . createSample ( dropwizardName , "" , Arrays . asList ( "quantile" ) , Arrays . asList ( "0.5" ) , snapshot . getMedian ( ) * factor ) , sampleBuilder . createSample ( dropwizardName , "" , Arrays . asList ( "quantile" ) , Arrays . asList ( "0.75" ) , snapshot . get75thPercentile ( ) * factor ) , sampleBuilder . createSample ( dropwizardName , "" , Arrays . asList ( "quantile" ) , Arrays . asList ( "0.95" ) , snapshot . get95thPercentile ( ) * factor ) , sampleBuilder . createSample ( dropwizardName , "" , Arrays . asList ( "quantile" ) , Arrays . asList ( "0.98" ) , snapshot . get98thPercentile ( ) * factor ) , sampleBuilder . createSample ( dropwizardName , "" , Arrays . asList ( "quantile" ) , Arrays . asList ( "0.99" ) , snapshot . get99thPercentile ( ) * factor ) , sampleBuilder . createSample ( dropwizardName , "" , Arrays . asList ( "quantile" ) , Arrays . asList ( "0.999" ) , snapshot . get999thPercentile ( ) * factor ) , sampleBuilder . createSample ( dropwizardName , "_count" , new ArrayList < String > ( ) , new ArrayList < String > ( ) , count ) ) ; return new MetricFamilySamples ( samples . get ( 0 ) . name , Type . SUMMARY , helpMessage , samples ) ; |
public class ShardedCounterServiceImpl { /** * The cache will expire after { @ code defaultCounterCountExpiration } seconds , so the counter will be accurate after
* a minute because it performs a load from the datastore .
* @ param counterName
* @ param skipCache A boolean that allows a caller to skip memcache when retrieving a counter . Set to { @ code true }
* to load the counter and all of its shards directly from the Datastore . Set to { @ code false } to attempt
* to load the count from memcache , with fallback to the datastore .
* @ return */
@ Override public Optional < Counter > getCounter ( final String counterName , final boolean skipCache ) { } } | Preconditions . checkNotNull ( counterName ) ; // This method always load the CounterData from the Datastore ( or its Objectify cache ) , but sometimes returns
// the
// cached count value .
// ShortCircuit : If nothing is present in the datastore .
final Optional < CounterData > optCounterData = this . getCounterData ( counterName ) ; if ( ! optCounterData . isPresent ( ) ) { logger . log ( Level . FINEST , String . format ( "Counter '%s' was not found in hte Datastore!" , counterName ) ) ; return Optional . absent ( ) ; } final CounterData counterData = optCounterData . get ( ) ; // ShortCircuit : If the counter is in an indeterminate state , then return its count as 0.
if ( this . counterStatusYieldsIndeterminateCount ( counterData . getCounterStatus ( ) ) ) { logger . log ( Level . FINEST , String . format ( "Counter '%s' was in an indeterminate state. Returning 0!" , counterName ) ) ; return Optional . of ( new CounterBuilder ( counterData ) . withCount ( BigInteger . ZERO ) . build ( ) ) ; } // ShortCircuit : If the counter was found in memcache .
final String memCacheKey = this . assembleCounterKeyforMemcache ( counterName ) ; if ( ! skipCache ) { final BigInteger cachedCounterCount = this . memcacheSafeGet ( memCacheKey ) ; if ( cachedCounterCount != null ) { // The count was found in memcache , so return it .
logger . log ( Level . FINEST , String . format ( "Cache Hit for Counter Named '%s': value=%s" , counterName , cachedCounterCount ) ) ; return Optional . of ( new CounterBuilder ( counterData ) . withCount ( cachedCounterCount ) . build ( ) ) ; } else { logger . log ( Level . FINE , String . format ( "Cache Miss for CounterData Named '%s': value='%s'. Checking Datastore instead!" , counterName , cachedCounterCount ) ) ; } } // skipCache was true or the count was NOT found in memcache !
// Note : No Need to clear the Objectify session cache here because it will be cleared automatically and
// repopulated upon every request .
logger . log ( Level . FINE , String . format ( "Aggregating counts from '%s' CounterDataShards for CounterData named '%s'!" , counterData . getNumShards ( ) , counterData . getName ( ) ) ) ; // Assemble a List of CounterShardData Keys to retrieve in parallel !
final List < Key < CounterShardData > > keysToLoad = Lists . newArrayList ( ) ; for ( int i = 0 ; i < counterData . getNumShards ( ) ; i ++ ) { final Key < CounterShardData > counterShardKey = CounterShardData . key ( counterData . getTypedKey ( ) , i ) ; keysToLoad . add ( counterShardKey ) ; } long sum = 0 ; // For added performance , we could spawn multiple threads to wait for each value to be returned from the
// DataStore , and then aggregate that way . However , the simple summation below is not very expensive , so
// creating multiple threads to get each value would probably be overkill . Just let objectify do this for
// us . Even though we have to wait for all entities to return before summation begins , the summation is a quick
// in - memory operation with a relatively small number of shards , so parallelizing it would likely not increase
// performance .
// No TX - get is Strongly consistent by default , and we will exceed the TX limit for high - shard - count
// counters if we try to do this in a TX .
final Map < Key < CounterShardData > , CounterShardData > counterShardDatasMap = ObjectifyService . ofy ( ) . transactionless ( ) . load ( ) . keys ( keysToLoad ) ; final Collection < CounterShardData > counterShardDatas = counterShardDatasMap . values ( ) ; for ( CounterShardData counterShardData : counterShardDatas ) { if ( counterShardData != null ) { sum += counterShardData . getCount ( ) ; } } logger . log ( Level . FINE , String . format ( "The Datastore is reporting a count of %s for CounterData '%s' count. Resetting memcache " + "count to %s for this counter name." , sum , counterData . getName ( ) , sum ) ) ; final BigInteger bdSum = BigInteger . valueOf ( sum ) ; try { // This method will only get here if there was nothing in Memcache , or if the caller requested to skip
// reading the Counter count from memcache . In these cases , the value in memcache should always be replaced .
memcacheService . put ( memCacheKey , bdSum , config . getDefaultCounterCountExpiration ( ) , SetPolicy . SET_ALWAYS ) ; } catch ( MemcacheServiceException mse ) { // Do nothing . The method will still return even though memcache is not available .
} return Optional . of ( new CounterBuilder ( counterData ) . withCount ( bdSum ) . build ( ) ) ; |
public class ToStringOption { /** * Return a < code > ToStringOption < / code > instance with { @ link # appendStatic } option set .
* if the current instance is not { @ link # DEFAULT _ OPTION default instance } then set
* on the current instance and return the current instance . Otherwise , clone the default
* instance and set on the clone and return the clone
* @ param appendStatic
* @ return this option instance or clone if this is the { @ link # DEFAULT _ OPTION } */
public ToStringOption setAppendStatic ( boolean appendStatic ) { } } | ToStringOption op = this ; if ( this == DEFAULT_OPTION ) { op = new ToStringOption ( this . appendStatic , this . appendTransient ) ; } op . appendStatic = appendStatic ; return op ; |
public class PathMappingResultBuilder { /** * Adds an encoded path parameter , which will be decoded in UTF - 8 automatically . */
public PathMappingResultBuilder rawParam ( String name , String value ) { } } | params . put ( requireNonNull ( name , "name" ) , ArmeriaHttpUtil . decodePath ( requireNonNull ( value , "value" ) ) ) ; return this ; |
public class DerivativeLaplacian { /** * Computes the Laplacian of ' orig ' .
* @ param orig Input image . Not modified .
* @ param deriv Where the Laplacian is written to . Modified . */
public static void process ( GrayF32 orig , GrayF32 deriv , @ Nullable ImageBorder_F32 border ) { } } | deriv . reshape ( orig . width , orig . height ) ; if ( BoofConcurrency . USE_CONCURRENT ) { DerivativeLaplacian_Inner_MT . process ( orig , deriv ) ; } else { DerivativeLaplacian_Inner . process ( orig , deriv ) ; } if ( border != null ) { border . setImage ( orig ) ; ConvolveJustBorder_General_SB . convolve ( kernel_F32 , border , deriv ) ; } |
public class Header { /** * setter for doi - sets document object identifier
* @ generated
* @ param v value to set into the feature */
public void setDoi ( String v ) { } } | if ( Header_Type . featOkTst && ( ( Header_Type ) jcasType ) . casFeat_doi == null ) jcasType . jcas . throwFeatMissing ( "doi" , "de.julielab.jules.types.Header" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( Header_Type ) jcasType ) . casFeatCode_doi , v ) ; |
public class AppServiceCertificateOrdersInner { /** * List all certificates associated with a certificate order .
* List all certificates associated with a certificate order .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param certificateOrderName Name of the certificate order .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; AppServiceCertificateResourceInner & gt ; object */
public Observable < Page < AppServiceCertificateResourceInner > > listCertificatesAsync ( final String resourceGroupName , final String certificateOrderName ) { } } | return listCertificatesWithServiceResponseAsync ( resourceGroupName , certificateOrderName ) . map ( new Func1 < ServiceResponse < Page < AppServiceCertificateResourceInner > > , Page < AppServiceCertificateResourceInner > > ( ) { @ Override public Page < AppServiceCertificateResourceInner > call ( ServiceResponse < Page < AppServiceCertificateResourceInner > > response ) { return response . body ( ) ; } } ) ; |
public class TypeUtils { /** * Convert modifiers into a string representation , e . g . " public static final " .
* @ param modifiers
* The field or method modifiers .
* @ param modifierType
* The { @ link ModifierType } these modifiers apply to .
* @ param isDefault
* for methods , true if this is a default method ( else ignored ) .
* @ param buf
* The buffer to write the result into . */
public static void modifiersToString ( final int modifiers , final ModifierType modifierType , final boolean isDefault , final StringBuilder buf ) { } } | if ( ( modifiers & Modifier . PUBLIC ) != 0 ) { appendModifierKeyword ( buf , "public" ) ; } else if ( ( modifiers & Modifier . PRIVATE ) != 0 ) { appendModifierKeyword ( buf , "private" ) ; } else if ( ( modifiers & Modifier . PROTECTED ) != 0 ) { appendModifierKeyword ( buf , "protected" ) ; } if ( modifierType != ModifierType . FIELD && ( modifiers & Modifier . ABSTRACT ) != 0 ) { appendModifierKeyword ( buf , "abstract" ) ; } if ( ( modifiers & Modifier . STATIC ) != 0 ) { appendModifierKeyword ( buf , "static" ) ; } if ( modifierType == ModifierType . FIELD ) { if ( ( modifiers & Modifier . VOLATILE ) != 0 ) { // " bridge " and " volatile " overlap in bit 0x40
appendModifierKeyword ( buf , "volatile" ) ; } if ( ( modifiers & Modifier . TRANSIENT ) != 0 ) { appendModifierKeyword ( buf , "transient" ) ; } } if ( ( modifiers & Modifier . FINAL ) != 0 ) { appendModifierKeyword ( buf , "final" ) ; } if ( modifierType == ModifierType . METHOD ) { if ( ( modifiers & Modifier . SYNCHRONIZED ) != 0 ) { appendModifierKeyword ( buf , "synchronized" ) ; } if ( isDefault ) { appendModifierKeyword ( buf , "default" ) ; } } if ( ( modifiers & 0x1000 ) != 0 ) { appendModifierKeyword ( buf , "synthetic" ) ; } if ( modifierType != ModifierType . FIELD && ( modifiers & 0x40 ) != 0 ) { // " bridge " and " volatile " overlap in bit 0x40
appendModifierKeyword ( buf , "bridge" ) ; } if ( modifierType == ModifierType . METHOD && ( modifiers & Modifier . NATIVE ) != 0 ) { appendModifierKeyword ( buf , "native" ) ; } if ( modifierType != ModifierType . FIELD && ( modifiers & Modifier . STRICT ) != 0 ) { appendModifierKeyword ( buf , "strictfp" ) ; } // Ignored :
// ACC _ SUPER ( 0x0020 ) : Treat superclass methods specially when invoked by the invokespecial instruction |
public class InstanceInfo { /** * Returns the unique id of the instance .
* ( Note ) now that id is set at creation time within the instanceProvider , why do the other checks ?
* This is still necessary for backwards compatibility when upgrading in a deployment with multiple
* client versions ( some with the change , some without ) .
* @ return the unique id . */
@ JsonIgnore public String getId ( ) { } } | if ( instanceId != null && ! instanceId . isEmpty ( ) ) { return instanceId ; } else if ( dataCenterInfo instanceof UniqueIdentifier ) { String uniqueId = ( ( UniqueIdentifier ) dataCenterInfo ) . getId ( ) ; if ( uniqueId != null && ! uniqueId . isEmpty ( ) ) { return uniqueId ; } } return hostName ; |
public class JobExecutionsInner { /** * Starts an elastic job execution .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param jobAgentName The name of the job agent .
* @ param jobName The name of the job to get .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the JobExecutionInner object if successful . */
public JobExecutionInner create ( String resourceGroupName , String serverName , String jobAgentName , String jobName ) { } } | return createWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , jobName ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class UserMessages { /** * Does the property has user message ?
* @ param property the name of property , which may have user messages . ( NotNull )
* @ return The determination , true or false . */
public boolean hasMessageOf ( String property ) { } } | assertArgumentNotNull ( "property" , property ) ; final UserMessageItem item = getPropertyItem ( property ) ; return item != null && ! item . getMessageList ( ) . isEmpty ( ) ; |
public class FilesImpl { /** * Gets the properties of the specified task file .
* @ param jobId The ID of the job that contains the task .
* @ param taskId The ID of the task whose file you want to get the properties of .
* @ param filePath The path to the task file that you want to get the properties of .
* @ param fileGetPropertiesFromTaskOptions Additional parameters for the operation
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > getPropertiesFromTaskAsync ( String jobId , String taskId , String filePath , FileGetPropertiesFromTaskOptions fileGetPropertiesFromTaskOptions , final ServiceCallback < Void > serviceCallback ) { } } | return ServiceFuture . fromHeaderResponse ( getPropertiesFromTaskWithServiceResponseAsync ( jobId , taskId , filePath , fileGetPropertiesFromTaskOptions ) , serviceCallback ) ; |
public class ObjectExplorer { /** * Explores an object graph ( defined by a root object and whatever is
* reachable through it , following non - static fields ) while using an
* { @ link ObjectVisitor } to both control the traversal and return a value .
* < p > The { @ code features } further customizes the exploration behavior .
* In particular :
* < ul >
* < li > If { @ link Feature # VISIT _ PRIMITIVES } is contained in features ,
* the visitor will also be notified about exploration of primitive values .
* < li > If { @ link Feature # VISIT _ NULL } is contained in features , the visitor
* will also be notified about exploration of { @ code null } values .
* < / ul >
* In both cases above , the return value of
* { @ link ObjectVisitor # visit ( Chain ) } is ignored , since neither primitive
* values or { @ code null } can be further explored .
* @ param < T > the type of the value obtained ( after the traversal ) by the
* ObjectVisitor
* @ param rootObject an object to be recursively explored
* @ param visitor a visitor that is notified for each explored path
* and decides whether to continue exploration of that path , and constructs
* a return value at the end of the exploration
* @ param features a set of desired features that the object exploration should have
* @ return whatever value is returned by the visitor at the end of the traversal
* @ see ObjectVisitor */
public static < T > T exploreObject ( Object rootObject , ObjectVisitor < T > visitor , EnumSet < Feature > features ) { } } | Deque < Chain > stack = new ArrayDeque < Chain > ( 32 ) ; if ( rootObject != null ) stack . push ( Chain . root ( rootObject ) ) ; while ( ! stack . isEmpty ( ) ) { Chain chain = stack . pop ( ) ; // the only place where the return value of visit ( ) is considered
Traversal traversal = visitor . visit ( chain ) ; switch ( traversal ) { case SKIP : continue ; case EXPLORE : break ; default : throw new AssertionError ( ) ; } // only nonnull values pushed in the stack
@ Nonnull Object value = chain . getValue ( ) ; Class < ? > valueClass = value . getClass ( ) ; if ( valueClass . isArray ( ) ) { boolean isPrimitive = valueClass . getComponentType ( ) . isPrimitive ( ) ; /* * Since we push paths to explore in a stack , we push references found in the array in
* reverse order , so when we pop them , they will be in the array ' s order . */
for ( int i = Array . getLength ( value ) - 1 ; i >= 0 ; i -- ) { Object childValue = Array . get ( value , i ) ; if ( isPrimitive ) { if ( features . contains ( Feature . VISIT_PRIMITIVES ) ) { visitor . visit ( chain . appendArrayIndex ( i , childValue ) ) ; } } else if ( childValue == null ) { if ( features . contains ( Feature . VISIT_NULL ) ) { visitor . visit ( chain . appendArrayIndex ( i , childValue ) ) ; } } else { stack . push ( chain . appendArrayIndex ( i , childValue ) ) ; } } } else { /* * Reflection usually provides fields in declaration order . As above in arrays , we push
* them to the stack in reverse order , so when we pop them , we get them in the original
* ( declaration ) order . */
for ( Field field : Lists . reverse ( getAllFields ( value ) ) ) { Object childValue = null ; try { childValue = field . get ( value ) ; } catch ( Exception e ) { throw new AssertionError ( e ) ; } if ( childValue == null ) { // handling nulls
if ( features . contains ( Feature . VISIT_NULL ) ) { visitor . visit ( chain . appendField ( field , childValue ) ) ; } } else { // handling primitives or references
boolean isPrimitive = field . getType ( ) . isPrimitive ( ) ; Chain extendedChain = chain . appendField ( field , childValue ) ; if ( isPrimitive ) { if ( features . contains ( Feature . VISIT_PRIMITIVES ) ) { visitor . visit ( extendedChain ) ; } } else { stack . push ( extendedChain ) ; } } } } } return visitor . result ( ) ; |
public class AmazonEC2Client { /** * Describes the ClassicLink DNS support status of one or more VPCs . If enabled , the DNS hostname of a linked
* EC2 - Classic instance resolves to its private IP address when addressed from an instance in the VPC to which it ' s
* linked . Similarly , the DNS hostname of an instance in a VPC resolves to its private IP address when addressed
* from a linked EC2 - Classic instance . For more information , see < a
* href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / vpc - classiclink . html " > ClassicLink < / a > in the < i > Amazon
* Elastic Compute Cloud User Guide < / i > .
* @ param describeVpcClassicLinkDnsSupportRequest
* @ return Result of the DescribeVpcClassicLinkDnsSupport operation returned by the service .
* @ sample AmazonEC2 . DescribeVpcClassicLinkDnsSupport
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DescribeVpcClassicLinkDnsSupport "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeVpcClassicLinkDnsSupportResult describeVpcClassicLinkDnsSupport ( DescribeVpcClassicLinkDnsSupportRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeVpcClassicLinkDnsSupport ( request ) ; |
public class ExpandableGridView { /** * Sets the adapter that provides data to this view .
* @ param adapter
* The adapter , which should be set , as an instance of the type { @ link
* ExpandableListAdapter } or null , if no adapter should be set */
public final void setAdapter ( @ Nullable final ExpandableListAdapter adapter ) { } } | expandedGroups . clear ( ) ; if ( adapter != null ) { this . adapter = new AdapterWrapper ( adapter ) ; super . setAdapter ( this . adapter ) ; } else { this . adapter = null ; super . setAdapter ( null ) ; } |
public class Polarizability { /** * calculate bond polarizability .
* @ param atomContainer AtomContainer
* @ param bond Bond bond for which the polarizabilitiy should be calculated
* @ return polarizabilitiy */
public double calculateBondPolarizability ( IAtomContainer atomContainer , IBond bond ) { } } | double polarizabilitiy = 0 ; IAtomContainer acH = atomContainer . getBuilder ( ) . newInstance ( IAtomContainer . class , atomContainer ) ; addExplicitHydrogens ( acH ) ; if ( bond . getAtomCount ( ) == 2 ) { polarizabilitiy += getKJPolarizabilityFactor ( acH , bond . getBegin ( ) ) ; polarizabilitiy += getKJPolarizabilityFactor ( acH , bond . getEnd ( ) ) ; } return ( polarizabilitiy / 2 ) ; |
public class AbstractRedG { /** * Returns a list of insert statements , one for each added entity in the respective order they were added .
* @ return The SQL Insert strings */
public List < String > generateSQLStatements ( ) { } } | return getEntitiesSortedForInsert ( ) . stream ( ) . map ( RedGEntity :: getSQLString ) . collect ( Collectors . toList ( ) ) ; |
public class AnnotationLookup { /** * / * @ Nullable */
public JvmAnnotationReference findAnnotation ( /* @ NonNull */
JvmAnnotationTarget annotationTarget , /* @ NonNull */
Class < ? extends Annotation > lookupType ) { } } | // avoid creating an empty list for all given targets but check for # eIsSet first
if ( annotationTarget . eIsSet ( TypesPackage . Literals . JVM_ANNOTATION_TARGET__ANNOTATIONS ) ) { for ( JvmAnnotationReference annotation : annotationTarget . getAnnotations ( ) ) { JvmAnnotationType annotationType = annotation . getAnnotation ( ) ; if ( annotationType != null && lookupType . getCanonicalName ( ) . equals ( annotationType . getQualifiedName ( ) ) ) { return annotation ; } } } return null ; |
public class ExpressRoutePortsInner { /** * Retrieves the requested ExpressRoutePort resource .
* @ param resourceGroupName The name of the resource group .
* @ param expressRoutePortName The name of ExpressRoutePort .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the ExpressRoutePortInner object if successful . */
public ExpressRoutePortInner getByResourceGroup ( String resourceGroupName , String expressRoutePortName ) { } } | return getByResourceGroupWithServiceResponseAsync ( resourceGroupName , expressRoutePortName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class PresentationManager { /** * Return the entity of the given id
* @ param id Entity id
* @ return The entity */
public Entity getEntity ( String id ) { } } | Entity e = getEntities ( ) . get ( id ) ; if ( e == null ) { return null ; } return e ; |
public class Strings {

    /**
     * Join a char array with a delimiter.
     *
     * @param delimiter the delimiter placed between consecutive characters
     * @param chars     the char array to join
     * @return the joined string (empty when no chars are given)
     */
    public static String joinP(String delimiter, char... chars) {
        // Presize generously: one char plus one delimiter per element.
        final StringBuilder joined = new StringBuilder(chars.length + (delimiter.length() * chars.length));
        for (int i = 0; i < chars.length; i++) {
            if (i > 0) {
                joined.append(delimiter);
            }
            joined.append(chars[i]);
        }
        return joined.toString();
    }
}
public class CStrChunk {
    /**
     * Optimized substring() method for a buffer of only ASCII characters.
     * The presence of UTF-8 multi-byte characters would give incorrect results
     * for the string length, which is required here.
     *
     * @param nc NewChunk to be filled with substrings in this chunk
     * @param startIndex The beginning index of the substring, inclusive
     * @param endIndex The ending index of the substring, exclusive
     * @return Filled NewChunk
     */
    public NewChunk asciiSubstring(NewChunk nc, int startIndex, int endIndex) {
        // copy existing data
        nc = this.extractRows(nc, 0, _len);
        // update offsets and byte array
        for (int i = 0; i < _len; i++) {
            // per-row byte offset into the value buffer; NA marks a missing string
            int off = UnsafeUtils.get4(_mem, idx(i));
            if (off != NA) {
                int len = 0;
                while (_mem[_valstart + off + len] != 0) len++; // Find length (values are NUL-terminated)
                // advance the row's offset by startIndex, clamped to the string length
                nc.set_is(i, startIndex < len ? off + startIndex : off + len);
                // truncate in place: NUL out every trailing byte at or beyond endIndex
                for (; len > endIndex - 1; len--) {
                    nc._ss[off + len] = 0; // Set new end
                }
            }
        }
        return nc;
    }
}
public class MessageComposerHolder { /** * Add new images to attachment band .
* @ param imagesToAttach an array of new images to add */
public void addImagesToImageAttachmentBand ( final List < ImageItem > imagesToAttach ) { } } | if ( imagesToAttach == null || imagesToAttach . size ( ) == 0 ) { return ; } attachments . setupLayoutListener ( ) ; attachments . setVisibility ( View . VISIBLE ) ; images . addAll ( imagesToAttach ) ; setAttachButtonState ( ) ; addAdditionalAttachItem ( ) ; attachments . notifyDataSetChanged ( ) ; |
public class CassQuery {
    /**
     * (non-Javadoc)
     *
     * @see com.impetus.kundera.query.QueryImpl#populateEntities(com.impetus.kundera.metadata.model.EntityMetadata,
     * com.impetus.kundera.client.Client)
     *
     * Routes the JPA query to the appropriate Cassandra execution path:
     * Lucene index, CQL3 translation, native query, secondary/inverted index,
     * or a row-key range scan.
     */
    @Override
    protected List<Object> populateEntities(EntityMetadata m, Client client) {
        if (log.isDebugEnabled()) {
            log.debug("Populating entities for Cassandra query {}.", getJPAQuery());
        }
        List<Object> result = new ArrayList<Object>();
        ApplicationMetadata appMetadata = kunderaMetadata.getApplicationMetadata();
        externalProperties = ((CassandraClientBase) client).getExternalProperties();
        // if id attribute is embeddable, it is meant for CQL translation.
        // make it independent of embedded stuff and allow even to add non
        // composite into where clause and let cassandra complain for it.
        MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata()
                .getMetamodel(m != null ? m.getPersistenceUnit() : client.getPersistenceUnit());
        String query = appMetadata.getQuery(getJPAQuery());
        boolean isNative = kunderaQuery.isNative();
        // Path 1: JPQL without secondary-index support -> resolve via Lucene.
        if (!isNative && !MetadataUtils.useSecondryIndex(((ClientBase) client).getClientMetadata())) {
            result = populateUsingLucene(m, client, result, getKunderaQuery().getResult());
        }
        // change for embeddable
        // Path 2: JPQL with CQL3 enabled -> translate to CQL3 and execute.
        else if (!isNative && ((CassandraClientBase) client).isCql3Enabled(m)
                && MetadataUtils.useSecondryIndex(((ClientBase) client).getClientMetadata())) {
            result = ((CassandraClientBase) client).executeQuery(m.getEntityClazz(), null, isNative,
                    onQueryOverCQL3(m, client, metaModel, null));
        } else {
            if (isNative) {
                // XXX
                // Path 3: native query, with or without bound parameters.
                if (!kunderaQuery.getBindParameters().isEmpty()) {
                    result = ((CassandraClientBase) client).executeQuery(null, null, isNative,
                            query != null ? query : getJPAQuery(), kunderaQuery.getBindParameters());
                } else {
                    result = ((CassandraClientBase) client).executeQuery(m != null ? m.getEntityClazz() : null, null,
                            isNative, query != null ? query : getJPAQuery());
                }
            } else {
                if (MetadataUtils.useSecondryIndex(((ClientBase) client).getClientMetadata())) {
                    // Index in Inverted Index table if applicable
                    boolean useInvertedIndex = CassandraIndexHelper.isInvertedIndexingApplicable(m,
                            MetadataUtils.useSecondryIndex(((ClientBase) client).getClientMetadata()));
                    Map<Boolean, List<IndexClause>> ixClause = prepareIndexClause(m, useInvertedIndex);
                    // Path 4: inverted-index table lookup when filters exist.
                    if (useInvertedIndex && !getKunderaQuery().getFilterClauseQueue().isEmpty()) {
                        result = (List) ((CassandraEntityReader) getReader()).readFromIndexTable(m, client, ixClause);
                    } else {
                        // The map's single key tells whether this is a row-key query.
                        boolean isRowKeyQuery = ixClause.keySet().iterator().next();
                        if (!isRowKeyQuery) {
                            // Path 5: secondary-index find.
                            result = ((CassandraClientBase) client).find(ixClause.get(isRowKeyQuery), m, false, null,
                                    isSingleResult ? 1 : this.maxResult,
                                    getColumnList(m, metaModel, getKunderaQuery().getResult(), null));
                        } else {
                            // Path 6: row-key range scan.
                            result = ((CassandraEntityReader) getReader()).handleFindByRange(m, client, result,
                                    ixClause, isRowKeyQuery,
                                    getColumnList(m, metaModel, getKunderaQuery().getResult(), null),
                                    isSingleResult ? 1 : this.maxResult);
                        }
                    }
                }
            }
        }
        return result;
    }
}
public class CheckBox { /** * Sets the value of the CheckBox
* @ param value
* - String that can be one of two values : CHECK - sets checkbox
* from UNCHECKED to CHECK UNCHECK - sets checkbox from CHECKED
* to UNCHECKED */
@ Override public void setValue ( Object value ) throws WidgetException { } } | boolean set = false ; try { if ( value instanceof String ) { if ( ( ( String ) value ) . equalsIgnoreCase ( UNCHECK ) ) { doAction ( true ) ; } else if ( ( ( String ) value ) . equalsIgnoreCase ( CHECK ) ) { doAction ( false ) ; } set = true ; } else { throw new WidgetRuntimeException ( "value must be a String of either 'check' or 'uncheck'" , getByLocator ( ) ) ; } } catch ( Exception e ) { throw new WidgetException ( "Error while checking/unchecking" , getByLocator ( ) , e ) ; } if ( ! set ) throw new WidgetException ( "Invalid set value for checkbox. It must be either 'check' or 'uncheck'" , getByLocator ( ) ) ; |
public class CollectionSchemaUpdate { /** * Static factory method to create an CollectionUpdate for the specified key
* @ param key : JSON attribute to update
* @ param operation : operation to carry out on the attribute
* @ return the updated CollectionSchemaUpdate */
public static CollectionSchemaUpdate update ( String key , IOperation operation ) { } } | return new CollectionSchemaUpdate ( ) . set ( key , operation ) ; |
public class DynamicCDXIndex { /** * ( non - Javadoc )
* @ see org . archive . wayback . resourceindex . SearchResultSource # getPrefixIterator ( java . lang . String ) */
public CloseableIterator < CaptureSearchResult > getPrefixIterator ( String prefix ) throws ResourceIndexNotAvailableException { } } | if ( getState ( ) != STATE_SYNCHED ) { throw new ResourceIndexNotAvailableException ( "Not synchronized" ) ; } return super . getPrefixIterator ( prefix ) ; |
public class ServicePlatform {
    /**
     * Reloads the platform with the given application context at its root.
     *
     * <p>If a PlatformControl is detected in the root web application context, module application
     * contexts are <i>not</i> to be loaded automatically when the servlet context is initialized.
     * Consequently, this method can be called later to load all module application contexts.</p>
     *
     * <p>This method can be called also to reload the platform, after the platform is unloaded
     * by a call to unload().</p>
     *
     * @param wac the application context to install at the root
     * @param child optional child context; when non-null, modules install under it instead
     */
    public void realize(ApplicationContext wac, ConfigurableApplicationContext child) {
        // Install the given context as the root web application context, but only once:
        // a second call is a no-op apart from a warning.
        if (WebApplicationContextUtils.getWebApplicationContext(_context) == null) {
            _context.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, wac);
        } else {
            _logger.warning("Already realized");
            return;
        }
        if (child != null) wac = child;
        ServiceModuleInfo info = new ServiceModuleInfo();
        if (wac.containsBean(PERSISTENCE)) { // persistence may not be there if persistent storage is not required
            info.persistence.first = info.persistence.second = (Persistence) wac.getBean(PERSISTENCE);
            _persistences.put("-", info.persistence.first);
        }
        // Module installation order matters: packaged modules first, then extensions,
        // then service augmentations on top of the populated registry.
        _logger.log(Level.CONFIG, "install packaged modules");
        wac = install(wac, sort(_context, _packaged), info);
        _logger.log(Level.CONFIG, "install extension modules");
        wac = install(wac, sort(_context, _extended), info);
        _logger.log(Level.CONFIG, "install service augmentations");
        for (ServiceAugmentation fi : info.augmentations) {
            fi.install(_registry);
        }
        // Built-in administrative services; the description/list pair can be hidden
        // via a system property, the ping service is always registered.
        String hide = System.getProperty("xillium.service.HideDescription");
        if (hide == null || hide.length() == 0) {
            _registry.put("x!/desc", new Pair<Service, Persistence>(new DescService(info.descriptions), info.persistence.first));
            _registry.put("x!/list", new Pair<Service, Persistence>(new ListService(_registry), info.persistence.first));
        }
        _registry.put("x!/ping", new Pair<Service, Persistence>(new PingService(), info.persistence.first));
        // Precompile parametric statements unless disabled; requires a TransactionManager.
        if (System.getProperty("xillium.persistence.DisablePrecompilation") == null) {
            for (Persistence persistence : _persistences.values()) {
                if (persistence.getTransactionManager() != null) {
                    persistence.doReadWrite(null, new Persistence.Task<Void, Void>() {
                        public Void run(Void facility, Persistence persistence) throws Exception {
                            _logger.info("parametric statements compiled: " + persistence.compile());
                            return null;
                        }
                    });
                } else {
                    _logger.warning("Persistence precompilation is ON (default) but TransactionManager is not configured");
                }
            }
        }
    }
}
public class DiskBasedCache {
    /**
     * Prunes the cache to fit the amount of bytes specified.
     *
     * @param neededSpace The amount of bytes we are trying to fit into the cache.
     */
    private void pruneIfNeeded(int neededSpace) {
        // Fast path: the new entry still fits under the size cap.
        if ((mTotalSize + neededSpace) < mMaxCacheSizeInBytes) {
            return;
        }
        if (VolleyLog.DEBUG) {
            VolleyLog.v("Pruning old cache entries.");
        }
        long before = mTotalSize;
        int prunedFiles = 0;
        long startTime = System.currentTimeMillis();
        // Evict entries in the map's iteration order until enough space is freed.
        Iterator<Map.Entry<String, CacheHeader>> iterator = mEntries.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<String, CacheHeader> entry = iterator.next();
            CacheHeader e = entry.getValue();
            boolean deleted = getFileForKey(e.key).delete();
            if (deleted) {
                mTotalSize -= e.size;
            } else {
                VolleyLog.d("Could not delete cache entry for key=%s, filename=%s", e.key, getFilenameForKey(e.key));
            }
            // The entry is dropped from the in-memory index even if the backing
            // file could not be deleted.
            iterator.remove();
            prunedFiles++;
            // Stop once we are under the cap scaled by the hysteresis factor,
            // so pruning does not re-trigger on the very next insert.
            if ((mTotalSize + neededSpace) < mMaxCacheSizeInBytes * HYSTERESIS_FACTOR) {
                break;
            }
        }
        if (VolleyLog.DEBUG) {
            VolleyLog.v("pruned %d files, %d bytes, %d ms", prunedFiles, (mTotalSize - before), System.currentTimeMillis() - startTime);
        }
    }
}
public class PrefixedProperties { /** * Gets the prefixed key and parse it to an String [ ] < br >
* Each comma - separated list can be used . If the key couldn ' t get found , the
* default will be used .
* @ param key
* the key
* @ param def
* default value
* @ return String [ ] */
public String [ ] getArray ( final String key , final String [ ] def ) { } } | final String [ ] value = getArray ( key ) ; if ( value != null ) { return value ; } return def ; |
public class Slf4jLogger {
    /**
     * This method is similar to {@link #info(String, Object)} except that the
     * marker data is also taken into consideration.
     *
     * @param marker the marker data specific to this log statement
     * @param format the format string
     * @param arg the argument
     */
    public void info(Marker marker, String format, Object arg) {
        // Skip message formatting entirely when INFO is disabled on the delegate.
        if (m_delegate.isInfoEnabled()) {
            FormattingTuple tuple = MessageFormatter.format(format, arg);
            // The marker is carried through the MDC around the delegate call
            // rather than being passed to the delegate directly.
            setMDCMarker(marker);
            // NOTE(review): the delegate method is 'inform', not 'info' — presumably an
            // adapter-specific method on the delegate; confirm against its interface.
            m_delegate.inform(tuple.getMessage(), tuple.getThrowable());
            resetMDCMarker();
        }
    }
}
public class Util {
    /**
     * Creates a symlink to targetPath at baseDir + symlinkPath.
     * If there's a prior symlink at baseDir + symlinkPath, it will be overwritten.
     *
     * @param baseDir Base directory to resolve the 'symlinkPath' parameter.
     * @param targetPath The file that the symlink should point to. Usually relative to the
     *     directory of the symlink but may instead be an absolute path.
     * @param symlinkPath Where to create a symlink in (relative to {@code baseDir})
     * @param listener receives diagnostics when symlinks are unsupported or creation fails
     * @throws InterruptedException if interrupted while sleeping between retries
     */
    public static void createSymlink(@Nonnull File baseDir, @Nonnull String targetPath, @Nonnull String symlinkPath, @Nonnull TaskListener listener) throws InterruptedException {
        try {
            Path path = fileToPath(new File(baseDir, symlinkPath));
            Path target = Paths.get(targetPath, MemoryReductionUtil.EMPTY_STRING_ARRAY);
            final int maxNumberOfTries = 4;
            final int timeInMillis = 100;
            // Delete-then-create can race with another process recreating the link,
            // so retry a bounded number of times on FileAlreadyExistsException.
            for (int tryNumber = 1; tryNumber <= maxNumberOfTries; tryNumber++) {
                Files.deleteIfExists(path);
                try {
                    Files.createSymbolicLink(path, target);
                    break;
                } catch (FileAlreadyExistsException fileAlreadyExistsException) {
                    if (tryNumber < maxNumberOfTries) {
                        TimeUnit.MILLISECONDS.sleep(timeInMillis); // trying to defeat likely ongoing race condition
                        continue;
                    }
                    LOGGER.log(Level.WARNING, "symlink FileAlreadyExistsException thrown {0} times => cannot createSymbolicLink", maxNumberOfTries);
                    throw fileAlreadyExistsException;
                }
            }
        } catch (UnsupportedOperationException e) {
            // Platform (or filesystem) does not support symlinks: report and carry on.
            PrintStream log = listener.getLogger();
            log.print("Symbolic links are not supported on this platform");
            Functions.printStackTrace(e, log);
        } catch (IOException e) {
            // On Windows a missing symlink privilege surfaces as FileSystemException;
            // warn once instead of spamming the log.
            if (Functions.isWindows() && e instanceof FileSystemException) {
                warnWindowsSymlink();
                return;
            }
            PrintStream log = listener.getLogger();
            log.printf("ln %s %s failed%n", targetPath, new File(baseDir, symlinkPath));
            Functions.printStackTrace(e, log);
        }
    }
}
public class BsDuplicateHostCA {
    /**
     * Registers a filter aggregation under the given name, built from an optionally
     * customized condition query, with optional builder tweaks and nested sub-aggregations.
     *
     * @param name the aggregation name
     * @param queryLambda populates the filter's condition query; may be null for a match-all filter
     * @param opLambda optional hook to adjust the created FilterAggregationBuilder; may be null
     * @param aggsLambda optional hook that defines nested sub-aggregations; may be null
     */
    public void filter(String name, EsAbstractConditionQuery.OperatorCall<BsDuplicateHostCQ> queryLambda, ConditionOptionCall<FilterAggregationBuilder> opLambda, OperatorCall<BsDuplicateHostCA> aggsLambda) {
        DuplicateHostCQ cq = new DuplicateHostCQ();
        if (queryLambda != null) {
            queryLambda.callback(cq);
        }
        FilterAggregationBuilder builder = regFilterA(name, cq.getQuery());
        if (opLambda != null) {
            opLambda.callback(builder);
        }
        if (aggsLambda != null) {
            // Collect sub-aggregations on a child CA and attach each one to this filter.
            DuplicateHostCA ca = new DuplicateHostCA();
            aggsLambda.callback(ca);
            ca.getAggregationBuilderList().forEach(builder::subAggregation);
        }
    }
}
public class PropagatingCorrector { /** * / * ( non - Javadoc )
* @ see org . jboss . as . controller . AttributeValueCorrector # correct ( org . jboss . dmr . ModelNode , org . jboss . dmr . ModelNode ) */
@ Override public ModelNode correct ( ModelNode newValue , ModelNode currentValue ) { } } | if ( newValue . getType ( ) == ModelType . UNDEFINED ) { return newValue ; } if ( newValue . getType ( ) != ModelType . OBJECT || currentValue . getType ( ) != ModelType . OBJECT ) { return newValue ; } final Set < String > operationKeys = newValue . keys ( ) ; final Set < String > currentKeys = currentValue . keys ( ) ; for ( String currentKey : currentKeys ) { if ( ! operationKeys . contains ( currentKey ) ) { newValue . get ( currentKey ) . set ( currentValue . get ( currentKey ) ) ; } } return newValue ; |
public class CRFLogConditionalObjectiveFunction {
    /**
     * Calculates both value and partial derivatives at the point x, and saves them internally.
     * The value is the negative log conditional likelihood of the data given the model, and the
     * derivative for each feature is the expected count minus the empirical count, with the
     * configured prior's contribution added to both.
     */
    @Override
    public void calculate(double[] x) {
        double prob = 0.0; // the log prob of the sequence given the model, which is the negation of value at this point
        double[][] weights = to2D(x);
        // the expectations over counts
        // first index is feature index, second index is of possible labeling
        double[][] E = empty2D();
        // iterate over all the documents
        for (int m = 0; m < data.length; m++) {
            int[][][] docData = data[m];
            int[] docLabels = labels[m];
            // make a clique tree for this document
            CRFCliqueTree cliqueTree = CRFCliqueTree.getCalibratedCliqueTree(weights, docData, labelIndices, numClasses, classIndex, backgroundSymbol);
            // compute the log probability of the document given the model with the parameters x
            // 'given' holds the previous (window - 1) labels, seeded with the background symbol
            int[] given = new int[window - 1];
            Arrays.fill(given, classIndex.indexOf(backgroundSymbol));
            if (docLabels.length > docData.length) { // only true for self-training
                // fill the given array with the extra docLabels
                System.arraycopy(docLabels, 0, given, 0, given.length);
                // shift the docLabels array left
                int[] newDocLabels = new int[docData.length];
                System.arraycopy(docLabels, docLabels.length - newDocLabels.length, newDocLabels, 0, newDocLabels.length);
                docLabels = newDocLabels;
            }
            // iterate over the positions in this document
            for (int i = 0; i < docData.length; i++) {
                int label = docLabels[i];
                double p = cliqueTree.condLogProbGivenPrevious(i, label, given);
                if (VERBOSE) {
                    System.err.println("P(" + label + "|" + ArrayMath.toString(given) + ")=" + p);
                }
                prob += p;
                // slide the window of previous labels left and append the current label
                System.arraycopy(given, 1, given, 0, given.length - 1);
                given[given.length - 1] = label;
            }
            // compute the expected counts for this document, which we will need to compute the derivative
            // iterate over the positions in this document
            for (int i = 0; i < data[m].length; i++) {
                // for each possible clique at this position
                for (int j = 0; j < data[m][i].length; j++) {
                    Index<CRFLabel> labelIndex = labelIndices[j];
                    // for each possible labeling for that clique
                    for (int k = 0; k < labelIndex.size(); k++) {
                        int[] label = labelIndex.get(k).getLabel();
                        double p = cliqueTree.prob(i, label); // probability of these labels occurring in this clique with these features
                        // accumulate the expectation for every feature active in this clique
                        for (int n = 0; n < data[m][i][j].length; n++) {
                            E[data[m][i][j][n]][k] += p;
                        }
                    }
                }
            }
        }
        if (Double.isNaN(prob)) { // shouldn't be the case
            throw new RuntimeException("Got NaN for prob in CRFLogConditionalObjectiveFunction.calculate()");
        }
        value = -prob;
        if (VERBOSE) {
            System.err.println("value is " + value);
        }
        // compute the partial derivative for each feature by comparing expected counts to empirical counts
        int index = 0;
        for (int i = 0; i < E.length; i++) {
            for (int j = 0; j < E[i].length; j++) {
                derivative[index++] = (E[i][j] - Ehat[i][j]);
                if (VERBOSE) {
                    System.err.println("deriv(" + i + "," + j + ") = " + E[i][j] + " - " + Ehat[i][j] + " = " + derivative[index - 1]);
                }
            }
        }
        // incorporate priors
        if (prior == QUADRATIC_PRIOR) {
            // Gaussian prior: value += w^2 / (2 sigma^2); derivative += w / sigma^2
            double sigmaSq = sigma * sigma;
            for (int i = 0; i < x.length; i++) {
                double k = 1.0;
                double w = x[i];
                value += k * w * w / 2.0 / sigmaSq;
                derivative[i] += k * w / sigmaSq;
            }
        } else if (prior == HUBER_PRIOR) {
            // Huber prior: quadratic near zero, linear beyond epsilon
            double sigmaSq = sigma * sigma;
            for (int i = 0; i < x.length; i++) {
                double w = x[i];
                double wabs = Math.abs(w);
                if (wabs < epsilon) {
                    value += w * w / 2.0 / epsilon / sigmaSq;
                    derivative[i] += w / epsilon / sigmaSq;
                } else {
                    value += (wabs - epsilon / 2) / sigmaSq;
                    derivative[i] += ((w < 0.0) ? -1.0 : 1.0) / sigmaSq;
                }
            }
        } else if (prior == QUARTIC_PRIOR) {
            // Quartic prior: value += w^4 / (2 sigma^4)
            double sigmaQu = sigma * sigma * sigma * sigma;
            for (int i = 0; i < x.length; i++) {
                double k = 1.0;
                double w = x[i];
                value += k * w * w * w * w / 2.0 / sigmaQu;
                derivative[i] += k * w / sigmaQu;
            }
        }
    }
}
public class LibraryLoader { /** * Load a system library from a stream . Copies the library to a temp file
* and loads from there .
* @ param libname name of the library ( just used in constructing the library name )
* @ param is InputStream pointing to the library */
private void loadLibraryFromStream ( String libname , InputStream is ) { } } | try { File tempfile = createTempFile ( libname ) ; OutputStream os = new FileOutputStream ( tempfile ) ; logger . debug ( "tempfile.getPath() = " + tempfile . getPath ( ) ) ; long savedTime = System . currentTimeMillis ( ) ; // Leo says 8k block size is STANDARD ; )
byte buf [ ] = new byte [ 8192 ] ; int len ; while ( ( len = is . read ( buf ) ) > 0 ) { os . write ( buf , 0 , len ) ; } os . flush ( ) ; InputStream lock = new FileInputStream ( tempfile ) ; os . close ( ) ; double seconds = ( double ) ( System . currentTimeMillis ( ) - savedTime ) / 1e3 ; logger . debug ( "Copying took " + seconds + " seconds." ) ; logger . debug ( "Loading library from " + tempfile . getPath ( ) + "." ) ; System . load ( tempfile . getPath ( ) ) ; lock . close ( ) ; } catch ( IOException io ) { logger . error ( "Could not create the temp file: " + io . toString ( ) + ".\n" ) ; } catch ( UnsatisfiedLinkError ule ) { logger . error ( "Couldn't load copied link file: " + ule . toString ( ) + ".\n" ) ; throw ule ; } |
public class DOM3SerializerImpl { /** * Serializes the Level 3 DOM node by creating an instance of DOM3TreeWalker
* which traverses the DOM tree and invokes handler events to serialize
* the DOM NOde . Throws an exception only if an I / O exception occured
* while serializing .
* This interface is a public API .
* @ param node the Level 3 DOM node to serialize
* @ throws IOException if an I / O exception occured while serializing */
public void serializeDOM3 ( Node node ) throws IOException { } } | try { DOM3TreeWalker walker = new DOM3TreeWalker ( fSerializationHandler , fErrorHandler , fSerializerFilter , fNewLine ) ; walker . traverse ( node ) ; } catch ( org . xml . sax . SAXException se ) { throw new WrappedRuntimeException ( se ) ; } |
public class ConceptDrawProjectReader {
    /**
     * Read a project from a ConceptDraw PROJECT file. The project itself becomes the
     * root MPXJ task, and the project's child tasks are then read in outline-number order.
     *
     * @param project ConceptDraw PROJECT project
     */
    private void readProject(Project project) {
        Task mpxjTask = m_projectFile.addTask();
        // project.getAuthor()
        mpxjTask.setBaselineCost(project.getBaselineCost());
        mpxjTask.setBaselineFinish(project.getBaselineFinishDate());
        mpxjTask.setBaselineStart(project.getBaselineStartDate());
        // project.getBudget();
        // project.getCompany()
        mpxjTask.setFinish(project.getFinishDate());
        // project.getGoal()
        // project.getHyperlinks()
        // project.getMarkerID()
        mpxjTask.setName(project.getName());
        mpxjTask.setNotes(project.getNote());
        mpxjTask.setPriority(project.getPriority());
        // project.getSite()
        mpxjTask.setStart(project.getStartDate());
        // project.getStyleProject()
        // project.getTask()
        // project.getTimeScale()
        // project.getViewProperties()
        // Derive a stable GUID from the project's identifier.
        String projectIdentifier = project.getID().toString();
        mpxjTask.setGUID(UUID.nameUUIDFromBytes(projectIdentifier.getBytes()));
        // Sort the tasks into the correct order
        List<Document.Projects.Project.Task> tasks = new ArrayList<Document.Projects.Project.Task>(project.getTask());
        final AlphanumComparator comparator = new AlphanumComparator();
        Collections.sort(tasks, new Comparator<Document.Projects.Project.Task>() {
            @Override
            public int compare(Document.Projects.Project.Task o1, Document.Projects.Project.Task o2) {
                return comparator.compare(o1.getOutlineNumber(), o2.getOutlineNumber());
            }
        });
        // Map outline numbers to created tasks; "" is the root entry so that
        // top-level children can attach to the project task.
        Map<String, Task> map = new HashMap<String, Task>();
        map.put("", mpxjTask);
        for (Document.Projects.Project.Task task : tasks) {
            readTask(projectIdentifier, map, task);
        }
    }
}
public class StackTraceUtil {
    /**
     * Crops the stack trace with a max depth.
     *
     * @param callStack the original stack trace
     * @param maxDepth the max depth of real stack trace that will be cropped,
     *     0 means no limitation
     * @return the cropped stack trace (a copy of the leading frames)
     */
    private static StackTraceElement[] cropStackTrace(StackTraceElement[] callStack, int maxDepth) {
        int realDepth = callStack.length;
        if (maxDepth > 0) {
            realDepth = Math.min(maxDepth, realDepth);
        }
        // Arrays.copyOf replaces the manual allocate-and-arraycopy pair.
        return Arrays.copyOf(callStack, realDepth);
    }
}
public class Path {
    /**
     * Add a curve to the specified location (specifying the number of segments).
     *
     * @param x The destination x coordinate
     * @param y The destination y coordinate
     * @param cx1 The x coordinate of the first control point
     * @param cy1 The y coordinate of the first control point
     * @param cx2 The x coordinate of the second control point
     * @param cy2 The y coordinate of the second control point
     * @param segments The number of segments to use for the new curve
     */
    public void curveTo(float x, float y, float cx1, float cy1, float cx2, float cy2, int segments) {
        // special case for zero movement
        if ((cx == x) && (cy == y)) {
            return;
        }
        // Cubic curve from the current cursor position (cx, cy) through the
        // two control points to the destination.
        Curve curve = new Curve(new Vector2f(cx, cy), new Vector2f(cx1, cy1), new Vector2f(cx2, cy2), new Vector2f(x, y));
        float step = 1.0f / segments;
        // Sample the curve at t = step, 2*step, ..., 1.0 and append each point.
        for (int i = 1; i < segments + 1; i++) {
            float t = i * step;
            Vector2f p = curve.pointAt(t);
            // Points go into the hole being built, if any, otherwise the outline.
            if (hole != null) {
                hole.add(new float[] { p.x, p.y });
            } else {
                localPoints.add(new float[] { p.x, p.y });
            }
            // Advance the cursor so subsequent path operations continue from here.
            cx = p.x;
            cy = p.y;
        }
        pointsDirty = true;
    }
}
public class ElasticPoolsInner {
    /**
     * Gets an elastic pool.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param elasticPoolName The name of the elastic pool to be retrieved.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ElasticPoolInner object
     */
    public Observable<ElasticPoolInner> getAsync(String resourceGroupName, String serverName, String elasticPoolName) {
        // Delegate to the ServiceResponse variant and unwrap each response body.
        return getWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName).map(new Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner>() {
            @Override
            public ElasticPoolInner call(ServiceResponse<ElasticPoolInner> response) {
                return response.body();
            }
        });
    }
}
public class TreeGraphNode { /** * Return the node containing the head word for this node ( or
* < code > null < / code > if none ) , as recorded in this node ' s { @ link
* CyclicCoreLabel < code > CyclicCoreLabel < / code > } . ( In contrast to { @ link
* edu . stanford . nlp . ling . CategoryWordTag
* < code > CategoryWordTag < / code > } , we store head words and head
* tags as references to nodes , not merely as
* < code > String < / code > s . )
* @ return the node containing the head word for this node */
public TreeGraphNode headWordNode ( ) { } } | TreeGraphNode hwn = safeCast ( label . get ( TreeCoreAnnotations . HeadWordAnnotation . class ) ) ; if ( hwn == null || ( hwn . treeGraph ( ) != null && ! ( hwn . treeGraph ( ) . equals ( this . treeGraph ( ) ) ) ) ) { return null ; } return hwn ; |
public class ClosableBlockingQueue {
    /**
     * Tries to close the queue. Closing the queue only succeeds when no elements are
     * in the queue when this method is called. Checking whether the queue is empty, and
     * marking the queue as closed is one atomic operation.
     *
     * @return True, if the queue is closed, false if the queue remains open.
     */
    public boolean close() {
        // The lock makes the empty-check and the state flip a single atomic step.
        lock.lock();
        try {
            if (open) {
                if (elements.isEmpty()) {
                    open = false;
                    // Wake any blocked consumers so they observe the closed state.
                    nonEmpty.signalAll();
                    return true;
                } else {
                    // Elements still pending: the queue stays open.
                    return false;
                }
            } else { // already closed
                return true;
            }
        } finally {
            lock.unlock();
        }
    }
}
public class DefaultRemoteManagerImplementation { /** * Registers an event handler for a message type and returns a subscription . */
@ Override public < T , U extends T > AutoCloseable registerHandler ( final Class < U > messageType , final EventHandler < RemoteMessage < T > > theHandler ) { } } | if ( LOG . isLoggable ( Level . FINE ) ) { LOG . log ( Level . FINE , "RemoteManager: {0} messageType: {1} handler: {2}" , new Object [ ] { this . name , messageType . getCanonicalName ( ) , theHandler . getClass ( ) . getCanonicalName ( ) } ) ; } return this . handlerContainer . registerHandler ( messageType , theHandler ) ; |
public class ServerConfig { /** * only used for test
* @ deprecated */
public void setSecureSiteUrl ( String secureSiteUrl ) { } } | this . secureSiteUrl = StringUtils . isBlank ( secureSiteUrl ) ? new ServerSiteUrlConfig ( ) : new ServerSiteUrlConfig ( secureSiteUrl ) ; |
public class EventHandler { /** * Pushes an event onto the event buffer and flushes if specified or if
* the buffer has reached maximum capacity . */
private synchronized void pushEvent ( Event event , boolean flushBuffer ) { } } | eventBuffer . add ( event ) ; if ( flushBuffer || eventBuffer . size ( ) >= maxEntries ) { this . flushEventBuffer ( ) ; } |
public class Provider {
    /**
     * Get an unmodifiable Set of all services supported by
     * this Provider.
     *
     * @return an unmodifiable Set of all services supported by
     * this Provider
     * @since 1.5
     */
    public synchronized Set<Service> getServices() {
        checkInitialized();
        // Invalidate the cached snapshot if either backing map changed since it was built.
        if (legacyChanged || servicesChanged) {
            serviceSet = null;
        }
        if (serviceSet == null) {
            ensureLegacyParsed();
            // Rebuild the snapshot from both the current and the legacy service maps.
            Set<Service> set = new LinkedHashSet<>();
            if (serviceMap != null) {
                set.addAll(serviceMap.values());
            }
            if (legacyMap != null) {
                set.addAll(legacyMap.values());
            }
            serviceSet = Collections.unmodifiableSet(set);
            servicesChanged = false;
        }
        return serviceSet;
    }
}
public class XBELConverterServiceImpl { /** * { @ inheritDoc } */
@ Override public String toXML ( final Document d ) throws JAXBException , IOException { } } | XBELDocument xdoc = new DocumentConverter ( ) . convert ( d ) ; return converter . marshal ( xdoc ) ; |
public class CmsDriverManager {
    /**
     * Returns all resources associated to a given principal via an ACE with the given permissions.<p>
     *
     * If the <code>includeAttr</code> flag is set it returns also all resources associated to
     * a given principal through some of following attributes.<p>
     *
     * <ul>
     * <li>User Created</li>
     * <li>User Last Modified</li>
     * </ul><p>
     *
     * @param dbc the current database context
     * @param project the project to read the entries from
     * @param principalId the id of the principal
     * @param permissions a set of permissions to match, can be <code>null</code> for all ACEs
     * @param includeAttr a flag to include resources associated by attributes
     * @return a set of <code>{@link CmsResource}</code> objects
     * @throws CmsException if something goes wrong
     */
    public Set<CmsResource> getResourcesForPrincipal(CmsDbContext dbc, CmsProject project, CmsUUID principalId, CmsPermissionSet permissions, boolean includeAttr) throws CmsException {
        // Start from every resource the principal is referenced by via an ACE.
        Set<CmsResource> resources = new HashSet<CmsResource>(getVfsDriver(dbc).readResourcesForPrincipalACE(dbc, project, principalId));
        if (permissions != null) {
            Iterator<CmsResource> itRes = resources.iterator();
            while (itRes.hasNext()) {
                CmsAccessControlEntry ace = readAccessControlEntry(dbc, itRes.next(), principalId);
                // Keep only resources whose ACE grants at least all requested permission bits.
                if ((ace.getPermissions().getPermissions() & permissions.getPermissions()) != permissions.getPermissions()) { // remove if permissions does not match
                    itRes.remove();
                }
            }
        }
        if (includeAttr) {
            // Also include resources linked through the user-created / user-last-modified attributes.
            resources.addAll(getVfsDriver(dbc).readResourcesForPrincipalAttr(dbc, project, principalId));
        }
        return resources;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.