signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FileServersInner { /** * Creates a File Server in the given workspace .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param workspaceName The name of the workspace . Workspace names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long .
* @ param fileServerName The name of the file server within the specified resource group . File server names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long .
* @ param parameters The parameters to provide for File Server creation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the FileServerInner object if successful . */
public FileServerInner create ( String resourceGroupName , String workspaceName , String fileServerName , FileServerCreateParameters parameters ) { } } | return createWithServiceResponseAsync ( resourceGroupName , workspaceName , fileServerName , parameters ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class ApiOvhOrder { /** * Get allowed durations for ' backupStorage ' option
* REST : GET / order / dedicated / server / { serviceName } / backupStorage
* @ param capacity [ required ] The capacity in gigabytes of your backup storage
* @ param serviceName [ required ] The internal name of your dedicated server */
public ArrayList < String > dedicated_server_serviceName_backupStorage_GET ( String serviceName , OvhBackupStorageCapacityEnum capacity ) throws IOException { } } | String qPath = "/order/dedicated/server/{serviceName}/backupStorage" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "capacity" , capacity ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t1 ) ; |
public class SessionManagerConfigurationException { /** * Converts a Throwable to a SessionManagerConfigurationException . If the Throwable is a
* SessionManagerConfigurationException , it will be passed through unmodified ; otherwise , it will be wrapped
* in a new SessionManagerConfigurationException .
* @ param cause the Throwable to convert
* @ return a SessionManagerConfigurationException */
public static SessionManagerConfigurationException fromThrowable ( Throwable cause ) { } } | return ( cause instanceof SessionManagerConfigurationException ) ? ( SessionManagerConfigurationException ) cause : new SessionManagerConfigurationException ( cause ) ; |
public class Matrix { /** * Updates all elements of the specified column in this matrix by applying given { @ code function } .
* @ param j the column index
* @ param function the vector function */
public void updateColumn ( int j , VectorFunction function ) { } } | VectorIterator it = iteratorOfColumn ( j ) ; while ( it . hasNext ( ) ) { double x = it . next ( ) ; int i = it . index ( ) ; it . set ( function . evaluate ( i , x ) ) ; } |
public class MemoryPersistenceManagerImpl { /** * It might seems like this should delete related partition - level step executions as well .
* However these are owned by the job execution , not the step thread instance .
* @ param stepThreadInstanceKey */
@ Override public void deleteStepThreadInstanceOfRelatedPartitions ( TopLevelStepInstanceKey stepInstanceKey ) { } } | long compareInstanceId = stepInstanceKey . getJobInstance ( ) ; for ( StepThreadInstanceEntity stepThreadInstance : data . stepThreadInstanceData . values ( ) ) { if ( ( stepThreadInstance . getJobInstance ( ) . getInstanceId ( ) == compareInstanceId ) && ( stepThreadInstance . getStepName ( ) . equals ( stepInstanceKey . getStepName ( ) ) ) && ( ! ( stepThreadInstance instanceof TopLevelStepInstanceEntity ) ) ) { StepThreadInstanceKey removalKey = new StepThreadInstanceKey ( stepThreadInstance ) ; data . stepThreadInstanceData . remove ( removalKey ) ; } } |
public class BufferUtil {
    /**
     * Copies a region of bytes from one ByteBuffer into another.
     *
     * Both buffers must be array-backed (heap buffers): the copy operates on
     * {@code array()}, so a direct buffer would throw UnsupportedOperationException.
     * Buffer positions and limits are not consulted or modified.
     *
     * @param src the source ByteBuffer
     * @param srcStart starting offset in the source's backing array
     * @param dest the destination ByteBuffer
     * @param destStart starting offset in the destination's backing array
     * @param length number of bytes to copy
     * @return the destination ByteBuffer
     */
    public static ByteBuffer copy(ByteBuffer src, int srcStart, ByteBuffer dest, int destStart, int length) {
        byte[] from = src.array();
        byte[] to = dest.array();
        System.arraycopy(from, srcStart, to, destStart, length);
        return dest;
    }
}
public class KbTermWrapper {
    /**
     * Adds a comment to the wrapped KB term in the given context by delegating
     * to the wrapped object.
     *
     * @param comment the comment text to attach
     * @param ctx the context in which the comment is asserted
     * @return the Fact produced by the wrapped object's addComment call
     * @throws KbTypeException propagated from the wrapped object
     * @throws CreateException propagated from the wrapped object
     */
    @Override
    public Fact addComment(String comment, Context ctx) throws KbTypeException, CreateException {
        return wrapped().addComment(comment, ctx);
    }
}
public class KuraCloudConsumer { /** * Helpers */
private void onInternalMessageArrived ( String deviceId , String appTopic , KuraPayload message , int qos , boolean retain , boolean control ) { } } | log . debug ( "Received message with deviceId {}, application topic {}." , deviceId , appTopic ) ; Exchange exchange = anExchange ( getEndpoint ( ) . getCamelContext ( ) ) . withBody ( message ) . withHeader ( CAMEL_KURA_CLOUD_TOPIC , appTopic ) . withHeader ( CAMEL_KURA_CLOUD_DEVICEID , deviceId ) . withHeader ( CAMEL_KURA_CLOUD_QOS , qos ) . withHeader ( CAMEL_KURA_CLOUD_CONTROL , control ) . withHeader ( CAMEL_KURA_CLOUD_RETAIN , retain ) . build ( ) ; exchange . setFromEndpoint ( getEndpoint ( ) ) ; try { getProcessor ( ) . process ( exchange ) ; } catch ( Exception e ) { handleException ( "Error while processing an incoming message:" , e ) ; } |
public class ServletContexConfigSupport { /** * Load config .
* @ param servletContext
* the servlet context
* @ return the configuration */
Configuration loadConfig ( ServletContext servletContext ) { } } | String handlers = servletContext . getInitParameter ( ContextConfigParams . PARAM_HANDLERS ) ; String layout = servletContext . getInitParameter ( ContextConfigParams . PARAM_LAYOUT ) ; String filters = servletContext . getInitParameter ( ContextConfigParams . PARAM_FILTERS ) ; String options = servletContext . getInitParameter ( ContextConfigParams . PARAM_OPTIONS ) ; String metaData = servletContext . getInitParameter ( ContextConfigParams . PARAM_META_DATA ) ; String properties = servletContext . getInitParameter ( ContextConfigParams . PARAM_PROPERTIES ) ; Configuration config = Configuration . INSTANCE ; if ( handlers != null && ! handlers . equals ( "" ) ) { config . setHandlers ( new ReflectUtil < Handler > ( ) . getNewInstanceList ( handlers . split ( ";" ) ) ) ; } config . setLayout ( new ReflectUtil < Layout > ( ) . getNewInstance ( layout ) ) ; if ( filters != null && ! filters . equals ( "" ) ) { config . setFilters ( new ReflectUtil < AuditEventFilter > ( ) . getNewInstanceList ( filters . split ( ";" ) ) ) ; } config . setCommands ( options ) ; config . setMetaData ( new ReflectUtil < MetaData > ( ) . getNewInstance ( metaData ) ) ; if ( properties != null && ! properties . equals ( "" ) ) { String [ ] propertiesList = properties . split ( ";" ) ; for ( String property : propertiesList ) { String [ ] keyValue = property . split ( ":" ) ; config . addProperty ( keyValue [ 0 ] , keyValue [ 1 ] ) ; } } return config ; |
public class CmsStringUtil {
    /**
     * Escapes a String so it may be printed as text content or attribute
     * value in a HTML page or an XML file.<p>
     *
     * This method replaces the following characters in a String:
     * <ul>
     * <li><b>&lt;</b> with &amp;lt;
     * <li><b>&gt;</b> with &amp;gt;
     * <li><b>&amp;</b> with &amp;amp;, unless it starts a numeric character
     * reference such as <code>&amp;#123;</code>, which is left untouched
     * <li><b>&quot;</b> with &amp;quot;
     * </ul><p>
     *
     * @param source the string to escape
     * @return the escaped string, or <code>null</code> if the input was <code>null</code>
     */
    private static String escapeXml(String source) {
        if (source == null) {
            return null;
        }
        // StringBuilder instead of StringBuffer: the buffer is method-local,
        // so no synchronization is needed.
        StringBuilder result = new StringBuilder(source.length() * 2);
        for (int i = 0; i < source.length(); ++i) {
            char ch = source.charAt(i);
            switch (ch) {
                case '<':
                    result.append("&lt;");
                    break;
                case '>':
                    result.append("&gt;");
                    break;
                case '&':
                    // don't escape already escaped international and special characters:
                    // a numeric character reference (&#nnn;) is passed through unchanged
                    int terminatorIndex = source.indexOf(";", i);
                    if (terminatorIndex > 0) {
                        if (source.substring(i + 1, terminatorIndex).matches("#[0-9]+")) {
                            result.append(ch);
                            break;
                        }
                    }
                    // note the other "break" in the above "if" block
                    result.append("&amp;");
                    break;
                case '"':
                    result.append("&quot;");
                    break;
                default:
                    result.append(ch);
            }
        }
        return result.toString();
    }
}
public class FileEncryptor { /** * < p > Reads data from the < code > InputStream < / code > and writes it to the < code > OutputStream < / code > . < / p >
* @ param is the input stream
* @ param os the output stream
* @ throws IOException */
private void copy ( InputStream is , OutputStream os ) throws IOException { } } | byte [ ] buffer = new byte [ bufferSize ] ; int nRead ; while ( ( nRead = is . read ( buffer ) ) != - 1 ) { os . write ( buffer , 0 , nRead ) ; } os . flush ( ) ; |
public class PayloadReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return Payload ResourceSet */
@ Override public ResourceSet < Payload > read ( final TwilioRestClient client ) { } } | return new ResourceSet < > ( this , client , firstPage ( client ) ) ; |
public class HBaseBackfillMergeMapper {
    /**
     * Given a multimap returned by the merge iterator, return an Op that should be the new value in the
     * live production cube, or null if the value in the live production cube should be deleted.
     *
     * @param deserializer decodes raw HBase cell bytes into an Op
     * @param liveCubeResults results for this row from the live cube table (only the first entry is used)
     * @param snapshotResults results for this row from the snapshot table (only the first entry is used)
     * @param backfilledResults results for this row from the backfill table (only the first entry is used)
     * @param ctx mapper context (not used by this method)
     * @return an ActionRowKeyAndOp telling what action should be taken for this row
     * @throws IOException if deserialization fails
     */
    private static final ActionRowKeyAndOp makeNewLiveCubeOp(Deserializer<?> deserializer,
            Collection<Result> liveCubeResults, Collection<Result> snapshotResults,
            Collection<Result> backfilledResults, Context ctx) throws IOException {
        Op liveCubeOp = null;
        Op snapshotOp = null;
        Op backfilledOp = null;
        byte[] rowKey = null;
        // All three collections refer to the same row, so whichever is non-empty
        // supplies the row key; only the first Result of each is consulted.
        if (!liveCubeResults.isEmpty()) {
            Result result = liveCubeResults.iterator().next();
            liveCubeOp = deserializer.fromBytes(result.value());
            rowKey = result.getRow();
        }
        if (!snapshotResults.isEmpty()) {
            Result result = snapshotResults.iterator().next();
            snapshotOp = deserializer.fromBytes(result.value());
            rowKey = result.getRow();
        }
        if (!backfilledResults.isEmpty()) {
            Result result = backfilledResults.iterator().next();
            backfilledOp = deserializer.fromBytes(result.value());
            rowKey = result.getRow();
        }
        /*
         * Merge the live cube table, the snapshot table, and the backfill table. We assume that the
         * snapshot table contains the values that existing before the backfill began, which means
         * that we can estimate the values that arrived since the snapshot by (live - snapshot). By
         * adding the recently-arrived values to the backfilled values, we solve the problem of data
         * arriving during the snapshot that might not otherwise have been counted.
         * The following if-else statements enumerate all 8 possibilities of presence/absence of
         * snapshot row, backfill row, and livecube row.
         */
        // Case: snapshot exists, backfill exists, liveCube exists
        //   If live == snap: new value is backfill
        //   Else:            new value is (live - snap) + backfill
        if (snapshotOp != null && backfilledOp != null && liveCubeOp != null) {
            if (liveCubeOp.equals(snapshotOp)) {
                return new ActionRowKeyAndOp(Action.OVERWRITE, rowKey, backfilledOp);
            }
            Op newLiveCubeValue = (liveCubeOp.subtract(snapshotOp)).add(backfilledOp);
            if (newLiveCubeValue.equals(liveCubeOp)) {
                return new ActionRowKeyAndOp(Action.LEAVE_ALONE, rowKey, null);
            } else {
                return new ActionRowKeyAndOp(Action.OVERWRITE, rowKey, newLiveCubeValue);
            }
        }
        // Case: snapshot exists, backfill empty, liveCube exists
        //   If live == snap: no ops occurred during snapshot, delete row
        //   Else:            new value is (live - snap)
        else if (snapshotOp != null && backfilledOp == null && liveCubeOp != null) {
            if (liveCubeOp.equals(snapshotOp)) {
                return new ActionRowKeyAndOp(Action.DELETE, rowKey, null);
            } else {
                Op newLiveCubeValue = liveCubeOp.subtract(snapshotOp);
                return new ActionRowKeyAndOp(Action.OVERWRITE, rowKey, newLiveCubeValue);
            }
        }
        // Case: snapshot empty, backfill exists, liveCube exists
        //   New value is backfill + live
        else if (snapshotOp == null && backfilledOp != null && liveCubeOp != null) {
            Op newLiveCubeValue = backfilledOp.add(liveCubeOp);
            return new ActionRowKeyAndOp(Action.OVERWRITE, rowKey, newLiveCubeValue);
        }
        // Case: snapshot empty, backfill exists, liveCube empty
        //   New value is backfill
        else if (snapshotOp == null && backfilledOp != null && liveCubeOp == null) {
            return new ActionRowKeyAndOp(Action.OVERWRITE, rowKey, backfilledOp);
        }
        // Case: snapshot empty, backfill empty, liveCube exists
        //   Leave alone
        else if (snapshotOp == null && backfilledOp == null && liveCubeOp != null) {
            return new ActionRowKeyAndOp(Action.LEAVE_ALONE, rowKey, null);
        }
        // Case: snapshot empty, backfill empty, liveCube empty
        //   No such case, we won't be called, merge iterator doesn't return nonexistent rows
        else if (snapshotOp == null && backfilledOp == null && liveCubeOp == null) {
            throw new RuntimeException("This shouldn't happen, at least one of the ops must be "
                    + "non-null");
        }
        // Case: snapshot exists, backfill exists, liveCube empty
        //   Error, row should be in live cube if it's in the snapshot
        else if (snapshotOp != null && backfilledOp != null && liveCubeOp == null) {
            throw new RuntimeException("Row shouldn't have disappeared from live cube during "
                    + "snapshotting, something weird is going on. (case 1)");
        }
        // Case: snapshot exists, backfill empty, liveCube empty
        //   Error, row should be in live cube if it's in the snapshot
        else {
            throw new RuntimeException("Row shouldn't have disappeared from live cube during "
                    + "snapshotting, something weird is going on. (case 2)");
        }
    }
}
public class XMLEncodingDetector { /** * Convenience function used in all XML scanners . */
private void reportFatalError ( String msgId , String arg ) throws JspCoreException { } } | throw new JspCoreException ( msgId , new Object [ ] { arg } ) ; // err . jspError ( msgId , arg ) ; |
public class PhoenixInputStream {
    /**
     * Read the header from the Phoenix file.
     *
     * The header is expected to start at offset 0 with "PPX!!!!|" and be
     * terminated by "|!!!!XPP". On success the stream is repositioned just
     * past the header block.
     *
     * @param stream input stream (must support mark/reset, which BufferedInputStream does)
     * @return raw header data between the prefix and suffix markers
     * @throws IOException if the stream does not start with a recognisable header
     */
    private String readHeaderString(BufferedInputStream stream) throws IOException {
        int bufferSize = 100;
        stream.mark(bufferSize);
        byte[] buffer = new byte[bufferSize];
        // Fill the buffer as far as possible: a single read() may legally return
        // fewer bytes than requested, which previously could truncate the header.
        int total = 0;
        while (total < bufferSize) {
            int read = stream.read(buffer, total, bufferSize - total);
            if (read == -1) {
                break;
            }
            total += read;
        }
        // Decode only the bytes actually read. StandardCharsets.UTF_8 replaces the
        // project's CharsetHelper.UTF8 — NOTE(review): the name indicates the same
        // charset; confirm CharsetHelper.UTF8 is plain UTF-8.
        String header = new String(buffer, 0, total, java.nio.charset.StandardCharsets.UTF_8);
        int prefixIndex = header.indexOf("PPX!!!!|");
        int suffixIndex = header.indexOf("|!!!!XPP");
        if (prefixIndex != 0 || suffixIndex == -1) {
            throw new IOException("File format not recognised");
        }
        // Reposition the stream just past the header block; skip() may also skip
        // fewer bytes than requested, so loop until done.
        long toSkip = suffixIndex + 9;
        stream.reset();
        while (toSkip > 0) {
            long skipped = stream.skip(toSkip);
            if (skipped <= 0) {
                break;
            }
            toSkip -= skipped;
        }
        return header.substring(prefixIndex + 8, suffixIndex);
    }
}
public class AbstractRestExceptionHandler { /** * / / / / Template methods / / / / / */
public ResponseEntity < T > handleException ( E ex , HttpServletRequest req ) { } } | logException ( ex , req ) ; T body = createBody ( ex , req ) ; HttpHeaders headers = createHeaders ( ex , req ) ; return new ResponseEntity < > ( body , headers , getStatus ( ) ) ; |
public class NavigationView { /** * Updates the visibility of the way name view that is show below
* the navigation icon .
* If you ' d like to use this method without being overridden by the default visibility values
* values we provide , please disabled auto - query with
* { @ link NavigationMapboxMap # updateWaynameQueryMap ( boolean ) } .
* @ param isVisible true to show , false to hide */
@ Override public void updateWayNameVisibility ( boolean isVisible ) { } } | wayNameView . updateVisibility ( isVisible ) ; if ( navigationMap != null ) { navigationMap . updateWaynameQueryMap ( isVisible ) ; } |
public class ConsumerMonitorRegistrar { /** * Method addConsumerToRegisteredMonitors
* This method adds a new consumer to the appropriate places in each of the monitor
* maps .
* @ param mc
* @ param exactMonitorList
* @ param wildcardMonitorList */
public void addConsumerToRegisteredMonitors ( MonitoredConsumer mc , ArrayList exactMonitorList , ArrayList wildcardMonitorList ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "addConsumerToRegisteredMonitors" , new Object [ ] { mc , exactMonitorList , wildcardMonitorList } ) ; // Add consumer to correct places in the maps
addConsumerToRegisteredMonitorMap ( mc , exactMonitorList , _registeredExactConsumerMonitors ) ; // Now process the wildcard monitor list
addConsumerToRegisteredMonitorMap ( mc , wildcardMonitorList , _registeredWildcardConsumerMonitors ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "addConsumerToRegisteredMonitors" ) ; |
public class Operator { /** * Sets the name of this operator . This overrides the default name , which is either
* a generated description of the operation ( such as for example " Aggregate ( 1 : SUM , 2 : MIN ) " )
* or the name the user - defined function or input / output format executed by the operator .
* @ param newName The name for this operator .
* @ return The operator with a new name . */
public O name ( String newName ) { } } | this . name = newName ; @ SuppressWarnings ( "unchecked" ) O returnType = ( O ) this ; return returnType ; |
public class AmazonSageMakerClient { /** * Updates a notebook instance . NotebookInstance updates include upgrading or downgrading the ML compute instance
* used for your notebook instance to accommodate changes in your workload requirements . You can also update the VPC
* security groups .
* @ param updateNotebookInstanceRequest
* @ return Result of the UpdateNotebookInstance operation returned by the service .
* @ throws ResourceLimitExceededException
* You have exceeded an Amazon SageMaker resource limit . For example , you might have too many training jobs
* created .
* @ sample AmazonSageMaker . UpdateNotebookInstance
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / sagemaker - 2017-07-24 / UpdateNotebookInstance "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public UpdateNotebookInstanceResult updateNotebookInstance ( UpdateNotebookInstanceRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeUpdateNotebookInstance ( request ) ; |
public class GeomUtil { /** * Adds the target rectangle to the bounds of the source rectangle . If the source rectangle is
* null , a new rectangle is created that is the size of the target rectangle .
* @ return the source rectangle . */
public static Rectangle grow ( Rectangle source , Rectangle target ) { } } | if ( target == null ) { log . warning ( "Can't grow with null rectangle [src=" + source + ", tgt=" + target + "]." , new Exception ( ) ) ; } else if ( source == null ) { source = new Rectangle ( target ) ; } else { source . add ( target ) ; } return source ; |
public class RepositoryXmlHandler { /** * startElement callback .
* Only some Elements need special start operations .
* @ throws MetadataException indicating mapping errors */
public void startElement ( String uri , String name , String qName , Attributes atts ) { } } | boolean isDebug = logger . isDebugEnabled ( ) ; m_CurrentString = null ; try { switch ( getLiteralId ( qName ) ) { case MAPPING_REPOSITORY : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( MAPPING_REPOSITORY ) ) ; this . m_CurrentAttrContainer = m_repository ; String defIso = atts . getValue ( tags . getTagById ( ISOLATION_LEVEL ) ) ; this . m_repository . setDefaultIsolationLevel ( LockHelper . getIsolationLevelFor ( defIso ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ISOLATION_LEVEL ) + ": " + defIso ) ; String proxyPrefetchingLimit = atts . getValue ( tags . getTagById ( PROXY_PREFETCHING_LIMIT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PROXY_PREFETCHING_LIMIT ) + ": " + proxyPrefetchingLimit ) ; if ( proxyPrefetchingLimit != null ) { defProxyPrefetchingLimit = Integer . parseInt ( proxyPrefetchingLimit ) ; } // check repository version :
String version = atts . getValue ( tags . getTagById ( REPOSITORY_VERSION ) ) ; if ( DescriptorRepository . getVersion ( ) . equals ( version ) ) { if ( isDebug ) logger . debug ( " " + tags . getTagById ( REPOSITORY_VERSION ) + ": " + version ) ; } else { throw new MetadataException ( "Repository version does not match. expected " + DescriptorRepository . getVersion ( ) + " but found: " + version + ". Please update your repository.dtd and your repository.xml" + " version attribute entry" ) ; } break ; } case CLASS_DESCRIPTOR : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( CLASS_DESCRIPTOR ) ) ; m_CurrentCLD = new ClassDescriptor ( m_repository ) ; // prepare for custom attributes
this . m_CurrentAttrContainer = this . m_CurrentCLD ; // set isolation - level attribute
String isoLevel = atts . getValue ( tags . getTagById ( ISOLATION_LEVEL ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ISOLATION_LEVEL ) + ": " + isoLevel ) ; /* arminw :
only when an isolation - level is set in CLD , set it .
Else the CLD use the default iso - level defined in the repository */
if ( checkString ( isoLevel ) ) m_CurrentCLD . setIsolationLevel ( LockHelper . getIsolationLevelFor ( isoLevel ) ) ; // set class attribute
String classname = atts . getValue ( tags . getTagById ( CLASS_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CLASS_NAME ) + ": " + classname ) ; try { m_CurrentCLD . setClassOfObject ( ClassHelper . getClass ( classname ) ) ; } catch ( ClassNotFoundException e ) { m_CurrentCLD = null ; throw new MetadataException ( "Class " + classname + " could not be found" + " in the classpath. This could cause unexpected behaviour of OJB," + " please remove or comment out this class descriptor" + " in the repository.xml file." , e ) ; } // set schema attribute
String schema = atts . getValue ( tags . getTagById ( SCHEMA_NAME ) ) ; if ( schema != null ) { if ( isDebug ) logger . debug ( " " + tags . getTagById ( SCHEMA_NAME ) + ": " + schema ) ; m_CurrentCLD . setSchema ( schema ) ; } // set proxy attribute
String proxy = atts . getValue ( tags . getTagById ( CLASS_PROXY ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CLASS_PROXY ) + ": " + proxy ) ; if ( checkString ( proxy ) ) { if ( proxy . equalsIgnoreCase ( ClassDescriptor . DYNAMIC_STR ) ) { m_CurrentCLD . setProxyClassName ( ClassDescriptor . DYNAMIC_STR ) ; } else { m_CurrentCLD . setProxyClassName ( proxy ) ; } } // set proxyPrefetchingLimit attribute
String proxyPrefetchingLimit = atts . getValue ( tags . getTagById ( PROXY_PREFETCHING_LIMIT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PROXY_PREFETCHING_LIMIT ) + ": " + proxyPrefetchingLimit ) ; if ( proxyPrefetchingLimit == null ) { m_CurrentCLD . setProxyPrefetchingLimit ( defProxyPrefetchingLimit ) ; } else { m_CurrentCLD . setProxyPrefetchingLimit ( Integer . parseInt ( proxyPrefetchingLimit ) ) ; } // set table attribute :
String table = atts . getValue ( tags . getTagById ( TABLE_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( TABLE_NAME ) + ": " + table ) ; m_CurrentCLD . setTableName ( table ) ; if ( table == null ) { m_CurrentCLD . setIsInterface ( true ) ; } // set row - reader attribute
String rowreader = atts . getValue ( tags . getTagById ( ROW_READER ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ROW_READER ) + ": " + rowreader ) ; if ( rowreader != null ) { m_CurrentCLD . setRowReader ( rowreader ) ; } // set if extends
// arminw : TODO : this feature doesn ' t work , remove this stuff ?
String extendsAtt = atts . getValue ( tags . getTagById ( EXTENDS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( EXTENDS ) + ": " + extendsAtt ) ; if ( checkString ( extendsAtt ) ) { m_CurrentCLD . setSuperClass ( extendsAtt ) ; } // set accept - locks attribute
String acceptLocks = atts . getValue ( tags . getTagById ( ACCEPT_LOCKS ) ) ; if ( acceptLocks == null ) acceptLocks = "true" ; // default is true
logger . debug ( " " + tags . getTagById ( ACCEPT_LOCKS ) + ": " + acceptLocks ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ACCEPT_LOCKS ) + ": " + acceptLocks ) ; boolean b = ( Boolean . valueOf ( acceptLocks ) ) . booleanValue ( ) ; m_CurrentCLD . setAcceptLocks ( b ) ; // set initializationMethod attribute
String initializationMethod = atts . getValue ( tags . getTagById ( INITIALIZATION_METHOD ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( INITIALIZATION_METHOD ) + ": " + initializationMethod ) ; if ( initializationMethod != null ) { m_CurrentCLD . setInitializationMethod ( initializationMethod ) ; } // set factoryClass attribute
String factoryClass = atts . getValue ( tags . getTagById ( FACTORY_CLASS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FACTORY_CLASS ) + ": " + factoryClass ) ; if ( factoryClass != null ) { m_CurrentCLD . setFactoryClass ( factoryClass ) ; } // set factoryMethod attribute
String factoryMethod = atts . getValue ( tags . getTagById ( FACTORY_METHOD ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FACTORY_METHOD ) + ": " + factoryMethod ) ; if ( factoryMethod != null ) { m_CurrentCLD . setFactoryMethod ( factoryMethod ) ; } // set refresh attribute
String refresh = atts . getValue ( tags . getTagById ( REFRESH ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( REFRESH ) + ": " + refresh ) ; b = ( Boolean . valueOf ( refresh ) ) . booleanValue ( ) ; m_CurrentCLD . setAlwaysRefresh ( b ) ; // TODO : remove this or make offical feature
// persistent field
String pfClassName = atts . getValue ( "persistent-field-class" ) ; if ( isDebug ) logger . debug ( " persistent-field-class: " + pfClassName ) ; m_CurrentCLD . setPersistentFieldClassName ( pfClassName ) ; // put cld to the metadata repository
m_repository . put ( classname , m_CurrentCLD ) ; break ; } case OBJECT_CACHE : { // we only interessted in object - cache tags declared within
// an class - descriptor
if ( m_CurrentCLD != null ) { String className = atts . getValue ( tags . getTagById ( CLASS_NAME ) ) ; if ( checkString ( className ) ) { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( OBJECT_CACHE ) ) ; ObjectCacheDescriptor ocd = new ObjectCacheDescriptor ( ) ; this . m_CurrentAttrContainer = ocd ; ocd . setObjectCache ( ClassHelper . getClass ( className ) ) ; if ( m_CurrentCLD != null ) { m_CurrentCLD . setObjectCacheDescriptor ( ocd ) ; } if ( isDebug ) logger . debug ( " " + tags . getTagById ( CLASS_NAME ) + ": " + className ) ; } } break ; } case CLASS_EXTENT : { String classname = atts . getValue ( "class-ref" ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CLASS_EXTENT ) + ": " + classname ) ; m_CurrentCLD . addExtentClass ( classname ) ; break ; } case FIELD_DESCRIPTOR : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( FIELD_DESCRIPTOR ) ) ; String strId = atts . getValue ( tags . getTagById ( ID ) ) ; m_lastId = ( strId == null ? m_lastId + 1 : Integer . parseInt ( strId ) ) ; String strAccess = atts . getValue ( tags . getTagById ( ACCESS ) ) ; if ( RepositoryElements . TAG_ACCESS_ANONYMOUS . equalsIgnoreCase ( strAccess ) ) { m_CurrentFLD = new AnonymousFieldDescriptor ( m_CurrentCLD , m_lastId ) ; } else { m_CurrentFLD = new FieldDescriptor ( m_CurrentCLD , m_lastId ) ; } m_CurrentFLD . setAccess ( strAccess ) ; m_CurrentCLD . addFieldDescriptor ( m_CurrentFLD ) ; // prepare for custom attributes
this . m_CurrentAttrContainer = this . m_CurrentFLD ; String fieldName = atts . getValue ( tags . getTagById ( FIELD_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_NAME ) + ": " + fieldName ) ; if ( RepositoryElements . TAG_ACCESS_ANONYMOUS . equalsIgnoreCase ( strAccess ) ) { AnonymousFieldDescriptor anonymous = ( AnonymousFieldDescriptor ) m_CurrentFLD ; anonymous . setPersistentField ( null , fieldName ) ; } else { String classname = m_CurrentCLD . getClassNameOfObject ( ) ; PersistentField pf = PersistentFieldFactory . createPersistentField ( m_CurrentCLD . getPersistentFieldClassName ( ) , ClassHelper . getClass ( classname ) , fieldName ) ; m_CurrentFLD . setPersistentField ( pf ) ; } String columnName = atts . getValue ( tags . getTagById ( COLUMN_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( COLUMN_NAME ) + ": " + columnName ) ; m_CurrentFLD . setColumnName ( columnName ) ; String jdbcType = atts . getValue ( tags . getTagById ( JDBC_TYPE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( JDBC_TYPE ) + ": " + jdbcType ) ; m_CurrentFLD . setColumnType ( jdbcType ) ; String primaryKey = atts . getValue ( tags . getTagById ( PRIMARY_KEY ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PRIMARY_KEY ) + ": " + primaryKey ) ; boolean b = ( Boolean . valueOf ( primaryKey ) ) . booleanValue ( ) ; m_CurrentFLD . setPrimaryKey ( b ) ; String nullable = atts . getValue ( tags . getTagById ( NULLABLE ) ) ; if ( nullable != null ) { if ( isDebug ) logger . debug ( " " + tags . getTagById ( NULLABLE ) + ": " + nullable ) ; b = ! ( Boolean . valueOf ( nullable ) ) . booleanValue ( ) ; m_CurrentFLD . setRequired ( b ) ; } String indexed = atts . getValue ( tags . getTagById ( INDEXED ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( INDEXED ) + ": " + indexed ) ; b = ( Boolean . valueOf ( indexed ) ) . booleanValue ( ) ; m_CurrentFLD . 
setIndexed ( b ) ; String autoincrement = atts . getValue ( tags . getTagById ( AUTO_INCREMENT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_INCREMENT ) + ": " + autoincrement ) ; b = ( Boolean . valueOf ( autoincrement ) ) . booleanValue ( ) ; m_CurrentFLD . setAutoIncrement ( b ) ; String sequenceName = atts . getValue ( tags . getTagById ( SEQUENCE_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( SEQUENCE_NAME ) + ": " + sequenceName ) ; m_CurrentFLD . setSequenceName ( sequenceName ) ; String locking = atts . getValue ( tags . getTagById ( LOCKING ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( LOCKING ) + ": " + locking ) ; b = ( Boolean . valueOf ( locking ) ) . booleanValue ( ) ; m_CurrentFLD . setLocking ( b ) ; String updateLock = atts . getValue ( tags . getTagById ( UPDATE_LOCK ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( UPDATE_LOCK ) + ": " + updateLock ) ; if ( checkString ( updateLock ) ) { b = ( Boolean . valueOf ( updateLock ) ) . booleanValue ( ) ; m_CurrentFLD . setUpdateLock ( b ) ; } String fieldConversion = atts . getValue ( tags . getTagById ( FIELD_CONVERSION ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_CONVERSION ) + ": " + fieldConversion ) ; if ( fieldConversion != null ) { m_CurrentFLD . setFieldConversionClassName ( fieldConversion ) ; } // set length attribute
String length = atts . getValue ( tags . getTagById ( LENGTH ) ) ; if ( length != null ) { int i = Integer . parseInt ( length ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( LENGTH ) + ": " + i ) ; m_CurrentFLD . setLength ( i ) ; m_CurrentFLD . setLengthSpecified ( true ) ; } // set precision attribute
String precision = atts . getValue ( tags . getTagById ( PRECISION ) ) ; if ( precision != null ) { int i = Integer . parseInt ( precision ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PRECISION ) + ": " + i ) ; m_CurrentFLD . setPrecision ( i ) ; m_CurrentFLD . setPrecisionSpecified ( true ) ; } // set scale attribute
String scale = atts . getValue ( tags . getTagById ( SCALE ) ) ; if ( scale != null ) { int i = Integer . parseInt ( scale ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( SCALE ) + ": " + i ) ; m_CurrentFLD . setScale ( i ) ; m_CurrentFLD . setScaleSpecified ( true ) ; } break ; } case REFERENCE_DESCRIPTOR : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( REFERENCE_DESCRIPTOR ) ) ; // set name attribute
name = atts . getValue ( tags . getTagById ( FIELD_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_NAME ) + ": " + name ) ; // set class - ref attribute
String classRef = atts . getValue ( tags . getTagById ( REFERENCED_CLASS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( REFERENCED_CLASS ) + ": " + classRef ) ; ObjectReferenceDescriptor ord ; if ( name . equals ( TAG_SUPER ) ) { // no longer needed sine SuperReferenceDescriptor was used
// checkThis ( classRef ) ;
// AnonymousObjectReferenceDescriptor aord =
// new AnonymousObjectReferenceDescriptor ( m _ CurrentCLD ) ;
// aord . setPersistentField ( null , TAG _ SUPER ) ;
// ord = aord ;
ord = new SuperReferenceDescriptor ( m_CurrentCLD ) ; } else { ord = new ObjectReferenceDescriptor ( m_CurrentCLD ) ; PersistentField pf = PersistentFieldFactory . createPersistentField ( m_CurrentCLD . getPersistentFieldClassName ( ) , m_CurrentCLD . getClassOfObject ( ) , name ) ; ord . setPersistentField ( pf ) ; } m_CurrentORD = ord ; // now we add the new descriptor
m_CurrentCLD . addObjectReferenceDescriptor ( m_CurrentORD ) ; m_CurrentORD . setItemClass ( ClassHelper . getClass ( classRef ) ) ; // prepare for custom attributes
this . m_CurrentAttrContainer = m_CurrentORD ; // set proxy attribute
String proxy = atts . getValue ( tags . getTagById ( PROXY_REFERENCE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PROXY_REFERENCE ) + ": " + proxy ) ; boolean b = ( Boolean . valueOf ( proxy ) ) . booleanValue ( ) ; m_CurrentORD . setLazy ( b ) ; // set proxyPrefetchingLimit attribute
String proxyPrefetchingLimit = atts . getValue ( tags . getTagById ( PROXY_PREFETCHING_LIMIT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PROXY_PREFETCHING_LIMIT ) + ": " + proxyPrefetchingLimit ) ; if ( proxyPrefetchingLimit == null ) { m_CurrentORD . setProxyPrefetchingLimit ( defProxyPrefetchingLimit ) ; } else { m_CurrentORD . setProxyPrefetchingLimit ( Integer . parseInt ( proxyPrefetchingLimit ) ) ; } // set refresh attribute
String refresh = atts . getValue ( tags . getTagById ( REFRESH ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( REFRESH ) + ": " + refresh ) ; b = ( Boolean . valueOf ( refresh ) ) . booleanValue ( ) ; m_CurrentORD . setRefresh ( b ) ; // set auto - retrieve attribute
String autoRetrieve = atts . getValue ( tags . getTagById ( AUTO_RETRIEVE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_RETRIEVE ) + ": " + autoRetrieve ) ; b = ( Boolean . valueOf ( autoRetrieve ) ) . booleanValue ( ) ; m_CurrentORD . setCascadeRetrieve ( b ) ; // set auto - update attribute
String autoUpdate = atts . getValue ( tags . getTagById ( AUTO_UPDATE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_UPDATE ) + ": " + autoUpdate ) ; if ( autoUpdate != null ) { m_CurrentORD . setCascadingStore ( autoUpdate ) ; } // set auto - delete attribute
String autoDelete = atts . getValue ( tags . getTagById ( AUTO_DELETE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_DELETE ) + ": " + autoDelete ) ; if ( autoDelete != null ) { m_CurrentORD . setCascadingDelete ( autoDelete ) ; } // set otm - dependent attribute
String otmDependent = atts . getValue ( tags . getTagById ( OTM_DEPENDENT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( OTM_DEPENDENT ) + ": " + otmDependent ) ; b = ( Boolean . valueOf ( otmDependent ) ) . booleanValue ( ) ; m_CurrentORD . setOtmDependent ( b ) ; break ; } case FOREIGN_KEY : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( FOREIGN_KEY ) ) ; String fieldIdRef = atts . getValue ( tags . getTagById ( FIELD_ID_REF ) ) ; if ( fieldIdRef != null ) { if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_ID_REF ) + ": " + fieldIdRef ) ; try { int fieldId ; fieldId = Integer . parseInt ( fieldIdRef ) ; m_CurrentORD . addForeignKeyField ( fieldId ) ; } catch ( NumberFormatException rex ) { throw new MetadataException ( tags . getTagById ( FIELD_ID_REF ) + " attribute must be an int. Found: " + fieldIdRef + ". Please check your repository file." , rex ) ; } } else { String fieldRef = atts . getValue ( tags . getTagById ( FIELD_REF ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_REF ) + ": " + fieldRef ) ; m_CurrentORD . addForeignKeyField ( fieldRef ) ; } break ; } case COLLECTION_DESCRIPTOR : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( COLLECTION_DESCRIPTOR ) ) ; m_CurrentCOD = new CollectionDescriptor ( m_CurrentCLD ) ; // prepare for custom attributes
this . m_CurrentAttrContainer = m_CurrentCOD ; // set name attribute
name = atts . getValue ( tags . getTagById ( FIELD_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_NAME ) + ": " + name ) ; PersistentField pf = PersistentFieldFactory . createPersistentField ( m_CurrentCLD . getPersistentFieldClassName ( ) , m_CurrentCLD . getClassOfObject ( ) , name ) ; m_CurrentCOD . setPersistentField ( pf ) ; // set collection - class attribute
String collectionClassName = atts . getValue ( tags . getTagById ( COLLECTION_CLASS ) ) ; if ( collectionClassName != null ) { if ( isDebug ) logger . debug ( " " + tags . getTagById ( COLLECTION_CLASS ) + ": " + collectionClassName ) ; m_CurrentCOD . setCollectionClass ( ClassHelper . getClass ( collectionClassName ) ) ; } // set element - class - ref attribute
String elementClassRef = atts . getValue ( tags . getTagById ( ITEMS_CLASS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ITEMS_CLASS ) + ": " + elementClassRef ) ; if ( elementClassRef != null ) { m_CurrentCOD . setItemClass ( ClassHelper . getClass ( elementClassRef ) ) ; } // set orderby and sort attributes :
String orderby = atts . getValue ( tags . getTagById ( ORDERBY ) ) ; String sort = atts . getValue ( tags . getTagById ( SORT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( SORT ) + ": " + orderby + ", " + sort ) ; if ( orderby != null ) { m_CurrentCOD . addOrderBy ( orderby , "ASC" . equalsIgnoreCase ( sort ) ) ; } // set indirection - table attribute
String indirectionTable = atts . getValue ( tags . getTagById ( INDIRECTION_TABLE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( INDIRECTION_TABLE ) + ": " + indirectionTable ) ; m_CurrentCOD . setIndirectionTable ( indirectionTable ) ; // set proxy attribute
String proxy = atts . getValue ( tags . getTagById ( PROXY_REFERENCE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PROXY_REFERENCE ) + ": " + proxy ) ; boolean b = ( Boolean . valueOf ( proxy ) ) . booleanValue ( ) ; m_CurrentCOD . setLazy ( b ) ; // set proxyPrefetchingLimit attribute
String proxyPrefetchingLimit = atts . getValue ( tags . getTagById ( PROXY_PREFETCHING_LIMIT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( PROXY_PREFETCHING_LIMIT ) + ": " + proxyPrefetchingLimit ) ; if ( proxyPrefetchingLimit == null ) { m_CurrentCOD . setProxyPrefetchingLimit ( defProxyPrefetchingLimit ) ; } else { m_CurrentCOD . setProxyPrefetchingLimit ( Integer . parseInt ( proxyPrefetchingLimit ) ) ; } // set refresh attribute
String refresh = atts . getValue ( tags . getTagById ( REFRESH ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( REFRESH ) + ": " + refresh ) ; b = ( Boolean . valueOf ( refresh ) ) . booleanValue ( ) ; m_CurrentCOD . setRefresh ( b ) ; // set auto - retrieve attribute
String autoRetrieve = atts . getValue ( tags . getTagById ( AUTO_RETRIEVE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_RETRIEVE ) + ": " + autoRetrieve ) ; b = ( Boolean . valueOf ( autoRetrieve ) ) . booleanValue ( ) ; m_CurrentCOD . setCascadeRetrieve ( b ) ; // set auto - update attribute
String autoUpdate = atts . getValue ( tags . getTagById ( AUTO_UPDATE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_UPDATE ) + ": " + autoUpdate ) ; if ( autoUpdate != null ) { m_CurrentCOD . setCascadingStore ( autoUpdate ) ; } // set auto - delete attribute
String autoDelete = atts . getValue ( tags . getTagById ( AUTO_DELETE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( AUTO_DELETE ) + ": " + autoDelete ) ; if ( autoDelete != null ) { m_CurrentCOD . setCascadingDelete ( autoDelete ) ; } // set otm - dependent attribute
String otmDependent = atts . getValue ( tags . getTagById ( OTM_DEPENDENT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( OTM_DEPENDENT ) + ": " + otmDependent ) ; b = ( Boolean . valueOf ( otmDependent ) ) . booleanValue ( ) ; m_CurrentCOD . setOtmDependent ( b ) ; m_CurrentCLD . addCollectionDescriptor ( m_CurrentCOD ) ; break ; } case ORDERBY : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( ORDERBY ) ) ; name = atts . getValue ( tags . getTagById ( FIELD_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_NAME ) + ": " + name ) ; String sort = atts . getValue ( tags . getTagById ( SORT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( SORT ) + ": " + name + ", " + sort ) ; m_CurrentCOD . addOrderBy ( name , "ASC" . equalsIgnoreCase ( sort ) ) ; break ; } case INVERSE_FK : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( INVERSE_FK ) ) ; String fieldIdRef = atts . getValue ( tags . getTagById ( FIELD_ID_REF ) ) ; if ( fieldIdRef != null ) { if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_ID_REF ) + ": " + fieldIdRef ) ; try { int fieldId ; fieldId = Integer . parseInt ( fieldIdRef ) ; m_CurrentCOD . addForeignKeyField ( fieldId ) ; } catch ( NumberFormatException rex ) { throw new MetadataException ( tags . getTagById ( FIELD_ID_REF ) + " attribute must be an int. Found: " + fieldIdRef + " Please check your repository file." , rex ) ; } } else { String fieldRef = atts . getValue ( tags . getTagById ( FIELD_REF ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_REF ) + ": " + fieldRef ) ; m_CurrentCOD . addForeignKeyField ( fieldRef ) ; } break ; } case FK_POINTING_TO_THIS_CLASS : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( FK_POINTING_TO_THIS_CLASS ) ) ; String column = atts . getValue ( "column" ) ; if ( isDebug ) logger . debug ( " " + "column" + ": " + column ) ; m_CurrentCOD . 
addFkToThisClass ( column ) ; break ; } case FK_POINTING_TO_ITEMS_CLASS : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( FK_POINTING_TO_ITEMS_CLASS ) ) ; String column = atts . getValue ( "column" ) ; if ( isDebug ) logger . debug ( " " + "column" + ": " + column ) ; m_CurrentCOD . addFkToItemClass ( column ) ; break ; } case ATTRIBUTE : { // handle custom attributes
String attributeName = atts . getValue ( tags . getTagById ( ATTRIBUTE_NAME ) ) ; String attributeValue = atts . getValue ( tags . getTagById ( ATTRIBUTE_VALUE ) ) ; // If we have a container to store this attribute in , then do so .
if ( this . m_CurrentAttrContainer != null ) { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( ATTRIBUTE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ATTRIBUTE_NAME ) + ": " + attributeName ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ATTRIBUTE_VALUE ) + ": " + attributeValue ) ; this . m_CurrentAttrContainer . addAttribute ( attributeName , attributeValue ) ; } else { // logger . debug ( " Found attribute ( name = " + attributeName + " , value = " + attributeValue +
// " ) but I can not assign them to a descriptor " ) ;
} break ; } // case SEQUENCE _ MANAGER :
// if ( isDebug ) logger . debug ( " > " + tags . getTagById ( SEQUENCE _ MANAGER ) ) ;
// / / currently it ' s not possible to specify SM on class - descriptor level
// / / thus we use a dummy object to prevent ATTRIBUTE container report
// / / unassigned attributes
// this . m _ CurrentAttrContainer = new SequenceDescriptor ( null ) ;
// break ;
case QUERY_CUSTOMIZER : { // set collection - class attribute
String className = atts . getValue ( "class" ) ; QueryCustomizer queryCust ; if ( className != null ) { if ( isDebug ) logger . debug ( " " + "class" + ": " + className ) ; queryCust = ( QueryCustomizer ) ClassHelper . newInstance ( className ) ; m_CurrentAttrContainer = queryCust ; m_CurrentCOD . setQueryCustomizer ( queryCust ) ; } break ; } case INDEX_DESCRIPTOR : { m_CurrentIndexDescriptor = new IndexDescriptor ( ) ; m_CurrentIndexDescriptor . setName ( atts . getValue ( tags . getTagById ( NAME ) ) ) ; m_CurrentIndexDescriptor . setUnique ( Boolean . valueOf ( atts . getValue ( tags . getTagById ( UNIQUE ) ) ) . booleanValue ( ) ) ; break ; } case INDEX_COLUMN : { m_CurrentIndexDescriptor . getIndexColumns ( ) . add ( atts . getValue ( tags . getTagById ( NAME ) ) ) ; break ; } case INSERT_PROCEDURE : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( INSERT_PROCEDURE ) ) ; // Get the proc name and the ' include all fields ' setting
String procName = atts . getValue ( tags . getTagById ( NAME ) ) ; String includeAllFields = atts . getValue ( tags . getTagById ( INCLUDE_ALL_FIELDS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( NAME ) + ": " + procName ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( INCLUDE_ALL_FIELDS ) + ": " + includeAllFields ) ; // create the procedure descriptor
InsertProcedureDescriptor proc = new InsertProcedureDescriptor ( m_CurrentCLD , procName , Boolean . valueOf ( includeAllFields ) . booleanValue ( ) ) ; m_CurrentProcedure = proc ; // Get the name of the field ref that will receive the
// return value .
String returnFieldRefName = atts . getValue ( tags . getTagById ( RETURN_FIELD_REF ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( RETURN_FIELD_REF ) + ": " + returnFieldRefName ) ; proc . setReturnValueFieldRef ( returnFieldRefName ) ; break ; } case UPDATE_PROCEDURE : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( UPDATE_PROCEDURE ) ) ; // Get the proc name and the ' include all fields ' setting
String procName = atts . getValue ( tags . getTagById ( NAME ) ) ; String includeAllFields = atts . getValue ( tags . getTagById ( INCLUDE_ALL_FIELDS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( NAME ) + ": " + procName ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( INCLUDE_ALL_FIELDS ) + ": " + includeAllFields ) ; // create the procedure descriptor
UpdateProcedureDescriptor proc = new UpdateProcedureDescriptor ( m_CurrentCLD , procName , Boolean . valueOf ( includeAllFields ) . booleanValue ( ) ) ; m_CurrentProcedure = proc ; // Get the name of the field ref that will receive the
// return value .
String returnFieldRefName = atts . getValue ( tags . getTagById ( RETURN_FIELD_REF ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( RETURN_FIELD_REF ) + ": " + returnFieldRefName ) ; proc . setReturnValueFieldRef ( returnFieldRefName ) ; break ; } case DELETE_PROCEDURE : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( DELETE_PROCEDURE ) ) ; // Get the proc name and the ' include all fields ' setting
String procName = atts . getValue ( tags . getTagById ( NAME ) ) ; String includeAllPkFields = atts . getValue ( tags . getTagById ( INCLUDE_PK_FIELDS_ONLY ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( NAME ) + ": " + procName ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( INCLUDE_PK_FIELDS_ONLY ) + ": " + includeAllPkFields ) ; // create the procedure descriptor
DeleteProcedureDescriptor proc = new DeleteProcedureDescriptor ( m_CurrentCLD , procName , Boolean . valueOf ( includeAllPkFields ) . booleanValue ( ) ) ; m_CurrentProcedure = proc ; // Get the name of the field ref that will receive the
// return value .
String returnFieldRefName = atts . getValue ( tags . getTagById ( RETURN_FIELD_REF ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( RETURN_FIELD_REF ) + ": " + returnFieldRefName ) ; proc . setReturnValueFieldRef ( returnFieldRefName ) ; break ; } case CONSTANT_ARGUMENT : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( CONSTANT_ARGUMENT ) ) ; ArgumentDescriptor arg = new ArgumentDescriptor ( m_CurrentProcedure ) ; // Get the value
String value = atts . getValue ( tags . getTagById ( VALUE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( VALUE ) + ": " + value ) ; // Set the value for the argument
arg . setValue ( value ) ; // Add the argument to the procedure .
m_CurrentProcedure . addArgument ( arg ) ; break ; } case RUNTIME_ARGUMENT : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( RUNTIME_ARGUMENT ) ) ; ArgumentDescriptor arg = new ArgumentDescriptor ( m_CurrentProcedure ) ; // Get the name of the field ref
String fieldRefName = atts . getValue ( tags . getTagById ( FIELD_REF ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( FIELD_REF ) + ": " + fieldRefName ) ; // Get the ' return ' value .
String returnValue = atts . getValue ( tags . getTagById ( RETURN ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( RETURN ) + ": " + returnValue ) ; // Set the value for the argument .
if ( ( fieldRefName != null ) && ( fieldRefName . trim ( ) . length ( ) != 0 ) ) { arg . setValue ( fieldRefName , Boolean . valueOf ( returnValue ) . booleanValue ( ) ) ; } // Add the argument to the procedure .
m_CurrentProcedure . addArgument ( arg ) ; break ; } default : { // nop
} } } catch ( Exception ex ) { logger . error ( "Exception while read metadata" , ex ) ; if ( ex instanceof MetadataException ) throw ( MetadataException ) ex ; else throw new MetadataException ( "Exception when reading metadata information," + " please check your repository.xml file" , ex ) ; } |
public class AbortVaultLockRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( AbortVaultLockRequest abortVaultLockRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( abortVaultLockRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( abortVaultLockRequest . getAccountId ( ) , ACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( abortVaultLockRequest . getVaultName ( ) , VAULTNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class EntityDescFactory {

    /**
     * Creates an entity description for the given table, using the specified entity name.
     *
     * NOTE: the setter sequence below is order-dependent — the listener class simple name
     * is derived from the prefix/suffix/simple-name values assigned just before it.
     *
     * @param tableMeta    the table metadata the entity is generated from
     * @param entityPrefix the prefix for the generated entity class (may be {@code null}; defaults to "")
     * @param entitySuffix the suffix for the generated entity class (may be {@code null}; defaults to "")
     * @param simpleName   the simple name of the entity
     * @return the populated entity description
     */
    public EntityDesc createEntityDesc(TableMeta tableMeta, String entityPrefix, String entitySuffix, String simpleName) {
        EntityDesc entityDesc = new EntityDesc();
        // Factory-level configuration (fields of this factory).
        entityDesc.setNamingType(namingType);
        entityDesc.setOriginalStatesPropertyName(originalStatesPropertyName);
        // Table identity copied from the metadata.
        entityDesc.setCatalogName(tableMeta.getCatalogName());
        entityDesc.setSchemaName(tableMeta.getSchemaName());
        entityDesc.setTableName(tableMeta.getName());
        entityDesc.setQualifiedTableName(tableMeta.getQualifiedTableName());
        entityDesc.setPackageName(packageName);
        // Null-safe prefix/suffix: defaultString maps null to "".
        entityDesc.setEntityPrefix(StringUtil.defaultString(entityPrefix, ""));
        entityDesc.setEntitySuffix(StringUtil.defaultString(entitySuffix, ""));
        entityDesc.setSimpleName(simpleName);
        if (superclass != null) {
            entityDesc.setSuperclassSimpleName(superclass.getSimpleName());
        }
        // Listener name = prefix + (simpleName + suffix + listener suffix);
        // relies on the prefix/suffix/simpleName setters above having run already.
        entityDesc.setListenerClassSimpleName(entityDesc.getEntityPrefix()
                + ClassUtil.getSimpleName(entityDesc.getSimpleName() + entityDesc.getEntitySuffix()
                        + Constants.ENTITY_LISTENER_SUFFIX));
        entityDesc.setCompositeId(tableMeta.hasCompositePrimaryKey());
        entityDesc.setComment(tableMeta.comment);
        // Presentation / generation flags from factory configuration.
        entityDesc.setShowCatalogName(showCatalogName);
        entityDesc.setShowSchemaName(showSchemaName);
        entityDesc.setShowDbComment(true);
        entityDesc.setUseAccessor(useAccessor);
        entityDesc.setUseListener(useListener);
        entityDesc.setTemplateName(Constants.ENTITY_TEMPLATE);
        // Delegate the detailed population steps (table-name display, properties, imports).
        handleShowTableName(entityDesc, tableMeta);
        handleEntityPropertyDesc(entityDesc, tableMeta);
        handleImportName(entityDesc, tableMeta);
        return entityDesc;
    }
}
public class CommerceShippingMethodUtil {

    /**
     * Returns a range of all the commerce shipping methods where groupId = &#63;.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys, they are indexes in the
     * result set; <code>0</code> refers to the first result. Setting both to
     * {@link QueryUtil#ALL_POS} returns the full result set.
     *
     * @param groupId the group ID
     * @param start the lower bound of the range of commerce shipping methods (inclusive)
     * @param end the upper bound of the range of commerce shipping methods (not inclusive)
     * @return the range of matching commerce shipping methods
     */
    public static List<CommerceShippingMethod> findByGroupId(long groupId, int start, int end) {
        // Pure delegation to the persistence layer; all pagination logic lives there.
        return getPersistence().findByGroupId(groupId, start, end);
    }
}
public class InnerJoinNodeImpl { /** * TODO : explain */
private IQTree liftBindingAfterPropagatingCondition ( ImmutableList < IQTree > initialChildren , VariableGenerator variableGenerator , IQProperties currentIQProperties ) { } } | final ImmutableSet < Variable > projectedVariables = getProjectedVariables ( initialChildren ) ; // Non - final
ImmutableList < IQTree > currentChildren = initialChildren ; ImmutableSubstitution < ImmutableTerm > currentSubstitution = substitutionFactory . getSubstitution ( ) ; Optional < ImmutableExpression > currentJoiningCondition = getOptionalFilterCondition ( ) ; boolean hasConverged = false ; try { int i = 0 ; while ( ( ! hasConverged ) && ( i ++ < MAX_ITERATIONS ) ) { LiftingStepResults results = liftChildBinding ( currentChildren , currentJoiningCondition , variableGenerator ) ; hasConverged = results . hasConverged ; currentChildren = results . children ; currentSubstitution = results . substitution . composeWith ( currentSubstitution ) ; currentJoiningCondition = results . joiningCondition ; } if ( i >= MAX_ITERATIONS ) throw new MinorOntopInternalBugException ( "InnerJoin.liftBinding() did not converge after " + MAX_ITERATIONS ) ; IQTree joinIQ = createJoinOrFilterOrTrue ( currentChildren , currentJoiningCondition , currentIQProperties ) ; AscendingSubstitutionNormalization ascendingNormalization = normalizeAscendingSubstitution ( currentSubstitution , projectedVariables ) ; IQTree newJoinIQ = ascendingNormalization . normalizeChild ( joinIQ ) ; ImmutableSet < Variable > childrenVariables = getProjectedVariables ( currentChildren ) ; /* * NB : creates a construction if a substitution needs to be propagated and / or if some variables
* have to be projected away */
return ascendingNormalization . generateTopConstructionNode ( ) . map ( Optional :: of ) . orElseGet ( ( ) -> Optional . of ( projectedVariables ) . filter ( vars -> ! vars . equals ( childrenVariables ) ) . map ( iqFactory :: createConstructionNode ) ) . map ( constructionNode -> ( IQTree ) iqFactory . createUnaryIQTree ( constructionNode , newJoinIQ , currentIQProperties . declareLifted ( ) ) ) . orElse ( newJoinIQ ) ; } catch ( EmptyIQException e ) { return iqFactory . createEmptyNode ( projectedVariables ) ; } |
public class DelegatingLifecycleProvider { /** * Returns a LifecycleProvider that delegates to an existing provider . The provider is managed by Guice
* and injected . */
public static < U > LifecycleProvider < U > of ( @ Nonnull final Class < ? extends Provider < U > > providerClass ) { } } | return new DelegatingLifecycleProvider < U > ( providerClass , null ) ; |
public class OffsetDateTime { /** * Returns a copy of this { @ code OffsetDateTime } with the specified number of years subtracted .
* This method subtracts the specified amount from the years field in three steps :
* < ol >
* < li > Subtract the input years from the year field < / li >
* < li > Check if the resulting date would be invalid < / li >
* < li > Adjust the day - of - month to the last valid day if necessary < / li >
* < / ol >
* For example , 2008-02-29 ( leap year ) minus one year would result in the
* invalid date 2009-02-29 ( standard year ) . Instead of returning an invalid
* result , the last valid day of the month , 2009-02-28 , is selected instead .
* This instance is immutable and unaffected by this method call .
* @ param years the years to subtract , may be negative
* @ return an { @ code OffsetDateTime } based on this date - time with the years subtracted , not null
* @ throws DateTimeException if the result exceeds the supported date range */
public OffsetDateTime minusYears ( long years ) { } } | return ( years == Long . MIN_VALUE ? plusYears ( Long . MAX_VALUE ) . plusYears ( 1 ) : plusYears ( - years ) ) ; |
public class GetRegionsResult { /** * An array of key - value pairs containing information about your get regions request .
* @ param regions
* An array of key - value pairs containing information about your get regions request . */
public void setRegions ( java . util . Collection < Region > regions ) { } } | if ( regions == null ) { this . regions = null ; return ; } this . regions = new java . util . ArrayList < Region > ( regions ) ; |
public class DTMDefaultBase { /** * Given an expanded name , return an ID . If the expanded - name does not
* exist in the internal tables , the entry will be created , and the ID will
* be returned . Any additional nodes that are created that have this
* expanded name will use this ID .
* @ param type The simple type , i . e . one of ELEMENT , ATTRIBUTE , etc .
* @ param namespace The namespace URI , which may be null , may be an empty
* string ( which will be the same as null ) , or may be a
* namespace URI .
* @ param localName The local name string , which must be a valid
* < a href = " http : / / www . w3 . org / TR / REC - xml - names / " > NCName < / a > .
* @ return the expanded - name id of the node . */
public int getExpandedTypeID ( String namespace , String localName , int type ) { } } | ExpandedNameTable ent = m_expandedNameTable ; return ent . getExpandedTypeID ( namespace , localName , type ) ; |
public class FunctionRegistry { /** * Check if variable expression is a custom function .
* Expression has to start with one of the registered function library prefix .
* @ param variableExpression to be checked
* @ return flag ( true / false ) */
public boolean isFunction ( final String variableExpression ) { } } | if ( variableExpression == null || variableExpression . length ( ) == 0 ) { return false ; } for ( int i = 0 ; i < functionLibraries . size ( ) ; i ++ ) { FunctionLibrary lib = ( FunctionLibrary ) functionLibraries . get ( i ) ; if ( variableExpression . startsWith ( lib . getPrefix ( ) ) ) { return true ; } } return false ; |
public class Feature { /** * Set attribute value of given type .
* @ param name attribute name
* @ param value attribute value */
public void setIntegerAttribute ( String name , Integer value ) { } } | Attribute attribute = getAttributes ( ) . get ( name ) ; if ( ! ( attribute instanceof IntegerAttribute ) ) { throw new IllegalStateException ( "Cannot set integer value on attribute with different type, " + attribute . getClass ( ) . getName ( ) + " setting value " + value ) ; } ( ( IntegerAttribute ) attribute ) . setValue ( value ) ; |
public class USCImpl {

    /**
     * Reflectively sets the value of the EMF feature identified by {@code featureID};
     * unknown features are delegated to the superclass.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.USC__BYPSIDEN:
                // Route the reflective set to the typed setter.
                setBYPSIDEN((Integer) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class WaveformDetailComponent {

    /**
     * Changes the waveform preview being drawn. This will be quickly overruled if a
     * player is being monitored, but can be used in other contexts.
     *
     * @param waveform the waveform detail to display
     * @param cueList used to draw cue and memory points
     * @param beatGrid the locations of the beats, so they can be drawn
     */
    public void setWaveform(WaveformDetail waveform, CueList cueList, BeatGrid beatGrid) {
        // Fields are set via .set(...) — presumably atomic reference holders shared
        // with the painting code (TODO confirm against the field declarations).
        this.waveform.set(waveform);
        this.cueList.set(cueList);
        this.beatGrid.set(beatGrid);
        // Discard any playback position tied to the previous waveform before repainting.
        clearPlaybackState();
        repaint();
        // When not auto-scrolling, the component's preferred size depends on the
        // waveform, so a layout pass is requested as well.
        if (!autoScroll.get()) {
            invalidate();
        }
    }
}
public class SearchDevicesResult { /** * The devices that meet the specified set of filter criteria , in sort order .
* @ param devices
* The devices that meet the specified set of filter criteria , in sort order . */
public void setDevices ( java . util . Collection < DeviceData > devices ) { } } | if ( devices == null ) { this . devices = null ; return ; } this . devices = new java . util . ArrayList < DeviceData > ( devices ) ; |
public class PluginArgument { /** * Resolves this plugin argument to an argument resolver .
* @ return An argument resolver that represents this plugin argument . */
public Plugin . Factory . UsingReflection . ArgumentResolver toArgumentResolver ( ) { } } | return new Plugin . Factory . UsingReflection . ArgumentResolver . ForIndex . WithDynamicType ( index , value ) ; |
public class Route { /** * Matches / index to / index or / me / 1 to / person / { id } .
* @ param method the method
* @ param uri the uri
* @ return True if the actual route matches a raw rout . False if not . */
public boolean matches ( HttpMethod method , String uri ) { } } | if ( this . httpMethod == method ) { Matcher matcher = regex . matcher ( uri ) ; return matcher . matches ( ) ; } else { return false ; } |
public class TileSetColorImpl {

    /**
     * Sets the SIZE3 attribute and fires an EMF SET notification carrying the
     * old and new values when any adapter requires it.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSIZE3(Integer newSIZE3) {
        Integer oldSIZE3 = size3;
        size3 = newSIZE3;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.TILE_SET_COLOR__SIZE3, oldSIZE3, size3));
    }
}
public class EmbeddedNeo4jDialect {

    /**
     * Creates (or finds) the Neo4j relationship backing one row of an association.
     * <p>
     * When dealing with some scenarios like, for example, a bidirectional association,
     * OGM calls this method twice: first with the information related to the owner of
     * the association and the {@link RowKey}, then with the same {@link RowKey} but
     * with the {@link AssociationKey} referring to the other side of the association.
     *
     * @param associationKey identifies the association and carries its kind
     * @param associationRow the row being persisted
     * @param associatedEntityKeyMetadata metadata of the entity on the other side
     * @param associationContext supplies the tuple type context for entity associations
     * @return the created or found relationship
     * @throws AssertionFailure if the association kind is not recognized
     */
    private Relationship createRelationship(AssociationKey associationKey, Tuple associationRow,
            AssociatedEntityKeyMetadata associatedEntityKeyMetadata, AssociationContext associationContext) {
        // Dispatch on the association kind: embedded collections get a dedicated
        // embedded node, regular associations link two existing entity nodes.
        switch (associationKey.getMetadata().getAssociationKind()) {
            case EMBEDDED_COLLECTION:
                return createRelationshipWithEmbeddedNode(associationKey, associationRow, associatedEntityKeyMetadata);
            case ASSOCIATION:
                return findOrCreateRelationshipWithEntityNode(associationKey, associationRow,
                        associatedEntityKeyMetadata, associationContext.getTupleTypeContext());
            default:
                // Defensive: should be unreachable unless a new kind is added to the enum.
                throw new AssertionFailure("Unrecognized associationKind: "
                        + associationKey.getMetadata().getAssociationKind());
        }
    }
}
public class AbstractRedisStorage { /** * Attempt to acquire a lock
* @ return true if lock was successfully acquired ; false otherwise */
public boolean lock ( T jedis ) { } } | UUID lockId = UUID . randomUUID ( ) ; final String setResponse = jedis . set ( redisSchema . lockKey ( ) , lockId . toString ( ) , "NX" , "PX" , lockTimeout ) ; boolean lockAcquired = ! isNullOrEmpty ( setResponse ) && setResponse . equals ( "OK" ) ; if ( lockAcquired ) { // save the random value used to lock so that we can successfully unlock later
lockValue = lockId ; } return lockAcquired ; |
public class SocketReceiver { /** * Returns a Vector of SocketDetail representing the IP / Domain name
* of the currently connected sockets that this receiver has
* been responsible for creating .
* @ return Vector of SocketDetails */
public Vector getConnectedSocketDetails ( ) { } } | Vector details = new Vector ( socketList . size ( ) ) ; for ( Enumeration enumeration = socketList . elements ( ) ; enumeration . hasMoreElements ( ) ; ) { Socket socket = ( Socket ) enumeration . nextElement ( ) ; details . add ( new SocketDetail ( socket , ( SocketNode13 ) socketMap . get ( socket ) ) ) ; } return details ; |
public class Strings {
    /**
     * Test if the given String ends with the specified suffix,
     * ignoring upper/lower case.
     *
     * <p>Uses {@link String#regionMatches(boolean, int, String, int, int)} so
     * the comparison is independent of the default locale (the previous
     * {@code toLowerCase()}-based comparison could misbehave under locales such
     * as Turkish with its dotless i) and allocates no temporary strings.
     *
     * @param str the String to check; {@code null} yields {@code false}
     * @param suffix the suffix to look for; {@code null} yields {@code false}
     * @return {@code true} if {@code str} ends with {@code suffix}, ignoring case
     * @see java.lang.String#endsWith
     */
    public static boolean endsWithIgnoreCase(String str, String suffix) {
        if (str == null || suffix == null) {
            return false;
        }
        if (str.length() < suffix.length()) {
            return false;
        }
        return str.regionMatches(true, str.length() - suffix.length(), suffix, 0, suffix.length());
    }
}
public class JvmSpecializedTypeReferenceImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case TypesPackage . JVM_SPECIALIZED_TYPE_REFERENCE__EQUIVALENT : return getEquivalent ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class JaxWsUtils { /** * Get serviceName ' s QName of Web Service
* @ param classInfo
* @ return */
public static QName getServiceQName ( ClassInfo classInfo , String seiClassName , String targetNamespace ) { } } | AnnotationInfo annotationInfo = getAnnotationInfoFromClass ( classInfo , "Service QName" ) ; if ( annotationInfo == null ) { return null ; } // serviceName can only be defined in implementation bean , targetNamespace should be the implemented one .
return getQName ( classInfo , targetNamespace , annotationInfo . getValue ( JaxWsConstants . SERVICENAME_ATTRIBUTE ) . getStringValue ( ) , JaxWsConstants . SERVICENAME_ATTRIBUTE_SUFFIX ) ; |
public class CmsHtmlConverter { /** * Converts the given HTML code according to the settings of this converter . < p >
* If an any error occurs during the conversion process , the original input is returned unmodified . < p >
* @ param htmlInput HTML input stored in an array of bytes
* @ return array of bytes containing the converted HTML */
public byte [ ] convertToByteSilent ( byte [ ] htmlInput ) { } } | try { return convertToByte ( htmlInput ) ; } catch ( Exception e ) { if ( LOG . isWarnEnabled ( ) ) { LOG . warn ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_CONVERSION_BYTE_FAILED_0 ) , e ) ; } return htmlInput ; } |
public class DriverFactory { /** * Loads all instances according to default configuration file . */
public static void configure ( ) { } } | InputStream inputStream = null ; try { URL configUrl = getConfigUrl ( ) ; if ( configUrl == null ) { throw new ConfigurationException ( "esigate.properties configuration file " + "was not found in the classpath" ) ; } inputStream = configUrl . openStream ( ) ; Properties merged = new Properties ( ) ; if ( inputStream != null ) { Properties props = new Properties ( ) ; props . load ( inputStream ) ; merged . putAll ( props ) ; } configure ( merged ) ; } catch ( IOException e ) { throw new ConfigurationException ( "Error loading configuration" , e ) ; } finally { try { if ( inputStream != null ) { inputStream . close ( ) ; } } catch ( IOException e ) { throw new ConfigurationException ( "failed to close stream" , e ) ; } } |
public class ClassUtils { /** * Determines whether the given Object is an instance of the specified Class . Note , an Object cannot be an
* instance of null , so this method returns false if the Class type is null or the Object is null .
* @ param obj the Object to test as an instance of the specified Class type .
* @ param type the Class type used in the instanceof operation .
* @ return a boolean value indicating whether the Object is an instance of the Class type .
* @ see java . lang . Class # isInstance ( Object )
* @ see # assignableTo ( Class , Class ) */
@ NullSafe public static boolean instanceOf ( Object obj , Class < ? > type ) { } } | return type != null && type . isInstance ( obj ) ; |
public class VfsStringWriter { /** * Opens a write stream using this StringWriter as the resulting string */
public WriteStreamOld openWrite ( ) { } } | if ( _cb != null ) _cb . clear ( ) ; else _cb = CharBuffer . allocate ( ) ; if ( _ws == null ) _ws = new WriteStreamOld ( this ) ; else _ws . init ( this ) ; try { _ws . setEncoding ( "utf-8" ) ; } catch ( UnsupportedEncodingException e ) { } return _ws ; |
public class TraceImpl { /** * Internal implementation of info NLS message tracing .
* @ param source
* @ param sourceClass
* @ param methodName
* @ param messageIdentifier
* @ param object */
private final void internalInfo ( Object source , Class sourceClass , String methodName , String messageIdentifier , Object object ) { } } | if ( usePrintWriterForTrace ) { java . io . PrintWriter printWriter = traceFactory . getPrintWriter ( ) ; if ( printWriter != null ) { StringBuffer stringBuffer = new StringBuffer ( new java . util . Date ( ) . toString ( ) ) ; stringBuffer . append ( " I " ) ; stringBuffer . append ( sourceClass . getName ( ) ) ; stringBuffer . append ( "." ) ; stringBuffer . append ( methodName ) ; printWriter . println ( stringBuffer . toString ( ) ) ; printWriter . println ( "\t\t" + traceFactory . nls . format ( messageIdentifier , object ) ) ; printWriter . flush ( ) ; } } if ( object != null ) { SibTr . info ( traceComponent , messageIdentifier , object ) ; } else { SibTr . info ( traceComponent , messageIdentifier ) ; } |
public class TumblingProcessingTimeWindows { /** * Creates a new { @ code TumblingProcessingTimeWindows } { @ link WindowAssigner } that assigns
* elements to time windows based on the element timestamp and offset .
* < p > For example , if you want window a stream by hour , but window begins at the 15th minutes
* of each hour , you can use { @ code of ( Time . hours ( 1 ) , Time . minutes ( 15 ) ) } , then you will get
* time windows start at 0:15:00,1:15:00,2:15:00 , etc .
* < p > Rather than that , if you are living in somewhere which is not using UTC ± 00:00 time ,
* such as China which is using UTC + 08:00 , and you want a time window with size of one day ,
* and window begins at every 00:00:00 of local time , you may use { @ code of ( Time . days ( 1 ) , Time . hours ( - 8 ) ) } .
* The parameter of offset is { @ code Time . hours ( - 8 ) ) } since UTC + 08:00 is 8 hours earlier than UTC time .
* @ param size The size of the generated windows .
* @ param offset The offset which window start would be shifted by .
* @ return The time policy . */
public static TumblingProcessingTimeWindows of ( Time size , Time offset ) { } } | return new TumblingProcessingTimeWindows ( size . toMilliseconds ( ) , offset . toMilliseconds ( ) ) ; |
public class LongBuffer { /** * Writes all the remaining longs of the { @ code src } long buffer to this buffer ' s current
* position , and increases both buffers ' position by the number of longs copied .
* @ param src the source long buffer .
* @ return this buffer .
* @ exception BufferOverflowException if { @ code src . remaining ( ) } is greater than this buffer ' s
* { @ code remaining ( ) } .
* @ exception IllegalArgumentException if { @ code src } is this buffer .
* @ exception ReadOnlyBufferException if no changes may be made to the contents of this buffer . */
public LongBuffer put ( LongBuffer src ) { } } | if ( src == this ) { throw new IllegalArgumentException ( ) ; } if ( src . remaining ( ) > remaining ( ) ) { throw new BufferOverflowException ( ) ; } long [ ] contents = new long [ src . remaining ( ) ] ; src . get ( contents ) ; put ( contents ) ; return this ; |
public class CommerceWarehousePersistenceImpl { /** * Returns a range of all the commerce warehouses .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceWarehouseModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param start the lower bound of the range of commerce warehouses
* @ param end the upper bound of the range of commerce warehouses ( not inclusive )
* @ return the range of commerce warehouses */
@ Override public List < CommerceWarehouse > findAll ( int start , int end ) { } } | return findAll ( start , end , null ) ; |
public class CPDefinitionVirtualSettingPersistenceImpl { /** * Returns the first cp definition virtual setting in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition virtual setting
* @ throws NoSuchCPDefinitionVirtualSettingException if a matching cp definition virtual setting could not be found */
@ Override public CPDefinitionVirtualSetting findByUuid_C_First ( String uuid , long companyId , OrderByComparator < CPDefinitionVirtualSetting > orderByComparator ) throws NoSuchCPDefinitionVirtualSettingException { } } | CPDefinitionVirtualSetting cpDefinitionVirtualSetting = fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ; if ( cpDefinitionVirtualSetting != null ) { return cpDefinitionVirtualSetting ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionVirtualSettingException ( msg . toString ( ) ) ; |
public class TCAPProviderImpl { /** * Schedules the given operation-timeout task on the internal executor, firing once after the given delay. * @ param operationTimerTask the task to run when the invoke timer expires * @ param invokeTimeout the delay in milliseconds before the task runs * @ return a Future that can be used to cancel the pending timer */ public Future createOperationTimer ( Runnable operationTimerTask , long invokeTimeout ) { } } | return this . _EXECUTOR . schedule ( operationTimerTask , invokeTimeout , TimeUnit . MILLISECONDS ) ; |
public class Settings { /** * load the files from - Dcom . pahakia . settings . dir = directory */
public static final void init ( ) { } } | String settingDir = System . getProperty ( SETTINGS_DIR ) ; if ( settingDir == null ) { settingDir = System . getenv ( SETTINGS_DIR ) ; } if ( settingDir == null ) { throw Fault . create ( SettingsFaultCodes . SettingsDirNotSpecified , SETTINGS_DIR ) ; } init ( settingDir ) ; |
public class EnvelopesApi { /** * Creates an envelope .
* Creates an envelope . Using this function you can : * Create an envelope and send it . * Create an envelope from an existing template and send it . In either case , you can choose to save the envelope as a draft envelope instead of sending it by setting the request & # 39 ; s & # x60 ; status & # x60 ; property to & # x60 ; created & # x60 ; instead of & # x60 ; sent & # x60 ; . # # Sending Envelopes Documents can be included with the Envelopes : : create call itself or a template can include documents . Documents can be added by using a multi - part / form request or by using the & # x60 ; documentBase64 & # x60 ; field of the [ & # x60 ; document & # x60 ; object ] ( # / definitions / document ) # # # Recipient Types An [ & # x60 ; envelopeDefinition & # x60 ; object ] ( # / definitions / envelopeDefinition ) is used as the method & # 39 ; s body . Envelope recipients can be defined in the envelope or in templates . The & # x60 ; envelopeDefinition & # x60 ; object & # 39 ; s & # x60 ; recipients & # x60 ; field is an [ & # x60 ; EnvelopeRecipients & # x60 ; resource object ] ( # / definitions / EnvelopeRecipients ) . 
It includes arrays of the seven types of recipients defined by DocuSign : Recipient type | Object definition - - - - - | - - - - - agent ( can add name and email information for later recipients / signers ) | [ & # x60 ; agent & # x60 ; ] ( # / definitions / agent ) carbon copy ( receives a copy of the documents ) | [ & # x60 ; carbonCopy & # x60 ; ] ( # / definitions / carbonCopy ) certified delivery ( receives a copy of the documents and must acknowledge receipt ) | [ & # x60 ; certifiedDelivery & # x60 ; ] ( # / definitions / certifiedDelivery ) editor ( can change recipients and document fields for later recipients / signers ) | [ & # x60 ; editor & # x60 ; ] ( # / definitions / editor ) in - person signer ( \ & quot ; hosts \ & quot ; someone who signs in - person ) | [ & # x60 ; inPersonSigner & # x60 ; ] ( # / definitions / inPersonSigner ) intermediary ( can add name and email information for some later recipients / signers . ) | [ & # x60 ; intermediary & # x60 ; ] ( # / definitions / intermediary ) signer ( signs and / or updates document fields ) | [ & # x60 ; signer & # x60 ; ] ( # / definitions / signer ) Additional information about the different types of recipients is available from the [ & # x60 ; EnvelopeRecipients & # x60 ; resource page ] ( . . / . . / EnvelopeRecipients ) and from the [ Developer Center ] ( https : / / www . docusign . com / developer - center / explore / features / recipients ) # # # Tabs Tabs ( also referred to as & # x60 ; tags & # x60 ; and as & # x60 ; fields & # x60 ; in the web sending user interface ) , can be defined in the & # x60 ; envelopeDefinition & # x60 ; , in templates , by transforming PDF Form Fields , or by using Composite Templates ( see below ) . Defining tabs : the & # x60 ; inPersonSigner & # x60 ; , and & # x60 ; signer & # x60 ; recipient objects include a & # x60 ; tabs & # x60 ; field . It is an [ & # x60 ; EnvelopeTabs & # x60 ; resource object ] ( # / definitions / EnvelopeTabs ) . 
It includes arrays of the 24 different tab types available . See the [ & # x60 ; EnvelopeTabs & # x60 ; resource ] ( . . / . . / EnvelopeTabs ) for more information . # # Using Templates Envelopes use specific people or groups as recipients . Templates can specify a role , eg & # x60 ; account _ manager . & # x60 ; When a template is used in an envelope , the roles must be replaced with specific people or groups . When you create an envelope using a & # x60 ; templateId & # x60 ; , the different recipient type objects within the [ & # x60 ; EnvelopeRecipients & # x60 ; object ] ( # / definitions / EnvelopeRecipients ) are used to assign recipients to the template & # 39 ; s roles via the & # x60 ; roleName & # x60 ; property . The recipient objects can also override settings that were specified in the template , and set values for tab fields that were defined in the template . # # # Message Lock When a template is added or applied to an envelope and the template has a locked email subject and message , that subject and message are used for the envelope and cannot be changed even if another locked template is subsequently added or applied to the envelope . The field & # x60 ; messageLock & # x60 ; is used to lock the email subject and message . If an email subject or message is entered before adding or applying a template with & # x60 ; messageLock & # x60 ; * * true * * , the email subject and message is overwritten with the locked email subject and message from the template . # # Envelope Status The status of sent envelopes can be determined through the DocuSign webhook system or by polling . Webhooks are highly recommended : they provide your application with the quickest updates when an envelope & # 39 ; s status changes . DocuSign limits polling to once every 15 minutes or less frequently . When a webhook is used , DocuSign calls your application , via the URL you provide , with a notification XML message . See the [ Webhook recipe ] ( https : / / www . 
docusign . com / developer - center / recipes / webhook - status ) for examples and live demos of using webhooks . # # Webhook Options The two webhook options , * eventNotification * and * Connect * use the same notification mechanism and message formats . eventNotification is used to create a webhook for a specific envelope sent via the API . Connect webhooks can be used for any envelope sent from an account , from any user , from any client . # # # eventNotification Webhooks The Envelopes : : create method includes an optional [ eventNotification object ] ( # definition - eventNotification ) that adds a webhook to the envelope . eventNotification webhooks are available for all DocuSign accounts with API access . # # # Connect Webhooks Connect can be used to create a webhook for all envelopes sent by all users in an account , either through the API or via other DocuSign clients ( web , mobile , etc ) . Connect configurations are independent of specific envelopes . A Connect configuration includes a filter that may be used to limit the webhook to specific users , envelope statuses , etc . Connect configurations may be created and managed using the [ ConnectConfigurations resource ] ( . . / . . / Connect / ConnectConfigurations ) . Configurations can also be created and managed from the Administration tool accessed by selecting \ & quot ; Go to Admin \ & quot ; from the menu next to your picture on the DocuSign web app . See the Integrations / Connect section of the Admin tool . For repeatability , and to minimize support questions , creating Connect configurations via the API is recommended , especially for ISVs . Connect is available for some DocuSign account types . Please contact DocuSign Sales for more information . # # Composite Templates The Composite Templates feature , like [ compositing in film production ] ( https : / / en . wikipedia . 
org / wiki / Compositing ) , enables you to * overlay * document , recipient , and tab definitions from multiple sources , including PDF Form Field definitions , templates defined on the server , and more . Each Composite Template consists of optional elements : server templates , inline templates , PDF Metadata templates , and documents . * The Composite Template ID is an optional element used to identify the composite template . It is used as a reference when adding document object information via a multi - part HTTP message . If used , the document content - disposition must include the & # x60 ; compositeTemplateId & # x60 ; to which the document should be added . If & # x60 ; compositeTemplateId & # x60 ; is not specified in the content - disposition , the document is applied based on the & # x60 ; documentId & # x60 ; only . If no document object is specified , the composite template inherits the first document . * Server Templates are server - side templates stored on the DocuSign platform . If supplied , they are overlaid into the envelope in the order of their Sequence value . * Inline Templates provide a container to add documents , recipients , tabs , and custom fields . If inline templates are supplied , they are overlaid into the envelope in the order of their Sequence value . * Document objects are optional structures that provide a container to pass in a document or form . If this object is not included , the composite template inherits the * first * document it finds from a server template or inline template , starting with the lowest sequence value . PDF Form objects are only transformed from the document object . DocuSign does not derive PDF form properties from server templates or inline templates . To instruct DocuSign to transform fields from the PDF form , set & # x60 ; transformPdfFields & # x60 ; to \ & quot ; true \ & quot ; for the document . See the Transform PDF Fields section for more information about process . 
* PDF Metadata Templates provide a container to embed design - time template information into a PDF document . DocuSign uses this information when processing the Envelope . This convention allows the document to carry the signing instructions with it , so that less information needs to be provided at run - time through an inline template or synchronized with an external structure like a server template . PDF Metadata templates are stored in the Metadata layer of a PDF in accordance with Acrobat & # 39 ; s XMP specification . DocuSign will only find PDF Metadata templates inside documents passed in the Document object ( see below ) . If supplied , the PDF metadata template will be overlaid into the envelope in the order of its Sequence value . # # # Compositing the definitions Each Composite Template adds a new document and templates overlay into the envelope . For each Composite Template these rules are applied : * Templates are overlaid in the order of their Sequence value . * If Document is not passed into the Composite Template & # 39 ; s & # x60 ; document & # x60 ; field , the * first * template & # 39 ; s document ( based on the template & # 39 ; s Sequence value ) is used . * Last in wins in all cases except for the document ( i . e . envelope information , recipient information , secure field information ) . There is no special casing . For example , if you want higher security on a tab , then that needs to be specified in a later template ( by sequence number ) then where the tab is included . If you want higher security on a role recipient , then it needs to be in a later template then where that role recipient is specified . * Recipient matching is based on Recipient Role and Routing Order . If there are matches , the recipient information is merged together . A final pass is done on all Composite Templates , after all template overlays have been applied , to collapse recipients with the same email , username and routing order . 
This prevents having the same recipients at the same routing order . * If you specify in a template that a recipient is locked , once that recipient is overlaid the recipient attributes can no longer be changed . The only items that can be changed for the recipient in this case are the email , username , access code and IDCheckInformationInput . * Tab matching is based on Tab Labels , Tab Types and Documents . If a Tab Label matches but the Document is not supplied , the Tab is overlaid for all the Documents . For example , if you have a simple inline template with only one tab in it with a label and a value , the Signature , Initial , Company , Envelope ID , User Name tabs will only be matched and collapsed if they fall in the exact same X and Y locations . * roleName and tabLabel matching is case sensitive . * The defaultRecipient field enables you to specify which recipient the generated tabs from a PDF form are mapped to . You can also set PDF form generated tabs to a recipient other than the DefaultRecipient by specifying the mapping of the tab label that is created to one of the template recipients . * You can use tabLabel wild carding to map a series of tabs from the PDF form . To use this you must end a tab label with \ & quot ; \ \ * \ & quot ; and then the system matches tabs that start with the label . * If no DefaultRecipient is specified , tabs must be explicitly mapped to recipients in order to be generated from the form . Unmapped form objects will not be generated into their DocuSign equivalents . ( In the case of Signature / Initials , the tabs will be disregarded entirely ; in the case of pdf text fields , the field data will be flattened on the Envelope document , but there will not be a corresponding DocuSign data tab . ) # # # Including the Document Content for Composite Templates Document content can be supplied inline , using the & # x60 ; documentBase64 & # x60 ; or can be included in a multi - part HTTP message . 
If a multi - part message is used and there are multiple Composite Templates , the document content - disposition can include the & # x60 ; compositeTemplateId & # x60 ; to which the document should be added . Using the & # x60 ; compositeTemplateId & # x60 ; sets which documents are associated with particular composite templates . An example of this usage is : & # x60 ; & # x60 ; & # x60 ; - - 5cd3320a - 5aac - 4453 - b3a4 - cbb52a4cba5d Content - Type : application / pdf Content - Disposition : file ; filename & # x3D ; \ & quot ; eula . pdf \ & quot ; ; documentId & # x3D ; 1 ; compositeTemplateId & # x3D ; \ & quot ; 1 \ & quot ; Content - Transfer - Encoding : base64 & # x60 ; & # x60 ; & # x60 ; # # # PDF Form Field Transformation Only the following PDF Form FieldTypes will be transformed to DocuSign tabs : CheckBox , DateTime , ListBox , Numeric , Password , Radio , Signature , and Text Field Properties that will be transformed : Read Only , Required , Max Length , Positions , and Initial Data . 
When transforming a * PDF Form Digital Signature Field , * the following rules are used : If the PDF Field Name Contains | Then the DocuSign Tab Will be - - - - - | - - - - - DocuSignSignHere or eSignSignHere | Signature DocuSignSignHereOptional or eSignSignHereOptional | Optional Signature DocuSignInitialHere or eSignInitialHere | Initials DocuSignInitialHereOptional or eSignInitialHereOptional | Optional Initials Any other PDF Form Digital Signature Field will be transformed to a DocuSign Signature tab When transforming * PDF Form Text Fields , * the following rules are used : If the PDF Field Name Contains | Then the DocuSign Tab Will be - - - - - | - - - - - DocuSignSignHere or eSignSignHere | Signature DocuSignSignHereOptional or eSignSignHereOptional | Optional Signature DocuSignInitialHere or eSignInitialHere | Initials DocuSignInitialHereOptional or eSignInitialHereOptional | Optional Initials DocuSignEnvelopeID or eSignEnvelopeID | EnvelopeID DocuSignCompany or eSignCompany | Company DocuSignDateSigned or eSignDateSigned | Date Signed DocuSignTitle or eSignTitle | Title DocuSignFullName or eSignFullName | Full Name DocuSignSignerAttachmentOptional or eSignSignerAttachmentOptional | Optional Signer Attachment Any other PDF Form Text Field will be transformed to a DocuSign data ( text ) tab . PDF Form Field Names that include \ & quot ; DocuSignIgnoreTransform \ & quot ; or \ & quot ; eSignIgnoreTransform \ & quot ; will not be transformed . PDF Form Date fields will be transformed to Date Signed fields if their name includes DocuSignDateSigned or eSignDateSigned . # # Template Email Subject Merge Fields This feature enables you to insert recipient name and email address merge fields into the email subject line when creating or sending from a template . 
The merge fields , based on the recipient & # 39 ; s & # x60 ; roleName & # x60 ; , are added to the & # x60 ; emailSubject & # x60 ; when the template is created or when the template is used to create an envelope . After a template sender adds the name and email information for the recipient and sends the envelope , the recipient information is automatically merged into the appropriate fields in the email subject line . Both the sender and the recipients will see the information in the email subject line for any emails associated with the template . This provides an easy way for senders to organize their envelope emails without having to open an envelope to check the recipient . If merging the recipient information into the subject line causes the subject line to exceed 100 characters , then any characters over the 100 character limit are not included in the subject line . For cases where the recipient name or email is expected to be long , you should consider placing the merge field at the start of the email subject . * To add a recipient & # 39 ; s name in the subject line add the following text in the & # x60 ; emailSubject & # x60 ; when creating the template or when sending an envelope from a template : [ [ & lt ; roleName & gt ; _ UserName ] ] Example : & # x60 ; \ & quot ; emailSubject \ & quot ; : \ & quot ; [ [ Signer 1 _ UserName ] ] , Please sign this NDA \ & quot ; , & # x60 ; * To add a recipient & # 39 ; s email address in the subject line add the following text in the emailSubject when creating the template or when sending an envelope from a template : [ [ & lt ; roleName & gt ; _ Email ] ] Example : & # x60 ; \ & quot ; emailSubject \ & quot ; : \ & quot ; [ [ Signer 1 _ Email ] ] , Please sign this NDA \ & quot ; , & # x60 ; In both cases the & lt ; roleName & gt ; is the recipient & # 39 ; s & # x60 ; roleName & # x60 ; in the template . 
For cases where another recipient ( such as an Agent , Editor , or Intermediary recipient ) is entering the name and email information for the recipient included in the email subject , then [ [ & lt ; roleName & gt ; _ UserName ] ] or [ [ & lt ; roleName & gt ; _ Email ] ] is shown in the email subject . # # Branding an envelope The following rules are used to determine the & # x60 ; brandId & # x60 ; used in an envelope : * If a & # x60 ; brandId & # x60 ; is specified in the envelope / template and that brandId is available to the account , that brand is used in the envelope . * If more than one template is used in an envelope and more than one & # x60 ; brandId & # x60 ; is specified , the first & # x60 ; brandId & # x60 ; specified is used throughout the envelope . * In cases where no brand is specified and the sender belongs to a Group ; if there is only one brand associated with the Group , then that brand is used in the envelope . Otherwise , the account & # 39 ; s default signing brand is used . * For envelopes that do not meet any of the previous criteria , the account & # 39 ; s default signing brand is used for the envelope . # # BCC Email address feature The BCC Email address feature is designed to provide a copy of all email communications for external archiving purposes . DocuSign recommends that envelopes sent using the BCC for Email Archive feature , including the BCC Email Override option , include additional signer authentication options . To send a copy of the envelope to a recipient who does not need to sign , don & # 39 ; t use the BCC Email field . Use a Carbon Copy or Certified Delivery Recipient type . # # Merge Recipient Roles for Draft Envelopes When an envelope with multiple templates is sent , the recipients from the templates are merged according to the template roles , and empty recipients are removed . 
When creating an envelope with multiple templates , but not sending it ( keeping it in a created state ) , duplicate recipients are not merged , which could cause leave duplicate recipients in the envelope . To prevent this , the query parameter & # x60 ; merge _ roles _ on _ draft & # x60 ; should be added when posting a draft envelope ( status & # x3D ; created ) with multiple templates . Doing this will merge template roles and remove empty recipients . # # # # # Note : DocuSign recommends that the & # x60 ; merge _ roles _ on _ draft & # x60 ; query parameter be used anytime you are creating an envelope with multiple templates and keeping it in draft ( created ) status .
* @ param accountId The external account number ( int ) or account ID Guid . ( required )
* @ param envelopeDefinition ( optional )
* @ return EnvelopeSummary */
public EnvelopeSummary createEnvelope ( String accountId , EnvelopeDefinition envelopeDefinition ) throws ApiException { } } | return createEnvelope ( accountId , envelopeDefinition , null ) ; |
public class ViewFetcher { /** * Returns true if the view is sufficiently shown , i . e . its vertical midpoint lies inside the scroll window and below its scroll parent ' s top .
 * @ param view the view to check , may be null
 * @ return true if the view is sufficiently shown */
public final boolean isViewSufficientlyShown ( View view ) { } } | if (view == null) { return false; } final int[] viewLocation = new int[2]; view.getLocationOnScreen(viewLocation); int parentTop = 0; final View scrollParent = getScrollOrListParent(view); if (scrollParent != null) { final int[] parentLocation = new int[2]; scrollParent.getLocationOnScreen(parentLocation); parentTop = parentLocation[1]; } // Shown means: the view's vertical midpoint is not past the window bottom and not above the parent top.
final float midpointY = viewLocation[1] + view.getHeight() / 2.0f; return midpointY <= getScrollListWindowHeight(view) && midpointY >= parentTop; |
public class CredentialFactory { /** * Initializes OAuth2 credential using preconfigured ServiceAccount settings on the local GCE VM .
 * See : < a href = " https : / / developers . google . com / compute / docs / authentication " > Authenticating from
 * Google Compute Engine < / a > .
 * @ return a Compute Engine credential whose access token has been refreshed from the metadata server
 * @ throws IOException if the access token cannot be obtained from the metadata server ( the token server URL is included in the message ) */
public Credential getCredentialFromMetadataServiceAccount ( ) throws IOException , GeneralSecurityException { } } | logger . atFine ( ) . log ( "getCredentialFromMetadataServiceAccount()" ) ; Credential cred = new ComputeCredentialWithRetry ( new ComputeCredential . Builder ( getStaticHttpTransport ( ) , JSON_FACTORY ) . setRequestInitializer ( new CredentialHttpRetryInitializer ( ) ) ) ; try { cred . refreshToken ( ) ; } catch ( IOException e ) { throw new IOException ( "Error getting access token from metadata server at: " + cred . getTokenServerEncodedUrl ( ) , e ) ; } return cred ; |
public class GrafeasV1Beta1Client { /** * Gets the specified occurrence .
* < p > Sample code :
* < pre > < code >
* try ( GrafeasV1Beta1Client grafeasV1Beta1Client = GrafeasV1Beta1Client . create ( ) ) {
* OccurrenceName name = OccurrenceName . of ( " [ PROJECT ] " , " [ OCCURRENCE ] " ) ;
* Occurrence response = grafeasV1Beta1Client . getOccurrence ( name ) ;
* < / code > < / pre >
* @ param name The name of the occurrence in the form of
* ` projects / [ PROJECT _ ID ] / occurrences / [ OCCURRENCE _ ID ] ` .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Occurrence getOccurrence ( OccurrenceName name ) { } } | GetOccurrenceRequest request = GetOccurrenceRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return getOccurrence ( request ) ; |
public class AABBUtils { /** * Translates the passed { @ link AxisAlignedBB } by the coordinates of the { @ link BlockPos } .
 * @ param pos the position supplying the x / y / z offsets
 * @ param aabb the bounding box to translate
 * @ return the translated bounding box , or the input box unchanged ( possibly null ) when either argument is null */
public static AxisAlignedBB offset ( BlockPos pos , AxisAlignedBB aabb ) { } } | // Null-safe: with no box or no position there is nothing to translate.
return (aabb == null || pos == null) ? aabb : aabb.offset(pos.getX(), pos.getY(), pos.getZ()); |
public class PHPMethods { /** * Shuffles the values of any array in place using the provided random generator .
* @ param < T >
* @ param array
* @ param rnd */
public static < T > void shuffle ( T [ ] array , Random rnd ) { } } | // Implementing Fisher - Yates shuffle
T tmp ; for ( int i = array . length - 1 ; i > 0 ; -- i ) { int index = rnd . nextInt ( i + 1 ) ; tmp = array [ index ] ; array [ index ] = array [ i ] ; array [ i ] = tmp ; } |
public class JavaCompiler { /** * Attribute a parse tree . No - op if this environment has already reached the ATTR state ; on errors in fail - over mode , partially attributed trees are post - processed so expression nodes still carry a type / symbol .
 * @ param env the attribution environment of the class to process
 * @ return the attributed parse tree */
public Env < AttrContext > attribute ( Env < AttrContext > env ) { } } | if ( compileStates . isDone ( env , CompileState . ATTR ) ) return env ; if ( verboseCompilePolicy ) printNote ( "[attribute " + env . enclClass . sym + "]" ) ; if ( verbose ) log . printVerbose ( "checking.attribution" , env . enclClass . sym ) ; if ( ! taskListener . isEmpty ( ) ) { TaskEvent e = new TaskEvent ( TaskEvent . Kind . ANALYZE , env . toplevel , env . enclClass . sym ) ; taskListener . started ( e ) ; } JavaFileObject prev = log . useSource ( env . enclClass . sym . sourcefile != null ? env . enclClass . sym . sourcefile : env . toplevel . sourcefile ) ; try { attr . attrib ( env ) ; if ( errorCount ( ) > 0 && ! shouldStop ( CompileState . ATTR ) ) { // if in fail - over mode , ensure that AST expression nodes
// are correctly initialized ( e . g . they have a type / symbol )
attr . postAttr ( env . tree ) ; } compileStates . put ( env , CompileState . ATTR ) ; } finally { log . useSource ( prev ) ; } return env ; |
public class HeapQuickSelectSketch { /** * Quickselects a new theta from the cache and rebuilds the cache as a clean hash table . The cache array stays the same size . Changes theta and thus the retained - entry count . */
private final void quickSelectAndRebuild ( ) { } } | final int arrLongs = 1 << lgArrLongs_ ; final int pivot = ( 1 << lgNomLongs_ ) + 1 ; // pivot for QS
thetaLong_ = selectExcludingZeros ( cache_ , curCount_ , pivot ) ; // messes up the cache _
// now we rebuild to clean up dirty data , update count , reconfigure as a hash table
final long [ ] tgtArr = new long [ arrLongs ] ; curCount_ = HashOperations . hashArrayInsert ( cache_ , tgtArr , lgArrLongs_ , thetaLong_ ) ; cache_ = tgtArr ; // hashTableThreshold stays the same |
public class ApiOvhStore { /** * Create a new product for partner
 * REST : POST / store / partner / { partnerId } / product
 * @ param partnerId [ required ] Id of the partner
 * @ param description [ required ] Description of product
 * @ param name [ required ] Name of product
 * @ param otherDetails [ required ] Additional information
 * @ param category [ required ] Name of product category
 * API beta */
public OvhEditResponse partner_partnerId_product_POST ( String partnerId , String category , String description , String name , String otherDetails ) throws IOException { } } | final String qPath = "/store/partner/{partnerId}/product"; final StringBuilder url = path(qPath, partnerId); // Collect the POST payload fields.
final HashMap<String, Object> payload = new HashMap<String, Object>(); addBody(payload, "category", category); addBody(payload, "description", description); addBody(payload, "name", name); addBody(payload, "otherDetails", otherDetails); return convertTo(exec(qPath, "POST", url.toString(), payload), OvhEditResponse.class); |
public class CertPathValidator { /** * Returns the default { @ code CertPathValidator } type as specified by
 * the { @ code certpathvalidator . type } security property , or the string
 * { @ literal " PKIX " } if no such property exists . Applications may use this
 * default with the { @ code getInstance } methods instead of a hard - coded type ,
 * and deployers may change it by setting the security property .
 * @ see java . security . Security security properties
 * @ return the default { @ code CertPathValidator } type as specified
 * by the { @ code certpathvalidator . type } security property , or the string
 * { @ literal " PKIX " } if no such property exists . */
public final static String getDefaultType ( ) { } } | // Read the security property inside a privileged block, then fall back to PKIX.
String defaultType = AccessController.doPrivileged(new PrivilegedAction<String>() { public String run() { return Security.getProperty(CPV_TYPE); } }); if (defaultType == null) { defaultType = "PKIX"; } return defaultType; |
public class CmsRpcAction { /** * Shows the ' loading message ' . < p >
* Overwrite to customize the message . < p >
* @ param blocking shows an blocking overlay if < code > true < / code > */
protected void show ( boolean blocking ) { } } | if ( blocking ) { m_notification = CmsNotification . get ( ) . sendBusy ( CmsNotification . Type . NORMAL , m_loadingMessage ) ; } else { m_notification = CmsNotification . get ( ) . sendSticky ( CmsNotification . Type . NORMAL , m_loadingMessage ) ; } |
public class Utilities { /** * Replies the fully qualified name with generic parameters , e . g . { @ code com . acme . Foo < T , U > } .
 * @ param type the type . Never < code > null < / code > .
 * @ return the qualified name ; the plain name when the project is pre - Java 5 or the parameters cannot be resolved . */
public static String getNameWithTypeParameters ( IType type ) { } } | assert type != null; final String qualifiedName = type.getFullyQualifiedName('.'); // Generics only exist on Java 5+ projects.
if (!JavaModelUtil.is50OrHigher(type.getJavaProject())) { return qualifiedName; } try { final ITypeParameter[] typeParameters = type.getTypeParameters(); if (typeParameters != null && typeParameters.length > 0) { // StringBuilder: this is single-threaded local work, no need for the legacy synchronized StringBuffer.
final StringBuilder buf = new StringBuilder(qualifiedName); buf.append('<'); for (int k = 0; k < typeParameters.length; ++k) { if (k > 0) { buf.append(", "); } buf.append(typeParameters[k].getElementName()); } buf.append('>'); return buf.toString(); } } catch (JavaModelException e) { // Fall through: reply the raw qualified name when parameters cannot be resolved.
} return qualifiedName; |
public class MapMaker { /** * Guides the allowed concurrency among update operations . Used as a hint for internal sizing . The
 * table is internally partitioned to try to permit the indicated number of concurrent updates
 * without contention . Because assignment of entries to these partitions is not necessarily
 * uniform , the actual concurrency observed may vary . Ideally , you should choose a value to
 * accommodate as many threads as will ever concurrently modify the table . Using a significantly
 * higher value than you need can waste space and time , and a significantly lower value can lead
 * to thread contention . But overestimates and underestimates within an order of magnitude do not
 * usually have much noticeable impact . A value of one permits only one thread to modify the map
 * at a time , but since read operations can proceed concurrently , this still yields higher
 * concurrency than full synchronization . Defaults to 4.
 * < p > < b > Note : < / b > Prior to Guava release 9.0 , the default was 16 . It is possible the default will
 * change again in the future . If you care about this value , you should always choose it
 * explicitly .
 * @ param concurrencyLevel the estimated number of concurrently updating threads
 * @ return this { @ code MapMaker } instance ( for chaining )
 * @ throws IllegalArgumentException if { @ code concurrencyLevel } is nonpositive
 * @ throws IllegalStateException if a concurrency level was already set */
@ Override public MapMaker concurrencyLevel ( int concurrencyLevel ) { } } | checkState ( this . concurrencyLevel == UNSET_INT , "concurrency level was already set to %s" , this . concurrencyLevel ) ; checkArgument ( concurrencyLevel > 0 ) ; this . concurrencyLevel = concurrencyLevel ; return this ; |
public class HostCertificateManager { /** * Installs a given SSL certificate on the server by delegating to the vSphere web service .
 * @ param cert SSL certificate in PEM format
 * @ throws HostConfigFault propagated from the vSphere service call
 * @ throws RuntimeFault propagated from the vSphere service call
 * @ throws RemoteException on communication failure with the remote service */
public void installServerCertificate ( String cert ) throws HostConfigFault , RuntimeFault , RemoteException { } } | getVimService ( ) . installServerCertificate ( getMOR ( ) , cert ) ; |
public class MediaExceptionProcessor { /** * Updates the entity via the wrapped service , translating Jersey client failures .
 * @ see com . microsoft . windowsazure . services . media . entityoperations . EntityContract # update ( com . microsoft . windowsazure . services . media . entityoperations . EntityUpdateOperation )
 * @ param updater the entity update operation to execute
 * @ return the service response
 * @ throws ServiceException wrapping any Jersey transport / protocol failure */
@ Override public String update ( EntityUpdateOperation updater ) throws ServiceException { } } | try { return service.update(updater); } catch (UniformInterfaceException | ClientHandlerException e) { // Multi-catch: both Jersey runtime failures get the identical wrap-and-process treatment.
throw processCatch(new ServiceException(e)); } |
public class CmsSitemapNavPosCalculator { /** * Helper method to generate a list of floats strictly between two given values . < p >
 * @ param min the lower bound ( excluded from the result )
 * @ param max the upper bound ( excluded from the result )
 * @ param steps the number of floats to generate
 * @ return the generated floats , evenly spaced between min and max */
private List < Float > interpolateBetween ( float min , float max , int steps ) { } } | float delta = (max - min) / (steps + 1); List<Float> result = new ArrayList<Float>(); float value = min; for (int i = 0; i < steps; i++) { value += delta; // Float.valueOf instead of the deprecated boxing constructor; accumulation kept to preserve the exact historical values.
result.add(Float.valueOf(value)); } return result; |
public class Formatter { /** * Writes a formatted string to this object ' s destination using the
 * specified locale , format string , and arguments .
 * @ param l
 * The { @ linkplain java . util . Locale locale } to apply during
 * formatting . If { @ code l } is { @ code null } then no localization
 * is applied . This does not change this object ' s locale that was
 * set during construction .
 * @ param format
 * A format string as described in < a href = " # syntax " > Format string
 * syntax < / a >
 * @ param args
 * Arguments referenced by the format specifiers in the format
 * string . If there are more arguments than format specifiers , the
 * extra arguments are ignored . The maximum number of arguments is
 * limited by the maximum dimension of a Java array as defined by
 * < cite > The Java & trade ; Virtual Machine Specification < / cite > .
 * @ throws IllegalFormatException
 * If a format string contains an illegal syntax , a format
 * specifier that is incompatible with the given arguments ,
 * insufficient arguments given the format string , or other
 * illegal conditions . For specification of all possible
 * formatting errors , see the < a href = " # detail " > Details < / a >
 * section of the formatter class specification .
 * @ throws FormatterClosedException
 * If this formatter has been closed by invoking its { @ link
 * # close ( ) } method
 * @ return This formatter */
public Formatter format ( Locale l , String format , Object ... args ) { } } | ensureOpen ( ) ; // ' last ' ( next line ) : index of the last argument referenced , for relative ( ' < ' ) indexing
int last = - 1 ; // ' lasto ' ( next line ) : index of the last ordinary ( unindexed ) argument consumed
int lasto = - 1 ; FormatString [ ] fsa = parse ( format ) ; for ( int i = 0 ; i < fsa . length ; i ++ ) { FormatString fs = fsa [ i ] ; int index = fs . index ( ) ; try { switch ( index ) { case - 2 : // fixed string , " % n " , or " % % "
fs . print ( null , l ) ; break ; case - 1 : // relative index
if ( last < 0 || ( args != null && last > args . length - 1 ) ) throw new MissingFormatArgumentException ( fs . toString ( ) ) ; fs . print ( ( args == null ? null : args [ last ] ) , l ) ; break ; case 0 : // ordinary index
lasto ++ ; last = lasto ; if ( args != null && lasto > args . length - 1 ) throw new MissingFormatArgumentException ( fs . toString ( ) ) ; fs . print ( ( args == null ? null : args [ lasto ] ) , l ) ; break ; default : // explicit index
last = index - 1 ; if ( args != null && last > args . length - 1 ) throw new MissingFormatArgumentException ( fs . toString ( ) ) ; fs . print ( ( args == null ? null : args [ last ] ) , l ) ; break ; } } catch ( IOException x ) { /* deferred , not thrown : Formatter records the last I / O failure */ lastException = x ; } } return this ; |
public class FLVReader { /** * { @ inheritDoc } */
@ Override public void close ( ) { } } | log . debug ( "Reader close: {}" , file . getName ( ) ) ; try { lock . lock ( ) ; if ( in != null ) { in . free ( ) ; in = null ; } if ( channel != null ) { try { channel . close ( ) ; fis . close ( ) ; } catch ( IOException e ) { log . error ( "FLVReader close" , e ) ; } } log . debug ( "Reader closed: {}" , file . getName ( ) ) ; } finally { if ( lock . isLocked ( ) ) { lock . unlock ( ) ; } } |
public class Configuration { /** * Get the configuration string as a content .
 * @ param key the key to look for in the configuration file
 * @ param o1 first string or content argument added to configuration text
 * @ param o2 second string or content argument added to configuration text
 * @ return a content tree for the text */
public Content getResource ( String key , Object o1 , Object o2 ) { } } | return getResource ( key , o1 , o2 , null ) ; |
public class ShellActivity { /** * Read required input attributes . In procedural mode , first prints the attribute list to the console ( mandatory ones marked with an asterisk ) before prompting for the values . */
private void readAttributes ( ) { } } | ItemRuleMap attributeItemRuleMap = getRequestRule ( ) . getAttributeItemRuleMap ( ) ; if ( attributeItemRuleMap != null && ! attributeItemRuleMap . isEmpty ( ) ) { ItemRuleList attributeItemRuleList = new ItemRuleList ( attributeItemRuleMap . values ( ) ) ; determineSimpleMode ( attributeItemRuleList ) ; if ( procedural ) { console . setStyle ( "GREEN" ) ; console . writeLine ( "Required attributes:" ) ; console . styleOff ( ) ; if ( ! simpleInputMode ) { for ( ItemRule itemRule : attributeItemRuleList ) { Token [ ] tokens = itemRule . getAllTokens ( ) ; if ( tokens == null ) { Token t = new Token ( TokenType . PARAMETER , itemRule . getName ( ) ) ; t . setDefaultValue ( itemRule . getDefaultValue ( ) ) ; tokens = new Token [ ] { t } ; } String mandatoryMarker = itemRule . isMandatory ( ) ? " * " : " " ; console . setStyle ( "YELLOW" ) ; console . write ( mandatoryMarker ) ; console . styleOff ( ) ; console . setStyle ( "bold" ) ; console . write ( itemRule . getName ( ) ) ; console . styleOff ( ) ; console . write ( ": " ) ; writeToken ( tokens ) ; console . writeLine ( ) ; } } } readRequiredAttributes ( attributeItemRuleList ) ; } |
public class CmsElementRename { /** * Returns a list of xml pages from the specified folder . < p >
 * @ return a list of xml pages from the specified folder , or < code > null < / code > if reading the resources fails ( the error is logged ) */
private List getXmlPages ( ) { } } | boolean isRecursive = Boolean . valueOf ( getParamRecursive ( ) ) . booleanValue ( ) ; // filter definition to read only the required resources
int xmlPageId ; try { xmlPageId = OpenCms . getResourceManager ( ) . getResourceType ( CmsResourceTypeXmlPage . getStaticTypeName ( ) ) . getTypeId ( ) ; } catch ( CmsLoaderException e1 ) { xmlPageId = CmsResourceTypeXmlPage . getStaticTypeId ( ) ; } CmsResourceFilter filter = CmsResourceFilter . IGNORE_EXPIRATION . addRequireType ( xmlPageId ) ; // trying to read the resources
List xmlPages = null ; try { xmlPages = getCms ( ) . readResources ( getParamResource ( ) , filter , isRecursive ) ; } catch ( CmsException e ) { if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } return xmlPages ; |
public class ExecutionTimesReport { /** * Read hints from all resources in a collection , retaining
 * < code > suiteNames < / code > . If < code > suiteNames < / code > is null ,
 * everything is retained .
 * @ param resources the resource collections whose hint streams are read and merged
 * @ param suiteNames the suite names to retain , or null to keep everything
 * @ return the merged hints , keyed by suite name
 * @ throws BuildException if a resource cannot be read ( the underlying cause is preserved ) */
public static Map < String , List < Long > > mergeHints ( Collection < ResourceCollection > resources , Collection < String > suiteNames ) { } } | final Map<String, List<Long>> hints = new HashMap<>(); for (ResourceCollection rc : resources) { final Iterator<Resource> it = rc.iterator(); while (it.hasNext()) { final Resource r = it.next(); // try-with-resources replaces the manual close that dropped the close failure's cause.
try (InputStream is = r.getInputStream()) { mergeHints(is, hints); // Early prune the hints to those we have on the list.
if (suiteNames != null) { hints.keySet().retainAll(suiteNames); } } catch (IOException e) { throw new BuildException("Could not read hints from resource: " + r.getDescription(), e); } } } return hints; |
public class OrmElf { /** * Load an object by its ID . The @ Id annotated field ( s ) of the object is used to
 * set query parameters .
 * @ param connection a SQL Connection object
 * @ param clazz the class of the object to load
 * @ param args the query parameter used to find the object by its ID
 * @ param < T > the type of the object to load
 * @ return the populated object
 * @ throws SQLException if a { @ link SQLException } occurs */
public static < T > T objectById ( Connection connection , Class < T > clazz , Object ... args ) throws SQLException { } } | return OrmReader . objectById ( connection , clazz , args ) ; |
public class ExtensionsInner { /** * Enables the Operations Management Suite ( OMS ) on the HDInsight cluster .
 * @ param resourceGroupName The name of the resource group .
 * @ param clusterName The name of the cluster .
 * @ param parameters The Operations Management Suite ( OMS ) workspace parameters .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the observable for the request ; emits a single ( null ) { @ code Void } value on success */
public Observable < Void > enableMonitoringAsync ( String resourceGroupName , String clusterName , ClusterMonitoringRequest parameters ) { } } | return enableMonitoringWithServiceResponseAsync ( resourceGroupName , clusterName , parameters ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class SPX { /** * Implementation method of the { @ link Externalizable } interface .
 * < p > The first byte contains the type of the object to be serialized , shifted four bits
 * to the left . After that the data bits follow in a bit - compressed representation . < / p >
 * @ serialData data layout see { @ code writeReplace ( ) } - method of object
 * to be serialized
 * @ param out output stream
 * @ throws IOException in any case of IO - failures ; an { @ code InvalidClassException } signals an unknown type tag */
@ Override public void writeExternal ( ObjectOutput out ) throws IOException { } } | switch ( this . type ) { case DATE_TYPE : this . writeDate ( out ) ; break ; case TIME_TYPE : this . writeTime ( out ) ; break ; case WEEKMODEL_TYPE : this . writeWeekmodel ( out ) ; break ; case MOMENT_TYPE : this . writeMoment ( out ) ; break ; case MACHINE_TIME_TYPE : this . writeMachineTime ( out ) ; break ; case DURATION_TYPE : this . writeDuration ( out ) ; break ; case DAY_PERIOD_TYPE : this . writeDayPeriod ( out ) ; break ; case TIMESTAMP_TYPE : this . writeTimestamp ( out ) ; break ; default : throw new InvalidClassException ( "Unknown serialized type." ) ; } |
public class ApiOvhOrder { /** * Create order
 * REST : POST / order / dedicated / server / { serviceName } / firewall / { duration }
 * @ param firewallModel [ required ] Firewall type
 * @ param serviceName [ required ] The internal name of your dedicated server
 * @ param duration [ required ] Duration */
public OvhOrder dedicated_server_serviceName_firewall_duration_POST ( String serviceName , String duration , OvhFirewallModelEnum firewallModel ) throws IOException { } } | final String qPath = "/order/dedicated/server/{serviceName}/firewall/{duration}"; final StringBuilder url = path(qPath, serviceName, duration); // Single-field POST payload.
final HashMap<String, Object> payload = new HashMap<String, Object>(); addBody(payload, "firewallModel", firewallModel); return convertTo(exec(qPath, "POST", url.toString(), payload), OvhOrder.class); |
public class AgentFilter { /** * The current health state of the agent . Values can be set to < b > HEALTHY < / b > or < b > UNHEALTHY < / b > .
 * @ param agentHealths
 * The current health state of the agent . Values can be set to < b > HEALTHY < / b > or < b > UNHEALTHY < / b > .
 * @ see AgentHealth */
public void setAgentHealths ( java . util . Collection < String > agentHealths ) { } } | // Defensive copy of the caller's collection; a null argument clears the field.
this.agentHealths = (agentHealths == null) ? null : new java.util.ArrayList<String>(agentHealths); |
public class N { /** * Mostly it ' s designed for one - step operation to complete the operation in one step .
 * < code > java . util . stream . Stream < / code > is preferred for multiple phases operation .
 * @ param c the source collection ; may be null or empty
 * @ return a new list of the distinct elements ; empty for a null / empty input */
public static < T > List < T > distinct ( final Collection < ? extends T > c ) { } } | // Null/empty input yields a fresh empty list; otherwise delegate to the ranged overload over the whole collection.
return N.isNullOrEmpty(c) ? new ArrayList<>() : distinct(c, 0, c.size()); |
public class Subtypes2 { /** * Get the first common superclass of arrays with different numbers of
 * dimensions . The two array types must have different dimension counts .
 * @ param aArrType
 * an ArrayType
 * @ param bArrType
 * another ArrayType
 * @ return ReferenceType representing first common superclass */
private ReferenceType computeFirstCommonSuperclassOfDifferentDimensionArrays ( ArrayType aArrType , ArrayType bArrType ) { } } | assert aArrType . getDimensions ( ) != bArrType . getDimensions ( ) ; boolean aBaseTypeIsPrimitive = ( aArrType . getBasicType ( ) instanceof BasicType ) ; boolean bBaseTypeIsPrimitive = ( bArrType . getBasicType ( ) instanceof BasicType ) ; if ( aBaseTypeIsPrimitive || bBaseTypeIsPrimitive ) { int minDimensions , maxDimensions ; if ( aArrType . getDimensions ( ) < bArrType . getDimensions ( ) ) { minDimensions = aArrType . getDimensions ( ) ; maxDimensions = bArrType . getDimensions ( ) ; } else { minDimensions = bArrType . getDimensions ( ) ; maxDimensions = aArrType . getDimensions ( ) ; } if ( minDimensions == 1 ) { // One of the types was something like int [ ] .
// The only possible common supertype is Object .
return Type . OBJECT ; } else { // Weird case : e . g . ,
// - first common supertype of int [ ] [ ] and char [ ] [ ] [ ] is
// Object [ ]
// because f . c . s . of int [ ] and char [ ] [ ] is Object
// - first common supertype of int [ ] [ ] [ ] and char [ ] [ ] [ ] [ ] [ ] is
// Object [ ] [ ]
// because f . c . s . of int [ ] and char [ ] [ ] [ ] is Object
return new ArrayType ( Type . OBJECT , maxDimensions - minDimensions ) ; } } else { // Both a and b have base types which are ObjectTypes .
// Since the arrays have different numbers of dimensions , the
// f . c . s . will have Object as its base type .
// E . g . , f . c . s . of Cat [ ] and Dog [ ] [ ] is Object [ ]
return new ArrayType ( Type . OBJECT , Math . min ( aArrType . getDimensions ( ) , bArrType . getDimensions ( ) ) ) ; } |
public class Jarafe { /** * Process a text string as input and return a List of LightAnnot objects that serve as
 * stand - off annotations over the input text .
 * @ param s Input string
 * @ return a list of LightAnnot objects */
public List < LightAnnot > processRawStringAsAnnotList ( String s ) { } } | final Annotation[] decoded = decoderPipeline.processRawStringAsAnnotList(s); final List<LightAnnot> result = new ArrayList<LightAnnot>(); // Convert each pipeline annotation into its lightweight stand-off form.
for (Annotation ann : decoded) { result.add(new LightAnnot(ann.st(), ann.en(), ann.typ().toString())); } return result; |
public class Fastpath { /** * Send a function call to the PostgreSQL backend .
* @ param fnId Function id
* @ param resultType True if the result is a numeric ( Integer or Long )
* @ param args FastpathArguments to pass to fastpath
* @ return null if no data , Integer if an integer result , Long if a long result , or byte [ ]
* otherwise
* @ throws SQLException if a database - access error occurs .
* @ deprecated please use { @ link # fastpath ( int , FastpathArg [ ] ) } */
@ Deprecated public Object fastpath ( int fnId , boolean resultType , FastpathArg [ ] args ) throws SQLException { } } | // Run it .
byte [ ] returnValue = fastpath ( fnId , args ) ; // Interpret results .
if ( ! resultType || returnValue == null ) { return returnValue ; } if ( returnValue . length == 4 ) { return ByteConverter . int4 ( returnValue , 0 ) ; } else if ( returnValue . length == 8 ) { return ByteConverter . int8 ( returnValue , 0 ) ; } else { throw new PSQLException ( GT . tr ( "Fastpath call {0} - No result was returned and we expected a numeric." , fnId ) , PSQLState . NO_DATA ) ; } |
public class SimpleNwwViewer { /** * A way to open the NWW viewer and get the tools panel , which has reference to the rest . Opens the map frame plus , when the NWW panel is available , separate layers and tools frames .
 * @ param appName the window title prefix ; a default name is used when < code > null < / code >
 * @ param onCloseAction the Swing default close operation for the frames ; < code > JFrame . EXIT _ ON _ CLOSE < / code > is used when negative
 * @ return the tools panel controller ( may be < code > null < / code > if the NWW panel could not be created or startup fails ) */
public static ToolsPanelController openNww ( String appName , int onCloseAction ) { } } | try { if ( appName == null ) { appName = APPNAME ; } if ( onCloseAction < 0 ) { onCloseAction = JFrame . EXIT_ON_CLOSE ; } Class < SimpleNwwViewer > class1 = SimpleNwwViewer . class ; ImageIcon icon = new ImageIcon ( class1 . getResource ( "/org/hortonmachine/images/hm150.png" ) ) ; Component nwwComponent = NwwPanel . createNwwPanel ( true ) ; NwwPanel wwjPanel = null ; LayersPanelController layerPanel = null ; ToolsPanelController toolsPanel = null ; if ( nwwComponent instanceof NwwPanel ) { wwjPanel = ( NwwPanel ) nwwComponent ; ( ( Component ) wwjPanel . getWwd ( ) ) . setPreferredSize ( new Dimension ( 500 , 500 ) ) ; wwjPanel . addOsmLayer ( ) ; ViewControlsLayer viewControls = wwjPanel . addViewControls ( ) ; viewControls . setScale ( 1.5 ) ; layerPanel = new LayersPanelController ( wwjPanel ) ; toolsPanel = new ToolsPanelController ( wwjPanel , layerPanel ) ; } final JFrame nwwFrame = new JFrame ( ) ; nwwFrame . setTitle ( appName + ": map view" ) ; nwwFrame . setIconImage ( icon . getImage ( ) ) ; // JPanel mapPanel = new JPanel ( new BorderLayout ( ) ) ;
// mapPanel . add ( nwwComponent , BorderLayout . CENTER ) ;
nwwFrame . getContentPane ( ) . add ( nwwComponent , BorderLayout . CENTER ) ; nwwFrame . setResizable ( true ) ; nwwFrame . setPreferredSize ( new Dimension ( 800 , 800 ) ) ; nwwFrame . pack ( ) ; WWUtil . alignComponent ( null , nwwFrame , AVKey . CENTER ) ; if ( wwjPanel != null ) { final JFrame layersFrame = new JFrame ( ) ; layersFrame . setTitle ( appName + ": layers view" ) ; layersFrame . setIconImage ( icon . getImage ( ) ) ; java . awt . EventQueue . invokeLater ( new Runnable ( ) { public void run ( ) { layersFrame . setVisible ( true ) ; } } ) ; layersFrame . getContentPane ( ) . add ( layerPanel , BorderLayout . CENTER ) ; layersFrame . setResizable ( true ) ; layersFrame . setPreferredSize ( new Dimension ( 400 , 500 ) ) ; layersFrame . setLocation ( 0 , 0 ) ; layersFrame . pack ( ) ; final JFrame toolsFrame = new JFrame ( ) ; toolsFrame . setTitle ( appName + ": tools view" ) ; toolsFrame . setIconImage ( icon . getImage ( ) ) ; java . awt . EventQueue . invokeLater ( new Runnable ( ) { public void run ( ) { toolsFrame . setVisible ( true ) ; } } ) ; toolsFrame . getContentPane ( ) . add ( toolsPanel , BorderLayout . CENTER ) ; toolsFrame . setResizable ( true ) ; toolsFrame . setPreferredSize ( new Dimension ( 400 , 400 ) ) ; toolsFrame . setLocation ( 0 , 510 ) ; toolsFrame . pack ( ) ; toolsFrame . setDefaultCloseOperation ( onCloseAction ) ; layersFrame . setDefaultCloseOperation ( onCloseAction ) ; } nwwFrame . setDefaultCloseOperation ( onCloseAction ) ; java . awt . EventQueue . invokeLater ( new Runnable ( ) { public void run ( ) { nwwFrame . setVisible ( true ) ; } } ) ; return toolsPanel ; } catch ( Exception e ) { Logging . logger ( ) . log ( java . util . logging . Level . SEVERE , "Exception at application start" , e ) ; } return null ; |
public class DocBrowserTable { /** * Retrieves data from the cache or from the annis rest service for a specific
 * document . The whole corpus closure is fetched and cached on the first call .
 * @ param document The document the data are fetched for .
 * @ return A list of meta data . Can be empty but never null . */
private List < Annotation > getDocMetaData ( String document ) { } } | // lookup up meta data in the cache
if ( ! docMetaDataCache . containsKey ( docBrowserPanel . getCorpus ( ) ) ) { // get the metadata for the corpus
WebResource res = Helper . getAnnisWebResource ( ) ; res = res . path ( "meta/corpus/" ) . path ( urlPathEscape . escape ( docBrowserPanel . getCorpus ( ) ) ) . path ( "closure" ) ; Map < String , List < Annotation > > metaDataMap = new HashMap < > ( ) ; // create a document - > metadata map
for ( Annotation a : res . get ( new Helper . AnnotationListType ( ) ) ) { if ( a . getAnnotationPath ( ) != null && ! a . getAnnotationPath ( ) . isEmpty ( ) && a . getType ( ) . equals ( "DOCUMENT" ) ) { String docName = a . getAnnotationPath ( ) . get ( 0 ) ; if ( ! metaDataMap . containsKey ( docName ) ) { metaDataMap . put ( docName , new ArrayList < Annotation > ( ) ) ; } metaDataMap . get ( docName ) . add ( a ) ; } } docMetaDataCache . put ( docBrowserPanel . getCorpus ( ) , metaDataMap ) ; } if ( docMetaDataCache . get ( docBrowserPanel . getCorpus ( ) ) . containsKey ( document ) ) { return docMetaDataCache . get ( docBrowserPanel . getCorpus ( ) ) . get ( document ) ; } else { return new ArrayList < Annotation > ( ) ; } |
public class StringUtils { /** * Convert a list of token objects ( e . g . a tokenized sentence ) into an array
* containing the token strings .
* @ param tokenizedSentence
* the list of token objects
* @ return the array of token strings */
public static String [ ] convertListTokenToArrayStrings ( final List < Token > tokenizedSentence ) { } } | final List < String > tokensList = new ArrayList < > ( ) ; for ( final Token token : tokenizedSentence ) { tokensList . add ( token . getTokenValue ( ) ) ; } return tokensList . toArray ( new String [ tokensList . size ( ) ] ) ; |
public class WebUtils { /** * Gets in progress authentication .
* @ return the in progress authentication */
public static Authentication getInProgressAuthentication ( ) { } } | val context = RequestContextHolder . getRequestContext ( ) ; val authentication = context != null ? WebUtils . getAuthentication ( context ) : null ; if ( authentication == null ) { return AuthenticationCredentialsThreadLocalBinder . getInProgressAuthentication ( ) ; } return authentication ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcOccupantTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class GetJobStatusPRequest { /** * < code > optional . alluxio . grpc . job . GetJobStatusPOptions options = 2 ; < / code > */
public alluxio . grpc . GetJobStatusPOptionsOrBuilder getOptionsOrBuilder ( ) { } } | return options_ == null ? alluxio . grpc . GetJobStatusPOptions . getDefaultInstance ( ) : options_ ; |
public class EntryTreeNode {
  /**
   * TreeNode Interfaces
   *
   * Returns the child tree node at the given index. Group entries expose
   * their nested entries as children; a next-catalog entry exposes the
   * referenced catalog (parsed lazily on first access) as its single child.
   * Returns null for any other entry kind.
   */
  public TreeNode getChildAt(int childIndex) {
    if (entry instanceof GroupEntry) {
      // group: wrap the nested entry at the requested position
      Entry childEntry = ((GroupEntry) entry).getEntries().get(childIndex);
      return new EntryTreeNode(this, childEntry, editors);
    } else if (entry instanceof NextCatalogEntry && childIndex == 0) {
      try {
        // lazily parse the referenced catalog the first time it is requested
        if (childCatalog == null) {
          childCatalog = CatalogUtilities.parseDocument(((NextCatalogEntry) entry).getCatalog().toURL());
        }
      } catch (Exception e) {
        // parse failure is logged and tolerated: a node is still returned,
        // possibly wrapping a null catalog — TODO confirm downstream handles that
        LOGGER.error("Exception caught trying to parse nested catalog in xml catalog", e);
      }
      return new CatalogTreeNode(childCatalog, editors);
    }
    return null;
  }
}
public class MpscLinkedQueue { /** * $ gen : ignore */
public static < E > MpscLinkedQueue < E > newMpscLinkedQueue ( ) { } } | if ( UnsafeAccess . SUPPORTS_GET_AND_SET ) { return new MpscLinkedQueue8 < E > ( ) ; } else { return new MpscLinkedQueue7 < E > ( ) ; } |
public class ConcurrentLinkedQueue {
  /**
   * Returns the first live (non-deleted) node on list, or null if none.
   * This is yet another variant of poll/peek; here returning the
   * first node, not element. We could make peek() a wrapper around
   * first(), but that would cost an extra volatile read of item,
   * and the need to add a retry loop to deal with the possibility
   * of losing a race to a concurrent poll().
   */
  Node<E> first() {
    restartFromHead:
    for (;;) {
      // traverse from the current head, skipping dead (item == null) nodes
      for (Node<E> h = head, p = h, q;;) {
        boolean hasItem = (p.item != null);
        if (hasItem || (q = p.next) == null) {
          // found a live node, or hit the end of the list: help advance
          // head to p before returning (amortizes head updates)
          updateHead(h, p);
          return hasItem ? p : null;
        } else if (sentinel() == q)
          // p's next points to the sentinel: p was unlinked by a
          // concurrent poll; the whole traversal must restart from the
          // (re-read) head
          continue restartFromHead;
        else
          p = q;
      }
    }
  }
}
public class OsmLayerParam {
  /**
   * Validate the properties have the correct values.
   *
   * Checks extent/tile-size array lengths and that at least one resolution
   * is configured, sorts the resolutions from coarsest to finest, and
   * validates the base URL. Each failed check raises via Assert.
   */
  public void postConstruct() {
    // maxExtent must describe a bounding box (4 coordinates)
    Assert.equals(NUMBER_OF_EXTENT_COORDS, this.maxExtent.length,
        "maxExtent must have exactly 4 elements to the array. Was: " + Arrays.toString(this.maxExtent));
    // tileSize is width x height
    Assert.equals(2, this.tileSize.length,
        "tileSize must have exactly 2 elements to the array. Was: " + Arrays.toString(this.tileSize));
    Assert.isTrue(this.resolutions.length > 0, "resolutions must have at least one value");
    // normalize to descending order (largest resolution first)
    Arrays.sort(this.resolutions, Collections.reverseOrder());
    Assert.isTrue(validateBaseUrl(), "invalid baseURL");
  }
}
public class Input { /** * Remove a controller listener that will no longer be notified
* @ param listener The listen to be removed */
public void removeControllerListener ( ControllerListener listener ) { } } | controllerListeners . remove ( listener ) ; if ( ! mouseListeners . contains ( listener ) && ! keyListeners . contains ( listener ) ) { allListeners . remove ( listener ) ; } |
public class ApiOvhMe { /** * List of all the orders the logged account has
* REST : GET / me / order
* @ param date _ to [ required ] Filter the value of date property ( < = )
* @ param date _ from [ required ] Filter the value of date property ( > = ) */
public ArrayList < Long > order_GET ( Date date_from , Date date_to ) throws IOException { } } | String qPath = "/me/order" ; StringBuilder sb = path ( qPath ) ; query ( sb , "date.from" , date_from ) ; query ( sb , "date.to" , date_to ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t2 ) ; |
public class Expressions { /** * Create a new Operation expression
* @ param type type of expression
* @ param operator operator
* @ param args operation arguments
* @ return operation expression */
public static < T extends Comparable < ? > > TimeOperation < T > timeOperation ( Class < ? extends T > type , Operator operator , Expression < ? > ... args ) { } } | return new TimeOperation < T > ( type , operator , args ) ; |
public class VersionValues {
  /**
   * Find the enumerated object matching the input name. If this name has
   * never been seen prior, then a new object is created by this call.
   *
   * @param name
   * @return VersionValues
   * @throws IllegalArgumentException
   *             - if format is incorrect
   * @throws NullPointerException
   *             if input name is null
   */
  public static VersionValues find(String name) {
    // match against the full input string; the trailing 'true' asks the
    // matcher to create (and register) a new instance for unseen names,
    // per the contract documented above
    return myMatcher.match(name, 0, name.length(), true);
  }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.